From 6e60e8b2b2bab889379b380a28a167a0edd9d1d3 Mon Sep 17 00:00:00 2001
From: Brad Bishop
Date: Thu, 1 Feb 2018 10:27:11 -0500
Subject: Yocto 2.3

Move OpenBMC to Yocto 2.3 (pyro).

Tested: Built and verified Witherspoon and Palmetto images

Change-Id: I50744030e771f4850afc2a93a10d3507e76d36bc
Signed-off-by: Brad Bishop
Resolves: openbmc/openbmc#2461
---
 import-layers/yocto-poky/README.hardware | 71 +-
 import-layers/yocto-poky/bitbake/LICENSE | 2 +
 import-layers/yocto-poky/bitbake/bin/bitbake | 2 +-
 .../yocto-poky/bitbake/bin/bitbake-diffsigs | 190 +-
 .../yocto-poky/bitbake/bin/bitbake-dumpsig | 73 +-
 .../yocto-poky/bitbake/bin/bitbake-layers | 42 +-
 .../yocto-poky/bitbake/bin/bitbake-worker | 41 +-
 import-layers/yocto-poky/bitbake/bin/toaster | 70 +-
 .../bitbake-user-manual-fetching.xml | 43 +-
 .../bitbake-user-manual-hello.xml | 8 +-
 .../bitbake-user-manual-intro.xml | 41 +-
 .../bitbake-user-manual-metadata.xml | 238 +-
 .../bitbake-user-manual-ref-variables.xml | 18 -
 import-layers/yocto-poky/bitbake/doc/bitbake.1 | 2 +-
 import-layers/yocto-poky/bitbake/lib/bb/COW.py | 24 +-
 .../yocto-poky/bitbake/lib/bb/__init__.py | 2 +-
 import-layers/yocto-poky/bitbake/lib/bb/build.py | 96 +-
 import-layers/yocto-poky/bitbake/lib/bb/cache.py | 16 +-
 .../yocto-poky/bitbake/lib/bb/codeparser.py | 37 +-
 import-layers/yocto-poky/bitbake/lib/bb/command.py | 302 +-
 import-layers/yocto-poky/bitbake/lib/bb/cooker.py | 304 +-
 .../yocto-poky/bitbake/lib/bb/cookerdata.py | 17 +-
 import-layers/yocto-poky/bitbake/lib/bb/data.py | 95 +-
 .../yocto-poky/bitbake/lib/bb/data_smart.py | 120 +-
 import-layers/yocto-poky/bitbake/lib/bb/event.py | 44 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/__init__.py | 151 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/bzr.py | 9 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/clearcase.py | 11 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/cvs.py | 16 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/git.py | 65 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/gitannex.py | 1 -
 .../yocto-poky/bitbake/lib/bb/fetch2/gitsm.py | 3 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/hg.py | 15 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/local.py | 18 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/npm.py | 58 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/osc.py | 15 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/perforce.py | 21 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/repo.py | 7 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/s3.py | 98 +
 .../yocto-poky/bitbake/lib/bb/fetch2/sftp.py | 6 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/ssh.py | 5 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/svn.py | 21 +-
 .../yocto-poky/bitbake/lib/bb/fetch2/wget.py | 37 +-
 import-layers/yocto-poky/bitbake/lib/bb/main.py | 108 +-
 .../yocto-poky/bitbake/lib/bb/monitordisk.py | 27 +-
 import-layers/yocto-poky/bitbake/lib/bb/msg.py | 15 +
 .../yocto-poky/bitbake/lib/bb/parse/__init__.py | 2 +-
 .../yocto-poky/bitbake/lib/bb/parse/ast.py | 79 +-
 .../bitbake/lib/bb/parse/parse_py/BBHandler.py | 24 +-
 .../bitbake/lib/bb/parse/parse_py/ConfHandler.py | 14 +-
 .../yocto-poky/bitbake/lib/bb/persist_data.py | 10 +-
 import-layers/yocto-poky/bitbake/lib/bb/process.py | 4 +-
 .../yocto-poky/bitbake/lib/bb/providers.py | 13 +-
 .../yocto-poky/bitbake/lib/bb/remotedata.py | 116 +
 .../yocto-poky/bitbake/lib/bb/runqueue.py | 305 +-
 .../yocto-poky/bitbake/lib/bb/server/process.py | 30 +-
 .../yocto-poky/bitbake/lib/bb/server/xmlrpc.py | 2 +-
 import-layers/yocto-poky/bitbake/lib/bb/siggen.py | 189 +-
 .../yocto-poky/bitbake/lib/bb/taskdata.py | 33 +-
 .../yocto-poky/bitbake/lib/bb/tests/codeparser.py | 66 +-
.../yocto-poky/bitbake/lib/bb/tests/data.py | 253 +- .../yocto-poky/bitbake/lib/bb/tests/fetch.py | 2 + .../yocto-poky/bitbake/lib/bb/tests/parse.py | 28 +- import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py | 490 +- .../bitbake/lib/bb/ui/buildinfohelper.py | 167 +- .../yocto-poky/bitbake/lib/bb/ui/depexp.py | 358 - .../yocto-poky/bitbake/lib/bb/ui/knotty.py | 50 +- .../yocto-poky/bitbake/lib/bb/ui/ncurses.py | 2 +- .../yocto-poky/bitbake/lib/bb/ui/taskexp.py | 342 + .../yocto-poky/bitbake/lib/bb/ui/toasterui.py | 10 +- import-layers/yocto-poky/bitbake/lib/bb/utils.py | 90 +- .../yocto-poky/bitbake/lib/bblayers/action.py | 2 +- .../yocto-poky/bitbake/lib/bblayers/common.py | 2 +- .../yocto-poky/bitbake/lib/bblayers/layerindex.py | 10 +- .../yocto-poky/bitbake/lib/bblayers/query.py | 52 +- .../yocto-poky/bitbake/lib/prserv/serv.py | 32 +- .../yocto-poky/bitbake/lib/simplediff/LICENSE | 22 + .../yocto-poky/bitbake/lib/simplediff/__init__.py | 198 + .../toaster/bldcontrol/localhostbecontroller.py | 164 +- .../bldcontrol/management/commands/runbuilds.py | 21 +- .../bitbake/lib/toaster/bldcontrol/tests.py | 141 - .../yocto-poky/bitbake/lib/toaster/contrib/README | 6 - .../bitbake/lib/toaster/contrib/tts/README | 41 - .../bitbake/lib/toaster/contrib/tts/TODO | 9 - .../bitbake/lib/toaster/contrib/tts/config.py | 98 - .../bitbake/lib/toaster/contrib/tts/launcher.py | 101 - .../bitbake/lib/toaster/contrib/tts/log/.create | 0 .../bitbake/lib/toaster/contrib/tts/recv.py | 56 - .../bitbake/lib/toaster/contrib/tts/runner.py | 222 - .../bitbake/lib/toaster/contrib/tts/settings.json | 5 - .../bitbake/lib/toaster/contrib/tts/shellutils.py | 141 - .../bitbake/lib/toaster/contrib/tts/tests.py | 115 - .../contrib/tts/toasteruitest/run_toastertests.py | 155 - .../tts/toasteruitest/toaster_automation_test.py | 2376 --- .../contrib/tts/toasteruitest/toaster_test.cfg | 25 - .../bitbake/lib/toaster/contrib/tts/urlcheck.py | 53 - .../bitbake/lib/toaster/contrib/tts/urllist.py | 39 - .../bitbake/lib/toaster/orm/fixtures/oe-core.xml | 38 +- .../bitbake/lib/toaster/orm/fixtures/poky.xml | 103 +- .../bitbake/lib/toaster/orm/fixtures/settings.xml | 2 +- .../toaster/orm/management/commands/lsupdates.py | 63 +- .../yocto-poky/bitbake/lib/toaster/orm/models.py | 126 +- .../bitbake/lib/toaster/tests/browser/README | 25 +- .../toaster/tests/browser/selenium_helpers_base.py | 13 +- .../tests/browser/test_layerdetails_page.py | 3 +- .../bitbake/lib/toaster/tests/builds/buildtest.py | 135 +- .../toaster/tests/builds/test_core_image_min.py | 50 +- .../bitbake/lib/toaster/tests/commands/__init__.py | 0 .../lib/toaster/tests/commands/test_loaddata.py | 61 + .../lib/toaster/tests/commands/test_lsupdates.py | 45 + .../lib/toaster/tests/commands/test_runbuilds.py | 88 + .../bitbake/lib/toaster/tests/eventreplay/README | 22 + .../lib/toaster/tests/eventreplay/__init__.py | 97 + .../lib/toaster/tests/functional/__init__.py | 0 .../toaster/tests/functional/functional_helpers.py | 122 + .../tests/functional/test_functional_basic.py | 243 + .../bitbake/lib/toaster/tests/views/README | 4 + .../bitbake/lib/toaster/tests/views/__init__.py | 0 .../bitbake/lib/toaster/tests/views/test_views.py | 540 + .../bitbake/lib/toaster/toastergui/api.py | 166 +- .../bitbake/lib/toaster/toastergui/buildtables.py | 3 + .../lib/toaster/toastergui/static/css/default.css | 1 - .../lib/toaster/toastergui/static/css/prettify.css | 1 - .../toaster/toastergui/static/js/customrecipe.js | 6 + .../toaster/toastergui/static/js/highlight.pack.js | 2 + 
.../toaster/toastergui/static/js/importlayer.js | 55 +- .../lib/toaster/toastergui/static/js/libtoaster.js | 12 +- .../lib/toaster/toastergui/static/js/prettify.js | 28 - .../toaster/toastergui/static/js/projectpage.js | 17 + .../lib/toaster/toastergui/static/js/table.js | 21 +- .../lib/toaster/toastergui/static/js/tests/test.js | 8 +- .../lib/toaster/toastergui/templates/base.html | 10 +- .../toastergui/templates/basebuildpage.html | 20 +- .../toaster/toastergui/templates/importlayer.html | 11 +- .../toastergui/templates/js-unit-tests.html | 2 +- .../toaster/toastergui/templates/layer_btn.html | 15 +- .../toaster/toastergui/templates/layerdetails.html | 1 + .../toaster/toastergui/templates/machine_btn.html | 7 +- .../lib/toaster/toastergui/templates/project.html | 3 +- .../toaster/toastergui/templates/projectconf.html | 11 +- .../toaster/toastergui/templates/recipe_btn.html | 8 +- .../bitbake/lib/toaster/toastergui/tests.py | 540 - .../bitbake/lib/toaster/toastergui/typeaheads.py | 112 +- .../bitbake/lib/toaster/toastergui/urls.py | 11 +- .../bitbake/lib/toaster/toastergui/views.py | 148 +- .../bitbake/lib/toaster/toastergui/widgets.py | 8 +- .../bitbake/lib/toaster/toastermain/settings.py | 3 +- .../toastermain/settings_production_example.py | 58 + .../documentation/bsp-guide/bsp-guide.xml | 59 +- .../yocto-poky/documentation/bsp-guide/bsp.xml | 640 +- .../dev-manual/dev-manual-common-tasks.xml | 968 +- .../documentation/dev-manual/dev-manual-intro.xml | 8 + .../documentation/dev-manual/dev-manual-newbie.xml | 367 +- .../documentation/dev-manual/dev-manual-start.xml | 92 +- .../documentation/dev-manual/dev-manual.xml | 57 +- .../kernel-dev/kernel-dev-advanced.xml | 715 +- .../documentation/kernel-dev/kernel-dev-common.xml | 171 +- .../documentation/kernel-dev/kernel-dev-faq.xml | 2 +- .../documentation/kernel-dev/kernel-dev.xml | 58 +- .../mega-manual/figures/yocto-environment.png | Bin 73095 -> 185562 bytes .../documentation/mega-manual/mega-manual.xml | 60 +- import-layers/yocto-poky/documentation/poky.ent | 31 +- .../profile-manual/profile-manual.xml | 58 +- .../documentation/ref-manual/closer-look.xml | 63 +- .../documentation/ref-manual/introduction.xml | 42 +- .../documentation/ref-manual/migration.xml | 764 +- .../documentation/ref-manual/ref-bitbake.xml | 2 +- .../documentation/ref-manual/ref-classes.xml | 193 +- .../ref-manual/ref-devtool-reference.xml | 45 + .../documentation/ref-manual/ref-features.xml | 10 + .../documentation/ref-manual/ref-images.xml | 3 - .../documentation/ref-manual/ref-manual.xml | 59 +- .../documentation/ref-manual/ref-qa-checks.xml | 39 - .../ref-manual/ref-release-process.xml | 254 + .../documentation/ref-manual/ref-structure.xml | 137 +- .../documentation/ref-manual/ref-tasks.xml | 167 +- .../documentation/ref-manual/ref-variables.xml | 594 +- .../documentation/ref-manual/technical-details.xml | 28 +- .../documentation/ref-manual/usingpoky.xml | 116 +- .../documentation/sdk-manual/sdk-appendix-mars.xml | 2 +- .../sdk-manual/sdk-appendix-obtain.xml | 10 +- .../documentation/sdk-manual/sdk-manual.xml | 58 +- .../sdk-manual/sdk-working-projects.xml | 4 +- .../toaster-manual/toaster-manual.xml | 57 +- .../yocto-poky/documentation/tools/mega-manual.sed | 40 +- .../yocto-project-qs/figures/yocto-environment.png | Bin 73095 -> 185562 bytes .../yocto-project-qs/yocto-project-qs.xml | 167 +- .../meta-poky/conf/distro/include/maintainers.inc | 190 +- .../conf/distro/include/poky-world-exclude.inc | 2 + .../meta-poky/conf/distro/poky-tiny.conf | 4 +- 
.../yocto-poky/meta-poky/conf/distro/poky.conf | 49 +- .../yocto-poky/meta-poky/conf/local.conf.sample | 2 +- .../meta-poky/conf/local.conf.sample.extended | 7 + .../yocto-poky/meta-poky/conf/toasterconf.json | 90 - .../yocto-poky/meta-selftest/lib/devtool/bbpath.py | 44 + .../lib/oeqa/runtime/cases/selftest.json | 6 + .../lib/oeqa/runtime/cases/selftest.py | 73 + .../meta-selftest/lib/oeqa/runtime/selftest.json | 6 - .../meta-selftest/lib/oeqa/runtime/selftest.py | 55 - .../meta-selftest/lib/recipetool/bbpath.py | 41 + .../recipes-test/aspell/aspell_0.0.0.1.bb | 2 + .../container-image/container-image-testpkg.bb | 8 + .../container-image/container-test-image.bb | 8 + .../recipes-test/devtool/devtool-test-localonly.bb | 7 + .../devtool/devtool-test-localonly/file1 | 1 + .../devtool/devtool-test-localonly/file2 | 1 + .../recipes-test/devtool/devtool-test-patch-gz.bb | 17 + .../devtool/devtool-test-patch-gz/readme.patch.gz | Bin 0 -> 449 bytes .../recipes-test/devtool/devtool-test-subdir.bb | 9 + .../devtool-test-subdir/devtool-test-subdir.tar.gz | Bin 0 -> 181 bytes .../devtool/devtool-test-subdir/testfile | 1 + .../devtool/devtool-upgrade-test1_1.5.3.bb | 2 + .../devtool-upgrade-test1_1.5.3.bb.upgraded | 2 + .../devtool/devtool-upgrade-test2_git.bb | 2 + .../devtool/devtool-upgrade-test2_git.bb.upgraded | 2 + .../meta-selftest/recipes-test/error/error.bb | 1 - .../recipes-test/images/oe-selftest-image.bb | 2 +- .../recipes-test/images/wic-image-minimal.bb | 8 +- .../recipes-test/images/wic-image-minimal.wks | 7 +- .../meta-selftest/recipes-test/m4/m4_%.bbappend | 2 + .../recipes-test/m4/m4_1.4.17.bbappend | 2 - .../recipes-test/postinst/postinst_1.0.bb | 126 + .../recipetool/selftest-recipetool-appendfile.bb | 2 + .../recipes-test/selftest-ed/selftest-ed_0.5.bb | 22 + .../recipes-test/selftest-ed/selftest-ed_1.14.1.bb | 35 + .../meta-selftest/wic/test_rawcopy_plugin.wks.in | 6 + .../yocto-poky/meta-selftest/wic/wictestdisk.wks | 7 + .../recipes-skeleton/useradd/useradd-example.bb | 5 +- .../meta-yocto-bsp/conf/machine/beaglebone.conf | 12 +- .../meta-yocto-bsp/conf/machine/edgerouter.conf | 6 +- .../meta-yocto-bsp/conf/machine/genericx86-64.conf | 2 +- .../meta-yocto-bsp/conf/machine/genericx86.conf | 2 +- .../conf/machine/include/genericx86-common.inc | 5 +- .../meta-yocto-bsp/conf/machine/mpc8315e-rdb.conf | 9 +- .../meta-yocto-bsp/lib/oeqa/selftest/gummiboot.py | 83 - .../xserver-xf86-config/beaglebone/xorg.conf | 20 + .../recipes-kernel/linux/linux-yocto_4.1.bbappend | 20 +- .../recipes-kernel/linux/linux-yocto_4.10.bbappend | 26 + .../recipes-kernel/linux/linux-yocto_4.4.bbappend | 20 +- .../recipes-kernel/linux/linux-yocto_4.8.bbappend | 26 - .../recipes-kernel/linux/linux-yocto_4.9.bbappend | 26 + .../yocto-poky/meta-yocto-bsp/wic/beaglebone.wks | 6 + .../yocto-poky/meta-yocto-bsp/wic/edgerouter.wks | 4 + .../yocto-poky/meta-yocto-bsp/wic/genericx86.wks | 7 + .../yocto-poky/meta-yocto-bsp/wic/mpc8315e-rdb.wks | 4 + .../yocto-poky/meta/classes/allarch.bbclass | 13 +- .../yocto-poky/meta/classes/archiver.bbclass | 169 +- .../yocto-poky/meta/classes/autotools.bbclass | 145 +- import-layers/yocto-poky/meta/classes/base.bbclass | 155 +- .../meta/classes/binconfig-disabled.bbclass | 1 + .../yocto-poky/meta/classes/binconfig.bbclass | 8 +- .../yocto-poky/meta/classes/blacklist.bbclass | 27 +- .../yocto-poky/meta/classes/bugzilla.bbclass | 28 +- .../yocto-poky/meta/classes/buildhistory.bbclass | 150 +- .../meta/classes/buildstats-summary.bbclass | 2 +- 
.../yocto-poky/meta/classes/buildstats.bbclass | 45 +- .../yocto-poky/meta/classes/ccache.bbclass | 13 +- .../yocto-poky/meta/classes/chrpath.bbclass | 17 +- .../yocto-poky/meta/classes/cmake.bbclass | 28 +- import-layers/yocto-poky/meta/classes/cml1.bbclass | 6 +- .../yocto-poky/meta/classes/compress_doc.bbclass | 42 +- .../meta/classes/copyleft_compliance.bbclass | 10 +- .../meta/classes/copyleft_filter.bbclass | 40 +- .../yocto-poky/meta/classes/core-image.bbclass | 4 + .../yocto-poky/meta/classes/cpan-base.bbclass | 24 +- .../yocto-poky/meta/classes/cross-canadian.bbclass | 44 +- .../yocto-poky/meta/classes/cross.bbclass | 13 +- .../yocto-poky/meta/classes/crosssdk.bbclass | 12 +- .../yocto-poky/meta/classes/cve-check.bbclass | 59 +- .../yocto-poky/meta/classes/debian.bbclass | 24 +- .../yocto-poky/meta/classes/devshell.bbclass | 10 +- .../yocto-poky/meta/classes/devupstream.bbclass | 48 + .../meta/classes/distro_features_check.bbclass | 8 +- .../yocto-poky/meta/classes/distrodata.bbclass | 152 +- .../yocto-poky/meta/classes/distutils-base.bbclass | 2 +- .../meta/classes/distutils-tools.bbclass | 2 +- .../yocto-poky/meta/classes/distutils.bbclass | 10 +- .../meta/classes/distutils3-base.bbclass | 2 +- .../yocto-poky/meta/classes/distutils3.bbclass | 6 +- .../yocto-poky/meta/classes/externalsrc.bbclass | 79 +- .../yocto-poky/meta/classes/extrausers.bbclass | 6 +- .../yocto-poky/meta/classes/fontcache.bbclass | 16 +- .../yocto-poky/meta/classes/fs-uuid.bbclass | 4 +- .../yocto-poky/meta/classes/gconf.bbclass | 15 +- .../yocto-poky/meta/classes/gettext.bbclass | 8 +- .../meta/classes/gio-module-cache.bbclass | 12 +- import-layers/yocto-poky/meta/classes/go.bbclass | 77 + .../yocto-poky/meta/classes/goarch.bbclass | 53 + .../meta/classes/gobject-introspection.bbclass | 2 +- .../yocto-poky/meta/classes/grub-efi.bbclass | 33 +- .../yocto-poky/meta/classes/gsettings.bbclass | 13 +- .../yocto-poky/meta/classes/gtk-doc.bbclass | 2 +- .../yocto-poky/meta/classes/gtk-icon-cache.bbclass | 16 +- .../meta/classes/gtk-immodules-cache.bbclass | 12 +- .../yocto-poky/meta/classes/gummiboot.bbclass | 121 - .../yocto-poky/meta/classes/gzipnative.bbclass | 5 - .../yocto-poky/meta/classes/icecc.bbclass | 4 +- .../meta/classes/image-buildinfo.bbclass | 21 +- .../meta/classes/image-container.bbclass | 21 + .../yocto-poky/meta/classes/image-live.bbclass | 8 +- .../yocto-poky/meta/classes/image-vm.bbclass | 14 +- .../yocto-poky/meta/classes/image.bbclass | 156 +- .../yocto-poky/meta/classes/image_types.bbclass | 131 +- .../meta/classes/image_types_uboot.bbclass | 23 - .../meta/classes/image_types_wic.bbclass | 117 + .../yocto-poky/meta/classes/insane.bbclass | 258 +- .../yocto-poky/meta/classes/kernel-arch.bbclass | 9 +- .../meta/classes/kernel-fitimage.bbclass | 69 +- .../yocto-poky/meta/classes/kernel-grub.bbclass | 2 +- .../meta/classes/kernel-module-split.bbclass | 51 +- .../yocto-poky/meta/classes/kernel-uboot.bbclass | 10 +- .../yocto-poky/meta/classes/kernel-uimage.bbclass | 34 +- .../yocto-poky/meta/classes/kernel-yocto.bbclass | 33 +- .../yocto-poky/meta/classes/kernel.bbclass | 120 +- .../yocto-poky/meta/classes/kernelsrc.bbclass | 6 +- .../yocto-poky/meta/classes/libc-common.bbclass | 10 +- .../yocto-poky/meta/classes/libc-package.bbclass | 95 +- .../yocto-poky/meta/classes/license.bbclass | 199 +- .../yocto-poky/meta/classes/live-vm-common.bbclass | 8 +- .../yocto-poky/meta/classes/manpages.bbclass | 5 + .../yocto-poky/meta/classes/metadata_scm.bbclass | 2 +- 
.../meta/classes/migrate_localcount.bbclass | 12 +- import-layers/yocto-poky/meta/classes/mime.bbclass | 15 +- .../yocto-poky/meta/classes/mirrors.bbclass | 17 +- .../yocto-poky/meta/classes/module.bbclass | 24 +- .../yocto-poky/meta/classes/multilib.bbclass | 39 +- .../meta/classes/multilib_global.bbclass | 33 +- .../meta/classes/multilib_header.bbclass | 7 - .../yocto-poky/meta/classes/native.bbclass | 32 +- .../yocto-poky/meta/classes/nativesdk.bbclass | 22 +- import-layers/yocto-poky/meta/classes/npm.bbclass | 26 +- .../yocto-poky/meta/classes/oelint.bbclass | 4 +- .../yocto-poky/meta/classes/own-mirrors.bbclass | 2 +- .../yocto-poky/meta/classes/package.bbclass | 350 +- .../yocto-poky/meta/classes/package_deb.bbclass | 133 +- .../yocto-poky/meta/classes/package_ipk.bbclass | 132 +- .../yocto-poky/meta/classes/package_rpm.bbclass | 277 +- .../yocto-poky/meta/classes/package_tar.bbclass | 17 +- .../yocto-poky/meta/classes/packagedata.bbclass | 10 +- .../meta/classes/packagefeed-stability.bbclass | 20 +- .../yocto-poky/meta/classes/packagegroup.bbclass | 24 +- .../yocto-poky/meta/classes/patch.bbclass | 157 +- .../yocto-poky/meta/classes/perl-version.bbclass | 24 + .../yocto-poky/meta/classes/pixbufcache.bbclass | 33 +- .../meta/classes/populate_sdk_base.bbclass | 56 +- .../meta/classes/populate_sdk_ext.bbclass | 187 +- .../yocto-poky/meta/classes/prexport.bbclass | 4 +- .../yocto-poky/meta/classes/ptest.bbclass | 4 +- import-layers/yocto-poky/meta/classes/qemu.bbclass | 10 +- .../yocto-poky/meta/classes/qemuboot.bbclass | 79 +- .../yocto-poky/meta/classes/recipe_sanity.bbclass | 37 +- .../meta/classes/relative_symlinks.bbclass | 5 + .../yocto-poky/meta/classes/relocatable.bbclass | 13 +- .../yocto-poky/meta/classes/report-error.bbclass | 22 +- .../yocto-poky/meta/classes/rm_work.bbclass | 67 +- .../meta/classes/rm_work_and_downloads.bbclass | 33 + .../meta/classes/rootfs-postcommands.bbclass | 92 +- .../yocto-poky/meta/classes/rootfs_deb.bbclass | 6 +- .../yocto-poky/meta/classes/rootfs_ipk.bbclass | 6 +- .../yocto-poky/meta/classes/rootfs_rpm.bbclass | 28 +- .../yocto-poky/meta/classes/sanity.bbclass | 199 +- .../yocto-poky/meta/classes/sign_ipk.bbclass | 12 +- .../meta/classes/sign_package_feed.bbclass | 4 +- .../yocto-poky/meta/classes/sign_rpm.bbclass | 19 +- .../yocto-poky/meta/classes/siteconfig.bbclass | 4 +- .../yocto-poky/meta/classes/siteinfo.bbclass | 16 +- import-layers/yocto-poky/meta/classes/spdx.bbclass | 26 +- .../yocto-poky/meta/classes/sstate.bbclass | 380 +- .../yocto-poky/meta/classes/staging.bbclass | 531 +- .../yocto-poky/meta/classes/syslinux.bbclass | 35 +- .../yocto-poky/meta/classes/systemd-boot.bbclass | 25 +- .../yocto-poky/meta/classes/systemd.bbclass | 46 +- .../yocto-poky/meta/classes/terminal.bbclass | 23 +- .../yocto-poky/meta/classes/testexport.bbclass | 233 +- .../yocto-poky/meta/classes/testimage.bbclass | 302 +- .../yocto-poky/meta/classes/testsdk.bbclass | 198 +- .../yocto-poky/meta/classes/texinfo.bbclass | 11 +- .../yocto-poky/meta/classes/tinderclient.bbclass | 82 +- .../yocto-poky/meta/classes/toaster.bbclass | 76 +- .../meta/classes/toolchain-scripts.bbclass | 7 +- .../yocto-poky/meta/classes/typecheck.bbclass | 2 +- .../yocto-poky/meta/classes/uboot-config.bbclass | 14 +- .../meta/classes/uboot-extlinux-config.bbclass | 56 +- .../yocto-poky/meta/classes/uboot-sign.bbclass | 8 +- .../yocto-poky/meta/classes/uninative.bbclass | 40 +- .../meta/classes/update-alternatives.bbclass | 72 +- .../yocto-poky/meta/classes/update-rc.d.bbclass | 56 +- 
.../meta/classes/upstream-version-is-even.bbclass | 2 +- .../meta/classes/useradd-staticids.bbclass | 122 +- .../yocto-poky/meta/classes/useradd.bbclass | 120 +- .../yocto-poky/meta/classes/useradd_base.bbclass | 18 +- .../yocto-poky/meta/classes/utility-tasks.bbclass | 12 +- .../yocto-poky/meta/classes/utils.bbclass | 70 +- import-layers/yocto-poky/meta/classes/waf.bbclass | 4 +- .../yocto-poky/meta/conf/abi_version.conf | 2 +- import-layers/yocto-poky/meta/conf/bitbake.conf | 112 +- .../yocto-poky/meta/conf/distro/defaultsetup.conf | 4 +- .../conf/distro/include/default-distrovars.inc | 2 +- .../meta/conf/distro/include/default-versions.inc | 3 - .../meta/conf/distro/include/distro_alias.inc | 3 +- .../meta/conf/distro/include/no-static-libs.inc | 6 +- .../meta/conf/distro/include/security_flags.inc | 11 +- .../meta/conf/distro/include/tclibc-glibc.inc | 6 +- .../meta/conf/distro/include/tclibc-musl.inc | 6 +- .../meta/conf/distro/include/tcmode-default.inc | 10 +- .../meta/conf/distro/include/uninative-flags.inc | 8 + .../yocto-poky/meta/conf/documentation.conf | 2 +- import-layers/yocto-poky/meta/conf/layer.conf | 4 +- import-layers/yocto-poky/meta/conf/licenses.conf | 5 +- .../meta/conf/machine/include/arm/arch-arm.inc | 2 +- .../meta/conf/machine/include/arm/arch-arm64.inc | 2 +- .../conf/machine/include/arm/arch-armv5-dsp.inc | 30 +- .../meta/conf/machine/include/arm/arch-armv5.inc | 22 +- .../meta/conf/machine/include/arm/arch-armv6.inc | 32 +- .../meta/conf/machine/include/arm/arch-armv7a.inc | 104 +- .../meta/conf/machine/include/arm/arch-armv7ve.inc | 104 +- .../conf/machine/include/arm/feature-arm-thumb.inc | 25 +- .../conf/machine/include/arm/feature-arm-vfp.inc | 10 +- .../meta/conf/machine/include/mips/README | 3 + .../meta/conf/machine/include/mips/arch-mips.inc | 16 +- .../machine/include/mips/feature-mips-mips16e.inc | 6 +- .../yocto-poky/meta/conf/machine/include/qemu.inc | 4 +- .../meta/conf/machine/include/qemuboot-x86.inc | 13 +- .../meta/conf/machine/include/tune-arm1136jf-s.inc | 2 +- .../meta/conf/machine/include/tune-cortexa15.inc | 34 +- .../meta/conf/machine/include/tune-cortexa17.inc | 34 +- .../meta/conf/machine/include/tune-cortexa5.inc | 26 +- .../meta/conf/machine/include/tune-cortexa7.inc | 34 +- .../meta/conf/machine/include/tune-cortexa8.inc | 26 +- .../meta/conf/machine/include/tune-cortexa9.inc | 38 +- .../meta/conf/machine/include/tune-mips32r6.inc | 8 +- .../meta/conf/machine/include/tune-mips64r6.inc | 26 + .../meta/conf/machine/include/tune-ppce500.inc | 2 +- .../meta/conf/machine/include/tune-ppce500v2.inc | 2 +- .../meta/conf/machine/include/x86-base.inc | 6 +- .../yocto-poky/meta/conf/machine/qemuarm.conf | 2 +- .../yocto-poky/meta/conf/machine/qemuarm64.conf | 4 +- .../yocto-poky/meta/conf/machine/qemuppc.conf | 2 +- .../yocto-poky/meta/conf/machine/qemux86-64.conf | 5 +- .../yocto-poky/meta/conf/machine/qemux86.conf | 7 +- .../yocto-poky/meta/conf/multiconfig/default.conf | 0 import-layers/yocto-poky/meta/conf/multilib.conf | 14 +- import-layers/yocto-poky/meta/conf/sanity.conf | 2 +- .../yocto-poky/meta/conf/toasterconf.json | 77 - .../yocto-poky/meta/files/ext-sdk-prepare.py | 2 + import-layers/yocto-poky/meta/files/fs-perms.txt | 5 +- .../meta/files/toolchain-shar-extract.sh | 22 + import-layers/yocto-poky/meta/lib/buildstats.py | 158 + .../meta/lib/oe/buildhistory_analysis.py | 143 +- .../yocto-poky/meta/lib/oe/classextend.py | 12 +- import-layers/yocto-poky/meta/lib/oe/classutils.py | 2 +- .../yocto-poky/meta/lib/oe/copy_buildsystem.py | 
12 +- import-layers/yocto-poky/meta/lib/oe/data.py | 34 +- .../yocto-poky/meta/lib/oe/distro_check.py | 305 +- import-layers/yocto-poky/meta/lib/oe/gpg_sign.py | 11 +- import-layers/yocto-poky/meta/lib/oe/lsb.py | 76 +- import-layers/yocto-poky/meta/lib/oe/manifest.py | 28 +- import-layers/yocto-poky/meta/lib/oe/package.py | 69 +- .../yocto-poky/meta/lib/oe/package_manager.py | 1436 +- .../yocto-poky/meta/lib/oe/packagedata.py | 2 +- .../yocto-poky/meta/lib/oe/packagegroup.py | 10 +- import-layers/yocto-poky/meta/lib/oe/patch.py | 129 +- import-layers/yocto-poky/meta/lib/oe/path.py | 33 +- import-layers/yocto-poky/meta/lib/oe/prservice.py | 26 +- import-layers/yocto-poky/meta/lib/oe/qa.py | 4 +- .../yocto-poky/meta/lib/oe/recipeutils.py | 109 +- import-layers/yocto-poky/meta/lib/oe/rootfs.py | 183 +- import-layers/yocto-poky/meta/lib/oe/sdk.py | 96 +- import-layers/yocto-poky/meta/lib/oe/sstatesig.py | 88 +- import-layers/yocto-poky/meta/lib/oe/terminal.py | 81 +- .../yocto-poky/meta/lib/oe/tests/__init__.py | 0 .../yocto-poky/meta/lib/oe/tests/test_elf.py | 21 - .../yocto-poky/meta/lib/oe/tests/test_license.py | 68 - .../yocto-poky/meta/lib/oe/tests/test_path.py | 89 - .../yocto-poky/meta/lib/oe/tests/test_types.py | 62 - .../yocto-poky/meta/lib/oe/tests/test_utils.py | 51 - import-layers/yocto-poky/meta/lib/oe/utils.py | 48 +- .../yocto-poky/meta/lib/oeqa/buildperf/base.py | 403 +- .../meta/lib/oeqa/buildperf/test_basic.py | 42 +- .../meta/lib/oeqa/controllers/masterimage.py | 20 +- import-layers/yocto-poky/meta/lib/oeqa/core/README | 38 + .../yocto-poky/meta/lib/oeqa/core/__init__.py | 0 .../yocto-poky/meta/lib/oeqa/core/case.py | 46 + .../meta/lib/oeqa/core/cases/__init__.py | 0 .../meta/lib/oeqa/core/cases/example/data.json | 1 + .../meta/lib/oeqa/core/cases/example/test_basic.py | 20 + .../yocto-poky/meta/lib/oeqa/core/context.py | 243 + .../meta/lib/oeqa/core/decorator/__init__.py | 71 + .../meta/lib/oeqa/core/decorator/data.py | 98 + .../meta/lib/oeqa/core/decorator/depends.py | 94 + .../meta/lib/oeqa/core/decorator/oeid.py | 23 + .../meta/lib/oeqa/core/decorator/oetag.py | 24 + .../meta/lib/oeqa/core/decorator/oetimeout.py | 25 + .../yocto-poky/meta/lib/oeqa/core/exception.py | 14 + .../yocto-poky/meta/lib/oeqa/core/loader.py | 272 + .../yocto-poky/meta/lib/oeqa/core/runner.py | 76 + .../meta/lib/oeqa/core/target/__init__.py | 33 + .../yocto-poky/meta/lib/oeqa/core/target/qemu.py | 45 + .../yocto-poky/meta/lib/oeqa/core/target/ssh.py | 266 + .../meta/lib/oeqa/core/tests/__init__.py | 0 .../meta/lib/oeqa/core/tests/cases/data.py | 20 + .../meta/lib/oeqa/core/tests/cases/depends.py | 38 + .../oeqa/core/tests/cases/loader/invalid/oeid.py | 15 + .../oeqa/core/tests/cases/loader/valid/another.py | 9 + .../meta/lib/oeqa/core/tests/cases/oeid.py | 18 + .../meta/lib/oeqa/core/tests/cases/oetag.py | 18 + .../meta/lib/oeqa/core/tests/cases/timeout.py | 18 + .../yocto-poky/meta/lib/oeqa/core/tests/common.py | 35 + .../meta/lib/oeqa/core/tests/test_data.py | 51 + .../meta/lib/oeqa/core/tests/test_decorators.py | 135 + .../meta/lib/oeqa/core/tests/test_loader.py | 86 + .../meta/lib/oeqa/core/tests/test_runner.py | 38 + .../meta/lib/oeqa/core/utils/__init__.py | 0 .../yocto-poky/meta/lib/oeqa/core/utils/misc.py | 44 + .../yocto-poky/meta/lib/oeqa/core/utils/path.py | 19 + .../yocto-poky/meta/lib/oeqa/core/utils/test.py | 86 + .../yocto-poky/meta/lib/oeqa/files/test.c | 26 + .../yocto-poky/meta/lib/oeqa/files/test.cpp | 3 + .../yocto-poky/meta/lib/oeqa/files/test.pl | 2 + 
.../yocto-poky/meta/lib/oeqa/files/test.py | 6 + import-layers/yocto-poky/meta/lib/oeqa/oetest.py | 164 +- .../yocto-poky/meta/lib/oeqa/runexported.py | 8 +- .../yocto-poky/meta/lib/oeqa/runtime/_ptest.py | 125 - .../yocto-poky/meta/lib/oeqa/runtime/_qemutiny.py | 9 - .../yocto-poky/meta/lib/oeqa/runtime/buildcvs.py | 31 - .../meta/lib/oeqa/runtime/buildgalculator.py | 23 - .../meta/lib/oeqa/runtime/buildiptables.py | 31 - .../yocto-poky/meta/lib/oeqa/runtime/case.py | 17 + .../meta/lib/oeqa/runtime/cases/_ptest.py | 103 + .../meta/lib/oeqa/runtime/cases/_qemutiny.py | 8 + .../meta/lib/oeqa/runtime/cases/buildcpio.py | 30 + .../meta/lib/oeqa/runtime/cases/buildgalculator.py | 28 + .../meta/lib/oeqa/runtime/cases/buildlzip.py | 34 + .../meta/lib/oeqa/runtime/cases/connman.py | 30 + .../yocto-poky/meta/lib/oeqa/runtime/cases/date.py | 38 + .../yocto-poky/meta/lib/oeqa/runtime/cases/df.py | 13 + .../yocto-poky/meta/lib/oeqa/runtime/cases/dnf.py | 123 + .../yocto-poky/meta/lib/oeqa/runtime/cases/gcc.py | 73 + .../meta/lib/oeqa/runtime/cases/kernelmodule.py | 40 + .../yocto-poky/meta/lib/oeqa/runtime/cases/ldd.py | 25 + .../meta/lib/oeqa/runtime/cases/logrotate.py | 42 + .../meta/lib/oeqa/runtime/cases/multilib.py | 41 + .../meta/lib/oeqa/runtime/cases/oe_syslog.py | 66 + .../yocto-poky/meta/lib/oeqa/runtime/cases/pam.py | 33 + .../meta/lib/oeqa/runtime/cases/parselogs.py | 359 + .../yocto-poky/meta/lib/oeqa/runtime/cases/perl.py | 37 + .../yocto-poky/meta/lib/oeqa/runtime/cases/ping.py | 24 + .../meta/lib/oeqa/runtime/cases/python.py | 43 + .../yocto-poky/meta/lib/oeqa/runtime/cases/rpm.py | 142 + .../meta/lib/oeqa/runtime/cases/scanelf.py | 26 + .../yocto-poky/meta/lib/oeqa/runtime/cases/scp.py | 33 + .../meta/lib/oeqa/runtime/cases/skeletoninit.py | 33 + .../yocto-poky/meta/lib/oeqa/runtime/cases/ssh.py | 15 + .../meta/lib/oeqa/runtime/cases/systemd.py | 181 + .../meta/lib/oeqa/runtime/cases/x32lib.py | 19 + .../yocto-poky/meta/lib/oeqa/runtime/cases/xorg.py | 17 + .../yocto-poky/meta/lib/oeqa/runtime/connman.py | 31 - .../yocto-poky/meta/lib/oeqa/runtime/context.py | 220 + .../yocto-poky/meta/lib/oeqa/runtime/date.py | 31 - .../meta/lib/oeqa/runtime/decorator/package.py | 53 + .../yocto-poky/meta/lib/oeqa/runtime/df.py | 12 - .../yocto-poky/meta/lib/oeqa/runtime/files/test.c | 26 - .../meta/lib/oeqa/runtime/files/test.cpp | 3 - .../yocto-poky/meta/lib/oeqa/runtime/files/test.pl | 2 - .../yocto-poky/meta/lib/oeqa/runtime/files/test.py | 6 - .../meta/lib/oeqa/runtime/files/testsdkmakefile | 5 - .../yocto-poky/meta/lib/oeqa/runtime/gcc.py | 47 - .../meta/lib/oeqa/runtime/kernelmodule.py | 34 - .../yocto-poky/meta/lib/oeqa/runtime/ldd.py | 21 - .../yocto-poky/meta/lib/oeqa/runtime/loader.py | 16 + .../yocto-poky/meta/lib/oeqa/runtime/logrotate.py | 28 - .../yocto-poky/meta/lib/oeqa/runtime/multilib.py | 42 - .../yocto-poky/meta/lib/oeqa/runtime/pam.py | 25 - .../yocto-poky/meta/lib/oeqa/runtime/parselogs.py | 313 - .../yocto-poky/meta/lib/oeqa/runtime/perl.py | 30 - .../yocto-poky/meta/lib/oeqa/runtime/ping.py | 22 - .../yocto-poky/meta/lib/oeqa/runtime/python.py | 35 - .../yocto-poky/meta/lib/oeqa/runtime/rpm.py | 120 - .../yocto-poky/meta/lib/oeqa/runtime/scanelf.py | 28 - .../yocto-poky/meta/lib/oeqa/runtime/scp.py | 22 - .../meta/lib/oeqa/runtime/skeletoninit.py | 29 - .../yocto-poky/meta/lib/oeqa/runtime/smart.py | 218 - .../yocto-poky/meta/lib/oeqa/runtime/ssh.py | 19 - .../yocto-poky/meta/lib/oeqa/runtime/syslog.py | 52 - .../yocto-poky/meta/lib/oeqa/runtime/systemd.py | 178 - 
.../meta/lib/oeqa/runtime/utils/__init__.py | 0 .../lib/oeqa/runtime/utils/targetbuildproject.py | 39 + .../yocto-poky/meta/lib/oeqa/runtime/x32lib.py | 18 - .../yocto-poky/meta/lib/oeqa/runtime/xorg.py | 16 - .../yocto-poky/meta/lib/oeqa/sdk/__init__.py | 3 - .../yocto-poky/meta/lib/oeqa/sdk/buildcvs.py | 25 - .../meta/lib/oeqa/sdk/buildgalculator.py | 27 - .../yocto-poky/meta/lib/oeqa/sdk/buildiptables.py | 26 - import-layers/yocto-poky/meta/lib/oeqa/sdk/case.py | 12 + .../meta/lib/oeqa/sdk/cases/buildcpio.py | 33 + .../meta/lib/oeqa/sdk/cases/buildgalculator.py | 35 + .../meta/lib/oeqa/sdk/cases/buildlzip.py | 35 + .../yocto-poky/meta/lib/oeqa/sdk/cases/gcc.py | 42 + .../yocto-poky/meta/lib/oeqa/sdk/cases/perl.py | 27 + .../yocto-poky/meta/lib/oeqa/sdk/cases/python.py | 31 + .../yocto-poky/meta/lib/oeqa/sdk/context.py | 133 + .../meta/lib/oeqa/sdk/files/testsdkmakefile | 5 + import-layers/yocto-poky/meta/lib/oeqa/sdk/gcc.py | 36 - import-layers/yocto-poky/meta/lib/oeqa/sdk/perl.py | 28 - .../yocto-poky/meta/lib/oeqa/sdk/python.py | 32 - .../yocto-poky/meta/lib/oeqa/sdk/utils/__init__.py | 0 .../meta/lib/oeqa/sdk/utils/sdkbuildproject.py | 45 + .../yocto-poky/meta/lib/oeqa/sdkext/__init__.py | 3 - .../yocto-poky/meta/lib/oeqa/sdkext/case.py | 21 + .../meta/lib/oeqa/sdkext/cases/devtool.py | 97 + .../meta/lib/oeqa/sdkext/cases/sdk_update.py | 39 + .../yocto-poky/meta/lib/oeqa/sdkext/context.py | 29 + .../yocto-poky/meta/lib/oeqa/sdkext/devtool.py | 108 - .../yocto-poky/meta/lib/oeqa/sdkext/sdk_update.py | 36 - .../yocto-poky/meta/lib/oeqa/selftest/_toaster.py | 320 - .../yocto-poky/meta/lib/oeqa/selftest/archiver.py | 101 +- .../yocto-poky/meta/lib/oeqa/selftest/base.py | 4 +- .../yocto-poky/meta/lib/oeqa/selftest/bblayers.py | 5 - .../yocto-poky/meta/lib/oeqa/selftest/bbtests.py | 51 +- .../meta/lib/oeqa/selftest/buildhistory.py | 5 +- .../meta/lib/oeqa/selftest/buildoptions.py | 27 +- .../meta/lib/oeqa/selftest/containerimage.py | 83 + .../yocto-poky/meta/lib/oeqa/selftest/devtool.py | 440 +- .../yocto-poky/meta/lib/oeqa/selftest/eSDK.py | 64 +- .../meta/lib/oeqa/selftest/image_typedep.py | 51 + .../meta/lib/oeqa/selftest/imagefeatures.py | 6 +- .../meta/lib/oeqa/selftest/layerappend.py | 3 +- .../yocto-poky/meta/lib/oeqa/selftest/liboe.py | 22 +- .../yocto-poky/meta/lib/oeqa/selftest/manifest.py | 7 +- .../meta/lib/oeqa/selftest/oelib/__init__.py | 0 .../meta/lib/oeqa/selftest/oelib/buildhistory.py | 88 + .../yocto-poky/meta/lib/oeqa/selftest/oelib/elf.py | 21 + .../meta/lib/oeqa/selftest/oelib/license.py | 68 + .../meta/lib/oeqa/selftest/oelib/path.py | 89 + .../meta/lib/oeqa/selftest/oelib/types.py | 50 + .../meta/lib/oeqa/selftest/oelib/utils.py | 51 + .../yocto-poky/meta/lib/oeqa/selftest/oescripts.py | 28 - .../yocto-poky/meta/lib/oeqa/selftest/pkgdata.py | 47 +- .../yocto-poky/meta/lib/oeqa/selftest/prservice.py | 9 +- .../meta/lib/oeqa/selftest/recipetool.py | 88 +- .../yocto-poky/meta/lib/oeqa/selftest/runqemu.py | 140 + .../meta/lib/oeqa/selftest/runtime-test.py | 176 +- .../yocto-poky/meta/lib/oeqa/selftest/signing.py | 37 +- .../yocto-poky/meta/lib/oeqa/selftest/sstate.py | 25 +- .../meta/lib/oeqa/selftest/sstatetests.py | 103 +- .../yocto-poky/meta/lib/oeqa/selftest/tinfoil.py | 190 + .../yocto-poky/meta/lib/oeqa/selftest/wic.py | 798 +- .../yocto-poky/meta/lib/oeqa/targetcontrol.py | 92 +- .../yocto-poky/meta/lib/oeqa/utils/__init__.py | 30 + .../yocto-poky/meta/lib/oeqa/utils/buildproject.py | 55 + .../yocto-poky/meta/lib/oeqa/utils/commands.py | 49 +- 
.../yocto-poky/meta/lib/oeqa/utils/decorators.py | 7 +- .../yocto-poky/meta/lib/oeqa/utils/dump.py | 11 +- .../yocto-poky/meta/lib/oeqa/utils/git.py | 20 +- .../yocto-poky/meta/lib/oeqa/utils/metadata.py | 118 + .../meta/lib/oeqa/utils/package_manager.py | 193 +- .../yocto-poky/meta/lib/oeqa/utils/qemurunner.py | 114 +- .../meta/lib/oeqa/utils/qemutinyrunner.py | 13 +- .../meta/lib/oeqa/utils/subprocesstweak.py | 19 + .../yocto-poky/meta/lib/oeqa/utils/targetbuild.py | 22 +- .../yocto-poky/meta/lib/oeqa/utils/testexport.py | 14 +- .../yocto-poky/meta/lib/rootfspostcommands.py | 56 + .../yocto-poky/meta/recipes-bsp/acpid/acpid.inc | 2 + .../meta/recipes-bsp/acpid/acpid_2.0.27.bb | 7 - .../meta/recipes-bsp/acpid/acpid_2.0.28.bb | 7 + .../meta/recipes-bsp/formfactor/formfactor_0.0.bb | 3 +- ...plicit-fall-through-so-Wextra-will-work-i.patch | 34 + .../aarch64-initplat.c-fix-const-qualifier.patch | 35 - .../meta/recipes-bsp/gnu-efi/gnu-efi_3.0.4.bb | 54 - .../meta/recipes-bsp/gnu-efi/gnu-efi_3.0.5.bb | 71 + ...-avoid-used-uninitialized-error-with-GCC7.patch | 36 + ...Use-AC_HEADER_MAJOR-to-find-device-macros.patch | 92 + ...-ppc-fix-switch-fallthrough-cases-with-GC.patch | 248 + ...0003-Add-gnulib-fix-gcc7-fallthrough.diff.patch | 38 + ...aining-cases-of-gcc-7-fallthrough-warning.patch | 175 + .../meta/recipes-bsp/grub/grub-0.97/autohell.patch | 21 - .../grub-0.97/grub-support-256byte-inode.patch | 101 - .../grub-0.97/grub_fix_for_automake-1.12.patch | 74 - .../grub/grub-0.97/no-reorder-functions.patch | 31 - .../grub/grub-0.97/objcopy-absolute.patch | 40 - .../meta/recipes-bsp/grub/grub-efi_2.00.bb | 17 +- .../yocto-poky/meta/recipes-bsp/grub/grub2.inc | 5 + .../yocto-poky/meta/recipes-bsp/grub/grub_0.97.bb | 35 - .../yocto-poky/meta/recipes-bsp/grub/grub_2.00.bb | 6 +- .../yocto-poky/meta/recipes-bsp/grub/grub_git.bb | 11 +- ...-C-syntax-errors-for-function-declaration.patch | 74 - .../gummiboot/gummiboot/fix-objcopy.patch | 45 - .../meta/recipes-bsp/gummiboot/gummiboot_git.bb | 37 - .../meta/recipes-bsp/keymaps/keymaps_1.0.bb | 2 +- .../meta/recipes-bsp/pciutils/pciutils_3.5.1.bb | 60 - .../meta/recipes-bsp/pciutils/pciutils_3.5.2.bb | 60 + .../meta/recipes-bsp/pcmciautils/pcmciautils.inc | 2 + .../meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb | 4 +- .../meta/recipes-bsp/systemd-boot/systemd-boot.bb | 38 - .../recipes-bsp/systemd-boot/systemd-boot_232.bb | 39 + .../recipes-bsp/u-boot/files/default-gcc.patch | 39 + .../recipes-bsp/u-boot/u-boot-common_2017.01.inc | 14 + .../recipes-bsp/u-boot/u-boot-fw-utils_2016.03.bb | 46 - .../recipes-bsp/u-boot/u-boot-fw-utils_2017.01.bb | 36 + .../recipes-bsp/u-boot/u-boot-mkimage_2016.03.bb | 32 - .../recipes-bsp/u-boot/u-boot-mkimage_2017.01.bb | 29 + .../yocto-poky/meta/recipes-bsp/u-boot/u-boot.inc | 14 +- .../meta/recipes-bsp/u-boot/u-boot_2016.03.bb | 10 - .../meta/recipes-bsp/u-boot/u-boot_2017.01.bb | 4 + .../meta/recipes-bsp/usbutils/usbutils_008.bb | 2 +- .../yocto-poky/meta/recipes-bsp/v86d/v86d/fbsetup | 3 - .../meta/recipes-bsp/v86d/v86d/uvesafb.conf | 2 - .../meta/recipes-bsp/v86d/v86d_0.1.10.bb | 42 +- .../meta/recipes-connectivity/avahi/avahi.inc | 12 +- .../recipes-connectivity/bind/bind_9.10.3-P3.bb | 2 +- .../meta/recipes-connectivity/bluez5/bluez5.inc | 14 +- .../bluez5/bluez5/cve-2017-1000250.patch | 34 + .../recipes-connectivity/bluez5/bluez5_5.41.bb | 55 - .../recipes-connectivity/bluez5/bluez5_5.43.bb | 55 + .../meta/recipes-connectivity/connman/connman.inc | 8 +- ...x-compile-on-musl-with-kernel-4.9-headers.patch | 64 + 
.../recipes-connectivity/connman/connman_1.33.bb | 3 +- .../meta/recipes-connectivity/dhcp/dhcp_4.3.4.bb | 18 - .../meta/recipes-connectivity/dhcp/dhcp_4.3.5.bb | 18 + .../recipes-connectivity/iproute2/iproute2.inc | 2 +- .../0001-libc-compat.h-add-musl-workaround.patch | 41 + .../iproute2/iproute2/iproute2-4.3.0-musl.patch | 85 - .../iproute2/iproute2_4.10.0.bb | 14 + .../iproute2/iproute2_4.7.0.bb | 13 - .../meta/recipes-connectivity/iw/iw_4.7.bb | 33 - .../meta/recipes-connectivity/iw/iw_4.9.bb | 33 + .../meta/recipes-connectivity/libpcap/libpcap.inc | 7 +- ...r_state_t.ai-usage-when-INET6-is-not-defi.patch | 41 + ...02-Add-missing-compiler_state_t-parameter.patch | 67 + .../libpcap/libpcap/aclocal.patch | 167 - .../libpcap/libpcap/disable-remote.patch | 36 + .../libpcap/libpcap/fix-grammar-deps.patch | 29 + .../libpcap/libpcap-pkgconfig-support.patch | 32 +- .../recipes-connectivity/libpcap/libpcap_1.7.4.bb | 26 - .../recipes-connectivity/libpcap/libpcap_1.8.1.bb | 31 + ...0001-Add-header-dependency-to-nciattach.o.patch | 35 + .../meta/recipes-connectivity/neard/neard_0.16.bb | 3 +- .../files/nfs-utils-debianize-start-statd.patch | 23 +- ...tatd-fix-a-segfault-caused-by-improper-us.patch | 113 - .../fix-protocol-minor-version-fall-back.patch | 55 - .../nfs-utils/nfs-utils/nfs-mountd.service | 8 +- .../nfs-utils/nfs-utils/nfs-server.service | 12 +- .../nfs-utils/nfs-utils/nfs-statd.service | 9 +- .../nfs-utils/nfs-utils/nfsserver | 2 +- .../nfs-utils/nfs-utils_1.3.3.bb | 151 - .../nfs-utils/nfs-utils_1.3.4.bb | 148 + .../meta/recipes-connectivity/ofono/ofono.inc | 2 +- .../meta/recipes-connectivity/ofono/ofono_1.18.bb | 10 - .../meta/recipes-connectivity/ofono/ofono_1.19.bb | 10 + .../openssh/openssh/fix-CVE-2016-8858.patch | 39 - .../meta/recipes-connectivity/openssh/openssh/init | 31 +- ...h-7.1p1-conditional-compile-des-in-cipher.patch | 39 +- ...h-7.1p1-conditional-compile-des-in-pkcs11.patch | 12 +- .../openssh/openssh/sshd_check_keys | 64 + .../openssh/openssh/sshdgenkeys.service | 16 +- .../recipes-connectivity/openssh/openssh_7.3p1.bb | 165 - .../recipes-connectivity/openssh/openssh_7.4p1.bb | 172 + .../meta/recipes-connectivity/openssl/openssl.inc | 16 +- .../openssl/openssl/0001-CVE-2017-3731.patch | 46 - ...build-with-clang-using-external-assembler.patch | 45 + .../openssl/openssl/0002-CVE-2017-3731.patch | 53 - .../openssl/openssl/CVE-2016-7055.patch | 43 - .../openssl/openssl/debian1.0.2/soname.patch | 13 + .../recipes-connectivity/openssl/openssl_1.0.2j.bb | 62 - .../recipes-connectivity/openssl/openssl_1.0.2k.bb | 62 + .../recipes-connectivity/portmap/portmap_6.0.bb | 2 +- .../ppp-dialin/ppp-dialin_0.1.bb | 3 +- ...01-ppp-Fix-compilation-errors-in-Makefile.patch | 51 +- .../ppp/ppp/0001-ppp-Remove-unneeded-include.patch | 43 + .../meta/recipes-connectivity/ppp/ppp_2.4.7.bb | 3 +- .../recipes-connectivity/socat/socat_1.7.3.1.bb | 38 - .../recipes-connectivity/socat/socat_1.7.3.2.bb | 41 + .../wireless-tools/wireless-tools_30.pre9.bb | 6 +- ...parameter-set-with-invalid-passphrase-cha.patch | 55 - ...ject-a-Credential-with-invalid-passphrase.patch | 86 - ...CRED-commands-with-newline-characters-in-.patch | 66 - ...ines-from-wpa_supplicant-config-network-o.patch | 86 - ...commands-with-newline-characters-in-the-s.patch | 54 - .../wpa-supplicant/key-replay-cve-multiple.patch | 1025 + .../wpa-supplicant/wpa-supplicant_2.5.bb | 113 - .../wpa-supplicant/wpa-supplicant_2.6.bb | 111 + .../recipes-core/base-files/base-files/profile | 31 +- 
.../recipes-core/base-files/base-files_3.0.14.bb | 17 +- .../recipes-core/base-passwd/base-passwd_3.5.29.bb | 43 +- .../meta/recipes-core/busybox/busybox.inc | 32 +- ...mproper-optimization-req.r.rtm_scope-may-.patch | 33 + .../0001-iproute-support-scope-.-Closes-8561.patch | 122 + .../busybox/busybox/CVE-2016-2147_2.patch | 2 +- .../busybox-tar-add-IF_FEATURE_-checks.patch | 70 + .../meta/recipes-core/busybox/busybox/defconfig | 10 +- .../meta/recipes-core/busybox/busybox_1.24.1.bb | 7 +- .../meta/recipes-core/busybox/busybox_git.bb | 52 - ...001-Fix-format-security-compilation-error.patch | 57 + .../console-tools/console-tools_0.3.2.bb | 1 + .../coreutils-6.9/coreutils-6.9-cp-i-u.patch | 120 - .../coreutils-6.9/coreutils-fix-install.patch | 101 - .../coreutils-6.9/coreutils-fix-texinfo.patch | 375 - .../coreutils/coreutils-6.9/coreutils-i18n.patch | 4051 ---- .../coreutils/coreutils-6.9/coreutils-ls-x.patch | 117 - .../coreutils-6.9/coreutils-overflow.patch | 19 - .../coreutils_fix_for_automake-1.12.patch | 32 - .../coreutils-6.9/fix_for_manpage_building.patch | 85 - .../coreutils/coreutils-6.9/futimens.patch | 63 - .../coreutils/coreutils-6.9/gnulib_m4.patch | 21 - .../coreutils/coreutils-6.9/loadavg.patch | 18 - .../coreutils/coreutils-6.9/man-touch.patch | 24 - ...need_charset_alias-when-building-for-musl.patch | 33 - ...e-report-processor-and-hardware-correctly.patch | 64 - .../coreutils-8.25/disable-ls-output-quoting.patch | 49 - .../coreutils-8.25/fix-selinux-flask.patch | 39 - .../man-decouple-manpages-from-build.patch | 27 - .../remove-usr-local-lib-from-m4.patch | 31 - ...need_charset_alias-when-building-for-musl.patch | 33 + ...0001-local.mk-fix-cross-compiling-problem.patch | 26 + ...e-report-processor-and-hardware-correctly.patch | 64 + .../coreutils-8.26/disable-ls-output-quoting.patch | 49 + .../coreutils-8.26/fix-selinux-flask.patch | 39 + .../man-decouple-manpages-from-build.patch | 27 + .../remove-usr-local-lib-from-m4.patch | 31 + .../meta/recipes-core/coreutils/coreutils_6.9.bb | 107 - .../meta/recipes-core/coreutils/coreutils_8.25.bb | 142 - .../meta/recipes-core/coreutils/coreutils_8.26.bb | 142 + .../meta/recipes-core/dbus/dbus-glib_0.106.bb | 4 - .../meta/recipes-core/dbus/dbus-glib_0.108.bb | 4 + .../meta/recipes-core/dbus/dbus-test_1.10.10.bb | 58 - .../meta/recipes-core/dbus/dbus-test_1.10.14.bb | 58 + .../meta/recipes-core/dbus/dbus_1.10.10.bb | 181 - .../meta/recipes-core/dbus/dbus_1.10.14.bb | 180 + .../meta/recipes-core/dropbear/dropbear.inc | 6 +- .../meta/recipes-core/dropbear/dropbear/init | 36 +- .../yocto-poky/meta/recipes-core/expat/expat.inc | 6 +- .../yocto-poky/meta/recipes-core/fts/fts.bb | 5 +- .../gettext/gettext-0.16.1/disable_java.patch | 39 - .../gettext-0.16.1/fix_aclocal_version.patch | 110 - .../gettext-0.16.1/fix_gnu_source_circular.patch | 348 - .../gettext-autoconf-lib-link-no-L.patch | 19 - .../gettext/gettext-0.16.1/gettext-vpath.patch | 20 - .../gettext-0.16.1/hardcode_macro_version.patch | 51 - .../gettext/gettext-0.16.1/linklib_from_0.17.patch | 720 - ...atement.c-timsort.h-fix-formatting-issues.patch | 87 + .../meta/recipes-core/gettext/gettext_0.16.1.bb | 124 - .../meta/recipes-core/gettext/gettext_0.19.8.1.bb | 6 +- ...st-for-pthread_getname_np-before-using-it.patch | 70 + .../glib-2.0/0002-tests-Ignore-y2k-warnings.patch | 42 - .../glib-2.0/glib-2.0/gi-exclude.patch | 59 - .../meta/recipes-core/glib-2.0/glib-2.0_2.48.2.bb | 26 - .../meta/recipes-core/glib-2.0/glib-2.0_2.50.3.bb | 25 + 
.../yocto-poky/meta/recipes-core/glib-2.0/glib.inc | 34 +- .../glib-networking/glib-networking_2.48.2.bb | 29 - .../glib-networking/glib-networking_2.50.0.bb | 29 + .../glibc/cross-localedef-native_2.24.bb | 52 - .../glibc/cross-localedef-native_2.25.bb | 53 + .../meta/recipes-core/glibc/glibc-collateral.inc | 8 +- .../meta/recipes-core/glibc/glibc-common.inc | 1 + .../meta/recipes-core/glibc/glibc-initial.inc | 29 +- .../meta/recipes-core/glibc/glibc-initial_2.24.bb | 9 - .../meta/recipes-core/glibc/glibc-initial_2.25.bb | 9 + .../meta/recipes-core/glibc/glibc-ld.inc | 27 +- .../meta/recipes-core/glibc/glibc-locale.inc | 16 +- .../meta/recipes-core/glibc/glibc-locale_2.24.bb | 1 - .../meta/recipes-core/glibc/glibc-locale_2.25.bb | 1 + .../meta/recipes-core/glibc/glibc-mtrace.inc | 4 +- .../meta/recipes-core/glibc/glibc-mtrace_2.24.bb | 1 - .../meta/recipes-core/glibc/glibc-mtrace_2.25.bb | 1 + .../meta/recipes-core/glibc/glibc-package.inc | 79 +- .../meta/recipes-core/glibc/glibc-scripts.inc | 4 +- .../meta/recipes-core/glibc/glibc-scripts_2.24.bb | 1 - .../meta/recipes-core/glibc/glibc-scripts_2.25.bb | 1 + .../yocto-poky/meta/recipes-core/glibc/glibc.inc | 8 +- .../glibc/0001-Add-atomic_exchange_relaxed.patch | 58 - ...1-Include-locale_t.h-compatibility-header.patch | 29 + ...-locale-fix-hard-coded-reference-to-gcc-E.patch | 39 - ...libc-Look-for-host-system-ld.so.cache-as-.patch | 12 +- ...operations-required-by-the-new-condition-.patch | 124 - ...libc-Fix-buffer-overrun-with-a-relocated-.patch | 10 +- ...d-pretty-printers-for-the-NPTL-lock-types.patch | 3197 --- ...libc-Raise-the-size-of-arrays-containing-.patch | 123 +- ...-implementation-that-provides-stronger-or.patch | 7171 ------- ...ivesdk-glibc-Allow-64-bit-atomics-for-x86.patch | 8 +- .../glibc/0005-Remove-__ASSUME_REQUEUE_PI.patch | 149 - ...500-e5500-e6500-603e-fsqrt-implementation.patch | 6 +- .../glibc/0006-Fix-atomic_fetch_xor_release.patch | 81 - ...-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch | 6 +- ...-Fix-undefined-reference-to-__sqrt_finite.patch | 6 +- ...qrt-f-are-now-inline-functions-and-call-o.patch | 6 +- ...bug-1443-which-explains-what-the-patch-do.patch | 6 +- ...n-libm-err-tab.pl-with-specific-dirs-in-S.patch | 6 +- ...qrt-f-are-now-inline-functions-and-call-o.patch | 6 +- ...ersion-output-matching-grok-gold-s-output.patch | 14 +- ...-configure.ac-handle-correctly-libc_cv_ro.patch | 6 +- .../glibc/glibc/0014-Add-unused-attribute.patch | 6 +- ...thin-the-path-sets-wrong-config-variables.patch | 6 +- ...-timezone-re-written-tzselect-as-posix-sh.patch | 6 +- ...move-bash-dependency-for-nscd-init-script.patch | 8 +- ...c-Cross-building-and-testing-instructions.patch | 6 +- ...019-eglibc-Help-bootstrap-cross-toolchain.patch | 14 +- .../glibc/0020-eglibc-cherry-picked-from.patch | 6 +- .../0021-eglibc-Clear-cache-lines-on-ppc8xx.patch | 6 +- ...0022-eglibc-Resolve-__fpscr_values-on-SH4.patch | 6 +- .../glibc/0023-eglibc-Install-PIC-archives.patch | 20 +- ...ward-port-cross-locale-generation-support.patch | 18 +- ...0025-Define-DUMMY_LOCALE_T-if-not-defined.patch | 6 +- .../glibc/glibc/0026-build_local_scope.patch | 56 - ....c-Make-_dl_build_local_scope-breadth-fir.patch | 56 + ...-locale-fix-hard-coded-reference-to-gcc-E.patch | 38 + ...0116-Fix-use-after-free-in-pthread_create.patch | 668 - ...rk-fno-omit-frame-pointer-support-on-i386.patch | 268 + .../recipes-core/glibc/glibc/CVE-2016-6323.patch | 39 - .../meta/recipes-core/glibc/glibc_2.24.bb | 148 - .../meta/recipes-core/glibc/glibc_2.25.bb | 141 + 
...-6-.defn-fix-inverted-checks-for-loopback.patch | 395 +- .../meta/recipes-core/ifupdown/ifupdown_0.8.16.bb | 46 + .../meta/recipes-core/ifupdown/ifupdown_0.8.2.bb | 49 - .../README_VirtualBox_Toaster.txt | 78 + .../images/build-appliance-image_15.0.0.bb | 28 +- .../meta/recipes-core/images/core-image-minimal.bb | 2 +- .../images/core-image-tiny-initramfs.bb | 42 + .../initrdscripts/files/init-install-efi-testfs.sh | 12 +- .../initrdscripts/files/init-install-efi.sh | 17 +- .../initrdscripts/files/init-install.sh | 5 +- .../initscripts-1.0/populate-volatile.sh | 14 +- .../initscripts/initscripts-1.0/sysfs.sh | 4 + .../initscripts/initscripts-1.0/volatiles | 6 +- .../recipes-core/initscripts/initscripts_1.0.bb | 2 +- .../yocto-poky/meta/recipes-core/kbd/kbd_2.0.3.bb | 53 - .../yocto-poky/meta/recipes-core/kbd/kbd_2.0.4.bb | 53 + ...run-the-python-tests-if-python-is-enabled.patch | 99 + .../libxml/libxml2/CVE-2016-9318.patch | 207 - .../libxml/libxml2/libxml2-CVE-2017-0663.patch | 40 + .../libxml/libxml2/libxml2-CVE-2017-5969.patch | 62 + .../libxml/libxml2/libxml2-CVE-2017-8872.patch | 37 + .../libxml2-CVE-2017-9047_CVE-2017-9048.patch | 103 + .../libxml2-CVE-2017-9049_CVE-2017-9050.patch | 291 + ...ibxml2-fix_and_simplify_xmlParseStartTag2.patch | 590 + .../meta/recipes-core/libxml/libxml2/runtest.patch | 25 +- .../meta/recipes-core/libxml/libxml2_2.9.4.bb | 43 +- .../meta/recipes-core/meta/build-sysroots.bb | 38 + .../meta/recipes-core/meta/buildtools-tarball.bb | 2 - .../recipes-core/meta/meta-environment-extsdk.bb | 4 +- .../meta/recipes-core/meta/meta-environment.bb | 23 +- .../recipes-core/meta/meta-extsdk-toolchain.bb | 8 +- .../meta/recipes-core/meta/meta-ide-support.bb | 3 +- .../meta/recipes-core/meta/meta-toolchain.bb | 3 - .../meta/recipes-core/meta/meta-world-pkgdata.bb | 16 +- .../meta/nativesdk-buildtools-perl-dummy.bb | 1 - .../meta/recipes-core/meta/package-index.bb | 24 +- .../meta/recipes-core/meta/signing-keys.bb | 20 +- .../meta/recipes-core/meta/testexport-tarball.bb | 2 - .../meta/recipes-core/meta/uninative-tarball.bb | 5 +- .../yocto-poky/meta/recipes-core/meta/wic-tools.bb | 32 + ...dynamic-linker-a-relative-symlink-to-libc.patch | 62 +- .../yocto-poky/meta/recipes-core/musl/musl.inc | 1 - .../yocto-poky/meta/recipes-core/musl/musl_git.bb | 6 +- .../ncurses/files/configure-reproducible.patch | 20 + .../ncurses/files/fix-cflags-mangle.patch | 18 + .../meta/recipes-core/ncurses/ncurses.inc | 8 +- .../recipes-core/ncurses/ncurses_6.0+20160625.bb | 10 - .../recipes-core/ncurses/ncurses_6.0+20161126.bb | 12 + ...netbase-add-rpcbind-as-an-alias-to-sunrpc.patch | 24 +- .../meta/recipes-core/netbase/netbase_5.3.bb | 25 - .../meta/recipes-core/netbase/netbase_5.4.bb | 25 + .../meta/recipes-core/os-release/os-release.bb | 5 +- .../meta/recipes-core/ovmf/ovmf-shell-image.bb | 17 + ...g-UefiHiiLib-Fix-incorrect-comparison-exp.patch | 39 + ...0002-ovmf-update-path-to-native-BaseTools.patch | 32 + ...makefile-adjust-to-build-in-under-bitbake.patch | 39 + ...ollDefaultKeys-application-for-enrolling-.patch | 1124 + .../VfrCompile-increase-path-length-limit.patch | 33 + .../recipes-core/ovmf/ovmf/ovmf-shell-image.wks | 4 + .../yocto-poky/meta/recipes-core/ovmf/ovmf_git.bb | 243 + .../nativesdk-packagegroup-sdk-host.bb | 3 +- .../packagegroups/packagegroup-base.bb | 4 +- .../packagegroups/packagegroup-core-sdk.bb | 7 +- .../packagegroup-core-tools-profile.bb | 11 +- .../packagegroups/packagegroup-self-hosted.bb | 3 + .../meta/recipes-core/psplash/psplash_git.bb | 22 +- 
.../readline/files/config-dirent-symbols.patch | 34 - .../readline/readline-5.2/configure-fix.patch | 26 - .../readline-5.2/fix-redundant-rpath.patch | 21 - .../readline/readline-6.3/configure-fix.patch | 35 - .../readline/readline-6.3/norpath.patch | 21 - .../readline/readline-7.0/configure-fix.patch | 35 + .../readline/readline-7.0/norpath.patch | 21 + .../meta/recipes-core/readline/readline_5.2.bb | 84 - .../meta/recipes-core/readline/readline_6.3.bb | 34 - .../meta/recipes-core/readline/readline_7.0.bb | 7 + .../recipes-core/systemd/systemd-compat-units.bb | 3 +- .../systemd/systemd-systemctl-native.bb | 2 +- ...ragment-refuse-units-with-errors-in-certa.patch | 329 + .../systemd/systemd/validate-user.patch | 856 - .../meta/recipes-core/systemd/systemd_232.bb | 23 +- .../sysvinit/sysvinit-inittab/start_getty | 2 +- .../sysvinit/sysvinit-inittab_2.88dsf.bb | 4 +- ...evert-rules-remove-firmware-loading-rules.patch | 28 + ...-remove-userspace-firmware-loading-suppor.patch | 364 + .../meta/recipes-core/udev/eudev_3.2.1.bb | 106 + .../yocto-poky/meta/recipes-core/udev/eudev_3.2.bb | 102 - .../meta/recipes-core/udev/udev-extraconf_1.1.bb | 3 +- .../meta/recipes-core/util-linux/util-linux.inc | 27 +- .../recipes-core/util-linux/util-linux_2.28.1.bb | 31 - .../recipes-core/util-linux/util-linux_2.29.1.bb | 31 + .../recipes-core/volatile-binds/volatile-binds.bb | 2 +- .../zlib/zlib-1.2.11/Makefile-runtests.patch | 38 + .../zlib/zlib-1.2.11/ldflags-tests.patch | 45 + .../zlib/zlib-1.2.11/remove.ldconfig.call.patch | 20 + .../meta/recipes-core/zlib/zlib-1.2.11/run-ptest | 2 + .../zlib/zlib-1.2.8/Makefile-runtests.patch | 38 - .../zlib/zlib-1.2.8/ldflags-tests.patch | 45 - .../zlib/zlib-1.2.8/remove.ldconfig.call.patch | 20 - .../meta/recipes-core/zlib/zlib-1.2.8/run-ptest | 2 - .../meta/recipes-core/zlib/zlib_1.2.11.bb | 62 + .../meta/recipes-core/zlib/zlib_1.2.8.bb | 59 - .../meta/recipes-devtools/apt/apt-native.inc | 4 +- .../meta/recipes-devtools/apt/apt-package.inc | 21 +- .../meta/recipes-devtools/autoconf/autoconf.inc | 7 +- .../AC_HEADER_MAJOR-port-to-glibc-2.25.patch | 162 + ...tion-in-shebangs-with-modern-use-warnings.patch | 120 + .../recipes-devtools/autoconf/autoconf_2.69.bb | 2 + .../autogen/autogen-native_5.18.10.bb | 36 - .../autogen/autogen-native_5.18.12.bb | 40 + ...pts.m4-regenerate-it-from-config-libopts..patch | 39 + ...2-autoopts-mk-tpl-config.sh-fix-perl-path.patch | 32 + .../meta/recipes-devtools/automake/automake.inc | 2 +- ...tion-in-shebangs-with-modern-use-warnings.patch | 99 + .../recipes-devtools/automake/automake_1.15.bb | 10 +- .../recipes-devtools/binutils/binutils-2.27.inc | 53 - .../recipes-devtools/binutils/binutils-2.28.inc | 85 + .../binutils/binutils-cross-canadian.inc | 3 + .../binutils/binutils-cross-canadian_2.27.bb | 3 - .../binutils/binutils-cross-canadian_2.28.bb | 3 + .../recipes-devtools/binutils/binutils-cross.inc | 8 + .../binutils/binutils-cross_2.27.bb | 3 - .../binutils/binutils-cross_2.28.bb | 3 + .../binutils/binutils-crosssdk_2.27.bb | 13 - .../binutils/binutils-crosssdk_2.28.bb | 13 + .../meta/recipes-devtools/binutils/binutils.inc | 8 +- .../binutils/0001-Generate-relocatable-SDKs.patch | 62 - ...nutils-crosssdk-Generate-relocatable-SDKs.patch | 62 + ...01-ppc-apuinfo-for-spe-parsed-incorrectly.patch | 37 - ...oss-Do-not-generate-linker-script-directo.patch | 61 + ...ure-widen-the-regexp-for-SH-architectures.patch | 56 - .../0003-Point-scripts-location-to-libdir.patch | 42 - ...003-gprof-add-uclibc-support-to-configure.patch | 31 + 
...te-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch | 41 - .../0004-Point-scripts-location-to-libdir.patch | 42 + .../0005-Explicitly-link-with-libm-on-uclibc.patch | 52 - ...te-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch | 41 + .../0006-Explicitly-link-with-libm-on-uclibc.patch | 52 + .../binutils/binutils/0006-Use-libtool-2.4.patch | 21177 ------------------- ...7-Add-the-armv5e-architecture-to-binutils.patch | 35 - .../binutils/binutils/0007-Use-libtool-2.4.patch | 21137 ++++++++++++++++++ ...8-Add-the-armv5e-architecture-to-binutils.patch | 35 + ...he-distro-compiler-point-to-the-wrong-ins.patch | 35 - ...he-distro-compiler-point-to-the-wrong-ins.patch | 35 + ...es-of-system-directories-when-cross-linki.patch | 273 - ...-rpath-in-libtool-when-sysroot-is-enabled.patch | 52 - ...es-of-system-directories-when-cross-linki.patch | 273 + ...Change-default-emulation-for-mips64-linux.patch | 59 - ...-rpath-in-libtool-when-sysroot-is-enabled.patch | 52 + .../0012-Add-support-for-Netlogic-XLP.patch | 413 - ...Change-default-emulation-for-mips64-linux.patch | 59 + .../0013-Add-support-for-Netlogic-XLP.patch | 399 + ...ncorrect-assembling-for-ppc-wait-mnemonic.patch | 36 - ...ncorrect-assembling-for-ppc-wait-mnemonic.patch | 36 + .../binutils/0014-libtool-remove-rpath.patch | 100 - ..._sections-check-to-only-consider-compact-.patch | 97 - .../0015-binutils-mips-gas-pic-relax-linkonce.diff | 65 - .../0015-sync-with-OE-libtool-changes.patch | 89 + .../binutils/0016-Detect-64-bit-MIPS-targets.patch | 50 + ...lt-in-ARM-linker-when-trying-to-parse-a-b.patch | 31 - ...eration-of-alignment-frags-in-code-sectio.patch | 139 - ...-lookup-of-file-line-information-for-erro.patch | 75 + ...segfault-in-_bfd_dwarf2_find_nearest_line.patch | 33 + .../binutils/binutils/CVE-2017-6965.patch | 23 +- .../binutils/binutils/CVE-2017-6966.patch | 27 +- .../binutils/binutils/CVE-2017-6969.patch | 15 +- .../binutils/binutils/CVE-2017-6969_2.patch | 16 +- .../binutils/binutils/CVE-2017-7209.patch | 11 +- .../binutils/binutils/CVE-2017-7210.patch | 6 +- .../binutils/binutils/CVE-2017-7223.patch | 52 + .../binutils/binutils/CVE-2017-7614.patch | 103 + .../binutils/binutils/CVE-2017-8393.patch | 205 + .../binutils/binutils/CVE-2017-8394.patch | 118 + .../binutils/binutils/CVE-2017-8395.patch | 72 + .../binutils/binutils/CVE-2017-8396_8397.patch | 102 + .../binutils/binutils/CVE-2017-8398.patch | 147 + .../binutils/binutils/CVE-2017-8421.patch | 52 + .../binutils/binutils/CVE-2017-9038_9044.patch | 51 + .../binutils/binutils/CVE-2017-9039.patch | 61 + .../binutils/binutils/CVE-2017-9040_9042.patch | 57 + .../binutils/binutils/CVE-2017-9742.patch | 45 + .../binutils/binutils/CVE-2017-9744.patch | 46 + .../binutils/binutils/CVE-2017-9745.patch | 35 + .../binutils/binutils/CVE-2017-9746.patch | 91 + .../binutils/binutils/CVE-2017-9747.patch | 43 + .../binutils/binutils/CVE-2017-9748.patch | 46 + .../binutils/binutils/CVE-2017-9749.patch | 77 + .../binutils/binutils/CVE-2017-9750.patch | 247 + .../binutils/binutils/CVE-2017-9751.patch | 3748 ++++ .../binutils/binutils/CVE-2017-9752.patch | 208 + .../binutils/binutils/CVE-2017-9753.patch | 79 + .../binutils/binutils/CVE-2017-9755.patch | 63 + .../binutils/binutils/CVE-2017-9756.patch | 50 + .../binutils/binutils/CVE-2017-9954.patch | 58 + .../binutils/binutils/CVE-2017-9955_1.patch | 168 + .../binutils/binutils/CVE-2017-9955_2.patch | 122 + .../binutils/binutils/CVE-2017-9955_3.patch | 48 + .../binutils/binutils/CVE-2017-9955_4.patch | 51 + 
.../binutils/binutils/CVE-2017-9955_5.patch | 89 + .../binutils/binutils/CVE-2017-9955_6.patch | 56 + .../binutils/binutils/CVE-2017-9955_7.patch | 80 + .../binutils/binutils/CVE-2017-9955_8.patch | 187 + .../binutils/binutils/CVE-2017-9955_9.patch | 361 + .../recipes-devtools/binutils/binutils_2.27.bb | 45 - .../recipes-devtools/binutils/binutils_2.28.bb | 45 + .../bison/bison/bison-2.3_m4.patch | 591 - .../meta/recipes-devtools/bison/bison_2.3.bb | 26 - .../btrfs-tools/btrfs-tools/fix-parallel.patch | 32 - .../btrfs-tools/btrfs-tools_4.7.1.bb | 33 - .../btrfs-tools/btrfs-tools_4.9.1.bb | 33 + .../build-compare/build-compare_git.bb | 6 +- .../meta/recipes-devtools/ccache/ccache.inc | 4 +- .../meta/recipes-devtools/ccache/ccache_3.2.5.bb | 11 - .../meta/recipes-devtools/ccache/ccache_3.3.4.bb | 12 + .../cdrtools/cdrtools-native_3.01.bb | 3 + .../meta/recipes-devtools/chrpath/chrpath_0.16.bb | 2 +- .../recipes-devtools/cmake/cmake-native_3.6.1.bb | 36 - .../recipes-devtools/cmake/cmake-native_3.7.2.bb | 37 + .../meta/recipes-devtools/cmake/cmake.inc | 12 +- ...L-tests-Remove-format-security-from-flags.patch | 33 + ...void-gcc-warnings-with-Wstrict-prototypes.patch | 42 + .../meta/recipes-devtools/cmake/cmake_3.6.1.bb | 48 - .../meta/recipes-devtools/cmake/cmake_3.7.2.bb | 49 + ...0001-Correctly-install-the-shared-library.patch | 28 + ...-set-PYTHON_INSTALL_DIR-by-running-python.patch | 27 + .../createrepo-c/createrepo-c_git.bb | 31 + .../cve-check-tool/cve-check-tool_5.6.4.bb | 10 +- ...ow-overriding-default-CA-certificate-file.patch | 215 + ...are-computed-vs-expected-sha256-digit-str.patch | 52 + .../recipes-devtools/devel-config/distcc-config.bb | 3 +- .../devel-config/nfs-export-root.bb | 3 +- .../recipes-devtools/diffstat/diffstat_1.61.bb | 2 +- .../meta/recipes-devtools/distcc/distcc_3.2.bb | 4 - ...Corretly-install-tmpfiles.d-configuration.patch | 21 + ...hardcode-etc-and-systemd-unit-directories.patch | 29 + ...0001-Do-not-prepend-installroot-to-logdir.patch | 31 + ...-set-PYTHON_INSTALL_DIR-by-running-python.patch | 27 + .../dnf/0030-Run-python-scripts-using-env.patch | 48 + .../meta/recipes-devtools/dnf/dnf_git.bb | 49 + .../docbook-dsssl-stylesheets-native_1.79.bb | 71 - .../docbook-sgml-dtd-3.1-native_3.1.bb | 26 - .../docbook-sgml-dtd-4.1-native_4.1.bb | 26 - .../docbook-sgml-dtd-4.5-native.bb | 18 - .../docbook-sgml-dtd/docbook-sgml-dtd-native.inc | 70 - .../docbook-sgml-dtd/files/LICENSE-OASIS | 16 - .../docbook-utils/docbook-utils-0.6.14/re.patch | 29 - .../docbook-utils/docbook-utils-native_0.6.14.bb | 63 - .../docbook-xml/docbook-xml-dtd4_4.5.bb | 21 +- .../docbook-xml/docbook-xsl-stylesheets_1.79.1.bb | 19 +- ...ude-fcntl.h-for-getting-loff_t-definition.patch | 41 - .../dosfstools/dosfstools/alignment_hack.patch | 38 - .../dosfstools/dosfstools-msdos_fs-types.patch | 37 - .../dosfstools/fix_populated_dosfs_creation.patch | 489 - .../dosfstools/include-linux-types.patch | 22 - .../dosfstools/dosfstools/mkdosfs-bootcode.patch | 241 - .../dosfstools/dosfstools/mkdosfs-dir.patch | 639 - .../dosfstools/msdos_fat12_undefined.patch | 19 - .../dosfstools/dosfstools/nofat32_autoselect.patch | 27 - .../recipes-devtools/dosfstools/dosfstools_2.11.bb | 34 - .../recipes-devtools/dosfstools/dosfstools_4.0.bb | 25 - .../recipes-devtools/dosfstools/dosfstools_4.1.bb | 25 + .../yocto-poky/meta/recipes-devtools/dpkg/dpkg.inc | 49 +- ...tinsts-expect-D-to-be-set-when-running-in.patch | 31 +- ...uild.c-Remove-usage-of-clamp-mtime-in-tar.patch | 40 + 
.../meta/recipes-devtools/dpkg/dpkg_1.18.10.bb | 20 + .../meta/recipes-devtools/dpkg/dpkg_1.18.7.bb | 19 - .../meta/recipes-devtools/e2fsprogs/e2fsprogs.inc | 2 +- ...-with-exit-status-0-if-no-errors-were-fix.patch | 70 +- ...fs-enable-the-metadata_csum-and-64bit-fea.patch | 9 +- .../e2fsprogs/e2fsprogs-1.43-sysmacros.patch | 130 + .../e2fsprogs/e2fsprogs/ptest.patch | 25 +- .../recipes-devtools/e2fsprogs/e2fsprogs/run-ptest | 2 +- .../recipes-devtools/e2fsprogs/e2fsprogs_1.43.4.bb | 112 + .../recipes-devtools/e2fsprogs/e2fsprogs_1.43.bb | 113 - .../elfutils/elfutils-0.148/arm_backend.diff | 449 - .../elfutils/elfutils-0.148/elf_additions.diff | 71 - .../elf_begin.c-CVE-2014-9447-fix.patch | 37 - .../elfutils-ar-c-fix-num-passed-to-memset.patch | 23 - .../elfutils/elfutils-0.148/elfutils-fsize.patch | 39 - .../elfutils-0.148/fix-build-gcc-4.8.patch | 57 - .../elfutils/elfutils-0.148/fix_for_gcc-4.7.patch | 73 - .../elfutils/elfutils-0.148/gcc6.patch | 23 - .../elfutils/elfutils-0.148/hppa_backend.diff | 801 - .../elfutils/elfutils-0.148/i386_dis.h | 1657 -- .../elfutils/elfutils-0.148/m68k_backend.diff | 309 - .../elfutils/elfutils-0.148/mips_backend.diff | 713 - ...ssed-to-snprintf-for-invalid-sh_name-case.patch | 27 - .../elfutils-0.148/redhat-portability.diff | 756 - .../elfutils/elfutils-0.148/redhat-robustify.diff | 1709 -- .../elfutils/elfutils-0.148/remove-unused.patch | 154 - .../elfutils-0.148/testsuite-ignore-elflint.diff | 21 - .../uclibc-support-for-elfutils-0.148.patch | 91 - .../elfutils/elfutils-0.148/x86_64_dis.h | 1632 -- ...-GCC7-Wimplicit-fallthrough-support-fixes.patch | 318 - ...ferences-between-mips-machine-identifiers.patch | 34 - .../0001-ar-Fix-GCC7-Wformat-length-issues.patch | 125 - ...de-alternatives-for-glibc-assumptions-hel.patch | 1163 - ...m-Silence-Werror-maybe-uninitialized-fals.patch | 35 - .../0001-fix-a-stack-usage-warning.patch | 28 - .../0001-remove-the-unneed-checking.patch | 38 - ...-support-for-mips64-abis-in-mips_retval.c.patch | 168 - .../0003-Add-mips-n64-relocation-format-hack.patch | 226 - .../elfutils-0.166/Fix_one_GCC7_warning.patch | 45 - .../elfutils/elfutils-0.166/aarch64_uio.patch | 47 - .../elfutils/elfutils-0.166/arm_backend.diff | 600 - .../elfcmp-fix-self-comparision.patch | 41 - .../elfutils/elfutils-0.166/fallthrough.patch | 36 - .../elfutils/elfutils-0.166/fixheadercheck.patch | 23 - .../elfutils/elfutils-0.166/hppa_backend.diff | 799 - .../elfutils/elfutils-0.166/kfreebsd_path.patch | 15 - .../elfutils/elfutils-0.166/m68k_backend.diff | 492 - .../elfutils/elfutils-0.166/mips_backend.diff | 711 - .../elfutils/elfutils-0.166/mips_readelf_w.patch | 22 - .../elfutils/elfutils-0.166/shadow.patch | 23 - .../elfutils-0.166/testsuite-ignore-elflint.diff | 39 - .../elfutils/elfutils-0.166/uclibc-support.patch | 128 - ...de-alternatives-for-glibc-assumptions-hel.patch | 1051 + ...m-Silence-Werror-maybe-uninitialized-fals.patch | 35 + .../0001-fix-a-stack-usage-warning.patch | 28 + .../0001-remove-the-unneed-checking.patch | 38 + .../elfutils-0.168/Fix_one_GCC7_warning.patch | 44 + .../elfutils/elfutils-0.168/aarch64_uio.patch | 47 + ...ferences-between-mips-machine-identifiers.patch | 35 + ...-support-for-mips64-abis-in-mips_retval.c.patch | 171 + .../0003-Add-mips-n64-relocation-format-hack.patch | 229 + .../elfutils-0.168/debian/arm_backend.diff | 603 + .../elfutils-0.168/debian/hppa_backend.diff | 802 + .../elfutils/elfutils-0.168/debian/hurd_path.patch | 17 + .../elfutils-0.168/debian/ignore_strmerge.diff | 14 + 
.../elfutils-0.168/debian/kfreebsd_path.patch | 20 + .../elfutils-0.168/debian/mips_backend.diff | 686 + .../elfutils-0.168/debian/mips_readelf_w.patch | 25 + .../debian/testsuite-ignore-elflint.diff | 42 + .../elfutils/elfutils-0.168/fallthrough.patch | 36 + .../elfutils/elfutils-0.168/fixheadercheck.patch | 23 + .../elfutils/elfutils-0.168/shadow.patch | 23 + .../recipes-devtools/elfutils/elfutils_0.148.bb | 90 - .../recipes-devtools/elfutils/elfutils_0.166.bb | 94 - .../recipes-devtools/elfutils/elfutils_0.168.bb | 90 + .../0001-Resolve-string-formatting-issues.patch | 29 + .../meta/recipes-devtools/expect/expect_5.45.bb | 10 +- .../meta/recipes-devtools/fdisk/gptfdisk_1.0.1.bb | 2 +- .../meta/recipes-devtools/file/file_5.28.bb | 48 - .../meta/recipes-devtools/file/file_5.30.bb | 48 + .../meta/recipes-devtools/gcc/gcc-5.4.inc | 1 + .../gcc/gcc-5.4/CVE-2016-6131.patch | 251 + .../meta/recipes-devtools/gcc/gcc-6.2.inc | 134 - .../0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch | 42 - .../gcc/gcc-6.2/0002-uclibc-conf.patch | 53 - .../0003-gcc-uclibc-locale-ctype_touplow_t.patch | 87 - .../gcc/gcc-6.2/0004-uclibc-locale.patch | 2862 --- .../gcc/gcc-6.2/0005-uclibc-locale-no__x.patch | 257 - .../gcc/gcc-6.2/0006-uclibc-locale-wchar_fix.patch | 68 - .../gcc/gcc-6.2/0007-uclibc-locale-update.patch | 542 - .../gcc/gcc-6.2/0008-missing-execinfo_h.patch | 28 - .../gcc/gcc-6.2/0009-c99-snprintf.patch | 28 - .../0010-gcc-poison-system-directories.patch | 192 - .../gcc/gcc-6.2/0011-gcc-poison-dir-extend.patch | 39 - .../0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch | 73 - .../gcc/gcc-6.2/0013-64-bit-multilib-hack.patch | 85 - .../gcc/gcc-6.2/0014-optional-libstdc.patch | 125 - ...0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch | 59 - .../gcc/gcc-6.2/0016-COLLECT_GCC_OPTIONS.patch | 38 - ...efaults.h-in-B-instead-of-S-and-t-oe-in-B.patch | 96 - .../gcc-6.2/0018-fortran-cross-compile-hack.patch | 46 - .../gcc/gcc-6.2/0019-cpp-honor-sysroot.patch | 54 - .../gcc-6.2/0020-MIPS64-Default-to-N64-ABI.patch | 57 - ...C_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch | 234 - ...0022-gcc-Fix-argument-list-too-long-error.patch | 40 - .../gcc/gcc-6.2/0023-Disable-sdt.patch | 113 - .../gcc/gcc-6.2/0024-libtool.patch | 42 - ...4-pass-fix-v4bx-to-linker-to-support-EABI.patch | 43 - ...tilib-config-files-from-B-instead-of-usin.patch | 102 - ...-libdir-from-.la-which-usually-points-to-.patch | 31 - .../gcc/gcc-6.2/0028-export-CPP.patch | 53 - ...AltiVec-generation-on-powepc-linux-target.patch | 56 - ...-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch | 42 - ...Ensure-target-gcc-headers-can-be-included.patch | 98 - ...-t-build-with-disable-dependency-tracking.patch | 54 - ...h-host-directory-during-relink-if-inst_pr.patch | 38 - ...IBS_DIR-replacement-instead-of-hardcoding.patch | 29 - .../0035-aarch64-Add-support-for-musl-ldso.patch | 28 - ...ibcc1-fix-libcc1-s-install-path-and-rpath.patch | 54 - ...-handle-sysroot-support-for-nativesdk-gcc.patch | 213 - ...et-sysroot-gcc-version-specific-dirs-with.patch | 102 - ...-various-_FOR_BUILD-and-related-variables.patch | 137 - .../0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch | 28 - .../gcc/gcc-6.2/0041-ssp_nonshared.patch | 28 - ...c-libcpp-support-ffile-prefix-map-old-new.patch | 292 - ...ug-prefix-map-to-replace-ffile-prefix-map.patch | 43 - ...-fdebug-prefix-map-support-to-remap-sourc.patch | 54 - ...45-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch | 125 - ...bgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch | 29 - ...e-alias-for-__cpu_indicator_init-instead-.patch | 85 - 
...et-71056-Don-t-use-vectorized-builtins-wh.patch | 92 - .../gcc/gcc-6.2/CVE-2016-4490.patch | 290 - .../gcc/gcc-6.2/ubsan-fix-check-empty-string.patch | 28 - .../meta/recipes-devtools/gcc/gcc-6.3.inc | 138 + .../0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch | 42 + .../gcc/gcc-6.3/0002-uclibc-conf.patch | 53 + .../0003-gcc-uclibc-locale-ctype_touplow_t.patch | 87 + .../gcc/gcc-6.3/0004-uclibc-locale.patch | 2862 +++ .../gcc/gcc-6.3/0005-uclibc-locale-no__x.patch | 257 + .../gcc/gcc-6.3/0006-uclibc-locale-wchar_fix.patch | 68 + .../gcc/gcc-6.3/0007-uclibc-locale-update.patch | 542 + .../gcc/gcc-6.3/0008-missing-execinfo_h.patch | 28 + .../gcc/gcc-6.3/0009-c99-snprintf.patch | 28 + .../0010-gcc-poison-system-directories.patch | 192 + .../gcc/gcc-6.3/0011-gcc-poison-dir-extend.patch | 39 + .../0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch | 73 + .../gcc/gcc-6.3/0013-64-bit-multilib-hack.patch | 85 + .../gcc/gcc-6.3/0014-optional-libstdc.patch | 125 + ...0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch | 59 + .../gcc/gcc-6.3/0016-COLLECT_GCC_OPTIONS.patch | 38 + ...efaults.h-in-B-instead-of-S-and-t-oe-in-B.patch | 96 + .../gcc-6.3/0018-fortran-cross-compile-hack.patch | 46 + .../gcc/gcc-6.3/0019-cpp-honor-sysroot.patch | 54 + .../gcc-6.3/0020-MIPS64-Default-to-N64-ABI.patch | 57 + ...C_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch | 234 + ...0022-gcc-Fix-argument-list-too-long-error.patch | 40 + .../gcc/gcc-6.3/0023-Disable-sdt.patch | 113 + .../gcc/gcc-6.3/0024-libtool.patch | 42 + ...4-pass-fix-v4bx-to-linker-to-support-EABI.patch | 43 + ...tilib-config-files-from-B-instead-of-usin.patch | 102 + ...-libdir-from-.la-which-usually-points-to-.patch | 31 + .../gcc/gcc-6.3/0028-export-CPP.patch | 53 + ...AltiVec-generation-on-powepc-linux-target.patch | 56 + ...-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch | 42 + ...Ensure-target-gcc-headers-can-be-included.patch | 98 + ...-t-build-with-disable-dependency-tracking.patch | 54 + ...h-host-directory-during-relink-if-inst_pr.patch | 38 + ...IBS_DIR-replacement-instead-of-hardcoding.patch | 29 + .../0035-aarch64-Add-support-for-musl-ldso.patch | 28 + ...ibcc1-fix-libcc1-s-install-path-and-rpath.patch | 54 + ...-handle-sysroot-support-for-nativesdk-gcc.patch | 213 + ...et-sysroot-gcc-version-specific-dirs-with.patch | 102 + ...-various-_FOR_BUILD-and-related-variables.patch | 137 + .../0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch | 28 + ...shared-to-link-commandline-for-musl-targe.patch | 87 + ...c-libcpp-support-ffile-prefix-map-old-new.patch | 292 + ...ug-prefix-map-to-replace-ffile-prefix-map.patch | 43 + ...-fdebug-prefix-map-support-to-remap-sourc.patch | 54 + ...45-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch | 125 + ...bgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch | 29 + ...e-alias-for-__cpu_indicator_init-instead-.patch | 85 + .../gcc-6.3/0048-sync-gcc-stddef.h-with-musl.patch | 91 + .../gcc/gcc-6.3/0054_all_nopie-all-flags.patch | 22 + .../gcc/gcc-6.3/0055-unwind_h-glibc26.patch | 139 + .../gcc/gcc-6.3/CVE-2016-6131.patch | 251 + .../gcc/gcc-6.3/ubsan-fix-check-empty-string.patch | 28 + .../meta/recipes-devtools/gcc/gcc-common.inc | 45 +- .../recipes-devtools/gcc/gcc-configure-common.inc | 2 +- .../recipes-devtools/gcc/gcc-cross-canadian_6.2.bb | 5 - .../recipes-devtools/gcc/gcc-cross-canadian_6.3.bb | 5 + .../recipes-devtools/gcc/gcc-cross-initial.inc | 32 +- .../recipes-devtools/gcc/gcc-cross-initial_6.2.bb | 2 - .../recipes-devtools/gcc/gcc-cross-initial_6.3.bb | 2 + .../meta/recipes-devtools/gcc/gcc-cross.inc | 34 +- 
.../meta/recipes-devtools/gcc/gcc-cross_6.2.bb | 3 - .../meta/recipes-devtools/gcc/gcc-cross_6.3.bb | 3 + .../gcc/gcc-crosssdk-initial_6.2.bb | 3 - .../gcc/gcc-crosssdk-initial_6.3.bb | 3 + .../meta/recipes-devtools/gcc/gcc-crosssdk_6.2.bb | 2 - .../meta/recipes-devtools/gcc/gcc-crosssdk_6.3.bb | 2 + .../recipes-devtools/gcc/gcc-multilib-config.inc | 20 +- .../meta/recipes-devtools/gcc/gcc-runtime.inc | 62 +- .../meta/recipes-devtools/gcc/gcc-runtime_6.2.bb | 7 - .../meta/recipes-devtools/gcc/gcc-runtime_6.3.bb | 7 + .../meta/recipes-devtools/gcc/gcc-sanitizers.inc | 31 +- .../recipes-devtools/gcc/gcc-sanitizers_6.2.bb | 2 - .../recipes-devtools/gcc/gcc-sanitizers_6.3.bb | 2 + .../meta/recipes-devtools/gcc/gcc-source.inc | 9 +- .../meta/recipes-devtools/gcc/gcc-source_6.2.bb | 4 - .../meta/recipes-devtools/gcc/gcc-source_6.3.bb | 4 + .../meta/recipes-devtools/gcc/gcc-target.inc | 5 +- .../meta/recipes-devtools/gcc/gcc_5.4.bb | 6 - .../meta/recipes-devtools/gcc/gcc_6.2.bb | 15 - .../meta/recipes-devtools/gcc/gcc_6.3.bb | 9 + .../meta/recipes-devtools/gcc/libgcc-common.inc | 50 +- .../meta/recipes-devtools/gcc/libgcc-initial.inc | 7 +- .../recipes-devtools/gcc/libgcc-initial_6.2.bb | 2 - .../recipes-devtools/gcc/libgcc-initial_6.3.bb | 2 + .../meta/recipes-devtools/gcc/libgcc.inc | 2 - .../meta/recipes-devtools/gcc/libgcc_6.2.bb | 2 - .../meta/recipes-devtools/gcc/libgcc_6.3.bb | 2 + .../meta/recipes-devtools/gcc/libgfortran.inc | 29 +- .../meta/recipes-devtools/gcc/libgfortran_6.2.bb | 3 - .../meta/recipes-devtools/gcc/libgfortran_6.3.bb | 3 + .../meta/recipes-devtools/gdb/gdb-7.11.1.inc | 22 - .../meta/recipes-devtools/gdb/gdb-7.12.1.inc | 22 + .../meta/recipes-devtools/gdb/gdb-common.inc | 10 +- .../gdb/gdb-cross-canadian_7.11.1.bb | 3 - .../gdb/gdb-cross-canadian_7.12.1.bb | 3 + .../meta/recipes-devtools/gdb/gdb-cross.inc | 7 +- .../meta/recipes-devtools/gdb/gdb-cross_7.11.1.bb | 2 - .../meta/recipes-devtools/gdb/gdb-cross_7.12.1.bb | 2 + .../gdb/0001-include-sys-types.h-for-mode_t.patch | 6 +- ...0002-make-man-install-relative-to-DESTDIR.patch | 6 +- ...s-linux-nat-Define-_ABIO32-if-not-defined.patch | 6 +- ...Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch | 12 +- ...d-support-for-Renesas-SH-sh4-architecture.patch | 42 +- ...e-libreadline.a-when-using-disable-static.patch | 10 +- .../gdb/gdb/0007-use-asm-sgidefs.h.patch | 6 +- .../0008-Use-exorted-definitions-of-SIGRTMIN.patch | 10 +- .../gdb/gdb/0009-Change-order-of-CFLAGS.patch | 6 +- .../0010-resolve-restrict-keyword-conflict.patch | 6 +- .../recipes-devtools/gdb/gdb/0011-avx_mpx.patch | 2601 --- ..._gdb_patches_120-sigprocmask-invalid-call.patch | 45 + .../meta/recipes-devtools/gdb/gdb_7.11.1.bb | 26 - .../meta/recipes-devtools/gdb/gdb_7.12.1.bb | 27 + .../yocto-poky/meta/recipes-devtools/git/git.inc | 25 +- .../meta/recipes-devtools/git/git_2.11.1.bb | 11 + .../meta/recipes-devtools/git/git_2.9.3.bb | 11 - .../recipes-devtools/gnu-config/gnu-config_git.bb | 2 +- .../yocto-poky/meta/recipes-devtools/go/go-1.4.inc | 16 + ...alignment-for-the-.rel.plt-section-on-32-.patch | 33 + .../go/go-1.4/016-armhf-elf-header.patch | 24 + ...ckport-cmd-link-support-new-386-amd64-rel.patch | 225 + .../meta/recipes-devtools/go/go-1.4/syslog.patch | 62 + .../yocto-poky/meta/recipes-devtools/go/go-1.6.inc | 19 + .../go/go-1.6/armhf-elf-header.patch | 23 + .../go/go-1.6/fix-cc-handling.patch | 50 + .../go/go-1.6/fix-target-cc-for-build.patch | 17 + .../recipes-devtools/go/go-1.6/gotooldir.patch | 30 + .../go/go-1.6/split-host-and-target-build.patch | 63 + 
.../meta/recipes-devtools/go/go-1.6/syslog.patch | 62 + .../yocto-poky/meta/recipes-devtools/go/go-1.7.inc | 19 + .../go/go-1.7/armhf-elf-header.patch | 23 + .../go/go-1.7/fix-cc-handling.patch | 50 + .../go/go-1.7/fix-target-cc-for-build.patch | 17 + .../recipes-devtools/go/go-1.7/gotooldir.patch | 30 + .../go/go-1.7/split-host-and-target-build.patch | 62 + .../meta/recipes-devtools/go/go-1.7/syslog.patch | 62 + .../yocto-poky/meta/recipes-devtools/go/go-1.8.inc | 19 + .../go/go-1.8/armhf-elf-header.patch | 23 + .../go/go-1.8/fix-cc-handling.patch | 50 + .../go/go-1.8/fix-target-cc-for-build.patch | 17 + .../recipes-devtools/go/go-1.8/gotooldir.patch | 30 + .../go/go-1.8/split-host-and-target-build.patch | 62 + .../meta/recipes-devtools/go/go-1.8/syslog.patch | 62 + .../recipes-devtools/go/go-bootstrap-native_1.4.bb | 3 + .../meta/recipes-devtools/go/go-common.inc | 22 + .../meta/recipes-devtools/go/go-cross.inc | 18 + .../meta/recipes-devtools/go/go-cross_1.7.bb | 5 + .../meta/recipes-devtools/go/go-cross_1.8.bb | 5 + .../meta/recipes-devtools/go/go-native.inc | 56 + .../meta/recipes-devtools/go/go-native_1.8.bb | 3 + .../yocto-poky/meta/recipes-devtools/go/go.inc | 86 + .../yocto-poky/meta/recipes-devtools/go/go_1.6.bb | 4 + .../yocto-poky/meta/recipes-devtools/go/go_1.7.bb | 2 + .../yocto-poky/meta/recipes-devtools/go/go_1.8.bb | 3 + ...002-Recognize-nios2-as-compilation-target.patch | 32 - .../meta/recipes-devtools/guile/guile_2.0.12.bb | 121 - .../meta/recipes-devtools/guile/guile_2.0.14.bb | 125 + .../recipes-devtools/guilt/files/guilt-bash.patch | 288 - .../recipes-devtools/guilt/guilt-native_git.bb | 23 - .../recipes-devtools/intltool/intltool_0.51.0.bb | 3 +- ...-FALLTHRU-comment-to-handle-GCC7-warnings.patch | 77 + .../meta/recipes-devtools/json-c/json-c_0.12.bb | 1 + .../kconfig-frontends_3.12.0.0.bb | 11 +- ...-set-PYTHON_INSTALL_DIR-by-running-python.patch | 41 + ...-Make-__comps_objmrtree_all-static-inline.patch | 35 + ...2-Set-library-installation-path-correctly.patch | 27 + .../meta/recipes-devtools/libcomps/libcomps_git.bb | 24 + ...cmake-drop-the-requirement-for-GTKDOC_SCA.patch | 31 + ...ers-for-both-libsolv-and-libsolvext-libdn.patch | 28 + ...-sysroot-path-to-introspection-tools-path.patch | 36 + ...-library-installation-directory-correctly.patch | 29 + ...-variables-with-pkg-config-cmake-s-own-mo.patch | 29 + .../meta/recipes-devtools/libdnf/libdnf_git.bb | 29 + ...ly-set-the-library-installation-directory.patch | 28 + ...to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch | 41 + ...-race-when-deleting-temporary-directories.patch | 41 + ...ariables-with-pkg-config-not-with-cmake-m.patch | 29 + .../meta/recipes-devtools/librepo/librepo_git.bb | 24 + .../meta/recipes-devtools/libtool/libtool_2.4.6.bb | 2 + .../linuxdoc-tools/linuxdoc-tools-native_0.9.69.bb | 26 - .../linuxdoc-tools/disable_dvips_doc.patch | 33 - .../linuxdoc-tools/disable_sgml2rtf.patch | 64 - .../linuxdoc-tools/disable_tex_doc.patch | 29 - .../linuxdoc-tools/disable_txt_doc.patch | 36 - .../meta/recipes-devtools/m4/m4-1.4.17.inc | 18 - .../meta/recipes-devtools/m4/m4-1.4.18.inc | 22 + .../meta/recipes-devtools/m4/m4-1.4.9.inc | 13 - .../meta/recipes-devtools/m4/m4-native_1.4.17.bb | 13 - .../meta/recipes-devtools/m4/m4-native_1.4.18.bb | 13 + .../m4/m4/fix_for_circular_dependency.patch | 77 - .../meta/recipes-devtools/m4/m4/remove-gets.patch | 15 +- .../meta/recipes-devtools/m4/m4_1.4.17.bb | 3 - .../meta/recipes-devtools/m4/m4_1.4.18.bb | 3 + .../meta/recipes-devtools/m4/m4_1.4.9.bb | 3 - 
.../make-3.81/make_fix_for_automake-1.12.patch | 43 - .../recipes-devtools/make/make-3.81/makeinfo.patch | 22 - .../meta/recipes-devtools/make/make_3.81.bb | 15 - .../mkelfimage/fix-makefile-to-find-libz.patch | 15 +- .../mklibs/mklibs-native_0.1.41.bb | 21 - .../mklibs/mklibs-native_0.1.43.bb | 25 + .../meta/recipes-devtools/mmc/mmc-utils_git.bb | 6 +- .../mtd/mtd-utils/0001-Fix-build-with-musl.patch | 19 +- .../mtd/mtd-utils/010-fix-rpmatch.patch | 45 +- .../add-exclusion-to-mkfs-jffs2-git-2.patch | 2 +- .../mtd/mtd-utils/fix-armv7-neon-alignment.patch | 25 +- ...t-cleanmarker-with-flash_erase--j-command.patch | 4 +- .../meta/recipes-devtools/mtd/mtd-utils_git.bb | 11 +- .../mtools/mtools/fix-broken-lz.patch | 23 - .../recipes-devtools/mtools/mtools/mtools.patch | 129 - .../recipes-devtools/mtools/mtools/no-x11.patch | 21 - .../meta/recipes-devtools/mtools/mtools_3.9.9.bb | 55 - .../meta/recipes-devtools/nasm/nasm_2.12.02.bb | 1 - .../openjade/openjade-1.3.2/fix-regex.patch | 32 - .../openjade/openjade-1.3.2/makefile.patch | 39 - .../openjade/openjade-1.3.2/msggen.pl.patch | 44 - .../openjade/openjade-1.3.2/no-libtool.patch | 20 - .../openjade/openjade-1.3.2/reautoconf.patch | 83 - .../user-declared-default-constructor.patch | 92 - .../openjade/openjade-native_1.3.2.bb | 121 - .../opensp-1.5.2/obsolete_automake_macros.patch | 15 - .../meta/recipes-devtools/opensp/opensp_1.5.2.bb | 59 - .../opkg-utils/opkg-utils/tar_ignore_error.patch | 2 +- .../recipes-devtools/opkg-utils/opkg-utils_git.bb | 14 +- .../recipes-devtools/opkg/opkg-arch-config_1.0.bb | 1 - ...reate-opkg.lock-in-run-instead-of-var-run.patch | 8 +- .../opkg/opkg/status-conffile.patch | 69 + .../meta/recipes-devtools/opkg/opkg_0.3.3.bb | 73 - .../meta/recipes-devtools/opkg/opkg_0.3.4.bb | 76 + .../meta/recipes-devtools/orc/orc_0.4.25.bb | 27 - .../meta/recipes-devtools/orc/orc_0.4.26.bb | 27 + .../meta/recipes-devtools/patch/patch/debian.patch | 10426 --------- .../patch/patch/global-reject-file.diff | 203 - .../recipes-devtools/patch/patch/install.patch | 43 - .../patch/patch/unified-reject-files.diff | 307 - .../meta/recipes-devtools/patch/patch_2.5.9.bb | 12 - .../meta/recipes-devtools/patch/patch_2.7.5.bb | 2 +- .../patchelf/patchelf/avoidholes.patch | 163 + .../meta/recipes-devtools/patchelf/patchelf_0.9.bb | 1 + .../recipes-devtools/pax-utils/pax-utils_1.1.6.bb | 35 - .../recipes-devtools/pax-utils/pax-utils_1.2.2.bb | 38 + .../meta/recipes-devtools/perl/liburi-perl_1.60.bb | 29 - .../meta/recipes-devtools/perl/liburi-perl_1.71.bb | 30 + .../perl/libxml-parser-perl_2.44.bb | 4 +- .../recipes-devtools/perl/perl-native_5.22.1.bb | 128 - .../recipes-devtools/perl/perl-native_5.24.1.bb | 136 + .../meta/recipes-devtools/perl/perl-ptest.inc | 11 +- .../recipes-devtools/perl/perl-rdepends_5.22.1.inc | 2563 --- .../recipes-devtools/perl/perl-rdepends_5.24.1.inc | 2575 +++ ...emove-fstack-protector-strong-for-native-.patch | 103 + .../recipes-devtools/perl/perl/Makefile.SH.patch | 114 +- .../meta/recipes-devtools/perl/perl/config.sh | 98 +- .../perl/perl/debian/errno_ver.diff | 28 +- .../perl/perl/debian/prune_libs.diff | 19 +- .../perl/perl/dynaloaderhack.patch | 24 +- .../perl/perl/perl-fix-CVE-2015-8607.patch | 74 - .../perl/perl/perl-fix-CVE-2016-1238.patch | 352 - .../perl/perl/perl-fix-CVE-2016-2381.patch | 114 - .../perl/perl/perl-fix-CVE-2016-6185.patch | 128 - .../perl/perl/perl-remove-nm-from-libswanted.patch | 30 - .../perl/perl/perl-test-customized.patch | 137 +- .../meta/recipes-devtools/perl/perl_5.22.1.bb | 375 - 
.../meta/recipes-devtools/perl/perl_5.24.1.bb | 358 + .../recipes-devtools/pkgconfig/pkgconfig_git.bb | 17 +- .../nativesdk-postinst-intercept_1.0.bb | 1 - .../meta/recipes-devtools/prelink/prelink_git.bb | 2 +- .../files/0001-Don-t-send-SIGUSR1-to-init.patch | 48 - ...t-diagnostics-during-startup-for-pseudo-d.patch | 54 - .../files/0002-Use-correct-file-descriptor.patch | 53 - ...nameat-parallel-to-previous-fix-to-rename.patch | 64 - .../pseudo/files/Fix-xattr-performance.patch | 117 - .../pseudo/files/More-correctly-fix-xattrs.patch | 37 - .../b6b68db896f9963558334aff7fca61adde4ec10f.patch | 48 + .../efe0be279901006f939cd357ccee47b651c786da.patch | 99 + .../recipes-devtools/pseudo/files/fallback-passwd | 1 + .../meta/recipes-devtools/pseudo/pseudo_1.8.1.bb | 17 - .../meta/recipes-devtools/pseudo/pseudo_1.8.2.bb | 13 + .../meta/recipes-devtools/pseudo/pseudo_git.bb | 4 +- .../python-numpy/files/mips/_numpyconfig.h | 32 - .../python-numpy/files/mips/config.h | 139 - .../python-numpy/files/mips64/_numpyconfig.h | 32 - .../python-numpy/files/mips64/config.h | 139 - .../python-numpy/files/mips64n32/_numpyconfig.h | 31 - .../python-numpy/files/mips64n32/config.h | 139 - .../files/mipsarchn32eb/_numpyconfig.h | 31 + .../python-numpy/files/mipsarchn32eb/config.h | 139 + .../files/mipsarchn32el/_numpyconfig.h | 31 + .../python-numpy/files/mipsarchn32el/config.h | 138 + .../files/mipsarchn64eb/_numpyconfig.h | 32 + .../python-numpy/files/mipsarchn64eb/config.h | 139 + .../files/mipsarchn64el/_numpyconfig.h | 32 + .../python-numpy/files/mipsarchn64el/config.h | 138 + .../files/mipsarcho32eb/_numpyconfig.h | 32 + .../python-numpy/files/mipsarcho32eb/config.h | 139 + .../python-numpy/files/mipsarcho32el/config.h | 21 + .../python-numpy/files/mipsarcho32el/numpyconfig.h | 18 + .../python-numpy/files/mipsel/config.h | 21 - .../python-numpy/files/mipsel/numpyconfig.h | 18 - .../python-numpy/python-numpy_1.11.1.bb | 105 - .../python-numpy/python-numpy_1.11.2.bb | 113 + .../python-numpy/python3-numpy_1.11.0.bb | 106 - .../python-numpy/python3-numpy_1.11.2.bb | 114 + .../python/python-2.7-manifest.inc | 5 +- .../python/python-3.5-manifest.inc | 147 +- .../meta/recipes-devtools/python/python-git.inc | 4 +- .../recipes-devtools/python/python-git_2.0.7.bb | 7 - .../recipes-devtools/python/python-git_2.1.1.bb | 7 + .../meta/recipes-devtools/python/python-mako.inc | 4 +- .../recipes-devtools/python/python-mako_1.0.4.bb | 17 - .../recipes-devtools/python/python-mako_1.0.6.bb | 17 + .../python/python-native-2.7-manifest.inc | 10 + .../python/python-native-3.5-manifest.inc | 10 + .../avoid_parallel_make_races_on_pgen.patch | 27 - .../python/python-native/multilib.patch | 96 +- .../python/python-native_2.7.12.bb | 61 - .../python/python-native_2.7.13.bb | 61 + .../python/python-pexpect_4.2.0.bb | 28 - .../python/python-pexpect_4.2.1.bb | 28 + .../python/python-ptyprocess_0.5.1.bb | 2 +- .../meta/recipes-devtools/python/python-pycurl.inc | 31 + .../python/python-pycurl_7.21.5.bb | 32 +- .../python/python-scons-native_2.5.0.bb | 8 - .../python/python-scons-native_2.5.1.bb | 8 + .../recipes-devtools/python/python-scons_2.5.0.bb | 23 - .../recipes-devtools/python/python-scons_2.5.1.bb | 23 + .../recipes-devtools/python/python-setuptools.inc | 6 +- .../python/python-setuptools_22.0.5.bb | 38 - .../python/python-setuptools_32.1.1.bb | 38 + ...m_sys-use-md5sum-instead-of-mtime-as-the-.patch | 38 - ...art-add-deugging-when-targetpath-is-empty.patch | 47 - .../smart-add-for-rpm-ignoresize-check.patch | 35 - 
.../smart-already-installed-message.patch | 54 - .../python/python-smartpm/smart-attempt-fix.patch | 158 - .../python/python-smartpm/smart-attempt.patch | 177 - ...cache.py-getPackages-matches-name-version.patch | 43 - .../python-smartpm/smart-channel-remove-all.patch | 33 - .../python/python-smartpm/smart-channelsdir.patch | 24 - .../python/python-smartpm/smart-locale.patch | 27 - .../python/python-smartpm/smart-recommends.patch | 381 - .../smart-rpm-transaction-failure-check.patch | 57 - .../smart-set-noprogress-for-pycurl.patch | 20 - .../smartpm-rpm5-support-check-signatures.patch | 112 - .../recipes-devtools/python/python-smartpm_git.bb | 141 - .../meta/recipes-devtools/python/python.inc | 14 +- .../01-use-proper-tools-for-cross-build.patch | 34 +- .../python/python/CVE-2016-5636.patch | 44 - .../python/Don-t-use-getentropy-on-Linux.patch | 41 + .../python/avoid_parallel_make_races_on_pgen.patch | 27 - .../recipes-devtools/python/python/multilib.patch | 118 +- .../python/python-fix-CVE-2016-1000110.patch | 162 - .../python/python3-docutils_0.12.bb | 18 - .../python/python3-docutils_0.13.1.bb | 18 + .../recipes-devtools/python/python3-git_2.0.7.bb | 7 - .../recipes-devtools/python/python3-git_2.1.1.bb | 7 + .../0001-Add-python-3-compatibility.patch | 552 + .../python/python3-iniparse_0.4.bb | 20 + .../recipes-devtools/python/python3-mako_1.0.4.bb | 11 - .../recipes-devtools/python/python3-mako_1.0.6.bb | 11 + .../python/python3-native_3.5.2.bb | 29 +- .../recipes-devtools/python/python3-pip_8.1.2.bb | 47 - .../recipes-devtools/python/python3-pip_9.0.1.bb | 56 + .../python/python3-pycurl_7.21.5.bb | 5 + .../python/python3-pygobject_3.20.1.bb | 25 - .../python/python3-pygobject_3.22.0.bb | 31 + .../recipes-devtools/python/python3-pygpgme_0.3.bb | 18 + .../python/python3-setuptools_22.0.5.bb | 37 - .../python/python3-setuptools_32.1.1.bb | 37 + .../python3/020-dont-compile-python-files.patch | 48 - .../python/python3/CVE-2016-5636.patch | 44 - .../python/python3/python-3.3-multilib.patch | 301 +- .../python/python3/upstream-random-fixes.patch | 721 + .../python/python3/use_packed_importlib.patch | 31 - .../meta/recipes-devtools/python/python3_3.5.2.bb | 2 +- .../meta/recipes-devtools/python/python_2.7.12.bb | 171 - .../meta/recipes-devtools/python/python_2.7.13.bb | 171 + .../qemu/qemu-helper-native_1.0.bb | 4 + .../meta/recipes-devtools/qemu/qemu-targets.inc | 4 +- .../yocto-poky/meta/recipes-devtools/qemu/qemu.inc | 91 +- .../0001-Provide-support-for-the-CUSE-TPM.patch | 870 + ...0001-target-mips-add-24KEc-CPU-definition.patch | 54 - ...0001-virtio-zero-vq-inuse-in-virtio_reset.patch | 57 - ...ondition-to-notify-waiters-of-completed-c.patch | 86 + .../qemu/qemu/0002-fix-CVE-2016-7423.patch | 45 - ...condition-in-TPM-backend-for-notification.patch | 79 + .../qemu/qemu/0003-fix-CVE-2016-7908.patch | 62 - ...support-for-VM-suspend-resume-for-TPM-TIS.patch | 719 + .../qemu/qemu/0004-fix-CVE-2016-7909.patch | 42 - .../recipes-devtools/qemu/qemu/CVE-2016-9908.patch | 44 + .../recipes-devtools/qemu/qemu/CVE-2016-9912.patch | 45 + ...-Arm-versatilepb-Add-memory-size-checking.patch | 46 - .../exclude-some-arm-EABI-obsolete-syscalls.patch | 22 +- .../recipes-devtools/qemu/qemu/glibc-2.25.patch | 88 + .../qemu/qemu/target-ppc-fix-user-mode.patch | 48 + .../meta/recipes-devtools/qemu/qemu_2.7.0.bb | 28 - .../meta/recipes-devtools/qemu/qemu_2.8.0.bb | 65 + .../recipes-devtools/qemu/qemuwrapper-cross_1.0.bb | 5 +- .../meta/recipes-devtools/quilt/quilt-native.inc | 2 +- 
.../recipes-devtools/quilt/quilt-native_0.64.bb | 2 - .../recipes-devtools/quilt/quilt-native_0.65.bb | 2 + .../meta/recipes-devtools/quilt/quilt.inc | 21 +- ...0001-tests-Allow-different-output-from-mv.patch | 29 + .../recipes-devtools/quilt/quilt/install.patch | 13 - .../meta/recipes-devtools/quilt/quilt_0.64.bb | 16 - .../meta/recipes-devtools/quilt/quilt_0.65.bb | 6 + ...N_ABI-when-searching-for-python-libraries.patch | 30 + ...d-a-color-setting-for-mips64_n32-binaries.patch | 40 + ...an-unsatisfiable-dependency-when-building.patch | 33 + ...code-lib-rpm-as-the-installation-path-for.patch | 61 + .../0001-Do-not-read-config-files-from-HOME.patch | 38 + ...t-the-PATH-environment-variable-before-ru.patch | 30 + .../files/0001-Fix-build-with-musl-C-library.patch | 74 + ...installing-execute-package-scriptlets-wit.patch | 37 + ...alue-cannot-be-reset-issue-a-notice-inste.patch | 31 + .../rpm/files/0001-perl-disable-auto-reqs.patch | 32 + ...-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch | 72 + ...ire-that-ELF-binaries-are-executable-to-b.patch | 32 + ...onditional-to-access-_docdir-in-macros.in.patch | 36 + .../0013-Add-a-new-option-alldeps-to-rpmdeps.patch | 152 + ...1-Disable-__sync_add_and_fetch_8-on-nios2.patch | 30 - .../rpm/rpm/0001-define-EM_AARCH64.patch | 35 - .../0001-macros-add-_gpg_sign_cmd_extra_args.patch | 43 - .../rpm/rpm/0001-rpm-Fix-build-on-musl.patch | 294 - ...0001-system.h-query.c-support-nosignature.patch | 63 - ...arseArgvString-to-parse-the-_gpg_check_pa.patch | 49 - .../configure.ac-check-for-both-gpg2-and-gpg.patch | 29 - .../meta/recipes-devtools/rpm/rpm/dbconvert.patch | 27 - .../recipes-devtools/rpm/rpm/debugedit-segv.patch | 100 - ...debugedit-valid-file-to-fix-segment-fault.patch | 65 - .../recipes-devtools/rpm/rpm/gcc6-stdlib.patch | 54 - .../rpm/rpm/header-include-fix.patch | 36 - .../rpm/rpm/makefile-am-exec-hook.patch | 33 - .../rpm/rpm/no-ldflags-in-pkgconfig.patch | 14 - .../recipes-devtools/rpm/rpm/perfile_rpmdeps.sh | 50 - .../rpm/popt-disable-auto-stack-protector.patch | 27 - .../rpm/rpm/python-rpm-rpmsense.patch | 31 - .../meta/recipes-devtools/rpm/rpm/pythondeps.sh | 16 - .../recipes-devtools/rpm/rpm/rpm-atomic-ops.patch | 73 - .../rpm/rpm/rpm-autogen-force.patch | 78 - .../recipes-devtools/rpm/rpm/rpm-autogen.patch | 25 - .../recipes-devtools/rpm/rpm/rpm-canonarch.patch | 136 - .../rpm/rpm-check-rootpath-reasonableness.patch | 96 - .../recipes-devtools/rpm/rpm/rpm-db-reduce.patch | 19 - .../recipes-devtools/rpm/rpm/rpm-db5-or-db6.patch | 174 - .../meta/recipes-devtools/rpm/rpm/rpm-db60.patch | 56 - .../rpm/rpm/rpm-db_buffer_small.patch | 77 - .../rpm/rpm/rpm-debug-platform.patch | 65 - .../rpm/rpm/rpm-disable-auto-stack-protector.patch | 24 - .../rpm/rpm/rpm-disable-blaketest.patch | 28 - ...-ensure-rpm2cpio-call-rpm-relocation-code.patch | 25 - .../recipes-devtools/rpm/rpm/rpm-fileclass.patch | 36 - .../rpm/rpm/rpm-fix-logio-cp.patch | 30 - .../rpm-fix-lua-tests-compilation-failure.patch | 43 - .../rpm/rpm/rpm-fix-parseEmbedded.patch | 27 - .../recipes-devtools/rpm/rpm/rpm-gnu-atomic.patch | 64 - .../rpm/rpm/rpm-hardlink-segfault-fix.patch | 43 - .../rpm/rpm/rpm-keccak-sse-intrin.patch | 27 - ...ction.c-fix-file-conflicts-for-mips64-N32.patch | 52 - .../recipes-devtools/rpm/rpm/rpm-libsql-fix.patch | 22 - .../recipes-devtools/rpm/rpm/rpm-log-auto-rm.patch | 15 - .../rpm/rpm/rpm-lsb-compatibility.patch | 24 - .../meta/recipes-devtools/rpm/rpm/rpm-lua.patch | 33 - ...rpm-macros.in-disable-external-key-server.patch | 31 - 
.../meta/recipes-devtools/rpm/rpm/rpm-macros.patch | 64 - .../rpm/rpm/rpm-mongodb-sasl.patch | 69 - .../recipes-devtools/rpm/rpm/rpm-no-loopmsg.patch | 19 - .../rpm/rpm/rpm-no-perl-urpm.patch | 47 - ...b-before-verifyscript-to-avoid-null-point.patch | 24 - .../recipes-devtools/rpm/rpm/rpm-ossp-uuid.patch | 23 - .../rpm/rpm/rpm-packageorigin.patch | 25 - .../rpm/rpm/rpm-payload-use-hashed-inode.patch | 126 - .../rpm/rpm/rpm-pkgconfigdeps.patch | 37 - .../rpm/rpm/rpm-platform-file-fix.patch | 28 - .../recipes-devtools/rpm/rpm/rpm-platform.patch | 137 - .../recipes-devtools/rpm/rpm/rpm-platform2.patch | 105 - .../recipes-devtools/rpm/rpm/rpm-py-init.patch | 29 - .../rpm/rpm/rpm-python-AddErase.patch | 35 - .../rpm/rpm/rpm-python-restore-origin.patch | 49 - .../rpm/rpm/rpm-python-tagname.patch | 24 - .../recipes-devtools/rpm/rpm/rpm-realpath.patch | 24 - .../rpm/rpm/rpm-reloc-macros.patch | 31 - .../recipes-devtools/rpm/rpm/rpm-resolvedep.patch | 40 - .../rpm/rpm/rpm-rpmdb-grammar.patch | 124 - .../rpm/rpm/rpm-rpmfc.c-fix-for-N32-MIPS64.patch | 34 - .../rpm/rpm/rpm-rpmio-headers.patch | 19 - .../recipes-devtools/rpm/rpm/rpm-rpmpgp-popt.patch | 26 - .../rpm/rpm/rpm-scriptletexechelper.patch | 159 - .../meta/recipes-devtools/rpm/rpm/rpm-showrc.patch | 26 - .../rpm/rpm/rpm-syck-fix-gram.patch | 1081 - .../rpm-tag-generate-endian-conversion-fix.patch | 50 - .../rpm/rpm/rpm-tagname-type.patch | 25 - .../rpm/rpm/rpm-tools-mtree-LDFLAGS.patch | 24 - .../rpm/rpm/rpm-uuid-include.patch | 40 - .../meta/recipes-devtools/rpm/rpm/rpm2cpio | 39 - .../meta/recipes-devtools/rpm/rpm/rpmatch.patch | 34 - ...more-verbose-error-logging-in-rpmTempFile.patch | 31 - .../rpmdb-prevent-race-in-tmpdir-creation.patch | 41 - .../recipes-devtools/rpm/rpm/rpmdeps-oecore.patch | 194 - ...heck-_gpg_passphrase-before-ask-for-input.patch | 70 - .../recipes-devtools/rpm/rpm/rpmqv_cc_b_gone.patch | 50 - .../recipes-devtools/rpm/rpm/uclibc-support.patch | 63 - .../meta/recipes-devtools/rpm/rpm_5.4.16.bb | 709 - .../meta/recipes-devtools/rpm/rpm_git.bb | 121 + .../recipes-devtools/rpm/rpmresolve/rpmresolve.c | 426 - .../meta/recipes-devtools/rpm/rpmresolve_1.0.bb | 29 - .../rsync/rsync-2.6.9/rsync-2.6.9-fname-obo.patch | 71 - .../recipes-devtools/rsync/rsync-2.6.9/rsyncd.conf | 15 - .../meta/recipes-devtools/rsync/rsync.inc | 2 +- .../meta/recipes-devtools/rsync/rsync_2.6.9.bb | 15 - .../meta/recipes-devtools/rsync/rsync_3.1.2.bb | 2 +- .../yocto-poky/meta/recipes-devtools/ruby/ruby.inc | 5 +- .../ruby/ruby/CVE-2017-14064.patch | 353 + .../ruby/ruby/ruby-CVE-2017-9224.patch | 41 + .../ruby/ruby/ruby-CVE-2017-9226.patch | 41 + .../ruby/ruby/ruby-CVE-2017-9227.patch | 32 + .../ruby/ruby/ruby-CVE-2017-9228.patch | 34 + .../ruby/ruby/ruby-CVE-2017-9229.patch | 59 + .../meta/recipes-devtools/ruby/ruby_2.2.5.bb | 42 - .../meta/recipes-devtools/ruby/ruby_2.4.0.bb | 53 + .../run-postinsts/run-postinsts/run-postinsts | 13 +- .../run-postinsts/run-postinsts_1.0.bb | 3 +- .../sgml-common/sgml-common-0.6.3/autohell.patch | 61 - .../sgml-common/sgml-common-0.6.3/license.patch | 29 - .../sgml-common/sgml-common-native_0.6.3.bb | 70 - .../sgml-common/sgml-common_0.6.3.bb | 68 - .../recipes-devtools/sgmlspl/sgmlspl-native_git.bb | 21 - .../0001-caps-abbrev.awk-fix-gawk-s-path.patch | 49 + .../strace/strace/Makefile-ptest.patch | 13 +- .../strace/strace/disable-git-version-gen.patch | 18 +- .../strace/strace/use-asm-sgidefs.h.patch | 51 - .../meta/recipes-devtools/strace/strace_4.13.bb | 49 - .../meta/recipes-devtools/strace/strace_4.16.bb | 49 + 
.../0001-fix-svnadmin-create-fail-on-x86.patch | 56 - .../subversion/subversion_1.9.4.bb | 56 - .../subversion/subversion_1.9.5.bb | 55 + .../yocto-poky/meta/recipes-devtools/swig/swig.inc | 4 +- .../meta/recipes-devtools/swig/swig_3.0.12.bb | 8 + .../meta/recipes-devtools/swig/swig_3.0.8.bb | 8 - .../syslinux/syslinux-6.03-sysmacros.patch | 45 + .../recipes-devtools/syslinux/syslinux_6.03.bb | 1 + .../systemd-bootchart/systemd-bootchart_230.bb | 29 - .../systemd-bootchart/systemd-bootchart_231.bb | 29 + .../tcf-agent/tcf-agent/tcf-agent.init | 12 +- .../tcf-agent/tcf-agent/tcf-agent.service | 2 + .../meta/recipes-devtools/tcltk/tcl_8.6.6.bb | 6 +- .../recipes-devtools/unfs3/unfs3_0.9.22.r497.bb | 9 +- .../meta/recipes-devtools/unifdef/unifdef_2.11.bb | 2 + .../yocto-poky/meta/recipes-devtools/vala/vala.inc | 4 +- .../meta/recipes-devtools/vala/vala_0.32.1.bb | 8 - .../meta/recipes-devtools/vala/vala_0.34.4.bb | 8 + ...-vg_test-wrapper-to-support-PTEST-formats.patch | 4 +- .../valgrind/valgrind/fixed-perl-path.patch | 33 +- .../recipes-devtools/valgrind/valgrind_3.12.0.bb | 13 +- .../meta/recipes-devtools/xmlto/xmlto_0.0.28.bb | 2 +- .../meta/recipes-devtools/yasm/yasm_1.3.0.bb | 2 + .../recipes-extended/acpica/acpica_20150515.bb | 49 + .../acpica/acpitests/aapits-linux.patch | 336 + .../acpica/acpitests/aapits-makefile.patch | 34 + .../recipes-extended/acpica/acpitests_20140828.bb | 36 + .../files/manipulate-fds-instead-of-FILE.patch | 71 + .../recipes-extended/acpica/files/no-werror.patch | 32 + .../files/rename-yy_scan_string-manually.patch | 64 + .../meta/recipes-extended/at/at_3.1.20.bb | 2 +- .../bash/bash-3.2.57/build-tests.patch | 44 - ...ont-include-target-CFLAGS-in-host-LDFLAGS.patch | 33 - .../bash-3.2.57/mkbuiltins_have_stringize.patch | 29 - .../recipes-extended/bash/bash-3.2.57/run-ptest | 2 - .../bash/bash-3.2.57/string-format.patch | 21 - .../bash/bash-3.2.57/test-output.patch | 25 - .../yocto-poky/meta/recipes-extended/bash/bash.inc | 10 +- ...-memleak-bug-fix-for-builtin-command-read.patch | 35 + .../meta/recipes-extended/bash/bash_3.2.57.bb | 18 - .../meta/recipes-extended/bash/bash_4.3.30.bb | 1 + .../meta/recipes-extended/byacc/byacc.inc | 3 +- .../0001-byacc-do-not-reorder-CC-and-CFLAGS.patch | 161 + .../meta/recipes-extended/byacc/byacc_20160606.bb | 12 - .../meta/recipes-extended/byacc/byacc_20161202.bb | 12 + .../meta/recipes-extended/bzip2/bzip2_1.0.6.bb | 2 +- .../chkconfig-alternatives-native_1.3.59.bb | 2 +- .../cpio/cpio-2.8/avoid_heap_overflow.patch | 26 - .../cpio/cpio-2.8/fix-memory-overrun.patch | 217 - .../cpio/cpio-2.8/m4extensions.patch | 31 - .../recipes-extended/cpio/cpio-2.8/statdef.patch | 15 - .../meta/recipes-extended/cpio/cpio_2.12.bb | 2 +- .../meta/recipes-extended/cpio/cpio_2.8.bb | 19 - .../meta/recipes-extended/cronie/cronie_1.5.1.bb | 2 +- .../yocto-poky/meta/recipes-extended/cups/cups.inc | 9 +- .../meta/recipes-extended/cups/cups_2.1.4.bb | 6 - .../meta/recipes-extended/cups/cups_2.2.2.bb | 6 + .../diffutils_fix_for_automake-1.12.patch | 27 - .../diffutils/diffutils-2.8.1/fix_gcc6.patch | 31 - ...need_charset_alias-when-building-for-musl.patch | 33 - .../diffutils/diffutils-3.4/run-ptest | 3 - ...need_charset_alias-when-building-for-musl.patch | 33 + .../diffutils/diffutils-3.5/run-ptest | 3 + .../recipes-extended/diffutils/diffutils_2.8.1.bb | 17 - .../recipes-extended/diffutils/diffutils_3.4.bb | 40 - .../recipes-extended/diffutils/diffutils_3.5.bb | 40 + .../yocto-poky/meta/recipes-extended/ed/ed_0.5.bb | 20 - 
.../meta/recipes-extended/ed/ed_1.14.1.bb | 35 + .../yocto-poky/meta/recipes-extended/ed/ed_1.9.bb | 33 - .../meta/recipes-extended/ethtool/ethtool_4.6.bb | 30 - .../meta/recipes-extended/ethtool/ethtool_4.8.bb | 30 + .../findutils-fix-doc-build-error.patch | 53 - .../findutils_fix_for_automake-1.12.patch | 23 - .../findutils-4.2.31/gnulib-extension.patch | 56 - .../recipes-extended/findutils/findutils_4.2.31.bb | 14 - ...0001-gawk-fix-non-glibc-gcc-4-compilation.patch | 67 - .../gawk-3.1.5_fix_for_automake-1.12.patch | 41 - .../recipes-extended/gawk/gawk-4.1.3/run-ptest | 10 - .../recipes-extended/gawk/gawk-4.1.4/run-ptest | 10 + .../test-arrayind1-Remove-hashbang-line.patch | 30 + .../meta/recipes-extended/gawk/gawk_3.1.5.bb | 44 - .../meta/recipes-extended/gawk/gawk_4.1.3.bb | 46 - .../meta/recipes-extended/gawk/gawk_4.1.4.bb | 49 + .../ghostscript/ghostscript/CVE-2016-7977.patch | 33 + .../ghostscript/ghostscript/CVE-2016-7978.patch | 30 + .../ghostscript/ghostscript/CVE-2016-7979.patch | 48 + .../ghostscript/ghostscript/CVE-2016-8602.patch | 47 + .../ghostscript/ghostscript/CVE-2017-11714.patch | 61 + .../ghostscript/ghostscript/CVE-2017-7975.patch | 36 + .../ghostscript/ghostscript/CVE-2017-9216.patch | 36 + .../ghostscript/ghostscript/CVE-2017-9611.patch | 34 + .../ghostscript/ghostscript/CVE-2017-9612.patch | 35 + .../ghostscript/ghostscript/CVE-2017-9726.patch | 33 + .../ghostscript/ghostscript/CVE-2017-9727.patch | 35 + .../ghostscript/ghostscript/CVE-2017-9739.patch | 37 + .../ghostscript/ghostscript/CVE-2017-9835.patch | 125 + .../ghostscript-9.15-parallel-make.patch | 35 +- .../ghostscript/ghostscript/mips/objarch.h | 40 - .../ghostscript/ghostscript/mips64/objarch.h | 40 - .../ghostscript/ghostscript/mips64el/objarch.h | 40 - .../ghostscript/ghostscript/mips64eln32/objarch.h | 40 - .../ghostscript/ghostscript/mips64n32/objarch.h | 40 - .../ghostscript/mipsarchn32eb/objarch.h | 40 + .../ghostscript/mipsarchn32el/objarch.h | 40 + .../ghostscript/mipsarchn64eb/objarch.h | 40 + .../ghostscript/mipsarchn64el/objarch.h | 40 + .../ghostscript/mipsarcho32eb/objarch.h | 40 + .../ghostscript/mipsarcho32el/objarch.h | 40 + .../ghostscript/ghostscript/mipsel/objarch.h | 40 - .../ghostscript/ghostscript_9.19.bb | 110 - .../ghostscript/ghostscript_9.20.bb | 123 + .../go-examples/files/helloworld.go | 10 + .../recipes-extended/go-examples/go-examples.inc | 10 + .../go-examples/go-helloworld_0.1.bb | 13 + .../meta/recipes-extended/gperf/gperf_3.0.3.bb | 10 - .../recipes-extended/grep/grep-2.5.1a/Makevars | 25 - .../grep/grep-2.5.1a/fix-for-texinfo-5.1.patch | 17 - .../grep/grep-2.5.1a/fix64-int-to-pointer.patch | 17 - .../grep/grep-2.5.1a/gettext.patch | 15 - .../grep/grep-2.5.1a/grep-CVE-2012-5667.patch | 33 - .../grep-egrep-fgrep-Fix-LSB-NG-cases.patch | 1342 -- .../grep-2.5.1a/grep_fix_for_automake-1.12.patch | 52 - .../grep/grep-2.5.1a/uclibc-fix.patch | 55 - .../meta/recipes-extended/grep/grep_2.25.bb | 43 - .../meta/recipes-extended/grep/grep_2.5.1a.bb | 56 - .../meta/recipes-extended/grep/grep_3.0.bb | 46 + .../fix-narrowing-conversion-error.patch | 61 - .../groff-1.18.1.4/groff-1.18.1.4-fix-bindir.patch | 39 - .../groff-1.18.1.4/groff-1.18.1.4-remove-mom.patch | 39 - .../groff/groff-1.18.1.4/man-local.patch | 36 - .../groff/groff-1.18.1.4/mdoc-local.patch | 36 - .../0001-replace-perl-w-with-use-warnings.patch | 102 + .../meta/recipes-extended/groff/groff_1.18.1.4.bb | 50 - .../meta/recipes-extended/groff/groff_1.22.3.bb | 3 + .../gzip/gzip-1.3.12/dup-def-fix.patch | 48 - 
.../gzip/gzip-1.3.12/m4-extensions-fix.patch | 56 - .../meta/recipes-extended/gzip/gzip_1.3.12.bb | 15 - .../meta/recipes-extended/gzip/gzip_1.8.bb | 1 - .../meta/recipes-extended/hdparm/hdparm_9.48.bb | 40 - .../meta/recipes-extended/hdparm/hdparm_9.51.bb | 40 + .../recipes-extended/images/core-image-lsb-sdk.bb | 3 + ...Add-option-to-enable-disable-libnfnetlink.patch | 28 +- ...check-conntrack-when-libnfnetlink-enabled.patch | 53 +- .../recipes-extended/iptables/iptables_1.6.0.bb | 50 - .../recipes-extended/iptables/iptables_1.6.1.bb | 49 + .../recipes-extended/iputils/iputils_s20151218.bb | 11 +- .../meta/recipes-extended/less/less_481.bb | 42 - .../meta/recipes-extended/less/less_487.bb | 42 + ...te_disk_posix.c-make-_fsobj-functions-mor.patch | 245 + ...02-Fix-extracting-hardlinks-over-symlinks.patch | 120 + .../files/non-recursive-extract-and-list.patch | 153 + .../libarchive/libarchive_3.2.2.bb | 15 +- .../meta/recipes-extended/libidn/libidn_0.6.14.bb | 36 - ...1-Add-fallback-fopencookie-implementation.patch | 58 +- ...es-to-internal-fopencookie-implementation.patch | 104 + .../recipes-extended/libsolv/libsolv_0.6.23.bb | 29 - .../recipes-extended/libsolv/libsolv_0.6.26.bb | 31 + .../meta/recipes-extended/libuser/libuser_0.62.bb | 2 +- .../recipes-extended/lighttpd/lighttpd_1.4.41.bb | 79 - .../recipes-extended/lighttpd/lighttpd_1.4.45.bb | 85 + .../recipes-extended/logrotate/logrotate_3.9.1.bb | 5 +- ...elease-to-work-with-busybox-head-and-find.patch | 38 + .../meta/recipes-extended/lsb/lsb_4.1.bb | 74 +- .../meta/recipes-extended/lsb/lsbtest/LSB_Test.sh | 4 +- .../0001-add-_GNU_SOURCE-to-pec_listener.c.patch | 39 + .../ltp/0001-ltp-Don-t-link-against-libfl.patch | 30 - ...etwork-nfsv4-acl-acl1.c-Security-fix-on-s.patch | 41 - ...TH_MAX-undeclared-when-building-with-musl.patch | 30 + ...sendfile-Use-off64_t-instead-of-__off64_t.patch | 31 - ...RDSIZE-undeclared-when-building-with-musl.patch | 30 + .../ltp/ltp/0007-replace-SIGCLD-with-SIGCHLD.patch | 394 - ...__GLIBC_PREREQ-is-defined-before-using-it.patch | 7 +- .../ltp/0009-Guard-error.h-with-__GLIBC__.patch | 270 - ...ition-of-struct-msgbuf-error-building-wit.patch | 35 + .../0011-Rename-sigset-variable-to-sigset1.patch | 77 +- ...essat01.c-build-fails-with-security-flags.patch | 70 + ...fsstress.c-Replace-__int64_t-with-int64_t.patch | 351 - ...nclude-fcntl.h-for-getting-O_-definitions.patch | 67 - ...ing-Include-sys-types.h-for-pid_t-definit.patch | 56 - ...015-mincore01-Rename-PAGESIZE-to-pagesize.patch | 64 - ...Change-header-from-ustat.h-to-sys-ustat.h.patch | 45 - .../0017-replace-sigval_t-with-union-sigval.patch | 88 - ...lace-canonicalize_file_name-with-realpath.patch | 32 - .../ltp/ltp/0022-include-sys-types.h.patch | 29 - ...sconf01-Use-_SC_2_C_VERSION-conditionally.patch | 23 +- ...n.h-Use-sighandler_t-instead-of-__sighand.patch | 31 +- .../ltp/0029-trace_shed-Fix-build-with-musl.patch | 32 - ...READ_MUTEX_RECURSIVE-in-place-of-PTHREAD_.patch | 26 +- ...age-size-offset-as-per-page-size-alignmen.patch | 33 - ...-regen.sh-Include-asm-unistd.h-explicitly.patch | 30 - .../ltp/ltp/0035-fix-test_proc_kill-hang.patch | 27 +- ...etwork-nfsv4-acl-acl1.c-Security-fix-on-s.patch | 41 + ...0039-fcntl-fix-the-time-def-to-use-time_t.patch | 29 + .../meta/recipes-extended/ltp/ltp_20160126.bb | 118 - .../meta/recipes-extended/ltp/ltp_20170116.bb | 114 + .../meta/recipes-extended/lzip/lzip_1.18.bb | 41 + .../recipes-extended/man-pages/man-pages_4.07.bb | 31 - .../recipes-extended/man-pages/man-pages_4.09.bb | 31 + 
.../meta/recipes-extended/man/man_1.6g.bb | 4 +- .../0001-mc-replace-perl-w-with-use-warnings.patch | 129 + .../meta/recipes-extended/mc/mc/mc-CTRL.patch | 31 - .../meta/recipes-extended/mc/mc_4.7.5.2.bb | 47 - .../meta/recipes-extended/mc/mc_4.8.17.bb | 49 - .../meta/recipes-extended/mc/mc_4.8.18.bb | 52 + .../0001-Fix-some-type-comparison-problems.patch | 50 - ...th-of-corosync-and-dlm-header-files-check.patch | 13 +- .../mdadm/files/0001-Fix-typo-in-comparision.patch | 86 - ...ys-sysmacros.h-for-major-minor-defintions.patch | 48 + ...dadm.h-bswap-is-already-defined-in-uclibc.patch | 55 - .../0001-raid6check-Fix-if-else-indentation.patch | 37 - ...il.c-include-poll.h-instead-of-sys-poll.h.patch | 45 - .../mdadm/files/mdadm-3.2.2_fix_for_x32.patch | 23 - .../meta/recipes-extended/mdadm/mdadm_3.4.bb | 75 - .../meta/recipes-extended/mdadm/mdadm_4.0.bb | 72 + .../meta/recipes-extended/mktemp/mktemp_1.7.bb | 8 +- .../meta/recipes-extended/msmtp/msmtp_1.6.5.bb | 30 - .../meta/recipes-extended/msmtp/msmtp_1.6.6.bb | 30 + .../net-tools/net-tools_1.60-26.bb | 27 +- .../packagegroup-core-full-cmdline.bb | 2 +- .../packagegroups/packagegroup-core-lsb.bb | 9 +- .../meta/recipes-extended/pam/libpam_1.3.0.bb | 16 +- ...s_resize-link-against-libuuid-explicitly-.patch | 34 + .../parted/parted/parted-3.2-sysmacros.patch | 32 + .../meta/recipes-extended/parted/parted_3.2.bb | 2 + ...s-sysmacros.h-for-major-minor-definitions.patch | 65 + .../meta/recipes-extended/pax/pax_3.4.bb | 4 +- .../yocto-poky/meta/recipes-extended/pigz/pigz.inc | 2 - .../recipes-extended/pigz/pigz/link-order.patch | 38 - .../meta/recipes-extended/pigz/pigz_2.3.3.bb | 14 - .../meta/recipes-extended/pigz/pigz_2.3.4.bb | 11 + .../meta/recipes-extended/pixz/pixz_1.0.6.bb | 5 +- .../meta/recipes-extended/procps/procps_3.3.12.bb | 9 +- .../meta/recipes-extended/psmisc/psmisc.inc | 2 +- .../meta/recipes-extended/quota/quota_4.03.bb | 2 + ...001-Avoid-use-of-glibc-sys-cdefs.h-header.patch | 221 - ...ly-Don-t-use-the-xp_auth-pointer-directly.patch | 43 - ...r-all-svc_getargs-calls-with-svc_freeargs.patch | 221 + .../rpcbind/rpcbind/cve-2015-7236.patch | 84 - .../meta/recipes-extended/rpcbind/rpcbind_0.2.3.bb | 73 - .../meta/recipes-extended/rpcbind/rpcbind_0.2.4.bb | 71 + .../meta/recipes-extended/screen/screen_4.4.0.bb | 52 - .../meta/recipes-extended/screen/screen_4.5.1.bb | 52 + .../meta/recipes-extended/sed/sed-4.1.2/Makevars | 25 - .../sed/sed-4.1.2/fix_return_type.patch | 18 - .../sed-4.1.2_fix_for_automake-1.12.patch | 37 - .../meta/recipes-extended/sed/sed_4.1.2.bb | 35 - ...-useradd-copy-extended-attributes-of-home.patch | 47 + ...-create-parent-directories-when-necessary.patch | 10 +- .../allow-for-setting-password-in-clear-text.patch | 8 +- .../meta/recipes-extended/shadow/shadow.inc | 11 +- ...x-error-conflicting-types-for-posix_close.patch | 39 - .../slang/slang/dont-link-to-host.patch | 16 + .../slang/slang/fix-check-pcre.patch | 116 - .../meta/recipes-extended/slang/slang/no-x.patch | 4 + .../recipes-extended/slang/slang/rpathfix.patch | 82 - .../slang-fix-the-iconv-existence-checking.patch | 125 - .../meta/recipes-extended/slang/slang_2.3.0.bb | 58 - .../meta/recipes-extended/slang/slang_2.3.1a.bb | 57 + .../yocto-poky/meta/recipes-extended/sudo/sudo.inc | 2 +- .../meta/recipes-extended/sudo/sudo_1.8.17p1.bb | 36 - .../meta/recipes-extended/sudo/sudo_1.8.19p2.bb | 36 + .../recipes-extended/sysklogd/files/klogd.service | 13 + .../sysklogd/files/syslogd.service | 14 + .../sysklogd/files/tmpfiles.sysklogd.conf | 1 + 
.../meta/recipes-extended/sysklogd/sysklogd.inc | 24 +- .../recipes-extended/sysstat/sysstat_11.4.0.bb | 8 - .../recipes-extended/sysstat/sysstat_11.5.4.bb | 8 + .../tar/tar-1.17/avoid_heap_overflow.patch | 23 - .../recipes-extended/tar/tar-1.17/gcc43build.patch | 37 - .../tar/tar-1.17/m4extensions.patch | 30 - .../meta/recipes-extended/tar/tar_1.17.bb | 14 - .../recipes-extended/texi2html/texi2html_5.0.bb | 2 + .../texinfo/texinfo-4.8/check-locale-h.patch | 28 - .../texinfo-4.8/do-compile-native-tools.patch | 49 - .../texinfo-4.8/using-native-makeinfo.patch | 24 - .../texinfo/texinfo/dont-depend-on-help2man.patch | 19 +- .../texinfo/texinfo/texinfo-4.12-zlib.patch | 65 +- .../meta/recipes-extended/texinfo/texinfo_4.8.bb | 55 - .../meta/recipes-extended/texinfo/texinfo_6.0.bb | 82 - .../meta/recipes-extended/texinfo/texinfo_6.3.bb | 82 + .../yocto-poky/meta/recipes-extended/time/time.inc | 2 + .../recipes-extended/tzcode/tzcode-native_2017a.bb | 28 - .../recipes-extended/tzcode/tzcode-native_2017b.bb | 28 + .../meta/recipes-extended/tzdata/tzdata_2017a.bb | 213 - .../meta/recipes-extended/tzdata/tzdata_2017b.bb | 215 + .../18-cve-2014-9913-unzip-buffer-overflow.patch | 33 + .../19-cve-2016-9844-zipinfo-buffer-overflow.patch | 32 + .../meta/recipes-extended/unzip/unzip_6.0.bb | 4 +- .../recipes-extended/watchdog/watchdog_5.15.bb | 9 +- .../yocto-poky/meta/recipes-extended/wget/wget.inc | 6 +- .../recipes-extended/wget/wget/CVE-2017-6508.patch | 44 + .../meta/recipes-extended/wget/wget_1.18.bb | 8 - .../meta/recipes-extended/wget/wget_1.19.1.bb | 9 + .../which/which-2.18/automake-foreign.patch | 28 - .../fix_name_conflict_group_member.patch | 53 - .../meta/recipes-extended/which/which_2.18.bb | 34 - .../meta/recipes-extended/which/which_2.21.bb | 3 + .../meta/recipes-extended/xz/xz_5.2.2.bb | 36 - .../meta/recipes-extended/xz/xz_5.2.3.bb | 36 + .../meta/recipes-gnome/epiphany/epiphany_3.20.3.bb | 25 - .../meta/recipes-gnome/epiphany/epiphany_3.22.6.bb | 23 + .../gcr-add-missing-dependencies-for-vapi.patch | 51 + .../meta/recipes-gnome/gcr/gcr_3.20.0.bb | 6 +- ...-around-thumbnailer-cross-compile-failure.patch | 47 + .../recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.34.0.bb | 102 - .../recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.36.5.bb | 99 + ...op-thumbnail-don-t-convert-time_t-to-long.patch | 72 + .../gnome-desktop/gnome-desktop3_3.20.2.bb | 23 - .../gnome-desktop/gnome-desktop3_3.22.2.bb | 25 + .../recipes-gnome/gnome/adwaita-icon-theme_3.20.bb | 41 - .../gnome/adwaita-icon-theme_3.22.0.bb | 42 + .../gnome/gnome-themes-standard_3.20.2.bb | 39 - .../gnome/gnome-themes-standard_3.22.2.bb | 45 + .../gobject-introspection_1.48.0.bb | 173 - .../gobject-introspection_1.50.0.bb | 178 + .../gsettings-desktop-schemas_3.20.0.bb | 13 - .../gsettings-desktop-schemas_3.22.0.bb | 13 + .../yocto-poky/meta/recipes-gnome/gtk+/gtk+.inc | 9 +- .../yocto-poky/meta/recipes-gnome/gtk+/gtk+3.inc | 14 +- .../gtk+/gtk+3/0001-Hardcoded-libtool.patch | 12 +- ...Redo-focus-handling-in-treeview-once-more.patch | 39 - ...Do-not-try-to-initialize-GL-without-libGL.patch | 8 +- .../0003-Add-disable-opengl-configure-option.patch | 225 +- ...4-configure.ac-Fix-wayland-protocols-path.patch | 8 +- .../meta/recipes-gnome/gtk+/gtk+3_3.20.9.bb | 20 - .../meta/recipes-gnome/gtk+/gtk+3_3.22.8.bb | 19 + .../meta/recipes-gnome/gtk+/gtk+_2.24.30.bb | 34 - .../meta/recipes-gnome/gtk+/gtk+_2.24.31.bb | 34 + .../gtk+/gtk-icon-utils-native_3.20.9.bb | 60 - .../gtk+/gtk-icon-utils-native_3.22.8.bb | 60 + .../meta/recipes-gnome/gtk-doc/gtk-doc_1.25.bb | 2 +- 
.../recipes-gnome/json-glib/json-glib_1.2.2.bb | 6 +- .../meta/recipes-gnome/libgudev/libgudev_230.bb | 16 - .../meta/recipes-gnome/libgudev/libgudev_231.bb | 18 + .../recipes-gnome/libnotify/libnotify_0.7.6.bb | 20 - .../recipes-gnome/libnotify/libnotify_0.7.7.bb | 20 + .../recipes-gnome/libsecret/libsecret_0.18.5.bb | 9 +- .../meta/recipes-gnome/libwnck/libwnck3_3.20.1.bb | 19 - .../meta/recipes-graphics/cairo/cairo-fpu.inc | 2 +- .../meta/recipes-graphics/cairo/cairo.inc | 3 +- .../meta/recipes-graphics/cairo/cairo_1.14.6.bb | 43 - .../meta/recipes-graphics/cairo/cairo_1.14.8.bb | 43 + .../cantarell-fonts/cantarell-fonts_git.bb | 2 +- .../meta/recipes-graphics/clutter/clutter-1.0.inc | 2 +- .../clutter/clutter-gst-3.0_3.0.18.bb | 7 - .../clutter/clutter-gst-3.0_3.0.22.bb | 7 + .../clutter/clutter-gtk-1.0_1.8.0.bb | 6 - .../clutter/clutter-gtk-1.0_1.8.2.bb | 6 + ...Fix-an-incorrect-preprocessor-conditional.patch | 32 + .../meta/recipes-graphics/cogl/cogl-1.0_1.22.0.bb | 7 - .../meta/recipes-graphics/cogl/cogl-1.0_1.22.2.bb | 8 + .../recipes-graphics/drm/libdrm/installtests.patch | 28 +- .../meta/recipes-graphics/drm/libdrm_2.4.70.bb | 47 - .../meta/recipes-graphics/drm/libdrm_2.4.75.bb | 51 + .../recipes-graphics/eglinfo/eglinfo-fb_1.0.0.bb | 2 +- .../recipes-graphics/eglinfo/eglinfo-x11_1.0.0.bb | 5 +- .../meta/recipes-graphics/eglinfo/eglinfo.inc | 2 + ...icts-with-integer-width-macros-from-TS-18.patch | 72 + .../fontconfig/fontconfig_2.12.1.bb | 3 +- .../recipes-graphics/freetype/freetype_2.6.5.bb | 49 - .../recipes-graphics/freetype/freetype_2.7.1.bb | 49 + .../meta/recipes-graphics/glew/glew_2.0.0.bb | 19 +- .../recipes-graphics/harfbuzz/harfbuzz_1.3.0.bb | 39 - .../recipes-graphics/harfbuzz/harfbuzz_1.4.1.bb | 39 + .../jpeg/libjpeg-turbo/fix-mips.patch | 45 + .../recipes-graphics/jpeg/libjpeg-turbo_1.5.0.bb | 46 - .../recipes-graphics/jpeg/libjpeg-turbo_1.5.1.bb | 51 + ...-platforms-based-on-configuration-results.patch | 128 - ...0002-add-an-option-to-disable-glx-support.patch | 42 - .../libepoxy/libepoxy/no-need-for-python3.patch | 20 - .../recipes-graphics/libepoxy/libepoxy_1.4.0.bb | 20 + .../meta/recipes-graphics/libepoxy/libepoxy_git.bb | 27 - .../meta/recipes-graphics/libsdl/libsdl_1.2.15.bb | 9 +- ...-sysroot-path-so-that-make-finds-our-wayl.patch | 34 + ...ideo-make-it-compatible-with-wayland-1.10.patch | 57 - ...void-finding-build-host-s-wayland-scanner.patch | 31 + .../libsdl2/libsdl2/fix-build-failure-on-ppc.patch | 50 + .../meta/recipes-graphics/libsdl2/libsdl2_2.0.4.bb | 68 - .../meta/recipes-graphics/libsdl2/libsdl2_2.0.5.bb | 69 + ...-configure.ac-Use-wayland-scanner-in-PATH.patch | 37 + ...and-Don-t-commit-and-ship-generated-files.patch | 339 + .../meta/recipes-graphics/libva/libva_1.7.3.bb | 53 + .../matchbox-wm/matchbox-wm_1.2.1.bb | 39 - .../matchbox-wm/matchbox-wm_1.2.2.bb | 39 + .../menu-cache/menu-cache_1.0.1.bb | 18 - .../menu-cache/menu-cache_1.0.2.bb | 18 + .../0001-Use-wayland-scanner-in-the-path.patch | 37 + .../mesa/files/clang-compile-PR89599.patch | 116 - .../files/replace_glibc_check_with_linux.patch | 25 +- .../meta/recipes-graphics/mesa/libglu_9.0.0.bb | 2 +- .../meta/recipes-graphics/mesa/mesa-demos_8.3.0.bb | 4 +- .../meta/recipes-graphics/mesa/mesa-gl_12.0.1.bb | 13 - .../meta/recipes-graphics/mesa/mesa-gl_17.0.2.bb | 13 + .../yocto-poky/meta/recipes-graphics/mesa/mesa.inc | 41 +- .../meta/recipes-graphics/mesa/mesa_12.0.1.bb | 18 - .../meta/recipes-graphics/mesa/mesa_17.0.2.bb | 18 + .../packagegroups/packagegroup-core-x11-xserver.bb | 2 +- 
.../packagegroups/packagegroup-core-x11.bb | 9 +- .../meta/recipes-graphics/pango/pango_1.40.1.bb | 52 - .../meta/recipes-graphics/pango/pango_1.40.3.bb | 52 + ...fine-GBM_BO_MAP-only-when-symbol-is-found.patch | 51 + .../0001-cmake-Link-test-utils-with-ldl.patch | 41 - ...0001-cmake-Link-utils-with-xcb-explicitly.patch | 54 - ...nour-Surfaceless-MESA-in-get_default_disp.patch | 54 + ...atform_surfaceless-Don-t-use-eglGetPlatfo.patch | 36 + ...atform_surfaceless-Use-EXT-functions-for-.patch | 78 + .../meta/recipes-graphics/piglit/piglit_git.bb | 17 +- .../recipes-graphics/pong-clock/pong-clock_1.0.bb | 2 +- ...se-getenv-if-secure_getenv-does-not-exist.patch | 34 + .../vulkan/demos-Don-t-build-tri-or-cube.patch | 106 + .../recipes-graphics/vulkan/vulkan_1.0.39.1.bb | 34 + .../meta/recipes-graphics/waffle/waffle_1.5.2.bb | 11 +- .../recipes-graphics/wayland/libinput_1.4.1.bb | 25 - .../recipes-graphics/wayland/libinput_1.6.1.bb | 25 + ...0001-scanner-Use-unit32_t-instead-of-uint.patch | 30 - .../recipes-graphics/wayland/wayland_1.11.0.bb | 43 - .../recipes-graphics/wayland/wayland_1.13.0.bb | 41 + .../meta/recipes-graphics/wayland/weston-init.bb | 2 +- ...1-configure.ac-Fix-wayland-protocols-path.patch | 11 +- .../wayland/weston/0001-make-error-portable.patch | 30 +- .../0001-shared-include-stdint.h-for-int32_t.patch | 28 - ...ch-Provide-a-default-version-that-doesn-t.patch | 45 +- ...eston-launch-exit-for-unrecognized-option.patch | 33 - ...on-1.11-config-option-for-no-input-device.patch | 123 - ...t-pitch-correctly-for-subsampled-textures.patch | 55 + .../meta/recipes-graphics/wayland/weston_1.11.0.bb | 118 - .../meta/recipes-graphics/wayland/weston_2.0.0.bb | 115 + .../x11-common/x11-common/etc/X11/Xdefaults | 3 - .../x11-common/x11-common/etc/X11/Xsession | 38 - .../x11-common/etc/X11/Xsession.d/12keymap.sh | 4 - .../x11-common/etc/X11/Xsession.d/13xdgbasedirs.sh | 13 - .../etc/X11/Xsession.d/89xdgautostart.sh | 7 - .../etc/X11/Xsession.d/90XWindowManager.sh | 7 - .../x11-common/x11-common/etc/X11/default.xmodmap | 260 - .../x11-common/x11-common/gplv2-license.patch | 355 - .../recipes-graphics/x11-common/x11-common_0.1.bb | 22 - .../x11-common/xserver-nodm-init.bb | 61 - .../x11-common/xserver-nodm-init/X11/Xsession | 38 + .../X11/Xsession.d/13xdgbasedirs.sh | 13 + .../X11/Xsession.d/89xdgautostart.sh | 7 + .../X11/Xsession.d/90XWindowManager.sh | 7 + .../xserver-nodm-init/xserver-nodm.conf.in | 2 +- .../x11-common/xserver-nodm-init_3.0.bb | 69 + .../add-geometry-input-when-calibrating.patch | 34 - .../xinput-calibrator/xinput-calibrator_git.bb | 9 +- .../recipes-graphics/xorg-app/mkfontdir_1.0.7.bb | 3 - .../recipes-graphics/xorg-app/mkfontscale_1.1.2.bb | 2 - .../meta/recipes-graphics/xorg-app/xauth_1.0.10.bb | 15 + .../meta/recipes-graphics/xorg-app/xauth_1.0.9.bb | 15 - .../meta/recipes-graphics/xorg-app/xhost_1.0.7.bb | 2 +- .../xorg-driver/xf86-input-evdev_2.10.3.bb | 20 - .../xorg-driver/xf86-input-evdev_2.10.5.bb | 21 + .../xorg-driver/xf86-input-keyboard_1.8.1.bb | 13 - .../xorg-driver/xf86-input-keyboard_1.9.0.bb | 13 + .../xorg-driver/xf86-input-keyboard_git.bb | 16 - .../xorg-driver/xf86-input-libinput_0.19.0.bb | 11 - .../xorg-driver/xf86-input-libinput_0.24.0.bb | 11 + .../xorg-driver/xf86-input-mouse/unbreak.patch | 21 - .../xorg-driver/xf86-input-mouse_1.9.1.bb | 15 - .../xorg-driver/xf86-input-mouse_1.9.2.bb | 14 + .../xorg-driver/xf86-input-mouse_git.bb | 18 - .../xorg-driver/xf86-input-synaptics_1.8.3.bb | 16 - .../xorg-driver/xf86-input-synaptics_1.9.0.bb | 16 + 
.../xorg-driver/xf86-input-synaptics_git.bb | 18 - .../xorg-driver/xf86-video-intel_git.bb | 3 +- .../xorg-driver/xf86-video-omap_0.4.4.bb | 41 - .../xorg-driver/xf86-video-omap_0.4.5.bb | 41 + .../0002-add-option-for-vmwgfx.patch | 37 +- .../xorg-driver/xf86-video-vmware_13.1.0.bb | 19 - .../xorg-driver/xf86-video-vmware_13.2.1.bb | 19 + .../xorg-driver/xorg-driver-common.inc | 4 +- .../xorg-driver/xorg-driver-input.inc | 2 +- .../xorg-driver/xorg-driver-video.inc | 2 +- .../xorg-font/xorg-font-common.inc | 4 +- .../meta/recipes-graphics/xorg-lib/libice_1.0.9.bb | 2 +- .../meta/recipes-graphics/xorg-lib/libsm_1.2.2.bb | 2 +- .../recipes-graphics/xorg-lib/libx11-diet_1.6.3.bb | 19 - .../recipes-graphics/xorg-lib/libx11-diet_1.6.4.bb | 19 + .../xorg-lib/libx11/CVE-2016-7942.patch | 69 - .../xorg-lib/libx11/CVE-2016-7943.patch | 103 - .../libx11/libX11-Add-missing-NULL-check.patch | 72 - .../meta/recipes-graphics/xorg-lib/libx11_1.6.3.bb | 13 - .../meta/recipes-graphics/xorg-lib/libx11_1.6.4.bb | 10 + .../recipes-graphics/xorg-lib/libxfixes_5.0.2.bb | 22 - .../recipes-graphics/xorg-lib/libxfixes_5.0.3.bb | 23 + .../recipes-graphics/xorg-lib/libxfont2_2.0.1.bb | 22 + .../recipes-graphics/xorg-lib/libxfont_1.5.1.bb | 25 - .../recipes-graphics/xorg-lib/libxfont_1.5.2.bb | 25 + .../meta/recipes-graphics/xorg-lib/libxft_2.3.2.bb | 2 +- .../meta/recipes-graphics/xorg-lib/libxi_1.7.6.bb | 21 - .../meta/recipes-graphics/xorg-lib/libxi_1.7.9.bb | 22 + .../xorg-lib/libxkbcommon_0.6.1.bb | 22 - .../xorg-lib/libxkbcommon_0.7.1.bb | 22 + .../meta/recipes-graphics/xorg-lib/libxmu_1.1.2.bb | 2 +- .../recipes-graphics/xorg-lib/libxpm_3.5.11.bb | 27 - .../recipes-graphics/xorg-lib/libxpm_3.5.12.bb | 27 + .../libxrandr/CVE-2016-7947_CVE-2016-7948.patch | 439 - .../recipes-graphics/xorg-lib/libxrandr_1.5.0.bb | 26 - .../recipes-graphics/xorg-lib/libxrandr_1.5.1.bb | 23 + .../xorg-lib/libxrender/CVE-2016-7949.patch | 59 - .../recipes-graphics/xorg-lib/libxrender_0.9.10.bb | 24 + .../recipes-graphics/xorg-lib/libxrender_0.9.9.bb | 26 - .../recipes-graphics/xorg-lib/libxtst_1.2.3.bb | 1 + .../meta/recipes-graphics/xorg-lib/libxv_1.0.10.bb | 18 - .../meta/recipes-graphics/xorg-lib/libxv_1.0.11.bb | 19 + .../recipes-graphics/xorg-lib/libxvmc_1.0.10.bb | 19 + .../recipes-graphics/xorg-lib/libxvmc_1.0.9.bb | 19 - .../xorg-lib/xkeyboard-config_2.18.bb | 31 - .../xorg-lib/xkeyboard-config_2.20.bb | 31 + .../xorg-proto/presentproto_git.bb | 6 +- .../recipes-graphics/xorg-proto/xproto_7.0.29.bb | 19 - .../recipes-graphics/xorg-proto/xproto_7.0.31.bb | 19 + .../xorg-util/util-macros_1.19.0.bb | 19 - .../xorg-util/util-macros_1.19.1.bb | 19 + .../xserver-xf86-config/10-preload-modules.conf | 9 - .../xserver-xf86-config/qemuarm/xorg.conf | 31 - .../xserver-xf86-config/qemumips/xorg.conf | 31 - .../xserver-xf86-config/qemumips64/xorg.conf | 31 - .../xserver-xf86-config/qemuppc/xorg.conf | 31 - .../xserver-xf86-config/qemush4/xorg.conf | 31 - .../xserver-xf86-config/qemux86-64/xorg.conf | 31 - .../xserver-xf86-config/qemux86/xorg.conf | 31 - .../xorg-xserver/xserver-xf86-config_0.1.bb | 8 - .../recipes-graphics/xorg-xserver/xserver-xorg.inc | 18 +- ...onfigure.ac-Fix-check-for-CLOCK_MONOTONIC.patch | 61 + ...c-Fix-wayland-scanner-and-protocols-locat.patch | 38 + .../0003-Remove-check-for-useSIGIO-option.patch | 47 + ...003-modesetting-Fix-16-bit-depth-bpp-mode.patch | 46 + .../xserver-xorg/CVE-2017-10971-1.patch | 76 + .../xserver-xorg/CVE-2017-10971-2.patch | 55 + .../xserver-xorg/CVE-2017-10971-3.patch | 50 + 
.../xorg-xserver/xserver-xorg_1.18.4.bb | 27 - .../xorg-xserver/xserver-xorg_1.19.1.bb | 35 + .../meta/recipes-kernel/cryptodev/cryptodev.inc | 4 +- ...st-to-another-change-in-the-user-page-API.patch | 40 + .../cryptodev/files/kernel-4-10-changes.patch | 57 + .../meta/recipes-kernel/dtc/dtc_1.4.2.bb | 10 + .../yocto-poky/meta/recipes-kernel/dtc/dtc_git.bb | 11 - .../kern-tools/kern-tools-native_git.bb | 2 +- ...nd-the-semantics-of-kexec_iomem_for_each_.patch | 78 + ...01-x86-x86_64-Fix-format-warning-with-die.patch | 78 + ...eneralize-and-rename-get_kernel_stext_sym.patch | 194 + .../0002-ppc-Fix-format-warning-with-die.patch | 43 + ...0003-arm64-identify-PHYS_OFFSET-correctly.patch | 76 + .../0004-arm64-kdump-identify-memory-regions.patch | 202 + ...5-arm64-kdump-add-elf-core-header-segment.patch | 191 + ...6-arm64-kdump-set-up-kernel-image-segment.patch | 137 + .../0007-arm64-kdump-set-up-other-segments.patch | 35 + ...-add-DT-properties-to-crash-dump-kernel-s.patch | 150 + ...-kdump-Add-support-for-binary-image-files.patch | 52 + ...-ARM-Fix-add_buffer_phys_virt-align-issue.patch | 52 + .../meta/recipes-kernel/kexec/kexec-tools/kdump | 2 +- .../recipes-kernel/kexec/kexec-tools_2.0.12.bb | 37 - .../recipes-kernel/kexec/kexec-tools_2.0.14.bb | 48 + .../recipes-kernel/kmod/depmodwrapper-cross_1.0.bb | 4 +- .../yocto-poky/meta/recipes-kernel/kmod/kmod.inc | 8 +- .../recipes-kernel/kmod/kmod/kcmdline_quotes.patch | 44 + .../recipes-kernel/latencytop/latencytop_0.5.bb | 4 +- .../linux-firmware/linux-firmware_git.bb | 409 +- .../linux-libc-headers/linux-libc-headers.inc | 19 +- ....h-musl-_does_-define-IFF_LOWER_UP-DORMAN.patch | 46 + .../linux-libc-headers/linux-libc-headers_4.10.bb | 11 + .../linux-libc-headers/linux-libc-headers_4.4.bb | 9 - .../linux-libc-headers/linux-libc-headers_4.8.bb | 12 - .../meta/recipes-kernel/linux/kernel-devsrc.bb | 8 +- .../meta/recipes-kernel/linux/linux-dtb.inc | 6 +- .../meta/recipes-kernel/linux/linux-yocto-dev.bb | 4 +- .../recipes-kernel/linux/linux-yocto-rt_4.1.bb | 8 +- .../recipes-kernel/linux/linux-yocto-rt_4.10.bb | 38 + .../recipes-kernel/linux/linux-yocto-rt_4.4.bb | 8 +- .../recipes-kernel/linux/linux-yocto-rt_4.8.bb | 38 - .../recipes-kernel/linux/linux-yocto-rt_4.9.bb | 38 + .../recipes-kernel/linux/linux-yocto-tiny_4.1.bb | 6 +- .../recipes-kernel/linux/linux-yocto-tiny_4.10.bb | 25 + .../recipes-kernel/linux/linux-yocto-tiny_4.4.bb | 6 +- .../recipes-kernel/linux/linux-yocto-tiny_4.8.bb | 25 - .../recipes-kernel/linux/linux-yocto-tiny_4.9.bb | 25 + .../meta/recipes-kernel/linux/linux-yocto.inc | 5 +- .../meta/recipes-kernel/linux/linux-yocto_4.1.bb | 20 +- .../meta/recipes-kernel/linux/linux-yocto_4.10.bb | 44 + .../meta/recipes-kernel/linux/linux-yocto_4.4.bb | 20 +- .../meta/recipes-kernel/linux/linux-yocto_4.8.bb | 44 - .../meta/recipes-kernel/linux/linux-yocto_4.9.bb | 44 + .../meta/recipes-kernel/lttng/babeltrace_1.4.0.bb | 20 - .../meta/recipes-kernel/lttng/babeltrace_1.5.2.bb | 19 + .../recipes-kernel/lttng/lttng-modules_2.9.1.bb | 32 + .../meta/recipes-kernel/lttng/lttng-modules_git.bb | 34 - .../0001-Fix-error.h-common-error.h.patch | 33 - .../recipes-kernel/lttng/lttng-tools/run-ptest | 5 +- .../recipes-kernel/lttng/lttng-tools/x32.patch | 16 + .../meta/recipes-kernel/lttng/lttng-tools_2.9.4.bb | 122 + .../meta/recipes-kernel/lttng/lttng-tools_git.bb | 119 - .../lttng-ust/lttng-ust-doc-examples-disable.patch | 2 +- .../meta/recipes-kernel/lttng/lttng-ust_2.9.0.bb | 37 + .../meta/recipes-kernel/lttng/lttng-ust_git.bb | 42 - 
.../modutils-initscripts/modutils-initscripts.bb | 2 +- .../meta/recipes-kernel/perf/perf-features.inc | 22 - .../yocto-poky/meta/recipes-kernel/perf/perf.bb | 2 +- ...ward-port-mips-arm-memory-barrier-patches.patch | 38 - .../files/0001-callgraph-Use-U64_TO_POINTER.patch | 29 - .../meta/recipes-kernel/sysprof/sysprof_3.22.3.bb | 34 + .../meta/recipes-kernel/sysprof/sysprof_git.bb | 40 - .../systemtap/systemtap-native_git.bb | 7 + .../meta/recipes-kernel/systemtap/systemtap_git.bb | 2 +- .../meta/recipes-lsb4/libpng/libpng12_1.2.56.bb | 40 - .../meta/recipes-lsb4/libpng/libpng12_1.2.57.bb | 36 + .../meta/recipes-multimedia/alsa/alsa-fpu.inc | 8 +- .../alsa-lib/0001-ucm-parser-needs-limits.h.patch | 33 + .../meta/recipes-multimedia/alsa/alsa-lib_1.1.2.bb | 44 - .../meta/recipes-multimedia/alsa/alsa-lib_1.1.3.bb | 46 + .../recipes-multimedia/alsa/alsa-plugins_1.1.1.bb | 4 +- ...idi-Explicitly-cast-constant-to-char-type.patch | 75 - .../recipes-multimedia/alsa/alsa-tools_1.1.0.bb | 42 - .../recipes-multimedia/alsa/alsa-tools_1.1.3.bb | 41 + .../alsa/alsa-utils-scripts_1.1.2.bb | 25 - .../alsa/alsa-utils-scripts_1.1.3.bb | 25 + .../recipes-multimedia/alsa/alsa-utils_1.1.2.bb | 109 - .../recipes-multimedia/alsa/alsa-utils_1.1.3.bb | 115 + .../meta/recipes-multimedia/ffmpeg/ffmpeg_3.1.3.bb | 142 - .../meta/recipes-multimedia/ffmpeg/ffmpeg_3.2.4.bb | 147 + .../meta/recipes-multimedia/flac/flac_1.3.1.bb | 42 - .../meta/recipes-multimedia/flac/flac_1.3.2.bb | 44 + .../0001-gtk-play-Disable-visualizations.patch | 59 + ...ovide-similar-behaviour-for-quit-and-clos.patch | 32 - .../recipes-multimedia/gstreamer/gst-player_git.bb | 6 +- .../gstreamer/gst-plugins-package.inc | 10 +- ...ideobufferpool-create-allocator-if-needed.patch | 61 + .../gstreamer/gstreamer1.0-libav.inc | 4 + .../gstreamer1.0-libav/mips64_cpu_detection.patch | 32 + .../gstreamer/gstreamer1.0-libav_1.10.4.bb | 20 + .../gstreamer/gstreamer1.0-libav_1.8.3.bb | 20 - .../gstreamer/gstreamer1.0-libav_git.bb | 38 - .../gstreamer/gstreamer1.0-omx.inc | 4 +- .../0001-omx-fixed-type-error-in-printf-call.patch | 30 - .../gstreamer/gstreamer1.0-omx_1.10.4.bb | 11 + .../gstreamer/gstreamer1.0-omx_1.2.0.bb | 11 - .../gstreamer/gstreamer1.0-omx_git.bb | 25 - .../gstreamer/gstreamer1.0-plugins-bad.inc | 20 +- ...-don-t-hardcode-libtool-name-when-running.patch | 35 +- ...G_CONFIG_SYSROOT_DIR-to-pkg-config-output.patch | 34 + ...gl.pc.in-don-t-append-GL_CFLAGS-to-CFLAGS.patch | 15 +- ...1-mssdemux-improved-live-playback-support.patch | 929 + ...ming-implement-adaptivedemux-s-get_live_s.patch | 183 + ...ming-use-the-duration-from-the-list-of-fr.patch | 62 + ...lplugin-enable-gldeinterlace-on-OpenGL-ES.patch | 634 - ...ert-implement-multiple-render-targets-for.patch | 244 - ...ert-don-t-use-the-predefined-variable-nam.patch | 32 - .../0005-glshader-add-glBindFragDataLocation.patch | 77 - ...ert-GLES3-deprecates-texture2D-and-it-doe.patch | 51 - .../0008-gl-implement-GstGLMemoryEGL.patch | 495 - ...valid-sentinels-for-gst_structure_get-etc.patch | 63 +- .../gstreamer/gstreamer1.0-plugins-bad_1.10.4.bb | 28 + .../gstreamer/gstreamer1.0-plugins-bad_1.8.3.bb | 26 - .../gstreamer/gstreamer1.0-plugins-bad_git.bb | 45 - .../gstreamer/gstreamer1.0-plugins-base.inc | 5 +- .../gstreamer/gstreamer1.0-plugins-base_1.10.4.bb | 19 + .../gstreamer/gstreamer1.0-plugins-base_1.8.3.bb | 19 - .../gstreamer/gstreamer1.0-plugins-base_git.bb | 31 - .../gstreamer/gstreamer1.0-plugins-good.inc | 3 +- ...Also-add-videometa-if-there-is-padding-to.patch | 35 + 
.../gstreamer/gstreamer1.0-plugins-good_1.10.4.bb | 18 + .../gstreamer/gstreamer1.0-plugins-good_1.8.3.bb | 17 - .../gstreamer/gstreamer1.0-plugins-good_git.bb | 29 - .../gstreamer/gstreamer1.0-plugins-ugly_1.10.4.bb | 13 + .../gstreamer/gstreamer1.0-plugins-ugly_1.8.3.bb | 13 - .../gstreamer/gstreamer1.0-plugins-ugly_git.bb | 37 - .../gstreamer/gstreamer1.0-plugins.inc | 2 - .../gstreamer/gstreamer1.0-rtsp-server.inc | 2 +- .../gstreamer/gstreamer1.0-rtsp-server_1.10.4.bb | 6 + .../gstreamer/gstreamer1.0-rtsp-server_1.8.3.bb | 6 - .../gstreamer/gstreamer1.0-vaapi.inc | 37 + .../gstreamer/gstreamer1.0-vaapi_1.10.4.bb | 7 + .../recipes-multimedia/gstreamer/gstreamer1.0.inc | 2 +- .../gstreamer1.0/deterministic-unwind.patch | 24 + .../gstreamer/gstreamer1.0_1.10.4.bb | 13 + .../gstreamer/gstreamer1.0_1.8.3.bb | 13 - .../gstreamer/gstreamer1.0_git.bb | 25 - .../recipes-multimedia/libpng/libpng_1.6.24.bb | 25 - .../recipes-multimedia/libpng/libpng_1.6.28.bb | 25 + .../0001-configure.ac-improve-alsa-handling.patch | 62 + .../libsamplerate/libsamplerate0_0.1.8.bb | 21 - .../libsamplerate/libsamplerate0_0.1.9.bb | 28 + .../libsndfile/libsndfile1/CVE-2017-6892.patch | 34 + .../libsndfile1/CVE-2017-8361-8365.patch | 73 + .../libsndfile/libsndfile1/CVE-2017-8362.patch | 51 + .../libsndfile/libsndfile1/CVE-2017-8363.patch | 37 + .../libsndfile/libsndfile1_1.0.27.bb | 11 +- .../libtiff/files/CVE-2016-10093.patch | 47 + .../libtiff/files/CVE-2016-10266.patch | 60 + .../libtiff/files/CVE-2016-10267.patch | 70 + .../libtiff/files/CVE-2016-10268.patch | 30 + .../libtiff/files/CVE-2016-10269.patch | 131 + .../libtiff/files/CVE-2016-10270.patch | 134 + .../libtiff/files/CVE-2016-10271.patch | 30 + .../libtiff/files/CVE-2017-10688.patch | 88 + .../libtiff/files/CVE-2017-11335.patch | 54 + .../libtiff/files/CVE-2017-7592.patch | 40 + .../libtiff/files/CVE-2017-7593.patch | 98 + .../libtiff/files/CVE-2017-7594-p1.patch | 43 + .../libtiff/files/CVE-2017-7594-p2.patch | 50 + .../libtiff/files/CVE-2017-7595.patch | 48 + .../libtiff/files/CVE-2017-7596.patch | 308 + .../libtiff/files/CVE-2017-7598.patch | 65 + .../libtiff/files/CVE-2017-7601.patch | 52 + .../libtiff/files/CVE-2017-7602.patch | 69 + .../libtiff/files/CVE-2017-9147.patch | 203 + .../libtiff/files/CVE-2017-9936.patch | 46 + .../libtiff/files/libtiff-CVE-2017-5225.patch | 92 + .../meta/recipes-multimedia/libtiff/tiff_4.0.7.bb | 23 +- .../libvorbis/0001-configure-Check-for-clang.patch | 56 + .../libvorbis/libvorbis_1.3.5.bb | 4 +- .../recipes-multimedia/mpeg2dec/mpeg2dec_0.4.1.bb | 2 +- .../recipes-multimedia/mpg123/mpg123_1.23.6.bb | 63 - .../recipes-multimedia/mpg123/mpg123_1.23.8.bb | 58 + .../recipes-multimedia/pulseaudio/pulseaudio.inc | 87 +- ...oth-fail-if-user-requested-profile-doesn-.patch | 61 - ...ard-don-t-allow-the-CARD_NEW-hook-to-fail.patch | 37 - ...-move-profile-selection-after-pa_card_new.patch | 429 - ...rd-remove-pa_card_new_data.active_profile.patch | 72 - ...vailability-for-some-unavailable-profiles.patch | 79 - .../pulseaudio/pulseaudio_10.0.bb | 14 + .../pulseaudio/pulseaudio_9.0.bb | 19 - .../tremor/tremor/obsolete_automake_macros.patch | 15 - .../tremor/tremor/tremor-arm-thumb2.patch | 104 - .../recipes-multimedia/tremor/tremor_20150107.bb | 27 - .../meta/recipes-multimedia/webp/libwebp_0.5.1.bb | 54 - .../meta/recipes-multimedia/webp/libwebp_0.6.0.bb | 54 + .../x264/x264/Fix-X32-build-by-disabling-asm.patch | 53 + .../don-t-default-to-cortex-a9-with-neon.patch | 13 +- .../meta/recipes-multimedia/x264/x264_git.bb | 15 +- 
.../meta/recipes-rt/images/core-image-rt-sdk.bb | 2 +- .../meta/recipes-rt/images/core-image-rt.bb | 2 +- .../meta/recipes-sato/images/core-image-sato.bb | 3 + .../matchbox-desktop/matchbox-desktop_2.1.bb | 2 +- .../matchbox-panel-2/matchbox-panel-2_2.11.bb | 8 +- .../matchbox-sato/matchbox-session-sato_0.1.bb | 2 +- .../meta/recipes-sato/pcmanfm/pcmanfm_1.2.4.bb | 43 - .../meta/recipes-sato/pcmanfm/pcmanfm_1.2.5.bb | 43 + ...arify-conditions-to-avoid-compiler-errors.patch | 48 + ...mpiler-errors-about-uninitialized-use-of-.patch | 32 - .../meta/recipes-sato/puzzles/puzzles_git.bb | 4 +- .../recipes-sato/rxvt-unicode/rxvt-unicode_9.20.bb | 8 - .../shutdown-desktop/shutdown-desktop.bb | 3 +- ...Introspection.cmake-prefix-variables-obta.patch | 27 - ...ix-racy-parallel-build-of-WebKit2-4.0.gir.patch | 48 - ...cmake-drop-the-hardcoded-introspection-gt.patch | 32 - ...c-settings-so-that-gtkdoc-generation-work.patch | 49 - ...bKitMacros-Append-to-I-and-not-to-isystem.patch | 223 - ...ng-introspection-files-add-CMAKE_C_FLAGS-.patch | 37 - .../recipes-sato/webkit/files/musl-fixes.patch | 48 - .../recipes-sato/webkit/files/ppc-musl-fix.patch | 26 - ...Introspection.cmake-prefix-variables-obta.patch | 27 + .../webkitgtk/0001-Fix-build-with-musl.patch | 77 + ...ix-racy-parallel-build-of-WebKit2-4.0.gir.patch | 49 + ...cmake-drop-the-hardcoded-introspection-gt.patch | 32 + ...c-settings-so-that-gtkdoc-generation-work.patch | 50 + ...bKitMacros-Append-to-I-and-not-to-isystem.patch | 126 + ...ng-introspection-files-add-CMAKE_C_FLAGS-.patch | 41 + .../webkit/webkitgtk/cross-compile.patch | 23 + .../detect-atomics-during-configure.patch | 46 + .../webkit/webkitgtk/x32_support.patch | 21 + .../meta/recipes-sato/webkit/webkitgtk_2.12.5.bb | 108 - .../meta/recipes-sato/webkit/webkitgtk_2.18.5.bb | 121 + ..._t-size-doesn-t-match-in-glibc-when-cross.patch | 76 + ...link-libapr-against-phtread-to-make-gold-.patch | 50 + .../meta/recipes-support/apr/apr_1.5.2.bb | 4 +- .../meta/recipes-support/atk/at-spi2-atk_2.20.1.bb | 21 - .../meta/recipes-support/atk/at-spi2-atk_2.22.0.bb | 21 + .../recipes-support/atk/at-spi2-core_2.20.2.bb | 29 - .../recipes-support/atk/at-spi2-core_2.22.0.bb | 29 + .../meta/recipes-support/atk/atk_2.20.0.bb | 21 - .../meta/recipes-support/atk/atk_2.22.0.bb | 21 + .../yocto-poky/meta/recipes-support/attr/acl.inc | 42 - .../Makefile-libacl-should-depend-on-include.patch | 51 + .../attr/acl/add-missing-configure.ac.patch | 59 - .../meta/recipes-support/attr/acl/configure.ac | 49 + .../meta/recipes-support/attr/acl_2.2.52.bb | 45 +- .../attr/attr/attr-Missing-configure.ac.patch | 63 + .../attr/attr/dont-use-decl-macros.patch | 56 + .../meta/recipes-support/attr/ea-acl.inc | 12 +- ...gure-option-to-enable-disable-static-libr.patch | 70 + .../attr/files/attr-Missing-configure.ac.patch | 63 - .../attr/files/dont-use-decl-macros.patch | 56 - .../bash-completion/bash-completion_2.4.bb | 42 - .../bash-completion/bash-completion_2.5.bb | 42 + .../recipes-support/boost/bjam-native_1.61.0.bb | 18 - .../recipes-support/boost/bjam-native_1.63.0.bb | 18 + .../meta/recipes-support/boost/boost-1.61.0.inc | 19 - .../meta/recipes-support/boost/boost-1.63.0.inc | 24 + .../meta/recipes-support/boost/boost.inc | 53 +- ...-Apply-boost-1.62.0-no-forced-flags.patch.patch | 100 + ...soft-float-on-ARM-we-should-not-expect-th.patch | 29 + ...Don-t-set-up-m32-m64-we-do-that-ourselves.patch | 97 + ...execution_monitor.hpp-fix-mips-soft-float.patch | 145 - ...p-arch-instruction-set-flags-we-do-that-o.patch | 143 + 
...ips-assembly-doesn-t-compile-in-mips16e-m.patch | 60 - .../boost/boost/consider-hardfp.patch | 19 - .../meta/recipes-support/boost/boost/py3.patch | 269 + .../meta/recipes-support/boost/boost_1.61.0.bb | 13 - .../meta/recipes-support/boost/boost_1.63.0.bb | 15 + .../ca-certificates/ca-certificates_20160104.bb | 82 - .../ca-certificates/ca-certificates_20161130.bb | 84 + .../recipes-support/consolekit/consolekit_0.4.6.bb | 3 +- .../createrepo/createrepo/createrepo-dbpath.patch | 61 - .../createrepo/createrepo/createrepo-rpm549.patch | 22 - ...dumpMetadata-disable-signature-validation.patch | 31 - .../createrepo/createrepo/fix-native-install.patch | 163 - .../createrepo/createrepo/fixstat.patch | 19 - ...n-scripts-should-use-interpreter-from-env.patch | 47 - .../createrepo/createrepo/recommends.patch | 71 - .../createrepo/createrepo/rpm-createsolvedb.py | 64 - .../createrepo/createrepo_0.4.11.bb | 51 - .../curl/curl/CVE-2017-1000100.patch | 50 + .../curl/curl/CVE-2017-1000101.patch | 92 + .../meta/recipes-support/curl/curl_7.50.1.bb | 72 - .../meta/recipes-support/curl/curl_7.53.1.bb | 76 + ...me-local-__atomic_compare_exchange-to-avo.patch | 45 + ...Makefile-let-libdb-6.0.la-depend-os_map.l.patch | 29 - ...akefile-let-libso_target-depend-on-bt_rec.patch | 30 - .../meta/recipes-support/db/db_5.3.28.bb | 6 +- .../meta/recipes-support/db/db_6.0.35.bb | 113 - .../debianutils/debianutils_4.8.1.bb | 54 + .../recipes-support/debianutils/debianutils_4.8.bb | 48 - .../recipes-support/gdbm/gdbm-1.8.3/ldflags.patch | 22 - .../gdbm/gdbm-1.8.3/libtool-mode.patch | 22 - .../recipes-support/gdbm/gdbm-1.8.3/makefile.patch | 60 - .../meta/recipes-support/gdbm/gdbm_1.8.3.bb | 30 - .../gmp-4.2.1/Use-__gnu_inline__-attribute.patch | 36 - .../avoid-h-asm-constraint-for-MIPS.patch | 57 - .../gmp/gmp-4.2.1/gmp_fix_for_automake-1.12.patch | 56 - ...user-provided-flags-to-the-auto-detected-.patch | 61 - ...re.ac-Believe-the-cflags-from-environment.patch | 52 - .../meta/recipes-support/gmp/gmp-6.1.1/amd64.patch | 18 - .../gmp/gmp-6.1.1/use-includedir.patch | 15 - ...user-provided-flags-to-the-auto-detected-.patch | 61 + ...re.ac-Believe-the-cflags-from-environment.patch | 52 + .../meta/recipes-support/gmp/gmp-6.1.2/amd64.patch | 18 + .../gmp/gmp-6.1.2/use-includedir.patch | 15 + .../meta/recipes-support/gmp/gmp_4.2.1.bb | 17 - .../meta/recipes-support/gmp/gmp_6.1.1.bb | 39 - .../meta/recipes-support/gmp/gmp_6.1.2.bb | 38 + .../gnupg/gnupg-1.4.7/CVE-2013-4242.patch | 63 - .../gnupg/gnupg-1.4.7/CVE-2013-4351.patch | 45 - .../gnupg/gnupg-1.4.7/CVE-2013-4576.patch | 154 - .../gnupg/gnupg-1.4.7/GnuPG1-CVE-2012-6085.patch | 64 - .../gnupg/gnupg-1.4.7/configure.patch | 17 - .../gnupg-1.4.7/curl_typeof_fix_backport.patch | 27 - .../gnupg/gnupg-1.4.7/long-long-thumb.patch | 19 - .../gnupg/gnupg-1.4.7/mips_gcc4.4.patch | 50 - .../meta/recipes-support/gnupg/gnupg_1.4.7.bb | 104 - .../meta/recipes-support/gnupg/gnupg_2.1.14.bb | 46 - .../meta/recipes-support/gnupg/gnupg_2.1.18.bb | 46 + .../meta/recipes-support/gnutls/gnutls.inc | 10 +- ...001-Use-correct-include-dir-with-minitasn.patch | 31 - .../gnutls/0001-configure.ac-fix-sed-command.patch | 26 +- .../gnutls/gnutls/CVE-2016-7444.patch | 35 - .../recipes-support/gnutls/gnutls/arm_eabi.patch | 19 + .../correct_rpl_gettimeofday_signature.patch | 11 +- .../meta/recipes-support/gnutls/gnutls_3.5.3.bb | 13 - .../meta/recipes-support/gnutls/gnutls_3.5.9.bb | 10 + ...configure-don-t-add-Werror-to-build-flags.patch | 28 - ...reintroduce-GNULIB_OVERRIDES_WINT_T-check.patch | 63 + 
.../0002-ASN.y-corrected-compiler-warning.patch | 28 - ...corrected-potential-null-pointer-derefere.patch | 73 - .../0004-tools-eliminated-compiler-warnings.patch | 56 - .../gnutls/libtasn1/CVE-2017-10790.patch | 63 + .../meta/recipes-support/gnutls/libtasn1_4.10.bb | 24 + .../meta/recipes-support/gnutls/libtasn1_4.9.bb | 24 - .../0001-Correctly-install-python-modules.patch | 26 + ...g-skip-all-lib-or-usr-lib-directories-in-.patch | 31 + .../meta/recipes-support/gpgme/gpgme/gpgme.pc | 10 - .../recipes-support/gpgme/gpgme/pkgconfig.patch | 42 +- .../gpgme/gpgme/python-import.patch | 19 + .../gpgme/gpgme/python-lang-config.patch | 52 + .../meta/recipes-support/gpgme/gpgme_1.6.0.bb | 41 - .../meta/recipes-support/gpgme/gpgme_1.8.0.bb | 76 + .../yocto-poky/meta/recipes-support/icu/icu.inc | 14 +- .../icu/icu/0001-i18n-Drop-include-xlocale.h.patch | 31 + .../meta/recipes-support/icu/icu_57.1.bb | 28 - .../meta/recipes-support/icu/icu_58.2.bb | 29 + .../recipes-support/iso-codes/iso-codes_3.70.bb | 15 - .../recipes-support/iso-codes/iso-codes_3.74.bb | 15 + .../recipes-support/libassuan/libassuan_2.4.3.bb | 2 +- ...overlay.pc.in-Set-Cflags-to-use-I-instead.patch | 33 + .../meta/recipes-support/libbsd/libbsd_0.8.3.bb | 1 + .../meta/recipes-support/libcap/libcap_2.25.bb | 6 +- .../recipes-support/libcheck/libcheck_0.10.0.bb | 2 +- .../recipes-support/libevdev/libevdev_1.5.2.bb | 14 - .../recipes-support/libevdev/libevdev_1.5.6.bb | 14 + .../recipes-support/libfm/libfm-extra_1.2.4.bb | 23 - .../recipes-support/libfm/libfm-extra_1.2.5.bb | 23 + ...heck-the-stamp-file-that-indicates-if-we-.patch | 28 + .../meta/recipes-support/libfm/libfm_1.2.4.bb | 38 - .../meta/recipes-support/libfm/libfm_1.2.5.bb | 39 + ...-Store-EdDSA-session-key-in-secure-memory.patch | 2 +- .../libgcrypt/files/CVE-2017-7526.patch | 10 +- .../meta/recipes-support/libgcrypt/libgcrypt.inc | 9 +- .../recipes-support/libgcrypt/libgcrypt_1.7.3.bb | 4 - .../recipes-support/libgcrypt/libgcrypt_1.7.6.bb | 4 + .../libgpg-error/libgpg-error_1.24.bb | 53 - .../libgpg-error/libgpg-error_1.26.bb | 62 + .../libiconv/libiconv-1.11.1/autoconf.patch | 50 - .../shared_preloadable_libiconv_linux.patch | 26 - .../recipes-support/libiconv/libiconv_1.11.1.bb | 47 - .../meta/recipes-support/libiconv/libiconv_1.14.bb | 4 +- .../meta/recipes-support/libksba/libksba_1.3.4.bb | 26 - .../meta/recipes-support/libksba/libksba_1.3.5.bb | 26 + ...01-lib-add-utility-function-nl_strerror_l.patch | 146 - ...to-using-strerror_l-instead-of-strerror_r.patch | 403 - ...to-using-strerror_l-instead-of-strerror_r.patch | 82 - .../meta/recipes-support/libnl/libnl_3.2.28.bb | 48 - .../meta/recipes-support/libnl/libnl_3.2.29.bb | 45 + .../libpcre/libpcre2/pcre-cross.patch | 65 + .../meta/recipes-support/libpcre/libpcre2_10.22.bb | 58 + .../meta/recipes-support/libpcre/libpcre_8.39.bb | 83 - .../meta/recipes-support/libpcre/libpcre_8.40.bb | 83 + .../recipes-support/libproxy/libproxy_0.4.13.bb | 36 - .../recipes-support/libproxy/libproxy_0.4.14.bb | 36 + .../recipes-support/libsoup/libsoup-2.4_2.54.1.bb | 35 - .../recipes-support/libsoup/libsoup-2.4_2.56.0.bb | 35 + .../libunistring/libunistring_0.9.6.bb | 28 - .../libunistring/libunistring_0.9.7.bb | 30 + .../meta/recipes-support/libunwind/libunwind.inc | 1 + .../libunwind/libunwind/libunwind-1.1-x32.patch | 31 + .../recipes-support/libunwind/libunwind_git.bb | 1 + .../0001-Support-for-NIOS2-architecture.patch | 13 +- .../liburcu/0002-Support-for-aarch64_be.patch | 19 - .../meta/recipes-support/liburcu/liburcu_0.9.2.bb | 
21 - .../meta/recipes-support/liburcu/liburcu_0.9.3.bb | 19 + .../meta/recipes-support/libusb/libusb1_1.0.20.bb | 35 - .../meta/recipes-support/libusb/libusb1_1.0.21.bb | 35 + .../meta/recipes-support/libyaml/libyaml_0.1.7.bb | 2 +- .../yocto-poky/meta/recipes-support/lz4/lz4.bb | 2 +- .../meta/recipes-support/lzop/lzop_1.03.bb | 2 +- .../meta/recipes-support/mpfr/mpfr_3.1.4.bb | 18 - .../meta/recipes-support/mpfr/mpfr_3.1.5.bb | 18 + .../neon/neon/gnutls_4.3_fixup.patch | 68 - .../meta/recipes-support/neon/neon_0.30.1.bb | 38 - .../meta/recipes-support/neon/neon_0.30.2.bb | 38 + .../nettle/nettle-2.7.1/CVE-2015-8803_8805.patch | 71 - .../nettle/nettle-2.7.1/CVE-2015-8804.patch | 272 - ...k-header-files-of-openssl-only-if-enable_.patch | 38 + .../meta/recipes-support/nettle/nettle_2.7.1.bb | 18 - .../meta/recipes-support/nettle/nettle_3.2.bb | 11 - .../meta/recipes-support/nettle/nettle_3.3.bb | 15 + .../meta/recipes-support/npth/npth_1.2.bb | 22 - .../meta/recipes-support/npth/npth_1.3.bb | 22 + ...int.h-for-SSIZE_MAX-and-SIZE_MAX-definiti.patch | 30 + .../meta/recipes-support/nspr/nspr_4.12.bb | 187 - .../meta/recipes-support/nspr/nspr_4.13.1.bb | 190 + .../nss/0001-nss-fix-support-cross-compiling.patch | 48 + .../nss/nss/Fix-compilation-for-X32.patch | 31 + .../nss/nss/nss-fix-support-cross-compiling.patch | 71 - .../recipes-support/nss/nss/pqg.c-ULL_addend.patch | 8 +- .../meta/recipes-support/nss/nss_3.25.bb | 242 - .../meta/recipes-support/nss/nss_3.28.1.bb | 246 + .../ptest-runner/ptest-runner_2.0.2.bb | 26 + .../ptest-runner/ptest-runner_2.0.bb | 26 - .../meta/recipes-support/serf/serf_1.3.8.bb | 28 - .../meta/recipes-support/serf/serf_1.3.9.bb | 26 + .../shared-mime-info/shared-mime-info_1.6.bb | 7 - .../shared-mime-info/shared-mime-info_1.8.bb | 7 + ...1c7962-that-brings-2-increase-of-build-ti.patch | 56 - .../meta/recipes-support/sqlite/sqlite3.inc | 6 +- .../meta/recipes-support/sqlite/sqlite3_3.14.1.bb | 12 - .../meta/recipes-support/sqlite/sqlite3_3.17.0.bb | 10 + .../taglib/taglib/CVE-2017-12678.patch | 40 + .../meta/recipes-support/taglib/taglib_1.11.1.bb | 43 + .../meta/recipes-support/taglib/taglib_1.9.1.bb | 32 - .../meta/recipes-support/vte/vte_0.44.2.bb | 43 - .../meta/recipes-support/vte/vte_0.46.1.bb | 43 + .../scripts/buildhistory-collect-srcrevs | 2 +- import-layers/yocto-poky/scripts/buildhistory-diff | 13 +- import-layers/yocto-poky/scripts/buildstats-diff | 36 +- import-layers/yocto-poky/scripts/cleanup-workdir | 198 - import-layers/yocto-poky/scripts/combo-layer | 10 +- .../scripts/contrib/bb-perf/buildstats-plot.sh | 157 + .../scripts/contrib/bb-perf/buildstats.sh | 99 +- .../scripts/contrib/build-perf-test-wrapper.sh | 96 +- .../scripts/contrib/list-packageconfig-flags.py | 6 +- .../yocto-poky/scripts/contrib/mkefidisk.sh | 33 +- .../scripts/contrib/oe-build-perf-report-email.py | 269 + .../contrib/python/generate-manifest-2.7.py | 45 +- .../contrib/python/generate-manifest-3.5.py | 70 +- .../yocto-poky/scripts/contrib/verify-homepage.py | 2 +- .../scripts/contrib/yocto-bsp-kernel-update.sh | 60 + .../yocto-poky/scripts/create-pull-request | 2 +- import-layers/yocto-poky/scripts/crosstap | 11 +- import-layers/yocto-poky/scripts/devtool | 19 +- .../yocto-poky/scripts/gen-lockedsig-cache | 6 +- import-layers/yocto-poky/scripts/lib/bsp/kernel.py | 1 - .../target/arch/arm/conf/machine/machine.conf | 4 +- .../arm/recipes-kernel/linux/kernel-list.noinstall | 4 +- .../linux/linux-yocto-tiny_4.1.bbappend | 6 +- .../linux/linux-yocto-tiny_4.10.bbappend | 35 + 
.../linux/linux-yocto-tiny_4.4.bbappend | 6 +- .../linux/linux-yocto-tiny_4.8.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.1.bbappend | 8 +- .../recipes-kernel/linux/linux-yocto_4.10.bbappend | 34 + .../recipes-kernel/linux/linux-yocto_4.4.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.8.bbappend | 6 +- .../i386/recipes-kernel/linux/files/machine.cfg | 3 + .../recipes-kernel/linux/kernel-list.noinstall | 4 +- .../linux/linux-yocto-tiny_4.1.bbappend | 6 +- .../linux/linux-yocto-tiny_4.10.bbappend | 35 + .../linux/linux-yocto-tiny_4.4.bbappend | 6 +- .../linux/linux-yocto-tiny_4.8.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.1.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.10.bbappend | 34 + .../recipes-kernel/linux/linux-yocto_4.4.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.8.bbappend | 6 +- .../recipes-example/example/example-recipe-0.1.bb | 2 +- .../target/arch/mips/conf/machine/machine.conf | 1 - .../recipes-kernel/linux/kernel-list.noinstall | 4 +- .../linux/linux-yocto-tiny_4.1.bbappend | 6 +- .../linux/linux-yocto-tiny_4.10.bbappend | 35 + .../linux/linux-yocto-tiny_4.4.bbappend | 6 +- .../linux/linux-yocto-tiny_4.8.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.1.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.10.bbappend | 34 + .../recipes-kernel/linux/linux-yocto_4.4.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.8.bbappend | 6 +- .../target/arch/mips64/conf/machine/machine.conf | 1 - .../recipes-kernel/linux/kernel-list.noinstall | 4 +- .../linux/linux-yocto-tiny_4.1.bbappend | 6 +- .../linux/linux-yocto-tiny_4.10.bbappend | 35 + .../linux/linux-yocto-tiny_4.4.bbappend | 6 +- .../linux/linux-yocto-tiny_4.8.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.1.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.10.bbappend | 34 + .../recipes-kernel/linux/linux-yocto_4.4.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.8.bbappend | 6 +- .../target/arch/powerpc/conf/machine/machine.conf | 1 - .../recipes-kernel/linux/kernel-list.noinstall | 4 +- .../linux/linux-yocto-tiny_4.1.bbappend | 6 +- .../linux/linux-yocto-tiny_4.10.bbappend | 35 + .../linux/linux-yocto-tiny_4.4.bbappend | 6 +- .../linux/linux-yocto-tiny_4.8.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.1.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.10.bbappend | 34 + .../recipes-kernel/linux/linux-yocto_4.4.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.8.bbappend | 6 +- .../target/arch/qemu/conf/machine/machine.conf | 3 - .../recipes-kernel/linux/kernel-list.noinstall | 4 +- .../linux/linux-yocto-tiny_4.1.bbappend | 6 +- .../linux/linux-yocto-tiny_4.10.bbappend | 64 + .../linux/linux-yocto-tiny_4.4.bbappend | 6 +- .../linux/linux-yocto-tiny_4.8.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.1.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.10.bbappend | 63 + .../recipes-kernel/linux/linux-yocto_4.4.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.8.bbappend | 6 +- .../recipes-kernel/linux/kernel-list.noinstall | 4 +- .../linux/linux-yocto-tiny_4.1.bbappend | 6 +- .../linux/linux-yocto-tiny_4.10.bbappend | 35 + .../linux/linux-yocto-tiny_4.4.bbappend | 6 +- .../linux/linux-yocto-tiny_4.8.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.1.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.10.bbappend | 34 + .../recipes-kernel/linux/linux-yocto_4.4.bbappend | 6 +- .../recipes-kernel/linux/linux-yocto_4.8.bbappend | 6 +- .../yocto-poky/scripts/lib/build_perf/__init__.py | 31 + 
.../yocto-poky/scripts/lib/build_perf/html.py | 19 + .../lib/build_perf/html/measurement_chart.html | 50 + .../scripts/lib/build_perf/html/report.html | 206 + .../yocto-poky/scripts/lib/build_perf/report.py | 342 + .../scripts/lib/build_perf/scrape-html-report.js | 56 + .../yocto-poky/scripts/lib/compatlayer/__init__.py | 392 + .../yocto-poky/scripts/lib/compatlayer/case.py | 7 + .../scripts/lib/compatlayer/cases/__init__.py | 0 .../scripts/lib/compatlayer/cases/bsp.py | 204 + .../scripts/lib/compatlayer/cases/common.py | 53 + .../scripts/lib/compatlayer/cases/distro.py | 26 + .../yocto-poky/scripts/lib/compatlayer/context.py | 15 + .../yocto-poky/scripts/lib/devtool/__init__.py | 78 +- .../yocto-poky/scripts/lib/devtool/build.py | 2 +- .../yocto-poky/scripts/lib/devtool/build_image.py | 10 +- .../yocto-poky/scripts/lib/devtool/deploy.py | 30 +- .../yocto-poky/scripts/lib/devtool/package.py | 8 +- .../yocto-poky/scripts/lib/devtool/runqemu.py | 6 +- .../yocto-poky/scripts/lib/devtool/sdk.py | 14 +- .../yocto-poky/scripts/lib/devtool/search.py | 4 +- .../yocto-poky/scripts/lib/devtool/standard.py | 570 +- .../yocto-poky/scripts/lib/devtool/upgrade.py | 30 +- .../yocto-poky/scripts/lib/devtool/utilcmds.py | 18 +- .../yocto-poky/scripts/lib/recipetool/append.py | 49 +- .../yocto-poky/scripts/lib/recipetool/create.py | 165 +- .../lib/recipetool/create_buildsys_python.py | 4 +- .../scripts/lib/recipetool/create_kernel.py | 4 +- .../scripts/lib/recipetool/create_npm.py | 111 +- .../yocto-poky/scripts/lib/recipetool/newappend.py | 16 +- .../yocto-poky/scripts/lib/recipetool/setvar.py | 2 +- .../yocto-poky/scripts/lib/scriptutils.py | 77 +- .../yocto-poky/scripts/lib/wic/__init__.py | 22 +- .../yocto-poky/scripts/lib/wic/__version__.py | 1 - .../canned-wks/directdisk-bootloader-config.cfg | 28 +- .../scripts/lib/wic/canned-wks/mkgummidisk.wks | 11 - .../lib/wic/canned-wks/qemux86-directdisk.wks | 2 +- import-layers/yocto-poky/scripts/lib/wic/conf.py | 103 - .../yocto-poky/scripts/lib/wic/config/wic.conf | 6 - .../yocto-poky/scripts/lib/wic/creator.py | 125 - import-layers/yocto-poky/scripts/lib/wic/engine.py | 98 +- .../yocto-poky/scripts/lib/wic/filemap.py | 7 +- import-layers/yocto-poky/scripts/lib/wic/help.py | 41 +- .../yocto-poky/scripts/lib/wic/imager/__init__.py | 0 .../scripts/lib/wic/imager/baseimager.py | 191 - .../yocto-poky/scripts/lib/wic/imager/direct.py | 419 - .../yocto-poky/scripts/lib/wic/ksparser.py | 57 +- import-layers/yocto-poky/scripts/lib/wic/msger.py | 235 - .../yocto-poky/scripts/lib/wic/partition.py | 312 +- import-layers/yocto-poky/scripts/lib/wic/plugin.py | 150 - .../yocto-poky/scripts/lib/wic/pluginbase.py | 72 +- .../scripts/lib/wic/plugins/imager/direct.py | 561 + .../lib/wic/plugins/imager/direct_plugin.py | 103 - .../scripts/lib/wic/plugins/source/bootimg-efi.py | 111 +- .../lib/wic/plugins/source/bootimg-partition.py | 61 +- .../lib/wic/plugins/source/bootimg-pcbios.py | 98 +- .../scripts/lib/wic/plugins/source/fsimage.py | 73 - .../lib/wic/plugins/source/isoimage-isohybrid.py | 188 +- .../scripts/lib/wic/plugins/source/rawcopy.py | 42 +- .../scripts/lib/wic/plugins/source/rootfs.py | 68 +- .../lib/wic/plugins/source/rootfs_pcbios_ext.py | 177 - import-layers/yocto-poky/scripts/lib/wic/test | 1 - .../yocto-poky/scripts/lib/wic/utils/errors.py | 29 - .../yocto-poky/scripts/lib/wic/utils/misc.py | 283 +- .../scripts/lib/wic/utils/oe/__init__.py | 22 - .../yocto-poky/scripts/lib/wic/utils/oe/misc.py | 246 - .../scripts/lib/wic/utils/partitionedfs.py | 379 - 
.../yocto-poky/scripts/lib/wic/utils/runner.py | 74 +- .../yocto-poky/scripts/lib/wic/utils/syslinux.py | 58 - .../yocto-poky/scripts/multilib_header_wrapper.h | 26 +- .../yocto-poky/scripts/oe-build-perf-report | 534 + .../yocto-poky/scripts/oe-build-perf-test | 100 +- .../yocto-poky/scripts/oe-buildenv-internal | 12 + .../yocto-poky/scripts/oe-find-native-sysroot | 70 +- import-layers/yocto-poky/scripts/oe-git-archive | 271 + import-layers/yocto-poky/scripts/oe-git-proxy | 21 + import-layers/yocto-poky/scripts/oe-pkgdata-util | 30 +- import-layers/yocto-poky/scripts/oe-publish-sdk | 4 +- import-layers/yocto-poky/scripts/oe-run-native | 34 +- import-layers/yocto-poky/scripts/oe-selftest | 164 +- import-layers/yocto-poky/scripts/oe-setup-builddir | 8 + import-layers/yocto-poky/scripts/oe-setup-rpmrepo | 30 +- import-layers/yocto-poky/scripts/oe-test | 105 + import-layers/yocto-poky/scripts/oe-trim-schemas | 9 + .../yocto-poky/scripts/oepydevshell-internal.py | 15 +- .../postinst-intercepts/update_gio_module_cache | 4 +- .../scripts/pybootchartgui/pybootchartgui/draw.py | 192 +- .../pybootchartgui/pybootchartgui/parsing.py | 122 +- .../pybootchartgui/pybootchartgui/samples.py | 27 + import-layers/yocto-poky/scripts/recipetool | 5 +- import-layers/yocto-poky/scripts/relocate_sdk.py | 2 + import-layers/yocto-poky/scripts/rpm2cpio.sh | 108 +- import-layers/yocto-poky/scripts/runqemu | 366 +- .../yocto-poky/scripts/runqemu-export-rootfs | 2 +- .../yocto-poky/scripts/runqemu-extract-sdk | 2 +- .../yocto-poky/scripts/runqemu-gen-tapdevs | 10 +- import-layers/yocto-poky/scripts/runqemu-ifdown | 6 +- import-layers/yocto-poky/scripts/runqemu-ifup | 6 +- .../yocto-poky/scripts/sysroot-relativelinks.py | 2 +- import-layers/yocto-poky/scripts/task-time | 132 + import-layers/yocto-poky/scripts/tiny/ksize.py | 17 +- import-layers/yocto-poky/scripts/tiny/ksum.py | 168 + import-layers/yocto-poky/scripts/verify-bashisms | 100 +- import-layers/yocto-poky/scripts/wic | 127 +- import-layers/yocto-poky/scripts/wipe-sysroot | 54 - .../yocto-poky/scripts/yocto-compat-layer-wrapper | 27 + .../yocto-poky/scripts/yocto-compat-layer.py | 205 + 3005 files changed, 122894 insertions(+), 148168 deletions(-) create mode 100644 import-layers/yocto-poky/bitbake/lib/bb/fetch2/s3.py create mode 100644 import-layers/yocto-poky/bitbake/lib/bb/remotedata.py delete mode 100644 import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py create mode 100644 import-layers/yocto-poky/bitbake/lib/bb/ui/taskexp.py create mode 100644 import-layers/yocto-poky/bitbake/lib/simplediff/LICENSE create mode 100644 import-layers/yocto-poky/bitbake/lib/simplediff/__init__.py delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/tests.py delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/contrib/README delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/README delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/TODO delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/config.py delete mode 100755 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/launcher.py delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/log/.create delete mode 100755 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/recv.py delete mode 100755 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/runner.py delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/settings.json delete mode 100644 
import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/shellutils.py delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/tests.py delete mode 100755 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/run_toastertests.py delete mode 100755 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_automation_test.py delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_test.cfg delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/urlcheck.py delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/urllist.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/__init__.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_loaddata.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_lsupdates.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_runbuilds.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/eventreplay/README create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/eventreplay/__init__.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/__init__.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/functional_helpers.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/test_functional_basic.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/views/README create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/views/__init__.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/tests/views/test_views.py delete mode 100755 import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/css/prettify.css create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/highlight.pack.js delete mode 100755 import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/prettify.js delete mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/toastergui/tests.py create mode 100644 import-layers/yocto-poky/bitbake/lib/toaster/toastermain/settings_production_example.py create mode 100644 import-layers/yocto-poky/documentation/ref-manual/ref-release-process.xml delete mode 100644 import-layers/yocto-poky/meta-poky/conf/toasterconf.json create mode 100644 import-layers/yocto-poky/meta-selftest/lib/devtool/bbpath.py create mode 100644 import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/cases/selftest.json create mode 100644 import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/cases/selftest.py delete mode 100644 import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/selftest.json delete mode 100644 import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/selftest.py create mode 100644 import-layers/yocto-poky/meta-selftest/lib/recipetool/bbpath.py create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/container-image/container-image-testpkg.bb create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/container-image/container-test-image.bb create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly/file1 create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly/file2 create mode 100644 
import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-patch-gz.bb create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-patch-gz/readme.patch.gz create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir.bb create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir/devtool-test-subdir.tar.gz create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir/testfile create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/m4/m4_%.bbappend delete mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/m4/m4_1.4.17.bbappend create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/postinst/postinst_1.0.bb create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/selftest-ed/selftest-ed_0.5.bb create mode 100644 import-layers/yocto-poky/meta-selftest/recipes-test/selftest-ed/selftest-ed_1.14.1.bb create mode 100644 import-layers/yocto-poky/meta-selftest/wic/test_rawcopy_plugin.wks.in create mode 100644 import-layers/yocto-poky/meta-selftest/wic/wictestdisk.wks delete mode 100644 import-layers/yocto-poky/meta-yocto-bsp/lib/oeqa/selftest/gummiboot.py create mode 100644 import-layers/yocto-poky/meta-yocto-bsp/recipes-graphics/xorg-xserver/xserver-xf86-config/beaglebone/xorg.conf create mode 100644 import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.10.bbappend delete mode 100644 import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.8.bbappend create mode 100644 import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.9.bbappend create mode 100644 import-layers/yocto-poky/meta-yocto-bsp/wic/beaglebone.wks create mode 100644 import-layers/yocto-poky/meta-yocto-bsp/wic/edgerouter.wks create mode 100644 import-layers/yocto-poky/meta-yocto-bsp/wic/genericx86.wks create mode 100644 import-layers/yocto-poky/meta-yocto-bsp/wic/mpc8315e-rdb.wks create mode 100644 import-layers/yocto-poky/meta/classes/devupstream.bbclass create mode 100644 import-layers/yocto-poky/meta/classes/go.bbclass create mode 100644 import-layers/yocto-poky/meta/classes/goarch.bbclass delete mode 100644 import-layers/yocto-poky/meta/classes/gummiboot.bbclass delete mode 100644 import-layers/yocto-poky/meta/classes/gzipnative.bbclass create mode 100644 import-layers/yocto-poky/meta/classes/image-container.bbclass delete mode 100644 import-layers/yocto-poky/meta/classes/image_types_uboot.bbclass create mode 100644 import-layers/yocto-poky/meta/classes/image_types_wic.bbclass create mode 100644 import-layers/yocto-poky/meta/classes/manpages.bbclass create mode 100644 import-layers/yocto-poky/meta/classes/perl-version.bbclass create mode 100644 import-layers/yocto-poky/meta/classes/relative_symlinks.bbclass create mode 100644 import-layers/yocto-poky/meta/classes/rm_work_and_downloads.bbclass create mode 100644 import-layers/yocto-poky/meta/conf/multiconfig/default.conf delete mode 100644 import-layers/yocto-poky/meta/conf/toasterconf.json create mode 100644 import-layers/yocto-poky/meta/lib/buildstats.py delete mode 100644 import-layers/yocto-poky/meta/lib/oe/tests/__init__.py delete mode 100644 import-layers/yocto-poky/meta/lib/oe/tests/test_elf.py delete mode 100644 import-layers/yocto-poky/meta/lib/oe/tests/test_license.py delete mode 100644 import-layers/yocto-poky/meta/lib/oe/tests/test_path.py delete mode 100644 
import-layers/yocto-poky/meta/lib/oe/tests/test_types.py delete mode 100644 import-layers/yocto-poky/meta/lib/oe/tests/test_utils.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/README create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/__init__.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/case.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/cases/__init__.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/cases/example/data.json create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/cases/example/test_basic.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/context.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/decorator/__init__.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/decorator/data.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/decorator/depends.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oeid.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oetag.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oetimeout.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/exception.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/loader.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/runner.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/target/__init__.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/target/qemu.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/target/ssh.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/tests/__init__.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/data.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/depends.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/loader/invalid/oeid.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/loader/valid/another.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/oeid.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/oetag.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/timeout.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/tests/common.py create mode 100755 import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_data.py create mode 100755 import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_decorators.py create mode 100755 import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_loader.py create mode 100755 import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_runner.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/utils/__init__.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/utils/misc.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/utils/path.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/core/utils/test.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/files/test.c create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/files/test.cpp create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/files/test.pl create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/files/test.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/_ptest.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/_qemutiny.py delete mode 100644 
import-layers/yocto-poky/meta/lib/oeqa/runtime/buildcvs.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/buildgalculator.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/buildiptables.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/case.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/_ptest.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/_qemutiny.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildcpio.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildgalculator.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildlzip.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/connman.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/date.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/df.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/dnf.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/gcc.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/kernelmodule.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ldd.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/logrotate.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/multilib.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/oe_syslog.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/pam.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/parselogs.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/perl.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ping.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/python.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/rpm.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/scanelf.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/scp.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/skeletoninit.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ssh.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/systemd.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/x32lib.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/xorg.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/connman.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/context.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/date.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/decorator/package.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/df.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.c delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.cpp delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.pl delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/files/testsdkmakefile delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/gcc.py delete mode 100644 
import-layers/yocto-poky/meta/lib/oeqa/runtime/kernelmodule.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/ldd.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/loader.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/logrotate.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/multilib.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/pam.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/parselogs.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/perl.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/ping.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/python.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/rpm.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/scanelf.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/scp.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/skeletoninit.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/smart.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/ssh.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/syslog.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/systemd.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/utils/__init__.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/utils/targetbuildproject.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/x32lib.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/runtime/xorg.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/buildcvs.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/buildgalculator.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/buildiptables.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/case.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildcpio.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildgalculator.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildlzip.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/gcc.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/perl.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/python.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/context.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/files/testsdkmakefile delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/gcc.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/perl.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/python.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/utils/__init__.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdk/utils/sdkbuildproject.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdkext/case.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdkext/cases/devtool.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdkext/cases/sdk_update.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdkext/context.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdkext/devtool.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/sdkext/sdk_update.py delete mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/_toaster.py create 
mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/containerimage.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/image_typedep.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/__init__.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/buildhistory.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/elf.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/license.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/path.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/types.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/utils.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/runqemu.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/selftest/tinfoil.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/utils/buildproject.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/utils/metadata.py create mode 100644 import-layers/yocto-poky/meta/lib/oeqa/utils/subprocesstweak.py create mode 100644 import-layers/yocto-poky/meta/lib/rootfspostcommands.py delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/acpid/acpid_2.0.27.bb create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/acpid/acpid_2.0.28.bb create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi/0001-Mark-our-explicit-fall-through-so-Wextra-will-work-i.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi/aarch64-initplat.c-fix-const-qualifier.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/files/0001-btrfs-avoid-used-uninitialized-error-with-GCC7.patch create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/files/0001-build-Use-AC_HEADER_MAJOR-to-find-device-macros.patch create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/files/0002-i386-x86_64-ppc-fix-switch-fallthrough-cases-with-GC.patch create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/files/0003-Add-gnulib-fix-gcc7-fallthrough.diff.patch create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/files/0004-Fix-remaining-cases-of-gcc-7-fallthrough-warning.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/autohell.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/grub-support-256byte-inode.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/grub_fix_for_automake-1.12.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/no-reorder-functions.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/objcopy-absolute.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/grub/grub_0.97.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/0001-console-Fix-C-syntax-errors-for-function-declaration.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/fix-objcopy.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/pciutils/pciutils_3.5.1.bb create mode 100644 
import-layers/yocto-poky/meta/recipes-bsp/pciutils/pciutils_3.5.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/systemd-boot/systemd-boot.bb create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/systemd-boot/systemd-boot_232.bb create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/u-boot/files/default-gcc.patch create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-common_2017.01.inc delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-fw-utils_2016.03.bb create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-fw-utils_2017.01.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-mkimage_2016.03.bb create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-mkimage_2017.01.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot_2016.03.bb create mode 100644 import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot_2017.01.bb delete mode 100755 import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d/fbsetup delete mode 100644 import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d/uvesafb.conf create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5/cve-2017-1000250.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5_5.41.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5_5.43.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/connman/connman/0001-Fix-compile-on-musl-with-kernel-4.9-headers.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/dhcp/dhcp_4.3.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/dhcp/dhcp_4.3.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2/0001-libc-compat.h-add-musl-workaround.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2/iproute2-4.3.0-musl.patch create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2_4.10.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2_4.7.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/iw/iw_4.7.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/iw/iw_4.9.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/0001-Fix-compiler_state_t.ai-usage-when-INET6-is-not-defi.patch create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/0002-Add-missing-compiler_state_t-parameter.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/aclocal.patch create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/disable-remote.patch create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/fix-grammar-deps.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap_1.7.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap_1.8.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/neard/neard/0001-Add-header-dependency-to-nciattach.o.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-nfs-utils-statd-fix-a-segfault-caused-by-improper-us.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/fix-protocol-minor-version-fall-back.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono_1.18.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono_1.19.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/fix-CVE-2016-8858.patch create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshd_check_keys delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.3p1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.4p1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-CVE-2017-3731.patch create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-Fix-build-with-clang-using-external-assembler.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0002-CVE-2017-3731.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2016-7055.patch create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/debian1.0.2/soname.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2j.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2k.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp/0001-ppp-Remove-unneeded-include.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-Reject-psk-parameter-set-with-invalid-passphrase-cha.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-WPS-Reject-a-Credential-with-invalid-passphrase.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0002-Reject-SET_CRED-commands-with-newline-characters-in-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0002-Remove-newlines-from-wpa_supplicant-config-network-o.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0003-Reject-SET-commands-with-newline-characters-in-the-s.patch create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/key-replay-cve-multiple.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/busybox/busybox/0001-ip-fix-an-improper-optimization-req.r.rtm_scope-may-.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/busybox/busybox/0001-iproute-support-scope-.-Closes-8561.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/busybox/busybox/busybox-tar-add-IF_FEATURE_-checks.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-core/busybox/busybox_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/console-tools/console-tools-0.3.2/0001-Fix-format-security-compilation-error.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-6.9-cp-i-u.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-fix-install.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-fix-texinfo.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-i18n.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-ls-x.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-overflow.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils_fix_for_automake-1.12.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/fix_for_manpage_building.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/futimens.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/gnulib_m4.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/loadavg.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/man-touch.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/0001-Unset-need_charset_alias-when-building-for-musl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/0001-uname-report-processor-and-hardware-correctly.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/disable-ls-output-quoting.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/fix-selinux-flask.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/man-decouple-manpages-from-build.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/remove-usr-local-lib-from-m4.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-Unset-need_charset_alias-when-building-for-musl.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-local.mk-fix-cross-compiling-problem.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-uname-report-processor-and-hardware-correctly.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/disable-ls-output-quoting.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/fix-selinux-flask.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/man-decouple-manpages-from-build.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/remove-usr-local-lib-from-m4.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_6.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_8.25.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_8.26.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/dbus/dbus-glib_0.106.bb 
create mode 100644 import-layers/yocto-poky/meta/recipes-core/dbus/dbus-glib_0.108.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/dbus/dbus-test_1.10.10.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/dbus/dbus-test_1.10.14.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/dbus/dbus_1.10.10.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/dbus/dbus_1.10.14.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/disable_java.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/fix_aclocal_version.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/fix_gnu_source_circular.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/gettext-autoconf-lib-link-no-L.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/gettext-vpath.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/hardcode_macro_version.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/linklib_from_0.17.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.19.8.1/cr-statement.c-timsort.h-fix-formatting-issues.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/gettext/gettext_0.16.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/0001-Test-for-pthread_getname_np-before-using-it.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/0002-tests-Ignore-y2k-warnings.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/gi-exclude.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0_2.48.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0_2.50.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glib-networking/glib-networking_2.48.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/glib-networking/glib-networking_2.50.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.24.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.25.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial_2.24.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial_2.25.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale_2.24.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale_2.25.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace_2.24.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace_2.25.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts_2.24.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts_2.25.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-Add-atomic_exchange_relaxed.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-Include-locale_t.h-compatibility-header.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-locale-fix-hard-coded-reference-to-gcc-E.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0002-Add-atomic-operations-required-by-the-new-condition-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0003-Add-pretty-printers-for-the-NPTL-lock-types.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0004-New-condvar-implementation-that-provides-stronger-or.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0005-Remove-__ASSUME_REQUEUE_PI.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0006-Fix-atomic_fetch_xor_release.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0026-build_local_scope.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0026-elf-dl-deps.c-Make-_dl_build_local_scope-breadth-fir.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0027-locale-fix-hard-coded-reference-to-gcc-E.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0028-Bug-20116-Fix-use-after-free-in-pthread_create.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0028-Rework-fno-omit-frame-pointer-support-on-i386.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2016-6323.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc_2.24.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/glibc/glibc_2.25.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/ifupdown/ifupdown_0.8.16.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/ifupdown/ifupdown_0.8.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/images/build-appliance-image/README_VirtualBox_Toaster.txt create mode 100644 import-layers/yocto-poky/meta/recipes-core/images/core-image-tiny-initramfs.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/kbd/kbd_2.0.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/kbd/kbd_2.0.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/0001-Make-ptest-run-the-python-tests-if-python-is-enabled.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2016-9318.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-0663.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-5969.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-8872.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-9047_CVE-2017-9048.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-9049_CVE-2017-9050.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-fix_and_simplify_xmlParseStartTag2.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/meta/build-sysroots.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/meta/wic-tools.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/ncurses/files/configure-reproducible.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/ncurses/files/fix-cflags-mangle.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses_6.0+20160625.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses_6.0+20161126.bb delete mode 100644 
import-layers/yocto-poky/meta/recipes-core/netbase/netbase_5.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/netbase/netbase_5.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf-shell-image.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0001-MdeModulePkg-UefiHiiLib-Fix-incorrect-comparison-exp.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0002-ovmf-update-path-to-native-BaseTools.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0003-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0007-OvmfPkg-EnrollDefaultKeys-application-for-enrolling-.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/VfrCompile-increase-path-length-limit.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/ovmf-shell-image.wks create mode 100644 import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/readline/files/config-dirent-symbols.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/readline/readline-5.2/configure-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/readline/readline-5.2/fix-redundant-rpath.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/readline/readline-6.3/configure-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/readline/readline-6.3/norpath.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/readline/readline-7.0/configure-fix.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/readline/readline-7.0/norpath.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/readline/readline_5.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/readline/readline_6.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/readline/readline_7.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/systemd/systemd/0001-core-load-fragment-refuse-units-with-errors-in-certa.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/systemd/systemd/validate-user.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/udev/eudev/0014-Revert-rules-remove-firmware-loading-rules.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/udev/eudev/Revert-udev-remove-userspace-firmware-loading-suppor.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/udev/eudev_3.2.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/udev/eudev_3.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux_2.28.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux_2.29.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/Makefile-runtests.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/ldflags-tests.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/remove.ldconfig.call.patch create mode 100644 import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/run-ptest delete mode 100644 import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/Makefile-runtests.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/ldflags-tests.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/remove.ldconfig.call.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/run-ptest create mode 100644 import-layers/yocto-poky/meta/recipes-core/zlib/zlib_1.2.11.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-core/zlib/zlib_1.2.8.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf/AC_HEADER_MAJOR-port-to-glibc-2.25.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf/autoconf-replace-w-option-in-shebangs-with-modern-use-warnings.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen-native_5.18.10.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen-native_5.18.12.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen/0001-config-libopts.m4-regenerate-it-from-config-libopts..patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen/0002-autoopts-mk-tpl-config.sh-fix-perl-path.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/automake/automake/automake-replace-w-option-in-shebangs-with-modern-use-warnings.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-2.27.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-2.28.inc delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian_2.27.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian_2.28.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross_2.27.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross_2.28.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-crosssdk_2.27.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-crosssdk_2.28.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-Generate-relocatable-SDKs.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-binutils-crosssdk-Generate-relocatable-SDKs.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-ppc-apuinfo-for-spe-parsed-incorrectly.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0002-binutils-cross-Do-not-generate-linker-script-directo.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0002-configure-widen-the-regexp-for-SH-architectures.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0003-Point-scripts-location-to-libdir.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0003-gprof-add-uclibc-support-to-configure.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0004-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0004-Point-scripts-location-to-libdir.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0005-Explicitly-link-with-libm-on-uclibc.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0005-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch create 
mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0006-Explicitly-link-with-libm-on-uclibc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0006-Use-libtool-2.4.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0007-Add-the-armv5e-architecture-to-binutils.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0007-Use-libtool-2.4.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0008-Add-the-armv5e-architecture-to-binutils.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0008-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0009-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0009-warn-for-uses-of-system-directories-when-cross-linki.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0010-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0010-warn-for-uses-of-system-directories-when-cross-linki.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0011-Change-default-emulation-for-mips64-linux.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0011-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0012-Add-support-for-Netlogic-XLP.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0012-Change-default-emulation-for-mips64-linux.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0013-Add-support-for-Netlogic-XLP.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0013-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0014-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0014-libtool-remove-rpath.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0015-Refine-.cfi_sections-check-to-only-consider-compact-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0015-binutils-mips-gas-pic-relax-linkonce.diff create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0015-sync-with-OE-libtool-changes.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0016-Detect-64-bit-MIPS-targets.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0016-Fix-seg-fault-in-ARM-linker-when-trying-to-parse-a-b.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0017-Fix-the-generation-of-alignment-frags-in-code-sectio.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0017-bfd-Improve-lookup-of-file-line-information-for-erro.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0018-PR-21409-segfault-in-_bfd_dwarf2_find_nearest_line.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7223.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7614.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8393.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8394.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8395.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8396_8397.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8398.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8421.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9038_9044.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9039.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9040_9042.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9742.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9744.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9745.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9746.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9747.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9748.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9749.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9750.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9751.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9752.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9753.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9755.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9756.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9954.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_1.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_2.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_3.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_4.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_5.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_6.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_7.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_8.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_9.patch 
delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils_2.27.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils_2.28.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/bison/bison/bison-2.3_m4.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/bison/bison_2.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools/fix-parallel.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools_4.7.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools_4.9.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache_3.2.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache_3.3.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake-native_3.6.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake-native_3.7.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake/0001-KWIML-tests-Remove-format-security-from-flags.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake/avoid-gcc-warnings-with-Wstrict-prototypes.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake_3.6.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake_3.7.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c/0001-Correctly-install-the-shared-library.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c/0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/files/0001-curl-allow-overriding-default-CA-certificate-file.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/files/0001-update-Compare-computed-vs-expected-sha256-digit-str.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Corretly-install-tmpfiles.d-configuration.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Do-not-prepend-installroot-to-logdir.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0030-Run-python-scripts-using-env.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/docbook-dsssl-stylesheets/docbook-dsssl-stylesheets-native_1.79.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-3.1-native_3.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-4.1-native_4.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-4.5-native.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-native.inc delete mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/files/LICENSE-OASIS delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/docbook-utils/docbook-utils-0.6.14/re.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/docbook-utils/docbook-utils-native_0.6.14.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/0001-Include-fcntl.h-for-getting-loff_t-definition.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/alignment_hack.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/dosfstools-msdos_fs-types.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/fix_populated_dosfs_creation.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/include-linux-types.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/mkdosfs-bootcode.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/mkdosfs-dir.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/msdos_fat12_undefined.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/nofat32_autoselect.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_2.11.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_4.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_4.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg/0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.10.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.7.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/e2fsprogs-1.43-sysmacros.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.43.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.43.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/arm_backend.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elf_additions.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elf_begin.c-CVE-2014-9447-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-ar-c-fix-num-passed-to-memset.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-fsize.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/fix-build-gcc-4.8.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/fix_for_gcc-4.7.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/gcc6.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/hppa_backend.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/i386_dis.h delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/m68k_backend.diff delete mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/mips_backend.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-portability.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-robustify.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/remove-unused.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/testsuite-ignore-elflint.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/uclibc-support-for-elfutils-0.148.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/x86_64_dis.h delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-Add-GCC7-Wimplicit-fallthrough-support-fixes.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-Ignore-differences-between-mips-machine-identifiers.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-ar-Fix-GCC7-Wformat-length-issues.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-build-Provide-alternatives-for-glibc-assumptions-hel.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-elf_getarsym-Silence-Werror-maybe-uninitialized-fals.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-fix-a-stack-usage-warning.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-remove-the-unneed-checking.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0002-Add-support-for-mips64-abis-in-mips_retval.c.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0003-Add-mips-n64-relocation-format-hack.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/Fix_one_GCC7_warning.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/aarch64_uio.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/arm_backend.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/elfcmp-fix-self-comparision.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/fallthrough.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/fixheadercheck.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/hppa_backend.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/kfreebsd_path.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/m68k_backend.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/mips_backend.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/mips_readelf_w.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/shadow.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/testsuite-ignore-elflint.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/uclibc-support.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-build-Provide-alternatives-for-glibc-assumptions-hel.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-elf_getarsym-Silence-Werror-maybe-uninitialized-fals.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-fix-a-stack-usage-warning.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-remove-the-unneed-checking.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/Fix_one_GCC7_warning.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/aarch64_uio.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0001-Ignore-differences-between-mips-machine-identifiers.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0002-Add-support-for-mips64-abis-in-mips_retval.c.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0003-Add-mips-n64-relocation-format-hack.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/arm_backend.diff create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/hppa_backend.diff create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/hurd_path.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/ignore_strmerge.diff create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/kfreebsd_path.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/mips_backend.diff create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/mips_readelf_w.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/testsuite-ignore-elflint.diff create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/fallthrough.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/fixheadercheck.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/shadow.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.148.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.166.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.168.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/expect/expect/0001-Resolve-string-formatting-issues.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/file/file_5.28.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/file/file_5.30.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-5.4/CVE-2016-6131.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2.inc delete mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0002-uclibc-conf.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0003-gcc-uclibc-locale-ctype_touplow_t.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0004-uclibc-locale.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0005-uclibc-locale-no__x.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0006-uclibc-locale-wchar_fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0007-uclibc-locale-update.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0008-missing-execinfo_h.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0009-c99-snprintf.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0010-gcc-poison-system-directories.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0011-gcc-poison-dir-extend.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0013-64-bit-multilib-hack.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0014-optional-libstdc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0016-COLLECT_GCC_OPTIONS.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0017-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0018-fortran-cross-compile-hack.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0019-cpp-honor-sysroot.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0020-MIPS64-Default-to-N64-ABI.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0021-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0022-gcc-Fix-argument-list-too-long-error.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0023-Disable-sdt.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0024-libtool.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0025-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0026-Use-the-multilib-config-files-from-B-instead-of-usin.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0027-Avoid-using-libdir-from-.la-which-usually-points-to-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0028-export-CPP.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0029-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0030-Disable-the-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0031-Ensure-target-gcc-headers-can-be-included.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0032-gcc-4.8-won-t-build-with-disable-dependency-tracking.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0033-Don-t-search-host-directory-during-relink-if-inst_pr.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0034-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0035-aarch64-Add-support-for-musl-ldso.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0036-libcc1-fix-libcc1-s-install-path-and-rpath.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0037-handle-sysroot-support-for-nativesdk-gcc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0038-Search-target-sysroot-gcc-version-specific-dirs-with.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0039-Fix-various-_FOR_BUILD-and-related-variables.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0041-ssp_nonshared.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0042-gcc-libcpp-support-ffile-prefix-map-old-new.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0043-Reuse-fdebug-prefix-map-to-replace-ffile-prefix-map.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0044-gcc-final.c-fdebug-prefix-map-support-to-remap-sourc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0045-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0046-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0047-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0048-ARM-PR-target-71056-Don-t-use-vectorized-builtins-wh.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/CVE-2016-4490.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/ubsan-fix-check-empty-string.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0002-uclibc-conf.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0003-gcc-uclibc-locale-ctype_touplow_t.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0004-uclibc-locale.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0005-uclibc-locale-no__x.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0006-uclibc-locale-wchar_fix.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0007-uclibc-locale-update.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0008-missing-execinfo_h.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0009-c99-snprintf.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0010-gcc-poison-system-directories.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0011-gcc-poison-dir-extend.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0013-64-bit-multilib-hack.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0014-optional-libstdc.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0016-COLLECT_GCC_OPTIONS.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0017-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0018-fortran-cross-compile-hack.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0019-cpp-honor-sysroot.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0020-MIPS64-Default-to-N64-ABI.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0021-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0022-gcc-Fix-argument-list-too-long-error.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0023-Disable-sdt.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0024-libtool.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0025-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0026-Use-the-multilib-config-files-from-B-instead-of-usin.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0027-Avoid-using-libdir-from-.la-which-usually-points-to-.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0028-export-CPP.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0029-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0030-Disable-the-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0031-Ensure-target-gcc-headers-can-be-included.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0032-gcc-4.8-won-t-build-with-disable-dependency-tracking.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0033-Don-t-search-host-directory-during-relink-if-inst_pr.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0034-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0035-aarch64-Add-support-for-musl-ldso.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0036-libcc1-fix-libcc1-s-install-path-and-rpath.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0037-handle-sysroot-support-for-nativesdk-gcc.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0038-Search-target-sysroot-gcc-version-specific-dirs-with.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0039-Fix-various-_FOR_BUILD-and-related-variables.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0041-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0042-gcc-libcpp-support-ffile-prefix-map-old-new.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0043-Reuse-fdebug-prefix-map-to-replace-ffile-prefix-map.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0044-gcc-final.c-fdebug-prefix-map-support-to-remap-sourc.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0045-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0046-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0047-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0048-sync-gcc-stddef.h-with-musl.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0054_all_nopie-all-flags.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0055-unwind_h-glibc26.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/CVE-2016-6131.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/ubsan-fix-check-empty-string.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-canadian_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-canadian_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk-initial_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk-initial_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source_6.2.bb create mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran_6.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-7.11.1.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-7.12.1.inc delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross-canadian_7.11.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross-canadian_7.12.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross_7.11.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross_7.12.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0011-avx_mpx.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/package_devel_gdb_patches_120-sigprocmask-invalid-call.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb_7.11.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb_7.12.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/git/git_2.11.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/git/git_2.9.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/0001-cmd-ld-set-alignment-for-the-.rel.plt-section-on-32-.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/016-armhf-elf-header.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/go-cross-backport-cmd-link-support-new-386-amd64-rel.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/syslog.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/armhf-elf-header.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/fix-cc-handling.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/fix-target-cc-for-build.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/gotooldir.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/split-host-and-target-build.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/syslog.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/armhf-elf-header.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/fix-cc-handling.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/fix-target-cc-for-build.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/gotooldir.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/split-host-and-target-build.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/syslog.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/armhf-elf-header.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/fix-cc-handling.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/fix-target-cc-for-build.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/gotooldir.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/split-host-and-target-build.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/syslog.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-bootstrap-native_1.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-common.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-cross.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-cross_1.7.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-cross_1.8.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-native.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go-native_1.8.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go_1.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go_1.7.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/go/go_1.8.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/guile/files/0002-Recognize-nios2-as-compilation-target.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/guile/guile_2.0.12.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/guile/guile_2.0.14.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/guilt/files/guilt-bash.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/guilt/guilt-native_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/json-c/json-c/0001-Add-FALLTHRU-comment-to-handle-GCC7-warnings.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0001-Make-__comps_objmrtree_all-static-inline.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0002-Set-library-installation-path-correctly.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0002-Prefix-sysroot-path-to-introspection-tools-path.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0003-Set-the-library-installation-directory-correctly.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0001-Correctly-set-the-library-installation-directory.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0003-tests-fix-a-race-when-deleting-temporary-directories.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools-native_0.9.69.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_dvips_doc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_sgml2rtf.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_tex_doc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_txt_doc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.17.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.18.inc delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.9.inc delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/m4/m4-native_1.4.17.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/m4/m4-native_1.4.18.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/m4/m4/fix_for_circular_dependency.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.17.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.18.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/make/make-3.81/make_fix_for_automake-1.12.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/make/make-3.81/makeinfo.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/make/make_3.81.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/mklibs/mklibs-native_0.1.41.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/mklibs/mklibs-native_0.1.43.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/fix-broken-lz.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/mtools.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/no-x11.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools_3.9.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/fix-regex.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/makefile.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/msggen.pl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/no-libtool.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/reautoconf.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/user-declared-default-constructor.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-native_1.3.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/opensp/opensp-1.5.2/obsolete_automake_macros.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/opensp/opensp_1.5.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg/status-conffile.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/orc/orc_0.4.25.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/orc/orc_0.4.26.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/patch/patch/debian.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/patch/patch/global-reject-file.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/patch/patch/install.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/patch/patch/unified-reject-files.diff delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/patch/patch_2.5.9.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/patchelf/patchelf/avoidholes.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pax-utils/pax-utils_1.1.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pax-utils/pax-utils_1.2.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/liburi-perl_1.60.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/liburi-perl_1.71.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl-native_5.22.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl-native_5.24.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl-rdepends_5.22.1.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl-rdepends_5.24.1.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl/0001-Configure-Remove-fstack-protector-strong-for-native-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2015-8607.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-1238.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-2381.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-6185.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-remove-nm-from-libswanted.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl_5.22.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/perl/perl_5.24.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0001-Don-t-send-SIGUSR1-to-init.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0001-Quiet-diagnostics-during-startup-for-pseudo-d.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0002-Use-correct-file-descriptor.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0003-Fix-renameat-parallel-to-previous-fix-to-rename.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/Fix-xattr-performance.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/More-correctly-fix-xattrs.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/b6b68db896f9963558334aff7fca61adde4ec10f.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/efe0be279901006f939cd357ccee47b651c786da.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.8.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.8.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips/_numpyconfig.h delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips/config.h delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64/_numpyconfig.h delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64/config.h delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64n32/_numpyconfig.h delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64n32/config.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32eb/_numpyconfig.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32eb/config.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32el/_numpyconfig.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32el/config.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64eb/_numpyconfig.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64eb/config.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64el/_numpyconfig.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64el/config.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32eb/_numpyconfig.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32eb/config.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32el/config.h create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32el/numpyconfig.h delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsel/config.h delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsel/numpyconfig.h delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python-numpy_1.11.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python-numpy_1.11.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python3-numpy_1.11.0.bb create mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python3-numpy_1.11.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-git_2.0.7.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-git_2.1.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-mako_1.0.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-mako_1.0.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-native-2.7-manifest.inc create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-native-3.5-manifest.inc delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-native/avoid_parallel_make_races_on_pgen.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-native_2.7.12.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-native_2.7.13.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-pexpect_4.2.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-pexpect_4.2.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-pycurl.inc delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-scons-native_2.5.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-scons-native_2.5.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-scons_2.5.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-scons_2.5.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools_22.0.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools_32.1.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/channels-rpm_sys-use-md5sum-instead-of-mtime-as-the-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-add-deugging-when-targetpath-is-empty.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-add-for-rpm-ignoresize-check.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-already-installed-message.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-attempt-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-attempt.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-cache.py-getPackages-matches-name-version.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-channel-remove-all.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-channelsdir.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-locale.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-recommends.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-rpm-transaction-failure-check.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-set-noprogress-for-pycurl.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smartpm-rpm5-support-check-signatures.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python/CVE-2016-5636.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python/Don-t-use-getentropy-on-Linux.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python/avoid_parallel_make_races_on_pgen.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python/python-fix-CVE-2016-1000110.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-docutils_0.12.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-docutils_0.13.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-git_2.0.7.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-git_2.1.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-iniparse/0001-Add-python-3-compatibility.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-iniparse_0.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-mako_1.0.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-mako_1.0.6.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-pip_8.1.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-pip_9.0.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-pycurl_7.21.5.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygobject_3.20.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygobject_3.22.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygpgme_0.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-setuptools_22.0.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3-setuptools_32.1.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3/020-dont-compile-python-files.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3/CVE-2016-5636.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3/upstream-random-fixes.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python3/use_packed_importlib.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python_2.7.12.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/python/python_2.7.13.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-Provide-support-for-the-CUSE-TPM.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-target-mips-add-24KEc-CPU-definition.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-virtio-zero-vq-inuse-in-virtio_reset.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0002-Introduce-condition-to-notify-waiters-of-completed-c.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0002-fix-CVE-2016-7423.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0003-Introduce-condition-in-TPM-backend-for-notification.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0003-fix-CVE-2016-7908.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0004-Add-support-for-VM-suspend-resume-for-TPM-TIS.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0004-fix-CVE-2016-7909.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-9908.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-9912.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/Qemu-Arm-versatilepb-Add-memory-size-checking.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/glibc-2.25.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/target-ppc-fix-user-mode.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu_2.7.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu_2.8.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native_0.64.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native_0.65.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt/0001-tests-Allow-different-output-from-mv.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt/install.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt_0.64.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt_0.65.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Add-PYTHON_ABI-when-searching-for-python-libraries.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Add-a-color-setting-for-mips64_n32-binaries.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-read-config-files-from-HOME.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Fix-build-with-musl-C-library.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-When-cross-installing-execute-package-scriptlets-wit.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-When-nice-value-cannot-be-reset-issue-a-notice-inste.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-perl-disable-auto-reqs.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0011-Do-not-require-that-ELF-binaries-are-executable-to-b.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0012-Use-conditional-to-access-_docdir-in-macros.in.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0013-Add-a-new-option-alldeps-to-rpmdeps.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-Disable-__sync_add_and_fetch_8-on-nios2.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-define-EM_AARCH64.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-macros-add-_gpg_sign_cmd_extra_args.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-rpm-Fix-build-on-musl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-system.h-query.c-support-nosignature.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-using-poptParseArgvString-to-parse-the-_gpg_check_pa.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/configure.ac-check-for-both-gpg2-and-gpg.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/dbconvert.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/debugedit-segv.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/debugedit-valid-file-to-fix-segment-fault.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/gcc6-stdlib.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/header-include-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/makefile-am-exec-hook.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/no-ldflags-in-pkgconfig.patch delete mode 100755 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/perfile_rpmdeps.sh delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/popt-disable-auto-stack-protector.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/python-rpm-rpmsense.patch delete mode 100755 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/pythondeps.sh delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-atomic-ops.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-autogen-force.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-autogen.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-canonarch.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-check-rootpath-reasonableness.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db-reduce.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db5-or-db6.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db60.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db_buffer_small.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-debug-platform.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-disable-auto-stack-protector.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-disable-blaketest.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-ensure-rpm2cpio-call-rpm-relocation-code.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fileclass.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-logio-cp.patch delete mode 
100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-lua-tests-compilation-failure.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-parseEmbedded.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-gnu-atomic.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-hardlink-segfault-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-keccak-sse-intrin.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lib-transaction.c-fix-file-conflicts-for-mips64-N32.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-libsql-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-log-auto-rm.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lsb-compatibility.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lua.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-macros.in-disable-external-key-server.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-macros.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-mongodb-sasl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-no-loopmsg.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-no-perl-urpm.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-opendb-before-verifyscript-to-avoid-null-point.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-ossp-uuid.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-packageorigin.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-payload-use-hashed-inode.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-pkgconfigdeps.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform-file-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform2.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-py-init.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-AddErase.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-restore-origin.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-tagname.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-realpath.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-reloc-macros.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-resolvedep.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmdb-grammar.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmfc.c-fix-for-N32-MIPS64.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmio-headers.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmpgp-popt.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-scriptletexechelper.patch delete mode 
100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-showrc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-syck-fix-gram.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-tag-generate-endian-conversion-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-tagname-type.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-tools-mtree-LDFLAGS.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-uuid-include.patch delete mode 100755 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm2cpio delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpmatch.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpmdb-more-verbose-error-logging-in-rpmTempFile.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpmdb-prevent-race-in-tmpdir-creation.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpmdeps-oecore.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpmqv.c-check-_gpg_passphrase-before-ask-for-input.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpmqv_cc_b_gone.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/uclibc-support.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm_5.4.16.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpmresolve/rpmresolve.c delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rpm/rpmresolve_1.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rsync/rsync-2.6.9/rsync-2.6.9-fname-obo.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rsync/rsync-2.6.9/rsyncd.conf delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/rsync/rsync_2.6.9.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/ruby/ruby/CVE-2017-14064.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/ruby/ruby/ruby-CVE-2017-9224.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/ruby/ruby/ruby-CVE-2017-9226.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/ruby/ruby/ruby-CVE-2017-9227.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/ruby/ruby/ruby-CVE-2017-9228.patch create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/ruby/ruby/ruby-CVE-2017-9229.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/ruby/ruby_2.2.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/ruby/ruby_2.4.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/sgml-common/sgml-common-0.6.3/autohell.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/sgml-common/sgml-common-0.6.3/license.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/sgml-common/sgml-common-native_0.6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/sgml-common/sgml-common_0.6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/sgmlspl/sgmlspl-native_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/strace/strace/0001-caps-abbrev.awk-fix-gawk-s-path.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-devtools/strace/strace/use-asm-sgidefs.h.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/strace/strace_4.13.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/strace/strace_4.16.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/subversion/subversion/0001-fix-svnadmin-create-fail-on-x86.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/subversion/subversion_1.9.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/subversion/subversion_1.9.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/swig/swig_3.0.12.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/swig/swig_3.0.8.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/syslinux/syslinux/syslinux-6.03-sysmacros.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_230.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/systemd-bootchart/systemd-bootchart_231.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-devtools/vala/vala_0.32.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-devtools/vala/vala_0.34.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/acpica/acpica_20150515.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/acpica/acpitests/aapits-linux.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/acpica/acpitests/aapits-makefile.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/acpica/acpitests_20140828.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/acpica/files/manipulate-fds-instead-of-FILE.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/acpica/files/no-werror.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/acpica/files/rename-yy_scan_string-manually.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/bash/bash-3.2.57/build-tests.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/bash/bash-3.2.57/dont-include-target-CFLAGS-in-host-LDFLAGS.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/bash/bash-3.2.57/mkbuiltins_have_stringize.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/bash/bash-3.2.57/run-ptest delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/bash/bash-3.2.57/string-format.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/bash/bash-3.2.57/test-output.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/bash/bash/bash-memleak-bug-fix-for-builtin-command-read.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/bash/bash_3.2.57.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/byacc/byacc/0001-byacc-do-not-reorder-CC-and-CFLAGS.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/byacc/byacc_20160606.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/byacc/byacc_20161202.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/cpio/cpio-2.8/avoid_heap_overflow.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/cpio/cpio-2.8/fix-memory-overrun.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/cpio/cpio-2.8/m4extensions.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/cpio/cpio-2.8/statdef.patch 
delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/cpio/cpio_2.8.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/cups/cups_2.1.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/cups/cups_2.2.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/diffutils/diffutils-2.8.1/diffutils_fix_for_automake-1.12.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/diffutils/diffutils-2.8.1/fix_gcc6.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/diffutils/diffutils-3.4/0001-Unset-need_charset_alias-when-building-for-musl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/diffutils/diffutils-3.4/run-ptest create mode 100644 import-layers/yocto-poky/meta/recipes-extended/diffutils/diffutils-3.5/0001-Unset-need_charset_alias-when-building-for-musl.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/diffutils/diffutils-3.5/run-ptest delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/diffutils/diffutils_2.8.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/diffutils/diffutils_3.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/diffutils/diffutils_3.5.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ed/ed_0.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ed/ed_1.14.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ed/ed_1.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ethtool/ethtool_4.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ethtool/ethtool_4.8.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/findutils/findutils-4.2.31/findutils-fix-doc-build-error.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/findutils/findutils-4.2.31/findutils_fix_for_automake-1.12.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/findutils/findutils-4.2.31/gnulib-extension.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/findutils/findutils_4.2.31.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/gawk/gawk-3.1.5/0001-gawk-fix-non-glibc-gcc-4-compilation.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/gawk/gawk-3.1.5/gawk-3.1.5_fix_for_automake-1.12.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/gawk/gawk-4.1.3/run-ptest create mode 100644 import-layers/yocto-poky/meta/recipes-extended/gawk/gawk-4.1.4/run-ptest create mode 100644 import-layers/yocto-poky/meta/recipes-extended/gawk/gawk-4.1.4/test-arrayind1-Remove-hashbang-line.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/gawk/gawk_3.1.5.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/gawk/gawk_4.1.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/gawk/gawk_4.1.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2016-7977.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2016-7978.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2016-7979.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2016-8602.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2017-11714.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2017-7975.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2017-9216.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2017-9611.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2017-9612.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2017-9726.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2017-9727.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2017-9739.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/CVE-2017-9835.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mips/objarch.h delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mips64/objarch.h delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mips64el/objarch.h delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mips64eln32/objarch.h delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mips64n32/objarch.h create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mipsarchn32eb/objarch.h create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mipsarchn32el/objarch.h create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mipsarchn64eb/objarch.h create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mipsarchn64el/objarch.h create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mipsarcho32eb/objarch.h create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mipsarcho32el/objarch.h delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript/mipsel/objarch.h delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript_9.19.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ghostscript/ghostscript_9.20.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/go-examples/files/helloworld.go create mode 100644 import-layers/yocto-poky/meta/recipes-extended/go-examples/go-examples.inc create mode 100644 import-layers/yocto-poky/meta/recipes-extended/go-examples/go-helloworld_0.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/gperf/gperf_3.0.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/grep/grep-2.5.1a/Makevars delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/grep/grep-2.5.1a/fix-for-texinfo-5.1.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/grep/grep-2.5.1a/fix64-int-to-pointer.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/grep/grep-2.5.1a/gettext.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/grep/grep-2.5.1a/grep-CVE-2012-5667.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/grep/grep-2.5.1a/grep-egrep-fgrep-Fix-LSB-NG-cases.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/grep/grep-2.5.1a/grep_fix_for_automake-1.12.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-extended/grep/grep-2.5.1a/uclibc-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/grep/grep_2.25.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/grep/grep_2.5.1a.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/grep/grep_3.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/groff/groff-1.18.1.4/fix-narrowing-conversion-error.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/groff/groff-1.18.1.4/groff-1.18.1.4-fix-bindir.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/groff/groff-1.18.1.4/groff-1.18.1.4-remove-mom.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/groff/groff-1.18.1.4/man-local.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/groff/groff-1.18.1.4/mdoc-local.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/groff/groff-1.22.3/0001-replace-perl-w-with-use-warnings.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/groff/groff_1.18.1.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/gzip/gzip-1.3.12/dup-def-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/gzip/gzip-1.3.12/m4-extensions-fix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/gzip/gzip_1.3.12.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/hdparm/hdparm_9.48.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/hdparm/hdparm_9.51.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/iptables/iptables_1.6.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/iptables/iptables_1.6.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/less/less_481.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/less/less_487.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/libarchive/files/0001-archive_write_disk_posix.c-make-_fsobj-functions-mor.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/libarchive/files/0002-Fix-extracting-hardlinks-over-symlinks.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/libarchive/files/non-recursive-extract-and-list.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/libidn/libidn_0.6.14.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/libsolv/libsolv/0002-Fixes-to-internal-fopencookie-implementation.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/libsolv/libsolv_0.6.23.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/libsolv/libsolv_0.6.26.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/lighttpd/lighttpd_1.4.41.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/lighttpd/lighttpd_1.4.45.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/lsb/lsb/0001-fix-lsb_release-to-work-with-busybox-head-and-find.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0001-add-_GNU_SOURCE-to-pec_listener.c.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0001-ltp-Don-t-link-against-libfl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0001-testcases-network-nfsv4-acl-acl1.c-Security-fix-on-s.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0006-fix-PATH_MAX-undeclared-when-building-with-musl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0006-sendfile-Use-off64_t-instead-of-__off64_t.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0007-fix-__WORDSIZE-undeclared-when-building-with-musl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0007-replace-SIGCLD-with-SIGCHLD.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0009-Guard-error.h-with-__GLIBC__.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0009-fix-redefinition-of-struct-msgbuf-error-building-wit.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0012-fix-faccessat01.c-build-fails-with-security-flags.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0012-fsstress.c-Replace-__int64_t-with-int64_t.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0013-include-fcntl.h-for-getting-O_-definitions.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0014-hyperthreading-Include-sys-types.h-for-pid_t-definit.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0015-mincore01-Rename-PAGESIZE-to-pagesize.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0016-ustat-Change-header-from-ustat.h-to-sys-ustat.h.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0017-replace-sigval_t-with-union-sigval.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0019-tomoyo-Replace-canonicalize_file_name-with-realpath.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0022-include-sys-types.h.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0029-trace_shed-Fix-build-with-musl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0031-vma03-fix-page-size-offset-as-per-page-size-alignmen.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0032-regen.sh-Include-asm-unistd.h-explicitly.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0036-testcases-network-nfsv4-acl-acl1.c-Security-fix-on-s.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp/0039-fcntl-fix-the-time-def-to-use-time_t.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp_20160126.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/ltp/ltp_20170116.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/lzip/lzip_1.18.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/man-pages/man-pages_4.07.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/man-pages/man-pages_4.09.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/mc/files/0001-mc-replace-perl-w-with-use-warnings.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/mc/mc/mc-CTRL.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/mc/mc_4.7.5.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/mc/mc_4.8.17.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/mc/mc_4.8.18.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/mdadm/files/0001-Fix-some-type-comparison-problems.patch 
delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/mdadm/files/0001-Fix-typo-in-comparision.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/mdadm/files/0001-include-sys-sysmacros.h-for-major-minor-defintions.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/mdadm/files/0001-mdadm.h-bswap-is-already-defined-in-uclibc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/mdadm/files/0001-raid6check-Fix-if-else-indentation.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/mdadm/files/0001-util.c-include-poll.h-instead-of-sys-poll.h.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/mdadm/files/mdadm-3.2.2_fix_for_x32.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/mdadm/mdadm_3.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/mdadm/mdadm_4.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/msmtp/msmtp_1.6.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/msmtp/msmtp_1.6.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/parted/files/0002-libparted_fs_resize-link-against-libuuid-explicitly-.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/parted/parted/parted-3.2-sysmacros.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/pax/pax/0001-include-sys-sysmacros.h-for-major-minor-definitions.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/pigz/pigz/link-order.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/pigz/pigz_2.3.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/pigz/pigz_2.3.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/rpcbind/rpcbind/0001-Avoid-use-of-glibc-sys-cdefs.h-header.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/rpcbind/rpcbind/0001-handle_reply-Don-t-use-the-xp_auth-pointer-directly.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/rpcbind/rpcbind/0001-rpcbind-pair-all-svc_getargs-calls-with-svc_freeargs.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/rpcbind/rpcbind/cve-2015-7236.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/rpcbind/rpcbind_0.2.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/rpcbind/rpcbind_0.2.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/screen/screen_4.4.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/screen/screen_4.5.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/sed/sed-4.1.2/Makevars delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/sed/sed-4.1.2/fix_return_type.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/sed/sed-4.1.2/sed-4.1.2_fix_for_automake-1.12.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/sed/sed_4.1.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/shadow/files/0001-useradd-copy-extended-attributes-of-home.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/slang/slang/0001-Fix-error-conflicting-types-for-posix_close.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/slang/slang/dont-link-to-host.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/slang/slang/fix-check-pcre.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-extended/slang/slang/rpathfix.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/slang/slang/slang-fix-the-iconv-existence-checking.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/slang/slang_2.3.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/slang/slang_2.3.1a.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/sudo/sudo_1.8.17p1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/sudo/sudo_1.8.19p2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/sysklogd/files/klogd.service create mode 100644 import-layers/yocto-poky/meta/recipes-extended/sysklogd/files/syslogd.service create mode 100644 import-layers/yocto-poky/meta/recipes-extended/sysklogd/files/tmpfiles.sysklogd.conf delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/sysstat/sysstat_11.4.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/sysstat/sysstat_11.5.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/tar/tar-1.17/avoid_heap_overflow.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/tar/tar-1.17/gcc43build.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/tar/tar-1.17/m4extensions.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/tar/tar_1.17.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/texinfo/texinfo-4.8/check-locale-h.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/texinfo/texinfo-4.8/do-compile-native-tools.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/texinfo/texinfo-4.8/using-native-makeinfo.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/texinfo/texinfo_4.8.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/texinfo/texinfo_6.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/texinfo/texinfo_6.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/tzcode/tzcode-native_2017a.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/tzcode/tzcode-native_2017b.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/tzdata/tzdata_2017a.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/tzdata/tzdata_2017b.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/unzip/unzip/18-cve-2014-9913-unzip-buffer-overflow.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/unzip/unzip/19-cve-2016-9844-zipinfo-buffer-overflow.patch create mode 100644 import-layers/yocto-poky/meta/recipes-extended/wget/wget/CVE-2017-6508.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/wget/wget_1.18.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/wget/wget_1.19.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/which/which-2.18/automake-foreign.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/which/which-2.18/fix_name_conflict_group_member.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/which/which_2.18.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-extended/xz/xz_5.2.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-extended/xz/xz_5.2.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/epiphany/epiphany_3.20.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/epiphany/epiphany_3.22.6.bb 
create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gcr/files/gcr-add-missing-dependencies-for-vapi.patch create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf/0001-Work-around-thumbnailer-cross-compile-failure.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.34.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gdk-pixbuf/gdk-pixbuf_2.36.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gnome-desktop/gnome-desktop/gnome-desktop-thumbnail-don-t-convert-time_t-to-long.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gnome-desktop/gnome-desktop3_3.20.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gnome-desktop/gnome-desktop3_3.22.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gnome/adwaita-icon-theme_3.20.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gnome/adwaita-icon-theme_3.22.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gnome/gnome-themes-standard_3.20.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gnome/gnome-themes-standard_3.22.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.48.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gobject-introspection/gobject-introspection_1.50.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gsettings-desktop-schemas/gsettings-desktop-schemas_3.20.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gsettings-desktop-schemas/gsettings-desktop-schemas_3.22.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gtk+/gtk+3/0001-Redo-focus-handling-in-treeview-once-more.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gtk+/gtk+3_3.20.9.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gtk+/gtk+3_3.22.8.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gtk+/gtk+_2.24.30.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gtk+/gtk+_2.24.31.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gtk+/gtk-icon-utils-native_3.20.9.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/gtk+/gtk-icon-utils-native_3.22.8.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/libgudev/libgudev_230.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/libgudev/libgudev_231.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/libnotify/libnotify_0.7.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-gnome/libnotify/libnotify_0.7.7.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-gnome/libwnck/libwnck3_3.20.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/cairo/cairo_1.14.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/cairo/cairo_1.14.8.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/clutter/clutter-gst-3.0_3.0.18.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/clutter/clutter-gst-3.0_3.0.22.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/clutter/clutter-gtk-1.0_1.8.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/clutter/clutter-gtk-1.0_1.8.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/cogl/cogl-1.0/0001-Fix-an-incorrect-preprocessor-conditional.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-graphics/cogl/cogl-1.0_1.22.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/cogl/cogl-1.0_1.22.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/drm/libdrm_2.4.70.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/drm/libdrm_2.4.75.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/fontconfig/fontconfig/0001-Avoid-conflicts-with-integer-width-macros-from-TS-18.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/freetype/freetype_2.6.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/freetype/freetype_2.7.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/harfbuzz/harfbuzz_1.3.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/harfbuzz/harfbuzz_1.4.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/jpeg/libjpeg-turbo/fix-mips.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/jpeg/libjpeg-turbo_1.5.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/jpeg/libjpeg-turbo_1.5.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libepoxy/libepoxy/0001-select-platforms-based-on-configuration-results.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libepoxy/libepoxy/0002-add-an-option-to-disable-glx-support.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libepoxy/libepoxy/no-need-for-python3.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libepoxy/libepoxy_1.4.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libepoxy/libepoxy_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libsdl2/libsdl2/0001-prepend-our-sysroot-path-so-that-make-finds-our-wayl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libsdl2/libsdl2/0001-src-video-make-it-compatible-with-wayland-1.10.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libsdl2/libsdl2/0002-Avoid-finding-build-host-s-wayland-scanner.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libsdl2/libsdl2/fix-build-failure-on-ppc.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libsdl2/libsdl2_2.0.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libsdl2/libsdl2_2.0.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libva/files/0001-configure.ac-Use-wayland-scanner-in-PATH.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libva/files/0001-wayland-Don-t-commit-and-ship-generated-files.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/libva/libva_1.7.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/matchbox-wm/matchbox-wm_1.2.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/matchbox-wm/matchbox-wm_1.2.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/menu-cache/menu-cache_1.0.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/menu-cache/menu-cache_1.0.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/mesa/files/0001-Use-wayland-scanner-in-the-path.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/mesa/files/clang-compile-PR89599.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/mesa/mesa-gl_12.0.1.bb create mode 100644 
import-layers/yocto-poky/meta/recipes-graphics/mesa/mesa-gl_17.0.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/mesa/mesa_12.0.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/mesa/mesa_17.0.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/pango/pango_1.40.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/pango/pango_1.40.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/piglit/piglit/0001-CMake-define-GBM_BO_MAP-only-when-symbol-is-found.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/piglit/piglit/0001-cmake-Link-test-utils-with-ldl.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/piglit/piglit/0001-cmake-Link-utils-with-xcb-explicitly.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/piglit/piglit/0002-util-egl-Honour-Surfaceless-MESA-in-get_default_disp.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/piglit/piglit/0003-egl_mesa_platform_surfaceless-Don-t-use-eglGetPlatfo.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/piglit/piglit/0004-egl_mesa_platform_surfaceless-Use-EXT-functions-for-.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/vulkan/vulkan/0001-Use-getenv-if-secure_getenv-does-not-exist.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/vulkan/vulkan/demos-Don-t-build-tri-or-cube.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/vulkan/vulkan_1.0.39.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/libinput_1.4.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/libinput_1.6.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/wayland/0001-scanner-Use-unit32_t-instead-of-uint.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/wayland_1.11.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/wayland_1.13.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/weston/0001-shared-include-stdint.h-for-int32_t.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/weston/make-weston-launch-exit-for-unrecognized-option.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/weston/weston-1.11-config-option-for-no-input-device.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/weston/weston-gl-renderer-Set-pitch-correctly-for-subsampled-textures.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/weston_1.11.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/wayland/weston_2.0.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/x11-common/etc/X11/Xdefaults delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/x11-common/etc/X11/Xsession delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/x11-common/etc/X11/Xsession.d/12keymap.sh delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/x11-common/etc/X11/Xsession.d/13xdgbasedirs.sh delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/x11-common/etc/X11/Xsession.d/89xdgautostart.sh delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/x11-common/etc/X11/Xsession.d/90XWindowManager.sh delete mode 100644 
import-layers/yocto-poky/meta/recipes-graphics/x11-common/x11-common/etc/X11/default.xmodmap delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/x11-common/gplv2-license.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/x11-common_0.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/xserver-nodm-init.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/xserver-nodm-init/X11/Xsession create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/xserver-nodm-init/X11/Xsession.d/13xdgbasedirs.sh create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/xserver-nodm-init/X11/Xsession.d/89xdgautostart.sh create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/xserver-nodm-init/X11/Xsession.d/90XWindowManager.sh create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/x11-common/xserver-nodm-init_3.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xinput-calibrator/xinput-calibrator/add-geometry-input-when-calibrating.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-app/xauth_1.0.10.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-app/xauth_1.0.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-evdev_2.10.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-evdev_2.10.5.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-keyboard_1.8.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-keyboard_1.9.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-keyboard_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-libinput_0.19.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-libinput_0.24.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-mouse/unbreak.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-mouse_1.9.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-mouse_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.8.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_1.9.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-input-synaptics_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-video-omap_0.4.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-video-omap_0.4.5.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-video-vmware_13.1.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-driver/xf86-video-vmware_13.2.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libx11-diet_1.6.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libx11-diet_1.6.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libx11/CVE-2016-7942.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libx11/CVE-2016-7943.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libx11/libX11-Add-missing-NULL-check.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libx11_1.6.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libx11_1.6.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxfixes_5.0.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxfixes_5.0.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxfont2_2.0.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxfont_1.5.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxfont_1.5.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxi_1.7.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxi_1.7.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxkbcommon_0.6.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxkbcommon_0.7.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxpm_3.5.11.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxpm_3.5.12.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxrandr/CVE-2016-7947_CVE-2016-7948.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxrandr_1.5.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxrandr_1.5.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxrender/CVE-2016-7949.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxrender_0.9.10.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxrender_0.9.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxv_1.0.10.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxv_1.0.11.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxvmc_1.0.10.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/libxvmc_1.0.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.18.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-lib/xkeyboard-config_2.20.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-proto/xproto_7.0.29.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-proto/xproto_7.0.31.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-util/util-macros_1.19.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-util/util-macros_1.19.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xf86-config/10-preload-modules.conf create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg/0001-configure.ac-Fix-check-for-CLOCK_MONOTONIC.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg/0002-configure.ac-Fix-wayland-scanner-and-protocols-locat.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg/0003-Remove-check-for-useSIGIO-option.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg/0003-modesetting-Fix-16-bit-depth-bpp-mode.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg/CVE-2017-10971-1.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg/CVE-2017-10971-2.patch create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg/CVE-2017-10971-3.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg_1.18.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-graphics/xorg-xserver/xserver-xorg_1.19.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/cryptodev/files/0001-Adjust-to-another-change-in-the-user-page-API.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/cryptodev/files/kernel-4-10-changes.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/dtc/dtc_1.4.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/dtc/dtc_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0001-kexec-exntend-the-semantics-of-kexec_iomem_for_each_.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0001-x86-x86_64-Fix-format-warning-with-die.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0002-kexec-generalize-and-rename-get_kernel_stext_sym.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0002-ppc-Fix-format-warning-with-die.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0003-arm64-identify-PHYS_OFFSET-correctly.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0004-arm64-kdump-identify-memory-regions.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0005-arm64-kdump-add-elf-core-header-segment.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0006-arm64-kdump-set-up-kernel-image-segment.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0007-arm64-kdump-set-up-other-segments.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0008-arm64-kdump-add-DT-properties-to-crash-dump-kernel-s.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0009-arm64-kdump-Add-support-for-binary-image-files.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools/0010-kexec-ARM-Fix-add_buffer_phys_virt-align-issue.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools_2.0.12.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kexec/kexec-tools_2.0.14.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/kmod/kmod/kcmdline_quotes.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux-libc-headers/linux-libc-headers/0001-libc-compat.h-musl-_does_-define-IFF_LOWER_UP-DORMAN.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_4.10.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_4.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux-libc-headers/linux-libc-headers_4.8.bb create mode 100644 
import-layers/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_4.10.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_4.8.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux/linux-yocto-rt_4.9.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_4.10.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_4.8.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux/linux-yocto-tiny_4.9.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux/linux-yocto_4.10.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux/linux-yocto_4.8.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/linux/linux-yocto_4.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/lttng/babeltrace_1.4.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/lttng/babeltrace_1.5.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/lttng/lttng-modules_2.9.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/lttng/lttng-modules_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/lttng/lttng-tools/0001-Fix-error.h-common-error.h.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/lttng/lttng-tools/x32.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/lttng/lttng-tools_2.9.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/lttng/lttng-tools_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/lttng/lttng-ust_2.9.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/lttng/lttng-ust_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/perf/perf-features.inc delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/sysprof/files/0001-Forward-port-mips-arm-memory-barrier-patches.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/sysprof/files/0001-callgraph-Use-U64_TO_POINTER.patch create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/sysprof/sysprof_3.22.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-kernel/sysprof/sysprof_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-kernel/systemtap/systemtap-native_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-lsb4/libpng/libpng12_1.2.56.bb create mode 100644 import-layers/yocto-poky/meta/recipes-lsb4/libpng/libpng12_1.2.57.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/alsa/alsa-lib/0001-ucm-parser-needs-limits.h.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/alsa/alsa-lib_1.1.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/alsa/alsa-lib_1.1.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/alsa/alsa-tools/0001-Cus428Midi-Explicitly-cast-constant-to-char-type.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/alsa/alsa-tools_1.1.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/alsa/alsa-tools_1.1.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/alsa/alsa-utils-scripts_1.1.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/alsa/alsa-utils-scripts_1.1.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/alsa/alsa-utils_1.1.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/alsa/alsa-utils_1.1.3.bb 
delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/ffmpeg/ffmpeg_3.1.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/ffmpeg/ffmpeg_3.2.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/flac/flac_1.3.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/flac/flac_1.3.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gst-player/0001-gtk-play-Disable-visualizations.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gst-player/0001-gtk-play-provide-similar-behaviour-for-quit-and-clos.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer-vaapi/vaapivideobufferpool-create-allocator-if-needed.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav/mips64_cpu_detection.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.10.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_1.8.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-libav_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx/0001-omx-fixed-type-error-in-printf-call.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.10.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_1.2.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-omx_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-Prepend-PKG_CONFIG_SYSROOT_DIR-to-pkg-config-output.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-mssdemux-improved-live-playback-support.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-smoothstreaming-implement-adaptivedemux-s-get_live_s.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0001-smoothstreaming-use-the-duration-from-the-list-of-fr.patch delete mode 100755 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0002-glplugin-enable-gldeinterlace-on-OpenGL-ES.patch delete mode 100755 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0003-glcolorconvert-implement-multiple-render-targets-for.patch delete mode 100755 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0004-glcolorconvert-don-t-use-the-predefined-variable-nam.patch delete mode 100755 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0005-glshader-add-glBindFragDataLocation.patch delete mode 100755 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0006-glcolorconvert-GLES3-deprecates-texture2D-and-it-doe.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad/0008-gl-implement-GstGLMemoryEGL.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.10.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_1.8.3.bb delete mode 100644 
import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-bad_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.10.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_1.8.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-base_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good/0001-v4l2object-Also-add-videometa-if-there-is-padding-to.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.10.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_1.8.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-good_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.10.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_1.8.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-plugins-ugly_git.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.10.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-rtsp-server_1.8.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi.inc create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0-vaapi_1.10.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0/deterministic-unwind.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.10.4.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0_1.8.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/gstreamer/gstreamer1.0_git.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libpng/libpng_1.6.24.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libpng/libpng_1.6.28.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libsamplerate/libsamplerate0/0001-configure.ac-improve-alsa-handling.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libsamplerate/libsamplerate0_0.1.8.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libsamplerate/libsamplerate0_0.1.9.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libsndfile/libsndfile1/CVE-2017-6892.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libsndfile/libsndfile1/CVE-2017-8361-8365.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libsndfile/libsndfile1/CVE-2017-8362.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libsndfile/libsndfile1/CVE-2017-8363.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2016-10093.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2016-10266.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2016-10267.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2016-10268.patch create mode 100644 
import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2016-10269.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2016-10270.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2016-10271.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-10688.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-11335.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-7592.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-7593.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-7594-p1.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-7594-p2.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-7595.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-7596.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-7598.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-7601.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-7602.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-9147.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/CVE-2017-9936.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libtiff/files/libtiff-CVE-2017-5225.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/libvorbis/libvorbis/0001-configure-Check-for-clang.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/mpg123/mpg123_1.23.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/mpg123/mpg123_1.23.8.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0001-alsa-bluetooth-fail-if-user-requested-profile-doesn-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0002-card-don-t-allow-the-CARD_NEW-hook-to-fail.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0003-card-move-profile-selection-after-pa_card_new.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0004-card-remove-pa_card_new_data.active_profile.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio/0005-alsa-set-availability-for-some-unavailable-profiles.patch create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio_10.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/pulseaudio/pulseaudio_9.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/tremor/tremor/obsolete_automake_macros.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/tremor/tremor/tremor-arm-thumb2.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/tremor/tremor_20150107.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/webp/libwebp_0.5.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-multimedia/webp/libwebp_0.6.0.bb create mode 100644 
import-layers/yocto-poky/meta/recipes-multimedia/x264/x264/Fix-X32-build-by-disabling-asm.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/pcmanfm/pcmanfm_1.2.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-sato/pcmanfm/pcmanfm_1.2.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-sato/puzzles/files/0001-Clarify-conditions-to-avoid-compiler-errors.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/puzzles/files/0001-rect-Fix-compiler-errors-about-uninitialized-use-of-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/rxvt-unicode/rxvt-unicode_9.20.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/files/0001-FindGObjectIntrospection.cmake-prefix-variables-obta.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/files/0001-Fix-racy-parallel-build-of-WebKit2-4.0.gir.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/files/0001-OptionsGTK.cmake-drop-the-hardcoded-introspection-gt.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/files/0001-Tweak-gtkdoc-settings-so-that-gtkdoc-generation-work.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/files/0001-WebKitMacros-Append-to-I-and-not-to-isystem.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/files/0001-When-building-introspection-files-add-CMAKE_C_FLAGS-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/files/musl-fixes.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/files/ppc-musl-fix.patch create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk/0001-FindGObjectIntrospection.cmake-prefix-variables-obta.patch create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk/0001-Fix-build-with-musl.patch create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk/0001-Fix-racy-parallel-build-of-WebKit2-4.0.gir.patch create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk/0001-OptionsGTK.cmake-drop-the-hardcoded-introspection-gt.patch create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk/0001-Tweak-gtkdoc-settings-so-that-gtkdoc-generation-work.patch create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk/0001-WebKitMacros-Append-to-I-and-not-to-isystem.patch create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk/0001-When-building-introspection-files-add-CMAKE_C_FLAGS-.patch create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk/cross-compile.patch create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk/detect-atomics-during-configure.patch create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk/x32_support.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk_2.12.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-sato/webkit/webkitgtk_2.18.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/apr/apr/0001-apr-fix-off_t-size-doesn-t-match-in-glibc-when-cross.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/apr/apr/0002-explicitly-link-libapr-against-phtread-to-make-gold-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/atk/at-spi2-atk_2.20.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/atk/at-spi2-atk_2.22.0.bb delete mode 100644 
import-layers/yocto-poky/meta/recipes-support/atk/at-spi2-core_2.20.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/atk/at-spi2-core_2.22.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/atk/atk_2.20.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/atk/atk_2.22.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/attr/acl.inc create mode 100644 import-layers/yocto-poky/meta/recipes-support/attr/acl/Makefile-libacl-should-depend-on-include.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/attr/acl/add-missing-configure.ac.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/attr/acl/configure.ac create mode 100644 import-layers/yocto-poky/meta/recipes-support/attr/attr/attr-Missing-configure.ac.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/attr/attr/dont-use-decl-macros.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/attr/files/0001-Added-configure-option-to-enable-disable-static-libr.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/attr/files/attr-Missing-configure.ac.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/attr/files/dont-use-decl-macros.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/bash-completion/bash-completion_2.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/bash-completion/bash-completion_2.5.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/bjam-native_1.61.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/bjam-native_1.63.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost-1.61.0.inc create mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost-1.63.0.inc create mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost/0001-Apply-boost-1.62.0-no-forced-flags.patch.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost/0001-When-using-soft-float-on-ARM-we-should-not-expect-th.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost/0002-Don-t-set-up-m32-m64-we-do-that-ourselves.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost/0002-boost-test-execution_monitor.hpp-fix-mips-soft-float.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost/0003-Don-t-set-up-arch-instruction-set-flags-we-do-that-o.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost/0003-smart_ptr-mips-assembly-doesn-t-compile-in-mips16e-m.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost/consider-hardfp.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost/py3.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost_1.61.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/boost/boost_1.63.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/ca-certificates/ca-certificates_20160104.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/ca-certificates/ca-certificates_20161130.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/createrepo/createrepo/createrepo-dbpath.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/createrepo/createrepo/createrepo-rpm549.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-support/createrepo/createrepo/dumpMetadata-disable-signature-validation.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/createrepo/createrepo/fix-native-install.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/createrepo/createrepo/fixstat.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/createrepo/createrepo/python-scripts-should-use-interpreter-from-env.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/createrepo/createrepo/recommends.patch delete mode 100755 import-layers/yocto-poky/meta/recipes-support/createrepo/createrepo/rpm-createsolvedb.py delete mode 100644 import-layers/yocto-poky/meta/recipes-support/createrepo/createrepo_0.4.11.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/curl/curl/CVE-2017-1000100.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/curl/curl/CVE-2017-1000101.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/curl/curl_7.50.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/curl/curl_7.53.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/db/db/0001-atomic-Rename-local-__atomic_compare_exchange-to-avo.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/db/db/Makefile-let-libdb-6.0.la-depend-os_map.l.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/db/db/Makefile-let-libso_target-depend-on-bt_rec.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/db/db_6.0.35.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/debianutils/debianutils_4.8.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/debianutils/debianutils_4.8.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gdbm/gdbm-1.8.3/ldflags.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gdbm/gdbm-1.8.3/libtool-mode.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gdbm/gdbm-1.8.3/makefile.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gdbm/gdbm_1.8.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-4.2.1/Use-__gnu_inline__-attribute.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-4.2.1/avoid-h-asm-constraint-for-MIPS.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-4.2.1/gmp_fix_for_automake-1.12.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-6.1.1/0001-Append-the-user-provided-flags-to-the-auto-detected-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-6.1.1/0001-confiure.ac-Believe-the-cflags-from-environment.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-6.1.1/amd64.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-6.1.1/use-includedir.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-6.1.2/0001-Append-the-user-provided-flags-to-the-auto-detected-.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-6.1.2/0001-confiure.ac-Believe-the-cflags-from-environment.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-6.1.2/amd64.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp-6.1.2/use-includedir.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp_4.2.1.bb delete mode 100644 
import-layers/yocto-poky/meta/recipes-support/gmp/gmp_6.1.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/gmp/gmp_6.1.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg-1.4.7/CVE-2013-4242.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg-1.4.7/CVE-2013-4351.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg-1.4.7/CVE-2013-4576.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg-1.4.7/GnuPG1-CVE-2012-6085.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg-1.4.7/configure.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg-1.4.7/curl_typeof_fix_backport.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg-1.4.7/long-long-thumb.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg-1.4.7/mips_gcc4.4.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg_1.4.7.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg_2.1.14.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/gnupg/gnupg_2.1.18.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/gnutls/0001-Use-correct-include-dir-with-minitasn.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/gnutls/CVE-2016-7444.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/gnutls/arm_eabi.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/gnutls_3.5.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/gnutls_3.5.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/libtasn1/0001-configure-don-t-add-Werror-to-build-flags.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/libtasn1/0001-stdint.m4-reintroduce-GNULIB_OVERRIDES_WINT_T-check.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/libtasn1/0002-ASN.y-corrected-compiler-warning.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/libtasn1/0003-parser_aux-corrected-potential-null-pointer-derefere.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/libtasn1/0004-tools-eliminated-compiler-warnings.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/libtasn1/CVE-2017-10790.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/libtasn1_4.10.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gnutls/libtasn1_4.9.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/gpgme/gpgme/0001-Correctly-install-python-modules.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/gpgme/gpgme/0001-gpgme-config-skip-all-lib-or-usr-lib-directories-in-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gpgme/gpgme/gpgme.pc create mode 100644 import-layers/yocto-poky/meta/recipes-support/gpgme/gpgme/python-import.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/gpgme/gpgme/python-lang-config.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/gpgme/gpgme_1.6.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/gpgme/gpgme_1.8.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/icu/icu/0001-i18n-Drop-include-xlocale.h.patch delete mode 100644 
import-layers/yocto-poky/meta/recipes-support/icu/icu_57.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/icu/icu_58.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/iso-codes/iso-codes_3.70.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/iso-codes/iso-codes_3.74.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libbsd/libbsd/0001-src-libbsd-overlay.pc.in-Set-Cflags-to-use-I-instead.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libevdev/libevdev_1.5.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libevdev/libevdev_1.5.6.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libfm/libfm-extra_1.2.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libfm/libfm-extra_1.2.5.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libfm/libfm/0001-Correctly-check-the-stamp-file-that-indicates-if-we-.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libfm/libfm_1.2.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libfm/libfm_1.2.5.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libgcrypt/libgcrypt_1.7.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libgcrypt/libgcrypt_1.7.6.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libgpg-error/libgpg-error_1.24.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libgpg-error/libgpg-error_1.26.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libiconv/libiconv-1.11.1/autoconf.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libiconv/libiconv-1.11.1/shared_preloadable_libiconv_linux.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libiconv/libiconv_1.11.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libksba/libksba_1.3.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libksba/libksba_1.3.5.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libnl/libnl/0001-lib-add-utility-function-nl_strerror_l.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libnl/libnl/0002-lib-switch-to-using-strerror_l-instead-of-strerror_r.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libnl/libnl/0003-src-switch-to-using-strerror_l-instead-of-strerror_r.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libnl/libnl_3.2.28.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libnl/libnl_3.2.29.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libpcre/libpcre2/pcre-cross.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/libpcre/libpcre2_10.22.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libpcre/libpcre_8.39.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libpcre/libpcre_8.40.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libproxy/libproxy_0.4.13.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libproxy/libproxy_0.4.14.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libsoup/libsoup-2.4_2.54.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libsoup/libsoup-2.4_2.56.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libunistring/libunistring_0.9.6.bb create mode 100644 
import-layers/yocto-poky/meta/recipes-support/libunistring/libunistring_0.9.7.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libunwind/libunwind/libunwind-1.1-x32.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/liburcu/liburcu/0002-Support-for-aarch64_be.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/liburcu/liburcu_0.9.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/liburcu/liburcu_0.9.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/libusb/libusb1_1.0.20.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/libusb/libusb1_1.0.21.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/mpfr/mpfr_3.1.4.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/mpfr/mpfr_3.1.5.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/neon/neon/gnutls_4.3_fixup.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/neon/neon_0.30.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/neon/neon_0.30.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/nettle/nettle-2.7.1/CVE-2015-8803_8805.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/nettle/nettle-2.7.1/CVE-2015-8804.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/nettle/nettle-3.3/check-header-files-of-openssl-only-if-enable_.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/nettle/nettle_2.7.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/nettle/nettle_3.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/nettle/nettle_3.3.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/npth/npth_1.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/npth/npth_1.3.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/nspr/nspr/0001-include-stdint.h-for-SSIZE_MAX-and-SIZE_MAX-definiti.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/nspr/nspr_4.12.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/nspr/nspr_4.13.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/nss/nss/0001-nss-fix-support-cross-compiling.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/nss/nss/Fix-compilation-for-X32.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/nss/nss/nss-fix-support-cross-compiling.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/nss/nss_3.25.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/nss/nss_3.28.1.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/ptest-runner/ptest-runner_2.0.2.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/ptest-runner/ptest-runner_2.0.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/serf/serf_1.3.8.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/serf/serf_1.3.9.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/shared-mime-info/shared-mime-info_1.6.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/shared-mime-info/shared-mime-info_1.8.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/sqlite/files/0001-revert-ad601c7962-that-brings-2-increase-of-build-ti.patch delete mode 100644 import-layers/yocto-poky/meta/recipes-support/sqlite/sqlite3_3.14.1.bb create mode 100644 
import-layers/yocto-poky/meta/recipes-support/sqlite/sqlite3_3.17.0.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/taglib/taglib/CVE-2017-12678.patch create mode 100644 import-layers/yocto-poky/meta/recipes-support/taglib/taglib_1.11.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/taglib/taglib_1.9.1.bb delete mode 100644 import-layers/yocto-poky/meta/recipes-support/vte/vte_0.44.2.bb create mode 100644 import-layers/yocto-poky/meta/recipes-support/vte/vte_0.46.1.bb delete mode 100755 import-layers/yocto-poky/scripts/cleanup-workdir create mode 100755 import-layers/yocto-poky/scripts/contrib/bb-perf/buildstats-plot.sh create mode 100755 import-layers/yocto-poky/scripts/contrib/oe-build-perf-report-email.py create mode 100755 import-layers/yocto-poky/scripts/contrib/yocto-bsp-kernel-update.sh create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/arm/recipes-kernel/linux/linux-yocto_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/i386/recipes-kernel/linux/linux-yocto_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips/recipes-kernel/linux/linux-yocto_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/mips64/recipes-kernel/linux/linux-yocto_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/powerpc/recipes-kernel/linux/linux-yocto_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/qemu/recipes-kernel/linux/linux-yocto_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto-tiny_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/bsp/substrate/target/arch/x86_64/recipes-kernel/linux/linux-yocto_4.10.bbappend create mode 100644 import-layers/yocto-poky/scripts/lib/build_perf/__init__.py create mode 100644 import-layers/yocto-poky/scripts/lib/build_perf/html.py create mode 100644 import-layers/yocto-poky/scripts/lib/build_perf/html/measurement_chart.html create mode 100644 import-layers/yocto-poky/scripts/lib/build_perf/html/report.html create mode 100644 import-layers/yocto-poky/scripts/lib/build_perf/report.py create mode 100644 import-layers/yocto-poky/scripts/lib/build_perf/scrape-html-report.js create mode 100644 import-layers/yocto-poky/scripts/lib/compatlayer/__init__.py create mode 100644 import-layers/yocto-poky/scripts/lib/compatlayer/case.py create mode 100644 import-layers/yocto-poky/scripts/lib/compatlayer/cases/__init__.py create mode 100644 
import-layers/yocto-poky/scripts/lib/compatlayer/cases/bsp.py create mode 100644 import-layers/yocto-poky/scripts/lib/compatlayer/cases/common.py create mode 100644 import-layers/yocto-poky/scripts/lib/compatlayer/cases/distro.py create mode 100644 import-layers/yocto-poky/scripts/lib/compatlayer/context.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/__version__.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/canned-wks/mkgummidisk.wks delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/conf.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/config/wic.conf delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/creator.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/imager/__init__.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/imager/baseimager.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/imager/direct.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/msger.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/plugin.py create mode 100644 import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct_plugin.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/plugins/source/fsimage.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs_pcbios_ext.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/test delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/utils/errors.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/utils/oe/__init__.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/utils/oe/misc.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/utils/partitionedfs.py delete mode 100644 import-layers/yocto-poky/scripts/lib/wic/utils/syslinux.py create mode 100755 import-layers/yocto-poky/scripts/oe-build-perf-report create mode 100755 import-layers/yocto-poky/scripts/oe-git-archive create mode 100755 import-layers/yocto-poky/scripts/oe-test create mode 100755 import-layers/yocto-poky/scripts/task-time create mode 100755 import-layers/yocto-poky/scripts/tiny/ksum.py delete mode 100755 import-layers/yocto-poky/scripts/wipe-sysroot create mode 100755 import-layers/yocto-poky/scripts/yocto-compat-layer-wrapper create mode 100755 import-layers/yocto-poky/scripts/yocto-compat-layer.py (limited to 'import-layers/yocto-poky') diff --git a/import-layers/yocto-poky/README.hardware b/import-layers/yocto-poky/README.hardware index d7e24d228..e6ccf7808 100644 --- a/import-layers/yocto-poky/README.hardware +++ b/import-layers/yocto-poky/README.hardware @@ -203,6 +203,75 @@ linux/arch/powerpc/boot/dts/mpc8315erdb.dts within the kernel source). If you have left them at the factory default then you shouldn't need to do anything here. +Note: To boot from USB disk you need u-boot that supports 'ext2load usb' +command. You need to setup TFTP server, load u-boot from there and +flash it to NOR flash. + +Beware! Flashing bootloader is potentially dangerous operation that can +brick your device if done incorrectly. Please, make sure you understand +what below commands mean before executing them. + +Load the new u-boot.bin from TFTP server to memory address 200000 +=> tftp 200000 u-boot.bin + +Disable flash protection +=> protect off all + +Erase the old u-boot from fe000000 to fe06ffff in NOR flash. 
+The size is 0x70000 (458752 bytes) +=> erase fe000000 fe06ffff + +Copy the new u-boot from address 200000 to fe000000 +The size is 0x70000; it has to be greater than or equal to the size of u-boot.bin +=> cp.b 200000 fe000000 70000 + +Enable flash protection again +=> protect on all + +Reset the board +=> reset + +--- Booting from USB disk --- + + 1. Flash the partitioned image to the USB disk + + # dd if=core-image-minimal-mpc8315e-rdb.wic of=/dev/sdb + + 2. Plug the USB disk into the MPC8315 board + + 3. Connect the board's first serial port to your workstation and then start up + your favourite serial terminal so that you will be able to interact with + the serial console. If you don't have a favourite, picocom is suggested: + + $ picocom /dev/ttyUSB0 -b 115200 + + 4. Power up or reset the board and press a key on the terminal when prompted + to get to the U-Boot command line + + 5. Optional: load u-boot.bin from the USB disk: + + => usb start + => ext2load usb 0:1 200000 u-boot.bin + + and flash it to NOR flash as described above. + + 6. Set fdtaddr and loadaddr. This is not necessary if you have already set them. + + => setenv fdtaddr a00000 + => setenv loadaddr 1000000 + + 7. Load the kernel and dtb from the first partition of the USB disk: + + => usb start + => ext2load usb 0:1 $loadaddr uImage + => ext2load usb 0:1 $fdtaddr dtb + + 8. Set bootargs and boot up the device + + => setenv bootargs root=/dev/sdb2 rw rootwait console=ttyS0,115200 + => bootm $loadaddr - $fdtaddr + + --- Booting from NFS root --- Load the kernel and dtb (device tree blob), and boot the system as follows: @@ -317,7 +386,7 @@ Steps: 2. Flash the image. - # dd core-image-minimal-edgerouter.wic of=/dev/sdb + # dd if=core-image-minimal-edgerouter.wic of=/dev/sdb 3. Insert USB disk into the edgerouter and boot it. diff --git a/import-layers/yocto-poky/bitbake/LICENSE b/import-layers/yocto-poky/bitbake/LICENSE index 5d4a4c2a8..7d4e5f44b 100644 --- a/import-layers/yocto-poky/bitbake/LICENSE +++ b/import-layers/yocto-poky/bitbake/LICENSE @@ -15,3 +15,5 @@ Foundation and individual contributors. * QUnit is redistributed under the MIT license. * Font Awesome fonts redistributed under the SIL Open Font License 1.1 + +* simplediff is distributed under the zlib license. 
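The LICENSE entry above covers the simplediff module now bundled with BitBake. As a rough sketch of what that word-level diff interface looks like, assuming the vendored copy sits under bitbake/lib like BitBake's other third-party modules and keeps the upstream diff() function (returning (op, tokens) pairs); the path and token lists here are purely illustrative:

    # Sketch only: assumes the bundled module keeps the upstream simplediff
    # interface, i.e. diff() returns (op, tokens) pairs where op is one of
    # '=', '-' or '+'.
    import sys
    sys.path.insert(0, "import-layers/yocto-poky/bitbake/lib")

    from simplediff import diff

    old = "PREFERRED_VERSION_linux-yocto = 4.8".split()
    new = "PREFERRED_VERSION_linux-yocto = 4.10".split()

    for op, tokens in diff(old, new):
        print(op, " ".join(tokens))

Presumably this word-level diffing backs the colorized signature comparisons that bitbake-diffsigs gains in the hunks that follow.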
diff --git a/import-layers/yocto-poky/bitbake/bin/bitbake b/import-layers/yocto-poky/bitbake/bin/bitbake index 2a4fc7203..9f5c2d40a 100755 --- a/import-layers/yocto-poky/bitbake/bin/bitbake +++ b/import-layers/yocto-poky/bitbake/bin/bitbake @@ -38,7 +38,7 @@ from bb.main import bitbake_main, BitBakeConfigParameters, BBMainException if sys.getfilesystemencoding() != "utf-8": sys.exit("Please use a locale setting which supports utf-8.\nPython can't change the filesystem locale after loading so we need a utf-8 when python starts or things won't work.") -__version__ = "1.32.0" +__version__ = "1.34.0" if __name__ == "__main__": if __version__ != bb.__version__: diff --git a/import-layers/yocto-poky/bitbake/bin/bitbake-diffsigs b/import-layers/yocto-poky/bitbake/bin/bitbake-diffsigs index 527d2c7a9..eb2f85979 100755 --- a/import-layers/yocto-poky/bitbake/bin/bitbake-diffsigs +++ b/import-layers/yocto-poky/bitbake/bin/bitbake-diffsigs @@ -3,7 +3,7 @@ # bitbake-diffsigs # BitBake task signature data comparison utility # -# Copyright (C) 2012-2013 Intel Corporation +# Copyright (C) 2012-2013, 2017 Intel Corporation # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License version 2 as @@ -22,7 +22,7 @@ import os import sys import warnings import fnmatch -import optparse +import argparse import logging import pickle @@ -30,29 +30,13 @@ sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), ' import bb.tinfoil import bb.siggen +import bb.msg -def logger_create(name, output=sys.stderr): - logger = logging.getLogger(name) - console = logging.StreamHandler(output) - format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") - if output.isatty(): - format.enable_color() - console.setFormatter(format) - logger.addHandler(console) - logger.setLevel(logging.INFO) - return logger +logger = bb.msg.logger_create('bitbake-diffsigs') -logger = logger_create('bitbake-diffsigs') - -def find_compare_task(bbhandler, pn, taskname): +def find_compare_task(bbhandler, pn, taskname, sig1=None, sig2=None, color=False): """ Find the most recent signature files for the specified PN/task and compare them """ - def get_hashval(siginfo): - if siginfo.endswith('.siginfo'): - return siginfo.rpartition(':')[2].partition('_')[0] - else: - return siginfo.rpartition('.')[2] - if not hasattr(bb.siggen, 'find_siginfo'): logger.error('Metadata does not support finding signature data files') sys.exit(1) @@ -60,79 +44,119 @@ def find_compare_task(bbhandler, pn, taskname): if not taskname.startswith('do_'): taskname = 'do_%s' % taskname - filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data) - latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:] - if not latestfiles: - logger.error('No sigdata files found matching %s %s' % (pn, taskname)) - sys.exit(1) - elif len(latestfiles) < 2: - logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname)) - sys.exit(1) + if sig1 and sig2: + sigfiles = bb.siggen.find_siginfo(pn, taskname, [sig1, sig2], bbhandler.config_data) + if len(sigfiles) == 0: + logger.error('No sigdata files found matching %s %s matching either %s or %s' % (pn, taskname, sig1, sig2)) + sys.exit(1) + elif not sig1 in sigfiles: + logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, sig1)) + sys.exit(1) + elif not sig2 in sigfiles: + logger.error('No sigdata files found matching %s %s with signature %s' % (pn, taskname, 
sig2)) + sys.exit(1) + latestfiles = [sigfiles[sig1], sigfiles[sig2]] else: - # It's possible that latestfiles contain 3 elements and the first two have the same hash value. - # In this case, we delete the second element. - # The above case is actually the most common one. Because we may have sigdata file and siginfo - # file having the same hash value. Comparing such two files makes no sense. - if len(latestfiles) == 3: - hash0 = get_hashval(latestfiles[0]) - hash1 = get_hashval(latestfiles[1]) - if hash0 == hash1: - latestfiles.pop(1) - - # Define recursion callback - def recursecb(key, hash1, hash2): - hashes = [hash1, hash2] - hashfiles = bb.siggen.find_siginfo(key, None, hashes, bbhandler.config_data) - - recout = [] - if len(hashfiles) == 2: - out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb) - recout.extend(list(' ' + l for l in out2)) - else: - recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2)) - - return recout - - # Recurse into signature comparison - output = bb.siggen.compare_sigfiles(latestfiles[0], latestfiles[1], recursecb) - if output: - print('\n'.join(output)) + filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data) + latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-3:] + if not latestfiles: + logger.error('No sigdata files found matching %s %s' % (pn, taskname)) + sys.exit(1) + elif len(latestfiles) < 2: + logger.error('Only one matching sigdata file found for the specified task (%s %s)' % (pn, taskname)) + sys.exit(1) + + # Define recursion callback + def recursecb(key, hash1, hash2): + hashes = [hash1, hash2] + hashfiles = bb.siggen.find_siginfo(key, None, hashes, bbhandler.config_data) + + recout = [] + if len(hashfiles) == 0: + recout.append("Unable to find matching sigdata for %s with hashes %s or %s" % (key, hash1, hash2)) + elif not hash1 in hashfiles: + recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash1)) + elif not hash2 in hashfiles: + recout.append("Unable to find matching sigdata for %s with hash %s" % (key, hash2)) + else: + out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, color=color) + for change in out2: + for line in change.splitlines(): + recout.append(' ' + line) + + return recout + + # Recurse into signature comparison + logger.debug("Signature file (previous): %s" % latestfiles[-2]) + logger.debug("Signature file (latest): %s" % latestfiles[-1]) + output = bb.siggen.compare_sigfiles(latestfiles[-2], latestfiles[-1], recursecb, color=color) + if output: + print('\n'.join(output)) sys.exit(0) -parser = optparse.OptionParser( - description = "Compares siginfo/sigdata files written out by BitBake", - usage = """ - %prog -t recipename taskname - %prog sigdatafile1 sigdatafile2 - %prog sigdatafile1""") +parser = argparse.ArgumentParser( + description="Compares siginfo/sigdata files written out by BitBake") + +parser.add_argument('-d', '--debug', + help='Enable debug output', + action='store_true') + +parser.add_argument('--color', + help='Colorize output (where %(metavar)s is %(choices)s)', + choices=['auto', 'always', 'never'], default='auto', metavar='color') -parser.add_option("-t", "--task", - help = "find the signature data files for last two runs of the specified task and compare them", - action="store", dest="taskargs", nargs=2, metavar='recipename taskname') +parser.add_argument("-t", "--task", + help="find the signature data files for last two runs of the specified 
task and compare them", + action="store", dest="taskargs", nargs=2, metavar=('recipename', 'taskname')) -options, args = parser.parse_args(sys.argv) +parser.add_argument("-s", "--signature", + help="With -t/--task, specify the signatures to look for instead of taking the last two", + action="store", dest="sigargs", nargs=2, metavar=('fromsig', 'tosig')) + +parser.add_argument("sigdatafile1", + help="First signature file to compare (or signature file to dump, if second not specified). Not used when using -t/--task.", + action="store", nargs='?') + +parser.add_argument("sigdatafile2", + help="Second signature file to compare", + action="store", nargs='?') + + +options = parser.parse_args() + +if options.debug: + logger.setLevel(logging.DEBUG) + +color = (options.color == 'always' or (options.color == 'auto' and sys.stdout.isatty())) if options.taskargs: with bb.tinfoil.Tinfoil() as tinfoil: tinfoil.prepare(config_only=True) - find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1]) + if options.sigargs: + find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1], options.sigargs[0], options.sigargs[1], color=color) + else: + find_compare_task(tinfoil, options.taskargs[0], options.taskargs[1], color=color) else: - if len(args) == 1: - parser.print_help() - else: - try: - if len(args) == 2: - output = bb.siggen.dump_sigfile(sys.argv[1]) - else: - output = bb.siggen.compare_sigfiles(sys.argv[1], sys.argv[2]) - except IOError as e: - logger.error(str(e)) - sys.exit(1) - except (pickle.UnpicklingError, EOFError): - logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files') + if options.sigargs: + logger.error('-s/--signature can only be used together with -t/--task') + sys.exit(1) + try: + if options.sigdatafile1 and options.sigdatafile2: + output = bb.siggen.compare_sigfiles(options.sigdatafile1, options.sigdatafile2, color=color) + elif options.sigdatafile1: + output = bb.siggen.dump_sigfile(options.sigdatafile1) + else: + logger.error('Must specify signature file(s) or -t/--task') + parser.print_help() sys.exit(1) + except IOError as e: + logger.error(str(e)) + sys.exit(1) + except (pickle.UnpicklingError, EOFError): + logger.error('Invalid signature data - ensure you are specifying sigdata/siginfo files') + sys.exit(1) - if output: - print('\n'.join(output)) + if output: + print('\n'.join(output)) diff --git a/import-layers/yocto-poky/bitbake/bin/bitbake-dumpsig b/import-layers/yocto-poky/bitbake/bin/bitbake-dumpsig index 58ba1cad0..95ebd9354 100755 --- a/import-layers/yocto-poky/bitbake/bin/bitbake-dumpsig +++ b/import-layers/yocto-poky/bitbake/bin/bitbake-dumpsig @@ -27,39 +27,68 @@ import pickle sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib')) +import bb.tinfoil import bb.siggen +import bb.msg -def logger_create(name, output=sys.stderr): - logger = logging.getLogger(name) - console = logging.StreamHandler(output) - format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") - if output.isatty(): - format.enable_color() - console.setFormatter(format) - logger.addHandler(console) - logger.setLevel(logging.INFO) - return logger +logger = bb.msg.logger_create('bitbake-dumpsig') -logger = logger_create('bitbake-dumpsig') +def find_siginfo_task(bbhandler, pn, taskname): + """ Find the most recent signature file for the specified PN/task """ + + if not hasattr(bb.siggen, 'find_siginfo'): + logger.error('Metadata does not support finding signature data files') + sys.exit(1) + + if not 
taskname.startswith('do_'): + taskname = 'do_%s' % taskname + + filedates = bb.siggen.find_siginfo(pn, taskname, None, bbhandler.config_data) + latestfiles = sorted(filedates.keys(), key=lambda f: filedates[f])[-1:] + if not latestfiles: + logger.error('No sigdata files found matching %s %s' % (pn, taskname)) + sys.exit(1) + + return latestfiles[0] parser = optparse.OptionParser( description = "Dumps siginfo/sigdata files written out by BitBake", usage = """ + %prog -t recipename taskname %prog sigdatafile""") +parser.add_option("-D", "--debug", + help = "enable debug", + action = "store_true", dest="debug", default = False) + +parser.add_option("-t", "--task", + help = "find the signature data file for the specified task", + action="store", dest="taskargs", nargs=2, metavar='recipename taskname') + options, args = parser.parse_args(sys.argv) -if len(args) == 1: +if options.debug: + logger.setLevel(logging.DEBUG) + +if options.taskargs: + tinfoil = bb.tinfoil.Tinfoil() + tinfoil.prepare(config_only = True) + file = find_siginfo_task(tinfoil, options.taskargs[0], options.taskargs[1]) + logger.debug("Signature file: %s" % file) +elif len(args) == 1: parser.print_help() + sys.exit(0) else: - try: - output = bb.siggen.dump_sigfile(args[1]) - except IOError as e: - logger.error(str(e)) - sys.exit(1) - except (pickle.UnpicklingError, EOFError): - logger.error('Invalid signature data - ensure you are specifying a sigdata/siginfo file') - sys.exit(1) + file = args[1] + +try: + output = bb.siggen.dump_sigfile(file) +except IOError as e: + logger.error(str(e)) + sys.exit(1) +except (pickle.UnpicklingError, EOFError): + logger.error('Invalid signature data - ensure you are specifying a sigdata/siginfo file') + sys.exit(1) - if output: - print('\n'.join(output)) +if output: + print('\n'.join(output)) diff --git a/import-layers/yocto-poky/bitbake/bin/bitbake-layers b/import-layers/yocto-poky/bitbake/bin/bitbake-layers index 946def220..2b05d2847 100755 --- a/import-layers/yocto-poky/bitbake/bin/bitbake-layers +++ b/import-layers/yocto-poky/bitbake/bin/bitbake-layers @@ -24,43 +24,19 @@ import logging import os import sys import argparse +import signal bindir = os.path.dirname(__file__) topdir = os.path.dirname(bindir) sys.path[0:0] = [os.path.join(topdir, 'lib')] import bb.tinfoil +import bb.msg - -def tinfoil_init(parserecipes): - import bb.tinfoil - tinfoil = bb.tinfoil.Tinfoil(tracking=True) - tinfoil.prepare(not parserecipes) - tinfoil.logger.setLevel(logger.getEffectiveLevel()) - return tinfoil - - -def logger_create(name, output=sys.stderr): - logger = logging.getLogger(name) - loggerhandler = logging.StreamHandler(output) - loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s")) - logger.addHandler(loggerhandler) - logger.setLevel(logging.INFO) - return logger - -def logger_setup_color(logger, color='auto'): - from bb.msg import BBLogFormatter - console = logging.StreamHandler(sys.stdout) - formatter = BBLogFormatter("%(levelname)s: %(message)s") - console.setFormatter(formatter) - logger.handlers = [console] - if color == 'always' or (color == 'auto' and console.stream.isatty()): - formatter.enable_color() - - -logger = logger_create('bitbake-layers', sys.stdout) +logger = bb.msg.logger_create('bitbake-layers', sys.stdout) def main(): + signal.signal(signal.SIGPIPE, signal.SIG_DFL) parser = argparse.ArgumentParser( description="BitBake layers utility", epilog="Use %(prog)s --help to get help on a specific command", @@ -83,13 +59,17 @@ def main(): elif global_args.quiet: 
logger.setLevel(logging.ERROR) - logger_setup_color(logger, global_args.color) + # Need to re-run logger_create with color argument + # (will be the same logger since it has the same name) + bb.msg.logger_create('bitbake-layers', output=sys.stdout, color=global_args.color) plugins = [] - tinfoil = tinfoil_init(False) + tinfoil = bb.tinfoil.Tinfoil(tracking=True) + tinfoil.logger.setLevel(logger.getEffectiveLevel()) try: + tinfoil.prepare(True) for path in ([topdir] + - tinfoil.config_data.getVar('BBPATH', True).split(':')): + tinfoil.config_data.getVar('BBPATH').split(':')): pluginpath = os.path.join(path, 'lib', 'bblayers') bb.utils.load_plugins(logger, plugins, pluginpath) diff --git a/import-layers/yocto-poky/bitbake/bin/bitbake-worker b/import-layers/yocto-poky/bitbake/bin/bitbake-worker index db3c4b184..ee2d6224a 100755 --- a/import-layers/yocto-poky/bitbake/bin/bitbake-worker +++ b/import-layers/yocto-poky/bitbake/bin/bitbake-worker @@ -95,6 +95,7 @@ def worker_flush(worker_queue): pass while (worker_queue_int or not worker_queue.empty()): try: + (_, ready, _) = select.select([], [worker_pipe], [], 1) if not worker_queue.empty(): worker_queue_int = worker_queue_int + worker_queue.get() written = os.write(worker_pipe, worker_queue_int) @@ -135,7 +136,7 @@ def sigterm_handler(signum, frame): os.killpg(0, signal.SIGTERM) sys.exit() -def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, appends, taskdepdata, quieterrors=False): +def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, appends, taskdepdata, extraconfigdata, quieterrors=False, dry_run_exec=False): # We need to setup the environment BEFORE the fork, since # a fork() or exec*() activates PSEUDO... @@ -151,8 +152,10 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append except TypeError: umask = taskdep['umask'][taskname] + dry_run = cfg.dry_run or dry_run_exec + # We can't use the fakeroot environment in a dry run as it possibly hasn't been built - if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not cfg.dry_run: + if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not dry_run: envvars = (workerdata["fakerootenv"][fn] or "").split() for key, value in (var.split('=') for var in envvars): envbackup[key] = os.environ.get(key) @@ -219,16 +222,21 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append the_data = databuilder.mcdata[mc] the_data.setVar("BB_WORKERCONTEXT", "1") the_data.setVar("BB_TASKDEPDATA", taskdepdata) + if cfg.limited_deps: + the_data.setVar("BB_LIMITEDDEPS", "1") the_data.setVar("BUILDNAME", workerdata["buildname"]) the_data.setVar("DATE", workerdata["date"]) the_data.setVar("TIME", workerdata["time"]) + for varname, value in extraconfigdata.items(): + the_data.setVar(varname, value) + bb.parse.siggen.set_taskdata(workerdata["sigdata"]) ret = 0 the_data = bb_cache.loadDataFull(fn, appends) the_data.setVar('BB_TASKHASH', workerdata["runq_hash"][task]) - bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN", True), taskname.replace("do_", ""))) + bb.utils.set_process_name("%s:%s" % (the_data.getVar("PN"), taskname.replace("do_", ""))) # exported_vars() returns a generator which *cannot* be passed to os.environ.update() # successfully. 
We also need to unset anything from the environment which shouldn't be there @@ -243,11 +251,11 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append the_data.setVar(e, fakeenv[e]) the_data.setVarFlag(e, 'export', "1") - task_exports = the_data.getVarFlag(taskname, 'exports', True) + task_exports = the_data.getVarFlag(taskname, 'exports') if task_exports: for e in task_exports.split(): the_data.setVarFlag(e, 'export', '1') - v = the_data.getVar(e, True) + v = the_data.getVar(e) if v is not None: os.environ[e] = v @@ -259,7 +267,7 @@ def fork_off_task(cfg, data, databuilder, workerdata, fn, task, taskname, append logger.critical(traceback.format_exc()) os._exit(1) try: - if cfg.dry_run: + if dry_run: return 0 return bb.build.exec_task(fn, taskname, the_data, cfg.profile) except: @@ -328,6 +336,7 @@ class BitbakeWorker(object): self.cookercfg = None self.databuilder = None self.data = None + self.extraconfigdata = None self.build_pids = {} self.build_pipes = {} @@ -362,6 +371,7 @@ class BitbakeWorker(object): pass if len(self.queue): self.handle_item(b"cookerconfig", self.handle_cookercfg) + self.handle_item(b"extraconfigdata", self.handle_extraconfigdata) self.handle_item(b"workerdata", self.handle_workerdata) self.handle_item(b"runtask", self.handle_runtask) self.handle_item(b"finishnow", self.handle_finishnow) @@ -369,9 +379,11 @@ class BitbakeWorker(object): self.handle_item(b"quit", self.handle_quit) for pipe in self.build_pipes: - self.build_pipes[pipe].read() + if self.build_pipes[pipe].input in ready: + self.build_pipes[pipe].read() if len(self.build_pids): - self.process_waitpid() + while self.process_waitpid(): + continue def handle_item(self, item, func): @@ -388,6 +400,9 @@ class BitbakeWorker(object): self.databuilder.parseBaseConfiguration() self.data = self.databuilder.data + def handle_extraconfigdata(self, data): + self.extraconfigdata = pickle.loads(data) + def handle_workerdata(self, data): self.workerdata = pickle.loads(data) bb.msg.loggerDefaultDebugLevel = self.workerdata["logdefaultdebug"] @@ -410,10 +425,10 @@ class BitbakeWorker(object): sys.exit(0) def handle_runtask(self, data): - fn, task, taskname, quieterrors, appends, taskdepdata = pickle.loads(data) + fn, task, taskname, quieterrors, appends, taskdepdata, dry_run_exec = pickle.loads(data) workerlog_write("Handling runtask %s %s %s\n" % (task, fn, taskname)) - pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, appends, taskdepdata, quieterrors) + pid, pipein, pipeout = fork_off_task(self.cookercfg, self.data, self.databuilder, self.workerdata, fn, task, taskname, appends, taskdepdata, self.extraconfigdata, quieterrors, dry_run_exec) self.build_pids[pid] = task self.build_pipes[pid] = runQueueWorkerPipe(pipein, pipeout) @@ -426,9 +441,9 @@ class BitbakeWorker(object): try: pid, status = os.waitpid(-1, os.WNOHANG) if pid == 0 or os.WIFSTOPPED(status): - return None + return False except OSError: - return None + return False workerlog_write("Exit code of %s for pid %s\n" % (status, pid)) @@ -447,6 +462,8 @@ class BitbakeWorker(object): worker_fire_prepickled(b"" + pickle.dumps((task, status)) + b"") + return True + def handle_finishnow(self, _): if self.build_pids: logger.info("Sending SIGTERM to remaining %s tasks", len(self.build_pids)) diff --git a/import-layers/yocto-poky/bitbake/bin/toaster b/import-layers/yocto-poky/bitbake/bin/toaster index f92d38eca..61a4a0f85 100755 --- 
a/import-layers/yocto-poky/bitbake/bin/toaster +++ b/import-layers/yocto-poky/bitbake/bin/toaster @@ -24,6 +24,29 @@ Usage: source toaster start|stop [webport=] [noweb] [webport] Set the development server (default: localhost:8000) " +databaseCheck() +{ + retval=0 + # you can always add a superuser later via + # ../bitbake/lib/toaster/manage.py createsuperuser --username= + $MANAGE migrate --noinput || retval=1 + + if [ $retval -eq 1 ]; then + echo "Failed migrations, aborting system start" 1>&2 + return $retval + fi + # Make sure that checksettings can pick up any value for TEMPLATECONF + export TEMPLATECONF + $MANAGE checksettings --traceback || retval=1 + + if [ $retval -eq 1 ]; then + printf "\nError while checking settings; aborting\n" + return $retval + fi + + return $retval +} + webserverKillAll() { local pidfile @@ -48,22 +71,9 @@ webserverStartAll() fi retval=0 - # you can always add a superuser later via - # ../bitbake/lib/toaster/manage.py createsuperuser --username= - $MANAGE migrate --noinput || retval=1 - if [ $retval -eq 1 ]; then - echo "Failed migrations, aborting system start" 1>&2 - return $retval - fi - # Make sure that checksettings can pick up any value for TEMPLATECONF - export TEMPLATECONF - $MANAGE checksettings --traceback || retval=1 - - if [ $retval -eq 1 ]; then - printf "\nError while checking settings; aborting\n" - return $retval - fi + # check the database + databaseCheck || return 1 echo "Starting webserver..." @@ -111,7 +121,7 @@ verify_prereq() { if ! sed -n "$exp" $reqfile | python3 - ; then req=`grep ^Django $reqfile` echo "This program needs $req" - echo "Please install with pip install -r $reqfile" + echo "Please install with pip3 install -r $reqfile" return 2 fi @@ -151,14 +161,7 @@ fi unset OE_ROOT -# this defines the dir toaster will use for -# 1) clones of layers (in _toaster_clones ) -# 2) the build dir (in build) -# 3) the sqlite db if that is being used. -# 4) pid's we need to clean up on exit/shutdown -# note: for future. in order to make this an arbitrary directory, we need to -# make sure that the toaster.sqlite file doesn't default to `pwd` like it currently does. -export TOASTER_DIR=`pwd` + WEBSERVER=1 ADDR_PORT="localhost:8000" @@ -214,10 +217,8 @@ fi # 2) the build dir (in build) # 3) the sqlite db if that is being used. # 4) pid's we need to clean up on exit/shutdown -# note: for future. in order to make this an arbitrary directory, we need to -# make sure that the toaster.sqlite file doesn't default to `pwd` -# like it currently does. export TOASTER_DIR=`dirname $BUILDDIR` +export BB_ENV_EXTRAWHITE="$BB_ENV_EXTRAWHITE TOASTER_DIR" # Determine the action. If specified by arguments, fine, if not, toggle it if [ "$CMD" = "start" ] ; then @@ -249,12 +250,25 @@ case $CMD in line='INHERIT+="toaster buildhistory"' grep -q "$line" $conf || echo $line >> $conf + if [ $WEBSERVER -eq 0 ] ; then + # Do not update the database for "noweb" unless + # it does not yet exist + if [ ! -f "$TOASTER_DIR/toaster.sqlite" ] ; then + if ! databaseCheck; then + echo "Failed ${CMD}." + return 4 + fi + fi + fi if [ $WEBSERVER -gt 0 ] && ! webserverStartAll; then echo "Failed ${CMD}." return 4 fi export BITBAKE_UI='toasterui' - $MANAGE runbuilds & echo $! >${BUILDDIR}/.runbuilds.pid + $MANAGE runbuilds \ + >${BUILDDIR}/toaster_runbuilds.log 2>&1 \ + & echo $! >${BUILDDIR}/.runbuilds.pid + # set fail safe stop system on terminal exit trap stop_system SIGHUP echo "Successful ${CMD}." 
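The bitbake-worker hunks above extend the pickled "runtask" message with a dry_run_exec field and feed extraconfigdata into the forked task. A rough sketch of how that seven-field payload round-trips through pickle; all values here are hypothetical and only illustrate the shape of the tuple that handle_runtask() unpacks:

    import pickle

    # Hypothetical values for the fields carried by the "runtask" message.
    fn = "/path/to/some-recipe.bb"
    task = 7
    taskname = "do_compile"
    quieterrors = False
    appends = []
    taskdepdata = {}
    dry_run_exec = True   # new field: request a dry run for this one task

    payload = pickle.dumps((fn, task, taskname, quieterrors, appends,
                            taskdepdata, dry_run_exec))

    # The worker side unpacks the same seven fields in handle_runtask():
    (fn, task, taskname, quieterrors, appends,
     taskdepdata, dry_run_exec) = pickle.loads(payload)
    print(taskname, dry_run_exec)

fork_off_task() then combines this per-task flag with the global cfg.dry_run option, so either one is enough to skip actual execution of the task.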
diff --git a/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml b/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml index 2a3340b39..d1ce43e23 100644 --- a/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml +++ b/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-fetching.xml @@ -38,7 +38,7 @@ The code to execute the first part of this process, a fetch, looks something like the following: - src_uri = (d.getVar('SRC_URI', True) or "").split() + src_uri = (d.getVar('SRC_URI') or "").split() fetcher = bb.fetch2.Fetch(src_uri, d) fetcher.download() @@ -52,7 +52,7 @@ The instantiation of the fetch class is usually followed by: - rootdir = l.getVar('WORKDIR', True) + rootdir = l.getVar('WORKDIR') fetcher.unpack(rootdir) This code unpacks the downloaded files to the @@ -268,15 +268,6 @@ FILESPATH variable is used in the same way PATH is used to find executables. - Failing that, - FILESDIR - is used to find the appropriate relative file. - - FILESDIR is deprecated and can - be replaced with FILESPATH. - Because FILESDIR is likely to be - removed, you should not use this variable in any new code. - If the file cannot be found, it is assumed that it is available in DL_DIR by the time the download() method is called. @@ -385,7 +376,8 @@ The supported parameters are as follows: "method": - The protocol over which to communicate with the CVS server. + The protocol over which to communicate with the CVS + server. By default, this protocol is "pserver". If "method" is set to "ext", BitBake examines the "rsh" parameter and sets CVS_RSH. @@ -469,25 +461,29 @@ You can think of this parameter as the top-level directory of the repository data you want. + "path_spec": + A specific directory in which to checkout the + specified svn module. + "protocol": The protocol to use, which defaults to "svn". - Other options are "svn+ssh" and "rsh". - For "rsh", the "rsh" parameter is also used. + If "protocol" is set to "svn+ssh", the "ssh" + parameter is also used. "rev": The revision of the source code to checkout. - "date": - The date of the source code to checkout. - Specific revisions are generally much safer to checkout - rather than by date as they do not involve timezones - (e.g. they are much more deterministic). - "scmdata": Causes the “.svn” directories to be available during compile-time when set to "keep". By default, these directories are removed. + "ssh": + An optional parameter used when "protocol" is set + to "svn+ssh". + You can use this parameter to specify the ssh + program used by svn. + "transportuser": When required, sets the username for the transport. By default, this parameter is empty. @@ -496,10 +492,11 @@ command. 
- Following are two examples using svn: + Following are three examples using svn: - SRC_URI = "svn://svn.oe.handhelds.org/svn;module=vip;proto=http;rev=667" - SRC_URI = "svn://svn.oe.handhelds.org/svn/;module=opie;proto=svn+ssh;date=20060126" + SRC_URI = "svn://myrepos/proj1;module=vip;protocol=http;rev=667" + SRC_URI = "svn://myrepos/proj1;module=opie;protocol=svn+ssh" + SRC_URI = "svn://myrepos/proj1;module=trunk;protocol=http;path_spec=${MY_DIR}/proj1" diff --git a/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml b/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml index 8b7edbff5..2685c0ebd 100644 --- a/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml +++ b/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-hello.xml @@ -134,7 +134,7 @@ Mailing List post - The BitBake equivalent of "Hello, World!" - Hambedded Linux blog post - From Bitbake Hello World to an Image + Hambedded Linux blog post - From Bitbake Hello World to an Image @@ -269,7 +269,7 @@ and define some key BitBake variables. For more information on the bitbake.conf, see - + Use the following commands to create the conf directory in the project directory: @@ -354,7 +354,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh supporting. For more information on the base.bbclass file, you can look at - . + . Run Bitbake: After making sure that the classes/base.bbclass @@ -376,7 +376,7 @@ ERROR: Unable to parse base: ParseError in configuration INHERITs: Could not inh Thus, this example creates and uses a layer called "mylayer". You can find additional information on adding a layer at - . + . Minimally, you need a recipe file and a layer configuration diff --git a/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml b/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml index 4d58dc456..ca7f7246c 100644 --- a/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml +++ b/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-intro.xml @@ -504,9 +504,19 @@ Read the specified file before bitbake.conf. -R POSTFILE, --postread=POSTFILE Read the specified file after bitbake.conf. - -v, --verbose Output more log message data to the terminal. - -D, --debug Increase the debug level. You can specify this more - than once. + -v, --verbose Enable tracing of shell tasks (with 'set -x'). + Also print bb.note(...) messages to stdout (in + addition to writing them to ${T}/log.do_<task>). + -D, --debug Increase the debug level. You can specify this + more than once. -D sets the debug level to 1, + where only bb.debug(1, ...) messages are printed + to stdout; -DD sets the debug level to 2, where + both bb.debug(1, ...) and bb.debug(2, ...) + messages are printed; etc. Without -D, no debug + messages are printed. Note that -D only affects + output to stdout. All debug messages are written + to ${T}/log.do_taskname, regardless of the debug + level. -n, --dry-run Don't execute, just go through the motions. -S SIGNATURE_HANDLER, --dump-signatures=SIGNATURE_HANDLER Dump out the signature construction information, with @@ -529,8 +539,8 @@ -l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS Show debug logging for the specified logging domains -P, --profile Profile the command and save reports. 
- -u UI, --ui=UI The user interface to use (depexp, goggle, hob, knotty - or ncurses - default knotty). + -u UI, --ui=UI The user interface to use (taskexp, knotty or + ncurses - default knotty). -t SERVERTYPE, --servertype=SERVERTYPE Choose which server type to use (process or xmlrpc - default process). @@ -543,6 +553,7 @@ -B BIND, --bind=BIND The name/address for the bitbake server to bind to. --no-setscene Do not run any setscene tasks. sstate will be ignored and everything needed, built. + --setscene-only Only run setscene tasks, don't run any real tasks. --remote-server=REMOTE_SERVER Connect to the specified server. -m, --kill-server Terminate the remote server. @@ -665,21 +676,21 @@ - When you generate a dependency graph, BitBake writes four files + When you generate a dependency graph, BitBake writes three files to the current working directory: - package-depends.dot: - Shows BitBake's knowledge of dependencies between - runtime targets. + + recipe-depends.dot: + Shows dependencies between recipes (i.e. a collapsed version of + task-depends.dot). - pn-depends.dot: - Shows dependencies between build-time targets - (i.e. recipes). - - task-depends.dot: + + task-depends.dot: Shows dependencies between tasks. + These dependencies match BitBake's internal task execution list. - pn-buildlist: + + pn-buildlist: Shows a simple list of targets that are to be built. diff --git a/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml b/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml index 71bb25bf7..1d1e5b35f 100644 --- a/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml +++ b/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-metadata.xml @@ -61,6 +61,48 @@ +
+ Line Joining + + + Outside of + functions, BitBake joins + any line ending in a backslash character ("\") + with the following line before parsing statements. + The most common use for the "\" character is to split variable + assignments over multiple lines, as in the following example: + + FOO = "bar \ + baz \ + qaz" + + Both the "\" character and the newline character + that follow it are removed when joining lines. + Thus, no newline characters end up in the value of + FOO. + + + + Consider this additional example where the two + assignments both assign "barbaz" to + FOO: + + FOO = "barbaz" + + FOO = "bar\ + baz" + + + BitBake does not interpret escape sequences like + "\n" in variable values. + For these to have an effect, the value must be passed + to some utility that interprets escape sequences, + such as printf or + echo -n. + + +
+
Variable Expansion @@ -463,14 +505,14 @@ Unseting variables - It is possible to completely remove a variable or a variable flag + It is possible to completely remove a variable or a variable flag from BitBake's internal data dictionary by using the "unset" keyword. Here is an example: unset DATE unset do_fetch[noexec] - These two statements remove the DATE and the + These two statements remove the DATE and the do_fetch[noexec] flag. @@ -1165,7 +1207,7 @@ python some_python_function () { d.setVar("TEXT", "Hello World") - print d.getVar("TEXT", True) + print d.getVar("TEXT") } Because the Python "bb" and "os" modules are already @@ -1180,7 +1222,7 @@ to freely set variable values to expandable expressions without having them expanded prematurely. If you do wish to expand a variable within a Python - function, use d.getVar("X", True). + function, use d.getVar("X"). Or, for more complicated expressions, use d.expand(). @@ -1232,7 +1274,7 @@ Here is an example: def get_depends(d): - if d.getVar('SOMECONDITION', True): + if d.getVar('SOMECONDITION'): return "dependencywithcond" else: return "dependency" @@ -1367,7 +1409,7 @@ based on the value of another variable: python () { - if d.getVar('SOMEVAR', True) == 'value': + if d.getVar('SOMEVAR') == 'value': d.setVar('ANOTHERVAR', 'value2') } @@ -1942,128 +1984,194 @@ Events - BitBake allows installation of event handlers within - recipe and class files. - Events are triggered at certain points during operation, - such as the beginning of an operation against a given recipe - (*.bb file), the start of a given task, - task failure, task success, and so forth. + BitBake allows installation of event handlers within recipe + and class files. + Events are triggered at certain points during operation, such + as the beginning of operation against a given recipe + (i.e. *.bb), the start of a given task, + a task failure, a task success, and so forth. The intent is to make it easy to do things like email - notification on build failure. + notification on build failures. - Following is an example event handler that - prints the name of the event and the content of - the FILE variable: + Following is an example event handler that prints the name + of the event and the content of the + FILE variable: addhandler myclass_eventhandler python myclass_eventhandler() { from bb.event import getName - from bb import data print("The name of the Event is %s" % getName(e)) - print("The file we run for is %s" % data.getVar('FILE', e.data, True)) + print("The file we run for is %s" % d.getVar('FILE')) } + myclass_eventhandler[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted" - This event handler gets called every time an event is - triggered. - A global variable "e" is defined and - "e.data" contains an instance of - "bb.data". - With the getName(e) method, one can get + In the previous example, an eventmask has been set so that + the handler only sees the "BuildStarted" and "BuildCompleted" + events. + This event handler gets called every time an event matching + the eventmask is triggered. + A global variable "e" is defined, which represents the current + event. + With the getName(e) method, you can get the name of the triggered event. + The global datastore is available as "d". + In legacy code, you might see "e.data" used to get the datastore. + However, realize that "e.data" is deprecated and you should use + "d" going forward. 
- Because you probably are only interested in a subset of events, - you would likely use the [eventmask] flag - for your event handler to be sure that only certain events - trigger the handler. - Given the previous example, suppose you only wanted the - bb.build.TaskFailed event to trigger that - event handler. - Use the flag as follows: - - addhandler myclass_eventhandler - myclass_eventhandler[eventmask] = "bb.build.TaskFailed" - python myclass_eventhandler() { - from bb.event import getName - from bb import data - print("The name of the Event is %s" % getName(e)) - print("The file we run for is %s" % data.getVar('FILE', e.data, True)) - } - + The context of the datastore is appropriate to the event + in question. + For example, "BuildStarted" and "BuildCompleted" events run + before any tasks are executed so would be in the global + configuration datastore namespace. + No recipe-specific metadata exists in that namespace. + The "BuildStarted" and "buildCompleted" events also run in + the main cooker/server process rather than any worker context. + Thus, any changes made to the datastore would be seen by other + cooker/server events within the current build but not seen + outside of that build or in any worker context. + Task events run in the actual tasks in question consequently + have recipe-specific and task-specific contents. + These events run in the worker context and are discarded at + the end of task execution. - During a standard build, the following common events might occur: + During a standard build, the following common events might + occur. + The following events are the most common kinds of events that + most metadata might have an interest in viewing: - bb.event.ConfigParsed() + bb.event.ConfigParsed(): + Fired when the base configuration; which consists of + bitbake.conf, + base.bbclass and any global + INHERIT statements; has been parsed. + You can see multiple such events when each of the + workers parse the base configuration or if the server + changes configuration and reparses. + Any given datastore only has one such event executed + against it, however. + If + BB_INVALIDCONF + is set in the datastore by the event handler, the + configuration is reparsed and a new event triggered, + allowing the metadata to update configuration. + + + bb.event.HeartbeatEvent(): + Fires at regular time intervals of one second. + You can configure the interval time using the + BB_HEARTBEAT_EVENT variable. + The event's "time" attribute is the + time.time() value when the + event is triggered. + This event is useful for activities such as + system state monitoring. - bb.event.ParseStarted() + bb.event.ParseStarted(): + Fired when BitBake is about to start parsing recipes. + This event's "total" attribute represents the number of + recipes BitBake plans to parse. - bb.event.ParseProgress() + bb.event.ParseProgress(): + Fired as parsing progresses. + This event's "current" attribute is the number of + recipes parsed as well as the "total" attribute. - bb.event.ParseCompleted() + bb.event.ParseCompleted(): + Fired when parsing is complete. + This event's "cached", "parsed", "skipped", "virtuals", + "masked", and "errors" attributes provide statistics + for the parsing results. - bb.event.BuildStarted() + bb.event.BuildStarted(): + Fired when a new build starts. - bb.build.TaskStarted() + bb.build.TaskStarted(): + Fired when a task starts. + This event's "taskfile" attribute points to the recipe + from which the task originates. 
+ The "taskname" attribute, which is the task's name, + includes the do_ prefix, and the + "logfile" attribute point to where the task's output is + stored. + Finally, the "time" attribute is the task's execution start + time. - bb.build.TaskInvalid() + bb.build.TaskInvalid(): + Fired if BitBake tries to execute a task that does not exist. - bb.build.TaskFailedSilent() + bb.build.TaskFailedSilent(): + Fired for setscene tasks that fail and should not be + presented to the user verbosely. - bb.build.TaskFailed() + bb.build.TaskFailed(): + Fired for normal tasks that fail. - bb.build.TaskSucceeded() + bb.build.TaskSucceeded(): + Fired when a task successfully completes. - bb.event.BuildCompleted() + bb.event.BuildCompleted(): + Fired when a build finishes. - bb.cooker.CookerExit() + bb.cooker.CookerExit(): + Fired when the BitBake server/cooker shuts down. + This event is usually only seen by the UIs as a + sign they should also shutdown. - Here is a list of other events that occur based on specific requests - to the server: + + + + This next list of example events occur based on specific + requests to the server. + These events are often used to communicate larger pieces of + information from the BitBake server to other parts of + BitBake such as user interfaces: bb.event.TreeDataPreparationStarted() - bb.event.TreeDataPreparationProgress + bb.event.TreeDataPreparationProgress() - bb.event.TreeDataPreparationCompleted + bb.event.TreeDataPreparationCompleted() - bb.event.DepTreeGenerated + bb.event.DepTreeGenerated() - bb.event.CoreBaseFilesFound + bb.event.CoreBaseFilesFound() - bb.event.ConfigFilePathFound + bb.event.ConfigFilePathFound() - bb.event.FilesMatchingFound + bb.event.FilesMatchingFound() - bb.event.ConfigFilesFound + bb.event.ConfigFilesFound() - bb.event.TargetsTreeGenerated + bb.event.TargetsTreeGenerated() diff --git a/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml b/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml index 66d8f844e..0e89bf239 100644 --- a/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml +++ b/import-layers/yocto-poky/bitbake/doc/bitbake-user-manual/bitbake-user-manual-ref-variables.xml @@ -1539,24 +1539,6 @@ - FILESDIR - - - Specifies directories BitBake uses when searching for - patches and files. - The "local" fetcher module uses these directories when - handling file:// URLs if the file - was not found using - FILESPATH. - - The FILESDIR variable is - deprecated and you should use - FILESPATH in all new code. - - - - - FILESPATH diff --git a/import-layers/yocto-poky/bitbake/doc/bitbake.1 b/import-layers/yocto-poky/bitbake/doc/bitbake.1 index a6c8d9727..7fc1652ec 100644 --- a/import-layers/yocto-poky/bitbake/doc/bitbake.1 +++ b/import-layers/yocto-poky/bitbake/doc/bitbake.1 @@ -105,7 +105,7 @@ Show debug logging for the specified logging domains profile the command and print a report .TP .B \-uUI, \-\-ui=UI -User interface to use. Currently, hob, depexp, goggle or ncurses can be specified as UI. +User interface to use. Currently, knotty, taskexp or ncurses can be specified as UI. .TP .B \-tSERVERTYPE, \-\-servertype=SERVERTYPE Choose which server to use, none, process or xmlrpc. 
diff --git a/import-layers/yocto-poky/bitbake/lib/bb/COW.py b/import-layers/yocto-poky/bitbake/lib/bb/COW.py index 77a05cfe3..36ebbd9d1 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/COW.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/COW.py @@ -213,11 +213,11 @@ if __name__ == "__main__": print() print("a", a) - for x in a.items(): + for x in a.iteritems(): print(x) print("--") print("b", b) - for x in b.items(): + for x in b.iteritems(): print(x) print() @@ -225,11 +225,11 @@ if __name__ == "__main__": b['a'] = 'c' print("a", a) - for x in a.items(): + for x in a.iteritems(): print(x) print("--") print("b", b) - for x in b.items(): + for x in b.iteritems(): print(x) print() @@ -244,22 +244,22 @@ if __name__ == "__main__": a['set'].add("o2") print("a", a) - for x in a['set'].values(): + for x in a['set'].itervalues(): print(x) print("--") print("b", b) - for x in b['set'].values(): + for x in b['set'].itervalues(): print(x) print() b['set'].add('o3') print("a", a) - for x in a['set'].values(): + for x in a['set'].itervalues(): print(x) print("--") print("b", b) - for x in b['set'].values(): + for x in b['set'].itervalues(): print(x) print() @@ -269,7 +269,7 @@ if __name__ == "__main__": a['set2'].add("o2") print("a", a) - for x in a.items(): + for x in a.iteritems(): print(x) print("--") print("b", b) @@ -289,7 +289,7 @@ if __name__ == "__main__": print("Yay - has_key with delete works!") print("a", a) - for x in a.items(): + for x in a.iteritems(): print(x) print("--") print("b", b) @@ -300,7 +300,7 @@ if __name__ == "__main__": b.__revertitem__('b') print("a", a) - for x in a.items(): + for x in a.iteritems(): print(x) print("--") print("b", b) @@ -310,7 +310,7 @@ if __name__ == "__main__": b.__revertitem__('dict') print("a", a) - for x in a.items(): + for x in a.iteritems(): print(x) print("--") print("b", b) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/__init__.py index f019d4831..bfe0ca5d8 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/__init__.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/__init__.py @@ -21,7 +21,7 @@ # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
-__version__ = "1.32.0" +__version__ = "1.34.0" import sys if sys.version_info < (3, 4, 0): diff --git a/import-layers/yocto-poky/bitbake/lib/bb/build.py b/import-layers/yocto-poky/bitbake/lib/bb/build.py index b59a49bc1..0d0100a06 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/build.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/build.py @@ -91,14 +91,14 @@ class TaskBase(event.Event): def __init__(self, t, logfile, d): self._task = t - self._package = d.getVar("PF", True) - self._mc = d.getVar("BB_CURRENT_MC", True) - self.taskfile = d.getVar("FILE", True) + self._package = d.getVar("PF") + self._mc = d.getVar("BB_CURRENT_MC") + self.taskfile = d.getVar("FILE") self.taskname = self._task self.logfile = logfile self.time = time.time() event.Event.__init__(self) - self._message = "recipe %s: task %s: %s" % (d.getVar("PF", True), t, self.getDisplayName()) + self._message = "recipe %s: task %s: %s" % (d.getVar("PF"), t, self.getDisplayName()) def getTask(self): return self._task @@ -195,13 +195,13 @@ def exec_func(func, d, dirs = None, pythonexception=False): oldcwd = None flags = d.getVarFlags(func) - cleandirs = flags.get('cleandirs') + cleandirs = flags.get('cleandirs') if flags else None if cleandirs: for cdir in d.expand(cleandirs).split(): bb.utils.remove(cdir, True) bb.utils.mkdirhier(cdir) - if dirs is None: + if flags and dirs is None: dirs = flags.get('dirs') if dirs: dirs = d.expand(dirs).split() @@ -227,17 +227,17 @@ def exec_func(func, d, dirs = None, pythonexception=False): else: lockfiles = None - tempdir = d.getVar('T', True) + tempdir = d.getVar('T') # or func allows items to be executed outside of the normal # task set, such as buildhistory - task = d.getVar('BB_RUNTASK', True) or func + task = d.getVar('BB_RUNTASK') or func if task == func: taskfunc = task else: taskfunc = "%s.%s" % (task, func) - runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}" + runfmt = d.getVar('BB_RUNFMT') or "run.{func}.{pid}" runfn = runfmt.format(taskfunc=taskfunc, task=task, func=func, pid=os.getpid()) runfile = os.path.join(tempdir, runfn) bb.utils.mkdirhier(os.path.dirname(runfile)) @@ -369,7 +369,7 @@ exit $ret cmd = runfile if d.getVarFlag(func, 'fakeroot', False): - fakerootcmd = d.getVar('FAKEROOT', True) + fakerootcmd = d.getVar('FAKEROOT') if fakerootcmd: cmd = [fakerootcmd, runfile] @@ -378,7 +378,7 @@ exit $ret else: logfile = sys.stdout - progress = d.getVarFlag(func, 'progress', True) + progress = d.getVarFlag(func, 'progress') if progress: if progress == 'percent': # Use default regex @@ -430,7 +430,7 @@ exit $ret else: break - tempdir = d.getVar('T', True) + tempdir = d.getVar('T') fifopath = os.path.join(tempdir, 'fifo.%s' % os.getpid()) if os.path.exists(fifopath): os.unlink(fifopath) @@ -443,7 +443,7 @@ exit $ret with open(os.devnull, 'r+') as stdin: bb.process.run(cmd, shell=False, stdin=stdin, log=logfile, extrafiles=[(fifo,readfifo)]) except bb.process.CmdError: - logfn = d.getVar('BB_LOGFILE', True) + logfn = d.getVar('BB_LOGFILE') raise FuncFailed(func, logfn) finally: os.unlink(fifopath) @@ -474,18 +474,18 @@ def _exec_task(fn, task, d, quieterr): logger.debug(1, "Executing task %s", task) localdata = _task_data(fn, task, d) - tempdir = localdata.getVar('T', True) + tempdir = localdata.getVar('T') if not tempdir: bb.fatal("T variable not set, unable to build") # Change nice level if we're asked to - nice = localdata.getVar("BB_TASK_NICE_LEVEL", True) + nice = localdata.getVar("BB_TASK_NICE_LEVEL") if nice: curnice = os.nice(0) nice = int(nice) - curnice 
newnice = os.nice(nice) logger.debug(1, "Renice to %s " % newnice) - ionice = localdata.getVar("BB_TASK_IONICE_LEVEL", True) + ionice = localdata.getVar("BB_TASK_IONICE_LEVEL") if ionice: try: cls, prio = ionice.split(".", 1) @@ -496,7 +496,7 @@ def _exec_task(fn, task, d, quieterr): bb.utils.mkdirhier(tempdir) # Determine the logfile to generate - logfmt = localdata.getVar('BB_LOGFMT', True) or 'log.{task}.{pid}' + logfmt = localdata.getVar('BB_LOGFMT') or 'log.{task}.{pid}' logbase = logfmt.format(task=task, pid=os.getpid()) # Document the order of the tasks... @@ -563,6 +563,7 @@ def _exec_task(fn, task, d, quieterr): localdata.setVar('BB_LOGFILE', logfn) localdata.setVar('BB_RUNTASK', task) + localdata.setVar('BB_TASK_LOGGER', bblogger) flags = localdata.getVarFlags(task) @@ -628,7 +629,7 @@ def exec_task(fn, task, d, profile = False): quieterr = True if profile: - profname = "profile-%s.log" % (d.getVar("PN", True) + "-" + task) + profname = "profile-%s.log" % (d.getVar("PN") + "-" + task) try: import cProfile as profile except: @@ -668,9 +669,9 @@ def stamp_internal(taskname, d, file_name, baseonly=False, noextra=False): stamp = d.stamp[file_name] extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or "" else: - stamp = d.getVar('STAMP', True) - file_name = d.getVar('BB_FILENAME', True) - extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or "" + stamp = d.getVar('STAMP') + file_name = d.getVar('BB_FILENAME') + extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or "" if baseonly: return stamp @@ -704,9 +705,9 @@ def stamp_cleanmask_internal(taskname, d, file_name): stamp = d.stampclean[file_name] extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or "" else: - stamp = d.getVar('STAMPCLEAN', True) - file_name = d.getVar('BB_FILENAME', True) - extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or "" + stamp = d.getVar('STAMPCLEAN') + file_name = d.getVar('BB_FILENAME') + extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info') or "" if not stamp: return [] @@ -742,7 +743,7 @@ def make_stamp(task, d, file_name = None): # as it completes if not task.endswith("_setscene") and task != "do_setscene" and not file_name: stampbase = stamp_internal(task, d, None, True) - file_name = d.getVar('BB_FILENAME', True) + file_name = d.getVar('BB_FILENAME') bb.parse.siggen.dump_sigtask(file_name, task, stampbase, True) def del_stamp(task, d, file_name = None): @@ -764,7 +765,7 @@ def write_taint(task, d, file_name = None): if file_name: taintfn = d.stamp[file_name] + '.' + task + '.taint' else: - taintfn = d.getVar('STAMP', True) + '.' + task + '.taint' + taintfn = d.getVar('STAMP') + '.' + task + '.taint' bb.utils.mkdirhier(os.path.dirname(taintfn)) # The specific content of the taint file is not really important, # we just need it to be random, so a random UUID is used @@ -861,3 +862,46 @@ def deltask(task, d): if task in deps: deps.remove(task) d.setVarFlag(bbtask, 'deps', deps) + +def preceedtask(task, with_recrdeptasks, d): + """ + Returns a set of tasks in the current recipe which were specified as + precondition by the task itself ("after") or which listed themselves + as precondition ("before"). Preceeding tasks specified via the + "recrdeptask" are included in the result only if requested. Beware + that this may lead to the task itself being listed. 
+ """ + preceed = set() + preceed.update(d.getVarFlag(task, 'deps') or []) + if with_recrdeptasks: + recrdeptask = d.getVarFlag(task, 'recrdeptask') + if recrdeptask: + preceed.update(recrdeptask.split()) + return preceed + +def tasksbetween(task_start, task_end, d): + """ + Return the list of tasks between two tasks in the current recipe, + where task_start is to start at and task_end is the task to end at + (and task_end has a dependency chain back to task_start). + """ + outtasks = [] + tasks = list(filter(lambda k: d.getVarFlag(k, "task"), d.keys())) + def follow_chain(task, endtask, chain=None): + if not chain: + chain = [] + chain.append(task) + for othertask in tasks: + if othertask == task: + continue + if task == endtask: + for ctask in chain: + if ctask not in outtasks: + outtasks.append(ctask) + else: + deps = d.getVarFlag(othertask, 'deps', False) + if task in deps: + follow_chain(othertask, endtask, chain) + chain.pop() + follow_chain(task_start, task_end) + return outtasks diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cache.py b/import-layers/yocto-poky/bitbake/lib/bb/cache.py index dd9cfdfac..e7eeb4f50 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/cache.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/cache.py @@ -37,7 +37,7 @@ import bb.utils logger = logging.getLogger("BitBake.Cache") -__cache_version__ = "150" +__cache_version__ = "151" def getCacheFile(path, filename, data_hash): return os.path.join(path, filename + "." + data_hash) @@ -71,7 +71,7 @@ class RecipeInfoCommon(object): @classmethod def flaglist(cls, flag, varlist, metadata, squash=False): - out_dict = dict((var, metadata.getVarFlag(var, flag, True)) + out_dict = dict((var, metadata.getVarFlag(var, flag)) for var in varlist) if squash: return dict((k,v) for (k,v) in out_dict.items() if v) @@ -296,7 +296,7 @@ def parse_recipe(bb_data, bbfile, appends, mc=''): bb_data.setVar("__BBMULTICONFIG", mc) # expand tmpdir to include this topdir - bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR', True) or "") + bb_data.setVar('TMPDIR', bb_data.getVar('TMPDIR') or "") bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) oldpath = os.path.abspath(os.getcwd()) bb.parse.cached_mtime_noerror(bbfile_loc) @@ -378,7 +378,7 @@ class Cache(NoCache): # It will be used later for deciding whether we # need extra cache file dump/load support self.caches_array = caches_array - self.cachedir = data.getVar("CACHE", True) + self.cachedir = data.getVar("CACHE") self.clean = set() self.checked = set() self.depends_cache = {} @@ -462,6 +462,10 @@ class Cache(NoCache): self.depends_cache[key] = [value] # only fire events on even percentage boundaries current_progress = cachefile.tell() + previous_progress + if current_progress > cachesize: + # we might have calculated incorrect total size because a file + # might've been written out just after we checked its size + cachesize = current_progress current_percent = 100 * current_progress / cachesize if current_percent > previous_percent: previous_percent = current_percent @@ -792,8 +796,8 @@ class MultiProcessCache(object): self.cachedata_extras = self.create_cachedata() def init_cache(self, d, cache_file_name=None): - cachedir = (d.getVar("PERSISTENT_DIR", True) or - d.getVar("CACHE", True)) + cachedir = (d.getVar("PERSISTENT_DIR") or + d.getVar("CACHE")) if cachedir in [None, '']: return bb.utils.mkdirhier(cachedir) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py b/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py index 5d2d44065..530f44e57 100644 --- 
a/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/codeparser.py @@ -1,3 +1,22 @@ +""" +BitBake code parser + +Parses actual code (i.e. python and shell) for functions and in-line +expressions. Used mainly to determine dependencies on other functions +and variables within the BitBake metadata. Also provides a cache for +this information in order to speed up processing. + +(Not to be confused with the code that parses the metadata itself, +see lib/bb/parse/ for that). + +NOTE: if you change how the parsers gather information you will almost +certainly need to increment CodeParserCache.CACHE_VERSION below so that +any existing codeparser cache gets invalidated. Additionally you'll need +to increment __cache_version__ in cache.py in order to ensure that old +recipe caches don't trigger "Taskhash mismatch" errors. + +""" + import ast import sys import codegen @@ -117,7 +136,11 @@ class shellCacheLine(object): class CodeParserCache(MultiProcessCache): cache_file_name = "bb_codeparser.dat" - CACHE_VERSION = 8 + # NOTE: you must increment this if you change how the parsers gather information, + # so that an existing cache gets invalidated. Additionally you'll need + # to increment __cache_version__ in cache.py in order to ensure that old + # recipe caches don't trigger "Taskhash mismatch" errors. + CACHE_VERSION = 9 def __init__(self): MultiProcessCache.__init__(self) @@ -186,13 +209,15 @@ class BufferedLogger(Logger): def flush(self): for record in self.buffer: - self.target.handle(record) + if self.target.isEnabledFor(record.levelno): + self.target.handle(record) self.buffer = [] class PythonParser(): getvars = (".getVar", ".appendVar", ".prependVar") getvarflags = (".getVarFlag", ".appendVarFlag", ".prependVarFlag") - containsfuncs = ("bb.utils.contains", "base_contains", "bb.utils.contains_any") + containsfuncs = ("bb.utils.contains", "base_contains") + containsanyfuncs = ("bb.utils.contains_any", "bb.utils.filter") execfuncs = ("bb.build.exec_func", "bb.build.exec_task") def warn(self, func, arg): @@ -211,13 +236,17 @@ class PythonParser(): def visit_Call(self, node): name = self.called_node_name(node.func) - if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs): + if name and (name.endswith(self.getvars) or name.endswith(self.getvarflags) or name in self.containsfuncs or name in self.containsanyfuncs): if isinstance(node.args[0], ast.Str): varname = node.args[0].s if name in self.containsfuncs and isinstance(node.args[1], ast.Str): if varname not in self.contains: self.contains[varname] = set() self.contains[varname].add(node.args[1].s) + elif name in self.containsanyfuncs and isinstance(node.args[1], ast.Str): + if varname not in self.contains: + self.contains[varname] = set() + self.contains[varname].update(node.args[1].s.split()) elif name.endswith(self.getvarflags): if isinstance(node.args[1], ast.Str): self.references.add('%s[%s]' % (varname, node.args[1].s)) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/command.py b/import-layers/yocto-poky/bitbake/lib/bb/command.py index caa3e4d45..a919f58d2 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/command.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/command.py @@ -28,8 +28,15 @@ and must not trigger events, directly or indirectly. 
Commands are queued in a CommandQueue """ +from collections import OrderedDict, defaultdict + import bb.event import bb.cooker +import bb.remotedata + +class DataStoreConnectionHandle(object): + def __init__(self, dsindex=0): + self.dsindex = dsindex class CommandCompleted(bb.event.Event): pass @@ -55,6 +62,7 @@ class Command: self.cooker = cooker self.cmds_sync = CommandsSync() self.cmds_async = CommandsAsync() + self.remotedatastores = bb.remotedata.RemoteDatastores(cooker) # FIXME Add lock for this self.currentAsyncCommand = None @@ -125,14 +133,20 @@ class Command: def finishAsyncCommand(self, msg=None, code=None): if msg or msg == "": - bb.event.fire(CommandFailed(msg), self.cooker.expanded_data) + bb.event.fire(CommandFailed(msg), self.cooker.data) elif code: - bb.event.fire(CommandExit(code), self.cooker.expanded_data) + bb.event.fire(CommandExit(code), self.cooker.data) else: - bb.event.fire(CommandCompleted(), self.cooker.expanded_data) + bb.event.fire(CommandCompleted(), self.cooker.data) self.currentAsyncCommand = None self.cooker.finishcommand() +def split_mc_pn(pn): + if pn.startswith("multiconfig:"): + _, mc, pn = pn.split(":", 2) + return (mc, pn) + return ('', pn) + class CommandsSync: """ A class of synchronous commands @@ -179,6 +193,7 @@ class CommandsSync: """ varname = params[0] value = str(params[1]) + command.cooker.extraconfigdata[varname] = value command.cooker.data.setVar(varname, value) def getSetVariable(self, command, params): @@ -295,9 +310,274 @@ class CommandsSync: def updateConfig(self, command, params): options = params[0] environment = params[1] - command.cooker.updateConfigOpts(options, environment) + cmdline = params[2] + command.cooker.updateConfigOpts(options, environment, cmdline) updateConfig.needconfig = False + def parseConfiguration(self, command, params): + """Instruct bitbake to parse its configuration + NOTE: it is only necessary to call this if you aren't calling any normal action + (otherwise parsing is taken care of automatically) + """ + command.cooker.parseConfiguration() + parseConfiguration.needconfig = False + + def getLayerPriorities(self, command, params): + ret = [] + # regex objects cannot be marshalled by xmlrpc + for collection, pattern, regex, pri in command.cooker.bbfile_config_priorities: + ret.append((collection, pattern, regex.pattern, pri)) + return ret + getLayerPriorities.readonly = True + + def getRecipes(self, command, params): + try: + mc = params[0] + except IndexError: + mc = '' + return list(command.cooker.recipecaches[mc].pkg_pn.items()) + getRecipes.readonly = True + + def getRecipeDepends(self, command, params): + try: + mc = params[0] + except IndexError: + mc = '' + return list(command.cooker.recipecaches[mc].deps.items()) + getRecipeDepends.readonly = True + + def getRecipeVersions(self, command, params): + try: + mc = params[0] + except IndexError: + mc = '' + return command.cooker.recipecaches[mc].pkg_pepvpr + getRecipeVersions.readonly = True + + def getRuntimeDepends(self, command, params): + ret = [] + try: + mc = params[0] + except IndexError: + mc = '' + rundeps = command.cooker.recipecaches[mc].rundeps + for key, value in rundeps.items(): + if isinstance(value, defaultdict): + value = dict(value) + ret.append((key, value)) + return ret + getRuntimeDepends.readonly = True + + def getRuntimeRecommends(self, command, params): + ret = [] + try: + mc = params[0] + except IndexError: + mc = '' + runrecs = command.cooker.recipecaches[mc].runrecs + for key, value in runrecs.items(): + if isinstance(value, 
defaultdict): + value = dict(value) + ret.append((key, value)) + return ret + getRuntimeRecommends.readonly = True + + def getRecipeInherits(self, command, params): + try: + mc = params[0] + except IndexError: + mc = '' + return command.cooker.recipecaches[mc].inherits + getRecipeInherits.readonly = True + + def getBbFilePriority(self, command, params): + try: + mc = params[0] + except IndexError: + mc = '' + return command.cooker.recipecaches[mc].bbfile_priority + getBbFilePriority.readonly = True + + def getDefaultPreference(self, command, params): + try: + mc = params[0] + except IndexError: + mc = '' + return command.cooker.recipecaches[mc].pkg_dp + getDefaultPreference.readonly = True + + def getSkippedRecipes(self, command, params): + # Return list sorted by reverse priority order + import bb.cache + skipdict = OrderedDict(sorted(command.cooker.skiplist.items(), + key=lambda x: (-command.cooker.collection.calc_bbfile_priority(bb.cache.virtualfn2realfn(x[0])[0]), x[0]))) + return list(skipdict.items()) + getSkippedRecipes.readonly = True + + def getOverlayedRecipes(self, command, params): + return list(command.cooker.collection.overlayed.items()) + getOverlayedRecipes.readonly = True + + def getFileAppends(self, command, params): + fn = params[0] + return command.cooker.collection.get_file_appends(fn) + getFileAppends.readonly = True + + def getAllAppends(self, command, params): + return command.cooker.collection.bbappends + getAllAppends.readonly = True + + def findProviders(self, command, params): + return command.cooker.findProviders() + findProviders.readonly = True + + def findBestProvider(self, command, params): + (mc, pn) = split_mc_pn(params[0]) + return command.cooker.findBestProvider(pn, mc) + findBestProvider.readonly = True + + def allProviders(self, command, params): + try: + mc = params[0] + except IndexError: + mc = '' + return list(bb.providers.allProviders(command.cooker.recipecaches[mc]).items()) + allProviders.readonly = True + + def getRuntimeProviders(self, command, params): + rprovide = params[0] + try: + mc = params[1] + except IndexError: + mc = '' + all_p = bb.providers.getRuntimeProviders(command.cooker.recipecaches[mc], rprovide) + if all_p: + best = bb.providers.filterProvidersRunTime(all_p, rprovide, + command.cooker.data, + command.cooker.recipecaches[mc])[0][0] + else: + best = None + return all_p, best + getRuntimeProviders.readonly = True + + def dataStoreConnectorFindVar(self, command, params): + dsindex = params[0] + name = params[1] + datastore = command.remotedatastores[dsindex] + value, overridedata = datastore._findVar(name) + + if value: + content = value.get('_content', None) + if isinstance(content, bb.data_smart.DataSmart): + # Value is a datastore (e.g. 
BB_ORIGENV) - need to handle this carefully + idx = command.remotedatastores.check_store(content, True) + return {'_content': DataStoreConnectionHandle(idx), + '_connector_origtype': 'DataStoreConnectionHandle', + '_connector_overrides': overridedata} + elif isinstance(content, set): + return {'_content': list(content), + '_connector_origtype': 'set', + '_connector_overrides': overridedata} + else: + value['_connector_overrides'] = overridedata + else: + value = {} + value['_connector_overrides'] = overridedata + return value + dataStoreConnectorFindVar.readonly = True + + def dataStoreConnectorGetKeys(self, command, params): + dsindex = params[0] + datastore = command.remotedatastores[dsindex] + return list(datastore.keys()) + dataStoreConnectorGetKeys.readonly = True + + def dataStoreConnectorGetVarHistory(self, command, params): + dsindex = params[0] + name = params[1] + datastore = command.remotedatastores[dsindex] + return datastore.varhistory.variable(name) + dataStoreConnectorGetVarHistory.readonly = True + + def dataStoreConnectorExpandPythonRef(self, command, params): + config_data_dict = params[0] + varname = params[1] + expr = params[2] + + config_data = command.remotedatastores.receive_datastore(config_data_dict) + + varparse = bb.data_smart.VariableParse(varname, config_data) + return varparse.python_sub(expr) + + def dataStoreConnectorRelease(self, command, params): + dsindex = params[0] + if dsindex <= 0: + raise CommandError('dataStoreConnectorRelease: invalid index %d' % dsindex) + command.remotedatastores.release(dsindex) + + def dataStoreConnectorSetVarFlag(self, command, params): + dsindex = params[0] + name = params[1] + flag = params[2] + value = params[3] + datastore = command.remotedatastores[dsindex] + datastore.setVarFlag(name, flag, value) + + def dataStoreConnectorDelVar(self, command, params): + dsindex = params[0] + name = params[1] + datastore = command.remotedatastores[dsindex] + if len(params) > 2: + flag = params[2] + datastore.delVarFlag(name, flag) + else: + datastore.delVar(name) + + def dataStoreConnectorRenameVar(self, command, params): + dsindex = params[0] + name = params[1] + newname = params[2] + datastore = command.remotedatastores[dsindex] + datastore.renameVar(name, newname) + + def parseRecipeFile(self, command, params): + """ + Parse the specified recipe file (with or without bbappends) + and return a datastore object representing the environment + for the recipe. 
+ """ + fn = params[0] + appends = params[1] + appendlist = params[2] + if len(params) > 3: + config_data_dict = params[3] + config_data = command.remotedatastores.receive_datastore(config_data_dict) + else: + config_data = None + + if appends: + if appendlist is not None: + appendfiles = appendlist + else: + appendfiles = command.cooker.collection.get_file_appends(fn) + else: + appendfiles = [] + # We are calling bb.cache locally here rather than on the server, + # but that's OK because it doesn't actually need anything from + # the server barring the global datastore (which we have a remote + # version of) + if config_data: + # We have to use a different function here if we're passing in a datastore + # NOTE: we took a copy above, so we don't do it here again + envdata = bb.cache.parse_recipe(config_data, fn, appendfiles)[''] + else: + # Use the standard path + parser = bb.cache.NoCache(command.cooker.databuilder) + envdata = parser.loadDataFull(fn, appendfiles) + idx = command.remotedatastores.store(envdata) + return DataStoreConnectionHandle(idx) + parseRecipeFile.readonly = True + class CommandsAsync: """ A class of asynchronous commands @@ -311,8 +591,12 @@ class CommandsAsync: """ bfile = params[0] task = params[1] + if len(params) > 2: + hidewarning = params[2] + else: + hidewarning = False - command.cooker.buildFile(bfile, task) + command.cooker.buildFile(bfile, task, hidewarning) buildFile.needcache = False def buildTargets(self, command, params): @@ -472,3 +756,11 @@ class CommandsAsync: command.finishAsyncCommand() resetCooker.needcache = False + def clientComplete(self, command, params): + """ + Do the right thing when the controlling client exits + """ + command.cooker.clientComplete() + command.finishAsyncCommand() + clientComplete.needcache = False + diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cooker.py b/import-layers/yocto-poky/bitbake/lib/bb/cooker.py index 07897be27..3c9e88cd2 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/cooker.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/cooker.py @@ -192,6 +192,8 @@ class BBCooker: bb.parse.__mtime_cache = {} bb.parse.BBHandler.cached_statements = {} + self.ui_cmdline = None + self.initConfigurationData() # we log all events to a file if so directed @@ -271,12 +273,15 @@ class BBCooker: self.inotify_modified_files.append(event.pathname) self.parsecache_valid = False - def add_filewatch(self, deps, watcher=None): + def add_filewatch(self, deps, watcher=None, dirs=False): if not watcher: watcher = self.watcher for i in deps: watcher.bbwatchedfiles.append(i[0]) - f = os.path.dirname(i[0]) + if dirs: + f = i[0] + else: + f = os.path.dirname(i[0]) if f in watcher.bbseen: continue watcher.bbseen.append(f) @@ -331,7 +336,7 @@ class BBCooker: # Need to preserve BB_CONSOLELOG over resets consolelog = None if hasattr(self, "data"): - consolelog = self.data.getVar("BB_CONSOLELOG", True) + consolelog = self.data.getVar("BB_CONSOLELOG") if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: self.enableDataTracking() @@ -358,17 +363,18 @@ class BBCooker: self.databuilder.parseBaseConfiguration() self.data = self.databuilder.data self.data_hash = self.databuilder.data_hash + self.extraconfigdata = {} if consolelog: self.data.setVar("BB_CONSOLELOG", consolelog) + self.data.setVar('BB_CMDLINE', self.ui_cmdline) + # # Copy of the data store which has been expanded. 
# Used for firing events and accessing variables where expansion needs to be accounted for # - self.expanded_data = bb.data.createCopy(self.data) - bb.data.update_data(self.expanded_data) - bb.parse.init_parser(self.expanded_data) + bb.parse.init_parser(self.data) if CookerFeatures.BASEDATASTORE_TRACKING in self.featureset: self.disableDataTracking() @@ -526,7 +532,7 @@ class BBCooker: bb.msg.loggerVerboseLogs = True # Change nice level if we're asked to - nice = self.data.getVar("BB_NICE_LEVEL", True) + nice = self.data.getVar("BB_NICE_LEVEL") if nice: curnice = os.nice(0) nice = int(nice) - curnice @@ -539,9 +545,10 @@ class BBCooker: for mc in self.multiconfigs: self.recipecaches[mc] = bb.cache.CacheData(self.caches_array) - self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS", True)) + self.handleCollections(self.data.getVar("BBFILE_COLLECTIONS")) - def updateConfigOpts(self, options, environment): + def updateConfigOpts(self, options, environment, cmdline): + self.ui_cmdline = cmdline clean = True for o in options: if o in ['prefile', 'postfile']: @@ -583,13 +590,12 @@ class BBCooker: def showVersions(self): - pkg_pn = self.recipecaches[''].pkg_pn - (latest_versions, preferred_versions) = bb.providers.findProviders(self.data, self.recipecaches[''], pkg_pn) + (latest_versions, preferred_versions) = self.findProviders() logger.plain("%-35s %25s %25s", "Recipe Name", "Latest Version", "Preferred Version") logger.plain("%-35s %25s %25s\n", "===========", "==============", "=================") - for p in sorted(pkg_pn): + for p in sorted(self.recipecaches[''].pkg_pn): pref = preferred_versions[p] latest = latest_versions[p] @@ -619,7 +625,7 @@ class BBCooker: fn = self.matchFile(fn) fn = bb.cache.realfn2virtual(fn, cls, mc) elif len(pkgs_to_build) == 1: - ignore = self.expanded_data.getVar("ASSUME_PROVIDED", True) or "" + ignore = self.data.getVar("ASSUME_PROVIDED") or "" if pkgs_to_build[0] in set(ignore.split()): bb.fatal("%s is in ASSUME_PROVIDED" % pkgs_to_build[0]) @@ -644,14 +650,13 @@ class BBCooker: logger.plain(env.getvalue()) # emit variables and shell functions - data.update_data(envdata) with closing(StringIO()) as env: data.emit_env(env, envdata, True) logger.plain(env.getvalue()) # emit the metadata which isnt valid shell data.expandKeys(envdata) - for e in envdata.keys(): + for e in sorted(envdata.keys()): if envdata.getVarFlag(e, 'func', False) and envdata.getVarFlag(e, 'python', False): logger.plain("\npython %s () {\n%s}\n", e, envdata.getVar(e, False)) @@ -705,7 +710,6 @@ class BBCooker: for mc in self.multiconfigs: taskdata[mc] = bb.taskdata.TaskData(abort, skiplist=self.skiplist, allowincomplete=allowincomplete) localdata[mc] = data.createCopy(self.databuilder.mcdata[mc]) - bb.data.update_data(localdata[mc]) bb.data.expandKeys(localdata[mc]) current = 0 @@ -766,7 +770,7 @@ class BBCooker: @staticmethod def add_mc_prefix(mc, pn): if mc: - return "multiconfig:%s.%s" % (mc, pn) + return "multiconfig:%s:%s" % (mc, pn) return pn def buildDependTree(self, rq, taskdata): @@ -951,62 +955,54 @@ class BBCooker: depgraph = self.generateTaskDepTreeData(pkgs_to_build, task) - # Prints a flattened form of package-depends below where subpackages of a package are merged into the main pn - depends_file = open('pn-depends.dot', 'w' ) - buildlist_file = open('pn-buildlist', 'w' ) - print("digraph depends {", file=depends_file) - for pn in depgraph["pn"]: - fn = depgraph["pn"][pn]["filename"] - version = depgraph["pn"][pn]["version"] - print('"%s" [label="%s %s\\n%s"]' % (pn, pn, 
version, fn), file=depends_file) - print("%s" % pn, file=buildlist_file) - buildlist_file.close() + with open('pn-buildlist', 'w') as f: + for pn in depgraph["pn"]: + f.write(pn + "\n") logger.info("PN build list saved to 'pn-buildlist'") - for pn in depgraph["depends"]: - for depend in depgraph["depends"][pn]: - print('"%s" -> "%s" [style=solid]' % (pn, depend), file=depends_file) - for pn in depgraph["rdepends-pn"]: - for rdepend in depgraph["rdepends-pn"][pn]: - print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file) - print("}", file=depends_file) - depends_file.close() - logger.info("PN dependencies saved to 'pn-depends.dot'") - - depends_file = open('package-depends.dot', 'w' ) - print("digraph depends {", file=depends_file) - for package in depgraph["packages"]: - pn = depgraph["packages"][package]["pn"] - fn = depgraph["packages"][package]["filename"] - version = depgraph["packages"][package]["version"] - if package == pn: - print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file) - else: - print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file) - for depend in depgraph["depends"][pn]: - print('"%s" -> "%s" [style=solid]' % (package, depend), file=depends_file) - for package in depgraph["rdepends-pkg"]: - for rdepend in depgraph["rdepends-pkg"][package]: - print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file) - for package in depgraph["rrecs-pkg"]: - for rdepend in depgraph["rrecs-pkg"][package]: - print('"%s" -> "%s" [style=dotted]' % (package, rdepend), file=depends_file) - print("}", file=depends_file) - depends_file.close() - logger.info("Package dependencies saved to 'package-depends.dot'") - - tdepends_file = open('task-depends.dot', 'w' ) - print("digraph depends {", file=tdepends_file) - for task in depgraph["tdepends"]: - (pn, taskname) = task.rsplit(".", 1) - fn = depgraph["pn"][pn]["filename"] - version = depgraph["pn"][pn]["version"] - print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file) - for dep in depgraph["tdepends"][task]: - print('"%s" -> "%s"' % (task, dep), file=tdepends_file) - print("}", file=tdepends_file) - tdepends_file.close() + + # Remove old format output files to ensure no confusion with stale data + try: + os.unlink('pn-depends.dot') + except FileNotFoundError: + pass + try: + os.unlink('package-depends.dot') + except FileNotFoundError: + pass + + with open('task-depends.dot', 'w') as f: + f.write("digraph depends {\n") + for task in depgraph["tdepends"]: + (pn, taskname) = task.rsplit(".", 1) + fn = depgraph["pn"][pn]["filename"] + version = depgraph["pn"][pn]["version"] + f.write('"%s.%s" [label="%s %s\\n%s\\n%s"]\n' % (pn, taskname, pn, taskname, version, fn)) + for dep in depgraph["tdepends"][task]: + f.write('"%s" -> "%s"\n' % (task, dep)) + f.write("}\n") logger.info("Task dependencies saved to 'task-depends.dot'") + with open('recipe-depends.dot', 'w') as f: + f.write("digraph depends {\n") + pndeps = {} + for task in depgraph["tdepends"]: + (pn, taskname) = task.rsplit(".", 1) + if pn not in pndeps: + pndeps[pn] = set() + for dep in depgraph["tdepends"][task]: + (deppn, deptaskname) = dep.rsplit(".", 1) + pndeps[pn].add(deppn) + for pn in pndeps: + fn = depgraph["pn"][pn]["filename"] + version = depgraph["pn"][pn]["version"] + f.write('"%s" [label="%s\\n%s\\n%s"]\n' % (pn, pn, version, fn)) + for dep in pndeps[pn]: + if dep == pn: + continue + f.write('"%s" -> "%s"\n' % (pn, dep)) + 
f.write("}\n") + logger.info("Flatened recipe dependencies saved to 'recipe-depends.dot'") + def show_appends_with_no_recipes(self): # Determine which bbappends haven't been applied @@ -1037,11 +1033,10 @@ class BBCooker: for mc in self.multiconfigs: localdata = data.createCopy(self.databuilder.mcdata[mc]) - bb.data.update_data(localdata) bb.data.expandKeys(localdata) # Handle PREFERRED_PROVIDERS - for p in (localdata.getVar('PREFERRED_PROVIDERS', True) or "").split(): + for p in (localdata.getVar('PREFERRED_PROVIDERS') or "").split(): try: (providee, provider) = p.split(':') except: @@ -1052,7 +1047,7 @@ class BBCooker: self.recipecaches[mc].preferred[providee] = provider def findCoreBaseFiles(self, subdir, configfile): - corebase = self.data.getVar('COREBASE', True) or "" + corebase = self.data.getVar('COREBASE') or "" paths = [] for root, dirs, files in os.walk(corebase + '/' + subdir): for d in dirs: @@ -1102,7 +1097,7 @@ class BBCooker: """ matches = [] - bbpaths = self.data.getVar('BBPATH', True).split(':') + bbpaths = self.data.getVar('BBPATH').split(':') for path in bbpaths: dirpath = os.path.join(path, directory) if os.path.exists(dirpath): @@ -1114,6 +1109,20 @@ class BBCooker: if matches: bb.event.fire(bb.event.FilesMatchingFound(filepattern, matches), self.data) + def findProviders(self, mc=''): + return bb.providers.findProviders(self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn) + + def findBestProvider(self, pn, mc=''): + if pn in self.recipecaches[mc].providers: + filenames = self.recipecaches[mc].providers[pn] + eligible, foundUnique = bb.providers.filterProviders(filenames, pn, self.data, self.recipecaches[mc]) + filename = eligible[0] + return None, None, None, filename + elif pn in self.recipecaches[mc].pkg_pn: + return bb.providers.findBestProvider(pn, self.data, self.recipecaches[mc], self.recipecaches[mc].pkg_pn) + else: + return None, None, None, None + def findConfigFiles(self, varname): """ Find config files which are appropriate values for varname. 
@@ -1124,7 +1133,7 @@ class BBCooker: data = self.data # iterate configs - bbpaths = data.getVar('BBPATH', True).split(':') + bbpaths = data.getVar('BBPATH').split(':') for path in bbpaths: confpath = os.path.join(path, "conf", var) if os.path.exists(confpath): @@ -1193,7 +1202,7 @@ class BBCooker: bb.debug(1,'Processing %s in collection list' % (c)) # Get collection priority if defined explicitly - priority = self.data.getVar("BBFILE_PRIORITY_%s" % c, True) + priority = self.data.getVar("BBFILE_PRIORITY_%s" % c) if priority: try: prio = int(priority) @@ -1207,7 +1216,7 @@ class BBCooker: collection_priorities[c] = None # Check dependencies and store information for priority calculation - deps = self.data.getVar("LAYERDEPENDS_%s" % c, True) + deps = self.data.getVar("LAYERDEPENDS_%s" % c) if deps: try: depDict = bb.utils.explode_dep_versions2(deps) @@ -1216,7 +1225,7 @@ class BBCooker: for dep, oplist in list(depDict.items()): if dep in collection_list: for opstr in oplist: - layerver = self.data.getVar("LAYERVERSION_%s" % dep, True) + layerver = self.data.getVar("LAYERVERSION_%s" % dep) (op, depver) = opstr.split() if layerver: try: @@ -1237,7 +1246,7 @@ class BBCooker: collection_depends[c] = [] # Check recommends and store information for priority calculation - recs = self.data.getVar("LAYERRECOMMENDS_%s" % c, True) + recs = self.data.getVar("LAYERRECOMMENDS_%s" % c) if recs: try: recDict = bb.utils.explode_dep_versions2(recs) @@ -1247,7 +1256,7 @@ class BBCooker: if rec in collection_list: if oplist: opstr = oplist[0] - layerver = self.data.getVar("LAYERVERSION_%s" % rec, True) + layerver = self.data.getVar("LAYERVERSION_%s" % rec) if layerver: (op, recver) = opstr.split() try: @@ -1281,17 +1290,21 @@ class BBCooker: # Calculate all layer priorities using calc_layer_priority and store in bbfile_config_priorities for c in collection_list: calc_layer_priority(c) - regex = self.data.getVar("BBFILE_PATTERN_%s" % c, True) + regex = self.data.getVar("BBFILE_PATTERN_%s" % c) if regex == None: parselog.error("BBFILE_PATTERN_%s not defined" % c) errors = True continue - try: - cre = re.compile(regex) - except re.error: - parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex) - errors = True - continue + elif regex == "": + parselog.debug(1, "BBFILE_PATTERN_%s is empty" % c) + errors = False + else: + try: + cre = re.compile(regex) + except re.error: + parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex) + errors = True + continue self.bbfile_config_priorities.append((c, regex, cre, collection_priorities[c])) if errors: # We've already printed the actual error(s) @@ -1316,7 +1329,7 @@ class BBCooker: bf = os.path.abspath(bf) self.collection = CookerCollectFiles(self.bbfile_config_priorities) - filelist, masked = self.collection.collect_bbfiles(self.data, self.expanded_data) + filelist, masked, searchdirs = self.collection.collect_bbfiles(self.data, self.data) try: os.stat(bf) bf = os.path.abspath(bf) @@ -1347,15 +1360,16 @@ class BBCooker: raise NoSpecificMatch return matches[0] - def buildFile(self, buildfile, task): + def buildFile(self, buildfile, task, hidewarning=False): """ Build the file matching regexp buildfile """ - bb.event.fire(bb.event.BuildInit(), self.expanded_data) + bb.event.fire(bb.event.BuildInit(), self.data) - # Too many people use -b because they think it's how you normally - # specify a target to be built, so show a warning - bb.warn("Buildfile specified, dependencies will not be handled. 
If this is not what you want, do not use -b / --buildfile.") + if not hidewarning: + # Too many people use -b because they think it's how you normally + # specify a target to be built, so show a warning + bb.warn("Buildfile specified, dependencies will not be handled. If this is not what you want, do not use -b / --buildfile.") # Parse the configuration here. We need to do it explicitly here since # buildFile() doesn't use the cache @@ -1392,6 +1406,7 @@ class BBCooker: item = info_array[0].pn self.recipecaches[mc].ignored_dependencies = set() self.recipecaches[mc].bbfile_priority[fn] = 1 + self.configuration.limited_deps = True # Remove external dependencies self.recipecaches[mc].task_deps[fn]['depends'] = {} @@ -1409,8 +1424,8 @@ class BBCooker: taskdata[mc] = bb.taskdata.TaskData(self.configuration.abort) taskdata[mc].add_provider(self.data, self.recipecaches[mc], item) - buildname = self.data.getVar("BUILDNAME", True) - bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.expanded_data) + buildname = self.data.getVar("BUILDNAME") + bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.data) # Execute the runqueue runlist = [[mc, item, task, fn]] @@ -1440,7 +1455,7 @@ class BBCooker: return False if not retval: - bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.expanded_data) + bb.event.fire(bb.event.BuildCompleted(len(rq.rqdata.runtaskentries), buildname, item, failures, interrupted), self.data) self.command.finishAsyncCommand(msg) return False if retval is True: @@ -1495,7 +1510,7 @@ class BBCooker: packages = [target if ':' in target else '%s:%s' % (target, task) for target in targets] - bb.event.fire(bb.event.BuildInit(packages), self.expanded_data) + bb.event.fire(bb.event.BuildInit(packages), self.data) taskdata, runlist = self.buildTaskData(targets, task, self.configuration.abort) @@ -1528,7 +1543,7 @@ class BBCooker: v = self.data.getVar(k, expand) if not k.startswith("__") and not isinstance(v, bb.data_smart.DataSmart): dump[k] = { - 'v' : v , + 'v' : str(v) , 'history' : self.data.varhistory.variable(k), } for d in flaglist: @@ -1627,14 +1642,18 @@ class BBCooker: bb.event.fire(bb.event.SanityCheck(False), self.databuilder.mcdata[mc]) for mc in self.multiconfigs: - ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED", True) or "" + ignore = self.databuilder.mcdata[mc].getVar("ASSUME_PROVIDED") or "" self.recipecaches[mc].ignored_dependencies = set(ignore.split()) for dep in self.configuration.extra_assume_provided: self.recipecaches[mc].ignored_dependencies.add(dep) self.collection = CookerCollectFiles(self.bbfile_config_priorities) - (filelist, masked) = self.collection.collect_bbfiles(self.data, self.expanded_data) + (filelist, masked, searchdirs) = self.collection.collect_bbfiles(self.data, self.data) + + # Add inotify watches for directories searched for bb/bbappend files + for dirent in searchdirs: + self.add_filewatch([[dirent]], dirs=True) self.parser = CookerParser(self, filelist, masked) self.parsecache_valid = True @@ -1668,7 +1687,7 @@ class BBCooker: if len(pkgs_to_build) == 0: raise NothingToBuild - ignore = (self.expanded_data.getVar("ASSUME_PROVIDED", True) or "").split() + ignore = (self.data.getVar("ASSUME_PROVIDED") or "").split() for pkg in pkgs_to_build: if pkg in ignore: parselog.warning("Explicit target \"%s\" is in ASSUME_PROVIDED, ignoring" % pkg) @@ -1688,6 +1707,15 @@ class BBCooker: pkgs_to_build.remove('universe') for mc in self.multiconfigs: for t in 
self.recipecaches[mc].universe_target: + if task: + foundtask = False + for provider_fn in self.recipecaches[mc].providers[t]: + if task in self.recipecaches[mc].task_deps[provider_fn]['tasks']: + foundtask = True + break + if not foundtask: + bb.debug(1, "Skipping %s for universe tasks as task %s doesn't exist" % (t, task)) + continue if mc: t = "multiconfig:" + mc + ":" + t pkgs_to_build.append(t) @@ -1701,13 +1729,13 @@ class BBCooker: try: self.prhost = prserv.serv.auto_start(self.data) except prserv.serv.PRServiceConfigError: - bb.event.fire(CookerExit(), self.expanded_data) + bb.event.fire(CookerExit(), self.data) self.state = state.error return def post_serve(self): prserv.serv.auto_shutdown(self.data) - bb.event.fire(CookerExit(), self.expanded_data) + bb.event.fire(CookerExit(), self.data) lockfile = self.lock.name self.lock.close() self.lock = None @@ -1745,6 +1773,8 @@ class BBCooker: if self.parser: self.parser.shutdown(clean=not force, force=force) + self.notifier.stop() + self.confignotifier.stop() def finishcommand(self): self.state = state.initial @@ -1752,6 +1782,13 @@ class BBCooker: def reset(self): self.initConfigurationData() + def clientComplete(self): + """Called when the client is done using the server""" + if self.configuration.server_only: + self.finishcommand() + else: + self.shutdown(True) + def lockBitbake(self): if not hasattr(self, 'lock'): self.lock = None @@ -1838,7 +1875,7 @@ class CookerCollectFiles(object): collectlog.debug(1, "collecting .bb files") - files = (config.getVar( "BBFILES", True) or "").split() + files = (config.getVar( "BBFILES") or "").split() config.setVar("BBFILES", " ".join(files)) # Sort files by priority @@ -1851,30 +1888,49 @@ class CookerCollectFiles(object): collectlog.error("no recipe files to build, check your BBPATH and BBFILES?") bb.event.fire(CookerExit(), eventdata) - # Can't use set here as order is important - newfiles = [] - for f in files: - if os.path.isdir(f): - dirfiles = self.find_bbfiles(f) - for g in dirfiles: - if g not in newfiles: - newfiles.append(g) - else: - globbed = glob.glob(f) - if not globbed and os.path.exists(f): - globbed = [f] - # glob gives files in order on disk. Sort to be deterministic. - for g in sorted(globbed): - if g not in newfiles: - newfiles.append(g) + # We need to track where we look so that we can add inotify watches. There + # is no nice way to do this, this is horrid. We intercept the os.listdir() + # calls while we run glob(). + origlistdir = os.listdir + searchdirs = [] + + def ourlistdir(d): + searchdirs.append(d) + return origlistdir(d) + + os.listdir = ourlistdir + try: + # Can't use set here as order is important + newfiles = [] + for f in files: + if os.path.isdir(f): + dirfiles = self.find_bbfiles(f) + for g in dirfiles: + if g not in newfiles: + newfiles.append(g) + else: + globbed = glob.glob(f) + if not globbed and os.path.exists(f): + globbed = [f] + # glob gives files in order on disk. Sort to be deterministic. + for g in sorted(globbed): + if g not in newfiles: + newfiles.append(g) + finally: + os.listdir = origlistdir - bbmask = config.getVar('BBMASK', True) + bbmask = config.getVar('BBMASK') if bbmask: # First validate the individual regular expressions and ignore any # that do not compile bbmasks = [] for mask in bbmask.split(): + # When constructing an older style single regex, it's possible for BBMASK + # to end up beginning with '|', which matches and masks _everything_. 
+ if mask.startswith("|"): + collectlog.warn("BBMASK contains regular expression beginning with '|', fixing: %s" % mask) + mask = mask[1:] try: re.compile(mask) bbmasks.append(mask) @@ -1921,7 +1977,7 @@ class CookerCollectFiles(object): topfile = bbfile_seen[base] self.overlayed[topfile].append(f) - return (bbfiles, masked) + return (bbfiles, masked, searchdirs) def get_file_appends(self, fn): """ @@ -1964,7 +2020,7 @@ class CookerCollectFiles(object): for collection, pattern, regex, _ in self.bbfile_config_priorities: if regex in unmatched: - if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection, True) != '1': + if d.getVar('BBFILE_PATTERN_IGNORE_EMPTY_%s' % collection) != '1': collectlog.warning("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern)) return priorities @@ -2121,7 +2177,7 @@ class CookerParser(object): self.toparse = self.total - len(self.fromcache) self.progress_chunk = int(max(self.toparse / 100, 1)) - self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or + self.num_processes = min(int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS") or multiprocessing.cpu_count()), len(self.willparse)) self.start() diff --git a/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py b/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py index 98f56ac7b..e408a35e1 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/cookerdata.py @@ -79,7 +79,7 @@ class ConfigParameters(object): "prefile", "postfile"]: options[o] = getattr(self.options, o) - ret, error = server.runCommand(["updateConfig", options, environment]) + ret, error = server.runCommand(["updateConfig", options, environment, sys.argv]) if error: raise Exception("Unable to update the server configuration with local parameters: %s" % error) @@ -146,6 +146,9 @@ class CookerConfiguration(object): self.tracking = False self.interface = [] self.writeeventlog = False + self.server_only = False + self.limited_deps = False + self.runall = None self.env = {} @@ -212,7 +215,7 @@ def _inherit(bbclass, data): def findConfigFile(configfile, data): search = [] - bbpath = data.getVar("BBPATH", True) + bbpath = data.getVar("BBPATH") if bbpath: for i in bbpath.split(":"): search.append(os.path.join(i, "conf", configfile)) @@ -286,7 +289,7 @@ class CookerDataBuilder(object): self.data_hash = self.data.get_hash() self.mcdata[''] = self.data - multiconfig = (self.data.getVar("BBMULTICONFIG", True) or "").split() + multiconfig = (self.data.getVar("BBMULTICONFIG") or "").split() for config in multiconfig: mcdata = self.parseConfigurationFiles(self.prefiles, self.postfiles, config) bb.event.fire(bb.event.ConfigParsed(), mcdata) @@ -320,7 +323,7 @@ class CookerDataBuilder(object): data.setVar("TOPDIR", os.path.dirname(os.path.dirname(layerconf))) data = parse_config_file(layerconf, data) - layers = (data.getVar('BBLAYERS', True) or "").split() + layers = (data.getVar('BBLAYERS') or "").split() data = bb.data.createCopy(data) approved = bb.utils.approved_variables() @@ -343,7 +346,7 @@ class CookerDataBuilder(object): data.delVar('LAYERDIR_RE') data.delVar('LAYERDIR') - if not data.getVar("BBPATH", True): + if not data.getVar("BBPATH"): msg = "The BBPATH variable is not set" if not layerconf: msg += (" and bitbake did not find a conf/bblayers.conf file in" @@ -358,7 +361,7 @@ class CookerDataBuilder(object): data = parse_config_file(p, data) # Handle any INHERITs and inherit the base class - bbclasses = ["base"] + (data.getVar('INHERIT', True) or 
"").split() + bbclasses = ["base"] + (data.getVar('INHERIT') or "").split() for bbclass in bbclasses: data = _inherit(bbclass, data) @@ -370,7 +373,7 @@ class CookerDataBuilder(object): parselog.critical("Undefined event handler function '%s'" % var) sys.exit(1) handlerln = int(data.getVarFlag(var, "lineno", False)) - bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln) + bb.event.register(var, data.getVar(var, False), (data.getVarFlag(var, "eventmask") or "").split(), handlerfn, handlerln) data.setVar('BBINCLUDED',bb.parse.get_file_depends(data)) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/data.py b/import-layers/yocto-poky/bitbake/lib/bb/data.py index c56965c60..134afaacc 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/data.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/data.py @@ -78,59 +78,6 @@ def initVar(var, d): """Non-destructive var init for data structure""" d.initVar(var) - -def setVar(var, value, d): - """Set a variable to a given value""" - d.setVar(var, value) - - -def getVar(var, d, exp = False): - """Gets the value of a variable""" - return d.getVar(var, exp) - - -def renameVar(key, newkey, d): - """Renames a variable from key to newkey""" - d.renameVar(key, newkey) - -def delVar(var, d): - """Removes a variable from the data set""" - d.delVar(var) - -def appendVar(var, value, d): - """Append additional value to a variable""" - d.appendVar(var, value) - -def setVarFlag(var, flag, flagvalue, d): - """Set a flag for a given variable to a given value""" - d.setVarFlag(var, flag, flagvalue) - -def getVarFlag(var, flag, d): - """Gets given flag from given var""" - return d.getVarFlag(var, flag, False) - -def delVarFlag(var, flag, d): - """Removes a given flag from the variable's flags""" - d.delVarFlag(var, flag) - -def setVarFlags(var, flags, d): - """Set the flags for a given variable - - Note: - setVarFlags will not clear previous - flags. 
Think of this method as - addVarFlags - """ - d.setVarFlags(var, flags) - -def getVarFlags(var, d): - """Gets a variable's flags""" - return d.getVarFlags(var) - -def delVarFlags(var, d): - """Removes a variable's flags""" - d.delVarFlags(var) - def keys(d): """Return a list of keys in d""" return d.keys() @@ -174,7 +121,7 @@ def inheritFromOS(d, savedenv, permitted): for s in savedenv.keys(): if s in permitted: try: - d.setVar(s, savedenv.getVar(s, True), op = 'from env') + d.setVar(s, savedenv.getVar(s), op = 'from env') if s in exportlist: d.setVarFlag(s, "export", True, op = 'auto env export') except TypeError: @@ -194,7 +141,7 @@ def emit_var(var, o=sys.__stdout__, d = init(), all=False): try: if all: oval = d.getVar(var, False) - val = d.getVar(var, True) + val = d.getVar(var) except (KeyboardInterrupt, bb.build.FuncFailed): raise except Exception as exc: @@ -249,7 +196,7 @@ def emit_env(o=sys.__stdout__, d = init(), all=False): keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc) grouped = groupby(keys, isfunc) for isfunc, keys in grouped: - for key in keys: + for key in sorted(keys): emit_var(key, o, d, all and not isfunc) and o.write('\n') def exported_keys(d): @@ -261,9 +208,9 @@ def exported_vars(d): k = list(exported_keys(d)) for key in k: try: - value = d.getVar(key, True) + value = d.getVar(key) except Exception as err: - bb.warn("%s: Unable to export ${%s}: %s" % (d.getVar("FILE", True), key, err)) + bb.warn("%s: Unable to export ${%s}: %s" % (d.getVar("FILE"), key, err)) continue if value is not None: @@ -273,13 +220,13 @@ def emit_func(func, o=sys.__stdout__, d = init()): """Emits all items in the data store in a format such that it can be sourced by a shell.""" keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func", False)) - for key in keys: + for key in sorted(keys): emit_var(key, o, d, False) o.write('\n') emit_var(func, o, d, False) and o.write('\n') - newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True)) - newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split()) + newdeps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func)) + newdeps |= set((d.getVarFlag(func, "vardeps") or "").split()) seen = set() while newdeps: deps = newdeps @@ -288,8 +235,8 @@ def emit_func(func, o=sys.__stdout__, d = init()): for dep in deps: if d.getVarFlag(dep, "func", False) and not d.getVarFlag(dep, "python", False): emit_var(dep, o, d, False) and o.write('\n') - newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep, True)) - newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split()) + newdeps |= bb.codeparser.ShellParser(dep, logger).parse_shell(d.getVar(dep)) + newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split()) newdeps -= seen _functionfmt = """ @@ -312,7 +259,7 @@ def emit_func_python(func, o=sys.__stdout__, d = init()): pp = bb.codeparser.PythonParser(func, logger) pp.parse_python(d.getVar(func, False)) newdeps = pp.execs - newdeps |= set((d.getVarFlag(func, "vardeps", True) or "").split()) + newdeps |= set((d.getVarFlag(func, "vardeps") or "").split()) seen = set() while newdeps: deps = newdeps @@ -324,7 +271,7 @@ def emit_func_python(func, o=sys.__stdout__, d = init()): pp = bb.codeparser.PythonParser(dep, logger) pp.parse_python(d.getVar(dep, False)) newdeps |= pp.execs - newdeps |= set((d.getVarFlag(dep, "vardeps", True) or "").split()) + newdeps |= set((d.getVarFlag(dep, "vardeps") or "").split()) newdeps -= seen def 
update_data(d): @@ -348,12 +295,14 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d): def handle_contains(value, contains, d): newvalue = "" for k in sorted(contains): - l = (d.getVar(k, True) or "").split() - for word in sorted(contains[k]): - if word in l: - newvalue += "\n%s{%s} = Set" % (k, word) + l = (d.getVar(k) or "").split() + for item in sorted(contains[k]): + for word in item.split(): + if not word in l: + newvalue += "\n%s{%s} = Unset" % (k, item) + break else: - newvalue += "\n%s{%s} = Unset" % (k, word) + newvalue += "\n%s{%s} = Set" % (k, item) if not newvalue: return value if not value: @@ -366,7 +315,7 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d): if varflags.get("python"): parser = bb.codeparser.PythonParser(key, logger) if value and "\t" in value: - logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE", True))) + logger.warning("Variable %s contains tabs, please remove these (%s)" % (key, d.getVar("FILE"))) parser.parse_python(value, filename=varflags.get("filename"), lineno=varflags.get("lineno")) deps = deps | parser.references deps = deps | (keys & parser.execs) @@ -410,6 +359,8 @@ def build_dependencies(key, keys, shelldeps, varflagsexcl, d): deps |= set((vardeps or "").split()) deps -= set(varflags.get("vardepsexclude", "").split()) + except bb.parse.SkipRecipe: + raise except Exception as e: bb.warn("Exception during build_dependencies for %s" % key) raise @@ -421,7 +372,7 @@ def generate_dependencies(d): keys = set(key for key in d if not key.startswith("__")) shelldeps = set(key for key in d.getVar("__exportlist", False) if d.getVarFlag(key, "export", False) and not d.getVarFlag(key, "unexport", False)) - varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS', True) + varflagsexcl = d.getVar('BB_SIGNATURE_EXCLUDE_FLAGS') deps = {} values = {} diff --git a/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py b/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py index 805a9a71f..7dc1c6870 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/data_smart.py @@ -108,7 +108,7 @@ class VariableParse: varparse = self.d.expand_cache[key] var = varparse.value else: - var = self.d.getVarFlag(key, "_content", True) + var = self.d.getVarFlag(key, "_content") self.references.add(key) if var is not None: return var @@ -116,13 +116,21 @@ class VariableParse: return match.group() def python_sub(self, match): - code = match.group()[3:-1] + if isinstance(match, str): + code = match + else: + code = match.group()[3:-1] + + if "_remote_data" in self.d: + connector = self.d["_remote_data"] + return connector.expandPythonRef(self.varname, code, self.d) + codeobj = compile(code.strip(), self.varname or "", "eval") parser = bb.codeparser.PythonParser(self.varname, logger) parser.parse_python(code) if self.varname: - vardeps = self.d.getVarFlag(self.varname, "vardeps", True) + vardeps = self.d.getVarFlag(self.varname, "vardeps") if vardeps is None: parser.log.flush() else: @@ -146,7 +154,7 @@ class DataContext(dict): self['d'] = metadata def __missing__(self, key): - value = self.metadata.getVar(key, True) + value = self.metadata.getVar(key) if value is None or self.metadata.getVarFlag(key, 'func', False): raise KeyError(key) else: @@ -222,6 +230,19 @@ class VariableHistory(object): new.variables = self.variables.copy() return new + def __getstate__(self): + vardict = {} + for k, v in self.variables.iteritems(): + vardict[k] = v + return {'dataroot': 
self.dataroot, + 'variables': vardict} + + def __setstate__(self, state): + self.dataroot = state['dataroot'] + self.variables = COWDictBase.copy() + for k, v in state['variables'].items(): + self.variables[k] = v + def record(self, *kwonly, **loginfo): if not self.dataroot._tracking: return @@ -247,10 +268,15 @@ class VariableHistory(object): self.variables[var].append(loginfo.copy()) def variable(self, var): - if var in self.variables: - return self.variables[var] + remote_connector = self.dataroot.getVar('_remote_data', False) + if remote_connector: + varhistory = remote_connector.getVarHistory(var) else: - return [] + varhistory = [] + + if var in self.variables: + varhistory.extend(self.variables[var]) + return varhistory def emit(self, var, oval, val, o, d): history = self.variable(var) @@ -318,7 +344,7 @@ class VariableHistory(object): the files in which they were added. """ history = self.variable(var) - finalitems = (d.getVar(var, True) or '').split() + finalitems = (d.getVar(var) or '').split() filemap = {} isset = False for event in history: @@ -426,11 +452,11 @@ class DataSmart(MutableMapping): # Can end up here recursively so setup dummy values self.overrides = [] self.overridesset = set() - self.overrides = (self.getVar("OVERRIDES", True) or "").split(":") or [] + self.overrides = (self.getVar("OVERRIDES") or "").split(":") or [] self.overridesset = set(self.overrides) self.inoverride = False self.expand_cache = {} - newoverrides = (self.getVar("OVERRIDES", True) or "").split(":") or [] + newoverrides = (self.getVar("OVERRIDES") or "").split(":") or [] if newoverrides == self.overrides: break self.overrides = newoverrides @@ -447,17 +473,22 @@ class DataSmart(MutableMapping): dest = self.dict while dest: if var in dest: - return dest[var] + return dest[var], self.overridedata.get(var, None) + + if "_remote_data" in dest: + connector = dest["_remote_data"]["_content"] + return connector.getVar(var) if "_data" not in dest: break dest = dest["_data"] + return None, self.overridedata.get(var, None) def _makeShadowCopy(self, var): if var in self.dict: return - local_var = self._findVar(var) + local_var, _ = self._findVar(var) if local_var: self.dict[var] = copy.copy(local_var) @@ -471,6 +502,12 @@ class DataSmart(MutableMapping): if 'parsing' in loginfo: parsing=True + if '_remote_data' in self.dict: + connector = self.dict["_remote_data"]["_content"] + res = connector.setVar(var, value) + if not res: + return + if 'op' not in loginfo: loginfo['op'] = "set" self.expand_cache = {} @@ -509,6 +546,8 @@ class DataSmart(MutableMapping): del self.dict[var]["_append"] if "_prepend" in self.dict[var]: del self.dict[var]["_prepend"] + if "_remove" in self.dict[var]: + del self.dict[var]["_remove"] if var in self.overridedata: active = [] self.need_overrides() @@ -541,7 +580,7 @@ class DataSmart(MutableMapping): nextnew = set() self.overridevars.update(new) for i in new: - vardata = self.expandWithRefs(self.getVar(i, True), i) + vardata = self.expandWithRefs(self.getVar(i), i) nextnew.update(vardata.references) nextnew.update(vardata.contains.keys()) new = nextnew @@ -565,13 +604,19 @@ class DataSmart(MutableMapping): if len(shortvar) == 0: override = None - def getVar(self, var, expand, noweakdefault=False, parsing=False): + def getVar(self, var, expand=True, noweakdefault=False, parsing=False): return self.getVarFlag(var, "_content", expand, noweakdefault, parsing) def renameVar(self, key, newkey, **loginfo): """ Rename the variable key to newkey """ + if '_remote_data' in self.dict: + 
connector = self.dict["_remote_data"]["_content"] + res = connector.renameVar(key, newkey) + if not res: + return + val = self.getVar(key, 0, parsing=True) if val is not None: loginfo['variable'] = newkey @@ -615,6 +660,12 @@ class DataSmart(MutableMapping): self.setVar(var + "_prepend", value, ignore=True, parsing=True) def delVar(self, var, **loginfo): + if '_remote_data' in self.dict: + connector = self.dict["_remote_data"]["_content"] + res = connector.delVar(var) + if not res: + return + loginfo['detail'] = "" loginfo['op'] = 'del' self.varhistory.record(**loginfo) @@ -641,6 +692,12 @@ class DataSmart(MutableMapping): override = None def setVarFlag(self, var, flag, value, **loginfo): + if '_remote_data' in self.dict: + connector = self.dict["_remote_data"]["_content"] + res = connector.setVarFlag(var, flag, value) + if not res: + return + self.expand_cache = {} if 'op' not in loginfo: loginfo['op'] = "set" @@ -662,14 +719,14 @@ class DataSmart(MutableMapping): self.dict["__exportlist"]["_content"] = set() self.dict["__exportlist"]["_content"].add(var) - def getVarFlag(self, var, flag, expand, noweakdefault=False, parsing=False): - local_var = self._findVar(var) + def getVarFlag(self, var, flag, expand=True, noweakdefault=False, parsing=False): + local_var, overridedata = self._findVar(var) value = None - if flag == "_content" and var in self.overridedata and not parsing: + if flag == "_content" and overridedata is not None and not parsing: match = False active = {} self.need_overrides() - for (r, o) in self.overridedata[var]: + for (r, o) in overridedata: # What about double overrides both with "_" in the name? if o in self.overridesset: active[o] = r @@ -759,8 +816,14 @@ class DataSmart(MutableMapping): return value def delVarFlag(self, var, flag, **loginfo): + if '_remote_data' in self.dict: + connector = self.dict["_remote_data"]["_content"] + res = connector.delVarFlag(var, flag) + if not res: + return + self.expand_cache = {} - local_var = self._findVar(var) + local_var, _ = self._findVar(var) if not local_var: return if not var in self.dict: @@ -803,7 +866,7 @@ class DataSmart(MutableMapping): self.dict[var][i] = flags[i] def getVarFlags(self, var, expand = False, internalflags=False): - local_var = self._findVar(var) + local_var, _ = self._findVar(var) flags = {} if local_var: @@ -845,7 +908,7 @@ class DataSmart(MutableMapping): data = DataSmart() data.dict["_data"] = self.dict data.varhistory = self.varhistory.copy() - data.varhistory.datasmart = data + data.varhistory.dataroot = data data.inchistory = self.inchistory.copy() data._tracking = self._tracking @@ -876,7 +939,7 @@ class DataSmart(MutableMapping): def localkeys(self): for key in self.dict: - if key != '_data': + if key not in ['_data', '_remote_data']: yield key def __iter__(self): @@ -885,7 +948,7 @@ class DataSmart(MutableMapping): def keylist(d): klist = set() for key in d: - if key == "_data": + if key in ["_data", "_remote_data"]: continue if key in deleted: continue @@ -899,6 +962,13 @@ class DataSmart(MutableMapping): if "_data" in d: klist |= keylist(d["_data"]) + if "_remote_data" in d: + connector = d["_remote_data"]["_content"] + for key in connector.getKeys(): + if key in deleted: + continue + klist.add(key) + return klist self.need_overrides() @@ -936,9 +1006,8 @@ class DataSmart(MutableMapping): data = {} d = self.createCopy() bb.data.expandKeys(d) - bb.data.update_data(d) - config_whitelist = set((d.getVar("BB_HASHCONFIG_WHITELIST", True) or "").split()) + config_whitelist = 
set((d.getVar("BB_HASHCONFIG_WHITELIST") or "").split()) keys = set(key for key in iter(d) if not key.startswith("__")) for key in keys: if key in config_whitelist: @@ -957,7 +1026,6 @@ class DataSmart(MutableMapping): for key in ["__BBTASKS", "__BBANONFUNCS", "__BBHANDLERS"]: bb_list = d.getVar(key, False) or [] - bb_list.sort() data.update({key:str(bb_list)}) if key == "__BBANONFUNCS": diff --git a/import-layers/yocto-poky/bitbake/lib/bb/event.py b/import-layers/yocto-poky/bitbake/lib/bb/event.py index 6f1cb101f..6d8493b17 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/event.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/event.py @@ -48,6 +48,16 @@ class Event(object): def __init__(self): self.pid = worker_pid + +class HeartbeatEvent(Event): + """Triggered at regular time intervals of 10 seconds. Other events can fire much more often + (runQueueTaskStarted when there are many short tasks) or not at all for long periods + of time (again runQueueTaskStarted, when there is just one long-running task), so this + event is more suitable for doing some task-independent work occassionally.""" + def __init__(self, time): + Event.__init__(self) + self.time = time + Registered = 10 AlreadyRegistered = 14 @@ -351,6 +361,17 @@ class RecipeEvent(Event): class RecipePreFinalise(RecipeEvent): """ Recipe Parsing Complete but not yet finialised""" +class RecipeTaskPreProcess(RecipeEvent): + """ + Recipe Tasks about to be finalised + The list of tasks should be final at this point and handlers + are only able to change interdependencies + """ + def __init__(self, fn, tasklist): + self.fn = fn + self.tasklist = tasklist + Event.__init__(self) + class RecipeParsed(RecipeEvent): """ Recipe Parsing Complete """ @@ -372,7 +393,7 @@ class StampUpdate(Event): targets = property(getTargets) class BuildBase(Event): - """Base class for bbmake run events""" + """Base class for bitbake build events""" def __init__(self, n, p, failures = 0): self._name = n @@ -417,13 +438,13 @@ class BuildInit(BuildBase): BuildBase.__init__(self, name, p) class BuildStarted(BuildBase, OperationStarted): - """bbmake build run started""" + """Event when builds start""" def __init__(self, n, p, failures = 0): OperationStarted.__init__(self, "Building Started") BuildBase.__init__(self, n, p, failures) class BuildCompleted(BuildBase, OperationCompleted): - """bbmake build run completed""" + """Event when builds have completed""" def __init__(self, total, n, p, failures=0, interrupted=0): if not failures: OperationCompleted.__init__(self, total, "Building Succeeded") @@ -441,6 +462,23 @@ class DiskFull(Event): self._free = freespace self._mountpoint = mountpoint +class DiskUsageSample: + def __init__(self, available_bytes, free_bytes, total_bytes): + # Number of bytes available to non-root processes. + self.available_bytes = available_bytes + # Number of bytes available to root processes. + self.free_bytes = free_bytes + # Total capacity of the volume. + self.total_bytes = total_bytes + +class MonitorDiskEvent(Event): + """If BB_DISKMON_DIRS is set, then this event gets triggered each time disk space is checked. 
+ Provides information about devices that are getting monitored.""" + def __init__(self, disk_usage): + Event.__init__(self) + # hash of device root path -> DiskUsageSample + self.disk_usage = disk_usage + class NoProvider(Event): """No Provider for an Event""" diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py index cd7362c44..b853da30b 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/__init__.py @@ -35,9 +35,9 @@ import operator import collections import subprocess import pickle +import errno import bb.persist_data, bb.utils import bb.checksum -from bb import data import bb.process __version__ = "2" @@ -355,7 +355,7 @@ def decodeurl(url): user, password, parameters). """ - m = re.compile('(?P[^:]*)://((?P[^/]+)@)?(?P[^;]+)(;(?P.*))?').match(url) + m = re.compile('(?P[^:]*)://((?P[^/;]+)@)?(?P[^;]+)(;(?P.*))?').match(url) if not m: raise MalformedUrl(url) @@ -491,7 +491,7 @@ def fetcher_init(d): Calls before this must not hit the cache. """ # When to drop SCM head revisions controlled by user policy - srcrev_policy = d.getVar('BB_SRCREV_POLICY', True) or "clear" + srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear" if srcrev_policy == "cache": logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy) elif srcrev_policy == "clear": @@ -537,7 +537,11 @@ def fetcher_compare_revisions(): return False def mirror_from_string(data): - return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ] + mirrors = (data or "").replace('\\n',' ').split() + # Split into pairs + if len(mirrors) % 2 != 0: + bb.warn('Invalid mirror data %s, should have paired members.' % data) + return list(zip(*[iter(mirrors)]*2)) def verify_checksum(ud, d, precomputed={}): """ @@ -572,7 +576,7 @@ def verify_checksum(ud, d, precomputed={}): if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected: # If strict checking enabled and neither sum defined, raise error - strict = d.getVar("BB_STRICT_CHECKSUM", True) or "0" + strict = d.getVar("BB_STRICT_CHECKSUM") or "0" if strict == "1": logger.error('No checksum specified for %s, please add at least one to the recipe:\n' 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' % @@ -621,7 +625,7 @@ def verify_donestamp(ud, d, origud=None): Returns True, if the donestamp exists and is valid, False otherwise. When returning False, any existing done stamps are removed. """ - if not ud.needdonestamp: + if not ud.needdonestamp or (origud and not origud.needdonestamp): return True if not os.path.exists(ud.donestamp): @@ -718,13 +722,13 @@ def subprocess_setup(): def get_autorev(d): # only not cache src rev in autorev case - if d.getVar('BB_SRCREV_POLICY', True) != "cache": + if d.getVar('BB_SRCREV_POLICY') != "cache": d.setVar('BB_DONT_CACHE', '1') return "AUTOINC" def get_srcrev(d, method_name='sortable_revision'): """ - Return the revsion string, usually for use in the version string (PV) of the current package + Return the revision string, usually for use in the version string (PV) of the current package Most packages usually only have one SCM so we just pass on the call. In the multi SCM case, we build a value based on SRCREV_FORMAT which must have been set. 
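Note: the pervasive "getVar(X, True)" -> "getVar(X)" churn running through the data.py, data_smart.py and fetch2 hunks above reflects an API change in DataSmart: getVar() and getVarFlag() now default to expand=True. A minimal sketch of the resulting calling convention (the variable names and values are illustrative, not taken from the patch):

    import bb.data

    d = bb.data.init()
    d.setVar('PN', 'example-package')
    d.setVar('EXAMPLE_WORKDIR', '/tmp/work/${PN}')

    expanded = d.getVar('EXAMPLE_WORKDIR')         # new default: expanded -> '/tmp/work/example-package'
    raw      = d.getVar('EXAMPLE_WORKDIR', False)  # unexpanded            -> '/tmp/work/${PN}'
    # The old spelling d.getVar('EXAMPLE_WORKDIR', True) still works; it is simply redundant now.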
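Note: the mirror_from_string() rewrite above also changes the accepted MIRRORS/PREMIRRORS format: instead of expecting one "regex replacement" pair per escaped-newline-separated line, it splits the whole value on whitespace and groups the tokens pairwise, warning when the count is odd. A standalone sketch of that pairing logic, with made-up mirror entries:

    def mirror_pairs(data):
        # Literal "\n" sequences written in conf files are treated as plain whitespace.
        mirrors = (data or "").replace('\\n', ' ').split()
        if len(mirrors) % 2 != 0:
            print('Invalid mirror data %s, should have paired members.' % data)
        # Walk the flat token list two at a time: (regex, substitution) pairs.
        return list(zip(*[iter(mirrors)] * 2))

    premirrors = 'git://.*/.* http://mirror.example.com/sources/ \\n ' \
                 'ftp://.*/.* http://mirror.example.com/sources/'
    for regex, subst in mirror_pairs(premirrors):
        print(regex, '->', subst)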
@@ -737,7 +741,7 @@ def get_srcrev(d, method_name='sortable_revision'): """ scms = [] - fetcher = Fetch(d.getVar('SRC_URI', True).split(), d) + fetcher = Fetch(d.getVar('SRC_URI').split(), d) urldata = fetcher.ud for u in urldata: if urldata[u].method.supports_srcrev(): @@ -757,7 +761,7 @@ def get_srcrev(d, method_name='sortable_revision'): # # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT # - format = d.getVar('SRCREV_FORMAT', True) + format = d.getVar('SRCREV_FORMAT') if not format: raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.") @@ -819,9 +823,18 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): if not cleanup: cleanup = [] + # If PATH contains WORKDIR which contains PV which contains SRCPV we + # can end up in circular recursion here so give the option of breaking it + # in a data store copy. + try: + d.getVar("PV") + except bb.data_smart.ExpansionError: + d = bb.data.createCopy(d) + d.setVar("PV", "fetcheravoidrecurse") + origenv = d.getVar("BB_ORIGENV", False) for var in exportvars: - val = d.getVar(var, True) or (origenv and origenv.getVar(var, True)) + val = d.getVar(var) or (origenv and origenv.getVar(var)) if val: cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd) @@ -856,12 +869,15 @@ def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None): return output -def check_network_access(d, info = "", url = None): +def check_network_access(d, info, url): """ - log remote network access, and error if BB_NO_NETWORK is set + log remote network access, and error if BB_NO_NETWORK is set or the given + URI is untrusted """ - if d.getVar("BB_NO_NETWORK", True) == "1": + if d.getVar("BB_NO_NETWORK") == "1": raise NetworkAccess(url, info) + elif not trusted_network(d, url): + raise UntrustedUrl(url, info) else: logger.debug(1, "Fetcher accessed the network with the command %s" % info) @@ -958,7 +974,7 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): # We may be obtaining a mirror tarball which needs further processing by the real fetcher # If that tarball is a local file:// we need to provide a symlink to it - dldir = ld.getVar("DL_DIR", True) + dldir = ld.getVar("DL_DIR") if origud.mirrortarball and os.path.basename(ud.localpath) == os.path.basename(origud.mirrortarball) \ and os.path.basename(ud.localpath) != os.path.basename(origud.localpath): # Create donestamp in old format to avoid triggering a re-download @@ -967,7 +983,14 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): open(ud.donestamp, 'w').close() dest = os.path.join(dldir, os.path.basename(ud.localpath)) if not os.path.exists(dest): - os.symlink(ud.localpath, dest) + # In case this is executing without any file locks held (as is + # the case for file:// URLs), two tasks may end up here at the + # same time, in which case we do not want the second task to + # fail when the link has already been created by the first task. + try: + os.symlink(ud.localpath, dest) + except FileExistsError: + pass if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld): origud.method.download(origud, ld) if hasattr(origud.method,"build_mirror_data"): @@ -979,13 +1002,23 @@ def try_mirror_url(fetch, origud, ud, ld, check = False): # Broken symbolic link os.unlink(origud.localpath) - os.symlink(ud.localpath, origud.localpath) + # As per above, in case two tasks end up here simultaneously. 
+ try: + os.symlink(ud.localpath, origud.localpath) + except FileExistsError: + pass update_stamp(origud, ld) return ud.localpath except bb.fetch2.NetworkAccess: raise + except IOError as e: + if e.errno in [os.errno.ESTALE]: + logger.warn("Stale Error Observed %s." % ud.url) + return False + raise + except bb.fetch2.BBFetchException as e: if isinstance(e, ChecksumError): logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url)) @@ -1032,14 +1065,14 @@ def trusted_network(d, url): BB_ALLOWED_NETWORKS is set globally or for a specific recipe. Note: modifies SRC_URI & mirrors. """ - if d.getVar('BB_NO_NETWORK', True) == "1": + if d.getVar('BB_NO_NETWORK') == "1": return True pkgname = d.expand(d.getVar('PN', False)) trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False) if not trusted_hosts: - trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS', True) + trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS') # Not enabled. if not trusted_hosts: @@ -1071,7 +1104,7 @@ def srcrev_internal_helper(ud, d, name): """ srcrev = None - pn = d.getVar("PN", True) + pn = d.getVar("PN") attempts = [] if name != '' and pn: attempts.append("SRCREV_%s_pn-%s" % (name, pn)) @@ -1082,7 +1115,7 @@ def srcrev_internal_helper(ud, d, name): attempts.append("SRCREV") for a in attempts: - srcrev = d.getVar(a, True) + srcrev = d.getVar(a) if srcrev and srcrev != "INVALID": break @@ -1115,7 +1148,7 @@ def get_checksum_file_list(d): """ fetch = Fetch([], d, cache = False, localonly = True) - dl_dir = d.getVar('DL_DIR', True) + dl_dir = d.getVar('DL_DIR') filelist = [] for u in fetch.urls: ud = fetch.ud[u] @@ -1129,9 +1162,9 @@ def get_checksum_file_list(d): if f.startswith(dl_dir): # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else if os.path.exists(f): - bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN', True), os.path.basename(f))) + bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f))) else: - bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN', True), os.path.basename(f))) + bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f))) filelist.append(f + ":" + str(os.path.exists(f))) return " ".join(filelist) @@ -1160,7 +1193,7 @@ class FetchData(object): self.mirrortarball = None self.basename = None self.basepath = None - (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d)) + (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url)) self.date = self.getSRCDate(d) self.url = url if not self.user and "user" in self.parm: @@ -1177,16 +1210,16 @@ class FetchData(object): self.sha256_name = "sha256sum" if self.md5_name in self.parm: self.md5_expected = self.parm[self.md5_name] - elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]: + elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]: self.md5_expected = None else: - self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name, True) + self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name) if self.sha256_name in self.parm: self.sha256_expected = self.parm[self.sha256_name] - elif self.type not in ["http", "https", "ftp", "ftps", "sftp"]: + elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]: 
self.sha256_expected = None else: - self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name, True) + self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name) self.ignore_checksums = False self.names = self.parm.get("name",'default').split(',') @@ -1204,7 +1237,7 @@ class FetchData(object): raise NonLocalMethod() if self.parm.get("proto", None) and "protocol" not in self.parm: - logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN', True)) + logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN')) self.parm["protocol"] = self.parm.get("proto", None) if hasattr(self.method, "urldata_init"): @@ -1217,7 +1250,7 @@ class FetchData(object): elif self.localfile: self.localpath = self.method.localpath(self, d) - dldir = d.getVar("DL_DIR", True) + dldir = d.getVar("DL_DIR") if not self.needdonestamp: return @@ -1235,7 +1268,7 @@ class FetchData(object): self.donestamp = basepath + '.done' self.lockfile = basepath + '.lock' - def setup_revisons(self, d): + def setup_revisions(self, d): self.revisions = {} for name in self.names: self.revisions[name] = srcrev_internal_helper(self, d, name) @@ -1257,12 +1290,12 @@ class FetchData(object): if "srcdate" in self.parm: return self.parm['srcdate'] - pn = d.getVar("PN", True) + pn = d.getVar("PN") if pn: - return d.getVar("SRCDATE_%s" % pn, True) or d.getVar("SRCDATE", True) or d.getVar("DATE", True) + return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE") - return d.getVar("SRCDATE", True) or d.getVar("DATE", True) + return d.getVar("SRCDATE") or d.getVar("DATE") class FetchMethod(object): """Base class for 'fetch'ing data""" @@ -1282,7 +1315,7 @@ class FetchMethod(object): Can also setup variables in urldata for use in go (saving code duplication and duplicate code execution) """ - return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile) + return os.path.join(d.getVar("DL_DIR"), urldata.localfile) def supports_checksum(self, urldata): """ @@ -1382,6 +1415,10 @@ class FetchMethod(object): cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file elif file.endswith('.lz'): cmd = 'lzip -dc %s > %s' % (file, efile) + elif file.endswith('.tar.7z'): + cmd = '7z x -so %s | tar x --no-same-owner -f -' % file + elif file.endswith('.7z'): + cmd = '7za x -y %s 1>/dev/null' % file elif file.endswith('.zip') or file.endswith('.jar'): try: dos = bb.utils.to_boolean(urldata.parm.get('dos'), False) @@ -1413,10 +1450,6 @@ class FetchMethod(object): else: raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url) cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile) - elif file.endswith('.tar.7z'): - cmd = '7z x -so %s | tar xf - ' % file - elif file.endswith('.7z'): - cmd = '7za x -y %s 1>/dev/null' % file # If 'subdir' param exists, create a dir and use it as destination for unpack cmd if 'subdir' in urldata.parm: @@ -1450,7 +1483,7 @@ class FetchMethod(object): if not cmd: return - path = data.getVar('PATH', True) + path = data.getVar('PATH') if path: cmd = "PATH=\"%s\" %s" % (path, cmd) bb.note("Unpacking %s to %s/" % (file, unpackdir)) @@ -1507,7 +1540,7 @@ class FetchMethod(object): def generate_revision_key(self, ud, d, name): key = self._revision_key(ud, d, name) - return "%s-%s" % (key, d.getVar("PN", True) or "") + return "%s-%s" % (key, d.getVar("PN") or "") class Fetch(object): def __init__(self, urls, d, cache = True, localonly = False, 
connection_cache = None): @@ -1515,14 +1548,14 @@ class Fetch(object): raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time") if len(urls) == 0: - urls = d.getVar("SRC_URI", True).split() + urls = d.getVar("SRC_URI").split() self.urls = urls self.d = d self.ud = {} self.connection_cache = connection_cache - fn = d.getVar('FILE', True) - mc = d.getVar('__BBMULTICONFIG', True) or "" + fn = d.getVar('FILE') + mc = d.getVar('__BBMULTICONFIG') or "" if cache and fn and mc + fn in urldata_cache: self.ud = urldata_cache[mc + fn] @@ -1565,8 +1598,8 @@ class Fetch(object): if not urls: urls = self.urls - network = self.d.getVar("BB_NO_NETWORK", True) - premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY", True) == "1") + network = self.d.getVar("BB_NO_NETWORK") + premirroronly = (self.d.getVar("BB_FETCH_PREMIRRORONLY") == "1") for u in urls: ud = self.ud[u] @@ -1584,8 +1617,17 @@ class Fetch(object): localpath = ud.localpath elif m.try_premirror(ud, self.d): logger.debug(1, "Trying PREMIRRORS") - mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) + mirrors = mirror_from_string(self.d.getVar('PREMIRRORS')) localpath = try_mirrors(self, self.d, ud, mirrors, False) + if localpath: + try: + # early checksum verification so that if the checksum of the premirror + # contents mismatch the fetcher can still try upstream and mirrors + update_stamp(ud, self.d) + except ChecksumError as e: + logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u) + logger.debug(1, str(e)) + localpath = "" if premirroronly: self.d.setVar("BB_NO_NETWORK", "1") @@ -1624,7 +1666,7 @@ class Fetch(object): if not verified_stamp: m.clean(ud, self.d) logger.debug(1, "Trying MIRRORS") - mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) + mirrors = mirror_from_string(self.d.getVar('MIRRORS')) localpath = try_mirrors(self, self.d, ud, mirrors) if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1): @@ -1634,6 +1676,11 @@ class Fetch(object): update_stamp(ud, self.d) + except IOError as e: + if e.errno in [os.errno.ESTALE]: + logger.error("Stale Error Observed %s." % u) + raise ChecksumError("Stale Error Detected") + except BBFetchException as e: if isinstance(e, ChecksumError): logger.error("Checksum failure fetching %s" % u) @@ -1657,7 +1704,7 @@ class Fetch(object): m = ud.method logger.debug(1, "Testing URL %s", u) # First try checking uri, u, from PREMIRRORS - mirrors = mirror_from_string(self.d.getVar('PREMIRRORS', True)) + mirrors = mirror_from_string(self.d.getVar('PREMIRRORS')) ret = try_mirrors(self, self.d, ud, mirrors, True) if not ret: # Next try checking from the original uri, u @@ -1665,7 +1712,7 @@ class Fetch(object): ret = m.checkstatus(self, ud, self.d) except: # Finally, try checking uri, u, from MIRRORS - mirrors = mirror_from_string(self.d.getVar('MIRRORS', True)) + mirrors = mirror_from_string(self.d.getVar('MIRRORS')) ret = try_mirrors(self, self.d, ud, mirrors, True) if not ret: @@ -1763,6 +1810,7 @@ from . import svn from . import wget from . import ssh from . import sftp +from . import s3 from . import perforce from . import bzr from . 
import hg @@ -1780,6 +1828,7 @@ methods.append(gitannex.GitANNEX()) methods.append(cvs.Cvs()) methods.append(ssh.SSH()) methods.append(sftp.SFTP()) +methods.append(s3.S3()) methods.append(perforce.Perforce()) methods.append(bzr.Bzr()) methods.append(hg.Hg()) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py index 72264afb5..16123f8af 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/bzr.py @@ -27,7 +27,6 @@ import os import sys import logging import bb -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import FetchError from bb.fetch2 import runfetchcmd @@ -43,14 +42,14 @@ class Bzr(FetchMethod): """ # Create paths to bzr checkouts relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath) + ud.pkgdir = os.path.join(d.expand('${BZRDIR}'), ud.host, relpath) - ud.setup_revisons(d) + ud.setup_revisions(d) if not ud.revision: ud.revision = self.latest_revision(ud, d) - ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d) + ud.localfile = d.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision)) def _buildbzrcommand(self, ud, d, command): """ @@ -58,7 +57,7 @@ class Bzr(FetchMethod): command is "fetch", "update", "revno" """ - basecmd = data.expand('${FETCHCMD_bzr}', d) + basecmd = d.expand('${FETCHCMD_bzr}') proto = ud.parm.get('protocol', 'http') diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py index 70e280a8d..36beab6a5 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/clearcase.py @@ -65,7 +65,6 @@ import os import sys import shutil import bb -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import FetchError from bb.fetch2 import runfetchcmd @@ -108,13 +107,13 @@ class ClearCase(FetchMethod): else: ud.module = "" - ud.basecmd = d.getVar("FETCHCMD_ccrc", True) or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool") + ud.basecmd = d.getVar("FETCHCMD_ccrc") or spawn.find_executable("cleartool") or spawn.find_executable("rcleartool") - if data.getVar("SRCREV", d, True) == "INVALID": + if d.getVar("SRCREV") == "INVALID": raise FetchError("Set a valid SRCREV for the clearcase fetcher in your recipe, e.g. 
SRCREV = \"/main/LATEST\" or any other label of your choice.") ud.label = d.getVar("SRCREV", False) - ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC", True) + ud.customspec = d.getVar("CCASE_CUSTOM_CONFIG_SPEC") ud.server = "%s://%s%s" % (ud.proto, ud.host, ud.path) @@ -124,7 +123,7 @@ class ClearCase(FetchMethod): ud.viewname = "%s-view%s" % (ud.identifier, d.getVar("DATETIME", d, True)) ud.csname = "%s-config-spec" % (ud.identifier) - ud.ccasedir = os.path.join(data.getVar("DL_DIR", d, True), ud.type) + ud.ccasedir = os.path.join(d.getVar("DL_DIR"), ud.type) ud.viewdir = os.path.join(ud.ccasedir, ud.viewname) ud.configspecfile = os.path.join(ud.ccasedir, ud.csname) ud.localfile = "%s.tar.gz" % (ud.identifier) @@ -144,7 +143,7 @@ class ClearCase(FetchMethod): self.debug("configspecfile = %s" % ud.configspecfile) self.debug("localfile = %s" % ud.localfile) - ud.localfile = os.path.join(data.getVar("DL_DIR", d, True), ud.localfile) + ud.localfile = os.path.join(d.getVar("DL_DIR"), ud.localfile) def _build_ccase_command(self, ud, command): """ diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py index 5ff70ba92..490c95471 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/cvs.py @@ -63,7 +63,7 @@ class Cvs(FetchMethod): if 'fullpath' in ud.parm: fullpath = '_fullpath' - ud.localfile = bb.data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d) + ud.localfile = d.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath)) def need_update(self, ud, d): if (ud.date == "now"): @@ -87,10 +87,10 @@ class Cvs(FetchMethod): cvsroot = ud.path else: cvsroot = ":" + method - cvsproxyhost = d.getVar('CVS_PROXY_HOST', True) + cvsproxyhost = d.getVar('CVS_PROXY_HOST') if cvsproxyhost: cvsroot += ";proxy=" + cvsproxyhost - cvsproxyport = d.getVar('CVS_PROXY_PORT', True) + cvsproxyport = d.getVar('CVS_PROXY_PORT') if cvsproxyport: cvsroot += ";proxyport=" + cvsproxyport cvsroot += ":" + ud.user @@ -110,7 +110,7 @@ class Cvs(FetchMethod): if ud.tag: options.append("-r %s" % ud.tag) - cvsbasecmd = d.getVar("FETCHCMD_cvs", True) + cvsbasecmd = d.getVar("FETCHCMD_cvs") cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(options) + " " + ud.module cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(options) @@ -120,8 +120,8 @@ class Cvs(FetchMethod): # create module directory logger.debug(2, "Fetch: checking for module directory") - pkg = d.getVar('PN', True) - pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg) + pkg = d.getVar('PN') + pkgdir = os.path.join(d.getVar('CVSDIR'), pkg) moddir = os.path.join(pkgdir, localdir) workdir = None if os.access(os.path.join(moddir, 'CVS'), os.R_OK): @@ -164,8 +164,8 @@ class Cvs(FetchMethod): def clean(self, ud, d): """ Clean CVS Files and tarballs """ - pkg = d.getVar('PN', True) - pkgdir = os.path.join(d.getVar("CVSDIR", True), pkg) + pkg = d.getVar('PN') + pkgdir = os.path.join(d.getVar("CVSDIR"), pkg) bb.utils.remove(pkgdir, True) bb.utils.remove(ud.localpath) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py index 792c18376..7442f8441 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/git.py @@ -50,7 +50,7 @@ Supported SRC_URI options are: The 
default is "0", set nobranch=1 if needed. - usehead - For local git:// urls to use the current branch HEAD as the revsion for use with + For local git:// urls to use the current branch HEAD as the revision for use with AUTOREV. Implies nobranch. """ @@ -76,7 +76,6 @@ import re import bb import errno import bb.progress -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import runfetchcmd from bb.fetch2 import logger @@ -174,19 +173,19 @@ class Git(FetchMethod): if len(branches) != len(ud.names): raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url) ud.branches = {} - for name in ud.names: - branch = branches[ud.names.index(name)] + for pos, name in enumerate(ud.names): + branch = branches[pos] ud.branches[name] = branch ud.unresolvedrev[name] = branch if ud.usehead: ud.unresolvedrev['default'] = 'HEAD' - ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git -c core.fsyncobjectfiles=0" + ud.basecmd = d.getVar("FETCHCMD_git") or "git -c core.fsyncobjectfiles=0" - ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") or ud.rebaseable + ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0") or ud.rebaseable - ud.setup_revisons(d) + ud.setup_revisions(d) for name in ud.names: # Ensure anything that doesn't look like a sha256 checksum/revision is translated into one @@ -206,9 +205,9 @@ class Git(FetchMethod): if ud.rebaseable: for name in ud.names: gitsrcname = gitsrcname + '_' + ud.revisions[name] - ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname) - ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball) - gitdir = d.getVar("GITDIR", True) or (d.getVar("DL_DIR", True) + "/git2/") + ud.mirrortarball = 'git2_%s.tar.gz' % gitsrcname + ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball) + gitdir = d.getVar("GITDIR") or (d.getVar("DL_DIR") + "/git2/") ud.clonedir = os.path.join(gitdir, gitsrcname) ud.localfile = ud.clonedir @@ -229,7 +228,7 @@ class Git(FetchMethod): def try_premirror(self, ud, d): # If we don't do this, updating an existing checkout with only premirrors # is not possible - if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None: + if d.getVar("BB_FETCH_PREMIRRORONLY") is not None: return True if os.path.exists(ud.clonedir): return False @@ -241,7 +240,7 @@ class Git(FetchMethod): # If the checkout doesn't exist and the mirror tarball does, extract it if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror): bb.utils.mkdirhier(ud.clonedir) - runfetchcmd("tar -xzf %s" % (ud.fullmirror), d, workdir=ud.clonedir) + runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir) repourl = self._get_repo_url(ud) @@ -252,7 +251,7 @@ class Git(FetchMethod): repourl = repourl[7:] clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (ud.basecmd, repourl, ud.clonedir) if ud.proto.lower() != 'file': - bb.fetch2.check_network_access(d, clone_cmd) + bb.fetch2.check_network_access(d, clone_cmd, ud.url) progresshandler = GitProgressHandler(d) runfetchcmd(clone_cmd, d, log=progresshandler) @@ -292,15 +291,15 @@ class Git(FetchMethod): os.unlink(ud.fullmirror) logger.info("Creating tarball of git repository") - runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d, workdir=ud.clonedir) - runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.clonedir) + runfetchcmd("tar -czf %s ." 
% ud.fullmirror, d, workdir=ud.clonedir) + runfetchcmd("touch %s.done" % ud.fullmirror, d) def unpack(self, ud, destdir, d): """ unpack the downloaded src to destdir""" subdir = ud.parm.get("subpath", "") if subdir != "": - readpathspec = ":%s" % (subdir) + readpathspec = ":%s" % subdir def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/')) else: readpathspec = "" @@ -380,14 +379,26 @@ class Git(FetchMethod): """ Run git ls-remote with the specified search string """ - repourl = self._get_repo_url(ud) - cmd = "%s ls-remote %s %s" % \ - (ud.basecmd, repourl, search) - if ud.proto.lower() != 'file': - bb.fetch2.check_network_access(d, cmd) - output = runfetchcmd(cmd, d, True) - if not output: - raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url) + # Prevent recursion e.g. in OE if SRCPV is in PV, PV is in WORKDIR, + # and WORKDIR is in PATH (as a result of RSS), our call to + # runfetchcmd() exports PATH so this function will get called again (!) + # In this scenario the return call of the function isn't actually + # important - WORKDIR isn't needed in PATH to call git ls-remote + # anyway. + if d.getVar('_BB_GIT_IN_LSREMOTE', False): + return '' + d.setVar('_BB_GIT_IN_LSREMOTE', '1') + try: + repourl = self._get_repo_url(ud) + cmd = "%s ls-remote %s %s" % \ + (ud.basecmd, repourl, search) + if ud.proto.lower() != 'file': + bb.fetch2.check_network_access(d, cmd, repourl) + output = runfetchcmd(cmd, d, True) + if not output: + raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, ud.url) + finally: + d.delVar('_BB_GIT_IN_LSREMOTE') return output def _latest_revision(self, ud, d, name): @@ -418,7 +429,7 @@ class Git(FetchMethod): """ pupver = ('', '') - tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX', True) or "(?P([0-9][\.|_]?)+)") + tagregex = re.compile(d.getVar('UPSTREAM_CHECK_GITTAGREGEX') or "(?P([0-9][\.|_]?)+)") try: output = self._lsremote(ud, d, "refs/tags/*") except bb.fetch2.FetchError or bb.fetch2.NetworkAccess: @@ -470,7 +481,7 @@ class Git(FetchMethod): if not os.path.exists(rev_file) or not os.path.getsize(rev_file): from pipes import quote commits = bb.fetch2.runfetchcmd( - "git rev-list %s -- | wc -l" % (quote(rev)), + "git rev-list %s -- | wc -l" % quote(rev), d, quiet=True).strip().lstrip('0') if commits: open(rev_file, "w").write("%d\n" % int(commits)) @@ -485,5 +496,5 @@ class Git(FetchMethod): try: self._lsremote(ud, d, "") return True - except FetchError: + except bb.fetch2.FetchError: return False diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py index 4937a1089..c66c21142 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitannex.py @@ -22,7 +22,6 @@ BitBake 'Fetch' git annex implementation import os import bb -from bb import data from bb.fetch2.git import Git from bb.fetch2 import runfetchcmd from bb.fetch2 import logger diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py index 661376204..a95584c82 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/gitsm.py @@ -31,7 +31,6 @@ NOTE: Switching a SRC_URI from "git://" to "gitsm://" requires a clean of your r import os import bb -from bb import data from bb.fetch2.git import Git from bb.fetch2 import runfetchcmd from bb.fetch2 import logger 
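Note: the _lsremote() change in the git fetcher above introduces a re-entrancy guard: runfetchcmd() exports PATH, and in OE a PATH containing WORKDIR/PV/SRCPV can expand back into another ls-remote, so the method records a sentinel variable in the datastore and returns early when it is already set, clearing it in a finally block. A minimal sketch of the pattern with a stand-in datastore, since only getVar/setVar/delVar are needed (names other than _BB_GIT_IN_LSREMOTE are illustrative):

    class FakeDataStore:
        # Just enough of the bitbake datastore interface for this sketch.
        def __init__(self):
            self._vars = {}
        def getVar(self, var, expand=True):
            return self._vars.get(var)
        def setVar(self, var, value):
            self._vars[var] = value
        def delVar(self, var):
            self._vars.pop(var, None)

    def guarded_ls_remote(d, run_cmd):
        if d.getVar('_BB_GIT_IN_LSREMOTE', False):
            # Already inside a call triggered by our own expansion: bail out.
            return ''
        d.setVar('_BB_GIT_IN_LSREMOTE', '1')
        try:
            return run_cmd()
        finally:
            # Clear the flag so later, legitimate calls still run.
            d.delVar('_BB_GIT_IN_LSREMOTE')

    d = FakeDataStore()
    print(guarded_ls_remote(d, lambda: 'refs/heads/master'))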
@@ -108,7 +107,7 @@ class GitSM(Git): os.rename(ud.clonedir, gitdir) runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d) runfetchcmd(ud.basecmd + " reset --hard", d, workdir=tmpclonedir) - runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir) + runfetchcmd(ud.basecmd + " checkout -f " + ud.revisions[ud.names[0]], d, workdir=tmpclonedir) runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=tmpclonedir) self._set_relative_paths(tmpclonedir) runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d, workdir=tmpclonedir) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py index 20df8016d..b5f268601 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/hg.py @@ -29,7 +29,6 @@ import sys import logging import bb import errno -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import FetchError from bb.fetch2 import MissingParameterError @@ -67,7 +66,7 @@ class Hg(FetchMethod): else: ud.proto = "hg" - ud.setup_revisons(d) + ud.setup_revisions(d) if 'rev' in ud.parm: ud.revision = ud.parm['rev'] @@ -78,15 +77,15 @@ class Hg(FetchMethod): hgsrcname = '%s_%s_%s' % (ud.module.replace('/', '.'), \ ud.host, ud.path.replace('/', '.')) ud.mirrortarball = 'hg_%s.tar.gz' % hgsrcname - ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball) + ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball) - hgdir = d.getVar("HGDIR", True) or (d.getVar("DL_DIR", True) + "/hg/") + hgdir = d.getVar("HGDIR") or (d.getVar("DL_DIR") + "/hg/") ud.pkgdir = os.path.join(hgdir, hgsrcname) ud.moddir = os.path.join(ud.pkgdir, ud.module) ud.localfile = ud.moddir - ud.basecmd = data.getVar("FETCHCMD_hg", d, True) or "/usr/bin/env hg" + ud.basecmd = d.getVar("FETCHCMD_hg") or "/usr/bin/env hg" - ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS", True) + ud.write_tarballs = d.getVar("BB_GENERATE_MIRROR_TARBALLS") def need_update(self, ud, d): revTag = ud.parm.get('rev', 'tip') @@ -99,7 +98,7 @@ class Hg(FetchMethod): def try_premirror(self, ud, d): # If we don't do this, updating an existing checkout with only premirrors # is not possible - if d.getVar("BB_FETCH_PREMIRRORONLY", True) is not None: + if d.getVar("BB_FETCH_PREMIRRORONLY") is not None: return True if os.path.exists(ud.moddir): return False @@ -221,7 +220,7 @@ class Hg(FetchMethod): """ Compute tip revision for the url """ - bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info")) + bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"), ud.url) output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d) return output.strip() diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py index 51ca78d12..a114ac12e 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/local.py @@ -29,7 +29,6 @@ import os import urllib.request, urllib.parse, urllib.error import bb import bb.utils -from bb import data from bb.fetch2 import FetchMethod, FetchError from bb.fetch2 import logger @@ -63,17 +62,11 @@ class Local(FetchMethod): newpath = path if path[0] == "/": return [path] - filespath = data.getVar('FILESPATH', d, True) + filespath = d.getVar('FILESPATH') if filespath: logger.debug(2, "Searching for %s in paths:\n %s" % 
(path, "\n ".join(filespath.split(":")))) newpath, hist = bb.utils.which(filespath, path, history=True) searched.extend(hist) - if not newpath: - filesdir = data.getVar('FILESDIR', d, True) - if filesdir: - logger.debug(2, "Searching for %s in path: %s" % (path, filesdir)) - newpath = os.path.join(filesdir, path) - searched.append(newpath) if (not newpath or not os.path.exists(newpath)) and path.find("*") != -1: # For expressions using '*', best we can do is take the first directory in FILESPATH that exists newpath, hist = bb.utils.which(filespath, ".", history=True) @@ -81,7 +74,7 @@ class Local(FetchMethod): logger.debug(2, "Searching for %s in path: %s" % (path, newpath)) return searched if not os.path.exists(newpath): - dldirfile = os.path.join(d.getVar("DL_DIR", True), path) + dldirfile = os.path.join(d.getVar("DL_DIR"), path) logger.debug(2, "Defaulting to %s for %s" % (dldirfile, path)) bb.utils.mkdirhier(os.path.dirname(dldirfile)) searched.append(dldirfile) @@ -100,13 +93,10 @@ class Local(FetchMethod): # no need to fetch local files, we'll deal with them in place. if self.supports_checksum(urldata) and not os.path.exists(urldata.localpath): locations = [] - filespath = data.getVar('FILESPATH', d, True) + filespath = d.getVar('FILESPATH') if filespath: locations = filespath.split(":") - filesdir = data.getVar('FILESDIR', d, True) - if filesdir: - locations.append(filesdir) - locations.append(d.getVar("DL_DIR", True)) + locations.append(d.getVar("DL_DIR")) msg = "Unable to find file " + urldata.url + " anywhere. The paths that were searched were:\n " + "\n ".join(locations) raise FetchError(msg) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py index 699ae72e0..73a75fe98 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/npm.py @@ -25,7 +25,6 @@ import json import subprocess import signal import bb -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import FetchError from bb.fetch2 import ChecksumError @@ -80,6 +79,7 @@ class Npm(FetchMethod): if not ud.version: raise ParameterError("NPM fetcher requires a version parameter", ud.url) ud.bbnpmmanifest = "%s-%s.deps.json" % (ud.pkgname, ud.version) + ud.bbnpmmanifest = ud.bbnpmmanifest.replace('/', '-') ud.registry = "http://%s" % (ud.url.replace('npm://', '', 1).split(';'))[0] prefixdir = "npm/%s" % ud.pkgname ud.pkgdatadir = d.expand("${DL_DIR}/%s" % prefixdir) @@ -87,12 +87,13 @@ class Npm(FetchMethod): bb.utils.mkdirhier(ud.pkgdatadir) ud.localpath = d.expand("${DL_DIR}/npm/%s" % ud.bbnpmmanifest) - self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate " + self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -O -t 2 -T 30 -nv --passive-ftp --no-check-certificate " ud.prefixdir = prefixdir - ud.write_tarballs = ((data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0") + ud.write_tarballs = ((d.getVar("BB_GENERATE_MIRROR_TARBALLS") or "0") != "0") ud.mirrortarball = 'npm_%s-%s.tar.xz' % (ud.pkgname, ud.version) - ud.fullmirror = os.path.join(d.getVar("DL_DIR", True), ud.mirrortarball) + ud.mirrortarball = ud.mirrortarball.replace('/', '-') + ud.fullmirror = os.path.join(d.getVar("DL_DIR"), ud.mirrortarball) def need_update(self, ud, d): if os.path.exists(ud.localpath): @@ -101,8 +102,8 @@ class Npm(FetchMethod): def _runwget(self, ud, d, command, quiet): logger.debug(2, "Fetching %s 
using command '%s'" % (ud.url, command)) - bb.fetch2.check_network_access(d, command) - dldir = d.getVar("DL_DIR", True) + bb.fetch2.check_network_access(d, command, ud.url) + dldir = d.getVar("DL_DIR") runfetchcmd(command, d, quiet, workdir=dldir) def _unpackdep(self, ud, pkg, data, destdir, dldir, d): @@ -116,7 +117,7 @@ class Npm(FetchMethod): # Change to subdir before executing command if not os.path.exists(destdir): os.makedirs(destdir) - path = d.getVar('PATH', True) + path = d.getVar('PATH') if path: cmd = "PATH=\"%s\" %s" % (path, cmd) bb.note("Unpacking %s to %s/" % (file, destdir)) @@ -132,9 +133,8 @@ class Npm(FetchMethod): def unpack(self, ud, destdir, d): - dldir = d.getVar("DL_DIR", True) - depdumpfile = "%s-%s.deps.json" % (ud.pkgname, ud.version) - with open("%s/npm/%s" % (dldir, depdumpfile)) as datafile: + dldir = d.getVar("DL_DIR") + with open("%s/npm/%s" % (dldir, ud.bbnpmmanifest)) as datafile: workobj = json.load(datafile) dldir = "%s/%s" % (os.path.dirname(ud.localpath), ud.pkgname) @@ -182,7 +182,12 @@ class Npm(FetchMethod): if pkg_os: if not isinstance(pkg_os, list): pkg_os = [pkg_os] - if 'linux' not in pkg_os or '!linux' in pkg_os: + blacklist = False + for item in pkg_os: + if item.startswith('!'): + blacklist = True + break + if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os: logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg) return #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile)) @@ -195,6 +200,7 @@ class Npm(FetchMethod): dependencies = pdata.get('dependencies', {}) optionalDependencies = pdata.get('optionalDependencies', {}) + dependencies.update(optionalDependencies) depsfound = {} optdepsfound = {} data[pkg]['deps'] = {} @@ -251,24 +257,30 @@ class Npm(FetchMethod): lockdown = {} if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror): - dest = d.getVar("DL_DIR", True) + dest = d.getVar("DL_DIR") bb.utils.mkdirhier(dest) runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest) return - shwrf = d.getVar('NPM_SHRINKWRAP', True) + shwrf = d.getVar('NPM_SHRINKWRAP') logger.debug(2, "NPM shrinkwrap file is %s" % shwrf) - try: - with open(shwrf) as datafile: - shrinkobj = json.load(datafile) - except: + if shwrf: + try: + with open(shwrf) as datafile: + shrinkobj = json.load(datafile) + except Exception as e: + raise FetchError('Error loading NPM_SHRINKWRAP file "%s" for %s: %s' % (shwrf, ud.pkgname, str(e))) + elif not ud.ignore_checksums: logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname) - lckdf = d.getVar('NPM_LOCKDOWN', True) + lckdf = d.getVar('NPM_LOCKDOWN') logger.debug(2, "NPM lockdown file is %s" % lckdf) - try: - with open(lckdf) as datafile: - lockdown = json.load(datafile) - except: + if lckdf: + try: + with open(lckdf) as datafile: + lockdown = json.load(datafile) + except Exception as e: + raise FetchError('Error loading NPM_LOCKDOWN file "%s" for %s: %s' % (lckdf, ud.pkgname, str(e))) + elif not ud.ignore_checksums: logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' 
% ud.pkgname) if ('name' not in shrinkobj): @@ -286,7 +298,7 @@ class Npm(FetchMethod): if os.path.islink(ud.fullmirror): os.unlink(ud.fullmirror) - dldir = d.getVar("DL_DIR", True) + dldir = d.getVar("DL_DIR") logger.info("Creating tarball of npm data") runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d, workdir=dldir) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py index 295abf953..2b4f7d9c1 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/osc.py @@ -10,7 +10,6 @@ import os import sys import logging import bb -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import FetchError from bb.fetch2 import MissingParameterError @@ -34,7 +33,7 @@ class Osc(FetchMethod): # Create paths to osc checkouts relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(d.getVar('OSCDIR', True), ud.host) + ud.pkgdir = os.path.join(d.getVar('OSCDIR'), ud.host) ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module) if 'rev' in ud.parm: @@ -47,7 +46,7 @@ class Osc(FetchMethod): else: ud.revision = "" - ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d) + ud.localfile = d.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision)) def _buildosccommand(self, ud, d, command): """ @@ -55,7 +54,7 @@ class Osc(FetchMethod): command is "fetch", "update", "info" """ - basecmd = data.expand('${FETCHCMD_osc}', d) + basecmd = d.expand('${FETCHCMD_osc}') proto = ud.parm.get('protocol', 'ocs') @@ -84,7 +83,7 @@ class Osc(FetchMethod): logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'") - if os.access(os.path.join(d.getVar('OSCDIR', True), ud.path, ud.module), os.R_OK): + if os.access(os.path.join(d.getVar('OSCDIR'), ud.path, ud.module), os.R_OK): oscupdatecmd = self._buildosccommand(ud, d, "update") logger.info("Update "+ ud.url) # update sources there @@ -112,7 +111,7 @@ class Osc(FetchMethod): Generate a .oscrc to be used for this run. """ - config_path = os.path.join(d.getVar('OSCDIR', True), "oscrc") + config_path = os.path.join(d.getVar('OSCDIR'), "oscrc") if (os.path.exists(config_path)): os.remove(config_path) @@ -121,8 +120,8 @@ class Osc(FetchMethod): f.write("apisrv = %s\n" % ud.host) f.write("scheme = http\n") f.write("su-wrapper = su -c\n") - f.write("build-root = %s\n" % d.getVar('WORKDIR', True)) - f.write("urllist = %s\n" % d.getVar("OSCURLLIST", True)) + f.write("build-root = %s\n" % d.getVar('WORKDIR')) + f.write("urllist = %s\n" % d.getVar("OSCURLLIST")) f.write("extra-pkgs = gzip\n") f.write("\n") f.write("[%s]\n" % ud.host) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py index 50cb47909..3debad59f 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/perforce.py @@ -26,7 +26,6 @@ BitBake 'Fetch' implementation for perforce import os import logging import bb -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import FetchError from bb.fetch2 import logger @@ -44,13 +43,13 @@ class Perforce(FetchMethod): provided by the env, use it. If P4PORT is specified by the recipe, use its values, which may override the settings in P4CONFIG. 
""" - ud.basecmd = d.getVar('FETCHCMD_p4', True) + ud.basecmd = d.getVar('FETCHCMD_p4') if not ud.basecmd: ud.basecmd = "/usr/bin/env p4" - ud.dldir = d.getVar('P4DIR', True) + ud.dldir = d.getVar('P4DIR') if not ud.dldir: - ud.dldir = '%s/%s' % (d.getVar('DL_DIR', True), 'p4') + ud.dldir = '%s/%s' % (d.getVar('DL_DIR'), 'p4') path = ud.url.split('://')[1] path = path.split(';')[0] @@ -62,7 +61,7 @@ class Perforce(FetchMethod): ud.path = path ud.usingp4config = False - p4port = d.getVar('P4PORT', True) + p4port = d.getVar('P4PORT') if p4port: logger.debug(1, 'Using recipe provided P4PORT: %s' % p4port) @@ -71,7 +70,7 @@ class Perforce(FetchMethod): logger.debug(1, 'Trying to use P4CONFIG to automatically set P4PORT...') ud.usingp4config = True p4cmd = '%s info | grep "Server address"' % ud.basecmd - bb.fetch2.check_network_access(d, p4cmd) + bb.fetch2.check_network_access(d, p4cmd, ud.url) ud.host = runfetchcmd(p4cmd, d, True) ud.host = ud.host.split(': ')[1].strip() logger.debug(1, 'Determined P4PORT to be: %s' % ud.host) @@ -87,9 +86,9 @@ class Perforce(FetchMethod): cleanedhost = ud.host.replace(':', '.') ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath) - ud.setup_revisons(d) + ud.setup_revisions(d) - ud.localfile = data.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision), d) + ud.localfile = d.expand('%s_%s_%s.tar.gz' % (cleanedhost, cleanedpath, ud.revision)) def _buildp4command(self, ud, d, command, depot_filename=None): """ @@ -140,7 +139,7 @@ class Perforce(FetchMethod): 'p4 files' command, including trailing '#rev' file revision indicator """ p4cmd = self._buildp4command(ud, d, 'files') - bb.fetch2.check_network_access(d, p4cmd) + bb.fetch2.check_network_access(d, p4cmd, ud.url) p4fileslist = runfetchcmd(p4cmd, d, True) p4fileslist = [f.rstrip() for f in p4fileslist.splitlines()] @@ -171,7 +170,7 @@ class Perforce(FetchMethod): for afile in filelist: p4fetchcmd = self._buildp4command(ud, d, 'print', afile) - bb.fetch2.check_network_access(d, p4fetchcmd) + bb.fetch2.check_network_access(d, p4fetchcmd, ud.url) runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir) runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir) @@ -191,7 +190,7 @@ class Perforce(FetchMethod): def _latest_revision(self, ud, d, name): """ Return the latest upstream scm revision number """ p4cmd = self._buildp4command(ud, d, "changes") - bb.fetch2.check_network_access(d, p4cmd) + bb.fetch2.check_network_access(d, p4cmd, ud.url) tip = runfetchcmd(p4cmd, d, True) if not tip: diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py index ecc6e68e9..1be91cc69 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/repo.py @@ -25,7 +25,6 @@ BitBake "Fetch" repo (git) implementation import os import bb -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import runfetchcmd @@ -51,17 +50,17 @@ class Repo(FetchMethod): if not ud.manifest.endswith('.xml'): ud.manifest += '.xml' - ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d) + ud.localfile = d.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch)) def download(self, ud, d): """Fetch url""" - if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK): + if os.access(os.path.join(d.getVar("DL_DIR"), ud.localfile), os.R_OK): 
logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath) return gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", ".")) - repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo") + repodir = d.getVar("REPODIR") or os.path.join(d.getVar("DL_DIR"), "repo") codir = os.path.join(repodir, gitsrcname, ud.manifest) if ud.user: diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/s3.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/s3.py new file mode 100644 index 000000000..162928862 --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/s3.py @@ -0,0 +1,98 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementation for Amazon AWS S3. + +Class for fetching files from Amazon S3 using the AWS Command Line Interface. +The aws tool must be correctly installed and configured prior to use. + +""" + +# Copyright (C) 2017, Andre McCurdy +# +# Based in part on bb.fetch2.wget: +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# Based on functions from the base bb module, Copyright 2003 Holger Schurig + +import os +import bb +import urllib.request, urllib.parse, urllib.error +from bb.fetch2 import FetchMethod +from bb.fetch2 import FetchError +from bb.fetch2 import runfetchcmd + +class S3(FetchMethod): + """Class to fetch urls via 'aws s3'""" + + def supports(self, ud, d): + """ + Check to see if a given url can be fetched with s3. + """ + return ud.type in ['s3'] + + def recommends_checksum(self, urldata): + return True + + def urldata_init(self, ud, d): + if 'downloadfilename' in ud.parm: + ud.basename = ud.parm['downloadfilename'] + else: + ud.basename = os.path.basename(ud.path) + + ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) + + ud.basecmd = d.getVar("FETCHCMD_s3") or "/usr/bin/env aws s3" + + def download(self, ud, d): + """ + Fetch urls + Assumes localpath was called first + """ + + cmd = '%s cp s3://%s%s %s' % (ud.basecmd, ud.host, ud.path, ud.localpath) + bb.fetch2.check_network_access(d, cmd, ud.url) + runfetchcmd(cmd, d) + + # Additional sanity checks copied from the wget class (although there + # are no known issues which mean these are required, treat the aws cli + # tool with a little healthy suspicion). + + if not os.path.exists(ud.localpath): + raise FetchError("The aws cp command returned success for s3://%s%s but %s doesn't exist?!" % (ud.host, ud.path, ud.localpath)) + + if os.path.getsize(ud.localpath) == 0: + os.remove(ud.localpath) + raise FetchError("The aws cp command for s3://%s%s resulted in a zero size file?! Deleting and failing since this isn't right." 
% (ud.host, ud.path)) + + return True + + def checkstatus(self, fetch, ud, d): + """ + Check the status of a URL + """ + + cmd = '%s ls s3://%s%s' % (ud.basecmd, ud.host, ud.path) + bb.fetch2.check_network_access(d, cmd, ud.url) + output = runfetchcmd(cmd, d) + + # "aws s3 ls s3://mybucket/foo" will exit with success even if the file + # is not found, so check output of the command to confirm success. + + if not output: + raise FetchError("The aws ls command for s3://%s%s gave empty output" % (ud.host, ud.path)) + + return True diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py index 7989fccc7..81884a6aa 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/sftp.py @@ -62,12 +62,10 @@ SRC_URI = "sftp://user@host.example.com/dir/path.file.txt" import os import bb import urllib.request, urllib.parse, urllib.error -from bb import data from bb.fetch2 import URI from bb.fetch2 import FetchMethod from bb.fetch2 import runfetchcmd - class SFTP(FetchMethod): """Class to fetch urls via 'sftp'""" @@ -92,7 +90,7 @@ class SFTP(FetchMethod): else: ud.basename = os.path.basename(ud.path) - ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d) + ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) def download(self, ud, d): """Fetch urls""" @@ -104,7 +102,7 @@ class SFTP(FetchMethod): port = '-P %d' % urlo.port urlo.port = None - dldir = data.getVar('DL_DIR', d, True) + dldir = d.getVar('DL_DIR') lpath = os.path.join(dldir, ud.localfile) user = '' diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py index 56f9b7eb3..6047ee417 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/ssh.py @@ -43,7 +43,6 @@ IETF secsh internet draft: # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
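
The new bb/fetch2/s3.py above shells out to the AWS CLI both to download objects and to check that they exist. The standalone sketch below shows the same pattern with plain subprocess calls; it assumes the aws tool is installed and configured, and the bucket and key names in the usage comment are made up.

    import os
    import subprocess

    AWS = "aws"  # assumes the AWS CLI is installed and configured

    def s3_download(bucket, key, localpath):
        """Copy s3://bucket/key to localpath and apply the same sanity checks
        the fetcher uses: the file must exist and must not be empty."""
        subprocess.run([AWS, "s3", "cp", "s3://%s/%s" % (bucket, key), localpath],
                       check=True)
        if not os.path.exists(localpath):
            raise RuntimeError("aws cp reported success but %s does not exist" % localpath)
        if os.path.getsize(localpath) == 0:
            os.remove(localpath)
            raise RuntimeError("aws cp produced a zero-size file for %s" % key)

    def s3_exists(bucket, key):
        # The fetcher checks the command output rather than the exit status,
        # since 'aws s3 ls' can report success even when the key is missing.
        proc = subprocess.run([AWS, "s3", "ls", "s3://%s/%s" % (bucket, key)],
                              capture_output=True, text=True)
        return proc.returncode == 0 and bool(proc.stdout.strip())

    # Example usage (hypothetical bucket and key):
    #   s3_download("my-mirror", "sources/foo-1.0.tar.gz", "/tmp/foo-1.0.tar.gz")
    #   print(s3_exists("my-mirror", "sources/foo-1.0.tar.gz"))
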
import re, os -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import FetchError from bb.fetch2 import logger @@ -87,11 +86,11 @@ class SSH(FetchMethod): m = __pattern__.match(urldata.url) path = m.group('path') host = m.group('host') - urldata.localpath = os.path.join(d.getVar('DL_DIR', True), + urldata.localpath = os.path.join(d.getVar('DL_DIR'), os.path.basename(os.path.normpath(path))) def download(self, urldata, d): - dldir = d.getVar('DL_DIR', True) + dldir = d.getVar('DL_DIR') m = __pattern__.match(urldata.url) path = m.group('path') diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py index 6ca79d35d..3f172eec9 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/svn.py @@ -28,7 +28,6 @@ import sys import logging import bb import re -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import FetchError from bb.fetch2 import MissingParameterError @@ -50,7 +49,7 @@ class Svn(FetchMethod): if not "module" in ud.parm: raise MissingParameterError('module', ud.url) - ud.basecmd = d.getVar('FETCHCMD_svn', True) + ud.basecmd = d.getVar('FETCHCMD_svn') ud.module = ud.parm["module"] @@ -61,15 +60,15 @@ class Svn(FetchMethod): # Create paths to svn checkouts relpath = self._strip_leading_slashes(ud.path) - ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath) + ud.pkgdir = os.path.join(d.expand('${SVNDIR}'), ud.host, relpath) ud.moddir = os.path.join(ud.pkgdir, ud.module) - ud.setup_revisons(d) + ud.setup_revisions(d) if 'rev' in ud.parm: ud.revision = ud.parm['rev'] - ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d) + ud.localfile = d.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision)) def _buildsvncommand(self, ud, d, command): """ @@ -79,9 +78,9 @@ class Svn(FetchMethod): proto = ud.parm.get('protocol', 'svn') - svn_rsh = None - if proto == "svn+ssh" and "rsh" in ud.parm: - svn_rsh = ud.parm["rsh"] + svn_ssh = None + if proto == "svn+ssh" and "ssh" in ud.parm: + svn_ssh = ud.parm["ssh"] svnroot = ud.host + ud.path @@ -113,8 +112,8 @@ class Svn(FetchMethod): else: raise FetchError("Invalid svn command %s" % command, ud.url) - if svn_rsh: - svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) + if svn_ssh: + svncmd = "SVN_SSH=\"%s\" %s" % (svn_ssh, svncmd) return svncmd @@ -173,7 +172,7 @@ class Svn(FetchMethod): """ Return the latest upstream revision number """ - bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1")) + bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "log1"), ud.url) output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "log1"), d, True) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py index 23d48acb0..ae0ffa8c9 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/fetch2/wget.py @@ -33,7 +33,6 @@ import logging import bb import bb.progress import urllib.request, urllib.parse, urllib.error -from bb import data from bb.fetch2 import FetchMethod from bb.fetch2 import FetchError from bb.fetch2 import logger @@ -84,18 +83,18 @@ class Wget(FetchMethod): else: ud.basename = os.path.basename(ud.path) - ud.localfile = data.expand(urllib.parse.unquote(ud.basename), d) + 
ud.localfile = d.expand(urllib.parse.unquote(ud.basename)) if not ud.localfile: - ud.localfile = data.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."), d) + ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", ".")) - self.basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate" + self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 30 --passive-ftp --no-check-certificate" def _runwget(self, ud, d, command, quiet): progresshandler = WgetProgressHandler(d) logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command)) - bb.fetch2.check_network_access(d, command) + bb.fetch2.check_network_access(d, command, ud.url) runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler) def download(self, ud, d): @@ -104,7 +103,7 @@ class Wget(FetchMethod): fetchcmd = self.basecmd if 'downloadfilename' in ud.parm: - dldir = d.getVar("DL_DIR", True) + dldir = d.getVar("DL_DIR") bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile)) fetchcmd += " -O " + dldir + os.sep + ud.localfile @@ -304,12 +303,24 @@ class Wget(FetchMethod): r = urllib.request.Request(uri) r.get_method = lambda: "HEAD" - if ud.user: + def add_basic_auth(login_str, request): + '''Adds Basic auth to http request, pass in login:password as string''' import base64 - encodeuser = base64.b64encode(ud.user.encode('utf-8')).decode("utf-8") + encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8") authheader = "Basic %s" % encodeuser r.add_header("Authorization", authheader) + if ud.user: + add_basic_auth(ud.user, r) + + try: + import netrc, urllib.parse + n = netrc.netrc() + login, unused, password = n.authenticators(urllib.parse.urlparse(uri).hostname) + add_basic_auth("%s:%s" % (login, password), r) + except (TypeError, ImportError, IOError, netrc.NetrcParseError): + pass + opener.open(r) except urllib.error.URLError as e: if try_again: @@ -534,7 +545,7 @@ class Wget(FetchMethod): # src.rpm extension was added only for rpm package. Can be removed if the rpm # packaged will always be considered as having to be manually upgraded - psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)" + psuffix_regex = "(tar\.gz|tgz|tar\.bz2|zip|xz|tar\.lz|rpm|bz2|orig\.tar\.gz|tar\.xz|src\.tar\.gz|src\.tgz|svnr\d+\.tar\.bz2|stable\.tar\.gz|src\.rpm)" # match name, version and archive type of a package package_regex_comp = re.compile("(?P%s?\.?v?)(?P%s)(?P%s)?[\.-](?P%s$)" @@ -542,7 +553,7 @@ class Wget(FetchMethod): self.suffix_regex_comp = re.compile(psuffix_regex) # compile regex, can be specific by package or generic regex - pn_regex = d.getVar('UPSTREAM_CHECK_REGEX', True) + pn_regex = d.getVar('UPSTREAM_CHECK_REGEX') if pn_regex: package_custom_regex_comp = re.compile(pn_regex) else: @@ -563,7 +574,7 @@ class Wget(FetchMethod): sanity check to ensure same name and type. 
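
The wget checkstatus() change above adds a fallback that reads credentials from ~/.netrc when the URL carries no user:password. Below is a small self-contained sketch of that fallback using only the standard library; the URL in the usage comment is a placeholder.

    import base64
    import netrc
    import urllib.parse
    import urllib.request

    def head_request_with_netrc(uri):
        """Build a HEAD request and, if ~/.netrc has an entry for the host,
        attach an HTTP Basic Authorization header, mirroring the fetcher's
        add_basic_auth() helper."""
        req = urllib.request.Request(uri)
        req.get_method = lambda: "HEAD"
        try:
            creds = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname)
            if creds:
                login, _, password = creds
                token = base64.b64encode(("%s:%s" % (login, password)).encode("utf-8"))
                req.add_header("Authorization", "Basic %s" % token.decode("utf-8"))
        except (OSError, netrc.NetrcParseError):
            # No ~/.netrc, or an unparsable one: proceed unauthenticated,
            # just like the fetcher does.
            pass
        return req

    # req = head_request_with_netrc("https://example.com/releases/")  # placeholder URL
    # urllib.request.urlopen(req)
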
""" package = ud.path.split("/")[-1] - current_version = ['', d.getVar('PV', True), ''] + current_version = ['', d.getVar('PV'), ''] """possible to have no version in pkg name, such as spectrum-fw""" if not re.search("\d+", package): @@ -578,7 +589,7 @@ class Wget(FetchMethod): bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern)) uri = "" - regex_uri = d.getVar("UPSTREAM_CHECK_URI", True) + regex_uri = d.getVar("UPSTREAM_CHECK_URI") if not regex_uri: path = ud.path.split(package)[0] @@ -587,7 +598,7 @@ class Wget(FetchMethod): dirver_regex = re.compile("(?P[^/]*(\d+\.)*\d+([-_]r\d+)*)/") m = dirver_regex.search(path) if m: - pn = d.getVar('PN', True) + pn = d.getVar('PN') dirver = m.group('dirver') dirver_pn_regex = re.compile("%s\d?" % (re.escape(pn))) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/main.py b/import-layers/yocto-poky/bitbake/lib/bb/main.py index f2f59f670..8c948c2c1 100755 --- a/import-layers/yocto-poky/bitbake/lib/bb/main.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/main.py @@ -174,13 +174,24 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters): help="Read the specified file after bitbake.conf.") parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, - help="Output more log message data to the terminal.") + help="Enable tracing of shell tasks (with 'set -x'). " + "Also print bb.note(...) messages to stdout (in " + "addition to writing them to ${T}/log.do_).") parser.add_option("-D", "--debug", action="count", dest="debug", default=0, - help="Increase the debug level. You can specify this more than once.") - - parser.add_option("-q", "--quiet", action="store_true", dest="quiet", default=False, - help="Output less log message data to the terminal.") + help="Increase the debug level. You can specify this " + "more than once. -D sets the debug level to 1, " + "where only bb.debug(1, ...) messages are printed " + "to stdout; -DD sets the debug level to 2, where " + "both bb.debug(1, ...) and bb.debug(2, ...) " + "messages are printed; etc. Without -D, no debug " + "messages are printed. Note that -D only affects " + "output to stdout. All debug messages are written " + "to ${T}/log.do_taskname, regardless of the debug " + "level.") + + parser.add_option("-q", "--quiet", action="count", dest="quiet", default=0, + help="Output less log message data to the terminal. You can specify this more than once.") parser.add_option("-n", "--dry-run", action="store_true", dest="dry_run", default=False, help="Don't execute, just go through the motions.") @@ -287,6 +298,9 @@ class BitBakeConfigParameters(cookerdata.ConfigParameters): help="Writes the event log of the build to a bitbake event json file. 
" "Use '' (empty string) to assign the name automatically.") + parser.add_option("", "--runall", action="store", dest="runall", + help="Run the specified task for all build targets and their dependencies.") + options, targets = parser.parse_args(argv) if options.quiet and options.verbose: @@ -367,6 +381,7 @@ def start_server(servermodule, configParams, configuration, features): raise if not configParams.foreground: server.detach() + cooker.shutdown() cooker.lock.close() return server @@ -389,12 +404,8 @@ def bitbake_main(configParams, configuration): except: pass - configuration.setConfigParameters(configParams) - ui_module = import_extension_module(bb.ui, configParams.ui, 'main') - servermodule = import_extension_module(bb.server, configParams.servertype, 'BitBakeServer') - if configParams.server_only: if configParams.servertype != "xmlrpc": raise BBMainException("FATAL: If '--server-only' is defined, we must set the " @@ -442,6 +453,31 @@ def bitbake_main(configParams, configuration): bb.msg.init_msgconfig(configParams.verbose, configuration.debug, configuration.debug_domains) + server, server_connection, ui_module = setup_bitbake(configParams, configuration) + if server_connection is None and configParams.kill_server: + return 0 + + if not configParams.server_only: + if configParams.status_only: + server_connection.terminate() + return 0 + + try: + return ui_module.main(server_connection.connection, server_connection.events, + configParams) + finally: + bb.event.ui_queue = [] + server_connection.terminate() + else: + print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, + server.serverImpl.port)) + if configParams.foreground: + server.serverImpl.serve_forever() + return 0 + + return 1 + +def setup_bitbake(configParams, configuration, extrafeatures=None): # Ensure logging messages get sent to the UI as events handler = bb.event.LogHandler() if not configParams.status_only: @@ -451,8 +487,11 @@ def bitbake_main(configParams, configuration): # Clear away any spurious environment variables while we stoke up the cooker cleanedvars = bb.utils.clean_environment() - featureset = [] - if not configParams.server_only: + if configParams.server_only: + featureset = [] + ui_module = None + else: + ui_module = import_extension_module(bb.ui, configParams.ui, 'main') # Collect the feature set for the UI featureset = getattr(ui_module, "featureSet", []) @@ -463,11 +502,15 @@ def bitbake_main(configParams, configuration): setattr(configuration, "%s_server" % param, value) param = "%s_server" % param - if not configParams.remote_server: - # we start a server with a given configuration - server = start_server(servermodule, configParams, configuration, featureset) - bb.event.ui_queue = [] - else: + if extrafeatures: + for feature in extrafeatures: + if not feature in featureset: + featureset.append(feature) + + servermodule = import_extension_module(bb.server, + configParams.servertype, + 'BitBakeServer') + if configParams.remote_server: if os.getenv('BBSERVER') == 'autostart': if configParams.remote_server == 'autostart' or \ not servermodule.check_connection(configParams.remote_server, timeout=2): @@ -475,14 +518,19 @@ def bitbake_main(configParams, configuration): srv = start_server(servermodule, configParams, configuration, featureset) configParams.remote_server = '%s:%d' % tuple(configuration.interface) bb.event.ui_queue = [] - # we start a stub server that is actually a XMLRPClient that connects to a real server + from bb.server.xmlrpc import BitBakeXMLRPCClient server = 
servermodule.BitBakeXMLRPCClient(configParams.observe_only, configParams.xmlrpctoken) server.saveConnectionDetails(configParams.remote_server) + else: + # we start a server with a given configuration + server = start_server(servermodule, configParams, configuration, featureset) + bb.event.ui_queue = [] - - if not configParams.server_only: + if configParams.server_only: + server_connection = None + else: try: server_connection = server.establishConnection(featureset) except Exception as e: @@ -491,7 +539,7 @@ def bitbake_main(configParams, configuration): if configParams.kill_server: server_connection.connection.terminateServer() bb.event.ui_queue = [] - return 0 + return None, None, None server_connection.setupEventQueue() @@ -501,22 +549,4 @@ def bitbake_main(configParams, configuration): logger.removeHandler(handler) - - if configParams.status_only: - server_connection.terminate() - return 0 - - try: - return ui_module.main(server_connection.connection, server_connection.events, - configParams) - finally: - bb.event.ui_queue = [] - server_connection.terminate() - else: - print("Bitbake server address: %s, server port: %s" % (server.serverImpl.host, - server.serverImpl.port)) - if configParams.foreground: - server.serverImpl.serve_forever() - return 0 - - return 1 + return server, server_connection, ui_module diff --git a/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py b/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py index 203c40504..833cd3d34 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/monitordisk.py @@ -129,7 +129,7 @@ def getDiskData(BBDirs, configuration): bb.utils.mkdirhier(path) dev = getMountedDev(path) # Use path/action as the key - devDict[os.path.join(path, action)] = [dev, minSpace, minInode] + devDict[(path, action)] = [dev, minSpace, minInode] return devDict @@ -141,7 +141,7 @@ def getInterval(configuration): spaceDefault = 50 * 1024 * 1024 inodeDefault = 5 * 1024 - interval = configuration.getVar("BB_DISKMON_WARNINTERVAL", True) + interval = configuration.getVar("BB_DISKMON_WARNINTERVAL") if not interval: return spaceDefault, inodeDefault else: @@ -179,7 +179,7 @@ class diskMonitor: self.enableMonitor = False self.configuration = configuration - BBDirs = configuration.getVar("BB_DISKMON_DIRS", True) or None + BBDirs = configuration.getVar("BB_DISKMON_DIRS") or None if BBDirs: self.devDict = getDiskData(BBDirs, configuration) if self.devDict: @@ -205,18 +205,21 @@ class diskMonitor: """ Take action for the monitor """ if self.enableMonitor: - for k in self.devDict: - path = os.path.dirname(k) - action = os.path.basename(k) - dev = self.devDict[k][0] - minSpace = self.devDict[k][1] - minInode = self.devDict[k][2] + diskUsage = {} + for k, attributes in self.devDict.items(): + path, action = k + dev, minSpace, minInode = attributes st = os.statvfs(path) - # The free space, float point number + # The available free space, integer number freeSpace = st.f_bavail * st.f_frsize + # Send all relevant information in the event. 
+ freeSpaceRoot = st.f_bfree * st.f_frsize + totalSpace = st.f_blocks * st.f_frsize + diskUsage[dev] = bb.event.DiskUsageSample(freeSpace, freeSpaceRoot, totalSpace) + if minSpace and freeSpace < minSpace: # Always show warning, the self.checked would always be False if the action is WARN if self.preFreeS[k] == 0 or self.preFreeS[k] - freeSpace > self.spaceInterval and not self.checked[k]: @@ -235,7 +238,7 @@ class diskMonitor: rq.finish_runqueue(True) bb.event.fire(bb.event.DiskFull(dev, 'disk', freeSpace, path), self.configuration) - # The free inodes, float point number + # The free inodes, integer number freeInode = st.f_favail if minInode and freeInode < minInode: @@ -260,4 +263,6 @@ class diskMonitor: self.checked[k] = True rq.finish_runqueue(True) bb.event.fire(bb.event.DiskFull(dev, 'inode', freeInode, path), self.configuration) + + bb.event.fire(bb.event.MonitorDiskEvent(diskUsage), self.configuration) return diff --git a/import-layers/yocto-poky/bitbake/lib/bb/msg.py b/import-layers/yocto-poky/bitbake/lib/bb/msg.py index b7c39fa13..90b158238 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/msg.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/msg.py @@ -201,3 +201,18 @@ def fatal(msgdomain, msg): logger = logging.getLogger("BitBake") logger.critical(msg) sys.exit(1) + +def logger_create(name, output=sys.stderr, level=logging.INFO, preserve_handlers=False, color='auto'): + """Standalone logger creation function""" + logger = logging.getLogger(name) + console = logging.StreamHandler(output) + format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") + if color == 'always' or (color == 'auto' and output.isatty()): + format.enable_color() + console.setFormatter(format) + if preserve_handlers: + logger.addHandler(console) + else: + logger.handlers = [console] + logger.setLevel(level) + return logger diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py index 26ae7ead8..a2952ecc0 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/__init__.py @@ -123,7 +123,7 @@ def init_parser(d): def resolve_file(fn, d): if not os.path.isabs(fn): - bbpath = d.getVar("BBPATH", True) + bbpath = d.getVar("BBPATH") newfn, attempts = bb.utils.which(bbpath, fn, history=True) for af in attempts: mark_dependency(d, af) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py index fa83b1898..dba4540f5 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/ast.py @@ -30,8 +30,6 @@ import itertools from bb import methodpool from bb.parse import logger -_bbversions_re = re.compile(r"\[(?P[0-9]+)-(?P[0-9]+)\]") - class StatementGroup(list): def eval(self, data): for statement in self: @@ -132,7 +130,6 @@ class DataNode(AstNode): val = groupd["value"] elif "colon" in groupd and groupd["colon"] != None: e = data.createCopy() - bb.data.update_data(e) op = "immediate" val = e.expand(groupd["value"], key + "[:=]") elif "append" in groupd and groupd["append"] != None: @@ -347,19 +344,18 @@ def finalize(fn, d, variant = None): if not handlerfn: bb.fatal("Undefined event handler function '%s'" % var) handlerln = int(d.getVarFlag(var, "lineno", False)) - bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, "eventmask", True) or "").split(), handlerfn, handlerln) + bb.event.register(var, d.getVar(var, False), (d.getVarFlag(var, 
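
The monitordisk.py hunks above now key the watch list by (path, action) tuples and sample os.statvfs() on every check, collecting available, free and total space per device for the new disk usage event. Below is a reduced standalone version of that sampling for POSIX systems; a namedtuple stands in for bb.event.DiskUsageSample.

    import os
    from collections import namedtuple

    # Stand-in for bb.event.DiskUsageSample (same three quantities).
    DiskUsageSample = namedtuple("DiskUsageSample",
                                 "available_bytes free_bytes total_bytes")

    def sample_disk_usage(paths):
        """Return one sample per path, computed the same way as the monitor:
        f_bavail counts blocks available to unprivileged users, f_bfree counts
        all free blocks (including root-reserved ones), f_blocks is the
        filesystem size; each is multiplied by the fragment size."""
        samples = {}
        for path in paths:
            st = os.statvfs(path)
            samples[path] = DiskUsageSample(
                available_bytes=st.f_bavail * st.f_frsize,
                free_bytes=st.f_bfree * st.f_frsize,
                total_bytes=st.f_blocks * st.f_frsize)
        return samples

    print(sample_disk_usage(["/tmp"]))
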
"eventmask") or "").split(), handlerfn, handlerln) bb.event.fire(bb.event.RecipePreFinalise(fn), d) bb.data.expandKeys(d) - bb.data.update_data(d) code = [] for funcname in d.getVar("__BBANONFUNCS", False) or []: code.append("%s(d)" % funcname) bb.utils.better_exec("\n".join(code), {"d": d}) - bb.data.update_data(d) tasklist = d.getVar('__BBTASKS', False) or [] + bb.event.fire(bb.event.RecipeTaskPreProcess(fn, list(tasklist)), d) bb.build.add_tasks(tasklist, d) bb.parse.siggen.finalise(fn, d, variant) @@ -385,29 +381,8 @@ def _create_variants(datastores, names, function, onlyfinalise): else: create_variant("%s-%s" % (variant, name), datastores[variant], name) -def _expand_versions(versions): - def expand_one(version, start, end): - for i in range(start, end + 1): - ver = _bbversions_re.sub(str(i), version, 1) - yield ver - - versions = iter(versions) - while True: - try: - version = next(versions) - except StopIteration: - break - - range_ver = _bbversions_re.search(version) - if not range_ver: - yield version - else: - newversions = expand_one(version, int(range_ver.group("from")), - int(range_ver.group("to"))) - versions = itertools.chain(newversions, versions) - def multi_finalize(fn, d): - appends = (d.getVar("__BBAPPEND", True) or "").split() + appends = (d.getVar("__BBAPPEND") or "").split() for append in appends: logger.debug(1, "Appending .bbappend file %s to %s", append, fn) bb.parse.BBHandler.handle(append, d, True) @@ -422,51 +397,7 @@ def multi_finalize(fn, d): d.setVar("__SKIPPED", e.args[0]) datastores = {"": safe_d} - versions = (d.getVar("BBVERSIONS", True) or "").split() - if versions: - pv = orig_pv = d.getVar("PV", True) - baseversions = {} - - def verfunc(ver, d, pv_d = None): - if pv_d is None: - pv_d = d - - overrides = d.getVar("OVERRIDES", True).split(":") - pv_d.setVar("PV", ver) - overrides.append(ver) - bpv = baseversions.get(ver) or orig_pv - pv_d.setVar("BPV", bpv) - overrides.append(bpv) - d.setVar("OVERRIDES", ":".join(overrides)) - - versions = list(_expand_versions(versions)) - for pos, version in enumerate(list(versions)): - try: - pv, bpv = version.split(":", 2) - except ValueError: - pass - else: - versions[pos] = pv - baseversions[pv] = bpv - - if pv in versions and not baseversions.get(pv): - versions.remove(pv) - else: - pv = versions.pop() - - # This is necessary because our existing main datastore - # has already been finalized with the old PV, we need one - # that's been finalized with the new PV. - d = bb.data.createCopy(safe_d) - verfunc(pv, d, safe_d) - try: - finalize(fn, d) - except bb.parse.SkipRecipe as e: - d.setVar("__SKIPPED", e.args[0]) - - _create_variants(datastores, versions, verfunc, onlyfinalise) - - extended = d.getVar("BBCLASSEXTEND", True) or "" + extended = d.getVar("BBCLASSEXTEND") or "" if extended: # the following is to support bbextends with arguments, for e.g. 
multilib # an example is as follows: @@ -484,7 +415,7 @@ def multi_finalize(fn, d): else: extendedmap[ext] = ext - pn = d.getVar("PN", True) + pn = d.getVar("PN") def extendfunc(name, d): if name != extendedmap[name]: d.setVar("BBEXTENDCURR", extendedmap[name]) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py index c54a07979..fe918a41f 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/BBHandler.py @@ -66,7 +66,7 @@ def inherit(files, fn, lineno, d): file = os.path.join('classes', '%s.bbclass' % file) if not os.path.isabs(file): - bbpath = d.getVar("BBPATH", True) + bbpath = d.getVar("BBPATH") abs_fn, attempts = bb.utils.which(bbpath, file, history=True) for af in attempts: if af != abs_fn: @@ -87,17 +87,17 @@ def get_statements(filename, absolute_filename, base_name): try: return cached_statements[absolute_filename] except KeyError: - file = open(absolute_filename, 'r') - statements = ast.StatementGroup() - - lineno = 0 - while True: - lineno = lineno + 1 - s = file.readline() - if not s: break - s = s.rstrip() - feeder(lineno, s, filename, base_name, statements) - file.close() + with open(absolute_filename, 'r') as f: + statements = ast.StatementGroup() + + lineno = 0 + while True: + lineno = lineno + 1 + s = f.readline() + if not s: break + s = s.rstrip() + feeder(lineno, s, filename, base_name, statements) + if __inpython__: # add a blank line to close out any python definition feeder(lineno, "", filename, base_name, statements, eof=True) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py index 875250de4..f7d0cf74a 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/parse/parse_py/ConfHandler.py @@ -33,7 +33,7 @@ from bb.parse import ParseError, resolve_file, ast, logger, handle __config_regexp__ = re.compile( r""" ^ (?Pexport\s*)? - (?P[a-zA-Z0-9\-~_+.${}/]+?) + (?P[a-zA-Z0-9\-_+.${}/~]+?) (\[(?P[a-zA-Z0-9\-_+.]+)\])? 
\s* ( @@ -56,9 +56,9 @@ __config_regexp__ = re.compile( r""" """, re.X) __include_regexp__ = re.compile( r"include\s+(.+)" ) __require_regexp__ = re.compile( r"require\s+(.+)" ) -__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/]+)$" ) -__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/]+)$" ) -__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/]+)\[([a-zA-Z0-9\-_+.${}/]+)\]$" ) +__export_regexp__ = re.compile( r"export\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) +__unset_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)$" ) +__unset_flag_regexp__ = re.compile( r"unset\s+([a-zA-Z0-9\-_+.${}/~]+)\[([a-zA-Z0-9\-_+.]+)\]$" ) def init(data): topdir = data.getVar('TOPDIR', False) @@ -83,16 +83,16 @@ def include(parentfn, fn, lineno, data, error_out): if not os.path.isabs(fn): dname = os.path.dirname(parentfn) - bbpath = "%s:%s" % (dname, data.getVar("BBPATH", True)) + bbpath = "%s:%s" % (dname, data.getVar("BBPATH")) abs_fn, attempts = bb.utils.which(bbpath, fn, history=True) if abs_fn and bb.parse.check_dependency(data, abs_fn): - logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE', True))) + logger.warning("Duplicate inclusion for %s in %s" % (abs_fn, data.getVar('FILE'))) for af in attempts: bb.parse.mark_dependency(data, af) if abs_fn: fn = abs_fn elif bb.parse.check_dependency(data, fn): - logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE', True))) + logger.warning("Duplicate inclusion for %s in %s" % (fn, data.getVar('FILE'))) try: bb.parse.handle(fn, data, True) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py b/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py index bb6deca52..bef701861 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/persist_data.py @@ -28,11 +28,7 @@ import sys import warnings from bb.compat import total_ordering from collections import Mapping - -try: - import sqlite3 -except ImportError: - from pysqlite2 import dbapi2 as sqlite3 +import sqlite3 sqlversion = sqlite3.sqlite_version_info if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3): @@ -207,8 +203,8 @@ def connect(database): def persist(domain, d): """Convenience factory for SQLTable objects based upon metadata""" import bb.utils - cachedir = (d.getVar("PERSISTENT_DIR", True) or - d.getVar("CACHE", True)) + cachedir = (d.getVar("PERSISTENT_DIR") or + d.getVar("CACHE")) if not cachedir: logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") sys.exit(1) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/process.py b/import-layers/yocto-poky/bitbake/lib/bb/process.py index c62d7bca4..a4a559982 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/process.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/process.py @@ -162,9 +162,9 @@ def run(cmd, input=None, log=None, extrafiles=None, **options): stdout, stderr = _logged_communicate(pipe, log, input, extrafiles) else: stdout, stderr = pipe.communicate(input) - if stdout: + if not stdout is None: stdout = stdout.decode("utf-8") - if stderr: + if not stderr is None: stderr = stderr.decode("utf-8") if pipe.returncode != 0: diff --git a/import-layers/yocto-poky/bitbake/lib/bb/providers.py b/import-layers/yocto-poky/bitbake/lib/bb/providers.py index db02a0b0d..443187e17 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/providers.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/providers.py @@ -48,7 +48,6 @@ def findProviders(cfgData, dataCache, pkg_pn = None): 
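
The bb/process.py hunk above switches from "if stdout:" to a check against None, so a command that prints nothing still yields an empty str rather than raw bytes. A small demonstration of the difference, assuming a POSIX "true" command is available:

    import subprocess

    def run_old(cmd):
        out, _ = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE).communicate()
        if out:                    # b"" is falsy: empty output stays as bytes
            out = out.decode("utf-8")
        return out

    def run_new(cmd):
        out, _ = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE).communicate()
        if out is not None:        # decode even when the command printed nothing
            out = out.decode("utf-8")
        return out

    print(type(run_old("true")))   # <class 'bytes'> - surprises str-expecting callers
    print(type(run_new("true")))   # <class 'str'>
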
# Need to ensure data store is expanded localdata = data.createCopy(cfgData) - bb.data.update_data(localdata) bb.data.expandKeys(localdata) preferred_versions = {} @@ -123,11 +122,11 @@ def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None): # pn can contain '_', e.g. gcc-cross-x86_64 and an override cannot # hence we do this manually rather than use OVERRIDES - preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn, True) + preferred_v = cfgData.getVar("PREFERRED_VERSION_pn-%s" % pn) if not preferred_v: - preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn, True) + preferred_v = cfgData.getVar("PREFERRED_VERSION_%s" % pn) if not preferred_v: - preferred_v = cfgData.getVar("PREFERRED_VERSION", True) + preferred_v = cfgData.getVar("PREFERRED_VERSION") if preferred_v: m = re.match('(\d+:)*(.*)(_.*)*', preferred_v) @@ -289,7 +288,7 @@ def filterProviders(providers, item, cfgData, dataCache): eligible = _filterProviders(providers, item, cfgData, dataCache) - prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item, True) + prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % item) if prefervar: dataCache.preferred[item] = prefervar @@ -318,7 +317,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache): eligible = _filterProviders(providers, item, cfgData, dataCache) # First try and match any PREFERRED_RPROVIDER entry - prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item, True) + prefervar = cfgData.getVar('PREFERRED_RPROVIDER_%s' % item) foundUnique = False if prefervar: for p in eligible: @@ -345,7 +344,7 @@ def filterProvidersRunTime(providers, item, cfgData, dataCache): pn = dataCache.pkg_fn[p] provides = dataCache.pn_provides[pn] for provide in provides: - prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide, True) + prefervar = cfgData.getVar('PREFERRED_PROVIDER_%s' % provide) #logger.debug(1, "checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys()) if prefervar in pns and pns[prefervar] not in preferred: var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/remotedata.py b/import-layers/yocto-poky/bitbake/lib/bb/remotedata.py new file mode 100644 index 000000000..68ecffc19 --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/bb/remotedata.py @@ -0,0 +1,116 @@ +""" +BitBake 'remotedata' module + +Provides support for using a datastore from the bitbake client +""" + +# Copyright (C) 2016 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
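
The providers.py changes above keep the existing preferred-version lookup, which falls back through increasingly generic variable names. A small sketch of that fallback chain, using a plain dict as the configuration store; the example value is made up.

    def find_preferred_version(pn, getvar):
        """Check PREFERRED_VERSION_pn-<pn>, then PREFERRED_VERSION_<pn>, then the
        bare PREFERRED_VERSION, returning the first value that is set. The lookup
        is done by hand because pn may contain '_', which OVERRIDES cannot
        express."""
        for var in ("PREFERRED_VERSION_pn-%s" % pn,
                    "PREFERRED_VERSION_%s" % pn,
                    "PREFERRED_VERSION"):
            value = getvar(var)
            if value:
                return value
        return None

    config = {"PREFERRED_VERSION_pn-gcc-cross-x86_64": "6.3%"}   # made-up value
    print(find_preferred_version("gcc-cross-x86_64", config.get))  # -> 6.3%
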
+ +import bb.data + +class RemoteDatastores: + """Used on the server side to manage references to server-side datastores""" + def __init__(self, cooker): + self.cooker = cooker + self.datastores = {} + self.locked = [] + self.nextindex = 1 + + def __len__(self): + return len(self.datastores) + + def __getitem__(self, key): + if key is None: + return self.cooker.data + else: + return self.datastores[key] + + def items(self): + return self.datastores.items() + + def store(self, d, locked=False): + """ + Put a datastore into the collection. If locked=True then the datastore + is understood to be managed externally and cannot be released by calling + release(). + """ + idx = self.nextindex + self.datastores[idx] = d + if locked: + self.locked.append(idx) + self.nextindex += 1 + return idx + + def check_store(self, d, locked=False): + """ + Put a datastore into the collection if it's not already in there; + in either case return the index + """ + for key, val in self.datastores.items(): + if val is d: + idx = key + break + else: + idx = self.store(d, locked) + return idx + + def release(self, idx): + """Discard a datastore in the collection""" + if idx in self.locked: + raise Exception('Tried to release locked datastore %d' % idx) + del self.datastores[idx] + + def receive_datastore(self, remote_data): + """Receive a datastore object sent from the client (as prepared by transmit_datastore())""" + dct = dict(remote_data) + d = bb.data_smart.DataSmart() + d.dict = dct + while True: + if '_remote_data' in dct: + dsindex = dct['_remote_data']['_content'] + del dct['_remote_data'] + if dsindex is None: + dct['_data'] = self.cooker.data.dict + else: + dct['_data'] = self.datastores[dsindex].dict + break + elif '_data' in dct: + idct = dict(dct['_data']) + dct['_data'] = idct + dct = idct + else: + break + return d + + @staticmethod + def transmit_datastore(d): + """Prepare a datastore object for sending over IPC from the client end""" + # FIXME content might be a dict, need to turn that into a list as well + def copy_dicts(dct): + if '_remote_data' in dct: + dsindex = dct['_remote_data']['_content'].dsindex + newdct = dct.copy() + newdct['_remote_data'] = {'_content': dsindex} + return list(newdct.items()) + elif '_data' in dct: + newdct = dct.copy() + newdata = copy_dicts(dct['_data']) + if newdata: + newdct['_data'] = newdata + return list(newdct.items()) + return None + main_dict = copy_dicts(d.dict) + return main_dict diff --git a/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py b/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py index 9384c72ba..7d2ff818e 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/runqueue.py @@ -36,6 +36,7 @@ from bb import msg, data, event from bb import monitordisk import subprocess import pickle +from multiprocessing import Process bblogger = logging.getLogger("BitBake") logger = logging.getLogger("BitBake.RunQueue") @@ -183,6 +184,18 @@ class RunQueueScheduler(object): def newbuilable(self, task): self.buildable.append(task) + def describe_task(self, taskid): + result = 'ID %s' % taskid + if self.rev_prio_map: + result = result + (' pri %d' % self.rev_prio_map[taskid]) + return result + + def dump_prio(self, comment): + bb.debug(3, '%s (most important first):\n%s' % + (comment, + '\n'.join(['%d. %s' % (index + 1, self.describe_task(taskid)) for + index, taskid in enumerate(self.prio_map)]))) + class RunQueueSchedulerSpeed(RunQueueScheduler): """ A scheduler optimised for speed. 
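
The new RemoteDatastores class hands out integer handles for server-side datastores so clients can refer to them over IPC, and it refuses to release handles marked as locked. A reduced model of that registry pattern:

    class HandleRegistry:
        """Reduced model of RemoteDatastores: map integer handles to objects,
        optionally marking some handles as locked (managed elsewhere)."""
        def __init__(self):
            self._items = {}
            self._locked = set()
            self._nextindex = 1

        def store(self, obj, locked=False):
            idx = self._nextindex
            self._items[idx] = obj
            if locked:
                self._locked.add(idx)
            self._nextindex += 1
            return idx

        def check_store(self, obj, locked=False):
            # Reuse the existing handle if this exact object is already registered.
            for idx, val in self._items.items():
                if val is obj:
                    return idx
            return self.store(obj, locked)

        def release(self, idx):
            if idx in self._locked:
                raise Exception("Tried to release locked handle %d" % idx)
            del self._items[idx]

    reg = HandleRegistry()
    h = reg.check_store({"PN": "busybox"})
    reg.release(h)
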
The priority map is sorted by task weight, @@ -212,35 +225,100 @@ class RunQueueSchedulerSpeed(RunQueueScheduler): class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed): """ - A scheduler optimised to complete .bb files are quickly as possible. The + A scheduler optimised to complete .bb files as quickly as possible. The priority map is sorted by task weight, but then reordered so once a given - .bb file starts to build, it's completed as quickly as possible. This works - well where disk space is at a premium and classes like OE's rm_work are in - force. + .bb file starts to build, it's completed as quickly as possible by + running all tasks related to the same .bb file one after the after. + This works well where disk space is at a premium and classes like OE's + rm_work are in force. """ name = "completion" def __init__(self, runqueue, rqdata): - RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata) - - #FIXME - whilst this groups all fns together it does not reorder the - #fn groups optimally. - - basemap = copy.deepcopy(self.prio_map) - self.prio_map = [] - while (len(basemap) > 0): - entry = basemap.pop(0) - self.prio_map.append(entry) - fn = fn_from_tid(entry) - todel = [] - for entry in basemap: - entry_fn = fn_from_tid(entry) - if entry_fn == fn: - todel.append(basemap.index(entry)) - self.prio_map.append(entry) - todel.reverse() - for idx in todel: - del basemap[idx] + super(RunQueueSchedulerCompletion, self).__init__(runqueue, rqdata) + + # Extract list of tasks for each recipe, with tasks sorted + # ascending from "must run first" (typically do_fetch) to + # "runs last" (do_build). The speed scheduler prioritizes + # tasks that must run first before the ones that run later; + # this is what we depend on here. + task_lists = {} + for taskid in self.prio_map: + fn, taskname = taskid.rsplit(':', 1) + task_lists.setdefault(fn, []).append(taskname) + + # Now unify the different task lists. The strategy is that + # common tasks get skipped and new ones get inserted after the + # preceeding common one(s) as they are found. Because task + # lists should differ only by their number of tasks, but not + # the ordering of the common tasks, this should result in a + # deterministic result that is a superset of the individual + # task ordering. + all_tasks = [] + for recipe, new_tasks in task_lists.items(): + index = 0 + old_task = all_tasks[index] if index < len(all_tasks) else None + for new_task in new_tasks: + if old_task == new_task: + # Common task, skip it. This is the fast-path which + # avoids a full search. + index += 1 + old_task = all_tasks[index] if index < len(all_tasks) else None + else: + try: + index = all_tasks.index(new_task) + # Already present, just not at the current + # place. We re-synchronized by changing the + # index so that it matches again. Now + # move on to the next existing task. + index += 1 + old_task = all_tasks[index] if index < len(all_tasks) else None + except ValueError: + # Not present. Insert before old_task, which + # remains the same (but gets shifted back). + all_tasks.insert(index, new_task) + index += 1 + bb.debug(3, 'merged task list: %s' % all_tasks) + + # Now reverse the order so that tasks that finish the work on one + # recipe are considered more imporant (= come first). The ordering + # is now so that do_build is most important. 
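
The comment block above describes merging per-recipe task lists into a single ordering that is a superset of each list, on the assumption that the lists only differ in which tasks they contain, not in the relative order of shared tasks. A standalone run of that merge logic on toy task names:

    def merge_task_lists(task_lists):
        """Unify per-recipe task name lists into one ordering that is a superset
        of each input, assuming the inputs never disagree on the relative order
        of tasks they share."""
        all_tasks = []
        for new_tasks in task_lists:
            index = 0
            for new_task in new_tasks:
                old_task = all_tasks[index] if index < len(all_tasks) else None
                if old_task == new_task:
                    index += 1                              # common task, fast path
                else:
                    try:
                        index = all_tasks.index(new_task) + 1   # resynchronize
                    except ValueError:
                        all_tasks.insert(index, new_task)       # genuinely new task
                        index += 1
        return all_tasks

    merged = merge_task_lists([
        ["do_fetch", "do_unpack", "do_compile", "do_build"],
        ["do_fetch", "do_unpack", "do_patch", "do_compile", "do_build"],
    ])
    print(merged)  # ['do_fetch', 'do_unpack', 'do_patch', 'do_compile', 'do_build']
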
+ all_tasks.reverse() + + # Group tasks of the same kind before tasks of less important + # kinds at the head of the queue (because earlier = lower + # priority number = runs earlier), while preserving the + # ordering by recipe. If recipe foo is more important than + # bar, then the goal is to work on foo's do_populate_sysroot + # before bar's do_populate_sysroot and on the more important + # tasks of foo before any of the less important tasks in any + # other recipe (if those other recipes are more important than + # foo). + # + # All of this only applies when tasks are runable. Explicit + # dependencies still override this ordering by priority. + # + # Here's an example why this priority re-ordering helps with + # minimizing disk usage. Consider a recipe foo with a higher + # priority than bar where foo DEPENDS on bar. Then the + # implicit rule (from base.bbclass) is that foo's do_configure + # depends on bar's do_populate_sysroot. This ensures that + # bar's do_populate_sysroot gets done first. Normally the + # tasks from foo would continue to run once that is done, and + # bar only gets completed and cleaned up later. By ordering + # bar's task that depend on bar's do_populate_sysroot before foo's + # do_configure, that problem gets avoided. + task_index = 0 + self.dump_prio('original priorities') + for task in all_tasks: + for index in range(task_index, self.numTasks): + taskid = self.prio_map[index] + taskname = taskid.rsplit(':', 1)[1] + if taskname == task: + del self.prio_map[index] + self.prio_map.insert(task_index, taskid) + task_index += 1 + self.dump_prio('completion priorities') class RunTaskEntry(object): def __init__(self): @@ -262,10 +340,11 @@ class RunQueueData: self.rq = rq self.warn_multi_bb = False - self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST", True) or "" - self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split() + self.stampwhitelist = cfgData.getVar("BB_STAMP_WHITELIST") or "" + self.multi_provider_whitelist = (cfgData.getVar("MULTI_PROVIDER_WHITELIST") or "").split() self.setscenewhitelist = get_setscene_enforce_whitelist(cfgData) self.setscenewhitelist_checked = False + self.setscene_enforce = (cfgData.getVar('BB_SETSCENE_ENFORCE') == "1") self.init_progress_reporter = bb.progress.DummyMultiStageProcessProgressReporter() self.reset() @@ -565,6 +644,8 @@ class RunQueueData: for (depname, idependtask) in irdepends: if depname in taskData[mc].run_targets: # Won't be in run_targets if ASSUME_PROVIDED + if not taskData[mc].run_targets[depname]: + continue depdata = taskData[mc].run_targets[depname][0] if depdata is not None: t = depdata + ":" + idependtask @@ -616,6 +697,9 @@ class RunQueueData: seendeps.add(t) newdeps.add(t) for i in newdeps: + if i not in self.runtaskentries: + # Not all recipes might have the recrdeptask task as a task + continue task = self.runtaskentries[i].task for n in self.runtaskentries[i].depends: if n not in seendeps: @@ -722,6 +806,23 @@ class RunQueueData: self.init_progress_reporter.next_stage() + if self.cooker.configuration.runall is not None: + runall = "do_%s" % self.cooker.configuration.runall + runall_tids = { k: v for k, v in self.runtaskentries.items() if taskname_from_tid(k) == runall } + + # re-run the mark_active and then drop unused tasks from new list + runq_build = {} + for tid in list(runall_tids): + mark_active(tid,1) + + for tid in list(self.runtaskentries.keys()): + if tid not in runq_build: + del self.runtaskentries[tid] + delcount += 1 + + if 
len(self.runtaskentries) == 0: + bb.msg.fatal("RunQueue", "No remaining tasks to run for build target %s with runall %s" % (target, runall)) + # # Step D - Sanity checks and computation # @@ -976,16 +1077,22 @@ class RunQueue: self.cfgData = cfgData self.rqdata = RunQueueData(self, cooker, cfgData, dataCaches, taskData, targets) - self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY", True) or "perfile" - self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION", True) or None - self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION2", True) or None - self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID", True) or None + self.stamppolicy = cfgData.getVar("BB_STAMP_POLICY") or "perfile" + self.hashvalidate = cfgData.getVar("BB_HASHCHECK_FUNCTION") or None + self.setsceneverify = cfgData.getVar("BB_SETSCENE_VERIFY_FUNCTION2") or None + self.depvalidate = cfgData.getVar("BB_SETSCENE_DEPVALID") or None self.state = runQueuePrepare # For disk space monitor + # Invoked at regular time intervals via the bitbake heartbeat event + # while the build is running. We generate a unique name for the handler + # here, just in case that there ever is more than one RunQueue instance, + # start the handler when reaching runQueueSceneRun, and stop it when + # done with the build. self.dm = monitordisk.diskMonitor(cfgData) - + self.dm_event_handler_name = '_bb_diskmonitor_' + str(id(self)) + self.dm_event_handler_registered = False self.rqexe = None self.worker = {} self.fakeworker = {} @@ -998,8 +1105,8 @@ class RunQueue: if fakeroot: magic = magic + "beef" mcdata = self.cooker.databuilder.mcdata[mc] - fakerootcmd = mcdata.getVar("FAKEROOTCMD", True) - fakerootenv = (mcdata.getVar("FAKEROOTBASEENV", True) or "").split() + fakerootcmd = mcdata.getVar("FAKEROOTCMD") + fakerootenv = (mcdata.getVar("FAKEROOTBASEENV") or "").split() env = os.environ.copy() for key, value in (var.split('=') for var in fakerootenv): env[key] = value @@ -1025,12 +1132,13 @@ class RunQueue: "logdefaultverboselogs" : bb.msg.loggerVerboseLogs, "logdefaultdomain" : bb.msg.loggerDefaultDomains, "prhost" : self.cooker.prhost, - "buildname" : self.cfgData.getVar("BUILDNAME", True), - "date" : self.cfgData.getVar("DATE", True), - "time" : self.cfgData.getVar("TIME", True), + "buildname" : self.cfgData.getVar("BUILDNAME"), + "date" : self.cfgData.getVar("DATE"), + "time" : self.cfgData.getVar("TIME"), } worker.stdin.write(b"" + pickle.dumps(self.cooker.configuration) + b"") + worker.stdin.write(b"" + pickle.dumps(self.cooker.extraconfigdata) + b"") worker.stdin.write(b"" + pickle.dumps(workerdata) + b"") worker.stdin.flush() @@ -1208,10 +1316,12 @@ class RunQueue: self.rqdata.init_progress_reporter.next_stage() self.rqexe = RunQueueExecuteScenequeue(self) - if self.state in [runQueueSceneRun, runQueueRunning, runQueueCleanUp]: - self.dm.check(self) - if self.state is runQueueSceneRun: + if not self.dm_event_handler_registered: + res = bb.event.register(self.dm_event_handler_name, + lambda x: self.dm.check(self) if self.state in [runQueueSceneRun, runQueueRunning, runQueueCleanUp] else False, + ('bb.event.HeartbeatEvent',)) + self.dm_event_handler_registered = True retval = self.rqexe.execute() if self.state is runQueueRunInit: @@ -1230,7 +1340,13 @@ class RunQueue: if self.state is runQueueCleanUp: retval = self.rqexe.finish() - if (self.state is runQueueComplete or self.state is runQueueFailed) and self.rqexe: + build_done = self.state is runQueueComplete or self.state is runQueueFailed + + if build_done and 
self.dm_event_handler_registered: + bb.event.remove(self.dm_event_handler_name, None) + self.dm_event_handler_registered = False + + if build_done and self.rqexe: self.teardown_workers() if self.rqexe.stats.failed: logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed + self.rqexe.stats.failed, self.rqexe.stats.skipped, self.rqexe.stats.failed) @@ -1287,15 +1403,36 @@ class RunQueue: else: self.rqexe.finish() + def rq_dump_sigfn(self, fn, options): + bb_cache = bb.cache.NoCache(self.cooker.databuilder) + the_data = bb_cache.loadDataFull(fn, self.cooker.collection.get_file_appends(fn)) + siggen = bb.parse.siggen + dataCaches = self.rqdata.dataCaches + siggen.dump_sigfn(fn, dataCaches, options) + def dump_signatures(self, options): - done = set() + fns = set() bb.note("Reparsing files to collect dependency data") - bb_cache = bb.cache.NoCache(self.cooker.databuilder) + for tid in self.rqdata.runtaskentries: fn = fn_from_tid(tid) - if fn not in done: - the_data = bb_cache.loadDataFull(fn, self.cooker.collection.get_file_appends(fn)) - done.add(fn) + fns.add(fn) + + max_process = int(self.cfgData.getVar("BB_NUMBER_PARSE_THREADS") or os.cpu_count() or 1) + # We cannot use the real multiprocessing.Pool easily due to some local data + # that can't be pickled. This is a cheap multi-process solution. + launched = [] + while fns: + if len(launched) < max_process: + p = Process(target=self.rq_dump_sigfn, args=(fns.pop(), options)) + p.start() + launched.append(p) + for q in launched: + # The finished processes are joined when calling is_alive() + if not q.is_alive(): + launched.remove(q) + for p in launched: + p.join() bb.parse.siggen.dump_sigs(self.rqdata.dataCaches, options) @@ -1326,7 +1463,7 @@ class RunQueue: sq_hash.append(self.rqdata.runtaskentries[tid].hash) sq_taskname.append(taskname) sq_task.append(tid) - locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data } + locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data } try: call = self.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=True)" valid = bb.utils.better_eval(call, locs) @@ -1427,8 +1564,8 @@ class RunQueueExecute: self.cfgData = rq.cfgData self.rqdata = rq.rqdata - self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS", True) or 1) - self.scheduler = self.cfgData.getVar("BB_SCHEDULER", True) or "speed" + self.number_tasks = int(self.cfgData.getVar("BB_NUMBER_THREADS") or 1) + self.scheduler = self.cfgData.getVar("BB_SCHEDULER") or "speed" self.runq_buildable = set() self.runq_running = set() @@ -1510,7 +1647,7 @@ class RunQueueExecute: pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] taskdata[dep] = [pn, taskname, fn] call = self.rq.depvalidate + "(task, taskdata, notneeded, d)" - locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.expanded_data } + locs = { "task" : task, "taskdata" : taskdata, "notneeded" : self.scenequeue_notneeded, "d" : self.cooker.data } valid = bb.utils.better_eval(call, locs) return valid @@ -1578,7 +1715,7 @@ class RunQueueExecuteTasks(RunQueueExecute): invalidtasks.append(tid) call = self.rq.setsceneverify + "(covered, tasknames, fns, d, invalidtasks=invalidtasks)" - locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : tasknames, "fns" : fns, "d" : self.cooker.expanded_data, "invalidtasks" : invalidtasks } + 
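
dump_signatures() above now re-parses recipes in separate multiprocessing.Process workers, capped by BB_NUMBER_PARSE_THREADS and reaping finished children via is_alive(). Below is a reduced, self-contained version of that cheap process pool; the work function is a placeholder rather than the real signature dump.

    import os
    import time
    from multiprocessing import Process

    def work(item):
        # Placeholder for the per-recipe signature dump.
        time.sleep(0.1)
        print("processed", item)

    def run_all(items, max_process=None):
        """Run work(item) for every item with at most max_process concurrent
        child processes, mirroring the loop used by dump_signatures()."""
        max_process = max_process or os.cpu_count() or 1
        items = set(items)
        launched = []
        while items:
            if len(launched) < max_process:
                p = Process(target=work, args=(items.pop(),))
                p.start()
                launched.append(p)
            for q in launched[:]:
                # is_alive() joins processes that have already finished.
                if not q.is_alive():
                    launched.remove(q)
        for p in launched:
            p.join()

    if __name__ == "__main__":
        run_all(["recipe-%d.bb" % i for i in range(8)], max_process=4)
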
locs = { "covered" : self.rq.scenequeue_covered, "tasknames" : tasknames, "fns" : fns, "d" : self.cooker.data, "invalidtasks" : invalidtasks } covered_remove = bb.utils.better_eval(call, locs) def removecoveredtask(tid): @@ -1630,7 +1767,7 @@ class RunQueueExecuteTasks(RunQueueExecute): if type(obj) is type and issubclass(obj, RunQueueScheduler)) - user_schedulers = self.cfgData.getVar("BB_SCHEDULERS", True) + user_schedulers = self.cfgData.getVar("BB_SCHEDULERS") if user_schedulers: for sched in user_schedulers.split(): if not "." in sched: @@ -1775,7 +1912,7 @@ class RunQueueExecuteTasks(RunQueueExecute): bb.event.fire(startevent, self.cfgData) self.runq_running.add(task) self.stats.taskActive() - if not self.cooker.configuration.dry_run: + if not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce): bb.build.make_stamp(taskname, self.rqdata.dataCaches[mc], taskfn) self.task_complete(task) return True @@ -1786,7 +1923,7 @@ class RunQueueExecuteTasks(RunQueueExecute): taskdepdata = self.build_taskdepdata(task) taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] - if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run: + if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not (self.cooker.configuration.dry_run or self.rqdata.setscene_enforce): if not mc in self.rq.fakeworker: try: self.rq.start_fakeworker(self, mc) @@ -1795,10 +1932,10 @@ class RunQueueExecuteTasks(RunQueueExecute): self.rq.state = runQueueFailed self.stats.taskFailed() return True - self.rq.fakeworker[mc].process.stdin.write(b"" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata)) + b"") + self.rq.fakeworker[mc].process.stdin.write(b"" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(fn), taskdepdata, self.rqdata.setscene_enforce)) + b"") self.rq.fakeworker[mc].process.stdin.flush() else: - self.rq.worker[mc].process.stdin.write(b"" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata)) + b"") + self.rq.worker[mc].process.stdin.write(b"" + pickle.dumps((taskfn, task, taskname, False, self.cooker.collection.get_file_appends(taskfn), taskdepdata, self.rqdata.setscene_enforce)) + b"") self.rq.worker[mc].process.stdin.flush() self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) @@ -1839,7 +1976,8 @@ class RunQueueExecuteTasks(RunQueueExecute): pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] deps = self.rqdata.runtaskentries[revdep].depends provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] - taskdepdata[revdep] = [pn, taskname, fn, deps, provides] + taskhash = self.rqdata.runtaskentries[revdep].hash + taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash] for revdep2 in deps: if revdep2 not in taskdepdata: additional.append(revdep2) @@ -1892,6 +2030,8 @@ class RunQueueExecuteScenequeue(RunQueueExecute): for tid in self.rqdata.runq_setscene_tids: #bb.warn("Added endpoint 2 %s" % (tid)) for dep in self.rqdata.runtaskentries[tid].depends: + if tid in sq_revdeps[dep]: + sq_revdeps[dep].remove(tid) if dep not in endpoints: endpoints[dep] = set() #bb.warn(" Added endpoint 3 %s" % (dep)) @@ -1911,12 +2051,13 @@ class RunQueueExecuteScenequeue(RunQueueExecute): if point in self.rqdata.runq_setscene_tids: sq_revdeps_new[point] = tasks tasks = set() + continue for dep in self.rqdata.runtaskentries[point].depends: if point in sq_revdeps[dep]: 
sq_revdeps[dep].remove(point) if tasks: sq_revdeps_new[dep] |= tasks - if (len(sq_revdeps[dep]) == 0 or len(sq_revdeps_new[dep]) != 0) and dep not in self.rqdata.runq_setscene_tids: + if len(sq_revdeps[dep]) == 0 and dep not in self.rqdata.runq_setscene_tids: newendpoints[dep] = task if len(newendpoints) != 0: process_endpoints(newendpoints) @@ -2072,7 +2213,7 @@ class RunQueueExecuteScenequeue(RunQueueExecute): sq_taskname.append(taskname) sq_task.append(tid) call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)" - locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.expanded_data } + locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.data } valid = bb.utils.better_eval(call, locs) valid_new = stamppresent @@ -2199,14 +2340,16 @@ class RunQueueExecuteScenequeue(RunQueueExecute): startevent = sceneQueueTaskStarted(task, self.stats, self.rq) bb.event.fire(startevent, self.cfgData) + taskdepdata = self.build_taskdepdata(task) + taskdep = self.rqdata.dataCaches[mc].task_deps[taskfn] if 'fakeroot' in taskdep and taskname in taskdep['fakeroot'] and not self.cooker.configuration.dry_run: if not mc in self.rq.fakeworker: self.rq.start_fakeworker(self, mc) - self.rq.fakeworker[mc].process.stdin.write(b"" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), None)) + b"") + self.rq.fakeworker[mc].process.stdin.write(b"" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"") self.rq.fakeworker[mc].process.stdin.flush() else: - self.rq.worker[mc].process.stdin.write(b"" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), None)) + b"") + self.rq.worker[mc].process.stdin.write(b"" + pickle.dumps((taskfn, task, taskname, True, self.cooker.collection.get_file_appends(taskfn), taskdepdata, False)) + b"") self.rq.worker[mc].process.stdin.flush() self.build_stamps[task] = bb.build.stampfile(taskname, self.rqdata.dataCaches[mc], taskfn, noextra=True) @@ -2241,6 +2384,44 @@ class RunQueueExecuteScenequeue(RunQueueExecute): def runqueue_process_waitpid(self, task, status): RunQueueExecute.runqueue_process_waitpid(self, task, status) + + def build_taskdepdata(self, task): + def getsetscenedeps(tid): + deps = set() + (mc, fn, taskname, _) = split_tid_mcfn(tid) + realtid = tid + "_setscene" + idepends = self.rqdata.taskData[mc].taskentries[realtid].idepends + for (depname, idependtask) in idepends: + if depname not in self.rqdata.taskData[mc].build_targets: + continue + + depfn = self.rqdata.taskData[mc].build_targets[depname][0] + if depfn is None: + continue + deptid = depfn + ":" + idependtask.replace("_setscene", "") + deps.add(deptid) + return deps + + taskdepdata = {} + next = getsetscenedeps(task) + next.add(task) + while next: + additional = [] + for revdep in next: + (mc, fn, taskname, taskfn) = split_tid_mcfn(revdep) + pn = self.rqdata.dataCaches[mc].pkg_fn[taskfn] + deps = getsetscenedeps(revdep) + provides = self.rqdata.dataCaches[mc].fn_provides[taskfn] + taskhash = self.rqdata.runtaskentries[revdep].hash + taskdepdata[revdep] = [pn, taskname, fn, deps, provides, taskhash] + for revdep2 in deps: + if revdep2 not in taskdepdata: + additional.append(revdep2) + next = additional + + #bb.note("Task %s: " % task + str(taskdepdata).replace("], ", "],\n")) + return taskdepdata + class TaskFailure(Exception): """ 
Exception raised when a task in a runqueue fails @@ -2406,9 +2587,9 @@ class runQueuePipe(): self.input.close() def get_setscene_enforce_whitelist(d): - if d.getVar('BB_SETSCENE_ENFORCE', True) != '1': + if d.getVar('BB_SETSCENE_ENFORCE') != '1': return None - whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST", True) or "").split() + whitelist = (d.getVar("BB_SETSCENE_ENFORCE_WHITELIST") or "").split() outlist = [] for item in whitelist[:]: if item.startswith('%:'): diff --git a/import-layers/yocto-poky/bitbake/lib/bb/server/process.py b/import-layers/yocto-poky/bitbake/lib/bb/server/process.py index 982fcf71c..c3c1450a5 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/server/process.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/server/process.py @@ -92,6 +92,8 @@ class ProcessServer(Process, BaseImplServer): self.event = EventAdapter(event_queue) self.featurelist = featurelist self.quit = False + self.heartbeat_seconds = 1 # default, BB_HEARTBEAT_EVENT will be checked once we have a datastore. + self.next_heartbeat = time.time() self.quitin, self.quitout = Pipe() self.event_handle = multiprocessing.Value("i") @@ -101,6 +103,14 @@ class ProcessServer(Process, BaseImplServer): self.event_queue.put(event) self.event_handle.value = bb.event.register_UIHhandler(self, True) + heartbeat_event = self.cooker.data.getVar('BB_HEARTBEAT_EVENT') + if heartbeat_event: + try: + self.heartbeat_seconds = float(heartbeat_event) + except: + # Throwing an exception here causes bitbake to hang. + # Just warn about the invalid setting and continue + bb.warn('Ignoring invalid BB_HEARTBEAT_EVENT=%s, must be a float specifying seconds.' % heartbeat_event) bb.cooker.server_main(self.cooker, self.main) def main(self): @@ -160,6 +170,21 @@ class ProcessServer(Process, BaseImplServer): del self._idlefuns[function] self.quit = True + # Create new heartbeat event? + now = time.time() + if now >= self.next_heartbeat: + # We might have missed heartbeats. Just trigger once in + # that case and continue after the usual delay. + self.next_heartbeat += self.heartbeat_seconds + if self.next_heartbeat <= now: + self.next_heartbeat = now + self.heartbeat_seconds + heartbeat = bb.event.HeartbeatEvent(now) + bb.event.fire(heartbeat, self.cooker.data) + if nextsleep and now + nextsleep > self.next_heartbeat: + # Shorten timeout so that we we wake up in time for + # the heartbeat. + nextsleep = self.next_heartbeat - now + if nextsleep is not None: select.select(fds,[],[],nextsleep) @@ -199,7 +224,6 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection): if isinstance(event, logging.LogRecord): logger.handle(event) - signal.signal(signal.SIGINT, signal.SIG_IGN) self.procserver.stop() while self.procserver.is_alive(): @@ -209,6 +233,9 @@ class BitBakeProcessServerConnection(BitBakeBaseServerConnection): self.ui_channel.close() self.event_queue.close() self.event_queue.setexit() + # XXX: Call explicity close in _writer to avoid + # fd leakage because isn't called on Queue.close() + self.event_queue._writer.close() # Wrap Queue to provide API which isn't server implementation specific class ProcessEventQueue(multiprocessing.queues.Queue): @@ -240,7 +267,6 @@ class ProcessEventQueue(multiprocessing.queues.Queue): sys.exit(1) return None - class BitBakeServer(BitBakeBaseServer): def initServer(self, single_use=True): # establish communication channels. 
We use bidirectional pipes for diff --git a/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py b/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py index 452f14bb3..a06007f5a 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/server/xmlrpc.py @@ -190,7 +190,7 @@ class BitBakeXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): self.send_header("Content-type", "text/plain") self.send_header("Content-length", str(len(response))) self.end_headers() - self.wfile.write(response) + self.wfile.write(bytes(response, 'utf-8')) class XMLRPCProxyServer(BaseImplServer): diff --git a/import-layers/yocto-poky/bitbake/lib/bb/siggen.py b/import-layers/yocto-poky/bitbake/lib/bb/siggen.py index 542bbb9d1..f71190ad4 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/siggen.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/siggen.py @@ -5,6 +5,8 @@ import re import tempfile import pickle import bb.data +import difflib +import simplediff from bb.checksum import FileChecksumCache logger = logging.getLogger('BitBake.SigGen') @@ -13,7 +15,7 @@ def init(d): siggens = [obj for obj in globals().values() if type(obj) is type and issubclass(obj, SignatureGenerator)] - desired = d.getVar("BB_SIGNATURE_HANDLER", True) or "noop" + desired = d.getVar("BB_SIGNATURE_HANDLER") or "noop" for sg in siggens: if desired == sg.name: return sg(d) @@ -82,10 +84,10 @@ class SignatureGeneratorBasic(SignatureGenerator): self.gendeps = {} self.lookupcache = {} self.pkgnameextract = re.compile("(?P.*)\..*") - self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split()) + self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST") or "").split()) self.taskwhitelist = None self.init_rundepcheck(data) - checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE", True) + checksum_cache_file = data.getVar("BB_HASH_CHECKSUM_CACHE_FILE") if checksum_cache_file: self.checksum_cache = FileChecksumCache() self.checksum_cache.init_cache(data, checksum_cache_file) @@ -93,7 +95,7 @@ class SignatureGeneratorBasic(SignatureGenerator): self.checksum_cache = None def init_rundepcheck(self, data): - self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None + self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST") or None if self.taskwhitelist: self.twl = re.compile(self.taskwhitelist) else: @@ -101,6 +103,7 @@ class SignatureGeneratorBasic(SignatureGenerator): def _build_data(self, fn, d): + ignore_mismatch = ((d.getVar("BB_HASH_IGNORE_MISMATCH") or '') == '1') tasklist, gendeps, lookupcache = bb.data.generate_dependencies(d) taskdeps = {} @@ -135,7 +138,7 @@ class SignatureGeneratorBasic(SignatureGenerator): data = data + str(var) datahash = hashlib.md5(data.encode("utf-8")).hexdigest() k = fn + "." + task - if k in self.basehash and self.basehash[k] != datahash: + if not ignore_mismatch and k in self.basehash and self.basehash[k] != datahash: bb.error("When reparsing %s, the basehash value changed from %s to %s. The metadata is not deterministic and this needs to be fixed." 
% (k, self.basehash[k], datahash)) self.basehash[k] = datahash taskdeps[task] = alldeps @@ -154,13 +157,15 @@ class SignatureGeneratorBasic(SignatureGenerator): try: taskdeps = self._build_data(fn, d) + except bb.parse.SkipRecipe: + raise except: bb.warn("Error during finalise of %s" % fn) raise #Slow but can be useful for debugging mismatched basehashes #for task in self.taskdeps[fn]: - # self.dump_sigtask(fn, task, d.getVar("STAMP", True), False) + # self.dump_sigtask(fn, task, d.getVar("STAMP"), False) for task in taskdeps: d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task]) @@ -306,8 +311,8 @@ class SignatureGeneratorBasic(SignatureGenerator): pass raise err - def dump_sigs(self, dataCaches, options): - for fn in self.taskdeps: + def dump_sigfn(self, fn, dataCaches, options): + if fn in self.taskdeps: for task in self.taskdeps[fn]: tid = fn + ":" + task (mc, _, _) = bb.runqueue.split_tid(tid) @@ -345,16 +350,67 @@ class SignatureGeneratorBasicHash(SignatureGeneratorBasic): def dump_this_task(outfile, d): import bb.parse - fn = d.getVar("BB_FILENAME", True) - task = "do_" + d.getVar("BB_CURRENTTASK", True) + fn = d.getVar("BB_FILENAME") + task = "do_" + d.getVar("BB_CURRENTTASK") referencestamp = bb.build.stamp_internal(task, d, None, True) bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile:" + referencestamp) +def init_colors(enable_color): + """Initialise colour dict for passing to compare_sigfiles()""" + # First set up the colours + colors = {'color_title': '\033[1;37;40m', + 'color_default': '\033[0;37;40m', + 'color_add': '\033[1;32;40m', + 'color_remove': '\033[1;31;40m', + } + # Leave all keys present but clear the values + if not enable_color: + for k in colors.keys(): + colors[k] = '' + return colors + +def worddiff_str(oldstr, newstr, colors=None): + if not colors: + colors = init_colors(False) + diff = simplediff.diff(oldstr.split(' '), newstr.split(' ')) + ret = [] + for change, value in diff: + value = ' '.join(value) + if change == '=': + ret.append(value) + elif change == '+': + item = '{color_add}{{+{value}+}}{color_default}'.format(value=value, **colors) + ret.append(item) + elif change == '-': + item = '{color_remove}[-{value}-]{color_default}'.format(value=value, **colors) + ret.append(item) + whitespace_note = '' + if oldstr != newstr and ' '.join(oldstr.split()) == ' '.join(newstr.split()): + whitespace_note = ' (whitespace changed)' + return '"%s"%s' % (' '.join(ret), whitespace_note) + +def list_inline_diff(oldlist, newlist, colors=None): + if not colors: + colors = init_colors(False) + diff = simplediff.diff(oldlist, newlist) + ret = [] + for change, value in diff: + value = ' '.join(value) + if change == '=': + ret.append("'%s'" % value) + elif change == '+': + item = '{color_add}+{value}{color_default}'.format(value=value, **colors) + ret.append(item) + elif change == '-': + item = '{color_remove}-{value}{color_default}'.format(value=value, **colors) + ret.append(item) + return '[%s]' % (', '.join(ret)) + def clean_basepath(a): mc = None if a.startswith("multiconfig:"): _, mc, a = a.split(":", 2) - b = a.rsplit("/", 2)[1] + a.rsplit("/", 2)[2] + b = a.rsplit("/", 2)[1] + '/' + a.rsplit("/", 2)[2] if a.startswith("virtual:"): b = b + ":" + a.rsplit(":", 1)[0] if mc: @@ -373,9 +429,26 @@ def clean_basepaths_list(a): b.append(clean_basepath(x)) return b -def compare_sigfiles(a, b, recursecb = None): +def compare_sigfiles(a, b, recursecb=None, color=False, collapsed=False): output = [] + colors = init_colors(color) + def 
color_format(formatstr, **values): + """ + Return colour formatted string. + NOTE: call with the format string, not an already formatted string + containing values (otherwise you could have trouble with { and } + characters) + """ + if not formatstr.endswith('{color_default}'): + formatstr += '{color_default}' + # In newer python 3 versions you can pass both of these directly, + # but we only require 3.4 at the moment + formatparams = {} + formatparams.update(colors) + formatparams.update(values) + return formatstr.format(**formatparams) + with open(a, 'rb') as f: p1 = pickle.Unpickler(f) a_data = p1.load() @@ -429,39 +502,59 @@ def compare_sigfiles(a, b, recursecb = None): return changed, added, removed if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']: - output.append("basewhitelist changed from '%s' to '%s'" % (a_data['basewhitelist'], b_data['basewhitelist'])) + output.append(color_format("{color_title}basewhitelist changed{color_default} from '%s' to '%s'") % (a_data['basewhitelist'], b_data['basewhitelist'])) if a_data['basewhitelist'] and b_data['basewhitelist']: output.append("changed items: %s" % a_data['basewhitelist'].symmetric_difference(b_data['basewhitelist'])) if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']: - output.append("taskwhitelist changed from '%s' to '%s'" % (a_data['taskwhitelist'], b_data['taskwhitelist'])) + output.append(color_format("{color_title}taskwhitelist changed{color_default} from '%s' to '%s'") % (a_data['taskwhitelist'], b_data['taskwhitelist'])) if a_data['taskwhitelist'] and b_data['taskwhitelist']: output.append("changed items: %s" % a_data['taskwhitelist'].symmetric_difference(b_data['taskwhitelist'])) if a_data['taskdeps'] != b_data['taskdeps']: - output.append("Task dependencies changed from:\n%s\nto:\n%s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))) + output.append(color_format("{color_title}Task dependencies changed{color_default} from:\n%s\nto:\n%s") % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))) - if a_data['basehash'] != b_data['basehash']: - output.append("basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash'])) + if a_data['basehash'] != b_data['basehash'] and not collapsed: + output.append(color_format("{color_title}basehash changed{color_default} from %s to %s") % (a_data['basehash'], b_data['basehash'])) changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'], a_data['basewhitelist'] & b_data['basewhitelist']) if changed: for dep in changed: - output.append("List of dependencies for variable %s changed from '%s' to '%s'" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])) + output.append(color_format("{color_title}List of dependencies for variable %s changed from '{color_default}%s{color_title}' to '{color_default}%s{color_title}'") % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])) if a_data['gendeps'][dep] and b_data['gendeps'][dep]: output.append("changed items: %s" % a_data['gendeps'][dep].symmetric_difference(b_data['gendeps'][dep])) if added: for dep in added: - output.append("Dependency on variable %s was added" % (dep)) + output.append(color_format("{color_title}Dependency on variable %s was added") % (dep)) if removed: for dep in removed: - output.append("Dependency on Variable %s was removed" % (dep)) + output.append(color_format("{color_title}Dependency on Variable %s was removed") % (dep)) changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals']) if changed: for dep 
in changed: - output.append("Variable %s value changed from '%s' to '%s'" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])) + oldval = a_data['varvals'][dep] + newval = b_data['varvals'][dep] + if newval and oldval and ('\n' in oldval or '\n' in newval): + diff = difflib.unified_diff(oldval.splitlines(), newval.splitlines(), lineterm='') + # Cut off the first two lines, since we aren't interested in + # the old/new filename (they are blank anyway in this case) + difflines = list(diff)[2:] + if color: + # Add colour to diff output + for i, line in enumerate(difflines): + if line.startswith('+'): + line = color_format('{color_add}{line}', line=line) + difflines[i] = line + elif line.startswith('-'): + line = color_format('{color_remove}{line}', line=line) + difflines[i] = line + output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff='\n'.join(difflines))) + elif newval and oldval and (' ' in oldval or ' ' in newval): + output.append(color_format("{color_title}Variable {var} value changed:{color_default}\n{diff}", var=dep, diff=worddiff_str(oldval, newval, colors))) + else: + output.append(color_format("{color_title}Variable {var} value changed from '{color_default}{oldval}{color_title}' to '{color_default}{newval}{color_title}'{color_default}", var=dep, oldval=oldval, newval=newval)) if not 'file_checksum_values' in a_data: a_data['file_checksum_values'] = {} @@ -471,32 +564,38 @@ def compare_sigfiles(a, b, recursecb = None): changed, added, removed = file_checksums_diff(a_data['file_checksum_values'], b_data['file_checksum_values']) if changed: for f, old, new in changed: - output.append("Checksum for file %s changed from %s to %s" % (f, old, new)) + output.append(color_format("{color_title}Checksum for file %s changed{color_default} from %s to %s") % (f, old, new)) if added: for f in added: - output.append("Dependency on checksum of file %s was added" % (f)) + output.append(color_format("{color_title}Dependency on checksum of file %s was added") % (f)) if removed: for f in removed: - output.append("Dependency on checksum of file %s was removed" % (f)) + output.append(color_format("{color_title}Dependency on checksum of file %s was removed") % (f)) if not 'runtaskdeps' in a_data: a_data['runtaskdeps'] = {} if not 'runtaskdeps' in b_data: b_data['runtaskdeps'] = {} - if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']): - changed = ["Number of task dependencies changed"] - else: - changed = [] - for idx, task in enumerate(a_data['runtaskdeps']): - a = a_data['runtaskdeps'][idx] - b = b_data['runtaskdeps'][idx] - if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b]: - changed.append("%s with hash %s\n changed to\n%s with hash %s" % (a, a_data['runtaskhashes'][a], b, b_data['runtaskhashes'][b])) + if not collapsed: + if len(a_data['runtaskdeps']) != len(b_data['runtaskdeps']): + changed = ["Number of task dependencies changed"] + else: + changed = [] + for idx, task in enumerate(a_data['runtaskdeps']): + a = a_data['runtaskdeps'][idx] + b = b_data['runtaskdeps'][idx] + if a_data['runtaskhashes'][a] != b_data['runtaskhashes'][b] and not collapsed: + changed.append("%s with hash %s\n changed to\n%s with hash %s" % (clean_basepath(a), a_data['runtaskhashes'][a], clean_basepath(b), b_data['runtaskhashes'][b])) - if changed: - output.append("runtaskdeps changed from %s to %s" % (clean_basepaths_list(a_data['runtaskdeps']), clean_basepaths_list(b_data['runtaskdeps']))) - output.append("\n".join(changed)) + if changed: + 
clean_a = clean_basepaths_list(a_data['runtaskdeps']) + clean_b = clean_basepaths_list(b_data['runtaskdeps']) + if clean_a != clean_b: + output.append(color_format("{color_title}runtaskdeps changed:{color_default}\n%s") % list_inline_diff(clean_a, clean_b, colors)) + else: + output.append(color_format("{color_title}runtaskdeps changed:")) + output.append("\n".join(changed)) if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data: @@ -512,7 +611,7 @@ def compare_sigfiles(a, b, recursecb = None): #output.append("Dependency on task %s was replaced by %s with same hash" % (dep, bdep)) bdep_found = True if not bdep_found: - output.append("Dependency on task %s was added with hash %s" % (clean_basepath(dep), b[dep])) + output.append(color_format("{color_title}Dependency on task %s was added{color_default} with hash %s") % (clean_basepath(dep), b[dep])) if removed: for dep in removed: adep_found = False @@ -522,21 +621,25 @@ def compare_sigfiles(a, b, recursecb = None): #output.append("Dependency on task %s was replaced by %s with same hash" % (adep, dep)) adep_found = True if not adep_found: - output.append("Dependency on task %s was removed with hash %s" % (clean_basepath(dep), a[dep])) + output.append(color_format("{color_title}Dependency on task %s was removed{color_default} with hash %s") % (clean_basepath(dep), a[dep])) if changed: for dep in changed: - output.append("Hash for dependent task %s changed from %s to %s" % (clean_basepath(dep), a[dep], b[dep])) + if not collapsed: + output.append(color_format("{color_title}Hash for dependent task %s changed{color_default} from %s to %s") % (clean_basepath(dep), a[dep], b[dep])) if callable(recursecb): - # If a dependent hash changed, might as well print the line above and then defer to the changes in - # that hash since in all likelyhood, they're the same changes this task also saw. recout = recursecb(dep, a[dep], b[dep]) if recout: - output = [output[-1]] + recout + if collapsed: + output.extend(recout) + else: + # If a dependent hash changed, might as well print the line above and then defer to the changes in + # that hash since in all likelyhood, they're the same changes this task also saw. 
+ output = [output[-1]] + recout a_taint = a_data.get('taint', None) b_taint = b_data.get('taint', None) if a_taint != b_taint: - output.append("Taint (by forced/invalidated task) changed from %s to %s" % (a_taint, b_taint)) + output.append(color_format("{color_title}Taint (by forced/invalidated task) changed{color_default} from %s to %s") % (a_taint, b_taint)) return output diff --git a/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py b/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py index d8bdbcabf..8c96a5628 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/taskdata.py @@ -89,6 +89,19 @@ class TaskData: self.add_extra_deps(fn, dataCache) + # Common code for dep_name/depends = 'depends'/idepends and 'rdepends'/irdepends + def handle_deps(task, dep_name, depends, seen): + if dep_name in task_deps and task in task_deps[dep_name]: + ids = [] + for dep in task_deps[dep_name][task].split(): + if dep: + parts = dep.split(":") + if len(parts) != 2: + bb.msg.fatal("TaskData", "Error for %s:%s[%s], dependency %s in '%s' does not contain exactly one ':' character.\n Task '%s' should be specified in the form 'packagename:task'" % (fn, task, dep_name, dep, task_deps[dep_name][task], dep_name)) + ids.append((parts[0], parts[1])) + seen(parts[0]) + depends.extend(ids) + for task in task_deps['tasks']: tid = "%s:%s" % (fn, task) @@ -105,24 +118,8 @@ class TaskData: self.taskentries[tid].tdepends.extend(parentids) # Touch all intertask dependencies - if 'depends' in task_deps and task in task_deps['depends']: - ids = [] - for dep in task_deps['depends'][task].split(): - if dep: - if ":" not in dep: - bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. Task 'depends' should be specified in the form 'packagename:task'" % (fn, dep)) - ids.append(((dep.split(":")[0]), dep.split(":")[1])) - self.seen_build_target(dep.split(":")[0]) - self.taskentries[tid].idepends.extend(ids) - if 'rdepends' in task_deps and task in task_deps['rdepends']: - ids = [] - for dep in task_deps['rdepends'][task].split(): - if dep: - if ":" not in dep: - bb.msg.fatal("TaskData", "Error for %s, dependency %s does not contain ':' character\n. 
Task 'rdepends' should be specified in the form 'packagename:task'" % (fn, dep)) - ids.append(((dep.split(":")[0]), dep.split(":")[1])) - self.seen_run_target(dep.split(":")[0]) - self.taskentries[tid].irdepends.extend(ids) + handle_deps(task, 'depends', self.taskentries[tid].idepends, self.seen_build_target) + handle_deps(task, 'rdepends', self.taskentries[tid].irdepends, self.seen_run_target) # Work out build dependencies if not fn in self.depids: diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py index 14f0e2572..e30e78c15 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/codeparser.py @@ -49,6 +49,9 @@ class ReferenceTest(unittest.TestCase): def assertExecs(self, execs): self.assertEqual(self.execs, execs) + def assertContains(self, contains): + self.assertEqual(self.contains, contains) + class VariableReferenceTest(ReferenceTest): def parseExpression(self, exp): @@ -68,7 +71,7 @@ class VariableReferenceTest(ReferenceTest): def test_python_reference(self): self.setEmptyVars(["BAR"]) - self.parseExpression("${@bb.data.getVar('BAR', d, True) + 'foo'}") + self.parseExpression("${@d.getVar('BAR') + 'foo'}") self.assertReferences(set(["BAR"])) class ShellReferenceTest(ReferenceTest): @@ -201,6 +204,7 @@ class PythonReferenceTest(ReferenceTest): self.references = parsedvar.references | parser.references self.execs = parser.execs + self.contains = parser.contains @staticmethod def indent(value): @@ -209,17 +213,17 @@ be. These unit tests are testing snippets.""" return " " + value def test_getvar_reference(self): - self.parseExpression("bb.data.getVar('foo', d, True)") + self.parseExpression("d.getVar('foo')") self.assertReferences(set(["foo"])) self.assertExecs(set()) def test_getvar_computed_reference(self): - self.parseExpression("bb.data.getVar('f' + 'o' + 'o', d, True)") + self.parseExpression("d.getVar('f' + 'o' + 'o')") self.assertReferences(set()) self.assertExecs(set()) def test_getvar_exec_reference(self): - self.parseExpression("eval('bb.data.getVar(\"foo\", d, True)')") + self.parseExpression("eval('d.getVar(\"foo\")')") self.assertReferences(set()) self.assertExecs(set(["eval"])) @@ -265,15 +269,35 @@ be. 
These unit tests are testing snippets.""" self.assertExecs(set(["testget"])) del self.context["testget"] + def test_contains(self): + self.parseExpression('bb.utils.contains("TESTVAR", "one", "true", "false", d)') + self.assertContains({'TESTVAR': {'one'}}) + + def test_contains_multi(self): + self.parseExpression('bb.utils.contains("TESTVAR", "one two", "true", "false", d)') + self.assertContains({'TESTVAR': {'one two'}}) + + def test_contains_any(self): + self.parseExpression('bb.utils.contains_any("TESTVAR", "hello", "true", "false", d)') + self.assertContains({'TESTVAR': {'hello'}}) + + def test_contains_any_multi(self): + self.parseExpression('bb.utils.contains_any("TESTVAR", "one two three", "true", "false", d)') + self.assertContains({'TESTVAR': {'one', 'two', 'three'}}) + + def test_contains_filter(self): + self.parseExpression('bb.utils.filter("TESTVAR", "hello there world", d)') + self.assertContains({'TESTVAR': {'hello', 'there', 'world'}}) + class DependencyReferenceTest(ReferenceTest): pydata = """ -bb.data.getVar('somevar', d, True) +d.getVar('somevar') def test(d): foo = 'bar %s' % 'foo' def test2(d): - d.getVar(foo, True) + d.getVar(foo) d.getVar('bar', False) test2(d) @@ -285,9 +309,9 @@ def a(): test(d) -bb.data.expand(bb.data.getVar("something", False, d), d) -bb.data.expand("${inexpand} somethingelse", d) -bb.data.getVar(a(), d, False) +d.expand(d.getVar("something", False)) +d.expand("${inexpand} somethingelse") +d.getVar(a(), False) """ def test_python(self): @@ -370,6 +394,30 @@ esac self.assertEqual(deps, set(["oe_libinstall"])) + def test_contains_vardeps(self): + expr = '${@bb.utils.filter("TESTVAR", "somevalue anothervalue", d)} \ + ${@bb.utils.contains("TESTVAR", "testval testval2", "yetanothervalue", "", d)} \ + ${@bb.utils.contains("TESTVAR", "testval2 testval3", "blah", "", d)} \ + ${@bb.utils.contains_any("TESTVAR", "testval2 testval3", "lastone", "", d)}' + parsedvar = self.d.expandWithRefs(expr, None) + # Check contains + self.assertEqual(parsedvar.contains, {'TESTVAR': {'testval2 testval3', 'anothervalue', 'somevalue', 'testval testval2', 'testval2', 'testval3'}}) + # Check dependencies + self.d.setVar('ANOTHERVAR', expr) + self.d.setVar('TESTVAR', 'anothervalue testval testval2') + deps, values = bb.data.build_dependencies("ANOTHERVAR", set(self.d.keys()), set(), set(), self.d) + self.assertEqual(sorted(values.splitlines()), + sorted([expr, + 'TESTVAR{anothervalue} = Set', + 'TESTVAR{somevalue} = Unset', + 'TESTVAR{testval testval2} = Set', + 'TESTVAR{testval2 testval3} = Unset', + 'TESTVAR{testval2} = Set', + 'TESTVAR{testval3} = Unset' + ])) + # Check final value + self.assertEqual(self.d.getVar('ANOTHERVAR').split(), ['anothervalue', 'yetanothervalue', 'lastone']) + #Currently no wildcard support #def test_vardeps_wildcards(self): # self.d.setVar("oe_libinstall", "echo test") diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py index b54eb0679..a4a9dd30f 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/data.py @@ -77,13 +77,13 @@ class DataExpansions(unittest.TestCase): self.assertEqual(str(val), "boo value_of_foo") def test_python_snippet_getvar(self): - val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}") + val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") self.assertEqual(str(val), "value_of_foo value_of_bar") def test_python_unexpanded(self): self.d.setVar("bar", "${unsetvar}") - val = 
self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}") - self.assertEqual(str(val), "${@d.getVar('foo', True) + ' ${unsetvar}'}") + val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") + self.assertEqual(str(val), "${@d.getVar('foo') + ' ${unsetvar}'}") def test_python_snippet_syntax_error(self): self.d.setVar("FOO", "${@foo = 5}") @@ -99,7 +99,7 @@ class DataExpansions(unittest.TestCase): self.assertRaises(bb.data_smart.ExpansionError, self.d.getVar, "FOO", True) def test_value_containing_value(self): - val = self.d.expand("${@d.getVar('foo', True) + ' ${bar}'}") + val = self.d.expand("${@d.getVar('foo') + ' ${bar}'}") self.assertEqual(str(val), "value_of_foo value_of_bar") def test_reference_undefined_var(self): @@ -109,7 +109,7 @@ class DataExpansions(unittest.TestCase): def test_double_reference(self): self.d.setVar("BAR", "bar value") self.d.setVar("FOO", "${BAR} foo ${BAR}") - val = self.d.getVar("FOO", True) + val = self.d.getVar("FOO") self.assertEqual(str(val), "bar value foo bar value") def test_direct_recursion(self): @@ -129,12 +129,12 @@ class DataExpansions(unittest.TestCase): def test_incomplete_varexp_single_quotes(self): self.d.setVar("FOO", "sed -i -e 's:IP{:I${:g' $pc") - val = self.d.getVar("FOO", True) + val = self.d.getVar("FOO") self.assertEqual(str(val), "sed -i -e 's:IP{:I${:g' $pc") def test_nonstring(self): self.d.setVar("TEST", 5) - val = self.d.getVar("TEST", True) + val = self.d.getVar("TEST") self.assertEqual(str(val), "5") def test_rename(self): @@ -234,19 +234,19 @@ class TestConcat(unittest.TestCase): def test_prepend(self): self.d.setVar("TEST", "${VAL}") self.d.prependVar("TEST", "${FOO}:") - self.assertEqual(self.d.getVar("TEST", True), "foo:val") + self.assertEqual(self.d.getVar("TEST"), "foo:val") def test_append(self): self.d.setVar("TEST", "${VAL}") self.d.appendVar("TEST", ":${BAR}") - self.assertEqual(self.d.getVar("TEST", True), "val:bar") + self.assertEqual(self.d.getVar("TEST"), "val:bar") def test_multiple_append(self): self.d.setVar("TEST", "${VAL}") self.d.prependVar("TEST", "${FOO}:") self.d.appendVar("TEST", ":val2") self.d.appendVar("TEST", ":${BAR}") - self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar") + self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar") class TestConcatOverride(unittest.TestCase): def setUp(self): @@ -258,62 +258,66 @@ class TestConcatOverride(unittest.TestCase): def test_prepend(self): self.d.setVar("TEST", "${VAL}") self.d.setVar("TEST_prepend", "${FOO}:") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "foo:val") + self.assertEqual(self.d.getVar("TEST"), "foo:val") def test_append(self): self.d.setVar("TEST", "${VAL}") self.d.setVar("TEST_append", ":${BAR}") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "val:bar") + self.assertEqual(self.d.getVar("TEST"), "val:bar") def test_multiple_append(self): self.d.setVar("TEST", "${VAL}") self.d.setVar("TEST_prepend", "${FOO}:") self.d.setVar("TEST_append", ":val2") self.d.setVar("TEST_append", ":${BAR}") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "foo:val:val2:bar") + self.assertEqual(self.d.getVar("TEST"), "foo:val:val2:bar") def test_append_unset(self): self.d.setVar("TEST_prepend", "${FOO}:") self.d.setVar("TEST_append", ":val2") self.d.setVar("TEST_append", ":${BAR}") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "foo::val2:bar") + self.assertEqual(self.d.getVar("TEST"), "foo::val2:bar") def test_remove(self): 
self.d.setVar("TEST", "${VAL} ${BAR}") self.d.setVar("TEST_remove", "val") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "bar") + self.assertEqual(self.d.getVar("TEST"), "bar") + + def test_remove_cleared(self): + self.d.setVar("TEST", "${VAL} ${BAR}") + self.d.setVar("TEST_remove", "val") + self.d.setVar("TEST", "${VAL} ${BAR}") + self.assertEqual(self.d.getVar("TEST"), "val bar") + + # Ensure the value is unchanged if we have an inactive remove override + # (including that whitespace is preserved) + def test_remove_inactive_override(self): + self.d.setVar("TEST", "${VAL} ${BAR} 123") + self.d.setVar("TEST_remove_inactiveoverride", "val") + self.assertEqual(self.d.getVar("TEST"), "val bar 123") def test_doubleref_remove(self): self.d.setVar("TEST", "${VAL} ${BAR}") self.d.setVar("TEST_remove", "val") self.d.setVar("TEST_TEST", "${TEST} ${TEST}") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST_TEST", True), "bar bar") + self.assertEqual(self.d.getVar("TEST_TEST"), "bar bar") def test_empty_remove(self): self.d.setVar("TEST", "") self.d.setVar("TEST_remove", "val") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "") + self.assertEqual(self.d.getVar("TEST"), "") def test_remove_expansion(self): self.d.setVar("BAR", "Z") self.d.setVar("TEST", "${BAR}/X Y") self.d.setVar("TEST_remove", "${BAR}/X") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "Y") + self.assertEqual(self.d.getVar("TEST"), "Y") def test_remove_expansion_items(self): self.d.setVar("TEST", "A B C D") self.d.setVar("BAR", "B D") self.d.setVar("TEST_remove", "${BAR}") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "A C") + self.assertEqual(self.d.getVar("TEST"), "A C") class TestOverrides(unittest.TestCase): def setUp(self): @@ -322,60 +326,53 @@ class TestOverrides(unittest.TestCase): self.d.setVar("TEST", "testvalue") def test_no_override(self): - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "testvalue") + self.assertEqual(self.d.getVar("TEST"), "testvalue") def test_one_override(self): self.d.setVar("TEST_bar", "testvalue2") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "testvalue2") + self.assertEqual(self.d.getVar("TEST"), "testvalue2") def test_one_override_unset(self): self.d.setVar("TEST2_bar", "testvalue2") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST2", True), "testvalue2") + + self.assertEqual(self.d.getVar("TEST2"), "testvalue2") self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST2', 'OVERRIDES', 'TEST2_bar']) def test_multiple_override(self): self.d.setVar("TEST_bar", "testvalue2") self.d.setVar("TEST_local", "testvalue3") self.d.setVar("TEST_foo", "testvalue4") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "testvalue3") + self.assertEqual(self.d.getVar("TEST"), "testvalue3") self.assertCountEqual(list(self.d.keys()), ['TEST', 'TEST_foo', 'OVERRIDES', 'TEST_bar', 'TEST_local']) def test_multiple_combined_overrides(self): self.d.setVar("TEST_local_foo_bar", "testvalue3") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST", True), "testvalue3") + self.assertEqual(self.d.getVar("TEST"), "testvalue3") def test_multiple_overrides_unset(self): self.d.setVar("TEST2_local_foo_bar", "testvalue3") - bb.data.update_data(self.d) - self.assertEqual(self.d.getVar("TEST2", True), "testvalue3") + self.assertEqual(self.d.getVar("TEST2"), "testvalue3") 
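The test churn above tracks two datastore API changes in this release: d.getVar()/getVarFlag() now expand by default (the trailing True argument is dropped), and the bb.data.update_data() calls disappear because _append/_prepend/_remove and override resolution are applied when a variable is read. A minimal sketch of the new-style usage, assuming bitbake's lib/ directory is importable; the exact whitespace left behind by a _remove is an implementation detail and not asserted here:

    import bb.data

    d = bb.data.init()
    d.setVar("VAL", "val")
    d.setVar("GREETING", "hello ${VAL}")
    print(d.getVar("GREETING"))         # "hello val" - expansion is now the default
    print(d.getVar("GREETING", False))  # "hello ${VAL}" - pass False to get the raw value

    d.setVar("TEST", "A B C")
    d.setVar("TEST_append", " D")
    d.setVar("TEST_remove", "B")
    print(d.getVar("TEST"))             # append/remove applied on read; no update_data() needed
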
def test_keyexpansion_override(self): self.d.setVar("LOCAL", "local") self.d.setVar("TEST_bar", "testvalue2") self.d.setVar("TEST_${LOCAL}", "testvalue3") self.d.setVar("TEST_foo", "testvalue4") - bb.data.update_data(self.d) bb.data.expandKeys(self.d) - self.assertEqual(self.d.getVar("TEST", True), "testvalue3") + self.assertEqual(self.d.getVar("TEST"), "testvalue3") def test_rename_override(self): self.d.setVar("ALTERNATIVE_ncurses-tools_class-target", "a") self.d.setVar("OVERRIDES", "class-target") - bb.data.update_data(self.d) self.d.renameVar("ALTERNATIVE_ncurses-tools", "ALTERNATIVE_lib32-ncurses-tools") - self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools", True), "a") + self.assertEqual(self.d.getVar("ALTERNATIVE_lib32-ncurses-tools"), "a") def test_underscore_override(self): self.d.setVar("TEST_bar", "testvalue2") self.d.setVar("TEST_some_val", "testvalue3") self.d.setVar("TEST_foo", "testvalue4") self.d.setVar("OVERRIDES", "foo:bar:some_val") - self.assertEqual(self.d.getVar("TEST", True), "testvalue3") + self.assertEqual(self.d.getVar("TEST"), "testvalue3") class TestKeyExpansion(unittest.TestCase): def setUp(self): @@ -389,7 +386,7 @@ class TestKeyExpansion(unittest.TestCase): with LogRecord() as logs: bb.data.expandKeys(self.d) self.assertTrue(logContains("Variable key VAL_${FOO} (A) replaces original key VAL_foo (B)", logs)) - self.assertEqual(self.d.getVar("VAL_foo", True), "A") + self.assertEqual(self.d.getVar("VAL_foo"), "A") class TestFlags(unittest.TestCase): def setUp(self): @@ -444,3 +441,167 @@ class Contains(unittest.TestCase): self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x", True, False, self.d)) self.assertFalse(bb.utils.contains_any("SOMEFLAG", "x y z", True, False, self.d)) + + +class Serialize(unittest.TestCase): + + def test_serialize(self): + import tempfile + import pickle + d = bb.data.init() + d.enableTracking() + d.setVar('HELLO', 'world') + d.setVarFlag('HELLO', 'other', 'planet') + with tempfile.NamedTemporaryFile(delete=False) as tmpfile: + tmpfilename = tmpfile.name + pickle.dump(d, tmpfile) + + with open(tmpfilename, 'rb') as f: + newd = pickle.load(f) + + os.remove(tmpfilename) + + self.assertEqual(d, newd) + self.assertEqual(newd.getVar('HELLO'), 'world') + self.assertEqual(newd.getVarFlag('HELLO', 'other'), 'planet') + + +# Remote datastore tests +# These really only test the interface, since in actual usage we have a +# tinfoil connector that does everything over RPC, and this doesn't test +# that. 
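The comment block above is worth unpacking: the _remote_data hook lets any object implementing the connector methods exercised here (getVar, getKeys, getVarHistory, expandPythonRef, setVar, setVarFlag, delVar, delVarFlag, renameVar) stand in for a remote datastore, and a plain local datastore then proxies reads and writes through it. Inside lib/bb/tests/data.py, alongside the TestConnector class defined just below, a condensed version of the Remote test would read:

    import bb.data

    server_d = bb.data.init()   # the datastore that actually holds the values
    client_d = bb.data.init()   # a local proxy
    client_d.setVar('_remote_data', TestConnector(server_d))

    server_d.setVar('HELLO', 'world')
    print(client_d.getVar('HELLO'))   # 'world' - reads are forwarded through the connector
    client_d.setVar('HELLO', 'moon')
    print(server_d.getVar('HELLO'))   # 'moon' - writes are forwarded back to the server side
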
+ +class TestConnector: + d = None + def __init__(self, d): + self.d = d + def getVar(self, name): + return self.d._findVar(name) + def getKeys(self): + return set(self.d.keys()) + def getVarHistory(self, name): + return self.d.varhistory.variable(name) + def expandPythonRef(self, varname, expr, d): + localdata = self.d.createCopy() + for key in d.localkeys(): + localdata.setVar(d.getVar(key)) + varparse = bb.data_smart.VariableParse(varname, localdata) + return varparse.python_sub(expr) + def setVar(self, name, value): + self.d.setVar(name, value) + def setVarFlag(self, name, flag, value): + self.d.setVarFlag(name, flag, value) + def delVar(self, name): + self.d.delVar(name) + return False + def delVarFlag(self, name, flag): + self.d.delVarFlag(name, flag) + return False + def renameVar(self, name, newname): + self.d.renameVar(name, newname) + return False + +class Remote(unittest.TestCase): + def test_remote(self): + + d1 = bb.data.init() + d1.enableTracking() + d2 = bb.data.init() + d2.enableTracking() + connector = TestConnector(d1) + + d2.setVar('_remote_data', connector) + + d1.setVar('HELLO', 'world') + d1.setVarFlag('OTHER', 'flagname', 'flagvalue') + self.assertEqual(d2.getVar('HELLO'), 'world') + self.assertEqual(d2.expand('${HELLO}'), 'world') + self.assertEqual(d2.expand('${@d.getVar("HELLO")}'), 'world') + self.assertIn('flagname', d2.getVarFlags('OTHER')) + self.assertEqual(d2.getVarFlag('OTHER', 'flagname'), 'flagvalue') + self.assertEqual(d1.varhistory.variable('HELLO'), d2.varhistory.variable('HELLO')) + # Test setVar on client side affects server + d2.setVar('HELLO', 'other-world') + self.assertEqual(d1.getVar('HELLO'), 'other-world') + # Test setVarFlag on client side affects server + d2.setVarFlag('HELLO', 'flagname', 'flagvalue') + self.assertEqual(d1.getVarFlag('HELLO', 'flagname'), 'flagvalue') + # Test client side data is incorporated in python expansion (which is done on server) + d2.setVar('FOO', 'bar') + self.assertEqual(d2.expand('${@d.getVar("FOO")}'), 'bar') + # Test overrides work + d1.setVar('FOO_test', 'baz') + d1.appendVar('OVERRIDES', ':test') + self.assertEqual(d2.getVar('FOO'), 'baz') + + +# Remote equivalents of local test classes +# Note that these aren't perfect since we only test in one direction + +class RemoteDataExpansions(DataExpansions): + def setUp(self): + self.d1 = bb.data.init() + self.d = bb.data.init() + self.d1["foo"] = "value_of_foo" + self.d1["bar"] = "value_of_bar" + self.d1["value_of_foo"] = "value_of_'value_of_foo'" + connector = TestConnector(self.d1) + self.d.setVar('_remote_data', connector) + +class TestRemoteNestedExpansions(TestNestedExpansions): + def setUp(self): + self.d1 = bb.data.init() + self.d = bb.data.init() + self.d1["foo"] = "foo" + self.d1["bar"] = "bar" + self.d1["value_of_foobar"] = "187" + connector = TestConnector(self.d1) + self.d.setVar('_remote_data', connector) + +class TestRemoteConcat(TestConcat): + def setUp(self): + self.d1 = bb.data.init() + self.d = bb.data.init() + self.d1.setVar("FOO", "foo") + self.d1.setVar("VAL", "val") + self.d1.setVar("BAR", "bar") + connector = TestConnector(self.d1) + self.d.setVar('_remote_data', connector) + +class TestRemoteConcatOverride(TestConcatOverride): + def setUp(self): + self.d1 = bb.data.init() + self.d = bb.data.init() + self.d1.setVar("FOO", "foo") + self.d1.setVar("VAL", "val") + self.d1.setVar("BAR", "bar") + connector = TestConnector(self.d1) + self.d.setVar('_remote_data', connector) + +class TestRemoteOverrides(TestOverrides): + def setUp(self): + self.d1 
= bb.data.init() + self.d = bb.data.init() + self.d1.setVar("OVERRIDES", "foo:bar:local") + self.d1.setVar("TEST", "testvalue") + connector = TestConnector(self.d1) + self.d.setVar('_remote_data', connector) + +class TestRemoteKeyExpansion(TestKeyExpansion): + def setUp(self): + self.d1 = bb.data.init() + self.d = bb.data.init() + self.d1.setVar("FOO", "foo") + self.d1.setVar("BAR", "foo") + connector = TestConnector(self.d1) + self.d.setVar('_remote_data', connector) + +class TestRemoteFlags(TestFlags): + def setUp(self): + self.d1 = bb.data.init() + self.d = bb.data.init() + self.d1.setVar("foo", "value of foo") + self.d1.setVarFlag("foo", "flag1", "value of flag1") + self.d1.setVarFlag("foo", "flag2", "value of flag2") + connector = TestConnector(self.d1) + self.d.setVar('_remote_data', connector) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py index 0fd2c0216..5a8d89285 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/fetch.py @@ -793,6 +793,7 @@ class FetchLatestVersionTest(FetcherTest): ud = bb.fetch2.FetchData(k[1], self.d) pupver= ud.method.latest_versionstring(ud, self.d) verstring = pupver[0] + self.assertTrue(verstring, msg="Could not find upstream version") r = bb.utils.vercmp_string(v, verstring) self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) @@ -804,6 +805,7 @@ class FetchLatestVersionTest(FetcherTest): ud = bb.fetch2.FetchData(k[1], self.d) pupver = ud.method.latest_versionstring(ud, self.d) verstring = pupver[0] + self.assertTrue(verstring, msg="Could not find upstream version") r = bb.utils.vercmp_string(v, verstring) self.assertTrue(r == -1 or r == 0, msg="Package %s, version: %s <= %s" % (k[0], v, verstring)) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py b/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py index 0b2706af0..ab6ca9031 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/tests/parse.py @@ -58,9 +58,9 @@ C = "3" def test_parse_simple(self): f = self.parsehelper(self.testfile) d = bb.parse.handle(f.name, self.d)[''] - self.assertEqual(d.getVar("A", True), "1") - self.assertEqual(d.getVar("B", True), "2") - self.assertEqual(d.getVar("C", True), "3") + self.assertEqual(d.getVar("A"), "1") + self.assertEqual(d.getVar("B"), "2") + self.assertEqual(d.getVar("C"), "3") def test_parse_incomplete_function(self): testfileB = self.testfile.replace("}", "") @@ -80,9 +80,9 @@ unset B[flag] def test_parse_unset(self): f = self.parsehelper(self.unsettest) d = bb.parse.handle(f.name, self.d)[''] - self.assertEqual(d.getVar("A", True), None) - self.assertEqual(d.getVarFlag("A","flag", True), None) - self.assertEqual(d.getVar("B", True), "2") + self.assertEqual(d.getVar("A"), None) + self.assertEqual(d.getVarFlag("A","flag"), None) + self.assertEqual(d.getVar("B"), "2") overridetest = """ @@ -95,11 +95,11 @@ PN = "gtk+" def test_parse_overrides(self): f = self.parsehelper(self.overridetest) d = bb.parse.handle(f.name, self.d)[''] - self.assertEqual(d.getVar("RRECOMMENDS", True), "b") + self.assertEqual(d.getVar("RRECOMMENDS"), "b") bb.data.expandKeys(d) - self.assertEqual(d.getVar("RRECOMMENDS", True), "b") + self.assertEqual(d.getVar("RRECOMMENDS"), "b") d.setVar("RRECOMMENDS_gtk+", "c") - self.assertEqual(d.getVar("RRECOMMENDS", True), "c") + self.assertEqual(d.getVar("RRECOMMENDS"), "c") 
overridetest2 = """ EXTRA_OECONF = "" @@ -112,7 +112,7 @@ EXTRA_OECONF_append = " c" d = bb.parse.handle(f.name, self.d)[''] d.appendVar("EXTRA_OECONF", " d") d.setVar("OVERRIDES", "class-target") - self.assertEqual(d.getVar("EXTRA_OECONF", True), "b c d") + self.assertEqual(d.getVar("EXTRA_OECONF"), "b c d") overridetest3 = """ DESCRIPTION = "A" @@ -124,11 +124,11 @@ PN = "bc" f = self.parsehelper(self.overridetest3) d = bb.parse.handle(f.name, self.d)[''] bb.data.expandKeys(d) - self.assertEqual(d.getVar("DESCRIPTION_bc-dev", True), "A B") + self.assertEqual(d.getVar("DESCRIPTION_bc-dev"), "A B") d.setVar("DESCRIPTION", "E") d.setVar("DESCRIPTION_bc-dev", "C D") d.setVar("OVERRIDES", "bc-dev") - self.assertEqual(d.getVar("DESCRIPTION", True), "C D") + self.assertEqual(d.getVar("DESCRIPTION"), "C D") classextend = """ @@ -159,6 +159,6 @@ python () { alldata = bb.parse.handle(f.name, self.d) d1 = alldata[''] d2 = alldata[cls.name] - self.assertEqual(d1.getVar("VAR_var", True), "B") - self.assertEqual(d2.getVar("VAR_var", True), None) + self.assertEqual(d1.getVar("VAR_var"), "B") + self.assertEqual(d2.getVar("VAR_var"), None) diff --git a/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py b/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py index 9fa5b5b3d..928333a50 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/tinfoil.py @@ -1,6 +1,6 @@ # tinfoil: a simple wrapper around cooker for bitbake-based command-line utilities # -# Copyright (C) 2012 Intel Corporation +# Copyright (C) 2012-2017 Intel Corporation # Copyright (C) 2011 Mentor Graphics Corporation # # This program is free software; you can redistribute it and/or modify @@ -17,50 +17,210 @@ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
import logging -import warnings import os import sys +import atexit +import re +from collections import OrderedDict, defaultdict import bb.cache import bb.cooker import bb.providers +import bb.taskdata import bb.utils -from bb.cooker import state, BBCooker, CookerFeatures +import bb.command +import bb.remotedata from bb.cookerdata import CookerConfiguration, ConfigParameters +from bb.main import setup_bitbake, BitBakeConfigParameters, BBMainException import bb.fetch2 + +# We need this in order to shut down the connection to the bitbake server, +# otherwise the process will never properly exit +_server_connections = [] +def _terminate_connections(): + for connection in _server_connections: + connection.terminate() +atexit.register(_terminate_connections) + +class TinfoilUIException(Exception): + """Exception raised when the UI returns non-zero from its main function""" + def __init__(self, returncode): + self.returncode = returncode + def __repr__(self): + return 'UI module main returned %d' % self.returncode + +class TinfoilCommandFailed(Exception): + """Exception raised when run_command fails""" + +class TinfoilDataStoreConnector: + + def __init__(self, tinfoil, dsindex): + self.tinfoil = tinfoil + self.dsindex = dsindex + def getVar(self, name): + value = self.tinfoil.run_command('dataStoreConnectorFindVar', self.dsindex, name) + overrides = None + if isinstance(value, dict): + if '_connector_origtype' in value: + value['_content'] = self.tinfoil._reconvert_type(value['_content'], value['_connector_origtype']) + del value['_connector_origtype'] + if '_connector_overrides' in value: + overrides = value['_connector_overrides'] + del value['_connector_overrides'] + return value, overrides + def getKeys(self): + return set(self.tinfoil.run_command('dataStoreConnectorGetKeys', self.dsindex)) + def getVarHistory(self, name): + return self.tinfoil.run_command('dataStoreConnectorGetVarHistory', self.dsindex, name) + def expandPythonRef(self, varname, expr, d): + ds = bb.remotedata.RemoteDatastores.transmit_datastore(d) + ret = self.tinfoil.run_command('dataStoreConnectorExpandPythonRef', ds, varname, expr) + return ret + def setVar(self, varname, value): + if self.dsindex is None: + self.tinfoil.run_command('setVariable', varname, value) + else: + # Not currently implemented - indicate that setting should + # be redirected to local side + return True + def setVarFlag(self, varname, flagname, value): + if self.dsindex is None: + self.tinfoil.run_command('dataStoreConnectorSetVarFlag', self.dsindex, varname, flagname, value) + else: + # Not currently implemented - indicate that setting should + # be redirected to local side + return True + def delVar(self, varname): + if self.dsindex is None: + self.tinfoil.run_command('dataStoreConnectorDelVar', self.dsindex, varname) + else: + # Not currently implemented - indicate that setting should + # be redirected to local side + return True + def delVarFlag(self, varname, flagname): + if self.dsindex is None: + self.tinfoil.run_command('dataStoreConnectorDelVar', self.dsindex, varname, flagname) + else: + # Not currently implemented - indicate that setting should + # be redirected to local side + return True + def renameVar(self, name, newname): + if self.dsindex is None: + self.tinfoil.run_command('dataStoreConnectorRenameVar', self.dsindex, name, newname) + else: + # Not currently implemented - indicate that setting should + # be redirected to local side + return True + +class TinfoilCookerAdapter: + """ + Provide an adapter for existing code that 
expects to access a cooker object via Tinfoil, + since now Tinfoil is on the client side it no longer has direct access. + """ + + class TinfoilCookerCollectionAdapter: + """ cooker.collection adapter """ + def __init__(self, tinfoil): + self.tinfoil = tinfoil + def get_file_appends(self, fn): + return self.tinfoil.get_file_appends(fn) + def __getattr__(self, name): + if name == 'overlayed': + return self.tinfoil.get_overlayed_recipes() + elif name == 'bbappends': + return self.tinfoil.run_command('getAllAppends') + else: + raise AttributeError("%s instance has no attribute '%s'" % (self.__class__.__name__, name)) + + class TinfoilRecipeCacheAdapter: + """ cooker.recipecache adapter """ + def __init__(self, tinfoil): + self.tinfoil = tinfoil + self._cache = {} + + def get_pkg_pn_fn(self): + pkg_pn = defaultdict(list, self.tinfoil.run_command('getRecipes') or []) + pkg_fn = {} + for pn, fnlist in pkg_pn.items(): + for fn in fnlist: + pkg_fn[fn] = pn + self._cache['pkg_pn'] = pkg_pn + self._cache['pkg_fn'] = pkg_fn + + def __getattr__(self, name): + # Grab these only when they are requested since they aren't always used + if name in self._cache: + return self._cache[name] + elif name == 'pkg_pn': + self.get_pkg_pn_fn() + return self._cache[name] + elif name == 'pkg_fn': + self.get_pkg_pn_fn() + return self._cache[name] + elif name == 'deps': + attrvalue = defaultdict(list, self.tinfoil.run_command('getRecipeDepends') or []) + elif name == 'rundeps': + attrvalue = defaultdict(lambda: defaultdict(list), self.tinfoil.run_command('getRuntimeDepends') or []) + elif name == 'runrecs': + attrvalue = defaultdict(lambda: defaultdict(list), self.tinfoil.run_command('getRuntimeRecommends') or []) + elif name == 'pkg_pepvpr': + attrvalue = self.tinfoil.run_command('getRecipeVersions') or {} + elif name == 'inherits': + attrvalue = self.tinfoil.run_command('getRecipeInherits') or {} + elif name == 'bbfile_priority': + attrvalue = self.tinfoil.run_command('getBbFilePriority') or {} + elif name == 'pkg_dp': + attrvalue = self.tinfoil.run_command('getDefaultPreference') or {} + else: + raise AttributeError("%s instance has no attribute '%s'" % (self.__class__.__name__, name)) + + self._cache[name] = attrvalue + return attrvalue + + def __init__(self, tinfoil): + self.tinfoil = tinfoil + self.collection = self.TinfoilCookerCollectionAdapter(tinfoil) + self.recipecaches = {} + # FIXME all machines + self.recipecaches[''] = self.TinfoilRecipeCacheAdapter(tinfoil) + self._cache = {} + def __getattr__(self, name): + # Grab these only when they are requested since they aren't always used + if name in self._cache: + return self._cache[name] + elif name == 'skiplist': + attrvalue = self.tinfoil.get_skipped_recipes() + elif name == 'bbfile_config_priorities': + ret = self.tinfoil.run_command('getLayerPriorities') + bbfile_config_priorities = [] + for collection, pattern, regex, pri in ret: + bbfile_config_priorities.append((collection, pattern, re.compile(regex), pri)) + + attrvalue = bbfile_config_priorities + else: + raise AttributeError("%s instance has no attribute '%s'" % (self.__class__.__name__, name)) + + self._cache[name] = attrvalue + return attrvalue + + def findBestProvider(self, pn): + return self.tinfoil.find_best_provider(pn) + + class Tinfoil: - def __init__(self, output=sys.stdout, tracking=False): - # Needed to avoid deprecation warnings with python 2.6 - warnings.filterwarnings("ignore", category=DeprecationWarning) - # Set up logging + def __init__(self, output=sys.stdout, tracking=False, 
setup_logging=True): self.logger = logging.getLogger('BitBake') - self._log_hdlr = logging.StreamHandler(output) - bb.msg.addDefaultlogFilter(self._log_hdlr) - format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s") - if output.isatty(): - format.enable_color() - self._log_hdlr.setFormatter(format) - self.logger.addHandler(self._log_hdlr) - - self.config = CookerConfiguration() - configparams = TinfoilConfigParameters(parse_only=True) - self.config.setConfigParameters(configparams) - self.config.setServerRegIdleCallback(self.register_idle_function) - features = [] - if tracking: - features.append(CookerFeatures.BASEDATASTORE_TRACKING) - cleanedvars = bb.utils.clean_environment() - self.cooker = BBCooker(self.config, features) - self.config_data = self.cooker.data - bb.providers.logger.setLevel(logging.ERROR) - self.cooker_data = None - for k in cleanedvars: - os.environ[k] = cleanedvars[k] - - def register_idle_function(self, function, data): - pass + self.config_data = None + self.cooker = None + self.tracking = tracking + self.ui_module = None + self.server_connection = None + if setup_logging: + # This is the *client-side* logger, nothing to do with + # logging messages from the server + bb.msg.logger_create('BitBake', output) def __enter__(self): return self @@ -68,30 +228,161 @@ class Tinfoil: def __exit__(self, type, value, traceback): self.shutdown() - def parseRecipes(self): - sys.stderr.write("Parsing recipes..") - self.logger.setLevel(logging.WARNING) + def prepare(self, config_only=False, config_params=None, quiet=0): + if self.tracking: + extrafeatures = [bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING] + else: + extrafeatures = [] + + if not config_params: + config_params = TinfoilConfigParameters(config_only=config_only, quiet=quiet) - try: - while self.cooker.state in (state.initial, state.parsing): - self.cooker.updateCache() - except KeyboardInterrupt: - self.cooker.shutdown() - self.cooker.updateCache() - sys.exit(2) + cookerconfig = CookerConfiguration() + cookerconfig.setConfigParameters(config_params) - self.logger.setLevel(logging.INFO) - sys.stderr.write("done.\n") + server, self.server_connection, ui_module = setup_bitbake(config_params, + cookerconfig, + extrafeatures) - self.cooker_data = self.cooker.recipecaches[''] + self.ui_module = ui_module + + # Ensure the path to bitbake's bin directory is in PATH so that things like + # bitbake-worker can be run (usually this is the case, but it doesn't have to be) + path = os.getenv('PATH').split(':') + bitbakebinpath = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', 'bin')) + for entry in path: + if entry.endswith(os.sep): + entry = entry[:-1] + if os.path.abspath(entry) == bitbakebinpath: + break + else: + path.insert(0, bitbakebinpath) + os.environ['PATH'] = ':'.join(path) - def prepare(self, config_only = False): - if not self.cooker_data: + if self.server_connection: + _server_connections.append(self.server_connection) if config_only: - self.cooker.parseConfiguration() - self.cooker_data = self.cooker.recipecaches[''] + config_params.updateToServer(self.server_connection.connection, os.environ.copy()) + self.run_command('parseConfiguration') else: - self.parseRecipes() + self.run_actions(config_params) + + self.config_data = bb.data.init() + connector = TinfoilDataStoreConnector(self, None) + self.config_data.setVar('_remote_data', connector) + self.cooker = TinfoilCookerAdapter(self) + self.cooker_data = self.cooker.recipecaches[''] + else: + raise Exception('Failed to start 
bitbake server') + + def run_actions(self, config_params): + """ + Run the actions specified in config_params through the UI. + """ + ret = self.ui_module.main(self.server_connection.connection, self.server_connection.events, config_params) + if ret: + raise TinfoilUIException(ret) + + def parseRecipes(self): + """ + Force a parse of all recipes. Normally you should specify + config_only=False when calling prepare() instead of using this + function; this function is designed for situations where you need + to initialise Tinfoil and use it with config_only=True first and + then conditionally call this function to parse recipes later. + """ + config_params = TinfoilConfigParameters(config_only=False) + self.run_actions(config_params) + + def run_command(self, command, *params): + """ + Run a command on the server (as implemented in bb.command). + Note that there are two types of command - synchronous and + asynchronous; in order to receive the results of asynchronous + commands you will need to set an appropriate event mask + using set_event_mask() and listen for the result using + wait_event() - with the correct event mask you'll at least get + bb.command.CommandCompleted and possibly other events before + that depending on the command. + """ + if not self.server_connection: + raise Exception('Not connected to server (did you call .prepare()?)') + + commandline = [command] + if params: + commandline.extend(params) + result = self.server_connection.connection.runCommand(commandline) + if result[1]: + raise TinfoilCommandFailed(result[1]) + return result[0] + + def set_event_mask(self, eventlist): + """Set the event mask which will be applied within wait_event()""" + if not self.server_connection: + raise Exception('Not connected to server (did you call .prepare()?)') + llevel, debug_domains = bb.msg.constructLogOptions() + ret = self.run_command('setEventMask', self.server_connection.connection.getEventHandle(), llevel, debug_domains, eventlist) + if not ret: + raise Exception('setEventMask failed') + + def wait_event(self, timeout=0): + """ + Wait for an event from the server for the specified time. + A timeout of 0 means don't wait if there are no events in the queue. + Returns the next event in the queue or None if the timeout was + reached. Note that in order to recieve any events you will + first need to set the internal event mask using set_event_mask() + (otherwise whatever event mask the UI set up will be in effect). + """ + if not self.server_connection: + raise Exception('Not connected to server (did you call .prepare()?)') + return self.server_connection.events.waitEvent(timeout) + + def get_overlayed_recipes(self): + return defaultdict(list, self.run_command('getOverlayedRecipes')) + + def get_skipped_recipes(self): + return OrderedDict(self.run_command('getSkippedRecipes')) + + def get_all_providers(self): + return defaultdict(list, self.run_command('allProviders')) + + def find_providers(self): + return self.run_command('findProviders') + + def find_best_provider(self, pn): + return self.run_command('findBestProvider', pn) + + def get_runtime_providers(self, rdep): + return self.run_command('getRuntimeProviders', rdep) + + def get_recipe_file(self, pn): + """ + Get the file name for the specified recipe/target. Raises + bb.providers.NoProvider if there is no match or the recipe was + skipped. 
+ """ + best = self.find_best_provider(pn) + if not best or (len(best) > 3 and not best[3]): + skiplist = self.get_skipped_recipes() + taskdata = bb.taskdata.TaskData(None, skiplist=skiplist) + skipreasons = taskdata.get_reasons(pn) + if skipreasons: + raise bb.providers.NoProvider('%s is unavailable:\n %s' % (pn, ' \n'.join(skipreasons))) + else: + raise bb.providers.NoProvider('Unable to find any recipe file matching "%s"' % pn) + return best[3] + + def get_file_appends(self, fn): + return self.run_command('getFileAppends', fn) + + def parse_recipe(self, pn): + """ + Parse the specified recipe and return a datastore object + representing the environment for the recipe. + """ + fn = self.get_recipe_file(pn) + return self.parse_recipe_file(fn) def parse_recipe_file(self, fn, appends=True, appendlist=None, config_data=None): """ @@ -110,41 +401,82 @@ class Tinfoil: """ if appends and appendlist == []: appends = False - if appends: - if appendlist: - appendfiles = appendlist - else: - if not hasattr(self.cooker, 'collection'): - raise Exception('You must call tinfoil.prepare() with config_only=False in order to get bbappends') - appendfiles = self.cooker.collection.get_file_appends(fn) - else: - appendfiles = None if config_data: - # We have to use a different function here if we're passing in a datastore - localdata = bb.data.createCopy(config_data) - envdata = bb.cache.parse_recipe(localdata, fn, appendfiles)[''] + dctr = bb.remotedata.RemoteDatastores.transmit_datastore(config_data) + dscon = self.run_command('parseRecipeFile', fn, appends, appendlist, dctr) else: - # Use the standard path - parser = bb.cache.NoCache(self.cooker.databuilder) - envdata = parser.loadDataFull(fn, appendfiles) - return envdata + dscon = self.run_command('parseRecipeFile', fn, appends, appendlist) + if dscon: + return self._reconvert_type(dscon, 'DataStoreConnectionHandle') + else: + return None + + def build_file(self, buildfile, task): + """ + Runs the specified task for just a single recipe (i.e. no dependencies). + This is equivalent to bitbake -b, except no warning will be printed. 
+ """ + return self.run_command('buildFile', buildfile, task, True) def shutdown(self): - self.cooker.shutdown(force=True) - self.cooker.post_serve() - self.cooker.unlockBitbake() - self.logger.removeHandler(self._log_hdlr) + if self.server_connection: + self.run_command('clientComplete') + _server_connections.remove(self.server_connection) + bb.event.ui_queue = [] + self.server_connection.terminate() + self.server_connection = None -class TinfoilConfigParameters(ConfigParameters): + def _reconvert_type(self, obj, origtypename): + """ + Convert an object back to the right type, in the case + that marshalling has changed it (especially with xmlrpc) + """ + supported_types = { + 'set': set, + 'DataStoreConnectionHandle': bb.command.DataStoreConnectionHandle, + } + + origtype = supported_types.get(origtypename, None) + if origtype is None: + raise Exception('Unsupported type "%s"' % origtypename) + if type(obj) == origtype: + newobj = obj + elif isinstance(obj, dict): + # New style class + newobj = origtype() + for k,v in obj.items(): + setattr(newobj, k, v) + else: + # Assume we can coerce the type + newobj = origtype(obj) + + if isinstance(newobj, bb.command.DataStoreConnectionHandle): + connector = TinfoilDataStoreConnector(self, newobj.dsindex) + newobj = bb.data.init() + newobj.setVar('_remote_data', connector) + + return newobj - def __init__(self, **options): + +class TinfoilConfigParameters(BitBakeConfigParameters): + + def __init__(self, config_only, **options): self.initial_options = options - super(TinfoilConfigParameters, self).__init__() + # Apply some sane defaults + if not 'parse_only' in options: + self.initial_options['parse_only'] = not config_only + #if not 'status_only' in options: + # self.initial_options['status_only'] = config_only + if not 'ui' in options: + self.initial_options['ui'] = 'knotty' + if not 'argv' in options: + self.initial_options['argv'] = [] - def parseCommandLine(self, argv=sys.argv): - class DummyOptions: - def __init__(self, initial_options): - for key, val in initial_options.items(): - setattr(self, key, val) + super(TinfoilConfigParameters, self).__init__() - return DummyOptions(self.initial_options), None + def parseCommandLine(self, argv=None): + # We don't want any parameters parsed from the command line + opts = super(TinfoilConfigParameters, self).parseCommandLine([]) + for key, val in self.initial_options.items(): + setattr(opts[0], key, val) + return opts diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py index 3ddcb2ac6..e451c630d 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/buildinfohelper.py @@ -42,10 +42,12 @@ from orm.models import Variable, VariableHistory from orm.models import Package, Package_File, Target_Installed_Package, Target_File from orm.models import Task_Dependency, Package_Dependency from orm.models import Recipe_Dependency, Provides -from orm.models import Project, CustomImagePackage, CustomImageRecipe +from orm.models import Project, CustomImagePackage from orm.models import signal_runbuilds from bldcontrol.models import BuildEnvironment, BuildRequest +from bldcontrol.models import BRLayer +from bldcontrol import bbcontroller from bb.msg import BBLogFormatter as formatter from django.db import models @@ -361,11 +363,6 @@ class ORMWrapper(object): def get_update_layer_version_object(self, build_obj, layer_obj, layer_version_information): if 
isinstance(layer_obj, Layer_Version): - # Special case the toaster-custom-images layer which is created - # on the fly so don't update the values which may cause the layer - # to be duplicated on a future get_or_create - if layer_obj.layer.name == CustomImageRecipe.LAYER_NAME: - return layer_obj # We already found our layer version for this build so just # update it with the new build information logger.debug("We found our layer from toaster") @@ -384,8 +381,8 @@ class ORMWrapper(object): local_path=layer_version_information['local_path'], ) - logger.info("created new historical layer version %d", - layer_copy.pk) + logger.debug("Created new layer version %s for build history", + layer_copy.layer.name) self.layer_version_built.append(layer_copy) @@ -441,48 +438,33 @@ class ORMWrapper(object): else: br_id, be_id = brbe.split(":") - # find layer by checkout path; - from bldcontrol import bbcontroller - bc = bbcontroller.getBuildEnvironmentController(pk = be_id) - - # we might have a race condition here, as the project layers may change between the build trigger and the actual build execution - # but we can only match on the layer name, so the worst thing can happen is a mis-identification of the layer, not a total failure - - # note that this is different - buildrequest = BuildRequest.objects.get(pk = br_id) - for brl in buildrequest.brlayer_set.all(): - if brl.local_source_dir: - localdirname = os.path.join(brl.local_source_dir, - brl.dirpath) - else: - localdirname = os.path.join(bc.getGitCloneDirectory(brl.giturl, brl.commit), brl.dirpath) - # we get a relative path, unless running in HEAD mode where the path is absolute - if not localdirname.startswith("/"): - localdirname = os.path.join(bc.be.sourcedir, localdirname) - #logger.debug(1, "Localdirname %s lcal_path %s" % (localdirname, layer_information['local_path'])) - if localdirname.startswith(layer_information['local_path']): - # If the build request came from toaster this field - # should contain the information from the layer_version - # That created this build request. - if brl.layer_version: - return brl.layer_version - - # This might be a local layer (i.e. 
no git info) so try - # matching local_source_dir - if brl.local_source_dir and brl.local_source_dir == layer_information["local_path"]: - return brl.layer_version - - # we matched the BRLayer, but we need the layer_version that generated this BR; reverse of the Project.schedule_build() - #logger.debug(1, "Matched %s to BRlayer %s" % (pformat(layer_information["local_path"]), localdirname)) - - for pl in buildrequest.project.projectlayer_set.filter(layercommit__layer__name = brl.name): - if pl.layercommit.layer.vcs_url == brl.giturl : - layer = pl.layercommit.layer - layer.save() - return layer - - raise NotExisting("Unidentified layer %s" % pformat(layer_information)) + # Find the layer version by matching the layer event information + # against the metadata we have in Toaster + try: + br_layer = BRLayer.objects.get(req=br_id, + name=layer_information['name']) + return br_layer.layer_version + except (BRLayer.MultipleObjectsReturned, BRLayer.DoesNotExist): + # There are multiple of the same layer name or the name + # hasn't been determined by the toaster.bbclass layer + # so let's filter by the local_path + bc = bbcontroller.getBuildEnvironmentController(pk=be_id) + for br_layer in BRLayer.objects.filter(req=br_id): + if br_layer.giturl and \ + layer_information['local_path'].endswith( + bc.getGitCloneDirectory(br_layer.giturl, + br_layer.commit)): + return br_layer.layer_version + + if br_layer.local_source_dir == \ + layer_information['local_path']: + return br_layer.layer_version + + # We've reached the end of our search and couldn't find the layer + # we can continue but some data may be missing + raise NotExisting("Unidentified layer %s" % + pformat(layer_information)) def save_target_file_information(self, build_obj, target_obj, filedata): assert isinstance(build_obj, Build) @@ -876,6 +858,12 @@ class MockEvent(object): self.pathname = None self.lineno = None + def getMessage(self): + """ + Simulate LogRecord message return + """ + return self.msg + class BuildInfoHelper(object): """ This class gathers the build information from the server and sends it @@ -983,9 +971,10 @@ class BuildInfoHelper(object): return task_information def _get_layer_version_for_dependency(self, pathRE): - """ Returns the layer in the toaster db that has a full regex match to the pathRE. - pathRE - the layer path passed as a regex in the event. It is created in - cooker.py as a collection for the layer priorities. + """ Returns the layer in the toaster db that has a full regex + match to the pathRE. pathRE - the layer path passed as a regex in the + event. It is created in cooker.py as a collection for the layer + priorities. 
""" self._ensure_build() @@ -993,19 +982,31 @@ class BuildInfoHelper(object): assert isinstance(layer_version, Layer_Version) return len(layer_version.local_path) - # we don't care if we match the trailing slashes - p = re.compile(re.sub("/[^/]*?$","",pathRE)) - # Heuristics: we always match recipe to the deepest layer path in the discovered layers - for lvo in sorted(self.orm_wrapper.layer_version_objects, reverse=True, key=_sort_longest_path): - if p.fullmatch(lvo.local_path): + # Our paths don't append a trailing slash + if pathRE.endswith("/"): + pathRE = pathRE[:-1] + + p = re.compile(pathRE) + path=re.sub(r'[$^]',r'',pathRE) + # Heuristics: we always match recipe to the deepest layer path in + # the discovered layers + for lvo in sorted(self.orm_wrapper.layer_version_objects, + reverse=True, key=_sort_longest_path): + if p.fullmatch(os.path.abspath(lvo.local_path)): return lvo if lvo.layer.local_source_dir: - if p.fullmatch(lvo.layer.local_source_dir): + if p.fullmatch(os.path.abspath(lvo.layer.local_source_dir)): return lvo - #if we get here, we didn't read layers correctly; dump whatever information we have on the error log - logger.warning("Could not match layer dependency for path %s : %s", path, self.orm_wrapper.layer_version_objects) - + if 0 == path.find(lvo.local_path): + # sub-layer path inside existing layer + return lvo + # if we get here, we didn't read layers correctly; + # dump whatever information we have on the error log + logger.warning("Could not match layer dependency for path %s : %s", + pathRE, + self.orm_wrapper.layer_version_objects) + return None def _get_layer_version_for_path(self, path): self._ensure_build() @@ -1268,6 +1269,14 @@ class BuildInfoHelper(object): candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)] if len(candidates) == 1: identifier = candidates[0] + elif len(candidates) > 1 and hasattr(event,'_package'): + if 'native-' in event._package: + identifier = 'native:' + identifier + if 'nativesdk-' in event._package: + identifier = 'nativesdk:' + identifier + candidates = [x for x in self.internal_state['taskdata'].keys() if x.endswith(identifier)] + if len(candidates) == 1: + identifier = candidates[0] assert identifier in self.internal_state['taskdata'] identifierlist = identifier.split(":") @@ -1398,9 +1407,9 @@ class BuildInfoHelper(object): for lv in event._depgraph['layer-priorities']: (_, path, _, priority) = lv layer_version_obj = self._get_layer_version_for_dependency(path) - assert layer_version_obj is not None - layer_version_obj.priority = priority - layer_version_obj.save() + if layer_version_obj: + layer_version_obj.priority = priority + layer_version_obj.save() # save recipe information self.internal_state['recipes'] = {} @@ -1665,6 +1674,36 @@ class BuildInfoHelper(object): break return endswith + def scan_task_artifacts(self, event): + """ + The 'TaskArtifacts' event passes the manifest file content for the + tasks 'do_deploy', 'do_image_complete', 'do_populate_sdk', and + 'do_populate_sdk_ext'. The first two will be implemented later. 
+ """ + task_vars = BuildInfoHelper._get_data_from_event(event) + task_name = task_vars['task'][task_vars['task'].find(':')+1:] + task_artifacts = task_vars['artifacts'] + + if task_name in ['do_populate_sdk', 'do_populate_sdk_ext']: + targets = [target for target in self.internal_state['targets'] \ + if target.task == task_name[3:]] + if not targets: + logger.warning("scan_task_artifacts: SDK targets not found: %s\n", task_name) + return + for artifact_path in task_artifacts: + if not os.path.isfile(artifact_path): + logger.warning("scan_task_artifacts: artifact file not found: %s\n", artifact_path) + continue + for target in targets: + # don't record the file if it's already been added + # to this target + matching_files = TargetSDKFile.objects.filter( + target=target, file_name=artifact_path) + if matching_files.count() == 0: + artifact_size = os.stat(artifact_path).st_size + self.orm_wrapper.save_target_sdk_file( + target, artifact_path, artifact_size) + def _get_image_files(self, deploy_dir_image, image_name, image_file_extensions): """ Find files in deploy_dir_image whose basename starts with the diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py deleted file mode 100644 index d879e04c0..000000000 --- a/import-layers/yocto-poky/bitbake/lib/bb/ui/depexp.py +++ /dev/null @@ -1,358 +0,0 @@ -# -# BitBake Graphical GTK based Dependency Explorer -# -# Copyright (C) 2007 Ross Burton -# Copyright (C) 2007 - 2008 Richard Purdie -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
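The tinfoil.py rework above turns Tinfoil into a thin client for the bitbake server: prepare() brings up the server and UI plumbing, run_command() forwards synchronous commands to bb.command, and parsed recipe data comes back through a remote datastore connector. What follows is only a rough sketch of how an external script might drive that API; it assumes a Yocto 2.3 build environment has already been initialised (so bb.tinfoil is importable) and uses "zlib" purely as a placeholder recipe name.

import bb.tinfoil

with bb.tinfoil.Tinfoil() as tinfoil:
    # Full parse so recipe/provider lookups work; config_only=True would
    # stop after the configuration files are parsed.
    tinfoil.prepare(config_only=False)

    # Configuration-level variables go through the remote datastore
    # connector that prepare() sets up.
    print('MACHINE = %s' % tinfoil.config_data.getVar('MACHINE'))

    # Resolve and parse a single recipe via the server-side command API.
    fn = tinfoil.get_recipe_file('zlib')            # placeholder target
    d = tinfoil.parse_recipe_file(fn)
    print('%s provides version %s' % (fn, d.getVar('PV')))

    # run_command() covers anything bb.command implements; asynchronous
    # commands additionally need set_event_mask() and wait_event().
    print('bbappends: %s' % tinfoil.get_file_appends(fn))

# Leaving the 'with' block calls shutdown(), which sends clientComplete
# and terminates the server connection.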
- -import sys -import gi -gi.require_version('Gtk', '3.0') -from gi.repository import Gtk, Gdk, GObject -from multiprocessing import Queue -import threading -from xmlrpc import client -import time -import bb -import bb.event - -# Package Model -(COL_PKG_NAME) = (0) - -# Dependency Model -(TYPE_DEP, TYPE_RDEP) = (0, 1) -(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2) - - -class PackageDepView(Gtk.TreeView): - def __init__(self, model, dep_type, label): - Gtk.TreeView.__init__(self) - self.current = None - self.dep_type = dep_type - self.filter_model = model.filter_new() - self.filter_model.set_visible_func(self._filter, data=None) - self.set_model(self.filter_model) - self.append_column(Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PACKAGE)) - - def _filter(self, model, iter, data): - this_type = model[iter][COL_DEP_TYPE] - package = model[iter][COL_DEP_PARENT] - if this_type != self.dep_type: return False - return package == self.current - - def set_current_package(self, package): - self.current = package - self.filter_model.refilter() - - -class PackageReverseDepView(Gtk.TreeView): - def __init__(self, model, label): - Gtk.TreeView.__init__(self) - self.current = None - self.filter_model = model.filter_new() - self.filter_model.set_visible_func(self._filter) - self.set_model(self.filter_model) - self.append_column(Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PARENT)) - - def _filter(self, model, iter, data): - package = model[iter][COL_DEP_PACKAGE] - return package == self.current - - def set_current_package(self, package): - self.current = package - self.filter_model.refilter() - - -class DepExplorer(Gtk.Window): - def __init__(self): - Gtk.Window.__init__(self) - self.set_title("Dependency Explorer") - self.set_default_size(500, 500) - self.connect("delete-event", Gtk.main_quit) - - # Create the data models - self.pkg_model = Gtk.ListStore(GObject.TYPE_STRING) - self.pkg_model.set_sort_column_id(COL_PKG_NAME, Gtk.SortType.ASCENDING) - self.depends_model = Gtk.ListStore(GObject.TYPE_INT, GObject.TYPE_STRING, GObject.TYPE_STRING) - self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, Gtk.SortType.ASCENDING) - - pane = Gtk.HPaned() - pane.set_position(250) - self.add(pane) - - # The master list of packages - scrolled = Gtk.ScrolledWindow() - scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) - scrolled.set_shadow_type(Gtk.ShadowType.IN) - - self.pkg_treeview = Gtk.TreeView(self.pkg_model) - self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed) - column = Gtk.TreeViewColumn("Package", Gtk.CellRendererText(), text=COL_PKG_NAME) - self.pkg_treeview.append_column(column) - pane.add1(scrolled) - scrolled.add(self.pkg_treeview) - - box = Gtk.VBox(homogeneous=True, spacing=4) - - # Runtime Depends - scrolled = Gtk.ScrolledWindow() - scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) - scrolled.set_shadow_type(Gtk.ShadowType.IN) - self.rdep_treeview = PackageDepView(self.depends_model, TYPE_RDEP, "Runtime Depends") - self.rdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE) - scrolled.add(self.rdep_treeview) - box.add(scrolled) - - # Build Depends - scrolled = Gtk.ScrolledWindow() - scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) - scrolled.set_shadow_type(Gtk.ShadowType.IN) - self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Build Depends") - self.dep_treeview.connect("row-activated", self.on_package_activated, 
COL_DEP_PACKAGE) - scrolled.add(self.dep_treeview) - box.add(scrolled) - pane.add2(box) - - # Reverse Depends - scrolled = Gtk.ScrolledWindow() - scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) - scrolled.set_shadow_type(Gtk.ShadowType.IN) - self.revdep_treeview = PackageReverseDepView(self.depends_model, "Reverse Depends") - self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT) - scrolled.add(self.revdep_treeview) - box.add(scrolled) - pane.add2(box) - - self.show_all() - - def on_package_activated(self, treeview, path, column, data_col): - model = treeview.get_model() - package = model.get_value(model.get_iter(path), data_col) - - pkg_path = [] - def finder(model, path, iter, needle): - package = model.get_value(iter, COL_PKG_NAME) - if package == needle: - pkg_path.append(path) - return True - else: - return False - self.pkg_model.foreach(finder, package) - if pkg_path: - self.pkg_treeview.get_selection().select_path(pkg_path[0]) - self.pkg_treeview.scroll_to_cell(pkg_path[0]) - - def on_cursor_changed(self, selection): - (model, it) = selection.get_selected() - if it is None: - current_package = None - else: - current_package = model.get_value(it, COL_PKG_NAME) - self.rdep_treeview.set_current_package(current_package) - self.dep_treeview.set_current_package(current_package) - self.revdep_treeview.set_current_package(current_package) - - - def parse(self, depgraph): - for package in depgraph["pn"]: - self.pkg_model.insert(0, (package,)) - - for package in depgraph["depends"]: - for depend in depgraph["depends"][package]: - self.depends_model.insert (0, (TYPE_DEP, package, depend)) - - for package in depgraph["rdepends-pn"]: - for rdepend in depgraph["rdepends-pn"][package]: - self.depends_model.insert (0, (TYPE_RDEP, package, rdepend)) - - -class gtkthread(threading.Thread): - quit = threading.Event() - def __init__(self, shutdown): - threading.Thread.__init__(self) - self.setDaemon(True) - self.shutdown = shutdown - if not Gtk.init_check()[0]: - sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n") - gtkthread.quit.set() - - def run(self): - GObject.threads_init() - Gdk.threads_init() - Gtk.main() - gtkthread.quit.set() - - -def main(server, eventHandler, params): - shutdown = 0 - - gtkgui = gtkthread(shutdown) - gtkgui.start() - - try: - params.updateFromServer(server) - cmdline = params.parseActions() - if not cmdline: - print("Nothing to do. 
Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") - return 1 - if 'msg' in cmdline and cmdline['msg']: - print(cmdline['msg']) - return 1 - cmdline = cmdline['action'] - if not cmdline or cmdline[0] != "generateDotGraph": - print("This UI requires the -g option") - return 1 - ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]]) - if error: - print("Error running command '%s': %s" % (cmdline, error)) - return 1 - elif ret != True: - print("Error running command '%s': returned %s" % (cmdline, ret)) - return 1 - except client.Fault as x: - print("XMLRPC Fault getting commandline:\n %s" % x) - return - - if gtkthread.quit.isSet(): - return - - Gdk.threads_enter() - dep = DepExplorer() - bardialog = Gtk.Dialog(parent=dep, - flags=Gtk.DialogFlags.MODAL|Gtk.DialogFlags.DESTROY_WITH_PARENT) - bardialog.set_default_size(400, 50) - box = bardialog.get_content_area() - pbar = Gtk.ProgressBar() - box.pack_start(pbar, True, True, 0) - bardialog.show_all() - bardialog.connect("delete-event", Gtk.main_quit) - Gdk.threads_leave() - - progress_total = 0 - while True: - try: - event = eventHandler.waitEvent(0.25) - if gtkthread.quit.isSet(): - _, error = server.runCommand(["stateForceShutdown"]) - if error: - print('Unable to cleanly stop: %s' % error) - break - - if event is None: - continue - - if isinstance(event, bb.event.CacheLoadStarted): - progress_total = event.total - Gdk.threads_enter() - bardialog.set_title("Loading Cache") - pbar.set_fraction(0.0) - Gdk.threads_leave() - - if isinstance(event, bb.event.CacheLoadProgress): - x = event.current - Gdk.threads_enter() - pbar.set_fraction(x * 1.0 / progress_total) - Gdk.threads_leave() - continue - - if isinstance(event, bb.event.CacheLoadCompleted): - continue - - if isinstance(event, bb.event.ParseStarted): - progress_total = event.total - if progress_total == 0: - continue - Gdk.threads_enter() - pbar.set_fraction(0.0) - bardialog.set_title("Processing recipes") - Gdk.threads_leave() - - if isinstance(event, bb.event.ParseProgress): - x = event.current - Gdk.threads_enter() - pbar.set_fraction(x * 1.0 / progress_total) - Gdk.threads_leave() - continue - - if isinstance(event, bb.event.ParseCompleted): - Gdk.threads_enter() - bardialog.set_title("Generating dependency tree") - Gdk.threads_leave() - continue - - if isinstance(event, bb.event.DepTreeGenerated): - Gdk.threads_enter() - bardialog.hide() - dep.parse(event._depgraph) - Gdk.threads_leave() - - if isinstance(event, bb.command.CommandCompleted): - continue - - if isinstance(event, bb.event.NoProvider): - if event._runtime: - r = "R" - else: - r = "" - - extra = '' - if not event._reasons: - if event._close_matches: - extra = ". 
Close matches:\n %s" % '\n '.join(event._close_matches) - - if event._dependees: - print("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s" % r, event._item, ", ".join(event._dependees), r, extra) - else: - print("Nothing %sPROVIDES '%s'%s" % (r, event._item, extra)) - if event._reasons: - for reason in event._reasons: - print(reason) - - _, error = server.runCommand(["stateShutdown"]) - if error: - print('Unable to cleanly shutdown: %s' % error) - break - - if isinstance(event, bb.command.CommandFailed): - print("Command execution failed: %s" % event.error) - return event.exitcode - - if isinstance(event, bb.command.CommandExit): - return event.exitcode - - if isinstance(event, bb.cooker.CookerExit): - break - - continue - except EnvironmentError as ioerror: - # ignore interrupted io - if ioerror.args[0] == 4: - pass - except KeyboardInterrupt: - if shutdown == 2: - print("\nThird Keyboard Interrupt, exit.\n") - break - if shutdown == 1: - print("\nSecond Keyboard Interrupt, stopping...\n") - _, error = server.runCommand(["stateForceShutdown"]) - if error: - print('Unable to cleanly stop: %s' % error) - if shutdown == 0: - print("\nKeyboard Interrupt, closing down...\n") - _, error = server.runCommand(["stateShutdown"]) - if error: - print('Unable to cleanly shutdown: %s' % error) - shutdown = shutdown + 1 - pass diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py index 948f52769..82aa7c464 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/knotty.py @@ -75,10 +75,8 @@ class BBProgress(progressbar.ProgressBar): extrastr = str(extra) if extrastr[0] != ' ': extrastr = ' ' + extrastr - if extrastr[-1] != ' ': - extrastr += ' ' else: - extrastr = ' ' + extrastr = '' self.widgets[self.extrapos] = extrastr def _need_update(self): @@ -284,7 +282,7 @@ class TerminalFilter(object): content = self.main_progress.update(progress) print('') lines = 1 + int(len(content) / (self.columns + 1)) - if not self.quiet: + if self.quiet == 0: for tasknum, task in enumerate(tasks[:(self.rows - 2)]): if isinstance(task, tuple): pbar, progress, rate, start_time = task @@ -312,7 +310,7 @@ class TerminalFilter(object): fd = sys.stdin.fileno() self.termios.tcsetattr(fd, self.termios.TCSADRAIN, self.stdinbackup) -def _log_settings_from_server(server): +def _log_settings_from_server(server, observe_only): # Get values of variables which control our output includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"]) if error: @@ -322,7 +320,11 @@ def _log_settings_from_server(server): if error: logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error) raise BaseException(error) - consolelogfile, error = server.runCommand(["getSetVariable", "BB_CONSOLELOG"]) + if observe_only: + cmd = 'getVariable' + else: + cmd = 'getSetVariable' + consolelogfile, error = server.runCommand([cmd, "BB_CONSOLELOG"]) if error: logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error) raise BaseException(error) @@ -340,7 +342,7 @@ _evt_list = [ "bb.runqueue.runQueueExitWait", "bb.event.LogExecTTY", "logging.Lo def main(server, eventHandler, params, tf = TerminalFilter): - includelogs, loglines, consolelogfile = _log_settings_from_server(server) + includelogs, loglines, consolelogfile = _log_settings_from_server(server, params.observe_only) if sys.stdin.isatty() and sys.stdout.isatty(): log_exec_tty = True @@ -353,10 +355,13 @@ def 
main(server, eventHandler, params, tf = TerminalFilter): errconsole = logging.StreamHandler(sys.stderr) format_str = "%(levelname)s: %(message)s" format = bb.msg.BBLogFormatter(format_str) - if params.options.quiet: - bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut, bb.msg.BBLogFormatter.WARNING) + if params.options.quiet == 0: + forcelevel = None + elif params.options.quiet > 2: + forcelevel = bb.msg.BBLogFormatter.ERROR else: - bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut) + forcelevel = bb.msg.BBLogFormatter.WARNING + bb.msg.addDefaultlogFilter(console, bb.msg.BBLogFilterStdOut, forcelevel) bb.msg.addDefaultlogFilter(errconsole, bb.msg.BBLogFilterStdErr) console.setFormatter(format) errconsole.setFormatter(format) @@ -506,35 +511,47 @@ def main(server, eventHandler, params, tf = TerminalFilter): logger.info(event._message) continue if isinstance(event, bb.event.ParseStarted): + if params.options.quiet > 1: + continue if event.total == 0: continue parseprogress = new_progress("Parsing recipes", event.total).start() continue if isinstance(event, bb.event.ParseProgress): + if params.options.quiet > 1: + continue if parseprogress: parseprogress.update(event.current) else: bb.warn("Got ParseProgress event for parsing that never started?") continue if isinstance(event, bb.event.ParseCompleted): + if params.options.quiet > 1: + continue if not parseprogress: continue parseprogress.finish() pasreprogress = None - if not params.options.quiet: + if params.options.quiet == 0: print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors." % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors))) continue if isinstance(event, bb.event.CacheLoadStarted): + if params.options.quiet > 1: + continue cacheprogress = new_progress("Loading cache", event.total).start() continue if isinstance(event, bb.event.CacheLoadProgress): + if params.options.quiet > 1: + continue cacheprogress.update(event.current) continue if isinstance(event, bb.event.CacheLoadCompleted): + if params.options.quiet > 1: + continue cacheprogress.finish() - if not params.options.quiet: + if params.options.quiet == 0: print("Loaded %d entries from dependency cache." 
% event.num_entries) continue @@ -620,16 +637,22 @@ def main(server, eventHandler, params, tf = TerminalFilter): continue if isinstance(event, bb.event.ProcessStarted): + if params.options.quiet > 1: + continue parseprogress = new_progress(event.processname, event.total) parseprogress.start(False) continue if isinstance(event, bb.event.ProcessProgress): + if params.options.quiet > 1: + continue if parseprogress: parseprogress.update(event.progress) else: bb.warn("Got ProcessProgress event for someting that never started?") continue if isinstance(event, bb.event.ProcessFinished): + if params.options.quiet > 1: + continue if parseprogress: parseprogress.finish() parseprogress = None @@ -647,6 +670,7 @@ def main(server, eventHandler, params, tf = TerminalFilter): bb.event.OperationCompleted, bb.event.OperationProgress, bb.event.DiskFull, + bb.event.HeartbeatEvent, bb.build.TaskProgress)): continue @@ -700,7 +724,7 @@ def main(server, eventHandler, params, tf = TerminalFilter): if return_value and errors: summary += pluralise("\nSummary: There was %s ERROR message shown, returning a non-zero exit code.", "\nSummary: There were %s ERROR messages shown, returning a non-zero exit code.", errors) - if summary and not params.options.quiet: + if summary and params.options.quiet == 0: print(summary) if interrupted: diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py index d81e4138b..ca845a32a 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/ncurses.py @@ -297,7 +297,7 @@ class NCursesUI: # bb.error("log data follows (%s)" % logfile) # number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d) # if number_of_lines: -# subprocess.call('tail -n%s %s' % (number_of_lines, logfile), shell=True) +# subprocess.check_call('tail -n%s %s' % (number_of_lines, logfile), shell=True) # else: # f = open(logfile, "r") # while True: diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/taskexp.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/taskexp.py new file mode 100644 index 000000000..9d14ecefa --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/taskexp.py @@ -0,0 +1,342 @@ +# +# BitBake Graphical GTK based Dependency Explorer +# +# Copyright (C) 2007 Ross Burton +# Copyright (C) 2007 - 2008 Richard Purdie +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
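The knotty.py hunks above rework -q so that it is a count rather than a flag: the stdout log filter is forced to a higher threshold as more -q options are given, and progress bars are suppressed from -qq onwards. A hedged restatement of that mapping, as an illustrative helper rather than code that exists in BitBake itself:

import bb.msg

def console_force_level(quiet):
    # Log level forced onto stdout by addDefaultlogFilter() for a given
    # number of -q options on the bitbake command line.
    if quiet == 0:
        return None                               # normal output
    elif quiet > 2:
        return bb.msg.BBLogFormatter.ERROR        # -qqq and beyond: errors only
    else:
        return bb.msg.BBLogFormatter.WARNING      # -q / -qq: warnings and errors

def show_progress(quiet):
    # Parse, cache-load and process progress events are ignored once -qq
    # (quiet > 1) is given.
    return quiet <= 1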
+ +import sys +import gi +gi.require_version('Gtk', '3.0') +from gi.repository import Gtk, Gdk, GObject +from multiprocessing import Queue +import threading +from xmlrpc import client +import time +import bb +import bb.event + +# Package Model +(COL_PKG_NAME) = (0) + +# Dependency Model +(TYPE_DEP, TYPE_RDEP) = (0, 1) +(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2) + + +class PackageDepView(Gtk.TreeView): + def __init__(self, model, dep_type, label): + Gtk.TreeView.__init__(self) + self.current = None + self.dep_type = dep_type + self.filter_model = model.filter_new() + self.filter_model.set_visible_func(self._filter, data=None) + self.set_model(self.filter_model) + self.append_column(Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PACKAGE)) + + def _filter(self, model, iter, data): + this_type = model[iter][COL_DEP_TYPE] + package = model[iter][COL_DEP_PARENT] + if this_type != self.dep_type: return False + return package == self.current + + def set_current_package(self, package): + self.current = package + self.filter_model.refilter() + + +class PackageReverseDepView(Gtk.TreeView): + def __init__(self, model, label): + Gtk.TreeView.__init__(self) + self.current = None + self.filter_model = model.filter_new() + self.filter_model.set_visible_func(self._filter) + self.set_model(self.filter_model) + self.append_column(Gtk.TreeViewColumn(label, Gtk.CellRendererText(), text=COL_DEP_PARENT)) + + def _filter(self, model, iter, data): + package = model[iter][COL_DEP_PACKAGE] + return package == self.current + + def set_current_package(self, package): + self.current = package + self.filter_model.refilter() + + +class DepExplorer(Gtk.Window): + def __init__(self): + Gtk.Window.__init__(self) + self.set_title("Task Dependency Explorer") + self.set_default_size(500, 500) + self.connect("delete-event", Gtk.main_quit) + + # Create the data models + self.pkg_model = Gtk.ListStore(GObject.TYPE_STRING) + self.pkg_model.set_sort_column_id(COL_PKG_NAME, Gtk.SortType.ASCENDING) + self.depends_model = Gtk.ListStore(GObject.TYPE_INT, GObject.TYPE_STRING, GObject.TYPE_STRING) + self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, Gtk.SortType.ASCENDING) + + pane = Gtk.HPaned() + pane.set_position(250) + self.add(pane) + + # The master list of packages + scrolled = Gtk.ScrolledWindow() + scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) + scrolled.set_shadow_type(Gtk.ShadowType.IN) + + self.pkg_treeview = Gtk.TreeView(self.pkg_model) + self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed) + column = Gtk.TreeViewColumn("Package", Gtk.CellRendererText(), text=COL_PKG_NAME) + self.pkg_treeview.append_column(column) + pane.add1(scrolled) + scrolled.add(self.pkg_treeview) + + box = Gtk.VBox(homogeneous=True, spacing=4) + + # Task Depends + scrolled = Gtk.ScrolledWindow() + scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) + scrolled.set_shadow_type(Gtk.ShadowType.IN) + self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Dependencies") + self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE) + scrolled.add(self.dep_treeview) + box.add(scrolled) + pane.add2(box) + + # Reverse Task Depends + scrolled = Gtk.ScrolledWindow() + scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) + scrolled.set_shadow_type(Gtk.ShadowType.IN) + self.revdep_treeview = PackageReverseDepView(self.depends_model, "Dependent Tasks") + self.revdep_treeview.connect("row-activated", 
self.on_package_activated, COL_DEP_PARENT) + scrolled.add(self.revdep_treeview) + box.add(scrolled) + pane.add2(box) + + self.show_all() + + def on_package_activated(self, treeview, path, column, data_col): + model = treeview.get_model() + package = model.get_value(model.get_iter(path), data_col) + + pkg_path = [] + def finder(model, path, iter, needle): + package = model.get_value(iter, COL_PKG_NAME) + if package == needle: + pkg_path.append(path) + return True + else: + return False + self.pkg_model.foreach(finder, package) + if pkg_path: + self.pkg_treeview.get_selection().select_path(pkg_path[0]) + self.pkg_treeview.scroll_to_cell(pkg_path[0]) + + def on_cursor_changed(self, selection): + (model, it) = selection.get_selected() + if it is None: + current_package = None + else: + current_package = model.get_value(it, COL_PKG_NAME) + self.dep_treeview.set_current_package(current_package) + self.revdep_treeview.set_current_package(current_package) + + + def parse(self, depgraph): + for task in depgraph["tdepends"]: + self.pkg_model.insert(0, (task,)) + for depend in depgraph["tdepends"][task]: + self.depends_model.insert (0, (TYPE_DEP, task, depend)) + + +class gtkthread(threading.Thread): + quit = threading.Event() + def __init__(self, shutdown): + threading.Thread.__init__(self) + self.setDaemon(True) + self.shutdown = shutdown + if not Gtk.init_check()[0]: + sys.stderr.write("Gtk+ init failed. Make sure DISPLAY variable is set.\n") + gtkthread.quit.set() + + def run(self): + GObject.threads_init() + Gdk.threads_init() + Gtk.main() + gtkthread.quit.set() + + +def main(server, eventHandler, params): + shutdown = 0 + + gtkgui = gtkthread(shutdown) + gtkgui.start() + + try: + params.updateFromServer(server) + cmdline = params.parseActions() + if not cmdline: + print("Nothing to do. 
Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.") + return 1 + if 'msg' in cmdline and cmdline['msg']: + print(cmdline['msg']) + return 1 + cmdline = cmdline['action'] + if not cmdline or cmdline[0] != "generateDotGraph": + print("This UI requires the -g option") + return 1 + ret, error = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]]) + if error: + print("Error running command '%s': %s" % (cmdline, error)) + return 1 + elif ret != True: + print("Error running command '%s': returned %s" % (cmdline, ret)) + return 1 + except client.Fault as x: + print("XMLRPC Fault getting commandline:\n %s" % x) + return + + if gtkthread.quit.isSet(): + return + + Gdk.threads_enter() + dep = DepExplorer() + bardialog = Gtk.Dialog(parent=dep, + flags=Gtk.DialogFlags.MODAL|Gtk.DialogFlags.DESTROY_WITH_PARENT) + bardialog.set_default_size(400, 50) + box = bardialog.get_content_area() + pbar = Gtk.ProgressBar() + box.pack_start(pbar, True, True, 0) + bardialog.show_all() + bardialog.connect("delete-event", Gtk.main_quit) + Gdk.threads_leave() + + progress_total = 0 + while True: + try: + event = eventHandler.waitEvent(0.25) + if gtkthread.quit.isSet(): + _, error = server.runCommand(["stateForceShutdown"]) + if error: + print('Unable to cleanly stop: %s' % error) + break + + if event is None: + continue + + if isinstance(event, bb.event.CacheLoadStarted): + progress_total = event.total + Gdk.threads_enter() + bardialog.set_title("Loading Cache") + pbar.set_fraction(0.0) + Gdk.threads_leave() + + if isinstance(event, bb.event.CacheLoadProgress): + x = event.current + Gdk.threads_enter() + pbar.set_fraction(x * 1.0 / progress_total) + Gdk.threads_leave() + continue + + if isinstance(event, bb.event.CacheLoadCompleted): + continue + + if isinstance(event, bb.event.ParseStarted): + progress_total = event.total + if progress_total == 0: + continue + Gdk.threads_enter() + pbar.set_fraction(0.0) + bardialog.set_title("Processing recipes") + Gdk.threads_leave() + + if isinstance(event, bb.event.ParseProgress): + x = event.current + Gdk.threads_enter() + pbar.set_fraction(x * 1.0 / progress_total) + Gdk.threads_leave() + continue + + if isinstance(event, bb.event.ParseCompleted): + Gdk.threads_enter() + bardialog.set_title("Generating dependency tree") + Gdk.threads_leave() + continue + + if isinstance(event, bb.event.DepTreeGenerated): + Gdk.threads_enter() + bardialog.hide() + dep.parse(event._depgraph) + Gdk.threads_leave() + + if isinstance(event, bb.command.CommandCompleted): + continue + + if isinstance(event, bb.event.NoProvider): + if event._runtime: + r = "R" + else: + r = "" + + extra = '' + if not event._reasons: + if event._close_matches: + extra = ". 
Close matches:\n %s" % '\n '.join(event._close_matches) + + if event._dependees: + print("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)%s" % (r, event._item, ", ".join(event._dependees), r, extra)) + else: + print("Nothing %sPROVIDES '%s'%s" % (r, event._item, extra)) + if event._reasons: + for reason in event._reasons: + print(reason) + + _, error = server.runCommand(["stateShutdown"]) + if error: + print('Unable to cleanly shutdown: %s' % error) + break + + if isinstance(event, bb.command.CommandFailed): + print("Command execution failed: %s" % event.error) + return event.exitcode + + if isinstance(event, bb.command.CommandExit): + return event.exitcode + + if isinstance(event, bb.cooker.CookerExit): + break + + continue + except EnvironmentError as ioerror: + # ignore interrupted io + if ioerror.args[0] == 4: + pass + except KeyboardInterrupt: + if shutdown == 2: + print("\nThird Keyboard Interrupt, exit.\n") + break + if shutdown == 1: + print("\nSecond Keyboard Interrupt, stopping...\n") + _, error = server.runCommand(["stateForceShutdown"]) + if error: + print('Unable to cleanly stop: %s' % error) + if shutdown == 0: + print("\nKeyboard Interrupt, closing down...\n") + _, error = server.runCommand(["stateShutdown"]) + if error: + print('Unable to cleanly shutdown: %s' % error) + shutdown = shutdown + 1 + pass diff --git a/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py b/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py index 9808f6bc8..71f04fa5c 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/ui/toasterui.py @@ -168,6 +168,9 @@ def main(server, eventHandler, params): logger.warning("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.") build_history_enabled = False + if not "buildstats" in inheritlist.split(" "): + logger.warning("buildstats is not enabled. Please enable INHERIT += \"buildstats\" to generate build statistics.") + if not params.observe_only: params.updateFromServer(server) params.updateToServer(server, os.environ.copy()) @@ -233,6 +236,9 @@ def main(server, eventHandler, params): # pylint: disable=protected-access # the code will look into the protected variables of the event; no easy way around this + if isinstance(event, bb.event.HeartbeatEvent): + continue + if isinstance(event, bb.event.ParseStarted): if not (build_log and build_log_file_path): build_log, build_log_file_path = _open_build_log(log_dir) @@ -432,9 +438,7 @@ def main(server, eventHandler, params): elif event.type == "SetBRBE": buildinfohelper.brbe = buildinfohelper._get_data_from_event(event) elif event.type == "TaskArtifacts": - # not implemented yet - # see https://bugzilla.yoctoproject.org/show_bug.cgi?id=10283 for details - pass + buildinfohelper.scan_task_artifacts(event) elif event.type == "OSErrorException": logger.error(event) else: diff --git a/import-layers/yocto-poky/bitbake/lib/bb/utils.py b/import-layers/yocto-poky/bitbake/lib/bb/utils.py index 16fc9db25..6a44db57d 100644 --- a/import-layers/yocto-poky/bitbake/lib/bb/utils.py +++ b/import-layers/yocto-poky/bitbake/lib/bb/utils.py @@ -523,12 +523,8 @@ def md5_file(filename): """ Return the hex string representation of the MD5 checksum of filename. 
""" - try: - import hashlib - m = hashlib.md5() - except ImportError: - import md5 - m = md5.new() + import hashlib + m = hashlib.md5() with open(filename, "rb") as f: for line in f: @@ -538,14 +534,9 @@ def md5_file(filename): def sha256_file(filename): """ Return the hex string representation of the 256-bit SHA checksum of - filename. On Python 2.4 this will return None, so callers will need to - handle that by either skipping SHA checks, or running a standalone sha256sum - binary. + filename. """ - try: - import hashlib - except ImportError: - return None + import hashlib s = hashlib.sha256() with open(filename, "rb") as f: @@ -557,10 +548,7 @@ def sha1_file(filename): """ Return the hex string representation of the SHA1 checksum of the filename """ - try: - import hashlib - except ImportError: - return None + import hashlib s = hashlib.sha1() with open(filename, "rb") as f: @@ -665,7 +653,7 @@ def build_environment(d): for var in bb.data.keys(d): export = d.getVarFlag(var, "export", False) if export: - os.environ[var] = d.getVar(var, True) or "" + os.environ[var] = d.getVar(var) or "" def _check_unsafe_delete_path(path): """ @@ -692,7 +680,7 @@ def remove(path, recurse=False): if _check_unsafe_delete_path(path): raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path) # shutil.rmtree(name) would be ideal but its too slow - subprocess.call(['rm', '-rf'] + glob.glob(path)) + subprocess.check_call(['rm', '-rf'] + glob.glob(path)) return for name in glob.glob(path): try: @@ -911,11 +899,20 @@ def copyfile(src, dest, newmtime = None, sstat = None): newmtime = sstat[stat.ST_MTIME] return newmtime -def which(path, item, direction = 0, history = False): +def which(path, item, direction = 0, history = False, executable=False): """ - Locate a file in a PATH + Locate `item` in the list of paths `path` (colon separated string like $PATH). + If `direction` is non-zero then the list is reversed. + If `history` is True then the list of candidates also returned as result,history. + If `executable` is True then the candidate has to be an executable file, + otherwise the candidate simply has to exist. """ + if executable: + is_candidate = lambda p: os.path.isfile(p) and os.access(p, os.X_OK) + else: + is_candidate = lambda p: os.path.exists(p) + hist = [] paths = (path or "").split(':') if direction != 0: @@ -924,7 +921,7 @@ def which(path, item, direction = 0, history = False): for p in paths: next = os.path.join(p, item) hist.append(next) - if os.path.exists(next): + if is_candidate(next): if not os.path.isabs(next): next = os.path.abspath(next) if history: @@ -953,7 +950,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d): Arguments: variable -- the variable name. This will be fetched and expanded (using - d.getVar(variable, True)) and then split into a set(). + d.getVar(variable)) and then split into a set(). checkvalues -- if this is a string it is split on whitespace into a set(), otherwise coerced directly into a set(). @@ -966,7 +963,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d): d -- the data store. 
""" - val = d.getVar(variable, True) + val = d.getVar(variable) if not val: return falsevalue val = set(val.split()) @@ -979,7 +976,7 @@ def contains(variable, checkvalues, truevalue, falsevalue, d): return falsevalue def contains_any(variable, checkvalues, truevalue, falsevalue, d): - val = d.getVar(variable, True) + val = d.getVar(variable) if not val: return falsevalue val = set(val.split()) @@ -991,6 +988,30 @@ def contains_any(variable, checkvalues, truevalue, falsevalue, d): return truevalue return falsevalue +def filter(variable, checkvalues, d): + """Return all words in the variable that are present in the checkvalues. + + Arguments: + + variable -- the variable name. This will be fetched and expanded (using + d.getVar(variable)) and then split into a set(). + + checkvalues -- if this is a string it is split on whitespace into a set(), + otherwise coerced directly into a set(). + + d -- the data store. + """ + + val = d.getVar(variable) + if not val: + return '' + val = set(val.split()) + if isinstance(checkvalues, str): + checkvalues = set(checkvalues.split()) + else: + checkvalues = set(checkvalues) + return ' '.join(sorted(checkvalues & val)) + def cpu_count(): return multiprocessing.cpu_count() @@ -1378,10 +1399,10 @@ def edit_bblayers_conf(bblayers_conf, add, remove): def get_file_layer(filename, d): """Determine the collection (as defined by a layer's layer.conf file) containing the specified file""" - collections = (d.getVar('BBFILE_COLLECTIONS', True) or '').split() + collections = (d.getVar('BBFILE_COLLECTIONS') or '').split() collection_res = {} for collection in collections: - collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection, True) or '' + collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or '' def path_to_layer(path): # Use longest path so we handle nested layers @@ -1394,7 +1415,7 @@ def get_file_layer(filename, d): return match result = None - bbfiles = (d.getVar('BBFILES', True) or '').split() + bbfiles = (d.getVar('BBFILES') or '').split() bbfilesmatch = False for bbfilesentry in bbfiles: if fnmatch.fnmatch(filename, bbfilesentry): @@ -1471,7 +1492,7 @@ def export_proxies(d): if v in os.environ.keys(): exported = True else: - v_proxy = d.getVar(v, True) + v_proxy = d.getVar(v) if v_proxy is not None: os.environ[v] = v_proxy exported = True @@ -1503,3 +1524,14 @@ def load_plugins(logger, plugins, pluginpath): plugins.append(obj or plugin) else: plugins.append(plugin) + + +class LogCatcher(logging.Handler): + """Logging handler for collecting logged messages so you can check them later""" + def __init__(self): + self.messages = [] + logging.Handler.__init__(self, logging.WARNING) + def emit(self, record): + self.messages.append(bb.build.logformatter.format(record)) + def contains(self, message): + return (message in self.messages) diff --git a/import-layers/yocto-poky/bitbake/lib/bblayers/action.py b/import-layers/yocto-poky/bitbake/lib/bblayers/action.py index 739ae27b9..cf9470427 100644 --- a/import-layers/yocto-poky/bitbake/lib/bblayers/action.py +++ b/import-layers/yocto-poky/bitbake/lib/bblayers/action.py @@ -180,7 +180,7 @@ build results (as the layer priority order has effectively changed). 
if first_regex: # Find the BBFILES entries that match (which will have come from this conf/layer.conf file) - bbfiles = str(self.tinfoil.config_data.getVar('BBFILES', True)).split() + bbfiles = str(self.tinfoil.config_data.getVar('BBFILES')).split() bbfiles_layer = [] for item in bbfiles: if first_regex.match(item): diff --git a/import-layers/yocto-poky/bitbake/lib/bblayers/common.py b/import-layers/yocto-poky/bitbake/lib/bblayers/common.py index b10fb4cea..98515ced4 100644 --- a/import-layers/yocto-poky/bitbake/lib/bblayers/common.py +++ b/import-layers/yocto-poky/bitbake/lib/bblayers/common.py @@ -12,7 +12,7 @@ class LayerPlugin(): def tinfoil_init(self, tinfoil): self.tinfoil = tinfoil - self.bblayers = (self.tinfoil.config_data.getVar('BBLAYERS', True) or "").split() + self.bblayers = (self.tinfoil.config_data.getVar('BBLAYERS') or "").split() layerconfs = self.tinfoil.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', self.tinfoil.config_data) self.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path)) for layer, path in layerconfs.items()} diff --git a/import-layers/yocto-poky/bitbake/lib/bblayers/layerindex.py b/import-layers/yocto-poky/bitbake/lib/bblayers/layerindex.py index 10ad718eb..506c1109d 100644 --- a/import-layers/yocto-poky/bitbake/lib/bblayers/layerindex.py +++ b/import-layers/yocto-poky/bitbake/lib/bblayers/layerindex.py @@ -56,7 +56,7 @@ class LayerIndexPlugin(ActionPlugin): r = conn.getresponse() if r.status != 200: raise Exception("Failed to read " + path + ": %d %s" % (r.status, r.reason)) - return json.loads(r.read()) + return json.loads(r.read().decode()) def get_layer_deps(self, layername, layeritems, layerbranches, layerdependencies, branchnum, selfname=False): def layeritems_info_id(items_name, layeritems): @@ -151,7 +151,7 @@ class LayerIndexPlugin(ActionPlugin): def do_layerindex_fetch(self, args): """Fetches a layer from a layer index along with its dependent layers, and adds them to conf/bblayers.conf. """ - apiurl = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_URL', True) + apiurl = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_URL') if not apiurl: logger.error("Cannot get BBLAYERS_LAYERINDEX_URL") return 1 @@ -173,8 +173,8 @@ class LayerIndexPlugin(ActionPlugin): return 1 ignore_layers = [] - for collection in self.tinfoil.config_data.getVar('BBFILE_COLLECTIONS', True).split(): - lname = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection, True) + for collection in self.tinfoil.config_data.getVar('BBFILE_COLLECTIONS').split(): + lname = self.tinfoil.config_data.getVar('BBLAYERS_LAYERINDEX_NAME_%s' % collection) if lname: ignore_layers.append(lname) @@ -225,7 +225,7 @@ class LayerIndexPlugin(ActionPlugin): printedlayers.append(dependency) if repourls: - fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR', True) + fetchdir = self.tinfoil.config_data.getVar('BBLAYERS_FETCH_DIR') if not fetchdir: logger.error("Cannot get BBLAYERS_FETCH_DIR") return 1 diff --git a/import-layers/yocto-poky/bitbake/lib/bblayers/query.py b/import-layers/yocto-poky/bitbake/lib/bblayers/query.py index ee1e7c8a1..bef3af31a 100644 --- a/import-layers/yocto-poky/bitbake/lib/bblayers/query.py +++ b/import-layers/yocto-poky/bitbake/lib/bblayers/query.py @@ -5,8 +5,6 @@ import sys import os import re -import bb.cache -import bb.providers import bb.utils from bblayers.common import LayerPlugin @@ -62,7 +60,7 @@ are overlayed will also be listed, with a " (skipped)" suffix. 
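A couple of the bb.utils changes above are easy to miss: which() gains an executable flag, a new filter() helper joins contains()/contains_any(), and all of them now call d.getVar(variable) with expansion as the default. A small self-contained sketch, assuming only that the bb modules are on PYTHONPATH; the DISTRO_FEATURES value is a placeholder:

import os
import bb.data
import bb.utils

# Throwaway datastore so the helpers can be exercised outside a build.
d = bb.data.init()
d.setVar('DISTRO_FEATURES', 'systemd wayland')

# which(): search a colon-separated path list; with executable=True the
# candidate must be an executable file, not merely exist.
print(bb.utils.which(os.environ.get('PATH', ''), 'sh', executable=True))

# filter() (new in this update): words of the variable that also appear in
# checkvalues, returned sorted and space-joined -> 'systemd'
print(bb.utils.filter('DISTRO_FEATURES', 'systemd x11', d))

# contains(): truevalue if every checkvalue is present, else falsevalue.
print(bb.utils.contains('DISTRO_FEATURES', 'systemd', 'yes', 'no', d))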
# factor - however, each layer.conf is free to either prepend or append to # BBPATH (or indeed do crazy stuff with it). Thus the order in BBPATH might # not be exactly the order present in bblayers.conf either. - bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True)) + bbpath = str(self.tinfoil.config_data.getVar('BBPATH')) overlayed_class_found = False for (classfile, classdirs) in classes.items(): if len(classdirs) > 1: @@ -114,7 +112,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix. def list_recipes(self, title, pnspec, show_overlayed_only, show_same_ver_only, show_filenames, show_multi_provider_only, inherits): if inherits: - bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True)) + bbpath = str(self.tinfoil.config_data.getVar('BBPATH')) for classname in inherits: classfile = 'classes/%s.bbclass' % classname if not bb.utils.which(bbpath, classfile, history=False): @@ -122,15 +120,13 @@ skipped recipes will also be listed, with a " (skipped)" suffix. sys.exit(1) pkg_pn = self.tinfoil.cooker.recipecaches[''].pkg_pn - (latest_versions, preferred_versions) = bb.providers.findProviders(self.tinfoil.config_data, self.tinfoil.cooker.recipecaches[''], pkg_pn) - allproviders = bb.providers.allProviders(self.tinfoil.cooker.recipecaches['']) + (latest_versions, preferred_versions) = self.tinfoil.find_providers() + allproviders = self.tinfoil.get_all_providers() # Ensure we list skipped recipes # We are largely guessing about PN, PV and the preferred version here, # but we have no choice since skipped recipes are not fully parsed skiplist = list(self.tinfoil.cooker.skiplist.keys()) - skiplist.sort( key=lambda fileitem: self.tinfoil.cooker.collection.calc_bbfile_priority(fileitem) ) - skiplist.reverse() for fn in skiplist: recipe_parts = os.path.splitext(os.path.basename(fn))[0].split('_') p = recipe_parts[0] @@ -158,7 +154,7 @@ skipped recipes will also be listed, with a " (skipped)" suffix. logger.plain("%s:", pn) logger.plain(" %s %s%s", layer.ljust(20), ver, skipped) - global_inherit = (self.tinfoil.config_data.getVar('INHERIT', True) or "").split() + global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split() cls_re = re.compile('classes/') preffiles = [] @@ -246,17 +242,22 @@ skipped recipes will also be listed, with a " (skipped)" suffix. Lists recipes with the bbappends that apply to them as subitems. """ - - logger.plain('=== Appended recipes ===') + if args.pnspec: + logger.plain('=== Matched appended recipes ===') + else: + logger.plain('=== Appended recipes ===') pnlist = list(self.tinfoil.cooker_data.pkg_pn.keys()) pnlist.sort() appends = False for pn in pnlist: + if args.pnspec and pn != args.pnspec: + continue + if self.show_appends_for_pn(pn): appends = True - if self.show_appends_for_skipped(): + if not args.pnspec and self.show_appends_for_skipped(): appends = True if not appends: @@ -265,10 +266,7 @@ Lists recipes with the bbappends that apply to them as subitems. def show_appends_for_pn(self, pn): filenames = self.tinfoil.cooker_data.pkg_pn[pn] - best = bb.providers.findBestProvider(pn, - self.tinfoil.config_data, - self.tinfoil.cooker_data, - self.tinfoil.cooker_data.pkg_pn) + best = self.tinfoil.find_best_provider(pn) best_filename = os.path.basename(best[3]) return self.show_appends_output(filenames, best_filename) @@ -319,12 +317,12 @@ NOTE: .bbappend files can impact the dependencies. 
ignore_layers = (args.ignore or '').split(',') pkg_fn = self.tinfoil.cooker_data.pkg_fn - bbpath = str(self.tinfoil.config_data.getVar('BBPATH', True)) + bbpath = str(self.tinfoil.config_data.getVar('BBPATH')) self.require_re = re.compile(r"require\s+(.+)") self.include_re = re.compile(r"include\s+(.+)") self.inherit_re = re.compile(r"inherit\s+(.+)") - global_inherit = (self.tinfoil.config_data.getVar('INHERIT', True) or "").split() + global_inherit = (self.tinfoil.config_data.getVar('INHERIT') or "").split() # The bb's DEPENDS and RDEPENDS for f in pkg_fn: @@ -336,10 +334,7 @@ NOTE: .bbappend files can impact the dependencies. deps = self.tinfoil.cooker_data.deps[f] for pn in deps: if pn in self.tinfoil.cooker_data.pkg_pn: - best = bb.providers.findBestProvider(pn, - self.tinfoil.config_data, - self.tinfoil.cooker_data, - self.tinfoil.cooker_data.pkg_pn) + best = self.tinfoil.find_best_provider(pn) self.check_cross_depends("DEPENDS", layername, f, best[3], args.filenames, ignore_layers) # The RDPENDS @@ -352,14 +347,11 @@ NOTE: .bbappend files can impact the dependencies. sorted_rdeps[k2] = 1 all_rdeps = sorted_rdeps.keys() for rdep in all_rdeps: - all_p = bb.providers.getRuntimeProviders(self.tinfoil.cooker_data, rdep) + all_p, best = self.tinfoil.get_runtime_providers(rdep) if all_p: if f in all_p: # The recipe provides this one itself, ignore continue - best = bb.providers.filterProvidersRunTime(all_p, rdep, - self.tinfoil.config_data, - self.tinfoil.cooker_data)[0][0] self.check_cross_depends("RDEPENDS", layername, f, best, args.filenames, ignore_layers) # The RRECOMMENDS @@ -372,14 +364,11 @@ NOTE: .bbappend files can impact the dependencies. sorted_rrecs[k2] = 1 all_rrecs = sorted_rrecs.keys() for rrec in all_rrecs: - all_p = bb.providers.getRuntimeProviders(self.tinfoil.cooker_data, rrec) + all_p, best = self.tinfoil.get_runtime_providers(rrec) if all_p: if f in all_p: # The recipe provides this one itself, ignore continue - best = bb.providers.filterProvidersRunTime(all_p, rrec, - self.tinfoil.config_data, - self.tinfoil.cooker_data)[0][0] self.check_cross_depends("RRECOMMENDS", layername, f, best, args.filenames, ignore_layers) # The inherit class @@ -493,7 +482,8 @@ NOTE: .bbappend files can impact the dependencies. 
parser_show_recipes.add_argument('-i', '--inherits', help='only list recipes that inherit the named class', metavar='CLASS', default='') parser_show_recipes.add_argument('pnspec', nargs='?', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)') - self.add_command(sp, 'show-appends', self.do_show_appends) + parser_show_appends = self.add_command(sp, 'show-appends', self.do_show_appends) + parser_show_appends.add_argument('pnspec', nargs='?', help='optional recipe name specification (wildcards allowed, enclose in quotes to avoid shell expansion)') parser_show_cross_depends = self.add_command(sp, 'show-cross-depends', self.do_show_cross_depends) parser_show_cross_depends.add_argument('-f', '--filenames', help='show full file path', action='store_true') diff --git a/import-layers/yocto-poky/bitbake/lib/prserv/serv.py b/import-layers/yocto-poky/bitbake/lib/prserv/serv.py index cafcc820c..a7efa58bc 100644 --- a/import-layers/yocto-poky/bitbake/lib/prserv/serv.py +++ b/import-layers/yocto-poky/bitbake/lib/prserv/serv.py @@ -5,12 +5,7 @@ import threading import queue import socket import io - -try: - import sqlite3 -except ImportError: - from pysqlite2 import dbapi2 as sqlite3 - +import sqlite3 import bb.server.xmlrpc import prserv import prserv.db @@ -242,12 +237,25 @@ class PRServer(SimpleXMLRPCServer): sys.stdout.flush() sys.stderr.flush() + + # We could be called from a python thread with io.StringIO as + # stdout/stderr or it could be 'real' unix fd forking where we need + # to physically close the fds to prevent the program launching us from + # potentially hanging on a pipe. Handle both cases. si = open('/dev/null', 'r') + try: + os.dup2(si.fileno(),sys.stdin.fileno()) + except (AttributeError, io.UnsupportedOperation): + sys.stdin = si so = open(self.logfile, 'a+') - se = so - os.dup2(si.fileno(),sys.stdin.fileno()) - os.dup2(so.fileno(),sys.stdout.fileno()) - os.dup2(se.fileno(),sys.stderr.fileno()) + try: + os.dup2(so.fileno(),sys.stdout.fileno()) + except (AttributeError, io.UnsupportedOperation): + sys.stdout = so + try: + os.dup2(so.fileno(),sys.stderr.fileno()) + except (AttributeError, io.UnsupportedOperation): + sys.stderr = so # Clear out all log handlers prior to the fork() to avoid calling # event handlers not part of the PRserver @@ -420,7 +428,7 @@ class PRServiceConfigError(Exception): def auto_start(d): global singleton - host_params = list(filter(None, (d.getVar('PRSERV_HOST', True) or '').split(':'))) + host_params = list(filter(None, (d.getVar('PRSERV_HOST') or '').split(':'))) if not host_params: return None @@ -431,7 +439,7 @@ def auto_start(d): if is_local_special(host_params[0], int(host_params[1])) and not singleton: import bb.utils - cachedir = (d.getVar("PERSISTENT_DIR", True) or d.getVar("CACHE", True)) + cachedir = (d.getVar("PERSISTENT_DIR") or d.getVar("CACHE")) if not cachedir: logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable") raise PRServiceConfigError diff --git a/import-layers/yocto-poky/bitbake/lib/simplediff/LICENSE b/import-layers/yocto-poky/bitbake/lib/simplediff/LICENSE new file mode 100644 index 000000000..8242dde97 --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/simplediff/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2008 - 2013 Paul Butler and contributors + +This sofware may be used under a zlib/libpng-style license: + +This software is provided 'as-is', without any express or implied warranty. 
In +no event will the authors be held liable for any damages arising from the use +of this software. + +Permission is granted to anyone to use this software for any purpose, including +commercial applications, and to alter it and redistribute it freely, subject to +the following restrictions: + +1. The origin of this software must not be misrepresented; you must not claim +that you wrote the original software. If you use this software in a product, an +acknowledgment in the product documentation would be appreciated but is not +required. + +2. Altered source versions must be plainly marked as such, and must not be +misrepresented as being the original software. + +3. This notice may not be removed or altered from any source distribution. + diff --git a/import-layers/yocto-poky/bitbake/lib/simplediff/__init__.py b/import-layers/yocto-poky/bitbake/lib/simplediff/__init__.py new file mode 100644 index 000000000..57ee3c5c4 --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/simplediff/__init__.py @@ -0,0 +1,198 @@ +''' +Simple Diff for Python version 1.0 + +Annotate two versions of a list with the values that have been +changed between the versions, similar to unix's `diff` but with +a dead-simple Python interface. + +(C) Paul Butler 2008-2012 +May be used and distributed under the zlib/libpng license + +''' + +__all__ = ['diff', 'string_diff', 'html_diff'] +__version__ = '1.0' + + +def diff(old, new): + ''' + Find the differences between two lists. Returns a list of pairs, where the + first value is in ['+','-','='] and represents an insertion, deletion, or + no change for that list. The second value of the pair is the list + of elements. + + Params: + old the old list of immutable, comparable values (ie. a list + of strings) + new the new list of immutable, comparable values + + Returns: + A list of pairs, with the first part of the pair being one of three + strings ('-', '+', '=') and the second part being a list of values from + the original old and/or new lists. The first part of the pair + corresponds to whether the list of values is a deletion, insertion, or + unchanged, respectively. + + Examples: + >>> diff([1,2,3,4],[1,3,4]) + [('=', [1]), ('-', [2]), ('=', [3, 4])] + + >>> diff([1,2,3,4],[2,3,4,1]) + [('-', [1]), ('=', [2, 3, 4]), ('+', [1])] + + >>> diff('The quick brown fox jumps over the lazy dog'.split(), + ... 'The slow blue cheese drips over the lazy carrot'.split()) + ... # doctest: +NORMALIZE_WHITESPACE + [('=', ['The']), + ('-', ['quick', 'brown', 'fox', 'jumps']), + ('+', ['slow', 'blue', 'cheese', 'drips']), + ('=', ['over', 'the', 'lazy']), + ('-', ['dog']), + ('+', ['carrot'])] + + ''' + + # Create a map from old values to their indices + old_index_map = dict() + for i, val in enumerate(old): + old_index_map.setdefault(val,list()).append(i) + + # Find the largest substring common to old and new. + # We use a dynamic programming approach here. + # + # We iterate over each value in the `new` list, calling the + # index `inew`. At each iteration, `overlap[i]` is the + # length of the largest suffix of `old[:i]` equal to a suffix + # of `new[:inew]` (or unset when `old[i]` != `new[inew]`). + # + # At each stage of iteration, the new `overlap` (called + # `_overlap` until the original `overlap` is no longer needed) + # is built from the old one. + # + # If the length of overlap exceeds the largest substring + # seen so far (`sub_length`), we update the largest substring + # to the overlapping strings. 
+
+    overlap = dict()
+    # `sub_start_old` is the index of the beginning of the largest overlapping
+    # substring in the old list. `sub_start_new` is the index of the beginning
+    # of the same substring in the new list. `sub_length` is the length that
+    # overlaps in both.
+    # These track the largest overlapping substring seen so far, so naturally
+    # we start with a 0-length substring.
+    sub_start_old = 0
+    sub_start_new = 0
+    sub_length = 0
+
+    for inew, val in enumerate(new):
+        _overlap = dict()
+        for iold in old_index_map.get(val,list()):
+            # now we are considering all values of iold such that
+            # `old[iold] == new[inew]`.
+            _overlap[iold] = (iold and overlap.get(iold - 1, 0)) + 1
+            if(_overlap[iold] > sub_length):
+                # this is the largest substring seen so far, so store its
+                # indices
+                sub_length = _overlap[iold]
+                sub_start_old = iold - sub_length + 1
+                sub_start_new = inew - sub_length + 1
+        overlap = _overlap
+
+    if sub_length == 0:
+        # If no common substring is found, we return an insert and delete...
+        return (old and [('-', old)] or []) + (new and [('+', new)] or [])
+    else:
+        # ...otherwise, the common substring is unchanged and we recursively
+        # diff the text before and after that substring
+        return diff(old[ : sub_start_old], new[ : sub_start_new]) + \
+               [('=', new[sub_start_new : sub_start_new + sub_length])] + \
+               diff(old[sub_start_old + sub_length : ],
+                    new[sub_start_new + sub_length : ])
+
+
+def string_diff(old, new):
+    '''
+    Returns the difference between the old and new strings when split on
+    whitespace. Considers punctuation a part of the word
+
+    This function is intended as an example; you'll probably want
+    a more sophisticated wrapper in practice.
+
+    Params:
+        old     the old string
+        new     the new string
+
+    Returns:
+        the output of `diff` on the two strings after splitting them
+        on whitespace (a list of change instructions; see the docstring
+        of `diff`)
+
+    Examples:
+        >>> string_diff('The quick brown fox', 'The fast blue fox')
+        ... # doctest: +NORMALIZE_WHITESPACE
+        [('=', ['The']),
+         ('-', ['quick', 'brown']),
+         ('+', ['fast', 'blue']),
+         ('=', ['fox'])]
+
+    '''
+    return diff(old.split(), new.split())
+
+
+def html_diff(old, new):
+    '''
+    Returns the difference between two strings (as in stringDiff) in
+    HTML format. HTML code in the strings is NOT escaped, so you
+    will get weird results if the strings contain HTML.
+
+    This function is intended as an example; you'll probably want
+    a more sophisticated wrapper in practice.
+
+    Params:
+        old     the old string
+        new     the new string
+
+    Returns:
+        the output of the diff expressed with HTML <ins> and <del>
+        tags.
+
+    Examples:
+        >>> html_diff('The quick brown fox', 'The fast blue fox')
+        'The <del>quick brown</del> <ins>fast blue</ins> fox'
+    '''
+    con = {'=': (lambda x: x),
+           '+': (lambda x: "<ins>" + x + "</ins>"),
+           '-': (lambda x: "<del>" + x + "</del>")}
+    return " ".join([(con[a])(" ".join(b)) for a, b in string_diff(old, new)])
+
+
+def check_diff(old, new):
+    '''
+    This tests that diffs returned by `diff` are valid. You probably won't
+    want to use this function, but it's provided for documentation and
+    testing.
+
+    A diff should satisfy the property that the old input is equal to the
+    elements of the result annotated with '-' or '=' concatenated together.
+    Likewise, the new input is equal to the elements of the result annotated
+    with '+' or '=' concatenated together. This function compares `old`,
+    `new`, and the results of `diff(old, new)` to ensure this is true.
+ + Tests: + >>> check_diff('ABCBA', 'CBABA') + >>> check_diff('Foobarbaz', 'Foobarbaz') + >>> check_diff('Foobarbaz', 'Boobazbam') + >>> check_diff('The quick brown fox', 'Some quick brown car') + >>> check_diff('A thick red book', 'A quick blue book') + >>> check_diff('dafhjkdashfkhasfjsdafdasfsda', 'asdfaskjfhksahkfjsdha') + >>> check_diff('88288822828828288282828', '88288882882828282882828') + >>> check_diff('1234567890', '24689') + ''' + old = list(old) + new = list(new) + result = diff(old, new) + _old = [val for (a, vals) in result if (a in '=-') for val in vals] + assert old == _old, 'Expected %s, got %s' % (old, _old) + _new = [val for (a, vals) in result if (a in '=+') for val in vals] + assert new == _new, 'Expected %s, got %s' % (new, _new) + diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py b/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py index e5f7c988c..12071029a 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py +++ b/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/localhostbecontroller.py @@ -27,7 +27,7 @@ import shutil from django.db import transaction from django.db.models import Q from bldcontrol.models import BuildEnvironment, BRLayer, BRVariable, BRTarget, BRBitbake -from orm.models import CustomImageRecipe, Layer, Layer_Version, ProjectLayer +from orm.models import CustomImageRecipe, Layer, Layer_Version, ProjectLayer, ToasterSetting import subprocess from toastermain import settings @@ -200,72 +200,93 @@ class LocalhostBEController(BuildEnvironmentController): logger.debug("localhostbecontroller: current layer list %s " % pformat(layerlist)) + if self.pokydirname is None and os.path.exists(os.path.join(self.be.sourcedir, "oe-init-build-env")): + logger.debug("localhostbecontroller: selected poky dir name %s" % self.be.sourcedir) + self.pokydirname = self.be.sourcedir + # 5. create custom layer and add custom recipes to it - layerpath = os.path.join(self.be.builddir, - CustomImageRecipe.LAYER_NAME) for target in targets: try: - customrecipe = CustomImageRecipe.objects.get(name=target.target, - project=bitbake.req.project) + customrecipe = CustomImageRecipe.objects.get( + name=target.target, + project=bitbake.req.project) + + custom_layer_path = self.setup_custom_image_recipe( + customrecipe, layers) + + if os.path.isdir(custom_layer_path): + layerlist.append(custom_layer_path) + except CustomImageRecipe.DoesNotExist: - continue # not a custom recipe, skip - - # create directory structure - for name in ("conf", "recipes"): - path = os.path.join(layerpath, name) - if not os.path.isdir(path): - os.makedirs(path) - - # create layer.oonf - config = os.path.join(layerpath, "conf", "layer.conf") - if not os.path.isfile(config): - with open(config, "w") as conf: - conf.write('BBPATH .= ":${LAYERDIR}"\nBBFILES += "${LAYERDIR}/recipes/*.bb"\n') - - # Update the Layer_Version dirpath that has our base_recipe in - # to be able to read the base recipe to then generate the - # custom recipe. 
- br_layer_base_recipe = layers.get( - layer_version=customrecipe.base_recipe.layer_version) - - br_layer_base_dirpath = \ - os.path.join(self.be.sourcedir, - self.getGitCloneDirectory( - br_layer_base_recipe.giturl, - br_layer_base_recipe.commit), - customrecipe.base_recipe.layer_version.dirpath - ) - - customrecipe.base_recipe.layer_version.dirpath = \ - br_layer_base_dirpath - - customrecipe.base_recipe.layer_version.save() - - # create recipe - recipe_path = \ - os.path.join(layerpath, "recipes", "%s.bb" % target.target) - with open(recipe_path, "w") as recipef: - recipef.write(customrecipe.generate_recipe_file_contents()) - - # Update the layer and recipe objects - customrecipe.layer_version.dirpath = layerpath - customrecipe.layer_version.save() - - customrecipe.file_path = recipe_path - customrecipe.save() - - # create *Layer* objects needed for build machinery to work - BRLayer.objects.get_or_create(req=target.req, - name=layer.name, - dirpath=layerpath, - giturl="file://%s" % layerpath) - if os.path.isdir(layerpath): - layerlist.append(layerpath) + continue # not a custom recipe, skip - self.islayerset = True layerlist.extend(nongitlayerlist) + logger.debug("\n\nset layers gives this list %s" % pformat(layerlist)) + self.islayerset = True return layerlist + def setup_custom_image_recipe(self, customrecipe, layers): + """ Set up toaster-custom-images layer and recipe files """ + layerpath = os.path.join(self.be.builddir, + CustomImageRecipe.LAYER_NAME) + + # create directory structure + for name in ("conf", "recipes"): + path = os.path.join(layerpath, name) + if not os.path.isdir(path): + os.makedirs(path) + + # create layer.conf + config = os.path.join(layerpath, "conf", "layer.conf") + if not os.path.isfile(config): + with open(config, "w") as conf: + conf.write('BBPATH .= ":${LAYERDIR}"\nBBFILES += "${LAYERDIR}/recipes/*.bb"\n') + + # Update the Layer_Version dirpath that has our base_recipe in + # to be able to read the base recipe to then generate the + # custom recipe. 
+ br_layer_base_recipe = layers.get( + layer_version=customrecipe.base_recipe.layer_version) + + # If the layer is one that we've cloned we know where it lives + if br_layer_base_recipe.giturl and br_layer_base_recipe.commit: + layer_path = self.getGitCloneDirectory( + br_layer_base_recipe.giturl, + br_layer_base_recipe.commit) + # Otherwise it's a local layer + elif br_layer_base_recipe.local_source_dir: + layer_path = br_layer_base_recipe.local_source_dir + else: + logger.error("Unable to workout the dir path for the custom" + " image recipe") + + br_layer_base_dirpath = os.path.join( + self.be.sourcedir, + layer_path, + customrecipe.base_recipe.layer_version.dirpath) + + customrecipe.base_recipe.layer_version.dirpath = br_layer_base_dirpath + + customrecipe.base_recipe.layer_version.save() + + # create recipe + recipe_path = os.path.join(layerpath, "recipes", "%s.bb" % + customrecipe.name) + with open(recipe_path, "w") as recipef: + recipef.write(customrecipe.generate_recipe_file_contents()) + + # Update the layer and recipe objects + customrecipe.layer_version.dirpath = layerpath + customrecipe.layer_version.layer.local_source_dir = layerpath + customrecipe.layer_version.layer.save() + customrecipe.layer_version.save() + + customrecipe.file_path = recipe_path + customrecipe.save() + + return layerpath + + def readServerLogFile(self): return open(os.path.join(self.be.builddir, "toaster_server.log"), "r").read() @@ -277,23 +298,17 @@ class LocalhostBEController(BuildEnvironmentController): builddir = '%s-toaster-%d' % (self.be.builddir, bitbake.req.project.id) oe_init = os.path.join(self.pokydirname, 'oe-init-build-env') # init build environment - self._shellcmd("bash -c 'source %s %s'" % (oe_init, builddir), + try: + custom_script = ToasterSetting.objects.get(name="CUSTOM_BUILD_INIT_SCRIPT").value + custom_script = custom_script.replace("%BUILDDIR%" ,builddir) + self._shellcmd("bash -c 'source %s'" % (custom_script)) + except ToasterSetting.DoesNotExist: + self._shellcmd("bash -c 'source %s %s'" % (oe_init, builddir), self.be.sourcedir) # update bblayers.conf - bblconfpath = os.path.join(builddir, "conf/bblayers.conf") - conflines = open(bblconfpath, "r").readlines() - skip = False + bblconfpath = os.path.join(builddir, "conf/toaster-bblayers.conf") with open(bblconfpath, 'w') as bblayers: - for line in conflines: - if line.startswith("# line added by toaster"): - skip = True - continue - if skip: - skip = False - else: - bblayers.write(line) - bblayers.write('# line added by toaster build control\n' 'BBLAYERS = "%s"' % ' '.join(layers)) @@ -306,9 +321,10 @@ class LocalhostBEController(BuildEnvironmentController): # run bitbake server from the clone bitbake = os.path.join(self.pokydirname, 'bitbake', 'bin', 'bitbake') - self._shellcmd('bash -c \"source %s %s; BITBAKE_UI="knotty" %s --read %s ' + toasterlayers = os.path.join(builddir,"conf/toaster-bblayers.conf") + self._shellcmd('bash -c \"source %s %s; BITBAKE_UI="knotty" %s --read %s --read %s ' '--server-only -t xmlrpc -B 0.0.0.0:0\"' % (oe_init, - builddir, bitbake, confpath), self.be.sourcedir) + builddir, bitbake, confpath, toasterlayers), self.be.sourcedir) # read port number from bitbake.lock self.be.bbport = "" diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py b/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py index 7f7a5a955..df11f9d16 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py 
+++ b/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/management/commands/runbuilds.py @@ -11,9 +11,11 @@ from orm.models import Build, LogMessage, Target import logging import traceback import signal +import os logger = logging.getLogger("toaster") + class Command(NoArgsCommand): args = "" help = "Schedules and executes build requests as possible. "\ @@ -50,7 +52,7 @@ class Command(NoArgsCommand): logger.debug("runbuilds: No build env") return - logger.info("runbuilds: starting build %s, environment %s" % \ + logger.info("runbuilds: starting build %s, environment %s" % (br, bec.be)) # let the build request know where it is being executed @@ -80,7 +82,7 @@ class Command(NoArgsCommand): def archive(self): for br in BuildRequest.objects.filter(state=BuildRequest.REQ_ARCHIVE): - if br.build == None: + if br.build is None: br.state = BuildRequest.REQ_FAILED else: br.state = BuildRequest.REQ_COMPLETED @@ -99,10 +101,10 @@ class Command(NoArgsCommand): Q(updated__lt=timezone.now() - timedelta(seconds=30)) ).update(lock=BuildEnvironment.LOCK_FREE) - # update all Builds that were in progress and failed to start - for br in BuildRequest.objects.filter(state=BuildRequest.REQ_FAILED, - build__outcome=Build.IN_PROGRESS): + for br in BuildRequest.objects.filter( + state=BuildRequest.REQ_FAILED, + build__outcome=Build.IN_PROGRESS): # transpose the launch errors in ToasterExceptions br.build.outcome = Build.FAILED for brerror in br.brerror_set.all(): @@ -117,7 +119,6 @@ class Command(NoArgsCommand): br.environment.lock = BuildEnvironment.LOCK_FREE br.environment.save() - # update all BuildRequests without a build created for br in BuildRequest.objects.filter(build=None): br.build = Build.objects.create(project=br.project, @@ -144,7 +145,7 @@ class Command(NoArgsCommand): # Make sure the LOCK is removed for builds which have been fully # cancelled - for br in BuildRequest.objects.filter(\ + for br in BuildRequest.objects.filter( Q(build__outcome=Build.CANCELLED) & Q(state=BuildRequest.REQ_CANCELLING) & ~Q(environment=None)): @@ -168,6 +169,12 @@ class Command(NoArgsCommand): logger.warn("runbuilds: schedule exception %s" % str(e)) def handle_noargs(self, **options): + pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."), + ".runbuilds.pid") + + with open(pidfile_path, 'w') as pidfile: + pidfile.write("%s" % os.getpid()) + self.runbuild() signal.signal(signal.SIGUSR1, lambda sig, frame: None) diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/tests.py b/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/tests.py deleted file mode 100644 index 475ac0a16..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/bldcontrol/tests.py +++ /dev/null @@ -1,141 +0,0 @@ -""" -This file demonstrates writing tests using the unittest module. These will pass -when you run "manage.py test". - -Replace this with more appropriate tests for your application. 
-""" - -from django.test import TestCase - -from bldcontrol.bbcontroller import BitbakeController, BuildSetupException -from bldcontrol.localhostbecontroller import LocalhostBEController -from bldcontrol.models import BuildEnvironment, BuildRequest -from bldcontrol.management.commands.runbuilds import Command - -import socket -import subprocess -import os - -# standard poky data hardcoded for testing -BITBAKE_LAYER = type('bitbake_info', (object,), { "giturl": "git://git.yoctoproject.org/poky.git", "dirpath": "", "commit": "HEAD"}) -POKY_LAYERS = [ - type('poky_info', (object,), { "name": "meta", "giturl": "git://git.yoctoproject.org/poky.git", "dirpath": "meta", "commit": "HEAD"}), - type('poky_info', (object,), { "name": "meta-yocto", "giturl": "git://git.yoctoproject.org/poky.git", "dirpath": "meta-yocto", "commit": "HEAD"}), - type('poky_info', (object,), { "name": "meta-yocto-bsp", "giturl": "git://git.yoctoproject.org/poky.git", "dirpath": "meta-yocto-bsp", "commit": "HEAD"}), - ] - - - -# we have an abstract test class designed to ensure that the controllers use a single interface -# specific controller tests only need to override the _getBuildEnvironment() method - -test_sourcedir = os.getenv("TTS_SOURCE_DIR") -test_builddir = os.getenv("TTS_BUILD_DIR") -test_address = os.getenv("TTS_TEST_ADDRESS", "localhost") - -if test_sourcedir == None or test_builddir == None or test_address == None: - raise Exception("Please set TTTS_SOURCE_DIR, TTS_BUILD_DIR and TTS_TEST_ADDRESS") - -# The bb server will expect a toaster-pre.conf file to exist. If it doesn't exit then we make -# an empty one here. -open(test_builddir + 'conf/toaster-pre.conf', 'a').close() - -class BEControllerTests(object): - - def _serverForceStop(self, bc): - err = bc._shellcmd("netstat -tapn 2>/dev/null | grep 8200 | awk '{print $7}' | sort -fu | cut -d \"/\" -f 1 | grep -v -- - | tee /dev/fd/2 | xargs -r kill") - self.assertTrue(err == '', "bitbake server pid %s not stopped" % err) - - def test_serverStartAndStop(self): - obe = self._getBuildEnvironment() - bc = self._getBEController(obe) - try: - # setting layers, skip any layer info - bc.setLayers(BITBAKE_LAYER, POKY_LAYERS) - except NotImplementedError: - print("Test skipped due to command not implemented yet") - return True - # We are ok with the exception as we're handling the git already exists - except BuildSetupException: - pass - - bc.pokydirname = test_sourcedir - bc.islayerset = True - - hostname = test_address.split("@")[-1] - - # test start server and stop - bc.startBBServer() - - self.assertFalse(socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect_ex((hostname, int(bc.be.bbport))), "Server not answering") - - self._serverForceStop(bc) - - def test_getBBController(self): - obe = self._getBuildEnvironment() - bc = self._getBEController(obe) - layerSet = False - try: - # setting layers, skip any layer info - layerSet = bc.setLayers(BITBAKE_LAYER, POKY_LAYERS) - except NotImplementedError: - print("Test skipped due to command not implemented yet") - return True - # We are ok with the exception as we're handling the git already exists - except BuildSetupException: - pass - - bc.pokydirname = test_sourcedir - bc.islayerset = True - - bbc = bc.getBBController() - self.assertTrue(isinstance(bbc, BitbakeController)) - - self._serverForceStop(bc) - -class LocalhostBEControllerTests(TestCase, BEControllerTests): - def __init__(self, *args): - super(LocalhostBEControllerTests, self).__init__(*args) - - - def _getBuildEnvironment(self): - return 
BuildEnvironment.objects.create( - lock = BuildEnvironment.LOCK_FREE, - betype = BuildEnvironment.TYPE_LOCAL, - address = test_address, - sourcedir = test_sourcedir, - builddir = test_builddir ) - - def _getBEController(self, obe): - return LocalhostBEController(obe) - -class RunBuildsCommandTests(TestCase): - def test_bec_select(self): - """ - Tests that we can find and lock a build environment - """ - - obe = BuildEnvironment.objects.create(lock = BuildEnvironment.LOCK_FREE, betype = BuildEnvironment.TYPE_LOCAL) - command = Command() - bec = command._selectBuildEnvironment() - - # make sure we select the object we've just built - self.assertTrue(bec.be.id == obe.id, "Environment is not properly selected") - # we have a locked environment - self.assertTrue(bec.be.lock == BuildEnvironment.LOCK_LOCK, "Environment is not locked") - # no more selections possible here - self.assertRaises(IndexError, command._selectBuildEnvironment) - - def test_br_select(self): - from orm.models import Project, Release, BitbakeVersion, Branch - p = Project.objects.create_project("test", Release.objects.get_or_create(name = "HEAD", bitbake_version = BitbakeVersion.objects.get_or_create(name="HEAD", branch=Branch.objects.get_or_create(name="HEAD"))[0])[0]) - obr = BuildRequest.objects.create(state = BuildRequest.REQ_QUEUED, project = p) - command = Command() - br = command._selectBuildRequest() - - # make sure we select the object we've just built - self.assertTrue(obr.id == br.id, "Request is not properly selected") - # we have a locked environment - self.assertTrue(br.state == BuildRequest.REQ_INPROGRESS, "Request is not updated") - # no more selections possible here - self.assertRaises(IndexError, command._selectBuildRequest) diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/README b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/README deleted file mode 100644 index 46d0ff008..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/README +++ /dev/null @@ -1,6 +0,0 @@ -contrib directory for toaster - -This directory holds code that works with Toaster, without being an integral part of the Toaster project. -It is intended for testing code, testing fixtures, tools for Toaster, etc. - -NOTE: This directory is NOT a Python module. diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/README b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/README deleted file mode 100644 index 22fa5673b..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/README +++ /dev/null @@ -1,41 +0,0 @@ - -Toaster Testing Framework -Yocto Project - - -Rationale ------------- -As Toaster contributions grow with the number of people that contribute code, verifying each patch prior to submitting upstream becomes a hard-to-scale problem for humans. We devised this system in order to run patch-level validation, trying to eliminate common problems from submitted patches, in an automated fashion. - -The Toaster Testing Framework is a set of Python scripts that provides an extensible way to write smoke and regression tests that will be run on each patch set sent for review on the toaster mailing list. - - -Usage ------------- -There are three main executable scripts in this directory. - * runner.py is designed to be run from the command line. It requires, as mandatory parameter, a branch name on poky-contrib, branch which contains the patches to be tested. 
The program will auto-discover the available tests residing in this directory by looking for unittest classes, and will run the tests on the branch dumping the output to the standard output. Optionally, it can take parameters inhibiting the branch checkout, or specifying a single test to be run, for debugging purposes. - * launcher.py is a designed to be run from a crontab or similar scheduling mechanism. It looks up a backlog file containing branches-to-test (named tasks in the source code), select the first one in FIFO manner, and launch runner.py on it. It will await for completion, and email the standard output and standard error dumps from the runner.py execution - * recv.py is an email receiver, designed to be called as a pipe from a .forward file. It is used to monitor a mailing list, for example, and add tasks to the backlog based on review requests coming on the mailing list. - - -Installation ------------- -As prerequisite, we expect a functioning email system on a machine with Python2. - -The broad steps to installation -* set up the .forward on the receiving email account to pipe to the recv.py file -* edit config.py and settings.json to alter for local installation settings -* on email receive, verify backlog.txt to see that the tasks are received and marked for processing -* execute launcher.py in command line to verify that a test occurs with no problems, and that the outgoing email is delivered -* add launcher.py - - - -Contribute ------------- -What we need are tests. Add your own tests to either tests.py file, or to a new file. -Use "config.logger" to write logs that will make it to email. - -Commonly used code should be going to shellutils, and configuration to config.py. - -Contribute code by emailing patches to the list: toaster@yoctoproject.org (membership required) diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/TODO b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/TODO deleted file mode 100644 index 117192106..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/TODO +++ /dev/null @@ -1,9 +0,0 @@ -We need to implement tests: - -automated link checker; currently -$ linkchecker -t 1000 -F csv http://localhost:8000/ - -integrate the w3c-validation service; currently -$ python urlcheck.py - - diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/config.py b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/config.py deleted file mode 100644 index 87b427cc3..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/config.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/python - -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# Copyright (C) 2015 Alexandru Damian for Intel Corp. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
- -# This is the configuration/single module for tts -# everything that would be a global variable goes here - -import os, sys, logging -import socket - -LOGDIR = "log" -SETTINGS_FILE = os.path.join(os.path.dirname(__file__), "settings.json") -TEST_DIR_NAME = "tts_testdir" - -DEBUG = True - -OWN_PID = os.getpid() - -W3C_VALIDATOR = "http://icarus.local/w3c-validator/check?doctype=HTML5&uri=" - -TOASTER_PORT = 56789 - -TESTDIR = None - -#we parse the w3c URL to know where to connect - -import urlparse - -def get_public_ip(): - temp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - parsed_url = urlparse.urlparse("http://icarus.local/w3c-validator/check?doctype=HTML5&uri=") - temp_socket.connect((parsed_url.netloc, 80 if parsed_url.port is None else parsed_url.port)) - public_ip = temp_socket.getsockname()[0] - temp_socket.close() - return public_ip - -TOASTER_BASEURL = "http://%s:%d/" % (get_public_ip(), TOASTER_PORT) - - -OWN_EMAIL_ADDRESS = "Toaster Testing Framework " -REPORT_EMAIL_ADDRESS = "alexandru.damian@intel.com" - -# make sure we have the basic logging infrastructure - -#pylint: disable=invalid-name -# we disable the invalid name because the module-level "logger" is used througout bitbake -logger = logging.getLogger("toastertest") -__console__ = logging.StreamHandler(sys.stdout) -__console__.setFormatter(logging.Formatter("%(asctime)s %(levelname)s: %(message)s")) -logger.addHandler(__console__) -logger.setLevel(logging.DEBUG) - - -# singleton file names -LOCKFILE = "/tmp/ttf.lock" -BACKLOGFILE = os.path.join(os.path.dirname(__file__), "backlog.txt") - -# task states -def enum(*sequential, **named): - enums = dict(zip(sequential, range(len(sequential))), **named) - reverse = dict((value, key) for key, value in enums.items()) - enums['reverse_mapping'] = reverse - return type('Enum', (), enums) - - -class TASKS(object): - #pylint: disable=too-few-public-methods - PENDING = "PENDING" - INPROGRESS = "INPROGRESS" - DONE = "DONE" - - @staticmethod - def next_task(task): - if task == TASKS.PENDING: - return TASKS.INPROGRESS - if task == TASKS.INPROGRESS: - return TASKS.DONE - raise Exception("Invalid next task state for %s" % task) - -# TTS specific -CONTRIB_REPO = "git@git.yoctoproject.org:poky-contrib" - diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/launcher.py b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/launcher.py deleted file mode 100755 index e5794c1c5..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/launcher.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/python - -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# Copyright (C) 2015 Alexandru Damian for Intel Corp. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -# Program to run the next task listed from the backlog.txt; designed to be -# run from crontab. 
- -from __future__ import print_function -import sys, os, config, shellutils -from shellutils import ShellCmdException - -# Import smtplib for the actual sending function -import smtplib - -# Import the email modules we'll need -from email.mime.text import MIMEText - -def _take_lockfile(): - return shellutils.lockfile(shellutils.mk_lock_filename()) - - -def read_next_task_by_state(task_state, task_name=None): - if not os.path.exists(os.path.join(os.path.dirname(__file__), config.BACKLOGFILE)): - return None - os.rename(config.BACKLOGFILE, config.BACKLOGFILE + ".tmp") - task = None - with open(config.BACKLOGFILE + ".tmp", "r") as f_in: - with open(config.BACKLOGFILE, "w") as f_out: - for line in f_in.readlines(): - if task is None: - fields = line.strip().split("|", 2) - if fields[1] == task_state: - if task_name is None or task_name == fields[0]: - task = fields[0] - print("Updating %s %s to %s" % (task, task_state, config.TASKS.next_task(task_state))) - line = "%s|%s\n" % (task, config.TASKS.next_task(task_state)) - f_out.write(line) - os.remove(config.BACKLOGFILE + ".tmp") - return task - -def send_report(task_name, plaintext, errtext=None): - if errtext is None: - msg = MIMEText(plaintext) - else: - if plaintext is None: - plaintext = "" - msg = MIMEText("--STDOUT dump--\n\n%s\n\n--STDERR dump--\n\n%s" % (plaintext, errtext)) - - msg['Subject'] = "[review-request] %s - smoke test results" % task_name - msg['From'] = config.OWN_EMAIL_ADDRESS - msg['To'] = config.REPORT_EMAIL_ADDRESS - - smtp_connection = smtplib.SMTP("localhost") - smtp_connection.sendmail(config.OWN_EMAIL_ADDRESS, [config.REPORT_EMAIL_ADDRESS], msg.as_string()) - smtp_connection.quit() - -def main(): - # we don't do anything if we have another instance of us running - lock_file = _take_lockfile() - - if lock_file is None: - if config.DEBUG: - print("Concurrent script in progress, exiting") - sys.exit(1) - - next_task = read_next_task_by_state(config.TASKS.PENDING) - if next_task is not None: - print("Next task is", next_task) - errtext = None - out = None - try: - out = shellutils.run_shell_cmd("%s %s" % (os.path.join(os.path.dirname(__file__), "runner.py"), next_task)) - except ShellCmdException as exc: - print("Failed while running the test runner: %s", exc) - errtext = exc.__str__() - send_report(next_task, out, errtext) - read_next_task_by_state(config.TASKS.INPROGRESS, next_task) - else: - print("No task") - - shellutils.unlockfile(lock_file) - - -if __name__ == "__main__": - main() diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/log/.create b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/log/.create deleted file mode 100644 index e69de29bb..000000000 diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/recv.py b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/recv.py deleted file mode 100755 index 07efdac44..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/recv.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/python - -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# Copyright (C) 2015 Alexandru Damian for Intel Corp. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -# Program to receive review requests by email and log tasks to backlog.txt -# Designed to be run by the email system from a .forward file: -# -# cat .forward -# |[full/path]/recv.py - -from __future__ import print_function -import sys, config, shellutils - -from email.parser import Parser - -def recv_mail(datastring): - headers = Parser().parsestr(datastring) - return headers['subject'] - -def main(): - lock_file = shellutils.lockfile(shellutils.mk_lock_filename(), retry=True) - - if lock_file is None: - if config.DEBUG: - print("Concurrent script in progress, exiting") - sys.exit(1) - - subject = recv_mail(sys.stdin.read()) - - subject_parts = subject.split() - if "[review-request]" in subject_parts: - task_name = subject_parts[subject_parts.index("[review-request]") + 1] - with open(config.BACKLOGFILE, "a") as fout: - line = "%s|%s\n" % (task_name, config.TASKS.PENDING) - fout.write(line) - - shellutils.unlockfile(lock_file) - -if __name__ == "__main__": - main() diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/runner.py b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/runner.py deleted file mode 100755 index d01386acf..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/runner.py +++ /dev/null @@ -1,222 +0,0 @@ -#!/usr/bin/python - -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# Copyright (C) 2015 Alexandru Damian for Intel Corp. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - - -# This is the main test execution controller. It is designed to be run -# manually from the command line, or to be called from a different program -# that schedules test execution. -# -# Execute runner.py -h for help. 
- - - -from __future__ import print_function -import sys, os -import unittest, importlib -import logging, pprint, json -import re -from shellutils import ShellCmdException, mkdirhier, run_shell_cmd - -import config - -# we also log to a file, in addition to console, because our output is important -__log_file_name__ = os.path.join(os.path.dirname(__file__), "log/tts_%d.log" % config.OWN_PID) -mkdirhier(os.path.dirname(__log_file_name__)) -__log_file__ = open(__log_file_name__, "w") -__file_handler__ = logging.StreamHandler(__log_file__) -__file_handler__.setFormatter(logging.Formatter("%(asctime)s %(levelname)s: %(message)s")) - -config.logger.addHandler(__file_handler__) - -# set up log directory -try: - if not os.path.exists(config.LOGDIR): - os.mkdir(config.LOGDIR) - else: - if not os.path.isdir(config.LOGDIR): - raise Exception("Expected log dir '%s' is not actually a directory." % config.LOGDIR) -except OSError as exc: - raise exc - -# creates the under-test-branch as a separate directory -def set_up_test_branch(settings, branch_name): - testdir = "%s/%s.%d" % (settings['workdir'], config.TEST_DIR_NAME, config.OWN_PID) - - # creates the host dir - if os.path.exists(testdir): - raise Exception("Test dir '%s'is already there, aborting" % testdir) - - # may raise OSError, is to be handled by the caller - os.makedirs(testdir) - - - # copies over the .git from the localclone - run_shell_cmd("cp -a '%s'/.git '%s'" % (settings['localclone'], testdir)) - - # add the remote if it doesn't exist - crt_remotes = run_shell_cmd("git remote -v", cwd=testdir) - remotes = [word for line in crt_remotes.split("\n") for word in line.split()] - if not config.CONTRIB_REPO in remotes: - remote_name = "tts_contrib" - run_shell_cmd("git remote add %s %s" % (remote_name, config.CONTRIB_REPO), cwd=testdir) - else: - remote_name = remotes[remotes.index(config.CONTRIB_REPO) - 1] - - # do the fetch - run_shell_cmd("git fetch %s -p" % remote_name, cwd=testdir) - - # do the checkout - run_shell_cmd("git checkout origin/master && git branch -D %s; git checkout %s/%s -b %s && git reset --hard" % (branch_name, remote_name, branch_name, branch_name), cwd=testdir) - - return testdir - - -def __search_for_tests(): - # we find all classes that can run, and run them - tests = [] - for _, _, files_list in os.walk(os.path.dirname(os.path.abspath(__file__))): - for module_file in [f[:-3] for f in files_list if f.endswith(".py") and not f.startswith("__init__")]: - config.logger.debug("Inspecting module %s", module_file) - current_module = importlib.import_module(module_file) - crtclass_names = vars(current_module) - for name in crtclass_names: - tested_value = crtclass_names[name] - if isinstance(tested_value, type(unittest.TestCase)) and issubclass(tested_value, unittest.TestCase): - tests.append((module_file, name)) - break - return tests - - -# boilerplate to self discover tests and run them -def execute_tests(dir_under_test, testname): - - if testname is not None and "." 
in testname: - tests = [] - tests.append(tuple(testname.split(".", 2))) - else: - tests = __search_for_tests() - - # let's move to the directory under test - crt_dir = os.getcwd() - os.chdir(dir_under_test) - - # execute each module - # pylint: disable=broad-except - # we disable the broad-except because we want to actually catch all possible exceptions - try: - # sorting the tests by the numeric order in the class name - tests = sorted(tests, key=lambda x: int(re.search(r"[0-9]+", x[1]).group(0))) - config.logger.debug("Discovered test clases: %s", pprint.pformat(tests)) - unittest.installHandler() - suite = unittest.TestSuite() - loader = unittest.TestLoader() - result = unittest.TestResult() - result.failfast = True - for module_file, test_name in tests: - suite.addTest(loader.loadTestsFromName("%s.%s" % (module_file, test_name))) - config.logger.info("Running %d test(s)", suite.countTestCases()) - suite.run(result) - - for error in result.errors: - config.logger.error("Exception on test: %s\n%s", error[0], - "\n".join(["-- %s" % x for x in error[1].split("\n")])) - - for failure in result.failures: - config.logger.error("Failed test: %s:\n%s\n", failure[0], - "\n".join(["-- %s" % x for x in failure[1].split("\n")])) - - config.logger.info("Test results: %d ran, %d errors, %d failures", result.testsRun, len(result.errors), len(result.failures)) - - except Exception as exc: - import traceback - config.logger.error("Exception while running test. Tracedump: \n%s", traceback.format_exc()) - finally: - os.chdir(crt_dir) - return len(result.failures) - -# verify that we had a branch-under-test name as parameter -def validate_args(): - from optparse import OptionParser - parser = OptionParser(usage="usage: %prog [options] branch_under_test") - - parser.add_option("-t", "--test-dir", dest="testdir", default=None, help="Use specified directory to run tests, inhibits the checkout.") - parser.add_option("-s", "--single", dest="singletest", default=None, help="Run only the specified test") - - (options, args) = parser.parse_args() - if len(args) < 1: - raise Exception("Please specify the branch to run on. Use option '-h' when in doubt.") - return (options, args) - - - - -# load the configuration options -def read_settings(): - if not os.path.exists(config.SETTINGS_FILE) or not os.path.isfile(config.SETTINGS_FILE): - raise Exception("Config file '%s' cannot be openend" % config.SETTINGS_FILE) - return json.loads(open(config.SETTINGS_FILE, "r").read()) - - -# cleanup ! -def clean_up(testdir): - run_shell_cmd("rm -rf -- '%s'" % testdir) - -def dump_info(settings, options, args): - """ detailed information about current run configuration, for debugging purposes. 
- """ - config.logger.debug("Settings:\n%s\nOptions:\n%s\nArguments:\n%s\n", settings, options, args) - -def main(): - (options, args) = validate_args() - - settings = read_settings() - need_cleanup = False - - # dump debug info - dump_info(settings, options, args) - - testdir = None - no_failures = 1 - try: - if options.testdir is not None and os.path.exists(options.testdir): - testdir = os.path.abspath(options.testdir) - config.logger.info("No checkout, using %s", testdir) - else: - need_cleanup = True - testdir = set_up_test_branch(settings, args[0]) # we expect a branch name as first argument - - config.TESTDIR = testdir # we let tests know where to run - - # ensure that the test dir only contains no *.pyc leftovers - run_shell_cmd("find '%s' -type f -name *.pyc -exec rm {} \\;" % testdir) - - no_failures = execute_tests(testdir, options.singletest) - - except ShellCmdException as exc: - import traceback - config.logger.error("Error while setting up testing. Traceback: \n%s", traceback.format_exc()) - finally: - if need_cleanup and testdir is not None: - clean_up(testdir) - - sys.exit(no_failures) - -if __name__ == "__main__": - main() diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/settings.json b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/settings.json deleted file mode 100644 index bb671eaf2..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/settings.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "repo": "git@git.yoctoproject.org:poky-contrib", - "localclone": "/home/ddalex/ssd/yocto/poky", - "workdir": "/home/ddalex/ssd/yocto" -} diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/shellutils.py b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/shellutils.py deleted file mode 100644 index ce64c0634..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/shellutils.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/python - -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# Copyright (C) 2015 Alexandru Damian for Intel Corp. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - -# Utilities shared by tests and other common bits of code. - -import sys, os, subprocess, fcntl, errno -import config -from config import logger - - -# License warning; this code is copied from the BitBake project, file bitbake/lib/bb/utils.py -# The code is originally licensed GPL-2.0, and we redistribute it under still GPL-2.0 - -# End of copy is marked with #ENDOFCOPY marker - -def mkdirhier(directory): - """Create a directory like 'mkdir -p', but does not complain if - directory already exists like os.makedirs - """ - - try: - os.makedirs(directory) - except OSError as exc: - if exc.errno != errno.EEXIST: - raise exc - -def lockfile(name, shared=False, retry=True): - """ - Use the file fn as a lock file, return when the lock has been acquired. 
- Returns a variable to pass to unlockfile(). - """ - config.logger.debug("take lockfile %s", name) - dirname = os.path.dirname(name) - mkdirhier(dirname) - - if not os.access(dirname, os.W_OK): - logger.error("Unable to acquire lock '%s', directory is not writable", - name) - sys.exit(1) - - operation = fcntl.LOCK_EX - if shared: - operation = fcntl.LOCK_SH - if not retry: - operation = operation | fcntl.LOCK_NB - - while True: - # If we leave the lockfiles lying around there is no problem - # but we should clean up after ourselves. This gives potential - # for races though. To work around this, when we acquire the lock - # we check the file we locked was still the lock file on disk. - # by comparing inode numbers. If they don't match or the lockfile - # no longer exists, we start again. - - # This implementation is unfair since the last person to request the - # lock is the most likely to win it. - - # pylint: disable=broad-except - # we disable the broad-except because we want to actually catch all possible exceptions - try: - lock_file = open(name, 'a+') - fileno = lock_file.fileno() - fcntl.flock(fileno, operation) - statinfo = os.fstat(fileno) - if os.path.exists(lock_file.name): - statinfo2 = os.stat(lock_file.name) - if statinfo.st_ino == statinfo2.st_ino: - return lock_file - lock_file.close() - except Exception as exc: - try: - lock_file.close() - except Exception as exc2: - config.logger.error("Failed to close the lockfile: %s", exc2) - config.logger.error("Failed to acquire the lockfile: %s", exc) - if not retry: - return None - -def unlockfile(lock_file): - """ - Unlock a file locked using lockfile() - """ - try: - # If we had a shared lock, we need to promote to exclusive before - # removing the lockfile. Attempt this, ignore failures. - fcntl.flock(lock_file.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB) - os.unlink(lock_file.name) - except (IOError, OSError): - pass - fcntl.flock(lock_file.fileno(), fcntl.LOCK_UN) - lock_file.close() - -#ENDOFCOPY - - -def mk_lock_filename(): - our_name = os.path.basename(__file__) - our_name = ".%s" % ".".join(reversed(our_name.split("."))) - return config.LOCKFILE + our_name - - - -class ShellCmdException(Exception): - pass - -def run_shell_cmd(command, cwd=None): - if cwd is None: - cwd = os.getcwd() - - config.logger.debug("_shellcmd: (%s) %s", cwd, command) - process = subprocess.Popen(command, cwd=cwd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - (out, err) = process.communicate() - process.wait() - if process.returncode: - if len(err) == 0: - err = "command: %s \n%s" % (command, out) - else: - err = "command: %s \n%s" % (command, err) - config.logger.warning("_shellcmd: error \n%s\n%s", out, err) - raise ShellCmdException(err) - else: - #config.logger.debug("localhostbecontroller: shellcmd success\n%s" % out) - return out - diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/tests.py b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/tests.py deleted file mode 100644 index c510ebb10..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/tests.py +++ /dev/null @@ -1,115 +0,0 @@ -#!/usr/bin/python - -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# Copyright (C) 2015 Alexandru Damian for Intel Corp. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. - - -# Test definitions. The runner will look for and auto-discover the tests -# no matter what they file are they in, as long as they are in the same directory -# as this file. - -import unittest -from shellutils import run_shell_cmd, ShellCmdException -import config - -import pexpect -import sys, os, signal, time - -class Test00PyCompilable(unittest.TestCase): - ''' Verifies that all Python files are syntactically correct ''' - def test_compile_file(self): - try: - run_shell_cmd("find . -name *py -type f -print0 | xargs -0 -n1 -P20 python -m py_compile", config.TESTDIR) - except ShellCmdException as exc: - self.fail("Error compiling python files: %s" % (exc)) - - def test_pylint_file(self): - try: - run_shell_cmd(r"find . -iname \"*\.py\" -type f -print0 | PYTHONPATH=${PYTHONPATH}:. xargs -r -0 -n1 pylint --load-plugins pylint_django -E --reports=n 2>&1", cwd=config.TESTDIR + "/bitbake/lib/toaster") - except ShellCmdException as exc: - self.fail("Pylint fails: %s\n" % exc) - -class Test01PySystemStart(unittest.TestCase): - ''' Attempts to start Toaster, verify that it is succesfull, and stop it ''' - def setUp(self): - run_shell_cmd("bash -c 'rm -f build/*log'") - - def test_start_interactive_mode(self): - try: - run_shell_cmd("bash -c 'source %s/oe-init-build-env && source toaster start webport=%d && source toaster stop'" % (config.TESTDIR, config.TOASTER_PORT), config.TESTDIR) - except ShellCmdException as exc: - self.fail("Failed starting interactive mode: %s" % (exc)) - - def test_start_managed_mode(self): - try: - run_shell_cmd("%s/bitbake/bin/toaster webport=%d nobrowser & sleep 10 && curl http://localhost:%d/ && kill -2 %%1" % (config.TESTDIR, config.TOASTER_PORT, config.TOASTER_PORT), config.TESTDIR) - except ShellCmdException as exc: - self.fail("Failed starting managed mode: %s" % (exc)) - -class Test02HTML5Compliance(unittest.TestCase): - def setUp(self): - self.origdir = os.getcwd() - self.crtdir = os.path.dirname(config.TESTDIR) - self.cleanup_database = False - os.chdir(self.crtdir) - if not os.path.exists(os.path.join(self.crtdir, "toaster.sqlite")): - self.cleanup_database = True - run_shell_cmd("%s/bitbake/lib/toaster/manage.py syncdb --noinput" % config.TESTDIR) - run_shell_cmd("%s/bitbake/lib/toaster/manage.py migrate orm" % config.TESTDIR) - run_shell_cmd("%s/bitbake/lib/toaster/manage.py migrate bldcontrol" % config.TESTDIR) - run_shell_cmd("%s/bitbake/lib/toaster/manage.py loadconf %s/meta-yocto/conf/toasterconf.json" % (config.TESTDIR, config.TESTDIR)) - run_shell_cmd("%s/bitbake/lib/toaster/manage.py lsupdates" % config.TESTDIR) - - setup = pexpect.spawn("%s/bitbake/lib/toaster/manage.py checksettings" % config.TESTDIR) - setup.logfile = sys.stdout - setup.expect(r".*or type the full path to a different directory: ") - setup.sendline('') - setup.sendline('') - setup.expect(r".*or type the full path to a different directory: ") - setup.sendline('') - setup.expect(r"Enter your option: ") - setup.sendline('0') - - self.child = pexpect.spawn("bash", ["%s/bitbake/bin/toaster" % config.TESTDIR, "webport=%d" % 
config.TOASTER_PORT, "nobrowser"], cwd=self.crtdir) - self.child.logfile = sys.stdout - self.child.expect("Toaster is now running. You can stop it with Ctrl-C") - - def test_html5_compliance(self): - import urllist, urlcheck - results = {} - for url in urllist.URLS: - results[url] = urlcheck.validate_html5(config.TOASTER_BASEURL + url) - - failed = [] - for url in results: - if results[url][1] != 0: - failed.append((url, results[url])) - - - self.assertTrue(len(failed) == 0, "Not all URLs validate: \n%s " % "\n".join(["".join(str(x)) for x in failed])) - - #(config.TOASTER_BASEURL + url, status, errors, warnings)) - - def tearDown(self): - while self.child.isalive(): - self.child.kill(signal.SIGINT) - time.sleep(1) - os.chdir(self.origdir) - toaster_sqlite_path = os.path.join(self.crtdir, "toaster.sqlite") - if self.cleanup_database and os.path.exists(toaster_sqlite_path): - os.remove(toaster_sqlite_path) diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/run_toastertests.py b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/run_toastertests.py deleted file mode 100755 index 8ca45a813..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/run_toastertests.py +++ /dev/null @@ -1,155 +0,0 @@ -#!/usr/bin/python - -# Copyright - -# DESCRIPTION -# This is script for running all selected toaster cases on -# selected web browsers manifested in toaster_test.cfg. - -# 1. How to start toaster in yocto: -# $ source poky/oe-init-build-env -# $ source toaster start -# $ bitbake core-image-minimal - -# 2. How to install selenium on Ubuntu: -# $ sudo apt-get install scrot python-pip -# $ sudo pip install selenium - -# 3. How to install selenium addon in firefox: -# Download the lastest firefox addon from http://release.seleniumhq.org/selenium-ide/ -# Then install it. You can also install firebug and firepath addon - -# 4. How to start writing a new case: -# All you need to do is to implement the function test_xxx() and pile it on. - -# 5. How to test with Chrome browser -# Download/install chrome on host -# Download chromedriver from https://code.google.com/p/chromedriver/downloads/list according to your host type -# put chromedriver in PATH, (e.g. /usr/bin/, bear in mind to chmod) -# For windows host, you may put chromedriver.exe in the same directory as chrome.exe - -import unittest, sys, os, platform -import ConfigParser -import argparse -from toaster_automation_test import toaster_cases - - -def get_args_parser(): - description = "Script that runs toaster auto tests." 
- parser = argparse.ArgumentParser(description=description) - parser.add_argument('--run-all-tests', required=False, action="store_true", dest="run_all_tests", default=False, - help='Run all tests.') - parser.add_argument('--run-suite', required=False, dest='run_suite', default=False, - help='run suite (defined in cfg file)') - - return parser - - -def get_tests(): - testslist = [] - - prefix = 'toaster_automation_test.toaster_cases' - - for t in dir(toaster_cases): - if t.startswith('test_'): - testslist.append('.'.join((prefix, t))) - - return testslist - - -def get_tests_from_cfg(suite=None): - - testslist = [] - config = ConfigParser.SafeConfigParser() - config.read('toaster_test.cfg') - - if suite is not None: - target_suite = suite.lower() - - # TODO: if suite is valid suite - - else: - target_suite = platform.system().lower() - - try: - tests_from_cfg = eval(config.get('toaster_test_' + target_suite, 'test_cases')) - except: - print('Failed to get test cases from cfg file. Make sure the format is correct.') - return None - - prefix = 'toaster_automation_test.toaster_cases.test_' - for t in tests_from_cfg: - testslist.append(prefix + str(t)) - - return testslist - -def main(): - - # In case this script is called from other directory - os.chdir(os.path.abspath(sys.path[0])) - - parser = get_args_parser() - args = parser.parse_args() - - if args.run_all_tests: - testslist = get_tests() - elif args.run_suite: - testslist = get_tests_from_cfg(args.run_suite) - os.environ['TOASTER_SUITE'] = args.run_suite - else: - testslist = get_tests_from_cfg() - - if not testslist: - print('Failed to get test cases.') - exit(1) - - suite = unittest.TestSuite() - loader = unittest.TestLoader() - loader.sortTestMethodsUsing = None - runner = unittest.TextTestRunner(verbosity=2, resultclass=buildResultClass(args)) - - for test in testslist: - try: - suite.addTests(loader.loadTestsFromName(test)) - except: - return 1 - - result = runner.run(suite) - - if result.wasSuccessful(): - return 0 - else: - return 1 - - -def buildResultClass(args): - """Build a Result Class to use in the testcase execution""" - - class StampedResult(unittest.TextTestResult): - """ - Custom TestResult that prints the time when a test starts. As toaster-auto - can take a long time (ie a few hours) to run, timestamps help us understand - what tests are taking a long time to execute. - """ - def startTest(self, test): - import time - self.stream.write(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + " - ") - super(StampedResult, self).startTest(test) - - return StampedResult - - -if __name__ == "__main__": - - try: - ret = main() - except: - ret = 1 - import traceback - traceback.print_exc() - finally: - if os.getenv('TOASTER_SUITE'): - del os.environ['TOASTER_SUITE'] - sys.exit(ret) - - diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_automation_test.py b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_automation_test.py deleted file mode 100755 index 1a786fa02..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_automation_test.py +++ /dev/null @@ -1,2376 +0,0 @@ -#!/usr/bin/python -# Copyright - -# DESCRIPTION -# This is toaster automation base class and test cases file - -# History: -# 2015.03.09 inital version -# 2015.03.23 adding toaster_test.cfg, run_toastertest.py so we can run case by case from outside - -# Briefs: -# This file is comprised of 3 parts: -# I: common utils like sorting, getting attribute.. 
etc -# II: base class part, which complies with unittest frame work and -# contains class selenium-based functions -# III: test cases -# to add new case: just implement new test_xxx() function in class toaster_cases - -# NOTES for cases: -# case 946: -# step 6 - 8 needs to be observed using screenshots -# case 956: -# step 2 - 3 needs to be run manually - -import unittest, time, re, sys, getopt, os, logging, string, errno, exceptions -import shutil, argparse, ConfigParser, platform, json -from selenium import webdriver -from selenium.common.exceptions import NoSuchElementException -from selenium import selenium -from selenium.webdriver.common.by import By -from selenium.webdriver.common.keys import Keys -from selenium.webdriver.support.ui import Select -import sqlite3 as sqlite - - -########################################### -# # -# PART I: utils stuff # -# # -########################################### - -class Listattr(object): - """ - Set of list attribute. This is used to determine what the list content is. - Later on we may add more attributes here. - """ - NULL = "null" - NUMBERS = "numbers" - STRINGS = "strings" - PERCENT = "percentage" - SIZE = "size" - UNKNOWN = "unknown" - - -def get_log_root_dir(): - max_depth = 5 - parent_dir = '../' - for number in range(0, max_depth): - if os.path.isdir(sys.path[0] + os.sep + (os.pardir + os.sep)*number + 'log'): - log_root_dir = os.path.abspath(sys.path[0] + os.sep + (os.pardir + os.sep)*number + 'log') - break - - if number == (max_depth - 1): - print('No log dir found. Please check') - raise Exception - - return log_root_dir - - -def mkdir_p(dir): - try: - os.makedirs(dir) - except OSError as exc: - if exc.errno == errno.EEXIST and os.path.isdir(dir): - pass - else: - raise - - -def get_list_attr(testlist): - """ - To determine the list content - """ - if not testlist: - return Listattr.NULL - listtest = testlist[:] - try: - listtest.remove('') - except ValueError: - pass - pattern_percent = re.compile(r"^([0-9])+(\.)?([0-9])*%$") - pattern_size = re.compile(r"^([0-9])+(\.)?([0-9])*( )*(K)*(M)*(G)*B$") - pattern_number = re.compile(r"^([0-9])+(\.)?([0-9])*$") - def get_patterned_number(pattern, tlist): - count = 0 - for item in tlist: - if re.search(pattern, item): - count += 1 - return count - if get_patterned_number(pattern_percent, listtest) == len(listtest): - return Listattr.PERCENT - elif get_patterned_number(pattern_size, listtest) == len(listtest): - return Listattr.SIZE - elif get_patterned_number(pattern_number, listtest) == len(listtest): - return Listattr.NUMBERS - else: - return Listattr.STRINGS - - -def is_list_sequenced(testlist): - """ - Function to tell if list is sequenced - Currently we may have list made up of: Strings ; numbers ; percentage ; time; size - Each has respective way to determine if it's sequenced. 
- """ - test_list = testlist[:] - try: - test_list.remove('') - except ValueError: - pass - - if get_list_attr(testlist) == Listattr.NULL : - return True - - elif get_list_attr(testlist) == Listattr.STRINGS : - return (sorted(test_list) == test_list) - - elif get_list_attr(testlist) == Listattr.NUMBERS : - list_number = [] - for item in test_list: - list_number.append(eval(item)) - return (sorted(list_number) == list_number) - - elif get_list_attr(testlist) == Listattr.PERCENT : - list_number = [] - for item in test_list: - list_number.append(eval(item.strip('%'))) - return (sorted(list_number) == list_number) - - elif get_list_attr(testlist) == Listattr.SIZE : - list_number = [] - # currently SIZE is splitted by space - for item in test_list: - if item.split()[1].upper() == "KB": - list_number.append(1024 * eval(item.split()[0])) - elif item.split()[1].upper() == "MB": - list_number.append(1024 * 1024 * eval(item.split()[0])) - elif item.split()[1].upper() == "GB": - list_number.append(1024 * 1024 * 1024 * eval(item.split()[0])) - else: - list_number.append(eval(item.split()[0])) - return (sorted(list_number) == list_number) - - else: - print('Unrecognized list type, please check') - return False - - -def is_list_inverted(testlist): - """ - Function to tell if list is inverted - Currently we may have list made up of: Strings ; numbers ; percentage ; time; size - Each has respective way to determine if it's inverted. - """ - test_list = testlist[:] - try: - test_list.remove('') - except ValueError: - pass - - if get_list_attr(testlist) == Listattr.NULL : - return True - - elif get_list_attr(testlist) == Listattr.STRINGS : - return (sorted(test_list, reverse = True) == test_list) - - elif get_list_attr(testlist) == Listattr.NUMBERS : - list_number = [] - for item in test_list: - list_number.append(eval(item)) - return (sorted(list_number, reverse = True) == list_number) - - elif get_list_attr(testlist) == Listattr.PERCENT : - list_number = [] - for item in test_list: - list_number.append(eval(item.strip('%'))) - return (sorted(list_number, reverse = True) == list_number) - - elif get_list_attr(testlist) == Listattr.SIZE : - list_number = [] - # currently SIZE is splitted by space. such as 0 B; 1 KB; 2 MB - for item in test_list: - if item.split()[1].upper() == "KB": - list_number.append(1024 * eval(item.split()[0])) - elif item.split()[1].upper() == "MB": - list_number.append(1024 * 1024 * eval(item.split()[0])) - elif item.split()[1].upper() == "GB": - list_number.append(1024 * 1024 * 1024 * eval(item.split()[0])) - else: - list_number.append(eval(item.split()[0])) - return (sorted(list_number, reverse = True) == list_number) - - else: - print('Unrecognized list type, please check') - return False - -def replace_file_content(filename, item, option): - f = open(filename) - lines = f.readlines() - f.close() - output = open(filename, 'w') - for line in lines: - if line.startswith(item): - output.write(item + " = '" + option + "'\n") - else: - output.write(line) - output.close() - -def extract_number_from_string(s): - """ - extract the numbers in a string. 
return type is 'list' - """ - return re.findall(r'([0-9]+)', s) - -# Below is decorator derived from toaster backend test code -class NoParsingFilter(logging.Filter): - def filter(self, record): - return record.levelno == 100 - -def LogResults(original_class): - orig_method = original_class.run - - from time import strftime, gmtime - caller = 'toaster' - timestamp = strftime('%Y%m%d%H%M%S',gmtime()) - logfile = os.path.join(os.getcwd(),'results-'+caller+'.'+timestamp+'.log') - linkfile = os.path.join(os.getcwd(),'results-'+caller+'.log') - - #rewrite the run method of unittest.TestCase to add testcase logging - def run(self, result, *args, **kws): - orig_method(self, result, *args, **kws) - passed = True - testMethod = getattr(self, self._testMethodName) - #if test case is decorated then use it's number, else use it's name - try: - test_case = testMethod.test_case - except AttributeError: - test_case = self._testMethodName - - class_name = str(testMethod.im_class).split("'")[1] - - #create custom logging level for filtering. - custom_log_level = 100 - logging.addLevelName(custom_log_level, 'RESULTS') - - def results(self, message, *args, **kws): - if self.isEnabledFor(custom_log_level): - self.log(custom_log_level, message, *args, **kws) - logging.Logger.results = results - - logging.basicConfig(filename=logfile, - filemode='w', - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', - datefmt='%H:%M:%S', - level=custom_log_level) - for handler in logging.root.handlers: - handler.addFilter(NoParsingFilter()) - local_log = logging.getLogger(caller) - - #check status of tests and record it - - for (name, msg) in result.errors: - if (self._testMethodName == str(name).split(' ')[0]) and (class_name in str(name).split(' ')[1]): - local_log.results("Testcase "+str(test_case)+": ERROR") - local_log.results("Testcase "+str(test_case)+":\n"+msg) - passed = False - for (name, msg) in result.failures: - if (self._testMethodName == str(name).split(' ')[0]) and (class_name in str(name).split(' ')[1]): - local_log.results("Testcase "+str(test_case)+": FAILED") - local_log.results("Testcase "+str(test_case)+":\n"+msg) - passed = False - for (name, msg) in result.skipped: - if (self._testMethodName == str(name).split(' ')[0]) and (class_name in str(name).split(' ')[1]): - local_log.results("Testcase "+str(test_case)+": SKIPPED") - passed = False - if passed: - local_log.results("Testcase "+str(test_case)+": PASSED") - - # Create symlink to the current log - if os.path.exists(linkfile): - os.remove(linkfile) - os.symlink(logfile, linkfile) - - original_class.run = run - - return original_class - - -########################################### -# # -# PART II: base class # -# # -########################################### - -@LogResults -class toaster_cases_base(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.log = cls.logger_create() - - def setUp(self): - self.screenshot_sequence = 1 - self.verificationErrors = [] - self.accept_next_alert = True - self.host_os = platform.system().lower() - if os.getenv('TOASTER_SUITE'): - self.target_suite = os.getenv('TOASTER_SUITE') - else: - self.target_suite = self.host_os - - self.parser = ConfigParser.SafeConfigParser() - self.parser.read('toaster_test.cfg') - self.base_url = eval(self.parser.get('toaster_test_' + self.target_suite, 'toaster_url')) - - # create log dir . Currently , we put log files in log/tmp. 
After all - # test cases are done, move them to log/$datetime dir - self.log_tmp_dir = os.path.abspath(sys.path[0]) + os.sep + 'log' + os.sep + 'tmp' - try: - mkdir_p(self.log_tmp_dir) - except OSError : - logging.error("%(asctime)s Cannot create tmp dir under log, please check your privilege") - # self.log = self.logger_create() - # driver setup - self.setup_browser() - - @staticmethod - def logger_create(): - log_file = "toaster-auto-" + time.strftime("%Y%m%d%H%M%S") + ".log" - if os.path.exists("toaster-auto.log"): os.remove("toaster-auto.log") - os.symlink(log_file, "toaster-auto.log") - - log = logging.getLogger("toaster") - log.setLevel(logging.DEBUG) - - fh = logging.FileHandler(filename=log_file, mode='w') - fh.setLevel(logging.DEBUG) - - ch = logging.StreamHandler(sys.stdout) - ch.setLevel(logging.INFO) - - formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') - fh.setFormatter(formatter) - ch.setFormatter(formatter) - - log.addHandler(fh) - log.addHandler(ch) - - return log - - - def setup_browser(self, *browser_path): - self.browser = eval(self.parser.get('toaster_test_' + self.target_suite, 'test_browser')) - print(self.browser) - if self.browser == "firefox": - driver = webdriver.Firefox() - elif self.browser == "chrome": - driver = webdriver.Chrome() - elif self.browser == "ie": - driver = webdriver.Ie() - else: - driver = None - print("unrecognized browser type, please check") - self.driver = driver - self.driver.implicitly_wait(30) - return self.driver - - - def save_screenshot(self, **log_args): - """ - This function is used to save screen either by os interface or selenium interface. - How to use: - self.save_screenshot(screenshot_type = 'native'/'selenium', log_sub_dir = 'xxx', - append_name = 'stepx') - where native means screenshot func provided by OS, - selenium means screenshot func provided by selenium webdriver - """ - types = [log_args.get('screenshot_type')] - # when no screenshot_type is specified - if types == [None]: - types = ['native', 'selenium'] - # normally append_name is used to specify which step.. - add_name = log_args.get('append_name') - if not add_name: - add_name = '-' - # normally there's no need to specify sub_dir - sub_dir = log_args.get('log_sub_dir') - if not sub_dir: - # use casexxx as sub_dir name - sub_dir = 'case' + str(self.case_no) - for item in types: - log_dir = self.log_tmp_dir + os.sep + sub_dir - mkdir_p(log_dir) - log_path = log_dir + os.sep + self.browser + '-' +\ - item + '-' + add_name + '-' + str(self.screenshot_sequence) + '.png' - if item == 'native': - if self.host_os == "linux": - os.system("scrot " + log_path) - elif self.host_os=="darwin": - os.system("screencapture -x " + log_path) - elif item == 'selenium': - self.driver.get_screenshot_as_file(log_path) - self.screenshot_sequence += 1 - - def browser_delay(self): - """ - currently this is a workaround for some chrome test. - Sometimes we need a delay to accomplish some operation. - But for firefox, mostly we don't need this. - To be discussed - """ - if self.browser == "chrome": - time.sleep(1) - return - - -# these functions are not contained in WebDriver class.. 
- def find_element_by_text(self, string): - return self.driver.find_element_by_xpath("//*[text()='" + string + "']") - - - def find_elements_by_text(self, string): - return self.driver.find_elements_by_xpath("//*[text()='" + string + "']") - - - def find_element_by_text_in_table(self, table_id, text_string): - """ - This is used to search some certain 'text' in certain table - """ - try: - table_element = self.get_table_element(table_id) - element = table_element.find_element_by_xpath("//*[text()='" + text_string + "']") - except NoSuchElementException as e: - print('no element found') - raise - return element - - - def find_element_by_link_text_in_table(self, table_id, link_text): - """ - Assume there're multiple suitable "find_element_by_link_text". - In this circumstance we need to specify "table". - """ - try: - table_element = self.get_table_element(table_id) - element = table_element.find_element_by_link_text(link_text) - except NoSuchElementException as e: - print('no element found') - raise - return element - - - def find_elements_by_link_text_in_table(self, table_id, link_text): - """ - Search link-text in certain table. This helps to narrow down search area. - """ - try: - table_element = self.get_table_element(table_id) - element_list = table_element.find_elements_by_link_text(link_text) - except NoSuchElementException as e: - print('no element found') - raise - return element_list - - - def find_element_by_partial_link_text_in_table(self, table_id, link_text): - """ - Search element by partial link text in certain table. - """ - try: - table_element = self.get_table_element(table_id) - element = table_element.find_element_by_partial_link_text(link_text) - return element - except NoSuchElementException as e: - print('no element found') - raise - - - def find_elements_by_partial_link_text_in_table(self, table_id, link_text): - """ - Assume there're multiple suitable "find_partial_element_by_link_text". - """ - try: - table_element = self.get_table_element(table_id) - element_list = table_element.find_elements_by_partial_link_text(link_text) - return element_list - except NoSuchElementException as e: - print('no element found') - raise - - - def find_element_by_xpath_in_table(self, table_id, xpath): - """ - This helps to narrow down search area. Especially useful when dealing with pop-up form. - """ - try: - table_element = self.get_table_element(table_id) - element = table_element.find_element_by_xpath(xpath) - except NoSuchElementException as e: - print('no element found') - raise - return element - - - def find_elements_by_xpath_in_table(self, table_id, xpath): - """ - This helps to narrow down search area. Especially useful when dealing with pop-up form. - """ - try: - table_element = self.get_table_element(table_id) - element_list = table_element.find_elements_by_xpath(xpath) - except NoSuchElementException as e: - print('no elements found') - raise - return element_list - - - def shortest_xpath(self, pname, pvalue): - return "//*[@" + pname + "='" + pvalue + "']" - - -#usually elements in the same column are with same class name. 
for instance: class="outcome" .TBD - def get_table_column_text(self, attr_name, attr_value): - c_xpath = self.shortest_xpath(attr_name, attr_value) - elements = self.driver.find_elements_by_xpath(c_xpath) - c_list = [] - for element in elements: - c_list.append(element.text) - return c_list - - - def get_table_column_text_by_column_number(self, table_id, column_number): - c_xpath = "//*[@id='" + table_id + "']//td[" + str(column_number) + "]" - elements = self.driver.find_elements_by_xpath(c_xpath) - c_list = [] - for element in elements: - c_list.append(element.text) - return c_list - - - def get_table_head_text(self, *table_id): -#now table_id is a tuple... - if table_id: - thead_xpath = "//*[@id='" + table_id[0] + "']//thead//th[text()]" - elements = self.driver.find_elements_by_xpath(thead_xpath) - c_list = [] - for element in elements: - if element.text: - c_list.append(element.text) - return c_list -#default table on page - else: - return self.driver.find_element_by_xpath("//*/table/thead").text - - - - def get_table_element(self, table_id, *coordinate): - if len(coordinate) == 0: -#return whole-table element - element_xpath = "//*[@id='" + table_id + "']" - try: - element = self.driver.find_element_by_xpath(element_xpath) - except NoSuchElementException as e: - raise - return element - row = coordinate[0] - - if len(coordinate) == 1: -#return whole-row element - element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]" - try: - element = self.driver.find_element_by_xpath(element_xpath) - except NoSuchElementException as e: - return False - return element -#now we are looking for an element with specified X and Y - column = coordinate[1] - - element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]/td[" + str(column) + "]" - try: - element = self.driver.find_element_by_xpath(element_xpath) - except NoSuchElementException as e: - return False - return element - - - def get_table_data(self, table_id, row_count, column_count): - row = 1 - Lists = [] - while row <= row_count: - column = 1 - row_content=[] - while column <= column_count: - s= "//*[@id='" + table_id + "']/tbody/tr[" + str(row) +"]/td[" + str(column) + "]" - v = self.driver.find_element_by_xpath(s).text - row_content.append(v) - column = column + 1 - print("row_content=",row_content) - Lists.extend(row_content) - print(Lists[row-1][0]) - row = row + 1 - return Lists - - # The is_xxx_present functions only returns True/False - # All the log work is done in test procedure, so we can easily trace back - # using logging - def is_text_present (self, patterns): - for pattern in patterns: - if str(pattern) not in self.driver.page_source: - print('Text "'+pattern+'" is missing') - return False - return True - - - def is_element_present(self, how, what): - try: - self.driver.find_element(how, what) - except NoSuchElementException as e: - print('Could not find element '+str(what)+' by ' + str(how)) - return False - return True - - - def is_alert_present(self): - try: self.driver.switch_to_alert() - except NoAlertPresentException as e: return False - return True - - - def close_alert_and_get_its_text(self): - try: - alert = self.driver.switch_to_alert() - alert_text = alert.text - if self.accept_next_alert: - alert.accept() - else: - alert.dismiss() - return alert_text - finally: self.accept_next_alert = True - - - def get_case_number(self): - """ - what case are we running now - """ - funcname = sys._getframe(1).f_code.co_name - caseno_str = funcname.strip('test_') - try: - caseno = int(caseno_str) - 
except ValueError: - print("get case number error! please check if func name is test_xxx") - return False - return caseno - - - def tearDown(self): - self.log.info(' END: CASE %s log \n\n' % str(self.case_no)) - self.driver.quit() - self.assertEqual([], self.verificationErrors) - - -################################################################### -# # -# PART III: test cases # -# please refer to # -# https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=xxx # -# # -################################################################### - -# Note: to comply with the unittest framework, we call these test_xxx functions -# from run_toastercases.py to avoid calling setUp() and tearDown() multiple times - - -class toaster_cases(toaster_cases_base): - ############## - # CASE 901 # - ############## - def test_901(self): - # the reason why get_case_number is not in setUp function is that - # otherwise it returns "setUp" instead of "test_xxx" - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - # open all columns - self.driver.find_element_by_id("edit-columns-button").click() - # adding explicitly wait for chromedriver..-_- - self.browser_delay() - self.driver.find_element_by_id("started_on").click() - self.browser_delay() - self.driver.find_element_by_id("time").click() - self.driver.find_element_by_id("edit-columns-button").click() - # dict: {lint text name : actual class name} - table_head_dict = {'Outcome':'outcome', 'Recipe':'target', 'Machine':'machine', 'Started on':'started_on', 'Completed on':'completed_on', \ - 'Errors':'errors_no', 'Warnings':'warnings_no', 'Time':'time'} - for key in table_head_dict: - try: - self.driver.find_element_by_link_text(key).click() - except Exception as e: - self.log.error("%s cannot be found on page" % key) - raise - column_list = self.get_table_column_text("class", table_head_dict[key]) - # after 1st click, the list should be either sequenced or inverted, but we don't have a "default order" here - # the point is, after another click, it should be another order - if is_list_inverted(column_list): - self.driver.find_element_by_link_text(key).click() - column_list = self.get_table_column_text("class", table_head_dict[key]) - self.assertTrue(is_list_sequenced(column_list), msg=("%s column not in order" % key)) - else: - self.assertTrue(is_list_sequenced(column_list), msg=("%s column not sequenced" % key)) - self.driver.find_element_by_link_text(key).click() - column_list = self.get_table_column_text("class", table_head_dict[key]) - self.assertTrue(is_list_inverted(column_list), msg=("%s column not inverted" % key)) - self.log.info("case passed") - - - ############## - # CASE 902 # - ############## - def test_902(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - # Could add more test patterns here in the future. Also, could search some items other than target column in future.. 
- patterns = ["minimal", "sato"] - for pattern in patterns: - ori_target_column_texts = self.get_table_column_text("class", "target") - print(ori_target_column_texts) - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys(pattern) - self.driver.find_element_by_id("search-button").click() - new_target_column_texts = self.get_table_column_text("class", "target") - # if nothing found, we still count it as "pass" - if new_target_column_texts: - for text in new_target_column_texts: - self.assertTrue(text.find(pattern), msg=("%s item doesn't exist " % pattern)) - self.driver.find_element_by_css_selector("i.icon-remove").click() - target_column_texts = self.get_table_column_text("class", "target") - self.assertTrue(ori_target_column_texts == target_column_texts, msg=("builds changed after operations")) - - - ############## - # CASE 903 # - ############## - def test_903(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - # when opening a new page, "started_on" is not displayed by default - self.driver.find_element_by_id("edit-columns-button").click() - # currently all the delay are for chrome driver -_- - self.browser_delay() - self.driver.find_element_by_id("started_on").click() - self.driver.find_element_by_id("edit-columns-button").click() - # step 4 - items = ["Outcome", "Completed on", "Started on"] - for item in items: - try: - temp_element = self.find_element_by_text_in_table('otable', item) - # this is how we find "filter icon" in the same level as temp_element(where "a" means clickable, "i" means icon) - self.assertTrue(temp_element.find_element_by_xpath("..//*/a/i[@class='icon-filter filtered']")) - except Exception as e: - self.assertFalse(True, msg=(" %s cannot be found! 
%s" % (item, e))) - raise - # step 5-6 - temp_element = self.find_element_by_link_text_in_table('otable', 'Outcome') - temp_element.find_element_by_xpath("..//*/a/i[@class='icon-filter filtered']").click() - self.browser_delay() - # the 2nd option, whatever it is - self.driver.find_element_by_xpath("(//input[@name='filter'])[2]").click() - # click "Apply" button - self.driver.find_element_by_xpath("//*[@id='filter_outcome']//*[text()='Apply']").click() - # save screen here - time.sleep(1) - self.save_screenshot(screenshot_type='selenium', append_name='step5') - temp_element = self.find_element_by_link_text_in_table('otable', 'Completed on') - temp_element.find_element_by_xpath("..//*/a/i[@class='icon-filter filtered']").click() - self.browser_delay() - self.driver.find_element_by_xpath("//*[@id='filter_completed_on']//*[text()='Apply']").click() - # save screen here to compare to previous one - # please note that for chrome driver, need a little break before saving - # screen here -_- - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step6') - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys("core-image") - self.driver.find_element_by_id("search-button").click() - - - ############## - # CASE 904 # - ############## - def test_904(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_partial_link_text("core-image").click() - self.driver.find_element_by_link_text("Tasks").click() - self.table_name = 'otable' - # This is how we find the "default" rows-number! - rows_displayed = int(Select(self.driver.find_element_by_css_selector("select.pagesize")).first_selected_option.text) - print(rows_displayed) - self.assertTrue(self.get_table_element(self.table_name, rows_displayed), msg=("not enough rows displayed")) - self.assertFalse(self.get_table_element(self.table_name, rows_displayed + 1), \ - msg=("more rows displayed than expected")) - # Search text box background text is "Search tasks" - self.assertTrue(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search tasks']"),\ - msg=("background text doesn't exist")) - - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys("busybox") - self.driver.find_element_by_id("search-button").click() - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step5') - self.driver.find_element_by_css_selector("i.icon-remove").click() - # Save screen here - self.save_screenshot(screenshot_type='selenium', append_name='step5_2') - self.driver.find_element_by_id("edit-columns-button").click() - self.driver.find_element_by_id("cpu_used").click() - self.driver.find_element_by_id("disk_io").click() - self.driver.find_element_by_id("recipe_version").click() - self.driver.find_element_by_id("time_taken").click() - self.driver.find_element_by_id("edit-columns-button").click() - # The operation is the same as case901 - # dict: {lint text name : actual class name} - table_head_dict = {'Order':'order', 'Recipe':'recipe_name', 'Task':'task_name', 'Executed':'executed', \ - 'Outcome':'outcome', 'Cache attempt':'cache_attempt', 'Time (secs)':'time_taken', 'CPU usage':'cpu_used', \ - 'Disk I/O (ms)':'disk_io'} - for key in table_head_dict: - # This is tricky here: we are doing so because there may be more than 1 - # same-name link_text in one page. 
So we only find element inside the table - self.find_element_by_link_text_in_table(self.table_name, key).click() - column_list = self.get_table_column_text("class", table_head_dict[key]) - # after 1st click, the list should be either sequenced or inverted, but we don't have a "default order" here - # the point is, after another click, it should be another order - # the first case is special:this means every item in column_list is the same, so - # after one click, either sequenced or inverted will be fine - if (is_list_inverted(column_list) and is_list_sequenced(column_list)) \ - or (not column_list) : - self.find_element_by_link_text_in_table(self.table_name, key).click() - column_list = self.get_table_column_text("class", table_head_dict[key]) - self.assertTrue(is_list_sequenced(column_list) or is_list_inverted(column_list), \ - msg=("%s column not in any order" % key)) - elif is_list_inverted(column_list): - self.find_element_by_link_text_in_table(self.table_name, key).click() - column_list = self.get_table_column_text("class", table_head_dict[key]) - self.assertTrue(is_list_sequenced(column_list), msg=("%s column not in order" % key)) - else: - self.assertTrue(is_list_sequenced(column_list), msg=("%s column not in order" % key)) - self.find_element_by_link_text_in_table(self.table_name, key).click() - column_list = self.get_table_column_text("class", table_head_dict[key]) - self.assertTrue(is_list_inverted(column_list), msg=("%s column not inverted" % key)) - # step 8-10 - # filter dict: {link text name : filter table name in xpath} - filter_dict = {'Executed':'filter_executed', 'Outcome':'filter_outcome', 'Cache attempt':'filter_cache_attempt'} - for key in filter_dict: - temp_element = self.find_element_by_link_text_in_table(self.table_name, key) - # find the filter icon besides it. 
- # And here we must have break (1 sec) to get the popup stuff - temp_element.find_element_by_xpath("..//*[@class='icon-filter filtered']").click() - self.browser_delay() - avail_options = self.driver.find_elements_by_xpath("//*[@id='" + filter_dict[key] + "']//*[@name='filter'][not(@disabled)]") - for number in range(0, len(avail_options)): - avail_options[number].click() - self.browser_delay() - # click "Apply" - self.driver.find_element_by_xpath("//*[@id='" + filter_dict[key] + "']//*[@type='submit']").click() - # insert screen capture here - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step8') - # after the last option was clicked, we don't need operation below anymore - if number < len(avail_options)-1: - try: - temp_element = self.find_element_by_link_text_in_table(self.table_name, key) - temp_element.find_element_by_xpath("..//*[@class='icon-filter filtered']").click() - avail_options = self.driver.find_elements_by_xpath("//*[@id='" + filter_dict[key] + "']//*[@name='filter'][not(@disabled)]") - except: - print("in exception") - self.find_element_by_text("Show all tasks").click() -# self.driver.find_element_by_xpath("//*[@id='searchform']/button[2]").click() - temp_element = self.find_element_by_link_text_in_table(self.table_name, key) - temp_element.find_element_by_xpath("..//*[@class='icon-filter filtered']").click() - avail_options = self.driver.find_elements_by_xpath("//*[@id='" + filter_dict[key] + "']//*[@name='filter'][not(@disabled)]") - self.browser_delay() - # step 11 - for item in ['order', 'task_name', 'executed', 'outcome', 'recipe_name', 'recipe_version']: - try: - self.find_element_by_xpath_in_table(self.table_name, "./tbody/tr[1]/*[@class='" + item + "']/a").click() - except NoSuchElementException as e: - # let it go... 
- print('no item in the colum' + item) - # insert screen shot here - self.save_screenshot(screenshot_type='selenium', append_name='step11') - self.driver.back() - # step 12-14 - # about test_dict: please refer to testcase 904 requirement step 12-14 - test_dict = { - 'Time':{ - 'class':'time_taken', - 'check_head_list':['Recipe', 'Task', 'Executed', 'Outcome', 'Time (secs)'], - 'check_column_list':['cpu_used', 'cache_attempt', 'disk_io', 'order', 'recipe_version'] - }, - 'CPU usage':{ - 'class':'cpu_used', - 'check_head_list':['Recipe', 'Task', 'Executed', 'Outcome', 'CPU usage'], - 'check_column_list':['cache_attempt', 'disk_io', 'order', 'recipe_version', 'time_taken'] - }, - 'Disk I/O':{ - 'class':'disk_io', - 'check_head_list':['Recipe', 'Task', 'Executed', 'Outcome', 'Disk I/O (ms)'], - 'check_column_list':['cpu_used', 'cache_attempt', 'order', 'recipe_version', 'time_taken'] - } - } - for key in test_dict: - self.find_element_by_partial_link_text_in_table('nav', 'core-image').click() - self.find_element_by_link_text_in_table('nav', key).click() - head_list = self.get_table_head_text('otable') - for item in test_dict[key]['check_head_list']: - self.assertTrue(item in head_list, msg=("%s not in head row" % item)) - column_list = self.get_table_column_text('class', test_dict[key]['class']) - self.assertTrue(is_list_inverted(column_list), msg=("%s column not inverted" % key)) - - self.driver.find_element_by_id("edit-columns-button").click() - for item2 in test_dict[key]['check_column_list']: - self.driver.find_element_by_id(item2).click() - self.driver.find_element_by_id("edit-columns-button").click() - # TBD: save screen here - - - ############## - # CASE 906 # - ############## - def test_906(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - self.find_element_by_link_text_in_table('nav', 'Packages').click() - # find "bash" in first column (Packages) - self.driver.find_element_by_xpath("//*[@id='otable']//td[1]//*[text()='bash']").click() - # save sceen here to observe... - # step 6 - self.driver.find_element_by_partial_link_text("Generated files").click() - head_list = self.get_table_head_text('otable') - for item in ['File', 'Size']: - self.assertTrue(item in head_list, msg=("%s not in head row" % item)) - c_list = self.get_table_column_text('class', 'path') - self.assertTrue(is_list_sequenced(c_list), msg=("column not in order")) - # step 7 - self.driver.find_element_by_partial_link_text("Runtime dependencies").click() - # save sceen here to observe... 
- # note that here table name is not 'otable' - head_list = self.get_table_head_text('dependencies') - for item in ['Package', 'Version', 'Size']: - self.assertTrue(item in head_list, msg=("%s not in head row" % item)) - c_list = self.get_table_column_text_by_column_number('dependencies', 1) - self.assertTrue(is_list_sequenced(c_list), msg=("list not in order")) - texts = ['Size', 'License', 'Recipe', 'Recipe version', 'Layer', \ - 'Layer commit'] - self.failUnless(self.is_text_present(texts)) - - - ############## - # CASE 910 # - ############## - def test_910(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - image_type="core-image-minimal" - test_package1="busybox" - test_package2="lib" - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text(image_type).click() - self.driver.find_element_by_link_text("Recipes").click() - self.save_screenshot(screenshot_type='selenium', append_name='step3') - - self.table_name = 'otable' - # This is how we find the "default" rows-number! - rows_displayed = int(Select(self.driver.find_element_by_css_selector("select.pagesize")).first_selected_option.text) - print(rows_displayed) - self.assertTrue(self.get_table_element(self.table_name, rows_displayed)) - self.assertFalse(self.get_table_element(self.table_name, rows_displayed + 1)) - - # Check the default table is sorted by Recipe - tasks_column_count = len(self.driver.find_elements_by_xpath("/html/body/div[2]/div/div[2]/div[2]/table/tbody/tr/td[1]")) - print(tasks_column_count) - default_column_list = self.get_table_column_text_by_column_number(self.table_name, 1) - #print default_column_list - - self.assertTrue(is_list_sequenced(default_column_list)) - - # Search text box background text is "Search recipes" - self.assertTrue(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']")) - - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys(test_package1) - self.driver.find_element_by_id("search-button").click() - # Save screen here - self.save_screenshot(screenshot_type='selenium', append_name='step4') - self.driver.find_element_by_css_selector("i.icon-remove").click() - self.save_screenshot(screenshot_type='selenium', append_name='step4_2') - - self.driver.find_element_by_id("edit-columns-button").click() - self.driver.find_element_by_id("depends_on").click() - self.driver.find_element_by_id("layer_version__branch").click() - self.driver.find_element_by_id("layer_version__layer__commit").click() - self.driver.find_element_by_id("depends_by").click() - self.driver.find_element_by_id("edit-columns-button").click() - - self.find_element_by_link_text_in_table(self.table_name, 'Recipe').click() - # Check the inverted table by Recipe - # Recipe doesn't have class name - #inverted_tasks_column_count = len(self.driver.find_elements_by_xpath("/html/body/div[2]/div/div[2]/div[2]/table/tbody/tr/td[1]")) - #print inverted_tasks_column_count - #inverted_column_list = self.get_table_column_text_by_column_number(self.table_name, 1) - #print inverted_column_list - - #self.driver.find_element_by_partial_link_text("zlib").click() - #self.driver.back() - #self.assertTrue(is_list_inverted(inverted_column_list)) - #self.find_element_by_link_text_in_table(self.table_name, 'Recipe').click() - - table_head_dict = {'Recipe':'recipe__name', 'Recipe file':'recipe_file', 'Section':'recipe_section', \ - 'License':'recipe_license', 
'Layer':'layer_version__layer__name', \ - 'Layer branch':'layer_version__branch'} - for key in table_head_dict: - self.find_element_by_link_text_in_table(self.table_name, key).click() - column_list = self.get_table_column_text("class", table_head_dict[key]) - if (is_list_inverted(column_list) and is_list_sequenced(column_list)) \ - or (not column_list) : - self.find_element_by_link_text_in_table(self.table_name, key).click() - column_list = self.get_table_column_text("class", table_head_dict[key]) - self.assertTrue(is_list_sequenced(column_list) or is_list_inverted(column_list)) - self.driver.find_element_by_partial_link_text("acl").click() - self.driver.back() - self.assertTrue(is_list_sequenced(column_list) or is_list_inverted(column_list)) - # Search text box background text is "Search recipes" - self.assertTrue(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']")) - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys(test_package2) - self.driver.find_element_by_id("search-button").click() - column_search_list = self.get_table_column_text("class", table_head_dict[key]) - self.assertTrue(is_list_sequenced(column_search_list) or is_list_inverted(column_search_list)) - self.driver.find_element_by_css_selector("i.icon-remove").click() - elif is_list_inverted(column_list): - self.find_element_by_link_text_in_table(self.table_name, key).click() - column_list = self.get_table_column_text("class", table_head_dict[key]) - self.assertTrue(is_list_sequenced(column_list)) - self.driver.find_element_by_partial_link_text("acl").click() - self.driver.back() - self.assertTrue(is_list_sequenced(column_list)) - # Search text box background text is "Search recipes" - self.assertTrue(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']")) - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys(test_package2) - self.driver.find_element_by_id("search-button").click() - column_search_list = self.get_table_column_text("class", table_head_dict[key]) - self.assertTrue(is_list_sequenced(column_search_list)) - self.driver.find_element_by_css_selector("i.icon-remove").click() - else: - self.assertTrue(is_list_sequenced(column_list), msg=("list %s not sequenced" % key)) - self.find_element_by_link_text_in_table(self.table_name, key).click() - column_list = self.get_table_column_text("class", table_head_dict[key]) - self.assertTrue(is_list_inverted(column_list)) - try: - self.driver.find_element_by_partial_link_text("acl").click() - except: - self.driver.find_element_by_partial_link_text("zlib").click() - self.driver.back() - self.assertTrue(is_list_inverted(column_list)) - # Search text box background text is "Search recipes" - self.assertTrue(self.driver.find_element_by_xpath("//*[@id='searchform']/*[@placeholder='Search recipes']")) - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys(test_package2) - self.driver.find_element_by_id("search-button").click() - column_search_list = self.get_table_column_text("class", table_head_dict[key]) - #print column_search_list - self.assertTrue(is_list_inverted(column_search_list)) - self.driver.find_element_by_css_selector("i.icon-remove").click() - - # Bug 5919 - for key in table_head_dict: - print(key) - self.find_element_by_link_text_in_table(self.table_name, key).click() - self.driver.find_element_by_id("edit-columns-button").click() - 
self.driver.find_element_by_id(table_head_dict[key]).click() - self.driver.find_element_by_id("edit-columns-button").click() - self.browser_delay() - # After hide the column, the default table should be sorted by Recipe - tasks_column_count = len(self.driver.find_elements_by_partial_link_text("acl")) - #print tasks_column_count - default_column_list = self.get_table_column_text_by_column_number(self.table_name, 1) - #print default_column_list - self.assertTrue(is_list_sequenced(default_column_list)) - - self.driver.find_element_by_id("edit-columns-button").click() - self.driver.find_element_by_id("recipe_file").click() - self.driver.find_element_by_id("recipe_section").click() - self.driver.find_element_by_id("recipe_license").click() - self.driver.find_element_by_id("layer_version__layer__name").click() - self.driver.find_element_by_id("edit-columns-button").click() - - - ############## - # CASE 911 # - ############## - def test_911(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - self.find_element_by_link_text_in_table('nav', 'Recipes').click() - # step 3-5 - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys("lib") - self.driver.find_element_by_id("search-button").click() - # save screen here for observation - self.save_screenshot(screenshot_type='selenium', append_name='step5') - # step 6 - self.driver.find_element_by_css_selector("i.icon-remove").click() - self.driver.find_element_by_id("search").clear() - # we deliberately want "no result" here - self.driver.find_element_by_id("search").send_keys("no such input") - self.driver.find_element_by_id("search-button").click() - try: - self.find_element_by_text("Show all recipes").click() - except: - self.fail(msg='Could not identify blank page elements') - - ############## - # CASE 912 # - ############## - def test_912(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - self.find_element_by_link_text_in_table('nav', 'Recipes').click() - # step 3 - head_list = self.get_table_head_text('otable') - for item in ['Recipe', 'Recipe version', 'Recipe file', 'Section', 'License', 'Layer']: - self.assertTrue(item in head_list, msg=("item %s not in head row" % item)) - self.driver.find_element_by_id("edit-columns-button").click() - self.driver.find_element_by_id("depends_on").click() - self.driver.find_element_by_id("layer_version__branch").click() - self.driver.find_element_by_id("layer_version__layer__commit").click() - self.driver.find_element_by_id("depends_by").click() - self.driver.find_element_by_id("edit-columns-button").click() - # check if columns selected above is shown - check_list = ['Dependencies', 'Layer branch', 'Layer commit', 'Reverse dependencies'] - head_list = self.get_table_head_text('otable') - time.sleep(2) - print(head_list) - for item in check_list: - self.assertTrue(item in head_list, msg=("item %s not in head row" % item)) - # un-check 'em all - self.driver.find_element_by_id("edit-columns-button").click() - self.driver.find_element_by_id("depends_on").click() - self.driver.find_element_by_id("layer_version__branch").click() - self.driver.find_element_by_id("layer_version__layer__commit").click() - 
self.driver.find_element_by_id("depends_by").click() - self.driver.find_element_by_id("edit-columns-button").click() - # don't exist any more - head_list = self.get_table_head_text('otable') - for item in check_list: - self.assertFalse(item in head_list, msg=("item %s should not be in head row" % item)) - - - ############## - # CASE 913 # - ############## - def test_913(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - self.find_element_by_link_text_in_table('nav', 'Recipes').click() - # step 3 - head_list = self.get_table_head_text('otable') - for item in ['Recipe', 'Recipe version', 'Recipe file', 'Section', 'License', 'Layer']: - self.assertTrue(item in head_list, msg=("item %s not in head row" % item)) - # step 4 - self.driver.find_element_by_id("edit-columns-button").click() - # save screen - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step4') - self.driver.find_element_by_id("edit-columns-button").click() - - - ############## - # CASE 914 # - ############## - def test_914(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - image_type="core-image-minimal" - test_package1="busybox" - test_package2="gdbm" - test_package3="gettext-native" - driver = self.driver - driver.maximize_window() - driver.get(self.base_url) - driver.find_element_by_link_text(image_type).click() - driver.find_element_by_link_text("Recipes").click() - driver.find_element_by_link_text(test_package1).click() - - self.table_name = 'information' - - tasks_row_count = len(driver.find_elements_by_xpath("//*[@id='"+self.table_name+"']/table/tbody/tr/td[1]")) - tasks_column_count = len(driver.find_elements_by_xpath("//*[@id='"+self.table_name+"']/table/tbody/tr[1]/td")) - print('rows: '+str(tasks_row_count)) - print('columns: '+str(tasks_column_count)) - - Tasks_column = self.get_table_column_text_by_column_number(self.table_name, 2) - print ("Tasks_column=", Tasks_column) - - key_tasks=["do_fetch", "do_unpack", "do_patch", "do_configure", "do_compile", "do_install", "do_package", "do_build"] - i = 0 - while i < len(key_tasks): - if key_tasks[i] not in Tasks_column: - print ("Error! Missing key task: %s" % key_tasks[i]) - else: - print ("%s is in tasks" % key_tasks[i]) - i = i + 1 - - if Tasks_column.index(key_tasks[0]) != 0: - print ("Error! %s is not in the right position" % key_tasks[0]) - else: - print ("%s is in right position" % key_tasks[0]) - - if Tasks_column[-1] != key_tasks[-1]: - print ("Error! 
%s is not in the right position" % key_tasks[-1]) - else: - print ("%s is in right position" % key_tasks[-1]) - - driver.find_element_by_partial_link_text("Packages (").click() - packages_name = driver.find_element_by_partial_link_text("Packages (").text - print(packages_name) - packages_num = int(filter(str.isdigit, repr(packages_name))) - print(packages_num) - - #switch the table to show more than 10 rows at a time - self.driver.find_element_by_xpath("//*[@id='packages-built']/div[1]/div/select").click() - Select(driver.find_element_by_xpath("//*[@id='packages-built']/div[1]/div/select")).select_by_value('150') - self.driver.find_element_by_xpath("//*[@id='packages-built']/div[1]/div/select").send_keys(Keys.ENTER) - - packages_row_count = len(driver.find_elements_by_xpath("//*[@id='otable']/tbody/tr/td[1]")) - print(packages_row_count) - - if packages_num != packages_row_count: - print ("Error! The packages number is not correct") - else: - print ("The packages number is correct") - - driver.find_element_by_partial_link_text("Build dependencies (").click() - depends_name = driver.find_element_by_partial_link_text("Build dependencies (").text - print(depends_name) - depends_num = int(list(filter(str.isdigit, repr(depends_name)))) - print(depends_num) - - if depends_num == 0: - depends_message = repr(driver.find_element_by_css_selector("div.alert.alert-info").text) - print(depends_message) - if depends_message.find("has no build dependencies.") < 0: - print ("Error! The message isn't expected.") - else: - print ("The message is expected") - else: - depends_row_count = len(driver.find_elements_by_xpath("//*[@id='dependencies']/table/tbody/tr/td[1]")) - print(depends_row_count) - if depends_num != depends_row_count: - print ("Error! The dependent packages number is not correct") - else: - print ("The dependent packages number is correct") - - driver.find_element_by_partial_link_text("Reverse build dependencies (").click() - rdepends_name = driver.find_element_by_partial_link_text("Reverse build dependencies (").text - print(rdepends_name) - rdepends_num = int(filter(str.isdigit, repr(rdepends_name))) - print(rdepends_num) - - if rdepends_num == 0: - rdepends_message = repr(driver.find_element_by_css_selector("#brought-in-by > div.alert.alert-info").text) - print(rdepends_message) - if rdepends_message.find("has no reverse build dependencies.") < 0: - print ("Error! 
The message isn't expected.") - else: - print ("The message is expected") - else: - print ("The reverse dependent packages number is correct") - - driver.find_element_by_link_text("Recipes").click() - driver.find_element_by_link_text(test_package2).click() - driver.find_element_by_partial_link_text("Packages (").click() - driver.find_element_by_partial_link_text("Build dependencies (").click() - driver.find_element_by_partial_link_text("Reverse build dependencies (").click() - - - driver.find_element_by_link_text("Recipes").click() - driver.find_element_by_link_text(test_package3).click() - - native_tasks_row_count = len(driver.find_elements_by_xpath("//*[@id='information']/table/tbody/tr/td[1]")) - native_tasks_column_count = len(driver.find_elements_by_xpath("//*[@id='information']/table/tbody/tr[1]/td")) - print(native_tasks_row_count) - print(native_tasks_column_count) - - Native_Tasks_column = self.get_table_column_text_by_column_number(self.table_name, 2) - print ("Native_Tasks_column=", Native_Tasks_column) - - native_key_tasks=["do_fetch", "do_unpack", "do_patch", "do_configure", "do_compile", "do_install", "do_build"] - i = 0 - while i < len(native_key_tasks): - if native_key_tasks[i] not in Native_Tasks_column: - print ("Error! Missing key task: %s" % native_key_tasks[i]) - else: - print ("%s is in tasks" % native_key_tasks[i]) - i = i + 1 - - if Native_Tasks_column.index(native_key_tasks[0]) != 0: - print ("Error! %s is not in the right position" % native_key_tasks[0]) - else: - print ("%s is in right position" % native_key_tasks[0]) - - if Native_Tasks_column[-1] != native_key_tasks[-1]: - print ("Error! %s is not in the right position" % native_key_tasks[-1]) - else: - print ("%s is in right position" % native_key_tasks[-1]) - - driver.find_element_by_partial_link_text("Packages (").click() - native_packages_name = driver.find_element_by_partial_link_text("Packages (").text - print(native_packages_name) - native_packages_num = int(filter(str.isdigit, repr(native_packages_name))) - print(native_packages_num) - - if native_packages_num != 0: - print ("Error! Native task shouldn't have any packages.") - else: - native_package_message = repr(driver.find_element_by_css_selector("#packages-built > div.alert.alert-info").text) - print(native_package_message) - if native_package_message.find("does not build any packages.") < 0: - print ("Error! The message for native task isn't expected.") - else: - print ("The message for native task is expected.") - - driver.find_element_by_partial_link_text("Build dependencies (").click() - native_depends_name = driver.find_element_by_partial_link_text("Build dependencies (").text - print(native_depends_name) - native_depends_num = int(filter(str.isdigit, repr(native_depends_name))) - print(native_depends_num) - - native_depends_row_count = len(driver.find_elements_by_xpath("//*[@id='dependencies']/table/tbody/tr/td[1]")) - print(native_depends_row_count) - - if native_depends_num != native_depends_row_count: - print ("Error! 
The dependent packages number is not correct") - else: - print ("The dependent packages number is correct") - - driver.find_element_by_partial_link_text("Reverse build dependencies (").click() - native_rdepends_name = driver.find_element_by_partial_link_text("Reverse build dependencies (").text - print(native_rdepends_name) - native_rdepends_num = int(filter(str.isdigit, repr(native_rdepends_name))) - print(native_rdepends_num) - - native_rdepends_row_count = len(driver.find_elements_by_xpath("//*[@id='brought-in-by']/table/tbody/tr/td[1]")) - print(native_rdepends_row_count) - - if native_rdepends_num != native_rdepends_row_count: - print ("Error! The reverse dependent packages number is not correct") - else: - print ("The reverse dependent packages number is correct") - - driver.find_element_by_link_text("Recipes").click() - - - ############## - # CASE 915 # - ############## - def test_915(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - # step 3 - self.find_element_by_link_text_in_table('nav', 'Configuration').click() - self.driver.find_element_by_link_text("BitBake variables").click() - # step 4 - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys("lib") - self.driver.find_element_by_id("search-button").click() - # save screen to see result - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step4') - # step 5 - self.driver.find_element_by_css_selector("i.icon-remove").click() - head_list = self.get_table_head_text('otable') - print(head_list) - print(len(head_list)) - self.assertTrue(head_list == ['Variable', 'Value', 'Set in file', 'Description'], \ - msg=("head row contents wrong")) - # step 8 - # search other string. 
and click "Variable" to re-sort, check if table - # head is still the same - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys("poky") - self.driver.find_element_by_id("search-button").click() - self.find_element_by_link_text_in_table('otable', 'Variable').click() - head_list = self.get_table_head_text('otable') - self.assertTrue(head_list == ['Variable', 'Value', 'Set in file', 'Description'], \ - msg=("head row contents wrong")) - self.find_element_by_link_text_in_table('otable', 'Variable').click() - head_list = self.get_table_head_text('otable') - self.assertTrue(head_list == ['Variable', 'Value', 'Set in file', 'Description'], \ - msg=("head row contents wrong")) - - - ############## - # CASE 916 # - ############## - def test_916(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - # step 2-3 - self.find_element_by_link_text_in_table('nav', 'Configuration').click() - self.driver.find_element_by_link_text("BitBake variables").click() - variable_list = self.get_table_column_text('class', 'variable_name') - self.assertTrue(is_list_sequenced(variable_list), msg=("list not in order")) - # step 4 - self.find_element_by_link_text_in_table('otable', 'Variable').click() - variable_list = self.get_table_column_text('class', 'variable_name') - self.assertTrue(is_list_inverted(variable_list), msg=("list not inverted")) - self.find_element_by_link_text_in_table('otable', 'Variable').click() - # step 5 - # searching won't change the sequentiality - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys("lib") - self.driver.find_element_by_id("search-button").click() - variable_list = self.get_table_column_text('class', 'variable_name') - self.assertTrue(is_list_sequenced(variable_list), msg=("list not in order")) - - - ############## - # CASE 923 # - ############## - def test_923(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - # Step 2 - # default sequence in "Completed on" column is inverted - c_list = self.get_table_column_text('class', 'completed_on') - self.assertTrue(is_list_inverted(c_list), msg=("list not inverted")) - # step 3 - self.driver.find_element_by_id("edit-columns-button").click() - self.driver.find_element_by_id("started_on").click() - self.driver.find_element_by_id("time").click() - self.driver.find_element_by_id("edit-columns-button").click() - head_list = self.get_table_head_text('otable') - for item in ['Outcome', 'Recipe', 'Machine', 'Started on', 'Completed on', 'Failed tasks', 'Errors', 'Warnings', 'Time', "Image files", "Project"]: - self.failUnless(item in head_list, msg=item+' is missing from table head.') - - - ############## - # CASE 924 # - ############## - def test_924(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - # Please refer to case 924 requirement - # default sequence in "Completed on" column is inverted - c_list = self.get_table_column_text('class', 'completed_on') - self.assertTrue(is_list_inverted(c_list), msg=("list not inverted")) - # Step 4 - # click Errors , order in "Completed on" should be disturbed. 
Then hide - # error column to check if order in "Completed on" can be restored -#THIS TEST IS NO LONGER VALID DUE TO DESIGN CHANGES. LEAVING IN PENDING UPDATES TO DESIGN - #self.find_element_by_link_text_in_table('otable', 'Errors').click() - #self.driver.find_element_by_id("edit-columns-button").click() - #self.driver.find_element_by_id("errors_no").click() - #self.driver.find_element_by_id("edit-columns-button").click() - # Note: without time.sleep here, there'll be unpredictable error..TBD - time.sleep(1) - c_list = self.get_table_column_text('class', 'completed_on') - self.assertTrue(is_list_inverted(c_list), msg=("list not inverted")) - - - ############## - # CASE 940 # - ############## - def test_940(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - # Step 2-3 - self.find_element_by_link_text_in_table('nav', 'Packages').click() - check_head_list = ['Package', 'Package version', 'Size', 'Recipe'] - head_list = self.get_table_head_text('otable') - self.assertTrue(head_list == check_head_list, msg=("head row not as expected")) - # Step 4 - # pulldown menu - option_ids = ['recipe__layer_version__layer__name', 'recipe__layer_version__branch', \ - 'recipe__layer_version__layer__commit', 'license', 'recipe__version'] - self.driver.find_element_by_id("edit-columns-button").click() - for item in option_ids: - if not self.driver.find_element_by_id(item).is_selected(): - self.driver.find_element_by_id(item).click() - self.driver.find_element_by_id("edit-columns-button").click() - # save screen here to observe that 'Package' and 'Package version' is - # not selectable - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step4') - - - ############## - # CASE 941 # - ############## - def test_941(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - # Step 2-3 - self.find_element_by_link_text_in_table('nav', 'Packages').click() - # column -- Package - column_list = self.get_table_column_text_by_column_number('otable', 1) - self.assertTrue(is_list_sequenced(column_list), msg=("list not in order")) - self.find_element_by_link_text_in_table('otable', 'Size').click() - - - ############## - # CASE 942 # - ############## - def test_942(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - self.driver.find_element_by_link_text("Packages").click() - #get initial table header - head_list = self.get_table_head_text('otable') - #remove the Recipe column from table header - self.driver.find_element_by_id("edit-columns-button").click() - self.driver.find_element_by_id("recipe__name").click() - self.driver.find_element_by_id("edit-columns-button").click() - #get modified table header - new_head = self.get_table_head_text('otable') - self.assertTrue(head_list > new_head) - - ############## - # CASE 943 # - ############## - def test_943(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - 
self.driver.find_element_by_link_text("core-image-minimal").click() - self.driver.find_element_by_link_text("Packages").click() - #search for the "bash" package -> this should definitely be present - self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys("bash") - self.driver.find_element_by_id("search-button").click() - #check for the search result message "XX packages found" - self.assertTrue(self.is_text_present("packages found"), msg=("no packages found text")) - - - ############## - # CASE 944 # - ############## - def test_944(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - # step 1: test Recipes page stuff - self.driver.find_element_by_link_text("Recipes").click() - # for these 3 items, default status is not-checked - self.driver.find_element_by_id("edit-columns-button").click() - self.driver.find_element_by_id("layer_version__branch").click() - self.driver.find_element_by_id("layer_version__layer__commit").click() - self.driver.find_element_by_id("edit-columns-button").click() - # otable is the recipes table here - otable_head_text = self.get_table_head_text('otable') - for item in ["Layer", "Layer branch", "Layer commit"]: - self.failIf(item not in otable_head_text, msg=item+' not in table head.') - # click the fist recipe, whatever it is - self.get_table_element("otable", 1, 1).click() - self.assertTrue(self.is_text_present(["Layer", "Layer branch", "Layer commit", "Recipe file"]), \ - msg=("text not in web page")) - - # step 2: test Packages page stuff. almost same as above - self.driver.back() - self.browser_delay() - self.driver.find_element_by_link_text("Packages").click() - self.driver.find_element_by_id("edit-columns-button").click() - self.driver.find_element_by_id("recipe__layer_version__layer__name").click() - self.driver.find_element_by_id("recipe__layer_version__branch").click() - self.driver.find_element_by_id("recipe__layer_version__layer__commit").click() - self.driver.find_element_by_id("edit-columns-button").click() - otable_head_text = self.get_table_head_text("otable") - for item in ["Layer", "Layer branch", "Layer commit"]: - self.assertFalse(item not in otable_head_text, msg=("item %s should be in head row" % item)) - # click the fist recipe, whatever it is - self.get_table_element("otable", 1, 1).click() - self.assertTrue(self.is_text_present(["Layer", "Layer branch", "Layer commit"]), \ - msg=("text not in web page")) - - # step 3: test Packages core-image-minimal(images) stuff. almost same as above. Note when future element-id changes... 
- self.driver.back() - self.driver.find_element_by_link_text("core-image-minimal").click() - self.driver.find_element_by_id("edit-columns-button").click() - self.driver.find_element_by_id("layer_name").click() - self.driver.find_element_by_id("layer_branch").click() - self.driver.find_element_by_id("layer_commit").click() - self.driver.find_element_by_id("edit-columns-button").click() - otable_head_text = self.get_table_head_text("otable") - for item in ["Layer", "Layer branch", "Layer commit"]: - self.assertFalse(item not in otable_head_text, msg=("item %s should be in head row" % item)) - # click the fist recipe, whatever it is - self.get_table_element("otable", 1, 1).click() - self.assertTrue(self.is_text_present(["Layer", "Layer branch", "Layer commit"]), \ - msg=("text not in web page")) - - # step 4: check Configuration page - self.driver.back() - self.driver.find_element_by_link_text("Configuration").click() - otable_head_text = self.get_table_head_text() - self.assertTrue(self.is_text_present(["Layer", "Layer branch", "Layer commit"]), \ - msg=("text not in web page")) - - - ############## - # CASE 945 # - ############## - def test_945(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - for item in ["Packages", "Recipes", "Tasks"]: - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - self.driver.find_element_by_link_text(items).click() - - # this may be page specific. If future page content changes, try to replace it with new xpath - xpath_showrows = "/html/body/div[4]/div/div/div[2]/div[2]/div[2]/div/div/div[2]/select" - xpath_table = "html/body/div[4]/div/div/div[2]/div[2]/table/tbody"#"id=('otable')/tbody" - self.driver.find_element_by_xpath(xpath_showrows).click() - rows_displayed = int(self.driver.find_element_by_xpath(xpath_showrows + "/option[2]").text) - - # not sure if this is a Selenium Select bug: If page is not refreshed here, "select(by visible text)" operation will go back to 100-row page - # Sure we can use driver.get(url) to refresh page, but since page will vary, we use click link text here - self.driver.find_element_by_link_text(items).click() - Select(self.driver.find_element_by_css_selector("select.pagesize")).select_by_visible_text(str(rows_displayed)) - self.failUnless(self.is_element_present(By.XPATH, xpath_table + "/tr[" + str(rows_displayed) +"]")) - self.failIf(self.is_element_present(By.XPATH, xpath_table + "/tr[" + str(rows_displayed+1) +"]")) - - # click 1st package, then go back to check if it's still those rows shown. 
- self.driver.find_element_by_xpath(xpath_otable + "/tr[1]/td[1]/a").click() - time.sleep(3) - self.driver.find_element_by_link_text(item).click() - self.assertTrue(self.is_element_present(By.XPATH, xpath_otable + "/tr[" + str(option_tobeselected) +"]"),\ - msg=("Row %d should exist" %option_tobeselected)) - self.assertFalse(self.is_element_present(By.XPATH, xpath_otable + "/tr[" + str(option_tobeselected+1) +"]"),\ - msg=("Row %d should not exist" %(option_tobeselected+1))) - - - - ############## - # CASE 946 # - ############## - def test_946(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - self.driver.find_element_by_link_text("Configuration").click() - # step 3-4 - check_list = ["Summary", "BitBake variables"] - for item in check_list: - if not self.is_element_present(how=By.LINK_TEXT, what=item): - self.log.error("%s not found" %item) - if not self.is_text_present(['Layers', 'Layer', 'Layer branch', 'Layer commit']): - self.log.error("text not found") - # step 5 - self.driver.find_element_by_link_text("BitBake variables").click() - if not self.is_text_present(['Variable', 'Value', 'Set in file', 'Description']): - self.log.error("text not found") - # This may be unstable because it's page-specific - # step 6: this is how we find filter beside "Set in file" - temp_element = self.find_element_by_text_in_table('otable', "Set in file") - temp_element.find_element_by_xpath("..//*/a/i[@class='icon-filter filtered']").click() - self.browser_delay() - self.driver.find_element_by_xpath("(//input[@name='filter'])[3]").click() - btns = self.driver.find_elements_by_css_selector("button.btn.btn-primary") - for btn in btns: - try: - btn.click() - break - except: - pass - # save screen here - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step6') - self.driver.find_element_by_id("edit-columns-button").click() - # save screen here - # step 7 - # we should manually check the step 6-8 result using screenshot - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step7') - self.driver.find_element_by_id("edit-columns-button").click() - # step 9 - # click the 1st item, no matter what it is - self.driver.find_element_by_xpath("//*[@id='otable']/tbody/tr[1]/td[1]/a").click() - # give it 1 sec so the pop-up becomes the "active_element" - time.sleep(1) - element = self.driver.switch_to.active_element - check_list = ['Order', 'Configuration file', 'Operation', 'Line number'] - for item in check_list: - if item not in element.text: - self.log.error("%s not found" %item) - # any better way to close this pop-up? ... 
TBD - element.find_element_by_class_name("close").click() - # step 10 : need to manually check "Yocto Manual" in saved screen - self.driver.find_element_by_css_selector("i.icon-share.get-info").click() - # save screen here - time.sleep(5) - self.save_screenshot(screenshot_type='native', append_name='step10') - - - ############## - # CASE 947 # - ############## - def test_947(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - self.find_element_by_link_text_in_table('nav', 'Configuration').click() - # step 2 - self.driver.find_element_by_link_text("BitBake variables").click() - # step 3 - def xpath_option(column_name): - # return xpath of options under "Edit columns" button - return self.shortest_xpath('id', 'navTab') + self.shortest_xpath('id', 'editcol') \ - + self.shortest_xpath('id', column_name) - self.driver.find_element_by_id('edit-columns-button').click() - # by default, option "Description" and "Set in file" were checked - self.driver.find_element_by_xpath(xpath_option('description')).click() - self.driver.find_element_by_xpath(xpath_option('file')).click() - self.driver.find_element_by_id('edit-columns-button').click() - check_list = ['Description', 'Set in file'] - head_list = self.get_table_head_text('otable') - for item in check_list: - self.assertFalse(item in head_list, msg=("item %s should not be in head row" % item)) - # check these 2 options and verify again - self.driver.find_element_by_id('edit-columns-button').click() - self.driver.find_element_by_xpath(xpath_option('description')).click() - self.driver.find_element_by_xpath(xpath_option('file')).click() - self.driver.find_element_by_id('edit-columns-button').click() - head_list = self.get_table_head_text('otable') - for item in check_list: - self.assertTrue(item in head_list, msg=("item %s not in head row" % item)) - - - ############## - # CASE 948 # - ############## - def test_948(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - self.find_element_by_link_text_in_table('nav', 'Configuration').click() - self.driver.find_element_by_link_text("BitBake variables").click() - #get number of variables visible by default - number_before_search = self.driver.find_element_by_class_name('page-header').text - # search for a while... 
- self.driver.find_element_by_id("search").clear() - self.driver.find_element_by_id("search").send_keys("BB") - self.driver.find_element_by_id("search-button").click() - #get number of variables visible after search - number_after_search = self.driver.find_element_by_class_name('page-header').text - self.assertTrue(number_before_search > number_after_search, msg=("items should be less after search")) - - - ############## - # CASE 949 # - ############## - def test_949(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_link_text("core-image-minimal").click() - self.find_element_by_link_text_in_table('nav', 'core-image-minimal').click() - # step 3 - try: - self.driver.find_element_by_partial_link_text("Packages included") - self.driver.find_element_by_partial_link_text("Directory structure") - except Exception as e: - self.log.error(e) - self.assertFalse(True) - # step 4 - head_list = self.get_table_head_text('otable') - for item in ['Package', 'Package version', 'Size', 'Dependencies', 'Reverse dependencies', 'Recipe']: - self.assertTrue(item in head_list, msg=("item %s not in head row" % item)) - # step 5-6 - self.driver.find_element_by_id("edit-columns-button").click() - selectable_class = 'checkbox' - # minimum-table : means unselectable items - unselectable_class = 'checkbox muted' - selectable_check_list = ['Dependencies', 'Layer', 'Layer branch', 'Layer commit', \ - 'License', 'Recipe', 'Recipe version', 'Reverse dependencies', \ - 'Size', 'Size over total (%)'] - unselectable_check_list = ['Package', 'Package version'] - selectable_list = list() - unselectable_list = list() - selectable_elements = self.driver.find_elements_by_xpath("//*[@id='editcol']//*[@class='" + selectable_class + "']") - unselectable_elements = self.driver.find_elements_by_xpath("//*[@id='editcol']//*[@class='" + unselectable_class + "']") - for element in selectable_elements: - selectable_list.append(element.text) - for element in unselectable_elements: - unselectable_list.append(element.text) - # check them - for item in selectable_check_list: - self.assertTrue(item in selectable_list, msg=("%s not found in dropdown menu" % item)) - for item in unselectable_check_list: - self.assertTrue(item in unselectable_list, msg=("%s not found in dropdown menu" % item)) - self.driver.find_element_by_id("edit-columns-button").click() - # step 7 - self.driver.find_element_by_partial_link_text("Directory structure").click() - head_list = self.get_table_head_text('dirtable') - for item in ['Directory / File', 'Symbolic link to', 'Source package', 'Size', 'Permissions', 'Owner', 'Group']: - self.assertTrue(item in head_list, msg=("%s not found in Directory structure table head" % item)) - - ############## - # CASE 950 # - ############## - def test_950(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - # step3&4: so far we're not sure if there's "successful build" or "failed - # build".If either of them doesn't exist, we can still go on other steps - check_list = ['Configuration', 'Tasks', 'Recipes', 'Packages', 'Time', 'CPU usage', 'Disk I/O'] - has_successful_build = 1 - has_failed_build = 1 - try: - pass_icon = self.driver.find_element_by_xpath("//*[@class='icon-ok-sign success']") - except Exception: - self.log.info("no successful build exists") - has_successful_build 
= 0 - pass - if has_successful_build: - pass_icon.click() - # save screen here to check if it matches requirement. - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step3_1') - for item in check_list: - try: - self.find_element_by_link_text_in_table('nav', item) - except Exception: - self.assertFalse(True, msg=("link %s cannot be found in the page" % item)) - # step 6 - check_list_2 = ['Packages included', 'Total package size', \ - 'License manifest', 'Image files'] - self.assertTrue(self.is_text_present(check_list_2), msg=("text not in web page")) - self.driver.back() - try: - fail_icon = self.driver.find_element_by_xpath("//*[@class='icon-minus-sign error']") - except Exception: - has_failed_build = 0 - self.log.info("no failed build exists") - pass - if has_failed_build: - fail_icon.click() - # save screen here to check if it matches requirement. - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step3_2') - for item in check_list: - try: - self.find_element_by_link_text_in_table('nav', item) - except Exception: - self.assertFalse(True, msg=("link %s cannot be found in the page" % item)) - # step 7 involved - check_list_3 = ['Machine', 'Distro', 'Layers', 'Total number of tasks', 'Tasks executed', \ - 'Tasks not executed', 'Reuse', 'Recipes built', 'Packages built'] - self.assertTrue(self.is_text_present(check_list_3), msg=("text not in web page")) - self.driver.back() - - - ############## - # CASE 951 # - ############## - def test_951(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - # currently test case itself isn't responsible for creating "1 successful and - # 1 failed build" - has_successful_build = 1 - has_failed_build = 1 - try: - fail_icon = self.driver.find_element_by_xpath("//*[@class='icon-minus-sign error']") - except Exception: - has_failed_build = 0 - self.log.info("no failed build exists") - pass - # if there's failed build, we can proceed - if has_failed_build: - self.driver.find_element_by_partial_link_text("error").click() - self.driver.back() - # not sure if there "must be" some warnings, so here save a screen - self.browser_delay() - self.save_screenshot(screenshot_type='selenium', append_name='step4') - - - ############## - # CASE 955 # - ############## - def test_955(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.log.info(" You should manually create all images before test starts!") - # So far the case itself is not responsable for creating all sorts of images. 
- # So assuming they are already there - # step 2 - self.driver.find_element_by_link_text("core-image-minimal").click() - # save screen here to see the page component - - - ############## - # CASE 956 # - ############## - def test_956(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - # step 2-3 need to run manually - self.log.info("step 2-3: checking the help message when you hover on help icon of target,\ - tasks, recipes, packages need to run manually") - self.driver.find_element_by_partial_link_text("Manual").click() - if not self.is_text_present("Manual"): - self.log.error("please check [Toaster manual] link on page") - self.failIf(True) - -#################################################################################################### -# Starting backend tests ########################################################################### -#################################################################################################### - - ############## - # CASE 1066 # - ############## - def test_1066(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select count(name) from orm_project a, auth_user b where a.user_id = b.id and b.username='_anonuser';" - cursor.execute(query) - data = cursor.fetchone() - self.failUnless(data >= 1) - - - ############## - # CASE 1071 # - ############## - def test_1071(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select name from orm_release;" - cursor.execute(query) - data = cursor.fetchall() - for i in range(0,4): - data[i] = data[i][0] - data.sort() - print(data) - json_parse = json.loads(open('toasterconf.json').read()) - json_data = [] - for i in range (0,4): - json_data.append(json_parse['releases'][i]['name']) - json_data.sort() - print(json_data) - self.failUnless(data == json_data) - - ############## - # CASE 1072 # - ############## - def test_1072(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select value from orm_toastersetting where name like 'DEFCONF%';" - cursor.execute(query) - data = cursor.fetchall() - for i in range(0,6): - data[i] = data[i][0] - print(data) - json_parse = json.loads(open('toasterconf.json').read()) - json_data=json_parse['config'] - json_data = json_data.values() - print(json_data) - self.failUnless(data == json_data) - - - ############## - # CASE 1074 # - ############## - def test_1074(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select name from orm_layersource;" - cursor.execute(query) - data = cursor.fetchall() - for i in range(0,3): - data[i] = data[i][0] - print(data) - json_parse = json.loads(open('toasterconf.json').read()) - json_data = [] - for i in range(0,3): - json_data.append(json_parse['layersources'][i]['name']) - print(json_data) - self.failUnless(set(data) == set(json_data)) - - ############## - # CASE 1075 # - ############## - def test_1075(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - con=sqlite.connect('toaster.sqlite') 
- cursor = con.cursor() - query = "select value from orm_toastersetting where name like 'DEFAULT_RELEASE';" - cursor.execute(query) - data = cursor.fetchall() - data = data[0][0] - print(data) - json_parse = json.loads(open('toasterconf.json').read()) - json_data = json_parse['defaultrelease'] - print(json_data) - self.failUnless(set(data) == set(json_data)) - - ############## - # CASE 1076 # - ############## - def test_1076(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - - print('Checking branches for "Local Yocto Project"') - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select name from orm_branch where layer_source_id=1;" - cursor.execute(query) - data = cursor.fetchall() - lenght = len(data) - try: - for i in range(0,lenght): - data[i] = data[i][0] - except: - pass - print(data) - json_parse = json.loads(open('toasterconf.json').read()) - json_location = json_parse['layersources'][0]['name'] - print(json_location) - json_data = json_parse['layersources'][0]['branches'] - print(json_data) - self.failUnless(set(data) == set(json_data)) - - print('Checking branches for "OpenEmbedded"') - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select name from orm_branch where layer_source_id=3;" - cursor.execute(query) - data = cursor.fetchall() - lenght = len(data) - for i in range(0,lenght): - data[i] = data[i][0] - print(data) - json_parse = json.loads(open('toasterconf.json').read()) - json_location = json_parse['layersources'][1]['name'] - print(json_location) - json_data = json_parse['layersources'][1]['branches'] - print(json_data) - self.failUnless(set(data) == set(json_data)) - - print('Checking branches for "Imported layers"') - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select name from orm_branch where layer_source_id=2;" - cursor.execute(query) - data = cursor.fetchall() - lenght = len(data) - for i in range(0,lenght): - data[i] = data[i][0] - print(data) - json_parse = json.loads(open('toasterconf.json').read()) - json_location = json_parse['layersources'][2]['name'] - print(json_location) - json_data = json_parse['layersources'][2]['branches'] - print(json_data) - self.failUnless(set(data) == set(json_data)) - - - ############## - # CASE 1077 # - ############## - def test_1077(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select name from orm_bitbakeversion;" - cursor.execute(query) - data = cursor.fetchall() - for i in range(0,4): - data[i] = data[i][0] - print(data) - json_parse = json.loads(open('toasterconf.json').read()) - json_data = [] - for i in range(0,4): - json_data.append(json_parse['bitbake'][i]['name']) - print(json_data) - self.failUnless(set(data) == set(json_data)) - - ############## - # CASE 1083 # - ############## - def test_1083(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_id("new-project-button").click() - self.driver.find_element_by_id("new-project-name").send_keys("new-test-project") - self.driver.find_element_by_id("create-project-button").click() - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select count(name) from orm_project where name = 'new-test-project';" - cursor.execute(query) - data = cursor.fetchone() - 
print('data: %s' % data) - self.failUnless(data >= 1) - - ############## - # CASE 1084 # - ############## - def test_1084(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_id("new-project-button").click() - self.driver.find_element_by_id("new-project-name").send_keys("new-default-project") - self.driver.find_element_by_id("create-project-button").click() - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select a.name from orm_release a, orm_project b where a.id = b.release_id and b.name = 'new-default-project' limit 1;" - cursor.execute(query) - db_data = str(cursor.fetchone()[0]) - json_parse = json.loads(open('toasterconf.json').read()) - json_data = str(json_parse['defaultrelease']) - self.failUnless(db_data == json_data) - - ############## - # CASE 1088 # - ############## - def test_1088(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_css_selector("a[href='/toastergui/projects/']").click() - self.driver.find_element_by_link_text('new-default-project').click() - self.driver.find_element_by_id('project-change-form-toggle').click() - self.driver.find_element_by_id('project-name-change-input').clear() - self.driver.find_element_by_id('project-name-change-input').send_keys('new-name') - self.driver.find_element_by_id('project-name-change-btn').click() - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select count(name) from orm_project where name = 'new-name';" - cursor.execute(query) - data = cursor.fetchone()[0] - self.failUnless(data == 1) - #reseting project name - self.driver.find_element_by_id('project-change-form-toggle').click() - self.driver.find_element_by_id('project-name-change-input').clear() - self.driver.find_element_by_id('project-name-change-input').send_keys('new-default-project') - self.driver.find_element_by_id('project-name-change-btn').click() - - - ############## - # CASE 1089 # - ############## - def test_1089(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_css_selector("a[href='/toastergui/projects/']").click() - self.driver.find_element_by_link_text('new-default-project').click() - self.driver.find_element_by_id('change-machine-toggle').click() - self.driver.find_element_by_id('machine-change-input').clear() - self.driver.find_element_by_id('machine-change-input').send_keys('qemuarm64') -# self.driver.find_element_by_id('machine-change-input').send_keys(Keys.RETURN) - self.driver.find_element_by_id('machine-change-btn').click() - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select count(id) from orm_projectvariable where name like 'machine' and value like 'qemuarm64';" - cursor.execute(query) - data = cursor.fetchone()[0] - self.failUnless(data == 1) - #resetting machine to default value - self.driver.find_element_by_id('change-machine-toggle').click() - self.driver.find_element_by_id('machine-change-input').clear() - self.driver.find_element_by_id('machine-change-input').send_keys('qemux86') - self.driver.find_element_by_id('machine-change-input').send_keys(Keys.RETURN) - self.driver.find_element_by_id('machine-change-btn').click() - - ############## - # 
CASE 1090 # - ############## - def test_1090(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select username from auth_user where is_superuser = 1;" - cursor.execute(query) - data = cursor.fetchall() - try: - data = data[0][0] - except: - pass - print(data) - self.failUnless(data == 'toaster_admin') - - ############## - # CASE 1091 # - ############## - def test_1091(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - self.driver.get(self.base_url) - self.driver.find_element_by_css_selector("a[href='/toastergui/projects/']").click() - self.driver.find_element_by_link_text('new-default-project').click() - self.driver.find_element_by_id('release-change-toggle').click() - dropdown = self.driver.find_element_by_css_selector('select') - for option in dropdown.find_elements_by_tag_name('option'): - if option.text == 'Local Yocto Project': - option.click() - self.driver.find_element_by_id('change-release-btn').click() - #wait for the changes to register in the DB - time.sleep(1) - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select count(*) from orm_layer_version a, orm_projectlayer b, orm_project c where a.\"commit\"=\"HEAD\" and a.id = b.layercommit_id and b.project_id=c.id and c.name='new-default-project';" - cursor.execute(query) - data = cursor.fetchone()[0] - #resetting release to default - self.driver.find_element_by_id('release-change-toggle').click() - dropdown = self.driver.find_element_by_css_selector('select') - for option in dropdown.find_elements_by_tag_name('option'): - if option.text == 'Yocto Project master': - option.click() - self.driver.find_element_by_id('change-release-btn').click() - #wait for the changes to register in the DB - time.sleep(1) - self.failUnless(data == 3) - - ############## - # CASE 1092 # - ############## - def test_1092(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - self.driver.maximize_window() - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select a.name, a.value from orm_projectvariable a, orm_project b where a.project_id = b.id and b.name = 'new-default-project';" - cursor.execute(query) - data = dict(cursor.fetchall()) - print(data) - default_values = {u'IMAGE_INSTALL_append': u'', u'PACKAGE_CLASSES': u'package_rpm', u'MACHINE': u'qemux86', u'SDKMACHINE': u'x86_64', u'DISTRO': u'poky', u'IMAGE_FSTYPES': u'ext3 jffs2 tar.bz2'} - self.failUnless(data == default_values) - - ############## - # CASE 1093 # - ############## - def test_1093(self): - self.case_no = self.get_case_number() - self.log.info(' CASE %s log: ' % str(self.case_no)) - - #get initial values - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select layercommit_id from orm_projectlayer a, orm_project b where a.project_id=b.id and b.name='new-default-project';" - cursor.execute(query) - data_initial = cursor.fetchall() - print(data_initial) - - self.driver.maximize_window() - self.driver.get('localhost:8000')#self.base_url) - self.driver.find_element_by_css_selector("a[href='/toastergui/projects/']").click() - self.driver.find_element_by_link_text('new-default-project').click() - self.driver.find_element_by_id('release-change-toggle').click() - dropdown = self.driver.find_element_by_css_selector('select') - for option in 
dropdown.find_elements_by_tag_name('option'): - if option.text == 'Local Yocto Project': - option.click() - self.driver.find_element_by_id('change-release-btn').click() - #wait for the changes to register in the DB - time.sleep(1) - - #get changed values - con=sqlite.connect('toaster.sqlite') - cursor = con.cursor() - query = "select layercommit_id from orm_projectlayer a, orm_project b where a.project_id=b.id and b.name='new-default-project';" - cursor.execute(query) - data_changed = cursor.fetchall() - print(data_changed) - - #resetting release to default - self.driver.find_element_by_id('release-change-toggle').click() - dropdown = self.driver.find_element_by_css_selector('select') - for option in dropdown.find_elements_by_tag_name('option'): - if option.text == 'Yocto Project master': - option.click() - self.driver.find_element_by_id('change-release-btn').click() - #wait for the changes to register in the DB - time.sleep(1) - self.failUnless(data_initial != data_changed) diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_test.cfg b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_test.cfg deleted file mode 100644 index 685a9ee6a..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/toasteruitest/toaster_test.cfg +++ /dev/null @@ -1,25 +0,0 @@ -# Configuration file for toaster_test -# Sorted by different host type - -# test browser could be: firefox; chrome; ie(still under development) -# logging_level could be: CRITICAL; ERROR; WARNING; INFO; DEBUG; NOTSET - - -[toaster_test_linux] -toaster_url = 'http://127.0.0.1:8000' -test_browser = 'firefox' -test_cases = [946] -logging_level = 'INFO' - - -[toaster_test_windows] -toaster_url = 'http://127.0.0.1:8000' -test_browser = ['ie', 'firefox', 'chrome'] -test_cases = [901, 902, 903] -logging_level = 'DEBUG' - -[toaster_test_darwin] -toaster_url = 'http://127.0.0.1:8000' -test_browser = 'firefox' -test_cases = [901, 902, 903, 904, 906, 910, 911, 912, 913, 914, 915, 916, 923, 924, 940, 941, 942, 943, 944, 945, 946, 947, 948, 949, 950, 951, 955, 956] -logging_level = 'INFO' diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/urlcheck.py b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/urlcheck.py deleted file mode 100644 index 001fcee96..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/urlcheck.py +++ /dev/null @@ -1,53 +0,0 @@ -from __future__ import print_function -import sys - -import httplib2 -import config -import urllist - - -config.logger.info("Testing %s with %s", config.TOASTER_BASEURL, config.W3C_VALIDATOR) - -def validate_html5(url): - http_client = httplib2.Http(None) - status = "Failed" - errors = -1 - warnings = -1 - - urlrequest = config.W3C_VALIDATOR+url - - # pylint: disable=broad-except - # we disable the broad-except because we want to actually catch all possible exceptions - try: - resp, _ = http_client.request(urlrequest, "HEAD") - if resp['x-w3c-validator-status'] != "Abort": - status = resp['x-w3c-validator-status'] - errors = int(resp['x-w3c-validator-errors']) - warnings = int(resp['x-w3c-validator-warnings']) - - if status == 'Invalid': - config.logger.warning("Failed %s is %s\terrors %s warnings %s (check at %s)", url, status, errors, warnings, urlrequest) - else: - config.logger.debug("OK! 
%s", url) - - except Exception as exc: - config.logger.warning("Failed validation call: %s", exc) - return (status, errors, warnings) - - -def print_validation(url): - status, errors, warnings = validate_html5(url) - config.logger.error("url %s is %s\terrors %s warnings %s (check at %s)", url, status, errors, warnings, config.W3C_VALIDATOR+url) - - -def main(): - print("Testing %s with %s" % (config.TOASTER_BASEURL, config.W3C_VALIDATOR)) - - if len(sys.argv) > 1: - print_validation(sys.argv[1]) - else: - for url in urllist.URLS: - print_validation(config.TOASTER_BASEURL+url) - -if __name__ == "__main__": - main() diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/urllist.py b/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/urllist.py deleted file mode 100644 index 6db9ffc7b..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/contrib/tts/urllist.py +++ /dev/null @@ -1,39 +0,0 @@ -URLS = [ - 'toastergui/landing/', - 'toastergui/builds/', - 'toastergui/build/1', - 'toastergui/build/1/tasks/', - 'toastergui/build/1/tasks/1/', - 'toastergui/build/1/task/1', - 'toastergui/build/1/recipes/', - 'toastergui/build/1/recipe/1/active_tab/1', - 'toastergui/build/1/recipe/1', - 'toastergui/build/1/recipe_packages/1', - 'toastergui/build/1/packages/', - 'toastergui/build/1/package/1', - 'toastergui/build/1/package_built_dependencies/1', - 'toastergui/build/1/package_included_detail/1/1', - 'toastergui/build/1/package_included_dependencies/1/1', - 'toastergui/build/1/package_included_reverse_dependencies/1/1', - 'toastergui/build/1/target/1', - 'toastergui/build/1/target/1/targetpkg', - 'toastergui/build/1/target/1/dirinfo', - 'toastergui/build/1/target/1/dirinfo_filepath/_/bin/bash', - 'toastergui/build/1/configuration', - 'toastergui/build/1/configvars', - 'toastergui/build/1/buildtime', - 'toastergui/build/1/cpuusage', - 'toastergui/build/1/diskio', - 'toastergui/build/1/target/1/packagefile/1', - 'toastergui/newproject/', - 'toastergui/projects/', - 'toastergui/project/1', - 'toastergui/project/1/configuration', - 'toastergui/project/1/builds/', - 'toastergui/project/1/layers/', - 'toastergui/project/1/layer/1', - 'toastergui/project/1/importlayer', - 'toastergui/project/1/targets/', - 'toastergui/project/1/machines/', - 'toastergui/', -] diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml b/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml index a6c834f44..66c3595f8 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml +++ b/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/oe-core.xml @@ -8,21 +8,28 @@ - morty + pyro git://git.openembedded.org/bitbake - 1.32 + 1.34 HEAD + git://git.openembedded.org/bitbake + HEAD + + + master + git://git.openembedded.org/bitbake + master - morty - Openembedded Morty + pyro + Openembedded Pyro 1 - morty - Toaster will run your builds using the tip of the <a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=morty\">OpenEmbedded Morty</a> branch. + pyro + Toaster will run your builds using the tip of the <a href=\"http://cgit.openembedded.org/openembedded-core/log/?h=pyro\">OpenEmbedded Pyro</a> branch. local @@ -31,21 +38,36 @@ HEAD Toaster will run your builds with the version of OpenEmbedded that you have cloned or downloaded to your computer. 
+ + master + OpenEmbedded core master + 3 + master + Toaster will run your builds using the tip of the <a href=\"http://cgit.openembedded.org/openembedded-core/log/\">OpenEmbedded master</a> branch. + 1 openembedded-core - + 2 openembedded-core + + 3 + openembedded-core + + - + openembedded-core git://git.openembedded.org/openembedded-core + http://cgit.openembedded.org/openembedded-core + http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch% + http://cgit.openembedded.org/openembedded-core/tree/%path%?h=%branch% 1 diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/poky.xml b/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/poky.xml index c192baa42..7827aac28 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/poky.xml +++ b/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/poky.xml @@ -8,9 +8,9 @@ - morty + pyro git://git.yoctoproject.org/poky - morty + pyro bitbake @@ -19,14 +19,21 @@ HEAD bitbake + + master + git://git.yoctoproject.org/poky + master + bitbake + + - morty - Yocto Project 2.2 "Morty" + pyro + Yocto Project 2.3 "Pyro" 1 - morty - Toaster will run your builds using the tip of the <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=morty">Yocto Project Morty branch</a>. + pyro + Toaster will run your builds using the tip of the <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/?h=pyro">Yocto Project Pyro branch</a>. local @@ -35,8 +42,15 @@ HEAD Toaster will run your builds with the version of the Yocto Project you have cloned or downloaded to your computer. + + master + Yocto Project master + 3 + master + Toaster will run your builds using the tip of the <a href="http://git.yoctoproject.org/cgit/cgit.cgi/poky/log/">Yocto Project Master branch</a>. 
+ - + 1 openembedded-core @@ -61,16 +75,40 @@ 2 meta-yocto-bsp + + 3 + openembedded-core + + + 3 + meta-poky + + + 3 + meta-yocto-bsp + - openembedded-core git://git.yoctoproject.org/poky + http://git.yoctoproject.org/cgit/cgit.cgi/poky + http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch% + http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch% + 1 + 0 + 1 + pyro + meta + + 1 0 2 @@ -78,14 +116,30 @@ HEAD meta - + + 1 + 0 + 3 + master + meta + meta-poky git://git.yoctoproject.org/poky + http://git.yoctoproject.org/cgit/cgit.cgi/poky + http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch% + http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch% - + + 2 + 0 + 1 + pyro + meta-poky + + 2 0 2 @@ -93,14 +147,30 @@ HEAD meta-poky - + + 2 + 0 + 3 + master + meta-poky + meta-yocto-bsp git://git.yoctoproject.org/poky + http://git.yoctoproject.org/cgit/cgit.cgi/poky + http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch% + http://git.yoctoproject.org/cgit/cgit.cgi/poky/tree/%path%?h=%branch% - + + 3 + 0 + 1 + pyro + meta-yocto-bsp + + 3 0 2 @@ -108,4 +178,11 @@ HEAD meta-yocto-bsp + + 3 + 0 + 3 + master + meta-yocto-bsp + diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/settings.xml b/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/settings.xml index ee6a20285..78c0fdca7 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/settings.xml +++ b/import-layers/yocto-poky/bitbake/lib/toaster/orm/fixtures/settings.xml @@ -4,7 +4,7 @@ DEFAULT_RELEASE - morty + master DEFCONF_PACKAGE_CLASSES diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py b/import-layers/yocto-poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py index 8ff120e0b..482908d48 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py +++ b/import-layers/yocto-poky/bitbake/lib/toaster/orm/management/commands/lsupdates.py @@ -90,7 +90,6 @@ class Command(NoArgsCommand): from urlparse import urlparse proxy_settings = os.environ.get("http_proxy", None) - oe_core_layer = 'openembedded-core' def _get_json_response(apiurl=DEFAULT_LAYERINDEX_SERVER): http_progress = Spinner() @@ -154,41 +153,19 @@ class Command(NoArgsCommand): total = len(layers_info) for i, li in enumerate(layers_info): - # Special case for the openembedded-core layer - if li['name'] == oe_core_layer: - try: - # If we have an existing openembedded-core for example - # from the toasterconf.json augment the info using the - # layerindex rather than duplicate it - oe_core_l = Layer.objects.get(name=oe_core_layer) - # Take ownership of the layer as now coming from the - # layerindex - oe_core_l.summary = li['summary'] - oe_core_l.description = li['description'] - oe_core_l.vcs_web_url = li['vcs_web_url'] - oe_core_l.vcs_web_tree_base_url = \ - li['vcs_web_tree_base_url'] - oe_core_l.vcs_web_file_base_url = \ - li['vcs_web_file_base_url'] - - oe_core_l.save() - li_layer_id_to_toaster_layer_id[li['id']] = oe_core_l.pk - self.mini_progress("layers", i, total) - continue - - except Layer.DoesNotExist: - pass - try: - l, created = Layer.objects.get_or_create(name=li['name'], - vcs_url=li['vcs_url']) + l, created = Layer.objects.get_or_create(name=li['name']) l.up_date = li['updated'] - l.vcs_url = li['vcs_url'] - l.vcs_web_url = li['vcs_web_url'] - l.vcs_web_tree_base_url = li['vcs_web_tree_base_url'] - l.vcs_web_file_base_url = li['vcs_web_file_base_url'] l.summary = 
li['summary'] l.description = li['description'] + + if created: + # predefined layers in the fixtures (for example poky.xml) + # always preempt the Layer Index for these values + l.vcs_url = li['vcs_url'] + l.vcs_web_url = li['vcs_web_url'] + l.vcs_web_tree_base_url = li['vcs_web_tree_base_url'] + l.vcs_web_file_base_url = li['vcs_web_file_base_url'] l.save() except Layer.MultipleObjectsReturned: logger.info("Skipped %s as we found multiple layers and " @@ -211,12 +188,14 @@ class Command(NoArgsCommand): total = len(layerbranches_info) for i, lbi in enumerate(layerbranches_info): + # release as defined by toaster map to layerindex branch + release = li_branch_id_to_toaster_release[lbi['branch']] try: lv, created = Layer_Version.objects.get_or_create( - layer_source=LayerSource.TYPE_LAYERINDEX, layer=Layer.objects.get( - pk=li_layer_id_to_toaster_layer_id[lbi['layer']]) + pk=li_layer_id_to_toaster_layer_id[lbi['layer']]), + release=release ) except KeyError: logger.warning( @@ -224,11 +203,12 @@ class Command(NoArgsCommand): lbi['layer']) continue - lv.release = li_branch_id_to_toaster_release[lbi['branch']] - lv.up_date = lbi['updated'] - lv.commit = lbi['actual_branch'] - lv.dirpath = lbi['vcs_subdir'] - lv.save() + if created: + lv.release = li_branch_id_to_toaster_release[lbi['branch']] + lv.up_date = lbi['updated'] + lv.commit = lbi['actual_branch'] + lv.dirpath = lbi['vcs_subdir'] + lv.save() li_layer_branch_id_to_toaster_lv_id[lbi['id']] =\ lv.pk @@ -255,9 +235,8 @@ class Command(NoArgsCommand): layer_id = li_layer_id_to_toaster_layer_id[ldi['dependency']] dependlist[lv].append( - Layer_Version.objects.get( - layer_source=LayerSource.TYPE_LAYERINDEX, - layer__pk=layer_id)) + Layer_Version.objects.get(layer__pk=layer_id, + release=lv.release)) except Layer_Version.DoesNotExist: logger.warning("Cannot find layer version (ls:%s)," diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/orm/models.py b/import-layers/yocto-poky/bitbake/lib/toaster/orm/models.py index a7de57c25..a49f9a432 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/orm/models.py +++ b/import-layers/yocto-poky/bitbake/lib/toaster/orm/models.py @@ -38,6 +38,7 @@ import re import itertools from signal import SIGUSR1 + import logging logger = logging.getLogger("toaster") @@ -178,24 +179,27 @@ class ProjectManager(models.Manager): else: return projects[0] + class Project(models.Model): - search_allowed_fields = ['name', 'short_description', 'release__name', 'release__branch_name'] + search_allowed_fields = ['name', 'short_description', 'release__name', + 'release__branch_name'] name = models.CharField(max_length=100) short_description = models.CharField(max_length=50, blank=True) bitbake_version = models.ForeignKey('BitbakeVersion', null=True) - release = models.ForeignKey("Release", null=True) - created = models.DateTimeField(auto_now_add = True) - updated = models.DateTimeField(auto_now = True) + release = models.ForeignKey("Release", null=True) + created = models.DateTimeField(auto_now_add=True) + updated = models.DateTimeField(auto_now=True) # This is a horrible hack; since Toaster has no "User" model available when # running in interactive mode, we can't reference the field here directly - # Instead, we keep a possible null reference to the User id, as not to force + # Instead, we keep a possible null reference to the User id, + # as not to force # hard links to possibly missing models - user_id = models.IntegerField(null = True) - objects = ProjectManager() + user_id = models.IntegerField(null=True) + 
objects = ProjectManager() # set to True for the project which is the default container # for builds initiated by the command line etc. - is_default = models.BooleanField(default = False) + is_default= models.BooleanField(default=False) def __unicode__(self): return "%s (Release %s, BBV %s)" % (self.name, self.release, self.bitbake_version) @@ -221,16 +225,16 @@ class Project(models.Model): return( -1 ) def get_last_outcome(self): - build_id = self.get_last_build_id + build_id = self.get_last_build_id() if (-1 == build_id): return( "" ) try: - return Build.objects.filter( id = self.get_last_build_id )[ 0 ].outcome + return Build.objects.filter( id = build_id )[ 0 ].outcome except (Build.DoesNotExist,IndexError): return( "not_found" ) def get_last_target(self): - build_id = self.get_last_build_id + build_id = self.get_last_build_id() if (-1 == build_id): return( "" ) try: @@ -239,7 +243,7 @@ class Project(models.Model): return( "not_found" ) def get_last_errors(self): - build_id = self.get_last_build_id + build_id = self.get_last_build_id() if (-1 == build_id): return( 0 ) try: @@ -248,7 +252,7 @@ class Project(models.Model): return( "not_found" ) def get_last_warnings(self): - build_id = self.get_last_build_id + build_id = self.get_last_build_id() if (-1 == build_id): return( 0 ) try: @@ -265,7 +269,7 @@ class Project(models.Model): return last_build.get_image_file_extensions() def get_last_imgfiles(self): - build_id = self.get_last_build_id + build_id = self.get_last_build_id() if (-1 == build_id): return( "" ) try: @@ -333,20 +337,45 @@ class Project(models.Model): return queryset - def schedule_build(self): - from bldcontrol.models import BuildRequest, BRTarget, BRLayer, BRVariable, BRBitbake - br = BuildRequest.objects.create(project = self) + + from bldcontrol.models import BuildRequest, BRTarget, BRLayer + from bldcontrol.models import BRBitbake, BRVariable + try: + now = timezone.now() + build = Build.objects.create(project=self, + completed_on=now, + started_on=now) + + br = BuildRequest.objects.create(project=self, + state=BuildRequest.REQ_QUEUED, + build=build) + BRBitbake.objects.create(req=br, + giturl=self.bitbake_version.giturl, + commit=self.bitbake_version.branch, + dirpath=self.bitbake_version.dirpath) - BRBitbake.objects.create(req = br, - giturl = self.bitbake_version.giturl, - commit = self.bitbake_version.branch, - dirpath = self.bitbake_version.dirpath) + for t in self.projecttarget_set.all(): + BRTarget.objects.create(req=br, target=t.target, task=t.task) + Target.objects.create(build=br.build, target=t.target, + task=t.task) + # If we're about to build a custom image recipe make sure + # that layer is currently in the project before we create the + # BRLayer objects + customrecipe = CustomImageRecipe.objects.filter( + name=t.target, + project=self).first() + if customrecipe: + ProjectLayer.objects.get_or_create( + project=self, + layercommit=customrecipe.layer_version, + optional=False) for l in self.projectlayer_set.all().order_by("pk"): commit = l.layercommit.get_vcs_reference() - print("ii Building layer ", l.layercommit.layer.name, " at vcs point ", commit) + logger.debug("Adding layer to build %s" % + l.layercommit.layer.name) BRLayer.objects.create( req=br, name=l.layercommit.layer.name, @@ -357,25 +386,16 @@ class Project(models.Model): local_source_dir=l.layercommit.layer.local_source_dir ) - br.state = BuildRequest.REQ_QUEUED - now = timezone.now() - br.build = Build.objects.create(project = self, - completed_on=now, - started_on=now, - ) - for t in 
self.projecttarget_set.all(): - BRTarget.objects.create(req = br, target = t.target, task = t.task) - Target.objects.create(build = br.build, target = t.target, task = t.task) - for v in self.projectvariable_set.all(): - BRVariable.objects.create(req = br, name = v.name, value = v.value) - + BRVariable.objects.create(req=br, name=v.name, value=v.value) try: - br.build.machine = self.projectvariable_set.get(name = 'MACHINE').value + br.build.machine = self.projectvariable_set.get( + name='MACHINE').value br.build.save() except ProjectVariable.DoesNotExist: pass + br.save() signal_runbuilds() @@ -882,7 +902,7 @@ class Target_Image_File(models.Model): 'ext4.gz', 'ext3', 'ext3.gz', 'hdddirect', 'hddimg', 'iso', 'jffs2', 'jffs2.sum', 'multiubi', 'qcow2', 'squashfs', 'squashfs-lzo', 'squashfs-xz', 'tar', 'tar.bz2', 'tar.gz', 'tar.lz4', 'tar.xz', 'ubi', - 'ubifs', 'vdi', 'vmdk', 'wic', 'wic.bz2', 'wic.gz', 'wic.lzma' + 'ubifs', 'vdi', 'vmdk', 'wic', 'wic.bmap', 'wic.bz2', 'wic.gz', 'wic.lzma' } target = models.ForeignKey(Target) @@ -1365,7 +1385,7 @@ class Layer(models.Model): name = models.CharField(max_length=100) layer_index_url = models.URLField() vcs_url = GitURLField(default=None, null=True) - local_source_dir = models.TextField(null = True, default = None) + local_source_dir = models.TextField(null=True, default=None) vcs_web_url = models.URLField(null=True, default=None) vcs_web_tree_base_url = models.URLField(null=True, default=None) vcs_web_file_base_url = models.URLField(null=True, default=None) @@ -1473,22 +1493,33 @@ class Layer_Version(models.Model): return self.commit return 'N/A' - def get_detailspage_url(self, project_id): + def get_detailspage_url(self, project_id=None): + """ returns the url to the layer details page uses own project + field if project_id is not specified """ + + if project_id is None: + project_id = self.project.pk + return reverse('layerdetails', args=(project_id, self.pk)) def get_alldeps(self, project_id): """Get full list of unique layer dependencies.""" - def gen_layerdeps(lver, project): + def gen_layerdeps(lver, project, depth): + if depth == 0: + return for ldep in lver.dependencies.all(): yield ldep.depends_on # get next level of deps recursively calling gen_layerdeps - for subdep in gen_layerdeps(ldep.depends_on, project): + for subdep in gen_layerdeps(ldep.depends_on, project, depth-1): yield subdep project = Project.objects.get(pk=project_id) result = [] - projectlvers = [player.layercommit for player in project.projectlayer_set.all()] - for dep in gen_layerdeps(self, project): + projectlvers = [player.layercommit for player in + project.projectlayer_set.all()] + # protect against infinite layer dependency loops + maxdepth = 20 + for dep in gen_layerdeps(self, project, maxdepth): # filter out duplicates and layers already belonging to the project if dep not in result + projectlvers: result.append(dep) @@ -1631,7 +1662,8 @@ class CustomImageRecipe(Recipe): if base_recipe_path: base_recipe = open(base_recipe_path, 'r').read() else: - raise IOError("Based on recipe file not found") + raise IOError("Based on recipe file not found: %s" % + base_recipe_path) # Add a special case for when the recipe we have based a custom image # recipe on requires another recipe. 
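The get_alldeps() change above caps the recursive layer-dependency walk at maxdepth levels, so a circular dependency between Layer_Version objects can no longer recurse without bound. A minimal, self-contained sketch of the same depth-limited generator pattern, using a plain dict in place of the ORM relations (gen_deps and graph are illustrative names, not part of this patch):

    def gen_deps(graph, node, depth):
        # Yield everything reachable from node, at most 'depth' levels deep,
        # so a dependency cycle cannot recurse forever.
        if depth == 0:
            return
        for dep in graph.get(node, []):
            yield dep
            for sub in gen_deps(graph, dep, depth - 1):
                yield sub

    # a -> b -> c -> a: a deliberate cycle
    graph = {'a': ['b'], 'b': ['c'], 'c': ['a']}
    result = []
    for dep in gen_deps(graph, 'a', 20):
        if dep not in result:      # de-duplicate, as get_alldeps() does
            result.append(dep)
    print(result)                  # ['b', 'c', 'a']

With the depth capped at 20, as in the patch, the walk still covers any realistic layer stack; a cycle simply stops after 20 levels and its repeated entries are filtered out.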
@@ -1741,8 +1773,12 @@ def invalidate_cache(**kwargs): def signal_runbuilds(): """Send SIGUSR1 to runbuilds process""" - with open(os.path.join(os.getenv('BUILDDIR'), '.runbuilds.pid')) as pidf: - os.kill(int(pidf.read()), SIGUSR1) + try: + with open(os.path.join(os.getenv('BUILDDIR', '.'), + '.runbuilds.pid')) as pidf: + os.kill(int(pidf.read()), SIGUSR1) + except FileNotFoundError: + logger.info("Stopping existing runbuilds: no current process found") django.db.models.signals.post_save.connect(invalidate_cache) django.db.models.signals.post_delete.connect(invalidate_cache) diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/README b/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/README index 6b09d20d8..352c4fe3e 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/README +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/README @@ -2,9 +2,9 @@ These tests require Selenium to be installed in your Python environment. -The simplest way to install this is via pip: +The simplest way to install this is via pip3: - pip install selenium==2.53.2 + pip3 install selenium==2.53.2 Note that if you use other versions of Selenium, some of the tests (such as tests.browser.test_js_unit_tests.TestJsUnitTests) may fail, as these rely on @@ -18,7 +18,7 @@ To run tests against Chrome: * On Windows, put chromedriver.exe in the same directory as chrome.exe To run tests against PhantomJS (headless): - +--NOTE - Selenium seems to be deprecating support for this mode --- * Download and install PhantomJS: http://phantomjs.org/download.html * On *nix systems, put phantomjs on PATH @@ -43,13 +43,30 @@ Marionette driver.) The test cases will instantiate a Selenium driver set by the TOASTER_TESTS_BROWSER environment variable, or Chrome if this is not specified. +To run tests against the Selenium Firefox Docker container: +More explanation is located at https://wiki.yoctoproject.org/wiki/TipsAndTricks/TestingToasterWithContainers +* Run the Selenium container: + ** docker run -it --rm=true -p 5900:5900 -p 4444:4444 --name=selenium selenium/standalone-firefox-debug:2.53.0 + *** 5900 is the default vnc port. If you are runing a vnc server on your machine map a different port e.g. -p 6900:5900 and connect vnc client to 127.0.0.1:6900 + *** 4444 is the default selenium sever port. +* Run the tests + ** TOASTER_TESTS_BROWSER=http://127.0.0.1:4444/wd/hub TOASTER_TESTS_URL=http://172.17.0.1:8000 ./bitbake/lib/toaster/manage.py test --liveserver=172.17.0.1:8000 tests.browser + ** TOASTER_TESTS_BROWSER=remote TOASTER_REMOTE_HUB=http://127.0.0.1:4444/wd/hub ./bitbake/lib/toaster/manage.py test --liveserver=172.17.0.1:8000 tests.browser + *** TOASTER_REMOTE_HUB - This is the address for the Selenium Remote Web Driver hub. Assuming you ran the contianer with -p 4444:4444 it will be http://127.0.0.1:4444/wd/hub. + *** --liveserver=xxx tells Django to run the test server on an interface and port reachable by both host and container. + **** 172.17.0.1 is the default docker bridge on linux, viewable from inside and outside the contianers. Find it with "ip -4 addr show dev docker0" +* connect to the vnc server to see the tests if you would like + ** xtightvncviewer 127.0.0.1:5900 + ** note, you need to wait for the test container to come up before this can connect. + Available drivers: * chrome (default) * firefox * marionette (for newer Firefoxes) * ie -* phantomjs +* phantomjs (deprecated) +* remote e.g. 
to run the test suite with phantomjs where you have phantomjs installed in /home/me/apps/phantomjs: diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py index 14e9c1564..156d639b1 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/selenium_helpers_base.py @@ -39,7 +39,7 @@ from selenium.webdriver.common.desired_capabilities import DesiredCapabilities from selenium.common.exceptions import NoSuchElementException, \ StaleElementReferenceException, TimeoutException -def create_selenium_driver(browser='chrome'): +def create_selenium_driver(cls,browser='chrome'): # set default browser string based on env (if available) env_browser = os.environ.get('TOASTER_TESTS_BROWSER') if env_browser: @@ -59,6 +59,15 @@ def create_selenium_driver(browser='chrome'): return webdriver.Ie() elif browser == 'phantomjs': return webdriver.PhantomJS() + elif browser == 'remote': + # if we were to add yet another env variable like TOASTER_REMOTE_BROWSER + # we could let people pick firefox or chrome, left for later + remote_hub= os.environ.get('TOASTER_REMOTE_HUB') + driver = webdriver.Remote(remote_hub, + webdriver.DesiredCapabilities.FIREFOX.copy()) + + driver.get("http://%s:%s"%(cls.server_thread.host,cls.server_thread.port)) + return driver else: msg = 'Selenium driver for browser %s is not available' % browser raise RuntimeError(msg) @@ -135,7 +144,7 @@ class SeleniumTestCaseBase(unittest.TestCase): # instantiate the Selenium webdriver once for all the test methods # in this test case - cls.driver = create_selenium_driver() + cls.driver = create_selenium_driver(cls) cls.driver.maximize_window() @classmethod diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py index 6392d1efb..f24fb093a 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/browser/test_layerdetails_page.py @@ -91,9 +91,10 @@ class TestLayerDetailsPage(SeleniumTestCase): for btn in self.find_all("dd .glyphicon-edit"): btn.click() - # Wait for the inputs to become visible + # Wait for the inputs to become visible after animation self.wait_until_visible("#layer-git input[type=text]") self.wait_until_visible("dd textarea") + self.wait_until_visible("dd .change-btn") # Edit each value for inputs in self.find_all("#layer-git input[type=text]") + \ diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/builds/buildtest.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/builds/buildtest.py index fc7bd5b64..5a56a110a 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/tests/builds/buildtest.py +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/builds/buildtest.py @@ -24,82 +24,115 @@ import sys import time import unittest -from orm.models import Project, Release, ProjectTarget, Build +from orm.models import Project, Release, ProjectTarget, Build, ProjectVariable from bldcontrol.models import BuildEnvironment -from bldcontrol.management.commands.loadconf import Command\ - as LoadConfigCommand - from bldcontrol.management.commands.runbuilds import Command\ as RunBuildsCommand +from django.core.management import call_command + import subprocess +import logging + 
+logger = logging.getLogger("toaster") # We use unittest.TestCase instead of django.test.TestCase because we don't # want to wrap everything in a database transaction as an external process # (bitbake needs access to the database) +def load_build_environment(): + call_command('loaddata', 'settings.xml', app_label="orm") + call_command('loaddata', 'poky.xml', app_label="orm") + + current_builddir = os.environ.get("BUILDDIR") + if current_builddir: + BuildTest.BUILDDIR = current_builddir + else: + # Setup a builddir based on default layout + # bitbake inside openebedded-core + oe_init_build_env_path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + os.pardir, + os.pardir, + os.pardir, + os.pardir, + os.pardir, + 'oe-init-build-env' + ) + if not os.path.exists(oe_init_build_env_path): + raise Exception("We had no BUILDDIR set and couldn't " + "find oe-init-build-env to set this up " + "ourselves please run oe-init-build-env " + "before running these tests") + + oe_init_build_env_path = os.path.realpath(oe_init_build_env_path) + cmd = "bash -c 'source oe-init-build-env %s'" % BuildTest.BUILDDIR + p = subprocess.Popen( + cmd, + cwd=os.path.dirname(oe_init_build_env_path), + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + output, err = p.communicate() + p.wait() + + logger.info("oe-init-build-env %s %s" % (output, err)) + + os.environ['BUILDDIR'] = BuildTest.BUILDDIR + + # Setup the path to bitbake we know where to find this + bitbake_path = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + os.pardir, + os.pardir, + os.pardir, + os.pardir, + 'bin', + 'bitbake') + if not os.path.exists(bitbake_path): + raise Exception("Could not find bitbake at the expected path %s" + % bitbake_path) + + os.environ['BBBASEDIR'] = bitbake_path class BuildTest(unittest.TestCase): PROJECT_NAME = "Testbuild" + BUILDDIR = "/tmp/build/" def build(self, target): # So that the buildinfo helper uses the test database' self.assertEqual( os.environ.get('DJANGO_SETTINGS_MODULE', ''), - 'toastermain.settings-test', + 'toastermain.settings_test', "Please initialise django with the tests settings: " - "DJANGO_SETTINGS_MODULE='toastermain.settings-test'") - - if self.target_already_built(target): - return - - # Take a guess at the location of the toasterconf - poky_toaster_conf = '../../../meta-poky/conf/toasterconf.json' - oe_toaster_conf = '../../../meta/conf/toasterconf.json' - env_toaster_conf = os.environ.get('TOASTER_CONF') + "DJANGO_SETTINGS_MODULE='toastermain.settings_test'") - config_file = None - if env_toaster_conf: - config_file = env_toaster_conf - else: - if os.path.exists(poky_toaster_conf): - config_file = poky_toaster_conf - elif os.path.exists(oe_toaster_conf): - config_file = oe_toaster_conf + built = self.target_already_built(target) + if built: + return built - self.assertIsNotNone(config_file, - "Default locations for toasterconf not found" - "please set $TOASTER_CONF manually") - - # Setup the release information and default layers - print("\nImporting file: %s" % config_file) - os.environ['TOASTER_CONF'] = config_file - LoadConfigCommand()._import_layer_config(config_file) - - os.environ['TOASTER_DIR'] = \ - os.path.abspath(os.environ['BUILDDIR'] + "/../") - - os.environ['BBBASEDIR'] = \ - subprocess.check_output('which bitbake', shell=True) + load_build_environment() BuildEnvironment.objects.get_or_create( betype=BuildEnvironment.TYPE_LOCAL, - sourcedir=os.environ['TOASTER_DIR'], - builddir=os.environ['BUILDDIR'] + sourcedir=BuildTest.BUILDDIR, + 
builddir=BuildTest.BUILDDIR ) release = Release.objects.get(name='local') # Create a project for this build to run in - try: - project = Project.objects.get(name=BuildTest.PROJECT_NAME) - except Project.DoesNotExist: - project = Project.objects.create_project( - name=BuildTest.PROJECT_NAME, - release=release - ) + project = Project.objects.create_project(name=BuildTest.PROJECT_NAME, + release=release) + + if os.environ.get("TOASTER_TEST_USE_SSTATE_MIRROR"): + ProjectVariable.objects.get_or_create( + name="SSTATE_MIRRORS", + value="file://.* http://autobuilder.yoctoproject.org/pub/sstate/PATH;downloadfilename=PATH", + project=project) ProjectTarget.objects.create(project=project, target=target, @@ -118,9 +151,11 @@ class BuildTest(unittest.TestCase): sys.stdout.flush() time.sleep(1) - self.assertNotEqual(build_request.build.outcome, - Build.SUCCEEDED, "Build did not SUCCEEDED") - print("\nBuild finished") + self.assertEqual(Build.objects.get(pk=build_pk).outcome, + Build.SUCCEEDED, + "Build did not SUCCEEDED") + + logger.info("\nBuild finished %s" % build_request.build.outcome) return build_request.build def target_already_built(self, target): @@ -129,6 +164,6 @@ class BuildTest(unittest.TestCase): project__name=BuildTest.PROJECT_NAME): targets = build.target_set.values_list('target', flat=True) if target in targets: - return True + return build - return False + return None diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/builds/test_core_image_min.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/builds/test_core_image_min.py index dec0bfa7f..586f4a8f7 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/tests/builds/test_core_image_min.py +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/builds/test_core_image_min.py @@ -31,14 +31,14 @@ from orm.models import Package_Dependency, Recipe_Dependency, Build from orm.models import Task_Dependency, Package, Target, Recipe from orm.models import CustomImagePackage -from buildtest import BuildTest +from tests.builds.buildtest import BuildTest class BuildCoreImageMinimal(BuildTest): """Build core-image-minimal and test the results""" def setUp(self): - self.build("core-image-minimal") + self.completed_build = self.build("core-image-minimal") # Check if build name is unique - tc_id=795 def test_Build_Unique_Name(self): @@ -59,38 +59,29 @@ class BuildCoreImageMinimal(BuildTest): # Check if task order is unique for one build - tc=824 def test_Task_Unique_Order(self): - builds = Build.objects.values('id') - cnt_err = [] - - for build in builds: - total_task_order = Task.objects.filter( - build=build['id']).values('order').count() - distinct_task_order = Task.objects.filter( - build=build['id']).values('order').distinct().count() + total_task_order = Task.objects.filter( + build=self.built).values('order').count() + distinct_task_order = Task.objects.filter( + build=self.completed_build).values('order').distinct().count() - if (total_task_order != distinct_task_order): - cnt_err.append(build['id']) - - self.assertEqual(len(cnt_err), - 0, - msg='Errors for build id: %s' % cnt_err) + self.assertEqual(total_task_order, + distinct_task_order, + msg='Errors task order is not unique') # Check task order sequence for one build - tc=825 def test_Task_Order_Sequence(self): - builds = builds = Build.objects.values('id') cnt_err = [] - for build in builds: - tasks = Task.objects.filter( - Q(build=build['id']), - ~Q(order=None), - ~Q(task_name__contains='_setscene') - ).values('id', 'order').order_by("order") + tasks = 
Task.objects.filter( + Q(build=self.completed_build), + ~Q(order=None), + ~Q(task_name__contains='_setscene') + ).values('id', 'order').order_by("order") - cnt_tasks = 0 - for task in tasks: - cnt_tasks += 1 - if (task['order'] != cnt_tasks): - cnt_err.append(task['id']) + cnt_tasks = 0 + for task in tasks: + cnt_tasks += 1 + if (task['order'] != cnt_tasks): + cnt_err.append(task['id']) self.assertEqual( len(cnt_err), 0, msg='Errors for task id: %s' % cnt_err) @@ -126,8 +117,7 @@ class BuildCoreImageMinimal(BuildTest): task['sstate_result'] != Task.SSTATE_MISS): cnt_err.append({'id': task['id'], 'name': task['task_name'], - 'sstate_result': task['sstate_result'], - }) + 'sstate_result': task['sstate_result']}) self.assertEqual(len(cnt_err), 0, diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/__init__.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_loaddata.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_loaddata.py new file mode 100644 index 000000000..951f6ff5a --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_loaddata.py @@ -0,0 +1,61 @@ +#! /usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# BitBake Toaster Implementation +# +# Copyright (C) 2016 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +from django.test import TestCase +from django.core import management + +from orm.models import Layer_Version, Layer, Release, ToasterSetting + + +class TestLoadDataFixtures(TestCase): + """ Test loading our 3 provided fixtures """ + def test_run_loaddata_poky_command(self): + management.call_command('loaddata', 'poky') + + num_releases = Release.objects.count() + + self.assertTrue( + Layer_Version.objects.filter( + layer__name="meta-poky").count() == num_releases, + "Loaded poky fixture but don't have a meta-poky for all releases" + " defined") + + def test_run_loaddata_oecore_command(self): + management.call_command('loaddata', 'oe-core') + + # We only have the one layer for oe-core setup + self.assertTrue( + Layer.objects.filter(name="openembedded-core").count() > 0, + "Loaded oe-core fixture but still have no openemebedded-core" + " layer") + + def test_run_loaddata_settings_command(self): + management.call_command('loaddata', 'settings') + + self.assertTrue( + ToasterSetting.objects.filter(name="DEFAULT_RELEASE").count() > 0, + "Loaded settings but have no DEFAULT_RELEASE") + + self.assertTrue( + ToasterSetting.objects.filter( + name__startswith="DEFCONF").count() > 0, + "Loaded settings but have no DEFCONF (default project " + "configuration values)") diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_lsupdates.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_lsupdates.py new file mode 100644 index 000000000..49897a476 --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_lsupdates.py @@ -0,0 +1,45 @@ +#! /usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# BitBake Toaster Implementation +# +# Copyright (C) 2016 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +from django.test import TestCase +from django.core import management + +from orm.models import Layer_Version, Machine, Recipe + + +class TestLayerIndexUpdater(TestCase): + def test_run_lsupdates_command(self): + # Load some release information for us to fetch from the layer index + management.call_command('loaddata', 'poky') + + old_layers_count = Layer_Version.objects.count() + old_recipes_count = Recipe.objects.count() + old_machines_count = Machine.objects.count() + + # Now fetch the metadata from the layer index + management.call_command('lsupdates') + + self.assertTrue(Layer_Version.objects.count() > old_layers_count, + "lsupdates ran but we still have no more layers!") + self.assertTrue(Recipe.objects.count() > old_recipes_count, + "lsupdates ran but we still have no more Recipes!") + self.assertTrue(Machine.objects.count() > old_machines_count, + "lsupdates ran but we still have no more Machines!") diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_runbuilds.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_runbuilds.py new file mode 100644 index 000000000..3e634835e --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/commands/test_runbuilds.py @@ -0,0 +1,88 @@ +#! /usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# BitBake Toaster Implementation +# +# Copyright (C) 2016 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +import os + +from django.test import TestCase +from django.core import management + +from orm.models import signal_runbuilds + +import threading +import time +import subprocess +import signal + + +class KillRunbuilds(threading.Thread): + """ Kill the runbuilds process after an amount of time """ + def __init__(self, *args, **kwargs): + super(KillRunbuilds, self).__init__(*args, **kwargs) + self.setDaemon(True) + + def run(self): + time.sleep(5) + signal_runbuilds() + time.sleep(1) + + pidfile_path = os.path.join(os.environ.get("BUILDDIR", "."), + ".runbuilds.pid") + + with open(pidfile_path) as pidfile: + pid = pidfile.read() + os.kill(int(pid), signal.SIGTERM) + + +class TestCommands(TestCase): + """ Sanity test that runbuilds executes OK """ + + def setUp(self): + os.environ.setdefault("DJANGO_SETTINGS_MODULE", + "toastermain.settings_test") + os.environ.setdefault("BUILDDIR", + "/tmp/") + + # Setup a real database if needed for runbuilds process + # to connect to + management.call_command('migrate') + + def test_runbuilds_command(self): + kill_runbuilds = KillRunbuilds() + kill_runbuilds.start() + + manage_py = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + os.pardir, + os.pardir, + "manage.py") + + command = "%s runbuilds" % manage_py + + process = subprocess.Popen(command, + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + (out, err) = process.communicate() + process.wait() + + self.assertNotEqual(process.returncode, 1, + "Runbuilds returned an error %s" % err) diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/eventreplay/README b/import-layers/yocto-poky/bitbake/lib/toaster/tests/eventreplay/README new file mode 100644 index 000000000..8c5bb6432 --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/eventreplay/README @@ -0,0 +1,22 @@ +# Running eventreplay tests + +These tests use event log files produced by bitbake -w +You need to have event log files produced before running this tests. + +At the moment of writing this document tests use 2 event log files: zlib.events +and core-image-minimal.events. They're not provided with the tests due to their +significant size. + +Here is how to produce them: + +$ . oe-init-build-env +$ rm -r tmp sstate-cache +$ bitbake core-image-minimal -w core-image-minimal.events +$ rm -rf tmp sstate-cache +$ bitbake zlib -w zlib.events + +After that it should be possible to run eventreplay tests this way: + +$ EVENTREPLAY_DIR=./ DJANGO_SETTINGS_MODULE=toastermain.settings_test ../bitbake/lib/toaster/manage.py test -v2 tests.eventreplay + +Note that environment variable EVENTREPLAY_DIR should point to the directory with event log files. diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/eventreplay/__init__.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/eventreplay/__init__.py new file mode 100644 index 000000000..695661947 --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/eventreplay/__init__.py @@ -0,0 +1,97 @@ +#! /usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# BitBake Toaster Implementation +# +# Copyright (C) 2016 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +# Tests were part of openembedded-core oe selftest Authored by: Lucian Musat +# Ionut Chisanovici, Paul Eggleton and Cristian Iorga + +""" +Test toaster backend by playing build event log files +using toaster-eventreplay script +""" + +import os + +from subprocess import getstatusoutput +from pathlib import Path + +from django.test import TestCase + +from orm.models import Target_Installed_Package, Package, Build + +class EventReplay(TestCase): + """Base class for eventreplay test cases""" + + def setUp(self): + """ + Setup build environment: + - set self.script to toaster-eventreplay path + - set self.eventplay_dir to the value of EVENTPLAY_DIR env variable + """ + bitbake_dir = Path(__file__.split('lib/toaster')[0]) + self.script = bitbake_dir / 'bin' / 'toaster-eventreplay' + self.assertTrue(self.script.exists(), "%s doesn't exist") + self.eventplay_dir = os.getenv("EVENTREPLAY_DIR") + self.assertTrue(self.eventplay_dir, + "Environment variable EVENTREPLAY_DIR is not set") + + def _replay(self, eventfile): + """Run toaster-eventplay """ + eventpath = Path(self.eventplay_dir) / eventfile + status, output = getstatusoutput('%s %s' % (self.script, eventpath)) + if status: + print(output) + + self.assertEqual(status, 0) + +class CoreImageMinimalEventReplay(EventReplay): + """Replay core-image-minimal events""" + + def test_installed_packages(self): + """Test if all required packages have been installed""" + + self._replay('core-image-minimal.events') + + # test installed packages + packages = sorted(Target_Installed_Package.objects.\ + values_list('package__name', flat=True)) + self.assertEqual(packages, ['base-files', 'base-passwd', 'busybox', + 'busybox-hwclock', 'busybox-syslog', + 'busybox-udhcpc', 'eudev', 'glibc', + 'init-ifupdown', 'initscripts', + 'initscripts-functions', 'kernel-base', + 'kernel-module-uvesafb', 'libkmod', + 'modutils-initscripts', 'netbase', + 'packagegroup-core-boot', 'run-postinsts', + 'sysvinit', 'sysvinit-inittab', + 'sysvinit-pidof', 'udev-cache', + 'update-alternatives-opkg', + 'update-rc.d', 'util-linux-libblkid', + 'util-linux-libuuid', 'v86d', 'zlib']) + +class ZlibEventReplay(EventReplay): + """Replay zlib events""" + + def test_replay_zlib(self): + """Test if zlib build and package are in the database""" + self._replay("zlib.events") + + self.assertEqual(Build.objects.last().target_set.last().target, "zlib") + self.assertTrue('zlib' in Package.objects.values_list('name', flat=True)) diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/__init__.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/functional_helpers.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/functional_helpers.py new file mode 100644 index 000000000..486078a61 --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/functional_helpers.py @@ -0,0 +1,122 @@ +#! 
/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# BitBake Toaster functional tests implementation +# +# Copyright (C) 2017 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +import os +import logging +import subprocess +import signal +import time +import re + +from tests.browser.selenium_helpers_base import SeleniumTestCaseBase +from tests.builds.buildtest import load_build_environment + +logger = logging.getLogger("toaster") + +class SeleniumFunctionalTestCase(SeleniumTestCaseBase): + wait_toaster_time = 5 + + @classmethod + def setUpClass(cls): + # So that the buildinfo helper uses the test database' + if os.environ.get('DJANGO_SETTINGS_MODULE', '') != \ + 'toastermain.settings_test': + raise RuntimeError("Please initialise django with the tests settings: " \ + "DJANGO_SETTINGS_MODULE='toastermain.settings_test'") + + load_build_environment() + + # start toaster + cmd = "bash -c 'source toaster start'" + p = subprocess.Popen( + cmd, + cwd=os.environ.get("BUILDDIR"), + shell=True) + if p.wait() != 0: + raise RuntimeError("Can't initialize toaster") + + super(SeleniumFunctionalTestCase, cls).setUpClass() + cls.live_server_url = 'http://localhost:8000/' + + @classmethod + def tearDownClass(cls): + super(SeleniumFunctionalTestCase, cls).tearDownClass() + + # XXX: source toaster stop gets blocked, to review why? + # from now send SIGTERM by hand + time.sleep(cls.wait_toaster_time) + builddir = os.environ.get("BUILDDIR") + + with open(os.path.join(builddir, '.toastermain.pid'), 'r') as f: + toastermain_pid = int(f.read()) + os.kill(toastermain_pid, signal.SIGTERM) + with open(os.path.join(builddir, '.runbuilds.pid'), 'r') as f: + runbuilds_pid = int(f.read()) + os.kill(runbuilds_pid, signal.SIGTERM) + + + def get_URL(self): + rc=self.get_page_source() + project_url=re.search("(projectPageUrl\s:\s\")(.*)(\",)",rc) + return project_url.group(2) + + + def find_element_by_link_text_in_table(self, table_id, link_text): + """ + Assume there're multiple suitable "find_element_by_link_text". + In this circumstance we need to specify "table". 
+ """ + try: + table_element = self.get_table_element(table_id) + element = table_element.find_element_by_link_text(link_text) + except NoSuchElementException as e: + print('no element found') + raise + return element + + def get_table_element(self, table_id, *coordinate): + if len(coordinate) == 0: +#return whole-table element + element_xpath = "//*[@id='" + table_id + "']" + try: + element = self.driver.find_element_by_xpath(element_xpath) + except NoSuchElementException as e: + raise + return element + row = coordinate[0] + + if len(coordinate) == 1: +#return whole-row element + element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]" + try: + element = self.driver.find_element_by_xpath(element_xpath) + except NoSuchElementException as e: + return False + return element +#now we are looking for an element with specified X and Y + column = coordinate[1] + + element_xpath = "//*[@id='" + table_id + "']/tbody/tr[" + str(row) + "]/td[" + str(column) + "]" + try: + element = self.driver.find_element_by_xpath(element_xpath) + except NoSuchElementException as e: + return False + return element diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/test_functional_basic.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/test_functional_basic.py new file mode 100644 index 000000000..cfa2b0fdf --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/functional/test_functional_basic.py @@ -0,0 +1,243 @@ +#! /usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# BitBake Toaster functional tests implementation +# +# Copyright (C) 2017 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +import time +import re +from tests.functional.functional_helpers import SeleniumFunctionalTestCase +from orm.models import Project + +class FuntionalTestBasic(SeleniumFunctionalTestCase): + +# testcase (1514) + def test_create_slenium_project(self): + project_name = 'selenium-project' + self.get('') + self.driver.find_element_by_link_text("To start building, create your first Toaster project").click() + self.driver.find_element_by_id("new-project-name").send_keys(project_name) + self.driver.find_element_by_id('projectversion').click() + self.driver.find_element_by_id("create-project-button").click() + element = self.wait_until_visible('#project-created-notification') + self.assertTrue(self.element_exists('#project-created-notification'),'Project creation notification not shown') + self.assertTrue(project_name in element.text, + "New project name not in new project notification") + self.assertTrue(Project.objects.filter(name=project_name).count(), + "New project not found in database") + + # testcase (1515) + def test_verify_left_bar_menu(self): + self.get('') + self.wait_until_visible('#projectstable') + self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() + self.assertTrue(self.element_exists('#config-nav'),'Configuration Tab does not exist') + project_URL=self.get_URL() + self.driver.find_element_by_xpath('//a[@href="'+project_URL+'"]').click() + + try: + self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'customimages/"'+"]").click() + self.assertTrue(re.search("Custom images",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'Custom images information is not loading properly') + except: + self.fail(msg='No Custom images tab available') + + try: + self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'images/"'+"]").click() + self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible image recipes information is not loading properly') + except: + self.fail(msg='No Compatible image tab available') + + try: + self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'softwarerecipes/"'+"]").click() + self.assertTrue(re.search("Compatible software recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible software recipe information is not loading properly') + except: + self.fail(msg='No Compatible software recipe tab available') + + try: + self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'machines/"'+"]").click() + self.assertTrue(re.search("Compatible machines",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible machine information is not loading properly') + except: + self.fail(msg='No Compatible machines tab available') + + try: + self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'layers/"'+"]").click() + self.assertTrue(re.search("Compatible layers",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Compatible layer information is not loading properly') + except: + self.fail(msg='No Compatible layers tab available') + + try: + self.driver.find_element_by_xpath("//*[@id='config-nav']/ul/li/a[@href="+'"'+project_URL+'configuration"'+"]").click() + self.assertTrue(re.search("Bitbake variables",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Bitbake variables 
information is not loading properly') + except: + self.fail(msg='No Bitbake variables tab available') + +# testcase (1516) + def test_review_configuration_information(self): + self.get('') + self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() + self.wait_until_visible('#projectstable') + self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() + project_URL=self.get_URL() + + try: + self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist') + self.assertTrue(re.search("qemux86",self.driver.find_element_by_xpath("//span[@id='project-machine-name']").text),'The machine type is not assigned') + self.driver.find_element_by_xpath("//span[@id='change-machine-toggle']").click() + self.wait_until_visible('#select-machine-form') + self.wait_until_visible('#cancel-machine-change') + self.driver.find_element_by_xpath("//form[@id='select-machine-form']/a[@id='cancel-machine-change']").click() + except: + self.fail(msg='The machine information is wrong in the configuration page') + + try: + self.driver.find_element_by_id('no-most-built') + except: + self.fail(msg='No Most built information in project detail page') + + try: + self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_xpath("//span[@id='project-release-title']").text),'The project release is not defined') + except: + self.fail(msg='No project release title information in project detail page') + + try: + self.driver.find_element_by_xpath("//div[@id='layer-container']") + self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count') + layer_list = self.driver.find_element_by_id("layers-in-project-list") + layers = layer_list.find_elements_by_tag_name("li") + for layer in layers: + if re.match ("openembedded-core",layer.text): + print ("openembedded-core layer is a default layer in the project configuration") + elif re.match ("meta-poky",layer.text): + print ("meta-poky layer is a default layer in the project configuration") + elif re.match ("meta-yocto-bsp",layer.text): + print ("meta-yocto-bsp is a default layer in the project configuratoin") + else: + self.fail(msg='default layers are missing from the project configuration') + except: + self.fail(msg='No Layer information in project detail page') + +# testcase (1517) + def test_verify_machine_information(self): + self.get('') + self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() + self.wait_until_visible('#projectstable') + self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() + + try: + self.assertTrue(self.element_exists('#machine-section'),'Machine section for the project configuration page does not exist') + self.assertTrue(re.search("qemux86",self.driver.find_element_by_id("project-machine-name").text),'The machine type is not assigned') + self.driver.find_element_by_id("change-machine-toggle").click() + self.wait_until_visible('#select-machine-form') + self.wait_until_visible('#cancel-machine-change') + self.driver.find_element_by_id("cancel-machine-change").click() + except: + self.fail(msg='The machine information is wrong in the configuration page') + +# testcase (1518) + def test_verify_most_built_recipes_information(self): + self.get('') + 
self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() + self.wait_until_visible('#projectstable') + self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() + project_URL=self.get_URL() + + try: + self.assertTrue(re.search("You haven't built any recipes yet",self.driver.find_element_by_id("no-most-built").text),'Default message of no builds is not present') + self.driver.find_element_by_xpath("//div[@id='no-most-built']/p/a[@href="+'"'+project_URL+'images/"'+"]").click() + self.assertTrue(re.search("Compatible image recipes",self.driver.find_element_by_xpath("//div[@class='col-md-10']").text),'The Choose a recipe to build link is not working properly') + except: + self.fail(msg='No Most built information in project detail page') + +# testcase (1519) + def test_verify_project_release_information(self): + self.get('') + self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() + self.wait_until_visible('#projectstable') + self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() + + try: + self.assertTrue(re.search("Yocto Project master",self.driver.find_element_by_id("project-release-title").text),'The project release is not defined') + except: + self.fail(msg='No project release title information in project detail page') + +# testcase (1520) + def test_verify_layer_information(self): + self.get('') + self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() + self.wait_until_visible('#projectstable') + self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() + project_URL=self.get_URL() + + try: + self.driver.find_element_by_xpath("//div[@id='layer-container']") + self.assertTrue(re.search("3",self.driver.find_element_by_id("project-layers-count").text),'There should be 3 layers listed in the layer count') + layer_list = self.driver.find_element_by_id("layers-in-project-list") + layers = layer_list.find_elements_by_tag_name("li") + + for layer in layers: + if re.match ("openembedded-core",layer.text): + print ("openembedded-core layer is a default layer in the project configuration") + elif re.match ("meta-poky",layer.text): + print ("meta-poky layer is a default layer in the project configuration") + elif re.match ("meta-yocto-bsp",layer.text): + print ("meta-yocto-bsp is a default layer in the project configuratoin") + else: + self.fail(msg='default layers are missing from the project configuration') + + self.driver.find_element_by_xpath("//input[@id='layer-add-input']") + self.driver.find_element_by_xpath("//button[@id='add-layer-btn']") + self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@id='view-compatible-layers']") + self.driver.find_element_by_xpath("//div[@id='layer-container']/form[@class='form-inline']/p/a[@href="+'"'+project_URL+'importlayer"'+"]") + except: + self.fail(msg='No Layer information in project detail page') + +# testcase (1521) + def test_verify_project_detail_links(self): + self.get('') + self.driver.find_element_by_xpath("//div[@id='global-nav']/ul/li/a[@href="+'"'+'/toastergui/projects/'+'"'+"]").click() + self.wait_until_visible('#projectstable') + self.find_element_by_link_text_in_table('projectstable', 'selenium-project').click() + project_URL=self.get_URL() + + self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav 
nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").click() + self.assertTrue(re.search("Configuration",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li[@id='topbar-configuration-tab']/a[@href="+'"'+project_URL+'"'+"]").text), 'Configuration tab in project topbar is misspelled') + + try: + self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").click() + self.assertTrue(re.search("Builds",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'builds/"'+"]").text), 'Builds tab in project topbar is misspelled') + self.driver.find_element_by_xpath("//div[@id='empty-state-projectbuildstable']") + except: + self.fail(msg='Builds tab information is not present') + + try: + self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").click() + self.assertTrue(re.search("Import layer",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'importlayer"'+"]").text), 'Import layer tab in project topbar is misspelled') + self.driver.find_element_by_xpath("//fieldset[@id='repo-select']") + self.driver.find_element_by_xpath("//fieldset[@id='git-repo']") + except: + self.fail(msg='Import layer tab not loading properly') + + try: + self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").click() + self.assertTrue(re.search("New custom image",self.driver.find_element_by_xpath("//div[@id='project-topbar']/ul[@class='nav nav-tabs']/li/a[@href="+'"'+project_URL+'newcustomimage/"'+"]").text), 'New custom image tab in project topbar is misspelled') + self.assertTrue(re.search("Select the image recipe you want to customise",self.driver.find_element_by_xpath("//div[@class='col-md-12']/h2").text),'The new custom image tab is not loading correctly') + except: + self.fail(msg='New custom image tab not loading properly') + + + diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/views/README b/import-layers/yocto-poky/bitbake/lib/toaster/tests/views/README new file mode 100644 index 000000000..950c7c989 --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/views/README @@ -0,0 +1,4 @@ + +Django unit tests to verify classes and functions based on django Views + +To run just these tests use ./manage.py test tests.views diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/views/__init__.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/views/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/tests/views/test_views.py b/import-layers/yocto-poky/bitbake/lib/toaster/tests/views/test_views.py new file mode 100644 index 000000000..1463077e9 --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/toaster/tests/views/test_views.py @@ -0,0 +1,540 @@ +#! /usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# BitBake Toaster Implementation +# +# Copyright (C) 2013-2015 Intel Corporation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +"""Test cases for Toaster GUI and ReST.""" + +from django.test import TestCase +from django.test.client import RequestFactory +from django.core.urlresolvers import reverse +from django.db.models import Q + +from orm.models import Project, Package +from orm.models import Layer_Version, Recipe +from orm.models import CustomImageRecipe +from orm.models import CustomImagePackage + +import inspect +import toastergui + +from toastergui.tables import SoftwareRecipesTable +import json +from bs4 import BeautifulSoup +import string + +PROJECT_NAME = "test project" +PROJECT_NAME2 = "test project 2" +CLI_BUILDS_PROJECT_NAME = 'Command line builds' + + +class ViewTests(TestCase): + """Tests to verify view APIs.""" + + fixtures = ['toastergui-unittest-data'] + + def setUp(self): + + self.project = Project.objects.first() + self.recipe1 = Recipe.objects.get(pk=2) + self.customr = CustomImageRecipe.objects.first() + self.cust_package = CustomImagePackage.objects.first() + self.package = Package.objects.first() + self.lver = Layer_Version.objects.first() + + def test_get_base_call_returns_html(self): + """Basic test for all-projects view""" + response = self.client.get(reverse('all-projects'), follow=True) + self.assertEqual(response.status_code, 200) + self.assertTrue(response['Content-Type'].startswith('text/html')) + self.assertTemplateUsed(response, "projects-toastertable.html") + + def test_get_json_call_returns_json(self): + """Test for all projects output in json format""" + url = reverse('all-projects') + response = self.client.get(url, {"format": "json"}, follow=True) + self.assertEqual(response.status_code, 200) + self.assertTrue(response['Content-Type'].startswith( + 'application/json')) + + data = json.loads(response.content.decode('utf-8')) + + self.assertTrue("error" in data) + self.assertEqual(data["error"], "ok") + self.assertTrue("rows" in data) + + name_found = False + for row in data["rows"]: + name_found = row['name'].find(self.project.name) + + self.assertTrue(name_found, + "project name not found in projects table") + + def test_typeaheads(self): + """Test typeahead ReST API""" + layers_url = reverse('xhr_layerstypeahead', args=(self.project.id,)) + prj_url = reverse('xhr_projectstypeahead') + + urls = [layers_url, + prj_url, + reverse('xhr_recipestypeahead', args=(self.project.id,)), + reverse('xhr_machinestypeahead', args=(self.project.id,))] + + def basic_reponse_check(response, url): + """Check data structure of http response.""" + self.assertEqual(response.status_code, 200) + self.assertTrue(response['Content-Type'].startswith( + 'application/json')) + + data = json.loads(response.content.decode('utf-8')) + + self.assertTrue("error" in data) + self.assertEqual(data["error"], "ok") + self.assertTrue("results" in data) + + # We got a result so now check the fields + if len(data['results']) > 0: + result = data['results'][0] + + self.assertTrue(len(result['name']) > 0) + self.assertTrue("detail" in result) + self.assertTrue(result['id'] > 0) + + # Special check for the layers typeahead's extra fields + if url == layers_url: 
+ self.assertTrue(len(result['layerdetailurl']) > 0) + self.assertTrue(len(result['vcs_url']) > 0) + self.assertTrue(len(result['vcs_reference']) > 0) + # Special check for project typeahead extra fields + elif url == prj_url: + self.assertTrue(len(result['projectPageUrl']) > 0) + + return True + + return False + + for url in urls: + results = False + + for typeing in list(string.ascii_letters): + response = self.client.get(url, {'search': typeing}) + results = basic_reponse_check(response, url) + if results: + break + + # After "typeing" the alpabet we should have result true + # from each of the urls + self.assertTrue(results) + + def test_xhr_add_layer(self): + """Test xhr_add API""" + # Test for importing an already existing layer + api_url = reverse('xhr_layer', args=(self.project.id,)) + + layer_data = {'vcs_url': "git://git.example.com/test", + 'name': "base-layer", + 'git_ref': "c12b9596afd236116b25ce26dbe0d793de9dc7ce", + 'project_id': self.project.id, + 'local_source_dir': "", + 'add_to_project': True, + 'dir_path': "/path/in/repository"} + + layer_data_json = json.dumps(layer_data) + + response = self.client.put(api_url, layer_data_json) + data = json.loads(response.content.decode('utf-8')) + self.assertEqual(response.status_code, 200) + self.assertEqual(data["error"], "ok") + + self.assertTrue( + layer_data['name'] in + self.project.get_all_compatible_layer_versions().values_list( + 'layer__name', + flat=True), + "Could not find imported layer in project's all layers list" + ) + + # Empty data passed + response = self.client.put(api_url, "{}") + data = json.loads(response.content.decode('utf-8')) + self.assertNotEqual(data["error"], "ok") + + def test_custom_ok(self): + """Test successful return from ReST API xhr_customrecipe""" + url = reverse('xhr_customrecipe') + params = {'name': 'custom', 'project': self.project.id, + 'base': self.recipe1.id} + response = self.client.post(url, params) + self.assertEqual(response.status_code, 200) + data = json.loads(response.content.decode('utf-8')) + self.assertEqual(data['error'], 'ok') + self.assertTrue('url' in data) + # get recipe from the database + recipe = CustomImageRecipe.objects.get(project=self.project, + name=params['name']) + args = (self.project.id, recipe.id,) + self.assertEqual(reverse('customrecipe', args=args), data['url']) + + def test_custom_incomplete_params(self): + """Test not passing all required parameters to xhr_customrecipe""" + url = reverse('xhr_customrecipe') + for params in [{}, {'name': 'custom'}, + {'name': 'custom', 'project': self.project.id}]: + response = self.client.post(url, params) + self.assertEqual(response.status_code, 200) + data = json.loads(response.content.decode('utf-8')) + self.assertNotEqual(data["error"], "ok") + + def test_xhr_custom_wrong_project(self): + """Test passing wrong project id to xhr_customrecipe""" + url = reverse('xhr_customrecipe') + params = {'name': 'custom', 'project': 0, "base": self.recipe1.id} + response = self.client.post(url, params) + self.assertEqual(response.status_code, 200) + data = json.loads(response.content.decode('utf-8')) + self.assertNotEqual(data["error"], "ok") + + def test_xhr_custom_wrong_base(self): + """Test passing wrong base recipe id to xhr_customrecipe""" + url = reverse('xhr_customrecipe') + params = {'name': 'custom', 'project': self.project.id, "base": 0} + response = self.client.post(url, params) + self.assertEqual(response.status_code, 200) + data = json.loads(response.content.decode('utf-8')) + self.assertNotEqual(data["error"], "ok") + + 
def test_xhr_custom_details(self): + """Test getting custom recipe details""" + url = reverse('xhr_customrecipe_id', args=(self.customr.id,)) + response = self.client.get(url) + self.assertEqual(response.status_code, 200) + expected = {"error": "ok", + "info": {'id': self.customr.id, + 'name': self.customr.name, + 'base_recipe_id': self.recipe1.id, + 'project_id': self.project.id}} + self.assertEqual(json.loads(response.content.decode('utf-8')), + expected) + + def test_xhr_custom_del(self): + """Test deleting custom recipe""" + name = "to be deleted" + recipe = CustomImageRecipe.objects.create( + name=name, project=self.project, + base_recipe=self.recipe1, + file_path="/tmp/testing", + layer_version=self.customr.layer_version) + url = reverse('xhr_customrecipe_id', args=(recipe.id,)) + response = self.client.delete(url) + self.assertEqual(response.status_code, 200) + + gotoUrl = reverse('projectcustomimages', args=(self.project.pk,)) + + self.assertEqual(json.loads(response.content.decode('utf-8')), + {"error": "ok", + "gotoUrl": gotoUrl}) + + # try to delete not-existent recipe + url = reverse('xhr_customrecipe_id', args=(recipe.id,)) + response = self.client.delete(url) + self.assertEqual(response.status_code, 200) + self.assertNotEqual(json.loads( + response.content.decode('utf-8'))["error"], "ok") + + def test_xhr_custom_packages(self): + """Test adding and deleting package to a custom recipe""" + # add self.package to recipe + response = self.client.put(reverse('xhr_customrecipe_packages', + args=(self.customr.id, + self.cust_package.id))) + + self.assertEqual(response.status_code, 200) + self.assertEqual(json.loads(response.content.decode('utf-8')), + {"error": "ok"}) + self.assertEqual(self.customr.appends_set.first().name, + self.cust_package.name) + # delete it + to_delete = self.customr.appends_set.first().pk + del_url = reverse('xhr_customrecipe_packages', + args=(self.customr.id, to_delete)) + + response = self.client.delete(del_url) + self.assertEqual(response.status_code, 200) + self.assertEqual(json.loads(response.content.decode('utf-8')), + {"error": "ok"}) + all_packages = self.customr.get_all_packages().values_list('pk', + flat=True) + + self.assertFalse(to_delete in all_packages) + # delete invalid package to test error condition + del_url = reverse('xhr_customrecipe_packages', + args=(self.customr.id, + 99999)) + + response = self.client.delete(del_url) + self.assertEqual(response.status_code, 200) + self.assertNotEqual(json.loads( + response.content.decode('utf-8'))["error"], "ok") + + def test_xhr_custom_packages_err(self): + """Test error conditions of xhr_customrecipe_packages""" + # test calls with wrong recipe id and wrong package id + for args in [(0, self.package.id), (self.customr.id, 0)]: + url = reverse('xhr_customrecipe_packages', args=args) + # test put and delete methods + for method in (self.client.put, self.client.delete): + response = method(url) + self.assertEqual(response.status_code, 200) + self.assertNotEqual(json.loads( + response.content.decode('utf-8')), + {"error": "ok"}) + + def test_download_custom_recipe(self): + """Download the recipe file generated for the custom image""" + + # Create a dummy recipe file for the custom image generation to read + open("/tmp/a_recipe.bb", 'a').close() + response = self.client.get(reverse('customrecipedownload', + args=(self.project.id, + self.customr.id))) + + self.assertEqual(response.status_code, 200) + + def test_software_recipes_table(self): + """Test structure returned for Software RecipesTable""" + 
table = SoftwareRecipesTable() + request = RequestFactory().get('/foo/', {'format': 'json'}) + response = table.get(request, pid=self.project.id) + data = json.loads(response.content.decode('utf-8')) + + recipes = Recipe.objects.filter(Q(is_image=False)) + self.assertTrue(len(recipes) > 1, + "Need more than one software recipe to test " + "SoftwareRecipesTable") + + recipe1 = recipes[0] + recipe2 = recipes[1] + + rows = data['rows'] + row1 = next(x for x in rows if x['name'] == recipe1.name) + row2 = next(x for x in rows if x['name'] == recipe2.name) + + self.assertEqual(response.status_code, 200, 'should be 200 OK status') + + # check other columns have been populated correctly + self.assertTrue(recipe1.name in row1['name']) + self.assertTrue(recipe1.version in row1['version']) + self.assertTrue(recipe1.description in + row1['get_description_or_summary']) + + self.assertTrue(recipe1.layer_version.layer.name in + row1['layer_version__layer__name']) + + self.assertTrue(recipe2.name in row2['name']) + self.assertTrue(recipe2.version in row2['version']) + self.assertTrue(recipe2.description in + row2['get_description_or_summary']) + + self.assertTrue(recipe2.layer_version.layer.name in + row2['layer_version__layer__name']) + + def test_toaster_tables(self): + """Test all ToasterTables instances""" + + def get_data(table, options={}): + """Send a request and parse the json response""" + options['format'] = "json" + options['nocache'] = "true" + request = RequestFactory().get('/', options) + + # This is the image recipe needed for a package list for + # PackagesTable do this here to throw a non exist exception + image_recipe = Recipe.objects.get(pk=4) + + # Add any kwargs that are needed by any of the possible tables + args = {'pid': self.project.id, + 'layerid': self.lver.pk, + 'recipeid': self.recipe1.pk, + 'recipe_id': image_recipe.pk, + 'custrecipeid': self.customr.pk, + 'build_id': 1, + 'target_id': 1} + + response = table.get(request, **args) + return json.loads(response.content.decode('utf-8')) + + def get_text_from_td(td): + """If we have html in the td then extract the text portion""" + # just so we don't waste time parsing non html + if "<" not in td: + ret = td + else: + ret = BeautifulSoup(td, "html.parser").text + + if len(ret): + return "0" + else: + return ret + + # Get a list of classes in tables module + tables = inspect.getmembers(toastergui.tables, inspect.isclass) + tables.extend(inspect.getmembers(toastergui.buildtables, + inspect.isclass)) + + for name, table_cls in tables: + # Filter out the non ToasterTables from the tables module + if not issubclass(table_cls, toastergui.widgets.ToasterTable) or \ + table_cls == toastergui.widgets.ToasterTable or \ + 'Mixin' in name: + continue + + # Get the table data without any options, this also does the + # initialisation of the table i.e. 
setup_columns, + # setup_filters and setup_queryset that we can use later + table = table_cls() + all_data = get_data(table) + + self.assertTrue(len(all_data['rows']) > 1, + "Cannot test on a %s table with < 1 row" % name) + + if table.default_orderby: + row_one = get_text_from_td( + all_data['rows'][0][table.default_orderby.strip("-")]) + row_two = get_text_from_td( + all_data['rows'][1][table.default_orderby.strip("-")]) + + if '-' in table.default_orderby: + self.assertTrue(row_one >= row_two, + "Default ordering not working on %s" + " '%s' should be >= '%s'" % + (name, row_one, row_two)) + else: + self.assertTrue(row_one <= row_two, + "Default ordering not working on %s" + " '%s' should be <= '%s'" % + (name, row_one, row_two)) + + # Test the column ordering and filtering functionality + for column in table.columns: + if column['orderable']: + # If a column is orderable test it in both order + # directions ordering on the columns field_name + ascending = get_data(table_cls(), + {"orderby": column['field_name']}) + + row_one = get_text_from_td( + ascending['rows'][0][column['field_name']]) + row_two = get_text_from_td( + ascending['rows'][1][column['field_name']]) + + self.assertTrue(row_one <= row_two, + "Ascending sort applied but row 0: \"%s\"" + " is less than row 1: \"%s\" " + "%s %s " % + (row_one, row_two, + column['field_name'], name)) + + descending = get_data(table_cls(), + {"orderby": + '-'+column['field_name']}) + + row_one = get_text_from_td( + descending['rows'][0][column['field_name']]) + row_two = get_text_from_td( + descending['rows'][1][column['field_name']]) + + self.assertTrue(row_one >= row_two, + "Descending sort applied but row 0: %s" + "is greater than row 1: %s" + "field %s table %s" % + (row_one, + row_two, + column['field_name'], name)) + + # If the two start rows are the same we haven't actually + # changed the order + self.assertNotEqual(ascending['rows'][0], + descending['rows'][0], + "An orderby %s has not changed the " + "order of the data in table %s" % + (column['field_name'], name)) + + if column['filter_name']: + # If a filter is available for the column get the filter + # info. This contains what filter actions are defined. + filter_info = get_data(table_cls(), + {"cmd": "filterinfo", + "name": column['filter_name']}) + self.assertTrue(len(filter_info['filter_actions']) > 0, + "Filter %s was defined but no actions " + "added to it" % column['filter_name']) + + for filter_action in filter_info['filter_actions']: + # filter string to pass as the option + # This is the name of the filter:action + # e.g. 
project_filter:not_in_project + filter_string = "%s:%s" % ( + column['filter_name'], + filter_action['action_name']) + # Now get the data with the filter applied + filtered_data = get_data(table_cls(), + {"filter": filter_string}) + + # date range filter actions can't specify the + # number of results they return, so their count is 0 + if filter_action['count'] is not None: + self.assertEqual( + len(filtered_data['rows']), + int(filter_action['count']), + "We added a table filter for %s but " + "the number of rows returned was not " + "what the filter info said there " + "would be" % name) + + # Test search functionality on the table + something_found = False + for search in list(string.ascii_letters): + search_data = get_data(table_cls(), {'search': search}) + + if len(search_data['rows']) > 0: + something_found = True + break + + self.assertTrue(something_found, + "We went through the whole alphabet and nothing" + " was found for the search of table %s" % name) + + # Test the limit functionality on the table + limited_data = get_data(table_cls(), {'limit': "1"}) + self.assertEqual(len(limited_data['rows']), + 1, + "Limit 1 set on table %s but not 1 row returned" + % name) + + # Test the pagination functionality on the table + page_one_data = get_data(table_cls(), {'limit': "1", + "page": "1"})['rows'][0] + + page_two_data = get_data(table_cls(), {'limit': "1", + "page": "2"})['rows'][0] + + self.assertNotEqual(page_one_data, + page_two_data, + "Changed page on table %s but first row is" + " the same as the previous page" % name) diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/api.py b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/api.py index ae1f15077..1a6507c3f 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/api.py +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/api.py @@ -20,6 +20,7 @@ import re import logging +import json from collections import Counter from orm.models import Project, ProjectTarget, Build, Layer_Version @@ -136,14 +137,63 @@ class XhrBuildRequest(View): class XhrLayer(View): - """ Get and Update Layer information """ + """ Delete, Get, Add and Update Layer information + + Methods: GET POST DELETE PUT + """ + + def get(self, request, *args, **kwargs): + """ + Get layer information + + Method: GET + Entry point: /xhr_layer// + """ + + try: + layer_version = Layer_Version.objects.get( + pk=kwargs['layerversion_id']) + + project = Project.objects.get(pk=kwargs['pid']) + + project_layers = ProjectLayer.objects.filter( + project=project).values_list("layercommit_id", + flat=True) + + ret = { + 'error': 'ok', + 'id': layer_version.pk, + 'name': layer_version.layer.name, + 'layerdetailurl': + layer_version.get_detailspage_url(project.pk), + 'vcs_ref': layer_version.get_vcs_reference(), + 'vcs_url': layer_version.layer.vcs_url, + 'local_source_dir': layer_version.layer.local_source_dir, + 'layerdeps': { + "list": [ + { + "id": dep.id, + "name": dep.layer.name, + "layerdetailurl": + dep.get_detailspage_url(project.pk), + "vcs_url": dep.layer.vcs_url, + "vcs_reference": dep.get_vcs_reference() + } + for dep in layer_version.get_alldeps(project.id)] + }, + 'projectlayers': list(project_layers) + } + + return JsonResponse(ret) + except Layer_Version.DoesNotExist: + error_response("No such layer") def post(self, request, *args, **kwargs): """ Update a layer - Entry point: /xhr_layer/ Method: POST + Entry point: /xhr_layer/ Args: vcs_url, dirpath, commit, up_branch, summary, description, @@ -201,9 +251,100 @@ class 
XhrLayer(View): return error_response("Could not update layer version entry: %s" % e) - return JsonResponse({"error": "ok"}) + return error_response("ok") + + def put(self, request, *args, **kwargs): + """ Add a new layer + + Method: PUT + Entry point: /xhr_layer// + Args: + project_id, name, + [vcs_url, dir_path, git_ref], [local_source_dir], [layer_deps + (csv)] + + """ + try: + project = Project.objects.get(pk=kwargs['pid']) + + layer_data = json.loads(request.body.decode('utf-8')) + + # We require a unique layer name as otherwise the lists of layers + # becomes very confusing + existing_layers = \ + project.get_all_compatible_layer_versions().values_list( + "layer__name", + flat=True) + + add_to_project = False + layer_deps_added = [] + if 'add_to_project' in layer_data: + add_to_project = True + + if layer_data['name'] in existing_layers: + return JsonResponse({"error": "layer-name-exists"}) + + layer = Layer.objects.create(name=layer_data['name']) + + layer_version = Layer_Version.objects.create( + layer=layer, + project=project, + layer_source=LayerSource.TYPE_IMPORTED) + + # Local layer + if ('local_source_dir' in layer_data) and layer.local_source_dir: + layer.local_source_dir = layer_data['local_source_dir'] + # git layer + elif 'vcs_url' in layer_data: + layer.vcs_url = layer_data['vcs_url'] + layer_version.dirpath = layer_data['dir_path'] + layer_version.commit = layer_data['git_ref'] + layer_version.branch = layer_data['git_ref'] + + layer.save() + layer_version.save() + + if add_to_project: + ProjectLayer.objects.get_or_create( + layercommit=layer_version, project=project) + + # Add the layer dependencies + if 'layer_deps' in layer_data: + for layer_dep_id in layer_data['layer_deps'].split(","): + layer_dep = Layer_Version.objects.get(pk=layer_dep_id) + LayerVersionDependency.objects.get_or_create( + layer_version=layer_version, depends_on=layer_dep) + + # Add layer deps to the project if specified + if add_to_project: + created, pl = ProjectLayer.objects.get_or_create( + layercommit=layer_dep, project=project) + layer_deps_added.append( + {'name': layer_dep.layer.name, + 'layerdetailurl': + layer_dep.get_detailspage_url(project.pk)}) + + except Layer_Version.DoesNotExist: + return error_response("layer-dep-not-found") + except Project.DoesNotExist: + return error_response("project-not-found") + except KeyError: + return error_response("incorrect-parameters") + + return JsonResponse({'error': "ok", + 'imported_layer': { + 'name': layer.name, + 'layerdetailurl': + layer_version.get_detailspage_url()}, + 'deps_added': layer_deps_added}) def delete(self, request, *args, **kwargs): + """ Delete an imported layer + + Method: DELETE + Entry point: /xhr_layer// + + """ try: # We currently only allow Imported layers to be deleted layer_version = Layer_Version.objects.get( @@ -291,10 +432,13 @@ class XhrCustomRecipe(View): return error_response("recipe-already-exists") # create layer 'Custom layer' and verion if needed - layer = Layer.objects.get_or_create( + layer, l_created = Layer.objects.get_or_create( name=CustomImageRecipe.LAYER_NAME, - summary="Layer for custom recipes", - vcs_url="file:///toaster_created_layer")[0] + summary="Layer for custom recipes") + + if l_created: + layer.local_source_dir = "toaster_created_layer" + layer.save() # Check if we have a layer version already # We don't use get_or_create here because the dirpath will change @@ -303,9 +447,10 @@ class XhrCustomRecipe(View): Q(layer=layer) & Q(build=None)).last() if lver is None: - lver, created = 
Layer_Version.objects.get_or_create( + lver, lv_created = Layer_Version.objects.get_or_create( project=params['project'], layer=layer, + layer_source=LayerSource.TYPE_LOCAL, dirpath="toaster_created_layer") # Add a dependency on our layer to the base recipe's layer @@ -319,7 +464,7 @@ class XhrCustomRecipe(View): optional=False) # Create the actual recipe - recipe, created = CustomImageRecipe.objects.get_or_create( + recipe, r_created = CustomImageRecipe.objects.get_or_create( name=request.POST["name"], base_recipe=params["base"], project=params["project"], @@ -329,7 +474,7 @@ class XhrCustomRecipe(View): # If we created the object then setup these fields. They may get # overwritten later on and cause the get_or_create to create a # duplicate if they've changed. - if created: + if r_created: recipe.file_path = request.POST["name"] recipe.license = "MIT" recipe.version = "0.1" @@ -789,6 +934,9 @@ class XhrProject(View): "url": layer.layercommit.layer.layer_index_url, "layerdetailurl": layer.layercommit.get_detailspage_url( project.pk), + "xhrLayerUrl": reverse("xhr_layer", + args=(project.pk, + layer.layercommit.pk)), "layersource": layer.layercommit.layer_source }) diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/buildtables.py b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/buildtables.py index dd0a6900d..755a7c2e4 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/buildtables.py +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/buildtables.py @@ -571,6 +571,7 @@ class BuildTimeTable(BuildTasksTable): super(BuildTimeTable, self).setup_columns(**kwargs) self.columns[self.toggle_columns['order']]['hidden'] = True + self.columns[self.toggle_columns['order']]['hideable'] = True self.columns[self.toggle_columns['sstate_result']]['hidden'] = True self.columns[self.toggle_columns['elapsed_time']]['hidden'] = False @@ -586,6 +587,7 @@ class BuildCPUTimeTable(BuildTasksTable): super(BuildCPUTimeTable, self).setup_columns(**kwargs) self.columns[self.toggle_columns['order']]['hidden'] = True + self.columns[self.toggle_columns['order']]['hideable'] = True self.columns[self.toggle_columns['sstate_result']]['hidden'] = True self.columns[self.toggle_columns['cpu_time_sys']]['hidden'] = False self.columns[self.toggle_columns['cpu_time_user']]['hidden'] = False @@ -602,5 +604,6 @@ class BuildIOTable(BuildTasksTable): super(BuildIOTable, self).setup_columns(**kwargs) self.columns[self.toggle_columns['order']]['hidden'] = True + self.columns[self.toggle_columns['order']]['hideable'] = True self.columns[self.toggle_columns['sstate_result']]['hidden'] = True self.columns[self.toggle_columns['disk_io']]['hidden'] = False diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/css/default.css b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/css/default.css index ff24e8c1a..5cd7e211a 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/css/default.css +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/css/default.css @@ -210,7 +210,6 @@ fieldset.fields-apart-from-layer-name { margin-top: 20px; } #import-layer-name, #layer-subdir { width: 20%; } #layer-git-repo-url { width: 40%; } -#layer-git-ref { width: 32%; } #local-dir-path { width: 45%; } #layer-dependency { width: 16em; } #layer-deps-list { margin-top: 0; } diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/css/prettify.css 
b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/css/prettify.css deleted file mode 100755 index b317a7cda..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/css/prettify.css +++ /dev/null @@ -1 +0,0 @@ -.pln{color:#000}@media screen{.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec,.var{color:#606}.fun{color:red}}@media print,projection{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee} diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/customrecipe.js b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/customrecipe.js index 9ea960288..8b1c190df 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/customrecipe.js +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/customrecipe.js @@ -312,5 +312,11 @@ function customRecipePageInit(ctx) { }); }); + /* Stop the download link from working if it is in disabled state + * http://getbootstrap.com/css/#forms-disabled-fieldsets + */ + $("a[disabled=disabled]").click(function(e){ + e.preventDefault(); + }); } diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/highlight.pack.js b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/highlight.pack.js new file mode 100644 index 000000000..8cc886f1b --- /dev/null +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/highlight.pack.js @@ -0,0 +1,2 @@ +/*! 
highlight.js v9.12.0 | BSD3 License | git.io/hljslicense */ +!function(e){var n="object"==typeof window&&window||"object"==typeof self&&self;"undefined"!=typeof exports?e(exports):n&&(n.hljs=e({}),"function"==typeof define&&define.amd&&define([],function(){return n.hljs}))}(function(e){function n(e){return e.replace(/&/g,"&").replace(//g,">")}function t(e){return e.nodeName.toLowerCase()}function r(e,n){var t=e&&e.exec(n);return t&&0===t.index}function a(e){return k.test(e)}function i(e){var n,t,r,i,o=e.className+" ";if(o+=e.parentNode?e.parentNode.className:"",t=B.exec(o))return w(t[1])?t[1]:"no-highlight";for(o=o.split(/\s+/),n=0,r=o.length;r>n;n++)if(i=o[n],a(i)||w(i))return i}function o(e){var n,t={},r=Array.prototype.slice.call(arguments,1);for(n in e)t[n]=e[n];return r.forEach(function(e){for(n in e)t[n]=e[n]}),t}function u(e){var n=[];return function r(e,a){for(var i=e.firstChild;i;i=i.nextSibling)3===i.nodeType?a+=i.nodeValue.length:1===i.nodeType&&(n.push({event:"start",offset:a,node:i}),a=r(i,a),t(i).match(/br|hr|img|input/)||n.push({event:"stop",offset:a,node:i}));return a}(e,0),n}function c(e,r,a){function i(){return e.length&&r.length?e[0].offset!==r[0].offset?e[0].offset"}function u(e){s+=""}function c(e){("start"===e.event?o:u)(e.node)}for(var l=0,s="",f=[];e.length||r.length;){var g=i();if(s+=n(a.substring(l,g[0].offset)),l=g[0].offset,g===e){f.reverse().forEach(u);do c(g.splice(0,1)[0]),g=i();while(g===e&&g.length&&g[0].offset===l);f.reverse().forEach(o)}else"start"===g[0].event?f.push(g[0].node):f.pop(),c(g.splice(0,1)[0])}return s+n(a.substr(l))}function l(e){return e.v&&!e.cached_variants&&(e.cached_variants=e.v.map(function(n){return o(e,{v:null},n)})),e.cached_variants||e.eW&&[o(e)]||[e]}function s(e){function n(e){return e&&e.source||e}function t(t,r){return new RegExp(n(t),"m"+(e.cI?"i":"")+(r?"g":""))}function r(a,i){if(!a.compiled){if(a.compiled=!0,a.k=a.k||a.bK,a.k){var o={},u=function(n,t){e.cI&&(t=t.toLowerCase()),t.split(" ").forEach(function(e){var t=e.split("|");o[t[0]]=[n,t[1]?Number(t[1]):1]})};"string"==typeof a.k?u("keyword",a.k):x(a.k).forEach(function(e){u(e,a.k[e])}),a.k=o}a.lR=t(a.l||/\w+/,!0),i&&(a.bK&&(a.b="\\b("+a.bK.split(" ").join("|")+")\\b"),a.b||(a.b=/\B|\b/),a.bR=t(a.b),a.e||a.eW||(a.e=/\B|\b/),a.e&&(a.eR=t(a.e)),a.tE=n(a.e)||"",a.eW&&i.tE&&(a.tE+=(a.e?"|":"")+i.tE)),a.i&&(a.iR=t(a.i)),null==a.r&&(a.r=1),a.c||(a.c=[]),a.c=Array.prototype.concat.apply([],a.c.map(function(e){return l("self"===e?a:e)})),a.c.forEach(function(e){r(e,a)}),a.starts&&r(a.starts,i);var c=a.c.map(function(e){return e.bK?"\\.?("+e.b+")\\.?":e.b}).concat([a.tE,a.i]).map(n).filter(Boolean);a.t=c.length?t(c.join("|"),!0):{exec:function(){return null}}}}r(e)}function f(e,t,a,i){function o(e,n){var t,a;for(t=0,a=n.c.length;a>t;t++)if(r(n.c[t].bR,e))return n.c[t]}function u(e,n){if(r(e.eR,n)){for(;e.endsParent&&e.parent;)e=e.parent;return e}return e.eW?u(e.parent,n):void 0}function c(e,n){return!a&&r(n.iR,e)}function l(e,n){var t=N.cI?n[0].toLowerCase():n[0];return e.k.hasOwnProperty(t)&&e.k[t]}function p(e,n,t,r){var a=r?"":I.classPrefix,i='',i+n+o}function h(){var e,t,r,a;if(!E.k)return n(k);for(a="",t=0,E.lR.lastIndex=0,r=E.lR.exec(k);r;)a+=n(k.substring(t,r.index)),e=l(E,r),e?(B+=e[1],a+=p(e[0],n(r[0]))):a+=n(r[0]),t=E.lR.lastIndex,r=E.lR.exec(k);return a+n(k.substr(t))}function d(){var e="string"==typeof E.sL;if(e&&!y[E.sL])return n(k);var t=e?f(E.sL,k,!0,x[E.sL]):g(k,E.sL.length?E.sL:void 0);return E.r>0&&(B+=t.r),e&&(x[E.sL]=t.top),p(t.language,t.value,!1,!0)}function 
b(){L+=null!=E.sL?d():h(),k=""}function v(e){L+=e.cN?p(e.cN,"",!0):"",E=Object.create(e,{parent:{value:E}})}function m(e,n){if(k+=e,null==n)return b(),0;var t=o(n,E);if(t)return t.skip?k+=n:(t.eB&&(k+=n),b(),t.rB||t.eB||(k=n)),v(t,n),t.rB?0:n.length;var r=u(E,n);if(r){var a=E;a.skip?k+=n:(a.rE||a.eE||(k+=n),b(),a.eE&&(k=n));do E.cN&&(L+=C),E.skip||(B+=E.r),E=E.parent;while(E!==r.parent);return r.starts&&v(r.starts,""),a.rE?0:n.length}if(c(n,E))throw new Error('Illegal lexeme "'+n+'" for mode "'+(E.cN||"")+'"');return k+=n,n.length||1}var N=w(e);if(!N)throw new Error('Unknown language: "'+e+'"');s(N);var R,E=i||N,x={},L="";for(R=E;R!==N;R=R.parent)R.cN&&(L=p(R.cN,"",!0)+L);var k="",B=0;try{for(var M,j,O=0;;){if(E.t.lastIndex=O,M=E.t.exec(t),!M)break;j=m(t.substring(O,M.index),M[0]),O=M.index+j}for(m(t.substr(O)),R=E;R.parent;R=R.parent)R.cN&&(L+=C);return{r:B,value:L,language:e,top:E}}catch(T){if(T.message&&-1!==T.message.indexOf("Illegal"))return{r:0,value:n(t)};throw T}}function g(e,t){t=t||I.languages||x(y);var r={r:0,value:n(e)},a=r;return t.filter(w).forEach(function(n){var t=f(n,e,!1);t.language=n,t.r>a.r&&(a=t),t.r>r.r&&(a=r,r=t)}),a.language&&(r.second_best=a),r}function p(e){return I.tabReplace||I.useBR?e.replace(M,function(e,n){return I.useBR&&"\n"===e?"
":I.tabReplace?n.replace(/\t/g,I.tabReplace):""}):e}function h(e,n,t){var r=n?L[n]:t,a=[e.trim()];return e.match(/\bhljs\b/)||a.push("hljs"),-1===e.indexOf(r)&&a.push(r),a.join(" ").trim()}function d(e){var n,t,r,o,l,s=i(e);a(s)||(I.useBR?(n=document.createElementNS("http://www.w3.org/1999/xhtml","div"),n.innerHTML=e.innerHTML.replace(/\n/g,"").replace(//g,"\n")):n=e,l=n.textContent,r=s?f(s,l,!0):g(l),t=u(n),t.length&&(o=document.createElementNS("http://www.w3.org/1999/xhtml","div"),o.innerHTML=r.value,r.value=c(t,u(o),l)),r.value=p(r.value),e.innerHTML=r.value,e.className=h(e.className,s,r.language),e.result={language:r.language,re:r.r},r.second_best&&(e.second_best={language:r.second_best.language,re:r.second_best.r}))}function b(e){I=o(I,e)}function v(){if(!v.called){v.called=!0;var e=document.querySelectorAll("pre code");E.forEach.call(e,d)}}function m(){addEventListener("DOMContentLoaded",v,!1),addEventListener("load",v,!1)}function N(n,t){var r=y[n]=t(e);r.aliases&&r.aliases.forEach(function(e){L[e]=n})}function R(){return x(y)}function w(e){return e=(e||"").toLowerCase(),y[e]||y[L[e]]}var E=[],x=Object.keys,y={},L={},k=/^(no-?highlight|plain|text)$/i,B=/\blang(?:uage)?-([\w-]+)\b/i,M=/((^(<[^>]+>|\t|)+|(?:\n)))/gm,C="
",I={classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:void 0};return e.highlight=f,e.highlightAuto=g,e.fixMarkup=p,e.highlightBlock=d,e.configure=b,e.initHighlighting=v,e.initHighlightingOnLoad=m,e.registerLanguage=N,e.listLanguages=R,e.getLanguage=w,e.inherit=o,e.IR="[a-zA-Z]\\w*",e.UIR="[a-zA-Z_]\\w*",e.NR="\\b\\d+(\\.\\d+)?",e.CNR="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",e.BNR="\\b(0b[01]+)",e.RSR="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",e.BE={b:"\\\\[\\s\\S]",r:0},e.ASM={cN:"string",b:"'",e:"'",i:"\\n",c:[e.BE]},e.QSM={cN:"string",b:'"',e:'"',i:"\\n",c:[e.BE]},e.PWM={b:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},e.C=function(n,t,r){var a=e.inherit({cN:"comment",b:n,e:t,c:[]},r||{});return a.c.push(e.PWM),a.c.push({cN:"doctag",b:"(?:TODO|FIXME|NOTE|BUG|XXX):",r:0}),a},e.CLCM=e.C("//","$"),e.CBCM=e.C("/\\*","\\*/"),e.HCM=e.C("#","$"),e.NM={cN:"number",b:e.NR,r:0},e.CNM={cN:"number",b:e.CNR,r:0},e.BNM={cN:"number",b:e.BNR,r:0},e.CSSNM={cN:"number",b:e.NR+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",r:0},e.RM={cN:"regexp",b:/\//,e:/\/[gimuy]*/,i:/\n/,c:[e.BE,{b:/\[/,e:/\]/,r:0,c:[e.BE]}]},e.TM={cN:"title",b:e.IR,r:0},e.UTM={cN:"title",b:e.UIR,r:0},e.METHOD_GUARD={b:"\\.\\s*"+e.UIR,r:0},e});hljs.registerLanguage("sql",function(e){var t=e.C("--","$");return{cI:!0,i:/[<>{}*#]/,c:[{bK:"begin end start commit rollback savepoint lock alter create drop rename call delete do handler insert load replace select truncate update set show pragma grant merge describe use explain help declare prepare execute deallocate release unlock purge reset change stop analyze cache flush optimize repair kill install uninstall checksum restore check backup revoke comment",e:/;/,eW:!0,l:/[\w\.]+/,k:{keyword:"abort abs absolute acc acce accep accept access accessed accessible account acos action activate add addtime admin administer advanced advise aes_decrypt aes_encrypt after agent aggregate ali alia alias allocate allow alter always analyze ancillary and any anydata anydataset anyschema anytype apply archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan atn2 attr attri attrib attribu attribut attribute attributes audit authenticated authentication authid authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both bound buffer_cache buffer_pool build bulk by byte byteordermark bytes cache caching call calling cancel capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base char_length character_length characters characterset charindex charset charsetform charsetid check checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close cluster_id cluster_probability cluster_set clustering coalesce coercibility col collate collation collect colu colum column column_value columns columns_updated comment commit compact compatibility compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn connec connect connect_by_iscycle connect_by_isleaf 
connect_by_root connect_time connection consider consistent constant constraint constraints constructor container content contents context contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost count count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create creation critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user cursor curtime customdatum cycle data database databases datafile datafiles datalength date_add date_cache date_format date_sub dateadd datediff datefromparts datename datepart datetime2fromparts day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults deferred defi defin define degrees delayed delegate delete delete_all delimited demand dense_rank depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor deterministic diagnostics difference dimension direct_load directory disable disable_all disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div do document domain dotnet double downgrade drop dumpfile duplicate duration each edition editionable editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors escaped evalname evaluate event eventdata events except exception exceptions exchange exclude excluding execu execut execute exempt exists exit exp expire explain export export_set extended extent external external_1 external_2 externally extract failed failed_login_attempts failover failure far fast feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final finish first first_value fixed flash_cache flashback floor flush following follows for forall force form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ftp full function general generated get get_format get_lock getdate getutcdate global global_name globally go goto grant grants greatest group group_concat group_id grouping grouping_id groups gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex hierarchy high high_priority hosts hour http id ident_current ident_incr ident_seed identified identity idle_time if ifnull ignore iif ilike ilm immediate import in include including increment index indexes indexing indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile initial initialized initially initrans inmemory inner innodb input insert install instance instantiable instr interface interleaved intersect into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists keep keep_duplicates key keys kill language large last last_day last_insert_id last_value lax lcase lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call logoff logon logs long loop low low_priority lower lpad lrtrim ltrim main make_set makedate maketime managed management manual map mapping mask master 
master_pos_wait match matched materialized max maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans md5 measures median medium member memcompress memory merge microsecond mid migration min minextents minimum mining minus minute minvalue missing mod mode model modification modify module monitoring month months mount move movement multiset mutex name name_const names nan national native natural nav nchar nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile nomapping nomaxvalue nominimize nominvalue nomonitoring none noneditionable nonschema noorder nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck noswitch not nothing notice notrim novalidate now nowait nth_value nullif nulls num numb numbe nvarchar nvarchar2 object ocicoll ocidate ocidatetime ociduration ociinterval ociloblocator ocinumber ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date oradata ord ordaudio orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary out outer outfile outline output over overflow overriding package pad parallel parallel_enable parameters parent parse partial partition partitions pascal passing password password_grace_time password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction prediction_cost prediction_details prediction_probability prediction_set prepare present preserve prior priority private private_sga privileges procedural procedure procedure_analyze processlist profiles project prompt protection public publishingservername purge quarter query quick quiesce quota quotename radians raise rand range rank raw read reads readsize rebuild record records recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh regexp_like register regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename repair repeat replace replicate replication required reset resetlogs resize resource respect restore restricted result result_cache resumable resume retention return returning returns reuse reverse revoke right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows rtrim rules safe salt sample save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll sdo_georaster sdo_topo_geometry search sec_to_time second section securefile security seed segment select self sequence sequential serializable server servererror session session_user sessions_per_user set sets settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex 
source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone standby start starting startup statement static statistics stats_binomial_test stats_crosstab stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev stop storage store stored str str_to_date straight_join strcmp strict string struct stuff style subdate subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime table tables tablespace tan tdo template temporary terminated tertiary_weights test than then thread through tier ties time time_format time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr timezone_minute timezone_region to to_base64 to_date to_days to_seconds todatetimeoffset trace tracking transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress under undo unhex unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unpivot unrecoverable unsafe unsigned until untrusted unusable unused update updated upgrade upped upper upsert url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray verify version versions view virtual visible void wait wallet warning warnings week weekday weekofyear wellformed when whene whenev wheneve whenever where while whitespace with within without work wrapped xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek",literal:"true false null",built_in:"array bigint binary bit blob boolean char character date dec decimal float int int8 integer interval number numeric real record serial serial8 smallint text varchar varying void"},c:[{cN:"string",b:"'",e:"'",c:[e.BE,{b:"''"}]},{cN:"string",b:'"',e:'"',c:[e.BE,{b:'""'}]},{cN:"string",b:"`",e:"`",c:[e.BE]},e.CNM,e.CBCM,t]},e.CBCM,t]}});hljs.registerLanguage("perl",function(e){var t="getpwent getservent quotemeta msgrcv scalar kill dbmclose undef lc ma syswrite tr send umask sysopen shmwrite vec qx utime local oct semctl localtime readpipe do return format read sprintf dbmopen pop getpgrp not getpwnam rewinddir qqfileno qw endprotoent wait sethostent bless s|0 opendir continue each sleep endgrent shutdown dump chomp connect getsockname die socketpair close flock exists index shmgetsub for endpwent redo lstat msgctl setpgrp abs exit select print ref gethostbyaddr unshift fcntl syscall goto getnetbyaddr join gmtime symlink semget splice x|0 getpeername recv log setsockopt cos last reverse gethostbyname getgrnam study formline endhostent times chop length gethostent getnetent pack getprotoent getservbyname rand mkdir pos chmod y|0 substr endnetent printf next open msgsnd readdir use unlink getsockopt 
getpriority rindex wantarray hex system getservbyport endservent int chr untie rmdir prototype tell listen fork shmread ucfirst setprotoent else sysseek link getgrgid shmctl waitpid unpack getnetbyname reset chdir grep split require caller lcfirst until warn while values shift telldir getpwuid my getprotobynumber delete and sort uc defined srand accept package seekdir getprotobyname semop our rename seek if q|0 chroot sysread setpwent no crypt getc chown sqrt write setnetent setpriority foreach tie sin msgget map stat getlogin unless elsif truncate exec keys glob tied closedirioctl socket readlink eval xor readline binmode setservent eof ord bind alarm pipe atan2 getgrent exp time push setgrent gt lt or ne m|0 break given say state when",r={cN:"subst",b:"[$@]\\{",e:"\\}",k:t},s={b:"->{",e:"}"},n={v:[{b:/\$\d/},{b:/[\$%@](\^\w\b|#\w+(::\w+)*|{\w+}|\w+(::\w*)*)/},{b:/[\$%@][^\s\w{]/,r:0}]},i=[e.BE,r,n],o=[n,e.HCM,e.C("^\\=\\w","\\=cut",{eW:!0}),s,{cN:"string",c:i,v:[{b:"q[qwxr]?\\s*\\(",e:"\\)",r:5},{b:"q[qwxr]?\\s*\\[",e:"\\]",r:5},{b:"q[qwxr]?\\s*\\{",e:"\\}",r:5},{b:"q[qwxr]?\\s*\\|",e:"\\|",r:5},{b:"q[qwxr]?\\s*\\<",e:"\\>",r:5},{b:"qw\\s+q",e:"q",r:5},{b:"'",e:"'",c:[e.BE]},{b:'"',e:'"'},{b:"`",e:"`",c:[e.BE]},{b:"{\\w+}",c:[],r:0},{b:"-?\\w+\\s*\\=\\>",c:[],r:0}]},{cN:"number",b:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",r:0},{b:"(\\/\\/|"+e.RSR+"|\\b(split|return|print|reverse|grep)\\b)\\s*",k:"split return print reverse grep",r:0,c:[e.HCM,{cN:"regexp",b:"(s|tr|y)/(\\\\.|[^/])*/(\\\\.|[^/])*/[a-z]*",r:10},{cN:"regexp",b:"(m|qr)?/",e:"/[a-z]*",c:[e.BE],r:0}]},{cN:"function",bK:"sub",e:"(\\s*\\(.*?\\))?[;{]",eE:!0,r:5,c:[e.TM]},{b:"-\\w\\b",r:0},{b:"^__DATA__$",e:"^__END__$",sL:"mojolicious",c:[{b:"^@@.*",e:"$",cN:"comment"}]}];return r.c=o,s.c=o,{aliases:["pl","pm"],l:/[\w\.]+/,k:t,c:o}});hljs.registerLanguage("ini",function(e){var b={cN:"string",c:[e.BE],v:[{b:"'''",e:"'''",r:10},{b:'"""',e:'"""',r:10},{b:'"',e:'"'},{b:"'",e:"'"}]};return{aliases:["toml"],cI:!0,i:/\S/,c:[e.C(";","$"),e.HCM,{cN:"section",b:/^\s*\[+/,e:/\]+/},{b:/^[a-z0-9\[\]_-]+\s*=\s*/,e:"$",rB:!0,c:[{cN:"attr",b:/[a-z0-9\[\]_-]+/},{b:/=/,eW:!0,r:0,c:[{cN:"literal",b:/\bon|off|true|false|yes|no\b/},{cN:"variable",v:[{b:/\$[\w\d"][\w\d_]*/},{b:/\$\{(.*?)}/}]},b,{cN:"number",b:/([\+\-]+)?[\d]+_[\d_]+/},e.NM]}]}]}});hljs.registerLanguage("diff",function(e){return{aliases:["patch"],c:[{cN:"meta",r:10,v:[{b:/^@@ +\-\d+,\d+ +\+\d+,\d+ +@@$/},{b:/^\*\*\* +\d+,\d+ +\*\*\*\*$/},{b:/^\-\-\- +\d+,\d+ +\-\-\-\-$/}]},{cN:"comment",v:[{b:/Index: /,e:/$/},{b:/={3,}/,e:/$/},{b:/^\-{3}/,e:/$/},{b:/^\*{3} /,e:/$/},{b:/^\+{3}/,e:/$/},{b:/\*{5}/,e:/\*{5}$/}]},{cN:"addition",b:"^\\+",e:"$"},{cN:"deletion",b:"^\\-",e:"$"},{cN:"addition",b:"^\\!",e:"$"}]}});hljs.registerLanguage("bash",function(e){var t={cN:"variable",v:[{b:/\$[\w\d#@][\w\d_]*/},{b:/\$\{(.*?)}/}]},s={cN:"string",b:/"/,e:/"/,c:[e.BE,t,{cN:"variable",b:/\$\(/,e:/\)/,c:[e.BE]}]},a={cN:"string",b:/'/,e:/'/};return{aliases:["sh","zsh"],l:/\b-?[a-z\._]+\b/,k:{keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry 
compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp",_:"-ne -eq -lt -gt -f -d -e -s -l -a"},c:[{cN:"meta",b:/^#![^\n]+sh\s*$/,r:10},{cN:"function",b:/\w[\w\d_]*\s*\(\s*\)\s*\{/,rB:!0,c:[e.inherit(e.TM,{b:/\w[\w\d_]*/})],r:0},e.HCM,s,a,t]}});hljs.registerLanguage("php",function(e){var c={b:"\\$+[a-zA-Z_-ÿ][a-zA-Z0-9_-ÿ]*"},i={cN:"meta",b:/<\?(php)?|\?>/},t={cN:"string",c:[e.BE,i],v:[{b:'b"',e:'"'},{b:"b'",e:"'"},e.inherit(e.ASM,{i:null}),e.inherit(e.QSM,{i:null})]},a={v:[e.BNM,e.CNM]};return{aliases:["php3","php4","php5","php6"],cI:!0,k:"and include_once list abstract global private echo interface as static endswitch array null if endwhile or const for endforeach self var while isset public protected exit foreach throw elseif include __FILE__ empty require_once do xor return parent clone use __CLASS__ __LINE__ else break print eval new catch __METHOD__ case exception default die require __FUNCTION__ enddeclare final try switch continue endfor endif declare unset true false trait goto instanceof insteadof __DIR__ __NAMESPACE__ yield finally",c:[e.HCM,e.C("//","$",{c:[i]}),e.C("/\\*","\\*/",{c:[{cN:"doctag",b:"@[A-Za-z]+"}]}),e.C("__halt_compiler.+?;",!1,{eW:!0,k:"__halt_compiler",l:e.UIR}),{cN:"string",b:/<<<['"]?\w+['"]?$/,e:/^\w+;?$/,c:[e.BE,{cN:"subst",v:[{b:/\$\w+/},{b:/\{\$/,e:/\}/}]}]},i,{cN:"keyword",b:/\$this\b/},c,{b:/(::|->)+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/},{cN:"function",bK:"function",e:/[;{]/,eE:!0,i:"\\$|\\[|%",c:[e.UTM,{cN:"params",b:"\\(",e:"\\)",c:["self",c,e.CBCM,t,a]}]},{cN:"class",bK:"class interface",e:"{",eE:!0,i:/[:\(\$"]/,c:[{bK:"extends implements"},e.UTM]},{bK:"namespace",e:";",i:/[\.']/,c:[e.UTM]},{bK:"use",e:";",c:[e.UTM]},{b:"=>"},t,a]}});hljs.registerLanguage("python",function(e){var r={keyword:"and elif is global as in if from raise for except finally print import pass return exec else break not with class assert yield try while continue del or def lambda async await nonlocal|10 None True False",built_in:"Ellipsis NotImplemented"},b={cN:"meta",b:/^(>>>|\.\.\.) 
/},c={cN:"subst",b:/\{/,e:/\}/,k:r,i:/#/},a={cN:"string",c:[e.BE],v:[{b:/(u|b)?r?'''/,e:/'''/,c:[b],r:10},{b:/(u|b)?r?"""/,e:/"""/,c:[b],r:10},{b:/(fr|rf|f)'''/,e:/'''/,c:[b,c]},{b:/(fr|rf|f)"""/,e:/"""/,c:[b,c]},{b:/(u|r|ur)'/,e:/'/,r:10},{b:/(u|r|ur)"/,e:/"/,r:10},{b:/(b|br)'/,e:/'/},{b:/(b|br)"/,e:/"/},{b:/(fr|rf|f)'/,e:/'/,c:[c]},{b:/(fr|rf|f)"/,e:/"/,c:[c]},e.ASM,e.QSM]},s={cN:"number",r:0,v:[{b:e.BNR+"[lLjJ]?"},{b:"\\b(0o[0-7]+)[lLjJ]?"},{b:e.CNR+"[lLjJ]?"}]},i={cN:"params",b:/\(/,e:/\)/,c:["self",b,s,a]};return c.c=[a,s,b],{aliases:["py","gyp"],k:r,i:/(<\/|->|\?)|=>/,c:[b,s,a,e.HCM,{v:[{cN:"function",bK:"def"},{cN:"class",bK:"class"}],e:/:/,i:/[${=;\n,]/,c:[e.UTM,i,{b:/->/,eW:!0,k:"None"}]},{cN:"meta",b:/^[\t ]*@/,e:/$/},{b:/\b(print|exec)\(/}]}});hljs.registerLanguage("coffeescript",function(e){var c={keyword:"in if for while finally new do return else break catch instanceof throw try this switch continue typeof delete debugger super yield import export from as default await then unless until loop of by when and or is isnt not",literal:"true false null undefined yes no on off",built_in:"npm require console print module global window document"},n="[A-Za-z$_][0-9A-Za-z$_]*",r={cN:"subst",b:/#\{/,e:/}/,k:c},i=[e.BNM,e.inherit(e.CNM,{starts:{e:"(\\s*/)?",r:0}}),{cN:"string",v:[{b:/'''/,e:/'''/,c:[e.BE]},{b:/'/,e:/'/,c:[e.BE]},{b:/"""/,e:/"""/,c:[e.BE,r]},{b:/"/,e:/"/,c:[e.BE,r]}]},{cN:"regexp",v:[{b:"///",e:"///",c:[r,e.HCM]},{b:"//[gim]*",r:0},{b:/\/(?![ *])(\\\/|.)*?\/[gim]*(?=\W|$)/}]},{b:"@"+n},{sL:"javascript",eB:!0,eE:!0,v:[{b:"```",e:"```"},{b:"`",e:"`"}]}];r.c=i;var s=e.inherit(e.TM,{b:n}),t="(\\(.*\\))?\\s*\\B[-=]>",o={cN:"params",b:"\\([^\\(]",rB:!0,c:[{b:/\(/,e:/\)/,k:c,c:["self"].concat(i)}]};return{aliases:["coffee","cson","iced"],k:c,i:/\/\*/,c:i.concat([e.C("###","###"),e.HCM,{cN:"function",b:"^\\s*"+n+"\\s*=\\s*"+t,e:"[-=]>",rB:!0,c:[s,o]},{b:/[:\(,=]\s*/,r:0,c:[{cN:"function",b:t,e:"[-=]>",rB:!0,c:[o]}]},{cN:"class",bK:"class",e:"$",i:/[:="\[\]]/,c:[{bK:"extends",eW:!0,i:/[:="\[\]]/,c:[s]},s]},{b:n+":",e:":",rB:!0,rE:!0,r:0}])}});hljs.registerLanguage("cpp",function(t){var e={cN:"keyword",b:"\\b[a-z\\d_]*_t\\b"},r={cN:"string",v:[{b:'(u8?|U)?L?"',e:'"',i:"\\n",c:[t.BE]},{b:'(u8?|U)?R"',e:'"',c:[t.BE]},{b:"'\\\\?.",e:"'",i:"."}]},s={cN:"number",v:[{b:"\\b(0b[01']+)"},{b:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{b:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],r:0},i={cN:"meta",b:/#\s*[a-z]+\b/,e:/$/,k:{"meta-keyword":"if else elif endif define undef warning error line pragma ifdef ifndef include"},c:[{b:/\\\n/,r:0},t.inherit(r,{cN:"meta-string"}),{cN:"meta-string",b:/<[^\n>]*>/,e:/$/,i:"\\n"},t.CLCM,t.CBCM]},a=t.IR+"\\s*\\(",c={keyword:"int float while private char catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using asm case typeid short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignof constexpr decltype noexcept static_assert thread_local restrict _Bool complex _Complex _Imaginary atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and or not",built_in:"std string cin cout cerr clog stdin stdout stderr stringstream 
istringstream ostringstream auto_ptr deque list queue stack vector map set bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap array shared_ptr abort abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr",literal:"true false nullptr NULL"},n=[e,t.CLCM,t.CBCM,s,r];return{aliases:["c","cc","h","c++","h++","hpp"],k:c,i:"",k:c,c:["self",e]},{b:t.IR+"::",k:c},{v:[{b:/=/,e:/;/},{b:/\(/,e:/\)/},{bK:"new throw return else",e:/;/}],k:c,c:n.concat([{b:/\(/,e:/\)/,k:c,c:n.concat(["self"]),r:0}]),r:0},{cN:"function",b:"("+t.IR+"[\\*&\\s]+)+"+a,rB:!0,e:/[{;=]/,eE:!0,k:c,i:/[^\w\s\*&]/,c:[{b:a,rB:!0,c:[t.TM],r:0},{cN:"params",b:/\(/,e:/\)/,k:c,r:0,c:[t.CLCM,t.CBCM,r,s,e]},t.CLCM,t.CBCM,i]},{cN:"class",bK:"class struct",e:/[{;:]/,c:[{b://,c:["self"]},t.TM]}]),exports:{preprocessor:i,strings:r,k:c}}});hljs.registerLanguage("cs",function(e){var i={keyword:"abstract as base bool break byte case catch char checked const continue decimal default delegate do double enum event explicit extern finally fixed float for foreach goto if implicit in int interface internal is lock long nameof object operator out override params private protected public readonly ref sbyte sealed short sizeof stackalloc static string struct switch this try typeof uint ulong unchecked unsafe ushort using virtual void volatile while add alias ascending async await by descending dynamic equals from get global group into join let on orderby partial remove select set value var where yield",literal:"null false true"},t={cN:"string",b:'@"',e:'"',c:[{b:'""'}]},r=e.inherit(t,{i:/\n/}),a={cN:"subst",b:"{",e:"}",k:i},c=e.inherit(a,{i:/\n/}),n={cN:"string",b:/\$"/,e:'"',i:/\n/,c:[{b:"{{"},{b:"}}"},e.BE,c]},s={cN:"string",b:/\$@"/,e:'"',c:[{b:"{{"},{b:"}}"},{b:'""'},a]},o=e.inherit(s,{i:/\n/,c:[{b:"{{"},{b:"}}"},{b:'""'},c]});a.c=[s,n,t,e.ASM,e.QSM,e.CNM,e.CBCM],c.c=[o,n,r,e.ASM,e.QSM,e.CNM,e.inherit(e.CBCM,{i:/\n/})];var l={v:[s,n,t,e.ASM,e.QSM]},b=e.IR+"(<"+e.IR+"(\\s*,\\s*"+e.IR+")*>)?(\\[\\])?";return{aliases:["csharp"],k:i,i:/::/,c:[e.C("///","$",{rB:!0,c:[{cN:"doctag",v:[{b:"///",r:0},{b:""},{b:""}]}]}),e.CLCM,e.CBCM,{cN:"meta",b:"#",e:"$",k:{"meta-keyword":"if else elif endif define undef warning error line region endregion pragma checksum"}},l,e.CNM,{bK:"class interface",e:/[{;=]/,i:/[^\s:]/,c:[e.TM,e.CLCM,e.CBCM]},{bK:"namespace",e:/[{;=]/,i:/[^\s:]/,c:[e.inherit(e.TM,{b:"[a-zA-Z](\\.?\\w)*"}),e.CLCM,e.CBCM]},{cN:"meta",b:"^\\s*\\[",eB:!0,e:"\\]",eE:!0,c:[{cN:"meta-string",b:/"/,e:/"/}]},{bK:"new return throw await else",r:0},{cN:"function",b:"("+b+"\\s+)+"+e.IR+"\\s*\\(",rB:!0,e:/[{;=]/,eE:!0,k:i,c:[{b:e.IR+"\\s*\\(",rB:!0,c:[e.TM],r:0},{cN:"params",b:/\(/,e:/\)/,eB:!0,eE:!0,k:i,r:0,c:[l,e.CNM,e.CBCM]},e.CLCM,e.CBCM]}]}});hljs.registerLanguage("shell",function(s){return{aliases:["console"],c:[{cN:"meta",b:"^\\s{0,3}[\\w\\d\\[\\]()@-]*[>%$#]",starts:{e:"$",sL:"bash"}}]}});hljs.registerLanguage("ruby",function(e){var b="[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?",r={keyword:"and then defined module in return redo if 
BEGIN retry end for self when next until do begin unless END rescue else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor",literal:"true false nil"},c={cN:"doctag",b:"@[A-Za-z]+"},a={b:"#<",e:">"},s=[e.C("#","$",{c:[c]}),e.C("^\\=begin","^\\=end",{c:[c],r:10}),e.C("^__END__","\\n$")],n={cN:"subst",b:"#\\{",e:"}",k:r},t={cN:"string",c:[e.BE,n],v:[{b:/'/,e:/'/},{b:/"/,e:/"/},{b:/`/,e:/`/},{b:"%[qQwWx]?\\(",e:"\\)"},{b:"%[qQwWx]?\\[",e:"\\]"},{b:"%[qQwWx]?{",e:"}"},{b:"%[qQwWx]?<",e:">"},{b:"%[qQwWx]?/",e:"/"},{b:"%[qQwWx]?%",e:"%"},{b:"%[qQwWx]?-",e:"-"},{b:"%[qQwWx]?\\|",e:"\\|"},{b:/\B\?(\\\d{1,3}|\\x[A-Fa-f0-9]{1,2}|\\u[A-Fa-f0-9]{4}|\\?\S)\b/},{b:/<<(-?)\w+$/,e:/^\s*\w+$/}]},i={cN:"params",b:"\\(",e:"\\)",endsParent:!0,k:r},d=[t,a,{cN:"class",bK:"class module",e:"$|;",i:/=/,c:[e.inherit(e.TM,{b:"[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?"}),{b:"<\\s*",c:[{b:"("+e.IR+"::)?"+e.IR}]}].concat(s)},{cN:"function",bK:"def",e:"$|;",c:[e.inherit(e.TM,{b:b}),i].concat(s)},{b:e.IR+"::"},{cN:"symbol",b:e.UIR+"(\\!|\\?)?:",r:0},{cN:"symbol",b:":(?!\\s)",c:[t,{b:b}],r:0},{cN:"number",b:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",r:0},{b:"(\\$\\W)|((\\$|\\@\\@?)(\\w+))"},{cN:"params",b:/\|/,e:/\|/,k:r},{b:"("+e.RSR+"|unless)\\s*",k:"unless",c:[a,{cN:"regexp",c:[e.BE,n],i:/\n/,v:[{b:"/",e:"/[a-z]*"},{b:"%r{",e:"}[a-z]*"},{b:"%r\\(",e:"\\)[a-z]*"},{b:"%r!",e:"![a-z]*"},{b:"%r\\[",e:"\\][a-z]*"}]}].concat(s),r:0}].concat(s);n.c=d,i.c=d;var l="[>?]>",o="[\\w#]+\\(\\w+\\):\\d+:\\d+>",u="(\\w+-)?\\d+\\.\\d+\\.\\d(p\\d+)?[^>]+>",w=[{b:/^\s*=>/,starts:{e:"$",c:d}},{cN:"meta",b:"^("+l+"|"+o+"|"+u+")",starts:{e:"$",c:d}}];return{aliases:["rb","gemspec","podspec","thor","irb"],k:r,i:/\/\*/,c:s.concat(w).concat(d)}});hljs.registerLanguage("nginx",function(e){var r={cN:"variable",v:[{b:/\$\d+/},{b:/\$\{/,e:/}/},{b:"[\\$\\@]"+e.UIR}]},b={eW:!0,l:"[a-z/_]+",k:{literal:"on off yes no true false none blocked debug info notice warn error crit select break last permanent redirect kqueue rtsig epoll poll /dev/poll"},r:0,i:"=>",c:[e.HCM,{cN:"string",c:[e.BE,r],v:[{b:/"/,e:/"/},{b:/'/,e:/'/}]},{b:"([a-z]+):/",e:"\\s",eW:!0,eE:!0,c:[r]},{cN:"regexp",c:[e.BE,r],v:[{b:"\\s\\^",e:"\\s|{|;",rE:!0},{b:"~\\*?\\s+",e:"\\s|{|;",rE:!0},{b:"\\*(\\.[a-z\\-]+)+"},{b:"([a-z\\-]+\\.)+\\*"}]},{cN:"number",b:"\\b\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?\\b"},{cN:"number",b:"\\b\\d+[kKmMgGdshdwy]*\\b",r:0},r]};return{aliases:["nginxconf"],c:[e.HCM,{b:e.UIR+"\\s+{",rB:!0,e:"{",c:[{cN:"section",b:e.UIR}],r:0},{b:e.UIR+"\\s",e:";|{",rB:!0,c:[{cN:"attribute",b:e.UIR,starts:b}],r:0}],i:"[^\\s\\}]"}});hljs.registerLanguage("css",function(e){var c="[a-zA-Z-][a-zA-Z0-9_-]*",t={b:/[A-Z\_\.\-]+\s*:/,rB:!0,e:";",eW:!0,c:[{cN:"attribute",b:/\S/,e:":",eE:!0,starts:{eW:!0,eE:!0,c:[{b:/[\w-]+\(/,rB:!0,c:[{cN:"built_in",b:/[\w-]+/},{b:/\(/,e:/\)/,c:[e.ASM,e.QSM]}]},e.CSSNM,e.QSM,e.ASM,e.CBCM,{cN:"number",b:"#[0-9A-Fa-f]+"},{cN:"meta",b:"!important"}]}}]};return{cI:!0,i:/[=\/|'\$]/,c:[e.CBCM,{cN:"selector-id",b:/#[A-Za-z0-9_-]+/},{cN:"selector-class",b:/\.[A-Za-z0-9_-]+/},{cN:"selector-attr",b:/\[/,e:/\]/,i:"$"},{cN:"selector-pseudo",b:/:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/},{b:"@(font-face|page)",l:"[a-z-]+",k:"font-face page"},{b:"@",e:"[{;]",i:/:/,c:[{cN:"keyword",b:/\w+/},{b:/\s/,eW:!0,eE:!0,r:0,c:[e.ASM,e.QSM,e.CSSNM]}]},{cN:"selector-tag",b:c,r:0},{b:"{",e:"}",i:/\S/,c:[e.CBCM,t]}]}});hljs.registerLanguage("makefile",function(e){var 
i={cN:"variable",v:[{b:"\\$\\("+e.UIR+"\\)",c:[e.BE]},{b:/\$[@%)?",r="false synchronized int abstract float private char boolean static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports do",s="\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?|\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))([eE][-+]?\\d+)?)[lLfF]?",c={cN:"number",b:s,r:0};return{aliases:["jsp"],k:r,i:/<\/|#/,c:[e.C("/\\*\\*","\\*/",{r:0,c:[{b:/\w+@/,r:0},{cN:"doctag",b:"@[A-Za-z]+"}]}),e.CLCM,e.CBCM,e.ASM,e.QSM,{cN:"class",bK:"class interface",e:/[{;=]/,eE:!0,k:"class interface",i:/[:"\[\]]/,c:[{bK:"extends implements"},e.UTM]},{bK:"new throw return else",r:0},{cN:"function",b:"("+t+"\\s+)+"+e.UIR+"\\s*\\(",rB:!0,e:/[{;=]/,eE:!0,k:r,c:[{b:e.UIR+"\\s*\\(",rB:!0,r:0,c:[e.UTM]},{cN:"params",b:/\(/,e:/\)/,k:r,r:0,c:[e.ASM,e.QSM,e.CNM,e.CBCM]},e.CLCM,e.CBCM]},c,{cN:"meta",b:"@[A-Za-z]+"}]}});hljs.registerLanguage("http",function(e){var t="HTTP/[0-9\\.]+";return{aliases:["https"],i:"\\S",c:[{b:"^"+t,e:"$",c:[{cN:"number",b:"\\b\\d{3}\\b"}]},{b:"^[A-Z]+ (.*?) "+t+"$",rB:!0,e:"$",c:[{cN:"string",b:" ",e:" ",eB:!0,eE:!0},{b:t},{cN:"keyword",b:"[A-Z]+"}]},{cN:"attribute",b:"^\\w",e:": ",eE:!0,i:"\\n|\\s|=",starts:{e:"$",r:0}},{b:"\\n\\n",starts:{sL:[],eW:!0}}]}});hljs.registerLanguage("objectivec",function(e){var t={cN:"built_in",b:"\\b(AV|CA|CF|CG|CI|CL|CM|CN|CT|MK|MP|MTK|MTL|NS|SCN|SK|UI|WK|XC)\\w+"},_={keyword:"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required @encode @package @import @defs @compatibility_alias __bridge __bridge_transfer __bridge_retained __bridge_retain __covariant __contravariant __kindof _Nonnull _Nullable _Null_unspecified __FUNCTION__ __PRETTY_FUNCTION__ __attribute__ getter setter retain unsafe_unretained nonnull nullable null_unspecified null_resettable class instancetype NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE NS_REQUIRES_SUPER NS_RETURNS_INNER_POINTER NS_INLINE NS_AVAILABLE NS_DEPRECATED NS_ENUM NS_OPTIONS NS_SWIFT_UNAVAILABLE NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END NS_REFINED_FOR_SWIFT NS_SWIFT_NAME NS_SWIFT_NOTHROW NS_DURING NS_HANDLER NS_ENDHANDLER NS_VALUERETURN NS_VOIDRETURN",literal:"false true FALSE TRUE nil YES NO NULL",built_in:"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once"},i=/[a-zA-Z@][a-zA-Z0-9_]*/,n="@interface @class @protocol @implementation";return{aliases:["mm","objc","obj-c"],k:_,l:i,i:""}]}]},{cN:"class",b:"("+n.split(" ").join("|")+")\\b",e:"({|$)",eE:!0,k:n,l:i,c:[e.UTM]},{b:"\\."+e.UIR,r:0}]}});hljs.registerLanguage("javascript",function(e){var r="[A-Za-z$_][0-9A-Za-z$_]*",t={keyword:"in of if for while finally var new function do return void else break catch instanceof with throw case default try this 
switch continue typeof delete let yield const export super debugger as async await static import from as",literal:"true false null undefined NaN Infinity",built_in:"eval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent encodeURI encodeURIComponent escape unescape Object Function Boolean Error EvalError InternalError RangeError ReferenceError StopIteration SyntaxError TypeError URIError Number Math Date String RegExp Array Float32Array Float64Array Int16Array Int32Array Int8Array Uint16Array Uint32Array Uint8Array Uint8ClampedArray ArrayBuffer DataView JSON Intl arguments require module console window document Symbol Set Map WeakSet WeakMap Proxy Reflect Promise"},a={cN:"number",v:[{b:"\\b(0[bB][01]+)"},{b:"\\b(0[oO][0-7]+)"},{b:e.CNR}],r:0},n={cN:"subst",b:"\\$\\{",e:"\\}",k:t,c:[]},c={cN:"string",b:"`",e:"`",c:[e.BE,n]};n.c=[e.ASM,e.QSM,c,a,e.RM];var s=n.c.concat([e.CBCM,e.CLCM]);return{aliases:["js","jsx"],k:t,c:[{cN:"meta",r:10,b:/^\s*['"]use (strict|asm)['"]/},{cN:"meta",b:/^#!/,e:/$/},e.ASM,e.QSM,c,e.CLCM,e.CBCM,a,{b:/[{,]\s*/,r:0,c:[{b:r+"\\s*:",rB:!0,r:0,c:[{cN:"attr",b:r,r:0}]}]},{b:"("+e.RSR+"|\\b(case|return|throw)\\b)\\s*",k:"return throw case",c:[e.CLCM,e.CBCM,e.RM,{cN:"function",b:"(\\(.*?\\)|"+r+")\\s*=>",rB:!0,e:"\\s*=>",c:[{cN:"params",v:[{b:r},{b:/\(\s*\)/},{b:/\(/,e:/\)/,eB:!0,eE:!0,k:t,c:s}]}]},{b://,sL:"xml",c:[{b:/<\w+\s*\/>/,skip:!0},{b:/<\w+/,e:/(\/\w+|\w+\/)>/,skip:!0,c:[{b:/<\w+\s*\/>/,skip:!0},"self"]}]}],r:0},{cN:"function",bK:"function",e:/\{/,eE:!0,c:[e.inherit(e.TM,{b:r}),{cN:"params",b:/\(/,e:/\)/,eB:!0,eE:!0,c:s}],i:/\[|%/},{b:/\$[(.]/},e.METHOD_GUARD,{cN:"class",bK:"class",e:/[{;=]/,eE:!0,i:/[:"\[\]]/,c:[{bK:"extends"},e.UTM]},{bK:"constructor",e:/\{/,eE:!0}],i:/#(?!!)/}});hljs.registerLanguage("apache",function(e){var r={cN:"number",b:"[\\$%]\\d+"};return{aliases:["apacheconf"],cI:!0,c:[e.HCM,{cN:"section",b:""},{cN:"attribute",b:/\w+/,r:0,k:{nomarkup:"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen serverroot servername"},starts:{e:/$/,r:0,k:{literal:"on off all"},c:[{cN:"meta",b:"\\s\\[",e:"\\]$"},{cN:"variable",b:"[\\$%]\\{",e:"\\}",c:["self",r]},r,e.QSM]}}],i:/\S/}});hljs.registerLanguage("xml",function(s){var e="[A-Za-z0-9\\._:-]+",t={eW:!0,i:/`]+/}]}]}]};return{aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist"],cI:!0,c:[{cN:"meta",b:"",r:10,c:[{b:"\\[",e:"\\]"}]},s.C("",{r:10}),{b:"<\\!\\[CDATA\\[",e:"\\]\\]>",r:10},{b:/<\?(php)?/,e:/\?>/,sL:"php",c:[{b:"/\\*",e:"\\*/",skip:!0}]},{cN:"tag",b:"|$)",e:">",k:{name:"style"},c:[t],starts:{e:"",rE:!0,sL:["css","xml"]}},{cN:"tag",b:"|$)",e:">",k:{name:"script"},c:[t],starts:{e:"",rE:!0,sL:["actionscript","javascript","handlebars","xml"]}},{cN:"meta",v:[{b:/<\?xml/,e:/\?>/,r:10},{b:/<\?\w+/,e:/\?>/}]},{cN:"tag",b:"",c:[{cN:"name",b:/[^\/><\s]+/,r:0},t]}]}});hljs.registerLanguage("markdown",function(e){return{aliases:["md","mkdown","mkd"],c:[{cN:"section",v:[{b:"^#{1,6}",e:"$"},{b:"^.+?\\n[=-]{2,}$"}]},{b:"<",e:">",sL:"xml",r:0},{cN:"bullet",b:"^([*+-]|(\\d+\\.))\\s+"},{cN:"strong",b:"[*_]{2}.+?[*_]{2}"},{cN:"emphasis",v:[{b:"\\*.+?\\*"},{b:"_.+?_",r:0}]},{cN:"quote",b:"^>\\s+",e:"$"},{cN:"code",v:[{b:"^```w*s*$",e:"^```s*$"},{b:"`.+?`"},{b:"^( {4}| 
)",e:"$",r:0}]},{b:"^[-\\*]{3,}",e:"$"},{b:"\\[.+?\\][\\(\\[].*?[\\)\\]]",rB:!0,c:[{cN:"string",b:"\\[",e:"\\]",eB:!0,rE:!0,r:0},{cN:"link",b:"\\]\\(",e:"\\)",eB:!0,eE:!0},{cN:"symbol",b:"\\]\\[",e:"\\]",eB:!0,eE:!0}],r:10},{b:/^\[[^\n]+\]:/,rB:!0,c:[{cN:"symbol",b:/\[/,e:/\]/,eB:!0,eE:!0},{cN:"link",b:/:\s*/,e:/$/,eB:!0}]}]}});hljs.registerLanguage("json",function(e){var i={literal:"true false null"},n=[e.QSM,e.CNM],r={e:",",eW:!0,eE:!0,c:n,k:i},t={b:"{",e:"}",c:[{cN:"attr",b:/"/,e:/"/,c:[e.BE],i:"\\n"},e.inherit(r,{b:/:/})],i:"\\S"},c={b:"\\[",e:"\\]",c:[e.inherit(r)],i:"\\S"};return n.splice(n.length,0,t,c),{c:n,k:i,i:"\\S"}}); \ No newline at end of file diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js index 30dc28280..296483985 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/importlayer.js @@ -45,7 +45,7 @@ function importLayerPageInit (ctx) { function(layer) { if (layer.results.length > 0) { currentLayerDepSelection = layer.results[0]; - layerDepBtn.click(); + layerDepBtn.click(); } }); @@ -158,6 +158,7 @@ function importLayerPageInit (ctx) { project_id: libtoaster.ctx.projectId, layer_deps: layerDepsCsv, local_source_dir: $('#local-dir-path').val(), + add_to_project: true, }; if ($('input[name=repo]:checked').val() == "git") { @@ -168,13 +169,15 @@ function importLayerPageInit (ctx) { } $.ajax({ - type: "POST", - url: ctx.xhrImportLayerUrl, - data: layerData, + type: "PUT", + url: ctx.xhrLayerUrl, + data: JSON.stringify(layerData), headers: { 'X-CSRFToken' : $.cookie('csrftoken')}, success: function (data) { if (data.error != "ok") { console.log(data.error); + /* let the user know why nothing happened */ + alert(data.error) } else { createImportedNotification(data); window.location.replace(libtoaster.ctx.projectPageUrl); @@ -243,9 +246,18 @@ function importLayerPageInit (ctx) { enable_import_btn(true); } - if ($("#git-repo-radio").prop("checked") && - vcsURLInput.val().length > 0 && gitRefInput.val().length > 0) { - enable_import_btn(true); + if ($("#git-repo-radio").prop("checked")) { + if (gitRefInput.val().length > 0 && + gitRefInput.val() == 'HEAD') { + $('#invalid-layer-revision-hint').show(); + $('#layer-revision-ctrl').addClass('has-error'); + enable_import_btn(false); + } else if (vcsURLInput.val().length > 0 && + gitRefInput.val().length > 0) { + $('#invalid-layer-revision-hint').hide(); + $('#layer-revision-ctrl').removeClass('has-error'); + enable_import_btn(true); + } } } @@ -332,19 +344,36 @@ function importLayerPageInit (ctx) { check_form(); }); - /* Have a guess at the layer name */ + /* Setup 'blank' typeahead */ + libtoaster.makeTypeahead(gitRefInput, + ctx.xhrGitRevTypeAheadUrl, + { git_url: null }, function(){}); + + vcsURLInput.focusout(function (){ + if (!$(this).val()) + return; + /* If we a layer name specified don't overwrite it or if there isn't a * url typed in yet return */ - if (layerNameInput.val() || !$(this).val()) - return; - - if ($(this).val().search("/")){ + if (!layerNameInput.val() && $(this).val().search("/")){ var urlPts = $(this).val().split("/"); + /* Add a suggestion of the layer name */ var suggestion = urlPts[urlPts.length-1].replace(".git",""); layerNameInput.val(suggestion); } + + /* Now actually setup the typeahead properly with the git url entered */ + gitRefInput._typeahead('destroy'); + + 
libtoaster.makeTypeahead(gitRefInput, + ctx.xhrGitRevTypeAheadUrl, + { git_url: $(this).val() }, + function(selected){ + gitRefInput._typeahead("close"); + }); + }); function radioDisplay() { @@ -389,7 +418,7 @@ function importLayerPageInit (ctx) { var input = $(this); var reBeginWithSlash = /^\//; var reCheckVariable = /^\$/; - var re = /([ <>\\|":\.%\?\*]+)/; + var re = /([ <>\\|":%\?\*]+)/; var invalidDir = re.test(input.val()); var invalidSlash = reBeginWithSlash.test(input.val()); diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/libtoaster.js b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/libtoaster.js index 86662b7a6..6f9b5d0f0 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/libtoaster.js +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/libtoaster.js @@ -274,9 +274,13 @@ var libtoaster = (function () { } function _addRmLayer(layerObj, add, doneCb){ + if (layerObj.xhrLayerUrl === undefined){ + throw("xhrLayerUrl is undefined") + } + if (add === true) { /* If adding get the deps for this layer */ - libtoaster.getLayerDepsForProject(layerObj.layerdetailurl, + libtoaster.getLayerDepsForProject(layerObj.xhrLayerUrl, function (layers) { /* got result for dependencies */ @@ -542,11 +546,9 @@ $(document).ready(function() { } /* - * PrettyPrint plugin. - * + * highlight plugin. */ - // Init - prettyPrint(); + hljs.initHighlightingOnLoad(); // Prevent invalid links from jumping page scroll $('a[href=#]').click(function() { diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/prettify.js b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/prettify.js deleted file mode 100755 index eef5ad7e6..000000000 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/prettify.js +++ /dev/null @@ -1,28 +0,0 @@ -var q=null;window.PR_SHOULD_USE_CONTINUATION=!0; -(function(){function L(a){function m(a){var f=a.charCodeAt(0);if(f!==92)return f;var b=a.charAt(1);return(f=r[b])?f:"0"<=b&&b<="7"?parseInt(a.substring(1),8):b==="u"||b==="x"?parseInt(a.substring(2),16):a.charCodeAt(1)}function e(a){if(a<32)return(a<16?"\\x0":"\\x")+a.toString(16);a=String.fromCharCode(a);if(a==="\\"||a==="-"||a==="["||a==="]")a="\\"+a;return a}function h(a){for(var f=a.substring(1,a.length-1).match(/\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\[0-3][0-7]{0,2}|\\[0-7]{1,2}|\\[\S\s]|[^\\]/g),a= -[],b=[],o=f[0]==="^",c=o?1:0,i=f.length;c122||(d<65||j>90||b.push([Math.max(65,j)|32,Math.min(d,90)|32]),d<97||j>122||b.push([Math.max(97,j)&-33,Math.min(d,122)&-33]))}}b.sort(function(a,f){return a[0]-f[0]||f[1]-a[1]});f=[];j=[NaN,NaN];for(c=0;ci[0]&&(i[1]+1>i[0]&&b.push("-"),b.push(e(i[1])));b.push("]");return b.join("")}function y(a){for(var f=a.source.match(/\[(?:[^\\\]]|\\[\S\s])*]|\\u[\dA-Fa-f]{4}|\\x[\dA-Fa-f]{2}|\\\d+|\\[^\dux]|\(\?[!:=]|[()^]|[^()[\\^]+/g),b=f.length,d=[],c=0,i=0;c=2&&a==="["?f[c]=h(j):a!=="\\"&&(f[c]=j.replace(/[A-Za-z]/g,function(a){a=a.charCodeAt(0);return"["+String.fromCharCode(a&-33,a|32)+"]"}));return f.join("")}for(var t=0,s=!1,l=!1,p=0,d=a.length;p=5&&"lang-"===b.substring(0,5))&&!(o&&typeof o[1]==="string"))c=!1,b="src";c||(r[f]=b)}i=d;d+=f.length;if(c){c=o[1];var j=f.indexOf(c),k=j+c.length;o[2]&&(k=f.length-o[2].length,j=k-c.length);b=b.substring(5);B(l+i,f.substring(0,j),e,p);B(l+i+j,c,C(b,c),p);B(l+i+k,f.substring(k),e,p)}else p.push(l+i,b)}a.e=p}var h={},y;(function(){for(var e=a.concat(m), 
-l=[],p={},d=0,g=e.length;d=0;)h[n.charAt(k)]=r;r=r[1];n=""+r;p.hasOwnProperty(n)||(l.push(r),p[n]=q)}l.push(/[\S\s]/);y=L(l)})();var t=m.length;return e}function u(a){var m=[],e=[];a.tripleQuotedStrings?m.push(["str",/^(?:'''(?:[^'\\]|\\[\S\s]|''?(?=[^']))*(?:'''|$)|"""(?:[^"\\]|\\[\S\s]|""?(?=[^"]))*(?:"""|$)|'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$))/,q,"'\""]):a.multiLineStrings?m.push(["str",/^(?:'(?:[^'\\]|\\[\S\s])*(?:'|$)|"(?:[^"\\]|\\[\S\s])*(?:"|$)|`(?:[^\\`]|\\[\S\s])*(?:`|$))/, -q,"'\"`"]):m.push(["str",/^(?:'(?:[^\n\r'\\]|\\.)*(?:'|$)|"(?:[^\n\r"\\]|\\.)*(?:"|$))/,q,"\"'"]);a.verbatimStrings&&e.push(["str",/^@"(?:[^"]|"")*(?:"|$)/,q]);var h=a.hashComments;h&&(a.cStyleComments?(h>1?m.push(["com",/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,q,"#"]):m.push(["com",/^#(?:(?:define|elif|else|endif|error|ifdef|include|ifndef|line|pragma|undef|warning)\b|[^\n\r]*)/,q,"#"]),e.push(["str",/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h|[a-z]\w*)>/,q])):m.push(["com",/^#[^\n\r]*/, -q,"#"]));a.cStyleComments&&(e.push(["com",/^\/\/[^\n\r]*/,q]),e.push(["com",/^\/\*[\S\s]*?(?:\*\/|$)/,q]));a.regexLiterals&&e.push(["lang-regex",/^(?:^^\.?|[!+-]|!=|!==|#|%|%=|&|&&|&&=|&=|\(|\*|\*=|\+=|,|-=|->|\/|\/=|:|::|;|<|<<|<<=|<=|=|==|===|>|>=|>>|>>=|>>>|>>>=|[?@[^]|\^=|\^\^|\^\^=|{|\||\|=|\|\||\|\|=|~|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\s*(\/(?=[^*/])(?:[^/[\\]|\\[\S\s]|\[(?:[^\\\]]|\\[\S\s])*(?:]|$))+\/)/]);(h=a.types)&&e.push(["typ",h]);a=(""+a.keywords).replace(/^ | $/g, -"");a.length&&e.push(["kwd",RegExp("^(?:"+a.replace(/[\s,]+/g,"|")+")\\b"),q]);m.push(["pln",/^\s+/,q," \r\n\t\xa0"]);e.push(["lit",/^@[$_a-z][\w$@]*/i,q],["typ",/^(?:[@_]?[A-Z]+[a-z][\w$@]*|\w+_t\b)/,q],["pln",/^[$_a-z][\w$@]*/i,q],["lit",/^(?:0x[\da-f]+|(?:\d(?:_\d+)*\d*(?:\.\d*)?|\.\d\+)(?:e[+-]?\d+)?)[a-z]*/i,q,"0123456789"],["pln",/^\\[\S\s]?/,q],["pun",/^.[^\s\w"-$'./@\\`]*/,q]);return x(m,e)}function D(a,m){function e(a){switch(a.nodeType){case 1:if(k.test(a.className))break;if("BR"===a.nodeName)h(a), -a.parentNode&&a.parentNode.removeChild(a);else for(a=a.firstChild;a;a=a.nextSibling)e(a);break;case 3:case 4:if(p){var b=a.nodeValue,d=b.match(t);if(d){var c=b.substring(0,d.index);a.nodeValue=c;(b=b.substring(d.index+d[0].length))&&a.parentNode.insertBefore(s.createTextNode(b),a.nextSibling);h(a);c||a.parentNode.removeChild(a)}}}}function h(a){function b(a,d){var e=d?a.cloneNode(!1):a,f=a.parentNode;if(f){var f=b(f,1),g=a.nextSibling;f.appendChild(e);for(var h=g;h;h=g)g=h.nextSibling,f.appendChild(h)}return e} -for(;!a.nextSibling;)if(a=a.parentNode,!a)return;for(var a=b(a.nextSibling,0),e;(e=a.parentNode)&&e.nodeType===1;)a=e;d.push(a)}var k=/(?:^|\s)nocode(?:\s|$)/,t=/\r\n?|\n/,s=a.ownerDocument,l;a.currentStyle?l=a.currentStyle.whiteSpace:window.getComputedStyle&&(l=s.defaultView.getComputedStyle(a,q).getPropertyValue("white-space"));var p=l&&"pre"===l.substring(0,3);for(l=s.createElement("LI");a.firstChild;)l.appendChild(a.firstChild);for(var d=[l],g=0;g=0;){var h=m[e];A.hasOwnProperty(h)?window.console&&console.warn("cannot override language handler %s",h):A[h]=a}}function C(a,m){if(!a||!A.hasOwnProperty(a))a=/^\s*=o&&(h+=2);e>=c&&(a+=2)}}catch(w){"console"in window&&console.log(w&&w.stack?w.stack:w)}}var v=["break,continue,do,else,for,if,return,while"],w=[[v,"auto,case,char,const,default,double,enum,extern,float,goto,int,long,register,short,signed,sizeof,static,struct,switch,typedef,union,unsigned,void,volatile"], 
-"catch,class,delete,false,import,new,operator,private,protected,public,this,throw,true,try,typeof"],F=[w,"alignof,align_union,asm,axiom,bool,concept,concept_map,const_cast,constexpr,decltype,dynamic_cast,explicit,export,friend,inline,late_check,mutable,namespace,nullptr,reinterpret_cast,static_assert,static_cast,template,typeid,typename,using,virtual,where"],G=[w,"abstract,boolean,byte,extends,final,finally,implements,import,instanceof,null,native,package,strictfp,super,synchronized,throws,transient"], -H=[G,"as,base,by,checked,decimal,delegate,descending,dynamic,event,fixed,foreach,from,group,implicit,in,interface,internal,into,is,lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,var"],w=[w,"debugger,eval,export,function,get,null,set,undefined,var,with,Infinity,NaN"],I=[v,"and,as,assert,class,def,del,elif,except,exec,finally,from,global,import,in,is,lambda,nonlocal,not,or,pass,print,raise,try,with,yield,False,True,None"], -J=[v,"alias,and,begin,case,class,def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,rescue,retry,self,super,then,true,undef,unless,until,when,yield,BEGIN,END"],v=[v,"case,done,elif,esac,eval,fi,function,in,local,set,then,until"],K=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)/,N=/\S/,O=u({keywords:[F,H,w,"caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END"+ -I,J,v],hashComments:!0,cStyleComments:!0,multiLineStrings:!0,regexLiterals:!0}),A={};k(O,["default-code"]);k(x([],[["pln",/^[^]*(?:>|$)/],["com",/^<\!--[\S\s]*?(?:--\>|$)/],["lang-",/^<\?([\S\s]+?)(?:\?>|$)/],["lang-",/^<%([\S\s]+?)(?:%>|$)/],["pun",/^(?:<[%?]|[%?]>)/],["lang-",/^]*>([\S\s]+?)<\/xmp\b[^>]*>/i],["lang-js",/^]*>([\S\s]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\S\s]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i]]), -["default-markup","htm","html","mxml","xhtml","xml","xsl"]);k(x([["pln",/^\s+/,q," \t\r\n"],["atv",/^(?:"[^"]*"?|'[^']*'?)/,q,"\"'"]],[["tag",/^^<\/?[a-z](?:[\w-.:]*\w)?|\/?>$/i],["atn",/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],["lang-uq.val",/^=\s*([^\s"'>]*(?:[^\s"'/>]|\/(?=\s)))/],["pun",/^[/<->]+/],["lang-js",/^on\w+\s*=\s*"([^"]+)"/i],["lang-js",/^on\w+\s*=\s*'([^']+)'/i],["lang-js",/^on\w+\s*=\s*([^\s"'>]+)/i],["lang-css",/^style\s*=\s*"([^"]+)"/i],["lang-css",/^style\s*=\s*'([^']+)'/i],["lang-css", -/^style\s*=\s*([^\s"'>]+)/i]]),["in.tag"]);k(x([],[["atv",/^[\S\s]+/]]),["uq.val"]);k(u({keywords:F,hashComments:!0,cStyleComments:!0,types:K}),["c","cc","cpp","cxx","cyc","m"]);k(u({keywords:"null,true,false"}),["json"]);k(u({keywords:H,hashComments:!0,cStyleComments:!0,verbatimStrings:!0,types:K}),["cs"]);k(u({keywords:G,cStyleComments:!0}),["java"]);k(u({keywords:v,hashComments:!0,multiLineStrings:!0}),["bsh","csh","sh"]);k(u({keywords:I,hashComments:!0,multiLineStrings:!0,tripleQuotedStrings:!0}), 
-["cv","py"]);k(u({keywords:"caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END",hashComments:!0,multiLineStrings:!0,regexLiterals:!0}),["perl","pl","pm"]);k(u({keywords:J,hashComments:!0,multiLineStrings:!0,regexLiterals:!0}),["rb"]);k(u({keywords:w,cStyleComments:!0,regexLiterals:!0}),["js"]);k(u({keywords:"all,and,by,catch,class,else,extends,false,finally,for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,true,try,unless,until,when,while,yes", -hashComments:3,cStyleComments:!0,multilineStrings:!0,tripleQuotedStrings:!0,regexLiterals:!0}),["coffee"]);k(x([],[["str",/^[\S\s]+/]]),["regex"]);window.prettyPrintOne=function(a,m,e){var h=document.createElement("PRE");h.innerHTML=a;e&&D(h,e);E({g:m,i:e,h:h});return h.innerHTML};window.prettyPrint=function(a){function m(){for(var e=window.PR_SHOULD_USE_CONTINUATION?l.now()+250:Infinity;p=0){var k=k.match(g),f,b;if(b= -!k){b=n;for(var o=void 0,c=b.firstChild;c;c=c.nextSibling)var i=c.nodeType,o=i===1?o?b:c:i===3?N.test(c.nodeValue)?b:o:o;b=(f=o===b?void 0:o)&&"CODE"===f.tagName}b&&(k=f.className.match(g));k&&(k=k[1]);b=!1;for(o=n.parentNode;o;o=o.parentNode)if((o.tagName==="pre"||o.tagName==="code"||o.tagName==="xmp")&&o.className&&o.className.indexOf("prettyprint")>=0){b=!0;break}b||((b=(b=n.className.match(/\blinenums\b(?::(\d+))?/))?b[1]&&b[1].length?+b[1]:!0:!1)&&D(n,b),d={g:k,h:n,i:b},E(d))}}p= 0) { + machineChangeBtn.attr("disabled", "disabled"); + invalidMachineNameHelp.show(); + machineInputForm.addClass('has-error'); + } else { + machineChangeBtn.removeAttr("disabled"); + invalidMachineNameHelp.hide(); + machineInputForm.removeClass('has-error'); + } + }); machineChangeFormToggle.click(function(){ + machineChangeInput.val(machineNameTitle.text()); + machineChangeBtn.removeAttr("disabled"); + invalidMachineNameHelp.hide(); + machineInputForm.removeClass('has-error'); machineForm.slideDown(); machineNameTitle.hide(); $(this).hide(); diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/table.js b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/table.js index 176ce579f..abcb5ca7a 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/table.js +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/table.js @@ -39,6 +39,8 @@ function tableInit(ctx){ ', .show-all-'+ctx.tableName); function loadData(tableParams){ + table.trigger("table-loading"); + $.ajax({ type: "GET", url: ctx.url, @@ -200,6 +202,7 @@ function tableInit(ctx){ } /* Add table header and column toggle menu */ + var column_edit_entries = []; for (var i in tableData.columns){ var col = tableData.columns[i]; if (col.displayable === false) { @@ -291,9 +294,17 @@ function tableInit(ctx){ defaultHiddenCols.push(col.field_name); } - editColMenu.append(toggler); + /* Gather the Edit Column entries */ + column_edit_entries.push({'title':col.title,'html':toggler}); + } /* End for each column */ + /* Append the sorted Edit Column toggler entries */ + column_edit_entries.sort(function(a,b) {return (a.title > b.title) ? 1 : ((b.title > a.title) ? 
-1 : 0);} ); + for (var col in column_edit_entries){ + editColMenu.append(column_edit_entries[col].html); + } + tableChromeDone = true; } @@ -835,4 +846,12 @@ function tableInit(ctx){ $('#filter-modal-'+ctx.tableName).modal('hide'); }); + + table.on("table-loading", function(){ + table.css("opacity", 0.5); + }); + + table.on("table-done", function(){ + table.css("opacity", 1); + }) } diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/tests/test.js b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/tests/test.js index d7953de44..4a4c83f4c 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/tests/test.js +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/static/js/tests/test.js @@ -5,6 +5,7 @@ QUnit.test("Layer alert notification", function(assert) { var layer = { "layerdetailurl":"/toastergui/project/1/layer/22", + "xhrLayerUrl":"/toastergui/xhr_layer/1/9", "vcs_url":"git://example.com/example.git", "detail":"[ git://example.com/example.git | master ]", "vcs_reference":"master", @@ -15,6 +16,7 @@ QUnit.test("Layer alert notification", function(assert) { var layerDepsList = [ { "layerdetailurl":"/toastergui/project/1/layer/9", + "xhrLayerUrl":"/toastergui/xhr_layer/1/9", "vcs_url":"git://example.com/example.git", "detail":"[ git://example.com/example.git | master ]", "vcs_reference":"master", @@ -23,6 +25,7 @@ QUnit.test("Layer alert notification", function(assert) { }, { "layerdetailurl":"/toastergui/project/1/layer/9", + "xhrLayerUrl":"/toastergui/xhr_layer/1/9", "vcs_url":"git://example.com/example.git", "detail":"[ git://example.com/example.git | master ]", "vcs_reference":"master", @@ -66,7 +69,8 @@ QUnit.test("Show notification", function(assert){ var layer = { "id": 1, "name": "meta-testing", - "layerdetailurl": "/toastergui/project/1/layer/1" + "layerdetailurl": "/toastergui/project/1/layer/1", + "xhrLayerUrl": "/toastergui/xhr_layer/1/1" }; QUnit.test("Add layer", function(assert){ @@ -144,7 +148,7 @@ QUnit.test("Make typeaheads", function(assert){ /* Page init functions */ QUnit.test("Import layer page init", function(assert){ - assert.throws(importLayerPageInit()); + assert.throws(importLayerPageInit({ xhrGitRevTypeAheadUrl: "url" })); }); QUnit.test("Project page init", function(assert){ diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/templates/base.html b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/templates/base.html index 496dd6eab..32b49795f 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/templates/base.html +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/templates/base.html @@ -24,7 +24,7 @@ - @@ -100,7 +100,8 @@ - + @@ -63,6 +63,7 @@ +
@@ -150,7 +151,7 @@ - + @@ -593,7 +594,7 @@ $(document).ready(function() { var input = $(this); var reBeginWithSlash = /^\//; var reCheckVariable = /^\$/; - var re = /([ <>\\|":\.%\?\*]+)/; + var re = /([ <>\\|":%\?\*]+)/; var invalidDir = re.test(input.val()); var invalidSlash = reBeginWithSlash.test(input.val()); var invalidVar = reCheckVariable.test(input.val()); @@ -716,8 +717,10 @@ $(document).ready(function() { } if ($('#new-imagefs_types').val().length === 0) { $("#apply-change-image_fstypes").prop("disabled", true); + $('#fstypes-error-message').show(); } else { $("#apply-change-image_fstypes").prop("disabled", false); + $('#fstypes-error-message').hide(); } }); @@ -958,7 +961,7 @@ $(document).ready(function() { var input = $(this); var reBeginWithSlash = /^\//; var reCheckVariable = /^\$/; - var re = /([ <>\\|":\.%\?\*]+)/; + var re = /([ <>\\|":%\?\*]+)/; var invalidDir = re.test(input.val()); var invalidSlash = reBeginWithSlash.test(input.val()); var invalidVar = reCheckVariable.test(input.val()); diff --git a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe_btn.html b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe_btn.html index e3729643a..0ee0ba559 100644 --- a/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe_btn.html +++ b/import-layers/yocto-poky/bitbake/lib/toaster/toastergui/templates/recipe_btn.html @@ -5,7 +5,13 @@ > Build recipe -
Yocto Project Quick Start<\/a>/Yocto Project Quick Start/g -s/Yocto Project Development Manual<\/a>/Yocto Project Development Manual/g -s/Yocto Project Software Development Kit (SDK) Developer's Guide<\/a>/Yocto Project Software Development Kit (SDK) Developer's Guide/g -s/Yocto Project Board Support Package (BSP) Developer's Guide<\/a>/Yocto Project Board Support Package (BSP) Developer's Guide/g -s/Yocto Project Profiling and Tracing Manual<\/a>/Yocto Project Profiling and Tracing Manual/g -s/Yocto Project Linux Kernel Development Manual<\/a>/Yocto Project Linux Kernel Development Manual/g -s/Yocto Project Reference Manual<\/a>/Yocto Project Reference Manual/g -s/Toaster User Manual<\/a>/Toaster User Manual/g +s/Yocto Project Quick Start<\/a>/Yocto Project Quick Start/g +s/Yocto Project Development Manual<\/a>/Yocto Project Development Manual/g +s/Yocto Project Software Development Kit (SDK) Developer's Guide<\/a>/Yocto Project Software Development Kit (SDK) Developer's Guide/g +s/Yocto Project Board Support Package (BSP) Developer's Guide<\/a>/Yocto Project Board Support Package (BSP) Developer's Guide/g +s/Yocto Project Profiling and Tracing Manual<\/a>/Yocto Project Profiling and Tracing Manual/g +s/Yocto Project Linux Kernel Development Manual<\/a>/Yocto Project Linux Kernel Development Manual/g +s/Yocto Project Reference Manual<\/a>/Yocto Project Reference Manual/g +s/Toaster User Manual<\/a>/Toaster User Manual/g diff --git a/import-layers/yocto-poky/documentation/yocto-project-qs/figures/yocto-environment.png b/import-layers/yocto-poky/documentation/yocto-project-qs/figures/yocto-environment.png index 82b7a55a3..35969038c 100644 Binary files a/import-layers/yocto-poky/documentation/yocto-project-qs/figures/yocto-environment.png and b/import-layers/yocto-poky/documentation/yocto-project-qs/figures/yocto-environment.png differ diff --git a/import-layers/yocto-poky/documentation/yocto-project-qs/yocto-project-qs.xml b/import-layers/yocto-poky/documentation/yocto-project-qs/yocto-project-qs.xml index 950a4ff8b..b4b3f4bd0 100644 --- a/import-layers/yocto-poky/documentation/yocto-project-qs/yocto-project-qs.xml +++ b/import-layers/yocto-poky/documentation/yocto-project-qs/yocto-project-qs.xml @@ -16,11 +16,31 @@ Permission is granted to copy, distribute and/or modify this document under the terms of the Creative Commons Attribution-Share Alike 2.0 UK: England & Wales as published by Creative Commons. - - For the latest version of this manual associated with this - Yocto Project release, see the - Yocto Project Quick Start - from the Yocto Project website. + Manual Notes + + + For the latest version of the Yocto Project Quick + Start associated with this Yocto Project release + (version &YOCTO_DOC_VERSION;), + see the Yocto Project Quick Start from the + Yocto Project documentation page. + + + This version of the manual is version + &YOCTO_DOC_VERSION;. + For later releases of the Yocto Project (if they exist), + go to the + Yocto Project documentation page + and use the drop-down "Active Releases" button + and choose the Yocto Project version for which you want + the manual. + + + For an in-development version of the Yocto Project + Quick Start, see + . + + @@ -44,7 +64,8 @@ tool, to construct complete Linux images. The BitBake and OE components are combined together to form a reference build host, historically known as - Poky. + Poky + (Pah-key). 
@@ -125,11 +146,8 @@ + format="PNG" align='center' width="8in"/> - - The Yocto Project Development Environment - @@ -302,8 +320,7 @@ Ubuntu and Debian - $ sudo apt-get install &UBUNTU_HOST_PACKAGES_ESSENTIAL; \ - libsdl1.2-dev xterm + $ sudo apt-get install &UBUNTU_HOST_PACKAGES_ESSENTIAL; libsdl1.2-dev xterm Fedora @@ -313,22 +330,31 @@ OpenSUSE - $ sudo zypper install &OPENSUSE_HOST_PACKAGES_ESSENTIAL; \ - libSDL-devel xterm + $ sudo zypper install &OPENSUSE_HOST_PACKAGES_ESSENTIAL; libSDL-devel xterm CentOS - $ sudo yum install &CENTOS_HOST_PACKAGES_ESSENTIAL; \ - SDL-devel xterm + $ sudo yum install &CENTOS_HOST_PACKAGES_ESSENTIAL; SDL-devel xterm - - CentOS 6.x users need to ensure that the required - versions of Git, tar and Python are available. - For details, See the - "Required Git, tar, and Python Versions" - section in the Yocto Project Reference Manual for - information. + Notes + + + Extra Packages for Enterprise Linux + (i.e. epel-release) + is a collection of packages from Fedora + built on RHEL/CentOS for easy installation + of packages not included in enterprise + Linux by default. + You need to install these packages + separately. + + + The makecache command + consumes additional Metadata from + epel-release. + + @@ -357,11 +383,12 @@ $ git clone git://git.yoctoproject.org/poky Cloning into 'poky'... - remote: Counting objects: 226790, done. - remote: Compressing objects: 100% (57465/57465), done. - remote: Total 226790 (delta 165212), reused 225887 (delta 164327) - Receiving objects: 100% (226790/226790), 100.98 MiB | 263 KiB/s, done. - Resolving deltas: 100% (165212/165212), done. + remote: Counting objects: 361782, done. + remote: Compressing objects: 100% (87100/87100), done. + remote: Total 361782 (delta 268619), reused 361439 (delta 268277) + Receiving objects: 100% (361782/361782), 131.94 MiB | 6.88 MiB/s, done. + Resolving deltas: 100% (268619/268619), done. + Checking connectivity... done. $ git checkout &DISTRO_NAME_NO_CAP; You can also get the Yocto Project Files by downloading @@ -569,14 +596,47 @@ Depending on the number of processors and cores, the amount of RAM, the speed of your Internet connection - and other factors, the build process could take several - hours the first time you run it. + and other factors, the build process could take + several hours the first time you run it. Subsequent builds run much faster since parts of the build are cached. $ bitbake core-image-sato + + + If you experience a build error due to resources + temporarily being unavailable and it appears you + should not be having this issue, it might be due + to the combination of a 4.3+ Linux kernel and + systemd version 228+ + (i.e. see this + link + for information). + + + + To work around this issue, you can try either + of the following: + + + Try the build again. + + + Modify the "DefaultTasksMax" + systemd parameter + by uncommenting it and setting it to + "infinity". + You can find this parameter in the + system.conf file + located in + /etc/systemd + on most systems. + + + + For information on using the bitbake command, see the "BitBake" @@ -599,8 +659,8 @@ Exit QEMU: Exit QEMU by either clicking on the shutdown icon or by - opening a terminal, typing - poweroff, and then pressing "Enter". + typing Ctrl-C in the QEMU + transcript window from which you evoked QEMU. @@ -639,11 +699,11 @@ $ cd $HOME/poky $ git clone git://git.yoctoproject.org/meta-intel Cloning into 'meta-intel'... - remote: Counting objects: 11988, done. - remote: Compressing objects: 100% (3884/3884), done. 
- Receiving objects: 100% (11988/11988), 2.93 MiB | 2.51 MiB/s, done. - remote: Total 11988 (delta 6881), reused 11752 (delta 6645) - Resolving deltas: 100% (6881/6881), done. + remote: Counting objects: 14039, done. + remote: Compressing objects: 100% (4471/4471), done. + remote: Total 14039 (delta 8130), reused 13837 (delta 7947) + Receiving objects: 100% (14039/14039), 4.27 MiB | 3.98 MiB/s, done. + Resolving deltas: 100% (8130/8130), done. Checking connectivity... done. By default when you clone a Git repository, the @@ -727,6 +787,39 @@ $ bitbake core-image-base + + + If you experience a build error due to resources + temporarily being unavailable and it appears you + should not be having this issue, it might be due + to the combination of a 4.3+ Linux kernel and + systemd version 228+ + (i.e. see this + link + for information). + + + + To work around this issue, you can try either + of the following: + + + Try the build again. + + + Modify the "DefaultTasksMax" + systemd parameter + by uncommenting it and setting it to + "infinity". + You can find this parameter in the + system.conf file + located in + /etc/systemd + on most systems. + + + + Once the build completes, the resulting console-only image is located in the Build Directory here: diff --git a/import-layers/yocto-poky/meta-poky/conf/distro/include/maintainers.inc b/import-layers/yocto-poky/meta-poky/conf/distro/include/maintainers.inc index db9511140..261c8f313 100644 --- a/import-layers/yocto-poky/meta-poky/conf/distro/include/maintainers.inc +++ b/import-layers/yocto-poky/meta-poky/conf/distro/include/maintainers.inc @@ -61,13 +61,13 @@ RECIPE_MAINTAINER_pn-bash = "Hongxu Jia " RECIPE_MAINTAINER_pn-bash-completion = "Alexander Kanavin " RECIPE_MAINTAINER_pn-bc = "Jose Lamego " RECIPE_MAINTAINER_pn-bdwgc = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-beecrypt = "Chen Qi " +RECIPE_MAINTAINER_pn-beecrypt = "Armin Kuster " RECIPE_MAINTAINER_pn-bigreqsproto = "Jussi Kukkonen " -RECIPE_MAINTAINER_pn-bind = "Robert Yang " -RECIPE_MAINTAINER_pn-binutils = "Robert Yang " -RECIPE_MAINTAINER_pn-binutils-cross = "Robert Yang " -RECIPE_MAINTAINER_pn-binutils-cross-canadian = "Robert Yang " -RECIPE_MAINTAINER_pn-binutils-crosssdk = "Robert Yang " +RECIPE_MAINTAINER_pn-bind = "Armin Kuster " +RECIPE_MAINTAINER_pn-binutils = "Khem Raj " +RECIPE_MAINTAINER_pn-binutils-cross = "Khem Raj " +RECIPE_MAINTAINER_pn-binutils-cross-canadian = "Khem Raj " +RECIPE_MAINTAINER_pn-binutils-crosssdk = "Khem Raj " RECIPE_MAINTAINER_pn-bison = "Chen Qi " RECIPE_MAINTAINER_pn-bjam-native = "Alexander Kanavin " RECIPE_MAINTAINER_pn-blktool = "Dengke Du " @@ -82,9 +82,9 @@ RECIPE_MAINTAINER_pn-build-appliance-image = "Cristian Iorga " RECIPE_MAINTAINER_pn-core-image-x11 = "Ross Burton " RECIPE_MAINTAINER_pn-coreutils = "Chen Qi " -RECIPE_MAINTAINER_pn-cpio = "Chen Qi " -RECIPE_MAINTAINER_pn-cracklib = "Hongxu Jia " +RECIPE_MAINTAINER_pn-cpio = "Denys Dmytriyenko " +RECIPE_MAINTAINER_pn-cracklib = "Armin Kuster " RECIPE_MAINTAINER_pn-createrepo = "Hongxu Jia " RECIPE_MAINTAINER_pn-cronie = "Edwin Plauchu " RECIPE_MAINTAINER_pn-cross-localedef-native = "Khem Raj " @@ -138,7 +137,8 @@ RECIPE_MAINTAINER_pn-cryptodev-linux = "Robert Yang " RECIPE_MAINTAINER_pn-cryptodev-module = "Robert Yang " RECIPE_MAINTAINER_pn-cryptodev-tests = "Robert Yang " RECIPE_MAINTAINER_pn-cups = "Chen Qi " -RECIPE_MAINTAINER_pn-curl = "Chen Qi " +RECIPE_MAINTAINER_pn-curl = "Armin Kuster " +RECIPE_MAINTAINER_pn-cve-check-tool = "Leonardo Sandoval " RECIPE_MAINTAINER_pn-cwautomacros = 
"Maxin B. John " RECIPE_MAINTAINER_pn-damageproto = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-db = "Mark Hatle " @@ -158,11 +158,6 @@ RECIPE_MAINTAINER_pn-distcc = "Hongxu Jia " RECIPE_MAINTAINER_pn-distcc-config = "Dengke Du " RECIPE_MAINTAINER_pn-dmidecode = "Alexander Kanavin " RECIPE_MAINTAINER_pn-dmxproto = "Jussi Kukkonen " -RECIPE_MAINTAINER_pn-docbook-dsssl-stylesheets-native = "Dengke Du " -RECIPE_MAINTAINER_pn-docbook-sgml-dtd-3.1-native = "Dengke Du " -RECIPE_MAINTAINER_pn-docbook-sgml-dtd-4.1-native = "Dengke Du " -RECIPE_MAINTAINER_pn-docbook-sgml-dtd-4.5-native = "Dengke Du " -RECIPE_MAINTAINER_pn-docbook-utils-native = "Dengke Du " RECIPE_MAINTAINER_pn-docbook-xml-dtd4 = "Dengke Du " RECIPE_MAINTAINER_pn-docbook-xsl-stylesheets = "Dengke Du " RECIPE_MAINTAINER_pn-dosfstools = "Dengke Du " @@ -202,21 +197,21 @@ RECIPE_MAINTAINER_pn-freetype = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-fstests = "Alexander Kanavin " RECIPE_MAINTAINER_pn-fts = "Khem Raj " RECIPE_MAINTAINER_pn-gawk = "Chen Qi " -RECIPE_MAINTAINER_pn-gcc = "Richard Purdie " -RECIPE_MAINTAINER_pn-gcc-cross = "Richard Purdie " -RECIPE_MAINTAINER_pn-gcc-cross-canadian = "Richard Purdie " -RECIPE_MAINTAINER_pn-gcc-cross-initial = "Richard Purdie " -RECIPE_MAINTAINER_pn-gcc-crosssdk = "Richard Purdie " -RECIPE_MAINTAINER_pn-gcc-crosssdk-initial = "Richard Purdie " -RECIPE_MAINTAINER_pn-gcc-runtime = "Richard Purdie " -RECIPE_MAINTAINER_pn-gcc-sanitizers = "Richard Purdie " -RECIPE_MAINTAINER_pn-gcc-source = "Richard Purdie " -RECIPE_MAINTAINER_pn-gccmakedep = "Richard Purdie " +RECIPE_MAINTAINER_pn-gcc = "Khem Raj " +RECIPE_MAINTAINER_pn-gcc-cross = "Khem Raj " +RECIPE_MAINTAINER_pn-gcc-cross-canadian = "Khem Raj " +RECIPE_MAINTAINER_pn-gcc-cross-initial = "Khem Raj " +RECIPE_MAINTAINER_pn-gcc-crosssdk = "Khem Raj " +RECIPE_MAINTAINER_pn-gcc-crosssdk-initial = "Khem Raj " +RECIPE_MAINTAINER_pn-gcc-runtime = "Khem Raj " +RECIPE_MAINTAINER_pn-gcc-sanitizers = "Khem Raj " +RECIPE_MAINTAINER_pn-gcc-source = "Khem Raj " +RECIPE_MAINTAINER_pn-gccmakedep = "Khem Raj " RECIPE_MAINTAINER_pn-gconf = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-gcr = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-gdb = "Richard Purdie " -RECIPE_MAINTAINER_pn-gdb-cross = "Richard Purdie " -RECIPE_MAINTAINER_pn-gdb-cross-canadian = "Richard Purdie " +RECIPE_MAINTAINER_pn-gdb = "Khem Raj " +RECIPE_MAINTAINER_pn-gdb-cross = "Khem Raj " +RECIPE_MAINTAINER_pn-gdb-cross-canadian = "Khem Raj " RECIPE_MAINTAINER_pn-gdbm = "Alexander Kanavin " RECIPE_MAINTAINER_pn-gdk-pixbuf = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-gettext = "Robert Yang " @@ -226,13 +221,13 @@ RECIPE_MAINTAINER_pn-git = "Robert Yang " RECIPE_MAINTAINER_pn-glew = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-glib-2.0 = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-glib-networking = "Jussi Kukkonen " -RECIPE_MAINTAINER_pn-glibc = "Richard Purdie " -RECIPE_MAINTAINER_pn-glibc-initial = "Richard Purdie " -RECIPE_MAINTAINER_pn-glibc-locale = "Richard Purdie " -RECIPE_MAINTAINER_pn-glibc-mtrace = "Richard Purdie " -RECIPE_MAINTAINER_pn-glibc-scripts = "Richard Purdie " +RECIPE_MAINTAINER_pn-glibc = "Khem Raj " +RECIPE_MAINTAINER_pn-glibc-initial = "Khem Raj " +RECIPE_MAINTAINER_pn-glibc-locale = "Khem Raj " +RECIPE_MAINTAINER_pn-glibc-mtrace = "Khem Raj " +RECIPE_MAINTAINER_pn-glibc-scripts = "Khem Raj " RECIPE_MAINTAINER_pn-glproto = "Jussi Kukkonen " -RECIPE_MAINTAINER_pn-gmp = "Robert Yang " +RECIPE_MAINTAINER_pn-gmp = "Khem Raj " RECIPE_MAINTAINER_pn-gnome-common = "Jussi Kukkonen " 
RECIPE_MAINTAINER_pn-gnome-desktop-testing = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-gnome-desktop3 = "Alexander Kanavin " @@ -241,12 +236,13 @@ RECIPE_MAINTAINER_pn-gnome-themes-standard = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-guilt-native = "Bruce Ashfield " RECIPE_MAINTAINER_pn-gummiboot = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-gzip = "Chen Qi " +RECIPE_MAINTAINER_pn-gzip = "Armin Kuster " RECIPE_MAINTAINER_pn-harfbuzz = "Maxin B. John " -RECIPE_MAINTAINER_pn-hdparm = "Robert Yang " +RECIPE_MAINTAINER_pn-hdparm = "Denys Dmytriyenko " RECIPE_MAINTAINER_pn-help2man-native = "Hongxu Jia " RECIPE_MAINTAINER_pn-hicolor-icon-theme = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-hostap-conf = "Maxin B. John " @@ -284,8 +280,8 @@ RECIPE_MAINTAINER_pn-icon-naming-utils = "Alexander Kanavin " RECIPE_MAINTAINER_pn-inputproto = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-intltool = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-iproute2 = "Maxin B. John " -RECIPE_MAINTAINER_pn-iptables = "Maxin B. John " +RECIPE_MAINTAINER_pn-iproute2 = "Armin Kuster " +RECIPE_MAINTAINER_pn-iptables = "Armin Kuster " RECIPE_MAINTAINER_pn-iputils = "Robert Yang " RECIPE_MAINTAINER_pn-irda-utils = "Maxin B. John " RECIPE_MAINTAINER_pn-iso-codes = "Alexander Kanavin " @@ -309,7 +305,7 @@ RECIPE_MAINTAINER_pn-kconfig-frontends = "Alexander Kanavin " RECIPE_MAINTAINER_pn-libdaemon = "Alexander Kanavin " RECIPE_MAINTAINER_pn-libdmx = "Jussi Kukkonen " -RECIPE_MAINTAINER_pn-libdrm = "Jussi Kukkonen " +RECIPE_MAINTAINER_pn-libdrm = "Otavio Salvador " RECIPE_MAINTAINER_pn-libdumpvalue-perl = "Aníbal Limón " RECIPE_MAINTAINER_pn-libenv-perl = "Aníbal Limón " RECIPE_MAINTAINER_pn-libepoxy = "Jussi Kukkonen " @@ -351,10 +347,10 @@ RECIPE_MAINTAINER_pn-libfile-checktree-perl = "Aníbal Limón " RECIPE_MAINTAINER_pn-libvorbis = "Tanu Kaskinen " RECIPE_MAINTAINER_pn-libwebp = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-libwnck3 = "Alexander Kanavin " RECIPE_MAINTAINER_pn-libx11 = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-libx11-diet = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-libxau = "Jussi Kukkonen " @@ -458,13 +454,12 @@ RECIPE_MAINTAINER_pn-libxxf86vm = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-libyaml = "Alexander Kanavin " RECIPE_MAINTAINER_pn-lighttpd = "Alexander Kanavin " RECIPE_MAINTAINER_pn-linux-dummy = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-linux-firmware = "Maxin B. John " +RECIPE_MAINTAINER_pn-linux-firmware = "Otavio Salvador " RECIPE_MAINTAINER_pn-linux-libc-headers = "Bruce Ashfield " RECIPE_MAINTAINER_pn-linux-yocto = "Bruce Ashfield " RECIPE_MAINTAINER_pn-linux-yocto-dev = "Bruce Ashfield " RECIPE_MAINTAINER_pn-linux-yocto-rt = "Bruce Ashfield " RECIPE_MAINTAINER_pn-linux-yocto-tiny = "Bruce Ashfield " -RECIPE_MAINTAINER_pn-linuxdoc-tools-native = "Dengke Du " RECIPE_MAINTAINER_pn-logrotate = "Robert Yang " RECIPE_MAINTAINER_pn-lrzsz = "Maxin B. John " RECIPE_MAINTAINER_pn-lsb = "Hongxu Jia " @@ -476,8 +471,8 @@ RECIPE_MAINTAINER_pn-lttng-modules = "Richard Purdie " RECIPE_MAINTAINER_pn-neard = "Maxin B. John " RECIPE_MAINTAINER_pn-neon = "Maxin B. John " -RECIPE_MAINTAINER_pn-net-tools = "Maxin B. John " -RECIPE_MAINTAINER_pn-netbase = "Maxin B. 
John " +RECIPE_MAINTAINER_pn-net-tools = "Armin Kuster " +RECIPE_MAINTAINER_pn-netbase = "Armin Kuster " RECIPE_MAINTAINER_pn-nettle = "Armin Kuster " RECIPE_MAINTAINER_pn-nfs-export-root = "Robert Yang " -RECIPE_MAINTAINER_pn-nfs-utils = "Mariano Lopez " +RECIPE_MAINTAINER_pn-nfs-utils = "Leonardo Sandoval " RECIPE_MAINTAINER_pn-npth = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-nspr = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-nss = "Alexander Kanavin " +RECIPE_MAINTAINER_pn-nspr = "Armin Kuster " +RECIPE_MAINTAINER_pn-nss = "Armin Kuster " RECIPE_MAINTAINER_pn-nss-myhostname = "Maxin B. John " RECIPE_MAINTAINER_pn-ofono = "Maxin B. John " RECIPE_MAINTAINER_pn-oh-puzzles = "Jussi Kukkonen " -RECIPE_MAINTAINER_pn-openjade-native = "Dengke Du " -RECIPE_MAINTAINER_pn-opensp = "Dengke Du " -RECIPE_MAINTAINER_pn-openssh = "Dengke Du " +RECIPE_MAINTAINER_pn-openssh = "Armin Kuster " RECIPE_MAINTAINER_pn-openssl = "Alexander Kanavin " RECIPE_MAINTAINER_pn-opkg = "Alejandro del Castillo " RECIPE_MAINTAINER_pn-opkg-arch-config = "Alejandro del Castillo " @@ -591,9 +584,9 @@ RECIPE_MAINTAINER_pn-patch = "Hongxu Jia " RECIPE_MAINTAINER_pn-patchelf = "Richard Purdie " RECIPE_MAINTAINER_pn-pax = "Hongxu Jia " RECIPE_MAINTAINER_pn-pax-utils = "Hongxu Jia " -RECIPE_MAINTAINER_pn-pbzip2 = "Alexander Kanavin " +RECIPE_MAINTAINER_pn-pbzip2 = "Armin Kuster " RECIPE_MAINTAINER_pn-pciutils = "Chen Qi " -RECIPE_MAINTAINER_pn-pcmanfm = "Edwin Plauchu " +RECIPE_MAINTAINER_pn-pcmanfm = "Alexander Kanavin " RECIPE_MAINTAINER_pn-pcmciautils = "Robert Yang " RECIPE_MAINTAINER_pn-perf = "Bruce Ashfield " RECIPE_MAINTAINER_pn-perl = "Aníbal Limón " @@ -602,7 +595,7 @@ RECIPE_MAINTAINER_pn-piglit = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-pigz = "Hongxu Jia " RECIPE_MAINTAINER_pn-pinentry = "Armin Kuster " RECIPE_MAINTAINER_pn-pixman = "Jussi Kukkonen " -RECIPE_MAINTAINER_pn-pixz = "Richard Purdie " +RECIPE_MAINTAINER_pn-pixz = "Armin Kuster " RECIPE_MAINTAINER_pn-pkgconfig = "Maxin B. John " RECIPE_MAINTAINER_pn-pm-utils = "Maxin B. John " RECIPE_MAINTAINER_pn-pointercal = "Alexander Kanavin " @@ -633,7 +626,7 @@ RECIPE_MAINTAINER_pn-python-imaging = "Jose Lamego " RECIPE_MAINTAINER_pn-python-scons = "Jose Lamego " RECIPE_MAINTAINER_pn-python-scons-native = "Jose Lamego " -RECIPE_MAINTAINER_pn-python-setuptools = "Jose Lamego " +RECIPE_MAINTAINER_pn-python-setuptools = "Edwin Plauchu " RECIPE_MAINTAINER_pn-python-six = "Jose Lamego " RECIPE_MAINTAINER_pn-python-smartpm = "Jose Lamego " RECIPE_MAINTAINER_pn-python-smmap = "Jose Lamego " @@ -651,9 +644,9 @@ RECIPE_MAINTAINER_pn-python3-async = "Edwin Plauchu " RECIPE_MAINTAINER_pn-slang = "Robert Yang " RECIPE_MAINTAINER_pn-socat = "Hongxu Jia " +RECIPE_MAINTAINER_pn-source-highlight = "Alexander Kanavin " RECIPE_MAINTAINER_pn-speex = "Tanu Kaskinen " RECIPE_MAINTAINER_pn-speexdsp = "Tanu Kaskinen " RECIPE_MAINTAINER_pn-sqlite3 = "Maxin B. 
John " @@ -748,20 +738,18 @@ RECIPE_MAINTAINER_pn-tiny-init = "Alexander Kanavin " RECIPE_MAINTAINER_pn-volatile-binds = "Chen Qi " RECIPE_MAINTAINER_pn-vte = "Jussi Kukkonen " +RECIPE_MAINTAINER_pn-vulkan = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-waffle = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-watchdog = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-wayland = "Jussi Kukkonen " -RECIPE_MAINTAINER_pn-wayland-protocols = "Jussi Kukkonen " +RECIPE_MAINTAINER_pn-watchdog-config = "Alexander Kanavin " +RECIPE_MAINTAINER_pn-wayland = "Denys Dmytriyenko " +RECIPE_MAINTAINER_pn-wayland-protocols = "Denys Dmytriyenko " RECIPE_MAINTAINER_pn-webkitgtk = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-weston = "Jussi Kukkonen " -RECIPE_MAINTAINER_pn-weston-init = "Jussi Kukkonen " +RECIPE_MAINTAINER_pn-weston = "Denys Dmytriyenko " +RECIPE_MAINTAINER_pn-weston-init = "Denys Dmytriyenko " RECIPE_MAINTAINER_pn-wget = "Robert Yang " -RECIPE_MAINTAINER_pn-which = "Mariano Lopez " +RECIPE_MAINTAINER_pn-which = "Leonardo Sandoval " RECIPE_MAINTAINER_pn-wireless-tools = "Maxin B. John " RECIPE_MAINTAINER_pn-wpa-supplicant = "Maxin B. John " RECIPE_MAINTAINER_pn-x11-common = "Jussi Kukkonen " @@ -843,8 +833,8 @@ RECIPE_MAINTAINER_pn-xuser-account = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-xvideo-tests = "Maxin B. John " RECIPE_MAINTAINER_pn-xvinfo = "Jussi Kukkonen " RECIPE_MAINTAINER_pn-xwininfo = "Jussi Kukkonen " -RECIPE_MAINTAINER_pn-xz = "Chen Qi " +RECIPE_MAINTAINER_pn-xz = "Armin Kuster " RECIPE_MAINTAINER_pn-yasm = "Dengke Du " -RECIPE_MAINTAINER_pn-zip = "Chen Qi " +RECIPE_MAINTAINER_pn-zip = "Armin Kuster " RECIPE_MAINTAINER_pn-zisofs-tools-native = "Alexander Kanavin " -RECIPE_MAINTAINER_pn-zlib = "Chen Qi " +RECIPE_MAINTAINER_pn-zlib = "Armin Kuster " diff --git a/import-layers/yocto-poky/meta-poky/conf/distro/include/poky-world-exclude.inc b/import-layers/yocto-poky/meta-poky/conf/distro/include/poky-world-exclude.inc index 5194ff143..a6635b63a 100644 --- a/import-layers/yocto-poky/meta-poky/conf/distro/include/poky-world-exclude.inc +++ b/import-layers/yocto-poky/meta-poky/conf/distro/include/poky-world-exclude.inc @@ -4,3 +4,5 @@ # qwt from meta-qt4, has poky-lsb QA warnings, qt4 for lsb only EXCLUDE_FROM_WORLD_pn-qwt = "1" +# python-pyqt from meta-qt4 requires sip from meta-oe +EXCLUDE_FROM_WORLD_pn-python-pyqt = "1" diff --git a/import-layers/yocto-poky/meta-poky/conf/distro/poky-tiny.conf b/import-layers/yocto-poky/meta-poky/conf/distro/poky-tiny.conf index 08bd49720..561566e2a 100644 --- a/import-layers/yocto-poky/meta-poky/conf/distro/poky-tiny.conf +++ b/import-layers/yocto-poky/meta-poky/conf/distro/poky-tiny.conf @@ -30,6 +30,7 @@ require conf/distro/poky.conf DISTRO = "poky-tiny" +DISTROOVERRIDES = "poky:poky-tiny" TCLIBC = "musl" # FIXME: consider adding a new "tiny" feature #DISTRO_FEATURES_append = " tiny" @@ -37,7 +38,7 @@ TCLIBC = "musl" # Distro config is evaluated after the machine config, so we have to explicitly # set the kernel provider to override a machine config. 
PREFERRED_PROVIDER_virtual/kernel = "linux-yocto-tiny" -PREFERRED_VERSION_linux-yocto-tiny ?= "4.4%" +PREFERRED_VERSION_linux-yocto-tiny ?= "4.9%" # We can use packagegroup-core-boot, but in the future we may need a new packagegroup-core-tiny #POKY_DEFAULT_EXTRA_RDEPENDS += "packagegroup-core-boot" @@ -115,7 +116,6 @@ RDEPENDS_${PN}-mtrace_pn-eglibc = "" PNBLACKLIST[build-appliance-image] = "not buildable with poky-tiny" PNBLACKLIST[core-image-base] = "not buildable with poky-tiny" PNBLACKLIST[core-image-clutter] = "not buildable with poky-tiny" -PNBLACKLIST[core-image-directfb] = "not buildable with poky-tiny" PNBLACKLIST[core-image-full-cmdline] = "not buildable with poky-tiny" PNBLACKLIST[core-image-lsb] = "not buildable with poky-tiny" PNBLACKLIST[core-image-lsb-dev] = "not buildable with poky-tiny" diff --git a/import-layers/yocto-poky/meta-poky/conf/distro/poky.conf b/import-layers/yocto-poky/meta-poky/conf/distro/poky.conf index aca38c3d6..621d84521 100644 --- a/import-layers/yocto-poky/meta-poky/conf/distro/poky.conf +++ b/import-layers/yocto-poky/meta-poky/conf/distro/poky.conf @@ -1,7 +1,7 @@ DISTRO = "poky" DISTRO_NAME = "Poky (Yocto Project Reference Distro)" -DISTRO_VERSION = "2.2.2" -DISTRO_CODENAME = "morty" +DISTRO_VERSION = "2.3.3" +DISTRO_CODENAME = "pyro" SDK_VENDOR = "-pokysdk" SDK_VERSION := "${@'${DISTRO_VERSION}'.replace('snapshot-${DATE}','snapshot')}" @@ -21,13 +21,13 @@ POKY_DEFAULT_EXTRA_RRECOMMENDS = "kernel-module-af-packet" DISTRO_FEATURES ?= "${DISTRO_FEATURES_DEFAULT} ${DISTRO_FEATURES_LIBC} ${POKY_DEFAULT_DISTRO_FEATURES}" -PREFERRED_VERSION_linux-yocto ?= "4.8%" -PREFERRED_VERSION_linux-yocto_qemux86 ?= "4.8%" -PREFERRED_VERSION_linux-yocto_qemux86-64 ?= "4.8%" -PREFERRED_VERSION_linux-yocto_qemuarm ?= "4.8%" -PREFERRED_VERSION_linux-yocto_qemumips ?= "4.8%" -PREFERRED_VERSION_linux-yocto_qemumips64 ?= "4.8%" -PREFERRED_VERSION_linux-yocto_qemuppc ?= "4.8%" +PREFERRED_VERSION_linux-yocto ?= "4.10%" +PREFERRED_VERSION_linux-yocto_qemux86 ?= "4.10%" +PREFERRED_VERSION_linux-yocto_qemux86-64 ?= "4.10%" +PREFERRED_VERSION_linux-yocto_qemuarm ?= "4.10%" +PREFERRED_VERSION_linux-yocto_qemumips ?= "4.10%" +PREFERRED_VERSION_linux-yocto_qemumips64 ?= "4.10%" +PREFERRED_VERSION_linux-yocto_qemuppc ?= "4.10%" SDK_NAME = "${DISTRO}-${TCLIBC}-${SDK_ARCH}-${IMAGE_BASENAME}-${TUNE_PKGARCH}" SDKPATH = "/opt/${DISTRO}/${SDK_VERSION}" @@ -45,8 +45,8 @@ DISTRO_EXTRA_RDEPENDS_append_qemux86-64 = " ${POKYQEMUDEPS}" TCLIBCAPPEND = "" -QEMU_TARGETS ?= "arm aarch64 i386 mips mipsel mips64 ppc x86_64" -# Other QEMU_TARGETS "mips64el sh4" +QEMU_TARGETS ?= "arm aarch64 i386 mips mipsel mips64 mips64el ppc x86_64" +# Other QEMU_TARGETS "sh4" PREMIRRORS ??= "\ bzr://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n \ @@ -70,22 +70,17 @@ https://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n" CONNECTIVITY_CHECK_URIS ?= "https://www.example.com/" SANITY_TESTED_DISTROS ?= " \ - poky-1.8 \n \ - poky-2.0 \n \ - poky-2.1 \n \ poky-2.2 \n \ - Ubuntu-14.04 \n \ - Ubuntu-14.10 \n \ - Ubuntu-15.04 \n \ - Ubuntu-15.10 \n \ - Ubuntu-16.04 \n \ - Fedora-22 \n \ - Fedora-23 \n \ - Fedora-24 \n \ - CentOSLinux-7.* \n \ - Debian-8.* \n \ - openSUSE-13.2 \n \ - SUSELINUX-42.1 \n \ + poky-2.3 \n \ + ubuntu-15.04 \n \ + ubuntu-16.04 \n \ + ubuntu-16.10 \n \ + fedora-24 \n \ + fedora-25 \n \ + centos-7 \n \ + debian-8 \n \ + opensuse-42.1 \n \ + opensuse-42.2 \n \ " # # OELAYOUT_ABI allows us to notify users when the format of TMPDIR changes in @@ -93,7 +88,7 @@ 
SANITY_TESTED_DISTROS ?= " \ # that breaks the format and have been previously discussed on the mailing list # with general agreement from the core team. # -OELAYOUT_ABI = "11" +OELAYOUT_ABI = "12" # add poky sanity bbclass INHERIT += "poky-sanity" diff --git a/import-layers/yocto-poky/meta-poky/conf/local.conf.sample b/import-layers/yocto-poky/meta-poky/conf/local.conf.sample index 365b6eb20..304ee012c 100644 --- a/import-layers/yocto-poky/meta-poky/conf/local.conf.sample +++ b/import-layers/yocto-poky/meta-poky/conf/local.conf.sample @@ -191,7 +191,7 @@ PATCHRESOLVE = "noop" # files and damages the build in ways which may not be easily recoverable. # It's necesary to monitor /tmp, if there is no space left the build will fail # with very exotic errors. -BB_DISKMON_DIRS = "\ +BB_DISKMON_DIRS ??= "\ STOPTASKS,${TMPDIR},1G,100K \ STOPTASKS,${DL_DIR},1G,100K \ STOPTASKS,${SSTATE_DIR},1G,100K \ diff --git a/import-layers/yocto-poky/meta-poky/conf/local.conf.sample.extended b/import-layers/yocto-poky/meta-poky/conf/local.conf.sample.extended index d407ffb49..0560de857 100644 --- a/import-layers/yocto-poky/meta-poky/conf/local.conf.sample.extended +++ b/import-layers/yocto-poky/meta-poky/conf/local.conf.sample.extended @@ -47,6 +47,13 @@ # less than 128MB RAM. #ENABLE_BINARY_LOCALE_GENERATION = "1" +# If GLIBC_SPLIT_LC_PACKAGES is set to a non-zero value, convert +# glibc-binary-localedata-XX-YY to be a meta package depending on +# glibc-binary-localedata-XX-YY-lc-address and so on. This enables +# saving quite some space if someone doesn't need LC_COLLATE for +# example. +#GLIBC_SPLIT_LC_PACKAGES = "1" + # Set GLIBC_GENERATE_LOCALES to the locales you wish to generate should you not # wish to perform the time-consuming step of generating all LIBC locales. 
# NOTE: If removing en_US.UTF-8 you will also need to uncomment, and set diff --git a/import-layers/yocto-poky/meta-poky/conf/toasterconf.json b/import-layers/yocto-poky/meta-poky/conf/toasterconf.json deleted file mode 100644 index 93fb9291f..000000000 --- a/import-layers/yocto-poky/meta-poky/conf/toasterconf.json +++ /dev/null @@ -1,90 +0,0 @@ -{ - "config": { - "MACHINE" : "qemux86", - "DISTRO" : "poky", - "DL_DIR" : "${TOPDIR}/../downloads", - "IMAGE_FSTYPES": "ext3 jffs2 tar.bz2", - "IMAGE_INSTALL_append": "", - "PACKAGE_CLASSES": "package_rpm", - "SSTATE_DIR" : "${TOPDIR}/../sstate-cache" - }, - "layersources": [ - { - "name": "Local Yocto Project", - "sourcetype": "local", - "apiurl": "../../", - "branches": ["HEAD" ], - "layers": [ - { - "name": "openembedded-core", - "local_path": "meta", - "vcs_url": "remote:origin", - "dirpath": "meta" - }, - { - "name": "meta-poky", - "local_path": "meta-poky", - "vcs_url": "remote:origin", - "dirpath": "meta-poky" - }, - { - "name": "meta-yocto-bsp", - "local_path": "meta-yocto-bsp", - "vcs_url": "remote:origin", - "dirpath": "meta-yocto-bsp" - } - - ] - }, - { - "name": "OpenEmbedded", - "sourcetype": "layerindex", - "apiurl": "http://layers.openembedded.org/layerindex/api/", - "branches": ["master"] - }, - { - "name": "Imported layers", - "sourcetype": "imported", - "apiurl": "", - "branches": ["master", "HEAD"] - - } - ], - "bitbake" : [ - { - "name": "master", - "giturl": "remote:origin", - "branch": "master", - "dirpath": "bitbake" - }, - { - "name": "HEAD", - "giturl": "remote:origin", - "branch": "HEAD", - "dirpath": "bitbake" - } - ], - - "defaultrelease": "master", - - "releases": [ - { - "name": "master", - "description": "Yocto Project master", - "bitbake": "master", - "branch": "master", - "defaultlayers": [ "openembedded-core", "meta-poky", "meta-yocto-bsp"], - "layersourcepriority": { "Imported layers": 99, "Local Yocto Project" : 10, "OpenEmbedded" : 0 }, - "helptext": "Toaster will run your builds using the tip of the Yocto Project master branch, where active development takes place. This is not a stable branch, so your builds might not work as expected." - }, - { - "name": "local", - "description": "Local Yocto Project", - "bitbake": "HEAD", - "branch": "HEAD", - "defaultlayers": [ "openembedded-core", "meta-poky", "meta-yocto-bsp"], - "layersourcepriority": { "Imported layers": 99, "Local Yocto Project" : 10, "OpenEmbedded" : 0 }, - "helptext": "Toaster will run your builds with the version of the Yocto Project you have cloned or downloaded to your computer." 
- } - ] -} diff --git a/import-layers/yocto-poky/meta-selftest/lib/devtool/bbpath.py b/import-layers/yocto-poky/meta-selftest/lib/devtool/bbpath.py new file mode 100644 index 000000000..5e8ffb3af --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/lib/devtool/bbpath.py @@ -0,0 +1,44 @@ +import argparse + +already_loaded = False +kept_context = None + +def plugin_name(filename): + return os.path.splitext(os.path.basename(filename))[0] + +def plugin_init(plugins): + global already_loaded + already_loaded = plugin_name(__file__) in (plugin_name(p.__name__) for p in plugins) + +def print_name(args, config, basepath, workspace): + print (__file__) + +def print_bbdir(args, config, basepath, workspace): + print (__file__.replace('/lib/devtool/bbpath.py','')) + +def print_registered(args, config, basepath, workspace): + global kept_context + print(kept_context.loaded) + +def multiloaded(args, config, basepath, workspace): + global already_loaded + print("yes" if already_loaded else "no") + +def register_commands(subparsers, context): + global kept_context + kept_context = context + if 'loaded' in context.__dict__: + context.loaded += 1 + else: + context.loaded = 1 + + def addparser(name, helptxt, func): + parser = subparsers.add_parser(name, help=helptxt, + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.set_defaults(func=func) + return parser + + addparser('pluginfile', 'Print the filename of this plugin', print_name) + addparser('bbdir', 'Print the BBPATH directory of this plugin', print_bbdir) + addparser('count', 'How many times have this plugin been registered.', print_registered) + addparser('multiloaded', 'How many times have this plugin been initialized', multiloaded) diff --git a/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/cases/selftest.json b/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/cases/selftest.json new file mode 100644 index 000000000..e5ae46ecd --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/cases/selftest.json @@ -0,0 +1,6 @@ +{ + "test_install_package": { + "pkg": "socat", + "rm": true + } +} diff --git a/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/cases/selftest.py b/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/cases/selftest.py new file mode 100644 index 000000000..e4985a6ed --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/cases/selftest.py @@ -0,0 +1,73 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.runtime.cases.dnf import DnfTest +from oeqa.utils.httpserver import HTTPService + +class Selftest(OERuntimeTestCase): + + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_install_package(self): + """ + Summary: Check basic package installation functionality. + Expected: 1. Before the test socat must be installed using scp. + 2. After the test socat must be uninstalled using ssh. + This can't be checked in this test. + Product: oe-core + Author: Mariano Lopez + """ + + (status, output) = self.target.run("socat -V") + self.assertEqual(status, 0, msg="socat is not installed") + + @OETestDepends(['selftest.Selftest.test_install_package']) + def test_verify_uninstall(self): + """ + Summary: Check basic package installation functionality. + Expected: 1. test_install_package must uninstall socat. + This test is just to verify that. 
+ Product: oe-core + Author: Mariano Lopez + """ + + (status, output) = self.target.run("socat -V") + self.assertNotEqual(status, 0, msg="socat is still installed") + + +class DnfSelftest(DnfTest): + + @classmethod + def setUpClass(cls): + cls.repo_server = HTTPService(os.path.join(cls.tc.td['WORKDIR'], 'oe-rootfs-repo'), + cls.tc.target.server_ip) + cls.repo_server.start() + + @classmethod + def tearDownClass(cls): + cls.repo_server.stop() + + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_verify_package_feeds(self): + """ + Summary: Check correct setting of PACKAGE_FEED_URIS var + Expected: 1. Feeds were correctly set for dnf + 2. Update recovers packages from host's repo + Author: Humberto Ibarra + Author: Alexander Kanavin + """ + # When we created an image, we had to supply fake ip and port + # for the feeds. Now we can patch the real ones into the config file. + import tempfile + temp_file = tempfile.TemporaryDirectory(prefix="oeqa-remotefeeds-").name + self.tc.target.copyFrom("/etc/yum.repos.d/oe-remote-repo.repo", temp_file) + fixed_config = open(temp_file, "r").read().replace("bogus_ip", self.tc.target.server_ip).replace("bogus_port", str(self.repo_server.port)) + open(temp_file, "w").write(fixed_config) + self.tc.target.copyTo(temp_file, "/etc/yum.repos.d/oe-remote-repo.repo") + + import re + output_makecache = self.dnf('makecache') + self.assertTrue(re.match(r".*Metadata cache created", output_makecache, re.DOTALL) is not None, msg = "dnf makecache failed: %s" %(output_makecache)) + + output_repoinfo = self.dnf('repoinfo') + matchobj = re.match(r".*Repo-pkgs\s*:\s*(?P[0-9]+)", output_repoinfo, re.DOTALL) + self.assertTrue(matchobj is not None, msg = "Could not find the amount of packages in dnf repoinfo output: %s" %(output_repoinfo)) + self.assertTrue(int(matchobj.group('n_pkgs')) > 0, msg = "Amount of remote packages is not more than zero: %s\n" %(output_repoinfo)) diff --git a/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/selftest.json b/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/selftest.json deleted file mode 100644 index e5ae46ecd..000000000 --- a/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/selftest.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "test_install_package": { - "pkg": "socat", - "rm": true - } -} diff --git a/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/selftest.py b/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/selftest.py deleted file mode 100644 index a7e58ab3d..000000000 --- a/import-layers/yocto-poky/meta-selftest/lib/oeqa/runtime/selftest.py +++ /dev/null @@ -1,55 +0,0 @@ -import os - -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.commands import runCmd -from oeqa.utils.decorators import * - -class Selftest(oeRuntimeTest): - - @skipUnlessPassed("test_ssh") - @tag("selftest_package_install") - def test_install_package(self): - """ - Summary: Check basic package installation functionality. - Expected: 1. Before the test socat must be installed using scp. - 2. After the test socat must be unistalled using ssh. - This can't be checked in this test. - Product: oe-core - Author: Mariano Lopez - """ - - (status, output) = self.target.run("socat -V") - self.assertEqual(status, 0, msg="socat is not installed") - - @skipUnlessPassed("test_install_package") - @tag("selftest_package_install") - def test_verify_unistall(self): - """ - Summary: Check basic package installation functionality. - Expected: 1. test_install_package must unistall socat. - This test is just to verify that. 
- Product: oe-core - Author: Mariano Lopez - """ - - (status, output) = self.target.run("socat -V") - self.assertNotEqual(status, 0, msg="socat is still installed") - - @tag("selftest_sdk") - def test_sdk(self): - - result = runCmd("env -0") - sdk_path = search_sdk_path(result.output) - self.assertTrue(sdk_path, msg="Can't find SDK path") - - tar_cmd = os.path.join(sdk_path, "tar") - result = runCmd("%s --help" % tar_cmd) - -def search_sdk_path(env): - for line in env.split("\0"): - (key, _, value) = line.partition("=") - if key == "PATH": - for path in value.split(":"): - if "pokysdk" in path: - return path - return "" diff --git a/import-layers/yocto-poky/meta-selftest/lib/recipetool/bbpath.py b/import-layers/yocto-poky/meta-selftest/lib/recipetool/bbpath.py new file mode 100644 index 000000000..783b2dc76 --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/lib/recipetool/bbpath.py @@ -0,0 +1,41 @@ +import argparse + +already_loaded = False +register_count = 0 + +def plugin_name(filename): + return os.path.splitext(os.path.basename(filename))[0] + +def plugin_init(plugins): + global already_loaded + already_loaded = plugin_name(__file__) in (plugin_name(p.__name__) for p in plugins) + +def print_name(opts): + print (__file__) + +def print_bbdir(opts): + print (__file__.replace('/lib/recipetool/bbpath.py','')) + +def print_registered(opts): + #global kept_context + #print(kept_context.loaded) + print ("1") + +def multiloaded(opts): + global already_loaded + print("yes" if already_loaded else "no") + +def register_commands(subparsers): + global register_count + register_count += 1 + + def addparser(name, helptxt, func): + parser = subparsers.add_parser(name, help=helptxt, + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.set_defaults(func=func) + return parser + + addparser('pluginfile', 'Print the filename of this plugin', print_name) + addparser('bbdir', 'Print the BBPATH directory of this plugin', print_bbdir) + addparser('count', 'How many times have this plugin been registered.', print_registered) + addparser('multiloaded', 'How many times have this plugin been initialized', multiloaded) diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/aspell/aspell_0.0.0.1.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/aspell/aspell_0.0.0.1.bb index 20a0ab7d3..073cf5665 100644 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/aspell/aspell_0.0.0.1.bb +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/aspell/aspell_0.0.0.1.bb @@ -14,6 +14,8 @@ SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz" SRC_URI[md5sum] = "e66a9c9af6a60dc46134fdacf6ce97d7" SRC_URI[sha256sum] = "f52583a83a63633701c5f71db3dc40aab87b7f76b29723aeb27941eff42df6e1" +EXCLUDE_FROM_WORLD = "1" + PACKAGECONFIG ??= "" PACKAGECONFIG[curses] = "--enable-curses,--disable-curses,ncurses" diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/container-image/container-image-testpkg.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/container-image/container-image-testpkg.bb new file mode 100644 index 000000000..f8dd2290b --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/container-image/container-image-testpkg.bb @@ -0,0 +1,8 @@ +LICENSE = "MIT" + +INHIBIT_DEFAULT_DEPS = "1" + +do_install_append() { + install -d ${D}${bindir} + touch ${D}${bindir}/theapp +} diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/container-image/container-test-image.bb 
b/import-layers/yocto-poky/meta-selftest/recipes-test/container-image/container-test-image.bb new file mode 100644 index 000000000..d5f939c6e --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/container-image/container-test-image.bb @@ -0,0 +1,8 @@ +IMAGE_INSTALL += "container-image-testpkg" + +LICENSE = "MIT" + +IMAGE_FSTYPES = "container" +IMAGE_LINGUAS = "" + +inherit core-image diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb new file mode 100644 index 000000000..3f7123cda --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly.bb @@ -0,0 +1,7 @@ +LICENSE = "CLOSED" +INHIBIT_DEFAULT_DEPS = "1" + +SRC_URI = "file://file1 \ + file://file2" + +EXCLUDE_FROM_WORLD = "1" diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly/file1 b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly/file1 new file mode 100644 index 000000000..f4bdcfc83 --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly/file1 @@ -0,0 +1 @@ +The first file diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly/file2 b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly/file2 new file mode 100644 index 000000000..a7e2414bd --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-localonly/file2 @@ -0,0 +1 @@ +The second file diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-patch-gz.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-patch-gz.bb new file mode 100644 index 000000000..e45ee9f60 --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-patch-gz.bb @@ -0,0 +1,17 @@ +LICENSE = "GPLv2+" +LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f" + +DEPENDS = "libxres libxext virtual/libx11 ncurses" + +SRC_URI = "http://downloads.yoctoproject.org/releases/xrestop/xrestop-0.4.tar.gz \ + file://readme.patch.gz \ + " + +S = "${WORKDIR}/xrestop-0.4" + +SRC_URI[md5sum] = "d8a54596cbaf037e62b80c4585a3ca9b" +SRC_URI[sha256sum] = "67c2fc94a7ecedbaae0d1837e82e93d1d98f4a6d759828860e552119af3ce257" + +inherit autotools pkgconfig + +EXCLUDE_FROM_WORLD = "1" diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-patch-gz/readme.patch.gz b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-patch-gz/readme.patch.gz new file mode 100644 index 000000000..4752492cc Binary files /dev/null and b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-patch-gz/readme.patch.gz differ diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir.bb new file mode 100644 index 000000000..3f6956524 --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir.bb @@ -0,0 +1,9 @@ +LICENSE = "CLOSED" +INHIBIT_DEFAULT_DEPS = "1" + +SRC_URI = "file://devtool-test-subdir.tar.gz \ + file://testfile;subdir=${BPN}" + +S = "${WORKDIR}/${BPN}" + +EXCLUDE_FROM_WORLD = "1" diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir/devtool-test-subdir.tar.gz 
b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir/devtool-test-subdir.tar.gz new file mode 100644 index 000000000..3d44f803c Binary files /dev/null and b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir/devtool-test-subdir.tar.gz differ diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir/testfile b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir/testfile new file mode 100644 index 000000000..12b519c0d --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-test-subdir/testfile @@ -0,0 +1 @@ +Modified version diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test1_1.5.3.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test1_1.5.3.bb index e93b0d551..4049be292 100644 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test1_1.5.3.bb +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test1_1.5.3.bb @@ -12,5 +12,7 @@ PR = "r5" S = "${WORKDIR}/pv-${PV}" +EXCLUDE_FROM_WORLD = "1" + inherit autotools diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test1_1.5.3.bb.upgraded b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test1_1.5.3.bb.upgraded index afcc4aa13..42c070506 100644 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test1_1.5.3.bb.upgraded +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test1_1.5.3.bb.upgraded @@ -10,5 +10,7 @@ SRC_URI[sha256sum] = "9dd45391806b0ed215abee4c5ac1597d018c386fe9c1f5afd2f6bc3b07 S = "${WORKDIR}/pv-${PV}" +EXCLUDE_FROM_WORLD = "1" + inherit autotools diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb index 9bfce0afe..450636ef1 100644 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb @@ -15,4 +15,6 @@ SRC_URI = "git://git.yoctoproject.org/dbus-wait" S = "${WORKDIR}/git" +EXCLUDE_FROM_WORLD = "1" + inherit autotools pkgconfig diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb.upgraded b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb.upgraded index 9b947ed9e..0d2e19e5a 100644 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb.upgraded +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/devtool/devtool-upgrade-test2_git.bb.upgraded @@ -14,4 +14,6 @@ SRC_URI = "git://git.yoctoproject.org/dbus-wait" S = "${WORKDIR}/git" +EXCLUDE_FROM_WORLD = "1" + inherit autotools pkgconfig diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/error/error.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/error/error.bb index a7bdecf29..3c22e7cbe 100644 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/error/error.bb +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/error/error.bb @@ -1,7 +1,6 @@ SUMMARY = "Error Test case that fails on do_compile" DESCRIPTION = "This generates a compile time error to be used to for testing." 
LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" INHIBIT_DEFAULT_DEPS = "1" EXCLUDE_FROM_WORLD = "1" diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/images/oe-selftest-image.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/images/oe-selftest-image.bb index f17094c5d..5d4d10eef 100644 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/images/oe-selftest-image.bb +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/images/oe-selftest-image.bb @@ -1,6 +1,6 @@ SUMMARY = "An image used during oe-selftest tests" -IMAGE_INSTALL = "packagegroup-core-boot ${ROOTFS_PKGMANAGE_BOOTSTRAP} dropbear" +IMAGE_INSTALL = "packagegroup-core-boot dropbear" IMAGE_FEATURES = "debug-tweaks" IMAGE_LINGUAS = " " diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.bb index 9e93b8e1a..e1da203b5 100644 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.bb +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.bb @@ -2,18 +2,14 @@ SUMMARY = "An example of partitioned image." SRC_URI = "file://${FILE_DIRNAME}/${BPN}.wks" -IMAGE_INSTALL = "packagegroup-core-boot ${ROOTFS_PKGMANAGE_BOOTSTRAP}" +IMAGE_INSTALL = "packagegroup-core-boot" IMAGE_FSTYPES = "wic" -DEPENDS = "syslinux syslinux-native dosfstools-native mtools-native gptfdisk-native" +WKS_FILE_DEPENDS = "syslinux syslinux-native dosfstools-native mtools-native gptfdisk-native" LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" -# core-image-minimal is referenced in .wks, so we need its rootfs -# to be ready before our rootfs -do_rootfs[depends] += "core-image-minimal:do_image core-image-minimal:do_rootfs_wicenv" - IMAGE_ROOTFS_EXTRA_SPACE = "2000" inherit image diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.wks b/import-layers/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.wks index d55075d50..9410b684b 100644 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.wks +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/images/wic-image-minimal.wks @@ -2,9 +2,8 @@ # long-description: This image contains boot partition and 3 rootfs partitions # created from core-image-minimal and wic-image-minimal image recipes. 
-part /boot --source bootimg-pcbios --ondisk vda --label boot --active --align 1024 -part / --source rootfs --ondisk vda --fstype=ext4 --label platform --align 1024 --use-uuid -part /mnt --source rootfs --rootfs-dir=core-image-minimal --ondisk vda --fstype=ext4 --label core --align 1024 -part backup --source rootfs --rootfs-dir=wic-image-minimal --ondisk vda --fstype=ext4 --label backup --align 1024 +part /boot --source bootimg-pcbios --ondisk sda --label boot --active --align 1024 +part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid +part /mnt --source rootfs --rootfs-dir=wic-image-minimal --ondisk sda --fstype=ext4 --label core --align 1024 bootloader --ptable gpt --timeout=0 --append="rootwait console=tty0" diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/m4/m4_%.bbappend b/import-layers/yocto-poky/meta-selftest/recipes-test/m4/m4_%.bbappend new file mode 100644 index 000000000..205720982 --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/m4/m4_%.bbappend @@ -0,0 +1,2 @@ +# This bbappend is used to alter the recipe using the test_recipe.inc file created by tests. +include test_recipe.inc diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/m4/m4_1.4.17.bbappend b/import-layers/yocto-poky/meta-selftest/recipes-test/m4/m4_1.4.17.bbappend deleted file mode 100644 index 205720982..000000000 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/m4/m4_1.4.17.bbappend +++ /dev/null @@ -1,2 +0,0 @@ -# This bbappend is used to alter the recipe using the test_recipe.inc file created by tests. -include test_recipe.inc diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/postinst/postinst_1.0.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/postinst/postinst_1.0.bb new file mode 100644 index 000000000..6d4973427 --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/postinst/postinst_1.0.bb @@ -0,0 +1,126 @@ +LICENSE = "MIT" +ALLOW_EMPTY_${PN}-at-rootfs = "1" +ALLOW_EMPTY_${PN}-delayed-a = "1" +ALLOW_EMPTY_${PN}-delayed-b = "1" +ALLOW_EMPTY_${PN}-delayed-d = "1" +ALLOW_EMPTY_${PN}-delayed-p = "1" +ALLOW_EMPTY_${PN}-delayed-t = "1" + +PACKAGES += "${PN}-at-rootfs ${PN}-delayed-a ${PN}-delayed-b ${PN}-delayed-d ${PN}-delayed-p ${PN}-delayed-t" +PROVIDES += "${PN}-at-rootfs ${PN}-delayed-a ${PN}-delayed-b ${PN}-delayed-d ${PN}-delayed-p ${PN}-delayed-t" +FILES_${PN}-delayed-a = "" +FILES_${PN}-delayed-b = "" +FILES_${PN}-delayed-d = "" +FILES_${PN}-delayed-p = "" +FILES_${PN}-delayed-t = "" + +# Runtime dependencies +RDEPENDS_${PN}-delayed-a = "${PN}-at-rootfs" +RDEPENDS_${PN}-delayed-b = "${PN}-delayed-a" +RDEPENDS_${PN}-delayed-d = "${PN}-delayed-b" +RDEPENDS_${PN}-delayed-p = "${PN}-delayed-d" +RDEPENDS_${PN}-delayed-t = "${PN}-delayed-p" + +# Main recipe post-install +pkg_postinst_${PN}-at-rootfs () { + tfile="/etc/postinsta-test" + touch "$D"/this-was-created-at-rootfstime + if test "x$D" != "x" then + # Need to run on first boot + exit 1 + else + echo "lets write postinst" > $tfile + fi +} + +# Dependency recipes post-installs +pkg_postinst_${PN}-delayed-a () { + efile="/etc/postinst-test" + tfile="/etc/postinsta-test" + rdeps="postinst" + + if test "x$D" != "x"; then + # Need to run on first boot + exit 1 + else + touch /etc/this-was-created-at-first-boot + if test -e $efile ; then + echo 'success' > $tfile + else + echo 'fail to install $rdeps first!' 
>&2 + exit 1 + fi + fi +} + +pkg_postinst_${PN}-delayed-b () { + efile="/etc/postinsta-test" + tfile="/etc/postinstb-test" + rdeps="postinsta" + + if test "x$D" != "x"; then + # Need to run on first boot + exit 1 + else + if test -e $efile ; then + echo 'success' > $tfile + else + echo 'fail to install $rdeps first!' >&2 + exit 1 + fi + fi +} + +pkg_postinst_${PN}-delayed-d () { + efile="/etc/postinstb-test" + tfile="/etc/postinstd-test" + rdeps="postinstb" + + if test "x$D" != "x"; then + # Need to run on first boot + exit 1 + else + if test -e $efile ; then + echo 'success' > $tfile + else + echo 'fail to install $rdeps first!' >&2 + exit 1 + fi + fi +} + +pkg_postinst_${PN}-delayed-p () { + efile="/etc/postinstd-test" + tfile="/etc/postinstp-test" + rdeps="postinstd" + + if test "x$D" != "x"; then + # Need to run on first boot + exit 1 + else + if test -e $efile ; then + echo 'success' > $tfile + else + echo 'fail to install $rdeps first!' >&2 + exit 1 + fi + fi +} + +pkg_postinst_${PN}-delayed-t () { + efile="/etc/postinstp-test" + tfile="/etc/postinstt-test" + rdeps="postinstp" + + if test "x$D" != "x"; then + # Need to run on first boot + exit 1 + else + if test -e $efile ; then + echo 'success' > $tfile + else + echo 'fail to install $rdeps first!' >&2 + exit 1 + fi + fi +} diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/recipetool/selftest-recipetool-appendfile.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/recipetool/selftest-recipetool-appendfile.bb index 7d0a040be..7375c4793 100644 --- a/import-layers/yocto-poky/meta-selftest/recipes-test/recipetool/selftest-recipetool-appendfile.bb +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/recipetool/selftest-recipetool-appendfile.bb @@ -15,6 +15,8 @@ SRC_URI = "file://installscript.sh \ file://selftest-replaceme-inst-todir-globfile \ file://selftest-replaceme-inst-func" +EXCLUDE_FROM_WORLD = "1" + install_extrafunc() { install -m 0644 ${WORKDIR}/selftest-replaceme-inst-func ${D}${datadir}/selftest-replaceme-inst-func } diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/selftest-ed/selftest-ed_0.5.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/selftest-ed/selftest-ed_0.5.bb new file mode 100644 index 000000000..8e0d1cdd8 --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/recipes-test/selftest-ed/selftest-ed_0.5.bb @@ -0,0 +1,22 @@ +SUMMARY = "Line-oriented text editor -- selftest GPLv2 version" +HOMEPAGE = "http://www.gnu.org/software/ed/" +SECTION = "base" +LICENSE = "GPLv2+" +LIC_FILES_CHKSUM = "file://COPYING;md5=6ddd5335ef96fb858a138230af773710 \ + file://main.c;beginline=1;endline=17;md5=36d4b85e5ae9028e918d1cc775c2475e" + +PR = "r2" +SRC_URI = "${SAVANNAH_GNU_MIRROR}/ed/ed-${PV}.tar.bz2" + +SRC_URI[md5sum] = "4ee21e9dcc9b5b6012c23038734e1632" +SRC_URI[sha256sum] = "edef2bbde0fbf0d88232782a0eded323f483a0519d6fde9a3b1809056fd35f3e" + +inherit autotools texinfo + +S = "${WORKDIR}/ed-${PV}" + +EXTRA_OECONF = "'CC=${CC}' 'CXX=${CXX}' 'CFLAGS=${CFLAGS}' 'CXXFLAGS=${CXXFLAGS}' 'CPPFLAGS=${CPPFLAGS}' 'LDFLAGS=${LDFLAGS}'" + +CONFIGUREOPTS_remove = "--disable-dependency-tracking" +CONFIGUREOPTS_remove = "--disable-silent-rules" +EXTRA_OECONF_remove = "--disable-static" diff --git a/import-layers/yocto-poky/meta-selftest/recipes-test/selftest-ed/selftest-ed_1.14.1.bb b/import-layers/yocto-poky/meta-selftest/recipes-test/selftest-ed/selftest-ed_1.14.1.bb new file mode 100644 index 000000000..62931c404 --- /dev/null +++ 
b/import-layers/yocto-poky/meta-selftest/recipes-test/selftest-ed/selftest-ed_1.14.1.bb @@ -0,0 +1,35 @@ +SUMMARY = "Line-oriented text editor -- selftest variant" +HOMEPAGE = "http://www.gnu.org/software/ed/" + +LICENSE = "GPLv3+" +LIC_FILES_CHKSUM = "file://COPYING;md5=0c7051aef9219dc7237f206c5c4179a7 \ + file://ed.h;endline=20;md5=4e36b7a40e137f42aee718165590d125 \ + file://main.c;endline=17;md5=c5b8f78f115df187af76868a2aead16a" + +SECTION = "base" + +# LSB states that ed should be in /bin/ +bindir = "${base_bindir}" + +SRC_URI = "${GNU_MIRROR}/ed/ed-${PV}.tar.lz" + +SRC_URI[md5sum] = "7f4a54fa7f366479f03654b8af645fd0" +SRC_URI[sha256sum] = "ffb97eb8f2a2b5a71a9b97e3872adce953aa1b8958e04c5b7bf11d556f32552a" + +S = "${WORKDIR}/ed-${PV}" + +EXTRA_OEMAKE = "-e MAKEFLAGS=" + +inherit texinfo + +do_configure() { + ${S}/configure +} + +do_install() { + oe_runmake 'DESTDIR=${D}' install + # Info dir listing isn't interesting at this point so remove it if it exists. + if [ -e "${D}${infodir}/dir" ]; then + rm -f ${D}${infodir}/dir + fi +} diff --git a/import-layers/yocto-poky/meta-selftest/wic/test_rawcopy_plugin.wks.in b/import-layers/yocto-poky/meta-selftest/wic/test_rawcopy_plugin.wks.in new file mode 100644 index 000000000..83be4be91 --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/wic/test_rawcopy_plugin.wks.in @@ -0,0 +1,6 @@ +# short-description: This file is used in oe-selftest wic module to test rawcopy plugin + +part /boot --active --source bootimg-pcbios +part / --source rawcopy --sourceparams="file=core-image-minimal-${MACHINE}.ext4" --use-uuid + +bootloader --timeout=0 --append="console=ttyS0,115200n8" diff --git a/import-layers/yocto-poky/meta-selftest/wic/wictestdisk.wks b/import-layers/yocto-poky/meta-selftest/wic/wictestdisk.wks new file mode 100644 index 000000000..d4de24d83 --- /dev/null +++ b/import-layers/yocto-poky/meta-selftest/wic/wictestdisk.wks @@ -0,0 +1,7 @@ +# short-description: image for use in machine agnostic wic test cases + +# /boot is intentionally an empty partition +part /boot --ondisk sda --label boot --active --align 1024 --size 16 +part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid + +# bootloader is intentionally left out diff --git a/import-layers/yocto-poky/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb b/import-layers/yocto-poky/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb index d3c02d5d6..7c5b9d050 100644 --- a/import-layers/yocto-poky/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb +++ b/import-layers/yocto-poky/meta-skeleton/recipes-skeleton/useradd/useradd-example.bb @@ -3,8 +3,7 @@ DESCRIPTION = "This recipe serves as an example for using features from useradd. SECTION = "examples" PR = "r1" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" +LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" SRC_URI = "file://file1 \ file://file2 \ @@ -15,6 +14,8 @@ S = "${WORKDIR}" PACKAGES =+ "${PN}-user3" +EXCLUDE_FROM_WORLD = "1" + inherit useradd # You must set USERADD_PACKAGES when you inherit useradd. 
This diff --git a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/beaglebone.conf b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/beaglebone.conf index c487bd861..4a90ba5ab 100644 --- a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/beaglebone.conf +++ b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/beaglebone.conf @@ -4,10 +4,8 @@ PREFERRED_PROVIDER_virtual/xserver ?= "xserver-xorg" XSERVER ?= "xserver-xorg \ - xf86-input-evdev \ - xf86-input-mouse \ xf86-video-modesetting \ - xf86-input-keyboard" + " MACHINE_EXTRA_RRECOMMENDS = " kernel-modules kernel-devicetree" @@ -16,16 +14,16 @@ EXTRA_IMAGEDEPENDS += "u-boot" DEFAULTTUNE ?= "cortexa8hf-neon" include conf/machine/include/tune-cortexa8.inc -IMAGE_FSTYPES += "tar.bz2 jffs2 wic" +IMAGE_FSTYPES += "tar.bz2 jffs2 wic wic.bmap" EXTRA_IMAGECMD_jffs2 = "-lnp " -WKS_FILE = "sdimage-bootpart.wks" +WKS_FILE ?= "beaglebone.wks" IMAGE_INSTALL_append = " kernel-devicetree kernel-image-zimage" do_image_wic[depends] += "mtools-native:do_populate_sysroot dosfstools-native:do_populate_sysroot" SERIAL_CONSOLE = "115200 ttyO0" PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto" -PREFERRED_VERSION_linux-yocto ?= "4.8%" +PREFERRED_VERSION_linux-yocto ?= "4.12%" KERNEL_IMAGETYPE = "zImage" KERNEL_DEVICETREE = "am335x-bone.dtb am335x-boneblack.dtb" @@ -33,7 +31,7 @@ KERNEL_EXTRA_ARGS += "LOADADDR=${UBOOT_ENTRYPOINT}" SPL_BINARY = "MLO" UBOOT_SUFFIX = "img" -UBOOT_MACHINE = "am335x_evm_config" +UBOOT_MACHINE = "am335x_boneblack_config" UBOOT_ENTRYPOINT = "0x80008000" UBOOT_LOADADDRESS = "0x80008000" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/edgerouter.conf b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/edgerouter.conf index 720a94708..ab5a6fc40 100644 --- a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/edgerouter.conf +++ b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/edgerouter.conf @@ -11,16 +11,16 @@ KERNEL_ALT_IMAGETYPE = "vmlinux.bin" KERNEL_IMAGE_STRIP_EXTRA_SECTIONS = ".comment" PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto" -PREFERRED_VERSION_linux-yocto ?= "4.8%" +PREFERRED_VERSION_linux-yocto ?= "4.12%" SERIAL_CONSOLE = "115200 ttyS0" USE_VT ?= "0" MACHINE_EXTRA_RRECOMMENDS = " kernel-modules" -IMAGE_FSTYPES ?= "jffs2 tar.bz2 wic" +IMAGE_FSTYPES ?= "jffs2 tar.bz2 wic wic.bmap" JFFS2_ERASEBLOCK = "0x10000" -WKS_FILE ?= "sdimage-bootpart.wks" +WKS_FILE ?= "edgerouter.wks" IMAGE_BOOT_FILES ?= "vmlinux;vmlinux.64" do_image_wic[depends] += "mtools-native:do_populate_sysroot dosfstools-native:do_populate_sysroot" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/genericx86-64.conf b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/genericx86-64.conf index d843fd215..bfedd84c3 100644 --- a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/genericx86-64.conf +++ b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/genericx86-64.conf @@ -7,6 +7,6 @@ DEFAULTTUNE ?= "core2-64" require conf/machine/include/tune-core2.inc require conf/machine/include/genericx86-common.inc -PREFERRED_VERSION_linux-yocto_genericx86-64 ?= "4.8%" +PREFERRED_VERSION_linux-yocto ?= "4.10%" SERIAL_CONSOLES_CHECK = "ttyS0" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/genericx86.conf b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/genericx86.conf index 04a7e0b6a..af03b8689 100644 --- a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/genericx86.conf +++ b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/genericx86.conf @@ -9,4 +9,4 @@ require 
conf/machine/include/genericx86-common.inc MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS += "gma500-gfx-check" -PREFERRED_VERSION_linux-yocto_genericx86 ?= "4.8%" +PREFERRED_VERSION_linux-yocto ?= "4.10%" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/include/genericx86-common.inc b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/include/genericx86-common.inc index 2e59a95b5..e35685cb2 100644 --- a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/include/genericx86-common.inc +++ b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/include/genericx86-common.inc @@ -3,7 +3,6 @@ require conf/machine/include/qemuboot-x86.inc MACHINE_FEATURES += "wifi efi pcbios" PREFERRED_PROVIDER_virtual/kernel ?= "linux-yocto" -PREFERRED_VERSION_linux-yocto ?= "4.4%" PREFERRED_PROVIDER_virtual/xserver ?= "xserver-xorg" XSERVER ?= "${XSERVER_X86_BASE} \ ${XSERVER_X86_EXT} \ @@ -20,7 +19,7 @@ GLIBC_ADDONS = "nptl" EXTRA_OECONF_append_pn-matchbox-panel-2 = " --with-battery=acpi" -IMAGE_FSTYPES += "wic" -WKS_FILE ?= "mkefidisk.wks" +IMAGE_FSTYPES += "wic wic.bmap" +WKS_FILE ?= "genericx86.wks" do_image_wic[depends] += "gptfdisk-native:do_populate_sysroot" do_image_wic[recrdeptask] += "do_bootimg" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/mpc8315e-rdb.conf b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/mpc8315e-rdb.conf index a2ff07c55..6e2d31652 100644 --- a/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/mpc8315e-rdb.conf +++ b/import-layers/yocto-poky/meta-yocto-bsp/conf/machine/mpc8315e-rdb.conf @@ -8,18 +8,17 @@ require conf/machine/include/tune-ppce300c3.inc KERNEL_IMAGETYPE = "uImage" EXTRA_IMAGEDEPENDS += "u-boot" -UBOOT_MACHINE_mpc8315e-rdb = "MPC8315ERDB_config" +UBOOT_MACHINE = "MPC8315ERDB_config" SERIAL_CONSOLE = "115200 ttyS0" MACHINE_FEATURES = "keyboard pci ext2 ext3 serial" -PREFERRED_VERSION_linux-yocto ?= "4.8%" +PREFERRED_VERSION_linux-yocto ?= "4.12%" PREFERRED_PROVIDER_virtual/kernel = "linux-yocto" PREFERRED_PROVIDER_virtual/xserver ?= "xserver-xorg" XSERVER ?= "xserver-xorg \ - xf86-input-evdev \ xf86-video-fbdev" UBOOT_ENTRYPOINT = "0x00000000" @@ -30,3 +29,7 @@ MACHINE_EXTRA_RRECOMMENDS = " kernel-modules" IMAGE_FSTYPES ?= "jffs2 tar.bz2" JFFS2_ERASEBLOCK = "0x4000" + +IMAGE_FSTYPES += "wic" +WKS_FILE ?= 'mpc8315e-rdb.wks' +IMAGE_BOOT_FILES ?= "u-boot.bin uImage uImage-mpc8315erdb.dtb;dtb" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/lib/oeqa/selftest/gummiboot.py b/import-layers/yocto-poky/meta-yocto-bsp/lib/oeqa/selftest/gummiboot.py deleted file mode 100644 index 00aa36f60..000000000 --- a/import-layers/yocto-poky/meta-yocto-bsp/lib/oeqa/selftest/gummiboot.py +++ /dev/null @@ -1,83 +0,0 @@ -from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu -from oeqa.utils.decorators import testcase -import re -import os -import sys -import logging - - -class Gummiboot(oeSelfTest): - - def _common_setup(self): - """ - Common setup for test cases: 1101, 1103 - """ - - # Set EFI_PROVIDER = "gummiboot" and MACHINE = "genericx86-64" in conf/local.conf - features = 'EFI_PROVIDER = "gummiboot"\n' - features += 'MACHINE = "genericx86-64"' - self.append_config(features) - - def _common_build(self): - """ - Common build for test cases: 1101, 1103 - """ - - # Build a genericx86-64/efi gummiboot image - bitbake('syslinux syslinux-native parted-native dosfstools-native mtools-native core-image-minimal') - - - @testcase(1101) - def test_efi_gummiboot_images_can_be_built(self): - """ - 
Summary: Check if efi/gummiboot images can be built - Expected: 1. File gummibootx64.efi should be available in build/tmp/deploy/images/genericx86-64 - 2. Efi/gummiboot images can be built - Product: oe-core - Author: Ionut Chisanovici - AutomatedBy: Daniel Istrate - """ - - # We'd use DEPLOY_DIR_IMAGE here, except that we need its value for - # MACHINE="genericx86-64 which is probably not the one configured - gummibootfile = os.path.join(get_bb_var('DEPLOY_DIR'), 'images', 'genericx86-64', 'gummibootx64.efi') - - self._common_setup() - - # Ensure we're actually testing that this gets built and not that - # it was around from an earlier build - bitbake('-c cleansstate gummiboot') - runCmd('rm -f %s' % gummibootfile) - - self._common_build() - - found = os.path.isfile(gummibootfile) - self.assertTrue(found, 'Gummiboot file %s not found' % gummibootfile) - - @testcase(1103) - def test_wic_command_can_create_efi_gummiboot_installation_images(self): - """ - Summary: Check that wic command can create efi/gummiboot installation images - Expected: A .direct file in folder /var/tmp/wic/ must be created. - Product: oe-core - Author: Ionut Chisanovici - AutomatedBy: Daniel Istrate - """ - - self._common_setup() - self._common_build() - - # Create efi/gummiboot installation images - wic_create_cmd = 'wic create mkgummidisk -e core-image-minimal' - result = runCmd(wic_create_cmd) - - # Find file written by wic from output - res = re.search('(/var/tmp/wic/.*\.direct)', result.output) - if res: - direct_file = res.group(1) - # Check it actually exists - if not os.path.exists(direct_file): - self.fail('wic reported direct file "%s" does not exist; wic output:\n%s' % (direct_file, result.output)) - else: - self.fail('No .direct file reported in wic output:\n%s' % result.output) diff --git a/import-layers/yocto-poky/meta-yocto-bsp/recipes-graphics/xorg-xserver/xserver-xf86-config/beaglebone/xorg.conf b/import-layers/yocto-poky/meta-yocto-bsp/recipes-graphics/xorg-xserver/xserver-xf86-config/beaglebone/xorg.conf new file mode 100644 index 000000000..2f40dae15 --- /dev/null +++ b/import-layers/yocto-poky/meta-yocto-bsp/recipes-graphics/xorg-xserver/xserver-xf86-config/beaglebone/xorg.conf @@ -0,0 +1,20 @@ +Section "Monitor" + Identifier "Builtin Default Monitor" +EndSection + +Section "Device" + Identifier "Builtin Default fbdev Device 0" + Driver "modesetting" +EndSection + +Section "Screen" + Identifier "Builtin Default fbdev Screen 0" + DefaultDepth 16 + Device "Builtin Default fbdev Device 0" + Monitor "Builtin Default Monitor" +EndSection + +Section "ServerLayout" + Identifier "Builtin Default Layout" + Screen "Builtin Default fbdev Screen 0" +EndSection diff --git a/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.1.bbappend b/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.1.bbappend index 68bae2c4e..c55f92569 100644 --- a/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.1.bbappend +++ b/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.1.bbappend @@ -7,11 +7,11 @@ KBRANCH_mpc8315e-rdb = "standard/fsl-mpc8315e-rdb" KMACHINE_genericx86 ?= "common-pc" KMACHINE_genericx86-64 ?= "common-pc-64" -SRCREV_machine_genericx86 ?= "fec49247816d7045aa8abe0047bcd4737af9a853" -SRCREV_machine_genericx86-64 ?= "fec49247816d7045aa8abe0047bcd4737af9a853" -SRCREV_machine_edgerouter ?= "fec49247816d7045aa8abe0047bcd4737af9a853" -SRCREV_machine_beaglebone ?= "938cc4ac8d36f166c9e2e0517d6ffd6d278fe631" 
-SRCREV_machine_mpc8315e-rdb ?= "fec49247816d7045aa8abe0047bcd4737af9a853" +SRCREV_machine_genericx86 ?= "782133d8166ac71ef1ffaba58b7cf81ec9e532a1" +SRCREV_machine_genericx86-64 ?= "782133d8166ac71ef1ffaba58b7cf81ec9e532a1" +SRCREV_machine_edgerouter ?= "782133d8166ac71ef1ffaba58b7cf81ec9e532a1" +SRCREV_machine_beaglebone ?= "ce38fdb820476e496579f2481be977c0f35509f4" +SRCREV_machine_mpc8315e-rdb ?= "782133d8166ac71ef1ffaba58b7cf81ec9e532a1" COMPATIBLE_MACHINE_genericx86 = "genericx86" COMPATIBLE_MACHINE_genericx86-64 = "genericx86-64" @@ -19,8 +19,8 @@ COMPATIBLE_MACHINE_edgerouter = "edgerouter" COMPATIBLE_MACHINE_beaglebone = "beaglebone" COMPATIBLE_MACHINE_mpc8315e-rdb = "mpc8315e-rdb" -LINUX_VERSION_genericx86 = "4.1.36" -LINUX_VERSION_genericx86-64 = "4.1.36" -LINUX_VERSION_edgerouter = "4.1.36" -LINUX_VERSION_beaglebone = "4.1.36" -LINUX_VERSION_mpc8315e-rdb = "4.1.36" +LINUX_VERSION_genericx86 = "4.1.43" +LINUX_VERSION_genericx86-64 = "4.1.43" +LINUX_VERSION_edgerouter = "4.1.43" +LINUX_VERSION_beaglebone = "4.1.43" +LINUX_VERSION_mpc8315e-rdb = "4.1.43" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.10.bbappend b/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.10.bbappend new file mode 100644 index 000000000..e1f359c9e --- /dev/null +++ b/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.10.bbappend @@ -0,0 +1,26 @@ +KBRANCH_genericx86 = "standard/base" +KBRANCH_genericx86-64 = "standard/base" + +KMACHINE_genericx86 ?= "common-pc" +KMACHINE_genericx86-64 ?= "common-pc-64" +KBRANCH_edgerouter = "standard/edgerouter" +KBRANCH_beaglebone = "standard/beaglebone" +KBRANCH_mpc8315e-rdb = "standard/fsl-mpc8315e-rdb" + +SRCREV_machine_genericx86 ?= "c1d8c4408b8aedd88eeb6ccc89ce834dd41b3f09" +SRCREV_machine_genericx86-64 ?= "c1d8c4408b8aedd88eeb6ccc89ce834dd41b3f09" +SRCREV_machine_edgerouter ?= "c1d8c4408b8aedd88eeb6ccc89ce834dd41b3f09" +SRCREV_machine_beaglebone ?= "c1d8c4408b8aedd88eeb6ccc89ce834dd41b3f09" +SRCREV_machine_mpc8315e-rdb ?= "c388fdfc9a1e06b22ca3533a023f4d67a6fc1e9b" + +COMPATIBLE_MACHINE_genericx86 = "genericx86" +COMPATIBLE_MACHINE_genericx86-64 = "genericx86-64" +COMPATIBLE_MACHINE_edgerouter = "edgerouter" +COMPATIBLE_MACHINE_beaglebone = "beaglebone" +COMPATIBLE_MACHINE_mpc8315e-rdb = "mpc8315e-rdb" + +LINUX_VERSION_genericx86 = "4.10.17" +LINUX_VERSION_genericx86-64 = "4.10.17" +LINUX_VERSION_edgerouter = "4.10.17" +LINUX_VERSION_beaglebone = "4.10.17" +LINUX_VERSION_mpc8315e-rdb = "4.10.17" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.4.bbappend b/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.4.bbappend index a0efb152b..427af4c4c 100644 --- a/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.4.bbappend +++ b/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.4.bbappend @@ -7,11 +7,11 @@ KBRANCH_edgerouter = "standard/edgerouter" KBRANCH_beaglebone = "standard/beaglebone" KBRANCH_mpc8315e-rdb = "standard/fsl-mpc8315e-rdb" -SRCREV_machine_genericx86 ?= "35482df5d5ba0807eb8a7c40b554bd657e3f9987" -SRCREV_machine_genericx86-64 ?= "35482df5d5ba0807eb8a7c40b554bd657e3f9987" -SRCREV_machine_edgerouter ?= "35482df5d5ba0807eb8a7c40b554bd657e3f9987" -SRCREV_machine_beaglebone ?= "35482df5d5ba0807eb8a7c40b554bd657e3f9987" -SRCREV_machine_mpc8315e-rdb ?= "772f071dbdd4b813c921058ddf9cba207237228b" +SRCREV_machine_genericx86 ?= "b71c7b786aed26c0a1e4eca66f1d874ec017d699" 
+SRCREV_machine_genericx86-64 ?= "b71c7b786aed26c0a1e4eca66f1d874ec017d699" +SRCREV_machine_edgerouter ?= "b71c7b786aed26c0a1e4eca66f1d874ec017d699" +SRCREV_machine_beaglebone ?= "b71c7b786aed26c0a1e4eca66f1d874ec017d699" +SRCREV_machine_mpc8315e-rdb ?= "b4daa4e9d68862e559d726b0b66b7be605889b9e" COMPATIBLE_MACHINE_genericx86 = "genericx86" COMPATIBLE_MACHINE_genericx86-64 = "genericx86-64" @@ -19,8 +19,8 @@ COMPATIBLE_MACHINE_edgerouter = "edgerouter" COMPATIBLE_MACHINE_beaglebone = "beaglebone" COMPATIBLE_MACHINE_mpc8315e-rdb = "mpc8315e-rdb" -LINUX_VERSION_genericx86 = "4.4.36" -LINUX_VERSION_genericx86-64 = "4.4.36" -LINUX_VERSION_edgerouter = "4.4.36" -LINUX_VERSION_beaglebone = "4.4.36" -LINUX_VERSION_mpc8315e-rdb = "4.4.36" +LINUX_VERSION_genericx86 = "4.4.87" +LINUX_VERSION_genericx86-64 = "4.4.87" +LINUX_VERSION_edgerouter = "4.4.87" +LINUX_VERSION_beaglebone = "4.4.87" +LINUX_VERSION_mpc8315e-rdb = "4.4.87" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.8.bbappend b/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.8.bbappend deleted file mode 100644 index 761e6e0a3..000000000 --- a/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.8.bbappend +++ /dev/null @@ -1,26 +0,0 @@ -KBRANCH_genericx86 = "standard/base" -KBRANCH_genericx86-64 = "standard/base" - -KMACHINE_genericx86 ?= "common-pc" -KMACHINE_genericx86-64 ?= "common-pc-64" -KBRANCH_edgerouter = "standard/edgerouter" -KBRANCH_beaglebone = "standard/beaglebone" -KBRANCH_mpc8315e-rdb = "standard/fsl-mpc8315e-rdb" - -SRCREV_machine_genericx86 ?= "021b4aef55b44597587a1ce5879be642b3dca155" -SRCREV_machine_genericx86-64 ?= "021b4aef55b44597587a1ce5879be642b3dca155" -SRCREV_machine_edgerouter ?= "6076f16536329465b62bd2037b8582a5e18f85d1" -SRCREV_machine_beaglebone ?= "85dc85153cd7e3b72d34f967c4c0edde590c79a8" -SRCREV_machine_mpc8315e-rdb ?= "f73222eb3bbd07a45564397a88dec554e848da7d" - -COMPATIBLE_MACHINE_genericx86 = "genericx86" -COMPATIBLE_MACHINE_genericx86-64 = "genericx86-64" -COMPATIBLE_MACHINE_edgerouter = "edgerouter" -COMPATIBLE_MACHINE_beaglebone = "beaglebone" -COMPATIBLE_MACHINE_mpc8315e-rdb = "mpc8315e-rdb" - -LINUX_VERSION_genericx86 = "4.8.12" -LINUX_VERSION_genericx86-64 = "4.8.12" -LINUX_VERSION_edgerouter = "4.8.12" -LINUX_VERSION_beaglebone = "4.8.12" -LINUX_VERSION_mpc8315e-rdb = "4.8.12" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.9.bbappend b/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.9.bbappend new file mode 100644 index 000000000..b3b5cd546 --- /dev/null +++ b/import-layers/yocto-poky/meta-yocto-bsp/recipes-kernel/linux/linux-yocto_4.9.bbappend @@ -0,0 +1,26 @@ +KBRANCH_genericx86 = "standard/base" +KBRANCH_genericx86-64 = "standard/base" + +KMACHINE_genericx86 ?= "common-pc" +KMACHINE_genericx86-64 ?= "common-pc-64" +KBRANCH_edgerouter = "standard/edgerouter" +KBRANCH_beaglebone = "standard/beaglebone" +KBRANCH_mpc8315e-rdb = "standard/fsl-mpc8315e-rdb" + +SRCREV_machine_genericx86 ?= "480ee599fb8df712c10dcf4b7aa6398b79f7d404" +SRCREV_machine_genericx86-64 ?= "480ee599fb8df712c10dcf4b7aa6398b79f7d404" +SRCREV_machine_edgerouter ?= "480ee599fb8df712c10dcf4b7aa6398b79f7d404" +SRCREV_machine_beaglebone ?= "480ee599fb8df712c10dcf4b7aa6398b79f7d404" +SRCREV_machine_mpc8315e-rdb ?= "88a703b15a7564704c3dc5d3c1237e0859897655" + +COMPATIBLE_MACHINE_genericx86 = "genericx86" +COMPATIBLE_MACHINE_genericx86-64 = "genericx86-64" 
+COMPATIBLE_MACHINE_edgerouter = "edgerouter" +COMPATIBLE_MACHINE_beaglebone = "beaglebone" +COMPATIBLE_MACHINE_mpc8315e-rdb = "mpc8315e-rdb" + +LINUX_VERSION_genericx86 = "4.9.49" +LINUX_VERSION_genericx86-64 = "4.9.49" +LINUX_VERSION_edgerouter = "4.9.49" +LINUX_VERSION_beaglebone = "4.9.49" +LINUX_VERSION_mpc8315e-rdb = "4.9.49" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/wic/beaglebone.wks b/import-layers/yocto-poky/meta-yocto-bsp/wic/beaglebone.wks new file mode 100644 index 000000000..0c09a9547 --- /dev/null +++ b/import-layers/yocto-poky/meta-yocto-bsp/wic/beaglebone.wks @@ -0,0 +1,6 @@ +# short-description: Create SD card image for Beaglebone +# long-description: Creates a partitioned SD card image for Beaglebone. +# Boot files are located in the first vfat partition. + +part /boot --source bootimg-partition --ondisk mmcblk --fstype=vfat --label boot --active --align 4 --size 16 +part / --source rootfs --ondisk mmcblk --fstype=ext4 --label root --align 4 diff --git a/import-layers/yocto-poky/meta-yocto-bsp/wic/edgerouter.wks b/import-layers/yocto-poky/meta-yocto-bsp/wic/edgerouter.wks new file mode 100644 index 000000000..7176fe436 --- /dev/null +++ b/import-layers/yocto-poky/meta-yocto-bsp/wic/edgerouter.wks @@ -0,0 +1,4 @@ +# short-description: Create SD card image for Edgerouter +# long-description: Create a partitioned SD card image for MIPS64 Edgerouter reference hardware. +part /boot --source bootimg-partition --ondisk sda --fstype=vfat --label boot --active --align 4 --size 16 +part / --source rootfs --ondisk sda --fstype=ext4 --label root --align 4 diff --git a/import-layers/yocto-poky/meta-yocto-bsp/wic/genericx86.wks b/import-layers/yocto-poky/meta-yocto-bsp/wic/genericx86.wks new file mode 100644 index 000000000..dab719c58 --- /dev/null +++ b/import-layers/yocto-poky/meta-yocto-bsp/wic/genericx86.wks @@ -0,0 +1,7 @@ +# short-description: Create an EFI disk image for genericx86* +# long-description: Creates a partitioned EFI disk image for genericx86* machines +part /boot --source bootimg-efi --sourceparams="loader=grub-efi" --ondisk sda --label msdos --active --align 1024 +part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 --use-uuid +part swap --ondisk sda --size 44 --label swap1 --fstype=swap + +bootloader --ptable gpt --timeout=5 --append="rootfstype=ext4 console=ttyS0,115200 console=tty0" diff --git a/import-layers/yocto-poky/meta-yocto-bsp/wic/mpc8315e-rdb.wks b/import-layers/yocto-poky/meta-yocto-bsp/wic/mpc8315e-rdb.wks new file mode 100644 index 000000000..d0cb607ff --- /dev/null +++ b/import-layers/yocto-poky/meta-yocto-bsp/wic/mpc8315e-rdb.wks @@ -0,0 +1,4 @@ +# short-description: Create SD card image for MPC8315E-RDB +# long-description: Create a partitioned SD card image for Freescale MPC8315E-RDB reference hardware. +part /boot --source bootimg-partition --ondisk sdb --fstype=ext3 --label boot +part / --source rootfs --ondisk sdb --fstype=ext3 --label root diff --git a/import-layers/yocto-poky/meta/classes/allarch.bbclass b/import-layers/yocto-poky/meta/classes/allarch.bbclass index ddc2a8505..a7ce02464 100644 --- a/import-layers/yocto-poky/meta/classes/allarch.bbclass +++ b/import-layers/yocto-poky/meta/classes/allarch.bbclass @@ -2,16 +2,12 @@ # This class is used for architecture independent recipes/data files (usually scripts) # -# Expand STAGING_DIR_HOST since for cross-canadian/native/nativesdk, this will -# point elsewhere after these changes. 
-STAGING_DIR_HOST := "${STAGING_DIR_HOST}" - PACKAGE_ARCH = "all" python () { # Allow this class to be included but overridden - only set # the values if we're still "all" package arch. - if d.getVar("PACKAGE_ARCH", True) == "all": + if d.getVar("PACKAGE_ARCH") == "all": # No need for virtual/libc or a cross compiler d.setVar("INHIBIT_DEFAULT_DEPS","1") @@ -25,13 +21,16 @@ python () { d.setVar("TARGET_AS_ARCH", "none") d.setVar("TARGET_FPU", "") d.setVar("TARGET_PREFIX", "") - d.setVar("PACKAGE_EXTRA_ARCHS", "") + # Expand PACKAGE_EXTRA_ARCHS since the staging code needs this + # (this removes any dependencies from the hash perspective) + d.setVar("PACKAGE_EXTRA_ARCHS", d.getVar("PACKAGE_EXTRA_ARCHS")) d.setVar("SDK_ARCH", "none") d.setVar("SDK_CC_ARCH", "none") d.setVar("TARGET_CPPFLAGS", "none") d.setVar("TARGET_CFLAGS", "none") d.setVar("TARGET_CXXFLAGS", "none") d.setVar("TARGET_LDFLAGS", "none") + d.setVar("POPULATESYSROOTDEPS", "") # Avoid this being unnecessarily different due to nuances of # the target machine that aren't important for "all" arch @@ -47,6 +46,6 @@ python () { d.setVarFlag("emit_pkgdata", "vardepsexclude", "MULTILIB_VARIANTS") d.setVarFlag("write_specfile", "vardepsexclude", "MULTILIBS") elif bb.data.inherits_class('packagegroup', d) and not bb.data.inherits_class('nativesdk', d): - bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE", True)) + bb.error("Please ensure recipe %s sets PACKAGE_ARCH before inherit packagegroup" % d.getVar("FILE")) } diff --git a/import-layers/yocto-poky/meta/classes/archiver.bbclass b/import-layers/yocto-poky/meta/classes/archiver.bbclass index 188f8c042..18c5b9668 100644 --- a/import-layers/yocto-poky/meta/classes/archiver.bbclass +++ b/import-layers/yocto-poky/meta/classes/archiver.bbclass @@ -52,10 +52,10 @@ do_deploy_all_archives[dirs] = "${WORKDIR}" python () { - pn = d.getVar('PN', True) - assume_provided = (d.getVar("ASSUME_PROVIDED", True) or "").split() + pn = d.getVar('PN') + assume_provided = (d.getVar("ASSUME_PROVIDED") or "").split() if pn in assume_provided: - for p in d.getVar("PROVIDES", True).split(): + for p in d.getVar("PROVIDES").split(): if p != pn: pn = p break @@ -67,18 +67,29 @@ python () { else: bb.debug(1, 'archiver: %s is included: %s' % (pn, reason)) + + # glibc-locale: do_fetch, do_unpack and do_patch tasks have been deleted, + # so avoid archiving source here. + if pn.startswith('glibc-locale'): + return + # We just archive gcc-source for all the gcc related recipes - if d.getVar('BPN', True) in ['gcc', 'libgcc'] \ + if d.getVar('BPN') in ['gcc', 'libgcc'] \ and not pn.startswith('gcc-source'): bb.debug(1, 'archiver: %s is excluded, covered by gcc-source' % pn) return - ar_src = d.getVarFlag('ARCHIVER_MODE', 'src', True) - ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata', True) - ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe', True) + ar_src = d.getVarFlag('ARCHIVER_MODE', 'src') + ar_dumpdata = d.getVarFlag('ARCHIVER_MODE', 'dumpdata') + ar_recipe = d.getVarFlag('ARCHIVER_MODE', 'recipe') if ar_src == "original": d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_original' % pn) + # 'patched' and 'configured' invoke do_unpack_and_patch because + # do_ar_patched resp. do_ar_configured depend on it, but for 'original' + # we have to add it explicitly. 
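The change that dominates every .bbclass hunk from here on is mechanical: the BitBake pulled in with this release lets the expand argument of d.getVar()/d.getVarFlag() default to True, so the trailing ", True" is dropped from call sites (d.getVar('PN', True) becomes d.getVar('PN')) with no change in behaviour. A minimal mock of what "expand by default" means, assuming nothing about BitBake internals:

    # Toy stand-in for the datastore, only to illustrate the new default.
    # The real object is bb.data_smart.DataSmart; this mock resolves a single
    # level of ${VAR} references and nothing more.
    import re

    class MockDataStore:
        def __init__(self):
            self._vars = {}

        def setVar(self, name, value):
            self._vars[name] = value

        def getVar(self, name, expand=True):      # expand now defaults to True
            value = self._vars.get(name)
            if expand and isinstance(value, str):
                value = re.sub(r"\$\{(\w+)\}",
                               lambda m: self._vars.get(m.group(1), m.group(0)),
                               value)
            return value

    d = MockDataStore()
    d.setVar("PN", "zlib")
    d.setVar("WORKDIR", "/build/tmp/work/${PN}")

    # Old and new spellings now return the same, expanded value:
    assert d.getVar("WORKDIR", True) == d.getVar("WORKDIR") == "/build/tmp/work/zlib"
    # The unexpanded form is still available on request:
    assert d.getVar("WORKDIR", False) == "/build/tmp/work/${PN}"
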
+ if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1': + d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_unpack_and_patch' % pn) elif ar_src == "patched": d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_patched' % pn) elif ar_src == "configured": @@ -104,9 +115,9 @@ python () { d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_ar_recipe' % pn) # Output the srpm package - ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm', True) + ar_srpm = d.getVarFlag('ARCHIVER_MODE', 'srpm') if ar_srpm == "1": - if d.getVar('PACKAGES', True) != '' and d.getVar('IMAGE_PKGTYPE', True) == 'rpm': + if d.getVar('PACKAGES') != '' and d.getVar('IMAGE_PKGTYPE') == 'rpm': d.appendVarFlag('do_deploy_archives', 'depends', ' %s:do_package_write_rpm' % pn) if ar_dumpdata == "1": d.appendVarFlag('do_package_write_rpm', 'depends', ' %s:do_dumpdata' % pn) @@ -127,12 +138,12 @@ python do_ar_original() { import shutil, tempfile - if d.getVarFlag('ARCHIVER_MODE', 'src', True) != "original": + if d.getVarFlag('ARCHIVER_MODE', 'src') != "original": return - ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) + ar_outdir = d.getVar('ARCHIVER_OUTDIR') bb.note('Archiving the original source...') - urls = d.getVar("SRC_URI", True).split() + urls = d.getVar("SRC_URI").split() # destsuffix (git fetcher) and subdir (everything else) are allowed to be # absolute paths (for example, destsuffix=${S}/foobar). # That messes with unpacking inside our tmpdir below, because the fetchers @@ -157,7 +168,7 @@ python do_ar_original() { if os.path.isfile(local): shutil.copy(local, ar_outdir) elif os.path.isdir(local): - tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR', True)) + tmpdir = tempfile.mkdtemp(dir=d.getVar('ARCHIVER_WORKDIR')) fetch.unpack(tmpdir, (url,)) # To handle recipes with more than one source, we add the "name" # URL parameter as suffix. We treat it as an error when @@ -166,12 +177,18 @@ python do_ar_original() { # to be set when using the git fetcher, otherwise SRCREV cannot # be set separately for each URL. params = bb.fetch2.decodeurl(url)[5] + type = bb.fetch2.decodeurl(url)[0] + location = bb.fetch2.decodeurl(url)[2] name = params.get('name', '') - if name in tarball_suffix: - if not name: - bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url)) - else: - bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. Make it unique in: %s %s" % (tarball_suffix[name], url)) + if type.lower() == 'file': + name_tmp = location.rstrip("*").rstrip("/") + name = os.path.basename(name_tmp) + else: + if name in tarball_suffix: + if not name: + bb.fatal("Cannot determine archive names for original source because 'name' URL parameter is unset in more than one URL. Add it to at least one of these: %s %s" % (tarball_suffix[name], url)) + else: + bb.fatal("Cannot determine archive names for original source because 'name=' URL parameter '%s' is used twice. 
Make it unique in: %s %s" % (tarball_suffix[name], url)) tarball_suffix[name] = url create_tarball(d, tmpdir + '/.', name, ar_outdir) @@ -191,28 +208,32 @@ python do_ar_original() { python do_ar_patched() { - if d.getVarFlag('ARCHIVER_MODE', 'src', True) != 'patched': + if d.getVarFlag('ARCHIVER_MODE', 'src') != 'patched': return # Get the ARCHIVER_OUTDIR before we reset the WORKDIR - ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) - ar_workdir = d.getVar('ARCHIVER_WORKDIR', True) + ar_outdir = d.getVar('ARCHIVER_OUTDIR') + ar_workdir = d.getVar('ARCHIVER_WORKDIR') bb.note('Archiving the patched source...') d.setVar('WORKDIR', ar_workdir) - create_tarball(d, d.getVar('S', True), 'patched', ar_outdir) + create_tarball(d, d.getVar('S'), 'patched', ar_outdir) } python do_ar_configured() { import shutil - ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) - if d.getVarFlag('ARCHIVER_MODE', 'src', True) == 'configured': + # Forcibly expand the sysroot paths as we're about to change WORKDIR + d.setVar('RECIPE_SYSROOT', d.getVar('RECIPE_SYSROOT')) + d.setVar('RECIPE_SYSROOT_NATIVE', d.getVar('RECIPE_SYSROOT_NATIVE')) + + ar_outdir = d.getVar('ARCHIVER_OUTDIR') + if d.getVarFlag('ARCHIVER_MODE', 'src') == 'configured': bb.note('Archiving the configured source...') - pn = d.getVar('PN', True) + pn = d.getVar('PN') # "gcc-source-${PV}" recipes don't have "do_configure" # task, so we need to run "do_preconfigure" instead if pn.startswith("gcc-source-"): - d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True)) + d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR')) bb.build.exec_func('do_preconfigure', d) # The libtool-native's do_configure will remove the @@ -221,26 +242,26 @@ python do_ar_configured() { # instead of. elif pn != 'libtool-native': # Change the WORKDIR to make do_configure run in another dir. 
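do_ar_configured now "forcibly expands" RECIPE_SYSROOT and RECIPE_SYSROOT_NATIVE (d.setVar('X', d.getVar('X'))) before it rewrites WORKDIR. Those variables are defined relative to WORKDIR, so reading and immediately re-storing them freezes the already-correct paths; otherwise the later d.setVar('WORKDIR', ARCHIVER_WORKDIR) would drag the sysroot paths into the archiver's scratch directory. A self-contained sketch of the idiom (toy ${VAR} expander, invented paths, not BitBake code):

    # Toy ${VAR} expander with invented paths: shows why "setVar(X, getVar(X))"
    # pins a value before WORKDIR is redirected.
    import re

    store = {
        "WORKDIR": "/build/work/core2-64-poky-linux/zlib/1.2.11-r0",
        "RECIPE_SYSROOT": "${WORKDIR}/recipe-sysroot",
    }

    def get(name):
        return re.sub(r"\$\{(\w+)\}", lambda m: store[m.group(1)], store[name])

    store["RECIPE_SYSROOT"] = get("RECIPE_SYSROOT")    # pin while WORKDIR is real
    store["WORKDIR"] = "/build/work/archiver-scratch"  # what do_ar_configured does next

    assert get("RECIPE_SYSROOT") == \
        "/build/work/core2-64-poky-linux/zlib/1.2.11-r0/recipe-sysroot"
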
- d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR', True)) + d.setVar('WORKDIR', d.getVar('ARCHIVER_WORKDIR')) if bb.data.inherits_class('kernel-yocto', d): bb.build.exec_func('do_kernel_configme', d) if bb.data.inherits_class('cmake', d): bb.build.exec_func('do_generate_toolchain_file', d) - prefuncs = d.getVarFlag('do_configure', 'prefuncs', True) + prefuncs = d.getVarFlag('do_configure', 'prefuncs') for func in (prefuncs or '').split(): if func != "sysroot_cleansstate": bb.build.exec_func(func, d) bb.build.exec_func('do_configure', d) - postfuncs = d.getVarFlag('do_configure', 'postfuncs', True) + postfuncs = d.getVarFlag('do_configure', 'postfuncs') for func in (postfuncs or '').split(): if func != "do_qa_configure": bb.build.exec_func(func, d) - srcdir = d.getVar('S', True) - builddir = d.getVar('B', True) + srcdir = d.getVar('S') + builddir = d.getVar('B') if srcdir != builddir: if os.path.exists(builddir): oe.path.copytree(builddir, os.path.join(srcdir, \ - 'build.%s.ar_configured' % d.getVar('PF', True))) + 'build.%s.ar_configured' % d.getVar('PF'))) create_tarball(d, srcdir, 'configured', ar_outdir) } @@ -251,14 +272,14 @@ def create_tarball(d, srcdir, suffix, ar_outdir): import tarfile # Make sure we are only creating a single tarball for gcc sources - if (d.getVar('SRC_URI', True) == ""): + if (d.getVar('SRC_URI') == ""): return bb.utils.mkdirhier(ar_outdir) if suffix: - filename = '%s-%s.tar.gz' % (d.getVar('PF', True), suffix) + filename = '%s-%s.tar.gz' % (d.getVar('PF'), suffix) else: - filename = '%s.tar.gz' % d.getVar('PF', True) + filename = '%s.tar.gz' % d.getVar('PF') tarname = os.path.join(ar_outdir, filename) bb.note('Creating %s' % tarname) @@ -279,57 +300,78 @@ def create_diff_gz(d, src_orig, src, ar_outdir): # exclude. src_patched = src + '.patched' oe.path.copyhardlinktree(src, src_patched) - for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude', True).split(): + for i in d.getVarFlag('ARCHIVER_MODE', 'diff-exclude').split(): bb.utils.remove(os.path.join(src_orig, i), recurse=True) bb.utils.remove(os.path.join(src_patched, i), recurse=True) dirname = os.path.dirname(src) basename = os.path.basename(src) - os.chdir(dirname) - out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF', True)) - diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file) - subprocess.call(diff_cmd, shell=True) - bb.utils.remove(src_patched, recurse=True) + bb.utils.mkdirhier(ar_outdir) + cwd = os.getcwd() + try: + os.chdir(dirname) + out_file = os.path.join(ar_outdir, '%s-diff.gz' % d.getVar('PF')) + diff_cmd = 'diff -Naur %s.orig %s.patched | gzip -c > %s' % (basename, basename, out_file) + subprocess.check_call(diff_cmd, shell=True) + bb.utils.remove(src_patched, recurse=True) + finally: + os.chdir(cwd) # Run do_unpack and do_patch python do_unpack_and_patch() { - if d.getVarFlag('ARCHIVER_MODE', 'src', True) not in \ + if d.getVarFlag('ARCHIVER_MODE', 'src') not in \ [ 'patched', 'configured'] and \ - d.getVarFlag('ARCHIVER_MODE', 'diff', True) != '1': + d.getVarFlag('ARCHIVER_MODE', 'diff') != '1': return - ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) - ar_workdir = d.getVar('ARCHIVER_WORKDIR', True) - pn = d.getVar('PN', True) + ar_outdir = d.getVar('ARCHIVER_OUTDIR') + ar_workdir = d.getVar('ARCHIVER_WORKDIR') + ar_sysroot_native = d.getVar('STAGING_DIR_NATIVE') + pn = d.getVar('PN') # The kernel class functions require it to be on work-shared, so we dont change WORKDIR if not (bb.data.inherits_class('kernel-yocto', d) or 
pn.startswith('gcc-source')): # Change the WORKDIR to make do_unpack do_patch run in another dir. d.setVar('WORKDIR', ar_workdir) + # Restore the original path to recipe's native sysroot (it's relative to WORKDIR). + d.setVar('STAGING_DIR_NATIVE', ar_sysroot_native) # The changed 'WORKDIR' also caused 'B' changed, create dir 'B' for the # possibly requiring of the following tasks (such as some recipes's # do_patch required 'B' existed). - bb.utils.mkdirhier(d.getVar('B', True)) + bb.utils.mkdirhier(d.getVar('B')) bb.build.exec_func('do_unpack', d) # Save the original source for creating the patches - if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1': - src = d.getVar('S', True).rstrip('/') + if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1': + src = d.getVar('S').rstrip('/') src_orig = '%s.orig' % src oe.path.copytree(src, src_orig) # Make sure gcc and kernel sources are patched only once - if not (d.getVar('SRC_URI', True) == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))): + if not (d.getVar('SRC_URI') == "" or (bb.data.inherits_class('kernel-yocto', d) or pn.startswith('gcc-source'))): bb.build.exec_func('do_patch', d) # Create the patches - if d.getVarFlag('ARCHIVER_MODE', 'diff', True) == '1': + if d.getVarFlag('ARCHIVER_MODE', 'diff') == '1': bb.note('Creating diff gz...') create_diff_gz(d, src_orig, src, ar_outdir) bb.utils.remove(src_orig, recurse=True) } +# BBINCLUDED is special (excluded from basehash signature +# calculation). Using it in a task signature can cause "basehash +# changed" errors. +# +# Depending on BBINCLUDED also causes do_ar_recipe to run again +# for unrelated changes, like adding or removing buildhistory.bbclass. +# +# For these reasons we ignore the dependency completely. The versioning +# of the output file ensures that we create it each time the recipe +# gets rebuilt, at least as long as a PR server is used. We also rely +# on that mechanism to catch changes in the file content, because the +# file content is not part of of the task signature either. +do_ar_recipe[vardepsexclude] += "BBINCLUDED" python do_ar_recipe () { """ archive the recipe, including .bb and .inc. @@ -339,14 +381,14 @@ python do_ar_recipe () { require_re = re.compile( r"require\s+(.+)" ) include_re = re.compile( r"include\s+(.+)" ) - bbfile = d.getVar('FILE', True) - outdir = os.path.join(d.getVar('WORKDIR', True), \ - '%s-recipe' % d.getVar('PF', True)) + bbfile = d.getVar('FILE') + outdir = os.path.join(d.getVar('WORKDIR'), \ + '%s-recipe' % d.getVar('PF')) bb.utils.mkdirhier(outdir) shutil.copy(bbfile, outdir) - pn = d.getVar('PN', True) - bbappend_files = d.getVar('BBINCLUDED', True).split() + pn = d.getVar('PN') + bbappend_files = d.getVar('BBINCLUDED').split() # If recipe name is aa, we need to match files like aa.bbappend and aa_1.1.bbappend # Files like aa1.bbappend or aa1_1.1.bbappend must be excluded. 
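do_ar_recipe copies the recipe and any applied bbappends into the archive, and the comment above states the matching rule: for a recipe aa, take aa.bbappend and aa_1.1.bbappend but not aa1.bbappend. The regex on the next line implements the versioned form with re.escape(pn); checking the rule stand-alone (hypothetical snippet, patterns mirror the shapes the class uses) looks like this:

    # Hypothetical check of the bbappend matching rule described above.
    import re

    pn = "aa"
    versioned = re.compile(r".*/%s_[^/]*\.bbappend$" % re.escape(pn))
    plain = re.compile(r".*/%s\.bbappend$" % re.escape(pn))

    for path in ("/layer/recipes/aa.bbappend",        # archived
                 "/layer/recipes/aa_1.1.bbappend",    # archived
                 "/layer/recipes/aa1.bbappend",       # skipped
                 "/layer/recipes/aa1_1.1.bbappend"):  # skipped
        hit = bool(versioned.match(path) or plain.match(path))
        print("%-35s %s" % (path, "archived" if hit else "skipped"))
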
bbappend_re = re.compile( r".*/%s_[^/]*\.bbappend$" % re.escape(pn)) @@ -356,7 +398,7 @@ python do_ar_recipe () { shutil.copy(file, outdir) dirname = os.path.dirname(bbfile) - bbpath = '%s:%s' % (dirname, d.getVar('BBPATH', True)) + bbpath = '%s:%s' % (dirname, d.getVar('BBPATH')) f = open(bbfile, 'r') for line in f.readlines(): incfile = None @@ -365,12 +407,12 @@ python do_ar_recipe () { elif include_re.match(line): incfile = include_re.match(line).group(1) if incfile: - incfile = bb.data.expand(incfile, d) + incfile = d.expand(incfile) incfile = bb.utils.which(bbpath, incfile) if incfile: shutil.copy(incfile, outdir) - create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR', True)) + create_tarball(d, outdir, 'recipe', d.getVar('ARCHIVER_OUTDIR')) bb.utils.remove(outdir, recurse=True) } @@ -379,8 +421,8 @@ python do_dumpdata () { dump environment data to ${PF}-showdata.dump """ - dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR', True), \ - '%s-showdata.dump' % d.getVar('PF', True)) + dumpfile = os.path.join(d.getVar('ARCHIVER_OUTDIR'), \ + '%s-showdata.dump' % d.getVar('PF')) bb.note('Dumping metadata into %s' % dumpfile) with open(dumpfile, "w") as f: # emit variables and shell functions @@ -419,7 +461,10 @@ do_deploy_all_archives() { } python () { - # Add tasks in the correct order, specifically for linux-yocto to avoid race condition + # Add tasks in the correct order, specifically for linux-yocto to avoid race condition. + # sstatesig.py:sstate_rundepfilter has special support that excludes this dependency + # so that do_kernel_configme does not need to run again when do_unpack_and_patch + # gets added or removed (by adding or removing archiver.bbclass). if bb.data.inherits_class('kernel-yocto', d): bb.build.addtask('do_kernel_configme', 'do_configure', 'do_unpack_and_patch', d) } diff --git a/import-layers/yocto-poky/meta/classes/autotools.bbclass b/import-layers/yocto-poky/meta/classes/autotools.bbclass index c43ea9a7e..ac04a07cb 100644 --- a/import-layers/yocto-poky/meta/classes/autotools.bbclass +++ b/import-layers/yocto-poky/meta/classes/autotools.bbclass @@ -1,8 +1,8 @@ def autotools_dep_prepend(d): - if d.getVar('INHIBIT_AUTOTOOLS_DEPS', True): + if d.getVar('INHIBIT_AUTOTOOLS_DEPS'): return '' - pn = d.getVar('PN', True) + pn = d.getVar('PN') deps = '' if pn in ['autoconf-native', 'automake-native', 'help2man-native']: @@ -14,7 +14,7 @@ def autotools_dep_prepend(d): if not bb.data.inherits_class('native', d) \ and not bb.data.inherits_class('nativesdk', d) \ and not bb.data.inherits_class('cross', d) \ - and not d.getVar('INHIBIT_DEFAULT_DEPS', True): + and not d.getVar('INHIBIT_DEFAULT_DEPS'): deps += 'libtool-cross ' return deps + 'gnu-config-native ' @@ -27,7 +27,7 @@ inherit siteinfo # results for autoconf tests we cannot run at build time. 
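The autotools.bbclass hunk that follows removes the long autotools_copy_aclocals function, which walked BB_TASKDEPDATA and the sstate manifests to copy only the permitted .m4 macros into a per-recipe ACLOCALDIR. With the recipe-specific sysroots introduced in this release that filtering happens for free: ACLOCALDIR simply points at ${STAGING_DATADIR}/aclocal inside the recipe sysroot, and ACLOCALEXTRAPATH adds the native sysroot's macros for target and nativesdk builds. Roughly, and only as an illustrative sketch with assumed directory names (the real logic is the shell in autotools_do_configure below), the aclocal invocation is now assembled like this:

    # Illustrative sketch only; directory layout is assumed.
    import os

    def aclocal_cmd(recipe_sysroot, native_sysroot, recipe_m4_dirs=()):
        # ACLOCALDIR: macros staged into this recipe's own sysroot by its
        # DEPENDS, i.e. already limited to what this recipe may see.
        acdir = os.path.join(recipe_sysroot, "usr/share/aclocal")
        cmd = ["aclocal", "--system-acdir=%s/" % acdir]
        # ACLOCALEXTRAPATH: for target/nativesdk recipes, also offer the
        # macros installed by native tools into the native sysroot.
        cmd += ["-I", os.path.join(native_sysroot, "usr/share/aclocal")]
        for m4dir in recipe_m4_dirs:      # the recipe's own m4/ directories
            cmd += ["-I", m4dir]
        return cmd

    print(" ".join(aclocal_cmd("recipe-sysroot", "recipe-sysroot-native", ["m4"])))
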
export CONFIG_SITE = "${@siteinfo_get_files(d)}" -acpaths = "default" +acpaths ?= "default" EXTRA_AUTORECONF = "--exclude=autopoint" export lt_cv_sys_lib_dlsearch_path_spec = "${libdir} ${base_libdir}" @@ -131,133 +131,18 @@ EXTRACONFFUNCS ??= "" EXTRA_OECONF_append = " ${PACKAGECONFIG_CONFARGS}" -do_configure[prefuncs] += "autotools_preconfigure autotools_copy_aclocals ${EXTRACONFFUNCS}" +do_configure[prefuncs] += "autotools_preconfigure autotools_aclocals ${EXTRACONFFUNCS}" do_configure[postfuncs] += "autotools_postconfigure" -ACLOCALDIR = "${WORKDIR}/aclocal-copy" - -python autotools_copy_aclocals () { - import copy - - s = d.getVar("AUTOTOOLS_SCRIPT_PATH", True) - if not os.path.exists(s + "/configure.in") and not os.path.exists(s + "/configure.ac"): - if not d.getVar("AUTOTOOLS_COPYACLOCAL", False): - return - - taskdepdata = d.getVar("BB_TASKDEPDATA", False) - #bb.warn(str(taskdepdata)) - pn = d.getVar("PN", True) - aclocaldir = d.getVar("ACLOCALDIR", True) - oe.path.remove(aclocaldir) - bb.utils.mkdirhier(aclocaldir) - start = None - configuredeps = [] - # Detect bitbake -b usage - # Everything but quilt-native would have dependencies - nodeps = (pn != "quilt-native") - - for dep in taskdepdata: - data = taskdepdata[dep] - if data[1] == "do_configure" and data[0] == pn: - start = dep - if not nodeps and start: - break - if nodeps and data[0] != pn: - nodeps = False - if start is None: - bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") - - # We need to figure out which m4 files we need to expose to this do_configure task. - # This needs to match what would get restored from sstate, which is controlled - # ultimately by calls from bitbake to setscene_depvalid(). - # That function expects a setscene dependency tree. We build a dependency tree - # condensed to do_populate_sysroot -> do_populate_sysroot dependencies, similar to - # that used by setscene tasks. We can then call into setscene_depvalid() and decide - # which dependencies we can "see" and should expose the m4 files for. - setscenedeps = copy.deepcopy(taskdepdata) - - start = set([start]) - - # Create collapsed do_populate_sysroot -> do_populate_sysroot tree - for dep in taskdepdata: - data = setscenedeps[dep] - if data[1] != "do_populate_sysroot": - for dep2 in setscenedeps: - data2 = setscenedeps[dep2] - if dep in data2[3]: - data2[3].update(setscenedeps[dep][3]) - data2[3].remove(dep) - if dep in start: - start.update(setscenedeps[dep][3]) - start.remove(dep) - del setscenedeps[dep] - - # Remove circular references - for dep in setscenedeps: - if dep in setscenedeps[dep][3]: - setscenedeps[dep][3].remove(dep) - - # Direct dependencies should be present and can be depended upon - for dep in start: - configuredeps.append(setscenedeps[dep][0]) - - # Call into setscene_depvalid for each sub-dependency and only copy m4 files - # for ones that would be restored from sstate. - done = list(start) - next = list(start) - while next: - new = [] - for dep in next: - data = setscenedeps[dep] - for datadep in data[3]: - if datadep in done: - continue - taskdeps = {} - taskdeps[dep] = setscenedeps[dep][:2] - taskdeps[datadep] = setscenedeps[datadep][:2] - retval = setscene_depvalid(datadep, taskdeps, [], d) - if retval: - bb.note("Skipping setscene dependency %s for m4 macro copying" % datadep) - continue - done.append(datadep) - new.append(datadep) - configuredeps.append(setscenedeps[datadep][0]) - next = new - - cp = [] - if nodeps: - bb.warn("autotools: Unable to find task dependencies, -b being used? 
Pulling in all m4 files") - for l in [d.expand("${STAGING_DATADIR_NATIVE}/aclocal/"), d.expand("${STAGING_DATADIR}/aclocal/")]: - cp.extend(os.path.join(l, f) for f in os.listdir(l)) - - for c in configuredeps: - if c.endswith("-native"): - manifest = d.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}-%s.populate_sysroot" % c) - elif c.startswith("nativesdk-"): - manifest = d.expand("${SSTATE_MANIFESTS}/manifest-${SDK_ARCH}_${SDK_OS}-%s.populate_sysroot" % c) - elif "-cross-" in c or "-crosssdk" in c: - continue - else: - manifest = d.expand("${SSTATE_MANIFESTS}/manifest-${MACHINE}-%s.populate_sysroot" % c) - try: - f = open(manifest, "r") - for l in f: - if "/aclocal/" in l and l.strip().endswith(".m4"): - cp.append(l.strip()) - elif "config_site.d/" in l: - cp.append(l.strip()) - except: - bb.warn("%s not found" % manifest) - - for c in cp: - t = os.path.join(aclocaldir, os.path.basename(c)) - if not os.path.exists(t): - os.symlink(c, t) +ACLOCALDIR = "${STAGING_DATADIR}/aclocal" +ACLOCALEXTRAPATH = "" +ACLOCALEXTRAPATH_class-target = " -I ${STAGING_DATADIR_NATIVE}/aclocal/" +ACLOCALEXTRAPATH_class-nativesdk = " -I ${STAGING_DATADIR_NATIVE}/aclocal/" +python autotools_aclocals () { # Refresh variable with cache files d.setVar("CONFIG_SITE", siteinfo_get_files(d, aclocalcache=True)) } -autotools_copy_aclocals[vardepsexclude] += "MACHINE SDK_ARCH BUILD_ARCH SDK_OS BB_TASKDEPDATA" CONFIGURE_FILES = "${S}/configure.in ${S}/configure.ac ${S}/config.h.in ${S}/acinclude.m4 Makefile.am" @@ -279,6 +164,7 @@ autotools_do_configure() { if [ -e ${AUTOTOOLS_SCRIPT_PATH}/configure.in -o -e ${AUTOTOOLS_SCRIPT_PATH}/configure.ac ]; then olddir=`pwd` cd ${AUTOTOOLS_SCRIPT_PATH} + mkdir -p ${ACLOCALDIR} ACLOCAL="aclocal --system-acdir=${ACLOCALDIR}/" if [ x"${acpaths}" = xdefault ]; then acpaths= @@ -289,6 +175,7 @@ autotools_do_configure() { else acpaths="${acpaths}" fi + acpaths="$acpaths ${ACLOCALEXTRAPATH}" AUTOV=`automake --version | sed -e '1{s/.* //;s/\.[0-9]\+$//};q'` automake --version echo "AUTOV is $AUTOV" @@ -306,14 +193,14 @@ autotools_do_configure() { else CONFIGURE_AC=configure.ac fi - if grep "^[[:space:]]*AM_GLIB_GNU_GETTEXT" $CONFIGURE_AC >/dev/null; then - if grep "sed.*POTFILES" $CONFIGURE_AC >/dev/null; then + if grep -q "^[[:space:]]*AM_GLIB_GNU_GETTEXT" $CONFIGURE_AC; then + if grep -q "sed.*POTFILES" $CONFIGURE_AC; then : do nothing -- we still have an old unmodified configure.ac else bbnote Executing glib-gettextize --force --copy echo "no" | glib-gettextize --force --copy fi - elif grep "^[[:space:]]*AM_GNU_GETTEXT" $CONFIGURE_AC >/dev/null; then + elif grep -q "^[[:space:]]*AM_GNU_GETTEXT" $CONFIGURE_AC; then # We'd call gettextize here if it wasn't so broken... cp ${STAGING_DATADIR_NATIVE}/gettext/config.rpath ${AUTOTOOLS_AUXDIR}/ if [ -d ${S}/po/ ]; then @@ -325,7 +212,7 @@ autotools_do_configure() { PRUNE_M4="$PRUNE_M4 gettext.m4 iconv.m4 lib-ld.m4 lib-link.m4 lib-prefix.m4 nls.m4 po.m4 progtest.m4" fi mkdir -p m4 - if grep "^[[:space:]]*[AI][CT]_PROG_INTLTOOL" $CONFIGURE_AC >/dev/null; then + if grep -q "^[[:space:]]*[AI][CT]_PROG_INTLTOOL" $CONFIGURE_AC; then if ! 
echo "${DEPENDS}" | grep -q intltool-native; then bbwarn "Missing DEPENDS on intltool-native" fi diff --git a/import-layers/yocto-poky/meta/classes/base.bbclass b/import-layers/yocto-poky/meta/classes/base.bbclass index 024fe4331..d95afb7b9 100644 --- a/import-layers/yocto-poky/meta/classes/base.bbclass +++ b/import-layers/yocto-poky/meta/classes/base.bbclass @@ -10,13 +10,13 @@ inherit utility-tasks inherit metadata_scm inherit logging -OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath" +OE_IMPORTS += "os sys time oe.path oe.utils oe.types oe.package oe.packagegroup oe.sstatesig oe.lsb oe.cachedpath oe.license" OE_IMPORTS[type] = "list" def oe_import(d): import sys - bbpath = d.getVar("BBPATH", True).split(":") + bbpath = d.getVar("BBPATH").split(":") sys.path[0:0] = [os.path.join(dir, "lib") for dir in bbpath] def inject(name, value): @@ -37,7 +37,7 @@ def oe_import(d): OE_IMPORTED := "${@oe_import(d)}" def lsb_distro_identifier(d): - adjust = d.getVar('LSB_DISTRO_ADJUST', True) + adjust = d.getVar('LSB_DISTRO_ADJUST') adjust_func = None if adjust: try: @@ -72,7 +72,7 @@ def base_dep_prepend(d): # we need that built is the responsibility of the patch function / class, not # the application. if not d.getVar('INHIBIT_DEFAULT_DEPS', False): - if (d.getVar('HOST_SYS', True) != d.getVar('BUILD_SYS', True)): + if (d.getVar('HOST_SYS') != d.getVar('BUILD_SYS')): deps += " virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}compilerlibs virtual/libc " return deps @@ -83,11 +83,11 @@ DEPENDS_prepend="${BASEDEPENDS} " FILESPATH = "${@base_set_filespath(["${FILE_DIRNAME}/${BP}", "${FILE_DIRNAME}/${BPN}", "${FILE_DIRNAME}/files"], d)}" # THISDIR only works properly with imediate expansion as it has to run # in the context of the location its used (:=) -THISDIR = "${@os.path.dirname(d.getVar('FILE', True))}" +THISDIR = "${@os.path.dirname(d.getVar('FILE'))}" def extra_path_elements(d): path = "" - elements = (d.getVar('EXTRANATIVEPATH', True) or "").split() + elements = (d.getVar('EXTRANATIVEPATH') or "").split() for e in elements: path = path + "${STAGING_BINDIR_NATIVE}/" + e + ":" return path @@ -96,8 +96,11 @@ PATH_prepend = "${@extra_path_elements(d)}" def get_lic_checksum_file_list(d): filelist = [] - lic_files = d.getVar("LIC_FILES_CHKSUM", True) or '' - tmpdir = d.getVar("TMPDIR", True) + lic_files = d.getVar("LIC_FILES_CHKSUM") or '' + tmpdir = d.getVar("TMPDIR") + s = d.getVar("S") + b = d.getVar("B") + workdir = d.getVar("WORKDIR") urls = lic_files.split() for url in urls: @@ -109,13 +112,32 @@ def get_lic_checksum_file_list(d): raise bb.fetch.MalformedUrl(url) if path[0] == '/': - if path.startswith(tmpdir): + if path.startswith((tmpdir, s, b, workdir)): continue filelist.append(path + ":" + str(os.path.exists(path))) except bb.fetch.MalformedUrl: - bb.fatal(d.getVar('PN', True) + ": LIC_FILES_CHKSUM contains an invalid URL: " + url) + bb.fatal(d.getVar('PN') + ": LIC_FILES_CHKSUM contains an invalid URL: " + url) return " ".join(filelist) +def setup_hosttools_dir(dest, toolsvar, d, fatal=True): + tools = d.getVar(toolsvar).split() + origbbenv = d.getVar("BB_ORIGENV", False) + path = origbbenv.getVar("PATH") + bb.utils.mkdirhier(dest) + notfound = [] + for tool in tools: + desttool = os.path.join(dest, tool) + if not os.path.exists(desttool): + srctool = bb.utils.which(path, tool, executable=True) + if "ccache" in srctool: + srctool = bb.utils.which(path, tool, executable=True, direction=1) + if srctool: + os.symlink(srctool, 
desttool) + else: + notfound.append(tool) + if notfound and fatal: + bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound)) + addtask fetch do_fetch[dirs] = "${DL_DIR}" do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}" @@ -123,7 +145,7 @@ do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}" do_fetch[vardeps] += "SRCREV" python base_do_fetch() { - src_uri = (d.getVar('SRC_URI', True) or "").split() + src_uri = (d.getVar('SRC_URI') or "").split() if len(src_uri) == 0: return @@ -138,31 +160,31 @@ addtask unpack after do_fetch do_unpack[dirs] = "${WORKDIR}" python () { - if d.getVar('S', True) != d.getVar('WORKDIR', True): + if d.getVar('S') != d.getVar('WORKDIR'): d.setVarFlag('do_unpack', 'cleandirs', '${S}') else: d.setVarFlag('do_unpack', 'cleandirs', os.path.join('${S}', 'patches')) } python base_do_unpack() { - src_uri = (d.getVar('SRC_URI', True) or "").split() + src_uri = (d.getVar('SRC_URI') or "").split() if len(src_uri) == 0: return try: fetcher = bb.fetch2.Fetch(src_uri, d) - fetcher.unpack(d.getVar('WORKDIR', True)) + fetcher.unpack(d.getVar('WORKDIR')) except bb.fetch2.BBFetchException as e: bb.fatal(str(e)) } def pkgarch_mapping(d): # Compatibility mappings of TUNE_PKGARCH (opt in) - if d.getVar("PKGARCHCOMPAT_ARMV7A", True): - if d.getVar("TUNE_PKGARCH", True) == "armv7a-vfp-neon": + if d.getVar("PKGARCHCOMPAT_ARMV7A"): + if d.getVar("TUNE_PKGARCH") == "armv7a-vfp-neon": d.setVar("TUNE_PKGARCH", "armv7a") def get_layers_branch_rev(d): - layers = (d.getVar("BBLAYERS", True) or "").split() + layers = (d.getVar("BBLAYERS") or "").split() layers_branch_rev = ["%-17s = \"%s:%s\"" % (os.path.basename(i), \ base_get_metadata_git_branch(i, None).strip(), \ base_get_metadata_git_revision(i, None)) \ @@ -189,7 +211,7 @@ BUILDCFG_FUNCS[type] = "list" def buildcfg_vars(d): statusvars = oe.data.typed_value('BUILDCFG_VARS', d) for var in statusvars: - value = d.getVar(var, True) + value = d.getVar(var) if value is not None: yield '%-17s = "%s"' % (var, value) @@ -197,7 +219,7 @@ def buildcfg_neededvars(d): needed_vars = oe.data.typed_value("BUILDCFG_NEEDEDVARS", d) pesteruser = [] for v in needed_vars: - val = d.getVar(v, True) + val = d.getVar(v) if not val or val == 'INVALID': pesteruser.append(v) @@ -216,10 +238,12 @@ python base_eventhandler() { pkgarch_mapping(e.data) oe.utils.features_backfill("DISTRO_FEATURES", e.data) oe.utils.features_backfill("MACHINE_FEATURES", e.data) + # Works with the line in layer.conf which changes PATH to point here + setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS', d) + setup_hosttools_dir(d.getVar('HOSTTOOLS_DIR'), 'HOSTTOOLS_NONFATAL', d, fatal=False) if isinstance(e, bb.event.BuildStarted): localdata = bb.data.createCopy(e.data) - bb.data.update_data(localdata) statuslines = [] for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata): g = globals() @@ -230,7 +254,7 @@ python base_eventhandler() { if flines: statuslines.extend(flines) - statusheader = e.data.getVar('BUILDCFG_HEADER', True) + statusheader = e.data.getVar('BUILDCFG_HEADER') if statusheader: bb.plain('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines))) @@ -238,7 +262,7 @@ python base_eventhandler() { # target ones and we'd see dulpicate key names overwriting each other # for various PREFERRED_PROVIDERS if isinstance(e, bb.event.RecipePreFinalise): - if e.data.getVar("TARGET_PREFIX", True) == e.data.getVar("SDK_PREFIX", True): + if 
e.data.getVar("TARGET_PREFIX") == e.data.getVar("SDK_PREFIX"): e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}binutils") e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc-initial") e.data.delVar("PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}gcc") @@ -264,14 +288,14 @@ python base_eventhandler() { # sysroot since they're now "unreachable". This makes switching virtual/kernel work in # particular. # - pn = d.getVar('PN', True) + pn = d.getVar('PN') source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) if not source_mirror_fetch: - provs = (d.getVar("PROVIDES", True) or "").split() - multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split() + provs = (d.getVar("PROVIDES") or "").split() + multiwhitelist = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split() for p in provs: if p.startswith("virtual/") and p not in multiwhitelist: - profprov = d.getVar("PREFERRED_PROVIDER_" + p, True) + profprov = d.getVar("PREFERRED_PROVIDER_" + p) if profprov and pn != profprov: raise bb.parse.SkipPackage("PREFERRED_PROVIDER_%s set to %s, not %s" % (p, profprov, pn)) } @@ -281,7 +305,7 @@ CLEANBROKEN = "0" addtask configure after do_patch do_configure[dirs] = "${B}" -do_configure[deptask] = "do_populate_sysroot" +do_prepare_recipe_sysroot[deptask] = "do_populate_sysroot" base_do_configure() { if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" ]; then @@ -333,9 +357,9 @@ def set_packagetriplet(d): tos = [] tvs = [] - archs.append(d.getVar("PACKAGE_ARCHS", True).split()) - tos.append(d.getVar("TARGET_OS", True)) - tvs.append(d.getVar("TARGET_VENDOR", True)) + archs.append(d.getVar("PACKAGE_ARCHS").split()) + tos.append(d.getVar("TARGET_OS")) + tvs.append(d.getVar("TARGET_VENDOR")) def settriplet(d, varname, archs, tos, tvs): triplets = [] @@ -347,16 +371,15 @@ def set_packagetriplet(d): settriplet(d, "PKGTRIPLETS", archs, tos, tvs) - variants = d.getVar("MULTILIB_VARIANTS", True) or "" + variants = d.getVar("MULTILIB_VARIANTS") or "" for item in variants.split(): localdata = bb.data.createCopy(d) overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item localdata.setVar("OVERRIDES", overrides) - bb.data.update_data(localdata) - archs.append(localdata.getVar("PACKAGE_ARCHS", True).split()) - tos.append(localdata.getVar("TARGET_OS", True)) - tvs.append(localdata.getVar("TARGET_VENDOR", True)) + archs.append(localdata.getVar("PACKAGE_ARCHS").split()) + tos.append(localdata.getVar("TARGET_OS")) + tvs.append(localdata.getVar("TARGET_VENDOR")) settriplet(d, "PKGMLTRIPLETS", archs, tos, tvs) @@ -371,10 +394,10 @@ python () { # PACKAGECONFIG[foo] = "--enable-foo,--disable-foo,foo_depends,foo_runtime_depends" pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {} if pkgconfigflags: - pkgconfig = (d.getVar('PACKAGECONFIG', True) or "").split() - pn = d.getVar("PN", True) + pkgconfig = (d.getVar('PACKAGECONFIG') or "").split() + pn = d.getVar("PN") - mlprefix = d.getVar("MLPREFIX", True) + mlprefix = d.getVar("MLPREFIX") def expandFilter(appends, extension, prefix): appends = bb.utils.explode_deps(d.expand(" ".join(appends))) @@ -416,7 +439,7 @@ python () { num = len(items) if num > 4: bb.error("%s: PACKAGECONFIG[%s] Only enable,disable,depend,rdepend can be specified!" 
- % (d.getVar('PN', True), flag)) + % (d.getVar('PN'), flag)) if flag in pkgconfig: if num >= 3 and items[2]: @@ -431,8 +454,8 @@ python () { appendVar('RDEPENDS_${PN}', extrardeps) appendVar('PACKAGECONFIG_CONFARGS', extraconf) - pn = d.getVar('PN', True) - license = d.getVar('LICENSE', True) + pn = d.getVar('PN') + license = d.getVar('LICENSE') if license == "INVALID": bb.fatal('This recipe does not have the LICENSE field set (%s)' % pn) @@ -462,26 +485,26 @@ python () { d.setVarFlag('do_devshell', 'fakeroot', '1') d.appendVarFlag('do_devshell', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') - need_machine = d.getVar('COMPATIBLE_MACHINE', True) + need_machine = d.getVar('COMPATIBLE_MACHINE') if need_machine: import re - compat_machines = (d.getVar('MACHINEOVERRIDES', True) or "").split(":") + compat_machines = (d.getVar('MACHINEOVERRIDES') or "").split(":") for m in compat_machines: if re.match(need_machine, m): break else: - raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE', True)) + raise bb.parse.SkipPackage("incompatible with machine %s (not in COMPATIBLE_MACHINE)" % d.getVar('MACHINE')) source_mirror_fetch = d.getVar('SOURCE_MIRROR_FETCH', False) if not source_mirror_fetch: - need_host = d.getVar('COMPATIBLE_HOST', True) + need_host = d.getVar('COMPATIBLE_HOST') if need_host: import re - this_host = d.getVar('HOST_SYS', True) + this_host = d.getVar('HOST_SYS') if not re.match(need_host, this_host): raise bb.parse.SkipPackage("incompatible with host %s (not in COMPATIBLE_HOST)" % this_host) - bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split() + bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() check_license = False if pn.startswith("nativesdk-") else True for t in ["-native", "-cross-${TARGET_ARCH}", "-cross-initial-${TARGET_ARCH}", @@ -500,21 +523,21 @@ python () { for lic in bad_licenses: spdx_license = return_spdx(d, lic) for w in ["LGPLv2_WHITELIST_", "WHITELIST_"]: - whitelist.extend((d.getVar(w + lic, True) or "").split()) + whitelist.extend((d.getVar(w + lic) or "").split()) if spdx_license: - whitelist.extend((d.getVar(w + spdx_license, True) or "").split()) + whitelist.extend((d.getVar(w + spdx_license) or "").split()) ''' We need to track what we are whitelisting and why. If pn is incompatible we need to be able to note that the image that is created may infact contain incompatible licenses despite INCOMPATIBLE_LICENSE being set. 
''' - incompatwl.extend((d.getVar(w + lic, True) or "").split()) + incompatwl.extend((d.getVar(w + lic) or "").split()) if spdx_license: - incompatwl.extend((d.getVar(w + spdx_license, True) or "").split()) + incompatwl.extend((d.getVar(w + spdx_license) or "").split()) if not pn in whitelist: - pkgs = d.getVar('PACKAGES', True).split() + pkgs = d.getVar('PACKAGES').split() skipped_pkgs = [] unskipped_pkgs = [] for pkg in pkgs: @@ -526,13 +549,13 @@ python () { if unskipped_pkgs: for pkg in skipped_pkgs: bb.debug(1, "SKIPPING the package " + pkg + " at do_rootfs because it's " + license) - mlprefix = d.getVar('MLPREFIX', True) + mlprefix = d.getVar('MLPREFIX') d.setVar('LICENSE_EXCLUSION-' + mlprefix + pkg, 1) for pkg in unskipped_pkgs: bb.debug(1, "INCLUDING the package " + pkg) elif all_skipped or incompatible_license(d, bad_licenses): bb.debug(1, "SKIPPING recipe %s because it's %s" % (pn, license)) - raise bb.parse.SkipPackage("incompatible with license %s" % license) + raise bb.parse.SkipPackage("it has an incompatible license: %s" % license) elif pn in whitelist: if pn in incompatwl: bb.note("INCLUDING " + pn + " as buildable despite INCOMPATIBLE_LICENSE because it has been whitelisted") @@ -542,8 +565,8 @@ python () { # matching of license expressions - just check that all license strings # in LICENSE_ are found in LICENSE. license_set = oe.license.list_licenses(license) - for pkg in d.getVar('PACKAGES', True).split(): - pkg_license = d.getVar('LICENSE_' + pkg, True) + for pkg in d.getVar('PACKAGES').split(): + pkg_license = d.getVar('LICENSE_' + pkg) if pkg_license: unlisted = oe.license.list_licenses(pkg_license) - license_set if unlisted: @@ -551,7 +574,7 @@ python () { "listed in LICENSE" % (pkg, ' '.join(unlisted))) needsrcrev = False - srcuri = d.getVar('SRC_URI', True) + srcuri = d.getVar('SRC_URI') for uri in srcuri.split(): (scheme, _ , path) = bb.fetch.decodeurl(uri)[:3] @@ -611,8 +634,8 @@ python () { set_packagetriplet(d) # 'multimachine' handling - mach_arch = d.getVar('MACHINE_ARCH', True) - pkg_arch = d.getVar('PACKAGE_ARCH', True) + mach_arch = d.getVar('MACHINE_ARCH') + pkg_arch = d.getVar('PACKAGE_ARCH') if (pkg_arch == mach_arch): # Already machine specific - nothing further to do @@ -622,11 +645,11 @@ python () { # We always try to scan SRC_URI for urls with machine overrides # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0 # - override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', True) + override = d.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH') if override != '0': paths = [] - fpaths = (d.getVar('FILESPATH', True) or '').split(':') - machine = d.getVar('MACHINE', True) + fpaths = (d.getVar('FILESPATH') or '').split(':') + machine = d.getVar('MACHINE') for p in fpaths: if os.path.basename(p) == machine and os.path.isdir(p): paths.append(p) @@ -643,16 +666,16 @@ python () { d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") return - packages = d.getVar('PACKAGES', True).split() + packages = d.getVar('PACKAGES').split() for pkg in packages: - pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg, True) + pkgarch = d.getVar("PACKAGE_ARCH_%s" % pkg) # We could look for != PACKAGE_ARCH here but how to choose # if multiple differences are present? # Look through PACKAGE_ARCHS for the priority order? if pkgarch and pkgarch == mach_arch: d.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}") - bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." 
% d.getVar("PN", True)) + bb.warn("Recipe %s is marked as only being architecture specific but seems to have machine specific packages?! The recipe may as well mark itself as machine specific directly." % d.getVar("PN")) } addtask cleansstate after do_clean @@ -663,7 +686,7 @@ addtask cleanall after do_cleansstate do_cleansstate[nostamp] = "1" python do_cleanall() { - src_uri = (d.getVar('SRC_URI', True) or "").split() + src_uri = (d.getVar('SRC_URI') or "").split() if len(src_uri) == 0: return diff --git a/import-layers/yocto-poky/meta/classes/binconfig-disabled.bbclass b/import-layers/yocto-poky/meta/classes/binconfig-disabled.bbclass index 602a669aa..096b670e1 100644 --- a/import-layers/yocto-poky/meta/classes/binconfig-disabled.bbclass +++ b/import-layers/yocto-poky/meta/classes/binconfig-disabled.bbclass @@ -15,6 +15,7 @@ do_install_append () { echo "echo 'ERROR: $x should not be used, use an alternative such as pkg-config' >&2" >> ${D}$x echo "echo '--should-not-have-used-$x'" >> ${D}$x echo "exit 1" >> ${D}$x + chmod +x ${D}$x done } diff --git a/import-layers/yocto-poky/meta/classes/binconfig.bbclass b/import-layers/yocto-poky/meta/classes/binconfig.bbclass index cbc417360..39c3e2b17 100644 --- a/import-layers/yocto-poky/meta/classes/binconfig.bbclass +++ b/import-layers/yocto-poky/meta/classes/binconfig.bbclass @@ -13,16 +13,16 @@ def get_binconfig_mangle(d): s += " -e 's:=%s${exec_prefix}/:=\\1OEEXECPREFIX/:'" % optional_quote s += " -e 's:-L${libdir}:-LOELIBDIR:;'" s += " -e 's:-I${includedir}:-IOEINCDIR:;'" + s += " -e 's:-L${WORKDIR}:-LOELIBDIR:'" + s += " -e 's:-I${WORKDIR}:-IOEINCDIR:'" s += " -e 's:OEBASELIBDIR:${STAGING_BASELIBDIR}:;'" s += " -e 's:OELIBDIR:${STAGING_LIBDIR}:;'" s += " -e 's:OEINCDIR:${STAGING_INCDIR}:;'" s += " -e 's:OEDATADIR:${STAGING_DATADIR}:'" s += " -e 's:OEPREFIX:${STAGING_DIR_HOST}${prefix}:'" s += " -e 's:OEEXECPREFIX:${STAGING_DIR_HOST}${exec_prefix}:'" - s += " -e 's:-I${WORKDIR}:-I${STAGING_INCDIR}:'" - s += " -e 's:-L${WORKDIR}:-L${STAGING_LIBDIR}:'" - if bb.data.getVar("OE_BINCONFIG_EXTRA_MANGLE", d): - s += bb.data.getVar("OE_BINCONFIG_EXTRA_MANGLE", d) + if d.getVar("OE_BINCONFIG_EXTRA_MANGLE", False): + s += d.getVar("OE_BINCONFIG_EXTRA_MANGLE") return s diff --git a/import-layers/yocto-poky/meta/classes/blacklist.bbclass b/import-layers/yocto-poky/meta/classes/blacklist.bbclass index a0141a82c..e58564c34 100644 --- a/import-layers/yocto-poky/meta/classes/blacklist.bbclass +++ b/import-layers/yocto-poky/meta/classes/blacklist.bbclass @@ -12,33 +12,8 @@ # PNBLACKLIST[pn] = "message" # -# Cope with PNBLACKLIST flags for multilib case -addhandler blacklist_multilib_eventhandler -blacklist_multilib_eventhandler[eventmask] = "bb.event.ConfigParsed" -python blacklist_multilib_eventhandler() { - multilibs = e.data.getVar('MULTILIBS', True) - if not multilibs: - return - - # this block has been copied from base.bbclass so keep it in sync - prefixes = [] - for ext in multilibs.split(): - eext = ext.split(':') - if len(eext) > 1 and eext[0] == 'multilib': - prefixes.append(eext[1]) - - blacklists = e.data.getVarFlags('PNBLACKLIST') or {} - for pkg, reason in blacklists.items(): - if pkg.endswith(("-native", "-crosssdk")) or pkg.startswith(("nativesdk-", "virtual/nativesdk-")) or 'cross-canadian' in pkg: - continue - for p in prefixes: - newpkg = p + "-" + pkg - if not e.data.getVarFlag('PNBLACKLIST', newpkg, True): - e.data.setVarFlag('PNBLACKLIST', newpkg, reason) -} - python () { - blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN', True), 
True) + blacklist = d.getVarFlag('PNBLACKLIST', d.getVar('PN')) if blacklist: raise bb.parse.SkipPackage("Recipe is blacklisted: %s" % (blacklist)) diff --git a/import-layers/yocto-poky/meta/classes/bugzilla.bbclass b/import-layers/yocto-poky/meta/classes/bugzilla.bbclass index 3fc895642..8909c2734 100644 --- a/import-layers/yocto-poky/meta/classes/bugzilla.bbclass +++ b/import-layers/yocto-poky/meta/classes/bugzilla.bbclass @@ -110,14 +110,14 @@ python bugzilla_eventhandler() { return if name == "TaskFailed": - xmlrpc = data.getVar("BUGZILLA_XMLRPC", True) - user = data.getVar("BUGZILLA_USER", True) - passw = data.getVar("BUGZILLA_PASS", True) - product = data.getVar("BUGZILLA_PRODUCT", True) - compon = data.getVar("BUGZILLA_COMPONENT", True) - version = data.getVar("BUGZILLA_VERSION", True) - - proxy = data.getVar('http_proxy', True ) + xmlrpc = data.getVar("BUGZILLA_XMLRPC") + user = data.getVar("BUGZILLA_USER") + passw = data.getVar("BUGZILLA_PASS") + product = data.getVar("BUGZILLA_PRODUCT") + compon = data.getVar("BUGZILLA_COMPONENT") + version = data.getVar("BUGZILLA_VERSION") + + proxy = data.getVar('http_proxy') if (proxy): import urllib2 s, u, p, hostport = urllib2._parse_proxy(proxy) @@ -133,14 +133,14 @@ python bugzilla_eventhandler() { 'component': compon} # evil hack to figure out what is going on - debug_file = open(os.path.join(data.getVar("TMPDIR", True),"..","bugzilla-log"),"a") + debug_file = open(os.path.join(data.getVar("TMPDIR"),"..","bugzilla-log"),"a") file = None - bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN", True), - "pv" : data.getVar("PV", True), + bugname = "%(package)s-%(pv)s-autobuild" % { "package" : data.getVar("PN"), + "pv" : data.getVar("PV"), } - log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task)) - text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN", True), data.getVar('DATETIME', True), data.getVar( 'MACHINE', True ) ) + log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T'), event.task)) + text = "The %s step in %s failed at %s for machine %s" % (e.task, data.getVar("PN"), data.getVar('DATETIME'), data.getVar('MACHINE') ) if len(log_file) != 0: print >> debug_file, "Adding log file %s" % log_file[0] file = open(log_file[0], 'r') @@ -168,7 +168,7 @@ python bugzilla_eventhandler() { if bug_number and log: print >> debug_file, "The bug is known as '%s'" % bug_number - desc = "Build log for machine %s" % (data.getVar('MACHINE', True)) + desc = "Build log for machine %s" % (data.getVar('MACHINE')) if not bugzilla_create_attachment(debug_file, server, args.copy(), bug_number, text, log_file[0], log, desc): print >> debug_file, "Failed to attach the build log for bug #%s" % bug_number else: diff --git a/import-layers/yocto-poky/meta/classes/buildhistory.bbclass b/import-layers/yocto-poky/meta/classes/buildhistory.bbclass index 3a5bc2c3e..3823c664a 100644 --- a/import-layers/yocto-poky/meta/classes/buildhistory.bbclass +++ b/import-layers/yocto-poky/meta/classes/buildhistory.bbclass @@ -47,6 +47,11 @@ sstate_install[vardepsexclude] += "buildhistory_emit_pkghistory" # then the value added to SSTATEPOSTINSTFUNCS: SSTATEPOSTINSTFUNCS[vardepvalueexclude] .= "| buildhistory_emit_pkghistory" +# Similarly for our function that gets the output signatures +SSTATEPOSTUNPACKFUNCS_append = " buildhistory_emit_outputsigs" +sstate_installpkgdir[vardepsexclude] += "buildhistory_emit_outputsigs" +SSTATEPOSTUNPACKFUNCS[vardepvalueexclude] .= "| buildhistory_emit_outputsigs" + # All 
items excepts those listed here will be removed from a recipe's # build history directory by buildhistory_emit_pkghistory(). This is # necessary because some of these items (package directories, files that @@ -64,18 +69,18 @@ PATCH_GIT_USER_NAME ?= "OpenEmbedded" # Write out metadata about this package for comparison when writing future packages # python buildhistory_emit_pkghistory() { - if not d.getVar('BB_CURRENTTASK', True) in ['packagedata', 'packagedata_setscene']: + if not d.getVar('BB_CURRENTTASK') in ['packagedata', 'packagedata_setscene']: return 0 - if not "package" in (d.getVar('BUILDHISTORY_FEATURES', True) or "").split(): + if not "package" in (d.getVar('BUILDHISTORY_FEATURES') or "").split(): return 0 import re import json import errno - pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) - oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE', True) + pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE') + oldpkghistdir = d.getVar('BUILDHISTORY_OLD_DIR_PACKAGE') class RecipeInfo: def __init__(self, name): @@ -86,6 +91,7 @@ python buildhistory_emit_pkghistory() { self.depends = "" self.packages = "" self.srcrev = "" + self.layer = "" class PackageInfo: @@ -182,12 +188,13 @@ python buildhistory_emit_pkghistory() { items.sort() return ' '.join(items) - pn = d.getVar('PN', True) - pe = d.getVar('PE', True) or "0" - pv = d.getVar('PV', True) - pr = d.getVar('PR', True) + pn = d.getVar('PN') + pe = d.getVar('PE') or "0" + pv = d.getVar('PV') + pr = d.getVar('PR') + layer = bb.utils.get_file_layer(d.getVar('FILE', True), d) - pkgdata_dir = d.getVar('PKGDATA_DIR', True) + pkgdata_dir = d.getVar('PKGDATA_DIR') packages = "" try: with open(os.path.join(pkgdata_dir, pn)) as f: @@ -203,7 +210,7 @@ python buildhistory_emit_pkghistory() { raise packagelist = packages.split() - preserve = d.getVar('BUILDHISTORY_PRESERVE', True).split() + preserve = d.getVar('BUILDHISTORY_PRESERVE').split() if not os.path.exists(pkghistdir): bb.utils.mkdirhier(pkghistdir) else: @@ -223,11 +230,12 @@ python buildhistory_emit_pkghistory() { rcpinfo.pe = pe rcpinfo.pv = pv rcpinfo.pr = pr - rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS', True) or "")) + rcpinfo.depends = sortlist(oe.utils.squashspaces(d.getVar('DEPENDS') or "")) rcpinfo.packages = packages + rcpinfo.layer = layer write_recipehistory(rcpinfo, d) - pkgdest = d.getVar('PKGDEST', True) + pkgdest = d.getVar('PKGDEST') for pkg in packagelist: pkgdata = {} with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f: @@ -289,11 +297,46 @@ python buildhistory_emit_pkghistory() { bb.build.exec_func("buildhistory_list_pkg_files", d) } +python buildhistory_emit_outputsigs() { + if not "task" in (d.getVar('BUILDHISTORY_FEATURES') or "").split(): + return + + import hashlib + + taskoutdir = os.path.join(d.getVar('BUILDHISTORY_DIR'), 'task', 'output') + bb.utils.mkdirhier(taskoutdir) + currenttask = d.getVar('BB_CURRENTTASK') + pn = d.getVar('PN') + taskfile = os.path.join(taskoutdir, '%s.%s' % (pn, currenttask)) + + cwd = os.getcwd() + filesigs = {} + for root, _, files in os.walk(cwd): + for fname in files: + if fname == 'fixmepath': + continue + fullpath = os.path.join(root, fname) + try: + if os.path.islink(fullpath): + sha256 = hashlib.sha256(os.readlink(fullpath).encode('utf-8')).hexdigest() + elif os.path.isfile(fullpath): + sha256 = bb.utils.sha256_file(fullpath) + else: + continue + except OSError: + bb.warn('buildhistory: unable to read %s to get output signature' % fullpath) + continue + filesigs[os.path.relpath(fullpath, 
cwd)] = sha256 + with open(taskfile, 'w') as f: + for fpath, fsig in sorted(filesigs.items(), key=lambda item: item[0]): + f.write('%s %s\n' % (fpath, fsig)) +} + def write_recipehistory(rcpinfo, d): bb.debug(2, "Writing recipe history") - pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) + pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE') infofile = os.path.join(pkghistdir, "latest") with open(infofile, "w") as f: @@ -303,12 +346,13 @@ def write_recipehistory(rcpinfo, d): f.write(u"PR = %s\n" % rcpinfo.pr) f.write(u"DEPENDS = %s\n" % rcpinfo.depends) f.write(u"PACKAGES = %s\n" % rcpinfo.packages) + f.write(u"LAYER = %s\n" % rcpinfo.layer) def write_pkghistory(pkginfo, d): bb.debug(2, "Writing package history for package %s" % pkginfo.name) - pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) + pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE') pkgpath = os.path.join(pkghistdir, pkginfo.name) if not os.path.exists(pkgpath): @@ -369,7 +413,7 @@ def buildhistory_list_installed(d, rootfs_type="image"): pkgs = sdk_list_installed_packages(d, rootfs_type == "sdk_target") for output_type, output_file in process_list: - output_file_full = os.path.join(d.getVar('WORKDIR', True), output_file) + output_file_full = os.path.join(d.getVar('WORKDIR'), output_file) with open(output_file_full, 'w') as output: output.write(format_pkg_list(pkgs, output_type)) @@ -402,19 +446,26 @@ buildhistory_get_installed() { # Produce dependency graph # First, quote each name to handle characters that cause issues for dot - sed 's:\([^| ]*\):"\1":g' ${WORKDIR}/bh_installed_pkgs_deps.txt > $1/depends.tmp && \ + sed 's:\([^| ]*\):"\1":g' ${WORKDIR}/bh_installed_pkgs_deps.txt > $1/depends.tmp && rm ${WORKDIR}/bh_installed_pkgs_deps.txt - # Change delimiter from pipe to -> and set style for recommend lines - sed -i -e 's:|: -> :' -e 's:"\[REC\]":[style=dotted]:' -e 's:$:;:' $1/depends.tmp + # Remove lines with rpmlib(...) and config(...) dependencies, change the + # delimiter from pipe to "->", set the style for recommend lines and + # turn versioned dependencies into edge labels. 
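The comment above describes the new filtering, and the sed command that follows applies it to depends.tmp, whose lines have every token quoted by the earlier sed (e.g. "busybox"|"libc6" ">=" "2.25"). As a purely illustrative rendering of the same transformation in Python (buildhistory keeps the sed):

    # Illustration only; mirrors the sed pipeline that follows.
    import re

    def to_dot_edge(line):
        if "rpmlib(" in line or "config(" in line:
            return None                              # internal rpm deps, dropped
        line = line.replace("|", " -> ", 1)          # pipe delimiter -> dot edge
        line = line.replace('"[REC]"', "[style=dotted]")
        # '">=" "2.25"' becomes an edge label: [label=">= 2.25"]
        return re.sub(r'"([<>=]+)" "([^"]*)"', r'[label="\1 \2"]', line)

    print(to_dot_edge('"busybox"|"libc6" ">=" "2.25"'))
    # "busybox" -> "libc6" [label=">= 2.25"]
    print(to_dot_edge('"packagegroup-core-boot"|"busybox" "[REC]"'))
    # "packagegroup-core-boot" -> "busybox" [style=dotted]
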
+ sed -i -e '/rpmlib(/d' \ + -e '/config(/d' \ + -e 's:|: -> :' \ + -e 's:"\[REC\]":[style=dotted]:' \ + -e 's:"\([<>=]\+\)" "\([^"]*\)":[label="\1 \2"]:' \ + $1/depends.tmp # Add header, sorted and de-duped contents and footer and then delete the temp file printf "digraph depends {\n node [shape=plaintext]\n" > $1/depends.dot - cat $1/depends.tmp | sort | uniq >> $1/depends.dot + cat $1/depends.tmp | sort -u >> $1/depends.dot echo "}" >> $1/depends.dot rm $1/depends.tmp # Produce installed package sizes list oe-pkgdata-util -p ${PKGDATA_DIR} read-value "PKGSIZE" -n -f $pkgcache > $1/installed-package-sizes.tmp - cat $1/installed-package-sizes.tmp | awk '{print $2 "\tKiB " $1}' | sort -n -r > $1/installed-package-sizes.txt + cat $1/installed-package-sizes.tmp | awk '{print $2 "\tKiB\t" $1}' | sort -n -r > $1/installed-package-sizes.txt rm $1/installed-package-sizes.tmp # We're now done with the cache, delete it @@ -550,7 +601,9 @@ END python buildhistory_get_extra_sdkinfo() { import operator import math - if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext': + + if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext' and \ + "sdk" in (d.getVar('BUILDHISTORY_FEATURES') or "").split(): tasksizes = {} filesizes = {} for root, _, files in os.walk(d.expand('${SDK_OUTPUT}/${SDKPATH}/sstate-cache')): @@ -573,10 +626,14 @@ python buildhistory_get_extra_sdkinfo() { # By using ROOTFS_POSTUNINSTALL_COMMAND we get in after uninstallation of # unneeded packages but before the removal of packaging files -ROOTFS_POSTUNINSTALL_COMMAND += " buildhistory_list_installed_image ;\ - buildhistory_get_image_installed ; " +ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_list_installed_image ;" +ROOTFS_POSTUNINSTALL_COMMAND += "buildhistory_get_image_installed ;" +ROOTFS_POSTUNINSTALL_COMMAND[vardepvalueexclude] .= "| buildhistory_list_installed_image ;| buildhistory_get_image_installed ;" +ROOTFS_POSTUNINSTALL_COMMAND[vardepsexclude] += "buildhistory_list_installed_image buildhistory_get_image_installed" -IMAGE_POSTPROCESS_COMMAND += " buildhistory_get_imageinfo ; " +IMAGE_POSTPROCESS_COMMAND += "buildhistory_get_imageinfo ;" +IMAGE_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_imageinfo ;" +IMAGE_POSTPROCESS_COMMAND[vardepsexclude] += "buildhistory_get_imageinfo" # We want these to be the last run so that we get called after complementary package installation POPULATE_SDK_POST_TARGET_COMMAND_append = " buildhistory_list_installed_sdk_target;" @@ -590,11 +647,21 @@ POPULATE_SDK_POST_HOST_COMMAND[vardepvalueexclude] .= "| buildhistory_list_insta SDK_POSTPROCESS_COMMAND_append = " buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; " SDK_POSTPROCESS_COMMAND[vardepvalueexclude] .= "| buildhistory_get_sdkinfo ; buildhistory_get_extra_sdkinfo; " +python buildhistory_write_sigs() { + if not "task" in (d.getVar('BUILDHISTORY_FEATURES') or "").split(): + return + + # Create sigs file + if hasattr(bb.parse.siggen, 'dump_siglist'): + taskoutdir = os.path.join(d.getVar('BUILDHISTORY_DIR'), 'task') + bb.utils.mkdirhier(taskoutdir) + bb.parse.siggen.dump_siglist(os.path.join(taskoutdir, 'tasksigs.txt')) +} + def buildhistory_get_build_id(d): - if d.getVar('BB_WORKERCONTEXT', True) != '1': + if d.getVar('BB_WORKERCONTEXT') != '1': return "" localdata = bb.data.createCopy(d) - bb.data.update_data(localdata) statuslines = [] for func in oe.data.typed_value('BUILDCFG_FUNCS', localdata): g = globals() @@ -605,12 +672,12 @@ def buildhistory_get_build_id(d): if flines: statuslines.extend(flines) - statusheader 
= d.getVar('BUILDCFG_HEADER', True) + statusheader = d.getVar('BUILDCFG_HEADER') return('\n%s\n%s\n' % (statusheader, '\n'.join(statuslines))) def buildhistory_get_metadata_revs(d): # We want an easily machine-readable format here, so get_layers_branch_rev isn't quite what we want - layers = (d.getVar("BBLAYERS", True) or "").split() + layers = (d.getVar("BBLAYERS") or "").split() medadata_revs = ["%-17s = %s:%s" % (os.path.basename(i), \ base_get_metadata_git_branch(i, None).strip(), \ base_get_metadata_git_revision(i, None)) \ @@ -622,7 +689,7 @@ def outputvars(vars, listvars, d): listvars = listvars.split() ret = "" for var in vars: - value = d.getVar(var, True) or "" + value = d.getVar(var) or "" if var in listvars: # Squash out spaces value = oe.utils.squashspaces(value) @@ -630,17 +697,17 @@ def outputvars(vars, listvars, d): return ret.rstrip('\n') def buildhistory_get_imagevars(d): - if d.getVar('BB_WORKERCONTEXT', True) != '1': + if d.getVar('BB_WORKERCONTEXT') != '1': return "" imagevars = "DISTRO DISTRO_VERSION USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE ROOTFS_POSTPROCESS_COMMAND IMAGE_POSTPROCESS_COMMAND" listvars = "USER_CLASSES IMAGE_CLASSES IMAGE_FEATURES IMAGE_LINGUAS IMAGE_INSTALL BAD_RECOMMENDATIONS PACKAGE_EXCLUDE" return outputvars(imagevars, listvars, d) def buildhistory_get_sdkvars(d): - if d.getVar('BB_WORKERCONTEXT', True) != '1': + if d.getVar('BB_WORKERCONTEXT') != '1': return "" sdkvars = "DISTRO DISTRO_VERSION SDK_NAME SDK_VERSION SDKMACHINE SDKIMAGE_FEATURES BAD_RECOMMENDATIONS NO_RECOMMENDATIONS PACKAGE_EXCLUDE" - if d.getVar('BB_CURRENTTASK', True) == 'populate_sdk_ext': + if d.getVar('BB_CURRENTTASK') == 'populate_sdk_ext': # Extensible SDK uses some additional variables sdkvars += " SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST SDK_UPDATE_URL SDK_EXT_TYPE SDK_RECRDEP_TASKS SDK_INCLUDE_PKGDATA SDK_INCLUDE_TOOLCHAIN" listvars = "SDKIMAGE_FEATURES BAD_RECOMMENDATIONS PACKAGE_EXCLUDE SDK_LOCAL_CONF_WHITELIST SDK_LOCAL_CONF_BLACKLIST SDK_INHERIT_BLACKLIST" @@ -735,16 +802,16 @@ END } python buildhistory_eventhandler() { - if e.data.getVar('BUILDHISTORY_FEATURES', True).strip(): - reset = e.data.getVar("BUILDHISTORY_RESET", True) - olddir = e.data.getVar("BUILDHISTORY_OLD_DIR", True) + if e.data.getVar('BUILDHISTORY_FEATURES').strip(): + reset = e.data.getVar("BUILDHISTORY_RESET") + olddir = e.data.getVar("BUILDHISTORY_OLD_DIR") if isinstance(e, bb.event.BuildStarted): if reset: import shutil # Clean up after potentially interrupted build. 
if os.path.isdir(olddir): shutil.rmtree(olddir) - rootdir = e.data.getVar("BUILDHISTORY_DIR", True) + rootdir = e.data.getVar("BUILDHISTORY_DIR") entries = [ x for x in os.listdir(rootdir) if not x.startswith('.') ] bb.utils.mkdirhier(olddir) for entry in entries: @@ -754,8 +821,9 @@ python buildhistory_eventhandler() { if reset: import shutil shutil.rmtree(olddir) - if e.data.getVar("BUILDHISTORY_COMMIT", True) == "1": + if e.data.getVar("BUILDHISTORY_COMMIT") == "1": bb.note("Writing buildhistory") + bb.build.exec_func("buildhistory_write_sigs", d) localdata = bb.data.createCopy(e.data) localdata.setVar('BUILDHISTORY_BUILD_FAILURES', str(e._failures)) interrupted = getattr(e, '_interrupted', 0) @@ -774,7 +842,7 @@ def _get_srcrev_values(d): """ scms = [] - fetcher = bb.fetch.Fetch(d.getVar('SRC_URI', True).split(), d) + fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d) urldata = fetcher.ud for u in urldata: if urldata[u].method.supports_srcrev(): @@ -806,7 +874,7 @@ def _get_srcrev_values(d): do_fetch[postfuncs] += "write_srcrev" do_fetch[vardepsexclude] += "write_srcrev" python write_srcrev() { - pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE', True) + pkghistdir = d.getVar('BUILDHISTORY_DIR_PACKAGE') srcrevfile = os.path.join(pkghistdir, 'latest_srcrev') srcrevs, tag_srcrevs = _get_srcrev_values(d) @@ -833,12 +901,12 @@ python write_srcrev() { f.write('# SRCREV_%s = "%s"\n' % (name, orig_srcrev)) f.write('SRCREV_%s = "%s"\n' % (name, srcrev)) else: - f.write('SRCREV = "%s"\n' % srcrevs.values()) + f.write('SRCREV = "%s"\n' % next(iter(srcrevs.values()))) if len(tag_srcrevs) > 0: for name, srcrev in tag_srcrevs.items(): f.write('# tag_%s = "%s"\n' % (name, srcrev)) if name in old_tag_srcrevs and old_tag_srcrevs[name] != srcrev: - pkg = d.getVar('PN', True) + pkg = d.getVar('PN') bb.warn("Revision for tag %s in package %s was changed since last build (from %s to %s)" % (name, pkg, old_tag_srcrevs[name], srcrev)) else: diff --git a/import-layers/yocto-poky/meta/classes/buildstats-summary.bbclass b/import-layers/yocto-poky/meta/classes/buildstats-summary.bbclass index b86abcc3f..f9b241b6c 100644 --- a/import-layers/yocto-poky/meta/classes/buildstats-summary.bbclass +++ b/import-layers/yocto-poky/meta/classes/buildstats-summary.bbclass @@ -7,7 +7,7 @@ python buildstats_summary () { if not os.path.exists(bsdir): return - sstatetasks = (e.data.getVar('SSTATETASKS', True) or '').split() + sstatetasks = (e.data.getVar('SSTATETASKS') or '').split() built = collections.defaultdict(lambda: [set(), set()]) for pf in os.listdir(bsdir): taskdir = os.path.join(bsdir, pf) diff --git a/import-layers/yocto-poky/meta/classes/buildstats.bbclass b/import-layers/yocto-poky/meta/classes/buildstats.bbclass index 599a21998..960653c70 100644 --- a/import-layers/yocto-poky/meta/classes/buildstats.bbclass +++ b/import-layers/yocto-poky/meta/classes/buildstats.bbclass @@ -31,6 +31,11 @@ def get_process_cputime(pid): i = f.readline().strip() if not i: break + if not ":" in i: + # one more extra line is appended (empty or containing "0") + # most probably due to race condition in kernel while + # updating IO stats + break i = i.split(": ") iostats[i[0]] = i[1] resources = resource.getrusage(resource.RUSAGE_SELF) @@ -75,13 +80,13 @@ def get_buildtimedata(var, d): return timediff, cpuperc def write_task_data(status, logfile, e, d): - bn = d.getVar('BUILDNAME', True) - bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn) + bn = d.getVar('BUILDNAME') + bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn) 
with open(os.path.join(logfile), "a") as f: elapsedtime = get_timedata("__timedata_task", d, e.time) if elapsedtime: - f.write(d.expand("${PF}: %s: Elapsed time: %0.2f seconds \n" % - (e.task, elapsedtime))) + f.write(d.expand("${PF}: %s\n" % e.task)) + f.write(d.expand("Elapsed time: %0.2f seconds\n" % elapsedtime)) cpu, iostats, resources, childres = get_process_cputime(os.getpid()) if cpu: f.write("utime: %s\n" % cpu['utime']) @@ -106,9 +111,9 @@ python run_buildstats () { import bb.event import time, subprocess, platform - bn = d.getVar('BUILDNAME', True) - bsdir = os.path.join(d.getVar('BUILDSTATS_BASE', True), bn) - taskdir = os.path.join(bsdir, d.getVar('PF', True)) + bn = d.getVar('BUILDNAME') + bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn) + taskdir = os.path.join(bsdir, d.getVar('PF')) if isinstance(e, bb.event.BuildStarted): ######################################################################## @@ -162,7 +167,7 @@ python run_buildstats () { if e.task == "do_rootfs": bs = os.path.join(bsdir, "build_stats") with open(bs, "a") as f: - rootfs = d.getVar('IMAGE_ROOTFS', True) + rootfs = d.getVar('IMAGE_ROOTFS') if os.path.isdir(rootfs): try: rootfs_size = subprocess.check_output(["du", "-sh", rootfs], @@ -188,3 +193,27 @@ python run_buildstats () { addhandler run_buildstats run_buildstats[eventmask] = "bb.event.BuildStarted bb.event.BuildCompleted bb.build.TaskStarted bb.build.TaskSucceeded bb.build.TaskFailed" +python runqueue_stats () { + import buildstats + from bb import event, runqueue + # We should not record any samples before the first task has started, + # because that's the first activity shown in the process chart. + # Besides, at that point we are sure that the build variables + # are available that we need to find the output directory. + # The persistent SystemStats is stored in the datastore and + # closed when the build is done. + system_stats = d.getVar('_buildstats_system_stats', False) + if not system_stats and isinstance(e, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted)): + system_stats = buildstats.SystemStats(d) + d.setVar('_buildstats_system_stats', system_stats) + if system_stats: + # Ensure that we sample at important events. + done = isinstance(e, bb.event.BuildCompleted) + system_stats.sample(e, force=done) + if done: + system_stats.close() + d.delVar('_buildstats_system_stats') +} + +addhandler runqueue_stats +runqueue_stats[eventmask] = "bb.runqueue.sceneQueueTaskStarted bb.runqueue.runQueueTaskStarted bb.event.HeartbeatEvent bb.event.BuildCompleted bb.event.MonitorDiskEvent" diff --git a/import-layers/yocto-poky/meta/classes/ccache.bbclass b/import-layers/yocto-poky/meta/classes/ccache.bbclass index 2e9837cf0..d58c8f6e5 100644 --- a/import-layers/yocto-poky/meta/classes/ccache.bbclass +++ b/import-layers/yocto-poky/meta/classes/ccache.bbclass @@ -1,6 +1,15 @@ -CCACHE = "${@bb.utils.which(d.getVar('PATH', True), 'ccache') and 'ccache '}" -export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_HOST_SYS}/${PN}" +CCACHE = "${@bb.utils.which(d.getVar('PATH'), 'ccache') and 'ccache '}" +export CCACHE_DIR ?= "${TMPDIR}/ccache/${MULTIMACH_TARGET_SYS}/${PN}" CCACHE_DISABLE[unexport] = "1" +# We need to stop ccache considering the current directory or the +# debug-prefix-map target directory to be significant when calculating +# its hash. Without this the cache would be invalidated every time +# ${PV} or ${PR} change. 
+export CCACHE_NOHASHDIR ?= "1" + +DEPENDS_append_class-target = " ccache-native" +DEPENDS[vardepvalueexclude] = " ccache-native" + do_configure[dirs] =+ "${CCACHE_DIR}" do_kernel_configme[dirs] =+ "${CCACHE_DIR}" diff --git a/import-layers/yocto-poky/meta/classes/chrpath.bbclass b/import-layers/yocto-poky/meta/classes/chrpath.bbclass index 3b5cd37f7..ad3c3975a 100644 --- a/import-layers/yocto-poky/meta/classes/chrpath.bbclass +++ b/import-layers/yocto-poky/meta/classes/chrpath.bbclass @@ -17,19 +17,24 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d): # Throw away everything other than the rpath list curr_rpath = out.partition("RPATH=")[2] #bb.note("Current rpath for %s is %s" % (fpath, curr_rpath.strip())) - rpaths = curr_rpath.split(":") + rpaths = curr_rpath.strip().split(":") new_rpaths = [] modified = False for rpath in rpaths: # If rpath is already dynamic copy it to new_rpath and continue if rpath.find("$ORIGIN") != -1: - new_rpaths.append(rpath.strip()) + new_rpaths.append(rpath) continue rpath = os.path.normpath(rpath) if baseprefix not in rpath and tmpdir not in rpath: - new_rpaths.append(rpath.strip()) + # Skip standard search paths + if rpath in ['/lib', '/usr/lib', '/lib64/', '/usr/lib64']: + bb.warn("Skipping RPATH %s as is a standard search path for %s" % (rpath, fpath)) + modified = True + continue + new_rpaths.append(rpath) continue - new_rpaths.append("$ORIGIN/" + os.path.relpath(rpath.strip(), os.path.dirname(fpath.replace(rootdir, "/")))) + new_rpaths.append("$ORIGIN/" + os.path.relpath(rpath, os.path.dirname(fpath.replace(rootdir, "/")))) modified = True # if we have modified some rpaths call chrpath to update the binary @@ -39,7 +44,7 @@ def process_file_linux(cmd, fpath, rootdir, baseprefix, tmpdir, d): p = sub.Popen([cmd, '-r', args, fpath],stdout=sub.PIPE,stderr=sub.PIPE) out, err = p.communicate() if p.returncode != 0: - bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN', True), p.returncode, out, err)) + bb.fatal("%s: chrpath command failed with exit code %d:\n%s%s" % (d.getVar('PN'), p.returncode, out, err)) def process_file_darwin(cmd, fpath, rootdir, baseprefix, tmpdir, d): import subprocess as sub @@ -67,7 +72,7 @@ def process_dir (rootdir, directory, d): cmd = d.expand('${CHRPATH_BIN}') tmpdir = os.path.normpath(d.getVar('TMPDIR', False)) baseprefix = os.path.normpath(d.expand('${base_prefix}')) - hostos = d.getVar("HOST_OS", True) + hostos = d.getVar("HOST_OS") #bb.debug("Checking %s for binaries to process" % directory) if not os.path.exists(directory): diff --git a/import-layers/yocto-poky/meta/classes/cmake.bbclass b/import-layers/yocto-poky/meta/classes/cmake.bbclass index fad0baa51..12df617ad 100644 --- a/import-layers/yocto-poky/meta/classes/cmake.bbclass +++ b/import-layers/yocto-poky/meta/classes/cmake.bbclass @@ -1,5 +1,5 @@ # Path to the CMake file to process. 
-OECMAKE_SOURCEPATH ?= "${S}" +OECMAKE_SOURCEPATH ??= "${S}" DEPENDS_prepend = "cmake-native " B = "${WORKDIR}/build" @@ -42,11 +42,15 @@ def map_target_arch_to_uname_arch(target_arch): return target_arch cmake_do_generate_toolchain_file() { + if [ "${BUILD_SYS}" = "${HOST_SYS}" ]; then + cmake_crosscompiling="set( CMAKE_CROSSCOMPILING FALSE )" + fi cat > ${WORKDIR}/toolchain.cmake </dev/null" + cmd = (d.getVar('TARGET_PREFIX') or "") + "objdump -p " + file + " 2>/dev/null" fd = os.popen(cmd) lines = fd.readlines() fd.close() @@ -84,7 +84,7 @@ python debian_package_name_hook () { if len(sonames) == 1: soname = sonames[0] elif len(sonames) > 1: - lead = d.getVar('LEAD_SONAME', True) + lead = d.getVar('LEAD_SONAME') if lead: r = re.compile(lead) filtered = [] @@ -115,7 +115,7 @@ python debian_package_name_hook () { newpkg = pkgname else: newpkg = pkg.replace(orig_pkg, devname, 1) - mlpre=d.getVar('MLPREFIX', True) + mlpre=d.getVar('MLPREFIX') if mlpre: if not newpkg.find(mlpre) == 0: newpkg = mlpre + newpkg @@ -131,7 +131,7 @@ python debian_package_name_hook () { # and later # DEBUG: LIBNAMES: pkgname libtic5 devname libtic pkg ncurses-libticw orig_pkg ncurses-libtic debian_pn None newpkg libticw # so we need to handle ncurses-libticw->libticw5 before ncurses-libtic->libtic5 - for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS', True) or "").split(), reverse=True): + for pkg in sorted((d.getVar('AUTO_LIBNAME_PKGS') or "").split(), reverse=True): auto_libname(packages, pkg) } diff --git a/import-layers/yocto-poky/meta/classes/devshell.bbclass b/import-layers/yocto-poky/meta/classes/devshell.bbclass index be71aff35..4de7ea6fc 100644 --- a/import-layers/yocto-poky/meta/classes/devshell.bbclass +++ b/import-layers/yocto-poky/meta/classes/devshell.bbclass @@ -3,16 +3,16 @@ inherit terminal DEVSHELL = "${SHELL}" python do_devshell () { - if d.getVarFlag("do_devshell", "manualfakeroot", True): + if d.getVarFlag("do_devshell", "manualfakeroot"): d.prependVar("DEVSHELL", "pseudo ") - fakeenv = d.getVar("FAKEROOTENV", True).split() + fakeenv = d.getVar("FAKEROOTENV").split() for f in fakeenv: k = f.split("=") d.setVar(k[0], k[1]) d.appendVar("OE_TERMINAL_EXPORTS", " " + k[0]) d.delVarFlag("do_devshell", "fakeroot") - oe_terminal(d.getVar('DEVSHELL', True), 'OpenEmbedded Developer Shell', d) + oe_terminal(d.getVar('DEVSHELL'), 'OpenEmbedded Developer Shell', d) } addtask devshell after do_patch @@ -27,7 +27,7 @@ do_devshell[nostamp] = "1" # be done as the normal user. We therfore carefully construct the envionment # manually python () { - if d.getVarFlag("do_devshell", "fakeroot", True): + if d.getVarFlag("do_devshell", "fakeroot"): # We need to signal our code that we want fakeroot however we # can't manipulate the environment and variables here yet (see YOCTO #4795) d.setVarFlag("do_devshell", "manualfakeroot", "1") @@ -82,7 +82,7 @@ def devpyshell(d): more = False i = code.InteractiveInterpreter(locals=_context) - print("OE PyShell (PN = %s)\n" % d.getVar("PN", True)) + print("OE PyShell (PN = %s)\n" % d.getVar("PN")) def prompt(more): if more: diff --git a/import-layers/yocto-poky/meta/classes/devupstream.bbclass b/import-layers/yocto-poky/meta/classes/devupstream.bbclass new file mode 100644 index 000000000..7780c5482 --- /dev/null +++ b/import-layers/yocto-poky/meta/classes/devupstream.bbclass @@ -0,0 +1,48 @@ +# Class for use in BBCLASSEXTEND to make it easier to have a single recipe that +# can build both stable tarballs and snapshots from upstream source +# repositories. 
+# +# Usage: +# BBCLASSEXTEND = "devupstream:target" +# SRC_URI_class-devupstream = "git://git.example.com/example" +# SRCREV_class-devupstream = "abcdef" +# +# If the first entry in SRC_URI is a git: URL then S is rewritten to +# WORKDIR/git. +# +# There are a few caveats that remain to be solved: +# - You can't build native or nativesdk recipes using for example +# devupstream:native, you can only build target recipes. +# - If the fetcher requires native tools (such as subversion-native) then +# bitbake won't be able to add them automatically. + +CLASSOVERRIDE .= ":class-devupstream" + +python devupstream_virtclass_handler () { + # Do nothing if this is inherited, as it's for BBCLASSEXTEND + if "devupstream" not in (d.getVar('BBCLASSEXTEND') or ""): + bb.error("Don't inherit devupstream, use BBCLASSEXTEND") + return + + variant = d.getVar("BBEXTENDVARIANT") + if variant not in ("target"): + bb.error("Pass the variant when using devupstream, for example devupstream:target") + return + + # Develpment releases are never preferred by default + d.setVar("DEFAULT_PREFERENCE", "-1") + + uri = bb.fetch2.URI(d.getVar("SRC_URI").split()[0]) + + if uri.scheme == "git": + d.setVar("S", "${WORKDIR}/git") + + # Modify the PV if the recipe hasn't already overridden it + pv = d.getVar("PV") + proto_marker = "+" + uri.scheme + if proto_marker not in pv: + d.setVar("PV", pv + proto_marker + "${SRCPV}") +} + +addhandler devupstream_virtclass_handler +devupstream_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise" diff --git a/import-layers/yocto-poky/meta/classes/distro_features_check.bbclass b/import-layers/yocto-poky/meta/classes/distro_features_check.bbclass index 7e91dbcf4..e74d3c04b 100644 --- a/import-layers/yocto-poky/meta/classes/distro_features_check.bbclass +++ b/import-layers/yocto-poky/meta/classes/distro_features_check.bbclass @@ -11,15 +11,15 @@ python () { # Assume at least one var is set. 
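    # Illustration only (assumed values, not part of the class): the checks
    # below are plain set logic on DISTRO_FEATURES. With
    # DISTRO_FEATURES = "wayland opengl systemd":
    #   ANY_OF_DISTRO_FEATURES = "x11 wayland"    -> passes (the sets intersect)
    #   REQUIRED_DISTRO_FEATURES = "opengl pam"   -> skipped ('pam' is missing)
    #   CONFLICT_DISTRO_FEATURES = "systemd"      -> skipped ('systemd' is present)
    # Each failed check raises bb.parse.SkipPackage, so the recipe is skipped
    # at parse time rather than failing later in the build.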
- distro_features = (d.getVar('DISTRO_FEATURES', True) or "").split() + distro_features = (d.getVar('DISTRO_FEATURES') or "").split() - any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES', True) + any_of_distro_features = d.getVar('ANY_OF_DISTRO_FEATURES') if any_of_distro_features: any_of_distro_features = any_of_distro_features.split() if set.isdisjoint(set(any_of_distro_features),set(distro_features)): raise bb.parse.SkipPackage("one of '%s' needs to be in DISTRO_FEATURES" % any_of_distro_features) - required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES', True) + required_distro_features = d.getVar('REQUIRED_DISTRO_FEATURES') if required_distro_features: required_distro_features = required_distro_features.split() for f in required_distro_features: @@ -28,7 +28,7 @@ python () { else: raise bb.parse.SkipPackage("missing required distro feature '%s' (not in DISTRO_FEATURES)" % f) - conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES', True) + conflict_distro_features = d.getVar('CONFLICT_DISTRO_FEATURES') if conflict_distro_features: conflict_distro_features = conflict_distro_features.split() for f in conflict_distro_features: diff --git a/import-layers/yocto-poky/meta/classes/distrodata.bbclass b/import-layers/yocto-poky/meta/classes/distrodata.bbclass index fbb7402e0..5e3444161 100644 --- a/import-layers/yocto-poky/meta/classes/distrodata.bbclass +++ b/import-layers/yocto-poky/meta/classes/distrodata.bbclass @@ -25,75 +25,70 @@ addtask distrodata_np do_distrodata_np[nostamp] = "1" python do_distrodata_np() { localdata = bb.data.createCopy(d) - pn = d.getVar("PN", True) + pn = d.getVar("PN") bb.note("Package Name: %s" % pn) import oe.distro_check as dist_check - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') distro_check_dir = os.path.join(tmpdir, "distro_check") - datetime = localdata.getVar('DATETIME', True) + datetime = localdata.getVar('DATETIME') dist_check.update_distro_data(distro_check_dir, datetime, localdata) if pn.find("-native") != -1: pnstripped = pn.split("-native") bb.note("Native Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) if pn.find("-cross") != -1: pnstripped = pn.split("-cross") bb.note("cross Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) if pn.find("-crosssdk") != -1: pnstripped = pn.split("-crosssdk") bb.note("cross Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) if pn.startswith("nativesdk-"): pnstripped = pn.replace("nativesdk-", "") bb.note("NativeSDK Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES')) if pn.find("-initial") != -1: pnstripped = pn.split("-initial") bb.note("initial Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + 
d.getVar('OVERRIDES')) """generate package information from .bb file""" - pname = localdata.getVar('PN', True) - pcurver = localdata.getVar('PV', True) - pdesc = localdata.getVar('DESCRIPTION', True) + pname = localdata.getVar('PN') + pcurver = localdata.getVar('PV') + pdesc = localdata.getVar('DESCRIPTION') if pdesc is not None: pdesc = pdesc.replace(',','') pdesc = pdesc.replace('\n','') - pgrp = localdata.getVar('SECTION', True) - plicense = localdata.getVar('LICENSE', True).replace(',','_') + pgrp = localdata.getVar('SECTION') + plicense = localdata.getVar('LICENSE').replace(',','_') - rstatus = localdata.getVar('RECIPE_COLOR', True) + rstatus = localdata.getVar('RECIPE_COLOR') if rstatus is not None: rstatus = rstatus.replace(',','') - pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True) + pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION') if pcurver == pupver: vermatch="1" else: vermatch="0" - noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True) + noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON') if noupdate_reason is None: noupdate="0" else: noupdate="1" noupdate_reason = noupdate_reason.replace(',','') - maintainer = localdata.getVar('RECIPE_MAINTAINER', True) - rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True) + maintainer = localdata.getVar('RECIPE_MAINTAINER') + rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE') result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) bb.note("DISTRO: %s,%s,%s,%s,%s,%s,%s,%s,%s\n" % \ @@ -109,80 +104,75 @@ addtask distrodata do_distrodata[nostamp] = "1" python do_distrodata() { import csv - logpath = d.getVar('LOG_DIR', True) + logpath = d.getVar('LOG_DIR') bb.utils.mkdirhier(logpath) logfile = os.path.join(logpath, "distrodata.csv") import oe.distro_check as dist_check localdata = bb.data.createCopy(d) - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') distro_check_dir = os.path.join(tmpdir, "distro_check") - datetime = localdata.getVar('DATETIME', True) + datetime = localdata.getVar('DATETIME') dist_check.update_distro_data(distro_check_dir, datetime, localdata) - pn = d.getVar("PN", True) + pn = d.getVar("PN") bb.note("Package Name: %s" % pn) if pn.find("-native") != -1: pnstripped = pn.split("-native") bb.note("Native Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) if pn.startswith("nativesdk-"): pnstripped = pn.replace("nativesdk-", "") bb.note("NativeSDK Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES')) if pn.find("-cross") != -1: pnstripped = pn.split("-cross") bb.note("cross Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) if pn.find("-crosssdk") != -1: pnstripped = pn.split("-crosssdk") bb.note("cross Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) if pn.find("-initial") != -1: pnstripped = pn.split("-initial") bb.note("initial Split: %s" % 
pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) """generate package information from .bb file""" - pname = localdata.getVar('PN', True) - pcurver = localdata.getVar('PV', True) - pdesc = localdata.getVar('DESCRIPTION', True) + pname = localdata.getVar('PN') + pcurver = localdata.getVar('PV') + pdesc = localdata.getVar('DESCRIPTION') if pdesc is not None: pdesc = pdesc.replace(',','') pdesc = pdesc.replace('\n','') - pgrp = localdata.getVar('SECTION', True) - plicense = localdata.getVar('LICENSE', True).replace(',','_') + pgrp = localdata.getVar('SECTION') + plicense = localdata.getVar('LICENSE').replace(',','_') - rstatus = localdata.getVar('RECIPE_COLOR', True) + rstatus = localdata.getVar('RECIPE_COLOR') if rstatus is not None: rstatus = rstatus.replace(',','') - pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION', True) + pupver = localdata.getVar('RECIPE_UPSTREAM_VERSION') if pcurver == pupver: vermatch="1" else: vermatch="0" - noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON', True) + noupdate_reason = localdata.getVar('RECIPE_NO_UPDATE_REASON') if noupdate_reason is None: noupdate="0" else: noupdate="1" noupdate_reason = noupdate_reason.replace(',','') - maintainer = localdata.getVar('RECIPE_MAINTAINER', True) - rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE', True) + maintainer = localdata.getVar('RECIPE_MAINTAINER') + rlrd = localdata.getVar('RECIPE_UPSTREAM_DATE') # do the comparison result = dist_check.compare_in_distro_packages_list(distro_check_dir, localdata) @@ -272,60 +262,56 @@ python do_checkpkg() { from bb.fetch2 import FetchError, NoMethodError, decodeurl """first check whether a uri is provided""" - src_uri = (d.getVar('SRC_URI', True) or '').split() + src_uri = (d.getVar('SRC_URI') or '').split() if src_uri: uri_type, _, _, _, _, _ = decodeurl(src_uri[0]) else: uri_type = "none" """initialize log files.""" - logpath = d.getVar('LOG_DIR', True) + logpath = d.getVar('LOG_DIR') bb.utils.mkdirhier(logpath) logfile = os.path.join(logpath, "checkpkg.csv") """generate package information from .bb file""" - pname = d.getVar('PN', True) + pname = d.getVar('PN') if pname.find("-native") != -1: - if d.getVar('BBCLASSEXTEND', True): + if d.getVar('BBCLASSEXTEND'): return pnstripped = pname.split("-native") bb.note("Native Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) if pname.startswith("nativesdk-"): - if d.getVar('BBCLASSEXTEND', True): + if d.getVar('BBCLASSEXTEND'): return pnstripped = pname.replace("nativesdk-", "") bb.note("NativeSDK Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped + ":" + d.getVar('OVERRIDES')) if pname.find("-cross") != -1: pnstripped = pname.split("-cross") bb.note("cross Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) if pname.find("-initial") != -1: pnstripped = pname.split("-initial") bb.note("initial Split: %s" % pnstripped) - localdata.setVar('OVERRIDES', 
"pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) - - pdesc = localdata.getVar('DESCRIPTION', True) - pgrp = localdata.getVar('SECTION', True) - pversion = localdata.getVar('PV', True) - plicense = localdata.getVar('LICENSE', True) - psection = localdata.getVar('SECTION', True) - phome = localdata.getVar('HOMEPAGE', True) - prelease = localdata.getVar('PR', True) - pdepends = localdata.getVar('DEPENDS', True) - pbugtracker = localdata.getVar('BUGTRACKER', True) - ppe = localdata.getVar('PE', True) - psrcuri = localdata.getVar('SRC_URI', True) - maintainer = localdata.getVar('RECIPE_MAINTAINER', True) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) + + pdesc = localdata.getVar('DESCRIPTION') + pgrp = localdata.getVar('SECTION') + pversion = localdata.getVar('PV') + plicense = localdata.getVar('LICENSE') + psection = localdata.getVar('SECTION') + phome = localdata.getVar('HOMEPAGE') + prelease = localdata.getVar('PR') + pdepends = localdata.getVar('DEPENDS') + pbugtracker = localdata.getVar('BUGTRACKER') + ppe = localdata.getVar('PE') + psrcuri = localdata.getVar('SRC_URI') + maintainer = localdata.getVar('RECIPE_MAINTAINER') """ Get upstream version version """ pupver = "" @@ -362,7 +348,7 @@ python do_checkpkg() { psrcuri = "none" pdepends = "".join(pdepends.split("\t")) pdesc = "".join(pdesc.split("\t")) - no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON', True) + no_upgr_reason = d.getVar('RECIPE_NO_UPDATE_REASON') lf = bb.utils.lockfile("%s.lock" % logfile) with open(logfile, "a") as f: writer = csv.writer(f, delimiter='\t') @@ -392,6 +378,7 @@ python distro_check_eventhandler() { addtask distro_check do_distro_check[nostamp] = "1" +do_distro_check[vardepsexclude] += "DATETIME" python do_distro_check() { """checks if the package is present in other public Linux distros""" import oe.distro_check as dc @@ -400,13 +387,12 @@ python do_distro_check() { return localdata = bb.data.createCopy(d) - bb.data.update_data(localdata) - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') distro_check_dir = os.path.join(tmpdir, "distro_check") - logpath = d.getVar('LOG_DIR', True) + logpath = d.getVar('LOG_DIR') bb.utils.mkdirhier(logpath) result_file = os.path.join(logpath, "distrocheck.csv") - datetime = localdata.getVar('DATETIME', True) + datetime = localdata.getVar('DATETIME') dc.update_distro_data(distro_check_dir, datetime, localdata) # do the comparison @@ -449,12 +435,12 @@ do_checklicense[nostamp] = "1" python do_checklicense() { import csv import shutil - logpath = d.getVar('LOG_DIR', True) + logpath = d.getVar('LOG_DIR') bb.utils.mkdirhier(logpath) - pn = d.getVar('PN', True) + pn = d.getVar('PN') logfile = os.path.join(logpath, "missinglicense.csv") - generic_directory = d.getVar('COMMON_LICENSE_DIR', True) - license_types = d.getVar('LICENSE', True) + generic_directory = d.getVar('COMMON_LICENSE_DIR') + license_types = d.getVar('LICENSE') for license_type in ((license_types.replace('+', '').replace('|', '&') .replace('(', '').replace(')', '').replace(';', '') .replace(',', '').replace(" ", "").split("&"))): @@ -475,5 +461,3 @@ do_checklicenseall[nostamp] = "1" do_checklicenseall() { : } - - diff --git a/import-layers/yocto-poky/meta/classes/distutils-base.bbclass b/import-layers/yocto-poky/meta/classes/distutils-base.bbclass index aa18e8b29..9f398d705 100644 --- a/import-layers/yocto-poky/meta/classes/distutils-base.bbclass +++ b/import-layers/yocto-poky/meta/classes/distutils-base.bbclass @@ 
-1,4 +1,4 @@ -DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES', True) == '')]}" +DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}" RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}" inherit distutils-common-base pythonnative diff --git a/import-layers/yocto-poky/meta/classes/distutils-tools.bbclass b/import-layers/yocto-poky/meta/classes/distutils-tools.bbclass index 3ef9cc5a7..6f2880ea0 100644 --- a/import-layers/yocto-poky/meta/classes/distutils-tools.bbclass +++ b/import-layers/yocto-poky/meta/classes/distutils-tools.bbclass @@ -63,7 +63,7 @@ distutils_do_install() { # # FIXME: Bandaid against wrong datadir computation # - if test -e ${D}${datadir}/share; then + if [ -e ${D}${datadir}/share ]; then mv -f ${D}${datadir}/share/* ${D}${datadir}/ fi } diff --git a/import-layers/yocto-poky/meta/classes/distutils.bbclass b/import-layers/yocto-poky/meta/classes/distutils.bbclass index 857572d75..1930c3529 100644 --- a/import-layers/yocto-poky/meta/classes/distutils.bbclass +++ b/import-layers/yocto-poky/meta/classes/distutils.bbclass @@ -44,16 +44,16 @@ distutils_do_install() { if test -e ${D}${bindir} ; then for i in ${D}${bindir}/* ; do \ if [ ${PN} != "${BPN}-native" ]; then - sed -i -e s:${STAGING_BINDIR_NATIVE}/python-native/python:${bindir}/env\ python:g $i + sed -i -e s:${STAGING_BINDIR_NATIVE}/python-native/python:${USRBINPATH}/env\ python:g $i fi sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i done fi - if test -e ${D}${sbindir}; then + if [ -e ${D}${sbindir} ]; then for i in ${D}${sbindir}/* ; do \ if [ ${PN} != "${BPN}-native" ]; then - sed -i -e s:${STAGING_BINDIR_NATIVE}/python-native/python:${bindir}/env\ python:g $i + sed -i -e s:${STAGING_BINDIR_NATIVE}/python-native/python:${USRBINPATH}/env\ python:g $i fi sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i done @@ -65,13 +65,13 @@ distutils_do_install() { # # FIXME: Bandaid against wrong datadir computation # - if test -e ${D}${datadir}/share; then + if [ -e ${D}${datadir}/share ]; then mv -f ${D}${datadir}/share/* ${D}${datadir}/ rmdir ${D}${datadir}/share fi # Fix backport modules - if test -e ${STAGING_LIBDIR}/${PYTHON_DIR}/site-packages/backports/__init__.py && test -e ${D}${PYTHON_SITEPACKAGES_DIR}/backports/__init__.py; then + if [ -e ${STAGING_LIBDIR}/${PYTHON_DIR}/site-packages/backports/__init__.py ] && [ -e ${D}${PYTHON_SITEPACKAGES_DIR}/backports/__init__.py ]; then rm ${D}${PYTHON_SITEPACKAGES_DIR}/backports/__init__.py; rm ${D}${PYTHON_SITEPACKAGES_DIR}/backports/__init__.pyc; fi diff --git a/import-layers/yocto-poky/meta/classes/distutils3-base.bbclass b/import-layers/yocto-poky/meta/classes/distutils3-base.bbclass index 82ab6a3d1..7dbf07ac4 100644 --- a/import-layers/yocto-poky/meta/classes/distutils3-base.bbclass +++ b/import-layers/yocto-poky/meta/classes/distutils3-base.bbclass @@ -1,4 +1,4 @@ -DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES', True) == '')]}" +DEPENDS += "${@["${PYTHON_PN}-native ${PYTHON_PN}", ""][(d.getVar('PACKAGES') == '')]}" RDEPENDS_${PN} += "${@['', '${PYTHON_PN}-core']['${CLASSOVERRIDE}' == 'class-target']}" inherit distutils-common-base python3native diff --git a/import-layers/yocto-poky/meta/classes/distutils3.bbclass b/import-layers/yocto-poky/meta/classes/distutils3.bbclass index a6720c5b6..6c3030688 100644 --- a/import-layers/yocto-poky/meta/classes/distutils3.bbclass +++ b/import-layers/yocto-poky/meta/classes/distutils3.bbclass @@ -47,14 +47,14 
@@ distutils3_do_install() { if test -e ${D}${bindir} ; then for i in ${D}${bindir}/* ; do \ - sed -i -e s:${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN}:${bindir}/env\ ${PYTHON_PN}:g $i + sed -i -e s:${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN}:${USRBINPATH}/env\ ${PYTHON_PN}:g $i sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i done fi if test -e ${D}${sbindir}; then for i in ${D}${sbindir}/* ; do \ - sed -i -e s:${STAGING_BINDIR_NATIVE}/python-${PYTHON_PN}/${PYTHON_PN}:${bindir}/env\ ${PYTHON_PN}:g $i + sed -i -e s:${STAGING_BINDIR_NATIVE}/python-${PYTHON_PN}/${PYTHON_PN}:${USRBINPATH}/env\ ${PYTHON_PN}:g $i sed -i -e s:${STAGING_BINDIR_NATIVE}:${bindir}:g $i done fi @@ -64,7 +64,7 @@ distutils3_do_install() { # # FIXME: Bandaid against wrong datadir computation # - if test -e ${D}${datadir}/share; then + if [ -e ${D}${datadir}/share ]; then mv -f ${D}${datadir}/share/* ${D}${datadir}/ rmdir ${D}${datadir}/share fi diff --git a/import-layers/yocto-poky/meta/classes/externalsrc.bbclass b/import-layers/yocto-poky/meta/classes/externalsrc.bbclass index 31908c3ca..d64af6a9c 100644 --- a/import-layers/yocto-poky/meta/classes/externalsrc.bbclass +++ b/import-layers/yocto-poky/meta/classes/externalsrc.bbclass @@ -4,7 +4,7 @@ # Copyright (C) 2009 Chris Larson # Released under the MIT license (see COPYING.MIT for the terms) # -# externalsrc.bbclass enables use of an existing source tree, usually external to +# externalsrc.bbclass enables use of an existing source tree, usually external to # the build system to build a piece of software rather than the usual fetch/unpack/patch # process. # @@ -28,34 +28,34 @@ SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch" EXTERNALSRC_SYMLINKS ?= "oe-workdir:${WORKDIR} oe-logs:${T}" python () { - externalsrc = d.getVar('EXTERNALSRC', True) + externalsrc = d.getVar('EXTERNALSRC') # If this is the base recipe and EXTERNALSRC is set for it or any of its # derivatives, then enable BB_DONT_CACHE to force the recipe to always be # re-parsed so that the file-checksums function for do_compile is run every # time. 
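    # Illustration only (hypothetical recipe name): any of these local.conf
    # settings makes the check below take the BB_DONT_CACHE path, so the
    # recipe and its class extensions are always re-parsed:
    #   EXTERNALSRC_pn-example = "/home/user/src/example"
    #   EXTERNALSRC_pn-example-native = "/home/user/src/example"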
- bpn = d.getVar('BPN', True) - if bpn == d.getVar('PN', True): - classextend = (d.getVar('BBCLASSEXTEND', True) or '').split() + bpn = d.getVar('BPN') + if bpn == d.getVar('PN'): + classextend = (d.getVar('BBCLASSEXTEND') or '').split() if (externalsrc or ('native' in classextend and - d.getVar('EXTERNALSRC_pn-%s-native' % bpn, True)) or + d.getVar('EXTERNALSRC_pn-%s-native' % bpn)) or ('nativesdk' in classextend and - d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn, True)) or + d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn)) or ('cross' in classextend and - d.getVar('EXTERNALSRC_pn-%s-cross' % bpn, True))): + d.getVar('EXTERNALSRC_pn-%s-cross' % bpn))): d.setVar('BB_DONT_CACHE', '1') if externalsrc: d.setVar('S', externalsrc) - externalsrcbuild = d.getVar('EXTERNALSRC_BUILD', True) + externalsrcbuild = d.getVar('EXTERNALSRC_BUILD') if externalsrcbuild: d.setVar('B', externalsrcbuild) else: d.setVar('B', '${WORKDIR}/${BPN}-${PV}/') local_srcuri = [] - fetch = bb.fetch2.Fetch((d.getVar('SRC_URI', True) or '').split(), d) + fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d) for url in fetch.urls: url_data = fetch.ud[url] parm = url_data.parm @@ -69,7 +69,7 @@ python () { # Dummy value because the default function can't be called with blank SRC_URI d.setVar('SRCPV', '999') - tasks = filter(lambda k: d.getVarFlag(k, "task", True), d.keys()) + tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys()) for task in tasks: if task.endswith("_setscene"): @@ -94,7 +94,7 @@ python () { # Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack']) - for task in d.getVar("SRCTREECOVEREDTASKS", True).split(): + for task in d.getVar("SRCTREECOVEREDTASKS").split(): if local_srcuri and task in fetch_tasks: continue bb.build.deltask(task, d) @@ -106,24 +106,31 @@ python () { d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}') # We don't want the workdir to go away - d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN', True)) + d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN')) + + bb.build.addtask('do_buildclean', + 'do_clean' if d.getVar('S') == d.getVar('B') else None, + None, d) # If B=S the same builddir is used even for different architectures. # Thus, use a shared CONFIGURESTAMPFILE and STAMP directory so that # change of do_configure task hash is correctly detected and stamps are # invalidated if e.g. MACHINE changes. 
- if d.getVar('S', True) == d.getVar('B', True): + if d.getVar('S') == d.getVar('B'): configstamp = '${TMPDIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}/configure.sstate' d.setVar('CONFIGURESTAMPFILE', configstamp) d.setVar('STAMP', '${STAMPS_DIR}/work-shared/${PN}/${EXTENDPE}${PV}-${PR}') + d.setVar('STAMPCLEAN', '${STAMPS_DIR}/work-shared/${PN}/*-*') } python externalsrc_configure_prefunc() { + s_dir = d.getVar('S') # Create desired symlinks - symlinks = (d.getVar('EXTERNALSRC_SYMLINKS', True) or '').split() + symlinks = (d.getVar('EXTERNALSRC_SYMLINKS') or '').split() + newlinks = [] for symlink in symlinks: symsplit = symlink.split(':', 1) - lnkfile = os.path.join(d.getVar('S', True), symsplit[0]) + lnkfile = os.path.join(s_dir, symsplit[0]) target = d.expand(symsplit[1]) if len(symsplit) > 1: if os.path.islink(lnkfile): @@ -135,19 +142,43 @@ python externalsrc_configure_prefunc() { # File/dir exists with same name as link, just leave it alone continue os.symlink(target, lnkfile) + newlinks.append(symsplit[0]) + # Hide the symlinks from git + try: + git_exclude_file = os.path.join(s_dir, '.git/info/exclude') + if os.path.exists(git_exclude_file): + with open(git_exclude_file, 'r+') as efile: + elines = efile.readlines() + for link in newlinks: + if link in elines or '/'+link in elines: + continue + efile.write('/' + link + '\n') + except IOError as ioe: + bb.note('Failed to hide EXTERNALSRC_SYMLINKS from git') } python externalsrc_compile_prefunc() { # Make it obvious that this is happening, since forgetting about it could lead to much confusion - bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN', True), d.getVar('EXTERNALSRC', True))) + bb.plain('NOTE: %s: compiling from external source tree %s' % (d.getVar('PN'), d.getVar('EXTERNALSRC'))) +} + +do_buildclean[dirs] = "${S} ${B}" +do_buildclean[nostamp] = "1" +do_buildclean[doc] = "Call 'make clean' or equivalent in ${B}" +externalsrc_do_buildclean() { + if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then + oe_runmake clean || die "make failed" + else + bbnote "nothing to do - no makefile found" + fi } -def srctree_hash_files(d): +def srctree_hash_files(d, srcdir=None): import shutil import subprocess import tempfile - s_dir = d.getVar('EXTERNALSRC', True) + s_dir = srcdir or d.getVar('EXTERNALSRC') git_dir = os.path.join(s_dir, '.git') oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1') @@ -159,13 +190,13 @@ def srctree_hash_files(d): # Update our custom index env = os.environ.copy() env['GIT_INDEX_FILE'] = tmp_index.name - subprocess.check_output(['git', 'add', '.'], cwd=s_dir, env=env) + subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env) sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8") with open(oe_hash_file, 'w') as fobj: fobj.write(sha1) ret = oe_hash_file + ':True' else: - ret = d.getVar('EXTERNALSRC', True) + '/*:True' + ret = s_dir + '/*:True' return ret def srctree_configure_hash_files(d): @@ -173,7 +204,7 @@ def srctree_configure_hash_files(d): Get the list of files that should trigger do_configure to re-execute, based on the value of CONFIGURE_FILES """ - in_files = (d.getVar('CONFIGURE_FILES', True) or '').split() + in_files = (d.getVar('CONFIGURE_FILES') or '').split() out_items = [] search_files = [] for entry in in_files: @@ -182,9 +213,11 @@ def srctree_configure_hash_files(d): else: search_files.append(entry) if search_files: - s_dir = d.getVar('EXTERNALSRC', True) + s_dir = d.getVar('EXTERNALSRC') for root, _, files 
in os.walk(s_dir): for f in files: if f in search_files: out_items.append('%s:True' % os.path.join(root, f)) return ' '.join(out_items) + +EXPORT_FUNCTIONS do_buildclean diff --git a/import-layers/yocto-poky/meta/classes/extrausers.bbclass b/import-layers/yocto-poky/meta/classes/extrausers.bbclass index 43900f359..7709407b6 100644 --- a/import-layers/yocto-poky/meta/classes/extrausers.bbclass +++ b/import-layers/yocto-poky/meta/classes/extrausers.bbclass @@ -15,7 +15,7 @@ inherit useradd_base -IMAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS', True))]}" +PACKAGE_INSTALL_append = " ${@['', 'base-passwd shadow'][bool(d.getVar('EXTRA_USERS_PARAMS'))]}" # Image level user / group settings ROOTFS_POSTPROCESS_COMMAND_append = " set_user_group;" @@ -63,3 +63,7 @@ set_user_group () { remaining=`echo $remaining | cut -d ';' -f2-` done } + +USERADDEXTENSION ?= "" + +inherit ${USERADDEXTENSION} diff --git a/import-layers/yocto-poky/meta/classes/fontcache.bbclass b/import-layers/yocto-poky/meta/classes/fontcache.bbclass index 8ebdfc4f5..e76331131 100644 --- a/import-layers/yocto-poky/meta/classes/fontcache.bbclass +++ b/import-layers/yocto-poky/meta/classes/fontcache.bbclass @@ -3,7 +3,7 @@ # packages. # -DEPENDS += "qemu-native" +PACKAGE_WRITE_DEPS += "qemu-native" inherit qemu FONT_PACKAGES ??= "${PN}" @@ -30,26 +30,26 @@ fi } python () { - font_pkgs = d.getVar('FONT_PACKAGES', True).split() - deps = d.getVar("FONT_EXTRA_RDEPENDS", True) + font_pkgs = d.getVar('FONT_PACKAGES').split() + deps = d.getVar("FONT_EXTRA_RDEPENDS") for pkg in font_pkgs: if deps: d.appendVar('RDEPENDS_' + pkg, ' '+deps) } python add_fontcache_postinsts() { - for pkg in d.getVar('FONT_PACKAGES', True).split(): + for pkg in d.getVar('FONT_PACKAGES').split(): bb.note("adding fonts postinst and postrm scripts to %s" % pkg) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) + postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst') if not postinst: postinst = '#!/bin/sh\n' - postinst += d.getVar('fontcache_common', True) + postinst += d.getVar('fontcache_common') d.setVar('pkg_postinst_%s' % pkg, postinst) - postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) + postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm') if not postrm: postrm = '#!/bin/sh\n' - postrm += d.getVar('fontcache_common', True) + postrm += d.getVar('fontcache_common') d.setVar('pkg_postrm_%s' % pkg, postrm) } diff --git a/import-layers/yocto-poky/meta/classes/fs-uuid.bbclass b/import-layers/yocto-poky/meta/classes/fs-uuid.bbclass index bd2613cf1..9b53dfba7 100644 --- a/import-layers/yocto-poky/meta/classes/fs-uuid.bbclass +++ b/import-layers/yocto-poky/meta/classes/fs-uuid.bbclass @@ -3,7 +3,7 @@ # on ext file systems and depends on tune2fs. def get_rootfs_uuid(d): import subprocess - rootfs = d.getVar('ROOTFS', True) + rootfs = d.getVar('ROOTFS') output = subprocess.check_output(['tune2fs', '-l', rootfs]) for line in output.split('\n'): if line.startswith('Filesystem UUID:'): @@ -13,7 +13,7 @@ def get_rootfs_uuid(d): bb.fatal('Could not determine filesystem UUID of %s' % rootfs) # Replace the special <> inside a string (like the -# root= APPEND string in a syslinux.cfg or gummiboot entry) with the +# root= APPEND string in a syslinux.cfg or systemd-boot entry) with the # actual UUID of the rootfs. Does nothing if the special string # is not used. 
def replace_rootfs_uuid(d, string): diff --git a/import-layers/yocto-poky/meta/classes/gconf.bbclass b/import-layers/yocto-poky/meta/classes/gconf.bbclass index d7afa7282..4e0ee2e7d 100644 --- a/import-layers/yocto-poky/meta/classes/gconf.bbclass +++ b/import-layers/yocto-poky/meta/classes/gconf.bbclass @@ -1,4 +1,5 @@ -DEPENDS += "gconf gconf-native" +DEPENDS += "gconf" +PACKAGE_WRITE_DEPS += "gconf-native" # These are for when gconftool is used natively and the prefix isn't necessarily # the sysroot. TODO: replicate the postinst logic for -native packages going @@ -42,8 +43,8 @@ done python populate_packages_append () { import re - packages = d.getVar('PACKAGES', True).split() - pkgdest = d.getVar('PKGDEST', True) + packages = d.getVar('PACKAGES').split() + pkgdest = d.getVar('PKGDEST') for pkg in packages: schema_dir = '%s/%s/etc/gconf/schemas' % (pkgdest, pkg) @@ -56,15 +57,15 @@ python populate_packages_append () { if schemas != []: bb.note("adding gconf postinst and prerm scripts to %s" % pkg) d.setVar('SCHEMA_FILES', " ".join(schemas)) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) + postinst = d.getVar('pkg_postinst_%s' % pkg) if not postinst: postinst = '#!/bin/sh\n' - postinst += d.getVar('gconf_postinst', True) + postinst += d.getVar('gconf_postinst') d.setVar('pkg_postinst_%s' % pkg, postinst) - prerm = d.getVar('pkg_prerm_%s' % pkg, True) + prerm = d.getVar('pkg_prerm_%s' % pkg) if not prerm: prerm = '#!/bin/sh\n' - prerm += d.getVar('gconf_prerm', True) + prerm += d.getVar('gconf_prerm') d.setVar('pkg_prerm_%s' % pkg, prerm) d.appendVar("RDEPENDS_%s" % pkg, ' ' + d.getVar('MLPREFIX', False) + 'gconf') } diff --git a/import-layers/yocto-poky/meta/classes/gettext.bbclass b/import-layers/yocto-poky/meta/classes/gettext.bbclass index 03b89b245..0be14246b 100644 --- a/import-layers/yocto-poky/meta/classes/gettext.bbclass +++ b/import-layers/yocto-poky/meta/classes/gettext.bbclass @@ -1,15 +1,15 @@ def gettext_dependencies(d): - if d.getVar('INHIBIT_DEFAULT_DEPS', True) and not oe.utils.inherits(d, 'cross-canadian'): + if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'): return "" - if d.getVar('USE_NLS', True) == 'no': + if d.getVar('USE_NLS') == 'no': return "gettext-minimal-native" return d.getVar('DEPENDS_GETTEXT', False) def gettext_oeconf(d): - if d.getVar('USE_NLS', True) == 'no': + if d.getVar('USE_NLS') == 'no': return '--disable-nls' # Remove the NLS bits if USE_NLS is no or INHIBIT_DEFAULT_DEPS is set - if d.getVar('INHIBIT_DEFAULT_DEPS', True) and not oe.utils.inherits(d, 'cross-canadian'): + if d.getVar('INHIBIT_DEFAULT_DEPS') and not oe.utils.inherits(d, 'cross-canadian'): return '--disable-nls' return "--enable-nls" diff --git a/import-layers/yocto-poky/meta/classes/gio-module-cache.bbclass b/import-layers/yocto-poky/meta/classes/gio-module-cache.bbclass index 91461b11e..a8190b7b8 100644 --- a/import-layers/yocto-poky/meta/classes/gio-module-cache.bbclass +++ b/import-layers/yocto-poky/meta/classes/gio-module-cache.bbclass @@ -1,4 +1,4 @@ -DEPENDS += "qemu-native" +PACKAGE_WRITE_DEPS += "qemu-native" inherit qemu GIO_MODULE_PACKAGES ??= "${PN}" @@ -17,21 +17,21 @@ fi } python populate_packages_append () { - packages = d.getVar('GIO_MODULE_PACKAGES', True).split() + packages = d.getVar('GIO_MODULE_PACKAGES').split() for pkg in packages: bb.note("adding gio-module-cache postinst and postrm scripts to %s" % pkg) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) + postinst = d.getVar('pkg_postinst_%s' % pkg) if not postinst: 
postinst = '#!/bin/sh\n' - postinst += d.getVar('gio_module_cache_common', True) + postinst += d.getVar('gio_module_cache_common') d.setVar('pkg_postinst_%s' % pkg, postinst) - postrm = d.getVar('pkg_postrm_%s' % pkg, True) + postrm = d.getVar('pkg_postrm_%s' % pkg) if not postrm: postrm = '#!/bin/sh\n' - postrm += d.getVar('gio_module_cache_common', True) + postrm += d.getVar('gio_module_cache_common') d.setVar('pkg_postrm_%s' % pkg, postrm) } diff --git a/import-layers/yocto-poky/meta/classes/go.bbclass b/import-layers/yocto-poky/meta/classes/go.bbclass new file mode 100644 index 000000000..85f71a2e9 --- /dev/null +++ b/import-layers/yocto-poky/meta/classes/go.bbclass @@ -0,0 +1,77 @@ +inherit goarch + +# x32 ABI is not supported on go compiler so far +COMPATIBLE_HOST_linux-gnux32 = "null" +# ppc32 is not supported in go compilers +COMPATIBLE_HOST_powerpc = "null" + +GOROOT_class-native = "${STAGING_LIBDIR_NATIVE}/go" +GOROOT = "${STAGING_LIBDIR_NATIVE}/${TARGET_SYS}/go" +GOBIN_FINAL_class-native = "${GOROOT_FINAL}/bin" +GOBIN_FINAL = "${GOROOT_FINAL}/bin/${GOOS}_${GOARCH}" + +export GOOS = "${TARGET_GOOS}" +export GOARCH = "${TARGET_GOARCH}" +export GOARM = "${TARGET_GOARM}" +export CGO_ENABLED = "1" +export GOROOT +export GOROOT_FINAL = "${libdir}/${TARGET_SYS}/go" +export GOBIN_FINAL +export GOPKG_FINAL = "${GOROOT_FINAL}/pkg/${GOOS}_${GOARCH}" +export GOSRC_FINAL = "${GOROOT_FINAL}/src" +export GO_GCFLAGS = "${TARGET_CFLAGS}" +export GO_LDFLAGS = "${TARGET_LDFLAGS}" +export CGO_CFLAGS = "${TARGET_CC_ARCH}${TOOLCHAIN_OPTIONS} ${TARGET_CFLAGS}" +export CGO_CPPFLAGS = "${TARGET_CPPFLAGS}" +export CGO_CXXFLAGS = "${TARGET_CC_ARCH}${TOOLCHAIN_OPTIONS} ${TARGET_CXXFLAGS}" +export CGO_LDFLAGS = "${TARGET_CC_ARCH}${TOOLCHAIN_OPTIONS} ${TARGET_LDFLAGS}" + +DEPENDS += "go-cross-${TARGET_ARCH}" +DEPENDS_class-native += "go-native" + +FILES_${PN}-staticdev += "${GOSRC_FINAL}/${GO_IMPORT}" +FILES_${PN}-staticdev += "${GOPKG_FINAL}/${GO_IMPORT}*" + +GO_INSTALL ?= "${GO_IMPORT}/..." + +do_go_compile() { + GOPATH=${S}:${STAGING_LIBDIR}/${TARGET_SYS}/go go env + if [ -n "${GO_INSTALL}" ]; then + GOPATH=${S}:${STAGING_LIBDIR}/${TARGET_SYS}/go go install -v ${GO_INSTALL} + fi +} + +do_go_install() { + rm -rf ${WORKDIR}/staging + install -d ${WORKDIR}/staging${GOROOT_FINAL} ${D}${GOROOT_FINAL} + tar -C ${S} -cf - . | tar -C ${WORKDIR}/staging${GOROOT_FINAL} -xpvf - + + find ${WORKDIR}/staging${GOROOT_FINAL} \( \ + -name \*.indirectionsymlink -o \ + -name .git\* -o \ + -name .hg -o \ + -name .svn -o \ + -name .pc\* -o \ + -name patches\* \ + \) -print0 | \ + xargs -r0 rm -rf + + tar -C ${WORKDIR}/staging${GOROOT_FINAL} -cf - . | \ + tar -C ${D}${GOROOT_FINAL} -xpvf - + + chown -R root:root "${D}${GOROOT_FINAL}" + + if [ -e "${D}${GOBIN_FINAL}" ]; then + install -d -m 0755 "${D}${bindir}" + find "${D}${GOBIN_FINAL}" ! 
-type d -print0 | xargs -r0 mv --target-directory="${D}${bindir}" + rmdir -p "${D}${GOBIN_FINAL}" || true + fi +} + +do_compile() { + do_go_compile +} + +do_install() { + do_go_install +} diff --git a/import-layers/yocto-poky/meta/classes/goarch.bbclass b/import-layers/yocto-poky/meta/classes/goarch.bbclass new file mode 100644 index 000000000..12df88f8c --- /dev/null +++ b/import-layers/yocto-poky/meta/classes/goarch.bbclass @@ -0,0 +1,53 @@ +BUILD_GOOS = "${@go_map_os(d.getVar('BUILD_OS', True), d)}" +BUILD_GOARCH = "${@go_map_arch(d.getVar('BUILD_ARCH', True), d)}" +BUILD_GOTUPLE = "${BUILD_GOOS}_${BUILD_GOARCH}" +HOST_GOOS = "${@go_map_os(d.getVar('HOST_OS', True), d)}" +HOST_GOARCH = "${@go_map_arch(d.getVar('HOST_ARCH', True), d)}" +HOST_GOARM = "${@go_map_arm(d.getVar('HOST_ARCH', True), d.getVar('TUNE_FEATURES', True), d)}" +HOST_GOTUPLE = "${HOST_GOOS}_${HOST_GOARCH}" +TARGET_GOOS = "${@go_map_os(d.getVar('TARGET_OS', True), d)}" +TARGET_GOARCH = "${@go_map_arch(d.getVar('TARGET_ARCH', True), d)}" +TARGET_GOARM = "${@go_map_arm(d.getVar('TARGET_ARCH', True), d.getVar('TUNE_FEATURES', True), d)}" +TARGET_GOTUPLE = "${TARGET_GOOS}_${TARGET_GOARCH}" +GO_BUILD_BINDIR = "${@['bin/${HOST_GOTUPLE}','bin'][d.getVar('BUILD_GOTUPLE',True) == d.getVar('HOST_GOTUPLE',True)]}" + +def go_map_arch(a, d): + import re + if re.match('i.86', a): + return '386' + elif a == 'x86_64': + return 'amd64' + elif re.match('arm.*', a): + return 'arm' + elif re.match('aarch64.*', a): + return 'arm64' + elif re.match('mips64el*', a): + return 'mips64le' + elif re.match('mips64*', a): + return 'mips64' + elif re.match('mipsel*', a): + return 'mipsle' + elif re.match('mips*', a): + return 'mips' + elif re.match('p(pc|owerpc)(64)', a): + return 'ppc64' + elif re.match('p(pc|owerpc)(64el)', a): + return 'ppc64le' + else: + raise bb.parse.SkipPackage("Unsupported CPU architecture: %s" % a) + +def go_map_arm(a, f, d): + import re + if re.match('arm.*', a): + if 'armv7' in f: + return '7' + elif 'armv6' in f: + return '6' + return '' + +def go_map_os(o, d): + if o.startswith('linux'): + return 'linux' + return o + + diff --git a/import-layers/yocto-poky/meta/classes/gobject-introspection.bbclass b/import-layers/yocto-poky/meta/classes/gobject-introspection.bbclass index 37389cbc8..b6160b88b 100644 --- a/import-layers/yocto-poky/meta/classes/gobject-introspection.bbclass +++ b/import-layers/yocto-poky/meta/classes/gobject-introspection.bbclass @@ -17,7 +17,7 @@ UNKNOWN_CONFIGURE_WHITELIST_append = " --enable-introspection --disable-introspe # Generating introspection data depends on a combination of native and target # introspection tools, and qemu to run the target tools. -DEPENDS_append_class-target = " gobject-introspection gobject-introspection-native qemu-native" +DEPENDS_append_class-target = " gobject-introspection gobject-introspection-native qemu-native prelink-native" # Even though introspection is disabled on -native, gobject-introspection package is still # needed for m4 macros. 
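The new goarch.bbclass above boils down to a few pure mapping functions. The sketch below is simplified to the x86 and ARM cases and swaps bb.parse.SkipPackage for a plain ValueError so it runs standalone; it shows how OE architecture and tune strings translate into Go's GOARCH/GOARM values:

    import re

    def go_map_arch(a):
        # OE/uname-style arch names -> Go GOARCH names (subset of the class mapping)
        if re.match('i.86', a):
            return '386'
        elif a == 'x86_64':
            return 'amd64'
        elif re.match('aarch64.*', a):
            return 'arm64'
        elif re.match('arm.*', a):
            return 'arm'
        raise ValueError('Unsupported CPU architecture: %s' % a)

    def go_map_arm(a, tune_features):
        # GOARM only applies to 32-bit ARM; derive it from TUNE_FEATURES
        if re.match('arm.*', a):
            if 'armv7' in tune_features:
                return '7'
            elif 'armv6' in tune_features:
                return '6'
        return ''

    print(go_map_arch('x86_64'))            # amd64
    print(go_map_arch('aarch64'))           # arm64
    print(go_map_arm('arm', 'armv7 neon'))  # 7
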
diff --git a/import-layers/yocto-poky/meta/classes/grub-efi.bbclass b/import-layers/yocto-poky/meta/classes/grub-efi.bbclass index 17417ba5d..df7fe18a7 100644 --- a/import-layers/yocto-poky/meta/classes/grub-efi.bbclass +++ b/import-layers/yocto-poky/meta/classes/grub-efi.bbclass @@ -40,13 +40,15 @@ efi_populate() { install -d ${DEST}${EFIDIR} - GRUB_IMAGE="bootia32.efi" + GRUB_IMAGE="grub-efi-bootia32.efi" + DEST_IMAGE="bootia32.efi" if [ "${TARGET_ARCH}" = "x86_64" ]; then - GRUB_IMAGE="bootx64.efi" + GRUB_IMAGE="grub-efi-bootx64.efi" + DEST_IMAGE="bootx64.efi" fi - install -m 0644 ${DEPLOY_DIR_IMAGE}/${GRUB_IMAGE} ${DEST}${EFIDIR} + install -m 0644 ${DEPLOY_DIR_IMAGE}/${GRUB_IMAGE} ${DEST}${EFIDIR}/${DEST_IMAGE} EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') - printf 'fs0:%s\%s\n' "$EFIPATH" "$GRUB_IMAGE" >${DEST}/startup.nsh + printf 'fs0:%s\%s\n' "$EFIPATH" "$DEST_IMAGE" >${DEST}/startup.nsh install -m 0644 ${GRUB_CFG} ${DEST}${EFIDIR}/grub.cfg } @@ -72,14 +74,14 @@ efi_hddimg_populate() { python build_efi_cfg() { import sys - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') if not workdir: bb.error("WORKDIR not defined, unable to package") return - gfxserial = d.getVar('GRUB_GFXSERIAL', True) or "" + gfxserial = d.getVar('GRUB_GFXSERIAL') or "" - labels = d.getVar('LABELS', True) + labels = d.getVar('LABELS') if not labels: bb.debug(1, "LABELS not defined, nothing to do") return @@ -88,7 +90,7 @@ python build_efi_cfg() { bb.debug(1, "No labels, nothing to do") return - cfile = d.getVar('GRUB_CFG', True) + cfile = d.getVar('GRUB_CFG') if not cfile: bb.fatal('Unable to read GRUB_CFG') @@ -99,39 +101,38 @@ python build_efi_cfg() { cfgfile.write('# Automatically created by OE\n') - opts = d.getVar('GRUB_OPTS', True) + opts = d.getVar('GRUB_OPTS') if opts: for opt in opts.split(';'): cfgfile.write('%s\n' % opt) cfgfile.write('default=%s\n' % (labels.split()[0])) - timeout = d.getVar('GRUB_TIMEOUT', True) + timeout = d.getVar('GRUB_TIMEOUT') if timeout: cfgfile.write('timeout=%s\n' % timeout) else: cfgfile.write('timeout=50\n') - root = d.getVar('GRUB_ROOT', True) + root = d.getVar('GRUB_ROOT') if not root: bb.fatal('GRUB_ROOT not defined') if gfxserial == "1": btypes = [ [ " graphics console", "" ], - [ " serial console", d.getVar('GRUB_SERIAL', True) or "" ] ] + [ " serial console", d.getVar('GRUB_SERIAL') or "" ] ] else: btypes = [ [ "", "" ] ] for label in labels.split(): localdata = d.createCopy() - overrides = localdata.getVar('OVERRIDES', True) + overrides = localdata.getVar('OVERRIDES') if not overrides: bb.fatal('OVERRIDES not defined') for btype in btypes: localdata.setVar('OVERRIDES', label + ':' + overrides) - bb.data.update_data(localdata) cfgfile.write('\nmenuentry \'%s%s\'{\n' % (label, btype[0])) lb = label @@ -141,8 +142,8 @@ python build_efi_cfg() { cfgfile.write(' %s' % replace_rootfs_uuid(d, root)) - append = localdata.getVar('APPEND', True) - initrd = localdata.getVar('INITRD', True) + append = localdata.getVar('APPEND') + initrd = localdata.getVar('INITRD') if append: append = replace_rootfs_uuid(d, append) diff --git a/import-layers/yocto-poky/meta/classes/gsettings.bbclass b/import-layers/yocto-poky/meta/classes/gsettings.bbclass index dec5abc02..eae3dc799 100644 --- a/import-layers/yocto-poky/meta/classes/gsettings.bbclass +++ b/import-layers/yocto-poky/meta/classes/gsettings.bbclass @@ -7,31 +7,32 @@ # TODO use a trigger so that this runs once per package operation run -DEPENDS += "glib-2.0-native" RDEPENDS_${PN} += "glib-2.0-utils" FILES_${PN} 
+= "${datadir}/glib-2.0/schemas" +PACKAGE_WRITE_DEPS += "glib-2.0-native" + gsettings_postinstrm () { glib-compile-schemas $D${datadir}/glib-2.0/schemas } python populate_packages_append () { - pkg = d.getVar('PN', True) + pkg = d.getVar('PN') bb.note("adding gsettings postinst scripts to %s" % pkg) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) + postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst') if not postinst: postinst = '#!/bin/sh\n' - postinst += d.getVar('gsettings_postinstrm', True) + postinst += d.getVar('gsettings_postinstrm') d.setVar('pkg_postinst_%s' % pkg, postinst) bb.note("adding gsettings postrm scripts to %s" % pkg) - postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) + postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm') if not postrm: postrm = '#!/bin/sh\n' - postrm += d.getVar('gsettings_postinstrm', True) + postrm += d.getVar('gsettings_postinstrm') d.setVar('pkg_postrm_%s' % pkg, postrm) } diff --git a/import-layers/yocto-poky/meta/classes/gtk-doc.bbclass b/import-layers/yocto-poky/meta/classes/gtk-doc.bbclass index 297eac63b..0ae2729c0 100644 --- a/import-layers/yocto-poky/meta/classes/gtk-doc.bbclass +++ b/import-layers/yocto-poky/meta/classes/gtk-doc.bbclass @@ -50,7 +50,7 @@ export GIO_MODULE_DIR=${STAGING_LIBDIR}/gio/modules-dummy GIR_EXTRA_LIBS_PATH=\`find ${B} -name .libs| tr '\n' ':'\`\$GIR_EXTRA_LIBS_PATH -if test -d ".libs"; then +if [ -d ".libs" ]; then $qemu_binary ".libs/\$@" else $qemu_binary "\$@" diff --git a/import-layers/yocto-poky/meta/classes/gtk-icon-cache.bbclass b/import-layers/yocto-poky/meta/classes/gtk-icon-cache.bbclass index 0f1052b08..d87167aec 100644 --- a/import-layers/yocto-poky/meta/classes/gtk-icon-cache.bbclass +++ b/import-layers/yocto-poky/meta/classes/gtk-icon-cache.bbclass @@ -2,6 +2,8 @@ FILES_${PN} += "${datadir}/icons/hicolor" DEPENDS += "${@['hicolor-icon-theme', '']['${BPN}' == 'hicolor-icon-theme']} gtk-icon-utils-native" +PACKAGE_WRITE_DEPS += "gtk-icon-utils-native gdk-pixbuf-native" + gtk_icon_cache_postinst() { if [ "x$D" != "x" ]; then $INTERCEPT_DIR/postinst_intercept update_icon_cache ${PKG} \ @@ -35,11 +37,11 @@ fi } python populate_packages_append () { - packages = d.getVar('PACKAGES', True).split() - pkgdest = d.getVar('PKGDEST', True) + packages = d.getVar('PACKAGES').split() + pkgdest = d.getVar('PKGDEST') for pkg in packages: - icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir', True)) + icon_dir = '%s/%s/%s/icons' % (pkgdest, pkg, d.getVar('datadir')) if not os.path.exists(icon_dir): continue @@ -49,16 +51,16 @@ python populate_packages_append () { bb.note("adding gtk-icon-cache postinst and postrm scripts to %s" % pkg) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) + postinst = d.getVar('pkg_postinst_%s' % pkg) if not postinst: postinst = '#!/bin/sh\n' - postinst += d.getVar('gtk_icon_cache_postinst', True) + postinst += d.getVar('gtk_icon_cache_postinst') d.setVar('pkg_postinst_%s' % pkg, postinst) - postrm = d.getVar('pkg_postrm_%s' % pkg, True) + postrm = d.getVar('pkg_postrm_%s' % pkg) if not postrm: postrm = '#!/bin/sh\n' - postrm += d.getVar('gtk_icon_cache_postrm', True) + postrm += d.getVar('gtk_icon_cache_postrm') d.setVar('pkg_postrm_%s' % pkg, postrm) } diff --git a/import-layers/yocto-poky/meta/classes/gtk-immodules-cache.bbclass b/import-layers/yocto-poky/meta/classes/gtk-immodules-cache.bbclass index ebbc9dea8..3d82dbe9e 100644 --- 
a/import-layers/yocto-poky/meta/classes/gtk-immodules-cache.bbclass +++ b/import-layers/yocto-poky/meta/classes/gtk-immodules-cache.bbclass @@ -2,7 +2,7 @@ # # Usage: Set GTKIMMODULES_PACKAGES to the packages that needs to update the inputmethod modules -DEPENDS =+ "qemu-native" +PACKAGE_WRITE_DEPS += "qemu-native" inherit qemu @@ -61,21 +61,21 @@ fi } python populate_packages_append () { - gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES', True).split() + gtkimmodules_pkgs = d.getVar('GTKIMMODULES_PACKAGES').split() for pkg in gtkimmodules_pkgs: bb.note("adding gtk-immodule-cache postinst and postrm scripts to %s" % pkg) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) + postinst = d.getVar('pkg_postinst_%s' % pkg) if not postinst: postinst = '#!/bin/sh\n' - postinst += d.getVar('gtk_immodule_cache_postinst', True) + postinst += d.getVar('gtk_immodule_cache_postinst') d.setVar('pkg_postinst_%s' % pkg, postinst) - postrm = d.getVar('pkg_postrm_%s' % pkg, True) + postrm = d.getVar('pkg_postrm_%s' % pkg) if not postrm: postrm = '#!/bin/sh\n' - postrm += d.getVar('gtk_immodule_cache_postrm', True) + postrm += d.getVar('gtk_immodule_cache_postrm') d.setVar('pkg_postrm_%s' % pkg, postrm) } diff --git a/import-layers/yocto-poky/meta/classes/gummiboot.bbclass b/import-layers/yocto-poky/meta/classes/gummiboot.bbclass deleted file mode 100644 index 4f2dea6c3..000000000 --- a/import-layers/yocto-poky/meta/classes/gummiboot.bbclass +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright (C) 2014 Intel Corporation -# -# Released under the MIT license (see COPYING.MIT) - -# gummiboot.bbclass - equivalent of grub-efi.bbclass -# Set EFI_PROVIDER = "gummiboot" to use gummiboot on your live images instead of grub-efi -# (images built by image-live.bbclass or image-vm.bbclass) - -do_bootimg[depends] += "${MLPREFIX}gummiboot:do_deploy" -do_bootdirectdisk[depends] += "${MLPREFIX}gummiboot:do_deploy" - -EFIDIR = "/EFI/BOOT" - -GUMMIBOOT_CFG ?= "${S}/loader.conf" -GUMMIBOOT_ENTRIES ?= "" -GUMMIBOOT_TIMEOUT ?= "10" - -# Need UUID utility code. 
-inherit fs-uuid - -efi_populate() { - DEST=$1 - - EFI_IMAGE="gummibootia32.efi" - DEST_EFI_IMAGE="bootia32.efi" - if [ "${TARGET_ARCH}" = "x86_64" ]; then - EFI_IMAGE="gummibootx64.efi" - DEST_EFI_IMAGE="bootx64.efi" - fi - - install -d ${DEST}${EFIDIR} - # gummiboot requires these paths for configuration files - # they are not customizable so no point in new vars - install -d ${DEST}/loader - install -d ${DEST}/loader/entries - install -m 0644 ${DEPLOY_DIR_IMAGE}/${EFI_IMAGE} ${DEST}${EFIDIR}/${DEST_EFI_IMAGE} - EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') - printf 'fs0:%s\%s\n' "$EFIPATH" "$DEST_EFI_IMAGE" >${DEST}/startup.nsh - install -m 0644 ${GUMMIBOOT_CFG} ${DEST}/loader/loader.conf - for i in ${GUMMIBOOT_ENTRIES}; do - install -m 0644 ${i} ${DEST}/loader/entries - done -} - -efi_iso_populate() { - iso_dir=$1 - efi_populate $iso_dir - mkdir -p ${EFIIMGDIR}/${EFIDIR} - cp $iso_dir/${EFIDIR}/* ${EFIIMGDIR}${EFIDIR} - cp $iso_dir/vmlinuz ${EFIIMGDIR} - EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') - echo "fs0:${EFIPATH}\\${DEST_EFI_IMAGE}" > ${EFIIMGDIR}/startup.nsh - if [ -f "$iso_dir/initrd" ] ; then - cp $iso_dir/initrd ${EFIIMGDIR} - fi -} - -efi_hddimg_populate() { - efi_populate $1 -} - -python build_efi_cfg() { - s = d.getVar("S", True) - labels = d.getVar('LABELS', True) - if not labels: - bb.debug(1, "LABELS not defined, nothing to do") - return - - if labels == []: - bb.debug(1, "No labels, nothing to do") - return - - cfile = d.getVar('GUMMIBOOT_CFG', True) - try: - cfgfile = open(cfile, 'w') - except OSError: - bb.fatal('Unable to open %s' % cfile) - - cfgfile.write('# Automatically created by OE\n') - cfgfile.write('default %s\n' % (labels.split()[0])) - timeout = d.getVar('GUMMIBOOT_TIMEOUT', True) - if timeout: - cfgfile.write('timeout %s\n' % timeout) - else: - cfgfile.write('timeout 10\n') - cfgfile.close() - - for label in labels.split(): - localdata = d.createCopy() - - overrides = localdata.getVar('OVERRIDES', True) - if not overrides: - bb.fatal('OVERRIDES not defined') - - entryfile = "%s/%s.conf" % (s, label) - d.appendVar("GUMMIBOOT_ENTRIES", " " + entryfile) - try: - entrycfg = open(entryfile, "w") - except OSError: - bb.fatal('Unable to open %s' % entryfile) - localdata.setVar('OVERRIDES', label + ':' + overrides) - bb.data.update_data(localdata) - - entrycfg.write('title %s\n' % label) - entrycfg.write('linux /vmlinuz\n') - - append = localdata.getVar('APPEND', True) - initrd = localdata.getVar('INITRD', True) - - if initrd: - entrycfg.write('initrd /initrd\n') - lb = label - if label == "install": - lb = "install-efi" - entrycfg.write('options LABEL=%s ' % lb) - if append: - append = replace_rootfs_uuid(d, append) - entrycfg.write('%s' % append) - entrycfg.write('\n') - entrycfg.close() -} diff --git a/import-layers/yocto-poky/meta/classes/gzipnative.bbclass b/import-layers/yocto-poky/meta/classes/gzipnative.bbclass deleted file mode 100644 index 326cbbb6f..000000000 --- a/import-layers/yocto-poky/meta/classes/gzipnative.bbclass +++ /dev/null @@ -1,5 +0,0 @@ -EXTRANATIVEPATH += "pigz-native gzip-native" -DEPENDS += "gzip-native" - -# tar may get run by do_unpack or do_populate_lic which could call gzip -do_unpack[depends] += "gzip-native:do_populate_sysroot" diff --git a/import-layers/yocto-poky/meta/classes/icecc.bbclass b/import-layers/yocto-poky/meta/classes/icecc.bbclass index c57257151..77bf61133 100644 --- a/import-layers/yocto-poky/meta/classes/icecc.bbclass +++ b/import-layers/yocto-poky/meta/classes/icecc.bbclass @@ -101,7 +101,7 @@ def 
use_icecc(bb,d): if icecc_is_allarch(bb, d): return "no" - pn = d.getVar('PN', True) + pn = d.getVar('PN') system_class_blacklist = [] user_class_blacklist = (d.getVar('ICECC_USER_CLASS_BL', False) or "none").split() @@ -140,7 +140,7 @@ def use_icecc(bb,d): return "yes" def icecc_is_allarch(bb, d): - return d.getVar("PACKAGE_ARCH", True) == "all" or bb.data.inherits_class('allarch', d) + return d.getVar("PACKAGE_ARCH") == "all" or bb.data.inherits_class('allarch', d) def icecc_is_kernel(bb, d): return \ diff --git a/import-layers/yocto-poky/meta/classes/image-buildinfo.bbclass b/import-layers/yocto-poky/meta/classes/image-buildinfo.bbclass index 3003f5d25..213fb9cf9 100644 --- a/import-layers/yocto-poky/meta/classes/image-buildinfo.bbclass +++ b/import-layers/yocto-poky/meta/classes/image-buildinfo.bbclass @@ -12,14 +12,17 @@ # Desired variables to display IMAGE_BUILDINFO_VARS ?= "DISTRO DISTRO_VERSION" +# Desired location of the output file in the image. +IMAGE_BUILDINFO_FILE ??= "${sysconfdir}/build" + # From buildhistory.bbclass def image_buildinfo_outputvars(vars, listvars, d): vars = vars.split() listvars = listvars.split() ret = "" for var in vars: - value = d.getVar(var, True) or "" - if (d.getVarFlag(var, 'type', True) == "list"): + value = d.getVar(var) or "" + if (d.getVarFlag(var, 'type') == "list"): value = oe.utils.squashspaces(value) ret += "%s = %s\n" % (var, value) return ret.rstrip('\n') @@ -28,7 +31,9 @@ def image_buildinfo_outputvars(vars, listvars, d): def get_layer_git_status(path): import subprocess try: - subprocess.check_output("cd %s; PSEUDO_UNLOAD=1 git diff --quiet --no-ext-diff" % path, + subprocess.check_output("""cd %s; export PSEUDO_UNLOAD=1; set -e; + git diff --quiet --no-ext-diff + git diff --quiet --no-ext-diff --cached""" % path, shell=True, stderr=subprocess.STDOUT) return "" @@ -40,7 +45,7 @@ def get_layer_git_status(path): # Returns layer revisions along with their respective status def get_layer_revs(d): - layers = (d.getVar("BBLAYERS", True) or "").split() + layers = (d.getVar("BBLAYERS") or "").split() medadata_revs = ["%-17s = %s:%s %s" % (os.path.basename(i), \ base_get_metadata_git_branch(i, None).strip(), \ base_get_metadata_git_revision(i, None), \ @@ -50,16 +55,16 @@ def get_layer_revs(d): def buildinfo_target(d): # Get context - if d.getVar('BB_WORKERCONTEXT', True) != '1': + if d.getVar('BB_WORKERCONTEXT') != '1': return "" # Single and list variables to be read - vars = (d.getVar("IMAGE_BUILDINFO_VARS", True) or "") - listvars = (d.getVar("IMAGE_BUILDINFO_LVARS", True) or "") + vars = (d.getVar("IMAGE_BUILDINFO_VARS") or "") + listvars = (d.getVar("IMAGE_BUILDINFO_LVARS") or "") return image_buildinfo_outputvars(vars, listvars, d) # Write build information to target filesystem python buildinfo () { - with open(d.expand('${IMAGE_ROOTFS}${sysconfdir}/build'), 'w') as build: + with open(d.expand('${IMAGE_ROOTFS}${IMAGE_BUILDINFO_FILE}'), 'w') as build: build.writelines(( '''----------------------- Build Configuration: | diff --git a/import-layers/yocto-poky/meta/classes/image-container.bbclass b/import-layers/yocto-poky/meta/classes/image-container.bbclass new file mode 100644 index 000000000..f002858bd --- /dev/null +++ b/import-layers/yocto-poky/meta/classes/image-container.bbclass @@ -0,0 +1,21 @@ +ROOTFS_BOOTSTRAP_INSTALL = "" +IMAGE_TYPES_MASKED += "container" +IMAGE_TYPEDEP_container = "tar.bz2" + +python __anonymous() { + if "container" in d.getVar("IMAGE_FSTYPES") and \ + d.getVar("IMAGE_CONTAINER_NO_DUMMY") != "1" and \ + 
"linux-dummy" not in d.getVar("PREFERRED_PROVIDER_virtual/kernel"): + msg = '"container" is in IMAGE_FSTYPES, but ' \ + 'PREFERRED_PROVIDER_virtual/kernel is not "linux-dummy". ' \ + 'Unless a particular kernel is needed, using linux-dummy will ' \ + 'prevent a kernel from being built, which can reduce ' \ + 'build times. If you don\'t want to use "linux-dummy", set ' \ + '"IMAGE_CONTAINER_NO_DUMMY" to "1".' + + # Raising skip recipe was Paul's clever idea. It causes the error to + # only be shown for the recipes actually requested to build, rather + # than bb.fatal which would appear for all recipes inheriting the + # class. + raise bb.parse.SkipRecipe(msg) +} diff --git a/import-layers/yocto-poky/meta/classes/image-live.bbclass b/import-layers/yocto-poky/meta/classes/image-live.bbclass index 4a634dca9..a3d1b4e56 100644 --- a/import-layers/yocto-poky/meta/classes/image-live.bbclass +++ b/import-layers/yocto-poky/meta/classes/image-live.bbclass @@ -51,8 +51,8 @@ IMAGE_TYPEDEP_hddimg = "ext4" IMAGE_TYPES_MASKED += "live hddimg iso" python() { - image_b = d.getVar('IMAGE_BASENAME', True) - initrd_i = d.getVar('INITRD_IMAGE_LIVE', True) + image_b = d.getVar('IMAGE_BASENAME') + initrd_i = d.getVar('INITRD_IMAGE_LIVE') if image_b == initrd_i: bb.error('INITRD_IMAGE_LIVE %s cannot use image live, hddimg or iso.' % initrd_i) bb.fatal('Check IMAGE_FSTYPES and INITRAMFS_FSTYPES settings.') @@ -264,9 +264,9 @@ build_hddimg() { python do_bootimg() { set_live_vm_vars(d, 'LIVE') - if d.getVar("PCBIOS", True) == "1": + if d.getVar("PCBIOS") == "1": bb.build.exec_func('build_syslinux_cfg', d) - if d.getVar("EFI", True) == "1": + if d.getVar("EFI") == "1": bb.build.exec_func('build_efi_cfg', d) bb.build.exec_func('build_hddimg', d) bb.build.exec_func('build_iso', d) diff --git a/import-layers/yocto-poky/meta/classes/image-vm.bbclass b/import-layers/yocto-poky/meta/classes/image-vm.bbclass index 2f35d6b4d..98bd92000 100644 --- a/import-layers/yocto-poky/meta/classes/image-vm.bbclass +++ b/import-layers/yocto-poky/meta/classes/image-vm.bbclass @@ -93,7 +93,7 @@ build_boot_dd() { parted $IMAGE print - awk "BEGIN { printf \"$(echo ${DISK_SIGNATURE} | fold -w 2 | tac | paste -sd '' | sed 's/\(..\)/\\x&/g')\" }" | \ + awk "BEGIN { printf \"$(echo ${DISK_SIGNATURE} | sed 's/\(..\)\(..\)\(..\)\(..\)/\\x\4\\x\3\\x\2\\x\1/')\" }" | \ dd of=$IMAGE bs=1 seek=440 conv=notrunc OFFSET=`expr $END2 / 512` @@ -112,9 +112,9 @@ build_boot_dd() { python do_bootdirectdisk() { validate_disk_signature(d) set_live_vm_vars(d, 'VM') - if d.getVar("PCBIOS", True) == "1": + if d.getVar("PCBIOS") == "1": bb.build.exec_func('build_syslinux_cfg', d) - if d.getVar("EFI", True) == "1": + if d.getVar("EFI") == "1": bb.build.exec_func('build_efi_cfg', d) bb.build.exec_func('build_boot_dd', d) } @@ -132,7 +132,7 @@ def generate_disk_signature(): def validate_disk_signature(d): import re - disk_signature = d.getVar("DISK_SIGNATURE", True) + disk_signature = d.getVar("DISK_SIGNATURE") if not re.match(r'^[0-9a-fA-F]{8}$', disk_signature): bb.fatal("DISK_SIGNATURE '%s' must be an 8 digit hex string" % disk_signature) @@ -158,11 +158,11 @@ create_qcow2_image () { } python do_vmimg() { - if 'vmdk' in d.getVar('IMAGE_FSTYPES', True): + if 'vmdk' in d.getVar('IMAGE_FSTYPES'): bb.build.exec_func('create_vmdk_image', d) - if 'vdi' in d.getVar('IMAGE_FSTYPES', True): + if 'vdi' in d.getVar('IMAGE_FSTYPES'): bb.build.exec_func('create_vdi_image', d) - if 'qcow2' in d.getVar('IMAGE_FSTYPES', True): + if 'qcow2' in d.getVar('IMAGE_FSTYPES'): 
bb.build.exec_func('create_qcow2_image', d) } diff --git a/import-layers/yocto-poky/meta/classes/image.bbclass b/import-layers/yocto-poky/meta/classes/image.bbclass index a9ab2fac1..4bcfb87c9 100644 --- a/import-layers/yocto-poky/meta/classes/image.bbclass +++ b/import-layers/yocto-poky/meta/classes/image.bbclass @@ -1,19 +1,17 @@ inherit rootfs_${IMAGE_PKGTYPE} -# Only Linux SDKs support populate_sdk_ext, fall back to populate_sdk +# Only Linux SDKs support populate_sdk_ext, fall back to populate_sdk_base # in the non-Linux SDK_OS case, such as mingw32 -SDKEXTCLASS ?= "${@['populate_sdk', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS", True)]}" +SDKEXTCLASS ?= "${@['populate_sdk_base', 'populate_sdk_ext']['linux' in d.getVar("SDK_OS")]}" inherit ${SDKEXTCLASS} TOOLCHAIN_TARGET_TASK += "${PACKAGE_INSTALL}" TOOLCHAIN_TARGET_TASK_ATTEMPTONLY += "${PACKAGE_INSTALL_ATTEMPTONLY}" POPULATE_SDK_POST_TARGET_COMMAND += "rootfs_sysroot_relativelinks; " -inherit gzipnative - LICENSE = "MIT" PACKAGES = "" -DEPENDS += "${MLPREFIX}qemuwrapper-cross ${MLPREFIX}depmodwrapper-cross" +DEPENDS += "${MLPREFIX}qemuwrapper-cross depmodwrapper-cross" RDEPENDS += "${PACKAGE_INSTALL} ${LINGUAS_INSTALL}" RRECOMMENDS += "${PACKAGE_INSTALL_ATTEMPTONLY}" @@ -31,7 +29,7 @@ IMAGE_FEATURES[validitems] += "debug-tweaks read-only-rootfs empty-root-password IMAGE_GEN_DEBUGFS ?= "0" # rootfs bootstrap install -ROOTFS_BOOTSTRAP_INSTALL = "${@bb.utils.contains("IMAGE_FEATURES", "package-management", "", "${ROOTFS_PKGMANAGE_BOOTSTRAP}",d)}" +ROOTFS_BOOTSTRAP_INSTALL = "run-postinsts" # These packages will be removed from a read-only rootfs after all other # packages have been installed @@ -51,7 +49,7 @@ FEATURE_PACKAGES_splash = "${SPLASH}" IMAGE_INSTALL_COMPLEMENTARY = '${@complementary_globs("IMAGE_FEATURES", d)}' def check_image_features(d): - valid_features = (d.getVarFlag('IMAGE_FEATURES', 'validitems', True) or "").split() + valid_features = (d.getVarFlag('IMAGE_FEATURES', 'validitems') or "").split() valid_features += d.getVarFlags('COMPLEMENTARY_GLOB').keys() for var in d: if var.startswith("PACKAGE_GROUP_"): @@ -133,7 +131,7 @@ def build_live(d): if bb.utils.contains("IMAGE_FSTYPES", "live", "live", "0", d) == "0": # live is not set but hob might set iso or hddimg d.setVar('NOISO', bb.utils.contains('IMAGE_FSTYPES', "iso", "0", "1", d)) d.setVar('NOHDD', bb.utils.contains('IMAGE_FSTYPES', "hddimg", "0", "1", d)) - if d.getVar('NOISO', True) == "0" or d.getVar('NOHDD', True) == "0": + if d.getVar('NOISO') == "0" or d.getVar('NOHDD') == "0": return "image-live" return "" return "image-live" @@ -144,37 +142,39 @@ inherit ${IMAGE_TYPE_live} IMAGE_TYPE_vm = '${@bb.utils.contains_any("IMAGE_FSTYPES", ["vmdk", "vdi", "qcow2", "hdddirect"], "image-vm", "", d)}' inherit ${IMAGE_TYPE_vm} +IMAGE_TYPE_container = '${@bb.utils.contains("IMAGE_FSTYPES", "container", "image-container", "", d)}' +inherit ${IMAGE_TYPE_container} + +IMAGE_TYPE_wic = "image_types_wic" +inherit ${IMAGE_TYPE_wic} + python () { deps = " " + imagetypes_getdepends(d) d.appendVarFlag('do_rootfs', 'depends', deps) deps = "" - for dep in (d.getVar('EXTRA_IMAGEDEPENDS', True) or "").split(): + for dep in (d.getVar('EXTRA_IMAGEDEPENDS') or "").split(): deps += " %s:do_populate_sysroot" % dep - d.appendVarFlag('do_build', 'depends', deps) + d.appendVarFlag('do_image_complete', 'depends', deps) #process IMAGE_FEATURES, we must do this before runtime_mapping_rename #Check for replaces image features features = set(oe.data.typed_value('IMAGE_FEATURES', d)) 
remain_features = features.copy() for feature in features: - replaces = set((d.getVar("IMAGE_FEATURES_REPLACES_%s" % feature, True) or "").split()) + replaces = set((d.getVar("IMAGE_FEATURES_REPLACES_%s" % feature) or "").split()) remain_features -= replaces #Check for conflict image features for feature in remain_features: - conflicts = set((d.getVar("IMAGE_FEATURES_CONFLICTS_%s" % feature, True) or "").split()) + conflicts = set((d.getVar("IMAGE_FEATURES_CONFLICTS_%s" % feature) or "").split()) temp = conflicts & remain_features if temp: - bb.fatal("%s contains conflicting IMAGE_FEATURES %s %s" % (d.getVar('PN', True), feature, ' '.join(list(temp)))) + bb.fatal("%s contains conflicting IMAGE_FEATURES %s %s" % (d.getVar('PN'), feature, ' '.join(list(temp)))) d.setVar('IMAGE_FEATURES', ' '.join(sorted(list(remain_features)))) check_image_features(d) - initramfs_image = d.getVar('INITRAMFS_IMAGE', True) or "" - if initramfs_image != "": - d.appendVarFlag('do_build', 'depends', " %s:do_bundle_initramfs" % d.getVar('PN', True)) - d.appendVarFlag('do_bundle_initramfs', 'depends', " %s:do_image_complete" % initramfs_image) } IMAGE_CLASSES += "image_types" @@ -185,7 +185,7 @@ IMAGE_POSTPROCESS_COMMAND ?= "" # some default locales IMAGE_LINGUAS ?= "de-de fr-fr en-gb" -LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS', True).split()))}" +LINGUAS_INSTALL ?= "${@" ".join(map(lambda s: "locale-base-%s" % s, d.getVar('IMAGE_LINGUAS').split()))}" # Prefer image, but use the fallback files for lookups if the image ones # aren't yet available. @@ -199,6 +199,14 @@ PACKAGE_EXCLUDE[type] = "list" fakeroot python do_rootfs () { from oe.rootfs import create_rootfs from oe.manifest import create_manifest + import logging + + logger = d.getVar('BB_TASK_LOGGER', False) + if logger: + logcatcher = bb.utils.LogCatcher() + logger.addHandler(logcatcher) + else: + logcatcher = None # NOTE: if you add, remove or significantly refactor the stages of this # process then you should recalculate the weightings here. This is quite @@ -212,20 +220,20 @@ fakeroot python do_rootfs () { progress_reporter.next_stage() # Handle package exclusions - excl_pkgs = d.getVar("PACKAGE_EXCLUDE", True).split() - inst_pkgs = d.getVar("PACKAGE_INSTALL", True).split() - inst_attempt_pkgs = d.getVar("PACKAGE_INSTALL_ATTEMPTONLY", True).split() + excl_pkgs = d.getVar("PACKAGE_EXCLUDE").split() + inst_pkgs = d.getVar("PACKAGE_INSTALL").split() + inst_attempt_pkgs = d.getVar("PACKAGE_INSTALL_ATTEMPTONLY").split() d.setVar('PACKAGE_INSTALL_ORIG', ' '.join(inst_pkgs)) d.setVar('PACKAGE_INSTALL_ATTEMPTONLY', ' '.join(inst_attempt_pkgs)) for pkg in excl_pkgs: if pkg in inst_pkgs: - bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL (%s). It will be removed from the list." % (pkg, d.getVar('PN', True), inst_pkgs)) + bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL (%s). It will be removed from the list." % (pkg, d.getVar('PN'), inst_pkgs)) inst_pkgs.remove(pkg) if pkg in inst_attempt_pkgs: - bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL_ATTEMPTONLY (%s). It will be removed from the list." % (pkg, d.getVar('PN', True), inst_pkgs)) + bb.warn("Package %s, set to be excluded, is in %s PACKAGE_INSTALL_ATTEMPTONLY (%s). It will be removed from the list." 
% (pkg, d.getVar('PN'), inst_pkgs)) inst_attempt_pkgs.remove(pkg) d.setVar("PACKAGE_INSTALL", ' '.join(inst_pkgs)) @@ -235,7 +243,7 @@ fakeroot python do_rootfs () { # We have to delay the runtime_mapping_rename until just before rootfs runs # otherwise, the multilib renaming could step in and squash any fixups that # may have occurred. - pn = d.getVar('PN', True) + pn = d.getVar('PN') runtime_mapping_rename("PACKAGE_INSTALL", pn, d) runtime_mapping_rename("PACKAGE_INSTALL_ATTEMPTONLY", pn, d) runtime_mapping_rename("BAD_RECOMMENDATIONS", pn, d) @@ -246,19 +254,19 @@ fakeroot python do_rootfs () { progress_reporter.next_stage() # generate rootfs - create_rootfs(d, progress_reporter=progress_reporter) + create_rootfs(d, progress_reporter=progress_reporter, logcatcher=logcatcher) progress_reporter.finish() } do_rootfs[dirs] = "${TOPDIR}" do_rootfs[cleandirs] += "${S} ${IMGDEPLOYDIR}" do_rootfs[umask] = "022" -addtask rootfs before do_build +addtask rootfs before do_build after do_prepare_recipe_sysroot fakeroot python do_image () { from oe.utils import execute_pre_post_process - pre_process_cmds = d.getVar("IMAGE_PREPROCESS_COMMAND", True) + pre_process_cmds = d.getVar("IMAGE_PREPROCESS_COMMAND") execute_pre_post_process(d, pre_process_cmds) } @@ -269,7 +277,7 @@ addtask do_image after do_rootfs before do_build fakeroot python do_image_complete () { from oe.utils import execute_pre_post_process - post_process_cmds = d.getVar("IMAGE_POSTPROCESS_COMMAND", True) + post_process_cmds = d.getVar("IMAGE_POSTPROCESS_COMMAND") execute_pre_post_process(d, post_process_cmds) } @@ -292,7 +300,7 @@ addtask do_image_complete after do_image before do_build fakeroot python do_image_qa () { from oe.utils import ImageQAFailed - qa_cmds = (d.getVar('IMAGE_QA_COMMANDS', True) or '').split() + qa_cmds = (d.getVar('IMAGE_QA_COMMANDS') or '').split() qamsg = "" for cmd in qa_cmds: @@ -307,40 +315,17 @@ fakeroot python do_image_qa () { qamsg = qamsg + '\n' if qamsg: - imgname = d.getVar('IMAGE_NAME', True) + imgname = d.getVar('IMAGE_NAME') bb.fatal("QA errors found whilst validating image: %s\n%s" % (imgname, qamsg)) } addtask do_image_qa after do_image_complete before do_build -# -# Write environment variables used by wic -# to tmp/sysroots//imgdata/.env -# -python do_rootfs_wicenv () { - wicvars = d.getVar('WICVARS', True) - if not wicvars: - return - - stdir = d.getVar('STAGING_DIR_TARGET', True) - outdir = os.path.join(stdir, 'imgdata') - bb.utils.mkdirhier(outdir) - basename = d.getVar('IMAGE_BASENAME', True) - with open(os.path.join(outdir, basename) + '.env', 'w') as envf: - for var in wicvars.split(): - value = d.getVar(var, True) - if value: - envf.write('%s="%s"\n' % (var, value.strip())) -} -addtask do_rootfs_wicenv after do_image before do_image_wic -do_rootfs_wicenv[vardeps] += "${WICVARS}" -do_rootfs_wicenv[prefuncs] = 'set_image_size' - def setup_debugfs_variables(d): d.appendVar('IMAGE_ROOTFS', '-dbg') d.appendVar('IMAGE_LINK_NAME', '-dbg') d.appendVar('IMAGE_NAME','-dbg') d.setVar('IMAGE_BUILDING_DEBUGFS', 'true') - debugfs_image_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS', True) + debugfs_image_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS') if debugfs_image_fstypes: d.setVar('IMAGE_FSTYPES', debugfs_image_fstypes) @@ -358,7 +343,7 @@ python () { # # Without de-duplication, gen_conversion_cmds() below # would create the same compression command multiple times. 
- ctypes = set(d.getVar('CONVERSIONTYPES', True).split()) + ctypes = set(d.getVar('CONVERSIONTYPES').split()) old_overrides = d.getVar('OVERRIDES', False) def _image_base_type(type): @@ -375,11 +360,11 @@ python () { return basetype basetypes = {} - alltypes = d.getVar('IMAGE_FSTYPES', True).split() + alltypes = d.getVar('IMAGE_FSTYPES').split() typedeps = {} - if d.getVar('IMAGE_GEN_DEBUGFS', True) == "1": - debugfs_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS', True).split() + if d.getVar('IMAGE_GEN_DEBUGFS') == "1": + debugfs_fstypes = d.getVar('IMAGE_FSTYPES_DEBUGFS').split() for t in debugfs_fstypes: alltypes.append("debugfs_" + t) @@ -394,7 +379,7 @@ python () { if t.startswith("debugfs_"): t = t[8:] debug = "debugfs_" - deps = (d.getVar('IMAGE_TYPEDEP_' + t, True) or "").split() + deps = (d.getVar('IMAGE_TYPEDEP_' + t) or "").split() vardeps.add('IMAGE_TYPEDEP_' + t) if baset not in typedeps: typedeps[baset] = set() @@ -414,7 +399,7 @@ python () { d.appendVarFlag('do_image', 'vardeps', ' '.join(vardeps)) - maskedtypes = (d.getVar('IMAGE_TYPES_MASKED', True) or "").split() + maskedtypes = (d.getVar('IMAGE_TYPES_MASKED') or "").split() maskedtypes = [dbg + t for t in maskedtypes for dbg in ("", "debugfs_")] for t in basetypes: @@ -433,16 +418,17 @@ python () { debug = "setup_debugfs " realt = t[8:] localdata.setVar('OVERRIDES', '%s:%s' % (realt, old_overrides)) - bb.data.update_data(localdata) localdata.setVar('type', realt) # Delete DATETIME so we don't expand any references to it now # This means the task's hash can be stable rather than having hardcoded # date/time values. It will get expanded at execution time. # Similarly TMPDIR since otherwise we see QA stamp comparision problems + # Expand PV else it can trigger get_srcrev which can fail due to these variables being unset + localdata.setVar('PV', d.getVar('PV')) localdata.delVar('DATETIME') localdata.delVar('TMPDIR') - image_cmd = localdata.getVar("IMAGE_CMD", True) + image_cmd = localdata.getVar("IMAGE_CMD") vardeps.add('IMAGE_CMD_' + realt) if image_cmd: cmds.append("\t" + image_cmd) @@ -464,7 +450,7 @@ python () { # Create input image first. 
gen_conversion_cmds(type) localdata.setVar('type', type) - cmd = "\t" + (localdata.getVar("CONVERSION_CMD_" + ctype, True) or localdata.getVar("COMPRESS_CMD_" + ctype, True)) + cmd = "\t" + (localdata.getVar("CONVERSION_CMD_" + ctype) or localdata.getVar("COMPRESS_CMD_" + ctype)) if cmd not in cmds: cmds.append(cmd) vardeps.add('CONVERSION_CMD_' + ctype) @@ -515,17 +501,17 @@ python () { def get_rootfs_size(d): import subprocess - rootfs_alignment = int(d.getVar('IMAGE_ROOTFS_ALIGNMENT', True)) - overhead_factor = float(d.getVar('IMAGE_OVERHEAD_FACTOR', True)) - rootfs_req_size = int(d.getVar('IMAGE_ROOTFS_SIZE', True)) - rootfs_extra_space = eval(d.getVar('IMAGE_ROOTFS_EXTRA_SPACE', True)) - rootfs_maxsize = d.getVar('IMAGE_ROOTFS_MAXSIZE', True) - image_fstypes = d.getVar('IMAGE_FSTYPES', True) or '' - initramfs_fstypes = d.getVar('INITRAMFS_FSTYPES', True) or '' - initramfs_maxsize = d.getVar('INITRAMFS_MAXSIZE', True) + rootfs_alignment = int(d.getVar('IMAGE_ROOTFS_ALIGNMENT')) + overhead_factor = float(d.getVar('IMAGE_OVERHEAD_FACTOR')) + rootfs_req_size = int(d.getVar('IMAGE_ROOTFS_SIZE')) + rootfs_extra_space = eval(d.getVar('IMAGE_ROOTFS_EXTRA_SPACE')) + rootfs_maxsize = d.getVar('IMAGE_ROOTFS_MAXSIZE') + image_fstypes = d.getVar('IMAGE_FSTYPES') or '' + initramfs_fstypes = d.getVar('INITRAMFS_FSTYPES') or '' + initramfs_maxsize = d.getVar('INITRAMFS_MAXSIZE') output = subprocess.check_output(['du', '-ks', - d.getVar('IMAGE_ROOTFS', True)]) + d.getVar('IMAGE_ROOTFS')]) size_kb = int(output.split()[0]) base_size = size_kb * overhead_factor base_size = max(base_size, rootfs_req_size) + rootfs_extra_space @@ -541,7 +527,7 @@ def get_rootfs_size(d): # Do not check image size of the debugfs image. This is not supposed # to be deployed, etc. so it doesn't make sense to limit the size # of the debug. 
- if (d.getVar('IMAGE_BUILDING_DEBUGFS', True) or "") == "true": + if (d.getVar('IMAGE_BUILDING_DEBUGFS') or "") == "true": return base_size # Check the rootfs size against IMAGE_ROOTFS_MAXSIZE (if set) @@ -572,13 +558,13 @@ python set_image_size () { # python create_symlinks() { - deploy_dir = d.getVar('IMGDEPLOYDIR', True) - img_name = d.getVar('IMAGE_NAME', True) - link_name = d.getVar('IMAGE_LINK_NAME', True) - manifest_name = d.getVar('IMAGE_MANIFEST', True) - taskname = d.getVar("BB_CURRENTTASK", True) + deploy_dir = d.getVar('IMGDEPLOYDIR') + img_name = d.getVar('IMAGE_NAME') + link_name = d.getVar('IMAGE_LINK_NAME') + manifest_name = d.getVar('IMAGE_MANIFEST') + taskname = d.getVar("BB_CURRENTTASK") subimages = (d.getVarFlag("do_" + taskname, 'subimages', False) or "").split() - imgsuffix = d.getVarFlag("do_" + taskname, 'imgsuffix', True) or d.expand("${IMAGE_NAME_SUFFIX}.") + imgsuffix = d.getVarFlag("do_" + taskname, 'imgsuffix') or d.expand("${IMAGE_NAME_SUFFIX}.") if not link_name: return @@ -604,19 +590,11 @@ do_patch[noexec] = "1" do_configure[noexec] = "1" do_compile[noexec] = "1" do_install[noexec] = "1" -do_populate_sysroot[noexec] = "1" +deltask do_populate_sysroot do_package[noexec] = "1" -do_package_qa[noexec] = "1" +deltask do_package_qa do_packagedata[noexec] = "1" do_package_write_ipk[noexec] = "1" do_package_write_deb[noexec] = "1" do_package_write_rpm[noexec] = "1" -# Allow the kernel to be repacked with the initramfs and boot image file as a single file -do_bundle_initramfs[depends] += "virtual/kernel:do_bundle_initramfs" -do_bundle_initramfs[nostamp] = "1" -do_bundle_initramfs[noexec] = "1" -do_bundle_initramfs () { - : -} -addtask bundle_initramfs after do_image_complete diff --git a/import-layers/yocto-poky/meta/classes/image_types.bbclass b/import-layers/yocto-poky/meta/classes/image_types.bbclass index 3bfa60ba2..8db18ac5a 100644 --- a/import-layers/yocto-poky/meta/classes/image_types.bbclass +++ b/import-layers/yocto-poky/meta/classes/image_types.bbclass @@ -23,22 +23,22 @@ def imagetypes_getdepends(d): types = typestring.split(".") return types[0], types[1:] - fstypes = set((d.getVar('IMAGE_FSTYPES', True) or "").split()) - fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS', True) or "").split()) + fstypes = set((d.getVar('IMAGE_FSTYPES') or "").split()) + fstypes |= set((d.getVar('IMAGE_FSTYPES_DEBUGFS') or "").split()) deps = set() for typestring in fstypes: basetype, resttypes = split_types(typestring) - adddep(d.getVar('IMAGE_DEPENDS_%s' % basetype, True) , deps) + adddep(d.getVar('IMAGE_DEPENDS_%s' % basetype) , deps) - for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype, True) or "").split(): + for typedepends in (d.getVar("IMAGE_TYPEDEP_%s" % basetype) or "").split(): base, rest = split_types(typedepends) - adddep(d.getVar('IMAGE_DEPENDS_%s' % base, True) , deps) + adddep(d.getVar('IMAGE_DEPENDS_%s' % base) , deps) resttypes += rest for ctype in resttypes: - adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype, True), deps) - adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype, True), deps) + adddep(d.getVar("CONVERSION_DEPENDS_%s" % ctype), deps) + adddep(d.getVar("COMPRESS_DEPENDS_%s" % ctype), deps) # Sort the set so that ordering is consistant return " ".join(sorted(deps)) @@ -74,6 +74,8 @@ oe_mkext234fs () { # Create a sparse image block dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype seek=$ROOTFS_SIZE count=$COUNT bs=1024 mkfs.$fstype -F $extra_imagecmd ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype -d 
${IMAGE_ROOTFS} + # Error codes 0-3 indicate successfull operation of fsck (no errors or errors corrected) + fsck.$fstype -pvfD ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.$fstype || [ $? -le 3 ] } IMAGE_CMD_ext2 = "oe_mkext234fs ext2 ${EXTRA_IMAGECMD}" @@ -82,12 +84,13 @@ IMAGE_CMD_ext4 = "oe_mkext234fs ext4 ${EXTRA_IMAGECMD}" MIN_BTRFS_SIZE ?= "16384" IMAGE_CMD_btrfs () { - if [ ${ROOTFS_SIZE} -gt ${MIN_BTRFS_SIZE} ]; then - dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.btrfs count=${ROOTFS_SIZE} bs=1024 - mkfs.btrfs ${EXTRA_IMAGECMD} -r ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.btrfs - else - bbfatal "Rootfs is too small for BTRFS (Rootfs Actual Size: ${ROOTFS_SIZE}, BTRFS Minimum Size: ${MIN_BTRFS_SIZE})" + size=${ROOTFS_SIZE} + if [ ${size} -lt ${MIN_BTRFS_SIZE} ] ; then + size=${MIN_BTRFS_SIZE} + bbwarn "Rootfs size is too small for BTRFS. Filesystem will be extended to ${size}K" fi + dd if=/dev/zero of=${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.btrfs count=${size} bs=1024 + mkfs.btrfs ${EXTRA_IMAGECMD} -r ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.btrfs } IMAGE_CMD_squashfs = "mksquashfs ${IMAGE_ROOTFS} ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.squashfs ${EXTRA_IMAGECMD} -noappend" @@ -192,95 +195,9 @@ IMAGE_CMD_ubi () { IMAGE_CMD_ubifs = "mkfs.ubifs -r ${IMAGE_ROOTFS} -o ${IMGDEPLOYDIR}/${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.ubifs ${MKUBIFS_ARGS}" -WKS_FILE ??= "${IMAGE_BASENAME}.${MACHINE}.wks" -WKS_FILES ?= "${WKS_FILE} ${IMAGE_BASENAME}.wks" -WKS_SEARCH_PATH ?= "${THISDIR}:${@':'.join('%s/scripts/lib/wic/canned-wks' % l for l in '${BBPATH}:${COREBASE}'.split(':'))}" -WKS_FULL_PATH = "${@wks_search('${WKS_FILES}'.split(), '${WKS_SEARCH_PATH}') or ''}" - -def wks_search(files, search_path): - for f in files: - if os.path.isabs(f): - if os.path.exists(f): - return f - else: - searched = bb.utils.which(search_path, f) - if searched: - return searched - -WIC_CREATE_EXTRA_ARGS ?= "" - -IMAGE_CMD_wic () { - out="${IMGDEPLOYDIR}/${IMAGE_NAME}" - wks="${WKS_FULL_PATH}" - if [ -z "$wks" ]; then - bbfatal "No kickstart files from WKS_FILES were found: ${WKS_FILES}. Please set WKS_FILE or WKS_FILES appropriately." 
- fi - - BUILDDIR="${TOPDIR}" wic create "$wks" --vars "${STAGING_DIR_TARGET}/imgdata/" -e "${IMAGE_BASENAME}" -o "$out/" ${WIC_CREATE_EXTRA_ARGS} - mv "$out/build/$(basename "${wks%.wks}")"*.direct "$out${IMAGE_NAME_SUFFIX}.wic" - rm -rf "$out/" -} -IMAGE_CMD_wic[vardepsexclude] = "WKS_FULL_PATH WKS_FILES" - -# Rebuild when the wks file or vars in WICVARS change -USING_WIC = "${@bb.utils.contains_any('IMAGE_FSTYPES', 'wic ' + ' '.join('wic.%s' % c for c in '${CONVERSIONTYPES}'.split()), '1', '', d)}" -WKS_FILE_CHECKSUM = "${@'${WKS_FULL_PATH}:%s' % os.path.exists('${WKS_FULL_PATH}') if '${USING_WIC}' else ''}" -do_image_wic[file-checksums] += "${WKS_FILE_CHECKSUM}" - -python () { - if d.getVar('USING_WIC', True) and 'do_bootimg' in d: - bb.build.addtask('do_image_wic', '', 'do_bootimg', d) -} - -python do_write_wks_template () { - """Write out expanded template contents to WKS_FULL_PATH.""" - import re - - template_body = d.getVar('_WKS_TEMPLATE', True) - - # Remove any remnant variable references left behind by the expansion - # due to undefined variables - expand_var_regexp = re.compile(r"\${[^{}@\n\t :]+}") - while True: - new_body = re.sub(expand_var_regexp, '', template_body) - if new_body == template_body: - break - else: - template_body = new_body - - wks_file = d.getVar('WKS_FULL_PATH', True) - with open(wks_file, 'w') as f: - f.write(template_body) -} - -python () { - if d.getVar('USING_WIC', True): - wks_file_u = d.getVar('WKS_FULL_PATH', False) - wks_file = d.expand(wks_file_u) - base, ext = os.path.splitext(wks_file) - if ext == '.in' and os.path.exists(wks_file): - wks_out_file = os.path.join(d.getVar('WORKDIR', True), os.path.basename(base)) - d.setVar('WKS_FULL_PATH', wks_out_file) - d.setVar('WKS_TEMPLATE_PATH', wks_file_u) - d.setVar('WKS_FILE_CHECKSUM', '${WKS_TEMPLATE_PATH}:True') - - try: - with open(wks_file, 'r') as f: - body = f.read() - except (IOError, OSError) as exc: - pass - else: - # Previously, I used expandWithRefs to get the dependency list - # and add it to WICVARS, but there's no point re-parsing the - # file in process_wks_template as well, so just put it in - # a variable and let the metadata deal with the deps. - d.setVar('_WKS_TEMPLATE', body) - bb.build.addtask('do_write_wks_template', 'do_image_wic', None, d) -} - EXTRA_IMAGECMD = "" -inherit siteinfo +inherit siteinfo kernel-arch JFFS2_ENDIANNESS ?= "${@base_conditional('SITEINFO_ENDIANNESS', 'le', '-l', '-b', d)}" JFFS2_ERASEBLOCK ?= "0x40000" EXTRA_IMAGECMD_jffs2 ?= "--pad ${JFFS2_ENDIANNESS} --eraseblock=${JFFS2_ERASEBLOCK} --no-cleanmarkers" @@ -328,6 +245,7 @@ IMAGE_TYPES = " \ hdddirect \ elf \ wic wic.gz wic.bz2 wic.lzma \ + container \ " # Compression is a special case of conversion. The old variable @@ -336,12 +254,14 @@ IMAGE_TYPES = " \ # CONVERSION_CMD/DEPENDS. 
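The conversion machinery in image_types.bbclass treats a compound IMAGE_FSTYPES entry such as ext4.gz as a base image type plus a chain of conversion suffixes, each of which typically has a matching CONVERSION_CMD_<type> (and, where extra tools are needed, CONVERSION_DEPENDS_<type>) like the definitions that follow below. As a minimal standalone illustration (not part of the class) of how such an entry is split, mirroring split_types() in imagetypes_getdepends() earlier in this file:

def split_types(typestring):
    # first element is the base image type, the rest are conversion suffixes
    types = typestring.split(".")
    return types[0], types[1:]

print(split_types("ext4.gz"))          # ('ext4', ['gz'])
print(split_types("cpio.gz.u-boot"))   # ('cpio', ['gz', 'u-boot'])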
COMPRESSIONTYPES ?= "" -CONVERSIONTYPES = "gz bz2 lzma xz lz4 zip sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum bmap ${COMPRESSIONTYPES}" +CONVERSIONTYPES = "gz bz2 lzma xz lz4 lzo zip sum md5sum sha1sum sha224sum sha256sum sha384sum sha512sum bmap u-boot ${COMPRESSIONTYPES}" CONVERSION_CMD_lzma = "lzma -k -f -7 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}" CONVERSION_CMD_gz = "gzip -f -9 -c ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.gz" CONVERSION_CMD_bz2 = "pbzip2 -f -k ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}" CONVERSION_CMD_xz = "xz -f -k -c ${XZ_COMPRESSION_LEVEL} ${XZ_THREADS} --check=${XZ_INTEGRITY_CHECK} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.xz" -CONVERSION_CMD_lz4 = "lz4c -9 -c ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.lz4" +CONVERSION_CMD_lz4 = "lz4 -9 -z ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.lz4" +CONVERSION_CMD_lz4_legacy = "lz4 -9 -z -l ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.lz4" +CONVERSION_CMD_lzo = "lzop -9 ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}" CONVERSION_CMD_zip = "zip ${ZIP_COMPRESSION_LEVEL} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.zip ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}" CONVERSION_CMD_sum = "sumtool -i ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} -o ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sum ${JFFS2_SUM_EXTRA_ARGS}" CONVERSION_CMD_md5sum = "md5sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.md5sum" @@ -351,14 +271,17 @@ CONVERSION_CMD_sha256sum = "sha256sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} CONVERSION_CMD_sha384sum = "sha384sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha384sum" CONVERSION_CMD_sha512sum = "sha512sum ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} > ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.sha512sum" CONVERSION_CMD_bmap = "bmaptool create ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} -o ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.bmap" +CONVERSION_CMD_u-boot = "mkimage -A ${UBOOT_ARCH} -O linux -T ramdisk -C none -n ${IMAGE_NAME} -d ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type} ${IMAGE_NAME}${IMAGE_NAME_SUFFIX}.${type}.u-boot" CONVERSION_DEPENDS_lzma = "xz-native" -CONVERSION_DEPENDS_gz = "" +CONVERSION_DEPENDS_gz = "pigz-native" CONVERSION_DEPENDS_bz2 = "pbzip2-native" CONVERSION_DEPENDS_xz = "xz-native" CONVERSION_DEPENDS_lz4 = "lz4-native" +CONVERSION_DEPENDS_lzo = "lzop-native" CONVERSION_DEPENDS_zip = "zip-native" CONVERSION_DEPENDS_sum = "mtd-utils-native" CONVERSION_DEPENDS_bmap = "bmap-tools-native" +CONVERSION_DEPENDS_u-boot = "u-boot-mkimage-native" RUNNABLE_IMAGE_TYPES ?= "ext2 ext3 ext4" RUNNABLE_MACHINE_PATTERNS ?= "qemu" @@ -371,7 +294,3 @@ IMAGE_EXTENSION_live = "hddimg iso" # The IMAGE_TYPES_MASKED variable will be used to mask out from the IMAGE_FSTYPES, # images that will not be built at do_rootfs time: vmdk, vdi, qcow2, hdddirect, hddimg, iso, etc. 
IMAGE_TYPES_MASKED ?= "" - -# The WICVARS variable is used to define list of bitbake variables used in wic code -# variables from this list is written to .env file -WICVARS ?= "BBLAYERS IMGDEPLOYDIR DEPLOY_DIR_IMAGE HDDDIR IMAGE_BASENAME IMAGE_BOOT_FILES IMAGE_LINK_NAME IMAGE_ROOTFS INITRAMFS_FSTYPES INITRD ISODIR MACHINE_ARCH ROOTFS_SIZE STAGING_DATADIR STAGING_DIR_NATIVE STAGING_LIBDIR TARGET_SYS" diff --git a/import-layers/yocto-poky/meta/classes/image_types_uboot.bbclass b/import-layers/yocto-poky/meta/classes/image_types_uboot.bbclass deleted file mode 100644 index 933fa4d9c..000000000 --- a/import-layers/yocto-poky/meta/classes/image_types_uboot.bbclass +++ /dev/null @@ -1,23 +0,0 @@ -inherit image_types kernel-arch - -oe_mkimage () { - mkimage -A ${UBOOT_ARCH} -O linux -T ramdisk -C $2 -n ${IMAGE_NAME} \ - -d ${IMGDEPLOYDIR}/$1 ${IMGDEPLOYDIR}/$1.u-boot -} - -CONVERSIONTYPES += "gz.u-boot bz2.u-boot lzma.u-boot u-boot" - -CONVERSION_DEPENDS_u-boot = "u-boot-mkimage-native" -CONVERSION_CMD_u-boot = "oe_mkimage ${IMAGE_NAME}.rootfs.${type} none" - -CONVERSION_DEPENDS_gz.u-boot = "u-boot-mkimage-native" -CONVERSION_CMD_gz.u-boot = "${CONVERSION_CMD_gz}; oe_mkimage ${IMAGE_NAME}.rootfs.${type}.gz gzip" - -CONVERSION_DEPENDS_bz2.u-boot = "u-boot-mkimage-native" -CONVERSION_CMD_bz2.u-boot = "${CONVERSION_CMD_bz2}; oe_mkimage ${IMAGE_NAME}.rootfs.${type}.bz2 bzip2" - -CONVERSION_DEPENDS_lzma.u-boot = "u-boot-mkimage-native" -CONVERSION_CMD_lzma.u-boot = "${CONVERSION_CMD_lzma}; oe_mkimage ${IMAGE_NAME}.rootfs.${type}.lzma lzma" - -IMAGE_TYPES += "ext2.u-boot ext2.gz.u-boot ext2.bz2.u-boot ext2.lzma.u-boot ext3.gz.u-boot ext4.gz.u-boot cpio.gz.u-boot" - diff --git a/import-layers/yocto-poky/meta/classes/image_types_wic.bbclass b/import-layers/yocto-poky/meta/classes/image_types_wic.bbclass new file mode 100644 index 000000000..68f251cfd --- /dev/null +++ b/import-layers/yocto-poky/meta/classes/image_types_wic.bbclass @@ -0,0 +1,117 @@ +# The WICVARS variable is used to define list of bitbake variables used in wic code +# variables from this list is written to .env file +WICVARS ?= "\ + BBLAYERS IMGDEPLOYDIR DEPLOY_DIR_IMAGE FAKEROOTCMD IMAGE_BASENAME IMAGE_BOOT_FILES \ + IMAGE_LINK_NAME IMAGE_ROOTFS INITRAMFS_FSTYPES INITRD INITRD_LIVE ISODIR RECIPE_SYSROOT_NATIVE \ + ROOTFS_SIZE STAGING_DATADIR STAGING_DIR STAGING_LIBDIR TARGET_SYS TRANSLATED_TARGET_ARCH" + +WKS_FILE ??= "${IMAGE_BASENAME}.${MACHINE}.wks" +WKS_FILES ?= "${WKS_FILE} ${IMAGE_BASENAME}.wks" +WKS_SEARCH_PATH ?= "${THISDIR}:${@':'.join('%s/wic' % p for p in '${BBPATH}'.split(':'))}:${@':'.join('%s/scripts/lib/wic/canned-wks' % l for l in '${BBPATH}:${COREBASE}'.split(':'))}" +WKS_FULL_PATH = "${@wks_search(d.getVar('WKS_FILES').split(), d.getVar('WKS_SEARCH_PATH')) or ''}" + +def wks_search(files, search_path): + for f in files: + if os.path.isabs(f): + if os.path.exists(f): + return f + else: + searched = bb.utils.which(search_path, f) + if searched: + return searched + +WIC_CREATE_EXTRA_ARGS ?= "" + +IMAGE_CMD_wic () { + out="${IMGDEPLOYDIR}/${IMAGE_NAME}" + wks="${WKS_FULL_PATH}" + if [ -z "$wks" ]; then + bbfatal "No kickstart files from WKS_FILES were found: ${WKS_FILES}. Please set WKS_FILE or WKS_FILES appropriately." 
+ fi + + BUILDDIR="${TOPDIR}" wic create "$wks" --vars "${STAGING_DIR}/${MACHINE}/imgdata/" -e "${IMAGE_BASENAME}" -o "$out/" ${WIC_CREATE_EXTRA_ARGS} + mv "$out/$(basename "${wks%.wks}")"*.direct "$out${IMAGE_NAME_SUFFIX}.wic" + rm -rf "$out/" +} +IMAGE_CMD_wic[vardepsexclude] = "WKS_FULL_PATH WKS_FILES TOPDIR" + +# Rebuild when the wks file or vars in WICVARS change +USING_WIC = "${@bb.utils.contains_any('IMAGE_FSTYPES', 'wic ' + ' '.join('wic.%s' % c for c in '${CONVERSIONTYPES}'.split()), '1', '', d)}" +WKS_FILE_CHECKSUM = "${@'${WKS_FULL_PATH}:%s' % os.path.exists('${WKS_FULL_PATH}') if '${USING_WIC}' else ''}" +do_image_wic[file-checksums] += "${WKS_FILE_CHECKSUM}" +do_image_wic[depends] += "wic-tools:do_populate_sysroot" +WKS_FILE_DEPENDS ??= '' +DEPENDS += "${@ '${WKS_FILE_DEPENDS}' if d.getVar('USING_WIC') else '' }" + +python do_write_wks_template () { + """Write out expanded template contents to WKS_FULL_PATH.""" + import re + + template_body = d.getVar('_WKS_TEMPLATE') + + # Remove any remnant variable references left behind by the expansion + # due to undefined variables + expand_var_regexp = re.compile(r"\${[^{}@\n\t :]+}") + while True: + new_body = re.sub(expand_var_regexp, '', template_body) + if new_body == template_body: + break + else: + template_body = new_body + + wks_file = d.getVar('WKS_FULL_PATH') + with open(wks_file, 'w') as f: + f.write(template_body) +} + +python () { + if d.getVar('USING_WIC'): + wks_file_u = d.getVar('WKS_FULL_PATH', False) + wks_file = d.expand(wks_file_u) + base, ext = os.path.splitext(wks_file) + if ext == '.in' and os.path.exists(wks_file): + wks_out_file = os.path.join(d.getVar('WORKDIR'), os.path.basename(base)) + d.setVar('WKS_FULL_PATH', wks_out_file) + d.setVar('WKS_TEMPLATE_PATH', wks_file_u) + d.setVar('WKS_FILE_CHECKSUM', '${WKS_TEMPLATE_PATH}:True') + + # We need to re-parse each time the file changes, and bitbake + # needs to be told about that explicitly. + bb.parse.mark_dependency(d, wks_file) + + try: + with open(wks_file, 'r') as f: + body = f.read() + except (IOError, OSError) as exc: + pass + else: + # Previously, I used expandWithRefs to get the dependency list + # and add it to WICVARS, but there's no point re-parsing the + # file in process_wks_template as well, so just put it in + # a variable and let the metadata deal with the deps. 
+ d.setVar('_WKS_TEMPLATE', body) + bb.build.addtask('do_write_wks_template', 'do_image_wic', None, d) +} + +# +# Write environment variables used by wic +# to tmp/sysroots//imgdata/.env +# +python do_rootfs_wicenv () { + wicvars = d.getVar('WICVARS') + if not wicvars: + return + + stdir = d.getVar('STAGING_DIR') + outdir = os.path.join(stdir, d.getVar('MACHINE'), 'imgdata') + bb.utils.mkdirhier(outdir) + basename = d.getVar('IMAGE_BASENAME') + with open(os.path.join(outdir, basename) + '.env', 'w') as envf: + for var in wicvars.split(): + value = d.getVar(var) + if value: + envf.write('%s="%s"\n' % (var, value.strip())) +} +addtask do_rootfs_wicenv after do_image before do_image_wic +do_rootfs_wicenv[vardeps] += "${WICVARS}" +do_rootfs_wicenv[prefuncs] = 'set_image_size' diff --git a/import-layers/yocto-poky/meta/classes/insane.bbclass b/import-layers/yocto-poky/meta/classes/insane.bbclass index 7bbe8b63a..0c11c3658 100644 --- a/import-layers/yocto-poky/meta/classes/insane.bbclass +++ b/import-layers/yocto-poky/meta/classes/insane.bbclass @@ -30,7 +30,7 @@ QA_SANE = "True" WARN_QA ?= "ldflags useless-rpaths rpaths staticdev libdir xorg-driver-abi \ textrel already-stripped incompatible-license files-invalid \ installed-vs-shipped compile-host-path install-host-path \ - pn-overrides infodir build-deps file-rdeps \ + pn-overrides infodir build-deps \ unknown-configure-option symlink-to-sysroot multilib \ invalid-packageconfig host-user-contaminated \ " @@ -38,7 +38,7 @@ ERROR_QA ?= "dev-so debug-deps dev-deps debug-files arch pkgconfig la \ perms dep-cmp pkgvarcheck perm-config perm-line perm-link \ split-strip packages-list pkgv-undefined var-undefined \ version-going-backwards expanded-d invalid-chars \ - license-checksum dev-elf \ + license-checksum dev-elf file-rdeps \ " FAKEROOT_QA = "host-user-contaminated" FAKEROOT_QA[doc] = "QA tests which need to run under fakeroot. If any \ @@ -138,6 +138,7 @@ def package_qa_get_machine_dict(d): "microblaze": (189, 0, 0, False, 32), "microblazeeb":(189, 0, 0, False, 32), "microblazeel":(189, 0, 0, True, 32), + "sh4": ( 42, 0, 0, True, 32), }, "uclinux-uclibc" : { "bfin": ( 106, 0, 0, True, 32), @@ -173,12 +174,14 @@ def package_qa_get_machine_dict(d): "linux-gnun32" : { "mips64": ( 8, 0, 0, False, 32), "mips64el": ( 8, 0, 0, True, 32), + "mipsisa64r6": ( 8, 0, 0, False, 32), + "mipsisa64r6el":( 8, 0, 0, True, 32), }, } # Add in any extra user supplied data which may come from a BSP layer, removing the # need to always change this class directly - extra_machdata = (d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS", True) or "").split() + extra_machdata = (d.getVar("PACKAGEQA_EXTRA_MACHDEFFUNCS") or "").split() for m in extra_machdata: call = m + "(machdata, d)" locs = { "machdata" : machdata, "d" : d} @@ -193,23 +196,23 @@ def package_qa_clean_path(path, d, pkg=None): TMPDIR is stripped, otherwise PKGDEST/pkg is stripped. 
""" if pkg: - path = path.replace(os.path.join(d.getVar("PKGDEST", True), pkg), "/") - return path.replace(d.getVar("TMPDIR", True), "/").replace("//", "/") + path = path.replace(os.path.join(d.getVar("PKGDEST"), pkg), "/") + return path.replace(d.getVar("TMPDIR"), "/").replace("//", "/") def package_qa_write_error(type, error, d): - logfile = d.getVar('QA_LOGFILE', True) + logfile = d.getVar('QA_LOGFILE') if logfile: - p = d.getVar('P', True) + p = d.getVar('P') with open(logfile, "a+") as f: f.write("%s: %s [%s]\n" % (p, error, type)) def package_qa_handle_error(error_class, error_msg, d): package_qa_write_error(error_class, error_msg, d) - if error_class in (d.getVar("ERROR_QA", True) or "").split(): + if error_class in (d.getVar("ERROR_QA") or "").split(): bb.error("QA Issue: %s [%s]" % (error_msg, error_class)) d.setVar("QA_SANE", False) return False - elif error_class in (d.getVar("WARN_QA", True) or "").split(): + elif error_class in (d.getVar("WARN_QA") or "").split(): bb.warn("QA Issue: %s [%s]" % (error_msg, error_class)) else: bb.note("QA Issue: %s [%s]" % (error_msg, error_class)) @@ -225,7 +228,7 @@ QAPATHTEST[libexec] = "package_qa_check_libexec" def package_qa_check_libexec(path,name, d, elf, messages): # Skip the case where the default is explicitly /usr/libexec - libexec = d.getVar('libexecdir', True) + libexec = d.getVar('libexecdir') if libexec == "/usr/libexec": return True @@ -246,7 +249,7 @@ def package_qa_check_rpath(file,name, d, elf, messages): if os.path.islink(file): return - bad_dirs = [d.getVar('BASE_WORKDIR', True), d.getVar('STAGING_DIR_TARGET', True)] + bad_dirs = [d.getVar('BASE_WORKDIR'), d.getVar('STAGING_DIR_TARGET')] phdrs = elf.run_objdump("-p", d) @@ -274,8 +277,8 @@ def package_qa_check_useless_rpaths(file, name, d, elf, messages): if os.path.islink(file): return - libdir = d.getVar("libdir", True) - base_libdir = d.getVar("base_libdir", True) + libdir = d.getVar("libdir") + base_libdir = d.getVar("base_libdir") phdrs = elf.run_objdump("-p", d) @@ -332,11 +335,11 @@ def package_qa_check_libdir(d): """ import re - pkgdest = d.getVar('PKGDEST', True) - base_libdir = d.getVar("base_libdir",True) + os.sep - libdir = d.getVar("libdir", True) + os.sep - libexecdir = d.getVar("libexecdir", True) + os.sep - exec_prefix = d.getVar("exec_prefix", True) + os.sep + pkgdest = d.getVar('PKGDEST') + base_libdir = d.getVar("base_libdir") + os.sep + libdir = d.getVar("libdir") + os.sep + libexecdir = d.getVar("libexecdir") + os.sep + exec_prefix = d.getVar("exec_prefix") + os.sep messages = [] @@ -351,10 +354,10 @@ def package_qa_check_libdir(d): # Skip subdirectories for any packages with libdir in INSANE_SKIP skippackages = [] for package in dirs: - if 'libdir' in (d.getVar('INSANE_SKIP_' + package, True) or "").split(): + if 'libdir' in (d.getVar('INSANE_SKIP_' + package) or "").split(): bb.note("Package %s skipping libdir QA test" % (package)) skippackages.append(package) - elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory' and package.endswith("-dbg"): + elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory' and package.endswith("-dbg"): bb.note("Package %s skipping libdir QA test for PACKAGE_DEBUG_SPLIT_STYLE equals debug-file-directory" % (package)) skippackages.append(package) for package in skippackages: @@ -395,7 +398,7 @@ def package_qa_check_dbg(path, name, d, elf, messages): if not "-dbg" in name and not "-ptest" in name: if '.debug' in path.split(os.path.sep): - messages("debug-files", "non debug package contains 
.debug directory: %s path %s" % \ + package_qa_add_message(messages, "debug-files", "non debug package contains .debug directory: %s path %s" % \ (name, package_qa_clean_path(path,d))) QAPATHTEST[perms] = "package_qa_check_perm" @@ -405,7 +408,6 @@ def package_qa_check_perm(path,name,d, elf, messages): """ return - QAPATHTEST[unsafe-references-in-scripts] = "package_qa_check_unsafe_references_in_scripts" def package_qa_check_unsafe_references_in_scripts(path, name, d, elf, messages): """ @@ -417,13 +419,13 @@ def package_qa_check_unsafe_references_in_scripts(path, name, d, elf, messages): if not elf: import stat import subprocess - pn = d.getVar('PN', True) + pn = d.getVar('PN') # Ensure we're checking an executable script statinfo = os.stat(path) if bool(statinfo.st_mode & stat.S_IXUSR): # grep shell scripts for possible references to /exec_prefix/ - exec_prefix = d.getVar('exec_prefix', True) + exec_prefix = d.getVar('exec_prefix') statement = "grep -e '%s/[^ :]\{1,\}/[^ :]\{1,\}' %s > /dev/null" % (exec_prefix, path) if subprocess.call(statement, shell=True) == 0: error_msg = pn + ": Found a reference to %s/ in %s" % (exec_prefix, path) @@ -447,19 +449,19 @@ def unsafe_references_skippable(path, name, d): return True # Skip unusual rootfs layouts which make these tests irrelevant - exec_prefix = d.getVar('exec_prefix', True) + exec_prefix = d.getVar('exec_prefix') if exec_prefix == "": return True - pkgdest = d.getVar('PKGDEST', True) + pkgdest = d.getVar('PKGDEST') pkgdest = pkgdest + "/" + name pkgdest = os.path.abspath(pkgdest) - base_bindir = pkgdest + d.getVar('base_bindir', True) - base_sbindir = pkgdest + d.getVar('base_sbindir', True) - base_libdir = pkgdest + d.getVar('base_libdir', True) - bindir = pkgdest + d.getVar('bindir', True) - sbindir = pkgdest + d.getVar('sbindir', True) - libdir = pkgdest + d.getVar('libdir', True) + base_bindir = pkgdest + d.getVar('base_bindir') + base_sbindir = pkgdest + d.getVar('base_sbindir') + base_libdir = pkgdest + d.getVar('base_libdir') + bindir = pkgdest + d.getVar('bindir') + sbindir = pkgdest + d.getVar('sbindir') + libdir = pkgdest + d.getVar('libdir') if base_bindir == bindir and base_sbindir == sbindir and base_libdir == libdir: return True @@ -481,13 +483,13 @@ def package_qa_check_arch(path,name,d, elf, messages): if not elf: return - target_os = d.getVar('TARGET_OS', True) - target_arch = d.getVar('TARGET_ARCH', True) - provides = d.getVar('PROVIDES', True) - bpn = d.getVar('BPN', True) + target_os = d.getVar('TARGET_OS') + target_arch = d.getVar('TARGET_ARCH') + provides = d.getVar('PROVIDES') + bpn = d.getVar('BPN') if target_arch == "allarch": - pn = d.getVar('PN', True) + pn = d.getVar('PN') package_qa_add_message(messages, "arch", pn + ": Recipe inherits the allarch class, but has packaged architecture-specific binaries") return @@ -507,7 +509,7 @@ def package_qa_check_arch(path,name,d, elf, messages): # Check the architecture and endiannes of the binary is_32 = (("virtual/kernel" in provides) or bb.data.inherits_class("module", d)) and \ - (target_os == "linux-gnux32" or re.match('mips64.*32', d.getVar('DEFAULTTUNE', True))) + (target_os == "linux-gnux32" or re.match('mips64.*32', d.getVar('DEFAULTTUNE'))) if not ((machine == elf.machine()) or is_32): package_qa_add_message(messages, "arch", "Architecture did not match (%s, expected %s) on %s" % \ (oe.qa.elf_machine_to_string(elf.machine()), oe.qa.elf_machine_to_string(machine), package_qa_clean_path(path,d))) @@ -524,7 +526,7 @@ def package_qa_check_desktop(path, name, d, 
elf, messages): Run all desktop files through desktop-file-validate. """ if path.endswith(".desktop"): - desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE',True),'desktop-file-validate') + desktop_file_validate = os.path.join(d.getVar('STAGING_BINDIR_NATIVE'),'desktop-file-validate') output = os.popen("%s %s" % (desktop_file_validate, path)) # This only produces output on errors for l in output: @@ -566,9 +568,9 @@ def package_qa_hash_style(path, name, d, elf, messages): if os.path.islink(path): return - gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS', True) + gnu_hash = "--hash-style=gnu" in d.getVar('LDFLAGS') if not gnu_hash: - gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS', True) + gnu_hash = "--hash-style=both" in d.getVar('LDFLAGS') if not gnu_hash: return @@ -607,7 +609,7 @@ def package_qa_check_buildpaths(path, name, d, elf, messages): if path.find(name + "/CONTROL/") != -1 or path.find(name + "/DEBIAN/") != -1: return - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') with open(path, 'rb') as f: file_content = f.read().decode('utf-8', errors='ignore') if tmpdir in file_content: @@ -626,8 +628,8 @@ def package_qa_check_xorg_driver_abi(path, name, d, elf, messages): driverdir = d.expand("${libdir}/xorg/modules/drivers/") if driverdir in path and path.endswith(".so"): - mlprefix = d.getVar('MLPREFIX', True) or '' - for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name, True) or ""): + mlprefix = d.getVar('MLPREFIX') or '' + for rdep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + name) or ""): if rdep.startswith("%sxorg-abi-" % mlprefix): return package_qa_add_message(messages, "xorg-driver-abi", "Package %s contains Xorg driver (%s) but no xorg-abi- dependencies" % (name, os.path.basename(path))) @@ -650,9 +652,9 @@ def package_qa_check_symlink_to_sysroot(path, name, d, elf, messages): if os.path.islink(path): target = os.readlink(path) if os.path.isabs(target): - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') if target.startswith(tmpdir): - trimmed = path.replace(os.path.join (d.getVar("PKGDEST", True), name), "") + trimmed = path.replace(os.path.join (d.getVar("PKGDEST"), name), "") package_qa_add_message(messages, "symlink-to-sysroot", "Symlink %s in %s points to TMPDIR" % (trimmed, name)) # Check license variables @@ -664,17 +666,17 @@ python populate_lic_qa_checksum() { import tempfile sane = True - lic_files = d.getVar('LIC_FILES_CHKSUM', True) or '' - lic = d.getVar('LICENSE', True) - pn = d.getVar('PN', True) + lic_files = d.getVar('LIC_FILES_CHKSUM') or '' + lic = d.getVar('LICENSE') + pn = d.getVar('PN') if lic == "CLOSED": return - if not lic_files and d.getVar('SRC_URI', True): + if not lic_files and d.getVar('SRC_URI'): sane = package_qa_handle_error("license-checksum", pn + ": Recipe file fetches files and does not have license file information (LIC_FILES_CHKSUM)", d) - srcdir = d.getVar('S', True) + srcdir = d.getVar('S') for url in lic_files.split(): try: @@ -696,17 +698,21 @@ python populate_lic_qa_checksum() { if (not beginline) and (not endline): md5chksum = bb.utils.md5_file(srclicfile) + with open(srclicfile, 'rb') as f: + license = f.read() else: fi = open(srclicfile, 'rb') fo = tempfile.NamedTemporaryFile(mode='wb', prefix='poky.', suffix='.tmp', delete=False) tmplicfile = fo.name; lineno = 0 linesout = 0 + license = [] for line in fi: lineno += 1 if (lineno >= beginline): if ((lineno <= endline) or not endline): fo.write(line) + license.append(line) linesout += 1 else: break @@ -714,6 +720,7 
@@ python populate_lic_qa_checksum() { fo.close() fi.close() md5chksum = bb.utils.md5_file(tmplicfile) + license = b''.join(license) os.unlink(tmplicfile) if recipemd5 == md5chksum: @@ -722,6 +729,30 @@ python populate_lic_qa_checksum() { if recipemd5: msg = pn + ": The LIC_FILES_CHKSUM does not match for " + url msg = msg + "\n" + pn + ": The new md5 checksum is " + md5chksum + try: + license_lines = license.decode('utf-8').split('\n') + except: + # License text might not be valid UTF-8, in which + # case we don't know how to include it in our output + # and have to skip it. + pass + else: + max_lines = int(d.getVar('QA_MAX_LICENSE_LINES') or 20) + if not license_lines or license_lines[-1] != '': + # Ensure that our license text ends with a line break + # (will be added with join() below). + license_lines.append('') + remove = len(license_lines) - max_lines + if remove > 0: + start = max_lines // 2 + end = start + remove - 1 + del license_lines[start:end] + license_lines.insert(start, '...') + msg = msg + "\n" + pn + ": Here is the selected license text:" + \ + "\n" + \ + "{:v^70}".format(" beginline=%d " % beginline if beginline else "") + \ + "\n" + "\n".join(license_lines) + \ + "{:^^70}".format(" endline=%d " % endline if endline else "") if beginline: if endline: srcfiledesc = "%s (lines %d through to %d)" % (srclicfile, beginline, endline) @@ -752,8 +783,9 @@ def package_qa_check_staged(path,d): """ sane = True - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') workdir = os.path.join(tmpdir, "work") + recipesysroot = d.getVar("RECIPE_SYSROOT") if bb.data.inherits_class("native", d) or bb.data.inherits_class("cross", d): pkgconfigcheck = workdir @@ -769,12 +801,14 @@ def package_qa_check_staged(path,d): if file.endswith(".la"): with open(path) as f: file_content = f.read() + file_content = file_content.replace(recipesysroot, "") if workdir in file_content: error_msg = "%s failed sanity test (workdir) in path %s" % (file,root) sane = package_qa_handle_error("la", error_msg, d) elif file.endswith(".pc"): with open(path) as f: file_content = f.read() + file_content = file_content.replace(recipesysroot, "") if pkgconfigcheck in file_content: error_msg = "%s failed sanity test (tmpdir) in path %s" % (file,root) sane = package_qa_handle_error("pkgconfig", error_msg, d) @@ -803,8 +837,8 @@ def package_qa_walk(warnfuncs, errorfuncs, skip, package, d): import oe.qa #if this will throw an exception, then fix the dict above - target_os = d.getVar('TARGET_OS', True) - target_arch = d.getVar('TARGET_ARCH', True) + target_os = d.getVar('TARGET_OS') + target_arch = d.getVar('TARGET_ARCH') warnings = {} errors = {} @@ -833,11 +867,10 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): if not "-dbg" in pkg and not "packagegroup-" in pkg and not "-image" in pkg: localdata = bb.data.createCopy(d) - localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES', True) + ':' + pkg) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES') + ':' + pkg) # Now check the RDEPENDS - rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS', True) or "") + rdepends = bb.utils.explode_deps(localdata.getVar('RDEPENDS') or "") # Now do the sanity check!!! 
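Note (illustration only, not part of the patch): the new license-mismatch reporting above keeps at most QA_MAX_LICENSE_LINES lines of the offending license text and replaces the middle with a single "..." marker before printing it. A minimal plain-Python sketch of that trimming, using an invented input list:

    def trim_license(lines, max_lines=20):
        # Ensure the excerpt ends with an empty entry so "\n".join() adds a
        # trailing line break, mirroring the hunk above.
        if not lines or lines[-1] != '':
            lines.append('')
        remove = len(lines) - max_lines
        if remove > 0:
            start = max_lines // 2
            # Delete the middle of the text and drop a "..." placeholder in.
            del lines[start:start + remove - 1]
            lines.insert(start, '...')
        return "\n".join(lines)

    print(trim_license(["line %d" % i for i in range(100)], max_lines=6))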
if "build-deps" not in skip: @@ -853,7 +886,7 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): if rdep_data and 'PN' in rdep_data and rdep_data['PN'] in taskdeps: continue if not rdep_data or not 'PN' in rdep_data: - pkgdata_dir = d.getVar("PKGDATA_DIR", True) + pkgdata_dir = d.getVar("PKGDATA_DIR") try: possibles = os.listdir("%s/runtime-rprovides/%s/" % (pkgdata_dir, rdepend)) except OSError: @@ -873,14 +906,15 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): if "file-rdeps" not in skip: ignored_file_rdeps = set(['/bin/sh', '/usr/bin/env', 'rtld(GNU_HASH)']) if bb.data.inherits_class('nativesdk', d): - ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl']) + ignored_file_rdeps |= set(['/bin/bash', '/usr/bin/perl', 'perl']) # For Saving the FILERDEPENDS filerdepends = {} rdep_data = oe.packagedata.read_subpkgdata(pkg, d) for key in rdep_data: if key.startswith("FILERDEPENDS_"): - for subkey in rdep_data[key].split(): - if subkey not in ignored_file_rdeps: + for subkey in bb.utils.explode_deps(rdep_data[key]): + if subkey not in ignored_file_rdeps and \ + not subkey.startswith('perl('): # We already know it starts with FILERDEPENDS_ filerdepends[subkey] = key[13:] @@ -895,11 +929,10 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): sub_rdeps = rdep_data.get("RDEPENDS_" + rdep) if not sub_rdeps: continue - for sub_rdep in sub_rdeps.split(): + for sub_rdep in bb.utils.explode_deps(sub_rdeps): if sub_rdep in done: continue - if not sub_rdep.startswith('(') and \ - oe.packagedata.has_subpkgdata(sub_rdep, d): + if oe.packagedata.has_subpkgdata(sub_rdep, d): # It's a new rdep done.append(sub_rdep) new.append(sub_rdep) @@ -912,16 +945,20 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): # The python is not a package, but python-core provides it, so # skip checking /usr/bin/python if python is in the rdeps, in # case there is a RDEPENDS_pkg = "python" in the recipe. - for py in [ d.getVar('MLPREFIX', True) + "python", "python" ]: + for py in [ d.getVar('MLPREFIX') + "python", "python" ]: if py in done: filerdepends.pop("/usr/bin/python",None) done.remove(py) for rdep in done: + # The file dependencies may contain package names, e.g., + # perl + filerdepends.pop(rdep,None) + # For Saving the FILERPROVIDES, RPROVIDES and FILES_INFO rdep_data = oe.packagedata.read_subpkgdata(rdep, d) for key in rdep_data: if key.startswith("FILERPROVIDES_") or key.startswith("RPROVIDES_"): - for subkey in rdep_data[key].split(): + for subkey in bb.utils.explode_deps(rdep_data[key]): filerdepends.pop(subkey,None) # Add the files list to the rprovides if key == "FILES_INFO": @@ -935,17 +972,16 @@ def package_qa_check_rdepends(pkg, pkgdest, skip, taskdeps, packages, d): for key in filerdepends: error_msg = "%s contained in package %s requires %s, but no providers found in RDEPENDS_%s?" 
% \ (filerdepends[key].replace("_%s" % pkg, "").replace("@underscore@", "_"), pkg, key, pkg) - package_qa_handle_error("file-rdeps", error_msg, d) + package_qa_handle_error("file-rdeps", error_msg, d) def package_qa_check_deps(pkg, pkgdest, skip, d): localdata = bb.data.createCopy(d) localdata.setVar('OVERRIDES', pkg) - bb.data.update_data(localdata) def check_valid_deps(var): try: - rvar = bb.utils.explode_dep_versions2(localdata.getVar(var, True) or "") + rvar = bb.utils.explode_dep_versions2(localdata.getVar(var) or "") except ValueError as e: bb.fatal("%s_%s: %s" % (var, pkg, e)) for dep in rvar: @@ -968,10 +1004,10 @@ def package_qa_check_expanded_d(package, d, messages): variables, warn the user to use it correctly. """ sane = True - expanded_d = d.getVar('D', True) + expanded_d = d.getVar('D') for var in 'FILES','pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm': - bbvar = d.getVar(var + "_" + package, True) or "" + bbvar = d.getVar(var + "_" + package) or "" if expanded_d in bbvar: if var == 'FILES': package_qa_add_message(messages, "expanded-d", "FILES in %s recipe should not contain the ${D} variable as it references the local build directory not the target filesystem, best solution is to remove the ${D} reference" % package) @@ -984,7 +1020,7 @@ def package_qa_check_expanded_d(package, d, messages): def package_qa_check_encoding(keys, encode, d): def check_encoding(key, enc): sane = True - value = d.getVar(key, True) + value = d.getVar(key) if value: try: s = value.encode(enc) @@ -1009,8 +1045,8 @@ def package_qa_check_host_user(path, name, d, elf, messages): if not os.path.lexists(path): return - dest = d.getVar('PKGDEST', True) - pn = d.getVar('PN', True) + dest = d.getVar('PKGDEST') + pn = d.getVar('PN') home = os.path.join(dest, 'home') if path == home or path.startswith(home + os.sep): return @@ -1023,12 +1059,12 @@ def package_qa_check_host_user(path, name, d, elf, messages): raise else: rootfs_path = path[len(dest):] - check_uid = int(d.getVar('HOST_USER_UID', True)) + check_uid = int(d.getVar('HOST_USER_UID')) if stat.st_uid == check_uid: package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by uid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_uid)) return False - check_gid = int(d.getVar('HOST_USER_GID', True)) + check_gid = int(d.getVar('HOST_USER_GID')) if stat.st_gid == check_gid: package_qa_add_message(messages, "host-user-contaminated", "%s: %s is owned by gid %d, which is the same as the user running bitbake. This may be due to host contamination" % (pn, rootfs_path, check_gid)) return False @@ -1046,8 +1082,8 @@ python do_package_qa () { # Check non UTF-8 characters on recipe's metadata package_qa_check_encoding(['DESCRIPTION', 'SUMMARY', 'LICENSE', 'SECTION'], 'utf-8', d) - logdir = d.getVar('T', True) - pkg = d.getVar('PN', True) + logdir = d.getVar('T') + pkg = d.getVar('PN') # Check the compile log for host contamination compilelog = os.path.join(logdir,"log.do_compile") @@ -1070,8 +1106,8 @@ python do_package_qa () { package_qa_handle_error("install-host-path", msg, d) # Scan the packages... 
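Note (illustrative sketch, not from the patch): the host-user-contaminated test above flags files under PKGDEST whose owner or group matches the uid/gid of the user running bitbake (HOST_USER_UID / HOST_USER_GID). Roughly, in plain Python with a made-up path:

    import os, errno

    def host_contaminated(path, build_uid, build_gid):
        try:
            st = os.stat(path)
        except OSError as e:
            if e.errno == errno.ENOENT:   # dangling symlink, handled the same way in the class
                return False
            raise
        return st.st_uid == build_uid or st.st_gid == build_gid

    # e.g. host_contaminated('tmp/work/.../packages-split/foo/usr/bin/foo',
    #                        os.getuid(), os.getgid())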
- pkgdest = d.getVar('PKGDEST', True) - packages = set((d.getVar('PACKAGES', True) or '').split()) + pkgdest = d.getVar('PKGDEST') + packages = set((d.getVar('PACKAGES') or '').split()) cpath = oe.cachedpath.CachedPath() global pkgfiles @@ -1100,7 +1136,7 @@ python do_package_qa () { testmatrix = d.getVarFlags(matrix_name) or {} g = globals() warnchecks = [] - for w in (d.getVar("WARN_QA", True) or "").split(): + for w in (d.getVar("WARN_QA") or "").split(): if w in skip: continue if w in testmatrix and testmatrix[w] in g: @@ -1109,7 +1145,7 @@ python do_package_qa () { oe.utils.write_ld_so_conf(d) errorchecks = [] - for e in (d.getVar("ERROR_QA", True) or "").split(): + for e in (d.getVar("ERROR_QA") or "").split(): if e in skip: continue if e in testmatrix and testmatrix[e] in g: @@ -1118,7 +1154,8 @@ python do_package_qa () { oe.utils.write_ld_so_conf(d) return warnchecks, errorchecks - skip = (d.getVar('INSANE_SKIP_' + package, True) or "").split() + skip = set((d.getVar('INSANE_SKIP') or "").split() + + (d.getVar('INSANE_SKIP_' + package) or "").split()) if skip: bb.note("Package %s skipping QA tests: %s" % (package, str(skip))) @@ -1138,15 +1175,18 @@ python do_package_qa () { package_qa_check_rdepends(package, pkgdest, skip, taskdeps, packages, d) package_qa_check_deps(package, pkgdest, skip, d) - if 'libdir' in d.getVar("ALL_QA", True).split(): + if 'libdir' in d.getVar("ALL_QA").split(): package_qa_check_libdir(d) - qa_sane = d.getVar("QA_SANE", True) + qa_sane = d.getVar("QA_SANE") if not qa_sane: bb.fatal("QA run found fatal errors. Please consider fixing them.") bb.note("DONE with PACKAGE QA") } +# binutils is used for most checks, so need to set as dependency +# POPULATESYSROOTDEPS is defined in staging class. +do_package_qa[depends] += "${POPULATESYSROOTDEPS}" do_package_qa[vardepsexclude] = "BB_TASKDEPDATA" do_package_qa[rdeptask] = "do_packagedata" addtask do_package_qa after do_packagedata do_package before do_build @@ -1174,7 +1214,7 @@ python do_qa_configure() { ########################################################################### configs = [] - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') if bb.data.inherits_class('autotools', d): bb.note("Checking autotools environment for common misconfiguration") @@ -1195,16 +1235,16 @@ Rerun configure task after fixing this.""") # Check gettext configuration and dependencies are correct ########################################################################### - cnf = d.getVar('EXTRA_OECONF', True) or "" - if "gettext" not in d.getVar('P', True) and "gcc-runtime" not in d.getVar('P', True) and "--disable-nls" not in cnf: - ml = d.getVar("MLPREFIX", True) or "" + cnf = d.getVar('EXTRA_OECONF') or "" + if "gettext" not in d.getVar('P') and "gcc-runtime" not in d.getVar('P') and "--disable-nls" not in cnf: + ml = d.getVar("MLPREFIX") or "" if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('nativesdk', d): gt = "gettext-native" elif bb.data.inherits_class('cross-canadian', d): gt = "nativesdk-gettext" else: gt = "virtual/" + ml + "gettext" - deps = bb.utils.explode_deps(d.getVar('DEPENDS', True) or "") + deps = bb.utils.explode_deps(d.getVar('DEPENDS') or "") if gt not in deps: for config in configs: gnu = "grep \"^[[:space:]]*AM_GNU_GETTEXT\" %s >/dev/null" % config @@ -1219,40 +1259,40 @@ Missing inherit gettext?""" % (gt, config)) bb.note("Checking configure output for unrecognised options") try: flag = 
"WARNING: unrecognized options:" - log = os.path.join(d.getVar('B', True), 'config.log') + log = os.path.join(d.getVar('B'), 'config.log') output = subprocess.check_output(['grep', '-F', flag, log]).decode("utf-8").replace(', ', ' ') options = set() for line in output.splitlines(): options |= set(line.partition(flag)[2].split()) - whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST", True).split()) + whitelist = set(d.getVar("UNKNOWN_CONFIGURE_WHITELIST").split()) options -= whitelist if options: - pn = d.getVar('PN', True) + pn = d.getVar('PN') error_msg = pn + ": configure was passed unrecognised options: " + " ".join(options) package_qa_handle_error("unknown-configure-option", error_msg, d) except subprocess.CalledProcessError: pass # Check invalid PACKAGECONFIG - pkgconfig = (d.getVar("PACKAGECONFIG", True) or "").split() + pkgconfig = (d.getVar("PACKAGECONFIG") or "").split() if pkgconfig: pkgconfigflags = d.getVarFlags("PACKAGECONFIG") or {} for pconfig in pkgconfig: if pconfig not in pkgconfigflags: - pn = d.getVar('PN', True) + pn = d.getVar('PN') error_msg = "%s: invalid PACKAGECONFIG: %s" % (pn, pconfig) package_qa_handle_error("invalid-packageconfig", error_msg, d) - qa_sane = d.getVar("QA_SANE", True) + qa_sane = d.getVar("QA_SANE") if not qa_sane: bb.fatal("Fatal QA errors found, failing task.") } python do_qa_unpack() { - src_uri = d.getVar('SRC_URI', True) - s_dir = d.getVar('S', True) + src_uri = d.getVar('SRC_URI') + s_dir = d.getVar('S') if src_uri and not os.path.exists(s_dir): - bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN', True), d.getVar('S', False), s_dir)) + bb.warn('%s: the directory %s (%s) pointed to by the S variable doesn\'t exist - please set S within the recipe to point to where the source has been unpacked to' % (d.getVar('PN'), d.getVar('S', False), s_dir)) } # The Staging Func, to check all staging @@ -1268,7 +1308,7 @@ do_configure[postfuncs] += "do_qa_configure " do_unpack[postfuncs] += "do_qa_unpack" python () { - tests = d.getVar('ALL_QA', True).split() + tests = d.getVar('ALL_QA').split() if "desktop" in tests: d.appendVar("PACKAGE_DEPENDS", " desktop-file-utils-native") @@ -1277,7 +1317,7 @@ python () { ########################################################################### # Checking ${FILESEXTRAPATHS} - extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "") + extrapaths = (d.getVar("FILESEXTRAPATHS") or "") if '__default' not in extrapaths.split(":"): msg = "FILESEXTRAPATHS-variable, must always use _prepend (or _append)\n" msg += "type of assignment, and don't forget the colon.\n" @@ -1289,29 +1329,29 @@ python () { msg += "%s\n" % extrapaths bb.warn(msg) - overrides = d.getVar('OVERRIDES', True).split(':') - pn = d.getVar('PN', True) + overrides = d.getVar('OVERRIDES').split(':') + pn = d.getVar('PN') if pn in overrides: - msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' % (d.getVar("FILE", True), pn) + msg = 'Recipe %s has PN of "%s" which is in OVERRIDES, this can result in unexpected behaviour.' 
% (d.getVar("FILE"), pn) package_qa_handle_error("pn-overrides", msg, d) issues = [] - if (d.getVar('PACKAGES', True) or "").split(): - for dep in (d.getVar('QADEPENDS', True) or "").split(): + if (d.getVar('PACKAGES') or "").split(): + for dep in (d.getVar('QADEPENDS') or "").split(): d.appendVarFlag('do_package_qa', 'depends', " %s:do_populate_sysroot" % dep) for var in 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RCONFLICTS', 'RPROVIDES', 'RREPLACES', 'FILES', 'pkg_preinst', 'pkg_postinst', 'pkg_prerm', 'pkg_postrm', 'ALLOW_EMPTY': if d.getVar(var, False): issues.append(var) - fakeroot_tests = d.getVar('FAKEROOT_QA', True).split() + fakeroot_tests = d.getVar('FAKEROOT_QA').split() if set(tests) & set(fakeroot_tests): d.setVarFlag('do_package_qa', 'fakeroot', '1') d.appendVarFlag('do_package_qa', 'depends', ' virtual/fakeroot-native:do_populate_sysroot') else: d.setVarFlag('do_package_qa', 'rdeptask', '') for i in issues: - package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE", True), i), d) - qa_sane = d.getVar("QA_SANE", True) + package_qa_handle_error("pkgvarcheck", "%s: Variable %s is set as not being package specific, please fix this." % (d.getVar("FILE"), i), d) + qa_sane = d.getVar("QA_SANE") if not qa_sane: bb.fatal("Fatal QA errors found, failing task.") } diff --git a/import-layers/yocto-poky/meta/classes/kernel-arch.bbclass b/import-layers/yocto-poky/meta/classes/kernel-arch.bbclass index ea976c66b..d036fcf20 100644 --- a/import-layers/yocto-poky/meta/classes/kernel-arch.bbclass +++ b/import-layers/yocto-poky/meta/classes/kernel-arch.bbclass @@ -19,7 +19,7 @@ valid_archs = "alpha cris ia64 \ def map_kernel_arch(a, d): import re - valid_archs = d.getVar('valid_archs', True).split() + valid_archs = d.getVar('valid_archs').split() if re.match('(i.86|athlon|x86.64)$', a): return 'x86' elif re.match('armeb$', a): return 'arm' @@ -32,9 +32,11 @@ def map_kernel_arch(a, d): elif re.match('microblazee[bl]', a): return 'microblaze' elif a in valid_archs: return a else: + if not d.getVar("TARGET_OS").startswith("linux"): + return a bb.error("cannot map '%s' to a linux kernel architecture" % a) -export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH', True), d)}" +export ARCH = "${@map_kernel_arch(d.getVar('TARGET_ARCH'), d)}" def map_uboot_arch(a, d): import re @@ -43,7 +45,7 @@ def map_uboot_arch(a, d): elif re.match('i.86$', a): return 'x86' return a -export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH', True), d)}" +export UBOOT_ARCH = "${@map_uboot_arch(d.getVar('ARCH'), d)}" # Set TARGET_??_KERNEL_ARCH in the machine .conf to set architecture # specific options necessary for building the kernel and modules. 
@@ -57,4 +59,5 @@ HOST_AR_KERNEL_ARCH ?= "${TARGET_AR_KERNEL_ARCH}" KERNEL_CC = "${CCACHE}${HOST_PREFIX}gcc ${HOST_CC_KERNEL_ARCH} -fuse-ld=bfd" KERNEL_LD = "${CCACHE}${HOST_PREFIX}ld.bfd ${HOST_LD_KERNEL_ARCH}" KERNEL_AR = "${CCACHE}${HOST_PREFIX}ar ${HOST_AR_KERNEL_ARCH}" +TOOLCHAIN = "gcc" diff --git a/import-layers/yocto-poky/meta/classes/kernel-fitimage.bbclass b/import-layers/yocto-poky/meta/classes/kernel-fitimage.bbclass index 05be1f070..179185b6b 100644 --- a/import-layers/yocto-poky/meta/classes/kernel-fitimage.bbclass +++ b/import-layers/yocto-poky/meta/classes/kernel-fitimage.bbclass @@ -1,13 +1,15 @@ inherit kernel-uboot uboot-sign python __anonymous () { - kerneltypes = d.getVar('KERNEL_IMAGETYPES', True) or "" + kerneltypes = d.getVar('KERNEL_IMAGETYPES') or "" if 'fitImage' in kerneltypes.split(): - depends = d.getVar("DEPENDS", True) + depends = d.getVar("DEPENDS") depends = "%s u-boot-mkimage-native dtc-native" % depends d.setVar("DEPENDS", depends) - if d.getVar("UBOOT_ARCH", True) == "x86": + if d.getVar("UBOOT_ARCH") == "mips": + replacementtype = "vmlinuz.bin" + elif d.getVar("UBOOT_ARCH") == "x86": replacementtype = "bzImage" else: replacementtype = "zImage" @@ -15,19 +17,19 @@ python __anonymous () { # Override KERNEL_IMAGETYPE_FOR_MAKE variable, which is internal # to kernel.bbclass . We have to override it, since we pack zImage # (at least for now) into the fitImage . - typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE", True) or "" + typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE") or "" if 'fitImage' in typeformake.split(): d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake.replace('fitImage', replacementtype)) - image = d.getVar('INITRAMFS_IMAGE', True) + image = d.getVar('INITRAMFS_IMAGE') if image: d.appendVarFlag('do_assemble_fitimage_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete') # Verified boot will sign the fitImage and append the public key to - # U-boot dtb. We ensure the U-Boot dtb is deployed before assembling + # U-Boot dtb. We ensure the U-Boot dtb is deployed before assembling # the fitImage: - if d.getVar('UBOOT_SIGN_ENABLE', True): - uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot', True) or 'u-boot' + if d.getVar('UBOOT_SIGN_ENABLE') == "1": + uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot' d.appendVarFlag('do_assemble_fitimage', 'depends', ' %s:do_deploy' % uboot_pn) } @@ -96,9 +98,9 @@ fitimage_emit_section_kernel() { kernel_csum="sha1" ENTRYPOINT=${UBOOT_ENTRYPOINT} - if test -n "${UBOOT_ENTRYSYMBOL}"; then - ENTRYPOINT=`${HOST_PREFIX}nm ${S}/vmlinux | \ - awk '$4=="${UBOOT_ENTRYSYMBOL}" {print $2}'` + if [ -n "${UBOOT_ENTRYSYMBOL}" ]; then + ENTRYPOINT=`${HOST_PREFIX}nm vmlinux | \ + awk '$3=="${UBOOT_ENTRYSYMBOL}" {print "0x"$1;exit}'` fi cat << EOF >> ${1} @@ -229,9 +231,10 @@ EOF # # $1 ... .its filename # $2 ... Linux kernel ID -# $3 ... DTB image ID +# $3 ... DTB image name # $4 ... ramdisk ID # $5 ... config ID +# $6 ... 
default flag fitimage_emit_section_config() { conf_csum="sha1" @@ -244,6 +247,8 @@ fitimage_emit_section_config() { kernel_line="kernel = \"kernel@${2}\";" fdt_line="" ramdisk_line="" + setup_line="" + default_line="" if [ -n "${3}" ]; then conf_desc="${conf_desc}, FDT blob" @@ -260,10 +265,14 @@ fitimage_emit_section_config() { setup_line="setup = \"setup@${5}\";" fi + if [ "${6}" = "1" ]; then + default_line="default = \"conf@${3}\";" + fi + cat << EOF >> ${1} - default = "conf@1"; - conf@1 { - description = "${conf_desc}"; + ${default_line} + conf@${3} { + description = "${6} ${conf_desc}"; ${kernel_line} ${fdt_line} ${ramdisk_line} @@ -314,6 +323,7 @@ EOF fitimage_assemble() { kernelcount=1 dtbcount="" + DTBS="" ramdiskcount=${3} setupcount="" rm -f ${1} arch/${ARCH}/boot/${2} @@ -331,7 +341,7 @@ fitimage_assemble() { # # Step 2: Prepare a DTB image section # - if test -n "${KERNEL_DEVICETREE}"; then + if [ -n "${KERNEL_DEVICETREE}" ]; then dtbcount=1 for DTB in ${KERNEL_DEVICETREE}; do if echo ${DTB} | grep -q '/dts/'; then @@ -343,15 +353,16 @@ fitimage_assemble() { DTB_PATH="arch/${ARCH}/boot/${DTB}" fi - fitimage_emit_section_dtb ${1} ${dtbcount} ${DTB_PATH} - dtbcount=`expr ${dtbcount} + 1` + DTB=$(echo "${DTB}" | tr '/' '_') + DTBS="${DTBS} ${DTB}" + fitimage_emit_section_dtb ${1} ${DTB} ${DTB_PATH} done fi # # Step 3: Prepare a setup section. (For x86) # - if test -e arch/${ARCH}/boot/setup.bin ; then + if [ -e arch/${ARCH}/boot/setup.bin ]; then setupcount=1 fitimage_emit_section_setup ${1} "${setupcount}" arch/${ARCH}/boot/setup.bin fi @@ -362,7 +373,7 @@ fitimage_assemble() { if [ "x${ramdiskcount}" = "x1" ] ; then # Find and use the first initramfs image archive type we find for img in cpio.lz4 cpio.lzo cpio.lzma cpio.xz cpio.gz cpio; do - initramfs_path="${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE}-${MACHINE}.${img}" + initramfs_path="${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE_NAME}.${img}" echo "Using $initramfs_path" if [ -e "${initramfs_path}" ]; then fitimage_emit_section_ramdisk ${1} "${ramdiskcount}" "${initramfs_path}" @@ -375,7 +386,7 @@ fitimage_assemble() { # Force the first Kernel and DTB in the default config kernelcount=1 - if test -n "${dtbcount}"; then + if [ -n "${dtbcount}" ]; then dtbcount=1 fi @@ -384,7 +395,13 @@ fitimage_assemble() { # fitimage_emit_section_maint ${1} confstart - fitimage_emit_section_config ${1} "${kernelcount}" "${dtbcount}" "${ramdiskcount}" "${setupcount}" + if [ -n "${DTBS}" ]; then + i=1 + for DTB in ${DTBS}; do + fitimage_emit_section_config ${1} "${kernelcount}" "${DTB}" "${ramdiskcount}" "${setupcount}" "`expr ${i} = ${dtbcount}`" + i=`expr ${i} + 1` + done + fi fitimage_emit_section_maint ${1} sectend @@ -445,11 +462,11 @@ kernel_do_deploy_append() { if [ -n "${INITRAMFS_IMAGE}" ]; then echo "Copying fit-image-${INITRAMFS_IMAGE}.its source file..." 
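Note (sketch, not part of the patch): fitimage_assemble above now emits one configuration node per device tree instead of a single conf@1 — each DTB path is sanitised with tr '/' '_', and only the first entry (dtbcount stays 1) receives the default = line. The same selection in plain Python, with an invented DTB list:

    dtbs = ['am335x-boneblack.dtb', 'dts/overlays/example.dtbo']
    sanitized = [dtb.replace('/', '_') for dtb in dtbs]        # tr '/' '_'

    for i, dtb in enumerate(sanitized, start=1):
        is_default = (i == 1)              # `expr ${i} = ${dtbcount}` with dtbcount=1
        print('conf@%s%s' % (dtb, '  (default)' if is_default else ''))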
- its_initramfs_base_name="fitImage-its-${INITRAMFS_IMAGE}-${PV}-${PR}-${MACHINE}-${DATETIME}" - its_initramfs_symlink_name=fitImage-its-${INITRAMFS_IMAGE}-${MACHINE} + its_initramfs_base_name="fitImage-its-${INITRAMFS_IMAGE_NAME}-${PV}-${PR}-${DATETIME}" + its_initramfs_symlink_name=fitImage-its-${INITRAMFS_IMAGE_NAME} install -m 0644 fit-image-${INITRAMFS_IMAGE}.its ${DEPLOYDIR}/${its_initramfs_base_name}.its - fit_initramfs_base_name="fitImage-${INITRAMFS_IMAGE}-${PV}-${PR}-${MACHINE}-${DATETIME}" - fit_initramfs_symlink_name=fitImage-${INITRAMFS_IMAGE}-${MACHINE} + fit_initramfs_base_name="fitImage-${INITRAMFS_IMAGE_NAME}-${PV}-${PR}-${DATETIME}" + fit_initramfs_symlink_name=fitImage-${INITRAMFS_IMAGE_NAME} install -m 0644 arch/${ARCH}/boot/fitImage-${INITRAMFS_IMAGE} ${DEPLOYDIR}/${fit_initramfs_base_name}.bin fi diff --git a/import-layers/yocto-poky/meta/classes/kernel-grub.bbclass b/import-layers/yocto-poky/meta/classes/kernel-grub.bbclass index f7dcc0715..5d92f3b63 100644 --- a/import-layers/yocto-poky/meta/classes/kernel-grub.bbclass +++ b/import-layers/yocto-poky/meta/classes/kernel-grub.bbclass @@ -92,7 +92,7 @@ python __anonymous () { fi ''' - imagetypes = d.getVar('KERNEL_IMAGETYPES', True) + imagetypes = d.getVar('KERNEL_IMAGETYPES') imagetypes = re.sub(r'\.gz$', '', imagetypes) for type in imagetypes.split(): diff --git a/import-layers/yocto-poky/meta/classes/kernel-module-split.bbclass b/import-layers/yocto-poky/meta/classes/kernel-module-split.bbclass index 08d226276..5e10dcf73 100644 --- a/import-layers/yocto-poky/meta/classes/kernel-module-split.bbclass +++ b/import-layers/yocto-poky/meta/classes/kernel-module-split.bbclass @@ -22,6 +22,8 @@ if [ x"$D" = "x" ]; then fi } +PACKAGE_WRITE_DEPS += "kmod-native depmodwrapper-cross" + do_install_append() { install -d ${D}${sysconfdir}/modules-load.d/ ${D}${sysconfdir}/modprobe.d/ } @@ -31,6 +33,8 @@ PACKAGESPLITFUNCS_prepend = "split_kernel_module_packages " KERNEL_MODULES_META_PACKAGE ?= "kernel-modules" KERNEL_MODULE_PACKAGE_PREFIX ?= "" +KERNEL_MODULE_PACKAGE_SUFFIX ?= "-${KERNEL_VERSION}" +KERNEL_MODULE_PROVIDE_VIRTUAL ?= "1" python split_kernel_module_packages () { import re @@ -39,10 +43,10 @@ python split_kernel_module_packages () { def extract_modinfo(file): import tempfile, subprocess - tempfile.tempdir = d.getVar("WORKDIR", True) + tempfile.tempdir = d.getVar("WORKDIR") tf = tempfile.mkstemp() tmpfile = tf[1] - cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX", True) or "", file, tmpfile) + cmd = "%sobjcopy -j .modinfo -O binary %s %s" % (d.getVar("HOST_PREFIX") or "", file, tmpfile) subprocess.call(cmd, shell=True) f = open(tmpfile) l = f.read().split("\000") @@ -60,12 +64,12 @@ python split_kernel_module_packages () { def frob_metadata(file, pkg, pattern, format, basename): vals = extract_modinfo(file) - dvar = d.getVar('PKGD', True) + dvar = d.getVar('PKGD') # If autoloading is requested, output /etc/modules-load.d/.conf and append # appropriate modprobe commands to the postinst - autoloadlist = (d.getVar("KERNEL_MODULE_AUTOLOAD", True) or "").split() - autoload = d.getVar('module_autoload_%s' % basename, True) + autoloadlist = (d.getVar("KERNEL_MODULE_AUTOLOAD") or "").split() + autoload = d.getVar('module_autoload_%s' % basename) if autoload and autoload == basename: bb.warn("module_autoload_%s was replaced by KERNEL_MODULE_AUTOLOAD for cases where basename == module name, please drop it" % basename) if autoload and basename not in autoloadlist: @@ -79,15 +83,15 @@ python 
split_kernel_module_packages () { else: f.write('%s\n' % basename) f.close() - postinst = d.getVar('pkg_postinst_%s' % pkg, True) + postinst = d.getVar('pkg_postinst_%s' % pkg) if not postinst: bb.fatal("pkg_postinst_%s not defined" % pkg) - postinst += d.getVar('autoload_postinst_fragment', True) % (autoload or basename) + postinst += d.getVar('autoload_postinst_fragment') % (autoload or basename) d.setVar('pkg_postinst_%s' % pkg, postinst) # Write out any modconf fragment - modconflist = (d.getVar("KERNEL_MODULE_PROBECONF", True) or "").split() - modconf = d.getVar('module_conf_%s' % basename, True) + modconflist = (d.getVar("KERNEL_MODULE_PROBECONF") or "").split() + modconf = d.getVar('module_conf_%s' % basename) if modconf and basename in modconflist: name = '%s/etc/modprobe.d/%s.conf' % (dvar, basename) f = open(name, 'w') @@ -96,15 +100,15 @@ python split_kernel_module_packages () { elif modconf: bb.error("Please ensure module %s is listed in KERNEL_MODULE_PROBECONF since module_conf_%s is set" % (basename, basename)) - files = d.getVar('FILES_%s' % pkg, True) + files = d.getVar('FILES_%s' % pkg) files = "%s /etc/modules-load.d/%s.conf /etc/modprobe.d/%s.conf" % (files, basename, basename) d.setVar('FILES_%s' % pkg, files) if "description" in vals: - old_desc = d.getVar('DESCRIPTION_' + pkg, True) or "" + old_desc = d.getVar('DESCRIPTION_' + pkg) or "" d.setVar('DESCRIPTION_' + pkg, old_desc + "; " + vals["description"]) - rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "") + rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "") modinfo_deps = [] if "depends" in vals and vals["depends"] != "": for dep in vals["depends"].split(","): @@ -119,26 +123,33 @@ python split_kernel_module_packages () { # Avoid automatic -dev recommendations for modules ending with -dev. d.setVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs', 1) + # Provide virtual package without postfix + providevirt = d.getVar('KERNEL_MODULE_PROVIDE_VIRTUAL') + if providevirt == "1": + postfix = format.split('%s')[1] + d.setVar('RPROVIDES_' + pkg, pkg.replace(postfix, '')) + module_regex = '^(.*)\.k?o$' - module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX', True) - module_pattern = module_pattern_prefix + 'kernel-module-%s' + module_pattern_prefix = d.getVar('KERNEL_MODULE_PACKAGE_PREFIX') + module_pattern_suffix = d.getVar('KERNEL_MODULE_PACKAGE_SUFFIX') + module_pattern = module_pattern_prefix + 'kernel-module-%s' + module_pattern_suffix - postinst = d.getVar('pkg_postinst_modules', True) - postrm = d.getVar('pkg_postrm_modules', True) + postinst = d.getVar('pkg_postinst_modules') + postrm = d.getVar('pkg_postrm_modules') - modules = do_split_packages(d, root='/lib/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='kernel-%s' % (d.getVar("KERNEL_VERSION", True))) + modules = do_split_packages(d, root='${nonarch_base_libdir}/modules', file_regex=module_regex, output_pattern=module_pattern, description='%s kernel module', postinst=postinst, postrm=postrm, recursive=True, hook=frob_metadata, extra_depends='kernel-%s' % (d.getVar("KERNEL_VERSION"))) if modules: - metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE', True) + metapkg = d.getVar('KERNEL_MODULES_META_PACKAGE') d.appendVar('RDEPENDS_' + metapkg, ' '+' '.join(modules)) # If modules-load.d and modprobe.d are empty at this point, remove them to # avoid warnings. 
removedirs only raises an OSError if an empty # directory cannot be removed. - dvar = d.getVar('PKGD', True) + dvar = d.getVar('PKGD') for dir in ["%s/etc/modprobe.d" % (dvar), "%s/etc/modules-load.d" % (dvar), "%s/etc" % (dvar)]: if len(os.listdir(dir)) == 0: os.rmdir(dir) } -do_package[vardeps] += '${@" ".join(map(lambda s: "module_conf_" + s, (d.getVar("KERNEL_MODULE_PROBECONF", True) or "").split()))}' +do_package[vardeps] += '${@" ".join(map(lambda s: "module_conf_" + s, (d.getVar("KERNEL_MODULE_PROBECONF") or "").split()))}' diff --git a/import-layers/yocto-poky/meta/classes/kernel-uboot.bbclass b/import-layers/yocto-poky/meta/classes/kernel-uboot.bbclass index 345e7f5f3..87f02654f 100644 --- a/import-layers/yocto-poky/meta/classes/kernel-uboot.bbclass +++ b/import-layers/yocto-poky/meta/classes/kernel-uboot.bbclass @@ -1,15 +1,21 @@ uboot_prep_kimage() { - if test -e arch/${ARCH}/boot/compressed/vmlinux ; then + if [ -e arch/${ARCH}/boot/compressed/vmlinux ]; then vmlinux_path="arch/${ARCH}/boot/compressed/vmlinux" linux_suffix="" linux_comp="none" + elif [ -e arch/${ARCH}/boot/vmlinuz.bin ]; then + rm -f linux.bin + cp -l arch/${ARCH}/boot/vmlinuz.bin linux.bin + vmlinux_path="" + linux_suffix="" + linux_comp="none" else vmlinux_path="vmlinux" linux_suffix=".gz" linux_comp="gzip" fi - ${OBJCOPY} -O binary -R .note -R .comment -S "${vmlinux_path}" linux.bin + [ -n "${vmlinux_path}" ] && ${OBJCOPY} -O binary -R .note -R .comment -S "${vmlinux_path}" linux.bin if [ "${linux_comp}" != "none" ] ; then gzip -9 linux.bin diff --git a/import-layers/yocto-poky/meta/classes/kernel-uimage.bbclass b/import-layers/yocto-poky/meta/classes/kernel-uimage.bbclass index 340503a2d..1d8656e76 100644 --- a/import-layers/yocto-poky/meta/classes/kernel-uimage.bbclass +++ b/import-layers/yocto-poky/meta/classes/kernel-uimage.bbclass @@ -1,8 +1,8 @@ inherit kernel-uboot python __anonymous () { - if "uImage" in (d.getVar('KERNEL_IMAGETYPES', True) or "").split(): - depends = d.getVar("DEPENDS", True) + if "uImage" in (d.getVar('KERNEL_IMAGETYPES') or "").split(): + depends = d.getVar("DEPENDS") depends = "%s u-boot-mkimage-native" % depends d.setVar("DEPENDS", depends) @@ -11,27 +11,25 @@ python __anonymous () { # to build uImage using the kernel build system if and only if # KEEPUIMAGE == yes. Otherwise, we pack compressed vmlinux into # the uImage . 
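Note (illustration, not part of the patch): with the kernel-module-split changes above, split module packages pick up a KERNEL_MODULE_PACKAGE_SUFFIX (-${KERNEL_VERSION} by default) and, when KERNEL_MODULE_PROVIDE_VIRTUAL is "1", RPROVIDE the old unversioned name so existing RDEPENDS keep resolving. A rough plain-Python rendering with example values:

    prefix = ''                          # KERNEL_MODULE_PACKAGE_PREFIX
    suffix = '-4.10.17'                  # KERNEL_MODULE_PACKAGE_SUFFIX, example version
    pattern = prefix + 'kernel-module-%s' + suffix

    pkg = pattern % 'e1000e'             # kernel-module-e1000e-4.10.17
    postfix = pattern.split('%s')[1]     # text after the module name, as in the hunk
    rprovides = pkg.replace(postfix, '') # kernel-module-e1000e
    print(pkg, '->', rprovides)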
- if d.getVar("KEEPUIMAGE", True) != 'yes': - typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE", True) or "" + if d.getVar("KEEPUIMAGE") != 'yes': + typeformake = d.getVar("KERNEL_IMAGETYPE_FOR_MAKE") or "" if "uImage" in typeformake.split(): d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake.replace('uImage', 'vmlinux')) + + # Enable building of uImage with mkimage + bb.build.addtask('do_uboot_mkimage', 'do_install', 'do_kernel_link_images', d) } +do_uboot_mkimage[dirs] += "${B}" do_uboot_mkimage() { - if echo "${KERNEL_IMAGETYPES}" | grep -wq "uImage"; then - if test "x${KEEPUIMAGE}" != "xyes" ; then - uboot_prep_kimage - - ENTRYPOINT=${UBOOT_ENTRYPOINT} - if test -n "${UBOOT_ENTRYSYMBOL}"; then - ENTRYPOINT=`${HOST_PREFIX}nm ${S}/vmlinux | \ - awk '$3=="${UBOOT_ENTRYSYMBOL}" {print $1}'` - fi + uboot_prep_kimage - uboot-mkimage -A ${UBOOT_ARCH} -O linux -T kernel -C "${linux_comp}" -a ${UBOOT_LOADADDRESS} -e $ENTRYPOINT -n "${DISTRO_NAME}/${PV}/${MACHINE}" -d linux.bin arch/${ARCH}/boot/uImage - rm -f linux.bin - fi + ENTRYPOINT=${UBOOT_ENTRYPOINT} + if [ -n "${UBOOT_ENTRYSYMBOL}" ]; then + ENTRYPOINT=`${HOST_PREFIX}nm ${B}/vmlinux | \ + awk '$3=="${UBOOT_ENTRYSYMBOL}" {print "0x"$1;exit}'` fi -} -addtask uboot_mkimage before do_install after do_compile + uboot-mkimage -A ${UBOOT_ARCH} -O linux -T kernel -C "${linux_comp}" -a ${UBOOT_LOADADDRESS} -e $ENTRYPOINT -n "${DISTRO_NAME}/${PV}/${MACHINE}" -d linux.bin ${B}/arch/${ARCH}/boot/uImage + rm -f linux.bin +} diff --git a/import-layers/yocto-poky/meta/classes/kernel-yocto.bbclass b/import-layers/yocto-poky/meta/classes/kernel-yocto.bbclass index a60327a07..1ca0756c4 100644 --- a/import-layers/yocto-poky/meta/classes/kernel-yocto.bbclass +++ b/import-layers/yocto-poky/meta/classes/kernel-yocto.bbclass @@ -148,7 +148,8 @@ do_kernel_metadata() { # run1: pull all the configuration fragments, no matter where they come from elements="`echo -n ${bsp_definition} ${sccs} ${patches} ${KERNEL_FEATURES}`" if [ -n "${elements}" ]; then - scc --force -o ${S}/${meta_dir}:cfg,meta ${includes} ${bsp_definition} ${sccs} ${patches} ${KERNEL_FEATURES} + echo "${bsp_definition}" > ${S}/${meta_dir}/bsp_definition + scc --force -o ${S}/${meta_dir}:cfg,merge,meta ${includes} ${bsp_definition} ${sccs} ${patches} ${KERNEL_FEATURES} if [ $? -ne 0 ]; then bbfatal_log "Could not generate configuration queue for ${KMACHINE}." fi @@ -165,6 +166,7 @@ do_kernel_metadata() { } do_patch() { + set +e cd ${S} check_git_config @@ -177,6 +179,19 @@ do_patch() { bbfatal_log "Patch failures can be resolved in the linux source directory ${S})" fi fi + + if [ -f "${meta_dir}/merge.queue" ]; then + # we need to merge all these branches + for b in $(cat ${meta_dir}/merge.queue); do + git show-ref --verify --quiet refs/heads/${b} + if [ $? -eq 0 ]; then + bbnote "Merging branch ${b}" + git merge -q --no-ff -m "Merge branch ${b}" ${b} + else + bbfatal "branch ${b} does not exist, cannot merge" + fi + done + fi } do_kernel_checkout() { @@ -240,6 +255,7 @@ do_kernel_checkout[dirs] = "${S}" addtask kernel_checkout before do_kernel_metadata after do_unpack addtask kernel_metadata after do_validate_branches do_unpack before do_patch do_kernel_metadata[depends] = "kern-tools-native:do_populate_sysroot" +do_validate_branches[depends] = "kern-tools-native:do_populate_sysroot" do_kernel_configme[dirs] += "${S} ${B}" do_kernel_configme() { @@ -265,7 +281,8 @@ do_kernel_configme() { meta_dir=$(kgit --meta) configs="$(scc --configs -o ${meta_dir})" - if [ -z "${configs}" ]; then + if [ $? 
-ne 0 ]; then + bberror "${configs}" bbfatal_log "Could not find configuration queue (${meta_dir}/config.queue)" fi @@ -286,11 +303,11 @@ python do_kernel_configcheck() { # if KMETA isn't set globally by a recipe using this routine, we need to # set the default to 'meta'. Otherwise, kconf_check is not passed a valid # meta-series for processing - kmeta = d.getVar( "KMETA", True ) or "meta" + kmeta = d.getVar("KMETA") or "meta" if not os.path.exists(kmeta): kmeta = "." + kmeta - pathprefix = "export PATH=%s:%s; " % (d.getVar('PATH', True), "${S}/scripts/util/") + pathprefix = "export PATH=%s:%s; " % (d.getVar('PATH'), "${S}/scripts/util/") cmd = d.expand("scc --configs -o ${S}/.kernel-meta") ret, configs = oe.utils.getstatusoutput("%s%s" % (pathprefix, cmd)) @@ -298,8 +315,8 @@ python do_kernel_configcheck() { cmd = d.expand("cd ${S}; kconf_check --report -o ${S}/%s/cfg/ ${B}/.config ${S} %s" % (kmeta,configs)) ret, result = oe.utils.getstatusoutput("%s%s" % (pathprefix, cmd)) - config_check_visibility = int(d.getVar( "KCONF_AUDIT_LEVEL", True ) or 0) - bsp_check_visibility = int(d.getVar( "KCONF_BSP_AUDIT_LEVEL", True ) or 0) + config_check_visibility = int(d.getVar("KCONF_AUDIT_LEVEL") or 0) + bsp_check_visibility = int(d.getVar("KCONF_BSP_AUDIT_LEVEL") or 0) # if config check visibility is non-zero, report dropped configuration values mismatch_file = d.expand("${S}/%s/cfg/mismatch.txt" % kmeta) @@ -350,6 +367,10 @@ do_validate_branches() { current_branch=`git rev-parse --abbrev-ref HEAD` git branch "$current_branch-orig" git reset --hard ${force_srcrev} + # We've checked out HEAD, make sure we cleanup kgit-s2q fence post check + # so the patches are applied as expected otherwise no patching + # would be done in some corner cases. + kgit-s2q --clean fi fi } diff --git a/import-layers/yocto-poky/meta/classes/kernel.bbclass b/import-layers/yocto-poky/meta/classes/kernel.bbclass index eefe574a6..ce2cab65a 100644 --- a/import-layers/yocto-poky/meta/classes/kernel.bbclass +++ b/import-layers/yocto-poky/meta/classes/kernel.bbclass @@ -1,7 +1,12 @@ inherit linux-kernel-base kernel-module-split PROVIDES += "virtual/kernel" -DEPENDS += "virtual/${TARGET_PREFIX}binutils virtual/${TARGET_PREFIX}gcc kmod-native depmodwrapper-cross bc-native lzop-native" +DEPENDS += "virtual/${TARGET_PREFIX}binutils virtual/${TARGET_PREFIX}gcc kmod-native bc-native lzop-native" +PACKAGE_WRITE_DEPS += "depmodwrapper-cross virtual/update-alternatives-native" + +do_deploy[depends] += "depmodwrapper-cross:do_populate_sysroot" + +CVE_PRODUCT ?= "linux_kernel" S = "${STAGING_KERNEL_DIR}" B = "${WORKDIR}/build" @@ -13,6 +18,7 @@ INHIBIT_DEFAULT_DEPS = "1" KERNEL_IMAGETYPE ?= "zImage" INITRAMFS_IMAGE ?= "" +INITRAMFS_IMAGE_NAME ?= "${@['${INITRAMFS_IMAGE}-${MACHINE}', ''][d.getVar('INITRAMFS_IMAGE') == '']}" INITRAMFS_TASK ?= "" INITRAMFS_IMAGE_BUNDLE ?= "" @@ -22,33 +28,36 @@ INITRAMFS_IMAGE_BUNDLE ?= "" # number and cause kernel to be rebuilt. To avoid this, make # KERNEL_VERSION_NAME and KERNEL_VERSION_PKG_NAME depend on # LINUX_VERSION which is a constant. 
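Note (worked example, not from the patch): the new INITRAMFS_IMAGE_NAME default above uses the inline-Python idiom of indexing a two-element list with a boolean, so the name collapses to the empty string whenever INITRAMFS_IMAGE is empty. The same selection in plain Python, with example values:

    initramfs_image = 'core-image-minimal-initramfs'   # stands in for d.getVar('INITRAMFS_IMAGE')
    machine = 'qemux86'                                 # stands in for ${MACHINE}

    name = ['%s-%s' % (initramfs_image, machine), ''][initramfs_image == '']
    print(repr(name))   # the joined name here; '' if initramfs_image were empty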
-KERNEL_VERSION_NAME = "${@d.getVar('KERNEL_VERSION', True) or ""}" +KERNEL_VERSION_NAME = "${@d.getVar('KERNEL_VERSION') or ""}" KERNEL_VERSION_NAME[vardepvalue] = "${LINUX_VERSION}" -KERNEL_VERSION_PKG_NAME = "${@legitimize_package_name(d.getVar('KERNEL_VERSION', True))}" +KERNEL_VERSION_PKG_NAME = "${@legitimize_package_name(d.getVar('KERNEL_VERSION'))}" KERNEL_VERSION_PKG_NAME[vardepvalue] = "${LINUX_VERSION}" python __anonymous () { - import re # Merge KERNEL_IMAGETYPE and KERNEL_ALT_IMAGETYPE into KERNEL_IMAGETYPES - type = d.getVar('KERNEL_IMAGETYPE', True) or "" - alttype = d.getVar('KERNEL_ALT_IMAGETYPE', True) or "" - types = d.getVar('KERNEL_IMAGETYPES', True) or "" + type = d.getVar('KERNEL_IMAGETYPE') or "" + alttype = d.getVar('KERNEL_ALT_IMAGETYPE') or "" + types = d.getVar('KERNEL_IMAGETYPES') or "" if type not in types.split(): types = (type + ' ' + types).strip() if alttype not in types.split(): types = (alttype + ' ' + types).strip() d.setVar('KERNEL_IMAGETYPES', types) - typeformake = re.sub(r'\.gz', '', types) + # some commonly used kernel images aren't generated by the kernel build system, such as vmlinux.gz + # typeformake lists only valid kernel make targets, and post processing can be done after the kernel + # is built (such as using gzip to compress vmlinux) + typeformake = types.replace('vmlinux.gz', 'vmlinux') d.setVar('KERNEL_IMAGETYPE_FOR_MAKE', typeformake) for type in types.split(): typelower = type.lower() + imagedest = d.getVar('KERNEL_IMAGEDEST') d.appendVar('PACKAGES', ' ' + 'kernel-image-' + typelower) - d.setVar('FILES_kernel-image-' + typelower, '/boot/' + type + '-${KERNEL_VERSION_NAME}') + d.setVar('FILES_kernel-image-' + typelower, '/' + imagedest + '/' + type + '-${KERNEL_VERSION_NAME}') d.appendVar('RDEPENDS_kernel-image', ' ' + 'kernel-image-' + typelower) @@ -56,15 +65,14 @@ python __anonymous () { d.setVar('ALLOW_EMPTY_kernel-image-' + typelower, '1') - imagedest = d.getVar('KERNEL_IMAGEDEST', True) - priority = d.getVar('KERNEL_PRIORITY', True) - postinst = '#!/bin/sh\n' + 'update-alternatives --install /' + imagedest + '/' + type + ' ' + type + ' ' + '/' + imagedest + '/' + type + '-${KERNEL_VERSION_NAME} ' + priority + ' || true' + '\n' + priority = d.getVar('KERNEL_PRIORITY') + postinst = '#!/bin/sh\n' + 'update-alternatives --install /' + imagedest + '/' + type + ' ' + type + ' ' + type + '-${KERNEL_VERSION_NAME} ' + priority + ' || true' + '\n' d.setVar('pkg_postinst_kernel-image-' + typelower, postinst) postrm = '#!/bin/sh\n' + 'update-alternatives --remove' + ' ' + type + ' ' + type + '-${KERNEL_VERSION_NAME} || true' + '\n' d.setVar('pkg_postrm_kernel-image-' + typelower, postrm) - image = d.getVar('INITRAMFS_IMAGE', True) + image = d.getVar('INITRAMFS_IMAGE') if image: d.appendVarFlag('do_bundle_initramfs', 'depends', ' ${INITRAMFS_IMAGE}:do_image_complete') @@ -72,7 +80,7 @@ python __anonymous () { # The preferred method is to set INITRAMFS_IMAGE, because # this INITRAMFS_TASK has circular dependency problems # if the initramfs requires kernel modules - image_task = d.getVar('INITRAMFS_TASK', True) + image_task = d.getVar('INITRAMFS_TASK') if image_task: d.appendVarFlag('do_configure', 'depends', ' ${INITRAMFS_TASK}') } @@ -101,15 +109,15 @@ inherit ${KERNEL_CLASSES} do_unpack[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}" do_clean[cleandirs] += " ${S} ${STAGING_KERNEL_DIR} ${B} ${STAGING_KERNEL_BUILDDIR}" base_do_unpack_append () { - s = d.getVar("S", True) + s = d.getVar("S") if s[-1] == '/': # drop 
trailing slash, so that os.symlink(kernsrc, s) doesn't use s as directory name and fail s=s[:-1] - kernsrc = d.getVar("STAGING_KERNEL_DIR", True) + kernsrc = d.getVar("STAGING_KERNEL_DIR") if s != kernsrc: bb.utils.mkdirhier(kernsrc) bb.utils.remove(kernsrc, recurse=True) - if d.getVar("EXTERNALSRC", True): + if d.getVar("EXTERNALSRC"): # With EXTERNALSRC S will not be wiped so we can symlink to it os.symlink(s, kernsrc) else: @@ -126,10 +134,12 @@ PACKAGES_DYNAMIC += "^kernel-firmware-.*" export OS = "${TARGET_OS}" export CROSS_COMPILE = "${TARGET_PREFIX}" +export KBUILD_BUILD_USER = "oe-user" +export KBUILD_BUILD_HOST = "oe-host" -KERNEL_PRIORITY ?= "${@int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[0]) * 10000 + \ - int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[1]) * 100 + \ - int(d.getVar('PV', True).split('-')[0].split('+')[0].split('.')[-1])}" +KERNEL_PRIORITY ?= "${@int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[0]) * 10000 + \ + int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[1]) * 100 + \ + int(d.getVar('PV').split('-')[0].split('+')[0].split('.')[-1])}" KERNEL_RELEASE ?= "${KERNEL_VERSION}" @@ -140,7 +150,7 @@ KERNEL_IMAGEDEST = "boot" # # configuration # -export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE", True) or "ttyS0"}" +export CMDLINE_CONSOLE = "console=${@d.getVar("KERNEL_CONSOLE") or "ttyS0"}" KERNEL_VERSION = "${@get_kernelversion_headers('${B}')}" @@ -164,34 +174,34 @@ copy_initramfs() { # In case the directory is not created yet from the first pass compile: mkdir -p ${B}/usr # Find and use the first initramfs image archive type we find - rm -f ${B}/usr/${INITRAMFS_IMAGE}-${MACHINE}.cpio + rm -f ${B}/usr/${INITRAMFS_IMAGE_NAME}.cpio for img in cpio cpio.gz cpio.lz4 cpio.lzo cpio.lzma cpio.xz; do - if [ -e "${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE}-${MACHINE}.$img" ]; then - cp ${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE}-${MACHINE}.$img ${B}/usr/. + if [ -e "${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE_NAME}.$img" ]; then + cp ${DEPLOY_DIR_IMAGE}/${INITRAMFS_IMAGE_NAME}.$img ${B}/usr/. 
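Note (worked example, not part of the patch): the KERNEL_PRIORITY default above weights the three numeric components of PV after stripping any "-" or "+" suffix. With an example PV:

    pv = '4.10.17+gitAUTOINC+e92bd55409'.split('-')[0].split('+')[0]   # '4.10.17'
    parts = pv.split('.')
    priority = int(parts[0]) * 10000 + int(parts[1]) * 100 + int(parts[-1])
    print(priority)   # 41017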
case $img in *gz) echo "gzip decompressing image" - gunzip -f ${B}/usr/${INITRAMFS_IMAGE}-${MACHINE}.$img + gunzip -f ${B}/usr/${INITRAMFS_IMAGE_NAME}.$img break ;; *lz4) echo "lz4 decompressing image" - lz4 -df ${B}/usr/${INITRAMFS_IMAGE}-${MACHINE}.$img + lz4 -df ${B}/usr/${INITRAMFS_IMAGE_NAME}.$img break ;; *lzo) echo "lzo decompressing image" - lzop -df ${B}/usr/${INITRAMFS_IMAGE}-${MACHINE}.$img + lzop -df ${B}/usr/${INITRAMFS_IMAGE_NAME}.$img break ;; *lzma) echo "lzma decompressing image" - lzma -df ${B}/usr/${INITRAMFS_IMAGE}-${MACHINE}.$img + lzma -df ${B}/usr/${INITRAMFS_IMAGE_NAME}.$img break ;; *xz) echo "xz decompressing image" - xz -df ${B}/usr/${INITRAMFS_IMAGE}-${MACHINE}.$img + xz -df ${B}/usr/${INITRAMFS_IMAGE_NAME}.$img break ;; esac @@ -219,7 +229,7 @@ do_bundle_initramfs () { tmp_path=$tmp_path" "$type"##" fi done - use_alternate_initrd=CONFIG_INITRAMFS_SOURCE=${B}/usr/${INITRAMFS_IMAGE}-${MACHINE}.cpio + use_alternate_initrd=CONFIG_INITRAMFS_SOURCE=${B}/usr/${INITRAMFS_IMAGE_NAME}.cpio kernel_do_compile # Restoring kernel image for tp in $tmp_path ; do @@ -258,18 +268,16 @@ kernel_do_compile() { # The old style way of copying an prebuilt image and building it # is turned on via INTIRAMFS_TASK != "" copy_initramfs - use_alternate_initrd=CONFIG_INITRAMFS_SOURCE=${B}/usr/${INITRAMFS_IMAGE}-${MACHINE}.cpio + use_alternate_initrd=CONFIG_INITRAMFS_SOURCE=${B}/usr/${INITRAMFS_IMAGE_NAME}.cpio fi for typeformake in ${KERNEL_IMAGETYPE_FOR_MAKE} ; do oe_runmake ${typeformake} CC="${KERNEL_CC}" LD="${KERNEL_LD}" ${KERNEL_EXTRA_ARGS} $use_alternate_initrd - for type in ${KERNEL_IMAGETYPES} ; do - if test "${typeformake}.gz" = "${type}"; then - mkdir -p "${KERNEL_OUTPUT_DIR}" - gzip -9c < "${typeformake}" > "${KERNEL_OUTPUT_DIR}/${type}" - break; - fi - done done + # vmlinux.gz is not built by kernel + if (echo "${KERNEL_IMAGETYPES}" | grep -wq "vmlinux\.gz"); then + mkdir -p "${KERNEL_OUTPUT_DIR}" + gzip -9c < ${B}/vmlinux > "${KERNEL_OUTPUT_DIR}/vmlinux.gz" + fi } do_compile_kernelmodules() { @@ -296,11 +304,11 @@ kernel_do_install() { # unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS MACHINE if (grep -q -i -e '^CONFIG_MODULES=y$' .config); then - oe_runmake DEPMOD=echo INSTALL_MOD_PATH="${D}" modules_install - rm "${D}/lib/modules/${KERNEL_VERSION}/build" - rm "${D}/lib/modules/${KERNEL_VERSION}/source" + oe_runmake DEPMOD=echo MODLIB=${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION} INSTALL_FW_PATH=${D}${nonarch_base_libdir}/firmware modules_install + rm "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/build" + rm "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/source" # If the kernel/ directory is empty remove it to prevent QA issues - rmdir --ignore-fail-on-non-empty "${D}/lib/modules/${KERNEL_VERSION}/kernel" + rmdir --ignore-fail-on-non-empty "${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}/kernel" else bbnote "no modules to install" fi @@ -324,6 +332,10 @@ do_install[prefuncs] += "package_get_auto_pr" # Must be ran no earlier than after do_kernel_checkout or else Makefile won't be in ${S}/Makefile do_kernel_version_sanity_check() { + if [ "x${KERNEL_VERSION_SANITY_SKIP}" = "x1" ]; then + exit 0 + fi + # The Makefile determines the kernel version shown at runtime # Don't use KERNEL_VERSION because the headers it grabs the version from aren't generated until do_compile VERSION=$(grep "^VERSION =" ${S}/Makefile | sed s/.*=\ *//) @@ -347,7 +359,7 @@ do_kernel_version_sanity_check() { reg="${reg}${EXTRAVERSION}" if [ -z `echo ${PV} | grep -E "${reg}"` ]; then - 
bbfatal "Package Version (${PV}) does not match of kernel being built (${vers}). Please update the PV variable to match the kernel source." + bbfatal "Package Version (${PV}) does not match of kernel being built (${vers}). Please update the PV variable to match the kernel source or set KERNEL_VERSION_SANITY_SKIP=\"1\" in your recipe." fi exit 0 } @@ -430,14 +442,14 @@ sysroot_stage_all () { KERNEL_CONFIG_COMMAND ?= "oe_runmake_call -C ${S} O=${B} oldnoconfig || yes '' | oe_runmake -C ${S} O=${B} oldconfig" python check_oldest_kernel() { - oldest_kernel = d.getVar('OLDEST_KERNEL', True) - kernel_version = d.getVar('KERNEL_VERSION', True) - tclibc = d.getVar('TCLIBC', True) + oldest_kernel = d.getVar('OLDEST_KERNEL') + kernel_version = d.getVar('KERNEL_VERSION') + tclibc = d.getVar('TCLIBC') if tclibc == 'glibc': kernel_version = kernel_version.split('-', 1)[0] if oldest_kernel and kernel_version: if bb.utils.vercmp_string(kernel_version, oldest_kernel) < 0: - bb.warn('%s: OLDEST_KERNEL is "%s" but the version of the kernel you are building is "%s" - therefore %s as built may not be compatible with this kernel. Either set OLDEST_KERNEL to an older version, or build a newer kernel.' % (d.getVar('PN', True), oldest_kernel, kernel_version, tclibc)) + bb.warn('%s: OLDEST_KERNEL is "%s" but the version of the kernel you are building is "%s" - therefore %s as built may not be compatible with this kernel. Either set OLDEST_KERNEL to an older version, or build a newer kernel.' % (d.getVar('PN'), oldest_kernel, kernel_version, tclibc)) } check_oldest_kernel[vardepsexclude] += "OLDEST_KERNEL KERNEL_VERSION" @@ -478,9 +490,9 @@ EXPORT_FUNCTIONS do_compile do_install do_configure # kernel-image becomes kernel-image-${KERNEL_VERSION} PACKAGES = "kernel kernel-base kernel-vmlinux kernel-image kernel-dev kernel-modules" FILES_${PN} = "" -FILES_kernel-base = "/lib/modules/${KERNEL_VERSION}/modules.order /lib/modules/${KERNEL_VERSION}/modules.builtin" +FILES_kernel-base = "${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.order ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/modules.builtin" FILES_kernel-image = "" -FILES_kernel-dev = "/boot/System.map* /boot/Module.symvers* /boot/config* ${KERNEL_SRC_PATH} /lib/modules/${KERNEL_VERSION}/build" +FILES_kernel-dev = "/boot/System.map* /boot/Module.symvers* /boot/config* ${KERNEL_SRC_PATH} ${nonarch_base_libdir}/modules/${KERNEL_VERSION}/build" FILES_kernel-vmlinux = "/boot/vmlinux-${KERNEL_VERSION_NAME}" FILES_kernel-modules = "" RDEPENDS_kernel = "kernel-base" @@ -511,7 +523,7 @@ pkg_postinst_kernel-base () { PACKAGESPLITFUNCS_prepend = "split_kernel_packages " python split_kernel_packages () { - do_split_packages(d, root='/lib/firmware', file_regex='^(.*)\.(bin|fw|cis|csp|dsp)$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') + do_split_packages(d, root='${nonarch_base_libdir}/firmware', file_regex='^(.*)\.(bin|fw|cis|csp|dsp)$', output_pattern='kernel-firmware-%s', description='Firmware for %s', recursive=True, extra_depends='') } # Many scripts want to look in arch/$arch/boot for the bootable @@ -527,7 +539,11 @@ do_kernel_link_images() { if [ -f ../../../vmlinuz ]; then ln -sf ../../../vmlinuz fi + if [ -f ../../../vmlinuz.bin ]; then + ln -sf ../../../vmlinuz.bin + fi } +addtask kernel_link_images after do_compile before do_strip do_strip() { if [ -n "${KERNEL_IMAGE_STRIP_EXTRA_SECTIONS}" ]; then @@ -556,7 +572,7 @@ do_strip() { } do_strip[dirs] = "${B}" -addtask do_strip before do_sizecheck 
after do_kernel_link_images +addtask strip before do_sizecheck after do_kernel_link_images # Support checking the kernel size since some kernels need to reside in partitions # with a fixed length or there is a limit in transferring the kernel to memory @@ -623,6 +639,6 @@ do_deploy[cleandirs] = "${DEPLOYDIR}" do_deploy[dirs] = "${DEPLOYDIR} ${B}" do_deploy[prefuncs] += "package_get_auto_pr" -addtask deploy after do_populate_sysroot +addtask deploy after do_populate_sysroot do_packagedata EXPORT_FUNCTIONS do_deploy diff --git a/import-layers/yocto-poky/meta/classes/kernelsrc.bbclass b/import-layers/yocto-poky/meta/classes/kernelsrc.bbclass index 9efd46a92..675d40ec9 100644 --- a/import-layers/yocto-poky/meta/classes/kernelsrc.bbclass +++ b/import-layers/yocto-poky/meta/classes/kernelsrc.bbclass @@ -1,7 +1,7 @@ S = "${STAGING_KERNEL_DIR}" -do_fetch[noexec] = "1" -do_unpack[depends] += "virtual/kernel:do_patch" -do_unpack[noexec] = "1" +deltask do_fetch +deltask do_unpack +do_patch[depends] += "virtual/kernel:do_patch" do_patch[noexec] = "1" do_package[depends] += "virtual/kernel:do_populate_sysroot" KERNEL_VERSION = "${@get_kernelversion_file("${STAGING_KERNEL_BUILDDIR}")}" diff --git a/import-layers/yocto-poky/meta/classes/libc-common.bbclass b/import-layers/yocto-poky/meta/classes/libc-common.bbclass index 11b0065a6..9ea2c0374 100644 --- a/import-layers/yocto-poky/meta/classes/libc-common.bbclass +++ b/import-layers/yocto-poky/meta/classes/libc-common.bbclass @@ -17,15 +17,15 @@ do_install() { } def get_libc_fpu_setting(bb, d): - if d.getVar('TARGET_FPU', True) in [ 'soft', 'ppc-efd' ]: + if d.getVar('TARGET_FPU') in [ 'soft', 'ppc-efd' ]: return "--without-fp" return "" python populate_packages_prepend () { - if d.getVar('DEBIAN_NAMES', True): - pkgs = d.getVar('PACKAGES', True).split() - bpn = d.getVar('BPN', True) - prefix = d.getVar('MLPREFIX', True) or "" + if d.getVar('DEBIAN_NAMES'): + pkgs = d.getVar('PACKAGES').split() + bpn = d.getVar('BPN') + prefix = d.getVar('MLPREFIX') or "" # Set the base package... 
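Note on the pervasive getVar()/getVarFlag() changes in this patch: the trailing ", True" argument is dropped because the expand parameter defaults to True in this BitBake release, so d.getVar('FOO', True) and d.getVar('FOO') are behaviourally identical. A toy sketch of that default, using a minimal stand-in datastore rather than BitBake's real DataSmart class:

    import re

    class MiniData:
        """Toy stand-in for the BitBake datastore (not DataSmart itself)."""
        def __init__(self):
            self.store = {}

        def setVar(self, name, value):
            self.store[name] = value

        def getVar(self, name, expand=True):
            # The point of the patch-wide change: expand now defaults to True,
            # so getVar('FOO', True) and getVar('FOO') mean the same thing.
            value = self.store.get(name)
            if value is not None and expand:
                # minimal ${VAR} expansion, enough for the demonstration
                value = re.sub(r'\$\{(\w+)\}',
                               lambda m: self.store.get(m.group(1), m.group(0)),
                               value)
            return value

    d = MiniData()
    d.setVar('PN', 'glibc')
    d.setVar('B', '/build/${PN}')
    print(d.getVar('B'))         # /build/glibc (expanded by default)
    print(d.getVar('B', False))  # /build/${PN} (unexpanded form still available)

Call sites that want the unexpanded value keep passing False explicitly, as the OVERRIDES and PN manipulations elsewhere in this patch continue to do.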
d.setVar('PKG_' + prefix + bpn, prefix + 'libc6') libcprefix = prefix + bpn + '-' diff --git a/import-layers/yocto-poky/meta/classes/libc-package.bbclass b/import-layers/yocto-poky/meta/classes/libc-package.bbclass index 2dc90c44d..739adce69 100644 --- a/import-layers/yocto-poky/meta/classes/libc-package.bbclass +++ b/import-layers/yocto-poky/meta/classes/libc-package.bbclass @@ -9,25 +9,27 @@ GLIBC_INTERNAL_USE_BINARY_LOCALE ?= "ondevice" +GLIBC_SPLIT_LC_PACKAGES ?= "0" + python __anonymous () { - enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION", True) + enabled = d.getVar("ENABLE_BINARY_LOCALE_GENERATION") - pn = d.getVar("PN", True) + pn = d.getVar("PN") if pn.endswith("-initial"): enabled = False if enabled and int(enabled): import re - target_arch = d.getVar("TARGET_ARCH", True) - binary_arches = d.getVar("BINARY_LOCALE_ARCHES", True) or "" - use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "" + target_arch = d.getVar("TARGET_ARCH") + binary_arches = d.getVar("BINARY_LOCALE_ARCHES") or "" + use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF") or "" for regexp in binary_arches.split(" "): r = re.compile(regexp) if r.match(target_arch): - depends = d.getVar("DEPENDS", True) + depends = d.getVar("DEPENDS") if use_cross_localedef == "1" : depends = "%s cross-localedef-native" % depends else: @@ -92,21 +94,21 @@ inherit qemu python package_do_split_gconvs () { import re - if (d.getVar('PACKAGE_NO_GCONV', True) == '1'): + if (d.getVar('PACKAGE_NO_GCONV') == '1'): bb.note("package requested not splitting gconvs") return - if not d.getVar('PACKAGES', True): + if not d.getVar('PACKAGES'): return - mlprefix = d.getVar("MLPREFIX", True) or "" + mlprefix = d.getVar("MLPREFIX") or "" - bpn = d.getVar('BPN', True) - libdir = d.getVar('libdir', True) + bpn = d.getVar('BPN') + libdir = d.getVar('libdir') if not libdir: bb.error("libdir not defined") return - datadir = d.getVar('datadir', True) + datadir = d.getVar('datadir') if not datadir: bb.error("datadir not defined") return @@ -114,7 +116,7 @@ python package_do_split_gconvs () { gconv_libdir = base_path_join(libdir, "gconv") charmap_dir = base_path_join(datadir, "i18n", "charmaps") locales_dir = base_path_join(datadir, "i18n", "locales") - binary_locales_dir = d.getVar('localedir', True) + binary_locales_dir = d.getVar('localedir') def calc_gconv_deps(fn, pkg, file_regex, output_pattern, group): deps = [] @@ -181,13 +183,13 @@ python package_do_split_gconvs () { description='locale definition for %s', hook=calc_locale_deps, extra_depends='') d.setVar('PACKAGES', d.getVar('PACKAGES', False) + ' ' + d.getVar('MLPREFIX', False) + bpn + '-gconv') - use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE", True) + use_bin = d.getVar("GLIBC_INTERNAL_USE_BINARY_LOCALE") dot_re = re.compile("(.*)\.(.*)") # Read in supported locales and associated encodings supported = {} - with open(base_path_join(d.getVar('WORKDIR', True), "SUPPORTED")) as f: + with open(base_path_join(d.getVar('WORKDIR'), "SUPPORTED")) as f: for line in f.readlines(): try: locale, charset = line.rstrip().split() @@ -196,7 +198,7 @@ python package_do_split_gconvs () { supported[locale] = charset # GLIBC_GENERATE_LOCALES var specifies which locales to be generated. 
empty or "all" means all locales - to_generate = d.getVar('GLIBC_GENERATE_LOCALES', True) + to_generate = d.getVar('GLIBC_GENERATE_LOCALES') if not to_generate or to_generate == 'all': to_generate = sorted(supported.keys()) else: @@ -213,33 +215,32 @@ python package_do_split_gconvs () { def output_locale_source(name, pkgname, locale, encoding): d.setVar('RDEPENDS_%s' % pkgname, '%slocaledef %s-localedata-%s %s-charmap-%s' % \ (mlprefix, mlprefix+bpn, legitimize_package_name(locale), mlprefix+bpn, legitimize_package_name(encoding))) - d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst', True) \ + d.setVar('pkg_postinst_%s' % pkgname, d.getVar('locale_base_postinst') \ % (locale, encoding, locale)) - d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm', True) % \ + d.setVar('pkg_postrm_%s' % pkgname, d.getVar('locale_base_postrm') % \ (locale, encoding, locale)) def output_locale_binary_rdepends(name, pkgname, locale, encoding): - m = re.match("(.*)\.(.*)", name) - if m: - libc_name = "%s.%s" % (m.group(1), m.group(2).lower()) - else: - libc_name = name - d.setVar('RDEPENDS_%s' % pkgname, legitimize_package_name('%s-binary-localedata-%s' \ - % (mlprefix+bpn, libc_name))) + dep = legitimize_package_name('%s-binary-localedata-%s' % (bpn, name)) + lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES') + if lcsplit and int(lcsplit): + d.appendVar('PACKAGES', ' ' + dep) + d.setVar('ALLOW_EMPTY_%s' % dep, '1') + d.setVar('RDEPENDS_%s' % pkgname, mlprefix + dep) commands = {} def output_locale_binary(name, pkgname, locale, encoding): - treedir = base_path_join(d.getVar("WORKDIR", True), "locale-tree") - ldlibdir = base_path_join(treedir, d.getVar("base_libdir", True)) - path = d.getVar("PATH", True) + treedir = base_path_join(d.getVar("WORKDIR"), "locale-tree") + ldlibdir = base_path_join(treedir, d.getVar("base_libdir")) + path = d.getVar("PATH") i18npath = base_path_join(treedir, datadir, "i18n") gconvpath = base_path_join(treedir, "iconvdata") outputpath = base_path_join(treedir, binary_locales_dir) - use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF", True) or "0" + use_cross_localedef = d.getVar("LOCALE_GENERATION_WITH_CROSS-LOCALEDEF") or "0" if use_cross_localedef == "1": - target_arch = d.getVar('TARGET_ARCH', True) + target_arch = d.getVar('TARGET_ARCH') locale_arch_options = { \ "arm": " --uint32-align=4 --little-endian ", \ "armeb": " --uint32-align=4 --big-endian ", \ @@ -278,7 +279,7 @@ python package_do_split_gconvs () { --inputfile=%s/i18n/locales/%s --charmap=%s %s" \ % (treedir, datadir, locale, encoding, name) - qemu_options = d.getVar('QEMU_OPTIONS', True) + qemu_options = d.getVar('QEMU_OPTIONS') cmd = "PSEUDO_RELOADED=YES PATH=\"%s\" I18NPATH=\"%s\" %s -L %s \ -E LD_LIBRARY_PATH=%s %s %s/bin/localedef %s" % \ @@ -291,7 +292,7 @@ python package_do_split_gconvs () { def output_locale(name, locale, encoding): pkgname = d.getVar('MLPREFIX', False) + 'locale-base-' + legitimize_package_name(name) d.setVar('ALLOW_EMPTY_%s' % pkgname, '1') - d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES', True))) + d.setVar('PACKAGES', '%s %s' % (pkgname, d.getVar('PACKAGES'))) rprovides = ' %svirtual-locale-%s' % (mlprefix, legitimize_package_name(name)) m = re.match("(.*)_(.*)", name) if m: @@ -310,8 +311,8 @@ python package_do_split_gconvs () { bb.note("preparing tree for binary locale generation") bb.build.exec_func("do_prep_locale_tree", d) - utf8_only = int(d.getVar('LOCALE_UTF8_ONLY', True) or 0) - utf8_is_default = 
int(d.getVar('LOCALE_UTF8_IS_DEFAULT', True) or 0) + utf8_only = int(d.getVar('LOCALE_UTF8_ONLY') or 0) + utf8_is_default = int(d.getVar('LOCALE_UTF8_IS_DEFAULT') or 0) encodings = {} for locale in to_generate: @@ -337,8 +338,13 @@ python package_do_split_gconvs () { else: output_locale('%s.%s' % (base, charset), base, charset) + def metapkg_hook(file, pkg, pattern, format, basename): + name = basename.split('/', 1)[0] + metapkg = legitimize_package_name('%s-binary-localedata-%s' % (mlprefix+bpn, name)) + d.appendVar('RDEPENDS_%s' % metapkg, ' ' + pkg) + if use_bin == "compile": - makefile = base_path_join(d.getVar("WORKDIR", True), "locale-tree", "Makefile") + makefile = base_path_join(d.getVar("WORKDIR"), "locale-tree", "Makefile") m = open(makefile, "w") m.write("all: %s\n\n" % " ".join(commands.keys())) for cmd in commands: @@ -350,13 +356,18 @@ python package_do_split_gconvs () { bb.build.exec_func("oe_runmake", d) bb.note("collecting binary locales from locale tree") bb.build.exec_func("do_collect_bins_from_locale_tree", d) - do_split_packages(d, binary_locales_dir, file_regex='(.*)', \ - output_pattern=bpn+'-binary-localedata-%s', \ - description='binary locale definition for %s', extra_depends='', allow_dirs=True) - elif use_bin == "precompiled": - do_split_packages(d, binary_locales_dir, file_regex='(.*)', \ - output_pattern=bpn+'-binary-localedata-%s', \ - description='binary locale definition for %s', extra_depends='', allow_dirs=True) + + if use_bin in ('compile', 'precompiled'): + lcsplit = d.getVar('GLIBC_SPLIT_LC_PACKAGES') + if lcsplit and int(lcsplit): + do_split_packages(d, binary_locales_dir, file_regex='^(.*/LC_\w+)', \ + output_pattern=bpn+'-binary-localedata-%s', \ + description='binary locale definition for %s', recursive=True, + hook=metapkg_hook, extra_depends='', allow_dirs=True, match_path=True) + else: + do_split_packages(d, binary_locales_dir, file_regex='(.*)', \ + output_pattern=bpn+'-binary-localedata-%s', \ + description='binary locale definition for %s', extra_depends='', allow_dirs=True) else: bb.note("generation of binary locales disabled. 
this may break i18n!") diff --git a/import-layers/yocto-poky/meta/classes/license.bbclass b/import-layers/yocto-poky/meta/classes/license.bbclass index 721343d0f..b1fffe70f 100644 --- a/import-layers/yocto-poky/meta/classes/license.bbclass +++ b/import-layers/yocto-poky/meta/classes/license.bbclass @@ -37,13 +37,13 @@ python license_create_manifest() { import oe.packagedata from oe.rootfs import image_list_installed_packages - build_images_from_feeds = d.getVar('BUILD_IMAGES_FROM_FEEDS', True) + build_images_from_feeds = d.getVar('BUILD_IMAGES_FROM_FEEDS') if build_images_from_feeds == "1": return 0 pkg_dic = {} for pkg in sorted(image_list_installed_packages(d)): - pkg_info = os.path.join(d.getVar('PKGDATA_DIR', True), + pkg_info = os.path.join(d.getVar('PKGDATA_DIR'), 'runtime-reverse', pkg) pkg_name = os.path.basename(os.readlink(pkg_info)) @@ -52,15 +52,15 @@ python license_create_manifest() { pkg_lic_name = "LICENSE_" + pkg_name pkg_dic[pkg_name]["LICENSE"] = pkg_dic[pkg_name][pkg_lic_name] - rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY', True), - d.getVar('IMAGE_NAME', True), 'license.manifest') + rootfs_license_manifest = os.path.join(d.getVar('LICENSE_DIRECTORY'), + d.getVar('IMAGE_NAME'), 'license.manifest') write_license_files(d, rootfs_license_manifest, pkg_dic) } def write_license_files(d, license_manifest, pkg_dic): import re - bad_licenses = (d.getVar("INCOMPATIBLE_LICENSE", True) or "").split() + bad_licenses = (d.getVar("INCOMPATIBLE_LICENSE") or "").split() bad_licenses = map(lambda l: canonical_license(d, l), bad_licenses) bad_licenses = expand_wildcard_licenses(d, bad_licenses) @@ -72,7 +72,7 @@ def write_license_files(d, license_manifest, pkg_dic): oe.license.manifest_licenses(pkg_dic[pkg]["LICENSE"], bad_licenses, canonical_license, d) except oe.license.LicenseError as exc: - bb.fatal('%s: %s' % (d.getVar('P', True), exc)) + bb.fatal('%s: %s' % (d.getVar('P'), exc)) else: pkg_dic[pkg]["LICENSES"] = re.sub('[|&()*]', ' ', pkg_dic[pkg]["LICENSE"]) pkg_dic[pkg]["LICENSES"] = re.sub(' *', ' ', pkg_dic[pkg]["LICENSES"]) @@ -98,7 +98,7 @@ def write_license_files(d, license_manifest, pkg_dic): license_file.write("FILES: %s\n\n" % pkg_dic[pkg]["FILES"]) for lic in pkg_dic[pkg]["LICENSES"]: - lic_file = os.path.join(d.getVar('LICENSE_DIRECTORY', True), + lic_file = os.path.join(d.getVar('LICENSE_DIRECTORY'), pkg_dic[pkg]["PN"], "generic_%s" % re.sub('\+', '', lic)) # add explicity avoid of CLOSED license because isn't generic @@ -114,10 +114,10 @@ def write_license_files(d, license_manifest, pkg_dic): # - Just copy the manifest # - Copy the manifest and the license directories # With both options set we see a .5 M increase in core-image-minimal - copy_lic_manifest = d.getVar('COPY_LIC_MANIFEST', True) - copy_lic_dirs = d.getVar('COPY_LIC_DIRS', True) + copy_lic_manifest = d.getVar('COPY_LIC_MANIFEST') + copy_lic_dirs = d.getVar('COPY_LIC_DIRS') if copy_lic_manifest == "1": - rootfs_license_dir = os.path.join(d.getVar('IMAGE_ROOTFS', 'True'), + rootfs_license_dir = os.path.join(d.getVar('IMAGE_ROOTFS'), 'usr', 'share', 'common-licenses') bb.utils.mkdirhier(rootfs_license_dir) rootfs_license_manifest = os.path.join(rootfs_license_dir, @@ -129,8 +129,12 @@ def write_license_files(d, license_manifest, pkg_dic): for pkg in sorted(pkg_dic): pkg_rootfs_license_dir = os.path.join(rootfs_license_dir, pkg) bb.utils.mkdirhier(pkg_rootfs_license_dir) - pkg_license_dir = os.path.join(d.getVar('LICENSE_DIRECTORY', True), + pkg_license_dir = 
os.path.join(d.getVar('LICENSE_DIRECTORY'), pkg_dic[pkg]["PN"]) + + pkg_manifest_licenses = [canonical_license(d, lic) \ + for lic in pkg_dic[pkg]["LICENSES"]] + licenses = os.listdir(pkg_license_dir) for lic in licenses: rootfs_license = os.path.join(rootfs_license_dir, lic) @@ -138,9 +142,18 @@ def write_license_files(d, license_manifest, pkg_dic): pkg_rootfs_license = os.path.join(pkg_rootfs_license_dir, lic) if re.match("^generic_.*$", lic): - generic_lic = re.search("^generic_(.*)$", lic).group(1) - if oe.license.license_ok(canonical_license(d, - generic_lic), bad_licenses) == False: + generic_lic = canonical_license(d, + re.search("^generic_(.*)$", lic).group(1)) + + # Do not copy generic license into package if isn't + # declared into LICENSES of the package. + if not re.sub('\+$', '', generic_lic) in \ + [re.sub('\+', '', lic) for lic in \ + pkg_manifest_licenses]: + continue + + if oe.license.license_ok(generic_lic, + bad_licenses) == False: continue if not os.path.exists(rootfs_license): @@ -166,7 +179,7 @@ def license_deployed_manifest(d): dep_dic = {} man_dic = {} - lic_dir = d.getVar("LICENSE_DIRECTORY", True) + lic_dir = d.getVar("LICENSE_DIRECTORY") dep_dic = get_deployed_dependencies(d) for dep in dep_dic.keys(): @@ -181,8 +194,8 @@ def license_deployed_manifest(d): key,val = line.split(": ", 1) man_dic[dep][key] = val[:-1] - lic_manifest_dir = os.path.join(d.getVar('LICENSE_DIRECTORY', True), - d.getVar('IMAGE_NAME', True)) + lic_manifest_dir = os.path.join(d.getVar('LICENSE_DIRECTORY'), + d.getVar('IMAGE_NAME')) bb.utils.mkdirhier(lic_manifest_dir) image_license_manifest = os.path.join(lic_manifest_dir, 'image_license.manifest') write_license_files(d, image_license_manifest, man_dic) @@ -202,7 +215,7 @@ def get_deployed_dependencies(d): depends = list(set([dep[0] for dep in list(taskdata.values()) if not dep[0].endswith("-native")])) - extra_depends = d.getVar("EXTRA_IMAGEDEPENDS", True) + extra_depends = d.getVar("EXTRA_IMAGEDEPENDS") boot_depends = get_boot_dependencies(d) depends.extend(extra_depends.split()) depends.extend(boot_depends) @@ -212,13 +225,13 @@ def get_deployed_dependencies(d): # the SSTATE_MANIFESTS for "deploy" task. # The manifest file name contains the arch. Because we are not running # in the recipe context it is necessary to check every arch used. - sstate_manifest_dir = d.getVar("SSTATE_MANIFESTS", True) - sstate_archs = d.getVar("SSTATE_ARCHS", True) - extra_archs = d.getVar("PACKAGE_EXTRA_ARCHS", True) + sstate_manifest_dir = d.getVar("SSTATE_MANIFESTS") + sstate_archs = d.getVar("SSTATE_ARCHS") + extra_archs = d.getVar("PACKAGE_EXTRA_ARCHS") archs = list(set(("%s %s" % (sstate_archs, extra_archs)).split())) for dep in depends: # Some recipes have an arch on their own, so we try that first. 
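The comment above describes how get_deployed_dependencies() locates a dependency's deploy manifest without knowing its package arch: a recipe-specific PACKAGE_ARCH_pn-<recipe> is tried first, then every arch collected from SSTATE_ARCHS and PACKAGE_EXTRA_ARCHS. A standalone sketch of that probing order, with hypothetical directory and recipe names (the real code also records matching recipes as deployed dependencies):

    import os

    def find_deploy_manifest(sstate_manifest_dir, dep, archs, special_arch=None):
        # Try the recipe's own arch first (if any), then every known arch,
        # looking for a manifest-<arch>-<recipe>.deploy file.
        candidates = ([special_arch] if special_arch else []) + list(archs)
        for arch in candidates:
            path = os.path.join(sstate_manifest_dir,
                                "manifest-%s-%s.deploy" % (arch, dep))
            if os.path.exists(path):
                return path
        return None

    # Hypothetical layout: u-boot built with a machine-specific arch.
    # Returns None here unless such manifest files actually exist locally.
    print(find_deploy_manifest("/tmp/sstate-control", "u-boot",
                               ["core2-64", "qemux86_64"],
                               special_arch="qemux86_64"))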
- special_arch = d.getVar("PACKAGE_ARCH_pn-%s" % dep, True) + special_arch = d.getVar("PACKAGE_ARCH_pn-%s" % dep) if special_arch: sstate_manifest_file = os.path.join(sstate_manifest_dir, "manifest-%s-%s.deploy" % (special_arch, dep)) @@ -249,12 +262,12 @@ def get_boot_dependencies(d): for task in boot_tasks: boot_depends_string = "%s %s" % (boot_depends_string, - d.getVarFlag(task, "depends", True) or "") + d.getVarFlag(task, "depends") or "") boot_depends = [dep.split(":")[0] for dep in boot_depends_string.split() if not dep.split(":")[0].endswith("-native")] for dep in boot_depends: - info_file = os.path.join(d.getVar("LICENSE_DIRECTORY", True), + info_file = os.path.join(d.getVar("LICENSE_DIRECTORY"), dep, "recipeinfo") # If the recipe and dependency name is the same if os.path.exists(info_file): @@ -265,7 +278,7 @@ def get_boot_dependencies(d): # The fifth field contains what the task provides if dep in taskdep[4]: info_file = os.path.join( - d.getVar("LICENSE_DIRECTORY", True), + d.getVar("LICENSE_DIRECTORY"), taskdep[0], "recipeinfo") if os.path.exists(info_file): depends.append(taskdep[0]) @@ -295,7 +308,7 @@ python do_populate_lic() { lic_files_paths = find_license_files(d) # The base directory we wrangle licenses to - destdir = os.path.join(d.getVar('LICSSTATEDIR', True), d.getVar('PN', True)) + destdir = os.path.join(d.getVar('LICSSTATEDIR'), d.getVar('PN')) copy_license_files(lic_files_paths, destdir) info = get_recipe_info(d) with open(os.path.join(destdir, "recipeinfo"), "w") as f: @@ -306,11 +319,11 @@ python do_populate_lic() { # it would be better to copy them in do_install_append, but find_license_filesa is python python perform_packagecopy_prepend () { enabled = oe.data.typed_value('LICENSE_CREATE_PACKAGE', d) - if d.getVar('CLASSOVERRIDE', True) == 'class-target' and enabled: + if d.getVar('CLASSOVERRIDE') == 'class-target' and enabled: lic_files_paths = find_license_files(d) # LICENSE_FILES_DIRECTORY starts with '/' so os.path.join cannot be used to join D and LICENSE_FILES_DIRECTORY - destdir = d.getVar('D', True) + os.path.join(d.getVar('LICENSE_FILES_DIRECTORY', True), d.getVar('PN', True)) + destdir = d.getVar('D') + os.path.join(d.getVar('LICENSE_FILES_DIRECTORY'), d.getVar('PN')) copy_license_files(lic_files_paths, destdir) add_package_and_files(d) } @@ -318,15 +331,15 @@ perform_packagecopy[vardeps] += "LICENSE_CREATE_PACKAGE" def get_recipe_info(d): info = {} - info["PV"] = d.getVar("PV", True) - info["PR"] = d.getVar("PR", True) - info["LICENSE"] = d.getVar("LICENSE", True) + info["PV"] = d.getVar("PV") + info["PR"] = d.getVar("PR") + info["LICENSE"] = d.getVar("LICENSE") return info def add_package_and_files(d): - packages = d.getVar('PACKAGES', True) - files = d.getVar('LICENSE_FILES_DIRECTORY', True) - pn = d.getVar('PN', True) + packages = d.getVar('PACKAGES') + files = d.getVar('LICENSE_FILES_DIRECTORY') + pn = d.getVar('PN') pn_lic = "%s%s" % (pn, d.getVar('LICENSE_PACKAGE_SUFFIX', False)) if pn_lic in packages: bb.warn("%s package already existed in %s." 
% (pn_lic, pn)) @@ -334,7 +347,7 @@ def add_package_and_files(d): # first in PACKAGES to be sure that nothing else gets LICENSE_FILES_DIRECTORY d.setVar('PACKAGES', "%s %s" % (pn_lic, packages)) d.setVar('FILES_' + pn_lic, files) - rrecommends_pn = d.getVar('RRECOMMENDS_' + pn, True) + rrecommends_pn = d.getVar('RRECOMMENDS_' + pn) if rrecommends_pn: d.setVar('RRECOMMENDS_' + pn, "%s %s" % (pn_lic, rrecommends_pn)) else: @@ -345,7 +358,7 @@ def copy_license_files(lic_files_paths, destdir): import errno bb.utils.mkdirhier(destdir) - for (basename, path) in lic_files_paths: + for (basename, path, beginline, endline) in lic_files_paths: try: src = path dst = os.path.join(destdir, basename) @@ -353,7 +366,7 @@ def copy_license_files(lic_files_paths, destdir): os.remove(dst) if os.path.islink(src): src = os.path.realpath(src) - canlink = os.access(src, os.W_OK) and (os.stat(src).st_dev == os.stat(destdir).st_dev) + canlink = os.access(src, os.W_OK) and (os.stat(src).st_dev == os.stat(destdir).st_dev) and beginline is None and endline is None if canlink: try: os.link(src, dst) @@ -364,20 +377,19 @@ def copy_license_files(lic_files_paths, destdir): canlink = False else: raise - try: - if canlink: - os.chown(dst,0,0) - except OSError as err: - if err.errno in (errno.EPERM, errno.EINVAL): - # Suppress "Operation not permitted" error, as - # sometimes this function is not executed under pseudo. - # Also ignore "Invalid argument" errors that happen in - # some (unprivileged) container environments (no root). - pass - else: - raise + # Only chown if we did hardling, and, we're running under pseudo + if canlink and os.environ.get('PSEUDO_DISABLED') == '0': + os.chown(dst,0,0) if not canlink: - shutil.copyfile(src, dst) + begin_idx = int(beginline)-1 if beginline is not None else None + end_idx = int(endline) if endline is not None else None + if begin_idx is None and end_idx is None: + shutil.copyfile(src, dst) + else: + with open(src, 'rb') as src_f: + with open(dst, 'wb') as dst_f: + dst_f.write(b''.join(src_f.readlines()[begin_idx:end_idx])) + except Exception as e: bb.warn("Could not copy license file %s to %s: %s" % (src, dst, e)) @@ -390,20 +402,22 @@ def find_license_files(d): from collections import defaultdict, OrderedDict # All the license files for the package - lic_files = d.getVar('LIC_FILES_CHKSUM', True) - pn = d.getVar('PN', True) + lic_files = d.getVar('LIC_FILES_CHKSUM') or "" + pn = d.getVar('PN') # The license files are located in S/LIC_FILE_CHECKSUM. - srcdir = d.getVar('S', True) + srcdir = d.getVar('S') # Directory we store the generic licenses as set in the distro configuration - generic_directory = d.getVar('COMMON_LICENSE_DIR', True) + generic_directory = d.getVar('COMMON_LICENSE_DIR') # List of basename, path tuples lic_files_paths = [] + # hash for keep track generic lics mappings + non_generic_lics = {} # Entries from LIC_FILES_CHKSUM lic_chksums = {} license_source_dirs = [] license_source_dirs.append(generic_directory) try: - additional_lic_dirs = d.getVar('LICENSE_PATH', True).split() + additional_lic_dirs = d.getVar('LICENSE_PATH').split() for lic_dir in additional_lic_dirs: license_source_dirs.append(lic_dir) except: @@ -431,10 +445,10 @@ def find_license_files(d): # unless NO_GENERIC_LICENSE is set. for lic_dir in license_source_dirs: if not os.path.isfile(os.path.join(lic_dir, license_type)): - if d.getVarFlag('SPDXLICENSEMAP', license_type, True) != None: + if d.getVarFlag('SPDXLICENSEMAP', license_type) != None: # Great, there is an SPDXLICENSEMAP. We can copy! 
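The copy_license_files() hunk above threads the optional beginline/endline parameters from LIC_FILES_CHKSUM through to the copy: hardlinking is only permitted when neither is set, otherwise just the requested line range is written to the destination. A self-contained sketch of that slicing, using a throwaway file and a hypothetical checksum entry:

    import os
    import tempfile

    def copy_license_excerpt(src, dst, beginline=None, endline=None):
        # Same slicing as the new non-hardlink path in copy_license_files():
        # with neither bound set the whole file is written; otherwise only the
        # requested 1-based, inclusive line range is copied.
        begin_idx = int(beginline) - 1 if beginline is not None else None
        end_idx = int(endline) if endline is not None else None
        with open(src, 'rb') as src_f, open(dst, 'wb') as dst_f:
            dst_f.write(b''.join(src_f.readlines()[begin_idx:end_idx]))

    # Throwaway demonstration, e.g. for a hypothetical
    #   LIC_FILES_CHKSUM = "file://main.c;beginline=1;endline=3;md5=..."
    workdir = tempfile.mkdtemp()
    src = os.path.join(workdir, 'main.c')
    with open(src, 'w') as f:
        f.write('\n'.join('line %d' % i for i in range(1, 11)))
    copy_license_excerpt(src, os.path.join(workdir, 'generic_excerpt'), '1', '3')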
bb.debug(1, "We need to use a SPDXLICENSEMAP for %s" % (license_type)) - spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type, True) + spdx_generic = d.getVarFlag('SPDXLICENSEMAP', license_type) license_source = lic_dir break elif os.path.isfile(os.path.join(lic_dir, license_type)): @@ -442,23 +456,25 @@ def find_license_files(d): license_source = lic_dir break - non_generic_lic = d.getVarFlag('NO_GENERIC_LICENSE', license_type, True) + non_generic_lic = d.getVarFlag('NO_GENERIC_LICENSE', license_type) if spdx_generic and license_source: # we really should copy to generic_ + spdx_generic, however, that ends up messing the manifest # audit up. This should be fixed in emit_pkgdata (or, we actually got and fix all the recipes) - lic_files_paths.append(("generic_" + license_type, os.path.join(license_source, spdx_generic))) + lic_files_paths.append(("generic_" + license_type, os.path.join(license_source, spdx_generic), + None, None)) # The user may attempt to use NO_GENERIC_LICENSE for a generic license which doesn't make sense # and should not be allowed, warn the user in this case. - if d.getVarFlag('NO_GENERIC_LICENSE', license_type, True): + if d.getVarFlag('NO_GENERIC_LICENSE', license_type): bb.warn("%s: %s is a generic license, please don't use NO_GENERIC_LICENSE for it." % (pn, license_type)) elif non_generic_lic and non_generic_lic in lic_chksums: # if NO_GENERIC_LICENSE is set, we copy the license files from the fetched source # of the package rather than the license_source_dirs. lic_files_paths.append(("generic_" + license_type, - os.path.join(srcdir, non_generic_lic))) + os.path.join(srcdir, non_generic_lic), None, None)) + non_generic_lics[non_generic_lic] = license_type else: # Add explicity avoid of CLOSED license because this isn't generic if license_type != 'CLOSED': @@ -469,41 +485,40 @@ def find_license_files(d): if not generic_directory: bb.fatal("COMMON_LICENSE_DIR is unset. Please set this in your distro config") - if not lic_files: - # No recipe should have an invalid license file. This is checked else - # where, but let's be pedantic - bb.note(pn + ": Recipe file does not have license file information.") - return lic_files_paths - for url in lic_files.split(): try: (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url) except bb.fetch.MalformedUrl: - bb.fatal("%s: LIC_FILES_CHKSUM contains an invalid URL: %s" % (d.getVar('PF', True), url)) + bb.fatal("%s: LIC_FILES_CHKSUM contains an invalid URL: %s" % (d.getVar('PF'), url)) # We want the license filename and path - chksum = parm['md5'] if 'md5' in parm else parm['sha256'] - lic_chksums[path] = chksum + chksum = parm.get('md5', None) + beginline = parm.get('beginline') + endline = parm.get('endline') + lic_chksums[path] = (chksum, beginline, endline) v = FindVisitor() try: - v.visit_string(d.getVar('LICENSE', True)) + v.visit_string(d.getVar('LICENSE')) except oe.license.InvalidLicense as exc: - bb.fatal('%s: %s' % (d.getVar('PF', True), exc)) + bb.fatal('%s: %s' % (d.getVar('PF'), exc)) except SyntaxError: - bb.warn("%s: Failed to parse it's LICENSE field." % (d.getVar('PF', True))) - + bb.warn("%s: Failed to parse it's LICENSE field." 
% (d.getVar('PF'))) # Add files from LIC_FILES_CHKSUM to list of license files lic_chksum_paths = defaultdict(OrderedDict) - for path, chksum in lic_chksums.items(): - lic_chksum_paths[os.path.basename(path)][chksum] = os.path.join(srcdir, path) + for path, data in sorted(lic_chksums.items()): + lic_chksum_paths[os.path.basename(path)][data] = (os.path.join(srcdir, path), data[1], data[2]) for basename, files in lic_chksum_paths.items(): if len(files) == 1: - lic_files_paths.append((basename, list(files.values())[0])) + # Don't copy again a LICENSE already handled as non-generic + if basename in non_generic_lics: + continue + data = list(files.values())[0] + lic_files_paths.append(tuple([basename] + list(data))) else: # If there are multiple different license files with identical # basenames we rename them to .0, .1, ... - for i, path in enumerate(files.values()): - lic_files_paths.append(("%s.%d" % (basename, i), path)) + for i, data in enumerate(files.values()): + lic_files_paths.append(tuple(["%s.%d" % (basename, i)] + list(data))) return lic_files_paths @@ -511,7 +526,7 @@ def return_spdx(d, license): """ This function returns the spdx mapping of a license if it exists. """ - return d.getVarFlag('SPDXLICENSEMAP', license, True) + return d.getVarFlag('SPDXLICENSEMAP', license) def canonical_license(d, license): """ @@ -520,9 +535,9 @@ def canonical_license(d, license): 'X' if availabel and the tailing '+' (so GPLv3+ becomes GPL-3.0+), or the passed license if there is no canonical form. """ - lic = d.getVarFlag('SPDXLICENSEMAP', license, True) or "" + lic = d.getVarFlag('SPDXLICENSEMAP', license) or "" if not lic and license.endswith('+'): - lic = d.getVarFlag('SPDXLICENSEMAP', license.rstrip('+'), True) + lic = d.getVarFlag('SPDXLICENSEMAP', license.rstrip('+')) if lic: lic += '+' return lic or license @@ -537,7 +552,7 @@ def expand_wildcard_licenses(d, wildcard_licenses): spdxmapkeys = d.getVarFlags('SPDXLICENSEMAP').keys() for wld_lic in wildcard_licenses: spdxflags = fnmatch.filter(spdxmapkeys, wld_lic) - licenses += [d.getVarFlag('SPDXLICENSEMAP', flag, True) for flag in spdxflags] + licenses += [d.getVarFlag('SPDXLICENSEMAP', flag) for flag in spdxflags] spdx_lics = (d.getVar('SRC_DISTRIBUTE_LICENSES', False) or '').split() for wld_lic in wildcard_licenses: @@ -548,7 +563,7 @@ def expand_wildcard_licenses(d, wildcard_licenses): def incompatible_license_contains(license, truevalue, falsevalue, d): license = canonical_license(d, license) - bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE', True) or "").split() + bad_licenses = (d.getVar('INCOMPATIBLE_LICENSE') or "").split() bad_licenses = expand_wildcard_licenses(d, bad_licenses) return truevalue if license in bad_licenses else falsevalue @@ -559,9 +574,9 @@ def incompatible_license(d, dont_want_licenses, package=None): as canonical (SPDX) names. """ import oe.license - license = d.getVar("LICENSE_%s" % package, True) if package else None + license = d.getVar("LICENSE_%s" % package) if package else None if not license: - license = d.getVar('LICENSE', True) + license = d.getVar('LICENSE') # Handles an "or" or two license sets provided by # flattened_licenses(), pick one that works if possible. 
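canonical_license() above maps a license name through the SPDXLICENSEMAP varflags and re-appends a trailing '+' when only the bare form has a mapping. The same shape without the datastore, using a small hypothetical mapping (the real flags come from the distro's license configuration, not from a dict like this):

    # Hypothetical subset of SPDXLICENSEMAP flags a distro might define.
    SPDXLICENSEMAP = {'GPLv2': 'GPL-2.0', 'GPLv3': 'GPL-3.0', 'LGPLv2.1': 'LGPL-2.1'}

    def canonical_license(license):
        # Map via SPDXLICENSEMAP; if only the '+'-less form is mapped,
        # re-append the '+' (so GPLv3+ becomes GPL-3.0+). Unknown licenses
        # are passed through unchanged.
        lic = SPDXLICENSEMAP.get(license, "")
        if not lic and license.endswith('+'):
            lic = SPDXLICENSEMAP.get(license.rstrip('+'), "")
            if lic:
                lic += '+'
        return lic or license

    print(canonical_license('GPLv3+'))  # GPL-3.0+
    print(canonical_license('MIT'))     # MIT (no mapping, passed through)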
@@ -572,7 +587,7 @@ def incompatible_license(d, dont_want_licenses, package=None): try: licenses = oe.license.flattened_licenses(license, choose_lic_set) except oe.license.LicenseError as exc: - bb.fatal('%s: %s' % (d.getVar('P', True), exc)) + bb.fatal('%s: %s' % (d.getVar('P'), exc)) return any(not oe.license.license_ok(canonical_license(d, l), \ dont_want_licenses) for l in licenses) @@ -620,16 +635,16 @@ def check_license_flags(d): def all_license_flags_match(license_flags, whitelist): """ Return first unmatched flag, None if all flags match """ - pn = d.getVar('PN', True) + pn = d.getVar('PN') split_whitelist = whitelist.split() for flag in license_flags.split(): if not license_flag_matches(flag, split_whitelist, pn): return flag return None - license_flags = d.getVar('LICENSE_FLAGS', True) + license_flags = d.getVar('LICENSE_FLAGS') if license_flags: - whitelist = d.getVar('LICENSE_FLAGS_WHITELIST', True) + whitelist = d.getVar('LICENSE_FLAGS_WHITELIST') if not whitelist: return license_flags unmatched_flag = all_license_flags_match(license_flags, whitelist) @@ -643,8 +658,8 @@ def check_license_format(d): Validate operators in LICENSES. No spaces are allowed between LICENSES. """ - pn = d.getVar('PN', True) - licenses = d.getVar('LICENSE', True) + pn = d.getVar('PN') + licenses = d.getVar('LICENSE') from oe.license import license_operator, license_operator_chars, license_pattern elements = list(filter(lambda x: x.strip(), license_operator.split(licenses))) diff --git a/import-layers/yocto-poky/meta/classes/live-vm-common.bbclass b/import-layers/yocto-poky/meta/classes/live-vm-common.bbclass index 734697f9e..27b137dec 100644 --- a/import-layers/yocto-poky/meta/classes/live-vm-common.bbclass +++ b/import-layers/yocto-poky/meta/classes/live-vm-common.bbclass @@ -4,11 +4,11 @@ def set_live_vm_vars(d, suffix): vars = ['GRUB_CFG', 'SYSLINUX_CFG', 'ROOT', 'LABELS', 'INITRD'] for var in vars: var_with_suffix = var + '_' + suffix - if d.getVar(var, True): + if d.getVar(var): bb.warn('Found potential conflicted var %s, please use %s rather than %s' % \ (var, var_with_suffix, var)) - elif d.getVar(var_with_suffix, True): - d.setVar(var, d.getVar(var_with_suffix, True)) + elif d.getVar(var_with_suffix): + d.setVar(var, d.getVar(var_with_suffix)) EFI = "${@bb.utils.contains("MACHINE_FEATURES", "efi", "1", "0", d)}" @@ -25,7 +25,7 @@ def pcbios(d): return pcbios PCBIOS = "${@pcbios(d)}" -PCBIOS_CLASS = "${@['','syslinux'][d.getVar('PCBIOS', True) == '1']}" +PCBIOS_CLASS = "${@['','syslinux'][d.getVar('PCBIOS') == '1']}" inherit ${EFI_CLASS} inherit ${PCBIOS_CLASS} diff --git a/import-layers/yocto-poky/meta/classes/manpages.bbclass b/import-layers/yocto-poky/meta/classes/manpages.bbclass new file mode 100644 index 000000000..d16237b89 --- /dev/null +++ b/import-layers/yocto-poky/meta/classes/manpages.bbclass @@ -0,0 +1,5 @@ +# Inherit this class to enable or disable building and installation of manpages +# depending on whether 'api-documentation' is in DISTRO_FEATURES. Such building +# tends to pull in the entire XML stack and other tools, so it's not enabled +# by default. 
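The new manpages.bbclass gates its behaviour on DISTRO_FEATURES through bb.utils.contains() in the PACKAGECONFIG_append line that follows. A rough, simplified re-implementation of that conditional, just to show the test being made (this is not the BitBake utility itself, and the feature string used here is illustrative):

    def contains(feature_string, checkvalues, truevalue, falsevalue):
        # Simplified stand-in for bb.utils.contains(): return truevalue only
        # when every word in checkvalues appears in the space-separated
        # feature_string, otherwise falsevalue.
        present = set(feature_string.split())
        wanted = set(checkvalues.split())
        return truevalue if wanted.issubset(present) else falsevalue

    # With 'api-documentation' enabled, recipes inheriting manpages get the
    # 'manpages' PACKAGECONFIG appended; otherwise nothing is added.
    distro_features = "systemd api-documentation"  # hypothetical DISTRO_FEATURES
    print(contains(distro_features, "api-documentation", "manpages", ""))  # manpages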
+PACKAGECONFIG_append_class-target = " ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'manpages', '', d)}" diff --git a/import-layers/yocto-poky/meta/classes/metadata_scm.bbclass b/import-layers/yocto-poky/meta/classes/metadata_scm.bbclass index 2e6fac209..fa791f04c 100644 --- a/import-layers/yocto-poky/meta/classes/metadata_scm.bbclass +++ b/import-layers/yocto-poky/meta/classes/metadata_scm.bbclass @@ -26,7 +26,7 @@ def base_detect_branch(d): return "" def base_get_scmbasepath(d): - return os.path.join(d.getVar('COREBASE', True), 'meta') + return os.path.join(d.getVar('COREBASE'), 'meta') def base_get_metadata_monotone_branch(path, d): monotone_branch = "" diff --git a/import-layers/yocto-poky/meta/classes/migrate_localcount.bbclass b/import-layers/yocto-poky/meta/classes/migrate_localcount.bbclass index aa0df8bb7..810a54131 100644 --- a/import-layers/yocto-poky/meta/classes/migrate_localcount.bbclass +++ b/import-layers/yocto-poky/meta/classes/migrate_localcount.bbclass @@ -6,12 +6,12 @@ python migrate_localcount_handler () { if not e.data: return - pv = e.data.getVar('PV', True) + pv = e.data.getVar('PV') if not 'AUTOINC' in pv: return localcounts = bb.persist_data.persist('BB_URI_LOCALCOUNT', e.data) - pn = e.data.getVar('PN', True) + pn = e.data.getVar('PN') revs = localcounts.get_by_pattern('%%-%s_rev' % pn) counts = localcounts.get_by_pattern('%%-%s_count' % pn) if not revs or not counts: @@ -21,10 +21,10 @@ python migrate_localcount_handler () { bb.warn("The number of revs and localcounts don't match in %s" % pn) return - version = e.data.getVar('PRAUTOINX', True) + version = e.data.getVar('PRAUTOINX') srcrev = bb.fetch2.get_srcrev(e.data) base_ver = 'AUTOINC-%s' % version[:version.find(srcrev)] - pkgarch = e.data.getVar('PACKAGE_ARCH', True) + pkgarch = e.data.getVar('PACKAGE_ARCH') value = max(int(count) for count in counts) if len(revs) == 1: @@ -33,8 +33,8 @@ python migrate_localcount_handler () { else: value += 1 - bb.utils.mkdirhier(e.data.getVar('PRSERV_DUMPDIR', True)) - df = e.data.getVar('LOCALCOUNT_DUMPFILE', True) + bb.utils.mkdirhier(e.data.getVar('PRSERV_DUMPDIR')) + df = e.data.getVar('LOCALCOUNT_DUMPFILE') flock = bb.utils.lockfile("%s.lock" % df) with open(df, 'a') as fd: fd.write('PRAUTO$%s$%s$%s = "%s"\n' % diff --git a/import-layers/yocto-poky/meta/classes/mime.bbclass b/import-layers/yocto-poky/meta/classes/mime.bbclass index 721c73fcf..0df15831c 100644 --- a/import-layers/yocto-poky/meta/classes/mime.bbclass +++ b/import-layers/yocto-poky/meta/classes/mime.bbclass @@ -1,4 +1,5 @@ -DEPENDS += "shared-mime-info-native shared-mime-info" +DEPENDS += "shared-mime-info" +PACKAGE_WRITE_DEPS += "shared-mime-info-native" mime_postinst() { if [ "$1" = configure ]; then @@ -28,8 +29,8 @@ fi python populate_packages_append () { import re - packages = d.getVar('PACKAGES', True).split() - pkgdest = d.getVar('PKGDEST', True) + packages = d.getVar('PACKAGES').split() + pkgdest = d.getVar('PKGDEST') for pkg in packages: mime_dir = '%s/%s/usr/share/mime/packages' % (pkgdest, pkg) @@ -41,15 +42,15 @@ python populate_packages_append () { mimes.append(f) if mimes: bb.note("adding mime postinst and postrm scripts to %s" % pkg) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) + postinst = d.getVar('pkg_postinst_%s' % pkg) if not postinst: postinst = '#!/bin/sh\n' - postinst += d.getVar('mime_postinst', True) + postinst += d.getVar('mime_postinst') d.setVar('pkg_postinst_%s' % pkg, postinst) - postrm = d.getVar('pkg_postrm_%s' % pkg, True) + postrm = 
d.getVar('pkg_postrm_%s' % pkg) if not postrm: postrm = '#!/bin/sh\n' - postrm += d.getVar('mime_postrm', True) + postrm += d.getVar('mime_postrm') d.setVar('pkg_postrm_%s' % pkg, postrm) bb.note("adding shared-mime-info-data dependency to %s" % pkg) d.appendVar('RDEPENDS_' + pkg, " shared-mime-info-data") diff --git a/import-layers/yocto-poky/meta/classes/mirrors.bbclass b/import-layers/yocto-poky/meta/classes/mirrors.bbclass index 2cdc71b6e..4ad814ff2 100644 --- a/import-layers/yocto-poky/meta/classes/mirrors.bbclass +++ b/import-layers/yocto-poky/meta/classes/mirrors.bbclass @@ -27,7 +27,7 @@ ${GNUPG_MIRROR} ftp://mirrors.dotsrc.org/gcrypt \n \ ftp://dante.ctan.org/tex-archive ftp://ftp.fu-berlin.de/tex/CTAN \n \ ftp://dante.ctan.org/tex-archive http://sunsite.sut.ac.jp/pub/archives/ctan/ \n \ ftp://dante.ctan.org/tex-archive http://ctan.unsw.edu.au/ \n \ -ftp://ftp.gnutls.org/gcrypt/gnutls ${GNUPG_MIRROR} \n \ +ftp://ftp.gnutls.org/gcrypt/gnutls ${GNUPG_MIRROR}/gnutls \n \ http://ftp.info-zip.org/pub/infozip/src/ http://mirror.switch.ch/ftp/mirror/infozip/src/ \n \ http://ftp.info-zip.org/pub/infozip/src/ ftp://sunsite.icm.edu.pl/pub/unix/archiving/info-zip/src/ \n \ ftp://lsof.itap.purdue.edu/pub/tools/unix/lsof/ ftp://ftp.cerias.purdue.edu/pub/tools/unix/sysutils/lsof/ \n \ @@ -54,7 +54,7 @@ p4://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n \ osc://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n \ https?$://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n \ ftp://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n \ -npm://.*/.* http://downloads.yoctoproject.org/mirror/sources/ \n \ +npm://.*/?.* http://downloads.yoctoproject.org/mirror/sources/ \n \ cvs://.*/.* http://sources.openembedded.org/ \n \ svn://.*/.* http://sources.openembedded.org/ \n \ git://.*/.* http://sources.openembedded.org/ \n \ @@ -64,7 +64,18 @@ p4://.*/.* http://sources.openembedded.org/ \n \ osc://.*/.* http://sources.openembedded.org/ \n \ https?$://.*/.* http://sources.openembedded.org/ \n \ ftp://.*/.* http://sources.openembedded.org/ \n \ -npm://.*/.* http://sources.openembedded.org/ \n \ +npm://.*/?.* http://sources.openembedded.org/ \n \ ${CPAN_MIRROR} http://cpan.metacpan.org/ \n \ ${CPAN_MIRROR} http://search.cpan.org/CPAN/ \n \ " + +# Use MIRRORS to provide git repo fallbacks using the https protocol, for cases +# where git native protocol fetches may fail due to local firewall rules, etc. 
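The comment above introduces the MIRRORS block that follows: each entry pairs a regex on the original URI with a rewrite in which HOST and PATH are substituted from the match, so a fetch that fails over the git native protocol is retried over https. A rough standalone sketch of what the final catch-all rule amounts to, for a hypothetical URL (the real substitution is performed by BitBake's fetcher mirror handling, not by helper code like this):

    import re

    def https_fallback(url):
        # Illustration of the catch-all entry
        #   git://.*/.*  ->  git://HOST/PATH;protocol=https
        # keep the host and path of the failed git:// URI and tack on
        # protocol=https so the retry goes over https.
        m = re.match(r'git://([^/]+)/(.*)', url)
        if not m:
            return url
        host, path = m.groups()
        return 'git://%s/%s;protocol=https' % (host, path)

    print(https_fallback('git://example.com/foo/bar.git'))
    # git://example.com/foo/bar.git;protocol=https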
+ +MIRRORS += "\ +git://anonscm.debian.org/.* git://anonscm.debian.org/git/PATH;protocol=https \n \ +git://git.gnome.org/.* git://git.gnome.org/browse/PATH;protocol=https \n \ +git://git.savannah.gnu.org/.* git://git.savannah.gnu.org/git/PATH;protocol=https \n \ +git://git.yoctoproject.org/.* git://git.yoctoproject.org/git/PATH;protocol=https \n \ +git://.*/.* git://HOST/PATH;protocol=https \n \ +" diff --git a/import-layers/yocto-poky/meta/classes/module.bbclass b/import-layers/yocto-poky/meta/classes/module.bbclass index 68e3d341a..802476bc7 100644 --- a/import-layers/yocto-poky/meta/classes/module.bbclass +++ b/import-layers/yocto-poky/meta/classes/module.bbclass @@ -1,15 +1,16 @@ -inherit module-base kernel-module-split +inherit module-base kernel-module-split pkgconfig -addtask make_scripts after do_patch before do_compile +addtask make_scripts after do_prepare_recipe_sysroot before do_compile do_make_scripts[lockfiles] = "${TMPDIR}/kernel-scripts.lock" do_make_scripts[depends] += "virtual/kernel:do_shared_workdir" EXTRA_OEMAKE += "KERNEL_SRC=${STAGING_KERNEL_DIR}" MODULES_INSTALL_TARGET ?= "modules_install" +MODULES_MODULE_SYMVERS_LOCATION ?= "" python __anonymous () { - depends = d.getVar('DEPENDS', True) + depends = d.getVar('DEPENDS') extra_symbols = [] for dep in depends.split(): if dep.startswith("kernel-module-"): @@ -30,15 +31,22 @@ module_do_compile() { module_do_install() { unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS - oe_runmake DEPMOD=echo INSTALL_MOD_PATH="${D}" \ + oe_runmake DEPMOD=echo MODLIB="${D}${nonarch_base_libdir}/modules/${KERNEL_VERSION}" \ CC="${KERNEL_CC}" LD="${KERNEL_LD}" \ O=${STAGING_KERNEL_BUILDDIR} \ ${MODULES_INSTALL_TARGET} - install -d -m0755 ${D}${includedir}/${BPN} - cp -a --no-preserve=ownership ${B}/Module.symvers ${D}${includedir}/${BPN} - # it doesn't actually seem to matter which path is specified here - sed -e 's:${B}/::g' -i ${D}${includedir}/${BPN}/Module.symvers + if [ ! -e "${B}/${MODULES_MODULE_SYMVERS_LOCATION}/Module.symvers" ] ; then + bbwarn "Module.symvers not found in ${B}/${MODULES_MODULE_SYMVERS_LOCATION}" + bbwarn "Please consider setting MODULES_MODULE_SYMVERS_LOCATION to a" + bbwarn "directory below B to get correct inter-module dependencies" + else + install -Dm0644 "${B}/${MODULES_MODULE_SYMVERS_LOCATION}"/Module.symvers ${D}${includedir}/${BPN}/Module.symvers + # Module.symvers contains absolute path to the build directory. + # While it doesn't actually seem to matter which path is specified, + # clear them out to avoid confusion + sed -e 's:${B}/::g' -i ${D}${includedir}/${BPN}/Module.symvers + fi } EXPORT_FUNCTIONS do_compile do_install diff --git a/import-layers/yocto-poky/meta/classes/multilib.bbclass b/import-layers/yocto-poky/meta/classes/multilib.bbclass index d5a31287a..ab04597f9 100644 --- a/import-layers/yocto-poky/meta/classes/multilib.bbclass +++ b/import-layers/yocto-poky/meta/classes/multilib.bbclass @@ -1,20 +1,20 @@ python multilib_virtclass_handler () { - cls = e.data.getVar("BBEXTENDCURR", True) - variant = e.data.getVar("BBEXTENDVARIANT", True) + cls = e.data.getVar("BBEXTENDCURR") + variant = e.data.getVar("BBEXTENDVARIANT") if cls != "multilib" or not variant: return - e.data.setVar('STAGING_KERNEL_DIR', e.data.getVar('STAGING_KERNEL_DIR', True)) + e.data.setVar('STAGING_KERNEL_DIR', e.data.getVar('STAGING_KERNEL_DIR')) # There should only be one kernel in multilib configs # We also skip multilib setup for module packages. 
- provides = (e.data.getVar("PROVIDES", True) or "").split() + provides = (e.data.getVar("PROVIDES") or "").split() if "virtual/kernel" in provides or bb.data.inherits_class('module-base', e.data): raise bb.parse.SkipPackage("We shouldn't have multilib variants for the kernel") - save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME", True) or "" + save_var_name=e.data.getVar("MULTILIB_SAVE_VARNAME") or "" for name in save_var_name.split(): - val=e.data.getVar(name, True) + val=e.data.getVar(name) if val: e.data.setVar(name + "_MULTILIB_ORIGINAL", val) @@ -26,7 +26,7 @@ python multilib_virtclass_handler () { if bb.data.inherits_class('image', e.data): e.data.setVar("MLPREFIX", variant + "-") e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False)) - e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT', True)) + e.data.setVar('SDKTARGETSYSROOT', e.data.getVar('SDKTARGETSYSROOT')) target_vendor = e.data.getVar("TARGET_VENDOR_" + "virtclass-multilib-" + variant, False) if target_vendor: e.data.setVar("TARGET_VENDOR", target_vendor) @@ -36,7 +36,6 @@ python multilib_virtclass_handler () { e.data.setVar("MLPREFIX", variant + "-") override = ":virtclass-multilib-" + variant e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override) - bb.data.update_data(e.data) return if bb.data.inherits_class('native', e.data): @@ -50,17 +49,23 @@ python multilib_virtclass_handler () { # Expand this since this won't work correctly once we set a multilib into place - e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True)) + e.data.setVar("ALL_MULTILIB_PACKAGE_ARCHS", e.data.getVar("ALL_MULTILIB_PACKAGE_ARCHS")) override = ":virtclass-multilib-" + variant + blacklist = e.data.getVarFlag('PNBLACKLIST', e.data.getVar('PN')) + if blacklist: + pn_new = variant + "-" + e.data.getVar('PN') + if not e.data.getVarFlag('PNBLACKLIST', pn_new): + e.data.setVarFlag('PNBLACKLIST', pn_new, blacklist) + e.data.setVar("MLPREFIX", variant + "-") e.data.setVar("PN", variant + "-" + e.data.getVar("PN", False)) e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + override) # Expand the WHITELISTs with multilib prefix for whitelist in ["WHITELIST_GPL-3.0", "LGPLv2_WHITELIST_GPL-3.0"]: - pkgs = e.data.getVar(whitelist, True) + pkgs = e.data.getVar(whitelist) for pkg in pkgs.split(): pkgs += " " + variant + "-" + pkg e.data.setVar(whitelist, pkgs) @@ -78,7 +83,7 @@ multilib_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise" STAGINGCC_prepend = "${BBEXTENDVARIANT}-" python __anonymous () { - variant = d.getVar("BBEXTENDVARIANT", True) + variant = d.getVar("BBEXTENDVARIANT") import oe.classextend @@ -88,7 +93,7 @@ python __anonymous () { clsextend.map_depends_variable("PACKAGE_INSTALL") clsextend.map_depends_variable("LINGUAS_INSTALL") clsextend.map_depends_variable("RDEPENDS") - pinstall = d.getVar("LINGUAS_INSTALL", True) + " " + d.getVar("PACKAGE_INSTALL", True) + pinstall = d.getVar("LINGUAS_INSTALL") + " " + d.getVar("PACKAGE_INSTALL") d.setVar("PACKAGE_INSTALL", pinstall) d.setVar("LINGUAS_INSTALL", "") # FIXME, we need to map this to something, not delete it! 
@@ -104,7 +109,7 @@ python __anonymous () { return clsextend.rename_packages() - clsextend.rename_package_variables((d.getVar("PACKAGEVARS", True) or "").split()) + clsextend.rename_package_variables((d.getVar("PACKAGEVARS") or "").split()) clsextend.map_packagevars() clsextend.map_regexp_variable("PACKAGES_DYNAMIC") @@ -119,7 +124,7 @@ PACKAGEFUNCS_append = " do_package_qa_multilib" python do_package_qa_multilib() { def check_mlprefix(pkg, var, mlprefix): - values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg), True) or d.getVar(var, True) or "") + values = bb.utils.explode_deps(d.getVar('%s_%s' % (var, pkg)) or d.getVar(var) or "") candidates = [] for i in values: if i.startswith('virtual/'): @@ -130,14 +135,14 @@ python do_package_qa_multilib() { candidates.append(i) if len(candidates) > 0: msg = "%s package %s - suspicious values '%s' in %s" \ - % (d.getVar('PN', True), pkg, ' '.join(candidates), var) + % (d.getVar('PN'), pkg, ' '.join(candidates), var) package_qa_handle_error("multilib", msg, d) - ml = d.getVar('MLPREFIX', True) + ml = d.getVar('MLPREFIX') if not ml: return - packages = d.getVar('PACKAGES', True) + packages = d.getVar('PACKAGES') for pkg in packages.split(): check_mlprefix(pkg, 'RDEPENDS', ml) check_mlprefix(pkg, 'RPROVIDES', ml) diff --git a/import-layers/yocto-poky/meta/classes/multilib_global.bbclass b/import-layers/yocto-poky/meta/classes/multilib_global.bbclass index 11ae2681f..fd0bfe127 100644 --- a/import-layers/yocto-poky/meta/classes/multilib_global.bbclass +++ b/import-layers/yocto-poky/meta/classes/multilib_global.bbclass @@ -1,7 +1,7 @@ def preferred_ml_updates(d): # If any PREFERRED_PROVIDER or PREFERRED_VERSION are set, # we need to mirror these variables in the multilib case; - multilibs = d.getVar('MULTILIBS', True) or "" + multilibs = d.getVar('MULTILIBS') or "" if not multilibs: return @@ -29,7 +29,6 @@ def preferred_ml_updates(d): localdata = bb.data.createCopy(d) override = ":virtclass-multilib-" + p localdata.setVar("OVERRIDES", localdata.getVar("OVERRIDES", False) + override) - bb.data.update_data(localdata) if "-canadian-" in pkg: newname = localdata.expand(v) else: @@ -57,7 +56,6 @@ def preferred_ml_updates(d): localdata = bb.data.createCopy(d) override = ":virtclass-multilib-" + p localdata.setVar("OVERRIDES", localdata.getVar("OVERRIDES", False) + override) - bb.data.update_data(localdata) newname = localdata.expand(prov) if newname != prov: newval = localdata.expand(val) @@ -80,7 +78,6 @@ def preferred_ml_updates(d): localdata = bb.data.createCopy(d) override = ":virtclass-multilib-" + p localdata.setVar("OVERRIDES", localdata.getVar("OVERRIDES", False) + override) - bb.data.update_data(localdata) newname = localdata.expand(prov) if newname != prov and not d.getVar(newname, False): d.setVar(newname, localdata.expand(newval)) @@ -102,7 +99,7 @@ def preferred_ml_updates(d): prov = prov.replace("virtual/", "") return "virtual/" + prefix + "-" + prov - mp = (d.getVar("MULTI_PROVIDER_WHITELIST", True) or "").split() + mp = (d.getVar("MULTI_PROVIDER_WHITELIST") or "").split() extramp = [] for p in mp: if p.endswith("-native") or "-crosssdk-" in p or p.startswith(("nativesdk-", "virtual/nativesdk-")) or 'cross-canadian' in p: @@ -111,14 +108,14 @@ def preferred_ml_updates(d): extramp.append(translate_provide(pref, p)) d.setVar("MULTI_PROVIDER_WHITELIST", " ".join(mp + extramp)) - abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split() + abisafe = (d.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() extras = [] for p in 
prefixes: for a in abisafe: extras.append(p + "-" + a) d.appendVar("SIGGEN_EXCLUDERECIPES_ABISAFE", " " + " ".join(extras)) - siggen_exclude = (d.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split() + siggen_exclude = (d.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() extras = [] for p in prefixes: for a in siggen_exclude: @@ -128,7 +125,7 @@ def preferred_ml_updates(d): python multilib_virtclass_handler_vendor () { if isinstance(e, bb.event.ConfigParsed): - for v in e.data.getVar("MULTILIB_VARIANTS", True).split(): + for v in e.data.getVar("MULTILIB_VARIANTS").split(): if e.data.getVar("TARGET_VENDOR_virtclass-multilib-" + v, False) is None: e.data.setVar("TARGET_VENDOR_virtclass-multilib-" + v, e.data.getVar("TARGET_VENDOR", False) + "ml" + v) preferred_ml_updates(e.data) @@ -137,17 +134,15 @@ addhandler multilib_virtclass_handler_vendor multilib_virtclass_handler_vendor[eventmask] = "bb.event.ConfigParsed" python multilib_virtclass_handler_global () { - if not e.data: + variant = e.data.getVar("BBEXTENDVARIANT") + if variant: return - variant = e.data.getVar("BBEXTENDVARIANT", True) - - if isinstance(e, bb.event.RecipeParsed) and not variant: - if bb.data.inherits_class('kernel', e.data) or \ + if bb.data.inherits_class('kernel', e.data) or \ bb.data.inherits_class('module-base', e.data) or \ (bb.data.inherits_class('allarch', e.data) and\ not bb.data.inherits_class('packagegroup', e.data)): - variants = (e.data.getVar("MULTILIB_VARIANTS", True) or "").split() + variants = (e.data.getVar("MULTILIB_VARIANTS") or "").split() import oe.classextend clsextends = [] @@ -155,21 +150,21 @@ python multilib_virtclass_handler_global () { clsextends.append(oe.classextend.ClassExtender(variant, e.data)) # Process PROVIDES - origprovs = provs = e.data.getVar("PROVIDES", True) or "" + origprovs = provs = e.data.getVar("PROVIDES") or "" for clsextend in clsextends: provs = provs + " " + clsextend.map_variable("PROVIDES", setvar=False) e.data.setVar("PROVIDES", provs) # Process RPROVIDES - origrprovs = rprovs = e.data.getVar("RPROVIDES", True) or "" + origrprovs = rprovs = e.data.getVar("RPROVIDES") or "" for clsextend in clsextends: rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES", setvar=False) if rprovs.strip(): e.data.setVar("RPROVIDES", rprovs) # Process RPROVIDES_${PN}... - for pkg in (e.data.getVar("PACKAGES", True) or "").split(): - origrprovs = rprovs = e.data.getVar("RPROVIDES_%s" % pkg, True) or "" + for pkg in (e.data.getVar("PACKAGES") or "").split(): + origrprovs = rprovs = e.data.getVar("RPROVIDES_%s" % pkg) or "" for clsextend in clsextends: rprovs = rprovs + " " + clsextend.map_variable("RPROVIDES_%s" % pkg, setvar=False) rprovs = rprovs + " " + clsextend.extname + "-" + pkg @@ -177,5 +172,5 @@ python multilib_virtclass_handler_global () { } addhandler multilib_virtclass_handler_global -multilib_virtclass_handler_global[eventmask] = "bb.event.RecipePreFinalise bb.event.RecipeParsed" +multilib_virtclass_handler_global[eventmask] = "bb.event.RecipeParsed" diff --git a/import-layers/yocto-poky/meta/classes/multilib_header.bbclass b/import-layers/yocto-poky/meta/classes/multilib_header.bbclass index 304c28e77..e03f5b13b 100644 --- a/import-layers/yocto-poky/meta/classes/multilib_header.bbclass +++ b/import-layers/yocto-poky/meta/classes/multilib_header.bbclass @@ -13,13 +13,9 @@ oe_multilib_header() { ;; *) esac - # We use - # For ARM: We don't support multilib builds. # For MIPS: "n32" is a special case, which needs to be # distinct from both 64-bit and 32-bit. 
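With the ARM early-return removed, oe_multilib_header() now wraps ARM headers like any other architecture; the case statement that follows only special-cases MIPS n32 and otherwise keys the wrapper ident off the word size. A small sketch of that selection, with illustrative SITEINFO_BITS and MIPSPKGSFX_ABI values:

    def multilib_header_ident(target_arch, mips_abi_suffix, siteinfo_bits):
        # Mirrors the case statement in oe_multilib_header(): MIPS "n32" is
        # kept distinct from plain 32-bit and 64-bit; every other arch,
        # including ARM now, just uses the word size as the ident.
        if target_arch.startswith("mips") and mips_abi_suffix == "-n32":
            return "n32"
        return siteinfo_bits

    print(multilib_header_ident("mips64", "-n32", "64"))  # n32
    print(multilib_header_ident("arm", "", "32"))         # 32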
case ${TARGET_ARCH} in - arm*) return - ;; mips*) case "${MIPSPKGSFX_ABI}" in "-n32") ident=n32 @@ -31,9 +27,6 @@ oe_multilib_header() { ;; *) ident=${SITEINFO_BITS} esac - if echo ${TARGET_ARCH} | grep -q arm; then - return - fi for each_header in "$@" ; do if [ ! -f "${D}/${includedir}/$each_header" ]; then bberror "oe_multilib_header: Unable to find header $each_header." diff --git a/import-layers/yocto-poky/meta/classes/native.bbclass b/import-layers/yocto-poky/meta/classes/native.bbclass index 143f8a914..6b7f3dd76 100644 --- a/import-layers/yocto-poky/meta/classes/native.bbclass +++ b/import-layers/yocto-poky/meta/classes/native.bbclass @@ -95,7 +95,7 @@ libdir .= "${NATIVE_PACKAGE_PATH_SUFFIX}" libexecdir .= "${NATIVE_PACKAGE_PATH_SUFFIX}" do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}/${STAGING_DIR_NATIVE}/" -do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_NATIVE}/" +do_populate_sysroot[sstate-outputdirs] = "${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}" # Since we actually install these into situ there is no staging prefix STAGING_DIR_HOST = "" @@ -112,22 +112,33 @@ PKG_CONFIG_SYSTEM_INCLUDE_PATH[unexport] = "1" LIBCOVERRIDE = "" CLASSOVERRIDE = "class-native" MACHINEOVERRIDES = "" +MACHINE_FEATURES = "" PATH_prepend = "${COREBASE}/scripts/native-intercept:" +# This class encodes staging paths into its scripts data so can only be +# reused if we manipulate the paths. +SSTATE_SCAN_CMD ?= "${SSTATE_SCAN_CMD_NATIVE}" + python native_virtclass_handler () { - classextend = e.data.getVar('BBCLASSEXTEND', True) or "" - if "native" not in classextend: + pn = e.data.getVar("PN") + if not pn.endswith("-native"): return - pn = e.data.getVar("PN", True) - if not pn.endswith("-native"): + # Set features here to prevent appends and distro features backfill + # from modifying native distro features + features = set(d.getVar("DISTRO_FEATURES_NATIVE").split()) + filtered = set(bb.utils.filter("DISTRO_FEATURES", d.getVar("DISTRO_FEATURES_FILTER_NATIVE"), d).split()) + d.setVar("DISTRO_FEATURES", " ".join(sorted(features | filtered))) + + classextend = e.data.getVar('BBCLASSEXTEND') or "" + if "native" not in classextend: return def map_dependencies(varname, d, suffix = ""): if suffix: varname = varname + "_" + suffix - deps = d.getVar(varname, True) + deps = d.getVar(varname) if not deps: return deps = bb.utils.explode_deps(deps) @@ -146,14 +157,14 @@ python native_virtclass_handler () { e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-native") map_dependencies("DEPENDS", e.data) - for pkg in [e.data.getVar("PN", True), "", "${PN}"]: + for pkg in [e.data.getVar("PN"), "", "${PN}"]: map_dependencies("RDEPENDS", e.data, pkg) map_dependencies("RRECOMMENDS", e.data, pkg) map_dependencies("RSUGGESTS", e.data, pkg) map_dependencies("RPROVIDES", e.data, pkg) map_dependencies("RREPLACES", e.data, pkg) - provides = e.data.getVar("PROVIDES", True) + provides = e.data.getVar("PROVIDES") nprovides = [] for prov in provides.split(): if prov.find(pn) != -1: @@ -170,6 +181,11 @@ python native_virtclass_handler () { addhandler native_virtclass_handler native_virtclass_handler[eventmask] = "bb.event.RecipePreFinalise" +python do_addto_recipe_sysroot () { + bb.build.exec_func("extend_recipe_sysroot", d) +} +addtask addto_recipe_sysroot after do_populate_sysroot + inherit nopackages do_packagedata[stamp-extra-info] = "" diff --git a/import-layers/yocto-poky/meta/classes/nativesdk.bbclass b/import-layers/yocto-poky/meta/classes/nativesdk.bbclass index 31dde4a90..69fb45c8a 100644 
--- a/import-layers/yocto-poky/meta/classes/nativesdk.bbclass +++ b/import-layers/yocto-poky/meta/classes/nativesdk.bbclass @@ -25,9 +25,7 @@ PACKAGE_ARCHS = "${SDK_PACKAGE_ARCHS}" DEPENDS_append = " chrpath-replacement-native" EXTRANATIVEPATH += "chrpath-native" -STAGING_DIR_HOST = "${STAGING_DIR}/${MULTIMACH_HOST_SYS}" -STAGING_DIR_TARGET = "${STAGING_DIR}/${MULTIMACH_TARGET_SYS}" -PKGDATA_DIR = "${STAGING_DIR_HOST}/pkgdata" +PKGDATA_DIR = "${TMPDIR}/pkgdata/${SDK_SYS}" HOST_ARCH = "${SDK_ARCH}" HOST_VENDOR = "${SDK_VENDOR}" @@ -45,6 +43,10 @@ TARGET_PREFIX = "${SDK_PREFIX}" TARGET_CC_ARCH = "${SDK_CC_ARCH}" TARGET_LD_ARCH = "${SDK_LD_ARCH}" TARGET_AS_ARCH = "${SDK_AS_ARCH}" +TARGET_CPPFLAGS = "${BUILDSDK_CPPFLAGS}" +TARGET_CFLAGS = "${BUILDSDK_CFLAGS}" +TARGET_CXXFLAGS = "${BUILDSDK_CXXFLAGS}" +TARGET_LDFLAGS = "${BUILDSDK_LDFLAGS}" TARGET_FPU = "" EXTRA_OECONF_GCC_FLOAT = "" @@ -64,17 +66,23 @@ export PKG_CONFIG_DIR = "${STAGING_DIR_HOST}${libdir}/pkgconfig" export PKG_CONFIG_SYSROOT_DIR = "${STAGING_DIR_HOST}" python nativesdk_virtclass_handler () { - pn = e.data.getVar("PN", True) + pn = e.data.getVar("PN") if not (pn.endswith("-nativesdk") or pn.startswith("nativesdk-")): return + # Set features here to prevent appends and distro features backfill + # from modifying nativesdk distro features + features = set(d.getVar("DISTRO_FEATURES_NATIVESDK").split()) + filtered = set(bb.utils.filter("DISTRO_FEATURES", d.getVar("DISTRO_FEATURES_FILTER_NATIVESDK"), d).split()) + d.setVar("DISTRO_FEATURES", " ".join(sorted(features | filtered))) + e.data.setVar("MLPREFIX", "nativesdk-") - e.data.setVar("PN", "nativesdk-" + e.data.getVar("PN", True).replace("-nativesdk", "").replace("nativesdk-", "")) + e.data.setVar("PN", "nativesdk-" + e.data.getVar("PN").replace("-nativesdk", "").replace("nativesdk-", "")) e.data.setVar("OVERRIDES", e.data.getVar("OVERRIDES", False) + ":virtclass-nativesdk") } python () { - pn = d.getVar("PN", True) + pn = d.getVar("PN") if not pn.startswith("nativesdk-"): return @@ -82,7 +90,7 @@ python () { clsextend = oe.classextend.NativesdkClassExtender("nativesdk", d) clsextend.rename_packages() - clsextend.rename_package_variables((d.getVar("PACKAGEVARS", True) or "").split()) + clsextend.rename_package_variables((d.getVar("PACKAGEVARS") or "").split()) clsextend.map_depends_variable("DEPENDS") clsextend.map_packagevars() diff --git a/import-layers/yocto-poky/meta/classes/npm.bbclass b/import-layers/yocto-poky/meta/classes/npm.bbclass index fce4c1146..a69bedbb2 100644 --- a/import-layers/yocto-poky/meta/classes/npm.bbclass +++ b/import-layers/yocto-poky/meta/classes/npm.bbclass @@ -13,7 +13,8 @@ def npm_oe_arch_map(target_arch, d): elif re.match('arm64$', target_arch): return 'arm' return target_arch -NPM_ARCH ?= "${@npm_oe_arch_map(d.getVar('TARGET_ARCH', True), d)}" +NPM_ARCH ?= "${@npm_oe_arch_map(d.getVar('TARGET_ARCH'), d)}" +NPM_INSTALL_DEV = "0" npm_do_compile() { # Copy in any additionally fetched modules @@ -23,17 +24,32 @@ npm_do_compile() { # changing the home directory to the working directory, the .npmrc will # be created in this directory export HOME=${WORKDIR} - npm config set dev false + if [ "${NPM_INSTALL_DEV}" = "1" ]; then + npm config set dev true + else + npm config set dev false + fi npm set cache ${WORKDIR}/npm_cache # clear cache before every build npm cache clear # Install pkg into ${S} without going to the registry - npm --arch=${NPM_ARCH} --target_arch=${NPM_ARCH} --production --no-registry install + if [ "${NPM_INSTALL_DEV}" = "1" ]; then + npm 
--arch=${NPM_ARCH} --target_arch=${NPM_ARCH} --no-registry install + else + npm --arch=${NPM_ARCH} --target_arch=${NPM_ARCH} --production --no-registry install + fi } npm_do_install() { + # changing the home directory to the working directory, the .npmrc will + # be created in this directory + export HOME=${WORKDIR} mkdir -p ${NPM_INSTALLDIR}/ - cp -a ${S}/* ${NPM_INSTALLDIR}/ --no-preserve=ownership + npm install --prefix ${D}${prefix} -g --arch=${NPM_ARCH} --target_arch=${NPM_ARCH} --production --no-registry + if [ -d ${D}${prefix}/etc ] ; then + # This will be empty + rmdir ${D}${prefix}/etc + fi } python populate_packages_prepend () { @@ -55,7 +71,7 @@ python populate_packages_prepend () { description = pdata.get('description', None) if description: d.setVar('SUMMARY_%s' % expanded_pkgname, description.replace(u"\u2018", "'").replace(u"\u2019", "'")) - d.appendVar('RDEPENDS_%s' % d.getVar('PN', True), ' %s' % ' '.join(pkgnames).replace('_', '-')) + d.appendVar('RDEPENDS_%s' % d.getVar('PN'), ' %s' % ' '.join(pkgnames).replace('_', '-')) } FILES_${PN} += " \ diff --git a/import-layers/yocto-poky/meta/classes/oelint.bbclass b/import-layers/yocto-poky/meta/classes/oelint.bbclass index c4febc2cf..2589d3405 100644 --- a/import-layers/yocto-poky/meta/classes/oelint.bbclass +++ b/import-layers/yocto-poky/meta/classes/oelint.bbclass @@ -1,7 +1,7 @@ addtask lint before do_build do_lint[nostamp] = "1" python do_lint() { - pkgname = d.getVar("PN", True) + pkgname = d.getVar("PN") ############################## # Test that DESCRIPTION exists @@ -35,7 +35,7 @@ python do_lint() { # Check that all patches have Signed-off-by and Upstream-Status # srcuri = d.getVar("SRC_URI", False).split() - fpaths = (d.getVar('FILESPATH', True) or '').split(':') + fpaths = (d.getVar('FILESPATH') or '').split(':') def findPatch(patchname): for dir in fpaths: diff --git a/import-layers/yocto-poky/meta/classes/own-mirrors.bbclass b/import-layers/yocto-poky/meta/classes/own-mirrors.bbclass index 12b42675b..0296d545b 100644 --- a/import-layers/yocto-poky/meta/classes/own-mirrors.bbclass +++ b/import-layers/yocto-poky/meta/classes/own-mirrors.bbclass @@ -9,5 +9,5 @@ p4://.*/.* ${SOURCE_MIRROR_URL} osc://.*/.* ${SOURCE_MIRROR_URL} https?$://.*/.* ${SOURCE_MIRROR_URL} ftp://.*/.* ${SOURCE_MIRROR_URL} -npm://.*/.* ${SOURCE_MIRROR_URL} +npm://.*/?.* ${SOURCE_MIRROR_URL} } diff --git a/import-layers/yocto-poky/meta/classes/package.bbclass b/import-layers/yocto-poky/meta/classes/package.bbclass index a6f0a7a63..a03c05b9f 100644 --- a/import-layers/yocto-poky/meta/classes/package.bbclass +++ b/import-layers/yocto-poky/meta/classes/package.bbclass @@ -54,6 +54,14 @@ ALL_MULTILIB_PACKAGE_ARCHS = "${@all_multilib_tune_values(d, 'PACKAGE_ARCHS')}" # rpm is used for the per-file dependency identification PACKAGE_DEPENDS += "rpm-native" + +# If your postinstall can execute at rootfs creation time rather than on +# target but depends on a native/cross tool in order to execute, you need to +# list that tool in PACKAGE_WRITE_DEPENDS. Target package dependencies belong +# in the package dependencies as normal, this is just for native/cross support +# tools at rootfs build time. 
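For illustration of the note above: a recipe whose pkg_postinst can run at rootfs-creation time but needs a native helper would name that helper in PACKAGE_WRITE_DEPS so it is staged before the package-write tasks run. A minimal, hypothetical recipe fragment (the tool name is an example only, not part of this change):

    # ensure qemu-native is populated into the sysroot before do_package_write_*,
    # because the postinst is executed while the rootfs is being assembled
    PACKAGE_WRITE_DEPS += "qemu-native"

The do_package_write_deb/do_package_write_ipk depends additions later in this change expand each such entry with a :do_populate_sysroot task suffix.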
+PACKAGE_WRITE_DEPS ??= "" + def legitimize_package_name(s): """ Make sure package names are legitimate strings @@ -120,7 +128,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst """ - dvar = d.getVar('PKGD', True) + dvar = d.getVar('PKGD') root = d.expand(root) output_pattern = d.expand(output_pattern) extra_depends = d.expand(extra_depends) @@ -130,7 +138,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst if not os.path.exists(dvar + root): return [] - ml = d.getVar("MLPREFIX", True) + ml = d.getVar("MLPREFIX") if ml: if not output_pattern.startswith(ml): output_pattern = ml + output_pattern @@ -145,7 +153,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst extra_depends = " ".join(newdeps) - packages = d.getVar('PACKAGES', True).split() + packages = d.getVar('PACKAGES').split() split_packages = set() if postinst: @@ -163,7 +171,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst objs.append(relpath) if extra_depends == None: - extra_depends = d.getVar("PN", True) + extra_depends = d.getVar("PN") if not summary: summary = description @@ -189,7 +197,7 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst packages = [pkg] + packages else: packages.append(pkg) - oldfiles = d.getVar('FILES_' + pkg, True) + oldfiles = d.getVar('FILES_' + pkg) newfile = os.path.join(root, o) # These names will be passed through glob() so if the filename actually # contains * or ? (rare, but possible) we need to handle that specially @@ -214,9 +222,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst d.setVar('FILES_' + pkg, oldfiles + " " + newfile) if extra_depends != '': d.appendVar('RDEPENDS_' + pkg, ' ' + extra_depends) - if not d.getVar('DESCRIPTION_' + pkg, True): + if not d.getVar('DESCRIPTION_' + pkg): d.setVar('DESCRIPTION_' + pkg, description % on) - if not d.getVar('SUMMARY_' + pkg, True): + if not d.getVar('SUMMARY_' + pkg): d.setVar('SUMMARY_' + pkg, summary % on) if postinst: d.setVar('pkg_postinst_' + pkg, postinst) @@ -231,9 +239,9 @@ def do_split_packages(d, root, file_regex, output_pattern, description, postinst PACKAGE_DEPENDS += "file-native" python () { - if d.getVar('PACKAGES', True) != '': + if d.getVar('PACKAGES') != '': deps = "" - for dep in (d.getVar('PACKAGE_DEPENDS', True) or "").split(): + for dep in (d.getVar('PACKAGE_DEPENDS') or "").split(): deps += " %s:do_populate_sysroot" % dep d.appendVarFlag('do_package', 'depends', deps) @@ -286,14 +294,14 @@ def files_from_filevars(filevars): # Called in package_.bbclass to get the correct list of configuration files def get_conffiles(pkg, d): - pkgdest = d.getVar('PKGDEST', True) + pkgdest = d.getVar('PKGDEST') root = os.path.join(pkgdest, pkg) cwd = os.getcwd() os.chdir(root) - conffiles = d.getVar('CONFFILES_%s' % pkg, True); + conffiles = d.getVar('CONFFILES_%s' % pkg); if conffiles == None: - conffiles = d.getVar('CONFFILES', True) + conffiles = d.getVar('CONFFILES') if conffiles == None: conffiles = "" conffiles = conffiles.split() @@ -318,7 +326,7 @@ def get_conffiles(pkg, d): return conf_list def checkbuildpath(file, d): - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') with open(file) as f: file_content = f.read() if tmpdir in file_content: @@ -335,9 +343,9 @@ def splitdebuginfo(file, debugfile, debugsrcdir, sourcefile, d): import stat - dvar = d.getVar('PKGD', True) - objcopy = d.getVar("OBJCOPY", True) - debugedit 
= d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit") + dvar = d.getVar('PKGD') + objcopy = d.getVar("OBJCOPY") + debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/debugedit") # We ignore kernel modules, we don't generate debug info files. if file.find("/lib/modules/") != -1 and file.endswith(".ko"): @@ -382,11 +390,11 @@ def copydebugsources(debugsrcdir, d): sourcefile = d.expand("${WORKDIR}/debugsources.list") if debugsrcdir and os.path.isfile(sourcefile): - dvar = d.getVar('PKGD', True) - strip = d.getVar("STRIP", True) - objcopy = d.getVar("OBJCOPY", True) + dvar = d.getVar('PKGD') + strip = d.getVar("STRIP") + objcopy = d.getVar("OBJCOPY") debugedit = d.expand("${STAGING_LIBDIR_NATIVE}/rpm/bin/debugedit") - workdir = d.getVar("WORKDIR", True) + workdir = d.getVar("WORKDIR") workparentdir = os.path.dirname(os.path.dirname(workdir)) workbasedir = os.path.basename(os.path.dirname(workdir)) + "/" + os.path.basename(workdir) @@ -406,7 +414,8 @@ def copydebugsources(debugsrcdir, d): bb.utils.mkdirhier(basepath) cpath.updatecache(basepath) - processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '(|)$' | " + # Ignore files from the recipe sysroots (target and native) + processdebugsrc = "LC_ALL=C ; sort -z -u '%s' | egrep -v -z '((|)$|/.*recipe-sysroot.*/)' | " # We need to ignore files that are not actually ours # we do this by only paying attention to items from this package processdebugsrc += "fgrep -zw '%s' | " @@ -462,26 +471,23 @@ def get_package_additional_metadata (pkg_type, d): if d.getVar(key, False) is None: continue d.setVarFlag(key, "type", "list") - if d.getVarFlag(key, "separator", True) is None: + if d.getVarFlag(key, "separator") is None: d.setVarFlag(key, "separator", "\\n") metadata_fields = [field.strip() for field in oe.data.typed_value(key, d)] return "\n".join(metadata_fields).strip() def runtime_mapping_rename (varname, pkg, d): - #bb.note("%s before: %s" % (varname, d.getVar(varname, True))) - - if bb.data.inherits_class('packagegroup', d): - return + #bb.note("%s before: %s" % (varname, d.getVar(varname))) new_depends = {} - deps = bb.utils.explode_dep_versions2(d.getVar(varname, True) or "") + deps = bb.utils.explode_dep_versions2(d.getVar(varname) or "") for depend in deps: new_depend = get_package_mapping(depend, pkg, d) new_depends[new_depend] = deps[depend] d.setVar(varname, bb.utils.join_deps(new_depends, commasep=False)) - #bb.note("%s after: %s" % (varname, d.getVar(varname, True))) + #bb.note("%s after: %s" % (varname, d.getVar(varname))) # # Package functions suitable for inclusion in PACKAGEFUNCS @@ -492,34 +498,34 @@ python package_get_auto_pr() { import re # Support per recipe PRSERV_HOST - pn = d.getVar('PN', True) - host = d.getVar("PRSERV_HOST_" + pn, True) + pn = d.getVar('PN') + host = d.getVar("PRSERV_HOST_" + pn) if not (host is None): d.setVar("PRSERV_HOST", host) - pkgv = d.getVar("PKGV", True) + pkgv = d.getVar("PKGV") # PR Server not active, handle AUTOINC - if not d.getVar('PRSERV_HOST', True): + if not d.getVar('PRSERV_HOST'): if 'AUTOINC' in pkgv: d.setVar("PKGV", pkgv.replace("AUTOINC", "0")) return auto_pr = None - pv = d.getVar("PV", True) - version = d.getVar("PRAUTOINX", True) - pkgarch = d.getVar("PACKAGE_ARCH", True) - checksum = d.getVar("BB_TASKHASH", True) + pv = d.getVar("PV") + version = d.getVar("PRAUTOINX") + pkgarch = d.getVar("PACKAGE_ARCH") + checksum = d.getVar("BB_TASKHASH") - if d.getVar('PRSERV_LOCKDOWN', True): - auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch, True) or d.getVar('PRAUTO_' + version, 
True) or None + if d.getVar('PRSERV_LOCKDOWN'): + auto_pr = d.getVar('PRAUTO_' + version + '_' + pkgarch) or d.getVar('PRAUTO_' + version) or None if auto_pr is None: bb.fatal("Can NOT get PRAUTO from lockdown exported file") d.setVar('PRAUTO',str(auto_pr)) return try: - conn = d.getVar("__PRSERV_CONN", True) + conn = d.getVar("__PRSERV_CONN") if conn is None: conn = oe.prservice.prserv_make_conn(d) if conn is not None: @@ -540,19 +546,19 @@ python package_get_auto_pr() { LOCALEBASEPN ??= "${PN}" python package_do_split_locales() { - if (d.getVar('PACKAGE_NO_LOCALE', True) == '1'): + if (d.getVar('PACKAGE_NO_LOCALE') == '1'): bb.debug(1, "package requested not splitting locales") return - packages = (d.getVar('PACKAGES', True) or "").split() + packages = (d.getVar('PACKAGES') or "").split() - datadir = d.getVar('datadir', True) + datadir = d.getVar('datadir') if not datadir: bb.note("datadir not defined") return - dvar = d.getVar('PKGD', True) - pn = d.getVar('LOCALEBASEPN', True) + dvar = d.getVar('PKGD') + pn = d.getVar('LOCALEBASEPN') if pn + '-locale' in packages: packages.remove(pn + '-locale') @@ -565,10 +571,10 @@ python package_do_split_locales() { locales = os.listdir(localedir) - summary = d.getVar('SUMMARY', True) or pn - description = d.getVar('DESCRIPTION', True) or "" - locale_section = d.getVar('LOCALE_SECTION', True) - mlprefix = d.getVar('MLPREFIX', True) or "" + summary = d.getVar('SUMMARY') or pn + description = d.getVar('DESCRIPTION') or "" + locale_section = d.getVar('LOCALE_SECTION') + mlprefix = d.getVar('MLPREFIX') or "" for l in sorted(locales): ln = legitimize_package_name(l) pkg = pn + '-locale-' + ln @@ -589,14 +595,14 @@ python package_do_split_locales() { # glibc-localedata-translit* won't install as a dependency # for some other package which breaks meta-toolchain # Probably breaks since virtual-locale- isn't provided anywhere - #rdep = (d.getVar('RDEPENDS_%s' % pn, True) or "").split() + #rdep = (d.getVar('RDEPENDS_%s' % pn) or "").split() #rdep.append('%s-locale*' % pn) #d.setVar('RDEPENDS_%s' % pn, ' '.join(rdep)) } python perform_packagecopy () { - dest = d.getVar('D', True) - dvar = d.getVar('PKGD', True) + dest = d.getVar('D') + dvar = d.getVar('PKGD') # Start by package population by taking a copy of the installed # files to operate on @@ -730,8 +736,8 @@ python fixup_perms () { # paths are resolved via BBPATH def get_fs_perms_list(d): str = "" - bbpath = d.getVar('BBPATH', True) - fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES', True) + bbpath = d.getVar('BBPATH') + fs_perms_tables = d.getVar('FILESYSTEM_PERMS_TABLES') if not fs_perms_tables: fs_perms_tables = 'files/fs-perms.txt' for conf_file in fs_perms_tables.split(): @@ -740,7 +746,7 @@ python fixup_perms () { - dvar = d.getVar('PKGD', True) + dvar = d.getVar('PKGD') fs_perms_table = {} fs_link_table = {} @@ -769,10 +775,10 @@ python fixup_perms () { 'oldincludedir' ] for path in target_path_vars: - dir = d.getVar(path, True) or "" + dir = d.getVar(path) or "" if dir == "": continue - fs_perms_table[dir] = fs_perms_entry(bb.data.expand("%s 0755 root root false - - -" % (dir), d)) + fs_perms_table[dir] = fs_perms_entry(d.expand("%s 0755 root root false - - -" % (dir))) # Now we actually load from the configuration files for conf in get_fs_perms_list(d).split(): @@ -854,20 +860,20 @@ python fixup_perms () { python split_and_strip_files () { import stat, errno - dvar = d.getVar('PKGD', True) - pn = d.getVar('PN', True) + dvar = d.getVar('PKGD') + pn = d.getVar('PN') oldcwd = os.getcwd() 
os.chdir(dvar) # We default to '.debug' style - if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-file-directory': + if d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-file-directory': # Single debug-file-directory style debug info debugappend = ".debug" debugdir = "" debuglibdir = "/usr/lib/debug" debugsrcdir = "/usr/src/debug" - elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE', True) == 'debug-without-src': + elif d.getVar('PACKAGE_DEBUG_SPLIT_STYLE') == 'debug-without-src': # Original OE-core, a.k.a. ".debug", style debug info, but without sources in /usr/src/debug debugappend = "" debugdir = "/.debug" @@ -918,10 +924,10 @@ python split_and_strip_files () { symlinks = {} kernmods = [] inodes = {} - libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True)) - baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True)) - if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1' or \ - d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'): + libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir")) + baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir")) + if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1' or \ + d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): for root, dirs, files in cpath.walk(dvar): for f in files: file = os.path.join(root, f) @@ -962,7 +968,7 @@ python split_and_strip_files () { elf_file = isELF(file) if elf_file & 1: if elf_file & 2: - if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split(): + if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split(): bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) else: msg = "File '%s' from %s was already stripped, this will prevent future debugging!" % (file[len(dvar):], pn) @@ -991,7 +997,7 @@ python split_and_strip_files () { # # First lets process debug splitting # - if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT', True) != '1'): + if (d.getVar('INHIBIT_PACKAGE_DEBUG_SPLIT') != '1'): for file in elffiles: src = file[len(dvar):] dest = debuglibdir + os.path.dirname(src) + debugdir + "/" + os.path.basename(src) + debugappend @@ -1054,8 +1060,8 @@ python split_and_strip_files () { # # Now lets go back over things and strip them # - if (d.getVar('INHIBIT_PACKAGE_STRIP', True) != '1'): - strip = d.getVar("STRIP", True) + if (d.getVar('INHIBIT_PACKAGE_STRIP') != '1'): + strip = d.getVar("STRIP") sfiles = [] for file in elffiles: elf_file = int(elffiles[file]) @@ -1075,16 +1081,16 @@ python split_and_strip_files () { python populate_packages () { import glob, re - workdir = d.getVar('WORKDIR', True) - outdir = d.getVar('DEPLOY_DIR', True) - dvar = d.getVar('PKGD', True) - packages = d.getVar('PACKAGES', True) - pn = d.getVar('PN', True) + workdir = d.getVar('WORKDIR') + outdir = d.getVar('DEPLOY_DIR') + dvar = d.getVar('PKGD') + packages = d.getVar('PACKAGES') + pn = d.getVar('PN') bb.utils.mkdirhier(outdir) os.chdir(dvar) - autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG", True) or False) + autodebug = not (d.getVar("NOAUTOPACKAGEDEBUG") or False) # Sanity check PACKAGES for duplicates # Sanity should be moved to sanity.bbclass once we have the infrastucture @@ -1099,7 +1105,7 @@ python populate_packages () { else: package_list.append(pkg) d.setVar('PACKAGES', ' '.join(package_list)) - pkgdest = d.getVar('PKGDEST', True) + pkgdest = d.getVar('PKGDEST') seen = [] @@ -1120,7 +1126,7 @@ python populate_packages () { root = os.path.join(pkgdest, pkg) bb.utils.mkdirhier(root) - filesvar = d.getVar('FILES_%s' % pkg, True) or "" + filesvar = 
d.getVar('FILES_%s' % pkg) or "" if "//" in filesvar: msg = "FILES variable for package %s contains '//' which is invalid. Attempting to fix this but you should correct the metadata.\n" % pkg package_qa_handle_error("files-invalid", msg, d) @@ -1188,7 +1194,7 @@ python populate_packages () { # Handle LICENSE_EXCLUSION package_list = [] for pkg in packages.split(): - if d.getVar('LICENSE_EXCLUSION-' + pkg, True): + if d.getVar('LICENSE_EXCLUSION-' + pkg): msg = "%s has an incompatible license. Excluding from packaging." % pkg package_qa_handle_error("incompatible-license", msg, d) else: @@ -1207,7 +1213,7 @@ python populate_packages () { if unshipped != []: msg = pn + ": Files/directories were installed but not shipped in any package:" - if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn, True) or "").split(): + if "installed-vs-shipped" in (d.getVar('INSANE_SKIP_' + pn) or "").split(): bb.note("Package %s skipping QA tests: installed-vs-shipped" % pn) else: for f in unshipped: @@ -1220,7 +1226,7 @@ populate_packages[dirs] = "${D}" python package_fixsymlinks () { import errno - pkgdest = d.getVar('PKGDEST', True) + pkgdest = d.getVar('PKGDEST') packages = d.getVar("PACKAGES", False).split() dangling_links = {} @@ -1255,7 +1261,7 @@ python package_fixsymlinks () { bb.note("%s contains dangling symlink to %s" % (pkg, l)) for pkg in newrdepends: - rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "") + rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "") for p in newrdepends[pkg]: if p not in rdepends: rdepends[p] = [] @@ -1286,11 +1292,11 @@ python emit_pkgdata() { c = codecs.getencoder("unicode_escape") return c(str)[0].decode("latin1") - val = d.getVar('%s_%s' % (var, pkg), True) + val = d.getVar('%s_%s' % (var, pkg)) if val: f.write('%s_%s: %s\n' % (var, pkg, encode(val))) return val - val = d.getVar('%s' % (var), True) + val = d.getVar('%s' % (var)) if val: f.write('%s: %s\n' % (var, encode(val))) return val @@ -1309,9 +1315,9 @@ python emit_pkgdata() { with open(subdata_file, 'w') as fd: fd.write("PKG_%s: %s" % (ml_pkg, pkg)) - packages = d.getVar('PACKAGES', True) - pkgdest = d.getVar('PKGDEST', True) - pkgdatadir = d.getVar('PKGDESTWORK', True) + packages = d.getVar('PACKAGES') + pkgdest = d.getVar('PKGDEST') + pkgdatadir = d.getVar('PKGDESTWORK') # Take shared lock since we're only reading, not writing lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}"), True) @@ -1321,9 +1327,9 @@ python emit_pkgdata() { f.write("PACKAGES: %s\n" % packages) f.close() - pn = d.getVar('PN', True) - global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS', True) or "").split() - variants = (d.getVar('MULTILIB_VARIANTS', True) or "").split() + pn = d.getVar('PN') + global_variants = (d.getVar('MULTILIB_GLOBAL_VARIANTS') or "").split() + variants = (d.getVar('MULTILIB_VARIANTS') or "").split() if bb.data.inherits_class('kernel', d) or bb.data.inherits_class('module-base', d): write_extra_pkgs(variants, pn, packages, pkgdatadir) @@ -1331,10 +1337,10 @@ python emit_pkgdata() { if (bb.data.inherits_class('allarch', d) and not bb.data.inherits_class('packagegroup', d)): write_extra_pkgs(global_variants, pn, packages, pkgdatadir) - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') for pkg in packages.split(): - pkgval = d.getVar('PKG_%s' % pkg, True) + pkgval = d.getVar('PKG_%s' % pkg) if pkgval is None: pkgval = pkg d.setVar('PKG_%s' % pkg, pkg) @@ -1342,11 +1348,14 @@ python emit_pkgdata() { pkgdestpkg = os.path.join(pkgdest, pkg) 
files = {} total_size = 0 + seen = set() for f in pkgfiles[pkg]: relpth = os.path.relpath(f, pkgdestpkg) fstat = os.lstat(f) - total_size += fstat.st_size files[os.sep + relpth] = fstat.st_size + if fstat.st_ino not in seen: + seen.add(fstat.st_ino) + total_size += fstat.st_size d.setVar('FILES_INFO', json.dumps(files)) subdata_file = pkgdatadir + "/runtime/%s" % pkg @@ -1371,17 +1380,18 @@ python emit_pkgdata() { write_if_exists(sf, pkg, 'PKG') write_if_exists(sf, pkg, 'ALLOW_EMPTY') write_if_exists(sf, pkg, 'FILES') + write_if_exists(sf, pkg, 'CONFFILES') write_if_exists(sf, pkg, 'pkg_postinst') write_if_exists(sf, pkg, 'pkg_postrm') write_if_exists(sf, pkg, 'pkg_preinst') write_if_exists(sf, pkg, 'pkg_prerm') write_if_exists(sf, pkg, 'FILERPROVIDESFLIST') write_if_exists(sf, pkg, 'FILES_INFO') - for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg, True) or "").split(): + for dfile in (d.getVar('FILERPROVIDESFLIST_' + pkg) or "").split(): write_if_exists(sf, pkg, 'FILERPROVIDES_' + dfile) write_if_exists(sf, pkg, 'FILERDEPENDSFLIST') - for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg, True) or "").split(): + for dfile in (d.getVar('FILERDEPENDSFLIST_' + pkg) or "").split(): write_if_exists(sf, pkg, 'FILERDEPENDS_' + dfile) sf.write('%s_%s: %d\n' % ('PKGSIZE', pkg, total_size)) @@ -1394,9 +1404,9 @@ python emit_pkgdata() { bb.utils.mkdirhier(os.path.dirname(subdata_sym)) oe.path.symlink("../../runtime/%s" % pkg, subdata_sym, True) - allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg, True) + allow_empty = d.getVar('ALLOW_EMPTY_%s' % pkg) if not allow_empty: - allow_empty = d.getVar('ALLOW_EMPTY', True) + allow_empty = d.getVar('ALLOW_EMPTY') root = "%s/%s" % (pkgdest, pkg) os.chdir(root) g = glob('*') @@ -1424,7 +1434,13 @@ if [ x"$D" = "x" ]; then fi } -RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LIBDIR_NATIVE}/rpm/macros --define '_rpmfc_magic_path ${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc' --rpmpopt ${STAGING_LIBDIR_NATIVE}/rpm/rpmpopt" +# In Morty and earlier releases, and on master (Rocko), the RPM file +# dependencies are always enabled. However, since they were broken with the +# release of Pyro and enabling them may cause build problems for some packages, +# they are not enabled by default in Pyro. Setting ENABLE_RPM_FILEDEPS_FOR_PYRO +# to "1" will enable them again. 
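As the comment above notes, per-file RPM dependencies stay disabled by default on Pyro; a build that wants them back, and can tolerate the known breakage, can opt in from its configuration. A minimal sketch (using local.conf is an assumption; any global configuration file works):

    # local.conf -- opt back into RPM per-file dependency generation on Pyro
    ENABLE_RPM_FILEDEPS_FOR_PYRO = "1"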
+ENABLE_RPM_FILEDEPS_FOR_PYRO ??= "0" +RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/rpmdeps${@' --alldeps' if d.getVar('ENABLE_RPM_FILEDEPS_FOR_PYRO') == '1' else ''}" # Collect perfile run-time dependency metadata # Output: @@ -1435,19 +1451,19 @@ RPMDEPS = "${STAGING_LIBDIR_NATIVE}/rpm/bin/rpmdeps-oecore --macros ${STAGING_LI # FILERDEPENDS_filepath_pkg - per file dep python package_do_filedeps() { - if d.getVar('SKIP_FILEDEPS', True) == '1': + if d.getVar('SKIP_FILEDEPS') == '1': return - pkgdest = d.getVar('PKGDEST', True) - packages = d.getVar('PACKAGES', True) - rpmdeps = d.getVar('RPMDEPS', True) + pkgdest = d.getVar('PKGDEST') + packages = d.getVar('PACKAGES') + rpmdeps = d.getVar('RPMDEPS') def chunks(files, n): return [files[i:i+n] for i in range(0, len(files), n)] pkglist = [] for pkg in packages.split(): - if d.getVar('SKIP_FILEDEPS_' + pkg, True) == '1': + if d.getVar('SKIP_FILEDEPS_' + pkg) == '1': continue if pkg.endswith('-dbg') or pkg.endswith('-doc') or pkg.find('-locale-') != -1 or pkg.find('-localedata-') != -1 or pkg.find('-gconv-') != -1 or pkg.find('-charmap-') != -1 or pkg.startswith('kernel-module-'): continue @@ -1496,22 +1512,22 @@ python package_do_shlibs() { return lib_re = re.compile("^.*\.so") - libdir_re = re.compile(".*/%s$" % d.getVar('baselib', True)) + libdir_re = re.compile(".*/%s$" % d.getVar('baselib')) - packages = d.getVar('PACKAGES', True) - targetos = d.getVar('TARGET_OS', True) + packages = d.getVar('PACKAGES') + targetos = d.getVar('TARGET_OS') - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') - ver = d.getVar('PKGV', True) + ver = d.getVar('PKGV') if not ver: msg = "PKGV not defined" package_qa_handle_error("pkgv-undefined", msg, d) return - pkgdest = d.getVar('PKGDEST', True) + pkgdest = d.getVar('PKGDEST') - shlibswork_dir = d.getVar('SHLIBSWORKDIR', True) + shlibswork_dir = d.getVar('SHLIBSWORKDIR') # Take shared lock since we're only reading, not writing lf = bb.utils.lockfile(d.expand("${PACKAGELOCK}")) @@ -1519,7 +1535,7 @@ python package_do_shlibs() { def linux_so(file, needed, sonames, renames, pkgver): needs_ldconfig = False ldir = os.path.dirname(file).replace(pkgdest + "/" + pkg, '') - cmd = d.getVar('OBJDUMP', True) + " -p " + pipes.quote(file) + " 2>/dev/null" + cmd = d.getVar('OBJDUMP') + " -p " + pipes.quote(file) + " 2>/dev/null" fd = os.popen(cmd) lines = fd.readlines() fd.close() @@ -1601,28 +1617,44 @@ python package_do_shlibs() { if name and name not in needed[pkg]: needed[pkg].append((name, file, [])) - if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS', True) == "1": + def mingw_dll(file, needed, sonames, renames, pkgver): + if not os.path.exists(file): + return + + if file.endswith(".dll"): + # assume all dlls are shared objects provided by the package + sonames.append((os.path.basename(file), os.path.dirname(file).replace(pkgdest + "/" + pkg, ''), pkgver)) + + if (file.endswith(".dll") or file.endswith(".exe")): + # use objdump to search for "DLL Name: .*\.dll" + p = sub.Popen([d.expand("${HOST_PREFIX}objdump"), "-p", file], stdout = sub.PIPE, stderr= sub.PIPE) + out, err = p.communicate() + # process the output, grabbing all .dll names + if p.returncode == 0: + for m in re.finditer("DLL Name: (.*?\.dll)$", out.decode(), re.MULTILINE | re.IGNORECASE): + dllname = m.group(1) + if dllname: + needed[pkg].append((dllname, file, [])) + + if d.getVar('PACKAGE_SNAP_LIB_SYMLINKS') == "1": snap_symlinks = True else: snap_symlinks = False - if (d.getVar('USE_LDCONFIG', True) or "1") == "1": - use_ldconfig = True - else: - 
use_ldconfig = False + use_ldconfig = bb.utils.contains('DISTRO_FEATURES', 'ldconfig', True, False, d) needed = {} shlib_provider = oe.package.read_shlib_providers(d) for pkg in packages.split(): - private_libs = d.getVar('PRIVATE_LIBS_' + pkg, True) or d.getVar('PRIVATE_LIBS', True) or "" + private_libs = d.getVar('PRIVATE_LIBS_' + pkg) or d.getVar('PRIVATE_LIBS') or "" private_libs = private_libs.split() needs_ldconfig = False bb.debug(2, "calculating shlib provides for %s" % pkg) - pkgver = d.getVar('PKGV_' + pkg, True) + pkgver = d.getVar('PKGV_' + pkg) if not pkgver: - pkgver = d.getVar('PV_' + pkg, True) + pkgver = d.getVar('PV_' + pkg) if not pkgver: pkgver = ver @@ -1635,6 +1667,8 @@ python package_do_shlibs() { continue if targetos == "darwin" or targetos == "darwin8": darwin_so(file, needed, sonames, renames, pkgver) + elif targetos.startswith("mingw"): + mingw_dll(file, needed, sonames, renames, pkgver) elif os.access(file, os.X_OK) or lib_re.match(file): ldconfig = linux_so(file, needed, sonames, renames, pkgver) needs_ldconfig = needs_ldconfig or ldconfig @@ -1659,18 +1693,18 @@ python package_do_shlibs() { fd.close() if needs_ldconfig and use_ldconfig: bb.debug(1, 'adding ldconfig call to postinst for %s' % pkg) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) + postinst = d.getVar('pkg_postinst_%s' % pkg) if not postinst: postinst = '#!/bin/sh\n' - postinst += d.getVar('ldconfig_postinst_fragment', True) + postinst += d.getVar('ldconfig_postinst_fragment') d.setVar('pkg_postinst_%s' % pkg, postinst) bb.debug(1, 'LIBNAMES: pkg %s sonames %s' % (pkg, sonames)) bb.utils.unlockfile(lf) - assumed_libs = d.getVar('ASSUME_SHLIBS', True) + assumed_libs = d.getVar('ASSUME_SHLIBS') if assumed_libs: - libdir = d.getVar("libdir", True) + libdir = d.getVar("libdir") for e in assumed_libs.split(): l, dep_pkg = e.split(":") lib_ver = None @@ -1682,7 +1716,7 @@ python package_do_shlibs() { shlib_provider[l] = {} shlib_provider[l][libdir] = (dep_pkg, lib_ver) - libsearchpath = [d.getVar('libdir', True), d.getVar('base_libdir', True)] + libsearchpath = [d.getVar('libdir'), d.getVar('base_libdir')] for pkg in packages.split(): bb.debug(2, "calculating shlib requirements for %s" % pkg) @@ -1736,12 +1770,12 @@ python package_do_shlibs() { python package_do_pkgconfig () { import re - packages = d.getVar('PACKAGES', True) - workdir = d.getVar('WORKDIR', True) - pkgdest = d.getVar('PKGDEST', True) + packages = d.getVar('PACKAGES') + workdir = d.getVar('WORKDIR') + pkgdest = d.getVar('PKGDEST') - shlibs_dirs = d.getVar('SHLIBSDIRS', True).split() - shlibswork_dir = d.getVar('SHLIBSWORKDIR', True) + shlibs_dirs = d.getVar('SHLIBSDIRS').split() + shlibswork_dir = d.getVar('SHLIBSWORKDIR') pc_re = re.compile('(.*)\.pc$') var_re = re.compile('(.*)=(.*)') @@ -1773,7 +1807,7 @@ python package_do_pkgconfig () { m = field_re.match(l) if m: hdr = m.group(1) - exp = bb.data.expand(m.group(2), pd) + exp = pd.expand(m.group(2)) if hdr == 'Requires': pkgconfig_needed[pkg] += exp.replace(',', ' ').split() @@ -1826,7 +1860,7 @@ python package_do_pkgconfig () { def read_libdep_files(d): pkglibdeps = {} - packages = d.getVar('PACKAGES', True).split() + packages = d.getVar('PACKAGES').split() for pkg in packages: pkglibdeps[pkg] = {} for extension in ".shlibdeps", ".pcdeps", ".clilibdeps": @@ -1846,9 +1880,9 @@ def read_libdep_files(d): python read_shlibdeps () { pkglibdeps = read_libdep_files(d) - packages = d.getVar('PACKAGES', True).split() + packages = d.getVar('PACKAGES').split() for pkg in packages: - 
rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg, True) or "") + rdepends = bb.utils.explode_dep_versions2(d.getVar('RDEPENDS_' + pkg) or "") for dep in pkglibdeps[pkg]: # Add the dep if it's not already there, or if no comparison is set if dep not in rdepends: @@ -1873,14 +1907,14 @@ python package_depchains() { package. """ - packages = d.getVar('PACKAGES', True) - postfixes = (d.getVar('DEPCHAIN_POST', True) or '').split() - prefixes = (d.getVar('DEPCHAIN_PRE', True) or '').split() + packages = d.getVar('PACKAGES') + postfixes = (d.getVar('DEPCHAIN_POST') or '').split() + prefixes = (d.getVar('DEPCHAIN_PRE') or '').split() def pkg_adddeprrecs(pkg, base, suffix, getname, depends, d): #bb.note('depends for %s is %s' % (base, depends)) - rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "") + rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "") for depend in depends: if depend.find('-native') != -1 or depend.find('-cross') != -1 or depend.startswith('virtual/'): @@ -1901,7 +1935,7 @@ python package_depchains() { def pkg_addrrecs(pkg, base, suffix, getname, rdepends, d): #bb.note('rdepends for %s is %s' % (base, rdepends)) - rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg, True) or "") + rreclist = bb.utils.explode_dep_versions2(d.getVar('RRECOMMENDS_' + pkg) or "") for depend in rdepends: if depend.find('virtual-locale-') != -1: @@ -1924,12 +1958,12 @@ python package_depchains() { list.append(dep) depends = [] - for dep in bb.utils.explode_deps(d.getVar('DEPENDS', True) or ""): + for dep in bb.utils.explode_deps(d.getVar('DEPENDS') or ""): add_dep(depends, dep) rdepends = [] for pkg in packages.split(): - for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg, True) or ""): + for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + pkg) or ""): add_dep(rdepends, dep) #bb.note('rdepends is %s' % rdepends) @@ -1959,11 +1993,11 @@ python package_depchains() { for pkg in pkglibdeps: for k in pkglibdeps[pkg]: add_dep(pkglibdeplist, k) - dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS', True) == '1') or (bb.data.inherits_class('packagegroup', d))) + dbgdefaultdeps = ((d.getVar('DEPCHAIN_DBGDEFAULTDEPS') == '1') or (bb.data.inherits_class('packagegroup', d))) for suffix in pkgs: for pkg in pkgs[suffix]: - if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs', True): + if d.getVarFlag('RRECOMMENDS_' + pkg, 'nodeprrecs'): continue (base, func) = pkgs[suffix][pkg] if suffix == "-dev": @@ -1976,19 +2010,19 @@ python package_depchains() { pkg_addrrecs(pkg, base, suffix, func, rdepends, d) else: rdeps = [] - for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base, True) or ""): + for dep in bb.utils.explode_deps(d.getVar('RDEPENDS_' + base) or ""): add_dep(rdeps, dep) pkg_addrrecs(pkg, base, suffix, func, rdeps, d) } # Since bitbake can't determine which variables are accessed during package # iteration, we need to list them here: -PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE" +PACKAGEVARS = "FILES RDEPENDS RRECOMMENDS SUMMARY DESCRIPTION RSUGGESTS RPROVIDES RCONFLICTS PKG ALLOW_EMPTY pkg_postinst pkg_postrm INITSCRIPT_NAME INITSCRIPT_PARAMS DEBIAN_NOAUTONAME ALTERNATIVE PKGE PKGV PKGR 
USERADD_PARAM GROUPADD_PARAM CONFFILES SYSTEMD_SERVICE LICENSE SECTION pkg_preinst pkg_prerm RREPLACES GROUPMEMS_PARAM SYSTEMD_AUTO_ENABLE SKIP_FILEDEPS PRIVATE_LIBS" def gen_packagevar(d): ret = [] - pkgs = (d.getVar("PACKAGES", True) or "").split() - vars = (d.getVar("PACKAGEVARS", True) or "").split() + pkgs = (d.getVar("PACKAGES") or "").split() + vars = (d.getVar("PACKAGEVARS") or "").split() for p in pkgs: for v in vars: ret.append(v + "_" + p) @@ -2036,16 +2070,16 @@ python do_package () { # Sanity test the setup ########################################################################### - packages = (d.getVar('PACKAGES', True) or "").split() + packages = (d.getVar('PACKAGES') or "").split() if len(packages) < 1: bb.debug(1, "No packages to build, skipping do_package") return - workdir = d.getVar('WORKDIR', True) - outdir = d.getVar('DEPLOY_DIR', True) - dest = d.getVar('D', True) - dvar = d.getVar('PKGD', True) - pn = d.getVar('PN', True) + workdir = d.getVar('WORKDIR') + outdir = d.getVar('DEPLOY_DIR') + dest = d.getVar('D') + dvar = d.getVar('PKGD') + pn = d.getVar('PN') if not workdir or not outdir or not dest or not dvar or not pn: msg = "WORKDIR, DEPLOY_DIR, D, PN and PKGD all must be defined, unable to package" @@ -2063,7 +2097,7 @@ python do_package () { # code pre-expands some frequently used variables def expandVar(x, d): - d.setVar(x, d.getVar(x, True)) + d.setVar(x, d.getVar(x)) for x in 'PN', 'PV', 'BPN', 'TARGET_SYS', 'EXTENDPRAUTO': expandVar(x, d) @@ -2072,7 +2106,7 @@ python do_package () { # Setup PKGD (from D) ########################################################################### - for f in (d.getVar('PACKAGEBUILDPKGD', True) or '').split(): + for f in (d.getVar('PACKAGEBUILDPKGD') or '').split(): bb.build.exec_func(f, d) ########################################################################### @@ -2081,7 +2115,7 @@ python do_package () { cpath = oe.cachedpath.CachedPath() - for f in (d.getVar('PACKAGESPLITFUNCS', True) or '').split(): + for f in (d.getVar('PACKAGESPLITFUNCS') or '').split(): bb.build.exec_func(f, d) ########################################################################### @@ -2091,18 +2125,18 @@ python do_package () { # Build global list of files in each split package global pkgfiles pkgfiles = {} - packages = d.getVar('PACKAGES', True).split() - pkgdest = d.getVar('PKGDEST', True) + packages = d.getVar('PACKAGES').split() + pkgdest = d.getVar('PKGDEST') for pkg in packages: pkgfiles[pkg] = [] for walkroot, dirs, files in cpath.walk(pkgdest + "/" + pkg): for file in files: pkgfiles[pkg].append(walkroot + os.sep + file) - for f in (d.getVar('PACKAGEFUNCS', True) or '').split(): + for f in (d.getVar('PACKAGEFUNCS') or '').split(): bb.build.exec_func(f, d) - qa_sane = d.getVar("QA_SANE", True) + qa_sane = d.getVar("QA_SANE") if not qa_sane: bb.fatal("Fatal QA errors found, failing task.") } @@ -2149,7 +2183,7 @@ def mapping_rename_hook(d): Rewrite variables to account for package renaming in things like debian.bbclass or manual PKG variable name changes """ - pkg = d.getVar("PKG", True) + pkg = d.getVar("PKG") runtime_mapping_rename("RDEPENDS", pkg, d) runtime_mapping_rename("RRECOMMENDS", pkg, d) runtime_mapping_rename("RSUGGESTS", pkg, d) diff --git a/import-layers/yocto-poky/meta/classes/package_deb.bbclass b/import-layers/yocto-poky/meta/classes/package_deb.bbclass index fb6034cab..eacabcdb6 100644 --- a/import-layers/yocto-poky/meta/classes/package_deb.bbclass +++ b/import-layers/yocto-poky/meta/classes/package_deb.bbclass @@ -6,17 
+6,19 @@ inherit package IMAGE_PKGTYPE ?= "deb" -DPKG_ARCH ?= "${@debian_arch_map(d.getVar('TARGET_ARCH', True), d.getVar('TUNE_FEATURES', True))}" +DPKG_ARCH ?= "${@debian_arch_map(d.getVar('TARGET_ARCH'), d.getVar('TUNE_FEATURES'))}" DPKG_ARCH[vardepvalue] = "${DPKG_ARCH}" PKGWRITEDIRDEB = "${WORKDIR}/deploy-debs" APTCONF_TARGET = "${WORKDIR}" -APT_ARGS = "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS", True) == "1"]}" +APT_ARGS = "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}" def debian_arch_map(arch, tune): tune_features = tune.split() + if arch == "allarch": + return "all" if arch in ["i586", "i686"]: return "i386" if arch == "x86_64": @@ -53,25 +55,26 @@ python do_package_deb () { import textwrap import subprocess import collections + import codecs oldcwd = os.getcwd() - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') if not workdir: bb.error("WORKDIR not defined, unable to package") return - outdir = d.getVar('PKGWRITEDIRDEB', True) + outdir = d.getVar('PKGWRITEDIRDEB') if not outdir: bb.error("PKGWRITEDIRDEB not defined, unable to package") return - packages = d.getVar('PACKAGES', True) + packages = d.getVar('PACKAGES') if not packages: bb.debug(1, "PACKAGES not defined, nothing to package") return - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK): os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN")) @@ -80,7 +83,7 @@ python do_package_deb () { bb.debug(1, "No packages; nothing to do") return - pkgdest = d.getVar('PKGDEST', True) + pkgdest = d.getVar('PKGDEST') def cleanupcontrol(root): for p in ['CONTROL', 'DEBIAN']: @@ -96,17 +99,16 @@ python do_package_deb () { localdata.setVar('ROOT', '') localdata.setVar('ROOT_%s' % pkg, root) - pkgname = localdata.getVar('PKG_%s' % pkg, True) + pkgname = localdata.getVar('PKG_%s' % pkg) if not pkgname: pkgname = pkg localdata.setVar('PKG', pkgname) localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + pkg) - bb.data.update_data(localdata) basedir = os.path.join(os.path.dirname(root)) - pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH', True)) + pkgoutdir = os.path.join(outdir, localdata.getVar('PACKAGE_ARCH')) bb.utils.mkdirhier(pkgoutdir) os.chdir(root) @@ -114,22 +116,18 @@ python do_package_deb () { from glob import glob g = glob('*') if not g and localdata.getVar('ALLOW_EMPTY', False) != "1": - bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) + bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR'))) bb.utils.unlockfile(lf) continue controldir = os.path.join(root, 'DEBIAN') bb.utils.mkdirhier(controldir) os.chmod(controldir, 0o755) - try: - import codecs - ctrlfile = codecs.open(os.path.join(controldir, 'control'), 'w', 'utf-8') - except OSError: - bb.utils.unlockfile(lf) - bb.fatal("unable to open control file for writing") + + ctrlfile = codecs.open(os.path.join(controldir, 'control'), 'w', 'utf-8') fields = [] - pe = d.getVar('PKGE', True) + pe = d.getVar('PKGE') if pe and int(pe) > 0: fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) else: @@ -141,7 +139,7 @@ python do_package_deb () { fields.append(["Architecture: %s\n", ['DPKG_ARCH']]) fields.append(["OE: %s\n", ['PN']]) fields.append(["PackageArch: %s\n", ['PACKAGE_ARCH']]) - if d.getVar('HOMEPAGE', True): + if d.getVar('HOMEPAGE'): 
fields.append(["Homepage: %s\n", ['HOMEPAGE']]) # Package, Version, Maintainer, Description - mandatory @@ -151,10 +149,10 @@ python do_package_deb () { def pullData(l, d): l2 = [] for i in l: - data = d.getVar(i, True) + data = d.getVar(i) if data is None: - raise KeyError(f) - if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH', True) == 'all': + raise KeyError(i) + if i == 'DPKG_ARCH' and d.getVar('PACKAGE_ARCH') == 'all': data = 'all' elif i == 'PACKAGE_ARCH' or i == 'DPKG_ARCH': # The params in deb package control don't allow character @@ -165,44 +163,31 @@ python do_package_deb () { return l2 ctrlfile.write("Package: %s\n" % pkgname) - if d.getVar('PACKAGE_ARCH', True) == "all": + if d.getVar('PACKAGE_ARCH') == "all": ctrlfile.write("Multi-Arch: foreign\n") # check for required fields - try: - for (c, fs) in fields: - for f in fs: - if localdata.getVar(f, False) is None: - raise KeyError(f) - # Special behavior for description... - if 'DESCRIPTION' in fs: - summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." - ctrlfile.write('Description: %s\n' % summary) - description = localdata.getVar('DESCRIPTION', True) or "." - description = textwrap.dedent(description).strip() - if '\\n' in description: - # Manually indent - for t in description.split('\\n'): - # We don't limit the width when manually indent, but we do - # need the textwrap.fill() to set the initial_indent and - # subsequent_indent, so set a large width - ctrlfile.write('%s\n' % textwrap.fill(t, width=100000, initial_indent=' ', subsequent_indent=' ')) - else: - # Auto indent - ctrlfile.write('%s\n' % textwrap.fill(description.strip(), width=74, initial_indent=' ', subsequent_indent=' ')) - - else: - ctrlfile.write(c % tuple(pullData(fs, localdata))) - except KeyError: - import sys - (type, value, traceback) = sys.exc_info() - bb.utils.unlockfile(lf) - ctrlfile.close() - bb.fatal("Missing field for deb generation: %s" % value) + for (c, fs) in fields: + # Special behavior for description... + if 'DESCRIPTION' in fs: + summary = localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or "." + ctrlfile.write('Description: %s\n' % summary) + description = localdata.getVar('DESCRIPTION') or "." 
+ description = textwrap.dedent(description).strip() + if '\\n' in description: + # Manually indent + for t in description.split('\\n'): + ctrlfile.write(' %s\n' % (t.strip() or '.')) + else: + # Auto indent + ctrlfile.write('%s\n' % textwrap.fill(description.strip(), width=74, initial_indent=' ', subsequent_indent=' ')) + + else: + ctrlfile.write(c % tuple(pullData(fs, localdata))) # more fields custom_fields_chunk = get_package_additional_metadata("deb", localdata) - if custom_fields_chunk is not None: + if custom_fields_chunk: ctrlfile.write(custom_fields_chunk) ctrlfile.write("\n") @@ -231,7 +216,7 @@ python do_package_deb () { elif (v or "").startswith("> "): var[dep][i] = var[dep][i].replace("> ", ">> ") - rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS", True) or "") + rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "") debian_cmp_remap(rdepends) for dep in list(rdepends.keys()): if dep == pkg: @@ -239,20 +224,24 @@ python do_package_deb () { continue if '*' in dep: del rdepends[dep] - rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS", True) or "") + rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS") or "") debian_cmp_remap(rrecommends) for dep in list(rrecommends.keys()): if '*' in dep: del rrecommends[dep] - rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS", True) or "") + rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS") or "") debian_cmp_remap(rsuggests) # Deliberately drop version information here, not wanted/supported by deb - rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES", True) or ""), []) + rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES") or ""), []) + # Remove file paths if any from rprovides, debian does not support custom providers + for key in list(rprovides.keys()): + if key.startswith('/'): + del rprovides[key] rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0])) debian_cmp_remap(rprovides) - rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES", True) or "") + rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES") or "") debian_cmp_remap(rreplaces) - rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "") + rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS") or "") debian_cmp_remap(rconflicts) if rdepends: ctrlfile.write("Depends: %s\n" % bb.utils.join_deps(rdepends)) @@ -269,15 +258,11 @@ python do_package_deb () { ctrlfile.close() for script in ["preinst", "postinst", "prerm", "postrm"]: - scriptvar = localdata.getVar('pkg_%s' % script, True) + scriptvar = localdata.getVar('pkg_%s' % script) if not scriptvar: continue scriptvar = scriptvar.strip() - try: - scriptfile = open(os.path.join(controldir, script), 'w') - except OSError: - bb.utils.unlockfile(lf) - bb.fatal("unable to open %s script file for writing" % script) + scriptfile = open(os.path.join(controldir, script), 'w') if scriptvar.startswith("#!"): pos = scriptvar.find("\n") + 1 @@ -297,21 +282,14 @@ python do_package_deb () { conffiles_str = ' '.join(get_conffiles(pkg, d)) if conffiles_str: - try: - conffiles = open(os.path.join(controldir, 'conffiles'), 'w') - except OSError: - bb.utils.unlockfile(lf) - bb.fatal("unable to open conffiles for writing") + conffiles = open(os.path.join(controldir, 'conffiles'), 'w') for f in conffiles_str.split(): if 
os.path.exists(oe.path.join(root, f)): conffiles.write('%s\n' % f) conffiles.close() os.chdir(basedir) - ret = subprocess.call("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH", True), root, pkgoutdir), shell=True) - if ret != 0: - bb.utils.unlockfile(lf) - bb.fatal("dpkg-deb execution failed") + subprocess.check_output("PATH=\"%s\" dpkg-deb -b %s %s" % (localdata.getVar("PATH"), root, pkgoutdir), shell=True) cleanupcontrol(root) bb.utils.unlockfile(lf) @@ -328,7 +306,7 @@ do_package_write_deb[sstate-inputdirs] = "${PKGWRITEDIRDEB}" do_package_write_deb[sstate-outputdirs] = "${DEPLOY_DIR_DEB}" python do_package_write_deb_setscene () { - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') if os.access(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN"),os.R_OK): os.unlink(os.path.join(tmpdir, "stamps", "DEB_PACKAGE_INDEX_CLEAN")) @@ -338,7 +316,7 @@ python do_package_write_deb_setscene () { addtask do_package_write_deb_setscene python () { - if d.getVar('PACKAGES', True) != '': + if d.getVar('PACKAGES') != '': deps = ' dpkg-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot' d.appendVarFlag('do_package_write_deb', 'depends', deps) d.setVarFlag('do_package_write_deb', 'fakeroot', "1") @@ -351,6 +329,7 @@ python do_package_write_deb () { do_package_write_deb[dirs] = "${PKGWRITEDIRDEB}" do_package_write_deb[cleandirs] = "${PKGWRITEDIRDEB}" do_package_write_deb[umask] = "022" +do_package_write_deb[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}" addtask package_write_deb after do_packagedata do_package diff --git a/import-layers/yocto-poky/meta/classes/package_ipk.bbclass b/import-layers/yocto-poky/meta/classes/package_ipk.bbclass index e7e7d4929..a1e51ee69 100644 --- a/import-layers/yocto-poky/meta/classes/package_ipk.bbclass +++ b/import-layers/yocto-poky/meta/classes/package_ipk.bbclass @@ -11,8 +11,8 @@ PKGWRITEDIRIPK = "${WORKDIR}/deploy-ipks" OPKGBUILDCMD ??= "opkg-build" OPKG_ARGS += "--force_postinstall --prefer-arch-to-version" -OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS", True) == "1"]}" -OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE', True) or "").split())][(d.getVar("PACKAGE_EXCLUDE", True) or "") != ""]}" +OPKG_ARGS += "${@['', '--no-install-recommends'][d.getVar("NO_RECOMMENDATIONS") == "1"]}" +OPKG_ARGS += "${@['', '--add-exclude ' + ' --add-exclude '.join((d.getVar('PACKAGE_EXCLUDE') or "").split())][(d.getVar("PACKAGE_EXCLUDE") or "") != ""]}" OPKGLIBDIR = "${localstatedir}/lib" @@ -24,15 +24,15 @@ python do_package_ipk () { oldcwd = os.getcwd() - workdir = d.getVar('WORKDIR', True) - outdir = d.getVar('PKGWRITEDIRIPK', True) - tmpdir = d.getVar('TMPDIR', True) - pkgdest = d.getVar('PKGDEST', True) + workdir = d.getVar('WORKDIR') + outdir = d.getVar('PKGWRITEDIRIPK') + tmpdir = d.getVar('TMPDIR') + pkgdest = d.getVar('PKGDEST') if not workdir or not outdir or not tmpdir: bb.error("Variables incorrectly set, unable to package") return - packages = d.getVar('PACKAGES', True) + packages = d.getVar('PACKAGES') if not packages or packages == '': bb.debug(1, "No packages; nothing to do") return @@ -48,7 +48,7 @@ python do_package_ipk () { if os.path.exists(p): bb.utils.prunedir(p) - recipesource = os.path.basename(d.getVar('FILE', True)) + recipesource = os.path.basename(d.getVar('FILE')) for pkg in packages.split(): localdata = bb.data.createCopy(d) @@ -58,16 +58,15 @@ python do_package_ipk () { 
localdata.setVar('ROOT', '') localdata.setVar('ROOT_%s' % pkg, root) - pkgname = localdata.getVar('PKG_%s' % pkg, True) + pkgname = localdata.getVar('PKG_%s' % pkg) if not pkgname: pkgname = pkg localdata.setVar('PKG', pkgname) localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + pkg) - bb.data.update_data(localdata) basedir = os.path.join(os.path.dirname(root)) - arch = localdata.getVar('PACKAGE_ARCH', True) + arch = localdata.getVar('PACKAGE_ARCH') if localdata.getVar('IPK_HIERARCHICAL_FEED', False) == "1": # Spread packages across subdirectories so each isn't too crowded @@ -100,20 +99,16 @@ python do_package_ipk () { from glob import glob g = glob('*') if not g and localdata.getVar('ALLOW_EMPTY', False) != "1": - bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) + bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR'))) bb.utils.unlockfile(lf) continue controldir = os.path.join(root, 'CONTROL') bb.utils.mkdirhier(controldir) - try: - ctrlfile = open(os.path.join(controldir, 'control'), 'w') - except OSError: - bb.utils.unlockfile(lf) - bb.fatal("unable to open control file for writing") + ctrlfile = open(os.path.join(controldir, 'control'), 'w') fields = [] - pe = d.getVar('PKGE', True) + pe = d.getVar('PKGE') if pe and int(pe) > 0: fields.append(["Version: %s:%s-%s\n", ['PKGE', 'PKGV', 'PKGR']]) else: @@ -125,46 +120,43 @@ python do_package_ipk () { fields.append(["License: %s\n", ['LICENSE']]) fields.append(["Architecture: %s\n", ['PACKAGE_ARCH']]) fields.append(["OE: %s\n", ['PN']]) - if d.getVar('HOMEPAGE', True): + if d.getVar('HOMEPAGE'): fields.append(["Homepage: %s\n", ['HOMEPAGE']]) def pullData(l, d): l2 = [] for i in l: - l2.append(d.getVar(i, True)) + l2.append(d.getVar(i)) return l2 ctrlfile.write("Package: %s\n" % pkgname) # check for required fields - try: - for (c, fs) in fields: - for f in fs: - if localdata.getVar(f, False) is None: - raise KeyError(f) - # Special behavior for description... - if 'DESCRIPTION' in fs: - summary = localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or "." - ctrlfile.write('Description: %s\n' % summary) - description = localdata.getVar('DESCRIPTION', True) or "." - description = textwrap.dedent(description).strip() - if '\\n' in description: - # Manually indent - for t in description.split('\\n'): - # We don't limit the width when manually indent, but we do - # need the textwrap.fill() to set the initial_indent and - # subsequent_indent, so set a large width - ctrlfile.write('%s\n' % textwrap.fill(t.strip(), width=100000, initial_indent=' ', subsequent_indent=' ')) - else: - # Auto indent - ctrlfile.write('%s\n' % textwrap.fill(description, width=74, initial_indent=' ', subsequent_indent=' ')) + for (c, fs) in fields: + for f in fs: + if localdata.getVar(f, False) is None: + raise KeyError(f) + # Special behavior for description... + if 'DESCRIPTION' in fs: + summary = localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or "." + ctrlfile.write('Description: %s\n' % summary) + description = localdata.getVar('DESCRIPTION') or "." 
+ description = textwrap.dedent(description).strip() + if '\\n' in description: + # Manually indent + for t in description.split('\\n'): + # We don't limit the width when manually indent, but we do + # need the textwrap.fill() to set the initial_indent and + # subsequent_indent, so set a large width + line = textwrap.fill(t.strip(), + width=100000, + initial_indent=' ', + subsequent_indent=' ') or '.' + ctrlfile.write('%s\n' % line) else: - ctrlfile.write(c % tuple(pullData(fs, localdata))) - except KeyError: - import sys - (type, value, traceback) = sys.exc_info() - ctrlfile.close() - bb.utils.unlockfile(lf) - bb.fatal("Missing field for ipk generation: %s" % value) + # Auto indent + ctrlfile.write('%s\n' % textwrap.fill(description, width=74, initial_indent=' ', subsequent_indent=' ')) + else: + ctrlfile.write(c % tuple(pullData(fs, localdata))) # more fields custom_fields_chunk = get_package_additional_metadata("ipk", localdata) @@ -187,19 +179,19 @@ python do_package_ipk () { elif (v or "").startswith("> "): var[dep][i] = var[dep][i].replace("> ", ">> ") - rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS", True) or "") + rdepends = bb.utils.explode_dep_versions2(localdata.getVar("RDEPENDS") or "") debian_cmp_remap(rdepends) - rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS", True) or "") + rrecommends = bb.utils.explode_dep_versions2(localdata.getVar("RRECOMMENDS") or "") debian_cmp_remap(rrecommends) - rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS", True) or "") + rsuggests = bb.utils.explode_dep_versions2(localdata.getVar("RSUGGESTS") or "") debian_cmp_remap(rsuggests) # Deliberately drop version information here, not wanted/supported by ipk - rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES", True) or ""), []) + rprovides = dict.fromkeys(bb.utils.explode_dep_versions2(localdata.getVar("RPROVIDES") or ""), []) rprovides = collections.OrderedDict(sorted(rprovides.items(), key=lambda x: x[0])) debian_cmp_remap(rprovides) - rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES", True) or "") + rreplaces = bb.utils.explode_dep_versions2(localdata.getVar("RREPLACES") or "") debian_cmp_remap(rreplaces) - rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS", True) or "") + rconflicts = bb.utils.explode_dep_versions2(localdata.getVar("RCONFLICTS") or "") debian_cmp_remap(rconflicts) if rdepends: @@ -218,40 +210,29 @@ python do_package_ipk () { ctrlfile.close() for script in ["preinst", "postinst", "prerm", "postrm"]: - scriptvar = localdata.getVar('pkg_%s' % script, True) + scriptvar = localdata.getVar('pkg_%s' % script) if not scriptvar: continue - try: - scriptfile = open(os.path.join(controldir, script), 'w') - except OSError: - bb.utils.unlockfile(lf) - bb.fatal("unable to open %s script file for writing" % script) + scriptfile = open(os.path.join(controldir, script), 'w') scriptfile.write(scriptvar) scriptfile.close() os.chmod(os.path.join(controldir, script), 0o755) conffiles_str = ' '.join(get_conffiles(pkg, d)) if conffiles_str: - try: - conffiles = open(os.path.join(controldir, 'conffiles'), 'w') - except OSError: - bb.utils.unlockfile(lf) - bb.fatal("unable to open conffiles for writing") + conffiles = open(os.path.join(controldir, 'conffiles'), 'w') for f in conffiles_str.split(): if os.path.exists(oe.path.join(root, f)): conffiles.write('%s\n' % f) conffiles.close() os.chdir(basedir) - ret = subprocess.call("PATH=\"%s\" %s %s %s" % 
(localdata.getVar("PATH", True), - d.getVar("OPKGBUILDCMD", True), pkg, pkgoutdir), shell=True) - if ret != 0: - bb.utils.unlockfile(lf) - bb.fatal("opkg-build execution failed") + subprocess.check_output("PATH=\"%s\" %s %s %s" % (localdata.getVar("PATH"), + d.getVar("OPKGBUILDCMD"), pkg, pkgoutdir), shell=True) - if d.getVar('IPK_SIGN_PACKAGES', True) == '1': - ipkver = "%s-%s" % (d.getVar('PKGV', True), d.getVar('PKGR', True)) - ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, d.getVar('PACKAGE_ARCH', True)) + if d.getVar('IPK_SIGN_PACKAGES') == '1': + ipkver = "%s-%s" % (d.getVar('PKGV'), d.getVar('PKGR')) + ipk_to_sign = "%s/%s_%s_%s.ipk" % (pkgoutdir, pkgname, ipkver, d.getVar('PACKAGE_ARCH')) sign_ipk(d, ipk_to_sign) cleanupcontrol(root) @@ -267,7 +248,7 @@ do_package_write_ipk[sstate-inputdirs] = "${PKGWRITEDIRIPK}" do_package_write_ipk[sstate-outputdirs] = "${DEPLOY_DIR_IPK}" python do_package_write_ipk_setscene () { - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') if os.access(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN"), os.R_OK): os.unlink(os.path.join(tmpdir, "stamps", "IPK_PACKAGE_INDEX_CLEAN")) @@ -277,7 +258,7 @@ python do_package_write_ipk_setscene () { addtask do_package_write_ipk_setscene python () { - if d.getVar('PACKAGES', True) != '': + if d.getVar('PACKAGES') != '': deps = ' opkg-utils-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot' d.appendVarFlag('do_package_write_ipk', 'depends', deps) d.setVarFlag('do_package_write_ipk', 'fakeroot', "1") @@ -290,6 +271,7 @@ python do_package_write_ipk () { do_package_write_ipk[dirs] = "${PKGWRITEDIRIPK}" do_package_write_ipk[cleandirs] = "${PKGWRITEDIRIPK}" do_package_write_ipk[umask] = "022" +do_package_write_ipk[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}" addtask package_write_ipk after do_packagedata do_package PACKAGEINDEXDEPS += "opkg-utils-native:do_populate_sysroot" diff --git a/import-layers/yocto-poky/meta/classes/package_rpm.bbclass b/import-layers/yocto-poky/meta/classes/package_rpm.bbclass index c431545f7..1deaf832d 100644 --- a/import-layers/yocto-poky/meta/classes/package_rpm.bbclass +++ b/import-layers/yocto-poky/meta/classes/package_rpm.bbclass @@ -7,15 +7,33 @@ RPMBUILD="rpmbuild" PKGWRITEDIRRPM = "${WORKDIR}/deploy-rpms" -# Maintaining the perfile dependencies has singificant overhead when writing the +# Maintaining the perfile dependencies has singificant overhead when writing the # packages. When set, this value merges them for efficiency. MERGEPERFILEDEPS = "1" +# Filter dependencies based on a provided function. +def filter_deps(var, f): + import collections + + depends_dict = bb.utils.explode_dep_versions2(var) + newdeps_dict = collections.OrderedDict() + for dep in depends_dict: + if f(dep): + newdeps_dict[dep] = depends_dict[dep] + return bb.utils.join_deps(newdeps_dict, commasep=False) + +# Filter out absolute paths (typically /bin/sh and /usr/bin/env) and any perl +# dependencies for nativesdk packages. 
+def filter_nativesdk_deps(srcname, var): + if var and srcname.startswith("nativesdk-"): + var = filter_deps(var, lambda dep: not dep.startswith('/') and dep != 'perl' and not dep.startswith('perl(')) + return var + # Construct per file dependencies file def write_rpm_perfiledata(srcname, d): - workdir = d.getVar('WORKDIR', True) - packages = d.getVar('PACKAGES', True) - pkgd = d.getVar('PKGD', True) + workdir = d.getVar('WORKDIR') + packages = d.getVar('PACKAGES') + pkgd = d.getVar('PKGD') def dump_filerdeps(varname, outfile, d): outfile.write("#!/usr/bin/env python\n\n") @@ -23,10 +41,11 @@ def write_rpm_perfiledata(srcname, d): outfile.write('deps = {\n') for pkg in packages.split(): dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg - dependsflist = (d.getVar(dependsflist_key, True) or "") + dependsflist = (d.getVar(dependsflist_key) or "") for dfile in dependsflist.split(): key = "FILE" + varname + "_" + dfile + "_" + pkg - depends_dict = bb.utils.explode_dep_versions(d.getVar(key, True) or "") + deps = filter_nativesdk_deps(srcname, d.getVar(key) or "") + depends_dict = bb.utils.explode_dep_versions(deps) file = dfile.replace("@underscore@", "_") file = file.replace("@closebrace@", "]") file = file.replace("@openbrace@", "[") @@ -55,10 +74,7 @@ def write_rpm_perfiledata(srcname, d): # OE-core dependencies a.k.a. RPM requires outdepends = workdir + "/" + srcname + ".requires" - try: - dependsfile = open(outdepends, 'w') - except OSError: - bb.fatal("unable to open spec file for writing") + dependsfile = open(outdepends, 'w') dump_filerdeps('RDEPENDS', dependsfile, d) @@ -68,10 +84,7 @@ def write_rpm_perfiledata(srcname, d): # OE-core / RPM Provides outprovides = workdir + "/" + srcname + ".provides" - try: - providesfile = open(outprovides, 'w') - except OSError: - bb.fatal("unable to open spec file for writing") + providesfile = open(outprovides, 'w') dump_filerdeps('RPROVIDES', providesfile, d) @@ -86,15 +99,15 @@ python write_specfile () { # append information for logs and patches to %prep def add_prep(d,spec_files_bottom): - if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d): - spec_files_bottom.append('%%prep -n %s' % d.getVar('PN', True) ) + if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d): + spec_files_bottom.append('%%prep -n %s' % d.getVar('PN') ) spec_files_bottom.append('%s' % "echo \"include logs and patches, Please check them in SOURCES\"") spec_files_bottom.append('') # append the name of tarball to key word 'SOURCE' in xxx.spec. def tail_source(d): - if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d): - ar_outdir = d.getVar('ARCHIVER_OUTDIR', True) + if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d): + ar_outdir = d.getVar('ARCHIVER_OUTDIR') if not os.path.exists(ar_outdir): return source_list = os.listdir(ar_outdir) @@ -107,27 +120,6 @@ python write_specfile () { os.chown(f, 0, 0) spec_preamble_top.append('Source%s: %s' % (source_number, source)) source_number += 1 - # We need a simple way to remove the MLPREFIX from the package name, - # and dependency information... 
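The new filter_deps()/filter_nativesdk_deps() helpers drop absolute-path and perl requires from nativesdk packages before they reach the spec. The same predicate in a standalone form, using a plain whitespace split instead of bb.utils.explode_dep_versions2() (the requires string is invented):

    def keep(dep):
        return (not dep.startswith('/')
                and dep != 'perl'
                and not dep.startswith('perl('))

    requires = "/bin/sh /usr/bin/env perl perl(strict) libexample"
    print(" ".join(dep for dep in requires.split() if keep(dep)))
    # -> libexample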
- def strip_multilib(name, d): - multilibs = d.getVar('MULTILIBS', True) or "" - for ext in multilibs.split(): - eext = ext.split(':') - if len(eext) > 1 and eext[0] == 'multilib' and name and name.find(eext[1] + '-') >= 0: - name = "".join(name.split(eext[1] + '-')) - return name - - def strip_multilib_deps(deps, d): - depends = bb.utils.explode_dep_versions2(deps or "") - newdeps = {} - for dep in depends: - newdeps[strip_multilib(dep, d)] = depends[dep] - return bb.utils.join_deps(newdeps) - -# ml = d.getVar("MLPREFIX", True) -# if ml and name and len(ml) != 0 and name.find(ml) == 0: -# return ml.join(name.split(ml, 1)[1:]) -# return name # In RPM, dependencies are of the format: pkg <>= Epoch:Version-Release # This format is similar to OE, however there are restrictions on the @@ -144,7 +136,7 @@ python write_specfile () { # after renaming we cannot look up the dependencies in the packagedata # store. def translate_vers(varname, d): - depends = d.getVar(varname, True) + depends = d.getVar(varname) if depends: depends_dict = bb.utils.explode_dep_versions2(depends) newdeps_dict = {} @@ -197,6 +189,8 @@ python write_specfile () { if path.endswith("DEBIAN") or path.endswith("CONTROL"): continue path = path.replace("%", "%%%%%%%%") + path = path.replace("[", "?") + path = path.replace("]", "?") # Treat all symlinks to directories as normal files. # os.walk() lists them as directories. @@ -216,6 +210,8 @@ python write_specfile () { if dir == "CONTROL" or dir == "DEBIAN": continue dir = dir.replace("%", "%%%%%%%%") + dir = dir.replace("[", "?") + dir = dir.replace("]", "?") # All packages own the directories their files are in... target.append('%dir "' + path + '/' + dir + '"') else: @@ -230,6 +226,8 @@ python write_specfile () { if file == "CONTROL" or file == "DEBIAN": continue file = file.replace("%", "%%%%%%%%") + file = file.replace("[", "?") + file = file.replace("]", "?") if conffiles.count(path + '/' + file): target.append('%config "' + path + '/' + file + '"') else: @@ -248,10 +246,10 @@ python write_specfile () { def get_perfile(varname, pkg, d): deps = [] dependsflist_key = 'FILE' + varname + 'FLIST' + "_" + pkg - dependsflist = (d.getVar(dependsflist_key, True) or "") + dependsflist = (d.getVar(dependsflist_key) or "") for dfile in dependsflist.split(): key = "FILE" + varname + "_" + dfile + "_" + pkg - depends = d.getVar(key, True) + depends = d.getVar(key) if depends: deps.append(depends) return " ".join(deps) @@ -269,33 +267,33 @@ python write_specfile () { else: spec_preamble.append('%s' % textwrap.fill(dedent_text, width=75)) - packages = d.getVar('PACKAGES', True) + packages = d.getVar('PACKAGES') if not packages or packages == '': bb.debug(1, "No packages; nothing to do") return - pkgdest = d.getVar('PKGDEST', True) + pkgdest = d.getVar('PKGDEST') if not pkgdest: bb.fatal("No PKGDEST") - outspecfile = d.getVar('OUTSPECFILE', True) + outspecfile = d.getVar('OUTSPECFILE') if not outspecfile: bb.fatal("No OUTSPECFILE") # Construct the SPEC file... - srcname = strip_multilib(d.getVar('PN', True), d) - srcsummary = (d.getVar('SUMMARY', True) or d.getVar('DESCRIPTION', True) or ".") - srcversion = d.getVar('PKGV', True).replace('-', '+') - srcrelease = d.getVar('PKGR', True) - srcepoch = (d.getVar('PKGE', True) or "") - srclicense = d.getVar('LICENSE', True) - srcsection = d.getVar('SECTION', True) - srcmaintainer = d.getVar('MAINTAINER', True) - srchomepage = d.getVar('HOMEPAGE', True) - srcdescription = d.getVar('DESCRIPTION', True) or "." 
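On the added replace() calls for '[' and ']': RPM treats %files entries as globs, so a literal bracket in a packaged path starts a character class and the entry no longer matches the file on disk; rewriting each bracket as '?' makes it match one arbitrary character there instead. A quick standalone check of that glob behaviour, with an invented file name:

    import fnmatch

    name = "foo[1].txt"
    escaped = name.replace("[", "?").replace("]", "?")
    print(fnmatch.fnmatch(name, escaped))   # True  - '?' matches each bracket
    print(fnmatch.fnmatch(name, name))      # False - '[1]' is a character class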
+ srcname = d.getVar('PN') + srcsummary = (d.getVar('SUMMARY') or d.getVar('DESCRIPTION') or ".") + srcversion = d.getVar('PKGV').replace('-', '+') + srcrelease = d.getVar('PKGR') + srcepoch = (d.getVar('PKGE') or "") + srclicense = d.getVar('LICENSE') + srcsection = d.getVar('SECTION') + srcmaintainer = d.getVar('MAINTAINER') + srchomepage = d.getVar('HOMEPAGE') + srcdescription = d.getVar('DESCRIPTION') or "." srccustomtagschunk = get_package_additional_metadata("rpm", d) - srcdepends = strip_multilib_deps(d.getVar('DEPENDS', True), d) + srcdepends = d.getVar('DEPENDS') srcrdepends = [] srcrrecommends = [] srcrsuggests = [] @@ -318,8 +316,8 @@ python write_specfile () { spec_files_top = [] spec_files_bottom = [] - perfiledeps = (d.getVar("MERGEPERFILEDEPS", True) or "0") == "0" - extra_pkgdata = (d.getVar("RPM_EXTRA_PKGDATA", True) or "0") == "1" + perfiledeps = (d.getVar("MERGEPERFILEDEPS") or "0") == "0" + extra_pkgdata = (d.getVar("RPM_EXTRA_PKGDATA") or "0") == "1" for pkg in packages.split(): localdata = bb.data.createCopy(d) @@ -328,29 +326,27 @@ python write_specfile () { localdata.setVar('ROOT', '') localdata.setVar('ROOT_%s' % pkg, root) - pkgname = localdata.getVar('PKG_%s' % pkg, True) + pkgname = localdata.getVar('PKG_%s' % pkg) if not pkgname: pkgname = pkg localdata.setVar('PKG', pkgname) localdata.setVar('OVERRIDES', d.getVar("OVERRIDES", False) + ":" + pkg) - bb.data.update_data(localdata) - conffiles = get_conffiles(pkg, d) - dirfiles = localdata.getVar('DIRFILES', True) + dirfiles = localdata.getVar('DIRFILES') if dirfiles is not None: dirfiles = dirfiles.split() - splitname = strip_multilib(pkgname, d) + splitname = pkgname - splitsummary = (localdata.getVar('SUMMARY', True) or localdata.getVar('DESCRIPTION', True) or ".") - splitversion = (localdata.getVar('PKGV', True) or "").replace('-', '+') - splitrelease = (localdata.getVar('PKGR', True) or "") - splitepoch = (localdata.getVar('PKGE', True) or "") - splitlicense = (localdata.getVar('LICENSE', True) or "") - splitsection = (localdata.getVar('SECTION', True) or "") - splitdescription = (localdata.getVar('DESCRIPTION', True) or ".") + splitsummary = (localdata.getVar('SUMMARY') or localdata.getVar('DESCRIPTION') or ".") + splitversion = (localdata.getVar('PKGV') or "").replace('-', '+') + splitrelease = (localdata.getVar('PKGR') or "") + splitepoch = (localdata.getVar('PKGE') or "") + splitlicense = (localdata.getVar('LICENSE') or "") + splitsection = (localdata.getVar('SECTION') or "") + splitdescription = (localdata.getVar('DESCRIPTION') or ".") splitcustomtagschunk = get_package_additional_metadata("rpm", localdata) translate_vers('RDEPENDS', localdata) @@ -363,18 +359,18 @@ python write_specfile () { # Map the dependencies into their final form mapping_rename_hook(localdata) - splitrdepends = strip_multilib_deps(localdata.getVar('RDEPENDS', True), d) - splitrrecommends = strip_multilib_deps(localdata.getVar('RRECOMMENDS', True), d) - splitrsuggests = strip_multilib_deps(localdata.getVar('RSUGGESTS', True), d) - splitrprovides = strip_multilib_deps(localdata.getVar('RPROVIDES', True), d) - splitrreplaces = strip_multilib_deps(localdata.getVar('RREPLACES', True), d) - splitrconflicts = strip_multilib_deps(localdata.getVar('RCONFLICTS', True), d) + splitrdepends = localdata.getVar('RDEPENDS') + splitrrecommends = localdata.getVar('RRECOMMENDS') + splitrsuggests = localdata.getVar('RSUGGESTS') + splitrprovides = localdata.getVar('RPROVIDES') + splitrreplaces = localdata.getVar('RREPLACES') + splitrconflicts = 
localdata.getVar('RCONFLICTS') splitrobsoletes = [] - splitrpreinst = localdata.getVar('pkg_preinst', True) - splitrpostinst = localdata.getVar('pkg_postinst', True) - splitrprerm = localdata.getVar('pkg_prerm', True) - splitrpostrm = localdata.getVar('pkg_postrm', True) + splitrpreinst = localdata.getVar('pkg_preinst') + splitrpostinst = localdata.getVar('pkg_postinst') + splitrprerm = localdata.getVar('pkg_prerm') + splitrpostrm = localdata.getVar('pkg_postrm') if not perfiledeps: @@ -382,6 +378,8 @@ python write_specfile () { splitrdepends = splitrdepends + " " + get_perfile('RDEPENDS', pkg, d) splitrprovides = splitrprovides + " " + get_perfile('RPROVIDES', pkg, d) + splitrdepends = filter_nativesdk_deps(srcname, splitrdepends) + # Gather special src/first package data if srcname == splitname: srcrdepends = splitrdepends @@ -452,25 +450,10 @@ python write_specfile () { if splitrpostrm: print_deps(splitrdepends, "Requires(postun)", spec_preamble_bottom, d) - # Suggests in RPM are like recommends in OE-core! - print_deps(splitrrecommends, "Suggests", spec_preamble_bottom, d) - # While there is no analog for suggests... (So call them recommends for now) - print_deps(splitrsuggests, "Recommends", spec_preamble_bottom, d) + print_deps(splitrrecommends, "Recommends", spec_preamble_bottom, d) + print_deps(splitrsuggests, "Suggests", spec_preamble_bottom, d) print_deps(splitrprovides, "Provides", spec_preamble_bottom, d) print_deps(splitrobsoletes, "Obsoletes", spec_preamble_bottom, d) - - # conflicts can not be in a provide! We will need to filter it. - if splitrconflicts: - depends_dict = bb.utils.explode_dep_versions2(splitrconflicts) - newdeps_dict = {} - for dep in depends_dict: - if dep not in splitrprovides: - newdeps_dict[dep] = depends_dict[dep] - if newdeps_dict: - splitrconflicts = bb.utils.join_deps(newdeps_dict) - else: - splitrconflicts = "" - print_deps(splitrconflicts, "Conflicts", spec_preamble_bottom, d) spec_preamble_bottom.append('') @@ -562,25 +545,10 @@ python write_specfile () { if srcrpostrm: print_deps(srcrdepends, "Requires(postun)", spec_preamble_top, d) - # Suggests in RPM are like recommends in OE-core! - print_deps(srcrrecommends, "Suggests", spec_preamble_top, d) - # While there is no analog for suggests... (So call them recommends for now) - print_deps(srcrsuggests, "Recommends", spec_preamble_top, d) - print_deps(srcrprovides, "Provides", spec_preamble_top, d) + print_deps(srcrrecommends, "Recommends", spec_preamble_top, d) + print_deps(srcrsuggests, "Suggests", spec_preamble_top, d) + print_deps(srcrprovides + (" /bin/sh" if srcname.startswith("nativesdk-") else ""), "Provides", spec_preamble_top, d) print_deps(srcrobsoletes, "Obsoletes", spec_preamble_top, d) - - # conflicts can not be in a provide! We will need to filter it. 
- if srcrconflicts: - depends_dict = bb.utils.explode_dep_versions2(srcrconflicts) - newdeps_dict = {} - for dep in depends_dict: - if dep not in srcrprovides: - newdeps_dict[dep] = depends_dict[dep] - if newdeps_dict: - srcrconflicts = bb.utils.join_deps(newdeps_dict) - else: - srcrconflicts = "" - print_deps(srcrconflicts, "Conflicts", spec_preamble_top, d) spec_preamble_top.append('') @@ -614,14 +582,11 @@ python write_specfile () { spec_scriptlets_top.append('') # Write the SPEC file - try: - specfile = open(outspecfile, 'w') - except OSError: - bb.fatal("unable to open spec file for writing") + specfile = open(outspecfile, 'w') # RPMSPEC_PREAMBLE is a way to add arbitrary text to the top # of the generated spec file - external_preamble = d.getVar("RPMSPEC_PREAMBLE", True) + external_preamble = d.getVar("RPMSPEC_PREAMBLE") if external_preamble: specfile.write(external_preamble + "\n") @@ -649,23 +614,15 @@ python write_specfile () { write_specfile[vardepsexclude] = "OVERRIDES" python do_package_rpm () { - # We need a simple way to remove the MLPREFIX from the package name, - # and dependency information... - def strip_multilib(name, d): - ml = d.getVar("MLPREFIX", True) - if ml and name and len(ml) != 0 and name.find(ml) >= 0: - return "".join(name.split(ml)) - return name - - workdir = d.getVar('WORKDIR', True) - tmpdir = d.getVar('TMPDIR', True) - pkgd = d.getVar('PKGD', True) - pkgdest = d.getVar('PKGDEST', True) + workdir = d.getVar('WORKDIR') + tmpdir = d.getVar('TMPDIR') + pkgd = d.getVar('PKGD') + pkgdest = d.getVar('PKGDEST') if not workdir or not pkgd or not tmpdir: bb.error("Variables incorrectly set, unable to package") return - packages = d.getVar('PACKAGES', True) + packages = d.getVar('PACKAGES') if not packages or packages == '': bb.debug(1, "No packages; nothing to do") return @@ -674,42 +631,43 @@ python do_package_rpm () { # If the spec file already exist, and has not been stored into # pseudo's files.db, it maybe cause rpmbuild src.rpm fail, # so remove it before doing rpmbuild src.rpm. - srcname = strip_multilib(d.getVar('PN', True), d) + srcname = d.getVar('PN') outspecfile = workdir + "/" + srcname + ".spec" if os.path.isfile(outspecfile): os.remove(outspecfile) d.setVar('OUTSPECFILE', outspecfile) bb.build.exec_func('write_specfile', d) - perfiledeps = (d.getVar("MERGEPERFILEDEPS", True) or "0") == "0" + perfiledeps = (d.getVar("MERGEPERFILEDEPS") or "0") == "0" if perfiledeps: outdepends, outprovides = write_rpm_perfiledata(srcname, d) # Setup the rpmbuild arguments... - rpmbuild = d.getVar('RPMBUILD', True) - targetsys = d.getVar('TARGET_SYS', True) - targetvendor = d.getVar('HOST_VENDOR', True) - package_arch = (d.getVar('PACKAGE_ARCH', True) or "").replace("-", "_") - sdkpkgsuffix = (d.getVar('SDKPKGSUFFIX', True) or "nativesdk").replace("-", "_") - if package_arch not in "all any noarch".split() and not package_arch.endswith(sdkpkgsuffix): - ml_prefix = (d.getVar('MLPREFIX', True) or "").replace("-", "_") - d.setVar('PACKAGE_ARCH_EXTEND', ml_prefix + package_arch) - else: - d.setVar('PACKAGE_ARCH_EXTEND', package_arch) + rpmbuild = d.getVar('RPMBUILD') + targetsys = d.getVar('TARGET_SYS') + targetvendor = d.getVar('HOST_VENDOR') + # Too many places in dnf stack assume that arch-independent packages are "noarch". + # Let's not fight against this. 
+ package_arch = (d.getVar('PACKAGE_ARCH') or "").replace("-", "_").replace("all", "noarch") + sdkpkgsuffix = (d.getVar('SDKPKGSUFFIX') or "nativesdk").replace("-", "_") + d.setVar('PACKAGE_ARCH_EXTEND', package_arch) pkgwritedir = d.expand('${PKGWRITEDIRRPM}/${PACKAGE_ARCH_EXTEND}') d.setVar('RPM_PKGWRITEDIR', pkgwritedir) - bb.debug(1, 'PKGWRITEDIR: %s' % d.getVar('RPM_PKGWRITEDIR', True)) - pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${HOST_VENDOR}-${HOST_OS}') - magicfile = d.expand('${STAGING_DIR_NATIVE}${datadir_native}/misc/magic.mgc') + bb.debug(1, 'PKGWRITEDIR: %s' % d.getVar('RPM_PKGWRITEDIR')) + pkgarch = d.expand('${PACKAGE_ARCH_EXTEND}${HOST_VENDOR}-linux') bb.utils.mkdirhier(pkgwritedir) os.chmod(pkgwritedir, 0o755) cmd = rpmbuild - cmd = cmd + " --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd + cmd = cmd + " --noclean --nodeps --short-circuit --target " + pkgarch + " --buildroot " + pkgd cmd = cmd + " --define '_topdir " + workdir + "' --define '_rpmdir " + pkgwritedir + "'" - cmd = cmd + " --define '_builddir " + d.getVar('S', True) + "'" + cmd = cmd + " --define '_builddir " + d.getVar('S') + "'" cmd = cmd + " --define '_build_name_fmt %%{NAME}-%%{VERSION}-%%{RELEASE}.%%{ARCH}.rpm'" cmd = cmd + " --define '_use_internal_dependency_generator 0'" + cmd = cmd + " --define '_binaries_in_noarch_packages_terminate_build 0'" + cmd = cmd + " --define '_build_id_links none'" + cmd = cmd + " --define '_binary_payload w6T.xzdio'" + cmd = cmd + " --define '_source_payload w6T.xzdio'" if perfiledeps: cmd = cmd + " --define '__find_requires " + outdepends + "'" cmd = cmd + " --define '__find_provides " + outprovides + "'" @@ -718,11 +676,10 @@ python do_package_rpm () { cmd = cmd + " --define '__find_provides %{nil}'" cmd = cmd + " --define '_unpackaged_files_terminate_build 0'" cmd = cmd + " --define 'debug_package %{nil}'" - cmd = cmd + " --define '_rpmfc_magic_path " + magicfile + "'" cmd = cmd + " --define '_tmppath " + workdir + "'" - if d.getVarFlag('ARCHIVER_MODE', 'srpm', True) == '1' and bb.data.inherits_class('archiver', d): - cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR', True) + "'" - cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR', True) + "'" + if d.getVarFlag('ARCHIVER_MODE', 'srpm') == '1' and bb.data.inherits_class('archiver', d): + cmd = cmd + " --define '_sourcedir " + d.getVar('ARCHIVER_OUTDIR') + "'" + cmdsrpm = cmd + " --define '_srcrpmdir " + d.getVar('ARCHIVER_OUTDIR') + "'" cmdsrpm = cmdsrpm + " -bs " + outspecfile # Build the .src.rpm d.setVar('SBUILDSPEC', cmdsrpm + "\n") @@ -730,17 +687,20 @@ python do_package_rpm () { bb.build.exec_func('SBUILDSPEC', d) cmd = cmd + " -bb " + outspecfile + # rpm 4 creates various empty directories in _topdir, let's clean them up + cleanupcmd = "rm -rf %s/BUILDROOT %s/SOURCES %s/SPECS %s/SRPMS" % (workdir, workdir, workdir, workdir) + # Build the rpm package! 
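The rpmbuild command above is assembled by string concatenation; for orientation, a compact equivalent built as an argument list is sketched below. The paths, target triple and define values are placeholders, not what the class actually computes:

    # Rough equivalent of the rpmbuild invocation assembled above.
    defines = {
        "_topdir": "/work/example/tmp",
        "_rpmdir": "/work/example/deploy-rpms/noarch",
        "_build_id_links": "none",
        "_binary_payload": "w6T.xzdio",
        "debug_package": "%{nil}",
    }
    cmd = ["rpmbuild", "--noclean", "--nodeps", "--short-circuit",
           "--target", "noarch-oe-linux",
           "--buildroot", "/work/example/packages-split"]
    for key, value in defines.items():
        cmd += ["--define", "%s %s" % (key, value)]
    cmd += ["-bb", "/work/example/example.spec"]
    print(" ".join(cmd))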
- d.setVar('BUILDSPEC', cmd + "\n") + d.setVar('BUILDSPEC', cmd + "\n" + cleanupcmd + "\n") d.setVarFlag('BUILDSPEC', 'func', '1') bb.build.exec_func('BUILDSPEC', d) - if d.getVar('RPM_SIGN_PACKAGES', True) == '1': + if d.getVar('RPM_SIGN_PACKAGES') == '1': bb.build.exec_func("sign_rpm", d) } python () { - if d.getVar('PACKAGES', True) != '': + if d.getVar('PACKAGES') != '': deps = ' rpm-native:do_populate_sysroot virtual/fakeroot-native:do_populate_sysroot' d.appendVarFlag('do_package_write_rpm', 'depends', deps) d.setVarFlag('do_package_write_rpm', 'fakeroot', '1') @@ -766,9 +726,10 @@ python do_package_write_rpm () { do_package_write_rpm[dirs] = "${PKGWRITEDIRRPM}" do_package_write_rpm[cleandirs] = "${PKGWRITEDIRRPM}" do_package_write_rpm[umask] = "022" +do_package_write_rpm[depends] += "${@oe.utils.build_depends_string(d.getVar('PACKAGE_WRITE_DEPS'), 'do_populate_sysroot')}" addtask package_write_rpm after do_packagedata do_package PACKAGEINDEXDEPS += "rpm-native:do_populate_sysroot" -PACKAGEINDEXDEPS += "createrepo-native:do_populate_sysroot" +PACKAGEINDEXDEPS += "createrepo-c-native:do_populate_sysroot" do_build[recrdeptask] += "do_package_write_rpm" diff --git a/import-layers/yocto-poky/meta/classes/package_tar.bbclass b/import-layers/yocto-poky/meta/classes/package_tar.bbclass index e217814af..ce3ab4c8e 100644 --- a/import-layers/yocto-poky/meta/classes/package_tar.bbclass +++ b/import-layers/yocto-poky/meta/classes/package_tar.bbclass @@ -7,27 +7,27 @@ python do_package_tar () { oldcwd = os.getcwd() - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') if not workdir: bb.error("WORKDIR not defined, unable to package") return - outdir = d.getVar('DEPLOY_DIR_TAR', True) + outdir = d.getVar('DEPLOY_DIR_TAR') if not outdir: bb.error("DEPLOY_DIR_TAR not defined, unable to package") return - dvar = d.getVar('D', True) + dvar = d.getVar('D') if not dvar: bb.error("D not defined, unable to package") return - packages = d.getVar('PACKAGES', True) + packages = d.getVar('PACKAGES') if not packages: bb.debug(1, "PACKAGES not defined, nothing to package") return - pkgdest = d.getVar('PKGDEST', True) + pkgdest = d.getVar('PKGDEST') bb.utils.mkdirhier(outdir) bb.utils.mkdirhier(dvar) @@ -38,7 +38,6 @@ python do_package_tar () { overrides = localdata.getVar('OVERRIDES', False) localdata.setVar('OVERRIDES', '%s:%s' % (overrides, pkg)) - bb.data.update_data(localdata) bb.utils.mkdirhier(root) basedir = os.path.dirname(root) @@ -46,7 +45,7 @@ python do_package_tar () { os.chdir(root) dlist = os.listdir(root) if not dlist: - bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV', True), localdata.getVar('PKGR', True))) + bb.note("Not creating empty archive for %s-%s-%s" % (pkg, localdata.getVar('PKGV'), localdata.getVar('PKGR'))) continue args = "tar -cz --exclude=CONTROL --exclude=DEBIAN -f".split() ret = subprocess.call(args + [tarfn] + dlist) @@ -57,8 +56,8 @@ python do_package_tar () { } python () { - if d.getVar('PACKAGES', True) != '': - deps = (d.getVarFlag('do_package_write_tar', 'depends', True) or "").split() + if d.getVar('PACKAGES') != '': + deps = (d.getVarFlag('do_package_write_tar', 'depends') or "").split() deps.append('tar-native:do_populate_sysroot') deps.append('virtual/fakeroot-native:do_populate_sysroot') d.setVarFlag('do_package_write_tar', 'depends', " ".join(deps)) diff --git a/import-layers/yocto-poky/meta/classes/packagedata.bbclass b/import-layers/yocto-poky/meta/classes/packagedata.bbclass index 3397f1e36..a903e5cfd 100644 --- 
a/import-layers/yocto-poky/meta/classes/packagedata.bbclass +++ b/import-layers/yocto-poky/meta/classes/packagedata.bbclass @@ -2,10 +2,10 @@ python read_subpackage_metadata () { import oe.packagedata vars = { - "PN" : d.getVar('PN', True), - "PE" : d.getVar('PE', True), - "PV" : d.getVar('PV', True), - "PR" : d.getVar('PR', True), + "PN" : d.getVar('PN'), + "PE" : d.getVar('PE'), + "PV" : d.getVar('PV'), + "PR" : d.getVar('PR'), } data = oe.packagedata.read_pkgdata(vars["PN"], d) @@ -13,7 +13,7 @@ python read_subpackage_metadata () { for key in data.keys(): d.setVar(key, data[key]) - for pkg in d.getVar('PACKAGES', True).split(): + for pkg in d.getVar('PACKAGES').split(): sdata = oe.packagedata.read_subpkgdata(pkg, d) for key in sdata.keys(): if key in vars: diff --git a/import-layers/yocto-poky/meta/classes/packagefeed-stability.bbclass b/import-layers/yocto-poky/meta/classes/packagefeed-stability.bbclass index aa01def74..c0e9be549 100644 --- a/import-layers/yocto-poky/meta/classes/packagefeed-stability.bbclass +++ b/import-layers/yocto-poky/meta/classes/packagefeed-stability.bbclass @@ -31,7 +31,7 @@ python() { # This assumes that the package_write task is called package_write_ # and that the directory in which packages should be written is # pointed to by the variable DEPLOY_DIR_ - for pkgclass in (d.getVar('PACKAGE_CLASSES', True) or '').split(): + for pkgclass in (d.getVar('PACKAGE_CLASSES') or '').split(): if pkgclass.startswith('package_'): pkgtype = pkgclass.split('_', 1)[1] pkgwritefunc = 'do_package_write_%s' % pkgtype @@ -51,7 +51,7 @@ python() { d.appendVarFlag('do_build', 'recrdeptask', ' ' + pkgcomparefunc) - if d.getVarFlag(pkgwritefunc, 'noexec', True) or not d.getVarFlag(pkgwritefunc, 'task', True): + if d.getVarFlag(pkgwritefunc, 'noexec') or not d.getVarFlag(pkgwritefunc, 'task'): # Packaging is disabled for this recipe, we shouldn't do anything continue @@ -71,7 +71,7 @@ python() { # This isn't the real task function - it's a template that we use in the # anonymous python code above fakeroot python do_package_compare () { - currenttask = d.getVar('BB_CURRENTTASK', True) + currenttask = d.getVar('BB_CURRENTTASK') pkgtype = currenttask.rsplit('_', 1)[1] package_compare_impl(pkgtype, d) } @@ -83,12 +83,12 @@ def package_compare_impl(pkgtype, d): import subprocess import oe.sstatesig - pn = d.getVar('PN', True) - deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True) + pn = d.getVar('PN') + deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper()) prepath = deploydir + '-prediff/' # Find out PKGR values are - pkgdatadir = d.getVar('PKGDATA_DIR', True) + pkgdatadir = d.getVar('PKGDATA_DIR') packages = [] try: with open(os.path.join(pkgdatadir, pn), 'r') as f: @@ -138,7 +138,7 @@ def package_compare_impl(pkgtype, d): files = [] docopy = False manifest, _ = oe.sstatesig.sstate_get_manifest_filename(pkgwritetask, d) - mlprefix = d.getVar('MLPREFIX', True) + mlprefix = d.getVar('MLPREFIX') # Copy recipe's all packages if one of the packages are different to make # they have the same PR. with open(manifest, 'r') as f: @@ -215,7 +215,7 @@ def package_compare_impl(pkgtype, d): # multilib), they're identical in theory, but sstate.bbclass # copies it again, so keep align with that. 
if os.path.exists(destpath) and pkgtype == 'rpm' \ - and d.getVar('PACKAGE_ARCH', True) == 'all': + and d.getVar('PACKAGE_ARCH') == 'all': os.unlink(destpath) if (os.stat(srcpath).st_dev == os.stat(destdir).st_dev): # Use a hard link to save space @@ -229,10 +229,10 @@ def package_compare_impl(pkgtype, d): do_cleansstate[postfuncs] += "pfs_cleanpkgs" python pfs_cleanpkgs () { import errno - for pkgclass in (d.getVar('PACKAGE_CLASSES', True) or '').split(): + for pkgclass in (d.getVar('PACKAGE_CLASSES') or '').split(): if pkgclass.startswith('package_'): pkgtype = pkgclass.split('_', 1)[1] - deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True) + deploydir = d.getVar('DEPLOY_DIR_%s' % pkgtype.upper()) prepath = deploydir + '-prediff' pcmanifest = os.path.join(prepath, d.expand('pkg-compare-manifest-${MULTIMACH_TARGET_SYS}-${PN}')) try: diff --git a/import-layers/yocto-poky/meta/classes/packagegroup.bbclass b/import-layers/yocto-poky/meta/classes/packagegroup.bbclass index 3928c8a4a..eea2e5b9f 100644 --- a/import-layers/yocto-poky/meta/classes/packagegroup.bbclass +++ b/import-layers/yocto-poky/meta/classes/packagegroup.bbclass @@ -16,15 +16,15 @@ PACKAGE_ARCH_EXPANDED := "${PACKAGE_ARCH}" LICENSE ?= "MIT" -inherit ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED', True) == 'all', 'allarch', '')} +inherit ${@oe.utils.ifelse(d.getVar('PACKAGE_ARCH_EXPANDED') == 'all', 'allarch', '')} # This automatically adds -dbg and -dev flavours of all PACKAGES # to the list. Their dependencies (RRECOMMENDS) are handled as usual # by package_depchains in a following step. # Also mark all packages as ALLOW_EMPTY python () { - packages = d.getVar('PACKAGES', True).split() - if d.getVar('PACKAGEGROUP_DISABLE_COMPLEMENTARY', True) != '1': + packages = d.getVar('PACKAGES').split() + if d.getVar('PACKAGEGROUP_DISABLE_COMPLEMENTARY') != '1': types = ['', '-dbg', '-dev'] if bb.utils.contains('DISTRO_FEATURES', 'ptest', True, False, d): types.append('-ptest') @@ -40,16 +40,18 @@ python () { DEPCHAIN_DBGDEFAULTDEPS = "1" # We only need the packaging tasks - disable the rest -do_fetch[noexec] = "1" -do_unpack[noexec] = "1" -do_patch[noexec] = "1" -do_configure[noexec] = "1" -do_compile[noexec] = "1" -do_install[noexec] = "1" -do_populate_sysroot[noexec] = "1" +deltask do_fetch +deltask do_unpack +deltask do_patch +deltask do_configure +deltask do_compile +deltask do_install +deltask do_populate_sysroot python () { - initman = d.getVar("VIRTUAL-RUNTIME_init_manager", True) + if bb.data.inherits_class('nativesdk', d): + return + initman = d.getVar("VIRTUAL-RUNTIME_init_manager") if initman and initman in ['sysvinit', 'systemd'] and not bb.utils.contains('DISTRO_FEATURES', initman, True, False, d): bb.fatal("Please ensure that your setting of VIRTUAL-RUNTIME_init_manager (%s) matches the entries enabled in DISTRO_FEATURES" % initman) } diff --git a/import-layers/yocto-poky/meta/classes/patch.bbclass b/import-layers/yocto-poky/meta/classes/patch.bbclass index 1f6927be0..8f35cb4f9 100644 --- a/import-layers/yocto-poky/meta/classes/patch.bbclass +++ b/import-layers/yocto-poky/meta/classes/patch.bbclass @@ -10,110 +10,65 @@ PATCH_GIT_USER_EMAIL ?= "oe.patch@oe" inherit terminal -def src_patches(d, all = False ): - workdir = d.getVar('WORKDIR', True) - fetch = bb.fetch2.Fetch([], d) - patches = [] - sources = [] - for url in fetch.urls: - local = patch_path(url, fetch, workdir) - if not local: - if all: - local = fetch.localpath(url) - sources.append(local) - continue - - urldata = fetch.ud[url] - parm = 
urldata.parm - patchname = parm.get('pname') or os.path.basename(local) - - apply, reason = should_apply(parm, d) - if not apply: - if reason: - bb.note("Patch %s %s" % (patchname, reason)) - continue - - patchparm = {'patchname': patchname} - if "striplevel" in parm: - striplevel = parm["striplevel"] - elif "pnum" in parm: - #bb.msg.warn(None, "Deprecated usage of 'pnum' url parameter in '%s', please use 'striplevel'" % url) - striplevel = parm["pnum"] - else: - striplevel = '1' - patchparm['striplevel'] = striplevel - - patchdir = parm.get('patchdir') - if patchdir: - patchparm['patchdir'] = patchdir - - localurl = bb.fetch.encodeurl(('file', '', local, '', '', patchparm)) - patches.append(localurl) - - if all: - return sources - - return patches +python () { + if d.getVar('PATCHTOOL') == 'git' and d.getVar('PATCH_COMMIT_FUNCTIONS') == '1': + extratasks = bb.build.tasksbetween('do_unpack', 'do_patch', d) + try: + extratasks.remove('do_unpack') + except ValueError: + # For some recipes do_unpack doesn't exist, ignore it + pass + + d.appendVarFlag('do_patch', 'prefuncs', ' patch_task_patch_prefunc') + for task in extratasks: + d.appendVarFlag(task, 'postfuncs', ' patch_task_postfunc') +} -def patch_path(url, fetch, workdir): - """Return the local path of a patch, or None if this isn't a patch""" +python patch_task_patch_prefunc() { + # Prefunc for do_patch + func = d.getVar('BB_RUNTASK') + srcsubdir = d.getVar('S') - local = fetch.localpath(url) - base, ext = os.path.splitext(os.path.basename(local)) - if ext in ('.gz', '.bz2', '.Z'): - local = os.path.join(workdir, base) - ext = os.path.splitext(base)[1] + patchdir = os.path.join(srcsubdir, 'patches') + if os.path.exists(patchdir): + if os.listdir(patchdir): + d.setVar('PATCH_HAS_PATCHES_DIR', '1') + else: + os.rmdir(patchdir) +} - urldata = fetch.ud[url] - if "apply" in urldata.parm: - apply = oe.types.boolean(urldata.parm["apply"]) - if not apply: - return - elif ext not in (".diff", ".patch"): - return +python patch_task_postfunc() { + # Prefunc for task functions between do_unpack and do_patch + import oe.patch + import shutil + func = d.getVar('BB_RUNTASK') + srcsubdir = d.getVar('S') + + if os.path.exists(srcsubdir): + if func == 'do_patch': + haspatches = (d.getVar('PATCH_HAS_PATCHES_DIR') == '1') + patchdir = os.path.join(srcsubdir, 'patches') + if os.path.exists(patchdir): + shutil.rmtree(patchdir) + if haspatches: + stdout, _ = bb.process.run('git status --porcelain patches', cwd=srcsubdir) + if stdout: + bb.process.run('git checkout patches', cwd=srcsubdir) + stdout, _ = bb.process.run('git status --porcelain .', cwd=srcsubdir) + if stdout: + useroptions = [] + oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=d) + bb.process.run('git add .; git %s commit -a -m "Committing changes from %s\n\n%s"' % (' '.join(useroptions), func, oe.patch.GitApplyTree.ignore_commit_prefix + ' - from %s' % func), cwd=srcsubdir) +} - return local +def src_patches(d, all=False, expand=True): + import oe.patch + return oe.patch.src_patches(d, all, expand) def should_apply(parm, d): """Determine if we should apply the given patch""" - - if "mindate" in parm or "maxdate" in parm: - pn = d.getVar('PN', True) - srcdate = d.getVar('SRCDATE_%s' % pn, True) - if not srcdate: - srcdate = d.getVar('SRCDATE', True) - - if srcdate == "now": - srcdate = d.getVar('DATE', True) - - if "maxdate" in parm and parm["maxdate"] < srcdate: - return False, 'is outdated' - - if "mindate" in parm and parm["mindate"] > srcdate: - return False, 'is predated' - - - if 
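patch_task_postfunc() above only commits when the tree is actually dirty, checking git status --porcelain first. The same check-then-commit step in a standalone form, using subprocess instead of bb.process.run (the repository path and commit message are placeholders):

    import subprocess

    def commit_if_dirty(repo, message):
        # Equivalent of: git status --porcelain .  -> commit only when dirty
        status = subprocess.run(['git', 'status', '--porcelain', '.'],
                                cwd=repo, capture_output=True, text=True,
                                check=True)
        if status.stdout.strip():
            subprocess.run(['git', 'add', '.'], cwd=repo, check=True)
            subprocess.run(['git', 'commit', '-a', '-m', message],
                           cwd=repo, check=True)

    # commit_if_dirty('/path/to/source', 'Committing changes from do_configure')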
"minrev" in parm: - srcrev = d.getVar('SRCREV', True) - if srcrev and srcrev < parm["minrev"]: - return False, 'applies to later revisions' - - if "maxrev" in parm: - srcrev = d.getVar('SRCREV', True) - if srcrev and srcrev > parm["maxrev"]: - return False, 'applies to earlier revisions' - - if "rev" in parm: - srcrev = d.getVar('SRCREV', True) - if srcrev and parm["rev"] not in srcrev: - return False, "doesn't apply to revision" - - if "notrev" in parm: - srcrev = d.getVar('SRCREV', True) - if srcrev and parm["notrev"] in srcrev: - return False, "doesn't apply to revision" - - return True, None + import oe.patch + return oe.patch.should_apply(parm, d) should_apply[vardepsexclude] = "DATE SRCDATE" @@ -126,20 +81,20 @@ python patch_do_patch() { "git": oe.patch.GitApplyTree, } - cls = patchsetmap[d.getVar('PATCHTOOL', True) or 'quilt'] + cls = patchsetmap[d.getVar('PATCHTOOL') or 'quilt'] resolvermap = { "noop": oe.patch.NOOPResolver, "user": oe.patch.UserResolver, } - rcls = resolvermap[d.getVar('PATCHRESOLVE', True) or 'user'] + rcls = resolvermap[d.getVar('PATCHRESOLVE') or 'user'] classes = {} - s = d.getVar('S', True) + s = d.getVar('S') - os.putenv('PATH', d.getVar('PATH', True)) + os.putenv('PATH', d.getVar('PATH')) # We must use one TMPDIR per process so that the "patch" processes # don't generate the same temp file name. diff --git a/import-layers/yocto-poky/meta/classes/perl-version.bbclass b/import-layers/yocto-poky/meta/classes/perl-version.bbclass new file mode 100644 index 000000000..fafe68a77 --- /dev/null +++ b/import-layers/yocto-poky/meta/classes/perl-version.bbclass @@ -0,0 +1,24 @@ +PERL_OWN_DIR = "${@["", "/perl-native"][(bb.data.inherits_class('native', d))]}" + +# Determine the staged version of perl from the perl configuration file +# Assign vardepvalue, because otherwise signature is changed before and after +# perl is built (from None to real version in config.sh). 
+get_perl_version[vardepvalue] = "${PERL_OWN_DIR}" +def get_perl_version(d): + import re + cfg = d.expand('${STAGING_LIBDIR}${PERL_OWN_DIR}/perl/config.sh') + try: + f = open(cfg, 'r') + except IOError: + return None + l = f.readlines(); + f.close(); + r = re.compile("^version='(\d*\.\d*\.\d*)'") + for s in l: + m = r.match(s) + if m: + return m.group(1) + return None + +PERLVERSION := "${@get_perl_version(d)}" +PERLVERSION[vardepvalue] = "" diff --git a/import-layers/yocto-poky/meta/classes/pixbufcache.bbclass b/import-layers/yocto-poky/meta/classes/pixbufcache.bbclass index 3f48a0f34..b3e507f61 100644 --- a/import-layers/yocto-poky/meta/classes/pixbufcache.bbclass +++ b/import-layers/yocto-poky/meta/classes/pixbufcache.bbclass @@ -8,6 +8,8 @@ inherit qemu PIXBUF_PACKAGES ??= "${PN}" +PACKAGE_WRITE_DEPS += "qemu-native gdk-pixbuf-native" + pixbufcache_common() { if [ "x$D" != "x" ]; then $INTERCEPT_DIR/postinst_intercept update_pixbuf_cache ${PKG} mlprefix=${MLPREFIX} libdir=${libdir} \ @@ -28,42 +30,35 @@ fi } python populate_packages_append() { - pixbuf_pkgs = d.getVar('PIXBUF_PACKAGES', True).split() + pixbuf_pkgs = d.getVar('PIXBUF_PACKAGES').split() for pkg in pixbuf_pkgs: bb.note("adding pixbuf postinst and postrm scripts to %s" % pkg) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) or d.getVar('pkg_postinst', True) + postinst = d.getVar('pkg_postinst_%s' % pkg) or d.getVar('pkg_postinst') if not postinst: postinst = '#!/bin/sh\n' - postinst += d.getVar('pixbufcache_common', True) + postinst += d.getVar('pixbufcache_common') d.setVar('pkg_postinst_%s' % pkg, postinst) - postrm = d.getVar('pkg_postrm_%s' % pkg, True) or d.getVar('pkg_postrm', True) + postrm = d.getVar('pkg_postrm_%s' % pkg) or d.getVar('pkg_postrm') if not postrm: postrm = '#!/bin/sh\n' - postrm += d.getVar('pixbufcache_common', True) + postrm += d.getVar('pixbufcache_common') d.setVar('pkg_postrm_%s' % pkg, postrm) } gdkpixbuf_complete() { - GDK_PIXBUF_FATAL_LOADER=1 ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache || exit 1 +GDK_PIXBUF_FATAL_LOADER=1 ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders --update-cache || exit 1 } -# -# Add an sstate postinst hook to update the cache for native packages. -# An error exit during populate_sysroot_setscene allows bitbake to -# try to recover by re-building the package. 
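get_perl_version() in the new perl-version.bbclass simply greps the staged config.sh for the version line. The same regex applied to an invented config.sh fragment:

    import re

    sample = "archname='x86_64-linux'\nversion='5.24.1'\n"
    pattern = re.compile(r"^version='(\d*\.\d*\.\d*)'")
    for line in sample.splitlines():
        match = pattern.match(line)
        if match:
            print(match.group(1))   # 5.24.1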
-# DEPENDS_append_class-native = " gdk-pixbuf-native" -SSTATEPOSTINSTFUNCS_append_class-native = " pixbufcache_sstate_postinst" +SYSROOT_PREPROCESS_FUNCS_append_class-native = " pixbufcache_sstate_postinst" # See base.bbclass for the other half of this pixbufcache_sstate_postinst() { - if [ "${BB_CURRENTTASK}" = "populate_sysroot" ]; then - ${gdkpixbuf_complete} - elif [ "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ]; then - if [ -x ${STAGING_LIBDIR_NATIVE}/gdk-pixbuf-2.0/gdk-pixbuf-query-loaders ]; then - echo "${gdkpixbuf_complete}" >> ${STAGING_DIR}/sstatecompletions - fi - fi + mkdir -p ${SYSROOT_DESTDIR}${bindir} + dest=${SYSROOT_DESTDIR}${bindir}/postinst-${PN} + echo '#!/bin/sh' > $dest + echo "${gdkpixbuf_complete}" >> $dest + chmod 0755 $dest } diff --git a/import-layers/yocto-poky/meta/classes/populate_sdk_base.bbclass b/import-layers/yocto-poky/meta/classes/populate_sdk_base.bbclass index 69aae2644..563582e0a 100644 --- a/import-layers/yocto-poky/meta/classes/populate_sdk_base.bbclass +++ b/import-layers/yocto-poky/meta/classes/populate_sdk_base.bbclass @@ -11,13 +11,13 @@ COMPLEMENTARY_GLOB[ptest-pkgs] = '*-ptest' def complementary_globs(featurevar, d): all_globs = d.getVarFlags('COMPLEMENTARY_GLOB') globs = [] - features = set((d.getVar(featurevar, True) or '').split()) + features = set((d.getVar(featurevar) or '').split()) for name, glob in all_globs.items(): if name in features: globs.append(glob) return ' '.join(globs) -SDKIMAGE_FEATURES ??= "dev-pkgs dbg-pkgs" +SDKIMAGE_FEATURES ??= "dev-pkgs dbg-pkgs ${@bb.utils.contains('DISTRO_FEATURES', 'api-documentation', 'doc-pkgs', '', d)}" SDKIMAGE_INSTALL_COMPLEMENTARY = '${@complementary_globs("SDKIMAGE_FEATURES", d)}' inherit rootfs_${IMAGE_PKGTYPE} @@ -34,10 +34,7 @@ SDKTARGETSYSROOT = "${SDKPATH}/sysroots/${REAL_MULTIMACH_TARGET_SYS}" TOOLCHAIN_HOST_TASK ?= "nativesdk-packagegroup-sdk-host packagegroup-cross-canadian-${MACHINE}" TOOLCHAIN_HOST_TASK_ATTEMPTONLY ?= "" -TOOLCHAIN_TARGET_TASK ?= " \ - ${@multilib_pkg_extend(d, 'packagegroup-core-standalone-sdk-target')} \ - ${@multilib_pkg_extend(d, 'packagegroup-core-standalone-sdk-target-dbg')} \ - " +TOOLCHAIN_TARGET_TASK ?= "${@multilib_pkg_extend(d, 'packagegroup-core-standalone-sdk-target')}" TOOLCHAIN_TARGET_TASK_ATTEMPTONLY ?= "" TOOLCHAIN_OUTPUTNAME ?= "${SDK_NAME}-toolchain-${SDK_VERSION}" @@ -57,34 +54,41 @@ SDK_PRE_INSTALL_COMMAND ?= "" SDK_POST_INSTALL_COMMAND ?= "" SDK_RELOCATE_AFTER_INSTALL ?= "1" -SDKEXTPATH ?= "~/${@d.getVar('DISTRO', True)}_sdk" -SDK_TITLE ?= "${@d.getVar('DISTRO_NAME', True) or d.getVar('DISTRO', True)} SDK" +SDKEXTPATH ?= "~/${@d.getVar('DISTRO')}_sdk" +SDK_TITLE ?= "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} SDK" SDK_TARGET_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.target.manifest" SDK_HOST_MANIFEST = "${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.host.manifest" python write_target_sdk_manifest () { from oe.sdk import sdk_list_installed_packages from oe.utils import format_pkg_list - sdkmanifestdir = os.path.dirname(d.getVar("SDK_TARGET_MANIFEST", True)) + sdkmanifestdir = os.path.dirname(d.getVar("SDK_TARGET_MANIFEST")) pkgs = sdk_list_installed_packages(d, True) if not os.path.exists(sdkmanifestdir): bb.utils.mkdirhier(sdkmanifestdir) - with open(d.getVar('SDK_TARGET_MANIFEST', True), 'w') as output: + with open(d.getVar('SDK_TARGET_MANIFEST'), 'w') as output: output.write(format_pkg_list(pkgs, 'ver')) } +python write_sdk_test_data() { + from oe.data import export2json + testdata = "%s/%s.testdata.json" % 
(d.getVar('SDKDEPLOYDIR'), d.getVar('TOOLCHAIN_OUTPUTNAME')) + bb.utils.mkdirhier(os.path.dirname(testdata)) + export2json(d, testdata) +} + python write_host_sdk_manifest () { from oe.sdk import sdk_list_installed_packages from oe.utils import format_pkg_list - sdkmanifestdir = os.path.dirname(d.getVar("SDK_HOST_MANIFEST", True)) + sdkmanifestdir = os.path.dirname(d.getVar("SDK_HOST_MANIFEST")) pkgs = sdk_list_installed_packages(d, False) if not os.path.exists(sdkmanifestdir): bb.utils.mkdirhier(sdkmanifestdir) - with open(d.getVar('SDK_HOST_MANIFEST', True), 'w') as output: + with open(d.getVar('SDK_HOST_MANIFEST'), 'w') as output: output.write(format_pkg_list(pkgs, 'ver')) } -POPULATE_SDK_POST_TARGET_COMMAND_append = " write_target_sdk_manifest ; " +POPULATE_SDK_POST_TARGET_COMMAND_append = " write_target_sdk_manifest ; write_sdk_test_data ; " POPULATE_SDK_POST_HOST_COMMAND_append = " write_host_sdk_manifest; " SDK_PACKAGING_COMMAND = "${@'${SDK_PACKAGING_FUNC};' if '${SDK_PACKAGING_FUNC}' else ''}" SDK_POSTPROCESS_COMMAND = " create_sdk_files; check_sdk_sysroots; tar_sdk; ${SDK_PACKAGING_COMMAND} " @@ -93,7 +97,7 @@ def populate_sdk_common(d): from oe.sdk import populate_sdk from oe.manifest import create_manifest, Manifest - pn = d.getVar('PN', True) + pn = d.getVar('PN') runtime_mapping_rename("TOOLCHAIN_TARGET_TASK", pn, d) runtime_mapping_rename("TOOLCHAIN_TARGET_TASK_ATTEMPTONLY", pn, d) @@ -101,13 +105,13 @@ def populate_sdk_common(d): ld.setVar("PKGDATA_DIR", "${STAGING_DIR}/${SDK_ARCH}-${SDKPKGSUFFIX}${SDK_VENDOR}-${SDK_OS}/pkgdata") runtime_mapping_rename("TOOLCHAIN_HOST_TASK", pn, ld) runtime_mapping_rename("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", pn, ld) - d.setVar("TOOLCHAIN_HOST_TASK", ld.getVar("TOOLCHAIN_HOST_TASK", True)) - d.setVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", ld.getVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", True)) + d.setVar("TOOLCHAIN_HOST_TASK", ld.getVar("TOOLCHAIN_HOST_TASK")) + d.setVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY", ld.getVar("TOOLCHAIN_HOST_TASK_ATTEMPTONLY")) # create target/host SDK manifests - create_manifest(d, manifest_dir=d.getVar('SDK_DIR', True), + create_manifest(d, manifest_dir=d.getVar('SDK_DIR'), manifest_type=Manifest.MANIFEST_TYPE_SDK_HOST) - create_manifest(d, manifest_dir=d.getVar('SDK_DIR', True), + create_manifest(d, manifest_dir=d.getVar('SDK_DIR'), manifest_type=Manifest.MANIFEST_TYPE_SDK_TARGET) populate_sdk(d) @@ -134,7 +138,7 @@ fakeroot create_sdk_files() { python check_sdk_sysroots() { # Fails build if there are broken or dangling symlinks in SDK sysroots - if d.getVar('CHECK_SDK_SYSROOTS', True) != '1': + if d.getVar('CHECK_SDK_SYSROOTS') != '1': # disabled, bail out return @@ -142,8 +146,8 @@ python check_sdk_sysroots() { return os.path.abspath(path) # Get scan root - SCAN_ROOT = norm_path("%s/%s/sysroots/" % (d.getVar('SDK_OUTPUT', True), - d.getVar('SDKPATH', True))) + SCAN_ROOT = norm_path("%s/%s/sysroots/" % (d.getVar('SDK_OUTPUT'), + d.getVar('SDKPATH'))) bb.note('Checking SDK sysroots at ' + SCAN_ROOT) @@ -218,10 +222,11 @@ EOF -e 's#@SDKEXTPATH@#${SDKEXTPATH}#g' \ -e 's#@OLDEST_KERNEL@#${SDK_OLDEST_KERNEL}#g' \ -e 's#@REAL_MULTIMACH_TARGET_SYS@#${REAL_MULTIMACH_TARGET_SYS}#g' \ - -e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE", True).replace('&', '\&')}#g' \ + -e 's#@SDK_TITLE@#${@d.getVar("SDK_TITLE").replace('&', '\&')}#g' \ -e 's#@SDK_VERSION@#${SDK_VERSION}#g' \ -e '/@SDK_PRE_INSTALL_COMMAND@/d' \ -e '/@SDK_POST_INSTALL_COMMAND@/d' \ + -e 's#@SDK_GCC_VER@#${@oe.utils.host_gcc_version(d)}#g' \ 
${SDKDEPLOYDIR}/${TOOLCHAIN_OUTPUTNAME}.sh # add execution permission @@ -241,8 +246,7 @@ populate_sdk_log_check() { echo "log_check: Using $lf_path as logfile" - if test -e "$lf_path" - then + if [ -e "$lf_path" ]; then ${IMAGE_PKGTYPE}_log_check $target $lf_path else echo "Cannot find logfile [$lf_path]" @@ -268,7 +272,7 @@ do_populate_sdk[file-checksums] += "${COREBASE}/meta/files/toolchain-shar-reloca ${COREBASE}/meta/files/toolchain-shar-extract.sh:True" do_populate_sdk[dirs] = "${PKGDATA_DIR} ${TOPDIR}" -do_populate_sdk[depends] += "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_DEPENDS', True).split()])} ${@d.getVarFlag('do_rootfs', 'depends', False)}" -do_populate_sdk[rdepends] = "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_RDEPENDS', True).split()])}" +do_populate_sdk[depends] += "${@' '.join([x + ':do_populate_sysroot' for x in d.getVar('SDK_DEPENDS').split()])} ${@d.getVarFlag('do_rootfs', 'depends', False)}" +do_populate_sdk[rdepends] = "${@' '.join([x + ':do_package_write_${IMAGE_PKGTYPE} ' + x + ':do_packagedata' for x in d.getVar('SDK_RDEPENDS').split()])}" do_populate_sdk[recrdeptask] += "do_packagedata do_package_write_rpm do_package_write_ipk do_package_write_deb" addtask populate_sdk diff --git a/import-layers/yocto-poky/meta/classes/populate_sdk_ext.bbclass b/import-layers/yocto-poky/meta/classes/populate_sdk_ext.bbclass index 39f614274..8b8a341e3 100644 --- a/import-layers/yocto-poky/meta/classes/populate_sdk_ext.bbclass +++ b/import-layers/yocto-poky/meta/classes/populate_sdk_ext.bbclass @@ -11,8 +11,6 @@ TOOLCHAIN_HOST_TASK_task-populate-sdk-ext = " \ TOOLCHAIN_TARGET_TASK_task-populate-sdk-ext = "" -SDK_RDEPENDS_append_task-populate-sdk-ext = " ${SDK_TARGETS}" - SDK_RELOCATE_AFTER_INSTALL_task-populate-sdk-ext = "0" SDK_EXT = "" @@ -21,7 +19,7 @@ SDK_EXT_task-populate-sdk-ext = "-ext" # Options are full or minimal SDK_EXT_TYPE ?= "full" SDK_INCLUDE_PKGDATA ?= "0" -SDK_INCLUDE_TOOLCHAIN ?= "${@'1' if d.getVar('SDK_EXT_TYPE', True) == 'full' else '0'}" +SDK_INCLUDE_TOOLCHAIN ?= "${@'1' if d.getVar('SDK_EXT_TYPE') == 'full' else '0'}" SDK_RECRDEP_TASKS ?= "" @@ -43,19 +41,21 @@ SDK_TARGETS ?= "${PN}" def get_sdk_install_targets(d, images_only=False): sdk_install_targets = '' - if images_only or d.getVar('SDK_EXT_TYPE', True) != 'minimal': - sdk_install_targets = d.getVar('SDK_TARGETS', True) + if images_only or d.getVar('SDK_EXT_TYPE') != 'minimal': + sdk_install_targets = d.getVar('SDK_TARGETS') depd = d.getVar('BB_TASKDEPDATA', False) + tasklist = bb.build.tasksbetween('do_image_complete', 'do_build', d) + tasklist.remove('do_build') for v in depd.values(): - if v[1] == 'do_image_complete': + if v[1] in tasklist: if v[0] not in sdk_install_targets: sdk_install_targets += ' {}'.format(v[0]) if not images_only: - if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1': + if d.getVar('SDK_INCLUDE_PKGDATA') == '1': sdk_install_targets += ' meta-world-pkgdata:do_allpackagedata' - if d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1': + if d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1': sdk_install_targets += ' meta-extsdk-toolchain:do_populate_sysroot' return sdk_install_targets @@ -77,13 +77,13 @@ COREBASE_FILES ?= " \ SDK_DIR_task-populate-sdk-ext = "${WORKDIR}/sdk-ext" B_task-populate-sdk-ext = "${SDK_DIR}" -TOOLCHAINEXT_OUTPUTNAME = "${SDK_NAME}-toolchain-ext-${SDK_VERSION}" +TOOLCHAINEXT_OUTPUTNAME ?= "${SDK_NAME}-toolchain-ext-${SDK_VERSION}" TOOLCHAIN_OUTPUTNAME_task-populate-sdk-ext = "${TOOLCHAINEXT_OUTPUTNAME}" SDK_EXT_TARGET_MANIFEST = 
"${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest" SDK_EXT_HOST_MANIFEST = "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest" -SDK_TITLE_task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME', True) or d.getVar('DISTRO', True)} Extensible SDK" +SDK_TITLE_task-populate-sdk-ext = "${@d.getVar('DISTRO_NAME') or d.getVar('DISTRO')} Extensible SDK" def clean_esdk_builddir(d, sdkbasepath): """Clean up traces of the fake build for create_filtered_tasklist()""" @@ -110,10 +110,11 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath): try: with open(sdkbasepath + '/conf/local.conf', 'a') as f: # Force the use of sstate from the build system - f.write('\nSSTATE_DIR_forcevariable = "%s"\n' % d.getVar('SSTATE_DIR', True)) + f.write('\nSSTATE_DIR_forcevariable = "%s"\n' % d.getVar('SSTATE_DIR')) f.write('SSTATE_MIRRORS_forcevariable = ""\n') # Ensure TMPDIR is the default so that clean_esdk_builddir() can delete it f.write('TMPDIR_forcevariable = "${TOPDIR}/tmp"\n') + f.write('TCLIBCAPPEND_forcevariable = ""\n') # Drop uninative if the build isn't using it (or else NATIVELSBSTRING will # be different and we won't be able to find our native sstate) if not bb.data.inherits_class('uninative', d): @@ -121,7 +122,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath): # Unfortunately the default SDKPATH (or even a custom value) may contain characters that bitbake # will not allow in its COREBASE path, so we need to rename the directory temporarily - temp_sdkbasepath = d.getVar('SDK_OUTPUT', True) + '/tmp-renamed-sdk' + temp_sdkbasepath = d.getVar('SDK_OUTPUT') + '/tmp-renamed-sdk' # Delete any existing temp dir try: shutil.rmtree(temp_sdkbasepath) @@ -130,7 +131,7 @@ def create_filtered_tasklist(d, sdkbasepath, tasklistfile, conf_initpath): os.rename(sdkbasepath, temp_sdkbasepath) try: cmdprefix = '. 
%s .; ' % conf_initpath - logfile = d.getVar('WORKDIR', True) + '/tasklist_bb_log.txt' + logfile = d.getVar('WORKDIR') + '/tasklist_bb_log.txt' try: oe.copy_buildsystem.check_sstate_task_list(d, get_sdk_install_targets(d), tasklistfile, cmdprefix=cmdprefix, cwd=temp_sdkbasepath, logfile=logfile) except bb.process.ExecutionError as e: @@ -152,7 +153,7 @@ python copy_buildsystem () { import glob import oe.copy_buildsystem - oe_init_env_script = d.getVar('OE_INIT_ENV_SCRIPT', True) + oe_init_env_script = d.getVar('OE_INIT_ENV_SCRIPT') conf_bbpath = '' conf_initpath = '' @@ -160,10 +161,10 @@ python copy_buildsystem () { # Copy in all metadata layers + bitbake (as repositories) buildsystem = oe.copy_buildsystem.BuildSystem('extensible SDK', d) - baseoutpath = d.getVar('SDK_OUTPUT', True) + '/' + d.getVar('SDKPATH', True) + baseoutpath = d.getVar('SDK_OUTPUT') + '/' + d.getVar('SDKPATH') # Determine if we're building a derivative extensible SDK (from devtool build-sdk) - derivative = (d.getVar('SDK_DERIVATIVE', True) or '') == '1' + derivative = (d.getVar('SDK_DERIVATIVE') or '') == '1' if derivative: workspace_name = 'orig-workspace' else: @@ -171,7 +172,7 @@ python copy_buildsystem () { layers_copied = buildsystem.copy_bitbake_and_layers(baseoutpath + '/layers', workspace_name) sdkbblayers = [] - corebase = os.path.basename(d.getVar('COREBASE', True)) + corebase = os.path.basename(d.getVar('COREBASE')) for layer in layers_copied: if corebase == os.path.basename(layer): conf_bbpath = os.path.join('layers', layer, 'bitbake') @@ -202,8 +203,8 @@ python copy_buildsystem () { config.set('General', 'init_path', conf_initpath) config.set('General', 'core_meta_subdir', core_meta_subdir) config.add_section('SDK') - config.set('SDK', 'sdk_targets', d.getVar('SDK_TARGETS', True)) - updateurl = d.getVar('SDK_UPDATE_URL', True) + config.set('SDK', 'sdk_targets', d.getVar('SDK_TARGETS')) + updateurl = d.getVar('SDK_UPDATE_URL') if updateurl: config.set('SDK', 'updateserver', updateurl) bb.utils.mkdirhier(os.path.join(baseoutpath, 'conf')) @@ -215,7 +216,7 @@ python copy_buildsystem () { pass # Create a layer for new recipes / appends - bbpath = d.getVar('BBPATH', True) + bbpath = d.getVar('BBPATH') bb.process.run(['devtool', '--bbpath', bbpath, '--basepath', baseoutpath, 'create-workspace', '--create-only', os.path.join(baseoutpath, 'workspace')]) # Create bblayers.conf @@ -242,22 +243,25 @@ python copy_buildsystem () { # Copy uninative tarball # For now this is where uninative.bbclass expects the tarball - uninative_file = d.expand('${SDK_DEPLOY}/${BUILD_ARCH}-nativesdk-libc.tar.bz2') - uninative_checksum = bb.utils.sha256_file(uninative_file) - uninative_outdir = '%s/downloads/uninative/%s' % (baseoutpath, uninative_checksum) - bb.utils.mkdirhier(uninative_outdir) - shutil.copy(uninative_file, uninative_outdir) - - env_whitelist = (d.getVar('BB_ENV_EXTRAWHITE', True) or '').split() + if bb.data.inherits_class('uninative', d): + uninative_file = d.expand('${UNINATIVE_DLDIR}/' + d.getVarFlag("UNINATIVE_CHECKSUM", d.getVar("BUILD_ARCH")) + '/${UNINATIVE_TARBALL}') + uninative_checksum = bb.utils.sha256_file(uninative_file) + uninative_outdir = '%s/downloads/uninative/%s' % (baseoutpath, uninative_checksum) + bb.utils.mkdirhier(uninative_outdir) + shutil.copy(uninative_file, uninative_outdir) + + env_whitelist = (d.getVar('BB_ENV_EXTRAWHITE') or '').split() env_whitelist_values = {} # Create local.conf - builddir = d.getVar('TOPDIR', True) + builddir = d.getVar('TOPDIR') + if derivative and 
os.path.exists(builddir + '/conf/auto.conf'): + shutil.copyfile(builddir + '/conf/auto.conf', baseoutpath + '/conf/auto.conf') if derivative: shutil.copyfile(builddir + '/conf/local.conf', baseoutpath + '/conf/local.conf') else: - local_conf_whitelist = (d.getVar('SDK_LOCAL_CONF_WHITELIST', True) or '').split() - local_conf_blacklist = (d.getVar('SDK_LOCAL_CONF_BLACKLIST', True) or '').split() + local_conf_whitelist = (d.getVar('SDK_LOCAL_CONF_WHITELIST') or '').split() + local_conf_blacklist = (d.getVar('SDK_LOCAL_CONF_BLACKLIST') or '').split() def handle_var(varname, origvalue, op, newlines): if varname in local_conf_blacklist or (origvalue.strip().startswith('/') and not varname in local_conf_whitelist): newlines.append('# Removed original setting of %s\n' % varname) @@ -267,8 +271,12 @@ python copy_buildsystem () { env_whitelist_values[varname] = origvalue return origvalue, op, 0, True varlist = ['[^#=+ ]*'] + oldlines = [] + if os.path.exists(builddir + '/conf/auto.conf'): + with open(builddir + '/conf/auto.conf', 'r') as f: + oldlines += f.readlines() with open(builddir + '/conf/local.conf', 'r') as f: - oldlines = f.readlines() + oldlines += f.readlines() (updated, newlines) = bb.utils.edit_metadata(oldlines, varlist, handle_var) with open(baseoutpath + '/conf/local.conf', 'w') as f: @@ -282,10 +290,12 @@ python copy_buildsystem () { # Write a newline just in case there's none at the end of the original f.write('\n') + f.write('TMPDIR = "${TOPDIR}/tmp"\n') + f.write('TCLIBCAPPEND = ""\n') f.write('DL_DIR = "${TOPDIR}/downloads"\n') f.write('INHERIT += "%s"\n' % 'uninative') - f.write('UNINATIVE_CHECKSUM[%s] = "%s"\n\n' % (d.getVar('BUILD_ARCH', True), uninative_checksum)) + f.write('UNINATIVE_CHECKSUM[%s] = "%s"\n\n' % (d.getVar('BUILD_ARCH'), uninative_checksum)) f.write('CONF_VERSION = "%s"\n\n' % d.getVar('CONF_VERSION', False)) # Some classes are not suitable for SDK, remove them from INHERIT @@ -305,13 +315,13 @@ python copy_buildsystem () { f.write('SIGGEN_LOCKEDSIGS_TASKSIG_CHECK = "warn"\n\n') # Set up whitelist for run on install - f.write('BB_SETSCENE_ENFORCE_WHITELIST = "%:* *:do_shared_workdir *:do_rm_work *:do_package"\n\n') + f.write('BB_SETSCENE_ENFORCE_WHITELIST = "%:* *:do_shared_workdir *:do_rm_work wic-tools:* *:do_addto_recipe_sysroot"\n\n') # Hide the config information from bitbake output (since it's fixed within the SDK) f.write('BUILDCFG_HEADER = ""\n\n') # Map gcc-dependent uninative sstate cache for installer usage - f.write('SSTATE_MIRRORS = "file://universal/(.*) file://universal-4.9/\\1\\nfile://universal-4.9/(.*) file://universal-4.8/\\1"\n\n') + f.write('SSTATE_MIRRORS += " file://universal/(.*) file://universal-4.9/\\1 file://universal-4.9/(.*) file://universal-4.8/\\1"\n\n') # Allow additional config through sdk-extra.conf fn = bb.cookerdata.findConfigFile('sdk-extra.conf', d) @@ -322,7 +332,7 @@ python copy_buildsystem () { # If you define a sdk_extraconf() function then it can contain additional config # (Though this is awkward; sdk-extra.conf should probably be used instead) - extraconf = (d.getVar('sdk_extraconf', True) or '').strip() + extraconf = (d.getVar('sdk_extraconf') or '').strip() if extraconf: # Strip off any leading / trailing spaces for line in extraconf.splitlines(): @@ -331,22 +341,6 @@ python copy_buildsystem () { f.write('require conf/locked-sigs.inc\n') f.write('require conf/unlocked-sigs.inc\n') - if os.path.exists(builddir + '/conf/auto.conf'): - if derivative: - shutil.copyfile(builddir + '/conf/auto.conf', baseoutpath + 
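handle_var() above is handed to bb.utils.edit_metadata() so that blacklisted variables, and absolute-path values not on the whitelist, are dropped from the SDK's generated local.conf with a comment left in their place. A simplified stand-in for that filtering without the edit_metadata machinery (the variable names and values are invented):

    blacklist = {"SSTATE_DIR", "DL_DIR", "TMPDIR"}
    whitelist = {"SOURCE_MIRROR_URL"}
    oldlines = ['DL_DIR = "/home/user/downloads"\n',
                'CONF_VERSION = "1"\n']
    newlines = []
    for line in oldlines:
        name, _, value = line.partition("=")
        name = name.strip()
        value = value.strip().strip('"')
        if name in blacklist or (value.startswith("/") and name not in whitelist):
            newlines.append("# Removed original setting of %s\n" % name)
        else:
            newlines.append(line)
    print("".join(newlines), end="")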
'/conf/auto.conf') - else: - with open(builddir + '/conf/auto.conf', 'r') as f: - oldlines = f.readlines() - (updated, newlines) = bb.utils.edit_metadata(oldlines, varlist, handle_var) - with open(baseoutpath + '/conf/auto.conf', 'w') as f: - f.write('# WARNING: this configuration has been automatically generated and in\n') - f.write('# most cases should not be edited. If you need more flexibility than\n') - f.write('# this configuration provides, it is strongly suggested that you set\n') - f.write('# up a proper instance of the full build system and use that instead.\n\n') - for line in newlines: - if line.strip() and not line.startswith('#'): - f.write(line) - # Write a templateconf.cfg with open(baseoutpath + '/conf/templateconf.cfg', 'w') as f: f.write('meta/conf\n') @@ -355,7 +349,7 @@ python copy_buildsystem () { # BB_ENV_EXTRAWHITE) are set in the SDK's configuration extralines = [] for name, value in env_whitelist_values.items(): - actualvalue = d.getVar(name, True) or '' + actualvalue = d.getVar(name) or '' if value != actualvalue: extralines.append('%s = "%s"\n' % (name, actualvalue)) if extralines: @@ -368,7 +362,7 @@ python copy_buildsystem () { # Filter the locked signatures file to just the sstate tasks we are interested in excluded_targets = get_sdk_install_targets(d, images_only=True) - sigfile = d.getVar('WORKDIR', True) + '/locked-sigs.inc' + sigfile = d.getVar('WORKDIR') + '/locked-sigs.inc' lockedsigs_pruned = baseoutpath + '/conf/locked-sigs.inc' oe.copy_buildsystem.prune_lockedsigs([], excluded_targets.split(), @@ -381,36 +375,36 @@ python copy_buildsystem () { # uninative.bbclass sets NATIVELSBSTRING to 'universal%s' % oe.utils.host_gcc_version(d) fixedlsbstring = "universal%s" % oe.utils.host_gcc_version(d) - sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1') - sdk_ext_type = d.getVar('SDK_EXT_TYPE', True) + sdk_include_toolchain = (d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1') + sdk_ext_type = d.getVar('SDK_EXT_TYPE') if sdk_ext_type != 'minimal' or sdk_include_toolchain or derivative: # Create the filtered task list used to generate the sstate cache shipped with the SDK - tasklistfn = d.getVar('WORKDIR', True) + '/tasklist.txt' + tasklistfn = d.getVar('WORKDIR') + '/tasklist.txt' create_filtered_tasklist(d, baseoutpath, tasklistfn, conf_initpath) else: tasklistfn = None # Add packagedata if enabled - if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1': - lockedsigs_base = d.getVar('WORKDIR', True) + '/locked-sigs-base.inc' - lockedsigs_copy = d.getVar('WORKDIR', True) + '/locked-sigs-copy.inc' + if d.getVar('SDK_INCLUDE_PKGDATA') == '1': + lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base.inc' + lockedsigs_copy = d.getVar('WORKDIR') + '/locked-sigs-copy.inc' shutil.move(lockedsigs_pruned, lockedsigs_base) oe.copy_buildsystem.merge_lockedsigs(['do_packagedata'], lockedsigs_base, - d.getVar('STAGING_DIR_HOST', True) + '/world-pkgdata/locked-sigs-pkgdata.inc', + d.getVar('STAGING_DIR_HOST') + '/world-pkgdata/locked-sigs-pkgdata.inc', lockedsigs_pruned, lockedsigs_copy) if sdk_include_toolchain: - lockedsigs_base = d.getVar('WORKDIR', True) + '/locked-sigs-base2.inc' - lockedsigs_toolchain = d.getVar('STAGING_DIR_HOST', True) + '/locked-sigs/locked-sigs-extsdk-toolchain.inc' + lockedsigs_base = d.getVar('WORKDIR') + '/locked-sigs-base2.inc' + lockedsigs_toolchain = d.expand("${STAGING_DIR}/${TUNE_PKGARCH}/meta-extsdk-toolchain/locked-sigs/locked-sigs-extsdk-toolchain.inc") shutil.move(lockedsigs_pruned, lockedsigs_base) 
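The SDK_EXT_TYPE / SDK_INCLUDE_TOOLCHAIN / SDK_INCLUDE_PKGDATA branches above are driven by ordinary build configuration; an illustrative (not exhaustive) local.conf fragment that exercises both optional paths might look like:

    # Illustrative local.conf settings controlling extensible SDK contents
    SDK_EXT_TYPE = "minimal"        # or "full"
    SDK_INCLUDE_TOOLCHAIN = "1"     # ship the cross-toolchain, pulls in the meta-extsdk-toolchain locked sigs
    SDK_INCLUDE_PKGDATA = "1"       # merge the meta-world-pkgdata locked signatures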
oe.copy_buildsystem.merge_lockedsigs([], lockedsigs_base, lockedsigs_toolchain, lockedsigs_pruned) oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_toolchain, - d.getVar('SSTATE_DIR', True), + d.getVar('SSTATE_DIR'), sstate_out, d, fixedlsbstring, filterfile=tasklistfn) @@ -420,22 +414,22 @@ python copy_buildsystem () { # Assume the user is not going to set up an additional sstate # mirror, thus we need to copy the additional artifacts (from # workspace recipes) into the derivative SDK - lockedsigs_orig = d.getVar('TOPDIR', True) + '/conf/locked-sigs.inc' + lockedsigs_orig = d.getVar('TOPDIR') + '/conf/locked-sigs.inc' if os.path.exists(lockedsigs_orig): - lockedsigs_extra = d.getVar('WORKDIR', True) + '/locked-sigs-extra.inc' + lockedsigs_extra = d.getVar('WORKDIR') + '/locked-sigs-extra.inc' oe.copy_buildsystem.merge_lockedsigs(None, lockedsigs_orig, lockedsigs_pruned, None, lockedsigs_extra) oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_extra, - d.getVar('SSTATE_DIR', True), + d.getVar('SSTATE_DIR'), sstate_out, d, fixedlsbstring, filterfile=tasklistfn) else: oe.copy_buildsystem.create_locked_sstate_cache(lockedsigs_pruned, - d.getVar('SSTATE_DIR', True), + d.getVar('SSTATE_DIR'), sstate_out, d, fixedlsbstring, filterfile=tasklistfn) @@ -466,24 +460,24 @@ python copy_buildsystem () { def get_current_buildtools(d): """Get the file name of the current buildtools installer""" import glob - btfiles = glob.glob(os.path.join(d.getVar('SDK_DEPLOY', True), '*-buildtools-nativesdk-standalone-*.sh')) + btfiles = glob.glob(os.path.join(d.getVar('SDK_DEPLOY'), '*-buildtools-nativesdk-standalone-*.sh')) btfiles.sort(key=os.path.getctime) return os.path.basename(btfiles[-1]) def get_sdk_required_utilities(buildtools_fn, d): """Find required utilities that aren't provided by the buildtools""" - sanity_required_utilities = (d.getVar('SANITY_REQUIRED_UTILITIES', True) or '').split() + sanity_required_utilities = (d.getVar('SANITY_REQUIRED_UTILITIES') or '').split() sanity_required_utilities.append(d.expand('${BUILD_PREFIX}gcc')) sanity_required_utilities.append(d.expand('${BUILD_PREFIX}g++')) - buildtools_installer = os.path.join(d.getVar('SDK_DEPLOY', True), buildtools_fn) + buildtools_installer = os.path.join(d.getVar('SDK_DEPLOY'), buildtools_fn) filelist, _ = bb.process.run('%s -l' % buildtools_installer) localdata = bb.data.createCopy(d) localdata.setVar('SDKPATH', '.') - sdkpathnative = localdata.getVar('SDKPATHNATIVE', True) - sdkbindirs = [localdata.getVar('bindir_nativesdk', True), - localdata.getVar('sbindir_nativesdk', True), - localdata.getVar('base_bindir_nativesdk', True), - localdata.getVar('base_sbindir_nativesdk', True)] + sdkpathnative = localdata.getVar('SDKPATHNATIVE') + sdkbindirs = [localdata.getVar('bindir_nativesdk'), + localdata.getVar('sbindir_nativesdk'), + localdata.getVar('base_bindir_nativesdk'), + localdata.getVar('base_sbindir_nativesdk')] for line in filelist.splitlines(): splitline = line.split() if len(splitline) > 5: @@ -510,9 +504,10 @@ install_tools() { done # We can't use the same method as above because files in the sysroot won't exist at this point # (they get populated from sstate on installation) - if [ "${SDK_INCLUDE_TOOLCHAIN}" == "1" ] ; then - binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE',True), d.getVar('TOPDIR', True))} - lnr ${SDK_OUTPUT}/${SDKPATH}/$binrelpath/unfsd ${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}/unfsd + unfsd_path="${SDK_OUTPUT}/${SDKPATHNATIVE}${bindir_nativesdk}/unfsd" + if [ 
"${SDK_INCLUDE_TOOLCHAIN}" = "1" -a ! -e $unfsd_path ] ; then + binrelpath=${@os.path.relpath(d.getVar('STAGING_BINDIR_NATIVE'), d.getVar('TMPDIR'))} + lnr ${SDK_OUTPUT}/${SDKPATH}/tmp/$binrelpath/unfsd $unfsd_path fi touch ${SDK_OUTPUT}/${SDKPATH}/.devtoolbase @@ -583,6 +578,8 @@ sdk_ext_postinst() { # Allow bitbake environment setup to be ran as part of this sdk. echo "export OE_SKIP_SDK_CHECK=1" >> $env_setup_script + # Work around runqemu not knowing how to get this information within the eSDK + echo "export DEPLOY_DIR_IMAGE=$target_sdk_dir/tmp/${@os.path.relpath(d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('TMPDIR'))}" >> $env_setup_script # A bit of another hack, but we need this in the path only for devtool # so put it at the end of $PATH. @@ -613,8 +610,8 @@ SDK_INSTALL_TARGETS = "" fakeroot python do_populate_sdk_ext() { # FIXME hopefully we can remove this restriction at some point, but uninative # currently forces this upon us - if d.getVar('SDK_ARCH', True) != d.getVar('BUILD_ARCH', True): - bb.fatal('The extensible SDK can currently only be built for the same architecture as the machine being built on - SDK_ARCH is set to %s (likely via setting SDKMACHINE) which is different from the architecture of the build machine (%s). Unable to continue.' % (d.getVar('SDK_ARCH', True), d.getVar('BUILD_ARCH', True))) + if d.getVar('SDK_ARCH') != d.getVar('BUILD_ARCH'): + bb.fatal('The extensible SDK can currently only be built for the same architecture as the machine being built on - SDK_ARCH is set to %s (likely via setting SDKMACHINE) which is different from the architecture of the build machine (%s). Unable to continue.' % (d.getVar('SDK_ARCH'), d.getVar('BUILD_ARCH'))) d.setVar('SDK_INSTALL_TARGETS', get_sdk_install_targets(d)) buildtools_fn = get_current_buildtools(d) @@ -628,10 +625,12 @@ fakeroot python do_populate_sdk_ext() { def get_ext_sdk_depends(d): # Note: the deps varflag is a list not a string, so we need to specify expand=False deps = d.getVarFlag('do_image_complete', 'deps', False) - pn = d.getVar('PN', True) + pn = d.getVar('PN') deplist = ['%s:%s' % (pn, dep) for dep in deps] - for task in ['do_image_complete', 'do_rootfs', 'do_build']: - deplist.extend((d.getVarFlag(task, 'depends', True) or '').split()) + tasklist = bb.build.tasksbetween('do_image_complete', 'do_build', d) + tasklist.append('do_rootfs') + for task in tasklist: + deplist.extend((d.getVarFlag(task, 'depends') or '').split()) return ' '.join(deplist) python do_sdk_depends() { @@ -639,13 +638,13 @@ python do_sdk_depends() { # dependencies we don't need to (e.g. 
buildtools-tarball) and bringing those # into the SDK's sstate-cache import oe.copy_buildsystem - sigfile = d.getVar('WORKDIR', True) + '/locked-sigs.inc' + sigfile = d.getVar('WORKDIR') + '/locked-sigs.inc' oe.copy_buildsystem.generate_locked_sigs(sigfile, d) } addtask sdk_depends do_sdk_depends[dirs] = "${WORKDIR}" -do_sdk_depends[depends] = "${@get_ext_sdk_depends(d)}" +do_sdk_depends[depends] = "${@get_ext_sdk_depends(d)} meta-extsdk-toolchain:do_populate_sysroot" do_sdk_depends[recrdeptask] = "${@d.getVarFlag('do_populate_sdk', 'recrdeptask', False)}" do_sdk_depends[recrdeptask] += "do_populate_lic do_package_qa do_populate_sysroot do_deploy ${SDK_RECRDEP_TASKS}" do_sdk_depends[rdepends] = "${@get_sdk_ext_rdepends(d)}" @@ -653,17 +652,21 @@ do_sdk_depends[rdepends] = "${@get_sdk_ext_rdepends(d)}" def get_sdk_ext_rdepends(d): localdata = d.createCopy() localdata.appendVar('OVERRIDES', ':task-populate-sdk-ext') - bb.data.update_data(localdata) - return localdata.getVarFlag('do_populate_sdk', 'rdepends', True) + return localdata.getVarFlag('do_populate_sdk', 'rdepends') do_populate_sdk_ext[dirs] = "${@d.getVarFlag('do_populate_sdk', 'dirs', False)}" do_populate_sdk_ext[depends] = "${@d.getVarFlag('do_populate_sdk', 'depends', False)} \ - buildtools-tarball:do_populate_sdk uninative-tarball:do_populate_sdk \ - ${@'meta-world-pkgdata:do_collect_packagedata' if d.getVar('SDK_INCLUDE_PKGDATA', True) == '1' else ''} \ - ${@'meta-extsdk-toolchain:do_locked_sigs' if d.getVar('SDK_INCLUDE_TOOLCHAIN', True) == '1' else ''}" + buildtools-tarball:do_populate_sdk \ + ${@'meta-world-pkgdata:do_collect_packagedata' if d.getVar('SDK_INCLUDE_PKGDATA') == '1' else ''} \ + ${@'meta-extsdk-toolchain:do_locked_sigs' if d.getVar('SDK_INCLUDE_TOOLCHAIN') == '1' else ''}" -do_populate_sdk_ext[rdepends] += "${@' '.join([x + ':do_build' for x in d.getVar('SDK_TARGETS', True).split()])}" +# We must avoid depending on do_build here if rm_work.bbclass is active, +# because otherwise do_rm_work may run before do_populate_sdk_ext itself. +# We can't mark do_populate_sdk_ext and do_sdk_depends as having to +# run before do_rm_work, because then they would also run as part +# of normal builds. 
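The rdepends expression in the next hunk line uses the RM_WORK_BUILD_WITHOUT fallback introduced by rm_work.bbclass further down in this patch. Any other dependency that means "fully built, but without triggering do_rm_work" can use the same pattern; a hypothetical example (do_mytask and some-recipe are placeholders):

    # Depend on the non-destructive build task when rm_work is active,
    # otherwise fall back to the normal do_build.
    do_mytask[depends] += "some-recipe:${@d.getVar('RM_WORK_BUILD_WITHOUT') or 'do_build'}"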
+do_populate_sdk_ext[rdepends] += "${@' '.join([x + ':' + (d.getVar('RM_WORK_BUILD_WITHOUT') or 'do_build') for x in d.getVar('SDK_TARGETS').split()])}" # Make sure code changes can result in rebuild do_populate_sdk_ext[vardeps] += "copy_buildsystem \ @@ -678,7 +681,7 @@ SDKEXTDEPLOYDIR = "${WORKDIR}/deploy-${PN}-populate-sdk-ext" SSTATETASKS += "do_populate_sdk_ext" SSTATE_SKIP_CREATION_task-populate-sdk-ext = '1' -do_populate_sdk_ext[cleandirs] = "${SDKDEPLOYDIR}" +do_populate_sdk_ext[cleandirs] = "${SDKEXTDEPLOYDIR}" do_populate_sdk_ext[sstate-inputdirs] = "${SDKEXTDEPLOYDIR}" do_populate_sdk_ext[sstate-outputdirs] = "${SDK_DEPLOY}" do_populate_sdk_ext[stamp-extra-info] = "${MACHINE}" diff --git a/import-layers/yocto-poky/meta/classes/prexport.bbclass b/import-layers/yocto-poky/meta/classes/prexport.bbclass index 809ec1034..6dcf99e29 100644 --- a/import-layers/yocto-poky/meta/classes/prexport.bbclass +++ b/import-layers/yocto-poky/meta/classes/prexport.bbclass @@ -15,7 +15,7 @@ python prexport_handler () { if isinstance(e, bb.event.RecipeParsed): import oe.prservice #get all PR values for the current PRAUTOINX - ver = e.data.getVar('PRSERV_DUMPOPT_VERSION', True) + ver = e.data.getVar('PRSERV_DUMPOPT_VERSION') ver = ver.replace('%','-') retval = oe.prservice.prserv_dump_db(e.data) if not retval: @@ -40,7 +40,7 @@ python prexport_handler () { import oe.prservice oe.prservice.prserv_check_avail(e.data) #remove dumpfile - bb.utils.remove(e.data.getVar('PRSERV_DUMPFILE', True)) + bb.utils.remove(e.data.getVar('PRSERV_DUMPFILE')) elif isinstance(e, bb.event.ParseCompleted): import oe.prservice #dump meta info of tables diff --git a/import-layers/yocto-poky/meta/classes/ptest.bbclass b/import-layers/yocto-poky/meta/classes/ptest.bbclass index fa3561e62..c19f65b9b 100644 --- a/import-layers/yocto-poky/meta/classes/ptest.bbclass +++ b/import-layers/yocto-poky/meta/classes/ptest.bbclass @@ -2,7 +2,7 @@ SUMMARY_${PN}-ptest ?= "${SUMMARY} - Package test files" DESCRIPTION_${PN}-ptest ?= "${DESCRIPTION} \ This package contains a test directory ${PTEST_PATH} for package test purposes." 
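Most of the mechanical churn in these class hunks is one and the same change: the second argument to d.getVar()/d.getVarFlag() is dropped because the BitBake datastore now expands by default. In shorthand:

    # Equivalent spellings under the updated datastore API
    workdir = d.getVar('WORKDIR', True)    # old style: expansion requested explicitly
    workdir = d.getVar('WORKDIR')          # new style: expansion is the default
    raw     = d.getVar('WORKDIR', False)   # unexpanded value, behaviour unchanged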
-PTEST_PATH ?= "${libdir}/${PN}/ptest" +PTEST_PATH ?= "${libdir}/${BPN}/ptest" FILES_${PN}-ptest = "${PTEST_PATH}" SECTION_${PN}-ptest = "devel" ALLOW_EMPTY_${PN}-ptest = "1" @@ -61,7 +61,7 @@ python () { d.setVarFlag('do_install_ptest_base', 'fakeroot', '1') # Remove all '*ptest_base' tasks when ptest is not enabled - if not(d.getVar('PTEST_ENABLED', True) == "1"): + if not(d.getVar('PTEST_ENABLED') == "1"): for i in ['do_configure_ptest_base', 'do_compile_ptest_base', 'do_install_ptest_base']: bb.build.deltask(i, d) } diff --git a/import-layers/yocto-poky/meta/classes/qemu.bbclass b/import-layers/yocto-poky/meta/classes/qemu.bbclass index f2d4d1c9e..f5c578012 100644 --- a/import-layers/yocto-poky/meta/classes/qemu.bbclass +++ b/import-layers/yocto-poky/meta/classes/qemu.bbclass @@ -4,12 +4,12 @@ # def qemu_target_binary(data): - package_arch = data.getVar("PACKAGE_ARCH", True) - qemu_target_binary = (data.getVar("QEMU_TARGET_BINARY_%s" % package_arch, True) or "") + package_arch = data.getVar("PACKAGE_ARCH") + qemu_target_binary = (data.getVar("QEMU_TARGET_BINARY_%s" % package_arch) or "") if qemu_target_binary: return qemu_target_binary - target_arch = data.getVar("TARGET_ARCH", True) + target_arch = data.getVar("TARGET_ARCH") if target_arch in ("i486", "i586", "i686"): target_arch = "i386" elif target_arch == "powerpc": @@ -26,7 +26,7 @@ def qemu_wrapper_cmdline(data, rootfs_path, library_paths): if qemu_binary == "qemu-allarch": qemu_binary = "qemuwrapper" - qemu_options = data.getVar("QEMU_OPTIONS", True) + qemu_options = data.getVar("QEMU_OPTIONS") return "PSEUDO_UNLOAD=1 " + qemu_binary + " " + qemu_options + " -L " + rootfs_path\ + " -E LD_LIBRARY_PATH=" + ":".join(library_paths) + " " @@ -52,7 +52,7 @@ def qemu_run_binary(data, rootfs_path, binary): # this dance). For others (e.g. arm) a -cpu option is not necessary, since the # qemu-arm default CPU supports all required architecture levels. -QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH', True), True) or ""}" +QEMU_OPTIONS = "-r ${OLDEST_KERNEL} ${@d.getVar("QEMU_EXTRAOPTIONS_%s" % d.getVar('PACKAGE_ARCH')) or ""}" QEMU_OPTIONS[vardeps] += "QEMU_EXTRAOPTIONS_${PACKAGE_ARCH}" QEMU_EXTRAOPTIONS_ppce500v2 = " -cpu e500v2" diff --git a/import-layers/yocto-poky/meta/classes/qemuboot.bbclass b/import-layers/yocto-poky/meta/classes/qemuboot.bbclass index b5cc93dc9..3468d1c67 100644 --- a/import-layers/yocto-poky/meta/classes/qemuboot.bbclass +++ b/import-layers/yocto-poky/meta/classes/qemuboot.bbclass @@ -3,30 +3,52 @@ # boot by runqemu: # # QB_SYSTEM_NAME: qemu name, e.g., "qemu-system-i386" +# # QB_OPT_APPEND: options to append to qemu, e.g., "-show-cursor" +# # QB_DEFAULT_KERNEL: default kernel to boot, e.g., "bzImage" +# # QB_DEFAULT_FSTYPE: default FSTYPE to boot, e.g., "ext4" +# # QB_MEM: memory, e.g., "-m 512" +# # QB_MACHINE: qemu machine, e.g., "-machine virt" +# # QB_CPU: qemu cpu, e.g., "-cpu qemu32" +# # QB_CPU_KVM: the similar to QB_CPU, but used when kvm, e.g., '-cpu kvm64', # set it when support kvm. +# # QB_KERNEL_CMDLINE_APPEND: options to append to kernel's -append # option, e.g., "console=ttyS0 console=tty" +# # QB_DTB: qemu dtb name +# # QB_AUDIO_DRV: qemu audio driver, e.g., "alsa", set it when support audio +# # QB_AUDIO_OPT: qemu audio option, e.g., "-soundhw ac97,es1370", used # when QB_AUDIO_DRV is set. 
+# # QB_KERNEL_ROOT: kernel's root, e.g., /dev/vda +# +# QB_NETWORK_DEVICE: network device, e.g., "-device virtio-net-pci,netdev=net0,mac=@MAC@", +# it needs work with QB_TAP_OPT and QB_SLIRP_OPT. +# Note, runqemu will replace @MAC@ with a predefined mac, you can set +# a custom one, but that may cause conflicts when multiple qemus are +# running on the same host. +# # QB_TAP_OPT: netowrk option for 'tap' mode, e.g., -# "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no -device virtio-net-device,netdev=net0" +# "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no" # Note, runqemu will replace "@TAP@" with the one which is used, such as tap0, tap1 ... -# QB_SLIRP_OPT: network option for SLIRP mode, e.g., -# "-netdev user,id=net0 -device virtio-net-device,netdev=net0" +# +# QB_SLIRP_OPT: network option for SLIRP mode, e.g., -netdev user,id=net0" +# # QB_ROOTFS_OPT: used as rootfs, e.g., # "-drive id=disk0,file=@ROOTFS@,if=none,format=raw -device virtio-blk-device,drive=disk0" # Note, runqemu will replace "@ROOTFS@" with the one which is used, such as core-image-minimal-qemuarm64.ext4. +# # QB_SERIAL_OPT: serial port, e.g., "-serial mon:stdio" +# # QB_TCPSERIAL_OPT: tcp serial port option, e.g., # " -device virtio-serial-device -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1 -device virtconsole,chardev=virtcon" # Note, runqemu will replace "@PORT@" with the port number which is used. @@ -40,36 +62,53 @@ QB_SERIAL_OPT ?= "-serial mon:stdio -serial null" QB_DEFAULT_KERNEL ?= "${KERNEL_IMAGETYPE}" QB_DEFAULT_FSTYPE ?= "ext4" QB_OPT_APPEND ?= "-show-cursor" +QB_NETWORK_DEVICE ?= "-device virtio-net-pci,netdev=net0,mac=@MAC@" -# Create qemuboot.conf -ROOTFS_POSTPROCESS_COMMAND += "write_qemuboot_conf; " +# This should be kept align with ROOT_VM +QB_DRIVE_TYPE ?= "/dev/sd" -python write_qemuboot_conf() { - import configparser +# Create qemuboot.conf +addtask do_write_qemuboot_conf after do_rootfs before do_image +IMGDEPLOYDIR ?= "${WORKDIR}/deploy-${PN}-image-complete" - build_vars = ['MACHINE', 'TUNE_ARCH', 'DEPLOY_DIR_IMAGE', \ - 'KERNEL_IMAGETYPE', 'IMAGE_NAME', 'IMAGE_LINK_NAME', \ - 'STAGING_DIR_NATIVE', 'STAGING_BINDIR_NATIVE', \ +def qemuboot_vars(d): + build_vars = ['MACHINE', 'TUNE_ARCH', 'DEPLOY_DIR_IMAGE', + 'KERNEL_IMAGETYPE', 'IMAGE_NAME', 'IMAGE_LINK_NAME', + 'STAGING_DIR_NATIVE', 'STAGING_BINDIR_NATIVE', 'STAGING_DIR_HOST'] + return build_vars + [k for k in d.keys() if k.startswith('QB_')] - # Vars from bsp - qb_vars = [] - for k in d.keys(): - if k.startswith('QB_'): - qb_vars.append(k) +do_write_qemuboot_conf[vardeps] += "${@' '.join(qemuboot_vars(d))}" +do_write_qemuboot_conf[vardepsexclude] += "TOPDIR" +python do_write_qemuboot_conf() { + import configparser - qemuboot = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('IMAGE_NAME', True)) - qemuboot_link = "%s/%s.qemuboot.conf" % (d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('IMAGE_LINK_NAME', True)) + qemuboot = "%s/%s.qemuboot.conf" % (d.getVar('IMGDEPLOYDIR'), d.getVar('IMAGE_NAME')) + qemuboot_link = "%s/%s.qemuboot.conf" % (d.getVar('IMGDEPLOYDIR'), d.getVar('IMAGE_LINK_NAME')) + topdir="%s/"%(d.getVar('TOPDIR')).replace("//","/") cf = configparser.ConfigParser() cf.add_section('config_bsp') - for k in build_vars + qb_vars: - cf.set('config_bsp', k, '%s' % d.getVar(k, True)) + for k in qemuboot_vars(d): + # qemu-helper-native sysroot is not removed by rm_work and + # contains all tools required by runqemu + if k == 'STAGING_BINDIR_NATIVE': + val = os.path.join(d.getVar('BASE_WORKDIR'), 
d.getVar('BUILD_SYS'), + 'qemu-helper-native/1.0-r1/recipe-sysroot-native/usr/bin/') + else: + val = d.getVar(k) + # we only want to write out relative paths so that we can relocate images + # and still run them + val=val.replace(topdir,"") + cf.set('config_bsp', k, '%s' % val) # QB_DEFAULT_KERNEL's value of KERNEL_IMAGETYPE is the name of a symlink # to the kernel file, which hinders relocatability of the qb conf. # Read the link and replace it with the full filename of the target. - kernel_link = os.path.join(d.getVar('DEPLOY_DIR_IMAGE', True), d.getVar('QB_DEFAULT_KERNEL', True)) + kernel_link = os.path.join(d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('QB_DEFAULT_KERNEL')) kernel = os.path.realpath(kernel_link) + # we only want to write out relative paths so that we can relocate images + # and still run them + kernel=kernel.replace(topdir,"") cf.set('config_bsp', 'QB_DEFAULT_KERNEL', kernel) bb.utils.mkdirhier(os.path.dirname(qemuboot)) diff --git a/import-layers/yocto-poky/meta/classes/recipe_sanity.bbclass b/import-layers/yocto-poky/meta/classes/recipe_sanity.bbclass index add34df9d..7fa4a849e 100644 --- a/import-layers/yocto-poky/meta/classes/recipe_sanity.bbclass +++ b/import-layers/yocto-poky/meta/classes/recipe_sanity.bbclass @@ -1,5 +1,5 @@ def __note(msg, d): - bb.note("%s: recipe_sanity: %s" % (d.getVar("P", True), msg)) + bb.note("%s: recipe_sanity: %s" % (d.getVar("P"), msg)) __recipe_sanity_badruntimevars = "RDEPENDS RPROVIDES RRECOMMENDS RCONFLICTS" def bad_runtime_vars(cfgdata, d): @@ -7,7 +7,7 @@ def bad_runtime_vars(cfgdata, d): bb.data.inherits_class("cross", d): return - for var in d.getVar("__recipe_sanity_badruntimevars", True).split(): + for var in d.getVar("__recipe_sanity_badruntimevars").split(): val = d.getVar(var, False) if val and val != cfgdata.get(var): __note("%s should be %s_${PN}" % (var, var), d) @@ -15,11 +15,11 @@ def bad_runtime_vars(cfgdata, d): __recipe_sanity_reqvars = "DESCRIPTION" __recipe_sanity_reqdiffvars = "" def req_vars(cfgdata, d): - for var in d.getVar("__recipe_sanity_reqvars", True).split(): + for var in d.getVar("__recipe_sanity_reqvars").split(): if not d.getVar(var, False): __note("%s should be set" % var, d) - for var in d.getVar("__recipe_sanity_reqdiffvars", True).split(): + for var in d.getVar("__recipe_sanity_reqdiffvars").split(): val = d.getVar(var, False) cfgval = cfgdata.get(var) @@ -38,11 +38,11 @@ def var_renames_overwrite(cfgdata, d): def incorrect_nonempty_PACKAGES(cfgdata, d): if bb.data.inherits_class("native", d) or \ bb.data.inherits_class("cross", d): - if d.getVar("PACKAGES", True): + if d.getVar("PACKAGES"): return True def can_use_autotools_base(cfgdata, d): - cfg = d.getVar("do_configure", True) + cfg = d.getVar("do_configure") if not bb.data.inherits_class("autotools", d): return False @@ -61,7 +61,7 @@ def can_delete_FILESPATH(cfgdata, d): expected = cfgdata.get("FILESPATH") expectedpaths = d.expand(expected) unexpanded = d.getVar("FILESPATH", False) - filespath = d.getVar("FILESPATH", True).split(":") + filespath = d.getVar("FILESPATH").split(":") filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] for fp in filespath: if not fp in expectedpaths: @@ -70,22 +70,6 @@ def can_delete_FILESPATH(cfgdata, d): return False return expected != unexpanded -def can_delete_FILESDIR(cfgdata, d): - expected = cfgdata.get("FILESDIR") - #expected = "${@bb.utils.which(d.getVar('FILESPATH', True), '.')}" - unexpanded = d.getVar("FILESDIR", False) - if unexpanded is None: - return False - - expanded = 
os.path.normpath(d.getVar("FILESDIR", True)) - filespath = d.getVar("FILESPATH", True).split(":") - filespath = [os.path.normpath(f) for f in filespath if os.path.exists(f)] - - return unexpanded != expected and \ - os.path.exists(expanded) and \ - (expanded in filespath or - expanded == d.expand(expected)) - def can_delete_others(p, cfgdata, d): for k in ["S", "PV", "PN", "DESCRIPTION", "DEPENDS", "SECTION", "PACKAGES", "EXTRA_OECONF", "EXTRA_OEMAKE"]: @@ -96,7 +80,7 @@ def can_delete_others(p, cfgdata, d): continue try: - expanded = d.getVar(k, True) + expanded = d.getVar(k) cfgexpanded = d.expand(cfgunexpanded) except bb.fetch.ParameterError: continue @@ -108,11 +92,10 @@ def can_delete_others(p, cfgdata, d): (p, cfgunexpanded, unexpanded, expanded)) python do_recipe_sanity () { - p = d.getVar("P", True) - p = "%s %s %s" % (d.getVar("PN", True), d.getVar("PV", True), d.getVar("PR", True)) + p = d.getVar("P") + p = "%s %s %s" % (d.getVar("PN"), d.getVar("PV"), d.getVar("PR")) sanitychecks = [ - (can_delete_FILESDIR, "candidate for removal of FILESDIR"), (can_delete_FILESPATH, "candidate for removal of FILESPATH"), #(can_use_autotools_base, "candidate for use of autotools_base"), (incorrect_nonempty_PACKAGES, "native or cross recipe with non-empty PACKAGES"), diff --git a/import-layers/yocto-poky/meta/classes/relative_symlinks.bbclass b/import-layers/yocto-poky/meta/classes/relative_symlinks.bbclass new file mode 100644 index 000000000..315773734 --- /dev/null +++ b/import-layers/yocto-poky/meta/classes/relative_symlinks.bbclass @@ -0,0 +1,5 @@ +do_install[postfuncs] += "install_relative_symlinks" + +python install_relative_symlinks () { + oe.path.replace_absolute_symlinks(d.getVar('D'), d) +} diff --git a/import-layers/yocto-poky/meta/classes/relocatable.bbclass b/import-layers/yocto-poky/meta/classes/relocatable.bbclass index 4ca9981f4..582812c1c 100644 --- a/import-layers/yocto-poky/meta/classes/relocatable.bbclass +++ b/import-layers/yocto-poky/meta/classes/relocatable.bbclass @@ -1,7 +1,18 @@ inherit chrpath -SYSROOT_PREPROCESS_FUNCS += "relocatable_binaries_preprocess" +SYSROOT_PREPROCESS_FUNCS += "relocatable_binaries_preprocess relocatable_native_pcfiles" python relocatable_binaries_preprocess() { rpath_replace(d.expand('${SYSROOT_DESTDIR}'), d) } + +relocatable_native_pcfiles () { + if [ -d ${SYSROOT_DESTDIR}${libdir}/pkgconfig ]; then + rel=${@os.path.relpath(d.getVar('base_prefix'), d.getVar('libdir') + "/pkgconfig")} + sed -i -e "s:${base_prefix}:\${pcfiledir}/$rel:g" ${SYSROOT_DESTDIR}${libdir}/pkgconfig/*.pc + fi + if [ -d ${SYSROOT_DESTDIR}${datadir}/pkgconfig ]; then + rel=${@os.path.relpath(d.getVar('base_prefix'), d.getVar('datadir') + "/pkgconfig")} + sed -i -e "s:${base_prefix}:\${pcfiledir}/$rel:g" ${SYSROOT_DESTDIR}${datadir}/pkgconfig/*.pc + fi +} diff --git a/import-layers/yocto-poky/meta/classes/report-error.bbclass b/import-layers/yocto-poky/meta/classes/report-error.bbclass index 5bb231efc..d6fdd364a 100644 --- a/import-layers/yocto-poky/meta/classes/report-error.bbclass +++ b/import-layers/yocto-poky/meta/classes/report-error.bbclass @@ -10,7 +10,7 @@ ERR_REPORT_DIR ?= "${LOG_DIR}/error-report" def errorreport_getdata(e): import codecs - logpath = e.data.getVar('ERR_REPORT_DIR', True) + logpath = e.data.getVar('ERR_REPORT_DIR') datafile = os.path.join(logpath, "error-report.txt") with codecs.open(datafile, 'r', 'utf-8') as f: data = f.read() @@ -19,7 +19,7 @@ def errorreport_getdata(e): def errorreport_savedata(e, newdata, file): import json import codecs - 
logpath = e.data.getVar('ERR_REPORT_DIR', True) + logpath = e.data.getVar('ERR_REPORT_DIR') datafile = os.path.join(logpath, file) with codecs.open(datafile, 'w', 'utf-8') as f: json.dump(newdata, f, indent=4, sort_keys=True) @@ -29,18 +29,18 @@ python errorreport_handler () { import json import codecs - logpath = e.data.getVar('ERR_REPORT_DIR', True) + logpath = e.data.getVar('ERR_REPORT_DIR') datafile = os.path.join(logpath, "error-report.txt") if isinstance(e, bb.event.BuildStarted): bb.utils.mkdirhier(logpath) data = {} - machine = e.data.getVar("MACHINE", True) + machine = e.data.getVar("MACHINE") data['machine'] = machine - data['build_sys'] = e.data.getVar("BUILD_SYS", True) - data['nativelsb'] = e.data.getVar("NATIVELSBSTRING", True) - data['distro'] = e.data.getVar("DISTRO", True) - data['target_sys'] = e.data.getVar("TARGET_SYS", True) + data['build_sys'] = e.data.getVar("BUILD_SYS") + data['nativelsb'] = e.data.getVar("NATIVELSBSTRING") + data['distro'] = e.data.getVar("DISTRO") + data['target_sys'] = e.data.getVar("TARGET_SYS") data['failures'] = [] data['component'] = " ".join(e.getPkgs()) data['branch_commit'] = str(base_detect_branch(e.data)) + ": " + str(base_detect_revision(e.data)) @@ -51,7 +51,7 @@ python errorreport_handler () { elif isinstance(e, bb.build.TaskFailed): task = e.task taskdata={} - log = e.data.getVar('BB_LOGFILE', True) + log = e.data.getVar('BB_LOGFILE') taskdata['package'] = e.data.expand("${PF}") taskdata['task'] = task if log: @@ -61,7 +61,7 @@ python errorreport_handler () { # Replace host-specific paths so the logs are cleaner for d in ("TOPDIR", "TMPDIR"): - s = e.data.getVar(d, True) + s = e.data.getVar(d) if s: logdata = logdata.replace(s, d) @@ -92,7 +92,7 @@ python errorreport_handler () { bb.utils.unlockfile(lock) failures = jsondata['failures'] if(len(failures) > 0): - filename = "error_report_" + e.data.getVar("BUILDNAME", True)+".txt" + filename = "error_report_" + e.data.getVar("BUILDNAME")+".txt" datafile = errorreport_savedata(e, jsondata, filename) bb.note("The errors for this build are stored in %s\nYou can send the errors to a reports server by running:\n send-error-report %s [-s server]" % (datafile, datafile)) bb.note("The contents of these logs will be posted in public if you use the above command with the default server. Please ensure you remove any identifying or proprietary information when prompted before sending.") diff --git a/import-layers/yocto-poky/meta/classes/rm_work.bbclass b/import-layers/yocto-poky/meta/classes/rm_work.bbclass index 64b6981a4..badeaeba0 100644 --- a/import-layers/yocto-poky/meta/classes/rm_work.bbclass +++ b/import-layers/yocto-poky/meta/classes/rm_work.bbclass @@ -10,6 +10,14 @@ # # RM_WORK_EXCLUDE += "icu-native icu busybox" # +# Recipes can also configure which entries in their ${WORKDIR} +# are preserved besides temp, which already gets excluded by default +# because it contains logs: +# do_install_append () { +# echo "bar" >${WORKDIR}/foo +# } +# RM_WORK_EXCLUDE_ITEMS += "foo" +RM_WORK_EXCLUDE_ITEMS = "temp" # Use the completion scheduler by default when rm_work is active # to try and reduce disk usage @@ -18,9 +26,6 @@ BB_SCHEDULER ?= "completion" # Run the rm_work task in the idle scheduling class BB_TASK_IONICE_LEVEL_task-rm_work = "3.0" -RMWORK_ORIG_TASK := "${BB_DEFAULT_TASK}" -BB_DEFAULT_TASK = "rm_work_all" - do_rm_work () { # If the recipe name is in the RM_WORK_EXCLUDE, skip the recipe. 
for p in ${RM_WORK_EXCLUDE}; do @@ -37,7 +42,7 @@ do_rm_work () { # failures of removing pseudo folers on NFS2/3 server. if [ $dir = 'pseudo' ]; then rm -rf $dir 2> /dev/null || true - elif [ $dir != 'temp' ]; then + elif ! echo '${RM_WORK_EXCLUDE_ITEMS}' | grep -q -w "$dir"; then rm -rf $dir fi done @@ -66,7 +71,7 @@ do_rm_work () { i=dummy break ;; - *do_rootfs*|*do_image*|*do_bootimg*|*do_bootdirectdisk*|*do_vmimg*) + *do_rootfs*|*do_image*|*do_bootimg*|*do_bootdirectdisk*|*do_vmimg*|*do_write_qemuboot_conf*) i=dummy break ;; @@ -97,13 +102,12 @@ do_rm_work () { rm -f $i done } -addtask rm_work after do_${RMWORK_ORIG_TASK} - do_rm_work_all () { : } do_rm_work_all[recrdeptask] = "do_rm_work" -addtask rm_work_all after do_rm_work +do_rm_work_all[noexec] = "1" +addtask rm_work_all after before do_build do_populate_sdk[postfuncs] += "rm_work_populatesdk" rm_work_populatesdk () { @@ -117,13 +121,52 @@ rm_work_rootfs () { } rm_work_rootfs[cleandirs] = "${WORKDIR}/rootfs" -python () { +# This task can be used instead of do_build to trigger building +# without also invoking do_rm_work. It only exists when rm_work.bbclass +# is active, otherwise do_build needs to be used. +# +# The intended usage is +# ${@ d.getVar('RM_WORK_BUILD_WITHOUT') or 'do_build'} +# in places that previously used just 'do_build'. +RM_WORK_BUILD_WITHOUT = "do_build_without_rm_work" +do_build_without_rm_work () { + : +} +do_build_without_rm_work[noexec] = "1" + +# We have to add these tasks already now, because all tasks are +# meant to be defined before the RecipeTaskPreProcess event triggers. +# The inject_rm_work event handler then merely changes task dependencies. +addtask do_rm_work +addtask do_build_without_rm_work +addhandler inject_rm_work +inject_rm_work[eventmask] = "bb.event.RecipeTaskPreProcess" +python inject_rm_work() { if bb.data.inherits_class('kernel', d): - d.appendVar("RM_WORK_EXCLUDE", ' ' + d.getVar("PN", True)) + d.appendVar("RM_WORK_EXCLUDE", ' ' + d.getVar("PN")) # If the recipe name is in the RM_WORK_EXCLUDE, skip the recipe. - excludes = (d.getVar("RM_WORK_EXCLUDE", True) or "").split() - pn = d.getVar("PN", True) + excludes = (d.getVar("RM_WORK_EXCLUDE") or "").split() + pn = d.getVar("PN") + + # Determine what do_build depends upon, without including do_build + # itself or our own special do_rm_work_all. + deps = set(bb.build.preceedtask('do_build', True, d)) + deps.difference_update(('do_build', 'do_rm_work_all')) + if pn in excludes: d.delVarFlag('rm_work_rootfs', 'cleandirs') d.delVarFlag('rm_work_populatesdk', 'cleandirs') + else: + # Inject do_rm_work into the tasks of the current recipe such that do_build + # depends on it and that it runs after all other tasks that block do_build, + # i.e. after all work on the current recipe is done. The reason for taking + # this approach instead of making do_rm_work depend on do_build is that + # do_build inherits additional runtime dependencies on + # other recipes and thus will typically run much later than completion of + # work in the recipe itself. + # In practice, addtask() here merely updates the dependencies. + bb.build.addtask('do_rm_work', 'do_build', ' '.join(deps), d) + + # Always update do_build_without_rm_work dependencies. 
+ bb.build.addtask('do_build_without_rm_work', '', ' '.join(deps), d) } diff --git a/import-layers/yocto-poky/meta/classes/rm_work_and_downloads.bbclass b/import-layers/yocto-poky/meta/classes/rm_work_and_downloads.bbclass new file mode 100644 index 000000000..7c00bea59 --- /dev/null +++ b/import-layers/yocto-poky/meta/classes/rm_work_and_downloads.bbclass @@ -0,0 +1,33 @@ +# Author: Patrick Ohly +# Copyright: Copyright (C) 2015 Intel Corporation +# +# This file is licensed under the MIT license, see COPYING.MIT in +# this source distribution for the terms. + +# This class is used like rm_work: +# INHERIT += "rm_work_and_downloads" +# +# In addition to removing local build directories of a recipe, it also +# removes the downloaded source. This is achieved by making the DL_DIR +# recipe-specific. While reducing disk usage, it increases network usage (for +# example, compiling the same source for target and host implies downloading +# the source twice). +# +# Because the "do_fetch" task does not get re-run after removing the downloaded +# sources, this class is also not suitable for incremental builds. +# +# Where it works well is in well-connected build environments with limited +# disk space (like TravisCI). + +inherit rm_work + +# This would ensure that the existing do_rm_work() removes the downloads, +# but does not work because some recipes have a circular dependency between +# WORKDIR and DL_DIR (via ${SRCPV}?). +# DL_DIR = "${WORKDIR}/downloads" + +# Instead go up one level and remove ourself. +DL_DIR = "${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}/${PN}/downloads" +do_rm_work_append () { + rm -rf ${DL_DIR} +} diff --git a/import-layers/yocto-poky/meta/classes/rootfs-postcommands.bbclass b/import-layers/yocto-poky/meta/classes/rootfs-postcommands.bbclass index 0c7ceea54..c19ff8738 100644 --- a/import-layers/yocto-poky/meta/classes/rootfs-postcommands.bbclass +++ b/import-layers/yocto-poky/meta/classes/rootfs-postcommands.bbclass @@ -14,6 +14,9 @@ ROOTFS_POSTPROCESS_COMMAND += "rootfs_update_timestamp ; " # Tweak the mount options for rootfs in /etc/fstab if read-only-rootfs is enabled ROOTFS_POSTPROCESS_COMMAND += '${@bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", "read_only_rootfs_hook; ", "",d)}' +# Generates test data file with data store variables expanded in json format +ROOTFS_POSTPROCESS_COMMAND += "write_image_test_data ; " + # Write manifest IMAGE_MANIFEST = "${IMGDEPLOYDIR}/${IMAGE_NAME}.rootfs.manifest" ROOTFS_POSTUNINSTALL_COMMAND =+ "write_image_manifest ; " @@ -30,6 +33,23 @@ ROOTFS_POSTPROCESS_COMMAND += 'empty_var_volatile;' SSH_DISABLE_DNS_LOOKUP ?= " ssh_disable_dns_lookup ; " ROOTFS_POSTPROCESS_COMMAND_append_qemuall = "${SSH_DISABLE_DNS_LOOKUP}" +# Sort the user and group entries in /etc by ID in order to make the content +# deterministic. Package installs are not deterministic, causing the ordering +# of entries to change between builds. In case that this isn't desired, +# the command can be overridden. +# +# Note that useradd-staticids.bbclass has to be used to ensure that +# the numeric IDs of dynamically created entries remain stable. +# +# We want this to run as late as possible, in particular after +# systemd_sysusers_create and set_user_group. Using _append is not +# enough for that, set_user_group is added that way and would end +# up running after us. 
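The sort_passwd hook described in the comment above delegates to the rootfspostcommands module; as a rough sketch of the idea only (assuming colon-separated entries with the numeric ID in the third field, as in passwd/group files):

    # Sketch, not the rootfspostcommands implementation: sort entries by
    # numeric ID so repeated builds produce byte-identical files.
    def sort_by_id(path):
        with open(path) as f:
            entries = [line for line in f if line.strip()]
        entries.sort(key=lambda line: int(line.split(':')[2]))
        with open(path, 'w') as f:
            f.writelines(entries)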
+SORT_PASSWD_POSTPROCESS_COMMAND ??= " sort_passwd; " +python () { + d.appendVar('ROOTFS_POSTPROCESS_COMMAND', '${SORT_PASSWD_POSTPROCESS_COMMAND}') +} + systemd_create_users () { for conffile in ${IMAGE_ROOTFS}/usr/lib/sysusers.d/systemd.conf ${IMAGE_ROOTFS}/usr/lib/sysusers.d/systemd-remote.conf; do [ -e $conffile ] || continue @@ -71,10 +91,10 @@ read_only_rootfs_hook () { # and the keys under /var/run/ssh. if [ -d ${IMAGE_ROOTFS}/etc/ssh ]; then if [ -e ${IMAGE_ROOTFS}/etc/ssh/ssh_host_rsa_key ]; then - echo "SYSCONFDIR=/etc/ssh" >> ${IMAGE_ROOTFS}/etc/default/ssh + echo "SYSCONFDIR=\${SYSCONFDIR:-/etc/ssh}" >> ${IMAGE_ROOTFS}/etc/default/ssh echo "SSHD_OPTS=" >> ${IMAGE_ROOTFS}/etc/default/ssh else - echo "SYSCONFDIR=/var/run/ssh" >> ${IMAGE_ROOTFS}/etc/default/ssh + echo "SYSCONFDIR=\${SYSCONFDIR:-/var/run/ssh}" >> ${IMAGE_ROOTFS}/etc/default/ssh echo "SSHD_OPTS='-f /etc/ssh/sshd_config_readonly'" >> ${IMAGE_ROOTFS}/etc/default/ssh fi fi @@ -112,7 +132,7 @@ zap_empty_root_password () { if [ -e ${IMAGE_ROOTFS}/etc/passwd ]; then sed -i 's%^root::%root:*:%' ${IMAGE_ROOTFS}/etc/passwd fi -} +} # # allow dropbear/openssh to accept root logins and logins from accounts with an empty password string @@ -136,7 +156,10 @@ ssh_allow_empty_password () { fi if [ -d ${IMAGE_ROOTFS}${sysconfdir}/pam.d ] ; then - sed -i 's/nullok_secure/nullok/' ${IMAGE_ROOTFS}${sysconfdir}/pam.d/* + for f in `find ${IMAGE_ROOTFS}${sysconfdir}/pam.d/* -type f -exec test -e {} \; -print` + do + sed -i 's/nullok_secure/nullok/' $f + done fi } @@ -146,6 +169,11 @@ ssh_disable_dns_lookup () { fi } +python sort_passwd () { + import rootfspostcommands + rootfspostcommands.sort_passwd(d.expand('${IMAGE_ROOTFS}${sysconfdir}')) +} + # # Enable postinst logging if debug-tweaks is enabled # @@ -195,31 +223,13 @@ make_zimage_symlink_relative () { fi } -insert_feed_uris () { - - echo "Building feeds for [${DISTRO}].." 
- - for line in ${FEED_URIS} - do - # strip leading and trailing spaces/tabs, then split into name and uri - line_clean="`echo "$line"|sed 's/^[ \t]*//;s/[ \t]*$//'`" - feed_name="`echo "$line_clean" | sed -n 's/\(.*\)##\(.*\)/\1/p'`" - feed_uri="`echo "$line_clean" | sed -n 's/\(.*\)##\(.*\)/\2/p'`" - - echo "Added $feed_name feed with URL $feed_uri" - - # insert new feed-sources - echo "src/gz $feed_name $feed_uri" >> ${IMAGE_ROOTFS}/etc/opkg/${feed_name}-feed.conf - done -} - python write_image_manifest () { from oe.rootfs import image_list_installed_packages from oe.utils import format_pkg_list - deploy_dir = d.getVar('IMGDEPLOYDIR', True) - link_name = d.getVar('IMAGE_LINK_NAME', True) - manifest_name = d.getVar('IMAGE_MANIFEST', True) + deploy_dir = d.getVar('IMGDEPLOYDIR') + link_name = d.getVar('IMAGE_LINK_NAME') + manifest_name = d.getVar('IMAGE_MANIFEST') if not manifest_name: return @@ -236,7 +246,7 @@ python write_image_manifest () { os.symlink(os.path.basename(manifest_name), manifest_link) } -# Can be use to create /etc/timestamp during image construction to give a reasonably +# Can be use to create /etc/timestamp during image construction to give a reasonably # sane default time setting rootfs_update_timestamp () { date -u +%4Y%2m%2d%2H%2M%2S >${IMAGE_ROOTFS}/etc/timestamp @@ -278,3 +288,33 @@ rootfs_check_host_user_contaminated () { rootfs_sysroot_relativelinks () { sysroot-relativelinks.py ${SDK_OUTPUT}/${SDKTARGETSYSROOT} } + +# Generated test data json file +python write_image_test_data() { + from oe.data import export2json + + testdata = "%s/%s.testdata.json" % (d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('IMAGE_NAME')) + testdata_link = "%s/%s.testdata.json" % (d.getVar('DEPLOY_DIR_IMAGE'), d.getVar('IMAGE_LINK_NAME')) + + bb.utils.mkdirhier(os.path.dirname(testdata)) + searchString = "%s/"%(d.getVar("TOPDIR")).replace("//","/") + export2json(d, testdata,searchString=searchString,replaceString="") + + if testdata_link != testdata: + if os.path.lexists(testdata_link): + os.remove(testdata_link) + os.symlink(os.path.basename(testdata), testdata_link) +} +write_image_test_data[vardepsexclude] += "TOPDIR" + +# Check for unsatisfied recommendations (RRECOMMENDS) +python rootfs_log_check_recommends() { + log_path = d.expand("${T}/log.do_rootfs") + with open(log_path, 'r') as log: + for line in log: + if 'log_check' in line: + continue + + if 'unsatisfied recommendation for' in line: + bb.warn('[log_check] %s: %s' % (d.getVar('PN', True), line)) +} diff --git a/import-layers/yocto-poky/meta/classes/rootfs_deb.bbclass b/import-layers/yocto-poky/meta/classes/rootfs_deb.bbclass index f79fca608..262e3d555 100644 --- a/import-layers/yocto-poky/meta/classes/rootfs_deb.bbclass +++ b/import-layers/yocto-poky/meta/classes/rootfs_deb.bbclass @@ -3,7 +3,6 @@ # ROOTFS_PKGMANAGE = "dpkg apt" -ROOTFS_PKGMANAGE_BOOTSTRAP = "run-postinsts" do_rootfs[depends] += "dpkg-native:do_populate_sysroot apt-native:do_populate_sysroot" do_populate_sdk[depends] += "dpkg-native:do_populate_sysroot apt-native:do_populate_sysroot bzip2-native:do_populate_sysroot" @@ -12,9 +11,10 @@ do_rootfs[vardeps] += "PACKAGE_FEED_URIS" do_rootfs[lockfiles] += "${DEPLOY_DIR_DEB}/deb.lock" do_populate_sdk[lockfiles] += "${DEPLOY_DIR_DEB}/deb.lock" +do_populate_sdk_ext[lockfiles] += "${DEPLOY_DIR_DEB}/deb.lock" python rootfs_deb_bad_recommendations() { - if d.getVar("BAD_RECOMMENDATIONS", True): + if d.getVar("BAD_RECOMMENDATIONS"): bb.warn("Debian package install does not support BAD_RECOMMENDATIONS") } do_rootfs[prefuncs] += 
"rootfs_deb_bad_recommendations" @@ -25,7 +25,7 @@ opkglibdir = "${localstatedir}/lib/opkg" python () { # Map TARGET_ARCH to Debian's ideas about architectures - darch = d.getVar('SDK_ARCH', True) + darch = d.getVar('SDK_ARCH') if darch in ["x86", "i486", "i586", "i686", "pentium"]: d.setVar('DEB_SDK_ARCH', 'i386') elif darch == "x86_64": diff --git a/import-layers/yocto-poky/meta/classes/rootfs_ipk.bbclass b/import-layers/yocto-poky/meta/classes/rootfs_ipk.bbclass index d5c38fef7..52b468d85 100644 --- a/import-layers/yocto-poky/meta/classes/rootfs_ipk.bbclass +++ b/import-layers/yocto-poky/meta/classes/rootfs_ipk.bbclass @@ -7,7 +7,6 @@ EXTRAOPKGCONFIG ?= "" ROOTFS_PKGMANAGE = "opkg ${EXTRAOPKGCONFIG}" -ROOTFS_PKGMANAGE_BOOTSTRAP = "run-postinsts" do_rootfs[depends] += "opkg-native:do_populate_sysroot opkg-utils-native:do_populate_sysroot" do_populate_sdk[depends] += "opkg-native:do_populate_sysroot opkg-utils-native:do_populate_sysroot" @@ -16,6 +15,7 @@ do_rootfs[vardeps] += "PACKAGE_FEED_URIS" do_rootfs[lockfiles] += "${WORKDIR}/ipk.lock" do_populate_sdk[lockfiles] += "${WORKDIR}/ipk.lock" +do_populate_sdk_ext[lockfiles] += "${WORKDIR}/ipk.lock" OPKG_PREPROCESS_COMMANDS = "" @@ -27,8 +27,8 @@ MULTILIBRE_ALLOW_REP = "${OPKGLIBDIR}/opkg|/usr/lib/opkg" python () { - if d.getVar('BUILD_IMAGES_FROM_FEEDS', True): - flags = d.getVarFlag('do_rootfs', 'recrdeptask', True) + if d.getVar('BUILD_IMAGES_FROM_FEEDS'): + flags = d.getVarFlag('do_rootfs', 'recrdeptask') flags = flags.replace("do_package_write_ipk", "") flags = flags.replace("do_deploy", "") flags = flags.replace("do_populate_sysroot", "") diff --git a/import-layers/yocto-poky/meta/classes/rootfs_rpm.bbclass b/import-layers/yocto-poky/meta/classes/rootfs_rpm.bbclass index 37730a710..7f305f51c 100644 --- a/import-layers/yocto-poky/meta/classes/rootfs_rpm.bbclass +++ b/import-layers/yocto-poky/meta/classes/rootfs_rpm.bbclass @@ -2,20 +2,22 @@ # Creates a root filesystem out of rpm packages # -ROOTFS_PKGMANAGE = "rpm smartpm" -ROOTFS_PKGMANAGE_BOOTSTRAP = "run-postinsts" +ROOTFS_PKGMANAGE = "rpm dnf" -# Add 100Meg of extra space for Smart -IMAGE_ROOTFS_EXTRA_SPACE_append = "${@bb.utils.contains("PACKAGE_INSTALL", "smartpm", " + 102400", "" ,d)}" +# dnf is using our custom distutils, and so will fail without these +export STAGING_INCDIR +export STAGING_LIBDIR -# Smart is python based, so be sure python-native is available to us. -EXTRANATIVEPATH += "python-native" +# Add 100Meg of extra space for dnf +IMAGE_ROOTFS_EXTRA_SPACE_append = "${@bb.utils.contains("PACKAGE_INSTALL", "dnf", " + 102400", "" ,d)}" + +# Dnf is python based, so be sure python3-native is available to us. 
+EXTRANATIVEPATH += "python3-native" # opkg is needed for update-alternatives RPMROOTFSDEPENDS = "rpm-native:do_populate_sysroot \ - rpmresolve-native:do_populate_sysroot \ - python-smartpm-native:do_populate_sysroot \ - createrepo-native:do_populate_sysroot \ + dnf-native:do_populate_sysroot \ + createrepo-c-native:do_populate_sysroot \ opkg-native:do_populate_sysroot" do_rootfs[depends] += "${RPMROOTFSDEPENDS}" @@ -25,8 +27,8 @@ do_rootfs[recrdeptask] += "do_package_write_rpm" do_rootfs[vardeps] += "PACKAGE_FEED_URIS" python () { - if d.getVar('BUILD_IMAGES_FROM_FEEDS', True): - flags = d.getVarFlag('do_rootfs', 'recrdeptask', True) + if d.getVar('BUILD_IMAGES_FROM_FEEDS'): + flags = d.getVarFlag('do_rootfs', 'recrdeptask') flags = flags.replace("do_package_write_rpm", "") flags = flags.replace("do_deploy", "") flags = flags.replace("do_populate_sysroot", "") @@ -35,7 +37,3 @@ python () { d.setVar('RPM_POSTPROCESS_COMMANDS', '') } -# Smart is python based, so be sure python-native is available to us. -EXTRANATIVEPATH += "python-native" - -rpmlibdir = "/var/lib/rpm" diff --git a/import-layers/yocto-poky/meta/classes/sanity.bbclass b/import-layers/yocto-poky/meta/classes/sanity.bbclass index a11b581a0..e8064ac48 100644 --- a/import-layers/yocto-poky/meta/classes/sanity.bbclass +++ b/import-layers/yocto-poky/meta/classes/sanity.bbclass @@ -3,10 +3,10 @@ # SANITY_REQUIRED_UTILITIES ?= "patch diffstat makeinfo git bzip2 tar \ - gzip gawk chrpath wget cpio perl file" + gzip gawk chrpath wget cpio perl file which" def bblayers_conf_file(d): - return os.path.join(d.getVar('TOPDIR', True), 'conf/bblayers.conf') + return os.path.join(d.getVar('TOPDIR'), 'conf/bblayers.conf') def sanity_conf_read(fn): with open(fn, 'r') as f: @@ -39,8 +39,8 @@ SANITY_DIFF_TOOL ?= "meld" SANITY_LOCALCONF_SAMPLE ?= "${COREBASE}/meta*/conf/local.conf.sample" python oecore_update_localconf() { # Check we are using a valid local.conf - current_conf = d.getVar('CONF_VERSION', True) - conf_version = d.getVar('LOCALCONF_VERSION', True) + current_conf = d.getVar('CONF_VERSION') + conf_version = d.getVar('LOCALCONF_VERSION') failmsg = """Your version of local.conf was generated from an older/newer version of local.conf.sample and there have been updates made to this file. Please compare the two @@ -59,8 +59,8 @@ is a good way to visualise the changes.""" SANITY_SITECONF_SAMPLE ?= "${COREBASE}/meta*/conf/site.conf.sample" python oecore_update_siteconf() { # If we have a site.conf, check it's valid - current_sconf = d.getVar('SCONF_VERSION', True) - sconf_version = d.getVar('SITE_CONF_VERSION', True) + current_sconf = d.getVar('SCONF_VERSION') + sconf_version = d.getVar('SITE_CONF_VERSION') failmsg = """Your version of site.conf was generated from an older version of site.conf.sample and there have been updates made to this file. Please compare the two @@ -80,8 +80,8 @@ SANITY_BBLAYERCONF_SAMPLE ?= "${COREBASE}/meta*/conf/bblayers.conf.sample" python oecore_update_bblayers() { # bblayers.conf is out of date, so see if we can resolve that - current_lconf = int(d.getVar('LCONF_VERSION', True)) - lconf_version = int(d.getVar('LAYER_CONF_VERSION', True)) + current_lconf = int(d.getVar('LCONF_VERSION')) + lconf_version = int(d.getVar('LAYER_CONF_VERSION')) failmsg = """Your version of bblayers.conf has the wrong LCONF_VERSION (has ${LCONF_VERSION}, expecting ${LAYER_CONF_VERSION}). Please compare your file against bblayers.conf.sample and merge any changes before continuing. 
@@ -141,7 +141,7 @@ is a good way to visualise the changes.""" # Handle rename of meta-yocto -> meta-poky # This marks the start of separate version numbers but code is needed in OE-Core # for the migration, one last time. - layers = d.getVar('BBLAYERS', True).split() + layers = d.getVar('BBLAYERS').split() layers = [ os.path.basename(path) for path in layers ] if 'meta-yocto' in layers: found = False @@ -172,7 +172,7 @@ is a good way to visualise the changes.""" } def raise_sanity_error(msg, d, network_error=False): - if d.getVar("SANITY_USE_EVENTS", True) == "1": + if d.getVar("SANITY_USE_EVENTS") == "1": try: bb.event.fire(bb.event.SanityCheckFailed(msg, network_error), d) except TypeError: @@ -198,8 +198,8 @@ def check_toolchain_tune_args(data, tune, multilib, errs): return found_errors def check_toolchain_args_present(data, tune, multilib, tune_errors, which): - args_set = (data.getVar("TUNE_%s" % which, True) or "").split() - args_wanted = (data.getVar("TUNEABI_REQUIRED_%s_tune-%s" % (which, tune), True) or "").split() + args_set = (data.getVar("TUNE_%s" % which) or "").split() + args_wanted = (data.getVar("TUNEABI_REQUIRED_%s_tune-%s" % (which, tune)) or "").split() args_missing = [] # If no args are listed/required, we are done. @@ -226,9 +226,8 @@ def check_toolchain_tune(data, tune, multilib): # Apply the overrides so we can look at the details. overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + multilib localdata.setVar("OVERRIDES", overrides) - bb.data.update_data(localdata) bb.debug(2, "Sanity-checking tuning '%s' (%s) features:" % (tune, multilib)) - features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune, True) or "").split() + features = (localdata.getVar("TUNE_FEATURES_tune-%s" % tune) or "").split() if not features: return "Tuning '%s' has no defined features, and cannot be used." % tune valid_tunes = localdata.getVarFlags('TUNEVALID') or {} @@ -248,9 +247,9 @@ def check_toolchain_tune(data, tune, multilib): bb.debug(2, " %s: %s" % (feature, valid_tunes[feature])) else: tune_errors.append("Feature '%s' is not defined." % feature) - whitelist = localdata.getVar("TUNEABI_WHITELIST", True) + whitelist = localdata.getVar("TUNEABI_WHITELIST") if whitelist: - tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune, True) + tuneabi = localdata.getVar("TUNEABI_tune-%s" % tune) if not tuneabi: tuneabi = tune if True not in [x in whitelist.split() for x in tuneabi.split()]: @@ -264,13 +263,13 @@ def check_toolchain_tune(data, tune, multilib): def check_toolchain(data): tune_error_set = [] - deftune = data.getVar("DEFAULTTUNE", True) + deftune = data.getVar("DEFAULTTUNE") tune_errors = check_toolchain_tune(data, deftune, 'default') if tune_errors: tune_error_set.append(tune_errors) - multilibs = (data.getVar("MULTILIB_VARIANTS", True) or "").split() - global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS", True) or "").split() + multilibs = (data.getVar("MULTILIB_VARIANTS") or "").split() + global_multilibs = (data.getVar("MULTILIB_GLOBAL_VARIANTS") or "").split() if multilibs: seen_libs = [] @@ -282,7 +281,7 @@ def check_toolchain(data): seen_libs.append(lib) if not lib in global_multilibs: tune_error_set.append("Multilib %s is not present in MULTILIB_GLOBAL_VARIANTS" % lib) - tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib, True) + tune = data.getVar("DEFAULTTUNE_virtclass-multilib-%s" % lib) if tune in seen_tunes: tune_error_set.append("The tuning '%s' appears in more than one multilib." 
% tune) else: @@ -360,27 +359,34 @@ def check_connectivity(d): # URI's to check can be set in the CONNECTIVITY_CHECK_URIS variable # using the same syntax as for SRC_URI. If the variable is not set # the check is skipped - test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS', True) or "").split() + test_uris = (d.getVar('CONNECTIVITY_CHECK_URIS') or "").split() retval = "" + bbn = d.getVar('BB_NO_NETWORK') + if bbn not in (None, '0', '1'): + return 'BB_NO_NETWORK should be "0" or "1", but it is "%s"' % bbn + # Only check connectivity if network enabled and the # CONNECTIVITY_CHECK_URIS are set - network_enabled = not d.getVar('BB_NO_NETWORK', True) + network_enabled = not (bbn == '1') check_enabled = len(test_uris) - # Take a copy of the data store and unset MIRRORS and PREMIRRORS - data = bb.data.createCopy(d) - data.delVar('PREMIRRORS') - data.delVar('MIRRORS') if check_enabled and network_enabled: + # Take a copy of the data store and unset MIRRORS and PREMIRRORS + data = bb.data.createCopy(d) + data.delVar('PREMIRRORS') + data.delVar('MIRRORS') try: fetcher = bb.fetch2.Fetch(test_uris, data) fetcher.checkstatus() except Exception as err: # Allow the message to be configured so that users can be # pointed to a support mechanism. - msg = data.getVar('CONNECTIVITY_CHECK_MSG', True) or "" + msg = data.getVar('CONNECTIVITY_CHECK_MSG') or "" if len(msg) == 0: - msg = "%s. Please ensure your network is configured correctly.\n" % err + msg = "%s.\n" % err + msg += " Please ensure your host's network is configured correctly,\n" + msg += " or set BB_NO_NETWORK = \"1\" to disable network access if\n" + msg += " all required sources are on local disk.\n" retval = msg return retval @@ -388,7 +394,7 @@ def check_connectivity(d): def check_supported_distro(sanity_data): from fnmatch import fnmatch - tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS', True) + tested_distros = sanity_data.getVar('SANITY_TESTED_DISTROS') if not tested_distros: return @@ -411,17 +417,17 @@ def check_sanity_validmachine(sanity_data): messages = "" # Check TUNE_ARCH is set - if sanity_data.getVar('TUNE_ARCH', True) == 'INVALID': + if sanity_data.getVar('TUNE_ARCH') == 'INVALID': messages = messages + 'TUNE_ARCH is unset. 
Please ensure your MACHINE configuration includes a valid tune configuration file which will set this correctly.\n' # Check TARGET_OS is set - if sanity_data.getVar('TARGET_OS', True) == 'INVALID': + if sanity_data.getVar('TARGET_OS') == 'INVALID': messages = messages + 'Please set TARGET_OS directly, or choose a MACHINE or DISTRO that does so.\n' # Check that we don't have duplicate entries in PACKAGE_ARCHS & that TUNE_PKGARCH is in PACKAGE_ARCHS - pkgarchs = sanity_data.getVar('PACKAGE_ARCHS', True) - tunepkg = sanity_data.getVar('TUNE_PKGARCH', True) - defaulttune = sanity_data.getVar('DEFAULTTUNE', True) + pkgarchs = sanity_data.getVar('PACKAGE_ARCHS') + tunepkg = sanity_data.getVar('TUNE_PKGARCH') + defaulttune = sanity_data.getVar('DEFAULTTUNE') tunefound = False seen = {} dups = [] @@ -448,7 +454,7 @@ def check_gcc_march(sanity_data): message = "" # Check if -march not in BUILD_CFLAGS - if sanity_data.getVar("BUILD_CFLAGS",True).find("-march") < 0: + if sanity_data.getVar("BUILD_CFLAGS").find("-march") < 0: result = False # Construct a test file @@ -469,7 +475,7 @@ def check_gcc_march(sanity_data): result = True; if not result: - build_arch = sanity_data.getVar('BUILD_ARCH', True) + build_arch = sanity_data.getVar('BUILD_ARCH') status,res = oe.utils.getstatusoutput(sanity_data.expand("${BUILD_CC} -march=%s gcc_test.c -o gcc_test" % build_arch)) if status == 0: message = "BUILD_CFLAGS_append = \" -march=%s\"" % build_arch @@ -557,15 +563,15 @@ def check_perl_modules(sanity_data): return None def sanity_check_conffiles(d): - funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS', True).split() + funcs = d.getVar('BBLAYERS_CONF_UPDATE_FUNCS').split() for func in funcs: conffile, current_version, required_version, func = func.split(":") - if check_conf_exists(conffile, d) and d.getVar(current_version, True) is not None and \ - d.getVar(current_version, True) != d.getVar(required_version, True): + if check_conf_exists(conffile, d) and d.getVar(current_version) is not None and \ + d.getVar(current_version) != d.getVar(required_version): try: bb.build.exec_func(func, d, pythonexception=True) except NotImplementedError as e: - bb.fatal(e) + bb.fatal(str(e)) d.setVar("BB_INVALIDCONF", True) def sanity_handle_abichanges(status, d): @@ -574,55 +580,16 @@ def sanity_handle_abichanges(status, d): # import subprocess - current_abi = d.getVar('OELAYOUT_ABI', True) - abifile = d.getVar('SANITY_ABIFILE', True) + current_abi = d.getVar('OELAYOUT_ABI') + abifile = d.getVar('SANITY_ABIFILE') if os.path.exists(abifile): with open(abifile, "r") as f: abi = f.read().strip() if not abi.isdigit(): with open(abifile, "w") as f: f.write(current_abi) - elif abi == "2" and current_abi == "3": - bb.note("Converting staging from layout version 2 to layout version 3") - subprocess.call(d.expand("mv ${TMPDIR}/staging ${TMPDIR}/sysroots"), shell=True) - subprocess.call(d.expand("ln -s sysroots ${TMPDIR}/staging"), shell=True) - subprocess.call(d.expand("cd ${TMPDIR}/stamps; for i in */*do_populate_staging; do new=`echo $i | sed -e 's/do_populate_staging/do_populate_sysroot/'`; mv $i $new; done"), shell=True) - with open(abifile, "w") as f: - f.write(current_abi) - elif abi == "3" and current_abi == "4": - bb.note("Converting staging layout from version 3 to layout version 4") - if os.path.exists(d.expand("${STAGING_DIR_NATIVE}${bindir_native}/${MULTIMACH_HOST_SYS}")): - subprocess.call(d.expand("mv ${STAGING_DIR_NATIVE}${bindir_native}/${MULTIMACH_HOST_SYS} ${STAGING_BINDIR_CROSS}"), shell=True) - 
subprocess.call(d.expand("ln -s ${STAGING_BINDIR_CROSS} ${STAGING_DIR_NATIVE}${bindir_native}/${MULTIMACH_HOST_SYS}"), shell=True) - with open(abifile, "w") as f: - f.write(current_abi) - elif abi == "4": - status.addresult("Staging layout has changed. The cross directory has been deprecated and cross packages are now built under the native sysroot.\nThis requires a rebuild.\n") - elif abi == "5" and current_abi == "6": - bb.note("Converting staging layout from version 5 to layout version 6") - subprocess.call(d.expand("mv ${TMPDIR}/pstagelogs ${SSTATE_MANIFESTS}"), shell=True) - with open(abifile, "w") as f: - f.write(current_abi) - elif abi == "7" and current_abi == "8": - status.addresult("Your configuration is using stamp files including the sstate hash but your build directory was built with stamp files that do not include this.\nTo continue, either rebuild or switch back to the OEBasic signature handler with BB_SIGNATURE_HANDLER = 'OEBasic'.\n") - elif (abi != current_abi and current_abi == "9"): - status.addresult("The layout of the TMPDIR STAMPS directory has changed. Please clean out TMPDIR and rebuild (sstate will be still be valid and reused)\n") - elif (abi != current_abi and current_abi == "10" and (abi == "8" or abi == "9")): - bb.note("Converting staging layout from version 8/9 to layout version 10") - cmd = d.expand("grep -r -l sysroot-providers/virtual_kernel ${SSTATE_MANIFESTS}") - ret, result = oe.utils.getstatusoutput(cmd) - result = result.split() - for f in result: - bb.note("Uninstalling manifest file %s" % f) - sstate_clean_manifest(f, d) - with open(abifile, "w") as f: - f.write(current_abi) - elif abi == "10" and current_abi == "11": - bb.note("Converting staging layout from version 10 to layout version 11") - # Files in xf86-video-modesetting moved to xserver-xorg and bitbake can't currently handle that: - subprocess.call(d.expand("rm ${TMPDIR}/sysroots/*/usr/lib/xorg/modules/drivers/modesetting_drv.so ${TMPDIR}/sysroots/*/pkgdata/runtime/xf86-video-modesetting* ${TMPDIR}/sysroots/*/pkgdata/runtime-reverse/xf86-video-modesetting* ${TMPDIR}/sysroots/*/pkgdata/shlibs2/xf86-video-modesetting*"), shell=True) - with open(abifile, "w") as f: - f.write(current_abi) + elif int(abi) <= 11 and current_abi == "12": + status.addresult("The layout of TMPDIR changed for Recipe Specific Sysroots.\nConversion doesn't make sense and this change will rebuild everything so please delete TMPDIR (%s).\n" % d.getVar("TMPDIR")) elif (abi != current_abi): # Code to convert from one ABI to another could go here if possible. 
status.addresult("Error, TMPDIR has changed its layout version number (%s to %s) and you need to either rebuild, revert or adjust it at your own risk.\n" % (abi, current_abi)) @@ -670,12 +637,12 @@ def check_sanity_version_change(status, d): missing = missing + "GNU make," if not check_app_exists('${BUILD_CC}', d): - missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC", True) + missing = missing + "C Compiler (%s)," % d.getVar("BUILD_CC") if not check_app_exists('${BUILD_CXX}', d): - missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX", True) + missing = missing + "C++ Compiler (%s)," % d.getVar("BUILD_CXX") - required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES', True) + required_utilities = d.getVar('SANITY_REQUIRED_UTILITIES') for util in required_utilities.split(): if not check_app_exists(util, d): @@ -685,7 +652,7 @@ def check_sanity_version_change(status, d): missing = missing.rstrip(',') status.addresult("Please install the following missing utilities: %s\n" % missing) - assume_provided = d.getVar('ASSUME_PROVIDED', True).split() + assume_provided = d.getVar('ASSUME_PROVIDED').split() # Check user doesn't have ASSUME_PROVIDED = instead of += in local.conf if "diffstat-native" not in assume_provided: status.addresult('Please use ASSUME_PROVIDED +=, not ASSUME_PROVIDED = in your local.conf\n') @@ -708,7 +675,7 @@ def check_sanity_version_change(status, d): status.addresult(" __sync_bool_compare_and_swap (&atomic, 2, 3);\n") # Check that TMPDIR isn't on a filesystem with limited filename length (eg. eCryptFS) - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') status.addresult(check_create_long_filename(tmpdir, "TMPDIR")) tmpdirmode = os.stat(tmpdir).st_mode if (tmpdirmode & stat.S_ISGID): @@ -732,7 +699,7 @@ def check_sanity_version_change(status, d): if netcheck: status.network_error = True - nolibs = d.getVar('NO32LIBS', True) + nolibs = d.getVar('NO32LIBS') if not nolibs: lib32path = '/lib' if os.path.exists('/lib64') and ( os.path.islink('/lib64') or os.path.islink('/lib') ): @@ -741,7 +708,7 @@ def check_sanity_version_change(status, d): if os.path.exists('%s/libc.so.6' % lib32path) and not os.path.exists('/usr/include/gnu/stubs-32.h'): status.addresult("You have a 32-bit libc, but no 32-bit headers. You must install the 32-bit libc headers.\n") - bbpaths = d.getVar('BBPATH', True).split(":") + bbpaths = d.getVar('BBPATH').split(":") if ("." in bbpaths or "./" in bbpaths or "" in bbpaths): status.addresult("BBPATH references the current directory, either through " \ "an empty entry, a './' or a '.'.\n\t This is unsafe and means your "\ @@ -751,7 +718,7 @@ def check_sanity_version_change(status, d): "references.\n" \ "Parsed BBPATH is" + str(bbpaths)); - oes_bb_conf = d.getVar( 'OES_BITBAKE_CONF', True) + oes_bb_conf = d.getVar( 'OES_BITBAKE_CONF') if not oes_bb_conf: status.addresult('You are not using the OpenEmbedded version of conf/bitbake.conf. 
This means your environment is misconfigured, in particular check BBPATH.\n') @@ -786,26 +753,26 @@ def check_sanity_everybuild(status, d): # Check the bitbake version meets minimum requirements from distutils.version import LooseVersion - minversion = d.getVar('BB_MIN_VERSION', True) + minversion = d.getVar('BB_MIN_VERSION') if (LooseVersion(bb.__version__) < LooseVersion(minversion)): status.addresult('Bitbake version %s is required and version %s was found\n' % (minversion, bb.__version__)) sanity_check_locale(d) - paths = d.getVar('PATH', True).split(":") + paths = d.getVar('PATH').split(":") if "." in paths or "./" in paths or "" in paths: status.addresult("PATH contains '.', './' or '' (empty element), which will break the build, please remove this.\nParsed PATH is " + str(paths) + "\n") # Check that the DISTRO is valid, if set # need to take into account DISTRO renaming DISTRO - distro = d.getVar('DISTRO', True) + distro = d.getVar('DISTRO') if distro and distro != "nodistro": if not ( check_conf_exists("conf/distro/${DISTRO}.conf", d) or check_conf_exists("conf/distro/include/${DISTRO}.inc", d) ): - status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO", True)) + status.addresult("DISTRO '%s' not found. Please set a valid DISTRO in your local.conf\n" % d.getVar("DISTRO")) # Check that DL_DIR is set, exists and is writable. In theory, we should never even hit the check if DL_DIR isn't # set, since so much relies on it being set. - dldir = d.getVar('DL_DIR', True) + dldir = d.getVar('DL_DIR') if not dldir: status.addresult("DL_DIR is not set. Your environment is misconfigured, check that DL_DIR is set, and if the directory exists, that it is writable. \n") if os.path.exists(dldir) and not os.access(dldir, os.W_OK): @@ -814,9 +781,9 @@ def check_sanity_everybuild(status, d): # Check that the MACHINE is valid, if it is set machinevalid = True - if d.getVar('MACHINE', True): + if d.getVar('MACHINE'): if not check_conf_exists("conf/machine/${MACHINE}.conf", d): - status.addresult('Please set a valid MACHINE in your local.conf or environment\n') + status.addresult('MACHINE=%s is invalid. Please set a valid MACHINE in your local.conf, environment or other configuration file.\n' % (d.getVar('MACHINE'))) machinevalid = False else: status.addresult(check_sanity_validmachine(d)) @@ -827,7 +794,7 @@ def check_sanity_everybuild(status, d): status.addresult(check_toolchain(d)) # Check that the SDKMACHINE is valid, if it is set - if d.getVar('SDKMACHINE', True): + if d.getVar('SDKMACHINE'): if not check_conf_exists("conf/machine-sdk/${SDKMACHINE}.conf", d): status.addresult('Specified SDKMACHINE value is not valid\n') elif d.getVar('SDK_ARCH', False) == "${BUILD_ARCH}": @@ -840,7 +807,7 @@ def check_sanity_everybuild(status, d): status.addresult("Please use a umask which allows a+rx and u+rwx\n") os.umask(omask) - if d.getVar('TARGET_ARCH', True) == "arm": + if d.getVar('TARGET_ARCH') == "arm": # This path is no longer user-readable in modern (very recent) Linux try: if os.path.exists("/proc/sys/vm/mmap_min_addr"): @@ -853,7 +820,7 @@ def check_sanity_everybuild(status, d): except: pass - oeroot = d.getVar('COREBASE', True) + oeroot = d.getVar('COREBASE') if oeroot.find('+') != -1: status.addresult("Error, you have an invalid character (+) in your COREBASE directory path. 
Please move the installation to a directory which doesn't include any + characters.") if oeroot.find('@') != -1: @@ -866,20 +833,18 @@ def check_sanity_everybuild(status, d): mirror_vars = ['MIRRORS', 'PREMIRRORS', 'SSTATE_MIRRORS'] protocols = ['http', 'ftp', 'file', 'https', \ 'git', 'gitsm', 'hg', 'osc', 'p4', 'svn', \ - 'bzr', 'cvs', 'npm', 'sftp', 'ssh'] + 'bzr', 'cvs', 'npm', 'sftp', 'ssh', 's3' ] for mirror_var in mirror_vars: - mirrors = (d.getVar(mirror_var, True) or '').replace('\\n', '\n').split('\n') - for mirror_entry in mirrors: - mirror_entry = mirror_entry.strip() - if not mirror_entry: - # ignore blank lines - continue + mirrors = (d.getVar(mirror_var) or '').replace('\\n', ' ').split() - try: - pattern, mirror = mirror_entry.split() - except ValueError: - bb.warn('Invalid %s: %s, should be 2 members.' % (mirror_var, mirror_entry.strip())) - continue + # Split into pairs + if len(mirrors) % 2 != 0: + bb.warn('Invalid mirror variable value for %s: %s, should contain paired members.' % (mirror_var, mirrors.strip())) + continue + mirrors = list(zip(*[iter(mirrors)]*2)) + + for mirror_entry in mirrors: + pattern, mirror = mirror_entry decoded = bb.fetch2.decodeurl(pattern) try: @@ -907,7 +872,7 @@ def check_sanity_everybuild(status, d): check_symlink(mirror_base, d) # Check that TMPDIR hasn't changed location since the last time we were run - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') checkfile = os.path.join(tmpdir, "saved_tmpdir") if os.path.exists(checkfile): with open(checkfile, "r") as f: @@ -946,8 +911,8 @@ def check_sanity(sanity_data): status = SanityStatus() - tmpdir = sanity_data.getVar('TMPDIR', True) - sstate_dir = sanity_data.getVar('SSTATE_DIR', True) + tmpdir = sanity_data.getVar('TMPDIR') + sstate_dir = sanity_data.getVar('SSTATE_DIR') check_symlink(sstate_dir, sanity_data) @@ -971,7 +936,7 @@ def check_sanity(sanity_data): check_sanity_everybuild(status, sanity_data) - sanity_version = int(sanity_data.getVar('SANITY_VERSION', True) or 1) + sanity_version = int(sanity_data.getVar('SANITY_VERSION') or 1) network_error = False # NATIVELSBSTRING var may have been overridden with "universal", so # get actual host distribution id and version diff --git a/import-layers/yocto-poky/meta/classes/sign_ipk.bbclass b/import-layers/yocto-poky/meta/classes/sign_ipk.bbclass index a481f6d9a..e5057b779 100644 --- a/import-layers/yocto-poky/meta/classes/sign_ipk.bbclass +++ b/import-layers/yocto-poky/meta/classes/sign_ipk.bbclass @@ -29,10 +29,10 @@ IPK_GPG_SIGNATURE_TYPE ?= 'ASC' python () { # Check configuration for var in ('IPK_GPG_NAME', 'IPK_GPG_PASSPHRASE_FILE'): - if not d.getVar(var, True): + if not d.getVar(var): raise_sanity_error("You need to define %s in the config" % var, d) - sigtype = d.getVar("IPK_GPG_SIGNATURE_TYPE", True) + sigtype = d.getVar("IPK_GPG_SIGNATURE_TYPE") if sigtype.upper() != "ASC" and sigtype.upper() != "BIN": raise_sanity_error("Bad value for IPK_GPG_SIGNATURE_TYPE (%s), use either ASC or BIN" % sigtype) } @@ -42,11 +42,11 @@ def sign_ipk(d, ipk_to_sign): bb.debug(1, 'Signing ipk: %s' % ipk_to_sign) - signer = get_signer(d, d.getVar('IPK_GPG_BACKEND', True)) - sig_type = d.getVar('IPK_GPG_SIGNATURE_TYPE', True) + signer = get_signer(d, d.getVar('IPK_GPG_BACKEND')) + sig_type = d.getVar('IPK_GPG_SIGNATURE_TYPE') is_ascii_sig = (sig_type.upper() != "BIN") signer.detach_sign(ipk_to_sign, - d.getVar('IPK_GPG_NAME', True), - d.getVar('IPK_GPG_PASSPHRASE_FILE', True), + d.getVar('IPK_GPG_NAME'), + 
d.getVar('IPK_GPG_PASSPHRASE_FILE'), armor=is_ascii_sig) diff --git a/import-layers/yocto-poky/meta/classes/sign_package_feed.bbclass b/import-layers/yocto-poky/meta/classes/sign_package_feed.bbclass index 31a6e9b04..71df03bab 100644 --- a/import-layers/yocto-poky/meta/classes/sign_package_feed.bbclass +++ b/import-layers/yocto-poky/meta/classes/sign_package_feed.bbclass @@ -31,10 +31,10 @@ PACKAGE_FEED_GPG_SIGNATURE_TYPE ?= 'ASC' python () { # Check sanity of configuration for var in ('PACKAGE_FEED_GPG_NAME', 'PACKAGE_FEED_GPG_PASSPHRASE_FILE'): - if not d.getVar(var, True): + if not d.getVar(var): raise_sanity_error("You need to define %s in the config" % var, d) - sigtype = d.getVar("PACKAGE_FEED_GPG_SIGNATURE_TYPE", True) + sigtype = d.getVar("PACKAGE_FEED_GPG_SIGNATURE_TYPE") if sigtype.upper() != "ASC" and sigtype.upper() != "BIN": raise_sanity_error("Bad value for PACKAGE_FEED_GPG_SIGNATURE_TYPE (%s), use either ASC or BIN" % sigtype) } diff --git a/import-layers/yocto-poky/meta/classes/sign_rpm.bbclass b/import-layers/yocto-poky/meta/classes/sign_rpm.bbclass index a8ea75faa..bc2e94710 100644 --- a/import-layers/yocto-poky/meta/classes/sign_rpm.bbclass +++ b/import-layers/yocto-poky/meta/classes/sign_rpm.bbclass @@ -22,31 +22,24 @@ RPM_GPG_BACKEND ?= 'local' python () { - if d.getVar('RPM_GPG_PASSPHRASE_FILE', True): + if d.getVar('RPM_GPG_PASSPHRASE_FILE'): raise_sanity_error('RPM_GPG_PASSPHRASE_FILE is replaced by RPM_GPG_PASSPHRASE', d) # Check configuration for var in ('RPM_GPG_NAME', 'RPM_GPG_PASSPHRASE'): - if not d.getVar(var, True): + if not d.getVar(var): raise_sanity_error("You need to define %s in the config" % var, d) - - # Set the expected location of the public key - d.setVar('RPM_GPG_PUBKEY', os.path.join(d.getVar('STAGING_DIR_TARGET', False), - d.getVar('sysconfdir', False), - 'pki', - 'rpm-gpg', - 'RPM-GPG-KEY-${DISTRO_VERSION}')) } python sign_rpm () { import glob from oe.gpg_sign import get_signer - signer = get_signer(d, d.getVar('RPM_GPG_BACKEND', True)) - rpms = glob.glob(d.getVar('RPM_PKGWRITEDIR', True) + '/*') + signer = get_signer(d, d.getVar('RPM_GPG_BACKEND')) + rpms = glob.glob(d.getVar('RPM_PKGWRITEDIR') + '/*') signer.sign_rpms(rpms, - d.getVar('RPM_GPG_NAME', True), - d.getVar('RPM_GPG_PASSPHRASE', True)) + d.getVar('RPM_GPG_NAME'), + d.getVar('RPM_GPG_PASSPHRASE')) } do_package_index[depends] += "signing-keys:do_deploy" diff --git a/import-layers/yocto-poky/meta/classes/siteconfig.bbclass b/import-layers/yocto-poky/meta/classes/siteconfig.bbclass index 45dce489d..bb491d299 100644 --- a/import-layers/yocto-poky/meta/classes/siteconfig.bbclass +++ b/import-layers/yocto-poky/meta/classes/siteconfig.bbclass @@ -2,12 +2,12 @@ python siteconfig_do_siteconfig () { shared_state = sstate_state_fromvars(d) if shared_state['task'] != 'populate_sysroot': return - if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME', True), 'site_config')): + if not os.path.isdir(os.path.join(d.getVar('FILE_DIRNAME'), 'site_config')): bb.debug(1, "No site_config directory, skipping do_siteconfig") return + sstate_install(shared_state, d) bb.build.exec_func('do_siteconfig_gencache', d) sstate_clean(shared_state, d) - sstate_install(shared_state, d) } EXTRASITECONFIG ?= "" diff --git a/import-layers/yocto-poky/meta/classes/siteinfo.bbclass b/import-layers/yocto-poky/meta/classes/siteinfo.bbclass index 6eca004c5..2c33732be 100644 --- a/import-layers/yocto-poky/meta/classes/siteinfo.bbclass +++ b/import-layers/yocto-poky/meta/classes/siteinfo.bbclass @@ -89,6 +89,8 @@ def 
siteinfo_data(d): "mips64el-linux-musl": "mips64el-linux", "mips64-linux-gnun32": "mips-linux bit-32", "mips64el-linux-gnun32": "mipsel-linux bit-32", + "mipsisa64r6-linux-gnun32": "mipsisa32r6-linux bit-32", + "mipsisa64r6el-linux-gnun32": "mipsisa32r6el-linux bit-32", "powerpc-linux": "powerpc32-linux", "powerpc-linux-musl": "powerpc-linux powerpc32-linux", "powerpc-linux-uclibc": "powerpc-linux powerpc32-linux", @@ -113,14 +115,14 @@ def siteinfo_data(d): # Add in any extra user supplied data which may come from a BSP layer, removing the # need to always change this class directly - extra_siteinfo = (d.getVar("SITEINFO_EXTRA_DATAFUNCS", True) or "").split() + extra_siteinfo = (d.getVar("SITEINFO_EXTRA_DATAFUNCS") or "").split() for m in extra_siteinfo: call = m + "(archinfo, osinfo, targetinfo, d)" locs = { "archinfo" : archinfo, "osinfo" : osinfo, "targetinfo" : targetinfo, "d" : d} archinfo, osinfo, targetinfo = bb.utils.better_eval(call, locs) - hostarch = d.getVar("HOST_ARCH", True) - hostos = d.getVar("HOST_OS", True) + hostarch = d.getVar("HOST_ARCH") + hostos = d.getVar("HOST_OS") target = "%s-%s" % (hostarch, hostos) sitedata = [] @@ -144,7 +146,7 @@ python () { d.setVar("SITEINFO_ENDIANNESS", "be") else: bb.error("Unable to determine endianness for architecture '%s'" % - d.getVar("HOST_ARCH", True)) + d.getVar("HOST_ARCH")) bb.fatal("Please add your architecture to siteinfo.bbclass") if "bit-32" in sitedata: @@ -153,14 +155,14 @@ python () { d.setVar("SITEINFO_BITS", "64") else: bb.error("Unable to determine bit size for architecture '%s'" % - d.getVar("HOST_ARCH", True)) + d.getVar("HOST_ARCH")) bb.fatal("Please add your architecture to siteinfo.bbclass") } def siteinfo_get_files(d, aclocalcache = False): sitedata = siteinfo_data(d) sitefiles = "" - for path in d.getVar("BBPATH", True).split(":"): + for path in d.getVar("BBPATH").split(":"): for element in sitedata: filename = os.path.join(path, "site", element) if os.path.exists(filename): @@ -177,7 +179,7 @@ def siteinfo_get_files(d, aclocalcache = False): # issues and the directory being created/removed whilst this code executes. 
This can happen # when a multilib recipe is parsed along with its base variant which may be running at the time # causing rare but nasty failures - path_siteconfig = d.getVar('ACLOCALDIR', True) + path_siteconfig = d.getVar('ACLOCALDIR') if path_siteconfig and os.path.isdir(path_siteconfig): for i in os.listdir(path_siteconfig): if not i.endswith("_config"): diff --git a/import-layers/yocto-poky/meta/classes/spdx.bbclass b/import-layers/yocto-poky/meta/classes/spdx.bbclass index 89394d3a9..c5f544d2a 100644 --- a/import-layers/yocto-poky/meta/classes/spdx.bbclass +++ b/import-layers/yocto-poky/meta/classes/spdx.bbclass @@ -26,20 +26,20 @@ python do_spdx () { import json, shutil info = {} - info['workdir'] = d.getVar('WORKDIR', True) - info['sourcedir'] = d.getVar('SPDX_S', True) - info['pn'] = d.getVar('PN', True) - info['pv'] = d.getVar('PV', True) - info['spdx_version'] = d.getVar('SPDX_VERSION', True) - info['data_license'] = d.getVar('DATA_LICENSE', True) - - sstatedir = d.getVar('SPDXSSTATEDIR', True) + info['workdir'] = d.getVar('WORKDIR') + info['sourcedir'] = d.getVar('SPDX_S') + info['pn'] = d.getVar('PN') + info['pv'] = d.getVar('PV') + info['spdx_version'] = d.getVar('SPDX_VERSION') + info['data_license'] = d.getVar('DATA_LICENSE') + + sstatedir = d.getVar('SPDXSSTATEDIR') sstatefile = os.path.join(sstatedir, info['pn'] + info['pv'] + ".spdx") - manifest_dir = d.getVar('SPDX_MANIFEST_DIR', True) + manifest_dir = d.getVar('SPDX_MANIFEST_DIR') info['outfile'] = os.path.join(manifest_dir, info['pn'] + ".spdx" ) - info['spdx_temp_dir'] = d.getVar('SPDX_TEMP_DIR', True) + info['spdx_temp_dir'] = d.getVar('SPDX_TEMP_DIR') info['tar_file'] = os.path.join(info['workdir'], info['pn'] + ".tar.gz" ) # Make sure important dirs exist @@ -74,9 +74,9 @@ python do_spdx () { foss_license_info = cached_spdx['Licenses'] else: ## setup fossology command - foss_server = d.getVar('FOSS_SERVER', True) - foss_flags = d.getVar('FOSS_WGET_FLAGS', True) - foss_full_spdx = d.getVar('FOSS_FULL_SPDX', True) == "true" or False + foss_server = d.getVar('FOSS_SERVER') + foss_flags = d.getVar('FOSS_WGET_FLAGS') + foss_full_spdx = d.getVar('FOSS_FULL_SPDX') == "true" or False foss_command = "wget %s --post-file=%s %s"\ % (foss_flags, info['tar_file'], foss_server) diff --git a/import-layers/yocto-poky/meta/classes/sstate.bbclass b/import-layers/yocto-poky/meta/classes/sstate.bbclass index 5b92c5485..0a12935be 100644 --- a/import-layers/yocto-poky/meta/classes/sstate.bbclass +++ b/import-layers/yocto-poky/meta/classes/sstate.bbclass @@ -11,7 +11,7 @@ def generate_sstatefn(spec, hash, d): SSTATE_PKGARCH = "${PACKAGE_ARCH}" SSTATE_PKGSPEC = "sstate:${PN}:${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}:${PV}:${PR}:${SSTATE_PKGARCH}:${SSTATE_VERSION}:" SSTATE_SWSPEC = "sstate:${PN}::${PV}:${PR}::${SSTATE_VERSION}:" -SSTATE_PKGNAME = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC', True), d.getVar('BB_TASKHASH', True), d)}" +SSTATE_PKGNAME = "${SSTATE_EXTRAPATH}${@generate_sstatefn(d.getVar('SSTATE_PKGSPEC'), d.getVar('BB_TASKHASH'), d)}" SSTATE_PKG = "${SSTATE_DIR}/${SSTATE_PKGNAME}" SSTATE_EXTRAPATH = "" SSTATE_EXTRAPATHWILDCARD = "" @@ -25,14 +25,15 @@ PV[vardepvalue] = "${PV}" SSTATE_EXTRAPATH[vardepvalue] = "" # For multilib rpm the allarch packagegroup files can overwrite (in theory they're identical) -SSTATE_DUPWHITELIST = "${DEPLOY_DIR_IMAGE}/ ${DEPLOY_DIR}/licenses/ ${DEPLOY_DIR_RPM}/all/" +SSTATE_DUPWHITELIST = "${DEPLOY_DIR_IMAGE}/ ${DEPLOY_DIR}/licenses/ ${DEPLOY_DIR_RPM}/noarch/" # Avoid 
docbook/sgml catalog warnings for now SSTATE_DUPWHITELIST += "${STAGING_ETCDIR_NATIVE}/sgml ${STAGING_DATADIR_NATIVE}/sgml" # Archive the sources for many architectures in one deploy folder SSTATE_DUPWHITELIST += "${DEPLOY_DIR_SRC}" -SSTATE_SCAN_FILES ?= "*.la *-config *_config" -SSTATE_SCAN_CMD ?= 'find ${SSTATE_BUILDDIR} \( -name "${@"\" -o -name \"".join(d.getVar("SSTATE_SCAN_FILES", True).split())}" \) -type f' +SSTATE_SCAN_FILES ?= "*.la *-config *_config postinst-*" +SSTATE_SCAN_CMD ??= 'find ${SSTATE_BUILDDIR} \( -name "${@"\" -o -name \"".join(d.getVar("SSTATE_SCAN_FILES").split())}" \) -type f' +SSTATE_SCAN_CMD_NATIVE ??= 'grep -Irl -e ${RECIPE_SYSROOT} -e ${RECIPE_SYSROOT_NATIVE} -e ${HOSTTOOLS_DIR} ${SSTATE_BUILDDIR}' BB_HASHFILENAME = "False ${SSTATE_PKGSPEC} ${SSTATE_SWSPEC}" @@ -53,7 +54,7 @@ SSTATEPOSTCREATEFUNCS = "" SSTATEPREINSTFUNCS = "" SSTATEPOSTUNPACKFUNCS = "sstate_hardcode_path_unpack" SSTATEPOSTINSTFUNCS = "" -EXTRA_STAGING_FIXMES ?= "" +EXTRA_STAGING_FIXMES ?= "HOSTTOOLS_DIR" SSTATECLEANFUNCS = "" # Check whether sstate exists for tasks that support sstate and are in the @@ -82,7 +83,7 @@ python () { d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${SDK_OS}")) elif bb.data.inherits_class('cross-canadian', d): d.setVar('SSTATE_PKGARCH', d.expand("${SDK_ARCH}_${PACKAGE_ARCH}")) - elif bb.data.inherits_class('allarch', d) and d.getVar("PACKAGE_ARCH", True) == "all": + elif bb.data.inherits_class('allarch', d) and d.getVar("PACKAGE_ARCH") == "all": d.setVar('SSTATE_PKGARCH', "allarch") else: d.setVar('SSTATE_MANMACH', d.expand("${PACKAGE_ARCH}")) @@ -92,13 +93,7 @@ python () { d.setVar('BB_HASHFILENAME', "True ${SSTATE_PKGSPEC} ${SSTATE_SWSPEC}") d.setVar('SSTATE_EXTRAPATHWILDCARD', "*/") - # These classes encode staging paths into their scripts data so can only be - # reused if we manipulate the paths - if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross', d) or bb.data.inherits_class('sdk', d) or bb.data.inherits_class('crosssdk', d): - scan_cmd = "grep -Irl ${STAGING_DIR} ${SSTATE_BUILDDIR}" - d.setVar('SSTATE_SCAN_CMD', scan_cmd) - - unique_tasks = sorted(set((d.getVar('SSTATETASKS', True) or "").split())) + unique_tasks = sorted(set((d.getVar('SSTATETASKS') or "").split())) d.setVar('SSTATETASKS', " ".join(unique_tasks)) for task in unique_tasks: d.prependVarFlag(task, 'prefuncs', "sstate_task_prefunc ") @@ -116,19 +111,20 @@ def sstate_init(task, d): def sstate_state_fromvars(d, task = None): if task is None: - task = d.getVar('BB_CURRENTTASK', True) + task = d.getVar('BB_CURRENTTASK') if not task: bb.fatal("sstate code running without task context?!") task = task.replace("_setscene", "") if task.startswith("do_"): task = task[3:] - inputs = (d.getVarFlag("do_" + task, 'sstate-inputdirs', True) or "").split() - outputs = (d.getVarFlag("do_" + task, 'sstate-outputdirs', True) or "").split() - plaindirs = (d.getVarFlag("do_" + task, 'sstate-plaindirs', True) or "").split() - lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile', True) or "").split() - lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared', True) or "").split() - interceptfuncs = (d.getVarFlag("do_" + task, 'sstate-interceptfuncs', True) or "").split() + inputs = (d.getVarFlag("do_" + task, 'sstate-inputdirs') or "").split() + outputs = (d.getVarFlag("do_" + task, 'sstate-outputdirs') or "").split() + plaindirs = (d.getVarFlag("do_" + task, 'sstate-plaindirs') or "").split() + lockfiles = (d.getVarFlag("do_" + task, 'sstate-lockfile') or "").split() + 
lockfilesshared = (d.getVarFlag("do_" + task, 'sstate-lockfile-shared') or "").split() + interceptfuncs = (d.getVarFlag("do_" + task, 'sstate-interceptfuncs') or "").split() + fixmedir = d.getVarFlag("do_" + task, 'sstate-fixmedir') or "" if not task or len(inputs) != len(outputs): bb.fatal("sstate variables not setup correctly?!") @@ -144,6 +140,7 @@ def sstate_state_fromvars(d, task = None): ss['lockfiles-shared'] = lockfilesshared ss['plaindirs'] = plaindirs ss['interceptfuncs'] = interceptfuncs + ss['fixmedir'] = fixmedir return ss def sstate_add(ss, source, dest, d): @@ -193,15 +190,18 @@ def sstate_install(ss, d): srcdir = os.path.join(walkroot, dir) dstdir = srcdir.replace(state[1], state[2]) #bb.debug(2, "Staging %s to %s" % (srcdir, dstdir)) + if os.path.islink(srcdir): + sharedfiles.append(dstdir) + continue if not dstdir.endswith("/"): dstdir = dstdir + "/" shareddirs.append(dstdir) # Check the file list for conflicts against files which already exist - whitelist = (d.getVar("SSTATE_DUPWHITELIST", True) or "").split() + whitelist = (d.getVar("SSTATE_DUPWHITELIST") or "").split() match = [] for f in sharedfiles: - if os.path.exists(f): + if os.path.exists(f) and not os.path.islink(f): f = os.path.normpath(f) realmatch = True for w in whitelist: @@ -211,25 +211,27 @@ def sstate_install(ss, d): break if realmatch: match.append(f) - sstate_search_cmd = "grep -rl '%s' %s --exclude=master.list | sed -e 's:^.*/::' -e 's:\.populate-sysroot::'" % (f, d.expand("${SSTATE_MANIFESTS}")) + sstate_search_cmd = "grep -rlF '%s' %s --exclude=master.list | sed -e 's:^.*/::'" % (f, d.expand("${SSTATE_MANIFESTS}")) search_output = subprocess.Popen(sstate_search_cmd, shell=True, stdout=subprocess.PIPE).communicate()[0] - if search_output != "": - match.append("Matched in %s" % search_output.rstrip()) + if search_output: + match.append(" (matched in %s)" % search_output.decode('utf-8').rstrip()) + else: + match.append(" (not matched to any task)") if match: bb.error("The recipe %s is trying to install files into a shared " \ "area when those files already exist. Those files and their manifest " \ - "location are:\n %s\nPlease verify which recipe should provide the " \ - "above files.\nThe build has stopped as continuing in this scenario WILL " \ - "break things, if not now, possibly in the future (we've seen builds fail " \ + "location are:\n %s\nPlease verify which recipe should provide the " \ + "above files.\n\nThe build has stopped, as continuing in this scenario WILL " \ + "break things - if not now, possibly in the future (we've seen builds fail " \ "several months later). If the system knew how to recover from this " \ - "automatically it would however there are several different scenarios " \ + "automatically it would, however there are several different scenarios " \ "which can result in this and we don't know which one this is. It may be " \ "you have switched providers of something like virtual/kernel (e.g. from " \ "linux-yocto to linux-yocto-dev), in that case you need to execute the " \ "clean task for both recipes and it will resolve this error. It may be " \ "you changed DISTRO_FEATURES from systemd to udev or vice versa. 
Cleaning " \ - "those recipes should again resolve this error however switching " \ - "DISTRO_FEATURES on an existing build directory is not supported, you " \ + "those recipes should again resolve this error, however switching " \ + "DISTRO_FEATURES on an existing build directory is not supported - you " \ "should really clean out tmp and rebuild (reusing sstate should be safe). " \ "It could be the overlapping files detected are harmless in which case " \ "adding them to SSTATE_DUPWHITELIST may be the correct solution. It could " \ @@ -237,9 +239,13 @@ def sstate_install(ss, d): "things (e.g. bluez 4 and bluez 5 and the correct solution for that would " \ "be to resolve the conflict. If in doubt, please ask on the mailing list, " \ "sharing the error and filelist above." % \ - (d.getVar('PN', True), "\n ".join(match))) + (d.getVar('PN'), "\n ".join(match))) bb.fatal("If the above message is too much, the simpler version is you're advised to wipe out tmp and rebuild (reusing sstate is fine). That will likely fix things in most (but not all) cases.") + if ss['fixmedir'] and os.path.exists(ss['fixmedir'] + "/fixmepath.cmd"): + sharedfiles.append(ss['fixmedir'] + "/fixmepath.cmd") + sharedfiles.append(ss['fixmedir'] + "/fixmepath") + # Write out the manifest f = open(manifest, "w") for file in sharedfiles: @@ -258,7 +264,7 @@ def sstate_install(ss, d): i = d2.expand("${SSTATE_MANIFESTS}/index-${SSTATE_MANMACH}") l = bb.utils.lockfile(i + ".lock") - filedata = d.getVar("STAMP", True) + " " + d2.getVar("SSTATE_MANFILEPREFIX", True) + " " + d.getVar("WORKDIR", True) + "\n" + filedata = d.getVar("STAMP") + " " + d2.getVar("SSTATE_MANFILEPREFIX") + " " + d.getVar("WORKDIR") + "\n" manifests = [] if os.path.exists(i): with open(i, "r") as f: @@ -273,7 +279,7 @@ def sstate_install(ss, d): if os.path.exists(state[1]): oe.path.copyhardlinktree(state[1], state[2]) - for postinst in (d.getVar('SSTATEPOSTINSTFUNCS', True) or '').split(): + for postinst in (d.getVar('SSTATEPOSTINSTFUNCS') or '').split(): # All hooks should run in the SSTATE_INSTDIR bb.build.exec_func(postinst, d, (sstateinst,)) @@ -284,20 +290,11 @@ sstate_install[vardepsexclude] += "SSTATE_DUPWHITELIST STATE_MANMACH SSTATE_MANF sstate_install[vardeps] += "${SSTATEPOSTINSTFUNCS}" def sstate_installpkg(ss, d): - import oe.path - import subprocess from oe.gpg_sign import get_signer - def prepdir(dir): - # remove dir if it exists, ensure any parent directories do exist - if os.path.exists(dir): - oe.path.remove(dir) - bb.utils.mkdirhier(dir) - oe.path.remove(dir) - sstateinst = d.expand("${WORKDIR}/sstate-install-%s/" % ss['task']) - sstatefetch = d.getVar('SSTATE_PKGNAME', True) + '_' + ss['task'] + ".tgz" - sstatepkg = d.getVar('SSTATE_PKG', True) + '_' + ss['task'] + ".tgz" + sstatefetch = d.getVar('SSTATE_PKGNAME') + '_' + ss['task'] + ".tgz" + sstatepkg = d.getVar('SSTATE_PKG') + '_' + ss['task'] + ".tgz" if not os.path.exists(sstatepkg): pstaging_fetch(sstatefetch, sstatepkg, d) @@ -311,22 +308,52 @@ def sstate_installpkg(ss, d): d.setVar('SSTATE_INSTDIR', sstateinst) d.setVar('SSTATE_PKG', sstatepkg) - if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG", True), False): + if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False): signer = get_signer(d, 'local') if not signer.verify(sstatepkg + '.sig'): bb.warn("Cannot verify signature on sstate package %s" % sstatepkg) - for f in (d.getVar('SSTATEPREINSTFUNCS', True) or '').split() + ['sstate_unpack_package'] + (d.getVar('SSTATEPOSTUNPACKFUNCS', True) or '').split(): + # Empty 
sstateinst directory, ensure its clean + if os.path.exists(sstateinst): + oe.path.remove(sstateinst) + bb.utils.mkdirhier(sstateinst) + + sstateinst = d.getVar("SSTATE_INSTDIR") + d.setVar('SSTATE_FIXMEDIR', ss['fixmedir']) + + for f in (d.getVar('SSTATEPREINSTFUNCS') or '').split() + ['sstate_unpack_package']: + # All hooks should run in the SSTATE_INSTDIR + bb.build.exec_func(f, d, (sstateinst,)) + + return sstate_installpkgdir(ss, d) + +def sstate_installpkgdir(ss, d): + import oe.path + import subprocess + + sstateinst = d.getVar("SSTATE_INSTDIR") + d.setVar('SSTATE_FIXMEDIR', ss['fixmedir']) + + for f in (d.getVar('SSTATEPOSTUNPACKFUNCS') or '').split(): # All hooks should run in the SSTATE_INSTDIR bb.build.exec_func(f, d, (sstateinst,)) + def prepdir(dir): + # remove dir if it exists, ensure any parent directories do exist + if os.path.exists(dir): + oe.path.remove(dir) + bb.utils.mkdirhier(dir) + oe.path.remove(dir) + for state in ss['dirs']: + if d.getVar('SSTATE_SKIP_CREATION') == '1': + continue prepdir(state[1]) os.rename(sstateinst + state[0], state[1]) sstate_install(ss, d) for plain in ss['plaindirs']: - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') src = sstateinst + "/" + plain.replace(workdir, '') dest = plain bb.utils.mkdirhier(src) @@ -342,28 +369,40 @@ python sstate_hardcode_path_unpack () { # sstate_hardcode_path(d) import subprocess - sstateinst = d.getVar('SSTATE_INSTDIR', True) - fixmefn = sstateinst + "fixmepath" + sstateinst = d.getVar('SSTATE_INSTDIR') + sstatefixmedir = d.getVar('SSTATE_FIXMEDIR') + fixmefn = sstateinst + "fixmepath" if os.path.isfile(fixmefn): - staging = d.getVar('STAGING_DIR', True) - staging_target = d.getVar('STAGING_DIR_TARGET', True) - staging_host = d.getVar('STAGING_DIR_HOST', True) - - if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): - sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIR:%s:g'" % (staging) - elif bb.data.inherits_class('cross', d): - sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIR:%s:g'" % (staging_target, staging) - else: + staging_target = d.getVar('RECIPE_SYSROOT') + staging_host = d.getVar('RECIPE_SYSROOT_NATIVE') + + if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross-canadian', d): sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRHOST:%s:g'" % (staging_host) + elif bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d): + sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIRHOST:%s:g'" % (staging_target, staging_host) + else: + sstate_sed_cmd = "sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g'" % (staging_target) - extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES', True) or '' + extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or '' for fixmevar in extra_staging_fixmes.split(): - fixme_path = d.getVar(fixmevar, True) + fixme_path = d.getVar(fixmevar) sstate_sed_cmd += " -e 's:FIXME_%s:%s:g'" % (fixmevar, fixme_path) # Add sstateinst to each filename in fixmepath, use xargs to efficiently call sed sstate_hardcode_cmd = "sed -e 's:^:%s:g' %s | xargs %s" % (sstateinst, fixmefn, sstate_sed_cmd) + # Defer do_populate_sysroot relocation command + if sstatefixmedir: + bb.utils.mkdirhier(sstatefixmedir) + with open(sstatefixmedir + "/fixmepath.cmd", "w") as f: + sstate_hardcode_cmd = sstate_hardcode_cmd.replace(fixmefn, sstatefixmedir + "/fixmepath") + sstate_hardcode_cmd = 
sstate_hardcode_cmd.replace(sstateinst, "FIXMEFINALSSTATEINST") + sstate_hardcode_cmd = sstate_hardcode_cmd.replace(staging_host, "FIXMEFINALSSTATEHOST") + sstate_hardcode_cmd = sstate_hardcode_cmd.replace(staging_target, "FIXMEFINALSSTATETARGET") + f.write(sstate_hardcode_cmd) + bb.utils.copyfile(fixmefn, sstatefixmedir + "/fixmepath") + return + bb.note("Replacing fixme paths in sstate package: %s" % (sstate_hardcode_cmd)) subprocess.call(sstate_hardcode_cmd, shell=True) @@ -375,17 +414,17 @@ python sstate_hardcode_path_unpack () { def sstate_clean_cachefile(ss, d): import oe.path - sstatepkgfile = d.getVar('SSTATE_PATHSPEC', True) + "*_" + ss['task'] + ".tgz*" + sstatepkgfile = d.getVar('SSTATE_PATHSPEC') + "*_" + ss['task'] + ".tgz*" bb.note("Removing %s" % sstatepkgfile) oe.path.remove(sstatepkgfile) def sstate_clean_cachefiles(d): - for task in (d.getVar('SSTATETASKS', True) or "").split(): + for task in (d.getVar('SSTATETASKS') or "").split(): ld = d.createCopy() ss = sstate_state_fromvars(ld, task) sstate_clean_cachefile(ss, ld) -def sstate_clean_manifest(manifest, d): +def sstate_clean_manifest(manifest, d, prefix=None): import oe.path mfile = open(manifest) @@ -394,6 +433,8 @@ def sstate_clean_manifest(manifest, d): for entry in entries: entry = entry.strip() + if prefix and not entry.startswith("/"): + entry = prefix + "/" + entry bb.debug(2, "Removing manifest: %s" % entry) # We can race against another package populating directories as we're removing them # so we ignore errors here. @@ -404,7 +445,7 @@ def sstate_clean_manifest(manifest, d): elif os.path.exists(entry) and len(os.listdir(entry)) == 0: os.rmdir(entry[:-1]) else: - oe.path.remove(entry) + os.remove(entry) except OSError: pass @@ -422,8 +463,8 @@ def sstate_clean(ss, d): import glob d2 = d.createCopy() - stamp_clean = d.getVar("STAMPCLEAN", True) - extrainf = d.getVarFlag("do_" + ss['task'], 'stamp-extra-info', True) + stamp_clean = d.getVar("STAMPCLEAN") + extrainf = d.getVarFlag("do_" + ss['task'], 'stamp-extra-info') if extrainf: d2.setVar("SSTATE_MANMACH", extrainf) wildcard_stfile = "%s.do_%s*.%s" % (stamp_clean, ss['task'], extrainf) @@ -465,7 +506,7 @@ def sstate_clean(ss, d): oe.path.remove(stfile) # Removes the users/groups created by the package - for cleanfunc in (d.getVar('SSTATECLEANFUNCS', True) or '').split(): + for cleanfunc in (d.getVar('SSTATECLEANFUNCS') or '').split(): bb.build.exec_func(cleanfunc, d) sstate_clean[vardepsexclude] = "SSTATE_MANFILEPREFIX" @@ -473,13 +514,13 @@ sstate_clean[vardepsexclude] = "SSTATE_MANFILEPREFIX" CLEANFUNCS += "sstate_cleanall" python sstate_cleanall() { - bb.note("Removing shared state for package %s" % d.getVar('PN', True)) + bb.note("Removing shared state for package %s" % d.getVar('PN')) - manifest_dir = d.getVar('SSTATE_MANIFESTS', True) + manifest_dir = d.getVar('SSTATE_MANIFESTS') if not os.path.exists(manifest_dir): return - tasks = d.getVar('SSTATETASKS', True).split() + tasks = d.getVar('SSTATETASKS').split() for name in tasks: ld = d.createCopy() shared_state = sstate_state_fromvars(ld, name) @@ -495,29 +536,29 @@ python sstate_hardcode_path () { # Note: the logic in this function needs to match the reverse logic # in sstate_installpkg(ss, d) - staging = d.getVar('STAGING_DIR', True) - staging_target = d.getVar('STAGING_DIR_TARGET', True) - staging_host = d.getVar('STAGING_DIR_HOST', True) - sstate_builddir = d.getVar('SSTATE_BUILDDIR', True) + staging_target = d.getVar('RECIPE_SYSROOT') + staging_host = d.getVar('RECIPE_SYSROOT_NATIVE') + 
sstate_builddir = d.getVar('SSTATE_BUILDDIR') - if bb.data.inherits_class('native', d) or bb.data.inherits_class('nativesdk', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross-canadian', d): - sstate_grep_cmd = "grep -l -e '%s'" % (staging) - sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIR:g'" % (staging) - elif bb.data.inherits_class('cross', d): - sstate_grep_cmd = "grep -l -e '%s' -e '%s'" % (staging_target, staging) - sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIR:g'" % (staging_target, staging) - else: + if bb.data.inherits_class('native', d) or bb.data.inherits_class('cross-canadian', d): sstate_grep_cmd = "grep -l -e '%s'" % (staging_host) sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRHOST:g'" % (staging_host) + elif bb.data.inherits_class('cross', d) or bb.data.inherits_class('crosssdk', d): + sstate_grep_cmd = "grep -l -e '%s' -e '%s'" % (staging_target, staging_host) + sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g; s:%s:FIXMESTAGINGDIRHOST:g'" % (staging_target, staging_host) + else: + sstate_grep_cmd = "grep -l -e '%s'" % (staging_target) + sstate_sed_cmd = "sed -i -e 's:%s:FIXMESTAGINGDIRTARGET:g'" % (staging_target) - extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES', True) or '' + extra_staging_fixmes = d.getVar('EXTRA_STAGING_FIXMES') or '' for fixmevar in extra_staging_fixmes.split(): - fixme_path = d.getVar(fixmevar, True) + fixme_path = d.getVar(fixmevar) sstate_sed_cmd += " -e 's:%s:FIXME_%s:g'" % (fixme_path, fixmevar) + sstate_grep_cmd += " -e '%s'" % (fixme_path) fixmefn = sstate_builddir + "fixmepath" - sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD', True) + sstate_scan_cmd = d.getVar('SSTATE_SCAN_CMD') sstate_filelist_cmd = "tee %s" % (fixmefn) # fixmepath file needs relative paths, drop sstate_builddir prefix @@ -532,96 +573,81 @@ python sstate_hardcode_path () { sstate_hardcode_cmd = "%s | xargs %s | %s | xargs %s %s" % (sstate_scan_cmd, sstate_grep_cmd, sstate_filelist_cmd, xargs_no_empty_run_cmd, sstate_sed_cmd) bb.note("Removing hardcoded paths from sstate package: '%s'" % (sstate_hardcode_cmd)) - subprocess.call(sstate_hardcode_cmd, shell=True) + subprocess.check_output(sstate_hardcode_cmd, shell=True, cwd=sstate_builddir) # If the fixmefn is empty, remove it.. if os.stat(fixmefn).st_size == 0: os.remove(fixmefn) else: bb.note("Replacing absolute paths in fixmepath file: '%s'" % (sstate_filelist_relative_cmd)) - subprocess.call(sstate_filelist_relative_cmd, shell=True) + subprocess.check_output(sstate_filelist_relative_cmd, shell=True) } def sstate_package(ss, d): import oe.path - def make_relative_symlink(path, outputpath, d): - # Replace out absolute TMPDIR paths in symlinks with relative ones - if not os.path.islink(path): - return - link = os.readlink(path) - if not os.path.isabs(link): - return - if not link.startswith(tmpdir): - return - - depth = outputpath.rpartition(tmpdir)[2].count('/') - base = link.partition(tmpdir)[2].strip() - while depth > 1: - base = "/.." + base - depth -= 1 - base = "." 
+ base - - bb.debug(2, "Replacing absolute path %s with relative path %s for %s" % (link, base, outputpath)) - os.remove(path) - os.symlink(base, path) - - tmpdir = d.getVar('TMPDIR', True) + tmpdir = d.getVar('TMPDIR') sstatebuild = d.expand("${WORKDIR}/sstate-build-%s/" % ss['task']) - sstatepkg = d.getVar('SSTATE_PKG', True) + '_'+ ss['task'] + ".tgz" + sstatepkg = d.getVar('SSTATE_PKG') + '_'+ ss['task'] + ".tgz" bb.utils.remove(sstatebuild, recurse=True) bb.utils.mkdirhier(sstatebuild) bb.utils.mkdirhier(os.path.dirname(sstatepkg)) for state in ss['dirs']: if not os.path.exists(state[1]): continue - if d.getVar('SSTATE_SKIP_CREATION', True) == '1': + if d.getVar('SSTATE_SKIP_CREATION') == '1': continue srcbase = state[0].rstrip("/").rsplit('/', 1)[0] + # Find and error for absolute symlinks. We could attempt to relocate but its not + # clear where the symlink is relative to in this context. We could add that markup + # to sstate tasks but there aren't many of these so better just avoid them entirely. for walkroot, dirs, files in os.walk(state[1]): - for file in files: + for file in files + dirs: srcpath = os.path.join(walkroot, file) - dstpath = srcpath.replace(state[1], state[2]) - make_relative_symlink(srcpath, dstpath, d) - for dir in dirs: - srcpath = os.path.join(walkroot, dir) - dstpath = srcpath.replace(state[1], state[2]) - make_relative_symlink(srcpath, dstpath, d) + if not os.path.islink(srcpath): + continue + link = os.readlink(srcpath) + if not os.path.isabs(link): + continue + if not link.startswith(tmpdir): + continue + bb.error("sstate found an absolute path symlink %s pointing at %s. Please replace this with a relative link." % (srcpath, link)) bb.debug(2, "Preparing tree %s for packaging at %s" % (state[1], sstatebuild + state[0])) - oe.path.copyhardlinktree(state[1], sstatebuild + state[0]) + os.rename(state[1], sstatebuild + state[0]) - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') for plain in ss['plaindirs']: pdir = plain.replace(workdir, sstatebuild) bb.utils.mkdirhier(plain) bb.utils.mkdirhier(pdir) - oe.path.copyhardlinktree(plain, pdir) + os.rename(plain, pdir) d.setVar('SSTATE_BUILDDIR', sstatebuild) d.setVar('SSTATE_PKG', sstatepkg) - for f in (d.getVar('SSTATECREATEFUNCS', True) or '').split() + \ + for f in (d.getVar('SSTATECREATEFUNCS') or '').split() + \ ['sstate_create_package', 'sstate_sign_package'] + \ - (d.getVar('SSTATEPOSTCREATEFUNCS', True) or '').split(): + (d.getVar('SSTATEPOSTCREATEFUNCS') or '').split(): # All hooks should run in SSTATE_BUILDDIR. bb.build.exec_func(f, d, (sstatebuild,)) bb.siggen.dump_this_task(sstatepkg + ".siginfo", d) + d.setVar('SSTATE_INSTDIR', sstatebuild) + return def pstaging_fetch(sstatefetch, sstatepkg, d): import bb.fetch2 # Only try and fetch if the user has configured a mirror - mirrors = d.getVar('SSTATE_MIRRORS', True) + mirrors = d.getVar('SSTATE_MIRRORS') if not mirrors: return # Copy the data object and override DL_DIR and SRC_URI localdata = bb.data.createCopy(d) - bb.data.update_data(localdata) dldir = localdata.expand("${SSTATE_DIR}") bb.utils.mkdirhier(dldir) @@ -633,14 +659,14 @@ def pstaging_fetch(sstatefetch, sstatepkg, d): # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK, # we'll want to allow network access for the current set of fetches. 
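As context for the pstaging_fetch() hunk here, the mirror fetch preparation amounts to two steps: temporarily lift BB_NO_NETWORK on the copied datastore when SSTATE_MIRROR_ALLOW_NETWORK permits it, then try plain file:// URIs for the sstate archive, its .siginfo and, when signature verification is enabled, its .sig. A rough standalone sketch with an illustrative function name (not the bbclass code):

    def prepare_sstate_mirror_fetch(localdata, sstatefetch, verify_sig=False):
        """Sketch of the mirror-fetch preparation performed in pstaging_fetch()."""
        # Allow network access for these fetches only, if explicitly permitted.
        if localdata.getVar('BB_NO_NETWORK') == "1" and \
                localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK') == "1":
            localdata.delVar('BB_NO_NETWORK')
        # One URI per artifact; the .sig is only wanted when verification is on.
        uris = ['file://{0};downloadfilename={0}'.format(sstatefetch),
                'file://{0}.siginfo;downloadfilename={0}.siginfo'.format(sstatefetch)]
        if verify_sig:
            uris.append('file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch))
        return uris
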
- if localdata.getVar('BB_NO_NETWORK', True) == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK', True) == "1": + if localdata.getVar('BB_NO_NETWORK') == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK') == "1": localdata.delVar('BB_NO_NETWORK') # Try a fetch from the sstate mirror, if it fails just return and # we will build the package uris = ['file://{0};downloadfilename={0}'.format(sstatefetch), 'file://{0}.siginfo;downloadfilename={0}.siginfo'.format(sstatefetch)] - if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG", True), False): + if bb.utils.to_boolean(d.getVar("SSTATE_VERIFY_SIG"), False): uris += ['file://{0}.sig;downloadfilename={0}.sig'.format(sstatefetch)] for srcuri in uris: @@ -667,14 +693,21 @@ sstate_task_prefunc[dirs] = "${WORKDIR}" python sstate_task_postfunc () { shared_state = sstate_state_fromvars(d) - sstate_install(shared_state, d) for intercept in shared_state['interceptfuncs']: - bb.build.exec_func(intercept, d, (d.getVar("WORKDIR", True),)) + bb.build.exec_func(intercept, d, (d.getVar("WORKDIR"),)) + omask = os.umask(0o002) if omask != 0o002: bb.note("Using umask 0o002 (not %0o) for sstate packaging" % omask) sstate_package(shared_state, d) os.umask(omask) + + sstateinst = d.getVar("SSTATE_INSTDIR") + d.setVar('SSTATE_FIXMEDIR', shared_state['fixmedir']) + + sstate_installpkgdir(shared_state, d) + + bb.utils.remove(d.getVar("SSTATE_BUILDDIR"), recurse=True) } sstate_task_postfunc[dirs] = "${WORKDIR}" @@ -699,21 +732,18 @@ sstate_create_package () { fi chmod 0664 $TFILE mv -f $TFILE ${SSTATE_PKG} - - cd ${WORKDIR} - rm -rf ${SSTATE_BUILDDIR} } python sstate_sign_package () { from oe.gpg_sign import get_signer - if d.getVar('SSTATE_SIG_KEY', True): + if d.getVar('SSTATE_SIG_KEY'): signer = get_signer(d, 'local') - sstate_pkg = d.getVar('SSTATE_PKG', True) + sstate_pkg = d.getVar('SSTATE_PKG') if os.path.exists(sstate_pkg + '.sig'): os.unlink(sstate_pkg + '.sig') signer.detach_sign(sstate_pkg, d.getVar('SSTATE_SIG_KEY', False), None, - d.getVar('SSTATE_SIG_PASSPHRASE', True), armor=False) + d.getVar('SSTATE_SIG_PASSPHRASE'), armor=False) } # @@ -736,7 +766,6 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False): ret = [] missed = [] - missing = [] extension = ".tgz" if siginfo: extension = extension + ".siginfo" @@ -746,7 +775,7 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False): splithashfn = sq_hashfn[task].split(" ") spec = splithashfn[1] if splithashfn[0] == "True": - extrapath = d.getVar("NATIVELSBSTRING", True) + "/" + extrapath = d.getVar("NATIVELSBSTRING") + "/" else: extrapath = "" @@ -758,18 +787,6 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False): return spec, extrapath, tname - def sstate_pkg_to_pn(pkg, d): - """ - Translate an sstate filename to a PN value by way of SSTATE_PKGSPEC. This is slightly hacky but - we don't have access to everything in this context. 
- """ - pkgspec = d.getVar('SSTATE_PKGSPEC', False) - try: - idx = pkgspec.split(':').index('${PN}') - except ValueError: - bb.fatal('Unable to find ${PN} in SSTATE_PKGSPEC') - return pkg.split(':')[idx] - for task in range(len(sq_fn)): @@ -785,11 +802,10 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False): missed.append(task) bb.debug(2, "SState: Looked for but didn't find file %s" % sstatefile) - mirrors = d.getVar("SSTATE_MIRRORS", True) + mirrors = d.getVar("SSTATE_MIRRORS") if mirrors: # Copy the data object and override DL_DIR and SRC_URI localdata = bb.data.createCopy(d) - bb.data.update_data(localdata) dldir = localdata.expand("${SSTATE_DIR}") localdata.delVar('MIRRORS') @@ -801,11 +817,9 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False): # if BB_NO_NETWORK is set but we also have SSTATE_MIRROR_ALLOW_NETWORK, # we'll want to allow network access for the current set of fetches. - if localdata.getVar('BB_NO_NETWORK', True) == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK', True) == "1": + if localdata.getVar('BB_NO_NETWORK') == "1" and localdata.getVar('SSTATE_MIRROR_ALLOW_NETWORK') == "1": localdata.delVar('BB_NO_NETWORK') - whitelist = bb.runqueue.get_setscene_enforce_whitelist(d) - from bb.fetch2 import FetchConnectionCache def checkstatus_init(thread_worker): thread_worker.connection_cache = FetchConnectionCache() @@ -832,12 +846,6 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False): except: missed.append(task) bb.debug(2, "SState: Unsuccessful fetch test for %s" % srcuri) - if whitelist: - pn = sstate_pkg_to_pn(sstatefile, d) - taskname = sq_task[task] - if not bb.runqueue.check_setscene_enforce_whitelist(pn, taskname, whitelist): - missing.append(task) - bb.error('Sstate artifact unavailable for %s.%s' % (pn, taskname)) pass bb.event.fire(bb.event.ProcessProgress("Checking sstate mirror object availability", len(tasklist) - thread_worker.tasks.qsize()), d) @@ -865,10 +873,8 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False): bb.event.disable_threadlock() bb.event.fire(bb.event.ProcessFinished("Checking sstate mirror object availability"), d) - if whitelist and missing: - bb.fatal('Required artifacts were unavailable - exiting') - inheritlist = d.getVar("INHERIT", True) + inheritlist = d.getVar("INHERIT") if "toaster" in inheritlist: evdata = {'missed': [], 'found': []}; for task in missed: @@ -888,24 +894,31 @@ def sstate_checkhashes(sq_fn, sq_task, sq_hash, sq_hashfn, d, siginfo=False): BB_SETSCENE_DEPVALID = "setscene_depvalid" -def setscene_depvalid(task, taskdependees, notneeded, d): +def setscene_depvalid(task, taskdependees, notneeded, d, log=None): # taskdependees is a dict of tasks which depend on task, each being a 3 item list of [PN, TASKNAME, FILENAME] # task is included in taskdependees too + # Return - False - We need this dependency + # - True - We can skip this dependency - bb.debug(2, "Considering setscene task: %s" % (str(taskdependees[task]))) + def logit(msg, log): + if log is not None: + log.append(msg) + else: + bb.debug(2, msg) - def isNativeCross(x): - return x.endswith("-native") or "-cross-" in x or "-crosssdk" in x + logit("Considering setscene task: %s" % (str(taskdependees[task])), log) - def isPostInstDep(x): - if x in ["qemu-native", "gdk-pixbuf-native", "qemuwrapper-cross", "depmodwrapper-cross", "systemd-systemctl-native", "gtk-icon-utils-native", "ca-certificates-native"]: - return True - return False + def isNativeCross(x): 
+ return x.endswith("-native") or "-cross-" in x or "-crosssdk" in x or x.endswith("-cross") # We only need to trigger populate_lic through direct dependencies if taskdependees[task][1] == "do_populate_lic": return True + # stash_locale and gcc_stash_builddir are never needed as a dependency for built objects + if taskdependees[task][1] == "do_stash_locale" or taskdependees[task][1] == "do_gcc_stash_builddir": + return True + # We only need to trigger packagedata through direct dependencies # but need to preserve packagedata on packagedata links if taskdependees[task][1] == "do_packagedata": @@ -915,7 +928,7 @@ def setscene_depvalid(task, taskdependees, notneeded, d): return True for dep in taskdependees: - bb.debug(2, " considering dependency: %s" % (str(taskdependees[dep]))) + logit(" considering dependency: %s" % (str(taskdependees[dep])), log) if task == dep: continue if dep in notneeded: @@ -923,10 +936,11 @@ def setscene_depvalid(task, taskdependees, notneeded, d): # do_package_write_* and do_package doesn't need do_package if taskdependees[task][1] == "do_package" and taskdependees[dep][1] in ['do_package', 'do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm', 'do_packagedata', 'do_package_qa']: continue - # do_package_write_* and do_package doesn't need do_populate_sysroot, unless is a postinstall dependency - if taskdependees[task][1] == "do_populate_sysroot" and taskdependees[dep][1] in ['do_package', 'do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm', 'do_packagedata', 'do_package_qa']: - if isPostInstDep(taskdependees[task][0]) and taskdependees[dep][1] in ['do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm']: - return False + # do_package_write_* need do_populate_sysroot as they're mainly postinstall dependencies + if taskdependees[task][1] == "do_populate_sysroot" and taskdependees[dep][1] in ['do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm']: + return False + # do_package/packagedata/package_qa don't need do_populate_sysroot + if taskdependees[task][1] == "do_populate_sysroot" and taskdependees[dep][1] in ['do_package', 'do_packagedata', 'do_package_qa']: continue # Native/Cross packages don't exist and are noexec anyway if isNativeCross(taskdependees[dep][0]) and taskdependees[dep][1] in ['do_package_write_deb', 'do_package_write_ipk', 'do_package_write_rpm', 'do_packagedata', 'do_package', 'do_package_qa']: @@ -968,7 +982,7 @@ def setscene_depvalid(task, taskdependees, notneeded, d): # Safe fallthrough default - bb.debug(2, " Default setscene dependency fall through due to dependency: %s" % (str(taskdependees[dep]))) + logit(" Default setscene dependency fall through due to dependency: %s" % (str(taskdependees[dep])), log) return False return True @@ -977,15 +991,15 @@ sstate_eventhandler[eventmask] = "bb.build.TaskSucceeded" python sstate_eventhandler() { d = e.data # When we write an sstate package we rewrite the SSTATE_PKG - spkg = d.getVar('SSTATE_PKG', True) + spkg = d.getVar('SSTATE_PKG') if not spkg.endswith(".tgz"): - taskname = d.getVar("BB_RUNTASK", True)[3:] - spec = d.getVar('SSTATE_PKGSPEC', True) - swspec = d.getVar('SSTATE_SWSPEC', True) + taskname = d.getVar("BB_RUNTASK")[3:] + spec = d.getVar('SSTATE_PKGSPEC') + swspec = d.getVar('SSTATE_SWSPEC') if taskname in ["fetch", "unpack", "patch", "populate_lic", "preconfigure"] and swspec: d.setVar("SSTATE_PKGSPEC", "${SSTATE_SWSPEC}") d.setVar("SSTATE_EXTRAPATH", "") - sstatepkg = d.getVar('SSTATE_PKG', True) + sstatepkg = 
d.getVar('SSTATE_PKG') bb.siggen.dump_this_task(sstatepkg + '_' + taskname + ".tgz" ".siginfo", d) } @@ -1004,7 +1018,7 @@ python sstate_eventhandler2() { stamps = e.stamps.values() removeworkdir = (d.getVar("SSTATE_PRUNE_OBSOLETEWORKDIR", False) == "1") seen = [] - for a in d.getVar("SSTATE_ARCHS", True).split(): + for a in d.getVar("SSTATE_ARCHS").split(): toremove = [] i = d.expand("${SSTATE_MANIFESTS}/index-" + a) if not os.path.exists(i): diff --git a/import-layers/yocto-poky/meta/classes/staging.bbclass b/import-layers/yocto-poky/meta/classes/staging.bbclass index bfabd06f3..984051d6a 100644 --- a/import-layers/yocto-poky/meta/classes/staging.bbclass +++ b/import-layers/yocto-poky/meta/classes/staging.bbclass @@ -31,6 +31,7 @@ SYSROOT_DIRS_BLACKLIST = " \ ${datadir}/applications \ ${datadir}/fonts \ ${datadir}/pixmaps \ + ${libdir}/${PN}/ptest \ " sysroot_stage_dir() { @@ -69,8 +70,8 @@ sysroot_stage_all() { python sysroot_strip () { import stat, errno - dvar = d.getVar('SYSROOT_DESTDIR', True) - pn = d.getVar('PN', True) + dvar = d.getVar('SYSROOT_DESTDIR') + pn = d.getVar('PN') os.chdir(dvar) @@ -103,9 +104,9 @@ python sysroot_strip () { elffiles = {} inodes = {} - libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir", True)) - baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir", True)) - if (d.getVar('INHIBIT_SYSROOT_STRIP', True) != '1'): + libdir = os.path.abspath(dvar + os.sep + d.getVar("libdir")) + baselibdir = os.path.abspath(dvar + os.sep + d.getVar("base_libdir")) + if (d.getVar('INHIBIT_SYSROOT_STRIP') != '1'): # # First lets figure out all of the files we may have to process # @@ -136,7 +137,7 @@ python sysroot_strip () { elf_file = isELF(file) if elf_file & 1: if elf_file & 2: - if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn, True) or "").split(): + if 'already-stripped' in (d.getVar('INSANE_SKIP_' + pn) or "").split(): bb.note("Skipping file %s from %s for already-stripped QA test" % (file[len(dvar):], pn)) else: bb.warn("File '%s' from %s was already stripped, this will prevent future debugging!" 
% (file[len(dvar):], pn)) @@ -154,7 +155,7 @@ python sysroot_strip () { # # Now strip them (in parallel) # - strip = d.getVar("STRIP", True) + strip = d.getVar("STRIP") sfiles = [] for file in elffiles: elf_file = int(elffiles[file]) @@ -172,52 +173,16 @@ addtask populate_sysroot after do_install SYSROOT_PREPROCESS_FUNCS ?= "" SYSROOT_DESTDIR = "${WORKDIR}/sysroot-destdir" -# We clean out any existing sstate from the sysroot if we rerun configure -python sysroot_cleansstate () { - ss = sstate_state_fromvars(d, "populate_sysroot") - sstate_clean(ss, d) -} -do_configure[prefuncs] += "sysroot_cleansstate" - - -BB_SETSCENE_VERIFY_FUNCTION2 = "sysroot_checkhashes2" - -def sysroot_checkhashes2(covered, tasknames, fns, d, invalidtasks): - problems = set() - configurefns = set() - for tid in invalidtasks: - if tasknames[tid] == "do_configure" and tid not in covered: - configurefns.add(fns[tid]) - for tid in covered: - if tasknames[tid] == "do_populate_sysroot" and fns[tid] in configurefns: - problems.add(tid) - return problems - -BB_SETSCENE_VERIFY_FUNCTION = "sysroot_checkhashes" - -def sysroot_checkhashes(covered, tasknames, fnids, fns, d, invalidtasks = None): - problems = set() - configurefnids = set() - if not invalidtasks: - invalidtasks = range(len(tasknames)) - for task in invalidtasks: - if tasknames[task] == "do_configure" and task not in covered: - configurefnids.add(fnids[task]) - for task in covered: - if tasknames[task] == "do_populate_sysroot" and fnids[task] in configurefnids: - problems.add(task) - return problems - python do_populate_sysroot () { bb.build.exec_func("sysroot_stage_all", d) bb.build.exec_func("sysroot_strip", d) - for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS', True) or '').split(): + for f in (d.getVar('SYSROOT_PREPROCESS_FUNCS') or '').split(): bb.build.exec_func(f, d) - pn = d.getVar("PN", True) - multiprov = d.getVar("MULTI_PROVIDER_WHITELIST", True).split() + pn = d.getVar("PN") + multiprov = d.getVar("MULTI_PROVIDER_WHITELIST").split() provdir = d.expand("${SYSROOT_DESTDIR}${base_prefix}/sysroot-providers/") bb.utils.mkdirhier(provdir) - for p in d.getVar("PROVIDES", True).split(): + for p in d.getVar("PROVIDES").split(): if p in multiprov: continue p = p.replace("/", "_") @@ -228,15 +193,483 @@ python do_populate_sysroot () { do_populate_sysroot[vardeps] += "${SYSROOT_PREPROCESS_FUNCS}" do_populate_sysroot[vardepsexclude] += "MULTI_PROVIDER_WHITELIST" +POPULATESYSROOTDEPS = "" +POPULATESYSROOTDEPS_class-target = "virtual/${MLPREFIX}${TARGET_PREFIX}binutils:do_populate_sysroot" +POPULATESYSROOTDEPS_class-nativesdk = "virtual/${TARGET_PREFIX}binutils-crosssdk:do_populate_sysroot" +do_populate_sysroot[depends] += "${POPULATESYSROOTDEPS}" + SSTATETASKS += "do_populate_sysroot" do_populate_sysroot[cleandirs] = "${SYSROOT_DESTDIR}" do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}" -do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_HOST}/" -do_populate_sysroot[stamp-extra-info] = "${MACHINE}" +do_populate_sysroot[sstate-outputdirs] = "${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}" +do_populate_sysroot[sstate-fixmedir] = "${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}" python do_populate_sysroot_setscene () { sstate_setscene(d) } addtask do_populate_sysroot_setscene +def staging_copyfile(c, target, dest, postinsts, seendirs): + import errno + + destdir = os.path.dirname(dest) + if destdir not in seendirs: + bb.utils.mkdirhier(destdir) + seendirs.add(destdir) + if "/usr/bin/postinst-" in c: + postinsts.append(dest) + if os.path.islink(c): + linkto = os.readlink(c) 
+        if os.path.lexists(dest):
+            if not os.path.islink(dest):
+                raise OSError(errno.EEXIST, "Link %s already exists as a file" % dest, dest)
+            if os.readlink(dest) == linkto:
+                return dest
+            raise OSError(errno.EEXIST, "Link %s already exists to a different location? (%s vs %s)" % (dest, os.readlink(dest), linkto), dest)
+        os.symlink(linkto, dest)
+        #bb.warn(c)
+    else:
+        try:
+            os.link(c, dest)
+        except OSError as err:
+            if err.errno == errno.EXDEV:
+                bb.utils.copyfile(c, dest)
+            else:
+                raise
+    return dest
+
+def staging_copydir(c, target, dest, seendirs):
+    if dest not in seendirs:
+        bb.utils.mkdirhier(dest)
+        seendirs.add(dest)
+
+def staging_processfixme(fixme, target, recipesysroot, recipesysrootnative, d):
+    import subprocess
+
+    if not fixme:
+        return
+    cmd = "sed -e 's:^[^/]*/:%s/:g' %s | xargs sed -i -e 's:FIXMESTAGINGDIRTARGET:%s:g; s:FIXMESTAGINGDIRHOST:%s:g'" % (target, " ".join(fixme), recipesysroot, recipesysrootnative)
+    for fixmevar in ['COMPONENTS_DIR', 'HOSTTOOLS_DIR', 'PKGDATA_DIR', 'PSEUDO_LOCALSTATEDIR', 'LOGFIFO']:
+        fixme_path = d.getVar(fixmevar)
+        cmd += " -e 's:FIXME_%s:%s:g'" % (fixmevar, fixme_path)
+    bb.debug(2, cmd)
+    subprocess.check_output(cmd, shell=True)
+
+
+def staging_populate_sysroot_dir(targetsysroot, nativesysroot, native, d):
+    import glob
+    import subprocess
+
+    fixme = []
+    postinsts = []
+    seendirs = set()
+    stagingdir = d.getVar("STAGING_DIR")
+    if native:
+        pkgarchs = ['${BUILD_ARCH}', '${BUILD_ARCH}_*']
+        targetdir = nativesysroot
+    else:
+        pkgarchs = ['${MACHINE_ARCH}']
+        pkgarchs = pkgarchs + list(reversed(d.getVar("PACKAGE_EXTRA_ARCHS").split()))
+        pkgarchs.append('allarch')
+        targetdir = targetsysroot
+
+    bb.utils.mkdirhier(targetdir)
+    for pkgarch in pkgarchs:
+        for manifest in glob.glob(d.expand("${SSTATE_MANIFESTS}/manifest-%s-*.populate_sysroot" % pkgarch)):
+            if manifest.endswith("-initial.populate_sysroot"):
+                # skip glibc-initial and libgcc-initial due to file overlap
+                continue
+            tmanifest = targetdir + "/" + os.path.basename(manifest)
+            if os.path.exists(tmanifest):
+                continue
+            try:
+                os.link(manifest, tmanifest)
+            except OSError as err:
+                if err.errno == errno.EXDEV:
+                    bb.utils.copyfile(manifest, tmanifest)
+                else:
+                    raise
+            with open(manifest, "r") as f:
+                for l in f:
+                    l = l.strip()
+                    if l.endswith("/fixmepath"):
+                        fixme.append(l)
+                        continue
+                    if l.endswith("/fixmepath.cmd"):
+                        continue
+                    dest = l.replace(stagingdir, "")
+                    dest = targetdir + "/" + "/".join(dest.split("/")[3:])
+                    if l.endswith("/"):
+                        staging_copydir(l, targetdir, dest, seendirs)
+                        continue
+                    try:
+                        staging_copyfile(l, targetdir, dest, postinsts, seendirs)
+                    except FileExistsError:
+                        continue
+
+    staging_processfixme(fixme, targetdir, targetsysroot, nativesysroot, d)
+    for p in postinsts:
+        subprocess.check_output(p, shell=True)
+
+#
+# Manifests here are complicated. The main sysroot area has the unpacked sstate
+# which is unrelocated and tracked by the main sstate manifests. Each recipe
+# specific sysroot has manifests for each dependency that is installed there.
+# The task hash is used to tell whether the data needs to be reinstalled. We
+# use a symlink to point to the currently installed hash. There is also a
+# "complete" stamp file which is used to mark if installation completed. If
+# something fails (e.g. a postinst), this won't get written and we would
+# remove and reinstall the dependency. This also means partially installed
+# dependencies should get cleaned up correctly.
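To make the bookkeeping described above concrete, here is a minimal sketch, assuming the depdir layout that extend_recipe_sysroot() below maintains (a "<recipe>.<taskhash>" manifest, a "<recipe>" symlink pointing at it, and a "<recipe>.complete" stamp); the helper name is made up for illustration and is not part of the patch:

    import os

    def dep_is_current(depdir, recipe, taskhash):
        """Return True if 'recipe' is installed in the recipe-specific sysroot
        at exactly 'taskhash' and the installation ran to completion."""
        link = os.path.join(depdir, recipe)
        if not os.path.islink(link):
            return False
        # The symlink records which task hash was installed last.
        if os.readlink(link) != "%s.%s" % (recipe, taskhash):
            return False
        # A missing ".complete" stamp means a postinst failed or the build was
        # interrupted, so the dependency gets cleaned and reinstalled.
        return os.path.exists(link + ".complete")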
+# + +python extend_recipe_sysroot() { + import copy + import subprocess + import errno + import collections + import glob + + taskdepdata = d.getVar("BB_TASKDEPDATA", False) + mytaskname = d.getVar("BB_RUNTASK") + if mytaskname.endswith("_setscene"): + mytaskname = mytaskname.replace("_setscene", "") + workdir = d.getVar("WORKDIR") + #bb.warn(str(taskdepdata)) + pn = d.getVar("PN") + + stagingdir = d.getVar("STAGING_DIR") + sharedmanifests = d.getVar("COMPONENTS_DIR") + "/manifests" + recipesysroot = d.getVar("RECIPE_SYSROOT") + recipesysrootnative = d.getVar("RECIPE_SYSROOT_NATIVE") + current_variant = d.getVar("BBEXTENDVARIANT") + + # Detect bitbake -b usage + nodeps = d.getVar("BB_LIMITEDDEPS") or False + if nodeps: + lock = bb.utils.lockfile(recipesysroot + "/sysroot.lock") + staging_populate_sysroot_dir(recipesysroot, recipesysrootnative, True, d) + staging_populate_sysroot_dir(recipesysroot, recipesysrootnative, False, d) + bb.utils.unlockfile(lock) + return + + start = None + configuredeps = [] + for dep in taskdepdata: + data = taskdepdata[dep] + if data[1] == mytaskname and data[0] == pn: + start = dep + break + if start is None: + bb.fatal("Couldn't find ourself in BB_TASKDEPDATA?") + + # We need to figure out which sysroot files we need to expose to this task. + # This needs to match what would get restored from sstate, which is controlled + # ultimately by calls from bitbake to setscene_depvalid(). + # That function expects a setscene dependency tree. We build a dependency tree + # condensed to inter-sstate task dependencies, similar to that used by setscene + # tasks. We can then call into setscene_depvalid() and decide + # which dependencies we can "see" and should expose in the recipe specific sysroot. + setscenedeps = copy.deepcopy(taskdepdata) + + start = set([start]) + + sstatetasks = d.getVar("SSTATETASKS").split() + + def print_dep_tree(deptree): + data = "" + for dep in deptree: + deps = " " + "\n ".join(deptree[dep][3]) + "\n" + data = "%s:\n %s\n %s\n%s %s\n %s\n" % (deptree[dep][0], deptree[dep][1], deptree[dep][2], deps, deptree[dep][4], deptree[dep][5]) + return data + + #bb.note("Full dep tree is:\n%s" % print_dep_tree(taskdepdata)) + + #bb.note(" start2 is %s" % str(start)) + + # If start is an sstate task (like do_package) we need to add in its direct dependencies + # else the code below won't recurse into them. 
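A quick aside on the numeric subscripts used throughout this function: in the BitBake version this commit imports, each BB_TASKDEPDATA entry appears to be a six-element list, roughly [pn, taskname, fn, depends, provides, taskhash]; that layout is an assumption stated here only to make the [0], [1], [3] and [5] lookups below easier to follow. An illustrative entry with made-up values:

    # Hypothetical BB_TASKDEPDATA / setscenedeps entry, for orientation only:
    example_entry = ["quilt-native",                                   # [0] recipe name (PN)
                     "do_populate_sysroot",                            # [1] task name
                     "virtual:native:/path/to/quilt-native_0.65.bb",   # [2] fn, incl. any virtual: prefix
                     {"tid-2", "tid-3"},                               # [3] dependencies (task ids)
                     ["quilt-native"],                                 # [4] provides
                     "0123abcd"]                                       # [5] task hash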
+ for dep in set(start): + for dep2 in setscenedeps[dep][3]: + start.add(dep2) + start.remove(dep) + + #bb.note(" start3 is %s" % str(start)) + + # Create collapsed do_populate_sysroot -> do_populate_sysroot tree + for dep in taskdepdata: + data = setscenedeps[dep] + if data[1] not in sstatetasks: + for dep2 in setscenedeps: + data2 = setscenedeps[dep2] + if dep in data2[3]: + data2[3].update(setscenedeps[dep][3]) + data2[3].remove(dep) + if dep in start: + start.update(setscenedeps[dep][3]) + start.remove(dep) + del setscenedeps[dep] + + # Remove circular references + for dep in setscenedeps: + if dep in setscenedeps[dep][3]: + setscenedeps[dep][3].remove(dep) + + #bb.note("Computed dep tree is:\n%s" % print_dep_tree(setscenedeps)) + #bb.note(" start is %s" % str(start)) + + # Direct dependencies should be present and can be depended upon + for dep in set(start): + if setscenedeps[dep][1] == "do_populate_sysroot": + if dep not in configuredeps: + configuredeps.append(dep) + bb.note("Direct dependencies are %s" % str(configuredeps)) + #bb.note(" or %s" % str(start)) + + msgbuf = [] + # Call into setscene_depvalid for each sub-dependency and only copy sysroot files + # for ones that would be restored from sstate. + done = list(start) + next = list(start) + while next: + new = [] + for dep in next: + data = setscenedeps[dep] + for datadep in data[3]: + if datadep in done: + continue + taskdeps = {} + taskdeps[dep] = setscenedeps[dep][:2] + taskdeps[datadep] = setscenedeps[datadep][:2] + retval = setscene_depvalid(datadep, taskdeps, [], d, msgbuf) + if retval: + msgbuf.append("Skipping setscene dependency %s for installation into the sysroot" % datadep) + continue + done.append(datadep) + new.append(datadep) + if datadep not in configuredeps and setscenedeps[datadep][1] == "do_populate_sysroot": + configuredeps.append(datadep) + msgbuf.append("Adding dependency on %s" % setscenedeps[datadep][0]) + else: + msgbuf.append("Following dependency on %s" % setscenedeps[datadep][0]) + next = new + + bb.note("\n".join(msgbuf)) + + depdir = recipesysrootnative + "/installeddeps" + bb.utils.mkdirhier(depdir) + bb.utils.mkdirhier(sharedmanifests) + + lock = bb.utils.lockfile(recipesysroot + "/sysroot.lock") + + fixme = {} + fixme[''] = [] + fixme['native'] = [] + seendirs = set() + postinsts = [] + multilibs = {} + manifests = {} + + for f in os.listdir(depdir): + if not f.endswith(".complete"): + continue + f = depdir + "/" + f + if os.path.islink(f) and not os.path.exists(f): + bb.note("%s no longer exists, removing from sysroot" % f) + lnk = os.readlink(f.replace(".complete", "")) + sstate_clean_manifest(depdir + "/" + lnk, d, workdir) + os.unlink(f) + os.unlink(f.replace(".complete", "")) + + installed = [] + for dep in configuredeps: + c = setscenedeps[dep][0] + if mytaskname in ["do_sdk_depends", "do_populate_sdk_ext"] and c.endswith("-initial"): + bb.note("Skipping initial setscene dependency %s for installation into the sysroot" % c) + continue + installed.append(c) + + # We want to remove anything which this task previously installed but is no longer a dependency + taskindex = depdir + "/" + "index." + mytaskname + if os.path.exists(taskindex): + potential = [] + with open(taskindex, "r") as f: + for l in f: + l = l.strip() + if l not in installed: + fl = depdir + "/" + l + if not os.path.exists(fl): + # Was likely already uninstalled + continue + potential.append(l) + # We need to ensure not other task needs this dependency. 
We hold the sysroot + # lock so we ca search the indexes to check + if potential: + for i in glob.glob(depdir + "/index.*"): + if i.endswith("." + mytaskname): + continue + with open(i, "r") as f: + for l in f: + l = l.strip() + if l in potential: + potential.remove(l) + for l in potential: + fl = depdir + "/" + l + bb.note("Task %s no longer depends on %s, removing from sysroot" % (mytaskname, l)) + lnk = os.readlink(fl) + sstate_clean_manifest(depdir + "/" + lnk, d, workdir) + os.unlink(fl) + os.unlink(fl + ".complete") + + for dep in configuredeps: + c = setscenedeps[dep][0] + if c not in installed: + continue + taskhash = setscenedeps[dep][5] + taskmanifest = depdir + "/" + c + "." + taskhash + + if os.path.exists(depdir + "/" + c): + lnk = os.readlink(depdir + "/" + c) + if lnk == c + "." + taskhash and os.path.exists(depdir + "/" + c + ".complete"): + bb.note("%s exists in sysroot, skipping" % c) + continue + else: + bb.note("%s exists in sysroot, but is stale (%s vs. %s), removing." % (c, lnk, c + "." + taskhash)) + sstate_clean_manifest(depdir + "/" + lnk, d, workdir) + os.unlink(depdir + "/" + c) + if os.path.lexists(depdir + "/" + c + ".complete"): + os.unlink(depdir + "/" + c + ".complete") + elif os.path.lexists(depdir + "/" + c): + os.unlink(depdir + "/" + c) + + os.symlink(c + "." + taskhash, depdir + "/" + c) + + d2 = d + destsysroot = recipesysroot + variant = '' + if setscenedeps[dep][2].startswith("virtual:multilib"): + variant = setscenedeps[dep][2].split(":")[2] + if variant != current_variant: + if variant not in multilibs: + multilibs[variant] = get_multilib_datastore(variant, d) + d2 = multilibs[variant] + destsysroot = d2.getVar("RECIPE_SYSROOT") + + native = False + if c.endswith("-native"): + manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}-%s.populate_sysroot" % c) + native = True + elif c.startswith("nativesdk-"): + manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${SDK_ARCH}_${SDK_OS}-%s.populate_sysroot" % c) + elif "-cross-" in c: + manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}_${TARGET_ARCH}-%s.populate_sysroot" % c) + native = True + elif "-crosssdk" in c: + manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-${BUILD_ARCH}_${SDK_ARCH}_${SDK_OS}-%s.populate_sysroot" % c) + native = True + else: + pkgarchs = ['${MACHINE_ARCH}'] + pkgarchs = pkgarchs + list(reversed(d2.getVar("PACKAGE_EXTRA_ARCHS").split())) + pkgarchs.append('allarch') + for pkgarch in pkgarchs: + manifest = d2.expand("${SSTATE_MANIFESTS}/manifest-%s-%s.populate_sysroot" % (pkgarch, c)) + if os.path.exists(manifest): + break + if not os.path.exists(manifest): + bb.warn("Manifest %s not found?" % manifest) + else: + newmanifest = collections.OrderedDict() + if native: + fm = fixme['native'] + targetdir = recipesysrootnative + else: + fm = fixme[''] + targetdir = destsysroot + with open(manifest, "r") as f: + manifests[dep] = manifest + for l in f: + l = l.strip() + if l.endswith("/fixmepath"): + fm.append(l) + continue + if l.endswith("/fixmepath.cmd"): + continue + dest = l.replace(stagingdir, "") + dest = targetdir + "/" + "/".join(dest.split("/")[3:]) + newmanifest[l] = dest + # Having multiple identical manifests in each sysroot eats diskspace so + # create a shared pool of them and hardlink if we can. + # We create the manifest in advance so that if something fails during installation, + # or the build is interrupted, subsequent exeuction can cleanup. 
+ sharedm = sharedmanifests + "/" + os.path.basename(taskmanifest) + if not os.path.exists(sharedm): + smlock = bb.utils.lockfile(sharedm + ".lock") + # Can race here. You'd think it just means we may not end up with all copies hardlinked to each other + # but python can lose file handles so we need to do this under a lock. + if not os.path.exists(sharedm): + with open(sharedm, 'w') as m: + for l in newmanifest: + dest = newmanifest[l] + m.write(dest.replace(workdir + "/", "") + "\n") + bb.utils.unlockfile(smlock) + try: + os.link(sharedm, taskmanifest) + except OSError as err: + if err.errno == errno.EXDEV: + bb.utils.copyfile(sharedm, taskmanifest) + else: + raise + # Finally actually install the files + for l in newmanifest: + dest = newmanifest[l] + if l.endswith("/"): + staging_copydir(l, targetdir, dest, seendirs) + continue + staging_copyfile(l, targetdir, dest, postinsts, seendirs) + + for f in fixme: + if f == '': + staging_processfixme(fixme[f], recipesysroot, recipesysroot, recipesysrootnative, d) + elif f == 'native': + staging_processfixme(fixme[f], recipesysrootnative, recipesysroot, recipesysrootnative, d) + else: + staging_processfixme(fixme[f], multilibs[f].getVar("RECIPE_SYSROOT"), recipesysroot, recipesysrootnative, d) + + for p in postinsts: + subprocess.check_output(p, shell=True) + + for dep in manifests: + c = setscenedeps[dep][0] + os.symlink(manifests[dep], depdir + "/" + c + ".complete") + + with open(taskindex, "w") as f: + for l in sorted(installed): + f.write(l + "\n") + + bb.utils.unlockfile(lock) +} +extend_recipe_sysroot[vardepsexclude] += "MACHINE_ARCH PACKAGE_EXTRA_ARCHS SDK_ARCH BUILD_ARCH SDK_OS BB_TASKDEPDATA" + +python do_prepare_recipe_sysroot () { + bb.build.exec_func("extend_recipe_sysroot", d) +} +addtask do_prepare_recipe_sysroot before do_configure after do_fetch + +# Clean out the recipe specific sysroots before do_fetch +# (use a prefunc so we can order before extend_recipe_sysroot if it gets added) +python clean_recipe_sysroot() { + return +} +clean_recipe_sysroot[cleandirs] += "${RECIPE_SYSROOT} ${RECIPE_SYSROOT_NATIVE}" +do_fetch[prefuncs] += "clean_recipe_sysroot" + +python staging_taskhandler() { + bbtasks = e.tasklist + for task in bbtasks: + deps = d.getVarFlag(task, "depends") + if deps and "populate_sysroot" in deps: + d.appendVarFlag(task, "prefuncs", " extend_recipe_sysroot") +} +staging_taskhandler[eventmask] = "bb.event.RecipeTaskPreProcess" +addhandler staging_taskhandler diff --git a/import-layers/yocto-poky/meta/classes/syslinux.bbclass b/import-layers/yocto-poky/meta/classes/syslinux.bbclass index 7778fd708..d6f882420 100644 --- a/import-layers/yocto-poky/meta/classes/syslinux.bbclass +++ b/import-layers/yocto-poky/meta/classes/syslinux.bbclass @@ -84,12 +84,12 @@ python build_syslinux_cfg () { import copy import sys - workdir = d.getVar('WORKDIR', True) + workdir = d.getVar('WORKDIR') if not workdir: bb.error("WORKDIR not defined, unable to package") return - labels = d.getVar('LABELS', True) + labels = d.getVar('LABELS') if not labels: bb.debug(1, "LABELS not defined, nothing to do") return @@ -98,7 +98,7 @@ python build_syslinux_cfg () { bb.debug(1, "No labels, nothing to do") return - cfile = d.getVar('SYSLINUX_CFG', True) + cfile = d.getVar('SYSLINUX_CFG') if not cfile: bb.fatal('Unable to read SYSLINUX_CFG') @@ -109,39 +109,39 @@ python build_syslinux_cfg () { cfgfile.write('# Automatically created by OE\n') - opts = d.getVar('SYSLINUX_OPTS', True) + opts = d.getVar('SYSLINUX_OPTS') if opts: for opt in opts.split(';'): 
cfgfile.write('%s\n' % opt) - allowoptions = d.getVar('SYSLINUX_ALLOWOPTIONS', True) + allowoptions = d.getVar('SYSLINUX_ALLOWOPTIONS') if allowoptions: cfgfile.write('ALLOWOPTIONS %s\n' % allowoptions) else: cfgfile.write('ALLOWOPTIONS 1\n') - syslinux_default_console = d.getVar('SYSLINUX_DEFAULT_CONSOLE', True) - syslinux_serial_tty = d.getVar('SYSLINUX_SERIAL_TTY', True) - syslinux_serial = d.getVar('SYSLINUX_SERIAL', True) + syslinux_default_console = d.getVar('SYSLINUX_DEFAULT_CONSOLE') + syslinux_serial_tty = d.getVar('SYSLINUX_SERIAL_TTY') + syslinux_serial = d.getVar('SYSLINUX_SERIAL') if syslinux_serial: cfgfile.write('SERIAL %s\n' % syslinux_serial) - menu = (d.getVar('AUTO_SYSLINUXMENU', True) == "1") + menu = (d.getVar('AUTO_SYSLINUXMENU') == "1") if menu and syslinux_serial: cfgfile.write('DEFAULT Graphics console %s\n' % (labels.split()[0])) else: cfgfile.write('DEFAULT %s\n' % (labels.split()[0])) - timeout = d.getVar('SYSLINUX_TIMEOUT', True) + timeout = d.getVar('SYSLINUX_TIMEOUT') if timeout: cfgfile.write('TIMEOUT %s\n' % timeout) else: cfgfile.write('TIMEOUT 50\n') - prompt = d.getVar('SYSLINUX_PROMPT', True) + prompt = d.getVar('SYSLINUX_PROMPT') if prompt: cfgfile.write('PROMPT %s\n' % prompt) else: @@ -151,38 +151,37 @@ python build_syslinux_cfg () { cfgfile.write('ui vesamenu.c32\n') cfgfile.write('menu title Select kernel options and boot kernel\n') cfgfile.write('menu tabmsg Press [Tab] to edit, [Return] to select\n') - splash = d.getVar('SYSLINUX_SPLASH', True) + splash = d.getVar('SYSLINUX_SPLASH') if splash: cfgfile.write('menu background splash.lss\n') for label in labels.split(): localdata = bb.data.createCopy(d) - overrides = localdata.getVar('OVERRIDES', True) + overrides = localdata.getVar('OVERRIDES') if not overrides: bb.fatal('OVERRIDES not defined') localdata.setVar('OVERRIDES', label + ':' + overrides) - bb.data.update_data(localdata) btypes = [ [ "", syslinux_default_console ] ] if menu and syslinux_serial: btypes = [ [ "Graphics console ", syslinux_default_console ], [ "Serial console ", syslinux_serial_tty ] ] - root= d.getVar('SYSLINUX_ROOT', True) + root= d.getVar('SYSLINUX_ROOT') if not root: bb.fatal('SYSLINUX_ROOT not defined') for btype in btypes: cfgfile.write('LABEL %s%s\nKERNEL /vmlinuz\n' % (btype[0], label)) - exargs = d.getVar('SYSLINUX_KERNEL_ARGS', True) + exargs = d.getVar('SYSLINUX_KERNEL_ARGS') if exargs: btype[1] += " " + exargs - append = localdata.getVar('APPEND', True) - initrd = localdata.getVar('INITRD', True) + append = localdata.getVar('APPEND') + initrd = localdata.getVar('INITRD') append = root + " " + append cfgfile.write('APPEND ') diff --git a/import-layers/yocto-poky/meta/classes/systemd-boot.bbclass b/import-layers/yocto-poky/meta/classes/systemd-boot.bbclass index 05244c7e5..959775992 100644 --- a/import-layers/yocto-poky/meta/classes/systemd-boot.bbclass +++ b/import-layers/yocto-poky/meta/classes/systemd-boot.bbclass @@ -4,9 +4,7 @@ # systemd-boot.bbclass - The "systemd-boot" is essentially the gummiboot merged into systemd. # The original standalone gummiboot project is dead without any more -# maintenance. As a start point, we replace all gummitboot occurrences -# with systemd-boot in gummiboot.bbclass to have a base version of this -# systemd-boot.bbclass. +# maintenance. 
# # Set EFI_PROVIDER = "systemd-boot" to use systemd-boot on your live images instead of grub-efi # (images built by image-live.bbclass or image-vm.bbclass) @@ -39,6 +37,8 @@ efi_populate() { install -d ${DEST}/loader install -d ${DEST}/loader/entries install -m 0644 ${DEPLOY_DIR_IMAGE}/${EFI_IMAGE} ${DEST}${EFIDIR}/${DEST_EFI_IMAGE} + EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') + printf 'fs0:%s\%s\n' "$EFIPATH" "$DEST_EFI_IMAGE" >${DEST}/startup.nsh install -m 0644 ${SYSTEMD_BOOT_CFG} ${DEST}/loader/loader.conf for i in ${SYSTEMD_BOOT_ENTRIES}; do install -m 0644 ${i} ${DEST}/loader/entries @@ -50,6 +50,7 @@ efi_iso_populate() { efi_populate $iso_dir mkdir -p ${EFIIMGDIR}/${EFIDIR} cp $iso_dir/${EFIDIR}/* ${EFIIMGDIR}${EFIDIR} + cp -r $iso_dir/loader ${EFIIMGDIR} cp $iso_dir/vmlinuz ${EFIIMGDIR} EFIPATH=$(echo "${EFIDIR}" | sed 's/\//\\/g') echo "fs0:${EFIPATH}\\${DEST_EFI_IMAGE}" > ${EFIIMGDIR}/startup.nsh @@ -63,8 +64,8 @@ efi_hddimg_populate() { } python build_efi_cfg() { - s = d.getVar("S", True) - labels = d.getVar('LABELS', True) + s = d.getVar("S") + labels = d.getVar('LABELS') if not labels: bb.debug(1, "LABELS not defined, nothing to do") return @@ -73,7 +74,10 @@ python build_efi_cfg() { bb.debug(1, "No labels, nothing to do") return - cfile = d.getVar('SYSTEMD_BOOT_CFG', True) + cfile = d.getVar('SYSTEMD_BOOT_CFG') + cdir = os.path.dirname(cfile) + if not os.path.exists(cdir): + os.makedirs(cdir) try: cfgfile = open(cfile, 'w') except OSError: @@ -81,7 +85,7 @@ python build_efi_cfg() { cfgfile.write('# Automatically created by OE\n') cfgfile.write('default %s\n' % (labels.split()[0])) - timeout = d.getVar('SYSTEMD_BOOT_TIMEOUT', True) + timeout = d.getVar('SYSTEMD_BOOT_TIMEOUT') if timeout: cfgfile.write('timeout %s\n' % timeout) else: @@ -91,7 +95,7 @@ python build_efi_cfg() { for label in labels.split(): localdata = d.createCopy() - overrides = localdata.getVar('OVERRIDES', True) + overrides = localdata.getVar('OVERRIDES') if not overrides: bb.fatal('OVERRIDES not defined') @@ -102,13 +106,12 @@ python build_efi_cfg() { except OSError: bb.fatal('Unable to open %s' % entryfile) localdata.setVar('OVERRIDES', label + ':' + overrides) - bb.data.update_data(localdata) entrycfg.write('title %s\n' % label) entrycfg.write('linux /vmlinuz\n') - append = localdata.getVar('APPEND', True) - initrd = localdata.getVar('INITRD', True) + append = localdata.getVar('APPEND') + initrd = localdata.getVar('INITRD') if initrd: entrycfg.write('initrd /initrd\n') diff --git a/import-layers/yocto-poky/meta/classes/systemd.bbclass b/import-layers/yocto-poky/meta/classes/systemd.bbclass index 4ea1f45e9..c4b4bb9b7 100644 --- a/import-layers/yocto-poky/meta/classes/systemd.bbclass +++ b/import-layers/yocto-poky/meta/classes/systemd.bbclass @@ -17,6 +17,7 @@ python __anonymous() { # files. 
if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d): d.appendVar("DEPENDS", " systemd-systemctl-native") + d.appendVar("PACKAGE_WRITE_DEPS", " systemd-systemctl-native") if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d): d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1") } @@ -29,6 +30,10 @@ if [ -n "$D" ]; then fi if type systemctl >/dev/null 2>/dev/null; then + if [ -z "$D" ]; then + systemctl daemon-reload + fi + systemctl $OPTS ${SYSTEMD_AUTO_ENABLE} ${SYSTEMD_SERVICE} if [ -z "$D" -a "${SYSTEMD_AUTO_ENABLE}" = "enable" ]; then @@ -65,14 +70,14 @@ python systemd_populate_packages() { return def get_package_var(d, var, pkg): - val = (d.getVar('%s_%s' % (var, pkg), True) or "").strip() + val = (d.getVar('%s_%s' % (var, pkg)) or "").strip() if val == "": - val = (d.getVar(var, True) or "").strip() + val = (d.getVar(var) or "").strip() return val # Check if systemd-packages already included in PACKAGES def systemd_check_package(pkg_systemd): - packages = d.getVar('PACKAGES', True) + packages = d.getVar('PACKAGES') if not pkg_systemd in packages.split(): bb.error('%s does not appear in package list, please add it' % pkg_systemd) @@ -84,25 +89,24 @@ python systemd_populate_packages() { # variable. localdata = d.createCopy() localdata.prependVar("OVERRIDES", pkg + ":") - bb.data.update_data(localdata) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) + postinst = d.getVar('pkg_postinst_%s' % pkg) if not postinst: postinst = '#!/bin/sh\n' - postinst += localdata.getVar('systemd_postinst', True) + postinst += localdata.getVar('systemd_postinst') d.setVar('pkg_postinst_%s' % pkg, postinst) - prerm = d.getVar('pkg_prerm_%s' % pkg, True) + prerm = d.getVar('pkg_prerm_%s' % pkg) if not prerm: prerm = '#!/bin/sh\n' - prerm += localdata.getVar('systemd_prerm', True) + prerm += localdata.getVar('systemd_prerm') d.setVar('pkg_prerm_%s' % pkg, prerm) # Add files to FILES_*-systemd if existent and not already done def systemd_append_file(pkg_systemd, file_append): appended = False - if os.path.exists(oe.path.join(d.getVar("D", True), file_append)): + if os.path.exists(oe.path.join(d.getVar("D"), file_append)): var_name = "FILES_" + pkg_systemd files = d.getVar(var_name, False) or "" if file_append not in files.split(): @@ -114,7 +118,7 @@ python systemd_populate_packages() { def systemd_add_files_and_parse(pkg_systemd, path, service, keys): # avoid infinite recursion if systemd_append_file(pkg_systemd, oe.path.join(path, service)): - fullpath = oe.path.join(d.getVar("D", True), path, service) + fullpath = oe.path.join(d.getVar("D"), path, service) if service.find('.service') != -1: # for *.service add *@.service service_base = service.replace('.service', '') @@ -137,9 +141,9 @@ python systemd_populate_packages() { # Check service-files and call systemd_add_files_and_parse for each entry def systemd_check_services(): - searchpaths = [oe.path.join(d.getVar("sysconfdir", True), "systemd", "system"),] - searchpaths.append(d.getVar("systemd_system_unitdir", True)) - systemd_packages = d.getVar('SYSTEMD_PACKAGES', True) + searchpaths = [oe.path.join(d.getVar("sysconfdir"), "systemd", "system"),] + searchpaths.append(d.getVar("systemd_system_unitdir")) + systemd_packages = d.getVar('SYSTEMD_PACKAGES') keys = 'Also' # scan for all in SYSTEMD_SERVICE[] @@ -154,11 +158,11 @@ python systemd_populate_packages() { base = re.sub('@[^.]+.', '@.', service) for path in searchpaths: - if os.path.exists(oe.path.join(d.getVar("D", True), path, service)): + if 
os.path.exists(oe.path.join(d.getVar("D"), path, service)): path_found = path break elif base is not None: - if os.path.exists(oe.path.join(d.getVar("D", True), path, base)): + if os.path.exists(oe.path.join(d.getVar("D"), path, base)): path_found = path break @@ -168,10 +172,10 @@ python systemd_populate_packages() { bb.fatal("SYSTEMD_SERVICE_%s value %s does not exist" % (pkg_systemd, service)) # Run all modifications once when creating package - if os.path.exists(d.getVar("D", True)): - for pkg in d.getVar('SYSTEMD_PACKAGES', True).split(): + if os.path.exists(d.getVar("D")): + for pkg in d.getVar('SYSTEMD_PACKAGES').split(): systemd_check_package(pkg) - if d.getVar('SYSTEMD_SERVICE_' + pkg, True): + if d.getVar('SYSTEMD_SERVICE_' + pkg): systemd_generate_package_scripts(pkg) systemd_check_services() } @@ -181,7 +185,7 @@ PACKAGESPLITFUNCS_prepend = "systemd_populate_packages " python rm_systemd_unitdir (){ import shutil if not bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d): - systemd_unitdir = oe.path.join(d.getVar("D", True), d.getVar('systemd_unitdir', True)) + systemd_unitdir = oe.path.join(d.getVar("D"), d.getVar('systemd_unitdir')) if os.path.exists(systemd_unitdir): shutil.rmtree(systemd_unitdir) systemd_libdir = os.path.dirname(systemd_unitdir) @@ -192,12 +196,12 @@ do_install[postfuncs] += "rm_systemd_unitdir " python rm_sysvinit_initddir (){ import shutil - sysv_initddir = oe.path.join(d.getVar("D", True), (d.getVar('INIT_D_DIR', True) or "/etc/init.d")) + sysv_initddir = oe.path.join(d.getVar("D"), (d.getVar('INIT_D_DIR') or "/etc/init.d")) if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and \ not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d) and \ os.path.exists(sysv_initddir): - systemd_system_unitdir = oe.path.join(d.getVar("D", True), d.getVar('systemd_system_unitdir', True)) + systemd_system_unitdir = oe.path.join(d.getVar("D"), d.getVar('systemd_system_unitdir')) # If systemd_system_unitdir contains anything, delete sysv_initddir if (os.path.exists(systemd_system_unitdir) and os.listdir(systemd_system_unitdir)): diff --git a/import-layers/yocto-poky/meta/classes/terminal.bbclass b/import-layers/yocto-poky/meta/classes/terminal.bbclass index a94f755a4..a27e10c6e 100644 --- a/import-layers/yocto-poky/meta/classes/terminal.bbclass +++ b/import-layers/yocto-poky/meta/classes/terminal.bbclass @@ -3,7 +3,7 @@ OE_TERMINAL[type] = 'choice' OE_TERMINAL[choices] = 'auto none \ ${@oe_terminal_prioritized()}' -OE_TERMINAL_EXPORTS += 'EXTRA_OEMAKE' +OE_TERMINAL_EXPORTS += 'EXTRA_OEMAKE CACHED_CONFIGUREVARS CONFIGUREOPTS EXTRA_OECONF' OE_TERMINAL_EXPORTS[type] = 'list' XAUTHORITY ?= "${HOME}/.Xauthority" @@ -19,9 +19,9 @@ def emit_terminal_func(command, envdata, d): envdata.setVar(cmd_func, 'exec ' + command) envdata.setVarFlag(cmd_func, 'func', '1') - runfmt = d.getVar('BB_RUNFMT', True) or "run.{func}.{pid}" + runfmt = d.getVar('BB_RUNFMT') or "run.{func}.{pid}" runfile = runfmt.format(func=cmd_func, task=cmd_func, taskfunc=cmd_func, pid=os.getpid()) - runfile = os.path.join(d.getVar('T', True), runfile) + runfile = os.path.join(d.getVar('T'), runfile) bb.utils.mkdirhier(os.path.dirname(runfile)) with open(runfile, 'w') as script: @@ -44,7 +44,7 @@ def oe_terminal(command, title, d): envdata.setVarFlag(v, 'export', '1') for export in oe.data.typed_value('OE_TERMINAL_EXPORTS', d): - value = d.getVar(export, True) + value = d.getVar(export) if value is not None: os.environ[export] = str(value) envdata.setVar(export, str(value)) 
@@ -60,12 +60,17 @@ def oe_terminal(command, title, d): for key in origbbenv: if key in envdata: continue - value = origbbenv.getVar(key, True) + value = origbbenv.getVar(key) if value is not None: os.environ[key] = str(value) envdata.setVar(key, str(value)) envdata.setVarFlag(key, 'export', '1') + # Use original PATH as a fallback + path = d.getVar('PATH') + ":" + origbbenv.getVar('PATH') + os.environ['PATH'] = path + envdata.setVar('PATH', path) + # A complex PS1 might need more escaping of chars. # Lets not export PS1 instead. envdata.delVar("PS1") @@ -88,8 +93,12 @@ def oe_terminal(command, title, d): try: oe.terminal.spawn_preferred(command, title, None, d) - except oe.terminal.NoSupportedTerminals: - bb.fatal('No valid terminal found, unable to open devshell') + except oe.terminal.NoSupportedTerminals as nosup: + nosup.terms.remove("false") + cmds = '\n\t'.join(nosup.terms).replace("{command}", + "do_terminal").replace("{title}", title) + bb.fatal('No valid terminal found, unable to open devshell.\n' + + 'Tried the following commands:\n\t%s' % cmds) except oe.terminal.ExecutionError as exc: bb.fatal('Unable to spawn terminal %s: %s' % (terminal, exc)) diff --git a/import-layers/yocto-poky/meta/classes/testexport.bbclass b/import-layers/yocto-poky/meta/classes/testexport.bbclass index 514702082..56edda994 100644 --- a/import-layers/yocto-poky/meta/classes/testexport.bbclass +++ b/import-layers/yocto-poky/meta/classes/testexport.bbclass @@ -33,162 +33,136 @@ TEST_EXPORT_DEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'cpio-nativ TEST_EXPORT_DEPENDS += "${@bb.utils.contains('TEST_EXPORT_SDK_ENABLED', '1', 'testexport-tarball:do_populate_sdk', '', d)}" TEST_EXPORT_LOCK = "${TMPDIR}/testimage.lock" -python do_testexport() { - testexport_main(d) -} - addtask testexport do_testexport[nostamp] = "1" do_testexport[depends] += "${TEST_EXPORT_DEPENDS} ${TESTIMAGEDEPENDS}" do_testexport[lockfiles] += "${TEST_EXPORT_LOCK}" -def exportTests(d,tc): +python do_testexport() { + testexport_main(d) +} + +def testexport_main(d): import json + import logging + + from oeqa.runtime.context import OERuntimeTestContext + from oeqa.runtime.context import OERuntimeTestContextExecutor + + image_name = ("%s/%s" % (d.getVar('DEPLOY_DIR_IMAGE'), + d.getVar('IMAGE_LINK_NAME'))) + + tdname = "%s.testdata.json" % image_name + td = json.load(open(tdname, "r")) + + logger = logging.getLogger("BitBake") + + target = OERuntimeTestContextExecutor.getTarget( + d.getVar("TEST_TARGET"), None, d.getVar("TEST_TARGET_IP"), + d.getVar("TEST_SERVER_IP")) + + host_dumper = OERuntimeTestContextExecutor.getHostDumper( + d.getVar("testimage_dump_host"), d.getVar("TESTIMAGE_DUMP_DIR")) + + image_manifest = "%s.manifest" % image_name + image_packages = OERuntimeTestContextExecutor.readPackagesManifest(image_manifest) + + extract_dir = d.getVar("TEST_EXTRACTED_DIR") + + tc = OERuntimeTestContext(td, logger, target, host_dumper, + image_packages, extract_dir) + + copy_needed_files(d, tc) + +def copy_needed_files(d, tc): import shutil - import pkgutil - import re import oe.path - exportpath = d.getVar("TEST_EXPORT_DIR", True) - - savedata = {} - savedata["d"] = {} - savedata["target"] = {} - savedata["target"]["ip"] = tc.target.ip or d.getVar("TEST_TARGET_IP", True) - savedata["target"]["server_ip"] = tc.target.server_ip or d.getVar("TEST_SERVER_IP", True) - - keys = [ key for key in d.keys() if not key.startswith("_") and not key.startswith("BB") \ - and not key.startswith("B_pn") and not key.startswith("do_") and not 
d.getVarFlag(key, "func", True)] - for key in keys: - try: - savedata["d"][key] = d.getVar(key, True) - except bb.data_smart.ExpansionError: - # we don't care about those anyway - pass - - json_file = os.path.join(exportpath, "testdata.json") - with open(json_file, "w") as f: - json.dump(savedata, f, skipkeys=True, indent=4, sort_keys=True) - - # Replace absolute path with relative in the file - exclude_path = os.path.join(d.getVar("COREBASE", True),'meta','lib','oeqa') - f1 = open(json_file,'r').read() - f2 = open(json_file,'w') - m = f1.replace(exclude_path,'oeqa') - f2.write(m) - f2.close() - - # now start copying files - # we'll basically copy everything under meta/lib/oeqa, with these exceptions - # - oeqa/targetcontrol.py - not needed - # - oeqa/selftest - something else - # That means: - # - all tests from oeqa/runtime defined in TEST_SUITES (including from other layers) - # - the contents of oeqa/utils and oeqa/runtime/files - # - oeqa/oetest.py and oeqa/runexport.py (this will get copied to exportpath not exportpath/oeqa) - # - __init__.py files - bb.utils.mkdirhier(os.path.join(exportpath, "oeqa/runtime/files")) - bb.utils.mkdirhier(os.path.join(exportpath, "oeqa/utils")) - # copy test modules, this should cover tests in other layers too - bbpath = d.getVar("BBPATH", True).split(':') - for t in tc.testslist: - isfolder = False - if re.search("\w+\.\w+\.test_\S+", t): - t = '.'.join(t.split('.')[:3]) - mod = pkgutil.get_loader(t) - # More depth than usual? - if (t.count('.') > 2): - for p in bbpath: - foldername = os.path.join(p, 'lib', os.sep.join(t.split('.')).rsplit(os.sep, 1)[0]) - if os.path.isdir(foldername): - isfolder = True - target_folder = os.path.join(exportpath, "oeqa", "runtime", os.path.basename(foldername)) - if not os.path.exists(target_folder): - oe.path.copytree(foldername, target_folder) - if not isfolder: - shutil.copy2(mod.path, os.path.join(exportpath, "oeqa/runtime")) - json_file = "%s.json" % mod.path.rsplit(".", 1)[0] - if os.path.isfile(json_file): - shutil.copy2(json_file, os.path.join(exportpath, "oeqa/runtime")) - # Get meta layer - for layer in d.getVar("BBLAYERS", True).split(): - if os.path.basename(layer) == "meta": - meta_layer = layer - break - # copy oeqa/oetest.py and oeqa/runexported.py - oeqadir = os.path.join(meta_layer, "lib/oeqa") - shutil.copy2(os.path.join(oeqadir, "oetest.py"), os.path.join(exportpath, "oeqa")) - shutil.copy2(os.path.join(oeqadir, "runexported.py"), exportpath) - # copy oeqa/utils/*.py - for root, dirs, files in os.walk(os.path.join(oeqadir, "utils")): - for f in files: - if f.endswith(".py"): - shutil.copy2(os.path.join(root, f), os.path.join(exportpath, "oeqa/utils")) - # copy oeqa/runtime/files/* - for root, dirs, files in os.walk(os.path.join(oeqadir, "runtime/files")): - for f in files: - shutil.copy2(os.path.join(root, f), os.path.join(exportpath, "oeqa/runtime/files")) + from oeqa.utils.package_manager import _get_json_file + from oeqa.core.utils.test import getSuiteCasesFiles + + export_path = d.getVar('TEST_EXPORT_DIR') + corebase_path = d.getVar('COREBASE') + + # Clean everything before starting + oe.path.remove(export_path) + bb.utils.mkdirhier(os.path.join(export_path, 'lib', 'oeqa')) + + # The source of files to copy are relative to 'COREBASE' directory + # The destination is relative to 'TEST_EXPORT_DIR' + # Because we are squashing the libraries, we need to remove + # the layer/script directory + files_to_copy = [ os.path.join('meta', 'lib', 'oeqa', 'core'), + os.path.join('meta', 'lib', 'oeqa', 
'runtime'), + os.path.join('meta', 'lib', 'oeqa', 'files'), + os.path.join('meta', 'lib', 'oeqa', 'utils'), + os.path.join('scripts', 'oe-test'), + os.path.join('scripts', 'lib', 'argparse_oe.py'), + os.path.join('scripts', 'lib', 'scriptutils.py'), ] + + for f in files_to_copy: + src = os.path.join(corebase_path, f) + dst = os.path.join(export_path, f.split('/', 1)[-1]) + if os.path.isdir(src): + oe.path.copytree(src, dst) + else: + shutil.copy2(src, dst) + + # Remove cases and just copy the ones specified + cases_path = os.path.join(export_path, 'lib', 'oeqa', 'runtime', 'cases') + oe.path.remove(cases_path) + bb.utils.mkdirhier(cases_path) + test_paths = get_runtime_paths(d) + test_modules = d.getVar('TEST_SUITES') + tc.loadTests(test_paths, modules=test_modules) + for f in getSuiteCasesFiles(tc.suites): + shutil.copy2(f, cases_path) + json_file = _get_json_file(f) + if json_file: + shutil.copy2(json_file, cases_path) + + # Copy test data + image_name = ("%s/%s" % (d.getVar('DEPLOY_DIR_IMAGE'), + d.getVar('IMAGE_LINK_NAME'))) + image_manifest = "%s.manifest" % image_name + tdname = "%s.testdata.json" % image_name + test_data_path = os.path.join(export_path, 'data') + bb.utils.mkdirhier(test_data_path) + shutil.copy2(image_manifest, os.path.join(test_data_path, 'manifest')) + shutil.copy2(tdname, os.path.join(test_data_path, 'testdata.json')) # Create tar file for common parts of testexport - create_tarball(d, "testexport.tar.gz", d.getVar("TEST_EXPORT_DIR", True)) + create_tarball(d, "testexport.tar.gz", d.getVar("TEST_EXPORT_DIR")) # Copy packages needed for runtime testing - test_pkg_dir = d.getVar("TEST_NEEDED_PACKAGES_DIR", True) - if os.listdir(test_pkg_dir): - export_pkg_dir = os.path.join(d.getVar("TEST_EXPORT_DIR", True), "packages") + package_extraction(d, tc.suites) + test_pkg_dir = d.getVar("TEST_NEEDED_PACKAGES_DIR") + if os.path.isdir(test_pkg_dir) and os.listdir(test_pkg_dir): + export_pkg_dir = os.path.join(d.getVar("TEST_EXPORT_DIR"), "packages") oe.path.copytree(test_pkg_dir, export_pkg_dir) # Create tar file for packages needed by the DUT - create_tarball(d, "testexport_packages_%s.tar.gz" % d.getVar("MACHINE", True), export_pkg_dir) + create_tarball(d, "testexport_packages_%s.tar.gz" % d.getVar("MACHINE"), export_pkg_dir) # Copy SDK - if d.getVar("TEST_EXPORT_SDK_ENABLED", True) == "1": - sdk_deploy = d.getVar("SDK_DEPLOY", True) - tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME", True) + if d.getVar("TEST_EXPORT_SDK_ENABLED") == "1": + sdk_deploy = d.getVar("SDK_DEPLOY") + tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME") tarball_path = os.path.join(sdk_deploy, tarball_name) - export_sdk_dir = os.path.join(d.getVar("TEST_EXPORT_DIR", True), - d.getVar("TEST_EXPORT_SDK_DIR", True)) + export_sdk_dir = os.path.join(d.getVar("TEST_EXPORT_DIR"), + d.getVar("TEST_EXPORT_SDK_DIR")) bb.utils.mkdirhier(export_sdk_dir) shutil.copy2(tarball_path, export_sdk_dir) # Create tar file for the sdk - create_tarball(d, "testexport_sdk_%s.tar.gz" % d.getVar("SDK_ARCH", True), export_sdk_dir) - - bb.plain("Exported tests to: %s" % exportpath) + create_tarball(d, "testexport_sdk_%s.tar.gz" % d.getVar("SDK_ARCH"), export_sdk_dir) -def testexport_main(d): - from oeqa.oetest import ExportTestContext - from oeqa.targetcontrol import get_target_controller - from oeqa.utils.dump import get_host_dumper - - test_create_extract_dirs(d) - export_dir = d.getVar("TEST_EXPORT_DIR", True) - bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True)) - bb.utils.remove(export_dir, recurse=True) - 
bb.utils.mkdirhier(export_dir) - - # the robot dance - target = get_target_controller(d) - - # test context - tc = ExportTestContext(d, target) - - # this is a dummy load of tests - # we are doing that to find compile errors in the tests themselves - # before booting the image - try: - tc.loadTests() - except Exception as e: - import traceback - bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) - - tc.extract_packages() - exportTests(d,tc) + bb.plain("Exported tests to: %s" % export_path) def create_tarball(d, tar_name, src_dir): import tarfile - tar_path = os.path.join(d.getVar("TEST_EXPORT_DIR", True), tar_name) + tar_path = os.path.join(d.getVar("TEST_EXPORT_DIR"), tar_name) current_dir = os.getcwd() src_dir = src_dir.rstrip('/') dir_name = os.path.dirname(src_dir) @@ -200,7 +174,4 @@ def create_tarball(d, tar_name, src_dir): tar.close() os.chdir(current_dir) - -testexport_main[vardepsexclude] =+ "BB_ORIGENV" - inherit testimage diff --git a/import-layers/yocto-poky/meta/classes/testimage.bbclass b/import-layers/yocto-poky/meta/classes/testimage.bbclass index 6b6781d86..fb214604a 100644 --- a/import-layers/yocto-poky/meta/classes/testimage.bbclass +++ b/import-layers/yocto-poky/meta/classes/testimage.bbclass @@ -35,9 +35,10 @@ TEST_NEEDED_PACKAGES_DIR ?= "${WORKDIR}/testimage/packages" TEST_EXTRACTED_DIR ?= "${TEST_NEEDED_PACKAGES_DIR}/extracted" TEST_PACKAGED_DIR ?= "${TEST_NEEDED_PACKAGES_DIR}/packaged" -RPMTESTSUITE = "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'smart rpm', '', d)}" +RPMTESTSUITE = "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'dnf rpm', '', d)}" +SYSTEMDSUITE = "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}" MINTESTSUITE = "ping" -NETTESTSUITE = "${MINTESTSUITE} ssh df date scp syslog" +NETTESTSUITE = "${MINTESTSUITE} ssh df date scp oe_syslog ${SYSTEMDSUITE}" DEVTESTSUITE = "gcc kernelmodule ldd" DEFAULT_TEST_SUITES = "${MINTESTSUITE} auto" @@ -48,11 +49,11 @@ DEFAULT_TEST_SUITES_pn-core-image-x11 = "${MINTESTSUITE}" DEFAULT_TEST_SUITES_pn-core-image-lsb = "${NETTESTSUITE} pam parselogs ${RPMTESTSUITE}" DEFAULT_TEST_SUITES_pn-core-image-sato = "${NETTESTSUITE} connman xorg parselogs ${RPMTESTSUITE} \ ${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'python', '', d)}" -DEFAULT_TEST_SUITES_pn-core-image-sato-sdk = "${NETTESTSUITE} connman xorg perl python \ - ${DEVTESTSUITE} parselogs ${RPMTESTSUITE}" +DEFAULT_TEST_SUITES_pn-core-image-sato-sdk = "${NETTESTSUITE} buildcpio buildiptables buildgalculator \ + connman ${DEVTESTSUITE} logrotate perl parselogs python ${RPMTESTSUITE} xorg" DEFAULT_TEST_SUITES_pn-core-image-lsb-dev = "${NETTESTSUITE} pam perl python parselogs ${RPMTESTSUITE}" -DEFAULT_TEST_SUITES_pn-core-image-lsb-sdk = "${NETTESTSUITE} buildcvs buildiptables buildgalculator \ - connman ${DEVTESTSUITE} pam perl python parselogs ${RPMTESTSUITE}" +DEFAULT_TEST_SUITES_pn-core-image-lsb-sdk = "${NETTESTSUITE} buildcpio buildiptables buildgalculator \ + connman ${DEVTESTSUITE} logrotate pam parselogs perl python ${RPMTESTSUITE}" DEFAULT_TEST_SUITES_pn-meta-toolchain = "auto" # aarch64 has no graphics @@ -60,7 +61,7 @@ DEFAULT_TEST_SUITES_remove_aarch64 = "xorg" # qemumips is quite slow and has reached the timeout limit several times on the YP build cluster, # mitigate this by removing build tests for qemumips machines. 
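For readers unfamiliar with the mechanism used in the MIPSREMOVE hunk that follows: a _remove override strips the listed items from a variable's value at expansion time, so the qemumips machines end up with the default suites minus the build tests. A minimal illustration with made-up values (BitBake conf syntax, not part of the patch):

    DEFAULT_TEST_SUITES = "ping ssh buildcpio buildiptables parselogs"
    MIPSREMOVE ??= "buildcpio buildiptables"
    DEFAULT_TEST_SUITES_remove_qemumips = "${MIPSREMOVE}"
    # with MACHINE = "qemumips" this expands to "ping ssh parselogs"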
-MIPSREMOVE ??= "buildcvs buildiptables buildgalculator" +MIPSREMOVE ??= "buildcpio buildiptables buildgalculator" DEFAULT_TEST_SUITES_remove_qemumips = "${MIPSREMOVE}" DEFAULT_TEST_SUITES_remove_qemumips64 = "${MIPSREMOVE}" @@ -70,20 +71,22 @@ TEST_QEMUBOOT_TIMEOUT ?= "1000" TEST_TARGET ?= "qemu" TESTIMAGEDEPENDS = "" -TESTIMAGEDEPENDS_qemuall = "qemu-native:do_populate_sysroot qemu-helper-native:do_populate_sysroot" +TESTIMAGEDEPENDS_qemuall = "qemu-native:do_populate_sysroot qemu-helper-native:do_populate_sysroot qemu-helper-native:do_addto_recipe_sysroot" TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'cpio-native:do_populate_sysroot', '', d)}" TESTIMAGEDEPENDS_qemuall += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'cpio-native:do_populate_sysroot', '', d)}" -TESTIMAGEDEPENDS_qemuall += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'createrepo-native:do_populate_sysroot', '', d)}" -TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'python-smartpm-native:do_populate_sysroot', '', d)}" +TESTIMAGEDEPENDS_qemuall += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'createrepo-c-native:do_populate_sysroot', '', d)}" +TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'dnf-native:do_populate_sysroot', '', d)}" TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'ipk', 'opkg-utils-native:do_populate_sysroot', '', d)}" TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'deb', 'apt-native:do_populate_sysroot', '', d)}" - +TESTIMAGEDEPENDS += "${@bb.utils.contains('IMAGE_PKGTYPE', 'rpm', 'createrepo-c-native:do_populate_sysroot', '', d)}" TESTIMAGELOCK = "${TMPDIR}/testimage.lock" TESTIMAGELOCK_qemuall = "" TESTIMAGE_DUMP_DIR ?= "/tmp/oe-saved-tests/" +TESTIMAGE_UPDATE_VARS ?= "DL_DIR WORKDIR DEPLOY_DIR" + testimage_dump_target () { top -bn1 ps @@ -112,6 +115,13 @@ testimage_dump_host () { } python do_testimage() { + + testimage_sanity(d) + + if (d.getVar('IMAGE_PKGTYPE') == 'rpm' + and 'dnf' in d.getVar('TEST_SUITES')): + create_rpm_index(d) + testimage_main(d) } @@ -120,72 +130,244 @@ do_testimage[nostamp] = "1" do_testimage[depends] += "${TESTIMAGEDEPENDS}" do_testimage[lockfiles] += "${TESTIMAGELOCK}" +def testimage_sanity(d): + if (d.getVar('TEST_TARGET') == 'simpleremote' + and (not d.getVar('TEST_TARGET_IP') + or not d.getVar('TEST_SERVER_IP'))): + bb.fatal('When TEST_TARGET is set to "simpleremote" ' + 'TEST_TARGET_IP and TEST_SERVER_IP are needed too.') + def testimage_main(d): - import unittest import os - import oeqa.runtime - import time + import json import signal - from oeqa.oetest import ImageTestContext - from oeqa.targetcontrol import get_target_controller - from oeqa.utils.dump import get_host_dumper + import logging + + from bb.utils import export_proxies + from oeqa.core.utils.misc import updateTestData + from oeqa.runtime.context import OERuntimeTestContext + from oeqa.runtime.context import OERuntimeTestContextExecutor + from oeqa.core.target.qemu import supported_fstypes + from oeqa.core.utils.test import getSuiteCases + from oeqa.utils import make_logger_bitbake_compatible + + def sigterm_exception(signum, stackframe): + """ + Catch SIGTERM from worker in order to stop qemu. 
+ """ + raise RuntimeError + + logger = make_logger_bitbake_compatible(logging.getLogger("BitBake")) + pn = d.getVar("PN") + + bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR")) + + image_name = ("%s/%s" % (d.getVar('DEPLOY_DIR_IMAGE'), + d.getVar('IMAGE_LINK_NAME'))) + + tdname = "%s.testdata.json" % image_name + try: + td = json.load(open(tdname, "r")) + except (FileNotFoundError) as err: + bb.fatal('File %s Not Found. Have you built the image with INHERIT+="testimage" in the conf/local.conf?' % tdname) + + # Some variables need to be updates (mostly paths) with the + # ones of the current environment because some tests require them. + updateTestData(d, td, d.getVar('TESTIMAGE_UPDATE_VARS').split()) + + image_manifest = "%s.manifest" % image_name + image_packages = OERuntimeTestContextExecutor.readPackagesManifest(image_manifest) + + extract_dir = d.getVar("TEST_EXTRACTED_DIR") + + # Get machine + machine = d.getVar("MACHINE") + + # Get rootfs + fstypes = [fs for fs in d.getVar('IMAGE_FSTYPES').split(' ') + if fs in supported_fstypes] + if not fstypes: + bb.fatal('Unsupported image type built. Add a comptible image to ' + 'IMAGE_FSTYPES. Supported types: %s' % + ', '.join(supported_fstypes)) + rootfs = '%s.%s' % (image_name, fstypes[0]) + + # Get tmpdir (not really used, just for compatibility) + tmpdir = d.getVar("TMPDIR") + + # Get deploy_dir_image (not really used, just for compatibility) + dir_image = d.getVar("DEPLOY_DIR_IMAGE") + + # Get bootlog + bootlog = os.path.join(d.getVar("TEST_LOG_DIR"), + 'qemu_boot_log.%s' % d.getVar('DATETIME')) + + # Get display + display = d.getVar("BB_ORIGENV").getVar("DISPLAY") - pn = d.getVar("PN", True) - bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True)) - test_create_extract_dirs(d) + # Get kernel + kernel_name = ('%s-%s.bin' % (d.getVar("KERNEL_IMAGETYPE"), machine)) + kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), kernel_name) + + # Get boottime + boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")) + + # Get use_kvm + qemu_use_kvm = d.getVar("QEMU_USE_KVM") + if qemu_use_kvm and \ + (qemu_use_kvm == 'True' and 'x86' in machine or \ + d.getVar('MACHINE') in qemu_use_kvm.split()): + kvm = True + else: + kvm = False + + # TODO: We use the current implementatin of qemu runner because of + # time constrains, qemu runner really needs a refactor too. + target_kwargs = { 'machine' : machine, + 'rootfs' : rootfs, + 'tmpdir' : tmpdir, + 'dir_image' : dir_image, + 'display' : display, + 'kernel' : kernel, + 'boottime' : boottime, + 'bootlog' : bootlog, + 'kvm' : kvm, + } + + # TODO: Currently BBPATH is needed for custom loading of targets. + # It would be better to find these modules using instrospection. 
+ target_kwargs['target_modules_path'] = d.getVar('BBPATH') + + # runtime use network for download projects for build + export_proxies(d) # we need the host dumper in test context - host_dumper = get_host_dumper(d) + host_dumper = OERuntimeTestContextExecutor.getHostDumper( + d.getVar("testimage_dump_host"), + d.getVar("TESTIMAGE_DUMP_DIR")) # the robot dance - target = get_target_controller(d) + target = OERuntimeTestContextExecutor.getTarget( + d.getVar("TEST_TARGET"), None, d.getVar("TEST_TARGET_IP"), + d.getVar("TEST_SERVER_IP"), **target_kwargs) # test context - tc = ImageTestContext(d, target, host_dumper) + tc = OERuntimeTestContext(td, logger, target, host_dumper, + image_packages, extract_dir) - # this is a dummy load of tests - # we are doing that to find compile errors in the tests themselves - # before booting the image - try: - tc.loadTests() - except Exception as e: - import traceback - bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) + # Load tests before starting the target + test_paths = get_runtime_paths(d) + test_modules = d.getVar('TEST_SUITES') + tc.loadTests(test_paths, modules=test_modules) - tc.extract_packages() - target.deploy() + if not getSuiteCases(tc.suites): + bb.fatal('Empty test suite, please verify TEST_SUITES variable') + + package_extraction(d, tc.suites) + + bootparams = None + if d.getVar('VIRTUAL-RUNTIME_init_manager', '') == 'systemd': + # Add systemd.log_level=debug to enable systemd debug logging + bootparams = 'systemd.log_target=console' + + results = None + orig_sigterm_handler = signal.signal(signal.SIGTERM, sigterm_exception) try: - bootparams = None - if d.getVar('VIRTUAL-RUNTIME_init_manager', '') == 'systemd': - bootparams = 'systemd.log_level=debug systemd.log_target=console' - target.start(extra_bootparams=bootparams) - starttime = time.time() - result = tc.runTests() - stoptime = time.time() - if result.wasSuccessful(): - bb.plain("%s - Ran %d test%s in %.3fs" % (pn, result.testsRun, result.testsRun != 1 and "s" or "", stoptime - starttime)) - msg = "%s - OK - All required tests passed" % pn - skipped = len(result.skipped) - if skipped: - msg += " (skipped=%d)" % skipped - bb.plain(msg) + # We need to check if runqemu ends unexpectedly + # or if the worker send us a SIGTERM + tc.target.start(extra_bootparams=bootparams) + results = tc.runTests() + except (RuntimeError, BlockingIOError) as err: + if isinstance(err, RuntimeError): + bb.error('testimage received SIGTERM, shutting down...') else: - bb.fatal("%s - FAILED - check the task log and the ssh log" % pn) + bb.error('runqemu failed, shutting down...') + if results: + results.stop() + results = None finally: - signal.signal(signal.SIGTERM, tc.origsigtermhandler) - target.stop() + signal.signal(signal.SIGTERM, orig_sigterm_handler) + tc.target.stop() + + # Show results (if we have them) + if not results: + bb.fatal('%s - FAILED - tests were interrupted during execution' % pn) + tc.logSummary(results, pn) + tc.logDetails() + if not results.wasSuccessful(): + bb.fatal('%s - FAILED - check the task log and the ssh log' % pn) + +def get_runtime_paths(d): + """ + Returns a list of paths where runtime test must reside. + + Runtime tests are expected in /lib/oeqa/runtime/cases/ + """ + paths = [] + + for layer in d.getVar('BBLAYERS').split(): + path = os.path.join(layer, 'lib/oeqa/runtime/cases') + if os.path.isdir(path): + paths.append(path) + return paths + +def create_index(arg): + import subprocess + + index_cmd = arg + try: + bb.note("Executing '%s' ..." 
% index_cmd) + result = subprocess.check_output(index_cmd, + stderr=subprocess.STDOUT, + shell=True) + result = result.decode('utf-8') + except subprocess.CalledProcessError as e: + return("Index creation command '%s' failed with return code " + '%d:\n%s' % (e.cmd, e.returncode, e.output.decode("utf-8"))) + if result: + bb.note(result) + return None + +def create_rpm_index(d): + # Index RPMs + rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo_c") + index_cmds = [] + archs = (d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or '').replace('-', '_') + + for arch in archs.split(): + rpm_dir = os.path.join(d.getVar('DEPLOY_DIR_RPM'), arch) + idx_path = os.path.join(d.getVar('WORKDIR'), 'oe-testimage-repo', arch) + + if not os.path.isdir(rpm_dir): + continue + + lockfilename = os.path.join(d.getVar('DEPLOY_DIR_RPM'), 'rpm.lock') + lf = bb.utils.lockfile(lockfilename, False) + oe.path.copyhardlinktree(rpm_dir, idx_path) + # Full indexes overload a 256MB image so reduce the number of rpms + # in the feed. Filter to r* since we use the run-postinst packages and + # this leaves some allarch and machine arch packages too. + bb.utils.remove(idx_path + "*/[a-qs-z]*.rpm") + bb.utils.unlockfile(lf) + cmd = '%s --update -q %s' % (rpm_createrepo, idx_path) + + # Create repodata + result = create_index(cmd) + if result: + bb.fatal('%s' % ('\n'.join(result))) -def test_create_extract_dirs(d): - install_path = d.getVar("TEST_INSTALL_TMP_DIR", True) - package_path = d.getVar("TEST_PACKAGED_DIR", True) - extracted_path = d.getVar("TEST_EXTRACTED_DIR", True) - bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True)) - bb.utils.remove(package_path, recurse=True) - bb.utils.mkdirhier(install_path) - bb.utils.mkdirhier(package_path) - bb.utils.mkdirhier(extracted_path) +def package_extraction(d, test_suites): + from oeqa.utils.package_manager import find_packages_to_extract + from oeqa.utils.package_manager import extract_packages + bb.utils.remove(d.getVar("TEST_NEEDED_PACKAGES_DIR"), recurse=True) + packages = find_packages_to_extract(test_suites) + if packages: + bb.utils.mkdirhier(d.getVar("TEST_INSTALL_TMP_DIR")) + bb.utils.mkdirhier(d.getVar("TEST_PACKAGED_DIR")) + bb.utils.mkdirhier(d.getVar("TEST_EXTRACTED_DIR")) + extract_packages(d, packages) -testimage_main[vardepsexclude] =+ "BB_ORIGENV" +testimage_main[vardepsexclude] += "BB_ORIGENV DATETIME" inherit testsdk diff --git a/import-layers/yocto-poky/meta/classes/testsdk.bbclass b/import-layers/yocto-poky/meta/classes/testsdk.bbclass index 43342b1f2..6a201aa41 100644 --- a/import-layers/yocto-poky/meta/classes/testsdk.bbclass +++ b/import-layers/yocto-poky/meta/classes/testsdk.bbclass @@ -14,66 +14,72 @@ # # where "" is an image like core-image-sato. 
-TEST_LOG_DIR ?= "${WORKDIR}/testimage" -TESTSDKLOCK = "${TMPDIR}/testsdk.lock" - -def run_test_context(CTestContext, d, testdir, tcname, pn, *args): - import glob - import time - - targets = glob.glob(d.expand(testdir + "/tc/environment-setup-*")) - for sdkenv in targets: - bb.plain("Testing %s" % sdkenv) - tc = CTestContext(d, testdir, sdkenv, tcname, args) - - # this is a dummy load of tests - # we are doing that to find compile errors in the tests themselves - # before booting the image - try: - tc.loadTests() - except Exception as e: - import traceback - bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) - - starttime = time.time() - result = tc.runTests() - stoptime = time.time() - if result.wasSuccessful(): - bb.plain("%s SDK(%s):%s - Ran %d test%s in %.3fs" % (pn, os.path.basename(tcname), os.path.basename(sdkenv),result.testsRun, result.testsRun != 1 and "s" or "", stoptime - starttime)) - msg = "%s - OK - All required tests passed" % pn - skipped = len(result.skipped) - if skipped: - msg += " (skipped=%d)" % skipped - bb.plain(msg) - else: - bb.fatal("%s - FAILED - check the task log and the commands log" % pn) - def testsdk_main(d): import os - import oeqa.sdk import subprocess - from oeqa.oetest import SDKTestContext + import json + import logging - pn = d.getVar("PN", True) - bb.utils.mkdirhier(d.getVar("TEST_LOG_DIR", True)) + from bb.utils import export_proxies + from oeqa.core.runner import OEStreamLogger + from oeqa.sdk.context import OESDKTestContext, OESDKTestContextExecutor + from oeqa.utils import make_logger_bitbake_compatible + + pn = d.getVar("PN") + logger = make_logger_bitbake_compatible(logging.getLogger("BitBake")) + + # sdk use network for download projects for build + export_proxies(d) tcname = d.expand("${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.sh") if not os.path.exists(tcname): - bb.fatal("The toolchain is not built. Build it before running the tests: 'bitbake -c populate_sdk' .") + bb.fatal("The toolchain %s is not built. Build it before running the tests: 'bitbake -c populate_sdk' ." % tcname) + + tdname = d.expand("${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.testdata.json") + test_data = json.load(open(tdname, "r")) + + target_pkg_manifest = OESDKTestContextExecutor._load_manifest( + d.expand("${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.target.manifest")) + host_pkg_manifest = OESDKTestContextExecutor._load_manifest( + d.expand("${SDK_DEPLOY}/${TOOLCHAIN_OUTPUTNAME}.host.manifest")) - sdktestdir = d.expand("${WORKDIR}/testimage-sdk/") - bb.utils.remove(sdktestdir, True) - bb.utils.mkdirhier(sdktestdir) + sdk_dir = d.expand("${WORKDIR}/testimage-sdk/") + bb.utils.remove(sdk_dir, True) + bb.utils.mkdirhier(sdk_dir) try: - subprocess.check_output("cd %s; %s < -c populate_sdk_ext' .") + bb.fatal("The toolchain ext %s is not built. Build it before running the" \ + " tests: 'bitbake -c populate_sdk_ext' ." 
% tcname) - testdir = d.expand("${WORKDIR}/testsdkext/") - bb.utils.remove(testdir, True) - bb.utils.mkdirhier(testdir) - sdkdir = os.path.join(testdir, 'tc') + tdname = d.expand("${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.testdata.json") + test_data = json.load(open(tdname, "r")) + + target_pkg_manifest = OESDKExtTestContextExecutor._load_manifest( + d.expand("${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.target.manifest")) + host_pkg_manifest = OESDKExtTestContextExecutor._load_manifest( + d.expand("${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}.host.manifest")) + + sdk_dir = d.expand("${WORKDIR}/testsdkext/") + bb.utils.remove(sdk_dir, True) + bb.utils.mkdirhier(sdk_dir) try: - subprocess.check_output("%s -y -d %s" % (tcname, sdkdir), shell=True) + subprocess.check_output("%s -y -d %s" % (tcname, sdk_dir), shell=True) except subprocess.CalledProcessError as e: msg = "Couldn't install the extensible SDK:\n%s" % e.output.decode("utf-8") - logfn = os.path.join(sdkdir, 'preparing_build_system.log') + logfn = os.path.join(sdk_dir, 'preparing_build_system.log') if os.path.exists(logfn): msg += '\n\nContents of preparing_build_system.log:\n' with open(logfn, 'r') as f: @@ -128,19 +140,47 @@ def testsdkext_main(d): msg += line bb.fatal(msg) - try: - bb.plain("Running SDK Compatibility tests ...") - run_test_context(SDKExtTestContext, d, testdir, tcname, pn, True) - finally: - pass + fail = False + sdk_envs = OESDKExtTestContextExecutor._get_sdk_environs(sdk_dir) + for s in sdk_envs: + bb.plain("Extensible SDK testing environment: %s" % s) - try: - bb.plain("Running Extensible SDK tests ...") - run_test_context(SDKExtTestContext, d, testdir, tcname, pn) - finally: - pass + sdk_env = sdk_envs[s] + + # Use our own SSTATE_DIR and DL_DIR so that updates to the eSDK come from our sstate cache + # and we don't spend hours downloading kernels for the kernel module test + # Abuse auto.conf since local.conf would be overwritten by the SDK + with open(os.path.join(sdk_dir, 'conf', 'auto.conf'), 'a+') as f: + f.write('SSTATE_MIRRORS += " \\n file://.* file://%s/PATH"\n' % test_data.get('SSTATE_DIR')) + f.write('SOURCE_MIRROR_URL = "file://%s"\n' % test_data.get('DL_DIR')) + f.write('INHERIT += "own-mirrors"') + + # We need to do this in case we have a minimal SDK + subprocess.check_output(". 
%s > /dev/null; devtool sdk-install meta-extsdk-toolchain" % sdk_env, cwd=sdk_dir, shell=True) - bb.utils.remove(testdir, True) + tc = OESDKExtTestContext(td=test_data, logger=logger, sdk_dir=sdk_dir, + sdk_env=sdk_env, target_pkg_manifest=target_pkg_manifest, + host_pkg_manifest=host_pkg_manifest) + + try: + tc.loadTests(OESDKExtTestContextExecutor.default_cases) + except Exception as e: + import traceback + bb.fatal("Loading tests failed:\n%s" % traceback.format_exc()) + + result = tc.runTests() + + component = "%s %s" % (pn, OESDKExtTestContextExecutor.name) + context_msg = "%s:%s" % (os.path.basename(tcname), os.path.basename(sdk_env)) + + tc.logSummary(result, component, context_msg) + tc.logDetails() + + if not result.wasSuccessful(): + fail = True + + if fail: + bb.fatal("%s - FAILED - check the task log and the commands log" % pn) testsdkext_main[vardepsexclude] =+ "BB_ORIGENV" @@ -149,4 +189,4 @@ python do_testsdkext() { } addtask testsdkext do_testsdkext[nostamp] = "1" -do_testsdkext[lockfiles] += "${TESTSDKEXTLOCK}" + diff --git a/import-layers/yocto-poky/meta/classes/texinfo.bbclass b/import-layers/yocto-poky/meta/classes/texinfo.bbclass index 92efbccdd..6b0def0ea 100644 --- a/import-layers/yocto-poky/meta/classes/texinfo.bbclass +++ b/import-layers/yocto-poky/meta/classes/texinfo.bbclass @@ -1,10 +1,10 @@ # This class is inherited by recipes whose upstream packages invoke the # texinfo utilities at build-time. Native and cross recipes are made to use the -# dummy scripts provided by texinfo-dummy-native, for improved performance. -# Target architecture recipes use the genuine Texinfo utilities. By default, +# dummy scripts provided by texinfo-dummy-native, for improved performance. +# Target architecture recipes use the genuine Texinfo utilities. By default, # they use the Texinfo utilities on the host system. If you want to use the -# Texinfo recipe shipped with yoco, you can remove texinfo-native from -# ASSUME_PROVIDED and makeinfo from SANITY_REQUIRED_UTILITIES. +# Texinfo recipe, you can remove texinfo-native from ASSUME_PROVIDED and +# makeinfo from SANITY_REQUIRED_UTILITIES. 
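
A note on the extensible-SDK test hunk above: testsdkext_main() now appends SSTATE_MIRRORS, SOURCE_MIRROR_URL and an own-mirrors inherit to conf/auto.conf inside the unpacked eSDK, so the compatibility tests reuse the build's sstate cache and downloads instead of refetching everything. With illustrative paths substituted for the values read from the .testdata.json, the generated fragment looks roughly like:

    SSTATE_MIRRORS += " \n file://.* file:///home/builder/build/sstate-cache/PATH"
    SOURCE_MIRROR_URL = "file:///home/builder/build/downloads"
    INHERIT += "own-mirrors"

auto.conf is used deliberately because, as the comment in the hunk notes, the SDK's own local.conf would be overwritten.
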
TEXDEP = "texinfo-native" TEXDEP_class-native = "texinfo-dummy-native" @@ -13,3 +13,6 @@ DEPENDS_append = " ${TEXDEP}" PATH_prepend_class-native = "${STAGING_BINDIR_NATIVE}/texinfo-dummy-native:" PATH_prepend_class-cross = "${STAGING_BINDIR_NATIVE}/texinfo-dummy-native:" +# libtool-cross doesn't inherit cross +TEXDEP_pn-libtool-cross = "texinfo-dummy-native" +PATH_prepend_pn-libtool-cross = "${STAGING_BINDIR_NATIVE}/texinfo-dummy-native:" diff --git a/import-layers/yocto-poky/meta/classes/tinderclient.bbclass b/import-layers/yocto-poky/meta/classes/tinderclient.bbclass index 917b74d88..00f453cec 100644 --- a/import-layers/yocto-poky/meta/classes/tinderclient.bbclass +++ b/import-layers/yocto-poky/meta/classes/tinderclient.bbclass @@ -55,22 +55,22 @@ def tinder_format_http_post(d,status,log): # the variables we will need to send on this form post variables = { - "tree" : d.getVar('TINDER_TREE', True), - "machine_name" : d.getVar('TINDER_MACHINE', True), + "tree" : d.getVar('TINDER_TREE'), + "machine_name" : d.getVar('TINDER_MACHINE'), "os" : os.uname()[0], "os_version" : os.uname()[2], "compiler" : "gcc", - "clobber" : d.getVar('TINDER_CLOBBER', True) or "0", - "srcdate" : d.getVar('SRCDATE', True), - "PN" : d.getVar('PN', True), - "PV" : d.getVar('PV', True), - "PR" : d.getVar('PR', True), - "FILE" : d.getVar('FILE', True) or "N/A", - "TARGETARCH" : d.getVar('TARGET_ARCH', True), - "TARGETFPU" : d.getVar('TARGET_FPU', True) or "Unknown", - "TARGETOS" : d.getVar('TARGET_OS', True) or "Unknown", - "MACHINE" : d.getVar('MACHINE', True) or "Unknown", - "DISTRO" : d.getVar('DISTRO', True) or "Unknown", + "clobber" : d.getVar('TINDER_CLOBBER') or "0", + "srcdate" : d.getVar('SRCDATE'), + "PN" : d.getVar('PN'), + "PV" : d.getVar('PV'), + "PR" : d.getVar('PR'), + "FILE" : d.getVar('FILE') or "N/A", + "TARGETARCH" : d.getVar('TARGET_ARCH'), + "TARGETFPU" : d.getVar('TARGET_FPU') or "Unknown", + "TARGETOS" : d.getVar('TARGET_OS') or "Unknown", + "MACHINE" : d.getVar('MACHINE') or "Unknown", + "DISTRO" : d.getVar('DISTRO') or "Unknown", "zecke-rocks" : "sure", } @@ -82,7 +82,7 @@ def tinder_format_http_post(d,status,log): # we only need on build_status.pl but sending it # always does not hurt try: - f = open(d.getVar('TMPDIR',True)+'/tinder-machine.id', 'r') + f = open(d.getVar('TMPDIR')+'/tinder-machine.id', 'r') id = f.read() variables['machine_id'] = id except: @@ -106,8 +106,8 @@ def tinder_build_start(d): # get the body and type content_type, body = tinder_format_http_post(d,None,None) - server = d.getVar('TINDER_HOST', True ) - url = d.getVar('TINDER_URL', True ) + server = d.getVar('TINDER_HOST') + url = d.getVar('TINDER_URL') selector = url + "/xml/build_start.pl" @@ -127,7 +127,7 @@ def tinder_build_start(d): # now we will need to save the machine number # we will override any previous numbers - f = open(d.getVar('TMPDIR', True)+"/tinder-machine.id", 'w') + f = open(d.getVar('TMPDIR')+"/tinder-machine.id", 'w') f.write(report) @@ -137,8 +137,8 @@ def tinder_send_http(d, status, _log): """ # get the body and type - server = d.getVar('TINDER_HOST', True) - url = d.getVar('TINDER_URL', True) + server = d.getVar('TINDER_HOST') + url = d.getVar('TINDER_URL') selector = url + "/xml/build_status.pl" @@ -163,16 +163,16 @@ def tinder_print_info(d): time = tinder_time_string() ops = os.uname()[0] version = os.uname()[2] - url = d.getVar( 'TINDER_URL' , True ) - tree = d.getVar( 'TINDER_TREE', True ) - branch = d.getVar( 'TINDER_BRANCH', True ) - srcdate = d.getVar( 'SRCDATE', True ) - machine = 
d.getVar( 'MACHINE', True ) - distro = d.getVar( 'DISTRO', True ) - bbfiles = d.getVar( 'BBFILES', True ) - tarch = d.getVar( 'TARGET_ARCH', True ) - fpu = d.getVar( 'TARGET_FPU', True ) - oerev = d.getVar( 'OE_REVISION', True ) or "unknown" + url = d.getVar('TINDER_URL') + tree = d.getVar('TINDER_TREE') + branch = d.getVar('TINDER_BRANCH') + srcdate = d.getVar('SRCDATE') + machine = d.getVar('MACHINE') + distro = d.getVar('DISTRO') + bbfiles = d.getVar('BBFILES') + tarch = d.getVar('TARGET_ARCH') + fpu = d.getVar('TARGET_FPU') + oerev = d.getVar('OE_REVISION') or "unknown" # there is a bug with tipple quoted strings # i will work around but will fix the original @@ -278,7 +278,7 @@ def tinder_do_tinder_report(event): try: # truncate the tinder log file - f = open(event.data.getVar('TINDER_LOG', True), 'w') + f = open(event.data.getVar('TINDER_LOG'), 'w') f.write("") f.close() except: @@ -287,7 +287,7 @@ def tinder_do_tinder_report(event): try: # write a status to the file. This is needed for the -k option # of BitBake - g = open(event.data.getVar('TMPDIR', True)+"/tinder-status", 'w') + g = open(event.data.getVar('TMPDIR')+"/tinder-status", 'w') g.write("") g.close() except IOError: @@ -296,10 +296,10 @@ def tinder_do_tinder_report(event): # Append the Task-Log (compile,configure...) to the log file # we will send to the server if name == "TaskSucceeded" or name == "TaskFailed": - log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T', True), event.task)) + log_file = glob.glob("%s/log.%s.*" % (event.data.getVar('T'), event.task)) if len(log_file) != 0: - to_file = event.data.getVar('TINDER_LOG', True) + to_file = event.data.getVar('TINDER_LOG') log += "".join(open(log_file[0], 'r').readlines()) # set the right 'HEADER'/Summary for the TinderBox @@ -310,23 +310,23 @@ def tinder_do_tinder_report(event): elif name == "TaskFailed": log += "<--- TINDERBOX Task %s failed (FAILURE)\n" % event.task elif name == "PkgStarted": - log += "---> TINDERBOX Package %s started\n" % event.data.getVar('PF', True) + log += "---> TINDERBOX Package %s started\n" % event.data.getVar('PF') elif name == "PkgSucceeded": - log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % event.data.getVar('PF', True) + log += "<--- TINDERBOX Package %s done (SUCCESS)\n" % event.data.getVar('PF') elif name == "PkgFailed": - if not event.data.getVar('TINDER_AUTOBUILD', True) == "0": + if not event.data.getVar('TINDER_AUTOBUILD') == "0": build.exec_task('do_clean', event.data) - log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % event.data.getVar('PF', True) + log += "<--- TINDERBOX Package %s failed (FAILURE)\n" % event.data.getVar('PF') status = 200 # remember the failure for the -k case - h = open(event.data.getVar('TMPDIR', True)+"/tinder-status", 'w') + h = open(event.data.getVar('TMPDIR')+"/tinder-status", 'w') h.write("200") elif name == "BuildCompleted": log += "Build Completed\n" status = 100 # Check if we have a old status... 
try: - h = open(event.data.getVar('TMPDIR',True)+'/tinder-status', 'r') + h = open(event.data.getVar('TMPDIR')+'/tinder-status', 'r') status = int(h.read()) except: pass @@ -342,7 +342,7 @@ def tinder_do_tinder_report(event): log += "Error:Was Runtime: %d\n" % event.isRuntime() status = 200 # remember the failure for the -k case - h = open(event.data.getVar('TMPDIR', True)+"/tinder-status", 'w') + h = open(event.data.getVar('TMPDIR')+"/tinder-status", 'w') h.write("200") # now post the log @@ -360,7 +360,7 @@ python tinderclient_eventhandler() { if e.data is None or bb.event.getName(e) == "MsgNote": return - do_tinder_report = e.data.getVar('TINDER_REPORT', True) + do_tinder_report = e.data.getVar('TINDER_REPORT') if do_tinder_report and do_tinder_report == "1": tinder_do_tinder_report(e) diff --git a/import-layers/yocto-poky/meta/classes/toaster.bbclass b/import-layers/yocto-poky/meta/classes/toaster.bbclass index 4bddf34e9..6cef0b8f6 100644 --- a/import-layers/yocto-poky/meta/classes/toaster.bbclass +++ b/import-layers/yocto-poky/meta/classes/toaster.bbclass @@ -80,7 +80,7 @@ python toaster_layerinfo_dumpdata() { return layer_info - bblayers = e.data.getVar("BBLAYERS", True) + bblayers = e.data.getVar("BBLAYERS") llayerinfo = {} @@ -119,10 +119,10 @@ python toaster_package_dumpdata() { """ # No need to try and dumpdata if the recipe isn't generating packages - if not d.getVar('PACKAGES', True): + if not d.getVar('PACKAGES'): return - pkgdatadir = d.getVar('PKGDESTWORK', True) + pkgdatadir = d.getVar('PKGDESTWORK') lpkgdata = {} datadir = os.path.join(pkgdatadir, 'runtime') @@ -142,7 +142,7 @@ python toaster_artifact_dumpdata() { """ event_data = { - "TOOLCHAIN_OUTPUTNAME": d.getVar("TOOLCHAIN_OUTPUTNAME", True) + "TOOLCHAIN_OUTPUTNAME": d.getVar("TOOLCHAIN_OUTPUTNAME") } bb.event.fire(bb.event.MetadataEvent("SDKArtifactInfo", event_data), d) @@ -157,11 +157,11 @@ python toaster_collect_task_stats() { import bb.utils import os - toaster_statlist_file = os.path.join(e.data.getVar('BUILDSTATS_BASE', True), "toasterstatlist") - - if not e.data.getVar('BUILDSTATS_BASE', True): + if not e.data.getVar('BUILDSTATS_BASE'): return # if we don't have buildstats, we cannot collect stats + toaster_statlist_file = os.path.join(e.data.getVar('BUILDSTATS_BASE'), "toasterstatlist") + def stat_to_float(value): return float(value.strip('% \n\r')) @@ -246,7 +246,7 @@ python toaster_buildhistory_dump() { import re BUILDHISTORY_DIR = e.data.expand("${TOPDIR}/buildhistory") BUILDHISTORY_DIR_IMAGE_BASE = e.data.expand("%s/images/${MACHINE_ARCH}/${TCLIBC}/"% BUILDHISTORY_DIR) - pkgdata_dir = e.data.getVar("PKGDATA_DIR", True) + pkgdata_dir = e.data.getVar("PKGDATA_DIR") # scan the build targets for this build @@ -265,28 +265,33 @@ python toaster_buildhistory_dump() { with open("%s/installed-package-sizes.txt" % installed_img_path, "r") as fin: for line in fin: line = line.rstrip(";") - psize, px = line.split("\t") - punit, pname = px.split(" ") + psize, punit, pname = line.split() # this size is "installed-size" as it measures how much space it takes on disk images[target][pname.strip()] = {'size':int(psize)*1024, 'depends' : []} with open("%s/depends.dot" % installed_img_path, "r") as fin: - p = re.compile(r' -> ') - dot = re.compile(r'.*style=dotted') + p = re.compile(r'\s*"(?P[^"]+)"\s*->\s*"(?P[^"]+)"(?P.*?\[style=dotted\])?') for line in fin: - line = line.rstrip(';') - linesplit = p.split(line) - if len(linesplit) == 2: - pname = linesplit[0].rstrip('"').strip('"') - dependsname = linesplit[1].split(" 
")[0].strip().strip(";").strip('"').rstrip('"') - deptype = "depends" - if dot.match(line): - deptype = "recommends" - if not pname in images[target]: - images[target][pname] = {'size': 0, 'depends' : []} - if not dependsname in images[target]: - images[target][dependsname] = {'size': 0, 'depends' : []} - images[target][pname]['depends'].append((dependsname, deptype)) + m = p.match(line) + if not m: + continue + pname = m.group('name') + dependsname = m.group('dep') + deptype = 'recommends' if m.group('rec') else 'depends' + + # If RPM is used for packaging, then there may be + # dependencies such as "/bin/sh", which will confuse + # _toaster_load_pkgdatafile() later on. While at it, ignore + # any dependencies that contain parentheses, e.g., + # "libc.so.6(GLIBC_2.7)". + if dependsname.startswith('/') or '(' in dependsname: + continue + + if not pname in images[target]: + images[target][pname] = {'size': 0, 'depends' : []} + if not dependsname in images[target]: + images[target][dependsname] = {'size': 0, 'depends' : []} + images[target][pname]['depends'].append((dependsname, deptype)) # files-in-image.txt is only generated if an image file is created, # so the file entries ('syms', 'dirs', 'files') for a target will be @@ -329,8 +334,18 @@ python toaster_artifacts() { if e.taskname in ["do_deploy", "do_image_complete", "do_populate_sdk", "do_populate_sdk_ext"]: d2 = d.createCopy() d2.setVar('FILE', e.taskfile) - d2.setVar('SSTATE_MANMACH', d2.expand("${MACHINE}")) + # Use 'stamp-extra-info' if present, else use workaround + # to determine 'SSTATE_MANMACH' + extrainf = d2.getVarFlag(e.taskname, 'stamp-extra-info') + if extrainf: + d2.setVar('SSTATE_MANMACH', extrainf) + else: + if "do_populate_sdk" == e.taskname: + d2.setVar('SSTATE_MANMACH', d2.expand("${MACHINE}${SDKMACHINE}")) + else: + d2.setVar('SSTATE_MANMACH', d2.expand("${MACHINE}")) manifest = oe.sstatesig.sstate_get_manifest_filename(e.taskname[3:], d2)[0] + if os.access(manifest, os.R_OK): with open(manifest) as fmanifest: artifacts = [fname.strip() for fname in fmanifest] @@ -357,8 +372,9 @@ do_packagedata_setscene[vardepsexclude] += "toaster_package_dumpdata " do_package[postfuncs] += "toaster_package_dumpdata " do_package[vardepsexclude] += "toaster_package_dumpdata " -do_populate_sdk[postfuncs] += "toaster_artifact_dumpdata " -do_populate_sdk[vardepsexclude] += "toaster_artifact_dumpdata " +#do_populate_sdk[postfuncs] += "toaster_artifact_dumpdata " +#do_populate_sdk[vardepsexclude] += "toaster_artifact_dumpdata " + +#do_populate_sdk_ext[postfuncs] += "toaster_artifact_dumpdata " +#do_populate_sdk_ext[vardepsexclude] += "toaster_artifact_dumpdata " -do_populate_sdk_ext[postfuncs] += "toaster_artifact_dumpdata " -do_populate_sdk_ext[vardepsexclude] += "toaster_artifact_dumpdata " \ No newline at end of file diff --git a/import-layers/yocto-poky/meta/classes/toolchain-scripts.bbclass b/import-layers/yocto-poky/meta/classes/toolchain-scripts.bbclass index 0e11f2d7a..260ece967 100644 --- a/import-layers/yocto-poky/meta/classes/toolchain-scripts.bbclass +++ b/import-layers/yocto-poky/meta/classes/toolchain-scripts.bbclass @@ -31,7 +31,6 @@ toolchain_create_sdk_env_script () { EXTRAPATH="$EXTRAPATH:$sdkpathnative$bindir/${TARGET_ARCH}${TARGET_VENDOR}-$i" done echo "export PATH=$sdkpathnative$bindir:$sdkpathnative$sbindir:$sdkpathnative$base_bindir:$sdkpathnative$base_sbindir:$sdkpathnative$bindir/../${HOST_SYS}/bin:$sdkpathnative$bindir/${TARGET_SYS}"$EXTRAPATH':$PATH' >> $script - echo "export 
CCACHE_PATH=$sdkpathnative$bindir:$sdkpathnative$bindir/../${HOST_SYS}/bin:$sdkpathnative$bindir/${TARGET_SYS}"$EXTRAPATH':$CCACHE_PATH' >> $script echo 'export PKG_CONFIG_SYSROOT_DIR=$SDKTARGETSYSROOT' >> $script echo 'export PKG_CONFIG_PATH=$SDKTARGETSYSROOT'"$libdir"'/pkgconfig:$SDKTARGETSYSROOT'"$prefix"'/share/pkgconfig' >> $script echo 'export CONFIG_SITE=${SDKPATH}/site-config-'"${multimach_target_sys}" >> $script @@ -50,7 +49,6 @@ toolchain_create_tree_env_script () { rm -f $script touch $script echo 'export PATH=${STAGING_DIR_NATIVE}/usr/bin:${PATH}' >> $script - echo 'export CCACHE_PATH=${STAGING_DIR_NATIVE}/usr/bin:${CCACHE_PATH}' >> $script echo 'export PKG_CONFIG_SYSROOT_DIR=${PKG_CONFIG_SYSROOT_DIR}' >> $script echo 'export PKG_CONFIG_PATH=${PKG_CONFIG_PATH}' >> $script echo 'export CONFIG_SITE="${@siteinfo_get_files(d)}"' >> $script @@ -108,6 +106,7 @@ EOF TOOLCHAIN_CONFIGSITE_NOCACHE = "${@siteinfo_get_files(d)}" TOOLCHAIN_CONFIGSITE_SYSROOTCACHE = "${STAGING_DIR}/${MLPREFIX}${MACHINE}/${target_datadir}/${TARGET_SYS}_config_site.d" TOOLCHAIN_NEED_CONFIGSITE_CACHE ??= "virtual/${MLPREFIX}libc ncurses" +DEPENDS += "${TOOLCHAIN_NEED_CONFIGSITE_CACHE}" #This function create a site config file toolchain_create_sdk_siteconfig () { @@ -139,9 +138,9 @@ toolchain_create_sdk_siteconfig[vardepsexclude] = "TOOLCHAIN_CONFIGSITE_SYSROOTC python __anonymous () { import oe.classextend deps = "" - for dep in (d.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE', True) or "").split(): + for dep in (d.getVar('TOOLCHAIN_NEED_CONFIGSITE_CACHE') or "").split(): deps += " %s:do_populate_sysroot" % dep - for variant in (d.getVar('MULTILIB_VARIANTS', True) or "").split(): + for variant in (d.getVar('MULTILIB_VARIANTS') or "").split(): clsextend = oe.classextend.ClassExtender(variant, d) newdep = clsextend.extend_name(dep) deps += " %s:do_populate_sysroot" % newdep diff --git a/import-layers/yocto-poky/meta/classes/typecheck.bbclass b/import-layers/yocto-poky/meta/classes/typecheck.bbclass index 6bff7c713..72da93223 100644 --- a/import-layers/yocto-poky/meta/classes/typecheck.bbclass +++ b/import-layers/yocto-poky/meta/classes/typecheck.bbclass @@ -5,7 +5,7 @@ python check_types() { import oe.types for key in e.data.keys(): - if e.data.getVarFlag(key, "type", True): + if e.data.getVarFlag(key, "type"): oe.data.typed_value(key, e.data) } addhandler check_types diff --git a/import-layers/yocto-poky/meta/classes/uboot-config.bbclass b/import-layers/yocto-poky/meta/classes/uboot-config.bbclass index 3f760f2fb..10013b7d4 100644 --- a/import-layers/yocto-poky/meta/classes/uboot-config.bbclass +++ b/import-layers/yocto-poky/meta/classes/uboot-config.bbclass @@ -14,19 +14,19 @@ UBOOT_BINARY ?= "u-boot.${UBOOT_SUFFIX}" python () { - ubootmachine = d.getVar("UBOOT_MACHINE", True) + ubootmachine = d.getVar("UBOOT_MACHINE") ubootconfigflags = d.getVarFlags('UBOOT_CONFIG') - ubootbinary = d.getVar('UBOOT_BINARY', True) - ubootbinaries = d.getVar('UBOOT_BINARIES', True) + ubootbinary = d.getVar('UBOOT_BINARY') + ubootbinaries = d.getVar('UBOOT_BINARIES') # The "doc" varflag is special, we don't want to see it here ubootconfigflags.pop('doc', None) if not ubootmachine and not ubootconfigflags: - PN = d.getVar("PN", True) - FILE = os.path.basename(d.getVar("FILE", True)) + PN = d.getVar("PN") + FILE = os.path.basename(d.getVar("FILE")) bb.debug(1, "To build %s, see %s for instructions on \ setting up your machine config" % (PN, FILE)) - raise bb.parse.SkipPackage("Either UBOOT_MACHINE or UBOOT_CONFIG must be set in the %s 
machine configuration." % d.getVar("MACHINE", True)) + raise bb.parse.SkipPackage("Either UBOOT_MACHINE or UBOOT_CONFIG must be set in the %s machine configuration." % d.getVar("MACHINE")) if ubootmachine and ubootconfigflags: raise bb.parse.SkipPackage("You cannot use UBOOT_MACHINE and UBOOT_CONFIG at the same time.") @@ -37,7 +37,7 @@ python () { if not ubootconfigflags: return - ubootconfig = (d.getVar('UBOOT_CONFIG', True) or "").split() + ubootconfig = (d.getVar('UBOOT_CONFIG') or "").split() if len(ubootconfig) > 0: for config in ubootconfig: for f, v in ubootconfigflags.items(): diff --git a/import-layers/yocto-poky/meta/classes/uboot-extlinux-config.bbclass b/import-layers/yocto-poky/meta/classes/uboot-extlinux-config.bbclass index df91386c0..8447a047e 100644 --- a/import-layers/yocto-poky/meta/classes/uboot-extlinux-config.bbclass +++ b/import-layers/yocto-poky/meta/classes/uboot-extlinux-config.bbclass @@ -12,10 +12,15 @@ # UBOOT_EXTLINUX_KERNEL_ARGS - Add additional kernel arguments. # UBOOT_EXTLINUX_KERNEL_IMAGE - Kernel image name. # UBOOT_EXTLINUX_FDTDIR - Device tree directory. +# UBOOT_EXTLINUX_FDT - Device tree file. # UBOOT_EXTLINUX_INITRD - Indicates a list of filesystem images to # concatenate and use as an initrd (optional). # UBOOT_EXTLINUX_MENU_DESCRIPTION - Name to use as description. # UBOOT_EXTLINUX_ROOT - Root kernel cmdline. +# UBOOT_EXTLINUX_TIMEOUT - Timeout before DEFAULT selection is made. +# Measured in 1/10 of a second. +# UBOOT_EXTLINUX_DEFAULT_LABEL - Target to be selected by default after +# the timeout period # # If there's only one label system will boot automatically and menu won't be # created. If you want to use more than one labels, e.g linux and alternate, @@ -25,6 +30,9 @@ # # UBOOT_EXTLINUX_LABELS ??= "default fallback" # +# UBOOT_EXTLINUX_DEFAULT_LABEL ??= "Linux Default" +# UBOOT_EXTLINUX_TIMEOUT ??= "30" +# # UBOOT_EXTLINUX_KERNEL_IMAGE_default ??= "../zImage" # UBOOT_EXTLINUX_MENU_DESCRIPTION_default ??= "Linux Default" # @@ -34,6 +42,8 @@ # Results: # # menu title Select the boot mode +# TIMEOUT 30 +# DEFAULT Linux Default # LABEL Linux Default # KERNEL ../zImage # FDTDIR ../ @@ -50,6 +60,7 @@ # a console=...some_tty... 
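
The header comment above documents three new knobs for uboot-extlinux-config.bbclass: UBOOT_EXTLINUX_FDT (emit a FDT line instead of FDTDIR), UBOOT_EXTLINUX_TIMEOUT and UBOOT_EXTLINUX_DEFAULT_LABEL. The timeout and default label are global, while per-label values use a _<label> suffix because create_extlinux_config() applies each label as an OVERRIDE. A hypothetical machine configuration (label names and the .dtb path are only illustrative) could look like:

    UBOOT_EXTLINUX = "1"
    UBOOT_EXTLINUX_LABELS = "default fallback"
    UBOOT_EXTLINUX_TIMEOUT = "30"
    UBOOT_EXTLINUX_DEFAULT_LABEL = "Linux Default"

    UBOOT_EXTLINUX_MENU_DESCRIPTION_default = "Linux Default"
    UBOOT_EXTLINUX_FDT_default = "../my-board.dtb"

    UBOOT_EXTLINUX_MENU_DESCRIPTION_fallback = "Linux Fallback"
    UBOOT_EXTLINUX_KERNEL_IMAGE_fallback = "../zImage-fallback"

With more than one label, the generated extlinux.conf then carries the menu title plus the TIMEOUT and DEFAULT lines shown in the "Results:" example above.
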
UBOOT_EXTLINUX_CONSOLE ??= "console=${console}" UBOOT_EXTLINUX_LABELS ??= "linux" +UBOOT_EXTLINUX_FDT ??= "" UBOOT_EXTLINUX_FDTDIR ??= "../" UBOOT_EXTLINUX_KERNEL_IMAGE ??= "../${KERNEL_IMAGETYPE}" UBOOT_EXTLINUX_KERNEL_ARGS ??= "rootwait rw" @@ -58,23 +69,25 @@ UBOOT_EXTLINUX_MENU_DESCRIPTION_linux ??= "${DISTRO_NAME}" UBOOT_EXTLINUX_CONFIG = "${B}/extlinux.conf" python create_extlinux_config() { - if d.getVar("UBOOT_EXTLINUX", True) != "1": + if d.getVar("UBOOT_EXTLINUX") != "1": return - if not d.getVar('WORKDIR', True): + if not d.getVar('WORKDIR'): bb.error("WORKDIR not defined, unable to package") - labels = d.getVar('UBOOT_EXTLINUX_LABELS', True) + labels = d.getVar('UBOOT_EXTLINUX_LABELS') if not labels: bb.fatal("UBOOT_EXTLINUX_LABELS not defined, nothing to do") if not labels.strip(): bb.fatal("No labels, nothing to do") - cfile = d.getVar('UBOOT_EXTLINUX_CONFIG', True) + cfile = d.getVar('UBOOT_EXTLINUX_CONFIG') if not cfile: bb.fatal('Unable to read UBOOT_EXTLINUX_CONFIG') + localdata = bb.data.createCopy(d) + try: with open(cfile, 'w') as cfgfile: cfgfile.write('# Generic Distro Configuration file generated by OpenEmbedded\n') @@ -82,37 +95,50 @@ python create_extlinux_config() { if len(labels.split()) > 1: cfgfile.write('menu title Select the boot mode\n') + timeout = localdata.getVar('UBOOT_EXTLINUX_TIMEOUT') + if timeout: + cfgfile.write('TIMEOUT %s\n' % (timeout)) + + if len(labels.split()) > 1: + default = localdata.getVar('UBOOT_EXTLINUX_DEFAULT_LABEL') + if default: + cfgfile.write('DEFAULT %s\n' % (default)) + for label in labels.split(): - localdata = bb.data.createCopy(d) - overrides = localdata.getVar('OVERRIDES', True) + overrides = localdata.getVar('OVERRIDES') if not overrides: bb.fatal('OVERRIDES not defined') localdata.setVar('OVERRIDES', label + ':' + overrides) - bb.data.update_data(localdata) - extlinux_console = localdata.getVar('UBOOT_EXTLINUX_CONSOLE', True) + extlinux_console = localdata.getVar('UBOOT_EXTLINUX_CONSOLE') - menu_description = localdata.getVar('UBOOT_EXTLINUX_MENU_DESCRIPTION', True) + menu_description = localdata.getVar('UBOOT_EXTLINUX_MENU_DESCRIPTION') if not menu_description: menu_description = label - root = localdata.getVar('UBOOT_EXTLINUX_ROOT', True) + root = localdata.getVar('UBOOT_EXTLINUX_ROOT') if not root: bb.fatal('UBOOT_EXTLINUX_ROOT not defined') - kernel_image = localdata.getVar('UBOOT_EXTLINUX_KERNEL_IMAGE', True) - fdtdir = localdata.getVar('UBOOT_EXTLINUX_FDTDIR', True) - if fdtdir: + kernel_image = localdata.getVar('UBOOT_EXTLINUX_KERNEL_IMAGE') + fdtdir = localdata.getVar('UBOOT_EXTLINUX_FDTDIR') + + fdt = localdata.getVar('UBOOT_EXTLINUX_FDT') + + if fdt: + cfgfile.write('LABEL %s\n\tKERNEL %s\n\tFDT %s\n' % + (menu_description, kernel_image, fdt)) + elif fdtdir: cfgfile.write('LABEL %s\n\tKERNEL %s\n\tFDTDIR %s\n' % (menu_description, kernel_image, fdtdir)) else: cfgfile.write('LABEL %s\n\tKERNEL %s\n' % (menu_description, kernel_image)) - kernel_args = localdata.getVar('UBOOT_EXTLINUX_KERNEL_ARGS', True) + kernel_args = localdata.getVar('UBOOT_EXTLINUX_KERNEL_ARGS') - initrd = localdata.getVar('UBOOT_EXTLINUX_INITRD', True) + initrd = localdata.getVar('UBOOT_EXTLINUX_INITRD') if initrd: cfgfile.write('\tINITRD %s\n'% initrd) diff --git a/import-layers/yocto-poky/meta/classes/uboot-sign.bbclass b/import-layers/yocto-poky/meta/classes/uboot-sign.bbclass index cef26b19b..8ee904e7d 100644 --- a/import-layers/yocto-poky/meta/classes/uboot-sign.bbclass +++ b/import-layers/yocto-poky/meta/classes/uboot-sign.bbclass @@ 
-25,7 +25,7 @@ # u-boot:do_concat_dtb # u-boot:do_install # -# For more details on signature process, please refer to U-boot documentation. +# For more details on signature process, please refer to U-Boot documentation. # Signature activation. UBOOT_SIGN_ENABLE ?= "0" @@ -80,9 +80,9 @@ do_concat_dtb () { } python () { - uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot', True) or 'u-boot' - if d.getVar('UBOOT_SIGN_ENABLE', True) == '1' and d.getVar('PN', True) == uboot_pn: - kernel_pn = d.getVar('PREFERRED_PROVIDER_virtual/kernel', True) + uboot_pn = d.getVar('PREFERRED_PROVIDER_u-boot') or 'u-boot' + if d.getVar('UBOOT_SIGN_ENABLE') == '1' and d.getVar('PN') == uboot_pn: + kernel_pn = d.getVar('PREFERRED_PROVIDER_virtual/kernel') # u-boot.dtb and u-boot-nodtb.bin are deployed _before_ do_deploy # Thus, do_deploy_setscene will also populate them in DEPLOY_IMAGE_DIR diff --git a/import-layers/yocto-poky/meta/classes/uninative.bbclass b/import-layers/yocto-poky/meta/classes/uninative.bbclass index 975466929..8f3448336 100644 --- a/import-layers/yocto-poky/meta/classes/uninative.bbclass +++ b/import-layers/yocto-poky/meta/classes/uninative.bbclass @@ -20,11 +20,11 @@ python uninative_event_fetchloader() { loader isn't already present. """ - chksum = d.getVarFlag("UNINATIVE_CHECKSUM", d.getVar("BUILD_ARCH", True), True) + chksum = d.getVarFlag("UNINATIVE_CHECKSUM", d.getVar("BUILD_ARCH")) if not chksum: - bb.fatal("Uninative selected but not configured correctly, please set UNINATIVE_CHECKSUM[%s]" % d.getVar("BUILD_ARCH", True)) + bb.fatal("Uninative selected but not configured correctly, please set UNINATIVE_CHECKSUM[%s]" % d.getVar("BUILD_ARCH")) - loader = d.getVar("UNINATIVE_LOADER", True) + loader = d.getVar("UNINATIVE_LOADER") loaderchksum = loader + ".chksum" if os.path.exists(loader) and os.path.exists(loaderchksum): with open(loaderchksum, "r") as f: @@ -37,13 +37,13 @@ python uninative_event_fetchloader() { # Save and restore cwd as Fetch.download() does a chdir() olddir = os.getcwd() - tarball = d.getVar("UNINATIVE_TARBALL", True) - tarballdir = os.path.join(d.getVar("UNINATIVE_DLDIR", True), chksum) + tarball = d.getVar("UNINATIVE_TARBALL") + tarballdir = os.path.join(d.getVar("UNINATIVE_DLDIR"), chksum) tarballpath = os.path.join(tarballdir, tarball) if not os.path.exists(tarballpath): bb.utils.mkdirhier(tarballdir) - if d.getVar("UNINATIVE_URL", True) == "unset": + if d.getVar("UNINATIVE_URL") == "unset": bb.fatal("Uninative selected but not configured, please set UNINATIVE_URL") localdata = bb.data.createCopy(d) @@ -59,8 +59,17 @@ python uninative_event_fetchloader() { if localpath != tarballpath and os.path.exists(localpath) and not os.path.exists(tarballpath): os.symlink(localpath, tarballpath) - cmd = d.expand("mkdir -p ${UNINATIVE_STAGING_DIR}-uninative; cd ${UNINATIVE_STAGING_DIR}-uninative; tar -xjf ${UNINATIVE_DLDIR}/%s/${UNINATIVE_TARBALL}; ${UNINATIVE_STAGING_DIR}-uninative/relocate_sdk.py ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux ${UNINATIVE_LOADER} ${UNINATIVE_LOADER} ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/${bindir_native}/patchelf-uninative ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${base_libdir_native}/libc*.so" % chksum) - subprocess.check_call(cmd, shell=True) + cmd = d.expand("\ +mkdir -p ${UNINATIVE_STAGING_DIR}-uninative; \ +cd ${UNINATIVE_STAGING_DIR}-uninative; \ +tar -xjf ${UNINATIVE_DLDIR}/%s/${UNINATIVE_TARBALL}; \ +${UNINATIVE_STAGING_DIR}-uninative/relocate_sdk.py \ + 
${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux \ + ${UNINATIVE_LOADER} \ + ${UNINATIVE_LOADER} \ + ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux/${bindir_native}/patchelf-uninative \ + ${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${base_libdir_native}/libc*.so" % chksum) + subprocess.check_output(cmd, shell=True) with open(loaderchksum, "w") as f: f.write(chksum) @@ -86,12 +95,13 @@ python uninative_event_enable() { } def enable_uninative(d): - loader = d.getVar("UNINATIVE_LOADER", True) + loader = d.getVar("UNINATIVE_LOADER") if os.path.exists(loader): bb.debug(2, "Enabling uninative") d.setVar("NATIVELSBSTRING", "universal%s" % oe.utils.host_gcc_version(d)) d.appendVar("SSTATEPOSTUNPACKFUNCS", " uninative_changeinterp") - d.prependVar("PATH", "${UNINATIVE_STAGING_DIR}-uninative/${BUILD_ARCH}-linux${bindir_native}:") + d.appendVarFlag("SSTATEPOSTUNPACKFUNCS", "vardepvalueexclude", "| uninative_changeinterp") + d.prependVar("PATH", "${STAGING_DIR}-uninative/${BUILD_ARCH}-linux${bindir_native}:") python uninative_changeinterp () { import subprocess @@ -101,7 +111,7 @@ python uninative_changeinterp () { if not (bb.data.inherits_class('native', d) or bb.data.inherits_class('crosssdk', d) or bb.data.inherits_class('cross', d)): return - sstateinst = d.getVar('SSTATE_INSTDIR', True) + sstateinst = d.getVar('SSTATE_INSTDIR') for walkroot, dirs, files in os.walk(sstateinst): for file in files: if file.endswith(".so") or ".so." in file: @@ -120,11 +130,5 @@ python uninative_changeinterp () { if not elf.isDynamic(): continue - try: - subprocess.check_output(("patchelf-uninative", "--set-interpreter", - d.getVar("UNINATIVE_LOADER", True), f), - stderr=subprocess.STDOUT) - except subprocess.CalledProcessError as e: - bb.fatal("'%s' failed with exit code %d and the following output:\n%s" % - (e.cmd, e.returncode, e.output)) + subprocess.check_output(("patchelf-uninative", "--set-interpreter", d.getVar("UNINATIVE_LOADER"), f), stderr=subprocess.STDOUT) } diff --git a/import-layers/yocto-poky/meta/classes/update-alternatives.bbclass b/import-layers/yocto-poky/meta/classes/update-alternatives.bbclass index 65929e555..4bba76c3b 100644 --- a/import-layers/yocto-poky/meta/classes/update-alternatives.bbclass +++ b/import-layers/yocto-poky/meta/classes/update-alternatives.bbclass @@ -65,9 +65,11 @@ ALTERNATIVE_PRIORITY = "10" # and include that vairable in the set. 
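
The bulk of the churn in the tinderclient, toaster, toolchain-scripts, typecheck, uboot-*, uninative and update-alternatives hunks is the same mechanical conversion: dropping the trailing True argument from d.getVar()/d.getVarFlag(). This assumes the pyro-level BitBake datastore API, where expand now defaults to True. A before/after sketch (d being the datastore object available in class python code):

    # Old style: variable expansion had to be requested on every call
    pn  = d.getVar('PN', True)
    doc = d.getVarFlag('UBOOT_CONFIG', 'doc', True)

    # New style: expand defaults to True, so the positional flag is dropped;
    # expand=False remains available where the raw, unexpanded value is wanted
    pn  = d.getVar('PN')
    doc = d.getVarFlag('UBOOT_CONFIG', 'doc')
    raw = d.getVar('PN', expand=False)
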
UPDALTVARS = "ALTERNATIVE ALTERNATIVE_LINK_NAME ALTERNATIVE_TARGET ALTERNATIVE_PRIORITY" +PACKAGE_WRITE_DEPS += "virtual/update-alternatives-native" + def gen_updatealternativesvardeps(d): - pkgs = (d.getVar("PACKAGES", True) or "").split() - vars = (d.getVar("UPDALTVARS", True) or "").split() + pkgs = (d.getVar("PACKAGES") or "").split() + vars = (d.getVar("UPDALTVARS") or "").split() # First compute them for non_pkg versions for v in vars: @@ -84,7 +86,7 @@ def gen_updatealternativesvardeps(d): d.appendVar('%s_VARDEPS_%s' % (v,p), ' %s:%s' % (flag, d.getVarFlag('%s_%s' % (v,p), flag, False))) def ua_extend_depends(d): - if not 'virtual/update-alternatives' in d.getVar('PROVIDES', True): + if not 'virtual/update-alternatives' in d.getVar('PROVIDES'): d.appendVar('DEPENDS', ' virtual/${MLPREFIX}update-alternatives') python __anonymous() { @@ -94,6 +96,10 @@ python __anonymous() { bb.data.inherits_class('cross-canadian', d): return + # Disable when targeting mingw32 (no target support) + if d.getVar("TARGET_OS") == "mingw32": + return + # compute special vardeps gen_updatealternativesvardeps(d) @@ -103,8 +109,8 @@ python __anonymous() { def gen_updatealternativesvars(d): ret = [] - pkgs = (d.getVar("PACKAGES", True) or "").split() - vars = (d.getVar("UPDALTVARS", True) or "").split() + pkgs = (d.getVar("PACKAGES") or "").split() + vars = (d.getVar("UPDALTVARS") or "").split() for v in vars: ret.append(v + "_VARDEPS") @@ -123,23 +129,23 @@ populate_packages[vardeps] += "${UPDALTVARS} ${@gen_updatealternativesvars(d)}" # place. python perform_packagecopy_append () { # Check for deprecated usage... - pn = d.getVar('BPN', True) - if d.getVar('ALTERNATIVE_LINKS', True) != None: + pn = d.getVar('BPN') + if d.getVar('ALTERNATIVE_LINKS') != None: bb.fatal('%s: Use of ALTERNATIVE_LINKS/ALTERNATIVE_PATH/ALTERNATIVE_NAME is no longer supported, please convert to the updated syntax, see update-alternatives.bbclass for more info.' % pn) # Do actual update alternatives processing - pkgdest = d.getVar('PKGD', True) - for pkg in (d.getVar('PACKAGES', True) or "").split(): + pkgdest = d.getVar('PKGD') + for pkg in (d.getVar('PACKAGES') or "").split(): # If the src == dest, we know we need to rename the dest by appending ${BPN} link_rename = {} - for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split(): - alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True) + for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split(): + alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name) if not alt_link: - alt_link = "%s/%s" % (d.getVar('bindir', True), alt_name) + alt_link = "%s/%s" % (d.getVar('bindir'), alt_name) d.setVarFlag('ALTERNATIVE_LINK_NAME', alt_name, alt_link) - alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True) - alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link + alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name) + alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link # Sometimes alt_target is specified as relative to the link name. 
alt_target = os.path.join(os.path.dirname(alt_link), alt_target) @@ -189,23 +195,23 @@ python perform_packagecopy_append () { PACKAGESPLITFUNCS_prepend = "populate_packages_updatealternatives " python populate_packages_updatealternatives () { - pn = d.getVar('BPN', True) + pn = d.getVar('BPN') # Do actual update alternatives processing - pkgdest = d.getVar('PKGD', True) - for pkg in (d.getVar('PACKAGES', True) or "").split(): + pkgdest = d.getVar('PKGD') + for pkg in (d.getVar('PACKAGES') or "").split(): # Create post install/removal scripts alt_setup_links = "# Begin section update-alternatives\n" alt_remove_links = "# Begin section update-alternatives\n" - for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split(): - alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True) - alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True) - alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link + for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split(): + alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name) + alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name) + alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link # Sometimes alt_target is specified as relative to the link name. alt_target = os.path.join(os.path.dirname(alt_link), alt_target) - alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name, True) - alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg, True) or d.getVar('ALTERNATIVE_PRIORITY', True) + alt_priority = d.getVarFlag('ALTERNATIVE_PRIORITY_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_PRIORITY', alt_name) + alt_priority = alt_priority or d.getVar('ALTERNATIVE_PRIORITY_%s' % pkg) or d.getVar('ALTERNATIVE_PRIORITY') # This shouldn't trigger, as it should have been resolved earlier! 
if alt_link == alt_target: @@ -227,14 +233,14 @@ python populate_packages_updatealternatives () { if len(alt_setup_links.splitlines()) > 2: # RDEPENDS setup - provider = d.getVar('VIRTUAL-RUNTIME_update-alternatives', True) + provider = d.getVar('VIRTUAL-RUNTIME_update-alternatives') if provider: #bb.note('adding runtime requirement for update-alternatives for %s' % pkg) d.appendVar('RDEPENDS_%s' % pkg, ' ' + d.getVar('MLPREFIX', False) + provider) bb.note('adding update-alternatives calls to postinst/prerm for %s' % pkg) bb.note('%s' % alt_setup_links) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) or '#!/bin/sh\n' + postinst = d.getVar('pkg_postinst_%s' % pkg) or '#!/bin/sh\n' postinst = postinst.splitlines(True) try: index = postinst.index('# Begin section update-rc.d\n') @@ -245,7 +251,7 @@ python populate_packages_updatealternatives () { d.setVar('pkg_postinst_%s' % pkg, postinst) bb.note('%s' % alt_remove_links) - prerm = d.getVar('pkg_prerm_%s' % pkg, True) or '#!/bin/sh\n' + prerm = d.getVar('pkg_prerm_%s' % pkg) or '#!/bin/sh\n' prerm = prerm.splitlines(True) try: index = prerm.index('# End section update-rc.d\n') @@ -257,14 +263,14 @@ python populate_packages_updatealternatives () { } python package_do_filedeps_append () { - pn = d.getVar('BPN', True) - pkgdest = d.getVar('PKGDEST', True) + pn = d.getVar('BPN') + pkgdest = d.getVar('PKGDEST') for pkg in packages.split(): - for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg, True) or "").split(): - alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name, True) - alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name, True) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name, True) - alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg, True) or d.getVar('ALTERNATIVE_TARGET', True) or alt_link + for alt_name in (d.getVar('ALTERNATIVE_%s' % pkg) or "").split(): + alt_link = d.getVarFlag('ALTERNATIVE_LINK_NAME', alt_name) + alt_target = d.getVarFlag('ALTERNATIVE_TARGET_%s' % pkg, alt_name) or d.getVarFlag('ALTERNATIVE_TARGET', alt_name) + alt_target = alt_target or d.getVar('ALTERNATIVE_TARGET_%s' % pkg) or d.getVar('ALTERNATIVE_TARGET') or alt_link if alt_link == alt_target: bb.warn('%s: alt_link == alt_target: %s == %s' % (pn, alt_link, alt_target)) @@ -276,7 +282,7 @@ python package_do_filedeps_append () { # Add file provide trans_target = oe.package.file_translate(alt_target) d.appendVar('FILERPROVIDES_%s_%s' % (trans_target, pkg), " " + alt_link) - if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg, True) or ""): + if not trans_target in (d.getVar('FILERPROVIDESFLIST_%s' % pkg) or ""): d.appendVar('FILERPROVIDESFLIST_%s' % pkg, " " + trans_target) } diff --git a/import-layers/yocto-poky/meta/classes/update-rc.d.bbclass b/import-layers/yocto-poky/meta/classes/update-rc.d.bbclass index 18df2dc3f..9ba3dacca 100644 --- a/import-layers/yocto-poky/meta/classes/update-rc.d.bbclass +++ b/import-layers/yocto-poky/meta/classes/update-rc.d.bbclass @@ -1,6 +1,6 @@ UPDATERCPN ?= "${PN}" -DEPENDS_append_class-target = "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', ' update-rc.d-native update-rc.d initscripts', '', d)}" +DEPENDS_append_class-target = "${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', ' update-rc.d initscripts', '', d)}" UPDATERCD = "update-rc.d" UPDATERCD_class-cross = "" @@ -11,11 +11,20 @@ INITSCRIPT_PARAMS ?= "defaults" INIT_D_DIR = "${sysconfdir}/init.d" +def use_updatercd(d): + # If the distro supports both sysvinit and systemd, and the current recipe + # supports systemd, only 
call update-rc.d on rootfs creation or if systemd + # is not running. That's because systemctl enable/disable will already call + # update-rc.d if it detects initscripts. + if bb.utils.contains('DISTRO_FEATURES', 'systemd', True, False, d) and bb.data.inherits_class('systemd', d): + return '[ -n "$D" -o ! -d /run/systemd/system ]' + return 'true' + updatercd_preinst() { -if [ -z "$D" -a -f "${INIT_D_DIR}/${INITSCRIPT_NAME}" ]; then +if ${@use_updatercd(d)} && [ -z "$D" -a -f "${INIT_D_DIR}/${INITSCRIPT_NAME}" ]; then ${INIT_D_DIR}/${INITSCRIPT_NAME} stop || : fi -if type update-rc.d >/dev/null 2>/dev/null; then +if ${@use_updatercd(d)} && type update-rc.d >/dev/null 2>/dev/null; then if [ -n "$D" ]; then OPT="-f -r $D" else @@ -25,9 +34,11 @@ if type update-rc.d >/dev/null 2>/dev/null; then fi } +PACKAGE_WRITE_DEPS += "update-rc.d-native" + updatercd_postinst() { # Begin section update-rc.d -if type update-rc.d >/dev/null 2>/dev/null; then +if ${@use_updatercd(d)} && type update-rc.d >/dev/null 2>/dev/null; then if [ -n "$D" ]; then OPT="-r $D" else @@ -40,14 +51,14 @@ fi updatercd_prerm() { # Begin section update-rc.d -if [ -z "$D" -a -x "${INIT_D_DIR}/${INITSCRIPT_NAME}" ]; then +if ${@use_updatercd(d)} && [ -z "$D" -a -x "${INIT_D_DIR}/${INITSCRIPT_NAME}" ]; then ${INIT_D_DIR}/${INITSCRIPT_NAME} stop || : fi # End section update-rc.d } updatercd_postrm() { -if type update-rc.d >/dev/null 2>/dev/null; then +if ${@use_updatercd(d)} && type update-rc.d >/dev/null 2>/dev/null; then if [ -n "$D" ]; then OPT="-f -r $D" else @@ -84,64 +95,63 @@ python populate_packages_updatercd () { return statement = "grep -q -w '/etc/init.d/functions' %s" % path if subprocess.call(statement, shell=True) == 0: - mlprefix = d.getVar('MLPREFIX', True) or "" + mlprefix = d.getVar('MLPREFIX') or "" d.appendVar('RDEPENDS_' + pkg, ' %sinitscripts-functions' % (mlprefix)) def update_rcd_package(pkg): bb.debug(1, 'adding update-rc.d calls to preinst/postinst/prerm/postrm for %s' % pkg) localdata = bb.data.createCopy(d) - overrides = localdata.getVar("OVERRIDES", True) + overrides = localdata.getVar("OVERRIDES") localdata.setVar("OVERRIDES", "%s:%s" % (pkg, overrides)) - bb.data.update_data(localdata) update_rcd_auto_depend(pkg) - preinst = d.getVar('pkg_preinst_%s' % pkg, True) + preinst = d.getVar('pkg_preinst_%s' % pkg) if not preinst: preinst = '#!/bin/sh\n' - preinst += localdata.getVar('updatercd_preinst', True) + preinst += localdata.getVar('updatercd_preinst') d.setVar('pkg_preinst_%s' % pkg, preinst) - postinst = d.getVar('pkg_postinst_%s' % pkg, True) + postinst = d.getVar('pkg_postinst_%s' % pkg) if not postinst: postinst = '#!/bin/sh\n' postinst = postinst.splitlines(True) try: index = postinst.index('# End section update-alternatives\n') - postinst.insert(index + 1, localdata.getVar('updatercd_postinst', True)) + postinst.insert(index + 1, localdata.getVar('updatercd_postinst')) except ValueError: - postinst.append(localdata.getVar('updatercd_postinst', True)) + postinst.append(localdata.getVar('updatercd_postinst')) postinst = ''.join(postinst) d.setVar('pkg_postinst_%s' % pkg, postinst) - prerm = d.getVar('pkg_prerm_%s' % pkg, True) + prerm = d.getVar('pkg_prerm_%s' % pkg) if not prerm: prerm = '#!/bin/sh\n' prerm = prerm.splitlines(True) try: index = prerm.index('# Begin section update-alternatives\n') - prerm.insert(index, localdata.getVar('updatercd_prerm', True)) + prerm.insert(index, localdata.getVar('updatercd_prerm')) except ValueError: - prerm.append(localdata.getVar('updatercd_prerm', True)) + 
prerm.append(localdata.getVar('updatercd_prerm')) prerm = ''.join(prerm) d.setVar('pkg_prerm_%s' % pkg, prerm) - postrm = d.getVar('pkg_postrm_%s' % pkg, True) + postrm = d.getVar('pkg_postrm_%s' % pkg) if not postrm: postrm = '#!/bin/sh\n' - postrm += localdata.getVar('updatercd_postrm', True) + postrm += localdata.getVar('updatercd_postrm') d.setVar('pkg_postrm_%s' % pkg, postrm) d.appendVar('RRECOMMENDS_' + pkg, " ${MLPREFIX}${UPDATERCD}") # Check that this class isn't being inhibited (generally, by # systemd.bbclass) before doing any work. - if not d.getVar("INHIBIT_UPDATERCD_BBCLASS", True): - pkgs = d.getVar('INITSCRIPT_PACKAGES', True) + if not d.getVar("INHIBIT_UPDATERCD_BBCLASS"): + pkgs = d.getVar('INITSCRIPT_PACKAGES') if pkgs == None: - pkgs = d.getVar('UPDATERCPN', True) - packages = (d.getVar('PACKAGES', True) or "").split() + pkgs = d.getVar('UPDATERCPN') + packages = (d.getVar('PACKAGES') or "").split() if not pkgs in packages and packages != []: pkgs = packages[0] for pkg in pkgs.split(): diff --git a/import-layers/yocto-poky/meta/classes/upstream-version-is-even.bbclass b/import-layers/yocto-poky/meta/classes/upstream-version-is-even.bbclass index 89556ed7d..256c75242 100644 --- a/import-layers/yocto-poky/meta/classes/upstream-version-is-even.bbclass +++ b/import-layers/yocto-poky/meta/classes/upstream-version-is-even.bbclass @@ -2,4 +2,4 @@ # accepts even minor versions (i.e. 3.0.x, 3.2.x, 3.4.x, etc.) # This scheme is used by Gnome and a number of other projects # to signify stable releases vs development releases. -UPSTREAM_CHECK_REGEX = "(?P\d+\.(\d*[02468])+(\.\d+)+)" +UPSTREAM_CHECK_REGEX = "[^\d\.](?P\d+\.(\d*[02468])+(\.\d+)+)\.tar" diff --git a/import-layers/yocto-poky/meta/classes/useradd-staticids.bbclass b/import-layers/yocto-poky/meta/classes/useradd-staticids.bbclass index afb580aed..6ebf7600f 100644 --- a/import-layers/yocto-poky/meta/classes/useradd-staticids.bbclass +++ b/import-layers/yocto-poky/meta/classes/useradd-staticids.bbclass @@ -8,11 +8,11 @@ def update_useradd_static_config(d): class myArgumentParser( argparse.ArgumentParser ): def _print_message(self, message, file=None): - bb.warn("%s - %s: %s" % (d.getVar('PN', True), pkg, message)) + bb.warn("%s - %s: %s" % (d.getVar('PN'), pkg, message)) # This should never be called... def exit(self, status=0, message=None): - message = message or ("%s - %s: useradd.bbclass: Argument parsing exited" % (d.getVar('PN', True), pkg)) + message = message or ("%s - %s: useradd.bbclass: Argument parsing exited" % (d.getVar('PN'), pkg)) error(message) def error(self, message): @@ -52,14 +52,13 @@ def update_useradd_static_config(d): def handle_missing_id(id, type, pkg): # For backwards compatibility we accept "1" in addition to "error" - if d.getVar('USERADD_ERROR_DYNAMIC', True) == 'error' or d.getVar('USERADD_ERROR_DYNAMIC', True) == '1': - #bb.error("Skipping recipe %s, package %s which adds %sname %s does not have a static ID defined." % (d.getVar('PN', True), pkg, type, id)) - bb.fatal("%s - %s: %sname %s does not have a static ID defined." % (d.getVar('PN', True), pkg, type, id)) - elif d.getVar('USERADD_ERROR_DYNAMIC', True) == 'warn': - bb.warn("%s - %s: %sname %s does not have a static ID defined." % (d.getVar('PN', True), pkg, type, id)) + if d.getVar('USERADD_ERROR_DYNAMIC') == 'error' or d.getVar('USERADD_ERROR_DYNAMIC') == '1': + raise NotImplementedError("%s - %s: %sname %s does not have a static ID defined. Skipping it." 
% (d.getVar('PN'), pkg, type, id)) + elif d.getVar('USERADD_ERROR_DYNAMIC') == 'warn': + bb.warn("%s - %s: %sname %s does not have a static ID defined." % (d.getVar('PN'), pkg, type, id)) # We parse and rewrite the useradd components - def rewrite_useradd(params): + def rewrite_useradd(params, is_pkg): # The following comes from --help on useradd from shadow parser = myArgumentParser(prog='useradd') parser.add_argument("-b", "--base-dir", metavar="BASE_DIR", help="base directory for the home directory of the new account") @@ -78,6 +77,7 @@ def update_useradd_static_config(d): parser.add_argument("-N", "--no-user-group", dest="user_group", help="do not create a group with the same name as the user", action="store_const", const=False) parser.add_argument("-o", "--non-unique", help="allow to create users with duplicate (non-unique UID)", action="store_true") parser.add_argument("-p", "--password", metavar="PASSWORD", help="encrypted password of the new account") + parser.add_argument("-P", "--clear-password", metavar="CLEAR_PASSWORD", help="use this clear password for the new account") parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into") parser.add_argument("-r", "--system", help="create a system account", action="store_true") parser.add_argument("-s", "--shell", metavar="SHELL", help="login shell of the new account") @@ -90,8 +90,8 @@ def update_useradd_static_config(d): # paths are resolved via BBPATH def get_passwd_list(d): str = "" - bbpath = d.getVar('BBPATH', True) - passwd_tables = d.getVar('USERADD_UID_TABLES', True) + bbpath = d.getVar('BBPATH') + passwd_tables = d.getVar('USERADD_UID_TABLES') if not passwd_tables: passwd_tables = 'files/passwd' for conf_file in passwd_tables.split(): @@ -107,7 +107,7 @@ def update_useradd_static_config(d): try: uaargs = parser.parse_args(re.split('''[ \t]+(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', param)) except: - bb.fatal("%s: Unable to parse arguments for USERADD_PARAM_%s: '%s'" % (d.getVar('PN', True), pkg, param)) + bb.fatal("%s: Unable to parse arguments for USERADD_PARAM_%s: '%s'" % (d.getVar('PN'), pkg, param)) # Read all passwd files specified in USERADD_UID_TABLES or files/passwd # Use the standard passwd layout: @@ -124,14 +124,14 @@ def update_useradd_static_config(d): users = merge_files(get_passwd_list(d), 7) if uaargs.LOGIN not in users: - if not uaargs.uid or not uaargs.uid.isdigit() or not uaargs.gid: - handle_missing_id(uaargs.LOGIN, 'user', pkg) + handle_missing_id(uaargs.LOGIN, 'user', pkg) + newparams.append(param) continue field = users[uaargs.LOGIN] if uaargs.uid and field[2] and (uaargs.uid != field[2]): - bb.warn("%s: Changing username %s's uid from (%s) to (%s), verify configuration files!" % (d.getVar('PN', True), uaargs.LOGIN, uaargs.uid, field[2])) + bb.warn("%s: Changing username %s's uid from (%s) to (%s), verify configuration files!" % (d.getVar('PN'), uaargs.LOGIN, uaargs.uid, field[2])) uaargs.uid = field[2] or uaargs.uid # Determine the possible groupname @@ -141,9 +141,13 @@ def update_useradd_static_config(d): # So if the implicit username-group creation is on, then the implicit groupname (LOGIN) # is used, and we disable the user_group option. 
# - user_group = uaargs.user_group is None or uaargs.user_group is True - uaargs.groupname = uaargs.LOGIN if user_group else uaargs.gid - uaargs.groupid = field[3] or uaargs.gid or uaargs.groupname + if uaargs.gid: + uaargs.groupname = uaargs.gid + elif uaargs.user_group is not False: + uaargs.groupname = uaargs.LOGIN + else: + uaargs.groupname = 'users' + uaargs.groupid = field[3] or uaargs.groupname if uaargs.groupid and uaargs.gid != uaargs.groupid: newgroup = None @@ -159,14 +163,16 @@ def update_useradd_static_config(d): # We want to add a group, but we don't know it's name... so we can't add the group... # We have to assume the group has previously been added or we'll fail on the adduser... # Note: specifying the actual gid is very rare in OE, usually the group name is specified. - bb.warn("%s: Changing gid for login %s to %s, verify configuration files!" % (d.getVar('PN', True), uaargs.LOGIN, uaargs.groupid)) + bb.warn("%s: Changing gid for login %s to %s, verify configuration files!" % (d.getVar('PN'), uaargs.LOGIN, uaargs.groupid)) uaargs.gid = uaargs.groupid uaargs.user_group = None - if newgroup: - groupadd = d.getVar("GROUPADD_PARAM_%s" % pkg, True) + if newgroup and is_pkg: + groupadd = d.getVar("GROUPADD_PARAM_%s" % pkg) if groupadd: - d.setVar("GROUPADD_PARAM_%s" % pkg, "%s; %s" % (groupadd, newgroup)) + # Only add the group if not already specified + if not uaargs.groupname in groupadd: + d.setVar("GROUPADD_PARAM_%s" % pkg, "%s; %s" % (groupadd, newgroup)) else: d.setVar("GROUPADD_PARAM_%s" % pkg, newgroup) @@ -183,7 +189,7 @@ def update_useradd_static_config(d): newparam += ['', ' --base-dir %s' % uaargs.base_dir][uaargs.base_dir != None] newparam += ['', ' --comment %s' % uaargs.comment][uaargs.comment != None] newparam += ['', ' --home-dir %s' % uaargs.home_dir][uaargs.home_dir != None] - newparam += ['', ' --expiredata %s' % uaargs.expiredate][uaargs.expiredate != None] + newparam += ['', ' --expiredate %s' % uaargs.expiredate][uaargs.expiredate != None] newparam += ['', ' --inactive %s' % uaargs.inactive][uaargs.inactive != None] newparam += ['', ' --gid %s' % uaargs.gid][uaargs.gid != None] newparam += ['', ' --groups %s' % uaargs.groups][uaargs.groups != None] @@ -194,7 +200,10 @@ def update_useradd_static_config(d): newparam += ['', ' --no-create-home'][uaargs.create_home is False] newparam += ['', ' --no-user-group'][uaargs.user_group is False] newparam += ['', ' --non-unique'][uaargs.non_unique] - newparam += ['', ' --password %s' % uaargs.password][uaargs.password != None] + if uaargs.password != None: + newparam += ['', ' --password %s' % uaargs.password][uaargs.password != None] + elif uaargs.clear_password: + newparam += ['', ' --clear-password %s' % uaargs.clear_password][uaargs.clear_password != None] newparam += ['', ' --root %s' % uaargs.root][uaargs.root != None] newparam += ['', ' --system'][uaargs.system] newparam += ['', ' --shell %s' % uaargs.shell][uaargs.shell != None] @@ -207,7 +216,7 @@ def update_useradd_static_config(d): return ";".join(newparams).strip() # We parse and rewrite the groupadd components - def rewrite_groupadd(params): + def rewrite_groupadd(params, is_pkg): # The following comes from --help on groupadd from shadow parser = myArgumentParser(prog='groupadd') parser.add_argument("-f", "--force", help="exit successfully if the group already exists, and cancel -g if the GID is already used", action="store_true") @@ -215,6 +224,7 @@ def update_useradd_static_config(d): parser.add_argument("-K", "--key", metavar="KEY=VALUE", 
help="override /etc/login.defs defaults") parser.add_argument("-o", "--non-unique", help="allow to create groups with duplicate (non-unique) GID", action="store_true") parser.add_argument("-p", "--password", metavar="PASSWORD", help="use this encrypted password for the new group") + parser.add_argument("-P", "--clear-password", metavar="CLEAR_PASSWORD", help="use this clear password for the new group") parser.add_argument("-R", "--root", metavar="CHROOT_DIR", help="directory to chroot into") parser.add_argument("-r", "--system", help="create a system account", action="store_true") parser.add_argument("GROUP", help="Group name of the new group") @@ -224,8 +234,8 @@ def update_useradd_static_config(d): # paths are resolved via BBPATH def get_group_list(d): str = "" - bbpath = d.getVar('BBPATH', True) - group_tables = d.getVar('USERADD_GID_TABLES', True) + bbpath = d.getVar('BBPATH') + group_tables = d.getVar('USERADD_GID_TABLES') if not group_tables: group_tables = 'files/group' for conf_file in group_tables.split(): @@ -242,7 +252,7 @@ def update_useradd_static_config(d): # If we're processing multiple lines, we could have left over values here... gaargs = parser.parse_args(re.split('''[ \t]+(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', param)) except: - bb.fatal("%s: Unable to parse arguments for GROUPADD_PARAM_%s: '%s'" % (d.getVar('PN', True), pkg, param)) + bb.fatal("%s: Unable to parse arguments for GROUPADD_PARAM_%s: '%s'" % (d.getVar('PN'), pkg, param)) # Read all group files specified in USERADD_GID_TABLES or files/group # Use the standard group layout: @@ -257,15 +267,15 @@ def update_useradd_static_config(d): groups = merge_files(get_group_list(d), 4) if gaargs.GROUP not in groups: - if not gaargs.gid or not gaargs.gid.isdigit(): - handle_missing_id(gaargs.GROUP, 'group', pkg) + handle_missing_id(gaargs.GROUP, 'group', pkg) + newparams.append(param) continue field = groups[gaargs.GROUP] if field[2]: if gaargs.gid and (gaargs.gid != field[2]): - bb.warn("%s: Changing groupname %s's gid from (%s) to (%s), verify configuration files!" % (d.getVar('PN', True), gaargs.GROUP, gaargs.gid, field[2])) + bb.warn("%s: Changing groupname %s's gid from (%s) to (%s), verify configuration files!" % (d.getVar('PN'), gaargs.GROUP, gaargs.gid, field[2])) gaargs.gid = field[2] if not gaargs.gid or not gaargs.gid.isdigit(): @@ -276,7 +286,10 @@ def update_useradd_static_config(d): newparam += ['', ' --gid %s' % gaargs.gid][gaargs.gid != None] newparam += ['', ' --key %s' % gaargs.key][gaargs.key != None] newparam += ['', ' --non-unique'][gaargs.non_unique] - newparam += ['', ' --password %s' % gaargs.password][gaargs.password != None] + if gaargs.password != None: + newparam += ['', ' --password %s' % gaargs.password][gaargs.password != None] + elif gaargs.clear_password: + newparam += ['', ' --clear-password %s' % gaargs.clear_password][gaargs.clear_password != None] newparam += ['', ' --root %s' % gaargs.root][gaargs.root != None] newparam += ['', ' --system'][gaargs.system] newparam += ' %s' % gaargs.GROUP @@ -289,33 +302,58 @@ def update_useradd_static_config(d): # the files listed in USERADD_UID/GID_TABLES. We need to tell bitbake # about that explicitly to trigger re-parsing and thus re-execution of # this code when the files change. 
- bbpath = d.getVar('BBPATH', True) + bbpath = d.getVar('BBPATH') for varname, default in (('USERADD_UID_TABLES', 'files/passwd'), ('USERADD_GID_TABLES', 'files/group')): - tables = d.getVar(varname, True) + tables = d.getVar(varname) if not tables: tables = default for conf_file in tables.split(): bb.parse.mark_dependency(d, bb.utils.which(bbpath, conf_file)) # Load and process the users and groups, rewriting the adduser/addgroup params - useradd_packages = d.getVar('USERADD_PACKAGES', True) + useradd_packages = d.getVar('USERADD_PACKAGES') or "" for pkg in useradd_packages.split(): # Groupmems doesn't have anything we might want to change, so simply validating # is a bit of a waste -- only process useradd/groupadd - useradd_param = d.getVar('USERADD_PARAM_%s' % pkg, True) + useradd_param = d.getVar('USERADD_PARAM_%s' % pkg) if useradd_param: #bb.warn("Before: 'USERADD_PARAM_%s' - '%s'" % (pkg, useradd_param)) - d.setVar('USERADD_PARAM_%s' % pkg, rewrite_useradd(useradd_param)) - #bb.warn("After: 'USERADD_PARAM_%s' - '%s'" % (pkg, d.getVar('USERADD_PARAM_%s' % pkg, True))) + d.setVar('USERADD_PARAM_%s' % pkg, rewrite_useradd(useradd_param, True)) + #bb.warn("After: 'USERADD_PARAM_%s' - '%s'" % (pkg, d.getVar('USERADD_PARAM_%s' % pkg))) - groupadd_param = d.getVar('GROUPADD_PARAM_%s' % pkg, True) + groupadd_param = d.getVar('GROUPADD_PARAM_%s' % pkg) if groupadd_param: #bb.warn("Before: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, groupadd_param)) - d.setVar('GROUPADD_PARAM_%s' % pkg, rewrite_groupadd(groupadd_param)) - #bb.warn("After: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, d.getVar('GROUPADD_PARAM_%s' % pkg, True))) + d.setVar('GROUPADD_PARAM_%s' % pkg, rewrite_groupadd(groupadd_param, True)) + #bb.warn("After: 'GROUPADD_PARAM_%s' - '%s'" % (pkg, d.getVar('GROUPADD_PARAM_%s' % pkg))) + + # Load and process extra users and groups, rewriting only adduser/addgroup params + pkg = d.getVar('PN') + extrausers = d.getVar('EXTRA_USERS_PARAMS') or "" + + #bb.warn("Before: 'EXTRA_USERS_PARAMS' - '%s'" % (d.getVar('EXTRA_USERS_PARAMS'))) + new_extrausers = [] + for cmd in re.split('''[ \t]*;[ \t]*(?=(?:[^'"]|'[^']*'|"[^"]*")*$)''', extrausers): + cmd = cmd.strip() + if not cmd: + continue + + if re.match('''useradd (.*)''', cmd): + useradd_param = re.match('''useradd (.*)''', cmd).group(1) + useradd_param = rewrite_useradd(useradd_param, False) + cmd = 'useradd %s' % useradd_param + elif re.match('''groupadd (.*)''', cmd): + groupadd_param = re.match('''groupadd (.*)''', cmd).group(1) + groupadd_param = rewrite_groupadd(groupadd_param, False) + cmd = 'groupadd %s' % groupadd_param + + new_extrausers.append(cmd) + new_extrausers.append('') + d.setVar('EXTRA_USERS_PARAMS', ';'.join(new_extrausers)) + #bb.warn("After: 'EXTRA_USERS_PARAMS' - '%s'" % (d.getVar('EXTRA_USERS_PARAMS'))) python __anonymous() { @@ -323,7 +361,7 @@ python __anonymous() { and not bb.data.inherits_class('native', d): try: update_useradd_static_config(d) - except bb.build.FuncFailed as f: - bb.debug(1, "Skipping recipe %s: %s" % (d.getVar('PN', True), f)) + except NotImplementedError as f: + bb.debug(1, "Skipping recipe %s: %s" % (d.getVar('PN'), f)) raise bb.parse.SkipPackage(f) } diff --git a/import-layers/yocto-poky/meta/classes/useradd.bbclass b/import-layers/yocto-poky/meta/classes/useradd.bbclass index 3cff08e00..0f551b50f 100644 --- a/import-layers/yocto-poky/meta/classes/useradd.bbclass +++ b/import-layers/yocto-poky/meta/classes/useradd.bbclass @@ -3,7 +3,8 @@ inherit useradd_base # base-passwd-cross provides the default passwd and 
group files in the # target sysroot, and shadow -native and -sysroot provide the utilities # and support files needed to add and modify user and group accounts -DEPENDS_append_class-target = " base-files shadow-native shadow-sysroot shadow" +DEPENDS_append_class-target = " base-files shadow-native shadow-sysroot shadow base-passwd" +PACKAGE_WRITE_DEPS += "shadow-native" # This preinstall function can be run in four different contexts: # @@ -31,7 +32,7 @@ if test "x$D" != "x"; then fi # user/group lookups should match useradd/groupadd --root - export PSEUDO_PASSWD="$SYSROOT:${STAGING_DIR_NATIVE}" + export PSEUDO_PASSWD="$SYSROOT" fi # If we're not doing a special SSTATE/SYSROOT install @@ -96,15 +97,30 @@ fi } useradd_sysroot () { - # Pseudo may (do_install) or may not (do_populate_sysroot_setscene) be running + # Pseudo may (do_prepare_recipe_sysroot) or may not (do_populate_sysroot_setscene) be running # at this point so we're explicit about the environment so pseudo can load if # not already present. - export PSEUDO="${FAKEROOTENV} PSEUDO_LOCALSTATEDIR=${STAGING_DIR_TARGET}${localstatedir}/pseudo ${STAGING_DIR_NATIVE}${bindir_native}/pseudo" + export PSEUDO="${FAKEROOTENV} ${PSEUDO_SYSROOT}${bindir_native}/pseudo" # Explicitly set $D since it isn't set to anything - # before do_install + # before do_prepare_recipe_sysroot D=${STAGING_DIR_TARGET} + # base-passwd's postinst may not have run yet in which case we'll get called later, just exit. + # Beware that in some cases we might see the fake pseudo passwd here, in which case we also must + # exit. + if [ ! -f $D${sysconfdir}/passwd ] || + grep -q this-is-the-pseudo-passwd $D${sysconfdir}/passwd; then + exit 0 + fi + + # It is also possible we may be in a recipe which doesn't have useradd dependencies and hence the + # useradd/groupadd tools are unavailable. If there is no dependency, we assume we don't want to + # create users in the sysroot + if ! 
command -v useradd; then + exit 0 + fi + # Add groups and users defined for all recipe packages GROUPADD_PARAM="${@get_all_cmd_params(d, 'groupadd')}" USERADD_PARAM="${@get_all_cmd_params(d, 'useradd')}" @@ -116,49 +132,43 @@ useradd_sysroot () { useradd_preinst } -useradd_sysroot_sstate () { - if [ "${BB_CURRENTTASK}" = "package_setscene" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] - then - useradd_sysroot - fi -} - -userdel_sysroot_sstate () { -if test "x${STAGING_DIR_TARGET}" != "x"; then - if [ "${BB_CURRENTTASK}" = "clean" ]; then - export PSEUDO="${FAKEROOTENV} PSEUDO_LOCALSTATEDIR=${STAGING_DIR_TARGET}${localstatedir}/pseudo ${STAGING_DIR_NATIVE}${bindir_native}/pseudo" - OPT="--root ${STAGING_DIR_TARGET}" - - # Remove groups and users defined for package - GROUPADD_PARAM="${@get_all_cmd_params(d, 'groupadd')}" - USERADD_PARAM="${@get_all_cmd_params(d, 'useradd')}" - - user=`echo "$USERADD_PARAM" | cut -d ';' -f 1 | awk '{ print $NF }'` - remaining=`echo "$USERADD_PARAM" | cut -d ';' -f 2- -s | sed -e 's#[ \t]*$##'` - while test "x$user" != "x"; do - perform_userdel "${STAGING_DIR_TARGET}" "$OPT $user" - user=`echo "$remaining" | cut -d ';' -f 1 | awk '{ print $NF }'` - remaining=`echo "$remaining" | cut -d ';' -f 2- -s | sed -e 's#[ \t]*$##'` - done - - user=`echo "$GROUPADD_PARAM" | cut -d ';' -f 1 | awk '{ print $NF }'` - remaining=`echo "$GROUPADD_PARAM" | cut -d ';' -f 2- -s | sed -e 's#[ \t]*$##'` - while test "x$user" != "x"; do - perform_groupdel "${STAGING_DIR_TARGET}" "$OPT $user" - user=`echo "$remaining" | cut -d ';' -f 1 | awk '{ print $NF }'` - remaining=`echo "$remaining" | cut -d ';' -f 2- -s | sed -e 's#[ \t]*$##'` - done - - fi -fi +# The export of PSEUDO in useradd_sysroot() above contains references to +# ${COMPONENTS_DIR} and ${PSEUDO_LOCALSTATEDIR}. Additionally, the logging +# shell functions use ${LOGFIFO}. These need to be handled when restoring +# postinst-useradd-${PN} from the sstate cache. 
+EXTRA_STAGING_FIXMES += "COMPONENTS_DIR PSEUDO_LOCALSTATEDIR LOGFIFO" + +python useradd_sysroot_sstate () { + task = d.getVar("BB_CURRENTTASK") + if task == "package_setscene": + bb.build.exec_func("useradd_sysroot", d) + elif task == "prepare_recipe_sysroot": + # Used to update this recipe's own sysroot so the user/groups are available to do_install + scriptfile = d.expand("${RECIPE_SYSROOT}${bindir}/postinst-useradd-${PN}") + bb.utils.mkdirhier(os.path.dirname(scriptfile)) + with open(scriptfile, 'w') as script: + script.write("#!/bin/sh\n") + bb.data.emit_func("useradd_sysroot", script, d) + script.write("useradd_sysroot\n") + os.chmod(scriptfile, 0o755) + bb.build.exec_func("useradd_sysroot", d) + elif task == "populate_sysroot": + # Used when installed in dependent task sysroots + scriptfile = d.expand("${SYSROOT_DESTDIR}${bindir}/postinst-useradd-${PN}") + bb.utils.mkdirhier(os.path.dirname(scriptfile)) + with open(scriptfile, 'w') as script: + script.write("#!/bin/sh\n") + bb.data.emit_func("useradd_sysroot", script, d) + script.write("useradd_sysroot\n") + os.chmod(scriptfile, 0o755) } -SSTATECLEANFUNCS_append_class-target = " userdel_sysroot_sstate" - -do_install[prefuncs] += "${SYSROOTFUNC}" -SYSROOTFUNC_class-target = "useradd_sysroot" +do_prepare_recipe_sysroot[postfuncs] += "${SYSROOTFUNC}" +SYSROOTFUNC_class-target = "useradd_sysroot_sstate" SYSROOTFUNC = "" +SYSROOT_PREPROCESS_FUNCS += "${SYSROOTFUNC}" + SSTATEPREINSTFUNCS_append_class-target = " useradd_sysroot_sstate" do_package_setscene[depends] += "${USERADDSETSCENEDEPS}" @@ -168,13 +178,13 @@ USERADDSETSCENEDEPS = "" # Recipe parse-time sanity checks def update_useradd_after_parse(d): - useradd_packages = d.getVar('USERADD_PACKAGES', True) + useradd_packages = d.getVar('USERADD_PACKAGES') if not useradd_packages: bb.fatal("%s inherits useradd but doesn't set USERADD_PACKAGES" % d.getVar('FILE', False)) for pkg in useradd_packages.split(): - if not d.getVar('USERADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPADD_PARAM_%s' % pkg, True) and not d.getVar('GROUPMEMS_PARAM_%s' % pkg, True): + if not d.getVar('USERADD_PARAM_%s' % pkg) and not d.getVar('GROUPADD_PARAM_%s' % pkg) and not d.getVar('GROUPMEMS_PARAM_%s' % pkg): bb.fatal("%s inherits useradd but doesn't set USERADD_PARAM, GROUPADD_PARAM or GROUPMEMS_PARAM for package %s" % (d.getVar('FILE', False), pkg)) python __anonymous() { @@ -191,9 +201,9 @@ def get_all_cmd_params(d, cmd_type): param_type = cmd_type.upper() + "_PARAM_%s" params = [] - useradd_packages = d.getVar('USERADD_PACKAGES', True) or "" + useradd_packages = d.getVar('USERADD_PACKAGES') or "" for pkg in useradd_packages.split(): - param = d.getVar(param_type % pkg, True) + param = d.getVar(param_type % pkg) if param: params.append(param.rstrip(" ;")) @@ -209,20 +219,20 @@ fakeroot python populate_packages_prepend () { required to execute on the target. Not doing so may cause useradd preinst to be invoked twice, causing unwanted warnings. 
""" - preinst = d.getVar('pkg_preinst_%s' % pkg, True) or d.getVar('pkg_preinst', True) + preinst = d.getVar('pkg_preinst_%s' % pkg) or d.getVar('pkg_preinst') if not preinst: preinst = '#!/bin/sh\n' preinst += 'bbnote () {\n\techo "NOTE: $*"\n}\n' preinst += 'bbwarn () {\n\techo "WARNING: $*"\n}\n' preinst += 'bbfatal () {\n\techo "ERROR: $*"\n\texit 1\n}\n' - preinst += 'perform_groupadd () {\n%s}\n' % d.getVar('perform_groupadd', True) - preinst += 'perform_useradd () {\n%s}\n' % d.getVar('perform_useradd', True) - preinst += 'perform_groupmems () {\n%s}\n' % d.getVar('perform_groupmems', True) - preinst += d.getVar('useradd_preinst', True) + preinst += 'perform_groupadd () {\n%s}\n' % d.getVar('perform_groupadd') + preinst += 'perform_useradd () {\n%s}\n' % d.getVar('perform_useradd') + preinst += 'perform_groupmems () {\n%s}\n' % d.getVar('perform_groupmems') + preinst += d.getVar('useradd_preinst') d.setVar('pkg_preinst_%s' % pkg, preinst) # RDEPENDS setup - rdepends = d.getVar("RDEPENDS_%s" % pkg, True) or "" + rdepends = d.getVar("RDEPENDS_%s" % pkg) or "" rdepends += ' ' + d.getVar('MLPREFIX', False) + 'base-passwd' rdepends += ' ' + d.getVar('MLPREFIX', False) + 'shadow' # base-files is where the default /etc/skel is packaged @@ -233,7 +243,7 @@ fakeroot python populate_packages_prepend () { # to packages specified by USERADD_PACKAGES if not bb.data.inherits_class('nativesdk', d) \ and not bb.data.inherits_class('native', d): - useradd_packages = d.getVar('USERADD_PACKAGES', True) or "" + useradd_packages = d.getVar('USERADD_PACKAGES') or "" for pkg in useradd_packages.split(): update_useradd_package(pkg) } diff --git a/import-layers/yocto-poky/meta/classes/useradd_base.bbclass b/import-layers/yocto-poky/meta/classes/useradd_base.bbclass index ba87edc57..551c82c32 100644 --- a/import-layers/yocto-poky/meta/classes/useradd_base.bbclass +++ b/import-layers/yocto-poky/meta/classes/useradd_base.bbclass @@ -69,11 +69,21 @@ perform_groupdel () { bbnote "${PN}: Performing groupdel with [$opts]" local groupname=`echo "$opts" | awk '{ print $NF }'` local group_exists="`grep "^$groupname:" $rootdir/etc/group || true`" + if test "x$group_exists" != "x"; then - eval flock -x $rootdir${sysconfdir} -c \"$PSEUDO groupdel \$opts\" || true - group_exists="`grep "^$groupname:" $rootdir/etc/group || true`" - if test "x$group_exists" != "x"; then - bbfatal "${PN}: groupdel command did not succeed." + local awk_input='BEGIN {FS=":"}; $1=="'$groupname'" { print $3 }' + local groupid=`echo "$awk_input" | awk -f- $rootdir/etc/group` + local awk_check_users='BEGIN {FS=":"}; $4=="'$groupid'" {print $1}' + local other_users=`echo "$awk_check_users" | awk -f- $rootdir/etc/passwd` + + if test "x$other_users" = "x"; then + eval flock -x $rootdir${sysconfdir} -c \"$PSEUDO groupdel \$opts\" || true + group_exists="`grep "^$groupname:" $rootdir/etc/group || true`" + if test "x$group_exists" != "x"; then + bbfatal "${PN}: groupdel command did not succeed." 
+ fi + else + bbnote "${PN}: '$groupname' is primary group for users '$other_users', not removing it" fi else bbnote "${PN}: group $groupname doesn't exist, not removing it" diff --git a/import-layers/yocto-poky/meta/classes/utility-tasks.bbclass b/import-layers/yocto-poky/meta/classes/utility-tasks.bbclass index 7ba56e28a..587bfd4ab 100644 --- a/import-layers/yocto-poky/meta/classes/utility-tasks.bbclass +++ b/import-layers/yocto-poky/meta/classes/utility-tasks.bbclass @@ -4,12 +4,12 @@ python do_listtasks() { taskdescs = {} maxlen = 0 for e in d.keys(): - if d.getVarFlag(e, 'task', True): + if d.getVarFlag(e, 'task'): maxlen = max(maxlen, len(e)) if e.endswith('_setscene'): - desc = "%s (setscene version)" % (d.getVarFlag(e[:-9], 'doc', True) or '') + desc = "%s (setscene version)" % (d.getVarFlag(e[:-9], 'doc') or '') else: - desc = d.getVarFlag(e, 'doc', True) or '' + desc = d.getVarFlag(e, 'doc') or '' taskdescs[e] = desc tasks = sorted(taskdescs.keys()) @@ -28,18 +28,18 @@ python do_clean() { bb.note("Removing " + dir) oe.path.remove(dir) - dir = "%s.*" % bb.data.expand(d.getVar('STAMP', False), d) + dir = "%s.*" % d.getVar('STAMP') bb.note("Removing " + dir) oe.path.remove(dir) - for f in (d.getVar('CLEANFUNCS', True) or '').split(): + for f in (d.getVar('CLEANFUNCS') or '').split(): bb.build.exec_func(f, d) } addtask checkuri do_checkuri[nostamp] = "1" python do_checkuri() { - src_uri = (d.getVar('SRC_URI', True) or "").split() + src_uri = (d.getVar('SRC_URI') or "").split() if len(src_uri) == 0: return diff --git a/import-layers/yocto-poky/meta/classes/utils.bbclass b/import-layers/yocto-poky/meta/classes/utils.bbclass index dbb5e4cbb..96463ab32 100644 --- a/import-layers/yocto-poky/meta/classes/utils.bbclass +++ b/import-layers/yocto-poky/meta/classes/utils.bbclass @@ -41,9 +41,9 @@ def oe_filter_out(f, str, d): def machine_paths(d): """List any existing machine specific filespath directories""" - machine = d.getVar("MACHINE", True) - filespathpkg = d.getVar("FILESPATHPKG", True).split(":") - for basepath in d.getVar("FILESPATHBASE", True).split(":"): + machine = d.getVar("MACHINE") + filespathpkg = d.getVar("FILESPATHPKG").split(":") + for basepath in d.getVar("FILESPATHBASE").split(":"): for pkgpath in filespathpkg: machinepath = os.path.join(basepath, pkgpath, machine) if os.path.isdir(machinepath): @@ -52,7 +52,7 @@ def machine_paths(d): def is_machine_specific(d): """Determine whether the current recipe is machine specific""" machinepaths = set(machine_paths(d)) - srcuri = d.getVar("SRC_URI", True).split() + srcuri = d.getVar("SRC_URI").split() for url in srcuri: fetcher = bb.fetch2.Fetch([srcuri], d) if url.startswith("file://"): @@ -264,10 +264,17 @@ create_cmdline_wrapper () { mv $cmd $cmd.real cmdname=`basename $cmd` + dirname=`dirname $cmd` + cmdoptions=$@ + if [ "${base_prefix}" != "" ]; then + relpath=`python3 -c "import os; print(os.path.relpath('${D}${base_prefix}', '$dirname'))"` + cmdoptions=`echo $@ | sed -e "s:${base_prefix}:\\$realdir/$relpath:g"` + fi cat <$cmd #!/bin/bash realpath=\`readlink -fn \$0\` -exec -a \`dirname \$realpath\`/$cmdname \`dirname \$realpath\`/$cmdname.real $@ "\$@" +realdir=\`dirname \$realpath\` +exec -a \`dirname \$realpath\`/$cmdname \`dirname \$realpath\`/$cmdname.real $cmdoptions "\$@" END chmod +x $cmd } @@ -287,10 +294,17 @@ create_wrapper () { mv $cmd $cmd.real cmdname=`basename $cmd` + dirname=`dirname $cmd` + exportstring=$@ + if [ "${base_prefix}" != "" ]; then + relpath=`python3 -c "import os; 
print(os.path.relpath('${D}${base_prefix}', '$dirname'))"` + exportstring=`echo $@ | sed -e "s:${base_prefix}:\\$realdir/$relpath:g"` + fi cat <$cmd #!/bin/bash realpath=\`readlink -fn \$0\` -export $@ +realdir=\`dirname \$realpath\` +export $exportstring exec -a \`dirname \$realpath\`/$cmdname \`dirname \$realpath\`/$cmdname.real "\$@" END chmod +x $cmd @@ -307,7 +321,7 @@ hardlinkdir () { def check_app_exists(app, d): app = d.expand(app).strip() - path = d.getVar('PATH', d, True) + path = d.getVar('PATH') return bool(bb.utils.which(path, app)) def explode_deps(s): @@ -315,14 +329,14 @@ def explode_deps(s): def base_set_filespath(path, d): filespath = [] - extrapaths = (d.getVar("FILESEXTRAPATHS", True) or "") + extrapaths = (d.getVar("FILESEXTRAPATHS") or "") # Remove default flag which was used for checking extrapaths = extrapaths.replace("__default:", "") # Don't prepend empty strings to the path list if extrapaths != "": path = extrapaths.split(":") + path # The ":" ensures we have an 'empty' override - overrides = (":" + (d.getVar("FILESOVERRIDES", True) or "")).split(":") + overrides = (":" + (d.getVar("FILESOVERRIDES") or "")).split(":") overrides.reverse() for o in overrides: for p in path: @@ -333,7 +347,7 @@ def base_set_filespath(path, d): def extend_variants(d, var, extend, delim=':'): """Return a string of all bb class extend variants for the given extend""" variants = [] - whole = d.getVar(var, True) or "" + whole = d.getVar(var) or "" for ext in whole.split(): eext = ext.split(delim) if len(eext) > 1 and eext[0] == extend: @@ -341,7 +355,7 @@ def extend_variants(d, var, extend, delim=':'): return " ".join(variants) def multilib_pkg_extend(d, pkg): - variants = (d.getVar("MULTILIB_VARIANTS", True) or "").split() + variants = (d.getVar("MULTILIB_VARIANTS") or "").split() if not variants: return pkg pkgs = pkg @@ -349,24 +363,27 @@ def multilib_pkg_extend(d, pkg): pkgs = pkgs + " " + v + "-" + pkg return pkgs +def get_multilib_datastore(variant, d): + localdata = bb.data.createCopy(d) + overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + variant + localdata.setVar("OVERRIDES", overrides) + localdata.setVar("MLPREFIX", variant + "-") + return localdata + def all_multilib_tune_values(d, var, unique = True, need_split = True, delim = ' '): """Return a string of all ${var} in all multilib tune configuration""" values = [] - value = d.getVar(var, True) or "" + value = d.getVar(var) or "" if value != "": if need_split: for item in value.split(delim): values.append(item) else: values.append(value) - variants = d.getVar("MULTILIB_VARIANTS", True) or "" + variants = d.getVar("MULTILIB_VARIANTS") or "" for item in variants.split(): - localdata = bb.data.createCopy(d) - overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item - localdata.setVar("OVERRIDES", overrides) - localdata.setVar("MLPREFIX", item + "-") - bb.data.update_data(localdata) - value = localdata.getVar(var, True) or "" + localdata = get_multilib_datastore(item, d) + value = localdata.getVar(var) or "" if value != "": if need_split: for item in value.split(delim): @@ -402,21 +419,16 @@ def all_multilib_tune_list(vars, d): newoverrides.append(o) localdata.setVar("OVERRIDES", ":".join(newoverrides)) localdata.setVar("MLPREFIX", "") - origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL", True) + origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL") if origdefault: localdata.setVar("DEFAULTTUNE", origdefault) - bb.data.update_data(localdata) values['ml'] = 
[''] for v in vars: - values[v].append(localdata.getVar(v, True)) - variants = d.getVar("MULTILIB_VARIANTS", True) or "" + values[v].append(localdata.getVar(v)) + variants = d.getVar("MULTILIB_VARIANTS") or "" for item in variants.split(): - localdata = bb.data.createCopy(d) - overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item - localdata.setVar("OVERRIDES", overrides) - localdata.setVar("MLPREFIX", item + "-") - bb.data.update_data(localdata) - values[v].append(localdata.getVar(v, True)) + localdata = get_multilib_datastore(item, d) + values[v].append(localdata.getVar(v)) values['ml'].append(item) return values diff --git a/import-layers/yocto-poky/meta/classes/waf.bbclass b/import-layers/yocto-poky/meta/classes/waf.bbclass index 5e55833ca..c4698e910 100644 --- a/import-layers/yocto-poky/meta/classes/waf.bbclass +++ b/import-layers/yocto-poky/meta/classes/waf.bbclass @@ -1,8 +1,10 @@ # avoids build breaks when using no-static-libs.inc DISABLE_STATIC = "" +EXTRA_OECONF_append = " ${PACKAGECONFIG_CONFARGS}" + def get_waf_parallel_make(d): - pm = d.getVar('PARALLEL_MAKE', True) + pm = d.getVar('PARALLEL_MAKE') if pm: # look for '-j' and throw other options (e.g. '-l') away # because they might have different meaning in bjam diff --git a/import-layers/yocto-poky/meta/conf/abi_version.conf b/import-layers/yocto-poky/meta/conf/abi_version.conf index a82968778..496488010 100644 --- a/import-layers/yocto-poky/meta/conf/abi_version.conf +++ b/import-layers/yocto-poky/meta/conf/abi_version.conf @@ -4,4 +4,4 @@ # that breaks the format and have been previously discussed on the mailing list # with general agreement from the core team. # -OELAYOUT_ABI = "11" +OELAYOUT_ABI = "12" diff --git a/import-layers/yocto-poky/meta/conf/bitbake.conf b/import-layers/yocto-poky/meta/conf/bitbake.conf index 6e767b1bc..2dac3a148 100644 --- a/import-layers/yocto-poky/meta/conf/bitbake.conf +++ b/import-layers/yocto-poky/meta/conf/bitbake.conf @@ -123,7 +123,7 @@ SDKUSE_NLS ??= "yes" TARGET_ARCH = "${TUNE_ARCH}" TARGET_OS = "linux${LIBCEXTENSION}${ABIEXTENSION}" TARGET_VENDOR = "-oe" -TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS', True), ''][d.getVar('TARGET_OS', True) == ('' or 'custom')]}" +TARGET_SYS = "${TARGET_ARCH}${TARGET_VENDOR}${@['-' + d.getVar('TARGET_OS'), ''][d.getVar('TARGET_OS') == ('' or 'custom')]}" TARGET_PREFIX = "${TARGET_SYS}-" TARGET_CC_ARCH = "${TUNE_CCARGS}" TARGET_LD_ARCH = "${TUNE_LDARGS}" @@ -132,7 +132,7 @@ TARGET_AS_ARCH = "${TUNE_ASARGS}" SDKMACHINE ??= "x86_64" SDK_OS = "${BUILD_OS}" SDK_VENDOR = "-oesdk" -SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS', True), ''][d.getVar('SDK_OS', True) == ('' or 'custom')]}" +SDK_SYS = "${SDK_ARCH}${SDK_VENDOR}${@['-' + d.getVar('SDK_OS'), ''][d.getVar('SDK_OS') == ('' or 'custom')]}" SDK_PREFIX = "${SDK_SYS}-" SDK_CC_ARCH = "${BUILD_CC_ARCH}" SDKPKGSUFFIX = "nativesdk" @@ -142,7 +142,7 @@ SDK_AS_ARCH = "${BUILD_AS_ARCH}" TUNE_PKGARCH ??= "" PACKAGE_ARCH ??= "${TUNE_PKGARCH}" -MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH', True), d.getVar('MACHINE', True)][bool(d.getVar('MACHINE', True))].replace('-', '_')}" +MACHINE_ARCH = "${@[d.getVar('TUNE_PKGARCH'), d.getVar('MACHINE')][bool(d.getVar('MACHINE'))].replace('-', '_')}" PACKAGE_EXTRA_ARCHS ??= "${PACKAGE_EXTRA_ARCHS_tune-${DEFAULTTUNE}}" PACKAGE_ARCHS = "all any noarch ${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}" # MACHINE_ARCH shouldn't be included here as a variable dependency @@ -150,7 +150,6 @@ PACKAGE_ARCHS = "all any noarch 
${PACKAGE_EXTRA_ARCHS} ${MACHINE_ARCH}" PACKAGE_ARCHS[vardepsexclude] = "MACHINE_ARCH" MULTIMACH_TARGET_SYS = "${PACKAGE_ARCH}${TARGET_VENDOR}-${TARGET_OS}" -MULTIMACH_HOST_SYS = "${PACKAGE_ARCH}${HOST_VENDOR}-${HOST_OS}" ################################################################## # Date/time variables. @@ -197,24 +196,24 @@ PV = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[1] or '1.0 PR = "${@bb.parse.BBHandler.vars_from_file(d.getVar('FILE', False),d)[2] or 'r0'}" PE = "" PF = "${PN}-${EXTENDPE}${PV}-${PR}" -EXTENDPE = "${@['','${PE}_'][int(d.getVar('PE', True) or 0) > 0]}" +EXTENDPE = "${@['','${PE}_'][int(d.getVar('PE') or 0) > 0]}" P = "${PN}-${PV}" PRAUTO = "" -EXTENDPRAUTO = "${@['.${PRAUTO}', ''][not d.getVar('PRAUTO', True)]}" +EXTENDPRAUTO = "${@['.${PRAUTO}', ''][not d.getVar('PRAUTO')]}" PRAUTOINX = "${PF}" PKGV ?= "${PV}" PKGR ?= "${PR}${EXTENDPRAUTO}" -PKGE ?= "${@['','${PE}'][int(d.getVar('PE', True) or 0) > 0]}" -EXTENDPKGEVER = "${@['','${PKGE}:'][d.getVar('PKGE', True).strip() != '']}" +PKGE ?= "${@['','${PE}'][int(d.getVar('PE') or 0) > 0]}" +EXTENDPKGEVER = "${@['','${PKGE}:'][d.getVar('PKGE').strip() != '']}" EXTENDPKGV ?= "${EXTENDPKGEVER}${PKGV}-${PKGR}" # Base package name # Automatically derives "foo" from "foo-native", "foo-cross" or "foo-initial" # otherwise it is the same as PN and P SPECIAL_PKGSUFFIX = "-native -cross -initial -intermediate -crosssdk -cross-canadian" -BPN = "${@base_prune_suffix(d.getVar('PN', True), d.getVar('SPECIAL_PKGSUFFIX', True).split(), d)}" +BPN = "${@base_prune_suffix(d.getVar('PN'), d.getVar('SPECIAL_PKGSUFFIX').split(), d)}" BP = "${BPN}-${PV}" # Package info. @@ -330,7 +329,7 @@ FILESEXTRAPATHS ?= "__default:" ################################################################## TMPDIR ?= "${TOPDIR}/tmp" -CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE', True))][bool(d.getVar('MACHINE', True))]}${@['', '/' + str(d.getVar('SDKMACHINE', True))][bool(d.getVar('SDKMACHINE', True))]}" +CACHE = "${TMPDIR}/cache${@['', '/' + str(d.getVar('MACHINE'))][bool(d.getVar('MACHINE'))]}${@['', '/' + str(d.getVar('SDKMACHINE'))][bool(d.getVar('SDKMACHINE'))]}" # The persistent cache should be shared by all builds PERSISTENT_DIR = "${TOPDIR}/cache" LOG_DIR = "${TMPDIR}/log" @@ -352,8 +351,11 @@ S = "${WORKDIR}/${BP}" B = "${S}" STAGING_DIR = "${TMPDIR}/sysroots" +COMPONENTS_DIR = "${STAGING_DIR}-components" +RECIPE_SYSROOT = "${WORKDIR}/recipe-sysroot" +RECIPE_SYSROOT_NATIVE = "${WORKDIR}/recipe-sysroot-native" -STAGING_DIR_NATIVE = "${STAGING_DIR}/${BUILD_SYS}" +STAGING_DIR_NATIVE = "${RECIPE_SYSROOT_NATIVE}" STAGING_BINDIR_NATIVE = "${STAGING_DIR_NATIVE}${bindir_native}" STAGING_BINDIR_CROSS = "${STAGING_BINDIR}/crossscripts" STAGING_BINDIR_TOOLCHAIN = "${STAGING_DIR_NATIVE}${bindir_native}/${TARGET_ARCH}${TARGET_VENDOR}-${TARGET_OS}" @@ -365,7 +367,7 @@ STAGING_INCDIR_NATIVE = "${STAGING_DIR_NATIVE}${includedir_native}" STAGING_ETCDIR_NATIVE = "${STAGING_DIR_NATIVE}${sysconfdir_native}" STAGING_DATADIR_NATIVE = "${STAGING_DIR_NATIVE}${datadir_native}" -STAGING_DIR_HOST = "${STAGING_DIR}/${MACHINE}" +STAGING_DIR_HOST = "${RECIPE_SYSROOT}" STAGING_BINDIR = "${STAGING_DIR_HOST}${bindir}" STAGING_LIBDIR = "${STAGING_DIR_HOST}${libdir}" STAGING_LIBEXECDIR = "${STAGING_DIR_HOST}${libexecdir}" @@ -376,8 +378,7 @@ STAGING_EXECPREFIXDIR = "${STAGING_DIR_HOST}${exec_prefix}" STAGING_LOADER_DIR = "${STAGING_DIR_HOST}/loader" STAGING_FIRMWARE_DIR = "${STAGING_DIR_HOST}/firmware" -STAGING_DIR_TARGET = 
"${STAGING_DIR}/${MACHINE}" -STAGING_DIR_TCBOOTSTRAP = "${STAGING_DIR_TARGET}-tcbootstrap" +STAGING_DIR_TARGET = "${RECIPE_SYSROOT}" # Setting DEPLOY_DIR outside of TMPDIR is helpful, when you are using # packaged staging and/or multimachine. @@ -389,7 +390,7 @@ DEPLOY_DIR_DEB = "${DEPLOY_DIR}/deb" DEPLOY_DIR_IMAGE ?= "${DEPLOY_DIR}/images/${MACHINE}" DEPLOY_DIR_TOOLS = "${DEPLOY_DIR}/tools" -PKGDATA_DIR = "${STAGING_DIR_HOST}/pkgdata" +PKGDATA_DIR = "${TMPDIR}/pkgdata/${MACHINE}" ################################################################## # SDK variables. @@ -452,10 +453,37 @@ export PATH # Build utility info. ################################################################## +# Directory where host tools are copied +HOSTTOOLS_DIR = "${TMPDIR}/hosttools" + +# Tools needed to run builds with OE-Core +HOSTTOOLS += " \ + [ ar as awk basename bash bzip2 cat chgrp chmod chown chrpath cmp cp cpio \ + cpp cut date dd diff diffstat dirname du echo egrep env expand expr false \ + fgrep file find flock g++ gawk gcc getconf getopt git grep gunzip gzip \ + head hostname install ld ldd ln ls make makeinfo md5sum mkdir mknod \ + mktemp mv nm objcopy objdump od patch perl pod2man pr printf pwd python python2 \ + python2.7 python3 ranlib readelf readlink rm rmdir rpcgen sed sh sha256sum \ + sleep sort split stat strings strip tail tar tee test touch tr true uname \ + uniq wc wget which xargs \ +" + +# Tools needed to run testimage runtime image testing +HOSTTOOLS += "ip ping ps scp ssh stty" + +# Link to these if present +HOSTTOOLS_NONFATAL += "aws ccache gcc-ar gpg ld.bfd ld.gold nc sftp socat sudo" + +# Temporary add few more detected in bitbake world +HOSTTOOLS_NONFATAL += "join nl size yes zcat" + +# Used by bzr fetcher +HOSTTOOLS_NONFATAL += "bzr" + CCACHE ??= "" # Disable ccache explicitly if CCACHE is null since gcc may be a symlink # of ccache some distributions (e.g., Fedora 17). -export CCACHE_DISABLE ??= "${@[0,1][d.getVar('CCACHE', True) == '']}" +export CCACHE_DISABLE ??= "${@[0,1][d.getVar('CCACHE') == '']}" # ccache < 3.1.10 will create CCACHE_DIR on startup even if disabled, and # autogen sets HOME=/dev/null so in certain situations builds can fail. # Explicitly export CCACHE_DIR until we can assume ccache >3.1.10 on the host. 
@@ -533,18 +561,9 @@ BUILDSDK_LDFLAGS = "-Wl,-O1" LINKER_HASH_STYLE ??= "gnu" # mips does not support GNU hash style therefore we override -LINKER_HASH_STYLE_mips = "sysv" -LINKER_HASH_STYLE_mipsel = "sysv" -LINKER_HASH_STYLE_mips64 = "sysv" -LINKER_HASH_STYLE_mips64el = "sysv" -LINKER_HASH_STYLE_mips64n32 = "sysv" -LINKER_HASH_STYLE_mips64eln32 = "sysv" -LINKER_HASH_STYLE_mipsisa32r6 = "sysv" -LINKER_HASH_STYLE_mipsisa32r6el = "sysv" -LINKER_HASH_STYLE_mipsisa64r6 = "sysv" -LINKER_HASH_STYLE_mipsisa64r6el = "sysv" - -TARGET_LINK_HASH_STYLE ?= "${@['-Wl,--hash-style=gnu',''][d.getVar('LINKER_HASH_STYLE', True) != 'gnu']}" +LINKER_HASH_STYLE_mipsarch = "sysv" + +TARGET_LINK_HASH_STYLE ?= "${@['-Wl,--hash-style=gnu',''][d.getVar('LINKER_HASH_STYLE') != 'gnu']}" export LDFLAGS = "${TARGET_LDFLAGS}" export TARGET_LDFLAGS = "-Wl,-O1 ${TARGET_LINK_HASH_STYLE}" @@ -570,7 +589,7 @@ DEBUG_FLAGS ?= "-g -feliminate-unused-debug-types ${DEBUG_PREFIX_MAP}" # Disabled until the option works properly -feliminate-dwarf2-dups FULL_OPTIMIZATION = "-O2 -pipe ${DEBUG_FLAGS}" DEBUG_OPTIMIZATION = "-O -fno-omit-frame-pointer ${DEBUG_FLAGS} -pipe" -SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD', True) == '1'], True)}" +SELECTED_OPTIMIZATION = "${@d.getVar(['FULL_OPTIMIZATION', 'DEBUG_OPTIMIZATION'][d.getVar('DEBUG_BUILD') == '1'])}" SELECTED_OPTIMIZATION[vardeps] += "FULL_OPTIMIZATION DEBUG_OPTIMIZATION" BUILD_OPTIMIZATION = "-O2 -pipe" @@ -592,7 +611,7 @@ GNOME_MIRROR = "http://ftp.gnome.org/pub/GNOME/sources" GNU_MIRROR = "http://ftp.gnu.org/gnu" GNUPG_MIRROR = "https://www.gnupg.org/ftp/gcrypt" GPE_MIRROR = "http://gpe.linuxtogo.org/download/source" -KERNELORG_MIRROR = "http://kernel.org/pub" +KERNELORG_MIRROR = "http://cdn.kernel.org/pub" SOURCEFORGE_MIRROR = "http://downloads.sourceforge.net" XLIBS_MIRROR = "http://xlibs.freedesktop.org/release" XORG_MIRROR = "http://xorg.freedesktop.org/releases" @@ -639,14 +658,15 @@ SRC_URI = "" # Use pseudo as the fakeroot implementation PSEUDO_LOCALSTATEDIR ?= "${WORKDIR}/pseudo/" -PSEUDO_PASSWD ?= "${STAGING_DIR_TARGET}:${STAGING_DIR_NATIVE}" +PSEUDO_PASSWD ?= "${STAGING_DIR_TARGET}:${PSEUDO_SYSROOT}" +PSEUDO_SYSROOT = "${COMPONENTS_DIR}/${BUILD_ARCH}/pseudo-native" export PSEUDO_DISABLED = "1" #export PSEUDO_PREFIX = "${STAGING_DIR_NATIVE}${prefix_native}" #export PSEUDO_BINDIR = "${STAGING_DIR_NATIVE}${bindir_native}" #export PSEUDO_LIBDIR = "${STAGING_DIR_NATIVE}$PSEUDOBINDIR/../lib/pseudo/lib -FAKEROOTBASEENV = "PSEUDO_BINDIR=${STAGING_BINDIR_NATIVE} PSEUDO_LIBDIR=${STAGING_BINDIR_NATIVE}/../lib/pseudo/lib PSEUDO_PREFIX=${STAGING_BINDIR_NATIVE}/../../ PSEUDO_DISABLED=1" -FAKEROOTCMD = "${STAGING_BINDIR_NATIVE}/pseudo" -FAKEROOTENV = "PSEUDO_PREFIX=${STAGING_DIR_NATIVE}${prefix_native} PSEUDO_LOCALSTATEDIR=${PSEUDO_LOCALSTATEDIR} PSEUDO_PASSWD=${PSEUDO_PASSWD} PSEUDO_NOSYMLINKEXP=1 PSEUDO_DISABLED=0" +FAKEROOTBASEENV = "PSEUDO_BINDIR=${PSEUDO_SYSROOT}${bindir_native} PSEUDO_LIBDIR=${PSEUDO_SYSROOT}${prefix_native}/lib/pseudo/lib PSEUDO_PREFIX=${PSEUDO_SYSROOT}${prefix_native} PSEUDO_DISABLED=1" +FAKEROOTCMD = "${PSEUDO_SYSROOT}${bindir_native}/pseudo" +FAKEROOTENV = "PSEUDO_PREFIX=${PSEUDO_SYSROOT}${prefix_native} PSEUDO_LOCALSTATEDIR=${PSEUDO_LOCALSTATEDIR} PSEUDO_PASSWD=${PSEUDO_PASSWD} PSEUDO_NOSYMLINKEXP=1 PSEUDO_DISABLED=0" FAKEROOTNOENV = "PSEUDO_UNLOAD=1" FAKEROOTDIRS = "${PSEUDO_LOCALSTATEDIR}" PREFERRED_PROVIDER_virtual/fakeroot-native ?= "pseudo-native" @@ -696,7 +716,7 @@ DISTRO_NAME ??= 
"OpenEmbedded" OVERRIDES = "${TARGET_OS}:${TRANSLATED_TARGET_ARCH}:build-${BUILD_OS}:pn-${PN}:${MACHINEOVERRIDES}:${DISTROOVERRIDES}:${CLASSOVERRIDE}:forcevariable" OVERRIDES[vardepsexclude] = "MACHINEOVERRIDES" CLASSOVERRIDE ?= "class-target" -DISTROOVERRIDES ?= "${@d.getVar('DISTRO', True) or ''}" +DISTROOVERRIDES ?= "${@d.getVar('DISTRO') or ''}" MACHINEOVERRIDES ?= "${MACHINE}" MACHINEOVERRIDES[vardepsexclude] = "MACHINE" @@ -710,11 +730,11 @@ require conf/abi_version.conf include conf/site.conf include conf/auto.conf include conf/local.conf -include conf/multiconfig/${BB_CURRENT_MC}.conf +require conf/multiconfig/${BB_CURRENT_MC}.conf include conf/build/${BUILD_SYS}.conf -include conf/target/${TARGET_SYS}.conf include conf/machine/${MACHINE}.conf include conf/machine-sdk/${SDKMACHINE}.conf +include conf/target/${TARGET_SYS}.conf include conf/distro/${DISTRO}.conf include conf/distro/defaultsetup.conf include conf/documentation.conf @@ -776,14 +796,24 @@ MACHINE_ESSENTIAL_EXTRA_RRECOMMENDS ?= "" EXTRA_IMAGE_FEATURES ??= "" IMAGE_FEATURES += "${EXTRA_IMAGE_FEATURES}" -DISTRO_FEATURES_BACKFILL = "pulseaudio sysvinit bluez5 gobject-introspection-data" +# Native distro features (will always be used for -native, even if they +# are not enabled for target) +DISTRO_FEATURES_NATIVE ?= "x11" +DISTRO_FEATURES_NATIVESDK ?= "x11 libc-charsets libc-locales libc-locale-code" + +# Normally target distro features will not be applied to native builds: +# Native distro features on this list will use the target feature value +DISTRO_FEATURES_FILTER_NATIVE ?= "api-documentation" +DISTRO_FEATURES_FILTER_NATIVESDK ?= "api-documentation" + +DISTRO_FEATURES_BACKFILL = "pulseaudio sysvinit bluez5 gobject-introspection-data ldconfig" MACHINE_FEATURES_BACKFILL = "rtc qemu-usermode" COMBINED_FEATURES = "${@oe.utils.set_intersect('DISTRO_FEATURES', 'MACHINE_FEATURES', d)}" COMBINED_FEATURES[vardeps] += "DISTRO_FEATURES MACHINE_FEATURES" SERIAL_CONSOLE ??= "" -SERIAL_CONSOLES ??= "${@d.getVar('SERIAL_CONSOLE', True).replace(' ', ';')}" +SERIAL_CONSOLES ??= "${@d.getVar('SERIAL_CONSOLE').replace(' ', ';')}" NO_RECOMMENDATIONS ?= "" BAD_RECOMMENDATIONS ?= "" @@ -807,7 +837,7 @@ DISTRO[unexport] = "1" SHELL[unexport] = "1" # Used by canadian-cross to handle string conversions on TARGET_ARCH where needed -TRANSLATED_TARGET_ARCH ??= "${@d.getVar('TARGET_ARCH', True).replace("_", "-")}" +TRANSLATED_TARGET_ARCH ??= "${@d.getVar('TARGET_ARCH').replace("_", "-")}" # Complete output from bitbake BB_CONSOLELOG ?= "${LOG_DIR}/cooker/${MACHINE}/${DATETIME}.log" @@ -820,7 +850,7 @@ BB_HASHBASE_WHITELIST ?= "TMPDIR FILE PATH PWD BB_TASKHASH BBPATH BBSERVER DL_DI PRSERV_DUMPDIR PRSERV_DUMPFILE PRSERV_LOCKDOWN PARALLEL_MAKE \ CCACHE_DIR EXTERNAL_TOOLCHAIN CCACHE CCACHE_DISABLE LICENSE_PATH SDKPKGSUFFIX \ WARN_QA ERROR_QA WORKDIR STAMPCLEAN PKGDATA_DIR BUILD_ARCH SSTATE_PKGARCH \ - BB_WORKERCONTEXT" + BB_WORKERCONTEXT BB_LIMITEDDEPS extend_recipe_sysroot DEPLOY_DIR" BB_HASHCONFIG_WHITELIST ?= "${BB_HASHBASE_WHITELIST} DATE TIME SSH_AGENT_PID \ SSH_AUTH_SOCK PSEUDO_BUILD BB_ENV_EXTRAWHITE DISABLE_SANITY_CHECKS \ PARALLEL_MAKE BB_NUMBER_THREADS BB_ORIGENV BB_INVALIDCONF BBINCLUDED \ diff --git a/import-layers/yocto-poky/meta/conf/distro/defaultsetup.conf b/import-layers/yocto-poky/meta/conf/distro/defaultsetup.conf index aa21345a1..ca2f9178d 100644 --- a/import-layers/yocto-poky/meta/conf/distro/defaultsetup.conf +++ b/import-layers/yocto-poky/meta/conf/distro/defaultsetup.conf @@ -15,10 +15,10 @@ require 
conf/distro/include/uninative-flags.inc TCLIBCAPPEND ?= "-${TCLIBC}" TMPDIR .= "${TCLIBCAPPEND}" -CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE', True))][bool(d.getVar('MACHINE', True))]}${@['', '/' + str(d.getVar('SDKMACHINE', True))][bool(d.getVar('SDKMACHINE', True))]}" +CACHE = "${TMPDIR}/cache/${TCMODE}-${TCLIBC}${@['', '/' + str(d.getVar('MACHINE'))][bool(d.getVar('MACHINE'))]}${@['', '/' + str(d.getVar('SDKMACHINE'))][bool(d.getVar('SDKMACHINE'))]}" USER_CLASSES ?= "" PACKAGE_CLASSES ?= "package_ipk" INHERIT_BLACKLIST = "blacklist" -INHERIT_DISTRO ?= "debian devshell sstate license" +INHERIT_DISTRO ?= "debian devshell sstate license remove-libtool" INHERIT += "${PACKAGE_CLASSES} ${USER_CLASSES} ${INHERIT_DISTRO} ${INHERIT_BLACKLIST}" diff --git a/import-layers/yocto-poky/meta/conf/distro/include/default-distrovars.inc b/import-layers/yocto-poky/meta/conf/distro/include/default-distrovars.inc index f7ed943c9..08542a743 100644 --- a/import-layers/yocto-poky/meta/conf/distro/include/default-distrovars.inc +++ b/import-layers/yocto-poky/meta/conf/distro/include/default-distrovars.inc @@ -9,7 +9,7 @@ ENABLE_BINARY_LOCALE_GENERATION ?= "1" LOCALE_UTF8_ONLY ?= "0" LOCALE_UTF8_IS_DEFAULT ?= "1" -DISTRO_FEATURES_DEFAULT ?= "alsa argp bluetooth ext2 irda largefile pcmcia usbgadget usbhost wifi xattr nfs zeroconf pci 3g nfc x11" +DISTRO_FEATURES_DEFAULT ?= "acl alsa argp bluetooth ext2 irda largefile pcmcia usbgadget usbhost wifi xattr nfs zeroconf pci 3g nfc x11" DISTRO_FEATURES_LIBC_DEFAULT ?= "ipv4 ipv6 libc-backtrace libc-big-macros libc-bsd libc-cxx-tests libc-catgets libc-charsets libc-crypt \ libc-crypt-ufc libc-db-aliases libc-envz libc-fcvt libc-fmtmsg libc-fstab libc-ftraverse \ libc-getlogin libc-idn libc-inet-anl libc-libm libc-locales libc-locale-code \ diff --git a/import-layers/yocto-poky/meta/conf/distro/include/default-versions.inc b/import-layers/yocto-poky/meta/conf/distro/include/default-versions.inc index cc8c533d4..d976508ff 100644 --- a/import-layers/yocto-poky/meta/conf/distro/include/default-versions.inc +++ b/import-layers/yocto-poky/meta/conf/distro/include/default-versions.inc @@ -5,6 +5,3 @@ # Force the older version of liberation-fonts until we fix the fontforge issue PREFERRED_VERSION_liberation-fonts ?= "1.04" -# Force db-native's version to keep sync with db while -# 'AGPL-3.0' in ${INCOMPATIBLE_LICENSE} blacklist -PREFERRED_VERSION_db-native = "${@incompatible_license_contains('AGPL-3.0', '5.%', '6.%', d)}" diff --git a/import-layers/yocto-poky/meta/conf/distro/include/distro_alias.inc b/import-layers/yocto-poky/meta/conf/distro/include/distro_alias.inc index 10efb096f..489f5ea63 100644 --- a/import-layers/yocto-poky/meta/conf/distro/include/distro_alias.inc +++ b/import-layers/yocto-poky/meta/conf/distro/include/distro_alias.inc @@ -135,7 +135,7 @@ DISTRO_PN_ALIAS_pn-gtk-doc = "Fedora=gtk-doc Ubuntu=gtk-doc" DISTRO_PN_ALIAS_pn-gtk-engines = "Fedora=gtk2-engines OpenSuSE=gtk2-engines Ubuntu=gtk2-engines Mandriva=gtk-engines2 Debian=gtk2-engines" DISTRO_PN_ALIAS_pn-gtk-sato-engine = "OpenedHand" DISTRO_PN_ALIAS_pn-gtk-icon-utils-native = "OSPDT" -DISTRO_PN_ALIAS_pn-gummiboot = "Debian=gummiboot Fedora=gummiboot" +DISTRO_PN_ALIAS_pn-systemd-boot = "Ubuntu=systemd-boot Fedora=systemd-boot" DISTRO_PN_ALIAS_pn-hello-mod = "OE-Core" DISTRO_PN_ALIAS_pn-hostap-conf = "OE-Core" DISTRO_PN_ALIAS_pn-hwlatdetect = "OSPDT" @@ -400,7 +400,6 @@ DISTRO_PN_ALIAS_pn-weston = "Fedora=weston OpenSuSE=weston" DISTRO_PN_ALIAS_pn-weston-init = 
"OE-Core" DISTRO_PN_ALIAS_pn-which = "Mandriva=which Fedora=which" DISTRO_PN_ALIAS_pn-wpa-supplicant = "Meego=wpa_supplicant Fedora=wpa_supplicant OpenSuSE=wpa_supplicant Ubuntu=wpasupplicant Mandriva=wpa_supplicant Debian=wpasupplicant" -DISTRO_PN_ALIAS_pn-x11-common = "OE-Core" DISTRO_PN_ALIAS_pn-x11perf = "Fedora=xorg-x11-apps Ubuntu=x11-apps" DISTRO_PN_ALIAS_pn-xcb-util-image = "Debian=xcb-util Fedora=xcb-util" DISTRO_PN_ALIAS_pn-xcb-util-keysyms = "Debian=xcb-util Fedora=xcb-util" diff --git a/import-layers/yocto-poky/meta/conf/distro/include/no-static-libs.inc b/import-layers/yocto-poky/meta/conf/distro/include/no-static-libs.inc index 13a791869..f8d8c09cf 100644 --- a/import-layers/yocto-poky/meta/conf/distro/include/no-static-libs.inc +++ b/import-layers/yocto-poky/meta/conf/distro/include/no-static-libs.inc @@ -32,4 +32,8 @@ DISABLE_STATIC_pn-libusb1-native = "" EXTRA_OECONF_append = "${DISABLE_STATIC}" -EXTRA_OECMAKE_append_pn-libical = "-DSHARED_ONLY=True" +EXTRA_OECMAKE_append_pn-libical = " -DSHARED_ONLY=True" + +EXCONFIG_ARGS_append_pn-ncurses = " --without-normal" +EXCONFIG_ARGS_append_pn-ncurses-native = " --without-normal" +EXCONFIG_ARGS_append_pn-nativesdk-ncurses = " --without-normal" diff --git a/import-layers/yocto-poky/meta/conf/distro/include/security_flags.inc b/import-layers/yocto-poky/meta/conf/distro/include/security_flags.inc index eca835e3a..e162abeb3 100644 --- a/import-layers/yocto-poky/meta/conf/distro/include/security_flags.inc +++ b/import-layers/yocto-poky/meta/conf/distro/include/security_flags.inc @@ -45,6 +45,7 @@ SECURITY_CFLAGS_pn-gcc-sanitizers = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-gdb = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-gmp = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-gnutls = "${SECURITY_NO_PIE_CFLAGS}" +SECURITY_CFLAGS_pn-gpgme = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-grub = "" SECURITY_CFLAGS_pn-grub-efi = "" SECURITY_CFLAGS_pn-grub-efi-native = "" @@ -72,13 +73,12 @@ SECURITY_CFLAGS_pn-opensp = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-ppp = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-python = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-python-pycurl = "${SECURITY_NO_PIE_CFLAGS}" -SECURITY_CFLAGS_pn-python-smartpm = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-python-numpy = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-python3-numpy = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-python3-pycairo = "${SECURITY_NO_PIE_CFLAGS}" +SECURITY_CFLAGS_pn-python3-pycurl = "${SECURITY_NO_PIE_CFLAGS}" +SECURITY_CFLAGS_pn-python3-pygpgme = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-python3 = "${SECURITY_NO_PIE_CFLAGS}" -# Revert RPM to using internally supported values -SECURITY_CFLAGS_pn-rpm = "${lcl_maybe_fortify} -fstack-protector" SECURITY_CFLAGS_pn-syslinux = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-slang = "${SECURITY_NO_PIE_CFLAGS}" SECURITY_CFLAGS_pn-source-highlight = "${SECURITY_NO_PIE_CFLAGS}" @@ -91,12 +91,7 @@ SECURITY_CFLAGS_pn-zlib = "${SECURITY_NO_PIE_CFLAGS}" # Recipes which fail to compile when elevating -Wformat-security to an error SECURITY_STRINGFORMAT_pn-busybox = "" -SECURITY_STRINGFORMAT_pn-console-tools = "" -SECURITY_STRINGFORMAT_pn-cmake = "" -SECURITY_STRINGFORMAT_pn-expect = "" SECURITY_STRINGFORMAT_pn-gcc = "" -SECURITY_STRINGFORMAT_pn-gettext = "" -SECURITY_STRINGFORMAT_pn-kexec-tools = "" SECURITY_STRINGFORMAT_pn-oh-puzzles = "" TARGET_CFLAGS_append_class-target = " ${SECURITY_CFLAGS}" diff --git a/import-layers/yocto-poky/meta/conf/distro/include/tclibc-glibc.inc 
b/import-layers/yocto-poky/meta/conf/distro/include/tclibc-glibc.inc index 649918fd2..ad8000f5d 100644 --- a/import-layers/yocto-poky/meta/conf/distro/include/tclibc-glibc.inc +++ b/import-layers/yocto-poky/meta/conf/distro/include/tclibc-glibc.inc @@ -2,7 +2,7 @@ # glibc specific configuration # -LIBCEXTENSION = "${@['', '-gnu'][(d.getVar('ABIEXTENSION', True) or '') != '']}" +LIBCEXTENSION = "${@['', '-gnu'][(d.getVar('ABIEXTENSION') or '') != '']}" # Add glibc overrides to the overrides for glibc. LIBCOVERRIDE = ":libc-glibc" @@ -34,7 +34,7 @@ LIBC_LOCALE_DEPENDENCIES = "\ glibc-gconv-iso8859-15" def get_libc_locales_dependencies(d): - if 'libc-locales' in (d.getVar('DISTRO_FEATURES', True) or '').split() : - return d.getVar('LIBC_LOCALE_DEPENDENCIES', True) or '' + if 'libc-locales' in (d.getVar('DISTRO_FEATURES') or '').split() : + return d.getVar('LIBC_LOCALE_DEPENDENCIES') or '' else: return '' diff --git a/import-layers/yocto-poky/meta/conf/distro/include/tclibc-musl.inc b/import-layers/yocto-poky/meta/conf/distro/include/tclibc-musl.inc index e6b10f946..3d3f6ac4f 100644 --- a/import-layers/yocto-poky/meta/conf/distro/include/tclibc-musl.inc +++ b/import-layers/yocto-poky/meta/conf/distro/include/tclibc-musl.inc @@ -4,7 +4,7 @@ LIBCEXTENSION = "-musl" -# Add uclibc overrides to the overrides. +# Add musl libc overrides to the overrides. LIBCOVERRIDE = ":libc-musl" OVERRIDES .= "${LIBCOVERRIDE}" @@ -25,7 +25,3 @@ LIBC_DEPENDENCIES = "\ musl-dbg \ musl-dev \ " - -# GLib binaries trigger assertion failures and crash under qemu -# when musl is in use -DISTRO_FEATURES_BACKFILL_CONSIDERED += "gobject-introspection-data" diff --git a/import-layers/yocto-poky/meta/conf/distro/include/tcmode-default.inc b/import-layers/yocto-poky/meta/conf/distro/include/tcmode-default.inc index ca3c5ec90..3db16e8fb 100644 --- a/import-layers/yocto-poky/meta/conf/distro/include/tcmode-default.inc +++ b/import-layers/yocto-poky/meta/conf/distro/include/tcmode-default.inc @@ -22,13 +22,13 @@ PREFERRED_PROVIDER_virtual/${TARGET_PREFIX}libc-initial = "${TCLIBC}-initial" PREFERRED_PROVIDER_virtual/nativesdk-${SDK_PREFIX}libc-initial ?= "nativesdk-glibc-initial" PREFERRED_PROVIDER_virtual/gettext ??= "gettext" -GCCVERSION ?= "6.2%" +GCCVERSION ?= "6.3%" SDKGCCVERSION ?= "${GCCVERSION}" -BINUVERSION ?= "2.27%" -GDBVERSION ?= "7.11%" -GLIBCVERSION ?= "2.24" +BINUVERSION ?= "2.28%" +GDBVERSION ?= "7.12%" +GLIBCVERSION ?= "2.25" UCLIBCVERSION ?= "1.0%" -LINUXLIBCVERSION ?= "4.8%" +LINUXLIBCVERSION ?= "4.10%" PREFERRED_VERSION_gcc ?= "${GCCVERSION}" PREFERRED_VERSION_gcc-cross-${TARGET_ARCH} ?= "${GCCVERSION}" diff --git a/import-layers/yocto-poky/meta/conf/distro/include/uninative-flags.inc b/import-layers/yocto-poky/meta/conf/distro/include/uninative-flags.inc index e9f82c39e..b6a944ef6 100644 --- a/import-layers/yocto-poky/meta/conf/distro/include/uninative-flags.inc +++ b/import-layers/yocto-poky/meta/conf/distro/include/uninative-flags.inc @@ -7,3 +7,11 @@ BUILD_CXXFLAGS_append = " -D_GLIBCXX_USE_CXX11_ABI=0" # icu configure defaults to CXX11 if no -std= option is passed in CXXFLAGS # therefore pass one BUILD_CXXFLAGS_append_pn-icu-native = " -std=c++98" + +# Some distros (ubuntu 16.10, debian-testing) default to gcc configured with +# --enable-default-pie (see gcc -v). This breaks e.g. 
prelink-native on a pie +# default system if binutils-native was built on a system which is not pie default +# We therefore enable pie unconditionally for native recipes where static libs are +# used such as libiberty from binutils, for now, until our minimum distro set is +# all default pie. +BUILD_CFLAGS_append_pn-binutils-native = " -pie -fpie" diff --git a/import-layers/yocto-poky/meta/conf/documentation.conf b/import-layers/yocto-poky/meta/conf/documentation.conf index 06527cb99..35b9103b4 100644 --- a/import-layers/yocto-poky/meta/conf/documentation.conf +++ b/import-layers/yocto-poky/meta/conf/documentation.conf @@ -415,7 +415,7 @@ TARGET_FPU[doc] = "Specifies the method for handling FPU code. For FPU-less targ TARGET_OS[doc] = "Specifies the target's operating system." TARGET_PREFIX[doc] = "The prefix for the cross-compile toolchain (e.g. arm-linux-)." TARGET_SYS[doc] = "The target system is comprised of TARGET_ARCH,TARGET_VENDOR and TARGET_OS." -TCLIBC[doc] = "Specifies GNU standard C library (libc) variant to use during the build process. You can select 'glibc' or 'uclibc'." +TCLIBC[doc] = "Specifies C library (libc) variant to use during the build process. You can select 'baremetal', 'glibc' or 'musl'." TCMODE[doc] = "Enables an external toolchain (where provided by an additional layer) if set to a value other than 'default'." TEST_IMAGE[doc] = "Enables test booting of virtual machine images under the QEMU emulator after any root filesystems are created and runs tests against those images." TEST_QEMUBOOT_TIMEOUT[doc] = "The time in seconds allowed for an image to boot before automated runtime tests begin to run against an image." diff --git a/import-layers/yocto-poky/meta/conf/layer.conf b/import-layers/yocto-poky/meta/conf/layer.conf index 24b4df07f..fc165021c 100644 --- a/import-layers/yocto-poky/meta/conf/layer.conf +++ b/import-layers/yocto-poky/meta/conf/layer.conf @@ -9,7 +9,7 @@ BBFILE_PRIORITY_core = "5" # This should only be incremented on significant changes that will # cause compatibility issues with other layers -LAYERVERSION_core = "9" +LAYERVERSION_core = "10" BBLAYERS_LAYERINDEX_NAME_core = "openembedded-core" @@ -59,3 +59,5 @@ SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS += " \ oprofile->virtual/kernel \ " +# We need to keep bitbake tools in PATH +PATH := "${@os.path.dirname(bb.utils.which(d.getVar('PATH'),'bitbake'))}:${HOSTTOOLS_DIR}" diff --git a/import-layers/yocto-poky/meta/conf/licenses.conf b/import-layers/yocto-poky/meta/conf/licenses.conf index 9917c40e1..d210a0e94 100644 --- a/import-layers/yocto-poky/meta/conf/licenses.conf +++ b/import-layers/yocto-poky/meta/conf/licenses.conf @@ -133,11 +133,10 @@ DATA_LICENSE = "CC0-1.0" # You can set option to control if the copyright information will be skipped # during the identification process. # -# It is defined as [FOSS_COPYRIGHT] in ./meta/conf/licenses.conf. -# FOSS_COPYRIGHT = "true" +# FOSS_NO_COPYRIGHT = "true" # NO copyright will be processed. That means only license information will be # identified and output to SPDX file -# FOSS_COPYRIGHT = "false" +# FOSS_NO_COPYRIGHT = "false" # Copyright will be identified and output to SPDX file along with license # information. The process will take more time than not processing copyright # information. 
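[Editorial aside, not part of the patch] The new PATH assignment in the layer.conf hunk above restricts the build's search path to the directory containing the bitbake executable plus the filtered HOSTTOOLS_DIR. A rough plain-Python sketch of what that inline expression computes, with a simplified stand-in for bb.utils.which() and hypothetical host paths:

import os

def which(search_path, item):
    # simplified stand-in for bb.utils.which(): first match on a colon-separated path, else ''
    for entry in search_path.split(':'):
        candidate = os.path.join(entry, item)
        if os.path.exists(candidate):
            return candidate
    return ''

host_path = '/usr/bin:/home/user/poky/bitbake/bin'  # hypothetical ${PATH}
hosttools_dir = '/build/tmp/hosttools'              # hypothetical ${HOSTTOOLS_DIR}
new_path = os.path.dirname(which(host_path, 'bitbake')) + ':' + hosttools_dir
# on such a host this yields '/home/user/poky/bitbake/bin:/build/tmp/hosttools'
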
diff --git a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-arm.inc b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-arm.inc index 2e3127c79..99625d841 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-arm.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-arm.inc @@ -13,4 +13,4 @@ TUNE_PKGARCH = "${ARMPKGARCH}${ARMPKGSFX_THUMB}${ARMPKGSFX_DSP}${ARMPKGSFX_EABI} ABIEXTENSION = "eabi" -TARGET_FPU = "${@d.getVar('TUNE_CCARGS_MFLOAT', True) or 'soft'}" +TARGET_FPU = "${@d.getVar('TUNE_CCARGS_MFLOAT') or 'soft'}" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-arm64.inc b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-arm64.inc index 9eeffac81..5f90763f7 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-arm64.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-arm64.inc @@ -28,7 +28,7 @@ TARGET_FPU_64 = "" TUNE_ARCH_32 = "${@bb.utils.contains('TUNE_FEATURES', 'bigendian', 'armeb', 'arm', d)}" TUNE_PKGARCH_32 = "${ARMPKGARCH}${ARMPKGSFX_THUMB}${ARMPKGSFX_DSP}${ARMPKGSFX_EABI}${ARMPKGSFX_ENDIAN}${ARMPKGSFX_FPU}" ABIEXTENSION_32 = "eabi" -TARGET_FPU_32 = "${@d.getVar('TUNE_CCARGS_MFLOAT', True) or 'soft'}" +TARGET_FPU_32 = "${@d.getVar('TUNE_CCARGS_MFLOAT') or 'soft'}" TUNE_ARCH = "${@bb.utils.contains('TUNE_FEATURES', 'aarch64', '${TUNE_ARCH_64}', '${TUNE_ARCH_32}' ,d)}" TUNE_PKGARCH = "${@bb.utils.contains('TUNE_FEATURES', 'aarch64', '${TUNE_PKGARCH_64}', '${TUNE_PKGARCH_32}' ,d)}" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv5-dsp.inc b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv5-dsp.inc index f20492071..1f16085fc 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv5-dsp.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv5-dsp.inc @@ -7,9 +7,9 @@ require conf/machine/include/arm/arch-armv5.inc AVAILTUNES += "armv5e armv5te" ARMPKGARCH_tune-armv5e ?= "armv5" ARMPKGARCH_tune-armv5te ?= "armv5" -TUNE_FEATURES_tune-armv5e = "${TUNE_FEATURES_tune-armv5} dsp" +TUNE_FEATURES_tune-armv5e = "${TUNE_FEATURES_tune-armv5} dsp" TUNE_FEATURES_tune-armv5te = "${TUNE_FEATURES_tune-armv5t} dsp" -PACKAGE_EXTRA_ARCHS_tune-armv5e = "${PACKAGE_EXTRA_ARCHS_tune-armv5} armv5e" +PACKAGE_EXTRA_ARCHS_tune-armv5e = "${PACKAGE_EXTRA_ARCHS_tune-armv5} armv5e" PACKAGE_EXTRA_ARCHS_tune-armv5te = "${PACKAGE_EXTRA_ARCHS_tune-armv5t} armv5e armv5te" # Little Endian + VFP/DSP @@ -18,22 +18,22 @@ ARMPKGARCH_tune-armv5e-vfp ?= "armv5" ARMPKGARCH_tune-armv5te-vfp ?= "armv5" ARMPKGARCH_tune-armv5ehf-vfp ?= "armv5" ARMPKGARCH_tune-armv5tehf-vfp ?= "armv5" -TUNE_FEATURES_tune-armv5e-vfp = "${TUNE_FEATURES_tune-armv5e} vfp" -TUNE_FEATURES_tune-armv5te-vfp = "${TUNE_FEATURES_tune-armv5te} vfp" -TUNE_FEATURES_tune-armv5ehf-vfp = "${TUNE_FEATURES_tune-armv5e-vfp} callconvention-hard" +TUNE_FEATURES_tune-armv5e-vfp = "${TUNE_FEATURES_tune-armv5e} vfp" +TUNE_FEATURES_tune-armv5te-vfp = "${TUNE_FEATURES_tune-armv5te} vfp" +TUNE_FEATURES_tune-armv5ehf-vfp = "${TUNE_FEATURES_tune-armv5e-vfp} callconvention-hard" TUNE_FEATURES_tune-armv5tehf-vfp = "${TUNE_FEATURES_tune-armv5te-vfp} callconvention-hard" -PACKAGE_EXTRA_ARCHS_tune-armv5e-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5-vfp} armv5e armv5e-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv5te-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5t-vfp} armv5e armv5te armv5e-vfp armv5te-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv5ehf-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5hf-vfp} 
armv5ehf-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv5e-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5-vfp} armv5e armv5e-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv5te-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5t-vfp} armv5e armv5te armv5e-vfp armv5te-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv5ehf-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5hf-vfp} armv5ehf-vfp" PACKAGE_EXTRA_ARCHS_tune-armv5tehf-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5thf-vfp} armv5ehf-vfp armv5tehf-vfp" # Big Endian AVAILTUNES += "armv5eb armv5teb" ARMPKGARCH_tune-armv5eb ?= "armv5" ARMPKGARCH_tune-armv5teb ?= "armv5" -TUNE_FEATURES_tune-armv5eb = "${TUNE_FEATURES_tune-armv5e} bigendian" +TUNE_FEATURES_tune-armv5eb = "${TUNE_FEATURES_tune-armv5e} bigendian" TUNE_FEATURES_tune-armv5teb = "${TUNE_FEATURES_tune-armv5te} bigendian" -PACKAGE_EXTRA_ARCHS_tune-armv5eb = "${PACKAGE_EXTRA_ARCHS_tune-armv5b} armv5eb" +PACKAGE_EXTRA_ARCHS_tune-armv5eb = "${PACKAGE_EXTRA_ARCHS_tune-armv5b} armv5eb" PACKAGE_EXTRA_ARCHS_tune-armv5teb = "${PACKAGE_EXTRA_ARCHS_tune-armv5tb} armv5eb armv5teb" # Big Endian + VFP/DSP @@ -42,11 +42,11 @@ ARMPKGARCH_tune-armv5eb-vfp ?= "armv5" ARMPKGARCH_tune-armv5teb-vfp ?= "armv5" ARMPKGARCH_tune-armv5ehfb-vfp ?= "armv5" ARMPKGARCH_tune-armv5tehfb-vfp ?= "armv5" -TUNE_FEATURES_tune-armv5eb-vfp = "${TUNE_FEATURES_tune-armv5e-vfp} bigendian" -TUNE_FEATURES_tune-armv5teb-vfp = "${TUNE_FEATURES_tune-armv5te-vfp} bigendian" -TUNE_FEATURES_tune-armv5ehfb-vfp = "${TUNE_FEATURES_tune-armv5ehf-vfp} bigendian" +TUNE_FEATURES_tune-armv5eb-vfp = "${TUNE_FEATURES_tune-armv5e-vfp} bigendian" +TUNE_FEATURES_tune-armv5teb-vfp = "${TUNE_FEATURES_tune-armv5te-vfp} bigendian" +TUNE_FEATURES_tune-armv5ehfb-vfp = "${TUNE_FEATURES_tune-armv5ehf-vfp} bigendian" TUNE_FEATURES_tune-armv5tehfb-vfp = "${TUNE_FEATURES_tune-armv5tehf-vfp} bigendian" -PACKAGE_EXTRA_ARCHS_tune-armv5eb-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5b-vfp} armv5eb armv5eb-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv5eb-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5b-vfp} armv5eb armv5eb-vfp" PACKAGE_EXTRA_ARCHS_tune-armv5teb-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5tb-vfp} armv5eb armv5teb armv5eb-vfp armv5teb-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv5ehfb-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5hfb-vfp} armv5ehfb-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv5ehfb-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5hfb-vfp} armv5ehfb-vfp" PACKAGE_EXTRA_ARCHS_tune-armv5tehfb-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5thfb-vfp} armv5ehfb-vfp armv5tehfb-vfp" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv5.inc b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv5.inc index e6ff902a9..46f631c81 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv5.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv5.inc @@ -14,7 +14,7 @@ ARMPKGARCH_tune-armv5 ?= "armv5" ARMPKGARCH_tune-armv5t ?= "armv5" TUNE_FEATURES_tune-armv5 = "arm armv5" TUNE_FEATURES_tune-armv5t = "${TUNE_FEATURES_tune-armv5} thumb" -PACKAGE_EXTRA_ARCHS_tune-armv5 = "${PACKAGE_EXTRA_ARCHS_tune-armv4} armv5" +PACKAGE_EXTRA_ARCHS_tune-armv5 = "${PACKAGE_EXTRA_ARCHS_tune-armv4} armv5" PACKAGE_EXTRA_ARCHS_tune-armv5t = "${PACKAGE_EXTRA_ARCHS_tune-armv4t} armv5 armv5t" # Little Endian + VFP/DSP @@ -23,11 +23,11 @@ ARMPKGARCH_tune-armv5-vfp ?= "armv5" ARMPKGARCH_tune-armv5t-vfp ?= "armv5" ARMPKGARCH_tune-armv5hf-vfp ?= "armv5" ARMPKGARCH_tune-armv5thf-vfp ?= "armv5" -TUNE_FEATURES_tune-armv5-vfp = "${TUNE_FEATURES_tune-armv5} vfp" -TUNE_FEATURES_tune-armv5t-vfp = "${TUNE_FEATURES_tune-armv5t} vfp" 
-TUNE_FEATURES_tune-armv5hf-vfp = "${TUNE_FEATURES_tune-armv5-vfp} callconvention-hard" +TUNE_FEATURES_tune-armv5-vfp = "${TUNE_FEATURES_tune-armv5} vfp" +TUNE_FEATURES_tune-armv5t-vfp = "${TUNE_FEATURES_tune-armv5t} vfp" +TUNE_FEATURES_tune-armv5hf-vfp = "${TUNE_FEATURES_tune-armv5-vfp} callconvention-hard" TUNE_FEATURES_tune-armv5thf-vfp = "${TUNE_FEATURES_tune-armv5t-vfp} callconvention-hard" -PACKAGE_EXTRA_ARCHS_tune-armv5-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5} armv5-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv5-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5} armv5-vfp" PACKAGE_EXTRA_ARCHS_tune-armv5t-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5t} armv5-vfp armv5t-vfp" PACKAGE_EXTRA_ARCHS_tune-armv5hf-vfp = "armv5hf-vfp" PACKAGE_EXTRA_ARCHS_tune-armv5thf-vfp = "armv5hf-vfp armv5thf-vfp" @@ -36,9 +36,9 @@ PACKAGE_EXTRA_ARCHS_tune-armv5thf-vfp = "armv5hf-vfp armv5thf-vfp" AVAILTUNES += "armv5b armv5tb" ARMPKGARCH_tune-armv5b ?= "armv5" ARMPKGARCH_tune-armv5tb ?= "armv5" -TUNE_FEATURES_tune-armv5b = "${TUNE_FEATURES_tune-armv5} bigendian" +TUNE_FEATURES_tune-armv5b = "${TUNE_FEATURES_tune-armv5} bigendian" TUNE_FEATURES_tune-armv5tb = "${TUNE_FEATURES_tune-armv5t} bigendian" -PACKAGE_EXTRA_ARCHS_tune-armv5b = "${PACKAGE_EXTRA_ARCHS_tune-armv4b} armv5b" +PACKAGE_EXTRA_ARCHS_tune-armv5b = "${PACKAGE_EXTRA_ARCHS_tune-armv4b} armv5b" PACKAGE_EXTRA_ARCHS_tune-armv5tb = "${PACKAGE_EXTRA_ARCHS_tune-armv4tb} armv5b armv5tb" # Big Endian + VFP/DSP @@ -47,11 +47,11 @@ ARMPKGARCH_tune-armv5b-vfp ?= "armv5" ARMPKGARCH_tune-armv5tb-vfp ?= "armv5" ARMPKGARCH_tune-armv5hfb-vfp ?= "armv5" ARMPKGARCH_tune-armv5thfb-vfp ?= "armv5" -TUNE_FEATURES_tune-armv5b-vfp = "${TUNE_FEATURES_tune-armv5-vfp} bigendian" -TUNE_FEATURES_tune-armv5tb-vfp = "${TUNE_FEATURES_tune-armv5t-vfp} bigendian" -TUNE_FEATURES_tune-armv5hfb-vfp = "${TUNE_FEATURES_tune-armv5hf-vfp} bigendian" +TUNE_FEATURES_tune-armv5b-vfp = "${TUNE_FEATURES_tune-armv5-vfp} bigendian" +TUNE_FEATURES_tune-armv5tb-vfp = "${TUNE_FEATURES_tune-armv5t-vfp} bigendian" +TUNE_FEATURES_tune-armv5hfb-vfp = "${TUNE_FEATURES_tune-armv5hf-vfp} bigendian" TUNE_FEATURES_tune-armv5thfb-vfp = "${TUNE_FEATURES_tune-armv5thf-vfp} bigendian" -PACKAGE_EXTRA_ARCHS_tune-armv5b-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5b} armv5b-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv5b-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5b} armv5b-vfp" PACKAGE_EXTRA_ARCHS_tune-armv5tb-vfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5tb} armv5b-vfp armv5tb-vfp" PACKAGE_EXTRA_ARCHS_tune-armv5hfb-vfp = "armv5hfb-vfp" PACKAGE_EXTRA_ARCHS_tune-armv5thfb-vfp = "armv5hfb-vfp armv5thfb-vfp" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv6.inc b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv6.inc index 4c93f2c7a..6c838e999 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv6.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv6.inc @@ -1,4 +1,4 @@ -DEFAULTTUNE ?= "armv6" +DEFAULTTUNE ?= "armv6hf" TUNEVALID[armv6] = "Enable instructions for ARMv6" TUNECONFLICTS[armv6] = "armv4 armv5" @@ -16,16 +16,16 @@ ARMPKGARCH_tune-armv6t ?= "armv6" ARMPKGARCH_tune-armv6hf ?= "armv6" ARMPKGARCH_tune-armv6thf ?= "armv6" TUNE_FEATURES_tune-armv6-novfp = "arm armv6" -TUNE_FEATURES_tune-armv6t-novfp = "${TUNE_FEATURES_tune-armv6-novfp} thumb" -TUNE_FEATURES_tune-armv6 = "${TUNE_FEATURES_tune-armv6-novfp} vfp" +TUNE_FEATURES_tune-armv6t-novfp = "${TUNE_FEATURES_tune-armv6-novfp} thumb" +TUNE_FEATURES_tune-armv6 = "${TUNE_FEATURES_tune-armv6-novfp} vfp" 
TUNE_FEATURES_tune-armv6t = "${TUNE_FEATURES_tune-armv6t-novfp} vfp" -TUNE_FEATURES_tune-armv6hf = "${TUNE_FEATURES_tune-armv6} callconvention-hard" -TUNE_FEATURES_tune-armv6thf = "${TUNE_FEATURES_tune-armv6t} callconvention-hard" -PACKAGE_EXTRA_ARCHS_tune-armv6-novfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5e} armv6" +TUNE_FEATURES_tune-armv6hf = "${TUNE_FEATURES_tune-armv6} callconvention-hard" +TUNE_FEATURES_tune-armv6thf = "${TUNE_FEATURES_tune-armv6t} callconvention-hard" +PACKAGE_EXTRA_ARCHS_tune-armv6-novfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5e} armv6" PACKAGE_EXTRA_ARCHS_tune-armv6t-novfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5te} armv6 armv6t" -PACKAGE_EXTRA_ARCHS_tune-armv6 = "${PACKAGE_EXTRA_ARCHS_tune-armv5e-vfp} armv6 armv6-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv6 = "${PACKAGE_EXTRA_ARCHS_tune-armv5e-vfp} armv6 armv6-vfp" PACKAGE_EXTRA_ARCHS_tune-armv6t = "${PACKAGE_EXTRA_ARCHS_tune-armv5te-vfp} armv6 armv6t armv6-vfp armv6t-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv6hf = "${PACKAGE_EXTRA_ARCHS_tune-armv5ehf-vfp} armv6hf-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv6hf = "${PACKAGE_EXTRA_ARCHS_tune-armv5ehf-vfp} armv6hf-vfp" PACKAGE_EXTRA_ARCHS_tune-armv6thf = "${PACKAGE_EXTRA_ARCHS_tune-armv5tehf-vfp} armv6hf-vfp armv6thf-vfp" # Big Endian @@ -36,15 +36,15 @@ ARMPKGARCH_tune-armv6b ?= "armv6" ARMPKGARCH_tune-armv6tb ?= "armv6" ARMPKGARCH_tune-armv6hfb ?= "armv6" ARMPKGARCH_tune-armv6thfb ?= "armv6" -TUNE_FEATURES_tune-armv6b-novfp = "${TUNE_FEATURES_tune-armv6-novfp} bigendian" +TUNE_FEATURES_tune-armv6b-novfp = "${TUNE_FEATURES_tune-armv6-novfp} bigendian" TUNE_FEATURES_tune-armv6tb-novfp = "${TUNE_FEATURES_tune-armv6t-novfp} bigendian" -TUNE_FEATURES_tune-armv6b = "${TUNE_FEATURES_tune-armv6} bigendian" -TUNE_FEATURES_tune-armv6tb = "${TUNE_FEATURES_tune-armv6t} bigendian" -TUNE_FEATURES_tune-armv6hfb = "${TUNE_FEATURES_tune-armv6hf} bigendian" -TUNE_FEATURES_tune-armv6thfb = "${TUNE_FEATURES_tune-armv6thf} bigendian" -PACKAGE_EXTRA_ARCHS_tune-armv6b-novfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5eb} armv6b" +TUNE_FEATURES_tune-armv6b = "${TUNE_FEATURES_tune-armv6} bigendian" +TUNE_FEATURES_tune-armv6tb = "${TUNE_FEATURES_tune-armv6t} bigendian" +TUNE_FEATURES_tune-armv6hfb = "${TUNE_FEATURES_tune-armv6hf} bigendian" +TUNE_FEATURES_tune-armv6thfb = "${TUNE_FEATURES_tune-armv6thf} bigendian" +PACKAGE_EXTRA_ARCHS_tune-armv6b-novfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5eb} armv6b" PACKAGE_EXTRA_ARCHS_tune-armv6tb-novfp = "${PACKAGE_EXTRA_ARCHS_tune-armv5teb} armv6b armv6tb" -PACKAGE_EXTRA_ARCHS_tune-armv6b = "${PACKAGE_EXTRA_ARCHS_tune-armv5eb-vfp} armv6b armv6b-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv6b = "${PACKAGE_EXTRA_ARCHS_tune-armv5eb-vfp} armv6b armv6b-vfp" PACKAGE_EXTRA_ARCHS_tune-armv6tb = "${PACKAGE_EXTRA_ARCHS_tune-armv5teb-vfp} armv6b armv6tb armv6b-vfp armv6tb-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv6hfb = "${PACKAGE_EXTRA_ARCHS_tune-armv5ehfb-vfp} armv6hfb-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv6hfb = "${PACKAGE_EXTRA_ARCHS_tune-armv5ehfb-vfp} armv6hfb-vfp" PACKAGE_EXTRA_ARCHS_tune-armv6thfb = "${PACKAGE_EXTRA_ARCHS_tune-armv5tehfb-vfp} armv6hfb-vfp armv6thfb-vfp" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv7a.inc b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv7a.inc index 1f2e071d5..bad1c2705 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv7a.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv7a.inc @@ -1,4 +1,4 @@ -DEFAULTTUNE ?= "armv7a" +DEFAULTTUNE ?= "armv7athf" TUNEVALID[armv7a] = "Enable instructions for 
ARMv7-a" TUNECONFLICTS[armv7a] = "armv4 armv5 armv6 armv7" @@ -22,23 +22,23 @@ ARMPKGARCH_tune-armv7a-neon-vfpv4 ?= "armv7a" ARMPKGARCH_tune-armv7at-neon-vfpv4 ?= "armv7a" TUNE_FEATURES_tune-armv7a = "arm armv7a vfp" TUNE_FEATURES_tune-armv7at = "${TUNE_FEATURES_tune-armv7a} thumb" -TUNE_FEATURES_tune-armv7a-vfpv3d16 = "${TUNE_FEATURES_tune-armv7a} vfpv3d16" +TUNE_FEATURES_tune-armv7a-vfpv3d16 = "${TUNE_FEATURES_tune-armv7a} vfpv3d16" TUNE_FEATURES_tune-armv7at-vfpv3d16 = "${TUNE_FEATURES_tune-armv7at} vfpv3d16" -TUNE_FEATURES_tune-armv7a-vfpv3 = "${TUNE_FEATURES_tune-armv7a-vfpv3d16} vfpv3" +TUNE_FEATURES_tune-armv7a-vfpv3 = "${TUNE_FEATURES_tune-armv7a-vfpv3d16} vfpv3" TUNE_FEATURES_tune-armv7at-vfpv3 = "${TUNE_FEATURES_tune-armv7at-vfpv3d16} vfpv3" -TUNE_FEATURES_tune-armv7a-neon = "${TUNE_FEATURES_tune-armv7a} neon" +TUNE_FEATURES_tune-armv7a-neon = "${TUNE_FEATURES_tune-armv7a} neon" TUNE_FEATURES_tune-armv7at-neon = "${TUNE_FEATURES_tune-armv7at} neon" -TUNE_FEATURES_tune-armv7a-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7a-neon} vfpv4" +TUNE_FEATURES_tune-armv7a-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7a-neon} vfpv4" TUNE_FEATURES_tune-armv7at-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7at-neon} vfpv4" -PACKAGE_EXTRA_ARCHS_tune-armv7a = "${PACKAGE_EXTRA_ARCHS_tune-armv6} armv7a armv7a-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv7a = "${PACKAGE_EXTRA_ARCHS_tune-armv6} armv7a armv7a-vfp" PACKAGE_EXTRA_ARCHS_tune-armv7at = "${PACKAGE_EXTRA_ARCHS_tune-armv6t} armv7a armv7a-vfp armv7at2-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv7a-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} armv7a-vfpv3d16" +PACKAGE_EXTRA_ARCHS_tune-armv7a-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} armv7a-vfpv3d16" PACKAGE_EXTRA_ARCHS_tune-armv7at-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7at} armv7a-vfpv3d16 armv7at2-vfpv3d16" -PACKAGE_EXTRA_ARCHS_tune-armv7a-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-vfpv3d16} armv7a-vfpv3" +PACKAGE_EXTRA_ARCHS_tune-armv7a-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-vfpv3d16} armv7a-vfpv3" PACKAGE_EXTRA_ARCHS_tune-armv7at-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7at-vfpv3d16} armv7a-vfpv3 armv7at2-vfpv3" -PACKAGE_EXTRA_ARCHS_tune-armv7a-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} armv7a-neon" +PACKAGE_EXTRA_ARCHS_tune-armv7a-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} armv7a-neon" PACKAGE_EXTRA_ARCHS_tune-armv7at-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7at} armv7a-neon armv7at2-neon" -PACKAGE_EXTRA_ARCHS_tune-armv7a-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-neon} armv7a-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-armv7a-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-neon} armv7a-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-armv7at-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7at-neon} armv7a-neon-vfpv4 armv7at2-neon-vfpv4" # HF Tunes @@ -53,25 +53,25 @@ ARMPKGARCH_tune-armv7ahf-neon ?= "armv7a" ARMPKGARCH_tune-armv7athf-neon ?= "armv7a" ARMPKGARCH_tune-armv7ahf-neon-vfpv4 ?= "armv7a" ARMPKGARCH_tune-armv7athf-neon-vfpv4 ?= "armv7a" -TUNE_FEATURES_tune-armv7ahf = "${TUNE_FEATURES_tune-armv7a} callconvention-hard" -TUNE_FEATURES_tune-armv7athf = "${TUNE_FEATURES_tune-armv7at} callconvention-hard" -TUNE_FEATURES_tune-armv7ahf-vfpv3d16 = "${TUNE_FEATURES_tune-armv7a-vfpv3d16} callconvention-hard" -TUNE_FEATURES_tune-armv7athf-vfpv3d16 = "${TUNE_FEATURES_tune-armv7at-vfpv3d16} callconvention-hard" -TUNE_FEATURES_tune-armv7ahf-vfpv3 = "${TUNE_FEATURES_tune-armv7a-vfpv3} callconvention-hard" -TUNE_FEATURES_tune-armv7athf-vfpv3 = "${TUNE_FEATURES_tune-armv7at-vfpv3} callconvention-hard" -TUNE_FEATURES_tune-armv7ahf-neon = 
"${TUNE_FEATURES_tune-armv7a-neon} callconvention-hard" -TUNE_FEATURES_tune-armv7athf-neon = "${TUNE_FEATURES_tune-armv7at-neon} callconvention-hard" -TUNE_FEATURES_tune-armv7ahf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7a-neon-vfpv4} callconvention-hard" +TUNE_FEATURES_tune-armv7ahf = "${TUNE_FEATURES_tune-armv7a} callconvention-hard" +TUNE_FEATURES_tune-armv7athf = "${TUNE_FEATURES_tune-armv7at} callconvention-hard" +TUNE_FEATURES_tune-armv7ahf-vfpv3d16 = "${TUNE_FEATURES_tune-armv7a-vfpv3d16} callconvention-hard" +TUNE_FEATURES_tune-armv7athf-vfpv3d16 = "${TUNE_FEATURES_tune-armv7at-vfpv3d16} callconvention-hard" +TUNE_FEATURES_tune-armv7ahf-vfpv3 = "${TUNE_FEATURES_tune-armv7a-vfpv3} callconvention-hard" +TUNE_FEATURES_tune-armv7athf-vfpv3 = "${TUNE_FEATURES_tune-armv7at-vfpv3} callconvention-hard" +TUNE_FEATURES_tune-armv7ahf-neon = "${TUNE_FEATURES_tune-armv7a-neon} callconvention-hard" +TUNE_FEATURES_tune-armv7athf-neon = "${TUNE_FEATURES_tune-armv7at-neon} callconvention-hard" +TUNE_FEATURES_tune-armv7ahf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7a-neon-vfpv4} callconvention-hard" TUNE_FEATURES_tune-armv7athf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7at-neon-vfpv4} callconvention-hard" -PACKAGE_EXTRA_ARCHS_tune-armv7ahf = "${PACKAGE_EXTRA_ARCHS_tune-armv6hf} armv7ahf-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv7ahf = "${PACKAGE_EXTRA_ARCHS_tune-armv6hf} armv7ahf-vfp" PACKAGE_EXTRA_ARCHS_tune-armv7athf = "${PACKAGE_EXTRA_ARCHS_tune-armv6thf} armv7ahf-vfp armv7at2hf-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv7ahf-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} armv7ahf-vfpv3d16" +PACKAGE_EXTRA_ARCHS_tune-armv7ahf-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} armv7ahf-vfpv3d16" PACKAGE_EXTRA_ARCHS_tune-armv7athf-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf} armv7ahf-vfpv3d16 armv7at2hf-vfpv3d16" -PACKAGE_EXTRA_ARCHS_tune-armv7ahf-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-vfpv3d16} armv7ahf-vfpv3" +PACKAGE_EXTRA_ARCHS_tune-armv7ahf-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-vfpv3d16} armv7ahf-vfpv3" PACKAGE_EXTRA_ARCHS_tune-armv7athf-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf-vfpv3d16} armv7ahf-vfpv3 armv7at2hf-vfpv3" -PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} armv7ahf-neon" +PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} armv7ahf-neon" PACKAGE_EXTRA_ARCHS_tune-armv7athf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf} armv7ahf-neon armv7at2hf-neon" -PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon} armv7ahf-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon} armv7ahf-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-armv7athf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf-neon} armv7ahf-neon-vfpv4 armv7at2hf-neon-vfpv4" # Big Endian @@ -86,25 +86,25 @@ ARMPKGARCH_tune-armv7ab-neon ?= "armv7a" ARMPKGARCH_tune-armv7atb-neon ?= "armv7a" ARMPKGARCH_tune-armv7ab-neon-vfpv4 ?= "armv7a" ARMPKGARCH_tune-armv7atb-neon-vfpv4 ?= "armv7a" -TUNE_FEATURES_tune-armv7ab = "${TUNE_FEATURES_tune-armv7a} bigendian" -TUNE_FEATURES_tune-armv7atb = "${TUNE_FEATURES_tune-armv7at} bigendian" -TUNE_FEATURES_tune-armv7ab-vfpv3d16 = "${TUNE_FEATURES_tune-armv7a-vfpv3d16} bigendian" -TUNE_FEATURES_tune-armv7atb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7at-vfpv3d16} bigendian" -TUNE_FEATURES_tune-armv7ab-vfpv3 = "${TUNE_FEATURES_tune-armv7a-vfpv3} bigendian" -TUNE_FEATURES_tune-armv7atb-vfpv3 = "${TUNE_FEATURES_tune-armv7at-vfpv3} bigendian" -TUNE_FEATURES_tune-armv7ab-neon = 
"${TUNE_FEATURES_tune-armv7a-neon} bigendian" -TUNE_FEATURES_tune-armv7atb-neon = "${TUNE_FEATURES_tune-armv7at-neon} bigendian" -TUNE_FEATURES_tune-armv7ab-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7a-neon-vfpv4} bigendian" +TUNE_FEATURES_tune-armv7ab = "${TUNE_FEATURES_tune-armv7a} bigendian" +TUNE_FEATURES_tune-armv7atb = "${TUNE_FEATURES_tune-armv7at} bigendian" +TUNE_FEATURES_tune-armv7ab-vfpv3d16 = "${TUNE_FEATURES_tune-armv7a-vfpv3d16} bigendian" +TUNE_FEATURES_tune-armv7atb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7at-vfpv3d16} bigendian" +TUNE_FEATURES_tune-armv7ab-vfpv3 = "${TUNE_FEATURES_tune-armv7a-vfpv3} bigendian" +TUNE_FEATURES_tune-armv7atb-vfpv3 = "${TUNE_FEATURES_tune-armv7at-vfpv3} bigendian" +TUNE_FEATURES_tune-armv7ab-neon = "${TUNE_FEATURES_tune-armv7a-neon} bigendian" +TUNE_FEATURES_tune-armv7atb-neon = "${TUNE_FEATURES_tune-armv7at-neon} bigendian" +TUNE_FEATURES_tune-armv7ab-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7a-neon-vfpv4} bigendian" TUNE_FEATURES_tune-armv7atb-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7at-neon-vfpv4} bigendian" -PACKAGE_EXTRA_ARCHS_tune-armv7ab = "${PACKAGE_EXTRA_ARCHS_tune-armv6b} armv7ab-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv7ab = "${PACKAGE_EXTRA_ARCHS_tune-armv6b} armv7ab-vfp" PACKAGE_EXTRA_ARCHS_tune-armv7atb = "${PACKAGE_EXTRA_ARCHS_tune-armv6tb} armv7ab-vfp armv7at2b-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv7ab-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ab} armv7ab-vfpv3d16" +PACKAGE_EXTRA_ARCHS_tune-armv7ab-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ab} armv7ab-vfpv3d16" PACKAGE_EXTRA_ARCHS_tune-armv7atb-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7atb} armv7ab-vfpv3d16 armv7at2b-vfpv3d16" -PACKAGE_EXTRA_ARCHS_tune-armv7ab-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ab-vfpv3d16} armv7ab-vfpv3" +PACKAGE_EXTRA_ARCHS_tune-armv7ab-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ab-vfpv3d16} armv7ab-vfpv3" PACKAGE_EXTRA_ARCHS_tune-armv7atb-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7atb-vfpv3d16} armv7ab-vfpv3 armv7at2b-vfpv3" -PACKAGE_EXTRA_ARCHS_tune-armv7ab-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ab} armv7ab-neon" +PACKAGE_EXTRA_ARCHS_tune-armv7ab-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ab} armv7ab-neon" PACKAGE_EXTRA_ARCHS_tune-armv7atb-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7atb} armv7ab-neon armv7at2b-neon" -PACKAGE_EXTRA_ARCHS_tune-armv7ab-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ab-neon} armv7ab-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-armv7ab-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ab-neon} armv7ab-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-armv7atb-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7atb-neon} armv7ab-neon-vfpv4 armv7at2b-neon-vfpv4" # Big Endian + HF @@ -119,23 +119,23 @@ ARMPKGARCH_tune-armv7ahfb-neon ?= "armv7a" ARMPKGARCH_tune-armv7athfb-neon ?= "armv7a" ARMPKGARCH_tune-armv7ahfb-neon-vfpv4 ?= "armv7a" ARMPKGARCH_tune-armv7athfb-neon-vfpv4 ?= "armv7a" -TUNE_FEATURES_tune-armv7ahfb = "${TUNE_FEATURES_tune-armv7ahf} bigendian" -TUNE_FEATURES_tune-armv7athfb = "${TUNE_FEATURES_tune-armv7athf} bigendian" -TUNE_FEATURES_tune-armv7ahfb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7ahf-vfpv3d16} bigendian" -TUNE_FEATURES_tune-armv7athfb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7athf-vfpv3d16} bigendian" -TUNE_FEATURES_tune-armv7ahfb-vfpv3 = "${TUNE_FEATURES_tune-armv7ahf-vfpv3} bigendian" -TUNE_FEATURES_tune-armv7athfb-vfpv3 = "${TUNE_FEATURES_tune-armv7athf-vfpv3} bigendian" -TUNE_FEATURES_tune-armv7ahfb-neon = "${TUNE_FEATURES_tune-armv7ahf-neon} bigendian" -TUNE_FEATURES_tune-armv7athfb-neon = "${TUNE_FEATURES_tune-armv7athf-neon} bigendian" 
-TUNE_FEATURES_tune-armv7ahfb-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ahf-neon-vfpv4} bigendian" +TUNE_FEATURES_tune-armv7ahfb = "${TUNE_FEATURES_tune-armv7ahf} bigendian" +TUNE_FEATURES_tune-armv7athfb = "${TUNE_FEATURES_tune-armv7athf} bigendian" +TUNE_FEATURES_tune-armv7ahfb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7ahf-vfpv3d16} bigendian" +TUNE_FEATURES_tune-armv7athfb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7athf-vfpv3d16} bigendian" +TUNE_FEATURES_tune-armv7ahfb-vfpv3 = "${TUNE_FEATURES_tune-armv7ahf-vfpv3} bigendian" +TUNE_FEATURES_tune-armv7athfb-vfpv3 = "${TUNE_FEATURES_tune-armv7athf-vfpv3} bigendian" +TUNE_FEATURES_tune-armv7ahfb-neon = "${TUNE_FEATURES_tune-armv7ahf-neon} bigendian" +TUNE_FEATURES_tune-armv7athfb-neon = "${TUNE_FEATURES_tune-armv7athf-neon} bigendian" +TUNE_FEATURES_tune-armv7ahfb-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ahf-neon-vfpv4} bigendian" TUNE_FEATURES_tune-armv7athfb-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7athf-neon-vfpv4} bigendian" -PACKAGE_EXTRA_ARCHS_tune-armv7ahfb = "${PACKAGE_EXTRA_ARCHS_tune-armv6hfb} armv7ahfb-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv7ahfb = "${PACKAGE_EXTRA_ARCHS_tune-armv6hfb} armv7ahfb-vfp" PACKAGE_EXTRA_ARCHS_tune-armv7athfb = "${PACKAGE_EXTRA_ARCHS_tune-armv6thfb} armv7ahfb-vfp armv7at2hfb-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahfb} armv7ahfb-vfpv3d16" +PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahfb} armv7ahfb-vfpv3d16" PACKAGE_EXTRA_ARCHS_tune-armv7athfb-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7athfb} armv7ahfb-vfpv3d16 armv7at2hfb-vfpv3d16" -PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-vfpv3d16} armv7ahfb-vfpv3" +PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-vfpv3d16} armv7ahfb-vfpv3" PACKAGE_EXTRA_ARCHS_tune-armv7athfb-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7athfb-vfpv3d16} armv7ahfb-vfpv3 armv7at2hfb-vfpv3" -PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahfb} armv7ahfb-neon" +PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahfb} armv7ahfb-neon" PACKAGE_EXTRA_ARCHS_tune-armv7athfb-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7athfb} armv7ahfb-neon armv7at2hfb-neon" -PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-neon} armv7ahfb-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahfb-neon} armv7ahfb-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-armv7athfb-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7athfb-neon} armv7ahfb-neon-vfpv4 armv7at2hfb-neon-vfpv4" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv7ve.inc b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv7ve.inc index e13156c17..4d9260fec 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv7ve.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/arm/arch-armv7ve.inc @@ -1,4 +1,4 @@ -DEFAULTTUNE ?= "armv7ve" +DEFAULTTUNE ?= "armv7vethf" TUNEVALID[armv7ve] = "Enable instructions for ARMv7ve" TUNECONFLICTS[armv7ve] = "armv4 armv5 armv6 armv7 armv7a" @@ -21,23 +21,23 @@ ARMPKGARCH_tune-armv7ve-neon-vfpv4 ?= "armv7ve" ARMPKGARCH_tune-armv7vet-neon-vfpv4 ?= "armv7ve" TUNE_FEATURES_tune-armv7ve = "arm armv7ve vfp" TUNE_FEATURES_tune-armv7vet = "${TUNE_FEATURES_tune-armv7ve} thumb" -TUNE_FEATURES_tune-armv7ve-vfpv3d16 = "${TUNE_FEATURES_tune-armv7ve} vfpv3d16" +TUNE_FEATURES_tune-armv7ve-vfpv3d16 = "${TUNE_FEATURES_tune-armv7ve} 
vfpv3d16" TUNE_FEATURES_tune-armv7vet-vfpv3d16 = "${TUNE_FEATURES_tune-armv7vet} vfpv3d16" -TUNE_FEATURES_tune-armv7ve-vfpv3 = "${TUNE_FEATURES_tune-armv7ve-vfpv3d16} vfpv3" +TUNE_FEATURES_tune-armv7ve-vfpv3 = "${TUNE_FEATURES_tune-armv7ve-vfpv3d16} vfpv3" TUNE_FEATURES_tune-armv7vet-vfpv3 = "${TUNE_FEATURES_tune-armv7vet-vfpv3d16} vfpv3" -TUNE_FEATURES_tune-armv7ve-neon = "${TUNE_FEATURES_tune-armv7ve} neon" +TUNE_FEATURES_tune-armv7ve-neon = "${TUNE_FEATURES_tune-armv7ve} neon" TUNE_FEATURES_tune-armv7vet-neon = "${TUNE_FEATURES_tune-armv7vet} neon" -TUNE_FEATURES_tune-armv7ve-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon} vfpv4" +TUNE_FEATURES_tune-armv7ve-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon} vfpv4" TUNE_FEATURES_tune-armv7vet-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vet-neon} vfpv4" -PACKAGE_EXTRA_ARCHS_tune-armv7ve = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} armv7ve armv7ve-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv7ve = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} armv7ve armv7ve-vfp" PACKAGE_EXTRA_ARCHS_tune-armv7vet = "${PACKAGE_EXTRA_ARCHS_tune-armv7at} armv7ve armv7ve-vfp armv7vet2-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv7ve-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve} armv7ve-vfpv3d16" +PACKAGE_EXTRA_ARCHS_tune-armv7ve-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve} armv7ve-vfpv3d16" PACKAGE_EXTRA_ARCHS_tune-armv7vet-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet} armv7ve-vfpv3d16 armv7vet2-vfpv3d16" -PACKAGE_EXTRA_ARCHS_tune-armv7ve-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-vfpv3d16} armv7ve-vfpv3" +PACKAGE_EXTRA_ARCHS_tune-armv7ve-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-vfpv3d16} armv7ve-vfpv3" PACKAGE_EXTRA_ARCHS_tune-armv7vet-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet-vfpv3d16} armv7ve-vfpv3 armv7vet2-vfpv3" -PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve} armv7ve-neon" +PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve} armv7ve-neon" PACKAGE_EXTRA_ARCHS_tune-armv7vet-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet} armv7ve-neon armv7vet2-neon" -PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon} armv7ve-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon} armv7ve-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-armv7vet-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet-neon} armv7ve-neon-vfpv4 armv7vet2-neon-vfpv4" # HF Tunes @@ -52,25 +52,25 @@ ARMPKGARCH_tune-armv7vehf-neon ?= "armv7ve" ARMPKGARCH_tune-armv7vethf-neon ?= "armv7ve" ARMPKGARCH_tune-armv7vehf-neon-vfpv4 ?= "armv7ve" ARMPKGARCH_tune-armv7vethf-neon-vfpv4 ?= "armv7ve" -TUNE_FEATURES_tune-armv7vehf = "${TUNE_FEATURES_tune-armv7ve} callconvention-hard" -TUNE_FEATURES_tune-armv7vethf = "${TUNE_FEATURES_tune-armv7vet} callconvention-hard" -TUNE_FEATURES_tune-armv7vehf-vfpv3d16 = "${TUNE_FEATURES_tune-armv7ve-vfpv3d16} callconvention-hard" -TUNE_FEATURES_tune-armv7vethf-vfpv3d16 = "${TUNE_FEATURES_tune-armv7vet-vfpv3d16} callconvention-hard" -TUNE_FEATURES_tune-armv7vehf-vfpv3 = "${TUNE_FEATURES_tune-armv7ve-vfpv3} callconvention-hard" -TUNE_FEATURES_tune-armv7vethf-vfpv3 = "${TUNE_FEATURES_tune-armv7vet-vfpv3} callconvention-hard" -TUNE_FEATURES_tune-armv7vehf-neon = "${TUNE_FEATURES_tune-armv7ve-neon} callconvention-hard" -TUNE_FEATURES_tune-armv7vethf-neon = "${TUNE_FEATURES_tune-armv7vet-neon} callconvention-hard" -TUNE_FEATURES_tune-armv7vehf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon-vfpv4} callconvention-hard" +TUNE_FEATURES_tune-armv7vehf = "${TUNE_FEATURES_tune-armv7ve} 
callconvention-hard" +TUNE_FEATURES_tune-armv7vethf = "${TUNE_FEATURES_tune-armv7vet} callconvention-hard" +TUNE_FEATURES_tune-armv7vehf-vfpv3d16 = "${TUNE_FEATURES_tune-armv7ve-vfpv3d16} callconvention-hard" +TUNE_FEATURES_tune-armv7vethf-vfpv3d16 = "${TUNE_FEATURES_tune-armv7vet-vfpv3d16} callconvention-hard" +TUNE_FEATURES_tune-armv7vehf-vfpv3 = "${TUNE_FEATURES_tune-armv7ve-vfpv3} callconvention-hard" +TUNE_FEATURES_tune-armv7vethf-vfpv3 = "${TUNE_FEATURES_tune-armv7vet-vfpv3} callconvention-hard" +TUNE_FEATURES_tune-armv7vehf-neon = "${TUNE_FEATURES_tune-armv7ve-neon} callconvention-hard" +TUNE_FEATURES_tune-armv7vethf-neon = "${TUNE_FEATURES_tune-armv7vet-neon} callconvention-hard" +TUNE_FEATURES_tune-armv7vehf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon-vfpv4} callconvention-hard" TUNE_FEATURES_tune-armv7vethf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vet-neon-vfpv4} callconvention-hard" -PACKAGE_EXTRA_ARCHS_tune-armv7vehf = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} armv7vehf-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv7vehf = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} armv7vehf-vfp" PACKAGE_EXTRA_ARCHS_tune-armv7vethf = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf} armv7vehf-vfp armv7vet2hf-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv7vehf-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf} armv7vehf-vfpv3d16" +PACKAGE_EXTRA_ARCHS_tune-armv7vehf-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf} armv7vehf-vfpv3d16" PACKAGE_EXTRA_ARCHS_tune-armv7vethf-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf} armv7vehf-vfpv3d16 armv7vet2hf-vfpv3d16" -PACKAGE_EXTRA_ARCHS_tune-armv7vehf-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-vfpv3d16} armv7vehf-vfpv3" +PACKAGE_EXTRA_ARCHS_tune-armv7vehf-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-vfpv3d16} armv7vehf-vfpv3" PACKAGE_EXTRA_ARCHS_tune-armv7vethf-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf-vfpv3d16} armv7vehf-vfpv3 armv7vet2hf-vfpv3" -PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf} armv7vehf-neon" +PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf} armv7vehf-neon" PACKAGE_EXTRA_ARCHS_tune-armv7vethf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf} armv7vehf-neon armv7vet2hf-neon" -PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon} armv7vehf-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon} armv7vehf-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-armv7vethf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf-neon} armv7vehf-neon-vfpv4 armv7vet2hf-neon-vfpv4" # Big Endian @@ -85,25 +85,25 @@ ARMPKGARCH_tune-armv7veb-neon ?= "armv7ve" ARMPKGARCH_tune-armv7vetb-neon ?= "armv7ve" ARMPKGARCH_tune-armv7veb-neon-vfpv4 ?= "armv7ve" ARMPKGARCH_tune-armv7vetb-neon-vfpv4 ?= "armv7ve" -TUNE_FEATURES_tune-armv7veb = "${TUNE_FEATURES_tune-armv7ve} bigendian" -TUNE_FEATURES_tune-armv7vetb = "${TUNE_FEATURES_tune-armv7vet} bigendian" -TUNE_FEATURES_tune-armv7veb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7ve-vfpv3d16} bigendian" -TUNE_FEATURES_tune-armv7vetb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7vet-vfpv3d16} bigendian" -TUNE_FEATURES_tune-armv7veb-vfpv3 = "${TUNE_FEATURES_tune-armv7ve-vfpv3} bigendian" -TUNE_FEATURES_tune-armv7vetb-vfpv3 = "${TUNE_FEATURES_tune-armv7vet-vfpv3} bigendian" -TUNE_FEATURES_tune-armv7veb-neon = "${TUNE_FEATURES_tune-armv7ve-neon} bigendian" -TUNE_FEATURES_tune-armv7vetb-neon = "${TUNE_FEATURES_tune-armv7vet-neon} bigendian" -TUNE_FEATURES_tune-armv7veb-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon-vfpv4} bigendian" 
+TUNE_FEATURES_tune-armv7veb = "${TUNE_FEATURES_tune-armv7ve} bigendian" +TUNE_FEATURES_tune-armv7vetb = "${TUNE_FEATURES_tune-armv7vet} bigendian" +TUNE_FEATURES_tune-armv7veb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7ve-vfpv3d16} bigendian" +TUNE_FEATURES_tune-armv7vetb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7vet-vfpv3d16} bigendian" +TUNE_FEATURES_tune-armv7veb-vfpv3 = "${TUNE_FEATURES_tune-armv7ve-vfpv3} bigendian" +TUNE_FEATURES_tune-armv7vetb-vfpv3 = "${TUNE_FEATURES_tune-armv7vet-vfpv3} bigendian" +TUNE_FEATURES_tune-armv7veb-neon = "${TUNE_FEATURES_tune-armv7ve-neon} bigendian" +TUNE_FEATURES_tune-armv7vetb-neon = "${TUNE_FEATURES_tune-armv7vet-neon} bigendian" +TUNE_FEATURES_tune-armv7veb-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon-vfpv4} bigendian" TUNE_FEATURES_tune-armv7vetb-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vet-neon-vfpv4} bigendian" -PACKAGE_EXTRA_ARCHS_tune-armv7veb = "${PACKAGE_EXTRA_ARCHS_tune-armv7ab} armv7veb-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv7veb = "${PACKAGE_EXTRA_ARCHS_tune-armv7ab} armv7veb-vfp" PACKAGE_EXTRA_ARCHS_tune-armv7vetb = "${PACKAGE_EXTRA_ARCHS_tune-armv7atb} armv7veb-vfp armv7vet2b-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv7veb-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7veb} armv7veb-vfpv3d16" +PACKAGE_EXTRA_ARCHS_tune-armv7veb-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7veb} armv7veb-vfpv3d16" PACKAGE_EXTRA_ARCHS_tune-armv7vetb-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vetb} armv7veb-vfpv3d16 armv7vet2b-vfpv3d16" -PACKAGE_EXTRA_ARCHS_tune-armv7veb-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7veb-vfpv3d16} armv7veb-vfpv3" +PACKAGE_EXTRA_ARCHS_tune-armv7veb-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7veb-vfpv3d16} armv7veb-vfpv3" PACKAGE_EXTRA_ARCHS_tune-armv7vetb-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vetb-vfpv3d16} armv7veb-vfpv3 armv7vet2b-vfpv3" -PACKAGE_EXTRA_ARCHS_tune-armv7veb-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7veb} armv7veb-neon" +PACKAGE_EXTRA_ARCHS_tune-armv7veb-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7veb} armv7veb-neon" PACKAGE_EXTRA_ARCHS_tune-armv7vetb-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vetb} armv7veb-neon armv7vet2b-neon" -PACKAGE_EXTRA_ARCHS_tune-armv7veb-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7veb-neon} armv7veb-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-armv7veb-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7veb-neon} armv7veb-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-armv7vetb-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vetb-neon} armv7veb-neon-vfpv4 armv7vet2b-neon-vfpv4" # Big Endian + HF @@ -118,23 +118,23 @@ ARMPKGARCH_tune-armv7vehfb-neon ?= "armv7ve" ARMPKGARCH_tune-armv7vethfb-neon ?= "armv7ve" ARMPKGARCH_tune-armv7vehfb-neon-vfpv4 ?= "armv7ve" ARMPKGARCH_tune-armv7vethfb-neon-vfpv4 ?= "armv7ve" -TUNE_FEATURES_tune-armv7vehfb = "${TUNE_FEATURES_tune-armv7vehf} bigendian" -TUNE_FEATURES_tune-armv7vethfb = "${TUNE_FEATURES_tune-armv7vethf} bigendian" -TUNE_FEATURES_tune-armv7vehfb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7vehf-vfpv3d16} bigendian" -TUNE_FEATURES_tune-armv7vethfb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7vethf-vfpv3d16} bigendian" -TUNE_FEATURES_tune-armv7vehfb-vfpv3 = "${TUNE_FEATURES_tune-armv7vehf-vfpv3} bigendian" -TUNE_FEATURES_tune-armv7vethfb-vfpv3 = "${TUNE_FEATURES_tune-armv7vethf-vfpv3} bigendian" -TUNE_FEATURES_tune-armv7vehfb-neon = "${TUNE_FEATURES_tune-armv7vehf-neon} bigendian" -TUNE_FEATURES_tune-armv7vethfb-neon = "${TUNE_FEATURES_tune-armv7vethf-neon} bigendian" -TUNE_FEATURES_tune-armv7vehfb-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vehf-neon-vfpv4} bigendian" +TUNE_FEATURES_tune-armv7vehfb = 
"${TUNE_FEATURES_tune-armv7vehf} bigendian" +TUNE_FEATURES_tune-armv7vethfb = "${TUNE_FEATURES_tune-armv7vethf} bigendian" +TUNE_FEATURES_tune-armv7vehfb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7vehf-vfpv3d16} bigendian" +TUNE_FEATURES_tune-armv7vethfb-vfpv3d16 = "${TUNE_FEATURES_tune-armv7vethf-vfpv3d16} bigendian" +TUNE_FEATURES_tune-armv7vehfb-vfpv3 = "${TUNE_FEATURES_tune-armv7vehf-vfpv3} bigendian" +TUNE_FEATURES_tune-armv7vethfb-vfpv3 = "${TUNE_FEATURES_tune-armv7vethf-vfpv3} bigendian" +TUNE_FEATURES_tune-armv7vehfb-neon = "${TUNE_FEATURES_tune-armv7vehf-neon} bigendian" +TUNE_FEATURES_tune-armv7vethfb-neon = "${TUNE_FEATURES_tune-armv7vethf-neon} bigendian" +TUNE_FEATURES_tune-armv7vehfb-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vehf-neon-vfpv4} bigendian" TUNE_FEATURES_tune-armv7vethfb-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vethf-neon-vfpv4} bigendian" -PACKAGE_EXTRA_ARCHS_tune-armv7vehfb = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahfb} armv7vehfb-vfp" +PACKAGE_EXTRA_ARCHS_tune-armv7vehfb = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahfb} armv7vehfb-vfp" PACKAGE_EXTRA_ARCHS_tune-armv7vethfb = "${PACKAGE_EXTRA_ARCHS_tune-armv7athfb} armv7vehfb-vfp armv7vet2hfb-vfp" -PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehfb} armv7vehfb-vfpv3d16" +PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehfb} armv7vehfb-vfpv3d16" PACKAGE_EXTRA_ARCHS_tune-armv7vethfb-vfpv3d16 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethfb} armv7vehfb-vfpv3d16 armv7vet2hfb-vfpv3d16" -PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-vfpv3d16} armv7vehfb-vfpv3" +PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-vfpv3d16} armv7vehfb-vfpv3" PACKAGE_EXTRA_ARCHS_tune-armv7vethfb-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethfb-vfpv3d16} armv7vehfb-vfpv3 armv7vet2hfb-vfpv3" -PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehfb} armv7vehfb-neon" +PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehfb} armv7vehfb-neon" PACKAGE_EXTRA_ARCHS_tune-armv7vethfb-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethfb} armv7vehfb-neon armv7vet2hfb-neon" -PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-neon} armv7vehfb-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehfb-neon} armv7vehfb-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-armv7vethfb-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethfb-neon} armv7vehfb-neon-vfpv4 armv7vet2hfb-neon-vfpv4" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/arm/feature-arm-thumb.inc b/import-layers/yocto-poky/meta/conf/machine/include/arm/feature-arm-thumb.inc index 1faebf7c2..0b47ccad0 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/arm/feature-arm-thumb.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/arm/feature-arm-thumb.inc @@ -1,33 +1,30 @@ -TUNEVALID[thumb] = "Use thumb instructions instead of ARM" -ARM_THUMB_OPT = "${@['arm', 'thumb'][d.getVar('ARM_INSTRUCTION_SET', True) == 'thumb']}" -ARM_THUMB_SUFFIX .= "${@bb.utils.contains('TUNE_FEATURES', 'armv4', 't', '', d)}" -ARM_THUMB_SUFFIX .= "${@bb.utils.contains('TUNE_FEATURES', 'armv5', 't', '', d)}" -ARM_THUMB_SUFFIX .= "${@bb.utils.contains('TUNE_FEATURES', 'armv6', 't', '', d)}" -ARM_THUMB_SUFFIX .= "${@bb.utils.contains('TUNE_FEATURES', 'armv7a', 't2', '', d)}" -ARM_THUMB_SUFFIX .= "${@bb.utils.contains('TUNE_FEATURES', 'armv7r', 't2', '', d)}" -ARM_THUMB_SUFFIX .= 
"${@bb.utils.contains('TUNE_FEATURES', 'armv7m', 't2', '', d)}" -ARM_THUMB_SUFFIX .= "${@bb.utils.contains('TUNE_FEATURES', 'armv7ve', 't2', '', d)}" + +TUNEVALID[thumb] = "Support Thumb instructions" + +ARM_THUMB_SUFFIX = "${@bb.utils.contains_any('TUNE_FEATURES', 'armv4 armv5 armv6', 't', 't2', d)}" # If the device supports ARM, then respect ARM_THUMB_OPT (which can be "arm" or "thumb") -# If the defice doesn't support ARM, then always set "thumb" even when +# If the device doesn't support ARM, then always set "thumb" even when # some recipe explicitly sets ARM_INSTRUCTION_SET to "arm" +ARM_THUMB_OPT = "${@['arm', 'thumb'][d.getVar('ARM_INSTRUCTION_SET') == 'thumb']}" ARM_M_OPT = "${@bb.utils.contains('TUNE_FEATURES', 'arm', '${ARM_THUMB_OPT}', 'thumb', d)}" + python () { if bb.utils.contains('TUNE_FEATURES', 'thumb', False, True, d): return - selected = d.getVar('ARM_INSTRUCTION_SET', True) + selected = d.getVar('ARM_INSTRUCTION_SET') if selected == None: return - used = d.getVar('ARM_M_OPT', True) + used = d.getVar('ARM_M_OPT') if selected != used: - pn = d.getVar('PN', True) + pn = d.getVar('PN') bb.warn("Recipe '%s' selects ARM_INSTRUCTION_SET to be '%s', but tune configuration overrides it to '%s'" % (pn, selected, used)) } TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'thumb', ' -m${ARM_M_OPT}', '', d)}" # Add suffix from ARM_THUMB_SUFFIX only if after all this we still set ARM_M_OPT to thumb -ARMPKGSFX_THUMB .= "${@bb.utils.contains('TUNE_FEATURES', 'thumb', '${ARM_THUMB_SUFFIX}', '', d) if d.getVar('ARM_M_OPT', True) == 'thumb' else ''}" +ARMPKGSFX_THUMB .= "${@bb.utils.contains('TUNE_FEATURES', 'thumb', '${ARM_THUMB_SUFFIX}', '', d) if d.getVar('ARM_M_OPT') == 'thumb' else ''}" # what about armv7m devices which don't support -marm (e.g. Cortex-M3)? TARGET_CC_KERNEL_ARCH += "${@bb.utils.contains('TUNE_FEATURES', 'thumb', '-mno-thumb-interwork -marm', '', d)}" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/arm/feature-arm-vfp.inc b/import-layers/yocto-poky/meta/conf/machine/include/arm/feature-arm-vfp.inc index 9ef31e70e..667b60910 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/arm/feature-arm-vfp.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/arm/feature-arm-vfp.inc @@ -5,10 +5,10 @@ TUNEVALID[vfp] = "Enable Vector Floating Point (vfp) unit." TUNE_CCARGS_MFPU .= "${@bb.utils.contains('TUNE_FEATURES', 'vfp', ' vfp', '', d)}" -TUNE_CCARGS .= "${@ (' -mfpu=%s ' % d.getVar('TUNE_CCARGS_MFPU', True).split()[-1]) if (d.getVar('TUNE_CCARGS_MFPU', True) != '') else ''}" -ARMPKGSFX_FPU = "${@ ('-%s' % d.getVar('TUNE_CCARGS_MFPU', True).split()[-1].replace('vfpv3-d16', 'vfpv3d16')) if (d.getVar('TUNE_CCARGS_MFPU', True) != '') else ''}" +TUNE_CCARGS .= "${@ (' -mfpu=%s ' % d.getVar('TUNE_CCARGS_MFPU').split()[-1]) if (d.getVar('TUNE_CCARGS_MFPU') != '') else ''}" +ARMPKGSFX_FPU = "${@ ('-%s' % d.getVar('TUNE_CCARGS_MFPU').split()[-1].replace('vfpv3-d16', 'vfpv3d16')) if (d.getVar('TUNE_CCARGS_MFPU') != '') else ''}" TUNEVALID[callconvention-hard] = "Enable EABI hard float call convention, requires VFP." 
-TUNE_CCARGS_MFLOAT = "${@ bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'hard', 'softfp', d) if (d.getVar('TUNE_CCARGS_MFPU', True) != '') else '' }" -TUNE_CCARGS .= "${@ ' -mfloat-abi=${TUNE_CCARGS_MFLOAT}' if (d.getVar('TUNE_CCARGS_MFLOAT', True) != '') else ''}" -ARMPKGSFX_EABI = "${@ 'hf' if (d.getVar('TUNE_CCARGS_MFLOAT', True) == 'hard') else ''}" +TUNE_CCARGS_MFLOAT = "${@ bb.utils.contains('TUNE_FEATURES', 'callconvention-hard', 'hard', 'softfp', d) if (d.getVar('TUNE_CCARGS_MFPU') != '') else '' }" +TUNE_CCARGS .= "${@ ' -mfloat-abi=${TUNE_CCARGS_MFLOAT}' if (d.getVar('TUNE_CCARGS_MFLOAT') != '') else ''}" +ARMPKGSFX_EABI = "${@ 'hf' if (d.getVar('TUNE_CCARGS_MFLOAT') == 'hard') else ''}" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/mips/README b/import-layers/yocto-poky/meta/conf/machine/include/mips/README index 62fa1561b..f36e87b45 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/mips/README +++ b/import-layers/yocto-poky/meta/conf/machine/include/mips/README @@ -32,6 +32,9 @@ MIPSPKGSFX_BYTE - This is defined as either blank and "64" for MIPS64 CPUs. MIPSPKGSFX_ENDIAN - For bigendian hardware this is blank, otherwise it's defined as "el". +MIPSPKGSFX_ENDIAN2 - For bigendian hardware this is "eb", otherwise it's +defined as "el". + MIPSPKGSFX_VARIANT_tune- - In the default tunings it is set to the same value as TUNE_ARCH. In custom, optimized tunings, the value should be modified to more precisely describe the tuning. diff --git a/import-layers/yocto-poky/meta/conf/machine/include/mips/arch-mips.inc b/import-layers/yocto-poky/meta/conf/machine/include/mips/arch-mips.inc index 6069ca1ec..2f625119c 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/mips/arch-mips.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/mips/arch-mips.inc @@ -17,7 +17,7 @@ TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'o32', ' -mabi=32', '', d) TUNEVALID[n32] = "MIPS64 n32 ABI" TUNECONFLICTS[n32] = "o32 n64" -ABIEXTENSION .= "${@bb.utils.contains('TUNE_FEATURES', 'n32', 'n32', '' ,d)}" +ABIEXTENSION .= "${@bb.utils.filter('TUNE_FEATURES', 'n32' ,d)}" TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'n32', ' -mabi=n32', '', d)}" # user mode qemu doesn't support mips64 n32: "Invalid ELF image for this architecture" @@ -39,17 +39,29 @@ TUNEVALID[mipsisa32r6] = "Use 32r6" # Package naming MIPSPKGSFX_ENDIAN = "${@bb.utils.contains('TUNE_FEATURES', 'bigendian', '', 'el', d)}" +MIPSPKGSFX_ENDIAN2 = "${@bb.utils.contains('TUNE_FEATURES', 'bigendian', 'eb', 'el', d)}" MIPSPKGSFX_BYTE = "${@bb.utils.contains('TUNE_FEATURES', 'n64' , '64', '', d)}" MIPSPKGSFX_BYTE .= "${@bb.utils.contains('TUNE_FEATURES', 'n32' , '64', '', d)}" MIPSPKGSFX_FPU = "${@bb.utils.contains('TUNE_FEATURES', 'fpu-hard' , '', '-nf', d)}" MIPSPKGSFX_ABI = "${@bb.utils.contains('TUNE_FEATURES', 'n32', '-n32', '', d)}" -MIPSPKGSFX_R6 = "${@bb.utils.contains('TUNE_FEATURES', 'r6', 'r6', '', d)}" +MIPSPKGSFX_R6 = "${@bb.utils.filter('TUNE_FEATURES', 'r6', d)}" MIPSPKGSFX_64R6 = "${@bb.utils.contains('TUNE_FEATURES', 'mipsisa64r6', 'isa', '', d)}" MIPSPKGSFX_32R6 = "${@bb.utils.contains('TUNE_FEATURES', 'mipsisa32r6', 'isa32', '', d)}" TUNE_ARCH = "mips${MIPSPKGSFX_32R6}${MIPSPKGSFX_64R6}${MIPSPKGSFX_BYTE}${MIPSPKGSFX_R6}${MIPSPKGSFX_ENDIAN}" TUNE_PKGARCH = "${MIPSPKGSFX_VARIANT_tune-${DEFAULTTUNE}}${MIPSPKGSFX_FPU}${MIPSPKGSFX_ABI}" +# Various Global Machine Overrides +MACHINEOVERRIDES =. 
"${@bb.utils.contains('TUNE_FEATURES', 'n64', 'mipsarchn64${MIPSPKGSFX_ENDIAN2}:', '' ,d)}" +MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'o32', 'mipsarcho32${MIPSPKGSFX_ENDIAN2}:', '' ,d)}" +MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'n32', 'mipsarchn32${MIPSPKGSFX_ENDIAN2}:', '' ,d)}" +MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'n64', 'mipsarchn64:', '' ,d)}" +MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'o32', 'mipsarcho32:', '' ,d)}" +MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'n32', 'mipsarchn32:', '' ,d)}" +MACHINEOVERRIDES =. "${@bb.utils.contains('TUNE_FEATURES', 'r6', 'mipsarchr6:', '' ,d)}" +MACHINEOVERRIDES =. "mipsarch${MIPSPKGSFX_ENDIAN2}:" +MACHINEOVERRIDES =. "mipsarch:" + # Base tunes AVAILTUNES += "mips mips64-n32 mips64 mipsel mips64el-n32 mips64el mips-nf mips64-nf-n32 mips64-nf mipsel-nf mips64el-nf-n32 mips64el-nf" TUNE_FEATURES_tune-mips = "o32 bigendian fpu-hard" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/mips/feature-mips-mips16e.inc b/import-layers/yocto-poky/meta/conf/machine/include/mips/feature-mips-mips16e.inc index 05011dec4..101d5331b 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/mips/feature-mips-mips16e.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/mips/feature-mips-mips16e.inc @@ -1,8 +1,8 @@ TUNEVALID[mips16e] = "Build target packages with MIPS16e ASE instructions" -MIPS_MIPS16E_OPT = "${@['mno-mips16', 'mips16'][d.getVar('MIPS_INSTRUCTION_SET', True) == 'mips16e']}" +MIPS_MIPS16E_OPT = "${@['mno-mips16', 'mips16'][d.getVar('MIPS_INSTRUCTION_SET') == 'mips16e']}" TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'mips16e', ' -${MIPS_MIPS16E_OPT}', '', d)}" -MIPSPKGSFX_MIPS16E .= "${@bb.utils.contains('TUNE_FEATURES', 'mips16e', '-m16', '', d) if d.getVar('MIPS_MIPS16E_OPT', True) == 'mips16' else ''}" +MIPSPKGSFX_MIPS16E .= "${@bb.utils.contains('TUNE_FEATURES', 'mips16e', '-m16', '', d) if d.getVar('MIPS_MIPS16E_OPT') == 'mips16' else ''}" # Whether to compile with code to allow interworking between the two # instruction sets. 
This allows mips16e code to be executed on a primarily @@ -14,4 +14,4 @@ TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'mips16e', ' ${MIPS16_TUNE OVERRIDES .= "${@bb.utils.contains('TUNE_FEATURES', 'mips16e', ':mips16e', '', d)}" # show status (if compiling in MIPS16e mode) -BUILDCFG_VARS += "${@['', 'MIPS_INSTRUCTION_SET'][d.getVar('MIPS_INSTRUCTION_SET', True) == 'mips16e']}" +BUILDCFG_VARS += "${@['', 'MIPS_INSTRUCTION_SET'][d.getVar('MIPS_INSTRUCTION_SET') == 'mips16e']}" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/qemu.inc b/import-layers/yocto-poky/meta/conf/machine/include/qemu.inc index 2bc4dc2db..0e4103bad 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/qemu.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/qemu.inc @@ -6,10 +6,8 @@ PREFERRED_PROVIDER_virtual/libgles2 ?= "mesa" XSERVER ?= "xserver-xorg \ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'mesa-driver-swrast xserver-xorg-extension-glx', '', d)} \ - xf86-input-evdev \ - xf86-input-mouse \ xf86-video-fbdev \ - xf86-input-keyboard" + " MACHINE_FEATURES = "alsa bluetooth usbgadget screen" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/qemuboot-x86.inc b/import-layers/yocto-poky/meta/conf/machine/include/qemuboot-x86.inc index 06ac983d4..acf9d55c4 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/qemuboot-x86.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/qemuboot-x86.inc @@ -1,15 +1,20 @@ # For runqemu IMAGE_CLASSES += "qemuboot" QB_SYSTEM_NAME_x86 = "qemu-system-i386" -QB_CPU_x86 = "-cpu qemu32" -QB_CPU_KVM_x86 = "-cpu kvm32" +QB_CPU_x86 = "-cpu pentium2" +QB_CPU_KVM_x86 = "-cpu pentium2" QB_SYSTEM_NAME_x86-64 = "qemu-system-x86_64" QB_CPU_x86-64 = "-cpu core2duo" -QB_CPU_KVM_x86-64 = "-cpu kvm64" +QB_CPU_KVM_x86-64 = "-cpu core2duo" QB_AUDIO_DRV = "alsa" QB_AUDIO_OPT = "-soundhw ac97,es1370" -QB_KERNEL_CMDLINE_APPEND = "vga=0 uvesafb.mode_option=640x480-32 oprofile.timer=1 uvesafb.task_timeout=-1" +QB_KERNEL_CMDLINE_APPEND = "vga=0 uvesafb.mode_option=${UVESA_MODE} oprofile.timer=1 uvesafb.task_timeout=-1" # Add the 'virtio-rng-pci' device otherwise the guest may run out of entropy QB_OPT_APPEND = "-vga vmware -show-cursor -usb -usbdevice tablet -device virtio-rng-pci" + +KERNEL_MODULE_AUTOLOAD += "uvesafb" +KERNEL_MODULE_PROBECONF += "uvesafb" +UVESA_MODE ?= "640x480-32" +module_conf_uvesafb = "options uvesafb mode_option=${UVESA_MODE}" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-arm1136jf-s.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-arm1136jf-s.inc index 53994ef36..c5de63e1c 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-arm1136jf-s.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-arm1136jf-s.inc @@ -1,4 +1,4 @@ -DEFAULTTUNE ?= "armv6" +DEFAULTTUNE ?= "armv6hf" require conf/machine/include/arm/arch-armv6.inc diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa15.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa15.inc index a33b27f32..25e99f93d 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa15.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa15.inc @@ -1,4 +1,4 @@ -DEFAULTTUNE ?= "armv7ve-neon" +DEFAULTTUNE ?= "armv7vethf-neon" require conf/machine/include/arm/arch-armv7ve.inc @@ -13,17 +13,17 @@ ARMPKGARCH_tune-cortexa15-neon = "cortexa15" ARMPKGARCH_tune-cortexa15t-neon = "cortexa15" ARMPKGARCH_tune-cortexa15-neon-vfpv4 = "cortexa15" 
ARMPKGARCH_tune-cortexa15t-neon-vfpv4 = "cortexa15" -TUNE_FEATURES_tune-cortexa15 = "${TUNE_FEATURES_tune-armv7ve} cortexa15" -TUNE_FEATURES_tune-cortexa15t = "${TUNE_FEATURES_tune-armv7vet} cortexa15" -TUNE_FEATURES_tune-cortexa15-neon = "${TUNE_FEATURES_tune-armv7ve-neon} cortexa15" -TUNE_FEATURES_tune-cortexa15t-neon = "${TUNE_FEATURES_tune-armv7vet-neon} cortexa15" -TUNE_FEATURES_tune-cortexa15-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon-vfpv4} cortexa15" +TUNE_FEATURES_tune-cortexa15 = "${TUNE_FEATURES_tune-armv7ve} cortexa15" +TUNE_FEATURES_tune-cortexa15t = "${TUNE_FEATURES_tune-armv7vet} cortexa15" +TUNE_FEATURES_tune-cortexa15-neon = "${TUNE_FEATURES_tune-armv7ve-neon} cortexa15" +TUNE_FEATURES_tune-cortexa15t-neon = "${TUNE_FEATURES_tune-armv7vet-neon} cortexa15" +TUNE_FEATURES_tune-cortexa15-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon-vfpv4} cortexa15" TUNE_FEATURES_tune-cortexa15t-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vet-neon-vfpv4} cortexa15" -PACKAGE_EXTRA_ARCHS_tune-cortexa15 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve} cortexa15-vfp" +PACKAGE_EXTRA_ARCHS_tune-cortexa15 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve} cortexa15-vfp" PACKAGE_EXTRA_ARCHS_tune-cortexa15t = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet} cortexa15-vfp cortexa15t2-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa15-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon} cortexa15-vfp cortexa15-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa15-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon} cortexa15-vfp cortexa15-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa15t-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet-neon} cortexa15-vfp cortexa15-neon cortexa15t2-vfp cortexa15t2-neon" -PACKAGE_EXTRA_ARCHS_tune-cortexa15-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon-vfpv4} cortexa15-vfp cortexa15-neon cortexa15-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-cortexa15-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon-vfpv4} cortexa15-vfp cortexa15-neon cortexa15-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-cortexa15t-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet-neon-vfpv4} cortexa15-vfp cortexa15-neon cortexa15-neon-vfpv4 cortexa15t2-vfp cortexa15t2-neon cortexa15t2-neon-vfpv4" # HF Tunes @@ -34,15 +34,15 @@ ARMPKGARCH_tune-cortexa15hf-neon = "cortexa15" ARMPKGARCH_tune-cortexa15thf-neon = "cortexa15" ARMPKGARCH_tune-cortexa15hf-neon-vfpv4 = "cortexa15" ARMPKGARCH_tune-cortexa15thf-neon-vfpv4 = "cortexa15" -TUNE_FEATURES_tune-cortexa15hf = "${TUNE_FEATURES_tune-armv7vehf} cortexa15" -TUNE_FEATURES_tune-cortexa15thf = "${TUNE_FEATURES_tune-armv7vethf} cortexa15" -TUNE_FEATURES_tune-cortexa15hf-neon = "${TUNE_FEATURES_tune-armv7vehf-neon} cortexa15" -TUNE_FEATURES_tune-cortexa15thf-neon = "${TUNE_FEATURES_tune-armv7vethf-neon} cortexa15" -TUNE_FEATURES_tune-cortexa15hf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vehf-neon-vfpv4} cortexa15" +TUNE_FEATURES_tune-cortexa15hf = "${TUNE_FEATURES_tune-armv7vehf} cortexa15" +TUNE_FEATURES_tune-cortexa15thf = "${TUNE_FEATURES_tune-armv7vethf} cortexa15" +TUNE_FEATURES_tune-cortexa15hf-neon = "${TUNE_FEATURES_tune-armv7vehf-neon} cortexa15" +TUNE_FEATURES_tune-cortexa15thf-neon = "${TUNE_FEATURES_tune-armv7vethf-neon} cortexa15" +TUNE_FEATURES_tune-cortexa15hf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vehf-neon-vfpv4} cortexa15" TUNE_FEATURES_tune-cortexa15thf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vethf-neon-vfpv4} cortexa15" -PACKAGE_EXTRA_ARCHS_tune-cortexa15hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf} cortexa15hf-vfp" +PACKAGE_EXTRA_ARCHS_tune-cortexa15hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf} cortexa15hf-vfp" 
PACKAGE_EXTRA_ARCHS_tune-cortexa15thf = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf} cortexa15hf-vfp cortexa15t2hf-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa15hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon} cortexa15hf-vfp cortexa15hf-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa15hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon} cortexa15hf-vfp cortexa15hf-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa15thf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf-neon} cortexa15hf-vfp cortexa15hf-neon cortexa15t2hf-vfp cortexa15t2hf-neon" -PACKAGE_EXTRA_ARCHS_tune-cortexa15hf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon-vfpv4} cortexa15hf-vfp cortexa15hf-neon cortexa15hf-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-cortexa15hf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon-vfpv4} cortexa15hf-vfp cortexa15hf-neon cortexa15hf-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-cortexa15thf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf-neon-vfpv4} cortexa15hf-vfp cortexa15hf-neon cortexa15hf-neon-vfpv4 cortexa15t2hf-vfp cortexa15t2hf-neon cortexa15t2hf-neon-vfpv4" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa17.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa17.inc index d08b00296..40392f9bc 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa17.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa17.inc @@ -1,4 +1,4 @@ -DEFAULTTUNE ?= "armv7ve-neon" +DEFAULTTUNE ?= "armv7vethf-neon" require conf/machine/include/arm/arch-armv7ve.inc @@ -13,17 +13,17 @@ ARMPKGARCH_tune-cortexa17-neon = "cortexa17" ARMPKGARCH_tune-cortexa17t-neon = "cortexa17" ARMPKGARCH_tune-cortexa17-neon-vfpv4 = "cortexa17" ARMPKGARCH_tune-cortexa17t-neon-vfpv4 = "cortexa17" -TUNE_FEATURES_tune-cortexa17 = "${TUNE_FEATURES_tune-armv7ve} cortexa17" -TUNE_FEATURES_tune-cortexa17t = "${TUNE_FEATURES_tune-armv7vet} cortexa17" -TUNE_FEATURES_tune-cortexa17-neon = "${TUNE_FEATURES_tune-armv7ve-neon} cortexa17" -TUNE_FEATURES_tune-cortexa17t-neon = "${TUNE_FEATURES_tune-armv7vet-neon} cortexa17" -TUNE_FEATURES_tune-cortexa17-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon-vfpv4} cortexa17" +TUNE_FEATURES_tune-cortexa17 = "${TUNE_FEATURES_tune-armv7ve} cortexa17" +TUNE_FEATURES_tune-cortexa17t = "${TUNE_FEATURES_tune-armv7vet} cortexa17" +TUNE_FEATURES_tune-cortexa17-neon = "${TUNE_FEATURES_tune-armv7ve-neon} cortexa17" +TUNE_FEATURES_tune-cortexa17t-neon = "${TUNE_FEATURES_tune-armv7vet-neon} cortexa17" +TUNE_FEATURES_tune-cortexa17-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon-vfpv4} cortexa17" TUNE_FEATURES_tune-cortexa17t-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vet-neon-vfpv4} cortexa17" -PACKAGE_EXTRA_ARCHS_tune-cortexa17 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve} cortexa17-vfp" +PACKAGE_EXTRA_ARCHS_tune-cortexa17 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve} cortexa17-vfp" PACKAGE_EXTRA_ARCHS_tune-cortexa17t = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet} cortexa17-vfp cortexa17t2-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa17-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon} cortexa17-vfp cortexa17-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa17-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon} cortexa17-vfp cortexa17-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa17t-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet-neon} cortexa17-vfp cortexa17-neon cortexa17t2-vfp cortexa17t2-neon" -PACKAGE_EXTRA_ARCHS_tune-cortexa17-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon-vfpv4} cortexa17-vfp cortexa17-neon cortexa17-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-cortexa17-neon-vfpv4 = 
"${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon-vfpv4} cortexa17-vfp cortexa17-neon cortexa17-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-cortexa17t-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet-neon-vfpv4} cortexa17-vfp cortexa17-neon cortexa17-neon-vfpv4 cortexa17t2-vfp cortexa17t2-neon cortexa17t2-neon-vfpv4" # HF Tunes @@ -34,15 +34,15 @@ ARMPKGARCH_tune-cortexa17hf-neon = "cortexa17" ARMPKGARCH_tune-cortexa17thf-neon = "cortexa17" ARMPKGARCH_tune-cortexa17hf-neon-vfpv4 = "cortexa17" ARMPKGARCH_tune-cortexa17thf-neon-vfpv4 = "cortexa17" -TUNE_FEATURES_tune-cortexa17hf = "${TUNE_FEATURES_tune-armv7vehf} cortexa17" -TUNE_FEATURES_tune-cortexa17thf = "${TUNE_FEATURES_tune-armv7vethf} cortexa17" -TUNE_FEATURES_tune-cortexa17hf-neon = "${TUNE_FEATURES_tune-armv7vehf-neon} cortexa17" -TUNE_FEATURES_tune-cortexa17thf-neon = "${TUNE_FEATURES_tune-armv7vethf-neon} cortexa17" -TUNE_FEATURES_tune-cortexa17hf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vehf-neon-vfpv4} cortexa17" +TUNE_FEATURES_tune-cortexa17hf = "${TUNE_FEATURES_tune-armv7vehf} cortexa17" +TUNE_FEATURES_tune-cortexa17thf = "${TUNE_FEATURES_tune-armv7vethf} cortexa17" +TUNE_FEATURES_tune-cortexa17hf-neon = "${TUNE_FEATURES_tune-armv7vehf-neon} cortexa17" +TUNE_FEATURES_tune-cortexa17thf-neon = "${TUNE_FEATURES_tune-armv7vethf-neon} cortexa17" +TUNE_FEATURES_tune-cortexa17hf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vehf-neon-vfpv4} cortexa17" TUNE_FEATURES_tune-cortexa17thf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vethf-neon-vfpv4} cortexa17" -PACKAGE_EXTRA_ARCHS_tune-cortexa17hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf} cortexa17hf-vfp" +PACKAGE_EXTRA_ARCHS_tune-cortexa17hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf} cortexa17hf-vfp" PACKAGE_EXTRA_ARCHS_tune-cortexa17thf = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf} cortexa17hf-vfp cortexa17t2hf-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa17hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon} cortexa17hf-vfp cortexa17hf-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa17hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon} cortexa17hf-vfp cortexa17hf-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa17thf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf-neon} cortexa17hf-vfp cortexa17hf-neon cortexa17t2hf-vfp cortexa17t2hf-neon" -PACKAGE_EXTRA_ARCHS_tune-cortexa17hf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon-vfpv4} cortexa17hf-vfp cortexa17hf-neon cortexa17hf-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-cortexa17hf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon-vfpv4} cortexa17hf-vfp cortexa17hf-neon cortexa17hf-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-cortexa17thf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf-neon-vfpv4} cortexa17hf-vfp cortexa17hf-neon cortexa17hf-neon-vfpv4 cortexa17t2hf-vfp cortexa17t2hf-neon cortexa17t2hf-neon-vfpv4" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa5.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa5.inc index c7e5ab96a..1f0cda664 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa5.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa5.inc @@ -1,4 +1,4 @@ -DEFAULTTUNE ?= "armv7a-neon" +DEFAULTTUNE ?= "armv7athf-neon" require conf/machine/include/arm/arch-armv7a.inc @@ -11,13 +11,13 @@ ARMPKGARCH_tune-cortexa5 = "cortexa5" ARMPKGARCH_tune-cortexa5t = "cortexa5" ARMPKGARCH_tune-cortexa5-neon = "cortexa5" ARMPKGARCH_tune-cortexa5t-neon = "cortexa5" -TUNE_FEATURES_tune-cortexa5 = "${TUNE_FEATURES_tune-armv7a} cortexa5" -TUNE_FEATURES_tune-cortexa5t = "${TUNE_FEATURES_tune-armv7at} cortexa5" 
-TUNE_FEATURES_tune-cortexa5-neon = "${TUNE_FEATURES_tune-armv7a-neon} cortexa5" -TUNE_FEATURES_tune-cortexa5t-neon = "${TUNE_FEATURES_tune-armv7at-neon} cortexa5" -PACKAGE_EXTRA_ARCHS_tune-cortexa5 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} cortexa5-vfp" +TUNE_FEATURES_tune-cortexa5 = "${TUNE_FEATURES_tune-armv7a} cortexa5" +TUNE_FEATURES_tune-cortexa5t = "${TUNE_FEATURES_tune-armv7at} cortexa5" +TUNE_FEATURES_tune-cortexa5-neon = "${TUNE_FEATURES_tune-armv7a-neon} cortexa5" +TUNE_FEATURES_tune-cortexa5t-neon = "${TUNE_FEATURES_tune-armv7at-neon} cortexa5" +PACKAGE_EXTRA_ARCHS_tune-cortexa5 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} cortexa5-vfp" PACKAGE_EXTRA_ARCHS_tune-cortexa5t = "${PACKAGE_EXTRA_ARCHS_tune-armv7at} cortexa5-vfp cortexa5t2-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa5-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-neon} cortexa5-vfp cortexa5-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa5-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-neon} cortexa5-vfp cortexa5-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa5t-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7at-neon} cortexa5-vfp cortexa5-neon cortexa5t2-vfp cortexa5t2-neon" # HF Tunes @@ -26,11 +26,11 @@ ARMPKGARCH_tune-cortexa5hf = "cortexa5" ARMPKGARCH_tune-cortexa5thf = "cortexa5" ARMPKGARCH_tune-cortexa5hf-neon = "cortexa5" ARMPKGARCH_tune-cortexa5thf-neon = "cortexa5" -TUNE_FEATURES_tune-cortexa5hf = "${TUNE_FEATURES_tune-armv7ahf} cortexa5" -TUNE_FEATURES_tune-cortexa5thf = "${TUNE_FEATURES_tune-armv7athf} cortexa5" -TUNE_FEATURES_tune-cortexa5hf-neon = "${TUNE_FEATURES_tune-armv7ahf-neon} cortexa5" -TUNE_FEATURES_tune-cortexa5thf-neon = "${TUNE_FEATURES_tune-armv7athf-neon} cortexa5" -PACKAGE_EXTRA_ARCHS_tune-cortexa5hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} cortexa5hf-vfp" +TUNE_FEATURES_tune-cortexa5hf = "${TUNE_FEATURES_tune-armv7ahf} cortexa5" +TUNE_FEATURES_tune-cortexa5thf = "${TUNE_FEATURES_tune-armv7athf} cortexa5" +TUNE_FEATURES_tune-cortexa5hf-neon = "${TUNE_FEATURES_tune-armv7ahf-neon} cortexa5" +TUNE_FEATURES_tune-cortexa5thf-neon = "${TUNE_FEATURES_tune-armv7athf-neon} cortexa5" +PACKAGE_EXTRA_ARCHS_tune-cortexa5hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} cortexa5hf-vfp" PACKAGE_EXTRA_ARCHS_tune-cortexa5thf = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf} cortexa5hf-vfp cortexa5t2hf-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa5hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon} cortexa5hf-vfp cortexa5hf-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa5hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon} cortexa5hf-vfp cortexa5hf-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa5thf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf-neon} cortexa5hf-vfp cortexa5hf-neon cortexa5t2hf-vfp cortexa5t2hf-neon" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa7.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa7.inc index 10b6ea7f3..52415d9c8 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa7.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa7.inc @@ -1,4 +1,4 @@ -DEFAULTTUNE ?= "armv7ve-neon" +DEFAULTTUNE ?= "armv7vethf-neon" require conf/machine/include/arm/arch-armv7ve.inc @@ -13,17 +13,17 @@ ARMPKGARCH_tune-cortexa7-neon = "cortexa7" ARMPKGARCH_tune-cortexa7t-neon = "cortexa7" ARMPKGARCH_tune-cortexa7-neon-vfpv4 = "cortexa7" ARMPKGARCH_tune-cortexa7t-neon-vfpv4 = "cortexa7" -TUNE_FEATURES_tune-cortexa7 = "${TUNE_FEATURES_tune-armv7ve} cortexa7" -TUNE_FEATURES_tune-cortexa7t = "${TUNE_FEATURES_tune-armv7vet} cortexa7" -TUNE_FEATURES_tune-cortexa7-neon = "${TUNE_FEATURES_tune-armv7ve-neon} 
cortexa7" -TUNE_FEATURES_tune-cortexa7t-neon = "${TUNE_FEATURES_tune-armv7vet-neon} cortexa7" -TUNE_FEATURES_tune-cortexa7-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon-vfpv4} cortexa7" +TUNE_FEATURES_tune-cortexa7 = "${TUNE_FEATURES_tune-armv7ve} cortexa7" +TUNE_FEATURES_tune-cortexa7t = "${TUNE_FEATURES_tune-armv7vet} cortexa7" +TUNE_FEATURES_tune-cortexa7-neon = "${TUNE_FEATURES_tune-armv7ve-neon} cortexa7" +TUNE_FEATURES_tune-cortexa7t-neon = "${TUNE_FEATURES_tune-armv7vet-neon} cortexa7" +TUNE_FEATURES_tune-cortexa7-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7ve-neon-vfpv4} cortexa7" TUNE_FEATURES_tune-cortexa7t-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vet-neon-vfpv4} cortexa7" -PACKAGE_EXTRA_ARCHS_tune-cortexa7 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve} cortexa7-vfp" +PACKAGE_EXTRA_ARCHS_tune-cortexa7 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve} cortexa7-vfp" PACKAGE_EXTRA_ARCHS_tune-cortexa7t = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet} cortexa7-vfp cortexa7t2-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa7-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon} cortexa7-vfp cortexa7-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa7-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon} cortexa7-vfp cortexa7-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa7t-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet-neon} cortexa7-vfp cortexa7-neon cortexa7t2-vfp cortexa7t2-neon" -PACKAGE_EXTRA_ARCHS_tune-cortexa7-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon-vfpv4} cortexa7-vfp cortexa7-neon cortexa7-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-cortexa7-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ve-neon-vfpv4} cortexa7-vfp cortexa7-neon cortexa7-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-cortexa7t-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vet-neon-vfpv4} cortexa7-vfp cortexa7-neon cortexa7-neon-vfpv4 cortexa7t2-vfp cortexa7t2-neon cortexa7t2-neon-vfpv4" # HF Tunes @@ -34,15 +34,15 @@ ARMPKGARCH_tune-cortexa7hf-neon = "cortexa7" ARMPKGARCH_tune-cortexa7thf-neon = "cortexa7" ARMPKGARCH_tune-cortexa7hf-neon-vfpv4 = "cortexa7" ARMPKGARCH_tune-cortexa7thf-neon-vfpv4 = "cortexa7" -TUNE_FEATURES_tune-cortexa7hf = "${TUNE_FEATURES_tune-armv7vehf} cortexa7" -TUNE_FEATURES_tune-cortexa7thf = "${TUNE_FEATURES_tune-armv7vethf} cortexa7" -TUNE_FEATURES_tune-cortexa7hf-neon = "${TUNE_FEATURES_tune-armv7vehf-neon} cortexa7" -TUNE_FEATURES_tune-cortexa7thf-neon = "${TUNE_FEATURES_tune-armv7vethf-neon} cortexa7" -TUNE_FEATURES_tune-cortexa7hf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vehf-neon-vfpv4} cortexa7" +TUNE_FEATURES_tune-cortexa7hf = "${TUNE_FEATURES_tune-armv7vehf} cortexa7" +TUNE_FEATURES_tune-cortexa7thf = "${TUNE_FEATURES_tune-armv7vethf} cortexa7" +TUNE_FEATURES_tune-cortexa7hf-neon = "${TUNE_FEATURES_tune-armv7vehf-neon} cortexa7" +TUNE_FEATURES_tune-cortexa7thf-neon = "${TUNE_FEATURES_tune-armv7vethf-neon} cortexa7" +TUNE_FEATURES_tune-cortexa7hf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vehf-neon-vfpv4} cortexa7" TUNE_FEATURES_tune-cortexa7thf-neon-vfpv4 = "${TUNE_FEATURES_tune-armv7vethf-neon-vfpv4} cortexa7" -PACKAGE_EXTRA_ARCHS_tune-cortexa7hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf} cortexa7hf-vfp" +PACKAGE_EXTRA_ARCHS_tune-cortexa7hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf} cortexa7hf-vfp" PACKAGE_EXTRA_ARCHS_tune-cortexa7thf = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf} cortexa7hf-vfp cortexa7t2hf-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa7hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon} cortexa7hf-vfp cortexa7hf-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa7hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon} cortexa7hf-vfp cortexa7hf-neon" 
PACKAGE_EXTRA_ARCHS_tune-cortexa7thf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf-neon} cortexa7hf-vfp cortexa7hf-neon cortexa7t2hf-vfp cortexa7t2hf-neon" -PACKAGE_EXTRA_ARCHS_tune-cortexa7hf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon-vfpv4} cortexa7hf-vfp cortexa7hf-neon cortexa7hf-neon-vfpv4" +PACKAGE_EXTRA_ARCHS_tune-cortexa7hf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vehf-neon-vfpv4} cortexa7hf-vfp cortexa7hf-neon cortexa7hf-neon-vfpv4" PACKAGE_EXTRA_ARCHS_tune-cortexa7thf-neon-vfpv4 = "${PACKAGE_EXTRA_ARCHS_tune-armv7vethf-neon-vfpv4} cortexa7hf-vfp cortexa7hf-neon cortexa7hf-neon-vfpv4 cortexa7t2hf-vfp cortexa7t2hf-neon cortexa7t2hf-neon-vfpv4" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa8.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa8.inc index c8ce02169..8ee8de97f 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa8.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa8.inc @@ -1,4 +1,4 @@ -DEFAULTTUNE ?= "armv7a-neon" +DEFAULTTUNE ?= "armv7athf-neon" require conf/machine/include/arm/arch-armv7a.inc @@ -11,13 +11,13 @@ ARMPKGARCH_tune-cortexa8 = "cortexa8" ARMPKGARCH_tune-cortexa8t = "cortexa8" ARMPKGARCH_tune-cortexa8-neon = "cortexa8" ARMPKGARCH_tune-cortexa8t-neon = "cortexa8" -TUNE_FEATURES_tune-cortexa8 = "${TUNE_FEATURES_tune-armv7a} cortexa8" -TUNE_FEATURES_tune-cortexa8t = "${TUNE_FEATURES_tune-armv7at} cortexa8" -TUNE_FEATURES_tune-cortexa8-neon = "${TUNE_FEATURES_tune-armv7a-neon} cortexa8" -TUNE_FEATURES_tune-cortexa8t-neon = "${TUNE_FEATURES_tune-armv7at-neon} cortexa8" -PACKAGE_EXTRA_ARCHS_tune-cortexa8 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} cortexa8-vfp" +TUNE_FEATURES_tune-cortexa8 = "${TUNE_FEATURES_tune-armv7a} cortexa8" +TUNE_FEATURES_tune-cortexa8t = "${TUNE_FEATURES_tune-armv7at} cortexa8" +TUNE_FEATURES_tune-cortexa8-neon = "${TUNE_FEATURES_tune-armv7a-neon} cortexa8" +TUNE_FEATURES_tune-cortexa8t-neon = "${TUNE_FEATURES_tune-armv7at-neon} cortexa8" +PACKAGE_EXTRA_ARCHS_tune-cortexa8 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} cortexa8-vfp" PACKAGE_EXTRA_ARCHS_tune-cortexa8t = "${PACKAGE_EXTRA_ARCHS_tune-armv7at} cortexa8-vfp cortexa8t2-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa8-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-neon} cortexa8-vfp cortexa8-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa8-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-neon} cortexa8-vfp cortexa8-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa8t-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7at-neon} cortexa8-vfp cortexa8-neon cortexa8t2-vfp cortexa8t2-neon" # HF Tunes @@ -26,11 +26,11 @@ ARMPKGARCH_tune-cortexa8hf = "cortexa8" ARMPKGARCH_tune-cortexa8thf = "cortexa8" ARMPKGARCH_tune-cortexa8hf-neon = "cortexa8" ARMPKGARCH_tune-cortexa8thf-neon = "cortexa8" -TUNE_FEATURES_tune-cortexa8hf = "${TUNE_FEATURES_tune-armv7ahf} cortexa8" -TUNE_FEATURES_tune-cortexa8thf = "${TUNE_FEATURES_tune-armv7athf} cortexa8" -TUNE_FEATURES_tune-cortexa8hf-neon = "${TUNE_FEATURES_tune-armv7ahf-neon} cortexa8" -TUNE_FEATURES_tune-cortexa8thf-neon = "${TUNE_FEATURES_tune-armv7athf-neon} cortexa8" -PACKAGE_EXTRA_ARCHS_tune-cortexa8hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} cortexa8hf-vfp" +TUNE_FEATURES_tune-cortexa8hf = "${TUNE_FEATURES_tune-armv7ahf} cortexa8" +TUNE_FEATURES_tune-cortexa8thf = "${TUNE_FEATURES_tune-armv7athf} cortexa8" +TUNE_FEATURES_tune-cortexa8hf-neon = "${TUNE_FEATURES_tune-armv7ahf-neon} cortexa8" +TUNE_FEATURES_tune-cortexa8thf-neon = "${TUNE_FEATURES_tune-armv7athf-neon} cortexa8" 
+PACKAGE_EXTRA_ARCHS_tune-cortexa8hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} cortexa8hf-vfp" PACKAGE_EXTRA_ARCHS_tune-cortexa8thf = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf} cortexa8hf-vfp cortexa8t2hf-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa8hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon} cortexa8hf-vfp cortexa8hf-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa8hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon} cortexa8hf-vfp cortexa8hf-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa8thf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf-neon} cortexa8hf-vfp cortexa8hf-neon cortexa8t2hf-vfp cortexa8t2hf-neon" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa9.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa9.inc index 3d8dc1d8c..0cf323c96 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa9.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-cortexa9.inc @@ -1,4 +1,4 @@ -DEFAULTTUNE ?= "armv7a-neon" +DEFAULTTUNE ?= "armv7athf-neon" require conf/machine/include/arm/arch-armv7a.inc @@ -11,13 +11,13 @@ ARMPKGARCH_tune-cortexa9 = "cortexa9" ARMPKGARCH_tune-cortexa9t = "cortexa9" ARMPKGARCH_tune-cortexa9-neon = "cortexa9" ARMPKGARCH_tune-cortexa9t-neon = "cortexa9" -TUNE_FEATURES_tune-cortexa9 = "${TUNE_FEATURES_tune-armv7a} cortexa9" -TUNE_FEATURES_tune-cortexa9t = "${TUNE_FEATURES_tune-armv7at} cortexa9" -TUNE_FEATURES_tune-cortexa9-neon = "${TUNE_FEATURES_tune-armv7a-neon} cortexa9" -TUNE_FEATURES_tune-cortexa9t-neon = "${TUNE_FEATURES_tune-armv7at-neon} cortexa9" -PACKAGE_EXTRA_ARCHS_tune-cortexa9 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} cortexa9-vfp" +TUNE_FEATURES_tune-cortexa9 = "${TUNE_FEATURES_tune-armv7a} cortexa9" +TUNE_FEATURES_tune-cortexa9t = "${TUNE_FEATURES_tune-armv7at} cortexa9" +TUNE_FEATURES_tune-cortexa9-neon = "${TUNE_FEATURES_tune-armv7a-neon} cortexa9" +TUNE_FEATURES_tune-cortexa9t-neon = "${TUNE_FEATURES_tune-armv7at-neon} cortexa9" +PACKAGE_EXTRA_ARCHS_tune-cortexa9 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a} cortexa9-vfp" PACKAGE_EXTRA_ARCHS_tune-cortexa9t = "${PACKAGE_EXTRA_ARCHS_tune-armv7at} cortexa9-vfp cortexa9t2-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa9-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-neon} cortexa9-vfp cortexa9-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa9-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-neon} cortexa9-vfp cortexa9-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa9t-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7at-neon} cortexa9-vfp cortexa9-neon cortexa9t2-vfp cortexa9t2-neon" # HF Tunes @@ -26,13 +26,13 @@ ARMPKGARCH_tune-cortexa9hf = "cortexa9" ARMPKGARCH_tune-cortexa9thf = "cortexa9" ARMPKGARCH_tune-cortexa9hf-neon = "cortexa9" ARMPKGARCH_tune-cortexa9thf-neon = "cortexa9" -TUNE_FEATURES_tune-cortexa9hf = "${TUNE_FEATURES_tune-armv7ahf} cortexa9" -TUNE_FEATURES_tune-cortexa9thf = "${TUNE_FEATURES_tune-armv7athf} cortexa9" -TUNE_FEATURES_tune-cortexa9hf-neon = "${TUNE_FEATURES_tune-armv7ahf-neon} cortexa9" -TUNE_FEATURES_tune-cortexa9thf-neon = "${TUNE_FEATURES_tune-armv7athf-neon} cortexa9" -PACKAGE_EXTRA_ARCHS_tune-cortexa9hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} cortexa9hf-vfp" +TUNE_FEATURES_tune-cortexa9hf = "${TUNE_FEATURES_tune-armv7ahf} cortexa9" +TUNE_FEATURES_tune-cortexa9thf = "${TUNE_FEATURES_tune-armv7athf} cortexa9" +TUNE_FEATURES_tune-cortexa9hf-neon = "${TUNE_FEATURES_tune-armv7ahf-neon} cortexa9" +TUNE_FEATURES_tune-cortexa9thf-neon = "${TUNE_FEATURES_tune-armv7athf-neon} cortexa9" +PACKAGE_EXTRA_ARCHS_tune-cortexa9hf = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf} cortexa9hf-vfp" 
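
Note: besides the alignment cleanups, each of these Cortex-A tune files (cortexa5, a7, a8, a9, a15, a17) moves DEFAULTTUNE from the soft-float "...-neon" tune to the hard-float "...thf-neon" variant, so machines that include them now default to the hard-float calling convention unless they override DEFAULTTUNE. One way to confirm what a configured build actually ends up with is to read the datastore through bb.tinfoil, the same interface this patch uses in buildhistory_analysis.py further down; the sketch below is an assumption-laden example that expects to be run from an initialized build directory with BitBake's lib/ on PYTHONPATH.

    # Sketch: print the effective tune settings of the current build directory.
    import bb.tinfoil

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)   # parse configuration only, no recipes
        d = tinfoil.config_data
        for var in ('DEFAULTTUNE', 'TUNE_FEATURES', 'TARGET_FPU'):
            print('%s = %s' % (var, d.getVar(var)))

With one of the updated includes in effect, TUNE_FEATURES should now report the hard-float variant by default.
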
PACKAGE_EXTRA_ARCHS_tune-cortexa9thf = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf} cortexa9hf-vfp cortexa9t2hf-vfp" -PACKAGE_EXTRA_ARCHS_tune-cortexa9hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon} cortexa9hf-vfp cortexa9hf-neon" +PACKAGE_EXTRA_ARCHS_tune-cortexa9hf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-neon} cortexa9hf-vfp cortexa9hf-neon" PACKAGE_EXTRA_ARCHS_tune-cortexa9thf-neon = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf-neon} cortexa9hf-vfp cortexa9hf-neon cortexa9t2hf-vfp cortexa9t2hf-neon" # VFPv3 Tunes @@ -41,11 +41,11 @@ ARMPKGARCH_tune-cortexa9-vfpv3 = "cortexa9" ARMPKGARCH_tune-cortexa9t-vfpv3 = "cortexa9" ARMPKGARCH_tune-cortexa9hf-vfpv3 = "cortexa9" ARMPKGARCH_tune-cortexa9thf-vfpv3 = "cortexa9" -TUNE_FEATURES_tune-cortexa9-vfpv3 = "${TUNE_FEATURES_tune-armv7a-vfpv3} cortexa9" -TUNE_FEATURES_tune-cortexa9t-vfpv3 = "${TUNE_FEATURES_tune-armv7at-vfpv3} cortexa9" -TUNE_FEATURES_tune-cortexa9hf-vfpv3 = "${TUNE_FEATURES_tune-armv7ahf-vfpv3} cortexa9" -TUNE_FEATURES_tune-cortexa9thf-vfpv3 = "${TUNE_FEATURES_tune-armv7athf-vfpv3} cortexa9" -PACKAGE_EXTRA_ARCHS_tune-cortexa9-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-vfpv3} cortexa9-vfp cortexa9-vfpv3" +TUNE_FEATURES_tune-cortexa9-vfpv3 = "${TUNE_FEATURES_tune-armv7a-vfpv3} cortexa9" +TUNE_FEATURES_tune-cortexa9t-vfpv3 = "${TUNE_FEATURES_tune-armv7at-vfpv3} cortexa9" +TUNE_FEATURES_tune-cortexa9hf-vfpv3 = "${TUNE_FEATURES_tune-armv7ahf-vfpv3} cortexa9" +TUNE_FEATURES_tune-cortexa9thf-vfpv3 = "${TUNE_FEATURES_tune-armv7athf-vfpv3} cortexa9" +PACKAGE_EXTRA_ARCHS_tune-cortexa9-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7a-vfpv3} cortexa9-vfp cortexa9-vfpv3" PACKAGE_EXTRA_ARCHS_tune-cortexa9t-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7at-vfpv3} cortexa9-vfp cortexa9-vfpv3 cortexa9t2-vfp cortexa9t2-vfpv3" -PACKAGE_EXTRA_ARCHS_tune-cortexa9hf-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-vfpv3} cortexa9hf-vfp cortexa9hf-vfpv3" +PACKAGE_EXTRA_ARCHS_tune-cortexa9hf-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7ahf-vfpv3} cortexa9hf-vfp cortexa9hf-vfpv3" PACKAGE_EXTRA_ARCHS_tune-cortexa9thf-vfpv3 = "${PACKAGE_EXTRA_ARCHS_tune-armv7athf-vfpv3} cortexa9hf-vfp cortexa9hf-vfpv3 cortexa9t2hf-vfp cortexa9t2hf-vfpv3" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-mips32r6.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-mips32r6.inc index 47213198c..dea33eace 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-mips32r6.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-mips32r6.inc @@ -9,21 +9,21 @@ TUNE_CCARGS .= "${@bb.utils.contains('TUNE_FEATURES', 'mipsisa32r6', ' -march=mi # Base Tunes AVAILTUNES += "mipsisa32r6 mipsisa32r6el" -TUNE_FEATURES_tune-mipsisa32r6 = "bigendian mipsisa32r6 fpu-hard r6" +TUNE_FEATURES_tune-mipsisa32r6 = "o32 bigendian mipsisa32r6 fpu-hard r6" MIPSPKGSFX_VARIANT_tune-mipsisa32r6 = "${TUNE_ARCH}" PACKAGE_EXTRA_ARCHS_tune-mipsisa32r6 = "mipsisa32r6" -TUNE_FEATURES_tune-mipsisa32r6el = "fpu-hard mipsisa32r6 r6" +TUNE_FEATURES_tune-mipsisa32r6el = "o32 fpu-hard mipsisa32r6 r6" MIPSPKGSFX_VARIANT_tune-mipsisa32r6el = "${TUNE_ARCH}" PACKAGE_EXTRA_ARCHS_tune-mipsisa32r6el = "mipsisa32r6el" # Soft Float AVAILTUNES += "mipsisa32r6-nf mipsisa32r6el-nf" -TUNE_FEATURES_tune-mipsisa32r6-nf = "bigendian mipsisa32r6 r6" +TUNE_FEATURES_tune-mipsisa32r6-nf = "o32 bigendian mipsisa32r6 r6" MIPSPKGSFX_VARIANT_tune-mipsisa32r6-nf = "${TUNE_ARCH}" PACKAGE_EXTRA_ARCHS_tune-mipsisa32r6-nf = "mipsisa32r6-nf" -TUNE_FEATURES_tune-mipsisa32r6el-nf = "mipsisa32r6 r6" 
+TUNE_FEATURES_tune-mipsisa32r6el-nf = "o32 mipsisa32r6 r6" MIPSPKGSFX_VARIANT_tune-mipsisa32r6el-nf = "${TUNE_ARCH}" PACKAGE_EXTRA_ARCHS_tune-mipsisa32r6el-nf = "mipsisa32r6el-nf" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-mips64r6.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-mips64r6.inc index f0ad56468..4fe3eedf1 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-mips64r6.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-mips64r6.inc @@ -30,3 +30,29 @@ TUNE_FEATURES_tune-mipsisa64r6el-nf = "r6 n64 mipsisa64r6" MIPSPKGSFX_VARIANT_tune-mipsisa64r6el-nf = "${TUNE_ARCH}" BASE_LIB_tune-mipsisa64r6el-nf = "lib64" PACKAGE_EXTRA_ARCHS_tune-mipsisa64r6el-nf = "mipsisa64r6el-nf" + +# MIPS 64r6 n32 +AVAILTUNES += "mipsisa64r6-n32 mipsisa64r6el-n32" + +TUNE_FEATURES_tune-mipsisa64r6-n32 = "bigendian fpu-hard r6 n32 mipsisa64r6" +BASE_LIB_tune-mipsisa64r6-n32 = "lib32" +MIPSPKGSFX_VARIANT_tune-mipsisa64r6-n32 = "${TUNE_ARCH}" +PACKAGE_EXTRA_ARCHS_tune-mipsisa64r6-n32 = "mipsisa64r6-n32" + +TUNE_FEATURES_tune-mipsisa64r6el-n32 = "fpu-hard r6 n32 mipsisa64r6" +BASE_LIB_tune-mipsisa64r6el-n32 = "lib32" +MIPSPKGSFX_VARIANT_tune-mipsisa64r6el-n32 = "${TUNE_ARCH}" +PACKAGE_EXTRA_ARCHS_tune-mipsisa64r6el-n32 = "mipsisa64r6el-n32" + +# MIPS 64r6 n32 and Soft Float +AVAILTUNES += "mipsisa64r6-nf-n32 mipsisa64r6el-nf-n32" + +TUNE_FEATURES_tune-mipsisa64r6-nf-n32 = "bigendian r6 n32 mipsisa64r6" +BASE_LIB_tune-mipsisa64r6-nf-n32 = "lib32" +MIPSPKGSFX_VARIANT_tune-mipsisa64r6-nf-n32 = "${TUNE_ARCH}" +PACKAGE_EXTRA_ARCHS_tune-mipsisa64r6-nf-n32 = "mipsisa64r6-nf-n32" + +TUNE_FEATURES_tune-mipsisa64r6el-nf-n32 = "r6 n32 mipsisa64r6" +BASE_LIB_tune-mipsisa64r6el-nf-n32 = "lib32" +MIPSPKGSFX_VARIANT_tune-mipsisa64r6el-nf-n32 = "${TUNE_ARCH}" +PACKAGE_EXTRA_ARCHS_tune-mipsisa64r6el-nf-n32 = "mipsisa64r6el-nf-n32" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-ppce500.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-ppce500.inc index 96073d2d0..89ec3f38a 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-ppce500.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-ppce500.inc @@ -11,7 +11,7 @@ TARGET_FPU .= "${@bb.utils.contains('TUNE_FEATURES', [ 'ppce500' , 'spe' ], 'ppc # spe is defined potentially in two places, so we want to be sure it will # only write spe once to the ABIEXTENSIONS field. -SPEABIEXTENSION = "${@bb.utils.contains('TUNE_FEATURES', 'spe', 'spe', '', d)}" +SPEABIEXTENSION = "${@bb.utils.filter('TUNE_FEATURES', 'spe', d)}" ABIEXTENSION .= "${SPEABIEXTENSION}" AVAILTUNES += "ppce500" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/tune-ppce500v2.inc b/import-layers/yocto-poky/meta/conf/machine/include/tune-ppce500v2.inc index 20a5f25b6..3a006e2ca 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/tune-ppce500v2.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/tune-ppce500v2.inc @@ -11,7 +11,7 @@ TARGET_FPU .= "${@bb.utils.contains('TUNE_FEATURES', [ 'ppce500v2' , 'spe' ], 'p # spe is defined potentially in two places, so we want to be sure it will # only write spe once to the ABIEXTENSIONS field. 
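
Note: both ppce500 tune files (the tune-ppce500v2.inc hunk continues directly below) swap a bb.utils.contains() expression for bb.utils.filter() when computing SPEABIEXTENSION. contains() returns one of two fixed strings depending on whether all requested words are present, while filter() echoes back whichever of the requested words actually appear in the variable; with a single word like 'spe' the two produce the same result, and filter() states the intent more directly. The toy stand-ins below illustrate the semantics only; they operate on already-expanded strings rather than a variable name plus datastore, so they are not the real bb.utils signatures.

    # Simplified stand-ins for bb.utils.contains() and bb.utils.filter(),
    # taking expanded values instead of (variable, d).

    def contains(value, checkvalues, truevalue, falsevalue):
        # Fixed result: truevalue only if *all* requested words are present.
        return truevalue if set(checkvalues.split()) <= set(value.split()) else falsevalue

    def filter_present(value, checkvalues):
        # Echo back the requested words that are actually present.
        return ' '.join(sorted(set(checkvalues.split()) & set(value.split())))

    for features in ("ppce500 spe", "ppce500"):
        print(repr(contains(features, 'spe', 'spe', '')),
              repr(filter_present(features, 'spe')))
    # -> 'spe' 'spe'   when 'spe' is in the features
    # -> ''    ''      when it is not
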
-SPEABIEXTENSION = "${@bb.utils.contains('TUNE_FEATURES', 'spe', 'spe', '', d)}" +SPEABIEXTENSION = "${@bb.utils.filter('TUNE_FEATURES', 'spe', d)}" ABIEXTENSION .= "${SPEABIEXTENSION}" AVAILTUNES += "ppce500v2" diff --git a/import-layers/yocto-poky/meta/conf/machine/include/x86-base.inc b/import-layers/yocto-poky/meta/conf/machine/include/x86-base.inc index 479e89e54..7365953ca 100644 --- a/import-layers/yocto-poky/meta/conf/machine/include/x86-base.inc +++ b/import-layers/yocto-poky/meta/conf/machine/include/x86-base.inc @@ -20,16 +20,12 @@ SERIAL_CONSOLE ?= "115200 ttyS0" # kernel-related variables # PREFERRED_PROVIDER_virtual/kernel ??= "linux-yocto" -PREFERRED_VERSION_linux-yocto ??= "4.8%" +PREFERRED_VERSION_linux-yocto ??= "4.10%" # # XSERVER subcomponents, used to build the XSERVER variable # XSERVER_X86_BASE = "xserver-xorg \ - xf86-input-mouse \ - xf86-input-keyboard \ - xf86-input-evdev \ - xf86-input-synaptics \ " XSERVER_X86_EXT = " \ diff --git a/import-layers/yocto-poky/meta/conf/machine/qemuarm.conf b/import-layers/yocto-poky/meta/conf/machine/qemuarm.conf index f9d6dd7e3..6b875e415 100644 --- a/import-layers/yocto-poky/meta/conf/machine/qemuarm.conf +++ b/import-layers/yocto-poky/meta/conf/machine/qemuarm.conf @@ -16,5 +16,5 @@ QB_MACHINE = "-machine versatilepb" QB_KERNEL_CMDLINE_APPEND = "console=ttyAMA0,115200 console=tty" # Add the 'virtio-rng-pci' device otherwise the guest may run out of entropy QB_OPT_APPEND = "-show-cursor -usb -usbdevice tablet -device virtio-rng-pci" -PREFERRED_VERSION_linux-yocto ??= "4.8%" +PREFERRED_VERSION_linux-yocto ??= "4.10%" QB_DTB = "${@base_version_less_or_equal('PREFERRED_VERSION_linux-yocto', '4.7', '', 'zImage-versatile-pb.dtb', d)}" diff --git a/import-layers/yocto-poky/meta/conf/machine/qemuarm64.conf b/import-layers/yocto-poky/meta/conf/machine/qemuarm64.conf index e70538aac..242889ac8 100644 --- a/import-layers/yocto-poky/meta/conf/machine/qemuarm64.conf +++ b/import-layers/yocto-poky/meta/conf/machine/qemuarm64.conf @@ -17,8 +17,8 @@ QB_CPU = "-cpu cortex-a57" QB_KERNEL_CMDLINE_APPEND = "console=ttyAMA0,38400" # Add the 'virtio-rng-pci' device otherwise the guest may run out of entropy QB_OPT_APPEND = "-show-cursor -device virtio-rng-pci -monitor null" -QB_TAP_OPT = "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no -device virtio-net-device,netdev=net0,mac=@MAC@" -QB_SLIRP_OPT = "-netdev user,id=net0 -device virtio-net-device,netdev=net0" +QB_TAP_OPT = "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no" +QB_NETWORK_DEVICE = "-device virtio-net-device,netdev=net0,mac=@MAC@" QB_ROOTFS_OPT = "-drive id=disk0,file=@ROOTFS@,if=none,format=raw -device virtio-blk-device,drive=disk0" QB_SERIAL_OPT = "-device virtio-serial-device -chardev null,id=virtcon -device virtconsole,chardev=virtcon" QB_TCPSERIAL_OPT = " -device virtio-serial-device -chardev socket,id=virtcon,port=@PORT@,host=127.0.0.1 -device virtconsole,chardev=virtcon" diff --git a/import-layers/yocto-poky/meta/conf/machine/qemuppc.conf b/import-layers/yocto-poky/meta/conf/machine/qemuppc.conf index 9d174bc43..a9ef64b0a 100644 --- a/import-layers/yocto-poky/meta/conf/machine/qemuppc.conf +++ b/import-layers/yocto-poky/meta/conf/machine/qemuppc.conf @@ -18,4 +18,4 @@ QB_CPU = "-cpu G4" QB_KERNEL_CMDLINE_APPEND = "console=tty console=ttyS0" # Add the 'virtio-rng-pci' device otherwise the guest may run out of entropy QB_OPT_APPEND = "-show-cursor -usb -usbdevice tablet -device virtio-rng-pci" -QB_TAP_OPT = "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no 
-device virtio-net-pci,netdev=net0,mac=@MAC@" +QB_TAP_OPT = "-netdev tap,id=net0,ifname=@TAP@,script=no,downscript=no" diff --git a/import-layers/yocto-poky/meta/conf/machine/qemux86-64.conf b/import-layers/yocto-poky/meta/conf/machine/qemux86-64.conf index 4f30033e5..10189cb5c 100644 --- a/import-layers/yocto-poky/meta/conf/machine/qemux86-64.conf +++ b/import-layers/yocto-poky/meta/conf/machine/qemux86-64.conf @@ -18,9 +18,6 @@ SERIAL_CONSOLES ?= "115200;ttyS0 115200;ttyS1" XSERVER = "xserver-xorg \ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'mesa-driver-swrast xserver-xorg-extension-glx', '', d)} \ - xf86-input-vmmouse \ - xf86-input-keyboard \ - xf86-input-evdev \ xf86-video-cirrus \ xf86-video-fbdev \ xf86-video-vmware \ @@ -33,4 +30,4 @@ MACHINE_FEATURES += "x86" MACHINE_ESSENTIAL_EXTRA_RDEPENDS += "v86d" WKS_FILE ?= "directdisk.wks" -do_image_wic[depends] += "syslinux:do_build syslinux-native:do_populate_sysroot mtools-native:do_populate_sysroot dosfstools-native:do_populate_sysroot" +do_image_wic[depends] += "syslinux:do_populate_sysroot syslinux-native:do_populate_sysroot mtools-native:do_populate_sysroot dosfstools-native:do_populate_sysroot" diff --git a/import-layers/yocto-poky/meta/conf/machine/qemux86.conf b/import-layers/yocto-poky/meta/conf/machine/qemux86.conf index e232947ae..c26dda27f 100644 --- a/import-layers/yocto-poky/meta/conf/machine/qemux86.conf +++ b/import-layers/yocto-poky/meta/conf/machine/qemux86.conf @@ -17,9 +17,6 @@ SERIAL_CONSOLES ?= "115200;ttyS0 115200;ttyS1" XSERVER = "xserver-xorg \ ${@bb.utils.contains('DISTRO_FEATURES', 'opengl', 'mesa-driver-swrast xserver-xorg-extension-glx', '', d)} \ - xf86-input-vmmouse \ - xf86-input-keyboard \ - xf86-input-evdev \ xf86-video-cirrus \ xf86-video-fbdev \ xf86-video-vmware \ @@ -31,5 +28,5 @@ MACHINE_FEATURES += "x86" MACHINE_ESSENTIAL_EXTRA_RDEPENDS += "v86d" -WKS_FILE = "directdisk.wks" -do_image_wic[depends] += "syslinux:do_build syslinux-native:do_populate_sysroot mtools-native:do_populate_sysroot dosfstools-native:do_populate_sysroot" +WKS_FILE ?= "directdisk.wks" +do_image_wic[depends] += "syslinux:do_populate_sysroot syslinux-native:do_populate_sysroot mtools-native:do_populate_sysroot dosfstools-native:do_populate_sysroot" diff --git a/import-layers/yocto-poky/meta/conf/multiconfig/default.conf b/import-layers/yocto-poky/meta/conf/multiconfig/default.conf new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/meta/conf/multilib.conf b/import-layers/yocto-poky/meta/conf/multilib.conf index 1403a034a..e74dec81a 100644 --- a/import-layers/yocto-poky/meta/conf/multilib.conf +++ b/import-layers/yocto-poky/meta/conf/multilib.conf @@ -1,14 +1,14 @@ -baselib = "${@d.getVar('BASE_LIB_tune-' + (d.getVar('DEFAULTTUNE', True) or 'INVALID'), True) or d.getVar('BASELIB', True)}" +baselib = "${@d.getVar('BASE_LIB_tune-' + (d.getVar('DEFAULTTUNE') or 'INVALID')) or d.getVar('BASELIB')}" MULTILIB_VARIANTS = "${@extend_variants(d,'MULTILIBS','multilib')}" MULTILIB_SAVE_VARNAME = "DEFAULTTUNE TARGET_ARCH TARGET_SYS TARGET_VENDOR" MULTILIBS ??= "multilib:lib32" -STAGING_DIR_HOST = "${STAGING_DIR}/${MLPREFIX}${MACHINE}" -STAGING_DIR_TARGET = "${STAGING_DIR}/${MLPREFIX}${MACHINE}" -PKGDATA_DIR = "${STAGING_DIR}/${MACHINE}/pkgdata" +STAGING_DIR_HOST = "${WORKDIR}/${MLPREFIX}recipe-sysroot" +STAGING_DIR_TARGET = "${WORKDIR}/${MLPREFIX}recipe-sysroot" +RECIPE_SYSROOT = "${WORKDIR}/${MLPREFIX}recipe-sysroot" INHERIT += "multilib_global" @@ -23,6 +23,6 @@ OPKG_ARGS_append = " 
--force-maintainer --force-overwrite" # find an allarch pkgconfig file will fail as the PKG_CONFIG_PATH only looks # inside the multilib sysroot. Fix this by explicitly adding the MACHINE's # architecture-independent pkgconfig location to PKG_CONFIG_PATH. -PKG_CONFIG_PATH .= ":${STAGING_DIR}/${MACHINE}${datadir}/pkgconfig" -PKG_CONFIG_PATH[vardepsexclude] = "MACHINE datadir STAGING_DIR" -PKG_CONFIG_PATH[vardepvalueexclude] = ":${STAGING_DIR}/${MACHINE}${datadir}/pkgconfig" +PKG_CONFIG_PATH .= ":${WORKDIR}/recipe-sysroot/${datadir}/pkgconfig" +PKG_CONFIG_PATH[vardepsexclude] = "datadir WORKDIR" +PKG_CONFIG_PATH[vardepvalueexclude] = ":${WORKDIR}/recipe-sysroot/${datadir}/pkgconfig" diff --git a/import-layers/yocto-poky/meta/conf/sanity.conf b/import-layers/yocto-poky/meta/conf/sanity.conf index dcf60b74d..46bdbeb83 100644 --- a/import-layers/yocto-poky/meta/conf/sanity.conf +++ b/import-layers/yocto-poky/meta/conf/sanity.conf @@ -3,7 +3,7 @@ # See sanity.bbclass # # Expert users can confirm their sanity with "touch conf/sanity.conf" -BB_MIN_VERSION = "1.31.2" +BB_MIN_VERSION = "1.33.4" SANITY_ABIFILE = "${TMPDIR}/abi_version" diff --git a/import-layers/yocto-poky/meta/conf/toasterconf.json b/import-layers/yocto-poky/meta/conf/toasterconf.json deleted file mode 100644 index 796125bbf..000000000 --- a/import-layers/yocto-poky/meta/conf/toasterconf.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "config": { - "MACHINE" : "qemux86", - "DISTRO" : "nodistro", - "DL_DIR" : "${TOPDIR}/../downloads", - "IMAGE_FSTYPES": "ext4 jffs2 tar.bz2", - "IMAGE_INSTALL_append": "", - "PACKAGE_CLASSES": "package_rpm", - "SSTATE_DIR" : "${TOPDIR}/../sstate-cache" - }, - "layersources": [ - { - "name": "Local OpenEmbedded", - "sourcetype": "local", - "apiurl": "../../", - "branches": ["HEAD", "master"], - "layers": [ - { - "name": "openembedded-core", - "local_path": "meta", - "vcs_url": "remote:origin", - "dirpath": "meta" - } - ] - }, - { - "name": "OpenEmbedded", - "sourcetype": "layerindex", - "apiurl": "http://layers.openembedded.org/layerindex/api/", - "branches": ["master"] - }, - { - "name": "Imported layers", - "sourcetype": "imported", - "apiurl": "", - "branches": ["master", "HEAD"] - - } - ], - "bitbake" : [ - { - "name": "master", - "giturl": "git://git.openembedded.org/bitbake", - "branch": "master", - "dirpath": "" - }, - { - "name": "HEAD", - "giturl": "git://git.openembedded.org/bitbake", - "branch": "HEAD", - "dirpath": "" - } - ], - - "defaultrelease": "master", - - "releases": [ - { - "name": "master", - "description": "OpenEmbedded master", - "bitbake": "master", - "branch": "master", - "defaultlayers": [ "openembedded-core" ], - "layersourcepriority": { "Imported layers": 99, "Local OpenEmbedded" : 10, "OpenEmbedded" : 0 }, - "helptext": "Toaster will run your builds using the tip of the OpenEmbedded master branch, where active development takes place. This is not a stable branch, so your builds might not work as expected." - }, - { - "name": "local", - "description": "Local OpenEmbedded", - "bitbake": "HEAD", - "branch": "HEAD", - "defaultlayers": [ "openembedded-core" ], - "layersourcepriority": { "Imported layers": 99, "Local OpenEmbedded" : 10, "OpenEmbedded" : 0 }, - "helptext": "Toaster will run your builds with the version of OpenEmbedded that you have cloned or downloaded to your computer." 
- } - ] -} diff --git a/import-layers/yocto-poky/meta/files/ext-sdk-prepare.py b/import-layers/yocto-poky/meta/files/ext-sdk-prepare.py index 78c1d1630..96c5212a2 100644 --- a/import-layers/yocto-poky/meta/files/ext-sdk-prepare.py +++ b/import-layers/yocto-poky/meta/files/ext-sdk-prepare.py @@ -53,6 +53,8 @@ def main(): logf.write('Preparing SDK for %s...\n' % ', '.join(sdk_targets)) ret = run_command_interruptible('BB_SETSCENE_ENFORCE=1 bitbake --quiet %s' % ' '.join(sdk_targets)) + if not ret: + ret = run_command_interruptible('bitbake --quiet build-sysroots') lastlog = get_last_consolelog() if lastlog: with open(lastlog, 'r') as f: diff --git a/import-layers/yocto-poky/meta/files/fs-perms.txt b/import-layers/yocto-poky/meta/files/fs-perms.txt index 3d00e862e..c8c3ac5db 100644 --- a/import-layers/yocto-poky/meta/files/fs-perms.txt +++ b/import-layers/yocto-poky/meta/files/fs-perms.txt @@ -63,8 +63,5 @@ ${localstatedir}/volatile/tmp 01777 root root false - - - # Set 0700 ${ROOT_HOME} 0700 root root false - - - -# Set 755-lsb -/srv 0755 root root false - - - - # Set 2775-lsb -/var/mail 02775 root mail false - - - +${localstatedir}/mail 02775 root mail false - - - diff --git a/import-layers/yocto-poky/meta/files/toolchain-shar-extract.sh b/import-layers/yocto-poky/meta/files/toolchain-shar-extract.sh index 9295ddc86..91804ec28 100644 --- a/import-layers/yocto-poky/meta/files/toolchain-shar-extract.sh +++ b/import-layers/yocto-poky/meta/files/toolchain-shar-extract.sh @@ -8,9 +8,26 @@ [ -f /etc/environment ] && . /etc/environment export PATH=`echo "$PATH" | sed -e 's/:\.//' -e 's/::/:/'` +tweakpath () { + case ":${PATH}:" in + *:"$1":*) + ;; + *) + PATH=$PATH:$1 + esac +} + +# Some systems don't have /usr/sbin or /sbin in the cleaned environment PATH but we make need it +# for the system's host tooling checks +tweakpath /usr/sbin +tweakpath /sbin + INST_ARCH=$(uname -m | sed -e "s/i[3-6]86/ix86/" -e "s/x86[-_]64/x86_64/") SDK_ARCH=$(echo @SDK_ARCH@ | sed -e "s/i[3-6]86/ix86/" -e "s/x86[-_]64/x86_64/") +INST_GCC_VER=$(gcc --version | sed -ne 's/.* \([0-9]\+\.[0-9]\+\)\.[0-9]\+.*/\1/p') +SDK_GCC_VER='@SDK_GCC_VER@' + verlte () { [ "$1" = "`printf "$1\n$2" | sort -V | head -n1`" ] } @@ -112,6 +129,11 @@ fi # SDK_EXTENSIBLE is exposed from the SDK_PRE_INSTALL_COMMAND above if [ "$SDK_EXTENSIBLE" = "1" ]; then DEFAULT_INSTALL_DIR="@SDKEXTPATH@" + if [ "$INST_GCC_VER" = '4.8' -a "$SDK_GCC_VER" = '4.9' ] || [ "$INST_GCC_VER" = '4.8' -a "$SDK_GCC_VER" = '' ] || \ + [ "$INST_GCC_VER" = '4.9' -a "$SDK_GCC_VER" = '' ]; then + echo "Error: Incompatible SDK installer! Your host gcc version is $INST_GCC_VER and this SDK was built by gcc higher version." + exit 1 + fi fi if [ "$target_sdk_dir" = "" ]; then diff --git a/import-layers/yocto-poky/meta/lib/buildstats.py b/import-layers/yocto-poky/meta/lib/buildstats.py new file mode 100644 index 000000000..c5d4c73cf --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/buildstats.py @@ -0,0 +1,158 @@ +# Implements system state sampling. Called by buildstats.bbclass. +# Because it is a real Python module, it can hold persistent state, +# like open log files and the time of the last sampling. 
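
Note: the new meta/lib/buildstats.py module whose body follows samples /proc/meminfo, /proc/diskstats and /proc/stat, and for the reduced logs writes each sample as a timestamp line, one data line and a blank separator (see the os.write() call at the end of sample()). As a rough illustration of consuming that output, a minimal reader for the reduced meminfo log could look like the sketch below; the log path is an assumption based on the BUILDSTATS_BASE/BUILDNAME layout used in __init__().

    # Minimal reader for reduced_proc_meminfo.log records: a timestamp line,
    # a line with six meminfo values, then a blank separator line.
    def read_reduced_meminfo(path):
        fields = ('MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree')
        samples = []
        with open(path) as f:
            lines = [line.rstrip('\n') for line in f]
        i = 0
        while i + 1 < len(lines):
            if not lines[i]:              # skip separator lines
                i += 1
                continue
            timestamp = float(lines[i])
            values = dict(zip(fields, map(int, lines[i + 1].split())))
            samples.append((timestamp, values))
            i += 3                        # timestamp + data + blank
        return samples

    # Example usage (path assumed; BUILDSTATS_BASE usually lives under TMPDIR):
    # for ts, mem in read_reduced_meminfo('tmp/buildstats/<buildname>/reduced_proc_meminfo.log'):
    #     print(ts, mem['MemFree'])
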
+ +import time +import re +import bb.event + +class SystemStats: + def __init__(self, d): + bn = d.getVar('BUILDNAME') + bsdir = os.path.join(d.getVar('BUILDSTATS_BASE'), bn) + bb.utils.mkdirhier(bsdir) + + self.proc_files = [] + for filename, handler in ( + ('diskstats', self._reduce_diskstats), + ('meminfo', self._reduce_meminfo), + ('stat', self._reduce_stat), + ): + # The corresponding /proc files might not exist on the host. + # For example, /proc/diskstats is not available in virtualized + # environments like Linux-VServer. Silently skip collecting + # the data. + if os.path.exists(os.path.join('/proc', filename)): + # In practice, this class gets instantiated only once in + # the bitbake cooker process. Therefore 'append' mode is + # not strictly necessary, but using it makes the class + # more robust should two processes ever write + # concurrently. + destfile = os.path.join(bsdir, '%sproc_%s.log' % ('reduced_' if handler else '', filename)) + self.proc_files.append((filename, open(destfile, 'ab'), handler)) + self.monitor_disk = open(os.path.join(bsdir, 'monitor_disk.log'), 'ab') + # Last time that we sampled /proc data resp. recorded disk monitoring data. + self.last_proc = 0 + self.last_disk_monitor = 0 + # Minimum number of seconds between recording a sample. This + # becames relevant when we get called very often while many + # short tasks get started. Sampling during quiet periods + # depends on the heartbeat event, which fires less often. + self.min_seconds = 1 + + self.meminfo_regex = re.compile(b'^(MemTotal|MemFree|Buffers|Cached|SwapTotal|SwapFree):\s*(\d+)') + self.diskstats_regex = re.compile(b'^([hsv]d.|mtdblock\d|mmcblk\d|cciss/c\d+d\d+.*)$') + self.diskstats_ltime = None + self.diskstats_data = None + self.stat_ltimes = None + + def close(self): + self.monitor_disk.close() + for _, output, _ in self.proc_files: + output.close() + + def _reduce_meminfo(self, time, data): + """ + Extracts 'MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree' + and writes their values into a single line, in that order. + """ + values = {} + for line in data.split(b'\n'): + m = self.meminfo_regex.match(line) + if m: + values[m.group(1)] = m.group(2) + if len(values) == 6: + return (time, + b' '.join([values[x] for x in + (b'MemTotal', b'MemFree', b'Buffers', b'Cached', b'SwapTotal', b'SwapFree')]) + b'\n') + + def _diskstats_is_relevant_line(self, linetokens): + if len(linetokens) != 14: + return False + disk = linetokens[2] + return self.diskstats_regex.match(disk) + + def _reduce_diskstats(self, time, data): + relevant_tokens = filter(self._diskstats_is_relevant_line, map(lambda x: x.split(), data.split(b'\n'))) + diskdata = [0] * 3 + reduced = None + for tokens in relevant_tokens: + # rsect + diskdata[0] += int(tokens[5]) + # wsect + diskdata[1] += int(tokens[9]) + # use + diskdata[2] += int(tokens[12]) + if self.diskstats_ltime: + # We need to compute information about the time interval + # since the last sampling and record the result as sample + # for that point in the past. 
+ interval = time - self.diskstats_ltime + if interval > 0: + sums = [ a - b for a, b in zip(diskdata, self.diskstats_data) ] + readTput = sums[0] / 2.0 * 100.0 / interval + writeTput = sums[1] / 2.0 * 100.0 / interval + util = float( sums[2] ) / 10 / interval + util = max(0.0, min(1.0, util)) + reduced = (self.diskstats_ltime, (readTput, writeTput, util)) + + self.diskstats_ltime = time + self.diskstats_data = diskdata + return reduced + + + def _reduce_nop(self, time, data): + return (time, data) + + def _reduce_stat(self, time, data): + if not data: + return None + # CPU times {user, nice, system, idle, io_wait, irq, softirq} from first line + tokens = data.split(b'\n', 1)[0].split() + times = [ int(token) for token in tokens[1:] ] + reduced = None + if self.stat_ltimes: + user = float((times[0] + times[1]) - (self.stat_ltimes[0] + self.stat_ltimes[1])) + system = float((times[2] + times[5] + times[6]) - (self.stat_ltimes[2] + self.stat_ltimes[5] + self.stat_ltimes[6])) + idle = float(times[3] - self.stat_ltimes[3]) + iowait = float(times[4] - self.stat_ltimes[4]) + + aSum = max(user + system + idle + iowait, 1) + reduced = (time, (user/aSum, system/aSum, iowait/aSum)) + + self.stat_ltimes = times + return reduced + + def sample(self, event, force): + now = time.time() + if (now - self.last_proc > self.min_seconds) or force: + for filename, output, handler in self.proc_files: + with open(os.path.join('/proc', filename), 'rb') as input: + data = input.read() + if handler: + reduced = handler(now, data) + else: + reduced = (now, data) + if reduced: + if isinstance(reduced[1], bytes): + # Use as it is. + data = reduced[1] + else: + # Convert to a single line. + data = (' '.join([str(x) for x in reduced[1]]) + '\n').encode('ascii') + # Unbuffered raw write, less overhead and useful + # in case that we end up with concurrent writes. 
+ os.write(output.fileno(), + ('%.0f\n' % reduced[0]).encode('ascii') + + data + + b'\n') + self.last_proc = now + + if isinstance(event, bb.event.MonitorDiskEvent) and \ + ((now - self.last_disk_monitor > self.min_seconds) or force): + os.write(self.monitor_disk.fileno(), + ('%.0f\n' % now).encode('ascii') + + ''.join(['%s: %d\n' % (dev, sample.total_bytes - sample.free_bytes) + for dev, sample in event.disk_usage.items()]).encode('ascii') + + b'\n') + self.last_disk_monitor = now diff --git a/import-layers/yocto-poky/meta/lib/oe/buildhistory_analysis.py b/import-layers/yocto-poky/meta/lib/oe/buildhistory_analysis.py index b6c0265c1..3a5b7b6b4 100644 --- a/import-layers/yocto-poky/meta/lib/oe/buildhistory_analysis.py +++ b/import-layers/yocto-poky/meta/lib/oe/buildhistory_analysis.py @@ -1,6 +1,6 @@ # Report significant differences in the buildhistory repository since a specific revision # -# Copyright (C) 2012 Intel Corporation +# Copyright (C) 2012-2013, 2016-2017 Intel Corporation # Author: Paul Eggleton # # Note: requires GitPython 0.3.1+ @@ -13,7 +13,10 @@ import os.path import difflib import git import re +import hashlib +import collections import bb.utils +import bb.tinfoil # How to display fields @@ -69,7 +72,22 @@ class ChangeRecord: pkglist.append(k) return pkglist + def detect_renamed_dirs(aitems, bitems): + adirs = set(map(os.path.dirname, aitems)) + bdirs = set(map(os.path.dirname, bitems)) + files_ab = [(name, sorted(os.path.basename(item) for item in aitems if os.path.dirname(item) == name)) \ + for name in adirs - bdirs] + files_ba = [(name, sorted(os.path.basename(item) for item in bitems if os.path.dirname(item) == name)) \ + for name in bdirs - adirs] + renamed_dirs = [(dir1, dir2) for dir1, files1 in files_ab for dir2, files2 in files_ba if files1 == files2] + # remove files that belong to renamed dirs from aitems and bitems + for dir1, dir2 in renamed_dirs: + aitems = [item for item in aitems if os.path.dirname(item) not in (dir1, dir2)] + bitems = [item for item in bitems if os.path.dirname(item) not in (dir1, dir2)] + return renamed_dirs, aitems, bitems + if self.fieldname in list_fields or self.fieldname in list_order_fields: + renamed_dirs = [] if self.fieldname in ['RPROVIDES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RREPLACES', 'RCONFLICTS']: (depvera, depverb) = compare_pkg_lists(self.oldvalue, self.newvalue) aitems = pkglist_combine(depvera) @@ -77,16 +95,29 @@ class ChangeRecord: else: aitems = self.oldvalue.split() bitems = self.newvalue.split() + if self.fieldname == 'FILELIST': + renamed_dirs, aitems, bitems = detect_renamed_dirs(aitems, bitems) + removed = list(set(aitems) - set(bitems)) added = list(set(bitems) - set(aitems)) + lines = [] + if renamed_dirs: + for dfrom, dto in renamed_dirs: + lines.append('directory renamed %s -> %s' % (dfrom, dto)) if removed or added: if removed and not bitems: - out = '%s: removed all items "%s"' % (self.fieldname, ' '.join(removed)) + lines.append('removed all items "%s"' % ' '.join(removed)) else: - out = '%s:%s%s' % (self.fieldname, ' removed "%s"' % ' '.join(removed) if removed else '', ' added "%s"' % ' '.join(added) if added else '') + if removed: + lines.append('removed "%s"' % ' '.join(removed)) + if added: + lines.append('added "%s"' % ' '.join(added)) else: - out = '%s changed order' % self.fieldname + lines.append('changed order') + + out = '%s: %s' % (self.fieldname, ', '.join(lines)) + elif self.fieldname in numeric_fields: aval = int(self.oldvalue or 0) bval = int(self.newvalue or 0) @@ -382,13 +413,115 
@@ def compare_dict_blobs(path, ablob, bblob, report_all, report_ver): return changes -def process_changes(repopath, revision1, revision2='HEAD', report_all=False, report_ver=False): +def compare_siglists(a_blob, b_blob, taskdiff=False): + # FIXME collapse down a recipe's tasks? + alines = a_blob.data_stream.read().decode('utf-8').splitlines() + blines = b_blob.data_stream.read().decode('utf-8').splitlines() + keys = [] + pnmap = {} + def readsigs(lines): + sigs = {} + for line in lines: + linesplit = line.split() + if len(linesplit) > 2: + sigs[linesplit[0]] = linesplit[2] + if not linesplit[0] in keys: + keys.append(linesplit[0]) + pnmap[linesplit[1]] = linesplit[0].rsplit('.', 1)[0] + return sigs + adict = readsigs(alines) + bdict = readsigs(blines) + out = [] + + changecount = 0 + addcount = 0 + removecount = 0 + if taskdiff: + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=True) + + changes = collections.OrderedDict() + + def compare_hashfiles(pn, taskname, hash1, hash2): + hashes = [hash1, hash2] + hashfiles = bb.siggen.find_siginfo(pn, taskname, hashes, tinfoil.config_data) + + if not taskname: + (pn, taskname) = pn.rsplit('.', 1) + pn = pnmap.get(pn, pn) + desc = '%s.%s' % (pn, taskname) + + if len(hashfiles) == 0: + out.append("Unable to find matching sigdata for %s with hashes %s or %s" % (desc, hash1, hash2)) + elif not hash1 in hashfiles: + out.append("Unable to find matching sigdata for %s with hash %s" % (desc, hash1)) + elif not hash2 in hashfiles: + out.append("Unable to find matching sigdata for %s with hash %s" % (desc, hash2)) + else: + out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, collapsed=True) + for line in out2: + m = hashlib.sha256() + m.update(line.encode('utf-8')) + entry = changes.get(m.hexdigest(), (line, [])) + if desc not in entry[1]: + changes[m.hexdigest()] = (line, entry[1] + [desc]) + + # Define recursion callback + def recursecb(key, hash1, hash2): + compare_hashfiles(key, None, hash1, hash2) + return [] + + for key in keys: + siga = adict.get(key, None) + sigb = bdict.get(key, None) + if siga is not None and sigb is not None and siga != sigb: + changecount += 1 + (pn, taskname) = key.rsplit('.', 1) + compare_hashfiles(pn, taskname, siga, sigb) + elif siga is None: + addcount += 1 + elif sigb is None: + removecount += 1 + for key, item in changes.items(): + line, tasks = item + if len(tasks) == 1: + desc = tasks[0] + elif len(tasks) == 2: + desc = '%s and %s' % (tasks[0], tasks[1]) + else: + desc = '%s and %d others' % (tasks[-1], len(tasks)-1) + out.append('%s: %s' % (desc, line)) + else: + for key in keys: + siga = adict.get(key, None) + sigb = bdict.get(key, None) + if siga is not None and sigb is not None and siga != sigb: + out.append('%s changed from %s to %s' % (key, siga, sigb)) + changecount += 1 + elif siga is None: + out.append('%s was added' % key) + addcount += 1 + elif sigb is None: + out.append('%s was removed' % key) + removecount += 1 + out.append('Summary: %d tasks added, %d tasks removed, %d tasks modified (%.1f%%)' % (addcount, removecount, changecount, (changecount / float(len(bdict)) * 100))) + return '\n'.join(out) + + +def process_changes(repopath, revision1, revision2='HEAD', report_all=False, report_ver=False, sigs=False, sigsdiff=False): repo = git.Repo(repopath) assert repo.bare == False commit = repo.commit(revision1) diff = commit.diff(revision2) changes = [] + + if sigs or sigsdiff: + for d in diff.iter_change_type('M'): + if d.a_blob.path == 'siglist.txt': + 
changes.append(compare_siglists(d.a_blob, d.b_blob, taskdiff=sigsdiff)) + return changes + for d in diff.iter_change_type('M'): path = os.path.dirname(d.a_blob.path) if path.startswith('packages/'): diff --git a/import-layers/yocto-poky/meta/lib/oe/classextend.py b/import-layers/yocto-poky/meta/lib/oe/classextend.py index 4c8a00070..d2eeaf0e5 100644 --- a/import-layers/yocto-poky/meta/lib/oe/classextend.py +++ b/import-layers/yocto-poky/meta/lib/oe/classextend.py @@ -25,7 +25,7 @@ class ClassExtender(object): return name def map_variable(self, varname, setvar = True): - var = self.d.getVar(varname, True) + var = self.d.getVar(varname) if not var: return "" var = var.split() @@ -38,7 +38,7 @@ class ClassExtender(object): return newdata def map_regexp_variable(self, varname, setvar = True): - var = self.d.getVar(varname, True) + var = self.d.getVar(varname) if not var: return "" var = var.split() @@ -60,7 +60,7 @@ class ClassExtender(object): return dep else: # Do not extend for that already have multilib prefix - var = self.d.getVar("MULTILIB_VARIANTS", True) + var = self.d.getVar("MULTILIB_VARIANTS") if var: var = var.split() for v in var: @@ -74,7 +74,7 @@ class ClassExtender(object): varname = varname + "_" + suffix orig = self.d.getVar("EXTENDPKGV", False) self.d.setVar("EXTENDPKGV", "EXTENDPKGV") - deps = self.d.getVar(varname, True) + deps = self.d.getVar(varname) if not deps: self.d.setVar("EXTENDPKGV", orig) return @@ -87,7 +87,7 @@ class ClassExtender(object): self.d.setVar("EXTENDPKGV", orig) def map_packagevars(self): - for pkg in (self.d.getVar("PACKAGES", True).split() + [""]): + for pkg in (self.d.getVar("PACKAGES").split() + [""]): self.map_depends_variable("RDEPENDS", pkg) self.map_depends_variable("RRECOMMENDS", pkg) self.map_depends_variable("RSUGGESTS", pkg) @@ -97,7 +97,7 @@ class ClassExtender(object): self.map_depends_variable("PKG", pkg) def rename_packages(self): - for pkg in (self.d.getVar("PACKAGES", True) or "").split(): + for pkg in (self.d.getVar("PACKAGES") or "").split(): if pkg.startswith(self.extname): self.pkgs_mapping.append([pkg.split(self.extname + "-")[1], pkg]) continue diff --git a/import-layers/yocto-poky/meta/lib/oe/classutils.py b/import-layers/yocto-poky/meta/lib/oe/classutils.py index e7856c86f..45cd5249b 100644 --- a/import-layers/yocto-poky/meta/lib/oe/classutils.py +++ b/import-layers/yocto-poky/meta/lib/oe/classutils.py @@ -36,7 +36,7 @@ abstract base classes out of the registry).""" @classmethod def prioritized(tcls): return sorted(list(tcls.registry.values()), - key=lambda v: v.priority, reverse=True) + key=lambda v: (v.priority, v.name), reverse=True) def unregister(cls): for key in cls.registry.keys(): diff --git a/import-layers/yocto-poky/meta/lib/oe/copy_buildsystem.py b/import-layers/yocto-poky/meta/lib/oe/copy_buildsystem.py index 29ac6d418..a37290418 100644 --- a/import-layers/yocto-poky/meta/lib/oe/copy_buildsystem.py +++ b/import-layers/yocto-poky/meta/lib/oe/copy_buildsystem.py @@ -21,8 +21,8 @@ class BuildSystem(object): def __init__(self, context, d): self.d = d self.context = context - self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS', True).split()] - self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE', True) or "").split() + self.layerdirs = [os.path.abspath(pth) for pth in d.getVar('BBLAYERS').split()] + self.layers_exclude = (d.getVar('SDK_LAYERS_EXCLUDE') or "").split() def copy_bitbake_and_layers(self, destdir, workspace_name=None): # Copy in all metadata layers + bitbake (as repositories) @@ -30,7 
+30,7 @@ class BuildSystem(object): bb.utils.mkdirhier(destdir) layers = list(self.layerdirs) - corebase = os.path.abspath(self.d.getVar('COREBASE', True)) + corebase = os.path.abspath(self.d.getVar('COREBASE')) layers.append(corebase) # Exclude layers @@ -46,7 +46,7 @@ class BuildSystem(object): extranum += 1 workspace_newname = '%s-%d' % (workspace_name, extranum) - corebase_files = self.d.getVar('COREBASE_FILES', True).split() + corebase_files = self.d.getVar('COREBASE_FILES').split() corebase_files = [corebase + '/' +x for x in corebase_files] # Make sure bitbake goes in bitbake_dir = bb.__file__.rsplit('/', 3)[0] @@ -100,7 +100,7 @@ class BuildSystem(object): # Drop all bbappends except the one for the image the SDK is being built for # (because of externalsrc, the workspace bbappends will interfere with the # locked signatures if present, and we don't need them anyway) - image_bbappend = os.path.splitext(os.path.basename(self.d.getVar('FILE', True)))[0] + '.bbappend' + image_bbappend = os.path.splitext(os.path.basename(self.d.getVar('FILE')))[0] + '.bbappend' appenddir = os.path.join(layerdestpath, 'appends') if os.path.isdir(appenddir): for fn in os.listdir(appenddir): @@ -208,7 +208,7 @@ def create_locked_sstate_cache(lockedsigs, input_sstate_cache, output_sstate_cac import shutil bb.note('Generating sstate-cache...') - nativelsbstring = d.getVar('NATIVELSBSTRING', True) + nativelsbstring = d.getVar('NATIVELSBSTRING') bb.process.run("gen-lockedsig-cache %s %s %s %s %s" % (lockedsigs, input_sstate_cache, output_sstate_cache, nativelsbstring, filterfile or '')) if fixedlsbstring and nativelsbstring != fixedlsbstring: nativedir = output_sstate_cache + '/' + nativelsbstring diff --git a/import-layers/yocto-poky/meta/lib/oe/data.py b/import-layers/yocto-poky/meta/lib/oe/data.py index ee48950a8..b8901e63f 100644 --- a/import-layers/yocto-poky/meta/lib/oe/data.py +++ b/import-layers/yocto-poky/meta/lib/oe/data.py @@ -1,9 +1,10 @@ +import json import oe.maketype def typed_value(key, d): """Construct a value for the specified metadata variable, using its flags to determine the type and parameters for construction.""" - var_type = d.getVarFlag(key, 'type', True) + var_type = d.getVarFlag(key, 'type') flags = d.getVarFlags(key) if flags is not None: flags = dict((flag, d.expand(value)) @@ -12,6 +13,35 @@ def typed_value(key, d): flags = {} try: - return oe.maketype.create(d.getVar(key, True) or '', var_type, **flags) + return oe.maketype.create(d.getVar(key) or '', var_type, **flags) except (TypeError, ValueError) as exc: bb.msg.fatal("Data", "%s: %s" % (key, str(exc))) + +def export2json(d, json_file, expand=True, searchString="",replaceString=""): + data2export = {} + keys2export = [] + + for key in d.keys(): + if key.startswith("_"): + continue + elif key.startswith("BB"): + continue + elif key.startswith("B_pn"): + continue + elif key.startswith("do_"): + continue + elif d.getVarFlag(key, "func"): + continue + + keys2export.append(key) + + for key in keys2export: + try: + data2export[key] = d.getVar(key, expand).replace(searchString,replaceString) + except bb.data_smart.ExpansionError: + data2export[key] = '' + except AttributeError: + pass + + with open(json_file, "w") as f: + json.dump(data2export, f, skipkeys=True, indent=4, sort_keys=True) diff --git a/import-layers/yocto-poky/meta/lib/oe/distro_check.py b/import-layers/yocto-poky/meta/lib/oe/distro_check.py index 87c52fae9..37f04ed35 100644 --- a/import-layers/yocto-poky/meta/lib/oe/distro_check.py +++ 
b/import-layers/yocto-poky/meta/lib/oe/distro_check.py @@ -1,32 +1,17 @@ -from contextlib import contextmanager - -from bb.utils import export_proxies - def create_socket(url, d): import urllib + from bb.utils import export_proxies - socket = None - try: - export_proxies(d) - socket = urllib.request.urlopen(url) - except: - bb.warn("distro_check: create_socket url %s can't access" % url) - - return socket + export_proxies(d) + return urllib.request.urlopen(url) def get_links_from_url(url, d): "Return all the href links found on the web location" from bs4 import BeautifulSoup, SoupStrainer + soup = BeautifulSoup(create_socket(url,d), "html.parser", parse_only=SoupStrainer("a")) hyperlinks = [] - - webpage = '' - sock = create_socket(url,d) - if sock: - webpage = sock.read() - - soup = BeautifulSoup(webpage, "html.parser", parse_only=SoupStrainer("a")) for line in soup.find_all('a', href=True): hyperlinks.append(line['href'].strip('/')) return hyperlinks @@ -37,6 +22,7 @@ def find_latest_numeric_release(url, d): maxstr="" for link in get_links_from_url(url, d): try: + # TODO use LooseVersion release = float(link) except: release = 0 @@ -47,144 +33,112 @@ def find_latest_numeric_release(url, d): def is_src_rpm(name): "Check if the link is pointing to a src.rpm file" - if name[-8:] == ".src.rpm": - return True - else: - return False + return name.endswith(".src.rpm") def package_name_from_srpm(srpm): "Strip out the package name from the src.rpm filename" - strings = srpm.split('-') - package_name = strings[0] - for i in range(1, len (strings) - 1): - str = strings[i] - if not str[0].isdigit(): - package_name += '-' + str - return package_name - -def clean_package_list(package_list): - "Removes multiple entries of packages and sorts the list" - set = {} - map(set.__setitem__, package_list, []) - return set.keys() - -def get_latest_released_meego_source_package_list(d): - "Returns list of all the name os packages in the latest meego distro" - - package_names = [] - try: - f = open("/tmp/Meego-1.1", "r") - for line in f: - package_names.append(line[:-1] + ":" + "main") # Also strip the '\n' at the end - except IOError: pass - package_list=clean_package_list(package_names) - return "1.0", package_list + # ca-certificates-2016.2.7-1.0.fc24.src.rpm + # ^name ^ver ^release^removed + (name, version, release) = srpm.replace(".src.rpm", "").rsplit("-", 2) + return name def get_source_package_list_from_url(url, section, d): "Return a sectioned list of package names from a URL list" bb.note("Reading %s: %s" % (url, section)) links = get_links_from_url(url, d) - srpms = list(filter(is_src_rpm, links)) - names_list = list(map(package_name_from_srpm, srpms)) + srpms = filter(is_src_rpm, links) + names_list = map(package_name_from_srpm, srpms) - new_pkgs = [] + new_pkgs = set() for pkgs in names_list: - new_pkgs.append(pkgs + ":" + section) - + new_pkgs.add(pkgs + ":" + section) return new_pkgs +def get_source_package_list_from_url_by_letter(url, section, d): + import string + from urllib.error import HTTPError + packages = set() + for letter in (string.ascii_lowercase + string.digits): + # Not all subfolders may exist, so silently handle 404 + try: + packages |= get_source_package_list_from_url(url + "/" + letter, section, d) + except HTTPError as e: + if e.code != 404: raise + return packages + def get_latest_released_fedora_source_package_list(d): "Returns list of all the name os packages in the latest fedora distro" latest = 
find_latest_numeric_release("http://archive.fedoraproject.org/pub/fedora/linux/releases/", d) - - package_names = get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Fedora/source/SRPMS/" % latest, "main", d) - -# package_names += get_source_package_list_from_url("http://download.fedora.redhat.com/pub/fedora/linux/releases/%s/Everything/source/SPRMS/" % latest, "everything") - package_names += get_source_package_list_from_url("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates", d) - - package_list=clean_package_list(package_names) - - return latest, package_list + package_names = get_source_package_list_from_url_by_letter("http://archive.fedoraproject.org/pub/fedora/linux/releases/%s/Everything/source/tree/Packages/" % latest, "main", d) + package_names |= get_source_package_list_from_url_by_letter("http://archive.fedoraproject.org/pub/fedora/linux/updates/%s/SRPMS/" % latest, "updates", d) + return latest, package_names def get_latest_released_opensuse_source_package_list(d): "Returns list of all the name os packages in the latest opensuse distro" latest = find_latest_numeric_release("http://download.opensuse.org/source/distribution/",d) package_names = get_source_package_list_from_url("http://download.opensuse.org/source/distribution/%s/repo/oss/suse/src/" % latest, "main", d) - package_names += get_source_package_list_from_url("http://download.opensuse.org/update/%s/rpm/src/" % latest, "updates", d) - - package_list=clean_package_list(package_names) - return latest, package_list + package_names |= get_source_package_list_from_url("http://download.opensuse.org/update/%s/src/" % latest, "updates", d) + return latest, package_names def get_latest_released_mandriva_source_package_list(d): "Returns list of all the name os packages in the latest mandriva distro" latest = find_latest_numeric_release("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/", d) package_names = get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/release/" % latest, "main", d) -# package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/contrib/release/" % latest, "contrib") - package_names += get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates", d) + package_names |= get_source_package_list_from_url("http://distrib-coffee.ipsl.jussieu.fr/pub/linux/MandrivaLinux/official/%s/SRPMS/main/updates/" % latest, "updates", d) + return latest, package_names - package_list=clean_package_list(package_names) - return latest, package_list +def get_latest_released_clear_source_package_list(d): + latest = find_latest_numeric_release("https://download.clearlinux.org/releases/", d) + package_names = get_source_package_list_from_url("https://download.clearlinux.org/releases/%s/clear/source/SRPMS/" % latest, "main", d) + return latest, package_names def find_latest_debian_release(url, d): "Find the latest listed debian release on the given url" - releases = [] - for link in get_links_from_url(url, d): - if link[:6] == "Debian": - if ';' not in link: - releases.append(link) + releases = [link.replace("Debian", "") + for link in get_links_from_url(url, d) + if link.startswith("Debian")] releases.sort() try: - return releases.pop()[6:] + return releases[-1] except: return "_NotFound_" def 
get_debian_style_source_package_list(url, section, d): "Return the list of package-names stored in the debian style Sources.gz file" - import tempfile import gzip - webpage = '' - sock = create_socket(url,d) - if sock: - webpage = sock.read() - - tmpfile = tempfile.NamedTemporaryFile(mode='wb', prefix='oecore.', suffix='.tmp', delete=False) - tmpfilename=tmpfile.name - tmpfile.write(sock.read()) - tmpfile.close() - bb.note("Reading %s: %s" % (url, section)) - - f = gzip.open(tmpfilename) - package_names = [] - for line in f: - if line[:9] == "Package: ": - package_names.append(line[9:-1] + ":" + section) # Also strip the '\n' at the end - os.unlink(tmpfilename) - + package_names = set() + for line in gzip.open(create_socket(url, d), mode="rt"): + if line.startswith("Package:"): + pkg = line.split(":", 1)[1].strip() + package_names.add(pkg + ":" + section) return package_names def get_latest_released_debian_source_package_list(d): - "Returns list of all the name os packages in the latest debian distro" + "Returns list of all the name of packages in the latest debian distro" latest = find_latest_debian_release("http://ftp.debian.org/debian/dists/", d) - url = "http://ftp.debian.org/debian/dists/stable/main/source/Sources.gz" + url = "http://ftp.debian.org/debian/dists/stable/main/source/Sources.gz" package_names = get_debian_style_source_package_list(url, "main", d) -# url = "http://ftp.debian.org/debian/dists/stable/contrib/source/Sources.gz" -# package_names += get_debian_style_source_package_list(url, "contrib") - url = "http://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz" - package_names += get_debian_style_source_package_list(url, "updates", d) - package_list=clean_package_list(package_names) - return latest, package_list + url = "http://ftp.debian.org/debian/dists/stable-proposed-updates/main/source/Sources.gz" + package_names |= get_debian_style_source_package_list(url, "updates", d) + return latest, package_names def find_latest_ubuntu_release(url, d): - "Find the latest listed ubuntu release on the given url" + """ + Find the latest listed Ubuntu release on the given ubuntu/dists/ URL. + + To avoid matching development releases look for distributions that have + updates, so the resulting distro could be any supported release. 
+ """ url += "?C=M;O=D" # Descending Sort by Last Modified for link in get_links_from_url(url, d): - if link[-8:] == "-updates": - return link[:-8] + if "-updates" in link: + distro = link.replace("-updates", "") + return distro return "_NotFound_" def get_latest_released_ubuntu_source_package_list(d): @@ -192,52 +146,45 @@ def get_latest_released_ubuntu_source_package_list(d): latest = find_latest_ubuntu_release("http://archive.ubuntu.com/ubuntu/dists/", d) url = "http://archive.ubuntu.com/ubuntu/dists/%s/main/source/Sources.gz" % latest package_names = get_debian_style_source_package_list(url, "main", d) -# url = "http://archive.ubuntu.com/ubuntu/dists/%s/multiverse/source/Sources.gz" % latest -# package_names += get_debian_style_source_package_list(url, "multiverse") -# url = "http://archive.ubuntu.com/ubuntu/dists/%s/universe/source/Sources.gz" % latest -# package_names += get_debian_style_source_package_list(url, "universe") url = "http://archive.ubuntu.com/ubuntu/dists/%s-updates/main/source/Sources.gz" % latest - package_names += get_debian_style_source_package_list(url, "updates", d) - package_list=clean_package_list(package_names) - return latest, package_list + package_names |= get_debian_style_source_package_list(url, "updates", d) + return latest, package_names def create_distro_packages_list(distro_check_dir, d): + import shutil + pkglst_dir = os.path.join(distro_check_dir, "package_lists") - if not os.path.isdir (pkglst_dir): - os.makedirs(pkglst_dir) - # first clear old stuff - for file in os.listdir(pkglst_dir): - os.unlink(os.path.join(pkglst_dir, file)) - - per_distro_functions = [ - ["Debian", get_latest_released_debian_source_package_list], - ["Ubuntu", get_latest_released_ubuntu_source_package_list], - ["Fedora", get_latest_released_fedora_source_package_list], - ["OpenSuSE", get_latest_released_opensuse_source_package_list], - ["Mandriva", get_latest_released_mandriva_source_package_list], - ["Meego", get_latest_released_meego_source_package_list] - ] - - from datetime import datetime - begin = datetime.now() - for distro in per_distro_functions: - name = distro[0] - release, package_list = distro[1](d) + bb.utils.remove(pkglst_dir, True) + bb.utils.mkdirhier(pkglst_dir) + + per_distro_functions = ( + ("Debian", get_latest_released_debian_source_package_list), + ("Ubuntu", get_latest_released_ubuntu_source_package_list), + ("Fedora", get_latest_released_fedora_source_package_list), + ("OpenSuSE", get_latest_released_opensuse_source_package_list), + ("Mandriva", get_latest_released_mandriva_source_package_list), + ("Clear", get_latest_released_clear_source_package_list), + ) + + for name, fetcher_func in per_distro_functions: + try: + release, package_list = fetcher_func(d) + except Exception as e: + bb.warn("Cannot fetch packages for %s: %s" % (name, e)) bb.note("Distro: %s, Latest Release: %s, # src packages: %d" % (name, release, len(package_list))) + if len(package_list) == 0: + bb.error("Didn't fetch any packages for %s %s" % (name, release)) + package_list_file = os.path.join(pkglst_dir, name + "-" + release) - f = open(package_list_file, "w+b") - for pkg in package_list: - f.write(pkg + "\n") - f.close() - end = datetime.now() - delta = end - begin - bb.note("package_list generatiosn took this much time: %d seconds" % delta.seconds) + with open(package_list_file, 'w') as f: + for pkg in sorted(package_list): + f.write(pkg + "\n") def update_distro_data(distro_check_dir, datetime, d): """ - If distro packages list data is old then rebuild it. 
- The operations has to be protected by a lock so that - only one thread performes it at a time. + If distro packages list data is old then rebuild it. + The operations has to be protected by a lock so that + only one thread performes it at a time. """ if not os.path.isdir (distro_check_dir): try: @@ -264,71 +211,59 @@ def update_distro_data(distro_check_dir, datetime, d): f.seek(0) f.write(datetime) - except OSError: - raise Exception('Unable to read/write this file: %s' % (datetime_file)) + except OSError as e: + raise Exception('Unable to open timestamp: %s' % e) finally: fcntl.lockf(f, fcntl.LOCK_UN) f.close() - + def compare_in_distro_packages_list(distro_check_dir, d): if not os.path.isdir(distro_check_dir): raise Exception("compare_in_distro_packages_list: invalid distro_check_dir passed") - + localdata = bb.data.createCopy(d) pkglst_dir = os.path.join(distro_check_dir, "package_lists") matching_distros = [] - pn = d.getVar('PN', True) - recipe_name = d.getVar('PN', True) + pn = recipe_name = d.getVar('PN') bb.note("Checking: %s" % pn) - trim_dict = dict({"-native":"-native", "-cross":"-cross", "-initial":"-initial"}) - if pn.find("-native") != -1: pnstripped = pn.split("-native") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) recipe_name = pnstripped[0] if pn.startswith("nativesdk-"): pnstripped = pn.split("nativesdk-") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[1] + ":" + d.getVar('OVERRIDES')) recipe_name = pnstripped[1] if pn.find("-cross") != -1: pnstripped = pn.split("-cross") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) recipe_name = pnstripped[0] if pn.find("-initial") != -1: pnstripped = pn.split("-initial") - localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES', True)) - bb.data.update_data(localdata) + localdata.setVar('OVERRIDES', "pn-" + pnstripped[0] + ":" + d.getVar('OVERRIDES')) recipe_name = pnstripped[0] bb.note("Recipe: %s" % recipe_name) - tmp = localdata.getVar('DISTRO_PN_ALIAS', True) distro_exceptions = dict({"OE-Core":'OE-Core', "OpenedHand":'OpenedHand', "Intel":'Intel', "Upstream":'Upstream', "Windriver":'Windriver', "OSPDT":'OSPDT Approved', "Poky":'poky'}) - - if tmp: - list = tmp.split(' ') - for str in list: - if str and str.find("=") == -1 and distro_exceptions[str]: - matching_distros.append(str) + tmp = localdata.getVar('DISTRO_PN_ALIAS') or "" + for str in tmp.split(): + if str and str.find("=") == -1 and distro_exceptions[str]: + matching_distros.append(str) distro_pn_aliases = {} - if tmp: - list = tmp.split(' ') - for str in list: - if str.find("=") != -1: - (dist, pn_alias) = str.split('=') - distro_pn_aliases[dist.strip().lower()] = pn_alias.strip() - + for str in tmp.split(): + if "=" in str: + (dist, pn_alias) = str.split('=') + distro_pn_aliases[dist.strip().lower()] = pn_alias.strip() + for file in os.listdir(pkglst_dir): (distro, distro_release) = file.split("-") - f = open(os.path.join(pkglst_dir, file), "rb") + f = open(os.path.join(pkglst_dir, file), "r") for line in f: (pkg, section) = line.split(":") if distro.lower() in distro_pn_aliases: @@ -341,38 +276,34 
@@ def compare_in_distro_packages_list(distro_check_dir, d): break f.close() - - if tmp != None: - list = tmp.split(' ') - for item in list: - matching_distros.append(item) + for item in tmp.split(): + matching_distros.append(item) bb.note("Matching: %s" % matching_distros) return matching_distros def create_log_file(d, logname): - import subprocess - logpath = d.getVar('LOG_DIR', True) + logpath = d.getVar('LOG_DIR') bb.utils.mkdirhier(logpath) logfn, logsuffix = os.path.splitext(logname) - logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME', True), logsuffix)) + logfile = os.path.join(logpath, "%s.%s%s" % (logfn, d.getVar('DATETIME'), logsuffix)) if not os.path.exists(logfile): slogfile = os.path.join(logpath, logname) if os.path.exists(slogfile): os.remove(slogfile) - subprocess.call("touch %s" % logfile, shell=True) + open(logfile, 'w+').close() os.symlink(logfile, slogfile) d.setVar('LOG_FILE', logfile) return logfile def save_distro_check_result(result, datetime, result_file, d): - pn = d.getVar('PN', True) - logdir = d.getVar('LOG_DIR', True) + pn = d.getVar('PN') + logdir = d.getVar('LOG_DIR') if not logdir: bb.error("LOG_DIR variable is not defined, can't write the distro_check results") return - if not os.path.isdir(logdir): - os.makedirs(logdir) + bb.utils.mkdirhier(logdir) + line = pn for i in result: line = line + "," + i diff --git a/import-layers/yocto-poky/meta/lib/oe/gpg_sign.py b/import-layers/yocto-poky/meta/lib/oe/gpg_sign.py index ba61f9890..7ce767ee0 100644 --- a/import-layers/yocto-poky/meta/lib/oe/gpg_sign.py +++ b/import-layers/yocto-poky/meta/lib/oe/gpg_sign.py @@ -7,11 +7,11 @@ import oe.utils class LocalSigner(object): """Class for handling local (on the build host) signing""" def __init__(self, d): - self.gpg_bin = d.getVar('GPG_BIN', True) or \ + self.gpg_bin = d.getVar('GPG_BIN') or \ bb.utils.which(os.getenv('PATH'), 'gpg') - self.gpg_path = d.getVar('GPG_PATH', True) + self.gpg_path = d.getVar('GPG_PATH') self.gpg_version = self.get_gpg_version() - self.rpm_bin = bb.utils.which(os.getenv('PATH'), "rpm") + self.rpm_bin = bb.utils.which(os.getenv('PATH'), "rpmsign") def export_pubkey(self, output_file, keyid, armor=True): """Export GPG public key to a file""" @@ -31,9 +31,10 @@ class LocalSigner(object): """Sign RPM files""" cmd = self.rpm_bin + " --addsign --define '_gpg_name %s' " % keyid - cmd += "--define '_gpg_passphrase %s' " % passphrase + gpg_args = '--batch --passphrase=%s' % passphrase if self.gpg_version > (2,1,): - cmd += "--define '_gpg_sign_cmd_extra_args --pinentry-mode=loopback' " + gpg_args += ' --pinentry-mode=loopback' + cmd += "--define '_gpg_sign_cmd_extra_args %s' " % gpg_args if self.gpg_bin: cmd += "--define '%%__gpg %s' " % self.gpg_bin if self.gpg_path: diff --git a/import-layers/yocto-poky/meta/lib/oe/lsb.py b/import-layers/yocto-poky/meta/lib/oe/lsb.py index e0bdfba25..3a945e0fc 100644 --- a/import-layers/yocto-poky/meta/lib/oe/lsb.py +++ b/import-layers/yocto-poky/meta/lib/oe/lsb.py @@ -1,26 +1,54 @@ -def release_dict(): - """Return the output of lsb_release -ir as a dictionary""" +def release_dict_osr(): + """ Populate a dict with pertinent values from /etc/os-release """ + if not os.path.exists('/etc/os-release'): + return None + + data = {} + with open('/etc/os-release') as f: + for line in f: + try: + key, val = line.rstrip().split('=', 1) + except ValueError: + continue + if key == 'ID': + data['DISTRIB_ID'] = val.strip('"') + if key == 'VERSION_ID': + data['DISTRIB_RELEASE'] = val.strip('"') + + return 
data + +def release_dict_lsb(): + """ Return the output of lsb_release -ir as a dictionary """ from subprocess import PIPE try: output, err = bb.process.run(['lsb_release', '-ir'], stderr=PIPE) except bb.process.CmdError as exc: - return None + return {} + + lsb_map = { 'Distributor ID': 'DISTRIB_ID', + 'Release': 'DISTRIB_RELEASE'} + lsb_keys = lsb_map.keys() data = {} for line in output.splitlines(): - if line.startswith("-e"): line = line[3:] + if line.startswith("-e"): + line = line[3:] try: key, value = line.split(":\t", 1) except ValueError: continue - else: - data[key] = value + if key in lsb_keys: + data[lsb_map[key]] = value + + if len(data.keys()) != 2: + return None + return data def release_dict_file(): - """ Try to gather LSB release information manually when lsb_release tool is unavailable """ - data = None + """ Try to gather release information manually when other methods fail """ + data = {} try: if os.path.exists('/etc/lsb-release'): data = {} @@ -37,14 +65,6 @@ def release_dict_file(): if match: data['DISTRIB_ID'] = match.group(1) data['DISTRIB_RELEASE'] = match.group(2) - elif os.path.exists('/etc/os-release'): - data = {} - with open('/etc/os-release') as f: - for line in f: - if line.startswith('NAME='): - data['DISTRIB_ID'] = line[5:].rstrip().strip('"') - if line.startswith('VERSION_ID='): - data['DISTRIB_RELEASE'] = line[11:].rstrip().strip('"') elif os.path.exists('/etc/SuSE-release'): data = {} data['DISTRIB_ID'] = 'SUSE LINUX' @@ -55,7 +75,7 @@ def release_dict_file(): break except IOError: - return None + return {} return data def distro_identifier(adjust_hook=None): @@ -64,15 +84,17 @@ def distro_identifier(adjust_hook=None): import re - lsb_data = release_dict() - if lsb_data: - distro_id, release = lsb_data['Distributor ID'], lsb_data['Release'] - else: - lsb_data_file = release_dict_file() - if lsb_data_file: - distro_id, release = lsb_data_file['DISTRIB_ID'], lsb_data_file.get('DISTRIB_RELEASE', None) - else: - distro_id, release = None, None + # Try /etc/os-release first, then the output of `lsb_release -ir` and + # finally fall back on parsing various release files in order to determine + # host distro name and version. 
+ distro_data = release_dict_osr() + if not distro_data: + distro_data = release_dict_lsb() + if not distro_data: + distro_data = release_dict_file() + + distro_id = distro_data.get('DISTRIB_ID', '') + release = distro_data.get('DISTRIB_RELEASE', '') if adjust_hook: distro_id, release = adjust_hook(distro_id, release) @@ -82,7 +104,7 @@ def distro_identifier(adjust_hook=None): distro_id = re.sub(r'\W', '', distro_id) if release: - id_str = '{0}-{1}'.format(distro_id, release) + id_str = '{0}-{1}'.format(distro_id.lower(), release) else: id_str = distro_id return id_str.replace(' ','-').replace('/','-') diff --git a/import-layers/yocto-poky/meta/lib/oe/manifest.py b/import-layers/yocto-poky/meta/lib/oe/manifest.py index 95f8eb2df..60c49be0e 100644 --- a/import-layers/yocto-poky/meta/lib/oe/manifest.py +++ b/import-layers/yocto-poky/meta/lib/oe/manifest.py @@ -59,9 +59,9 @@ class Manifest(object, metaclass=ABCMeta): if manifest_dir is None: if manifest_type != self.MANIFEST_TYPE_IMAGE: - self.manifest_dir = self.d.getVar('SDK_DIR', True) + self.manifest_dir = self.d.getVar('SDK_DIR') else: - self.manifest_dir = self.d.getVar('WORKDIR', True) + self.manifest_dir = self.d.getVar('WORKDIR') else: self.manifest_dir = manifest_dir @@ -82,7 +82,7 @@ class Manifest(object, metaclass=ABCMeta): This will be used for testing until the class is implemented properly! """ def _create_dummy_initial(self): - image_rootfs = self.d.getVar('IMAGE_ROOTFS', True) + image_rootfs = self.d.getVar('IMAGE_ROOTFS') pkg_list = dict() if image_rootfs.find("core-image-sato-sdk") > 0: pkg_list[self.PKG_TYPE_MUST_INSTALL] = \ @@ -104,7 +104,7 @@ class Manifest(object, metaclass=ABCMeta): pkg_list['lgp'] = \ "locale-base-en-us locale-base-en-gb" elif image_rootfs.find("core-image-minimal") > 0: - pkg_list[self.PKG_TYPE_MUST_INSTALL] = "run-postinsts packagegroup-core-boot" + pkg_list[self.PKG_TYPE_MUST_INSTALL] = "packagegroup-core-boot" with open(self.initial_manifest, "w+") as manifest: manifest.write(self.initial_manifest_file_header) @@ -195,7 +195,7 @@ class RpmManifest(Manifest): for pkg in pkg_list.split(): pkg_type = self.PKG_TYPE_MUST_INSTALL - ml_variants = self.d.getVar('MULTILIB_VARIANTS', True).split() + ml_variants = self.d.getVar('MULTILIB_VARIANTS').split() for ml_variant in ml_variants: if pkg.startswith(ml_variant + '-'): @@ -216,13 +216,13 @@ class RpmManifest(Manifest): for var in self.var_maps[self.manifest_type]: if var in self.vars_to_split: - split_pkgs = self._split_multilib(self.d.getVar(var, True)) + split_pkgs = self._split_multilib(self.d.getVar(var)) if split_pkgs is not None: pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) else: - pkg_list = self.d.getVar(var, True) + pkg_list = self.d.getVar(var) if pkg_list is not None: - pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var, True) + pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var) for pkg_type in pkgs: for pkg in pkgs[pkg_type].split(): @@ -245,7 +245,7 @@ class OpkgManifest(Manifest): for pkg in pkg_list.split(): pkg_type = self.PKG_TYPE_MUST_INSTALL - ml_variants = self.d.getVar('MULTILIB_VARIANTS', True).split() + ml_variants = self.d.getVar('MULTILIB_VARIANTS').split() for ml_variant in ml_variants: if pkg.startswith(ml_variant + '-'): @@ -266,13 +266,13 @@ class OpkgManifest(Manifest): for var in self.var_maps[self.manifest_type]: if var in self.vars_to_split: - split_pkgs = self._split_multilib(self.d.getVar(var, True)) + split_pkgs = self._split_multilib(self.d.getVar(var)) if split_pkgs is not 
None: pkgs = dict(list(pkgs.items()) + list(split_pkgs.items())) else: - pkg_list = self.d.getVar(var, True) + pkg_list = self.d.getVar(var) if pkg_list is not None: - pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var, True) + pkgs[self.var_maps[self.manifest_type][var]] = self.d.getVar(var) for pkg_type in pkgs: for pkg in pkgs[pkg_type].split(): @@ -310,7 +310,7 @@ class DpkgManifest(Manifest): manifest.write(self.initial_manifest_file_header) for var in self.var_maps[self.manifest_type]: - pkg_list = self.d.getVar(var, True) + pkg_list = self.d.getVar(var) if pkg_list is None: continue @@ -332,7 +332,7 @@ def create_manifest(d, final_manifest=False, manifest_dir=None, 'ipk': OpkgManifest, 'deb': DpkgManifest} - manifest = manifest_map[d.getVar('IMAGE_PKGTYPE', True)](d, manifest_dir, manifest_type) + manifest = manifest_map[d.getVar('IMAGE_PKGTYPE')](d, manifest_dir, manifest_type) if final_manifest: manifest.create_final() diff --git a/import-layers/yocto-poky/meta/lib/oe/package.py b/import-layers/yocto-poky/meta/lib/oe/package.py index 02642f29f..4797e7d65 100644 --- a/import-layers/yocto-poky/meta/lib/oe/package.py +++ b/import-layers/yocto-poky/meta/lib/oe/package.py @@ -18,23 +18,24 @@ def runstrip(arg): newmode = origmode | stat.S_IWRITE | stat.S_IREAD os.chmod(file, newmode) - extraflags = "" + stripcmd = [strip] # kernel module if elftype & 16: - extraflags = "--strip-debug --remove-section=.comment --remove-section=.note --preserve-dates" + stripcmd.extend(["--strip-debug", "--remove-section=.comment", + "--remove-section=.note", "--preserve-dates"]) # .so and shared library elif ".so" in file and elftype & 8: - extraflags = "--remove-section=.comment --remove-section=.note --strip-unneeded" + stripcmd.extend(["--remove-section=.comment", "--remove-section=.note", "--strip-unneeded"]) # shared or executable: elif elftype & 8 or elftype & 4: - extraflags = "--remove-section=.comment --remove-section=.note" + stripcmd.extend(["--remove-section=.comment", "--remove-section=.note"]) - stripcmd = "'%s' %s '%s'" % (strip, extraflags, file) + stripcmd.append(file) bb.debug(1, "runstrip: %s" % stripcmd) try: - output = subprocess.check_output(stripcmd, stderr=subprocess.STDOUT, shell=True) + output = subprocess.check_output(stripcmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: bb.error("runstrip: '%s' strip command failed with %s (%s)" % (stripcmd, e.returncode, e.output)) @@ -60,32 +61,59 @@ def filedeprunner(arg): provides = {} requires = {} - r = re.compile(r'[<>=]+ +[^ ]*') + file_re = re.compile(r'\s+\d+\s(.*)') + dep_re = re.compile(r'\s+(\S)\s+(.*)') + r = re.compile(r'[<>=]+\s+\S*') def process_deps(pipe, pkg, pkgdest, provides, requires): + file = None for line in pipe: - f = line.decode("utf-8").split(" ", 1)[0].strip() - line = line.decode("utf-8").split(" ", 1)[1].strip() + line = line.decode("utf-8") - if line.startswith("Requires:"): + m = file_re.match(line) + if m: + file = m.group(1) + file = file.replace(pkgdest + "/" + pkg, "") + file = file_translate(file) + continue + + m = dep_re.match(line) + if not m or not file: + continue + + type, dep = m.groups() + + if type == 'R': i = requires - elif line.startswith("Provides:"): + elif type == 'P': i = provides else: - continue + continue - file = f.replace(pkgdest + "/" + pkg, "") - file = file_translate(file) - value = line.split(":", 1)[1].strip() - value = r.sub(r'(\g<0>)', value) + if dep.startswith("python("): + continue - if value.startswith("rpmlib("): + # Ignore all 
perl(VMS::...) and perl(Mac::...) dependencies. These + # are typically used conditionally from the Perl code, but are + # generated as unconditional dependencies. + if dep.startswith('perl(VMS::') or dep.startswith('perl(Mac::'): continue - if value == "python": + + # Ignore perl dependencies on .pl files. + if dep.startswith('perl(') and dep.endswith('.pl)'): continue + + # Remove perl versions and perl module versions since they typically + # do not make sense when used as package versions. + if dep.startswith('perl') and r.search(dep): + dep = dep.split()[0] + + # Put parentheses around any version specifications. + dep = r.sub(r'(\g<0>)',dep) + if file not in i: i[file] = [] - i[file].append(value) + i[file].append(dep) return provides, requires @@ -103,7 +131,7 @@ def read_shlib_providers(d): import re shlib_provider = {} - shlibs_dirs = d.getVar('SHLIBSDIRS', True).split() + shlibs_dirs = d.getVar('SHLIBSDIRS').split() list_re = re.compile('^(.*)\.list$') # Go from least to most specific since the last one found wins for dir in reversed(shlibs_dirs): @@ -149,6 +177,7 @@ def npm_split_package_dirs(pkgdir): continue pkgitems.append(pathitem) pkgname = '-'.join(pkgitems).replace('_', '-') + pkgname = pkgname.replace('@', '') pkgfile = os.path.join(root, dn, 'package.json') data = None if os.path.exists(pkgfile): diff --git a/import-layers/yocto-poky/meta/lib/oe/package_manager.py b/import-layers/yocto-poky/meta/lib/oe/package_manager.py index 13577b18b..3a2daadaf 100644 --- a/import-layers/yocto-poky/meta/lib/oe/package_manager.py +++ b/import-layers/yocto-poky/meta/lib/oe/package_manager.py @@ -102,108 +102,14 @@ class Indexer(object, metaclass=ABCMeta): class RpmIndexer(Indexer): - def get_ml_prefix_and_os_list(self, arch_var=None, os_var=None): - package_archs = collections.OrderedDict() - target_os = collections.OrderedDict() - - if arch_var is not None and os_var is not None: - package_archs['default'] = self.d.getVar(arch_var, True).split() - package_archs['default'].reverse() - target_os['default'] = self.d.getVar(os_var, True).strip() - else: - package_archs['default'] = self.d.getVar("PACKAGE_ARCHS", True).split() - # arch order is reversed. This ensures the -best- match is - # listed first! 
- package_archs['default'].reverse() - target_os['default'] = self.d.getVar("TARGET_OS", True).strip() - multilibs = self.d.getVar('MULTILIBS', True) or "" - for ext in multilibs.split(): - eext = ext.split(':') - if len(eext) > 1 and eext[0] == 'multilib': - localdata = bb.data.createCopy(self.d) - default_tune_key = "DEFAULTTUNE_virtclass-multilib-" + eext[1] - default_tune = localdata.getVar(default_tune_key, False) - if default_tune is None: - default_tune_key = "DEFAULTTUNE_ML_" + eext[1] - default_tune = localdata.getVar(default_tune_key, False) - if default_tune: - localdata.setVar("DEFAULTTUNE", default_tune) - bb.data.update_data(localdata) - package_archs[eext[1]] = localdata.getVar('PACKAGE_ARCHS', - True).split() - package_archs[eext[1]].reverse() - target_os[eext[1]] = localdata.getVar("TARGET_OS", - True).strip() - - ml_prefix_list = collections.OrderedDict() - for mlib in package_archs: - if mlib == 'default': - ml_prefix_list[mlib] = package_archs[mlib] - else: - ml_prefix_list[mlib] = list() - for arch in package_archs[mlib]: - if arch in ['all', 'noarch', 'any']: - ml_prefix_list[mlib].append(arch) - else: - ml_prefix_list[mlib].append(mlib + "_" + arch) - - return (ml_prefix_list, target_os) - def write_index(self): - sdk_pkg_archs = (self.d.getVar('SDK_PACKAGE_ARCHS', True) or "").replace('-', '_').split() - all_mlb_pkg_archs = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split() - - mlb_prefix_list = self.get_ml_prefix_and_os_list()[0] - - archs = set() - for item in mlb_prefix_list: - archs = archs.union(set(i.replace('-', '_') for i in mlb_prefix_list[item])) - - if len(archs) == 0: - archs = archs.union(set(all_mlb_pkg_archs)) - - archs = archs.union(set(sdk_pkg_archs)) - - rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo") - if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1': - signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True)) - else: - signer = None - index_cmds = [] - repomd_files = [] - rpm_dirs_found = False - for arch in archs: - dbpath = os.path.join(self.d.getVar('WORKDIR', True), 'rpmdb', arch) - if os.path.exists(dbpath): - bb.utils.remove(dbpath, True) - arch_dir = os.path.join(self.deploy_dir, arch) - if not os.path.isdir(arch_dir): - continue + if self.d.getVar('PACKAGE_FEED_SIGN') == '1': + raise NotImplementedError('Package feed signing not yet implementd for rpm') - index_cmds.append("%s --dbpath %s --update -q %s" % \ - (rpm_createrepo, dbpath, arch_dir)) - repomd_files.append(os.path.join(arch_dir, 'repodata', 'repomd.xml')) - - rpm_dirs_found = True - - if not rpm_dirs_found: - bb.note("There are no packages in %s" % self.deploy_dir) - return - - # Create repodata - result = oe.utils.multiprocess_exec(index_cmds, create_index) + createrepo_c = bb.utils.which(os.environ['PATH'], "createrepo_c") + result = create_index("%s --update -q %s" % (createrepo_c, self.deploy_dir)) if result: - bb.fatal('%s' % ('\n'.join(result))) - # Sign repomd - if signer: - for repomd in repomd_files: - feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True) - is_ascii_sig = (feed_sig_type.upper() != "BIN") - signer.detach_sign(repomd, - self.d.getVar('PACKAGE_FEED_GPG_NAME', True), - self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True), - armor=is_ascii_sig) - + bb.fatal(result) class OpkgIndexer(Indexer): def write_index(self): @@ -212,8 +118,8 @@ class OpkgIndexer(Indexer): "MULTILIB_ARCHS"] opkg_index_cmd = bb.utils.which(os.getenv('PATH'), "opkg-make-index") - if 
self.d.getVar('PACKAGE_FEED_SIGN', True) == '1': - signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND', True)) + if self.d.getVar('PACKAGE_FEED_SIGN') == '1': + signer = get_signer(self.d, self.d.getVar('PACKAGE_FEED_GPG_BACKEND')) else: signer = None @@ -223,7 +129,7 @@ class OpkgIndexer(Indexer): index_cmds = set() index_sign_files = set() for arch_var in arch_vars: - archs = self.d.getVar(arch_var, True) + archs = self.d.getVar(arch_var) if archs is None: continue @@ -251,12 +157,12 @@ class OpkgIndexer(Indexer): bb.fatal('%s' % ('\n'.join(result))) if signer: - feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE', True) + feed_sig_type = self.d.getVar('PACKAGE_FEED_GPG_SIGNATURE_TYPE') is_ascii_sig = (feed_sig_type.upper() != "BIN") for f in index_sign_files: signer.detach_sign(f, - self.d.getVar('PACKAGE_FEED_GPG_NAME', True), - self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE', True), + self.d.getVar('PACKAGE_FEED_GPG_NAME'), + self.d.getVar('PACKAGE_FEED_GPG_PASSPHRASE_FILE'), armor=is_ascii_sig) @@ -290,16 +196,16 @@ class DpkgIndexer(Indexer): os.environ['APT_CONFIG'] = self.apt_conf_file - pkg_archs = self.d.getVar('PACKAGE_ARCHS', True) + pkg_archs = self.d.getVar('PACKAGE_ARCHS') if pkg_archs is not None: arch_list = pkg_archs.split() - sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS', True) + sdk_pkg_archs = self.d.getVar('SDK_PACKAGE_ARCHS') if sdk_pkg_archs is not None: for a in sdk_pkg_archs.split(): if a not in pkg_archs: arch_list.append(a) - all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split() + all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split() arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in arch_list) apt_ftparchive = bb.utils.which(os.getenv('PATH'), "apt-ftparchive") @@ -332,7 +238,7 @@ class DpkgIndexer(Indexer): result = oe.utils.multiprocess_exec(index_cmds, create_index) if result: bb.fatal('%s' % ('\n'.join(result))) - if self.d.getVar('PACKAGE_FEED_SIGN', True) == '1': + if self.d.getVar('PACKAGE_FEED_SIGN') == '1': raise NotImplementedError('Package feed signing not implementd for dpkg') @@ -346,119 +252,9 @@ class PkgsList(object, metaclass=ABCMeta): def list_pkgs(self): pass - class RpmPkgsList(PkgsList): - def __init__(self, d, rootfs_dir, arch_var=None, os_var=None): - super(RpmPkgsList, self).__init__(d, rootfs_dir) - - self.rpm_cmd = bb.utils.which(os.getenv('PATH'), "rpm") - self.image_rpmlib = os.path.join(self.rootfs_dir, 'var/lib/rpm') - - self.ml_prefix_list, self.ml_os_list = \ - RpmIndexer(d, rootfs_dir).get_ml_prefix_and_os_list(arch_var, os_var) - - # Determine rpm version - cmd = "%s --version" % self.rpm_cmd - try: - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") - except subprocess.CalledProcessError as e: - bb.fatal("Getting rpm version failed. 
Command '%s' " - "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) - - ''' - Translate the RPM/Smart format names to the OE multilib format names - ''' - def _pkg_translate_smart_to_oe(self, pkg, arch): - new_pkg = pkg - new_arch = arch - fixed_arch = arch.replace('_', '-') - found = 0 - for mlib in self.ml_prefix_list: - for cmp_arch in self.ml_prefix_list[mlib]: - fixed_cmp_arch = cmp_arch.replace('_', '-') - if fixed_arch == fixed_cmp_arch: - if mlib == 'default': - new_pkg = pkg - new_arch = cmp_arch - else: - new_pkg = mlib + '-' + pkg - # We need to strip off the ${mlib}_ prefix on the arch - new_arch = cmp_arch.replace(mlib + '_', '') - - # Workaround for bug 3565. Simply look to see if we - # know of a package with that name, if not try again! - filename = os.path.join(self.d.getVar('PKGDATA_DIR', True), - 'runtime-reverse', - new_pkg) - if os.path.exists(filename): - found = 1 - break - - if found == 1 and fixed_arch == fixed_cmp_arch: - break - #bb.note('%s, %s -> %s, %s' % (pkg, arch, new_pkg, new_arch)) - return new_pkg, new_arch - - def _list_pkg_deps(self): - cmd = [bb.utils.which(os.getenv('PATH'), "rpmresolve"), - "-t", self.image_rpmlib] - - try: - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).strip().decode("utf-8") - except subprocess.CalledProcessError as e: - bb.fatal("Cannot get the package dependencies. Command '%s' " - "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) - - return output - def list_pkgs(self): - cmd = self.rpm_cmd + ' --root ' + self.rootfs_dir - cmd += ' -D "_dbpath /var/lib/rpm" -qa' - cmd += " --qf '[%{NAME} %{ARCH} %{VERSION} %{PACKAGEORIGIN}\n]'" - - try: - # bb.note(cmd) - tmp_output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip().decode("utf-8") - except subprocess.CalledProcessError as e: - bb.fatal("Cannot get the installed packages list. 
Command '%s' " - "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) - - output = dict() - deps = dict() - dependencies = self._list_pkg_deps() - - # Populate deps dictionary for better manipulation - for line in dependencies.splitlines(): - try: - pkg, dep = line.split("|") - if not pkg in deps: - deps[pkg] = list() - if not dep in deps[pkg]: - deps[pkg].append(dep) - except: - # Ignore any other lines they're debug or errors - pass - - for line in tmp_output.split('\n'): - if len(line.strip()) == 0: - continue - pkg = line.split()[0] - arch = line.split()[1] - ver = line.split()[2] - dep = deps.get(pkg, []) - - # Skip GPG keys - if pkg == 'gpg-pubkey': - continue - - pkgorigin = line.split()[3] - new_pkg, new_arch = self._pkg_translate_smart_to_oe(pkg, arch) - - output[new_pkg] = {"arch":new_arch, "ver":ver, - "filename":pkgorigin, "deps":dep} - - return output - + return RpmPM(self.d, self.rootfs_dir, self.d.getVar('TARGET_VENDOR')).list_installed() class OpkgPkgsList(PkgsList): def __init__(self, d, rootfs_dir, config_file): @@ -466,7 +262,7 @@ class OpkgPkgsList(PkgsList): self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg") self.opkg_args = "-f %s -o %s " % (config_file, rootfs_dir) - self.opkg_args += self.d.getVar("OPKG_ARGS", True) + self.opkg_args += self.d.getVar("OPKG_ARGS") def list_pkgs(self, format=None): cmd = "%s %s status" % (self.opkg_cmd, self.opkg_args) @@ -514,9 +310,6 @@ class PackageManager(object, metaclass=ABCMeta): self.d = d self.deploy_dir = None self.deploy_lock = None - self.feed_uris = self.d.getVar('PACKAGE_FEED_URIS', True) or "" - self.feed_base_paths = self.d.getVar('PACKAGE_FEED_BASE_PATHS', True) or "" - self.feed_archs = self.d.getVar('PACKAGE_FEED_ARCHS', True) """ Update the package manager package database. @@ -556,8 +349,24 @@ class PackageManager(object, metaclass=ABCMeta): def list_installed(self): pass + """ + Returns the path to a tmpdir where resides the contents of a package. + + Deleting the tmpdir is responsability of the caller. + + """ + @abstractmethod + def extract(self, pkg): + pass + + """ + Add remote package feeds into repository manager configuration. The parameters + for the feeds are set by feed_uris, feed_base_paths and feed_archs. + See http://www.yoctoproject.org/docs/current/ref-manual/ref-manual.html#var-PACKAGE_FEED_URIS + for their description. 
+ """ @abstractmethod - def insert_feeds_uris(self): + def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs): pass """ @@ -568,20 +377,11 @@ class PackageManager(object, metaclass=ABCMeta): installation """ def install_complementary(self, globs=None): - # we need to write the list of installed packages to a file because the - # oe-pkgdata-util reads it from a file - installed_pkgs_file = os.path.join(self.d.getVar('WORKDIR', True), - "installed_pkgs.txt") - with open(installed_pkgs_file, "w+") as installed_pkgs: - pkgs = self.list_installed() - output = oe.utils.format_pkg_list(pkgs, "arch") - installed_pkgs.write(output) - if globs is None: - globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY', True) + globs = self.d.getVar('IMAGE_INSTALL_COMPLEMENTARY') split_linguas = set() - for translation in self.d.getVar('IMAGE_LINGUAS', True).split(): + for translation in self.d.getVar('IMAGE_LINGUAS').split(): split_linguas.add(translation) split_linguas.add(translation.split('-')[0]) @@ -593,22 +393,29 @@ class PackageManager(object, metaclass=ABCMeta): if globs is None: return - cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"), - "-p", self.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file, - globs] - exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY', True) - if exclude: - cmd.extend(['--exclude=' + '|'.join(exclude.split())]) - try: - bb.note("Installing complementary packages ...") - bb.note('Running %s' % cmd) - complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8") - except subprocess.CalledProcessError as e: - bb.fatal("Could not compute complementary packages list. Command " - "'%s' returned %d:\n%s" % - (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) - self.install(complementary_pkgs.split(), attempt_only=True) - os.remove(installed_pkgs_file) + # we need to write the list of installed packages to a file because the + # oe-pkgdata-util reads it from a file + with tempfile.NamedTemporaryFile(mode="w+", prefix="installed-pkgs") as installed_pkgs: + pkgs = self.list_installed() + output = oe.utils.format_pkg_list(pkgs, "arch") + installed_pkgs.write(output) + installed_pkgs.flush() + + cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"), + "-p", self.d.getVar('PKGDATA_DIR'), "glob", installed_pkgs.name, + globs] + exclude = self.d.getVar('PACKAGE_EXCLUDE_COMPLEMENTARY') + if exclude: + cmd.extend(['--exclude=' + '|'.join(exclude.split())]) + try: + bb.note("Installing complementary packages ...") + bb.note('Running %s' % cmd) + complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8") + except subprocess.CalledProcessError as e: + bb.fatal("Could not compute complementary packages list. 
Command " + "'%s' returned %d:\n%s" % + (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) + self.install(complementary_pkgs.split(), attempt_only=True) def deploy_dir_lock(self): if self.deploy_dir is None: @@ -654,829 +461,299 @@ class RpmPM(PackageManager): task_name='target', providename=None, arch_var=None, - os_var=None): + os_var=None, + rpm_repo_workdir="oe-rootfs-repo"): super(RpmPM, self).__init__(d) self.target_rootfs = target_rootfs self.target_vendor = target_vendor self.task_name = task_name - self.providename = providename - self.fullpkglist = list() - self.deploy_dir = self.d.getVar('DEPLOY_DIR_RPM', True) - self.etcrpm_dir = os.path.join(self.target_rootfs, "etc/rpm") - self.install_dir_name = "oe_install" - self.install_dir_path = os.path.join(self.target_rootfs, self.install_dir_name) - self.rpm_cmd = bb.utils.which(os.getenv('PATH'), "rpm") - self.smart_cmd = bb.utils.which(os.getenv('PATH'), "smart") - # 0 = default, only warnings - # 1 = --log-level=info (includes information about executing scriptlets and their output) - # 2 = --log-level=debug - # 3 = --log-level=debug plus dumps of scriplet content and command invocation - self.debug_level = int(d.getVar('ROOTFS_RPM_DEBUG', True) or "0") - self.smart_opt = "--log-level=%s --data-dir=%s" % \ - ("warning" if self.debug_level == 0 else - "info" if self.debug_level == 1 else - "debug", - os.path.join(target_rootfs, 'var/lib/smart')) - self.scriptlet_wrapper = self.d.expand('${WORKDIR}/scriptlet_wrapper') + if arch_var == None: + self.archs = self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS').replace("-","_") + else: + self.archs = self.d.getVar(arch_var).replace("-","_") + if task_name == "host": + self.primary_arch = self.d.getVar('SDK_ARCH') + else: + self.primary_arch = self.d.getVar('MACHINE_ARCH') + + self.rpm_repo_dir = oe.path.join(self.d.getVar('WORKDIR'), rpm_repo_workdir) + bb.utils.mkdirhier(self.rpm_repo_dir) + oe.path.symlink(self.d.getVar('DEPLOY_DIR_RPM'), oe.path.join(self.rpm_repo_dir, "rpm"), True) + + self.saved_packaging_data = self.d.expand('${T}/saved_packaging_data/%s' % self.task_name) + if not os.path.exists(self.d.expand('${T}/saved_packaging_data')): + bb.utils.mkdirhier(self.d.expand('${T}/saved_packaging_data')) + self.packaging_data_dirs = ['var/lib/rpm', 'var/lib/dnf', 'var/cache/dnf'] self.solution_manifest = self.d.expand('${T}/saved/%s_solution' % self.task_name) - self.saved_rpmlib = self.d.expand('${T}/saved/%s' % self.task_name) - self.image_rpmlib = os.path.join(self.target_rootfs, 'var/lib/rpm') - if not os.path.exists(self.d.expand('${T}/saved')): bb.utils.mkdirhier(self.d.expand('${T}/saved')) - packageindex_dir = os.path.join(self.d.getVar('WORKDIR', True), 'rpms') - self.indexer = RpmIndexer(self.d, packageindex_dir) - self.pkgs_list = RpmPkgsList(self.d, self.target_rootfs, arch_var, os_var) - - self.ml_prefix_list, self.ml_os_list = self.indexer.get_ml_prefix_and_os_list(arch_var, os_var) - - def insert_feeds_uris(self): - if self.feed_uris == "": - return - - arch_list = [] - if self.feed_archs is not None: - # User define feed architectures - arch_list = self.feed_archs.split() + def _configure_dnf(self): + # libsolv handles 'noarch' internally, we don't need to specify it explicitly + archs = [i for i in reversed(self.archs.split()) if i not in ["any", "all", "noarch"]] + # This prevents accidental matching against libsolv's built-in policies + if len(archs) <= 1: + archs = archs + ["bogusarch"] + confdir = "%s/%s" %(self.target_rootfs, "etc/dnf/vars/") + 
bb.utils.mkdirhier(confdir) + open(confdir + "arch", 'w').write(":".join(archs)) + distro_codename = self.d.getVar('DISTRO_CODENAME') + open(confdir + "releasever", 'w').write(distro_codename if distro_codename is not None else '') + + open(oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), 'w').write("") + + + def _configure_rpm(self): + # We need to configure rpm to use our primary package architecture as the installation architecture, + # and to make it compatible with other package architectures that we use. + # Otherwise it will refuse to proceed with packages installation. + platformconfdir = "%s/%s" %(self.target_rootfs, "etc/rpm/") + rpmrcconfdir = "%s/%s" %(self.target_rootfs, "etc/") + bb.utils.mkdirhier(platformconfdir) + open(platformconfdir + "platform", 'w').write("%s-pc-linux" % self.primary_arch) + open(rpmrcconfdir + "rpmrc", 'w').write("arch_compat: %s: %s\n" % (self.primary_arch, self.archs if len(self.archs) > 0 else self.primary_arch)) + + open(platformconfdir + "macros", 'w').write("%_transaction_color 7\n") + if self.d.getVar('RPM_PREFER_ELF_ARCH'): + open(platformconfdir + "macros", 'a').write("%%_prefer_color %s" % (self.d.getVar('RPM_PREFER_ELF_ARCH'))) else: - # List must be prefered to least preferred order - default_platform_extra = list() - platform_extra = list() - bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or "" - for mlib in self.ml_os_list: - for arch in self.ml_prefix_list[mlib]: - plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib] - if mlib == bbextendvariant: - if plt not in default_platform_extra: - default_platform_extra.append(plt) - else: - if plt not in platform_extra: - platform_extra.append(plt) - platform_extra = default_platform_extra + platform_extra + open(platformconfdir + "macros", 'a').write("%_prefer_color 7") + + if self.d.getVar('RPM_SIGN_PACKAGES') == '1': + signer = get_signer(self.d, self.d.getVar('RPM_GPG_BACKEND')) + pubkey_path = oe.path.join(self.d.getVar('B'), 'rpm-key') + signer.export_pubkey(pubkey_path, self.d.getVar('RPM_GPG_NAME')) + rpm_bin = bb.utils.which(os.getenv('PATH'), "rpmkeys") + cmd = [rpm_bin, '--root=%s' % self.target_rootfs, '--import', pubkey_path] + try: + subprocess.check_output(cmd, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + bb.fatal("Importing GPG key failed. 
Command '%s' " + "returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) - for canonical_arch in platform_extra: - arch = canonical_arch.split('-')[0] - if not os.path.exists(os.path.join(self.deploy_dir, arch)): - continue - arch_list.append(arch) + def create_configs(self): + self._configure_dnf() + self._configure_rpm() - feed_uris = self.construct_uris(self.feed_uris.split(), self.feed_base_paths.split()) - - uri_iterator = 0 - channel_priority = 10 + 5 * len(feed_uris) * (len(arch_list) if arch_list else 1) - - for uri in feed_uris: - if arch_list: - for arch in arch_list: - bb.note('Adding Smart channel url%d%s (%s)' % - (uri_iterator, arch, channel_priority)) - self._invoke_smart('channel --add url%d-%s type=rpm-md baseurl=%s/%s -y' - % (uri_iterator, arch, uri, arch)) - self._invoke_smart('channel --set url%d-%s priority=%d' % - (uri_iterator, arch, channel_priority)) - channel_priority -= 5 - else: - bb.note('Adding Smart channel url%d (%s)' % - (uri_iterator, channel_priority)) - self._invoke_smart('channel --add url%d type=rpm-md baseurl=%s -y' - % (uri_iterator, uri)) - self._invoke_smart('channel --set url%d priority=%d' % - (uri_iterator, channel_priority)) - channel_priority -= 5 + def write_index(self): + lockfilename = self.d.getVar('DEPLOY_DIR_RPM') + "/rpm.lock" + lf = bb.utils.lockfile(lockfilename, False) + RpmIndexer(self.d, self.rpm_repo_dir).write_index() + bb.utils.unlockfile(lf) - uri_iterator += 1 + def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs): + from urllib.parse import urlparse - ''' - Create configs for rpm and smart, and multilib is supported - ''' - def create_configs(self): - target_arch = self.d.getVar('TARGET_ARCH', True) - platform = '%s%s-%s' % (target_arch.replace('-', '_'), - self.target_vendor, - self.ml_os_list['default']) - - # List must be prefered to least preferred order - default_platform_extra = list() - platform_extra = list() - bbextendvariant = self.d.getVar('BBEXTENDVARIANT', True) or "" - for mlib in self.ml_os_list: - for arch in self.ml_prefix_list[mlib]: - plt = arch.replace('-', '_') + '-.*-' + self.ml_os_list[mlib] - if mlib == bbextendvariant: - if plt not in default_platform_extra: - default_platform_extra.append(plt) - else: - if plt not in platform_extra: - platform_extra.append(plt) - platform_extra = default_platform_extra + platform_extra + if feed_uris == "": + return - self._create_configs(platform, platform_extra) + bb.utils.mkdirhier(oe.path.join(self.target_rootfs, "etc", "yum.repos.d")) + remote_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split()) + for uri in remote_uris: + repo_base = "oe-remote-repo" + "-".join(urlparse(uri).path.split("/")) + if feed_archs is not None: + for arch in feed_archs.split(): + repo_uri = uri + "/" + arch + repo_id = "oe-remote-repo" + "-".join(urlparse(repo_uri).path.split("/")) + repo_name = "OE Remote Repo:" + " ".join(urlparse(repo_uri).path.split("/")) + open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'a').write( + "[%s]\nname=%s\nbaseurl=%s\n\n" % (repo_id, repo_name, repo_uri)) + else: + repo_name = "OE Remote Repo:" + " ".join(urlparse(uri).path.split("/")) + repo_uri = uri + open(oe.path.join(self.target_rootfs, "etc", "yum.repos.d", repo_base + ".repo"), 'w').write( + "[%s]\nname=%s\nbaseurl=%s\n" % (repo_base, repo_name, repo_uri)) - def _invoke_smart(self, args): - cmd = "%s %s %s" % (self.smart_cmd, self.smart_opt, args) - # bb.note(cmd) - try: - complementary_pkgs = 
subprocess.check_output(cmd, - stderr=subprocess.STDOUT, - shell=True).decode("utf-8") - # bb.note(complementary_pkgs) - return complementary_pkgs - except subprocess.CalledProcessError as e: - bb.fatal("Could not invoke smart. Command " - "'%s' returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) - - def _search_pkg_name_in_feeds(self, pkg, feed_archs): - for arch in feed_archs: - arch = arch.replace('-', '_') - regex_match = re.compile(r"^%s-[^-]*-[^-]*@%s$" % \ - (re.escape(pkg), re.escape(arch))) - for p in self.fullpkglist: - if regex_match.match(p) is not None: - # First found is best match - # bb.note('%s -> %s' % (pkg, pkg + '@' + arch)) - return pkg + '@' + arch - - # Search provides if not found by pkgname. - bb.note('Not found %s by name, searching provides ...' % pkg) - cmd = "%s %s query --provides %s --show-format='$name-$version'" % \ - (self.smart_cmd, self.smart_opt, pkg) - cmd += " | sed -ne 's/ *Provides://p'" - bb.note('cmd: %s' % cmd) - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") - # Found a provider - if output: - bb.note('Found providers for %s: %s' % (pkg, output)) - for p in output.split(): - for arch in feed_archs: - arch = arch.replace('-', '_') - if p.rstrip().endswith('@' + arch): - return p - - return "" + def _prepare_pkg_transaction(self): + os.environ['D'] = self.target_rootfs + os.environ['OFFLINE_ROOT'] = self.target_rootfs + os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs + os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs + os.environ['INTERCEPT_DIR'] = oe.path.join(self.d.getVar('WORKDIR'), + "intercept_scripts") + os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') - ''' - Translate the OE multilib format names to the RPM/Smart format names - It searched the RPM/Smart format names in probable multilib feeds first, - and then searched the default base feed. - ''' - def _pkg_translate_oe_to_smart(self, pkgs, attempt_only=False): - new_pkgs = list() - - for pkg in pkgs: - new_pkg = pkg - # Search new_pkg in probable multilibs first - for mlib in self.ml_prefix_list: - # Jump the default archs - if mlib == 'default': - continue - subst = pkg.replace(mlib + '-', '') - # if the pkg in this multilib feed - if subst != pkg: - feed_archs = self.ml_prefix_list[mlib] - new_pkg = self._search_pkg_name_in_feeds(subst, feed_archs) - if not new_pkg: - # Failed to translate, package not found! - err_msg = '%s not found in the %s feeds (%s) in %s.' % \ - (pkg, mlib, " ".join(feed_archs), self.d.getVar('DEPLOY_DIR_RPM', True)) - if not attempt_only: - bb.error(err_msg) - bb.fatal("This is often caused by an empty package declared " \ - "in a recipe's PACKAGES variable. (Empty packages are " \ - "not constructed unless ALLOW_EMPTY_ = '1' is used.)") - bb.warn(err_msg) - else: - new_pkgs.append(new_pkg) - - break - - # Apparently not a multilib package... - if pkg == new_pkg: - # Search new_pkg in default archs - default_archs = self.ml_prefix_list['default'] - new_pkg = self._search_pkg_name_in_feeds(pkg, default_archs) - if not new_pkg: - err_msg = '%s not found in the feeds (%s) in %s.' % \ - (pkg, " ".join(default_archs), self.d.getVar('DEPLOY_DIR_RPM', True)) - if not attempt_only: - bb.error(err_msg) - bb.fatal("This is often caused by an empty package declared " \ - "in a recipe's PACKAGES variable. 
(Empty packages are " \ - "not constructed unless ALLOW_EMPTY_ = '1' is used.)") - bb.warn(err_msg) - else: - new_pkgs.append(new_pkg) - - return new_pkgs - - def _create_configs(self, platform, platform_extra): - # Setup base system configuration - bb.note("configuring RPM platform settings") - - # Configure internal RPM environment when using Smart - os.environ['RPM_ETCRPM'] = self.etcrpm_dir - bb.utils.mkdirhier(self.etcrpm_dir) - - # Setup temporary directory -- install... - if os.path.exists(self.install_dir_path): - bb.utils.remove(self.install_dir_path, True) - bb.utils.mkdirhier(os.path.join(self.install_dir_path, 'tmp')) - - channel_priority = 5 - platform_dir = os.path.join(self.etcrpm_dir, "platform") - sdkos = self.d.getVar("SDK_OS", True) - with open(platform_dir, "w+") as platform_fd: - platform_fd.write(platform + '\n') - for pt in platform_extra: - channel_priority += 5 - if sdkos: - tmp = re.sub("-%s$" % sdkos, "-%s\n" % sdkos, pt) - tmp = re.sub("-linux.*$", "-linux.*\n", tmp) - platform_fd.write(tmp) - - # Tell RPM that the "/" directory exist and is available - bb.note("configuring RPM system provides") - sysinfo_dir = os.path.join(self.etcrpm_dir, "sysinfo") - bb.utils.mkdirhier(sysinfo_dir) - with open(os.path.join(sysinfo_dir, "Dirnames"), "w+") as dirnames: - dirnames.write("/\n") - - if self.providename: - providename_dir = os.path.join(sysinfo_dir, "Providename") - if not os.path.exists(providename_dir): - providename_content = '\n'.join(self.providename) - providename_content += '\n' - open(providename_dir, "w+").write(providename_content) - - # Configure RPM... we enforce these settings! - bb.note("configuring RPM DB settings") - # After change the __db.* cache size, log file will not be - # generated automatically, that will raise some warnings, - # so touch a bare log for rpm write into it. - rpmlib_log = os.path.join(self.image_rpmlib, 'log', 'log.0000000001') - if not os.path.exists(rpmlib_log): - bb.utils.mkdirhier(os.path.join(self.image_rpmlib, 'log')) - open(rpmlib_log, 'w+').close() - - DB_CONFIG_CONTENT = "# ================ Environment\n" \ - "set_data_dir .\n" \ - "set_create_dir .\n" \ - "set_lg_dir ./log\n" \ - "set_tmp_dir ./tmp\n" \ - "set_flags db_log_autoremove on\n" \ - "\n" \ - "# -- thread_count must be >= 8\n" \ - "set_thread_count 64\n" \ - "\n" \ - "# ================ Logging\n" \ - "\n" \ - "# ================ Memory Pool\n" \ - "set_cachesize 0 1048576 0\n" \ - "set_mp_mmapsize 268435456\n" \ - "\n" \ - "# ================ Locking\n" \ - "set_lk_max_locks 16384\n" \ - "set_lk_max_lockers 16384\n" \ - "set_lk_max_objects 16384\n" \ - "mutex_set_max 163840\n" \ - "\n" \ - "# ================ Replication\n" - - db_config_dir = os.path.join(self.image_rpmlib, 'DB_CONFIG') - if not os.path.exists(db_config_dir): - open(db_config_dir, 'w+').write(DB_CONFIG_CONTENT) - - # Create database so that smart doesn't complain (lazy init) - opt = "-qa" - cmd = "%s --root %s --dbpath /var/lib/rpm %s > /dev/null" % ( - self.rpm_cmd, self.target_rootfs, opt) - try: - subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) - except subprocess.CalledProcessError as e: - bb.fatal("Create rpm database failed. 
Command '%s' " - "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) - # Import GPG key to RPM database of the target system - if self.d.getVar('RPM_SIGN_PACKAGES', True) == '1': - pubkey_path = self.d.getVar('RPM_GPG_PUBKEY', True) - cmd = "%s --root %s --dbpath /var/lib/rpm --import %s > /dev/null" % ( - self.rpm_cmd, self.target_rootfs, pubkey_path) - subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) + def install(self, pkgs, attempt_only = False): + if len(pkgs) == 0: + return + self._prepare_pkg_transaction() - # Configure smart - bb.note("configuring Smart settings") - bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'), - True) - self._invoke_smart('config --set rpm-root=%s' % self.target_rootfs) - self._invoke_smart('config --set rpm-dbpath=/var/lib/rpm') - self._invoke_smart('config --set rpm-extra-macros._var=%s' % - self.d.getVar('localstatedir', True)) - cmd = "config --set rpm-extra-macros._tmppath=/%s/tmp" % (self.install_dir_name) - - prefer_color = self.d.getVar('RPM_PREFER_ELF_ARCH', True) - if prefer_color: - if prefer_color not in ['0', '1', '2', '4']: - bb.fatal("Invalid RPM_PREFER_ELF_ARCH: %s, it should be one of:\n" - "\t1: ELF32 wins\n" - "\t2: ELF64 wins\n" - "\t4: ELF64 N32 wins (mips64 or mips64el only)" % - prefer_color) - if prefer_color == "4" and self.d.getVar("TUNE_ARCH", True) not in \ - ['mips64', 'mips64el']: - bb.fatal("RPM_PREFER_ELF_ARCH = \"4\" is for mips64 or mips64el " - "only.") - self._invoke_smart('config --set rpm-extra-macros._prefer_color=%s' - % prefer_color) - - self._invoke_smart(cmd) - self._invoke_smart('config --set rpm-ignoresize=1') - - # Write common configuration for host and target usage - self._invoke_smart('config --set rpm-nolinktos=1') - self._invoke_smart('config --set rpm-noparentdirs=1') - check_signature = self.d.getVar('RPM_CHECK_SIGNATURES', True) - if check_signature and check_signature.strip() == "0": - self._invoke_smart('config --set rpm-check-signatures=false') - for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split(): - self._invoke_smart('flag --set ignore-recommends %s' % i) - - # Do the following configurations here, to avoid them being - # saved for field upgrade - if self.d.getVar('NO_RECOMMENDATIONS', True).strip() == "1": - self._invoke_smart('config --set ignore-all-recommends=1') - pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or "" - for i in pkg_exclude.split(): - self._invoke_smart('flag --set exclude-packages %s' % i) - - # Optional debugging - # self._invoke_smart('config --set rpm-log-level=debug') - # cmd = 'config --set rpm-log-file=/tmp/smart-debug-logfile' - # self._invoke_smart(cmd) - ch_already_added = [] - for canonical_arch in platform_extra: - arch = canonical_arch.split('-')[0] - arch_channel = os.path.join(self.d.getVar('WORKDIR', True), 'rpms', arch) - oe.path.remove(arch_channel) - deploy_arch_dir = os.path.join(self.deploy_dir, arch) - if not os.path.exists(deploy_arch_dir): - continue + bad_recommendations = self.d.getVar('BAD_RECOMMENDATIONS') + package_exclude = self.d.getVar('PACKAGE_EXCLUDE') + exclude_pkgs = (bad_recommendations.split() if bad_recommendations else []) + (package_exclude.split() if package_exclude else []) - lockfilename = self.d.getVar('DEPLOY_DIR_RPM', True) + "/rpm.lock" - lf = bb.utils.lockfile(lockfilename, False) - oe.path.copyhardlinktree(deploy_arch_dir, arch_channel) - bb.utils.unlockfile(lf) - - if not arch in ch_already_added: - bb.note('Adding Smart channel %s (%s)' % - (arch, channel_priority)) - 
self._invoke_smart('channel --add %s type=rpm-md baseurl=%s -y' - % (arch, arch_channel)) - self._invoke_smart('channel --set %s priority=%d' % - (arch, channel_priority)) - channel_priority -= 5 - - ch_already_added.append(arch) - - bb.note('adding Smart RPM DB channel') - self._invoke_smart('channel --add rpmsys type=rpm-sys -y') - - # Construct install scriptlet wrapper. - # Scripts need to be ordered when executed, this ensures numeric order. - # If we ever run into needing more the 899 scripts, we'll have to. - # change num to start with 1000. - # - scriptletcmd = "$2 $1/$3 $4\n" - scriptpath = "$1/$3" - - # When self.debug_level >= 3, also dump the content of the - # executed scriptlets and how they get invoked. We have to - # replace "exit 1" and "ERR" because printing those as-is - # would trigger a log analysis failure. - if self.debug_level >= 3: - dump_invocation = 'echo "Executing ${name} ${kind} with: ' + scriptletcmd + '"\n' - dump_script = 'cat ' + scriptpath + '| sed -e "s/exit 1/exxxit 1/g" -e "s/ERR/IRR/g"; echo\n' - else: - dump_invocation = 'echo "Executing ${name} ${kind}"\n' - dump_script = '' - - SCRIPTLET_FORMAT = "#!/bin/bash\n" \ - "\n" \ - "export PATH=%s\n" \ - "export D=%s\n" \ - 'export OFFLINE_ROOT="$D"\n' \ - 'export IPKG_OFFLINE_ROOT="$D"\n' \ - 'export OPKG_OFFLINE_ROOT="$D"\n' \ - "export INTERCEPT_DIR=%s\n" \ - "export NATIVE_ROOT=%s\n" \ - "\n" \ - "name=`head -1 " + scriptpath + " | cut -d\' \' -f 2`\n" \ - "kind=`head -1 " + scriptpath + " | cut -d\' \' -f 4`\n" \ - + dump_invocation \ - + dump_script \ - + scriptletcmd + \ - "ret=$?\n" \ - "echo Result of ${name} ${kind}: ${ret}\n" \ - "if [ ${ret} -ne 0 ]; then\n" \ - " if [ $4 -eq 1 ]; then\n" \ - " mkdir -p $1/etc/rpm-postinsts\n" \ - " num=100\n" \ - " while [ -e $1/etc/rpm-postinsts/${num}-* ]; do num=$((num + 1)); done\n" \ - ' echo "#!$2" > $1/etc/rpm-postinsts/${num}-${name}\n' \ - ' echo "# Arg: $4" >> $1/etc/rpm-postinsts/${num}-${name}\n' \ - " cat " + scriptpath + " >> $1/etc/rpm-postinsts/${num}-${name}\n" \ - " chmod +x $1/etc/rpm-postinsts/${num}-${name}\n" \ - ' echo "Info: deferring ${name} ${kind} install scriptlet to first boot"\n' \ - " else\n" \ - ' echo "Error: ${name} ${kind} remove scriptlet failed"\n' \ - " fi\n" \ - "fi\n" - - intercept_dir = self.d.expand('${WORKDIR}/intercept_scripts') - native_root = self.d.getVar('STAGING_DIR_NATIVE', True) - scriptlet_content = SCRIPTLET_FORMAT % (os.environ['PATH'], - self.target_rootfs, - intercept_dir, - native_root) - open(self.scriptlet_wrapper, 'w+').write(scriptlet_content) - - bb.note("configuring RPM cross-install scriptlet_wrapper") - os.chmod(self.scriptlet_wrapper, 0o755) - cmd = 'config --set rpm-extra-macros._cross_scriptlet_wrapper=%s' % \ - self.scriptlet_wrapper - self._invoke_smart(cmd) - - # Debug to show smart config info - # bb.note(self._invoke_smart('config --show')) + output = self._invoke_dnf((["--skip-broken"] if attempt_only else []) + + (["-x", ",".join(exclude_pkgs)] if len(exclude_pkgs) > 0 else []) + + (["--setopt=install_weak_deps=False"] if self.d.getVar('NO_RECOMMENDATIONS') == "1" else []) + + (["--nogpgcheck"] if self.d.getVar('RPM_SIGN_PACKAGES') != '1' else ["--setopt=gpgcheck=True"]) + + ["install"] + + pkgs) - def update(self): - self._invoke_smart('update rpmsys') - - def get_rdepends_recursively(self, pkgs): - # pkgs will be changed during the loop, so use [:] to make a copy. 
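# Illustrative sketch (editor's example, not taken from the patch): the new
# dnf-backed install() above assembles its command line from optional pieces,
# i.e. --skip-broken for attempt-only installs, -x for excluded packages,
# install_weak_deps=False when NO_RECOMMENDATIONS is set, and GPG checking
# keyed off RPM_SIGN_PACKAGES. A minimal standalone version of that assembly,
# with plain booleans standing in for the datastore lookups:
def build_dnf_install_args(pkgs, attempt_only=False, exclude_pkgs=None,
                           no_recommendations=False, sign_packages=False):
    args = []
    if attempt_only:
        args += ["--skip-broken"]
    if exclude_pkgs:
        args += ["-x", ",".join(exclude_pkgs)]
    if no_recommendations:
        args += ["--setopt=install_weak_deps=False"]
    args += ["--setopt=gpgcheck=True"] if sign_packages else ["--nogpgcheck"]
    return args + ["install"] + list(pkgs)

# Example: attempt-only install with two excluded packages.
print(build_dnf_install_args(["busybox", "dropbear"], attempt_only=True,
                             exclude_pkgs=["foo", "bar"]))
# ['--skip-broken', '-x', 'foo,bar', '--nogpgcheck', 'install', 'busybox', 'dropbear']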
- for pkg in pkgs[:]: - sub_data = oe.packagedata.read_subpkgdata(pkg, self.d) - sub_rdep = sub_data.get("RDEPENDS_" + pkg) - if not sub_rdep: - continue - done = list(bb.utils.explode_dep_versions2(sub_rdep).keys()) - next = done - # Find all the rdepends on dependency chain - while next: - new = [] - for sub_pkg in next: - sub_data = oe.packagedata.read_subpkgdata(sub_pkg, self.d) - sub_pkg_rdep = sub_data.get("RDEPENDS_" + sub_pkg) - if not sub_pkg_rdep: - continue - for p in bb.utils.explode_dep_versions2(sub_pkg_rdep): - # Already handled, skip it. - if p in done or p in pkgs: - continue - # It's a new dep - if oe.packagedata.has_subpkgdata(p, self.d): - done.append(p) - new.append(p) - next = new - pkgs.extend(done) - return pkgs + failed_scriptlets_pkgnames = collections.OrderedDict() + for line in output.splitlines(): + if line.startswith("Non-fatal POSTIN scriptlet failure in rpm package"): + failed_scriptlets_pkgnames[line.split()[-1]] = True - ''' - Install pkgs with smart, the pkg name is oe format - ''' - def install(self, pkgs, attempt_only=False): + for pkg in failed_scriptlets_pkgnames.keys(): + self.save_rpmpostinst(pkg) - if not pkgs: - bb.note("There are no packages to install") - return - bb.note("Installing the following packages: %s" % ' '.join(pkgs)) - if not attempt_only: - # Pull in multilib requires since rpm may not pull in them - # correctly, for example, - # lib32-packagegroup-core-standalone-sdk-target requires - # lib32-libc6, but rpm may pull in libc6 rather than lib32-libc6 - # since it doesn't know mlprefix (lib32-), bitbake knows it and - # can handle it well, find out the RDEPENDS on the chain will - # fix the problem. Both do_rootfs and do_populate_sdk have this - # issue. - # The attempt_only packages don't need this since they are - # based on the installed ones. - # - # Separate pkgs into two lists, one is multilib, the other one - # is non-multilib. - ml_pkgs = [] - non_ml_pkgs = pkgs[:] - for pkg in pkgs: - for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split(): - if pkg.startswith(mlib + '-'): - ml_pkgs.append(pkg) - non_ml_pkgs.remove(pkg) - - if len(ml_pkgs) > 0 and len(non_ml_pkgs) > 0: - # Found both foo and lib-foo - ml_pkgs = self.get_rdepends_recursively(ml_pkgs) - non_ml_pkgs = self.get_rdepends_recursively(non_ml_pkgs) - # Longer list makes smart slower, so only keep the pkgs - # which have the same BPN, and smart can handle others - # correctly. 
- pkgs_new = [] - for pkg in non_ml_pkgs: - for mlib in (self.d.getVar("MULTILIB_VARIANTS", True) or "").split(): - mlib_pkg = mlib + "-" + pkg - if mlib_pkg in ml_pkgs: - pkgs_new.append(pkg) - pkgs_new.append(mlib_pkg) - for pkg in pkgs: - if pkg not in pkgs_new: - pkgs_new.append(pkg) - pkgs = pkgs_new - new_depends = {} - deps = bb.utils.explode_dep_versions2(" ".join(pkgs)) - for depend in deps: - data = oe.packagedata.read_subpkgdata(depend, self.d) - key = "PKG_%s" % depend - if key in data: - new_depend = data[key] - else: - new_depend = depend - new_depends[new_depend] = deps[depend] - pkgs = bb.utils.join_deps(new_depends, commasep=True).split(', ') - pkgs = self._pkg_translate_oe_to_smart(pkgs, attempt_only) - if not pkgs: - bb.note("There are no packages to install") + def remove(self, pkgs, with_dependencies = True): + if len(pkgs) == 0: return - if not attempt_only: - bb.note('to be installed: %s' % ' '.join(pkgs)) - cmd = "%s %s install -y %s" % \ - (self.smart_cmd, self.smart_opt, ' '.join(pkgs)) - bb.note(cmd) - else: - bb.note('installing attempt only packages...') - bb.note('Attempting %s' % ' '.join(pkgs)) - cmd = "%s %s install --attempt -y %s" % \ - (self.smart_cmd, self.smart_opt, ' '.join(pkgs)) - try: - output = subprocess.check_output(cmd.split(), stderr=subprocess.STDOUT).decode("utf-8") - bb.note(output) - except subprocess.CalledProcessError as e: - bb.fatal("Unable to install packages. Command '%s' " - "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) + self._prepare_pkg_transaction() - ''' - Remove pkgs with smart, the pkg name is smart/rpm format - ''' - def remove(self, pkgs, with_dependencies=True): - bb.note('to be removed: ' + ' '.join(pkgs)) - - if not with_dependencies: - cmd = "%s -e --nodeps " % self.rpm_cmd - cmd += "--root=%s " % self.target_rootfs - cmd += "--dbpath=/var/lib/rpm " - cmd += "--define='_cross_scriptlet_wrapper %s' " % \ - self.scriptlet_wrapper - cmd += "--define='_tmppath /%s/tmp' %s" % (self.install_dir_name, ' '.join(pkgs)) + if with_dependencies: + self._invoke_dnf(["remove"] + pkgs) else: - # for pkg in pkgs: - # bb.note('Debug: What required: %s' % pkg) - # bb.note(self._invoke_smart('query %s --show-requiredby' % pkg)) - - cmd = "%s %s remove -y %s" % (self.smart_cmd, - self.smart_opt, - ' '.join(pkgs)) + cmd = bb.utils.which(os.getenv('PATH'), "rpm") + args = ["-e", "--nodeps", "--root=%s" %self.target_rootfs] - try: - bb.note(cmd) - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") - bb.note(output) - except subprocess.CalledProcessError as e: - bb.note("Unable to remove packages. Command '%s' " - "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) + try: + output = subprocess.check_output([cmd] + args + pkgs, stderr=subprocess.STDOUT).decode("utf-8") + except subprocess.CalledProcessError as e: + bb.fatal("Could not invoke rpm. 
Command " + "'%s' returned %d:\n%s" % (' '.join([cmd] + args + pkgs), e.returncode, e.output.decode("utf-8"))) def upgrade(self): - bb.note('smart upgrade') - self._invoke_smart('upgrade') - - def write_index(self): - result = self.indexer.write_index() + self._prepare_pkg_transaction() + self._invoke_dnf(["upgrade"]) - if result is not None: - bb.fatal(result) + def autoremove(self): + self._prepare_pkg_transaction() + self._invoke_dnf(["autoremove"]) def remove_packaging_data(self): - bb.utils.remove(self.image_rpmlib, True) - bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'), - True) - bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/opkg'), True) - - # remove temp directory - bb.utils.remove(self.install_dir_path, True) + self._invoke_dnf(["clean", "all"]) + for dir in self.packaging_data_dirs: + bb.utils.remove(oe.path.join(self.target_rootfs, dir), True) def backup_packaging_data(self): - # Save the rpmlib for increment rpm image generation - if os.path.exists(self.saved_rpmlib): - bb.utils.remove(self.saved_rpmlib, True) - shutil.copytree(self.image_rpmlib, - self.saved_rpmlib, - symlinks=True) + # Save the packaging dirs for increment rpm image generation + if os.path.exists(self.saved_packaging_data): + bb.utils.remove(self.saved_packaging_data, True) + for i in self.packaging_data_dirs: + source_dir = oe.path.join(self.target_rootfs, i) + target_dir = oe.path.join(self.saved_packaging_data, i) + shutil.copytree(source_dir, target_dir, symlinks=True) def recovery_packaging_data(self): # Move the rpmlib back - if os.path.exists(self.saved_rpmlib): - if os.path.exists(self.image_rpmlib): - bb.utils.remove(self.image_rpmlib, True) - - bb.note('Recovery packaging data') - shutil.copytree(self.saved_rpmlib, - self.image_rpmlib, + if os.path.exists(self.saved_packaging_data): + for i in self.packaging_data_dirs: + target_dir = oe.path.join(self.target_rootfs, i) + if os.path.exists(target_dir): + bb.utils.remove(target_dir, True) + source_dir = oe.path.join(self.saved_packaging_data, i) + shutil.copytree(source_dir, + target_dir, symlinks=True) def list_installed(self): - return self.pkgs_list.list_pkgs() - - ''' - If incremental install, we need to determine what we've got, - what we need to add, and what to remove... - The dump_install_solution will dump and save the new install - solution. 
- ''' - def dump_install_solution(self, pkgs): - bb.note('creating new install solution for incremental install') - if len(pkgs) == 0: - return - - pkgs = self._pkg_translate_oe_to_smart(pkgs, False) - install_pkgs = list() + output = self._invoke_dnf(["repoquery", "--installed", "--queryformat", "Package: %{name} %{arch} %{version} %{name}-%{version}-%{release}.%{arch}.rpm\nDependencies:\n%{requires}\nRecommendations:\n%{recommends}\nDependenciesEndHere:\n"], + print_output = False) + packages = {} + current_package = None + current_deps = None + current_state = "initial" + for line in output.splitlines(): + if line.startswith("Package:"): + package_info = line.split(" ")[1:] + current_package = package_info[0] + package_arch = package_info[1] + package_version = package_info[2] + package_rpm = package_info[3] + packages[current_package] = {"arch":package_arch, "ver":package_version, "filename":package_rpm} + current_deps = [] + elif line.startswith("Dependencies:"): + current_state = "dependencies" + elif line.startswith("Recommendations"): + current_state = "recommendations" + elif line.startswith("DependenciesEndHere:"): + current_state = "initial" + packages[current_package]["deps"] = current_deps + elif len(line) > 0: + if current_state == "dependencies": + current_deps.append(line) + elif current_state == "recommendations": + current_deps.append("%s [REC]" % line) + + return packages - cmd = "%s %s install -y --dump %s 2>%s" % \ - (self.smart_cmd, - self.smart_opt, - ' '.join(pkgs), - self.solution_manifest) + def update(self): + self._invoke_dnf(["makecache"]) + + def _invoke_dnf(self, dnf_args, fatal = True, print_output = True ): + os.environ['RPM_ETCCONFIGDIR'] = self.target_rootfs + + dnf_cmd = bb.utils.which(os.getenv('PATH'), "dnf") + standard_dnf_args = (["-v", "--rpmverbosity=debug"] if self.d.getVar('ROOTFS_RPM_DEBUG') else []) + ["-y", + "-c", oe.path.join(self.target_rootfs, "etc/dnf/dnf.conf"), + "--setopt=reposdir=%s" %(oe.path.join(self.target_rootfs, "etc/yum.repos.d")), + "--repofrompath=oe-repo,%s" % (self.rpm_repo_dir), + "--installroot=%s" % (self.target_rootfs), + "--setopt=logdir=%s" % (self.d.getVar('T')) + ] + cmd = [dnf_cmd] + standard_dnf_args + dnf_args try: - # Disable rpmsys channel for the fake install - self._invoke_smart('channel --disable rpmsys') - - subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) - with open(self.solution_manifest, 'r') as manifest: - for pkg in manifest.read().split('\n'): - if '@' in pkg: - install_pkgs.append(pkg) + output = subprocess.check_output(cmd,stderr=subprocess.STDOUT).decode("utf-8") + if print_output: + bb.note(output) + return output except subprocess.CalledProcessError as e: - bb.note("Unable to dump install packages. Command '%s' " - "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) - # Recovery rpmsys channel - self._invoke_smart('channel --enable rpmsys') - return install_pkgs + if print_output: + (bb.note, bb.fatal)[fatal]("Could not invoke dnf. Command " + "'%s' returned %d:\n%s" % (' '.join(cmd), e.returncode, e.output.decode("utf-8"))) + else: + (bb.note, bb.fatal)[fatal]("Could not invoke dnf. Command " + "'%s' returned %d:" % (' '.join(cmd), e.returncode)) + return e.output.decode("utf-8") + + def dump_install_solution(self, pkgs): + open(self.solution_manifest, 'w').write(" ".join(pkgs)) + return pkgs - ''' - If incremental install, we need to determine what we've got, - what we need to add, and what to remove... 
- The load_old_install_solution will load the previous install - solution - ''' def load_old_install_solution(self): - bb.note('load old install solution for incremental install') - installed_pkgs = list() if not os.path.exists(self.solution_manifest): - bb.note('old install solution not exist') - return installed_pkgs - - with open(self.solution_manifest, 'r') as manifest: - for pkg in manifest.read().split('\n'): - if '@' in pkg: - installed_pkgs.append(pkg.strip()) - - return installed_pkgs - - ''' - Dump all available packages in feeds, it should be invoked after the - newest rpm index was created - ''' - def dump_all_available_pkgs(self): - available_manifest = self.d.expand('${T}/saved/available_pkgs.txt') - available_pkgs = list() - cmd = "%s %s query --output %s" % \ - (self.smart_cmd, self.smart_opt, available_manifest) - try: - subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) - with open(available_manifest, 'r') as manifest: - for pkg in manifest.read().split('\n'): - if '@' in pkg: - available_pkgs.append(pkg.strip()) - except subprocess.CalledProcessError as e: - bb.note("Unable to list all available packages. Command '%s' " - "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) + return [] - self.fullpkglist = available_pkgs + return open(self.solution_manifest, 'r').read().split() - return + def _script_num_prefix(self, path): + files = os.listdir(path) + numbers = set() + numbers.add(99) + for f in files: + numbers.add(int(f.split("-")[0])) + return max(numbers) + 1 def save_rpmpostinst(self, pkg): - mlibs = (self.d.getVar('MULTILIB_GLOBAL_VARIANTS', False) or "").split() - - new_pkg = pkg - # Remove any multilib prefix from the package name - for mlib in mlibs: - if mlib in pkg: - new_pkg = pkg.replace(mlib + '-', '') - break - - bb.note(' * postponing %s' % new_pkg) - saved_dir = self.target_rootfs + self.d.expand('${sysconfdir}/rpm-postinsts/') + new_pkg - - cmd = self.rpm_cmd + ' -q --scripts --root ' + self.target_rootfs - cmd += ' --dbpath=/var/lib/rpm ' + new_pkg - cmd += ' | sed -n -e "/^postinstall scriptlet (using .*):$/,/^.* scriptlet (using .*):$/ {/.*/p}"' - cmd += ' | sed -e "/postinstall scriptlet (using \(.*\)):$/d"' - cmd += ' -e "/^.* scriptlet (using .*):$/d" > %s' % saved_dir - - try: - bb.note(cmd) - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).strip().decode("utf-8") - bb.note(output) - os.chmod(saved_dir, 0o755) - except subprocess.CalledProcessError as e: - bb.fatal("Invoke save_rpmpostinst failed. Command '%s' " - "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) - - '''Write common configuration for target usage''' - def rpm_setup_smart_target_config(self): - bb.utils.remove(os.path.join(self.target_rootfs, 'var/lib/smart'), - True) - - self._invoke_smart('config --set rpm-nolinktos=1') - self._invoke_smart('config --set rpm-noparentdirs=1') - for i in self.d.getVar('BAD_RECOMMENDATIONS', True).split(): - self._invoke_smart('flag --set ignore-recommends %s' % i) - self._invoke_smart('channel --add rpmsys type=rpm-sys -y') + bb.note("Saving postinstall script of %s" % (pkg)) + cmd = bb.utils.which(os.getenv('PATH'), "rpm") + args = ["-q", "--root=%s" % self.target_rootfs, "--queryformat", "%{postin}", pkg] - ''' - The rpm db lock files were produced after invoking rpm to query on - build system, and they caused the rpm on target didn't work, so we - need to unlock the rpm db by removing the lock files. 
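# Illustrative sketch (editor's example, file names assumed): with dnf the
# incremental-image bookkeeping becomes trivial -- dump_install_solution()
# above just writes the requested package names space-separated to the
# solution manifest, and load_old_install_solution() reads them back,
# replacing the old smart "--dump" machinery. A self-contained round trip:
import os
import tempfile

def dump_install_solution(manifest, pkgs):
    with open(manifest, 'w') as f:
        f.write(" ".join(pkgs))
    return pkgs

def load_old_install_solution(manifest):
    if not os.path.exists(manifest):
        return []
    with open(manifest) as f:
        return f.read().split()

with tempfile.TemporaryDirectory() as tmp:
    manifest = os.path.join(tmp, "installed_pkgs.txt")
    dump_install_solution(manifest, ["base-files", "busybox"])
    print(load_old_install_solution(manifest))   # ['base-files', 'busybox']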
- ''' - def unlock_rpm_db(self): - # Remove rpm db lock files - rpm_db_locks = glob.glob('%s/var/lib/rpm/__db.*' % self.target_rootfs) - for f in rpm_db_locks: - bb.utils.remove(f, True) - - """ - Returns a dictionary with the package info. - """ - def package_info(self, pkg): - cmd = "%s %s info --urls %s" % (self.smart_cmd, self.smart_opt, pkg) try: - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") + output = subprocess.check_output([cmd] + args,stderr=subprocess.STDOUT).decode("utf-8") except subprocess.CalledProcessError as e: - bb.fatal("Unable to list available packages. Command '%s' " - "returned %d:\n%s" % (cmd, e.returncode, e.output.decode("utf-8"))) + bb.fatal("Could not invoke rpm. Command " + "'%s' returned %d:\n%s" % (' '.join([cmd] + args), e.returncode, e.output.decode("utf-8"))) - # Set default values to avoid UnboundLocalError - arch = "" - ver = "" - filename = "" - - #Parse output - for line in output.splitlines(): - line = line.rstrip() - if line.startswith("Name:"): - pkg = line.split(": ")[1] - elif line.startswith("Version:"): - tmp_str = line.split(": ")[1] - ver, arch = tmp_str.split("@") - break - - # Get filename - index = re.search("^URLs", output, re.MULTILINE) - tmp_str = output[index.end():] - for line in tmp_str.splitlines(): - if "/" in line: - line = line.lstrip() - filename = line.split(" ")[0] - break - - # To have the same data type than other package_info methods - filepath = os.path.join(self.deploy_dir, arch, filename) - pkg_dict = {} - pkg_dict[pkg] = {"arch":arch, "ver":ver, "filename":filename, - "filepath": filepath} - - return pkg_dict - - """ - Returns the path to a tmpdir where resides the contents of a package. + # may need to prepend #!/bin/sh to output - Deleting the tmpdir is responsability of the caller. + target_path = oe.path.join(self.target_rootfs, self.d.expand('${sysconfdir}/rpm-postinsts/')) + bb.utils.mkdirhier(target_path) + num = self._script_num_prefix(target_path) + saved_script_name = oe.path.join(target_path, "%d-%s" % (num, pkg)) + open(saved_script_name, 'w').write(output) + os.chmod(saved_script_name, 0o755) - """ def extract(self, pkg): - pkg_info = self.package_info(pkg) - if not pkg_info: - bb.fatal("Unable to get information for package '%s' while " - "trying to extract the package." 
% pkg) - - pkg_path = pkg_info[pkg]["filepath"] + output = self._invoke_dnf(["repoquery", "--queryformat", "%{location}", pkg]) + pkg_name = output.splitlines()[-1] + if not pkg_name.endswith(".rpm"): + bb.fatal("dnf could not find package %s in repository: %s" %(pkg, output)) + pkg_path = oe.path.join(self.rpm_repo_dir, pkg_name) cpio_cmd = bb.utils.which(os.getenv("PATH"), "cpio") rpm2cpio_cmd = bb.utils.which(os.getenv("PATH"), "rpm2cpio") @@ -1548,20 +825,24 @@ class OpkgDpkgPM(PackageManager): tmp_dir = tempfile.mkdtemp() current_dir = os.getcwd() os.chdir(tmp_dir) + if self.d.getVar('IMAGE_PKGTYPE') == 'deb': + data_tar = 'data.tar.xz' + else: + data_tar = 'data.tar.gz' try: - cmd = "%s x %s" % (ar_cmd, pkg_path) - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) - cmd = "%s xf data.tar.*" % tar_cmd - output = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) + cmd = [ar_cmd, 'x', pkg_path] + output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) + cmd = [tar_cmd, 'xf', data_tar] + output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: bb.utils.remove(tmp_dir, recurse=True) bb.fatal("Unable to extract %s package. Command '%s' " - "returned %d:\n%s" % (pkg_path, cmd, e.returncode, e.output.decode("utf-8"))) + "returned %d:\n%s" % (pkg_path, ' '.join(cmd), e.returncode, e.output.decode("utf-8"))) except OSError as e: bb.utils.remove(tmp_dir, recurse=True) bb.fatal("Unable to extract %s package. Command '%s' " - "returned %d:\n%s at %s" % (pkg_path, cmd, e.errno, e.strerror, e.filename)) + "returned %d:\n%s at %s" % (pkg_path, ' '.join(cmd), e.errno, e.strerror, e.filename)) bb.note("Extracted %s to %s" % (pkg_path, tmp_dir)) bb.utils.remove(os.path.join(tmp_dir, "debian-binary")) @@ -1580,13 +861,13 @@ class OpkgPM(OpkgDpkgPM): self.pkg_archs = archs self.task_name = task_name - self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK", True) + self.deploy_dir = self.d.getVar("DEPLOY_DIR_IPK") self.deploy_lock_file = os.path.join(self.deploy_dir, "deploy.lock") self.opkg_cmd = bb.utils.which(os.getenv('PATH'), "opkg") self.opkg_args = "--volatile-cache -f %s -t %s -o %s " % (self.config_file, self.d.expand('${T}/ipktemp/') ,target_rootfs) - self.opkg_args += self.d.getVar("OPKG_ARGS", True) + self.opkg_args += self.d.getVar("OPKG_ARGS") - opkg_lib_dir = self.d.getVar('OPKGLIBDIR', True) + opkg_lib_dir = self.d.getVar('OPKGLIBDIR') if opkg_lib_dir[0] == "/": opkg_lib_dir = opkg_lib_dir[1:] @@ -1598,7 +879,7 @@ class OpkgPM(OpkgDpkgPM): if not os.path.exists(self.d.expand('${T}/saved')): bb.utils.mkdirhier(self.d.expand('${T}/saved')) - self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") == "1" + self.from_feeds = (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") == "1" if self.from_feeds: self._create_custom_config() else: @@ -1643,7 +924,7 @@ class OpkgPM(OpkgDpkgPM): config_file.write("arch %s %d\n" % (arch, priority)) priority += 5 - for line in (self.d.getVar('IPK_FEED_URIS', True) or "").split(): + for line in (self.d.getVar('IPK_FEED_URIS') or "").split(): feed_match = re.match("^[ \t]*(.*)##([^ \t]*)[ \t]*$", line) if feed_match is not None: @@ -1660,29 +941,29 @@ class OpkgPM(OpkgDpkgPM): specified as compatible for the current machine. NOTE: Development-helper feature, NOT a full-fledged feed. 
""" - if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True) or "") != "": + if (self.d.getVar('FEED_DEPLOYDIR_BASE_URI') or "") != "": for arch in self.pkg_archs.split(): cfg_file_name = os.path.join(self.target_rootfs, - self.d.getVar("sysconfdir", True), + self.d.getVar("sysconfdir"), "opkg", "local-%s-feed.conf" % arch) with open(cfg_file_name, "w+") as cfg_file: cfg_file.write("src/gz local-%s %s/%s" % (arch, - self.d.getVar('FEED_DEPLOYDIR_BASE_URI', True), + self.d.getVar('FEED_DEPLOYDIR_BASE_URI'), arch)) - if self.d.getVar('OPKGLIBDIR', True) != '/var/lib': + if self.d.getVar('OPKGLIBDIR') != '/var/lib': # There is no command line option for this anymore, we need to add # info_dir and status_file to config file, if OPKGLIBDIR doesn't have # the default value of "/var/lib" as defined in opkg: # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_LISTS_DIR VARDIR "/lib/opkg/lists" # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR VARDIR "/lib/opkg/info" # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE VARDIR "/lib/opkg/status" - cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info')) - cfg_file.write("option lists_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'lists')) - cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status')) + cfg_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info')) + cfg_file.write("option lists_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'lists')) + cfg_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status')) def _create_config(self): @@ -1700,33 +981,33 @@ class OpkgPM(OpkgDpkgPM): config_file.write("src oe-%s file:%s\n" % (arch, pkgs_dir)) - if self.d.getVar('OPKGLIBDIR', True) != '/var/lib': + if self.d.getVar('OPKGLIBDIR') != '/var/lib': # There is no command line option for this anymore, we need to add # info_dir and status_file to config file, if OPKGLIBDIR doesn't have # the default value of "/var/lib" as defined in opkg: # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_LISTS_DIR VARDIR "/lib/opkg/lists" # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_INFO_DIR VARDIR "/lib/opkg/info" # libopkg/opkg_conf.h:#define OPKG_CONF_DEFAULT_STATUS_FILE VARDIR "/lib/opkg/status" - config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'info')) - config_file.write("option lists_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'lists')) - config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR', True), 'opkg', 'status')) + config_file.write("option info_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'info')) + config_file.write("option lists_dir %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'lists')) + config_file.write("option status_file %s\n" % os.path.join(self.d.getVar('OPKGLIBDIR'), 'opkg', 'status')) - def insert_feeds_uris(self): - if self.feed_uris == "": + def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs): + if feed_uris == "": return rootfs_config = os.path.join('%s/etc/opkg/base-feeds.conf' % self.target_rootfs) - feed_uris = self.construct_uris(self.feed_uris.split(), self.feed_base_paths.split()) - archs = self.pkg_archs.split() if self.feed_archs is None else self.feed_archs.split() + feed_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split()) + archs = 
self.pkg_archs.split() if feed_archs is None else feed_archs.split() with open(rootfs_config, "w+") as config_file: uri_iterator = 0 for uri in feed_uris: if archs: for arch in archs: - if (self.feed_archs is None) and (not os.path.exists(os.path.join(self.deploy_dir, arch))): + if (feed_archs is None) and (not os.path.exists(oe.path.join(self.deploy_dir, arch))): continue bb.note('Adding opkg feed url-%s-%d (%s)' % (arch, uri_iterator, uri)) @@ -1764,9 +1045,9 @@ class OpkgPM(OpkgDpkgPM): os.environ['OFFLINE_ROOT'] = self.target_rootfs os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs - os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True), + os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") - os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True) + os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') try: bb.note("Installing the following packages: %s" % ' '.join(pkgs)) @@ -1817,7 +1098,7 @@ class OpkgPM(OpkgDpkgPM): return OpkgPkgsList(self.d, self.target_rootfs, self.config_file).list_pkgs() def handle_bad_recommendations(self): - bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS", True) or "" + bad_recommendations = self.d.getVar("BAD_RECOMMENDATIONS") or "" if bad_recommendations.strip() == "": return @@ -1871,7 +1152,7 @@ class OpkgPM(OpkgDpkgPM): bb.utils.mkdirhier(temp_opkg_dir) opkg_args = "-f %s -o %s " % (self.config_file, temp_rootfs) - opkg_args += self.d.getVar("OPKG_ARGS", True) + opkg_args += self.d.getVar("OPKG_ARGS") cmd = "%s %s update" % (self.opkg_cmd, opkg_args) try: @@ -1947,7 +1228,7 @@ class DpkgPM(OpkgDpkgPM): def __init__(self, d, target_rootfs, archs, base_archs, apt_conf_dir=None): super(DpkgPM, self).__init__(d) self.target_rootfs = target_rootfs - self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB', True) + self.deploy_dir = self.d.getVar('DEPLOY_DIR_DEB') if apt_conf_dir is None: self.apt_conf_dir = self.d.expand("${APTCONF_TARGET}/apt") else: @@ -1956,10 +1237,10 @@ class DpkgPM(OpkgDpkgPM): self.apt_get_cmd = bb.utils.which(os.getenv('PATH'), "apt-get") self.apt_cache_cmd = bb.utils.which(os.getenv('PATH'), "apt-cache") - self.apt_args = d.getVar("APT_ARGS", True) + self.apt_args = d.getVar("APT_ARGS") self.all_arch_list = archs.split() - all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").split() + all_mlb_pkg_arch_list = (self.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS') or "").split() self.all_arch_list.extend(arch for arch in all_mlb_pkg_arch_list if arch not in self.all_arch_list) self._create_configs(archs, base_archs) @@ -2000,7 +1281,10 @@ class DpkgPM(OpkgDpkgPM): """ def run_pre_post_installs(self, package_name=None): info_dir = self.target_rootfs + "/var/lib/dpkg/info" - suffixes = [(".preinst", "Preinstall"), (".postinst", "Postinstall")] + ControlScript = collections.namedtuple("ControlScript", ["suffix", "name", "argument"]) + control_scripts = [ + ControlScript(".preinst", "Preinstall", "install"), + ControlScript(".postinst", "Postinstall", "configure")] status_file = self.target_rootfs + "/var/lib/dpkg/status" installed_pkgs = [] @@ -2017,22 +1301,25 @@ class DpkgPM(OpkgDpkgPM): os.environ['OFFLINE_ROOT'] = self.target_rootfs os.environ['IPKG_OFFLINE_ROOT'] = self.target_rootfs os.environ['OPKG_OFFLINE_ROOT'] = self.target_rootfs - os.environ['INTERCEPT_DIR'] = os.path.join(self.d.getVar('WORKDIR', True), + os.environ['INTERCEPT_DIR'] = 
os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") - os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE', True) + os.environ['NATIVE_ROOT'] = self.d.getVar('STAGING_DIR_NATIVE') failed_pkgs = [] for pkg_name in installed_pkgs: - for suffix in suffixes: - p_full = os.path.join(info_dir, pkg_name + suffix[0]) + for control_script in control_scripts: + p_full = os.path.join(info_dir, pkg_name + control_script.suffix) if os.path.exists(p_full): try: bb.note("Executing %s for package: %s ..." % - (suffix[1].lower(), pkg_name)) - subprocess.check_output(p_full, stderr=subprocess.STDOUT) + (control_script.name.lower(), pkg_name)) + output = subprocess.check_output([p_full, control_script.argument], + stderr=subprocess.STDOUT).decode("utf-8") + bb.note(output) except subprocess.CalledProcessError as e: bb.note("%s for package %s failed with %d:\n%s" % - (suffix[1], pkg_name, e.returncode, e.output.decode("utf-8"))) + (control_script.name, pkg_name, e.returncode, + e.output.decode("utf-8"))) failed_pkgs.append(pkg_name) break @@ -2112,23 +1399,23 @@ class DpkgPM(OpkgDpkgPM): if result is not None: bb.fatal(result) - def insert_feeds_uris(self): - if self.feed_uris == "": + def insert_feeds_uris(self, feed_uris, feed_base_paths, feed_archs): + if feed_uris == "": return sources_conf = os.path.join("%s/etc/apt/sources.list" % self.target_rootfs) arch_list = [] - if self.feed_archs is None: + if feed_archs is None: for arch in self.all_arch_list: if not os.path.exists(os.path.join(self.deploy_dir, arch)): continue arch_list.append(arch) else: - arch_list = self.feed_archs.split() + arch_list = feed_archs.split() - feed_uris = self.construct_uris(self.feed_uris.split(), self.feed_base_paths.split()) + feed_uris = self.construct_uris(feed_uris.split(), feed_base_paths.split()) with open(sources_conf, "w+") as sources_file: for uri in feed_uris: @@ -2168,7 +1455,7 @@ class DpkgPM(OpkgDpkgPM): priority += 5 - pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE', True) or "" + pkg_exclude = self.d.getVar('PACKAGE_EXCLUDE') or "" for pkg in pkg_exclude.split(): prefs_file.write( "Package: %s\n" @@ -2183,14 +1470,13 @@ class DpkgPM(OpkgDpkgPM): os.path.join(self.deploy_dir, arch)) base_arch_list = base_archs.split() - multilib_variants = self.d.getVar("MULTILIB_VARIANTS", True); + multilib_variants = self.d.getVar("MULTILIB_VARIANTS"); for variant in multilib_variants.split(): localdata = bb.data.createCopy(self.d) variant_tune = localdata.getVar("DEFAULTTUNE_virtclass-multilib-" + variant, False) - orig_arch = localdata.getVar("DPKG_ARCH", True) + orig_arch = localdata.getVar("DPKG_ARCH") localdata.setVar("DEFAULTTUNE", variant_tune) - bb.data.update_data(localdata) - variant_arch = localdata.getVar("DPKG_ARCH", True) + variant_arch = localdata.getVar("DPKG_ARCH") if variant_arch not in base_arch_list: base_arch_list.append(variant_arch) @@ -2221,7 +1507,7 @@ class DpkgPM(OpkgDpkgPM): def remove_packaging_data(self): bb.utils.remove(os.path.join(self.target_rootfs, - self.d.getVar('opkglibdir', True)), True) + self.d.getVar('opkglibdir')), True) bb.utils.remove(self.target_rootfs + "/var/lib/dpkg/", True) def fix_broken_dependencies(self): @@ -2269,12 +1555,12 @@ class DpkgPM(OpkgDpkgPM): return tmp_dir def generate_index_files(d): - classes = d.getVar('PACKAGE_CLASSES', True).replace("package_", "").split() + classes = d.getVar('PACKAGE_CLASSES').replace("package_", "").split() indexer_map = { - "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM', True)), - "ipk": (OpkgIndexer, 
d.getVar('DEPLOY_DIR_IPK', True)), - "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB', True)) + "rpm": (RpmIndexer, d.getVar('DEPLOY_DIR_RPM')), + "ipk": (OpkgIndexer, d.getVar('DEPLOY_DIR_IPK')), + "deb": (DpkgIndexer, d.getVar('DEPLOY_DIR_DEB')) } result = None diff --git a/import-layers/yocto-poky/meta/lib/oe/packagedata.py b/import-layers/yocto-poky/meta/lib/oe/packagedata.py index 21d4de914..32e5c82a9 100644 --- a/import-layers/yocto-poky/meta/lib/oe/packagedata.py +++ b/import-layers/yocto-poky/meta/lib/oe/packagedata.py @@ -57,7 +57,7 @@ def read_subpkgdata_dict(pkg, d): def _pkgmap(d): """Return a dictionary mapping package to recipe name.""" - pkgdatadir = d.getVar("PKGDATA_DIR", True) + pkgdatadir = d.getVar("PKGDATA_DIR") pkgmap = {} try: diff --git a/import-layers/yocto-poky/meta/lib/oe/packagegroup.py b/import-layers/yocto-poky/meta/lib/oe/packagegroup.py index 97819279b..4bc5d3e4b 100644 --- a/import-layers/yocto-poky/meta/lib/oe/packagegroup.py +++ b/import-layers/yocto-poky/meta/lib/oe/packagegroup.py @@ -1,17 +1,17 @@ import itertools def is_optional(feature, d): - packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True) + packages = d.getVar("FEATURE_PACKAGES_%s" % feature) if packages: - return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional", True)) + return bool(d.getVarFlag("FEATURE_PACKAGES_%s" % feature, "optional")) else: - return bool(d.getVarFlag("PACKAGE_GROUP_%s" % feature, "optional", True)) + return bool(d.getVarFlag("PACKAGE_GROUP_%s" % feature, "optional")) def packages(features, d): for feature in features: - packages = d.getVar("FEATURE_PACKAGES_%s" % feature, True) + packages = d.getVar("FEATURE_PACKAGES_%s" % feature) if not packages: - packages = d.getVar("PACKAGE_GROUP_%s" % feature, True) + packages = d.getVar("PACKAGE_GROUP_%s" % feature) for pkg in (packages or "").split(): yield pkg diff --git a/import-layers/yocto-poky/meta/lib/oe/patch.py b/import-layers/yocto-poky/meta/lib/oe/patch.py index 0332f100f..f1ab3dd80 100644 --- a/import-layers/yocto-poky/meta/lib/oe/patch.py +++ b/import-layers/yocto-poky/meta/lib/oe/patch.py @@ -81,7 +81,7 @@ class PatchSet(object): patch[param] = PatchSet.defaults[param] if patch.get("remote"): - patch["file"] = bb.data.expand(bb.fetch2.localpath(patch["remote"], self.d), self.d) + patch["file"] = self.d.expand(bb.fetch2.localpath(patch["remote"], self.d)) patch["filemd5"] = bb.utils.md5_file(patch["file"]) @@ -281,8 +281,8 @@ class GitApplyTree(PatchTree): def __init__(self, dir, d): PatchTree.__init__(self, dir, d) - self.commituser = d.getVar('PATCH_GIT_USER_NAME', True) - self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL', True) + self.commituser = d.getVar('PATCH_GIT_USER_NAME') + self.commitemail = d.getVar('PATCH_GIT_USER_EMAIL') @staticmethod def extractPatchHeader(patchfile): @@ -371,8 +371,8 @@ class GitApplyTree(PatchTree): @staticmethod def gitCommandUserOptions(cmd, commituser=None, commitemail=None, d=None): if d: - commituser = d.getVar('PATCH_GIT_USER_NAME', True) - commitemail = d.getVar('PATCH_GIT_USER_EMAIL', True) + commituser = d.getVar('PATCH_GIT_USER_NAME') + commitemail = d.getVar('PATCH_GIT_USER_EMAIL') if commituser: cmd += ['-c', 'user.name="%s"' % commituser] if commitemail: @@ -428,6 +428,7 @@ class GitApplyTree(PatchTree): def extractPatches(tree, startcommit, outdir, paths=None): import tempfile import shutil + import re tempdir = tempfile.mkdtemp(prefix='oepatch') try: shellcmd = ["git", "format-patch", startcommit, "-o", tempdir] @@ -443,10 +444,13 @@ class 
GitApplyTree(PatchTree): try: with open(srcfile, 'r', encoding=encoding) as f: for line in f: - if line.startswith(GitApplyTree.patch_line_prefix): + checkline = line + if checkline.startswith('Subject: '): + checkline = re.sub(r'\[.+?\]\s*', '', checkline[9:]) + if checkline.startswith(GitApplyTree.patch_line_prefix): outfile = line.split()[-1].strip() continue - if line.startswith(GitApplyTree.ignore_commit_prefix): + if checkline.startswith(GitApplyTree.ignore_commit_prefix): continue patchlines.append(line) except UnicodeDecodeError: @@ -547,7 +551,7 @@ class GitApplyTree(PatchTree): class QuiltTree(PatchSet): def _runcmd(self, args, run = True): - quiltrc = self.d.getVar('QUILTRCFILE', True) + quiltrc = self.d.getVar('QUILTRCFILE') if not run: return ["quilt"] + ["--quiltrc"] + [quiltrc] + args runcmd(["quilt"] + ["--quiltrc"] + [quiltrc] + args, self.dir) @@ -723,7 +727,7 @@ class UserResolver(Resolver): # Patch application failed patchcmd = self.patchset.Push(True, False, False) - t = self.patchset.d.getVar('T', True) + t = self.patchset.d.getVar('T') if not t: bb.msg.fatal("Build", "T not set") bb.utils.mkdirhier(t) @@ -765,3 +769,110 @@ class UserResolver(Resolver): os.chdir(olddir) raise os.chdir(olddir) + + +def patch_path(url, fetch, workdir, expand=True): + """Return the local path of a patch, or None if this isn't a patch""" + + local = fetch.localpath(url) + base, ext = os.path.splitext(os.path.basename(local)) + if ext in ('.gz', '.bz2', '.xz', '.Z'): + if expand: + local = os.path.join(workdir, base) + ext = os.path.splitext(base)[1] + + urldata = fetch.ud[url] + if "apply" in urldata.parm: + apply = oe.types.boolean(urldata.parm["apply"]) + if not apply: + return + elif ext not in (".diff", ".patch"): + return + + return local + +def src_patches(d, all=False, expand=True): + workdir = d.getVar('WORKDIR') + fetch = bb.fetch2.Fetch([], d) + patches = [] + sources = [] + for url in fetch.urls: + local = patch_path(url, fetch, workdir, expand) + if not local: + if all: + local = fetch.localpath(url) + sources.append(local) + continue + + urldata = fetch.ud[url] + parm = urldata.parm + patchname = parm.get('pname') or os.path.basename(local) + + apply, reason = should_apply(parm, d) + if not apply: + if reason: + bb.note("Patch %s %s" % (patchname, reason)) + continue + + patchparm = {'patchname': patchname} + if "striplevel" in parm: + striplevel = parm["striplevel"] + elif "pnum" in parm: + #bb.msg.warn(None, "Deprecated usage of 'pnum' url parameter in '%s', please use 'striplevel'" % url) + striplevel = parm["pnum"] + else: + striplevel = '1' + patchparm['striplevel'] = striplevel + + patchdir = parm.get('patchdir') + if patchdir: + patchparm['patchdir'] = patchdir + + localurl = bb.fetch.encodeurl(('file', '', local, '', '', patchparm)) + patches.append(localurl) + + if all: + return sources + + return patches + + +def should_apply(parm, d): + if "mindate" in parm or "maxdate" in parm: + pn = d.getVar('PN') + srcdate = d.getVar('SRCDATE_%s' % pn) + if not srcdate: + srcdate = d.getVar('SRCDATE') + + if srcdate == "now": + srcdate = d.getVar('DATE') + + if "maxdate" in parm and parm["maxdate"] < srcdate: + return False, 'is outdated' + + if "mindate" in parm and parm["mindate"] > srcdate: + return False, 'is predated' + + + if "minrev" in parm: + srcrev = d.getVar('SRCREV') + if srcrev and srcrev < parm["minrev"]: + return False, 'applies to later revisions' + + if "maxrev" in parm: + srcrev = d.getVar('SRCREV') + if srcrev and srcrev > parm["maxrev"]: + return False, 
'applies to earlier revisions' + + if "rev" in parm: + srcrev = d.getVar('SRCREV') + if srcrev and parm["rev"] not in srcrev: + return False, "doesn't apply to revision" + + if "notrev" in parm: + srcrev = d.getVar('SRCREV') + if srcrev and parm["notrev"] in srcrev: + return False, "doesn't apply to revision" + + return True, None + diff --git a/import-layers/yocto-poky/meta/lib/oe/path.py b/import-layers/yocto-poky/meta/lib/oe/path.py index ed7fd1eef..448a2b944 100644 --- a/import-layers/yocto-poky/meta/lib/oe/path.py +++ b/import-layers/yocto-poky/meta/lib/oe/path.py @@ -50,9 +50,30 @@ def make_relative_symlink(path): os.remove(path) os.symlink(base, path) +def replace_absolute_symlinks(basedir, d): + """ + Walk basedir looking for absolute symlinks and replacing them with relative ones. + The absolute links are assumed to be relative to basedir + (compared to make_relative_symlink above which tries to compute common ancestors + using pattern matching instead) + """ + for walkroot, dirs, files in os.walk(basedir): + for file in files + dirs: + path = os.path.join(walkroot, file) + if not os.path.islink(path): + continue + link = os.readlink(path) + if not os.path.isabs(link): + continue + walkdir = os.path.dirname(path.rpartition(basedir)[2]) + base = os.path.relpath(link, walkdir) + bb.debug(2, "Replacing absolute path %s with relative path %s" % (link, base)) + os.remove(path) + os.symlink(base, path) + def format_display(path, metadata): """ Prepare a path for display to the user. """ - rel = relative(metadata.getVar("TOPDIR", True), path) + rel = relative(metadata.getVar("TOPDIR"), path) if len(rel) > len(path): return path else: @@ -81,7 +102,6 @@ def copyhardlinktree(src, dst): subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT) source = '' if os.path.isdir(src): - import glob if len(glob.glob('%s/.??*' % src)) > 0: source = './.??* ' source += './*' @@ -95,7 +115,14 @@ def copyhardlinktree(src, dst): copytree(src, dst) def remove(path, recurse=True): - """Equivalent to rm -f or rm -rf""" + """ + Equivalent to rm -f or rm -rf + NOTE: be careful about passing paths that may contain filenames with + wildcards in them (as opposed to passing an actual wildcarded path) - + since we use glob.glob() to expand the path. Filenames containing + square brackets are particularly problematic since the they may not + actually expand to match the original filename. 
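# Illustrative sketch of the NOTE above (editor's example): oe.path.remove()
# expands its argument with glob.glob(), so a literal filename containing
# square brackets is silently treated as a character class and may match a
# different file (or nothing) instead of itself. glob.escape() shows the
# difference on a throwaway directory:
import glob
import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    for name in ("file[1].txt", "file1.txt"):
        open(os.path.join(tmp, name), "w").close()
    # The brackets act as a wildcard: this matches file1.txt, not file[1].txt.
    print(glob.glob(os.path.join(tmp, "file[1].txt")))
    # Escaping the metacharacters matches the literal name instead.
    print(glob.glob(glob.escape(os.path.join(tmp, "file[1].txt"))))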
+ """ for name in glob.glob(path): try: os.unlink(name) diff --git a/import-layers/yocto-poky/meta/lib/oe/prservice.py b/import-layers/yocto-poky/meta/lib/oe/prservice.py index 0054f954c..32dfc15e8 100644 --- a/import-layers/yocto-poky/meta/lib/oe/prservice.py +++ b/import-layers/yocto-poky/meta/lib/oe/prservice.py @@ -1,7 +1,7 @@ def prserv_make_conn(d, check = False): import prserv.serv - host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f]) + host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) try: conn = None conn = prserv.serv.PRServerConnection(host_params[0], int(host_params[1])) @@ -15,11 +15,11 @@ def prserv_make_conn(d, check = False): return conn def prserv_dump_db(d): - if not d.getVar('PRSERV_HOST', True): + if not d.getVar('PRSERV_HOST'): bb.error("Not using network based PR service") return None - conn = d.getVar("__PRSERV_CONN", True) + conn = d.getVar("__PRSERV_CONN") if conn is None: conn = prserv_make_conn(d) if conn is None: @@ -27,18 +27,18 @@ def prserv_dump_db(d): return None #dump db - opt_version = d.getVar('PRSERV_DUMPOPT_VERSION', True) - opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH', True) - opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM', True) - opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL', True)) + opt_version = d.getVar('PRSERV_DUMPOPT_VERSION') + opt_pkgarch = d.getVar('PRSERV_DUMPOPT_PKGARCH') + opt_checksum = d.getVar('PRSERV_DUMPOPT_CHECKSUM') + opt_col = ("1" == d.getVar('PRSERV_DUMPOPT_COL')) return conn.export(opt_version, opt_pkgarch, opt_checksum, opt_col) def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None): - if not d.getVar('PRSERV_HOST', True): + if not d.getVar('PRSERV_HOST'): bb.error("Not using network based PR service") return None - conn = d.getVar("__PRSERV_CONN", True) + conn = d.getVar("__PRSERV_CONN") if conn is None: conn = prserv_make_conn(d) if conn is None: @@ -58,7 +58,7 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu (filter_checksum and filter_checksum != checksum): continue try: - value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum, True)) + value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum)) except BaseException as exc: bb.debug("Not valid value of %s:%s" % (v,str(exc))) continue @@ -72,8 +72,8 @@ def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksu def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): import bb.utils #initilize the output file - bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR', True)) - df = d.getVar('PRSERV_DUMPFILE', True) + bb.utils.mkdirhier(d.getVar('PRSERV_DUMPDIR')) + df = d.getVar('PRSERV_DUMPFILE') #write data lf = bb.utils.lockfile("%s.lock" % df) f = open(df, "a") @@ -114,7 +114,7 @@ def prserv_export_tofile(d, metainfo, datainfo, lockdown, nomax=False): bb.utils.unlockfile(lf) def prserv_check_avail(d): - host_params = list([_f for _f in (d.getVar("PRSERV_HOST", True) or '').split(':') if _f]) + host_params = list([_f for _f in (d.getVar("PRSERV_HOST") or '').split(':') if _f]) try: if len(host_params) != 2: raise TypeError diff --git a/import-layers/yocto-poky/meta/lib/oe/qa.py b/import-layers/yocto-poky/meta/lib/oe/qa.py index 22d76dcbc..3231e60ce 100644 --- a/import-layers/yocto-poky/meta/lib/oe/qa.py +++ b/import-layers/yocto-poky/meta/lib/oe/qa.py @@ -129,11 +129,11 @@ class ELFFile: if cmd in self.objdump_output: return self.objdump_output[cmd] - objdump = 
d.getVar('OBJDUMP', True) + objdump = d.getVar('OBJDUMP') env = os.environ.copy() env["LC_ALL"] = "C" - env["PATH"] = d.getVar('PATH', True) + env["PATH"] = d.getVar('PATH') try: bb.note("%s %s %s" % (objdump, cmd, self.name)) diff --git a/import-layers/yocto-poky/meta/lib/oe/recipeutils.py b/import-layers/yocto-poky/meta/lib/oe/recipeutils.py index 58e4028ae..a7fdd36e4 100644 --- a/import-layers/yocto-poky/meta/lib/oe/recipeutils.py +++ b/import-layers/yocto-poky/meta/lib/oe/recipeutils.py @@ -29,18 +29,9 @@ meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION'] def pn_to_recipe(cooker, pn, mc=''): """Convert a recipe name (PN) to the path to the recipe file""" - import bb.providers - - if pn in cooker.recipecaches[mc].pkg_pn: - best = bb.providers.findBestProvider(pn, cooker.data, cooker.recipecaches[mc], cooker.recipecaches[mc].pkg_pn) - return best[3] - elif pn in cooker.recipecaches[mc].providers: - filenames = cooker.recipecaches[mc].providers[pn] - eligible, foundUnique = bb.providers.filterProviders(filenames, pn, cooker.expanded_data, cooker.recipecaches[mc]) - filename = eligible[0] - return filename - else: - return None + + best = cooker.findBestProvider(pn, mc) + return best[3] def get_unavailable_reasons(cooker, pn): @@ -61,28 +52,6 @@ def parse_recipe(cooker, fn, appendfiles): return envdata -def parse_recipe_simple(cooker, pn, d, appends=True): - """ - Parse a recipe and optionally all bbappends that apply to it - in the current configuration. - """ - import bb.providers - - recipefile = pn_to_recipe(cooker, pn) - if not recipefile: - skipreasons = get_unavailable_reasons(cooker, pn) - # We may as well re-use bb.providers.NoProvider here - if skipreasons: - raise bb.providers.NoProvider(skipreasons) - else: - raise bb.providers.NoProvider('Unable to find any recipe file matching %s' % pn) - if appends: - appendfiles = cooker.collection.get_file_appends(recipefile) - else: - appendfiles = None - return parse_recipe(cooker, recipefile, appendfiles) - - def get_var_files(fn, varlist, d): """Find the file in which each of a list of variables is set. Note: requires variable history to be enabled when parsing. 
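# Illustrative sketch (a toy stand-in, not bitbake's real DataSmart class): the
# change that recurs throughout this patch -- d.getVar('X', True) becoming
# d.getVar('X') -- works because the datastore now expands by default, so the
# shorter call is behaviourally identical:
class FakeDataStore:
    """Toy datastore where expand=True is the default, as in current bitbake."""
    def __init__(self, values):
        self.values = values

    def getVar(self, name, expand=True):
        value = self.values.get(name)
        if value is None or not expand:
            return value
        # Minimal ${VAR} expansion, just enough to show the equivalence.
        for key, val in self.values.items():
            value = value.replace("${%s}" % key, val)
        return value

d = FakeDataStore({"TOPDIR": "/build", "DEPLOY_DIR": "${TOPDIR}/deploy"})
print(d.getVar("DEPLOY_DIR", True))   # old spelling: '/build/deploy'
print(d.getVar("DEPLOY_DIR"))         # new spelling, same result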
@@ -359,16 +328,16 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True): # FIXME need a warning if the unexpanded SRC_URI value contains variable references - uris = (d.getVar('SRC_URI', True) or "").split() + uris = (d.getVar('SRC_URI') or "").split() fetch = bb.fetch2.Fetch(uris, d) if download: fetch.download() # Copy local files to target directory and gather any remote files - bb_dir = os.path.dirname(d.getVar('FILE', True)) + os.sep + bb_dir = os.path.dirname(d.getVar('FILE')) + os.sep remotes = [] copied = [] - includes = [path for path in d.getVar('BBINCLUDED', True).split() if + includes = [path for path in d.getVar('BBINCLUDED').split() if path.startswith(bb_dir) and os.path.exists(path)] for path in fetch.localpaths() + includes: # Only import files that are under the meta directory @@ -389,15 +358,21 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True): return copied, remotes -def get_recipe_local_files(d, patches=False): +def get_recipe_local_files(d, patches=False, archives=False): """Get a list of local files in SRC_URI within a recipe.""" - uris = (d.getVar('SRC_URI', True) or "").split() + import oe.patch + uris = (d.getVar('SRC_URI') or "").split() fetch = bb.fetch2.Fetch(uris, d) + # FIXME this list should be factored out somewhere else (such as the + # fetcher) though note that this only encompasses actual container formats + # i.e. that can contain multiple files as opposed to those that only + # contain a compressed stream (i.e. .tar.gz as opposed to just .gz) + archive_exts = ['.tar', '.tgz', '.tar.gz', '.tar.Z', '.tbz', '.tbz2', '.tar.bz2', '.tar.xz', '.tar.lz', '.zip', '.jar', '.rpm', '.srpm', '.deb', '.ipk', '.tar.7z', '.7z'] ret = {} for uri in uris: if fetch.ud[uri].type == 'file': if (not patches and - bb.utils.exec_flat_python_func('patch_path', uri, fetch, '')): + oe.patch.patch_path(uri, fetch, '', expand=False)): continue # Skip files that are referenced by absolute path fname = fetch.ud[uri].basepath @@ -409,16 +384,22 @@ def get_recipe_local_files(d, patches=False): if os.path.isabs(subdir): continue fname = os.path.join(subdir, fname) - ret[fname] = fetch.localpath(uri) + localpath = fetch.localpath(uri) + if not archives: + # Ignore archives that will be unpacked + if localpath.endswith(tuple(archive_exts)): + unpack = fetch.ud[uri].parm.get('unpack', True) + if unpack: + continue + ret[fname] = localpath return ret def get_recipe_patches(d): """Get a list of the patches included in SRC_URI within a recipe.""" + import oe.patch + patches = oe.patch.src_patches(d, expand=False) patchfiles = [] - # Execute src_patches() defined in patch.bbclass - this works since that class - # is inherited globally - patches = bb.utils.exec_flat_python_func('src_patches', d) for patch in patches: _, _, local, _, _, parm = bb.fetch.decodeurl(patch) patchfiles.append(local) @@ -435,14 +416,12 @@ def get_recipe_patched_files(d): change mode ('A' for add, 'D' for delete or 'M' for modify) """ import oe.patch - # Execute src_patches() defined in patch.bbclass - this works since that class - # is inherited globally - patches = bb.utils.exec_flat_python_func('src_patches', d) + patches = oe.patch.src_patches(d, expand=False) patchedfiles = {} for patch in patches: _, _, patchfile, _, _, parm = bb.fetch.decodeurl(patch) striplevel = int(parm['striplevel']) - patchedfiles[patchfile] = oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S', True), parm.get('patchdir', ''))) + patchedfiles[patchfile] = 
oe.patch.PatchSet.getPatchedFiles(patchfile, striplevel, os.path.join(d.getVar('S'), parm.get('patchdir', ''))) return patchedfiles @@ -480,9 +459,9 @@ def get_bbfile_path(d, destdir, extrapathhint=None): confdata.setVar('LAYERDIR', destlayerdir) destlayerconf = os.path.join(destlayerdir, "conf", "layer.conf") confdata = bb.cookerdata.parse_config_file(destlayerconf, confdata) - pn = d.getVar('PN', True) + pn = d.getVar('PN') - bbfilespecs = (confdata.getVar('BBFILES', True) or '').split() + bbfilespecs = (confdata.getVar('BBFILES') or '').split() if destdir == destlayerdir: for bbfilespec in bbfilespecs: if not bbfilespec.endswith('.bbappend'): @@ -495,8 +474,8 @@ def get_bbfile_path(d, destdir, extrapathhint=None): # Try to make up a path that matches BBFILES # this is a little crude, but better than nothing - bpn = d.getVar('BPN', True) - recipefn = os.path.basename(d.getVar('FILE', True)) + bpn = d.getVar('BPN') + recipefn = os.path.basename(d.getVar('FILE')) pathoptions = [destdir] if extrapathhint: pathoptions.append(os.path.join(destdir, extrapathhint)) @@ -520,7 +499,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False): import bb.cookerdata destlayerdir = os.path.abspath(destlayerdir) - recipefile = d.getVar('FILE', True) + recipefile = d.getVar('FILE') recipefn = os.path.splitext(os.path.basename(recipefile))[0] if wildcardver and '_' in recipefn: recipefn = recipefn.split('_', 1)[0] + '_%' @@ -540,7 +519,7 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False): appendpath = os.path.join(destlayerdir, os.path.relpath(os.path.dirname(recipefile), origlayerdir), appendfn) closepath = '' pathok = True - for bbfilespec in confdata.getVar('BBFILES', True).split(): + for bbfilespec in confdata.getVar('BBFILES').split(): if fnmatch.fnmatchcase(appendpath, bbfilespec): # Our append path works, we're done break @@ -613,7 +592,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, # FIXME check if the bbappend doesn't get overridden by a higher priority layer? - layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS', True).split()] + layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()] if not os.path.abspath(destlayerdir) in layerdirs: bb.warn('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active') @@ -649,7 +628,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, else: bbappendlines.append((varname, op, value)) - destsubdir = rd.getVar('PN', True) + destsubdir = rd.getVar('PN') if srcfiles: bbappendlines.append(('FILESEXTRAPATHS_prepend', ':=', '${THISDIR}/${PN}:')) @@ -668,7 +647,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, srcurientry = 'file://%s' % srcfile # Double-check it's not there already # FIXME do we care if the entry is added by another bbappend that might go away? 
- if not srcurientry in rd.getVar('SRC_URI', True).split(): + if not srcurientry in rd.getVar('SRC_URI').split(): if machine: appendline('SRC_URI_append%s' % appendoverride, '=', ' ' + srcurientry) else: @@ -786,7 +765,11 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, for newfile, srcfile in copyfiles.items(): filedest = os.path.join(appenddir, destsubdir, os.path.basename(srcfile)) if os.path.abspath(newfile) != os.path.abspath(filedest): - bb.note('Copying %s to %s' % (newfile, filedest)) + if newfile.startswith(tempfile.gettempdir()): + newfiledisp = os.path.basename(newfile) + else: + newfiledisp = newfile + bb.note('Copying %s to %s' % (newfiledisp, filedest)) bb.utils.mkdirhier(os.path.dirname(filedest)) shutil.copyfile(newfile, filedest) @@ -813,7 +796,7 @@ def replace_dir_vars(path, d): # Sort by length so we get the variables we're interested in first for var in sorted(list(d.keys()), key=len): if var.endswith('dir') and var.lower() == var: - value = d.getVar(var, True) + value = d.getVar(var) if value.startswith('/') and not '\n' in value and value not in dirvars: dirvars[value] = var for dirpath in sorted(list(dirvars.keys()), reverse=True): @@ -867,12 +850,12 @@ def get_recipe_upstream_version(rd): ru['type'] = 'U' ru['datetime'] = '' - pv = rd.getVar('PV', True) + pv = rd.getVar('PV') # XXX: If don't have SRC_URI means that don't have upstream sources so # returns the current recipe version, so that upstream version check # declares a match. - src_uris = rd.getVar('SRC_URI', True) + src_uris = rd.getVar('SRC_URI') if not src_uris: ru['version'] = pv ru['type'] = 'M' @@ -883,13 +866,13 @@ def get_recipe_upstream_version(rd): src_uri = src_uris.split()[0] uri_type, _, _, _, _, _ = decodeurl(src_uri) - manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION", True) + manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION") if manual_upstream_version: # manual tracking of upstream version. ru['version'] = manual_upstream_version ru['type'] = 'M' - manual_upstream_date = rd.getVar("CHECK_DATE", True) + manual_upstream_date = rd.getVar("CHECK_DATE") if manual_upstream_date: date = datetime.strptime(manual_upstream_date, "%b %d, %Y") else: diff --git a/import-layers/yocto-poky/meta/lib/oe/rootfs.py b/import-layers/yocto-poky/meta/lib/oe/rootfs.py index f96788399..96591f370 100644 --- a/import-layers/yocto-poky/meta/lib/oe/rootfs.py +++ b/import-layers/yocto-poky/meta/lib/oe/rootfs.py @@ -15,12 +15,13 @@ class Rootfs(object, metaclass=ABCMeta): This is an abstract class. Do not instantiate this directly. 
""" - def __init__(self, d, progress_reporter=None): + def __init__(self, d, progress_reporter=None, logcatcher=None): self.d = d self.pm = None - self.image_rootfs = self.d.getVar('IMAGE_ROOTFS', True) - self.deploydir = self.d.getVar('IMGDEPLOYDIR', True) + self.image_rootfs = self.d.getVar('IMAGE_ROOTFS') + self.deploydir = self.d.getVar('IMGDEPLOYDIR') self.progress_reporter = progress_reporter + self.logcatcher = logcatcher self.install_order = Manifest.INSTALL_ORDER @@ -53,6 +54,8 @@ class Rootfs(object, metaclass=ABCMeta): messages = [] with open(log_path, 'r') as log: for line in log: + if self.logcatcher and self.logcatcher.contains(line.rstrip()): + continue for ee in excludes: m = ee.search(line) if m: @@ -69,7 +72,7 @@ class Rootfs(object, metaclass=ABCMeta): else: msg = '%d %s messages' % (len(messages), type) msg = '[log_check] %s: found %s in the logfile:\n%s' % \ - (self.d.getVar('PN', True), msg, ''.join(messages)) + (self.d.getVar('PN'), msg, ''.join(messages)) if type == 'error': bb.fatal(msg) else: @@ -84,7 +87,10 @@ class Rootfs(object, metaclass=ABCMeta): def _insert_feed_uris(self): if bb.utils.contains("IMAGE_FEATURES", "package-management", True, False, self.d): - self.pm.insert_feeds_uris() + self.pm.insert_feeds_uris(self.d.getVar('PACKAGE_FEED_URIS') or "", + self.d.getVar('PACKAGE_FEED_BASE_PATHS') or "", + self.d.getVar('PACKAGE_FEED_ARCHS')) + @abstractmethod def _handle_intercept_failure(self, failed_script): @@ -100,7 +106,7 @@ class Rootfs(object, metaclass=ABCMeta): pass def _setup_dbg_rootfs(self, dirs): - gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS', True) or '0' + gen_debugfs = self.d.getVar('IMAGE_GEN_DEBUGFS') or '0' if gen_debugfs != '1': return @@ -153,7 +159,7 @@ class Rootfs(object, metaclass=ABCMeta): os.rename(self.image_rootfs + '-orig', self.image_rootfs) def _exec_shell_cmd(self, cmd): - fakerootcmd = self.d.getVar('FAKEROOT', True) + fakerootcmd = self.d.getVar('FAKEROOT') if fakerootcmd is not None: exec_cmd = [fakerootcmd, cmd] else: @@ -168,14 +174,14 @@ class Rootfs(object, metaclass=ABCMeta): def create(self): bb.note("###### Generate rootfs #######") - pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND", True) - post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND", True) - rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND', True) + pre_process_cmds = self.d.getVar("ROOTFS_PREPROCESS_COMMAND") + post_process_cmds = self.d.getVar("ROOTFS_POSTPROCESS_COMMAND") + rootfs_post_install_cmds = self.d.getVar('ROOTFS_POSTINSTALL_COMMAND') - postinst_intercepts_dir = self.d.getVar("POSTINST_INTERCEPTS_DIR", True) + postinst_intercepts_dir = self.d.getVar("POSTINST_INTERCEPTS_DIR") if not postinst_intercepts_dir: postinst_intercepts_dir = self.d.expand("${COREBASE}/scripts/postinst-intercepts") - intercepts_dir = os.path.join(self.d.getVar('WORKDIR', True), + intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") bb.utils.remove(intercepts_dir, True) @@ -194,10 +200,10 @@ class Rootfs(object, metaclass=ABCMeta): # call the package manager dependent create method self._create() - sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir', True) + sysconfdir = self.image_rootfs + self.d.getVar('sysconfdir') bb.utils.mkdirhier(sysconfdir) with open(sysconfdir + "/version", "w+") as ver: - ver.write(self.d.getVar('BUILDNAME', True) + "\n") + ver.write(self.d.getVar('BUILDNAME') + "\n") execute_pre_post_process(self.d, rootfs_post_install_cmds) @@ -216,7 +222,7 @@ class Rootfs(object, 
metaclass=ABCMeta): "offline and rootfs is read-only: %s" % delayed_postinsts) - if self.d.getVar('USE_DEVFS', True) != "1": + if self.d.getVar('USE_DEVFS') != "1": self._create_devfs() self._uninstall_unneeded() @@ -228,7 +234,7 @@ class Rootfs(object, metaclass=ABCMeta): self._run_ldconfig() - if self.d.getVar('USE_DEPMOD', True) != "0": + if self.d.getVar('USE_DEPMOD') != "0": self._generate_kernel_module_deps() self._cleanup() @@ -244,18 +250,23 @@ class Rootfs(object, metaclass=ABCMeta): if delayed_postinsts is None: if os.path.exists(self.d.expand("${IMAGE_ROOTFS}${sysconfdir}/init.d/run-postinsts")): self._exec_shell_cmd(["update-rc.d", "-f", "-r", - self.d.getVar('IMAGE_ROOTFS', True), + self.d.getVar('IMAGE_ROOTFS'), "run-postinsts", "remove"]) image_rorfs = bb.utils.contains("IMAGE_FEATURES", "read-only-rootfs", True, False, self.d) - image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE', True) + image_rorfs_force = self.d.getVar('FORCE_RO_REMOVE') if image_rorfs or image_rorfs_force == "1": # Remove components that we don't need if it's a read-only rootfs - unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED", True).split() + unneeded_pkgs = self.d.getVar("ROOTFS_RO_UNNEEDED").split() pkgs_installed = image_list_installed_packages(self.d) - pkgs_to_remove = [pkg for pkg in pkgs_installed if pkg in unneeded_pkgs] + # Make sure update-alternatives is last on the command line, so + # that it is removed last. This makes sure that its database is + # available while uninstalling packages, allowing alternative + # symlinks of packages to be uninstalled to be managed correctly. + provider = self.d.getVar("VIRTUAL-RUNTIME_update-alternatives") + pkgs_to_remove = sorted([pkg for pkg in pkgs_installed if pkg in unneeded_pkgs], key=lambda x: x == provider) if len(pkgs_to_remove) > 0: self.pm.remove(pkgs_to_remove, False) @@ -266,7 +277,7 @@ class Rootfs(object, metaclass=ABCMeta): bb.warn("There are post install scripts " "in a read-only rootfs") - post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND", True) + post_uninstall_cmds = self.d.getVar("ROOTFS_POSTUNINSTALL_COMMAND") execute_pre_post_process(self.d, post_uninstall_cmds) runtime_pkgmanage = bb.utils.contains("IMAGE_FEATURES", "package-management", @@ -276,12 +287,12 @@ class Rootfs(object, metaclass=ABCMeta): self.pm.remove_packaging_data() def _run_intercepts(self): - intercepts_dir = os.path.join(self.d.getVar('WORKDIR', True), + intercepts_dir = os.path.join(self.d.getVar('WORKDIR'), "intercept_scripts") bb.note("Running intercept scripts:") os.environ['D'] = self.image_rootfs - os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE', True) + os.environ['STAGING_DIR_NATIVE'] = self.d.getVar('STAGING_DIR_NATIVE') for script in os.listdir(intercepts_dir): script_full = os.path.join(intercepts_dir, script) @@ -291,10 +302,10 @@ class Rootfs(object, metaclass=ABCMeta): bb.note("> Executing %s intercept ..." % script) try: - subprocess.check_call(script_full) + subprocess.check_output(script_full) except subprocess.CalledProcessError as e: - bb.warn("The postinstall intercept hook '%s' failed (exit code: %d)! See log for details!" % - (script, e.returncode)) + bb.warn("The postinstall intercept hook '%s' failed (exit code: %d)! See log for details! 
(Output: %s)" % + (script, e.returncode, e.output)) with open(script_full) as intercept: registered_pkgs = None @@ -313,7 +324,7 @@ class Rootfs(object, metaclass=ABCMeta): self._handle_intercept_failure(registered_pkgs) def _run_ldconfig(self): - if self.d.getVar('LDCONFIGDEPEND', True): + if self.d.getVar('LDCONFIGDEPEND'): bb.note("Executing: ldconfig -r" + self.image_rootfs + "-c new -v") self._exec_shell_cmd(['ldconfig', '-r', self.image_rootfs, '-c', 'new', '-v']) @@ -333,7 +344,7 @@ class Rootfs(object, metaclass=ABCMeta): bb.note("No Kernel Modules found, not running depmod") return - kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR', True), "kernel-depmod", + kernel_abi_ver_file = oe.path.join(self.d.getVar('PKGDATA_DIR'), "kernel-depmod", 'kernel-abiversion') if not os.path.exists(kernel_abi_ver_file): bb.fatal("No kernel-abiversion file found (%s), cannot run depmod, aborting" % kernel_abi_ver_file) @@ -355,15 +366,15 @@ class Rootfs(object, metaclass=ABCMeta): """ def _create_devfs(self): devtable_list = [] - devtable = self.d.getVar('IMAGE_DEVICE_TABLE', True) + devtable = self.d.getVar('IMAGE_DEVICE_TABLE') if devtable is not None: devtable_list.append(devtable) else: - devtables = self.d.getVar('IMAGE_DEVICE_TABLES', True) + devtables = self.d.getVar('IMAGE_DEVICE_TABLES') if devtables is None: devtables = 'files/device_table-minimal.txt' for devtable in devtables.split(): - devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH', True), devtable)) + devtable_list.append("%s" % bb.utils.which(self.d.getVar('BBPATH'), devtable)) for devtable in devtable_list: self._exec_shell_cmd(["makedevs", "-r", @@ -371,24 +382,24 @@ class Rootfs(object, metaclass=ABCMeta): class RpmRootfs(Rootfs): - def __init__(self, d, manifest_dir, progress_reporter=None): - super(RpmRootfs, self).__init__(d, progress_reporter) + def __init__(self, d, manifest_dir, progress_reporter=None, logcatcher=None): + super(RpmRootfs, self).__init__(d, progress_reporter, logcatcher) self.log_check_regex = '(unpacking of archive failed|Cannot find package'\ '|exit 1|ERROR: |Error: |Error |ERROR '\ '|Failed |Failed: |Failed$|Failed\(\d+\):)' self.manifest = RpmManifest(d, manifest_dir) self.pm = RpmPM(d, - d.getVar('IMAGE_ROOTFS', True), - self.d.getVar('TARGET_VENDOR', True) + d.getVar('IMAGE_ROOTFS'), + self.d.getVar('TARGET_VENDOR') ) - self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN', True) + self.inc_rpm_image_gen = self.d.getVar('INC_RPM_IMAGE_GEN') if self.inc_rpm_image_gen != "1": bb.utils.remove(self.image_rootfs, True) else: self.pm.recovery_packaging_data() - bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) + bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) self.pm.create_configs() @@ -420,10 +431,12 @@ class RpmRootfs(Rootfs): bb.note('incremental removed: %s' % ' '.join(pkg_to_remove)) self.pm.remove(pkg_to_remove) + self.pm.autoremove() + def _create(self): pkgs_to_install = self.manifest.parse_initial_manifest() - rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS', True) - rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS', True) + rpm_pre_process_cmds = self.d.getVar('RPM_PREPROCESS_COMMANDS') + rpm_post_process_cmds = self.d.getVar('RPM_POSTPROCESS_COMMANDS') # update PM index files self.pm.write_index() @@ -433,8 +446,6 @@ class RpmRootfs(Rootfs): if self.progress_reporter: self.progress_reporter.next_stage() - self.pm.dump_all_available_pkgs() - if self.inc_rpm_image_gen == "1": 
self._create_incremental(pkgs_to_install) @@ -469,15 +480,13 @@ class RpmRootfs(Rootfs): if self.progress_reporter: self.progress_reporter.next_stage() - self._setup_dbg_rootfs(['/etc/rpm', '/var/lib/rpm', '/var/lib/smart']) + self._setup_dbg_rootfs(['/etc', '/var/lib/rpm', '/var/cache/dnf', '/var/lib/dnf']) execute_pre_post_process(self.d, rpm_post_process_cmds) if self.inc_rpm_image_gen == "1": self.pm.backup_packaging_data() - self.pm.rpm_setup_smart_target_config() - if self.progress_reporter: self.progress_reporter.next_stage() @@ -515,19 +524,11 @@ class RpmRootfs(Rootfs): self.pm.save_rpmpostinst(pkg) def _cleanup(self): - # during the execution of postprocess commands, rpm is called several - # times to get the files installed, dependencies, etc. This creates the - # __db.00* (Berkeley DB files that hold locks, rpm specific environment - # settings, etc.), that should not get into the final rootfs - self.pm.unlock_rpm_db() - if os.path.isdir(self.pm.install_dir_path + "/tmp") and not os.listdir(self.pm.install_dir_path + "/tmp"): - bb.utils.remove(self.pm.install_dir_path + "/tmp", True) - if os.path.isdir(self.pm.install_dir_path) and not os.listdir(self.pm.install_dir_path): - bb.utils.remove(self.pm.install_dir_path, True) + pass class DpkgOpkgRootfs(Rootfs): - def __init__(self, d, progress_reporter=None): - super(DpkgOpkgRootfs, self).__init__(d, progress_reporter) + def __init__(self, d, progress_reporter=None, logcatcher=None): + super(DpkgOpkgRootfs, self).__init__(d, progress_reporter, logcatcher) def _get_pkgs_postinsts(self, status_file): def _get_pkg_depends_list(pkg_depends): @@ -594,7 +595,7 @@ class DpkgOpkgRootfs(Rootfs): pkg_list = [] pkgs = None - if not self.d.getVar('PACKAGE_INSTALL', True).strip(): + if not self.d.getVar('PACKAGE_INSTALL').strip(): bb.note("Building empty image") else: pkgs = self._get_pkgs_postinsts(status_file) @@ -621,8 +622,8 @@ class DpkgOpkgRootfs(Rootfs): num += 1 class DpkgRootfs(DpkgOpkgRootfs): - def __init__(self, d, manifest_dir, progress_reporter=None): - super(DpkgRootfs, self).__init__(d, progress_reporter) + def __init__(self, d, manifest_dir, progress_reporter=None, logcatcher=None): + super(DpkgRootfs, self).__init__(d, progress_reporter, logcatcher) self.log_check_regex = '^E:' self.log_check_expected_regexes = \ [ @@ -630,17 +631,17 @@ class DpkgRootfs(DpkgOpkgRootfs): ] bb.utils.remove(self.image_rootfs, True) - bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) + bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) self.manifest = DpkgManifest(d, manifest_dir) - self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS', True), - d.getVar('PACKAGE_ARCHS', True), - d.getVar('DPKG_ARCH', True)) + self.pm = DpkgPM(d, d.getVar('IMAGE_ROOTFS'), + d.getVar('PACKAGE_ARCHS'), + d.getVar('DPKG_ARCH')) def _create(self): pkgs_to_install = self.manifest.parse_initial_manifest() - deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS', True) - deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS', True) + deb_pre_process_cmds = self.d.getVar('DEB_PREPROCESS_COMMANDS') + deb_post_process_cmds = self.d.getVar('DEB_POSTPROCESS_COMMANDS') alt_dir = self.d.expand("${IMAGE_ROOTFS}/var/lib/dpkg/alternatives") bb.utils.mkdirhier(alt_dir) @@ -713,15 +714,15 @@ class DpkgRootfs(DpkgOpkgRootfs): class OpkgRootfs(DpkgOpkgRootfs): - def __init__(self, d, manifest_dir, progress_reporter=None): - super(OpkgRootfs, self).__init__(d, progress_reporter) + def __init__(self, d, manifest_dir, progress_reporter=None, 
logcatcher=None): + super(OpkgRootfs, self).__init__(d, progress_reporter, logcatcher) self.log_check_regex = '(exit 1|Collected errors)' self.manifest = OpkgManifest(d, manifest_dir) - self.opkg_conf = self.d.getVar("IPKGCONF_TARGET", True) - self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True) + self.opkg_conf = self.d.getVar("IPKGCONF_TARGET") + self.pkg_archs = self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS") - self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN', True) or "" + self.inc_opkg_image_gen = self.d.getVar('INC_IPK_IMAGE_GEN') or "" if self._remove_old_rootfs(): bb.utils.remove(self.image_rootfs, True) self.pm = OpkgPM(d, @@ -735,7 +736,7 @@ class OpkgRootfs(DpkgOpkgRootfs): self.pkg_archs) self.pm.recover_packaging_data() - bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS', True), True) + bb.utils.remove(self.d.getVar('MULTILIB_TEMP_ROOTFS'), True) def _prelink_file(self, root_dir, filename): bb.note('prelink %s in %s' % (filename, root_dir)) @@ -790,7 +791,7 @@ class OpkgRootfs(DpkgOpkgRootfs): """ def _multilib_sanity_test(self, dirs): - allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP", True) + allow_replace = self.d.getVar("MULTILIBRE_ALLOW_REP") if allow_replace is None: allow_replace = "" @@ -822,12 +823,12 @@ class OpkgRootfs(DpkgOpkgRootfs): files[key] = item def _multilib_test_install(self, pkgs): - ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS", True) + ml_temp = self.d.getVar("MULTILIB_TEMP_ROOTFS") bb.utils.mkdirhier(ml_temp) dirs = [self.image_rootfs] - for variant in self.d.getVar("MULTILIB_VARIANTS", True).split(): + for variant in self.d.getVar("MULTILIB_VARIANTS").split(): ml_target_rootfs = os.path.join(ml_temp, variant) bb.utils.remove(ml_target_rootfs, True) @@ -887,9 +888,9 @@ class OpkgRootfs(DpkgOpkgRootfs): old_vars_list = open(vars_list_file, 'r+').read() new_vars_list = '%s:%s:%s\n' % \ - ((self.d.getVar('BAD_RECOMMENDATIONS', True) or '').strip(), - (self.d.getVar('NO_RECOMMENDATIONS', True) or '').strip(), - (self.d.getVar('PACKAGE_EXCLUDE', True) or '').strip()) + ((self.d.getVar('BAD_RECOMMENDATIONS') or '').strip(), + (self.d.getVar('NO_RECOMMENDATIONS') or '').strip(), + (self.d.getVar('PACKAGE_EXCLUDE') or '').strip()) open(vars_list_file, 'w+').write(new_vars_list) if old_vars_list != new_vars_list: @@ -899,11 +900,11 @@ class OpkgRootfs(DpkgOpkgRootfs): def _create(self): pkgs_to_install = self.manifest.parse_initial_manifest() - opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS', True) - opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS', True) + opkg_pre_process_cmds = self.d.getVar('OPKG_PREPROCESS_COMMANDS') + opkg_post_process_cmds = self.d.getVar('OPKG_POSTPROCESS_COMMANDS') # update PM index files, unless users provide their own feeds - if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1": + if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1": self.pm.write_index() execute_pre_post_process(self.d, opkg_pre_process_cmds) @@ -945,9 +946,9 @@ class OpkgRootfs(DpkgOpkgRootfs): if self.progress_reporter: self.progress_reporter.next_stage() - opkg_lib_dir = self.d.getVar('OPKGLIBDIR', True) + opkg_lib_dir = self.d.getVar('OPKGLIBDIR') opkg_dir = os.path.join(opkg_lib_dir, 'opkg') - self._setup_dbg_rootfs(['/etc', opkg_dir, '/usr/lib/ssl']) + self._setup_dbg_rootfs([opkg_dir]) execute_pre_post_process(self.d, opkg_post_process_cmds) @@ -963,7 +964,7 @@ class OpkgRootfs(DpkgOpkgRootfs): def _get_delayed_postinsts(self): status_file = os.path.join(self.image_rootfs, - 
self.d.getVar('OPKGLIBDIR', True).strip('/'), + self.d.getVar('OPKGLIBDIR').strip('/'), "opkg", "status") return self._get_delayed_postinsts_common(status_file) @@ -988,20 +989,20 @@ def get_class_for_type(imgtype): "deb": DpkgRootfs}[imgtype] def variable_depends(d, manifest_dir=None): - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') cls = get_class_for_type(img_type) return cls._depends_list() -def create_rootfs(d, manifest_dir=None, progress_reporter=None): +def create_rootfs(d, manifest_dir=None, progress_reporter=None, logcatcher=None): env_bkp = os.environ.copy() - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": - RpmRootfs(d, manifest_dir, progress_reporter).create() + RpmRootfs(d, manifest_dir, progress_reporter, logcatcher).create() elif img_type == "ipk": - OpkgRootfs(d, manifest_dir, progress_reporter).create() + OpkgRootfs(d, manifest_dir, progress_reporter, logcatcher).create() elif img_type == "deb": - DpkgRootfs(d, manifest_dir, progress_reporter).create() + DpkgRootfs(d, manifest_dir, progress_reporter, logcatcher).create() os.environ.clear() os.environ.update(env_bkp) @@ -1009,13 +1010,13 @@ def create_rootfs(d, manifest_dir=None, progress_reporter=None): def image_list_installed_packages(d, rootfs_dir=None): if not rootfs_dir: - rootfs_dir = d.getVar('IMAGE_ROOTFS', True) + rootfs_dir = d.getVar('IMAGE_ROOTFS') - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": return RpmPkgsList(d, rootfs_dir).list_pkgs() elif img_type == "ipk": - return OpkgPkgsList(d, rootfs_dir, d.getVar("IPKGCONF_TARGET", True)).list_pkgs() + return OpkgPkgsList(d, rootfs_dir, d.getVar("IPKGCONF_TARGET")).list_pkgs() elif img_type == "deb": return DpkgPkgsList(d, rootfs_dir).list_pkgs() diff --git a/import-layers/yocto-poky/meta/lib/oe/sdk.py b/import-layers/yocto-poky/meta/lib/oe/sdk.py index c74525f92..9fe1687ac 100644 --- a/import-layers/yocto-poky/meta/lib/oe/sdk.py +++ b/import-layers/yocto-poky/meta/lib/oe/sdk.py @@ -11,16 +11,16 @@ import traceback class Sdk(object, metaclass=ABCMeta): def __init__(self, d, manifest_dir): self.d = d - self.sdk_output = self.d.getVar('SDK_OUTPUT', True) - self.sdk_native_path = self.d.getVar('SDKPATHNATIVE', True).strip('/') - self.target_path = self.d.getVar('SDKTARGETSYSROOT', True).strip('/') - self.sysconfdir = self.d.getVar('sysconfdir', True).strip('/') + self.sdk_output = self.d.getVar('SDK_OUTPUT') + self.sdk_native_path = self.d.getVar('SDKPATHNATIVE').strip('/') + self.target_path = self.d.getVar('SDKTARGETSYSROOT').strip('/') + self.sysconfdir = self.d.getVar('sysconfdir').strip('/') self.sdk_target_sysroot = os.path.join(self.sdk_output, self.target_path) self.sdk_host_sysroot = self.sdk_output if manifest_dir is None: - self.manifest_dir = self.d.getVar("SDK_DIR", True) + self.manifest_dir = self.d.getVar("SDK_DIR") else: self.manifest_dir = manifest_dir @@ -40,12 +40,12 @@ class Sdk(object, metaclass=ABCMeta): # Don't ship any libGL in the SDK self.remove(os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('libdir_nativesdk', True).strip('/'), + self.d.getVar('libdir_nativesdk').strip('/'), "libGL*")) # Fix or remove broken .la files self.remove(os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('libdir_nativesdk', True).strip('/'), + self.d.getVar('libdir_nativesdk').strip('/'), "*.la")) # Link the ld.so.cache file into the hosts filesystem @@ -54,7 +54,7 @@ class 
Sdk(object, metaclass=ABCMeta): self.mkdirhier(os.path.dirname(link_name)) os.symlink("/etc/ld.so.cache", link_name) - execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND', True)) + execute_pre_post_process(self.d, self.d.getVar('SDK_POSTPROCESS_COMMAND')) def movefile(self, sourcefile, destdir): try: @@ -85,7 +85,7 @@ class Sdk(object, metaclass=ABCMeta): bb.warn("cannot remove SDK dir: %s" % path) class RpmSdk(Sdk): - def __init__(self, d, manifest_dir=None): + def __init__(self, d, manifest_dir=None, rpm_workdir="oe-sdk-repo"): super(RpmSdk, self).__init__(d, manifest_dir) self.target_manifest = RpmManifest(d, self.manifest_dir, @@ -100,11 +100,17 @@ class RpmSdk(Sdk): 'pkgconfig' ] + rpm_repo_workdir = "oe-sdk-repo" + if "sdk_ext" in d.getVar("BB_RUNTASK"): + rpm_repo_workdir = "oe-sdk-ext-repo" + + self.target_pm = RpmPM(d, self.sdk_target_sysroot, - self.d.getVar('TARGET_VENDOR', True), + self.d.getVar('TARGET_VENDOR'), 'target', - target_providename + target_providename, + rpm_repo_workdir=rpm_repo_workdir ) sdk_providename = ['/bin/sh', @@ -118,11 +124,12 @@ class RpmSdk(Sdk): self.host_pm = RpmPM(d, self.sdk_host_sysroot, - self.d.getVar('SDK_VENDOR', True), + self.d.getVar('SDK_VENDOR'), 'host', sdk_providename, "SDK_PACKAGE_ARCHS", - "SDK_OS" + "SDK_OS", + rpm_repo_workdir=rpm_repo_workdir ) def _populate_sysroot(self, pm, manifest): @@ -130,7 +137,6 @@ class RpmSdk(Sdk): pm.create_configs() pm.write_index() - pm.dump_all_available_pkgs() pm.update() pkgs = [] @@ -149,9 +155,9 @@ class RpmSdk(Sdk): bb.note("Installing TARGET packages") self._populate_sysroot(self.target_pm, self.target_manifest) - self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) + self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.target_pm.remove_packaging_data() @@ -159,7 +165,7 @@ class RpmSdk(Sdk): bb.note("Installing NATIVESDK packages") self._populate_sysroot(self.host_pm, self.host_manifest) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.host_pm.remove_packaging_data() @@ -167,7 +173,7 @@ class RpmSdk(Sdk): # Move host RPM library data native_rpm_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('localstatedir_nativesdk', True).strip('/'), + self.d.getVar('localstatedir_nativesdk').strip('/'), "lib", "rpm" ) @@ -188,7 +194,9 @@ class RpmSdk(Sdk): True).strip('/'), ) self.mkdirhier(native_sysconf_dir) - for f in glob.glob(os.path.join(self.sdk_output, "etc", "*")): + for f in glob.glob(os.path.join(self.sdk_output, "etc", "rpm*")): + self.movefile(f, native_sysconf_dir) + for f in glob.glob(os.path.join(self.sdk_output, "etc", "dnf", "*")): self.movefile(f, native_sysconf_dir) self.remove(os.path.join(self.sdk_output, "etc"), True) @@ -197,8 +205,8 @@ class OpkgSdk(Sdk): def __init__(self, d, manifest_dir=None): super(OpkgSdk, self).__init__(d, manifest_dir) - self.target_conf = self.d.getVar("IPKGCONF_TARGET", True) - self.host_conf = self.d.getVar("IPKGCONF_SDK", True) + self.target_conf = 
self.d.getVar("IPKGCONF_TARGET") + self.host_conf = self.d.getVar("IPKGCONF_SDK") self.target_manifest = OpkgManifest(d, self.manifest_dir, Manifest.MANIFEST_TYPE_SDK_TARGET) @@ -206,15 +214,15 @@ class OpkgSdk(Sdk): Manifest.MANIFEST_TYPE_SDK_HOST) self.target_pm = OpkgPM(d, self.sdk_target_sysroot, self.target_conf, - self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True)) + self.d.getVar("ALL_MULTILIB_PACKAGE_ARCHS")) self.host_pm = OpkgPM(d, self.sdk_host_sysroot, self.host_conf, - self.d.getVar("SDK_PACKAGE_ARCHS", True)) + self.d.getVar("SDK_PACKAGE_ARCHS")) def _populate_sysroot(self, pm, manifest): pkgs_to_install = manifest.parse_initial_manifest() - if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS', True) or "") != "1": + if (self.d.getVar('BUILD_IMAGES_FROM_FEEDS') or "") != "1": pm.write_index() pm.update() @@ -228,9 +236,9 @@ class OpkgSdk(Sdk): bb.note("Installing TARGET packages") self._populate_sysroot(self.target_pm, self.target_manifest) - self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) + self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.target_pm.remove_packaging_data() @@ -238,7 +246,7 @@ class OpkgSdk(Sdk): bb.note("Installing NATIVESDK packages") self._populate_sysroot(self.host_pm, self.host_manifest) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) if not bb.utils.contains("SDKIMAGE_FEATURES", "package-management", True, False, self.d): self.host_pm.remove_packaging_data() @@ -257,7 +265,7 @@ class OpkgSdk(Sdk): os.path.basename(self.host_conf)), 0o644) native_opkg_state_dir = os.path.join(self.sdk_output, self.sdk_native_path, - self.d.getVar('localstatedir_nativesdk', True).strip('/'), + self.d.getVar('localstatedir_nativesdk').strip('/'), "lib", "opkg") self.mkdirhier(native_opkg_state_dir) for f in glob.glob(os.path.join(self.sdk_output, "var", "lib", "opkg", "*")): @@ -270,8 +278,8 @@ class DpkgSdk(Sdk): def __init__(self, d, manifest_dir=None): super(DpkgSdk, self).__init__(d, manifest_dir) - self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET", True), "apt") - self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET", True), "apt-sdk") + self.target_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt") + self.host_conf_dir = os.path.join(self.d.getVar("APTCONF_TARGET"), "apt-sdk") self.target_manifest = DpkgManifest(d, self.manifest_dir, Manifest.MANIFEST_TYPE_SDK_TARGET) @@ -279,17 +287,17 @@ class DpkgSdk(Sdk): Manifest.MANIFEST_TYPE_SDK_HOST) self.target_pm = DpkgPM(d, self.sdk_target_sysroot, - self.d.getVar("PACKAGE_ARCHS", True), - self.d.getVar("DPKG_ARCH", True), + self.d.getVar("PACKAGE_ARCHS"), + self.d.getVar("DPKG_ARCH"), self.target_conf_dir) self.host_pm = DpkgPM(d, self.sdk_host_sysroot, - self.d.getVar("SDK_PACKAGE_ARCHS", True), - self.d.getVar("DEB_SDK_ARCH", True), + self.d.getVar("SDK_PACKAGE_ARCHS"), + self.d.getVar("DEB_SDK_ARCH"), self.host_conf_dir) def _copy_apt_dir_to(self, dst_dir): - staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE", True) + staging_etcdir_native = self.d.getVar("STAGING_ETCDIR_NATIVE") self.remove(dst_dir, True) 
@@ -310,9 +318,9 @@ class DpkgSdk(Sdk): bb.note("Installing TARGET packages") self._populate_sysroot(self.target_pm, self.target_manifest) - self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY', True)) + self.target_pm.install_complementary(self.d.getVar('SDKIMAGE_INSTALL_COMPLEMENTARY')) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_TARGET_COMMAND")) self._copy_apt_dir_to(os.path.join(self.sdk_target_sysroot, "etc", "apt")) @@ -322,7 +330,7 @@ class DpkgSdk(Sdk): bb.note("Installing NATIVESDK packages") self._populate_sysroot(self.host_pm, self.host_manifest) - execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND", True)) + execute_pre_post_process(self.d, self.d.getVar("POPULATE_SDK_POST_HOST_COMMAND")) self._copy_apt_dir_to(os.path.join(self.sdk_output, self.sdk_native_path, "etc", "apt")) @@ -341,26 +349,26 @@ class DpkgSdk(Sdk): def sdk_list_installed_packages(d, target, rootfs_dir=None): if rootfs_dir is None: - sdk_output = d.getVar('SDK_OUTPUT', True) - target_path = d.getVar('SDKTARGETSYSROOT', True).strip('/') + sdk_output = d.getVar('SDK_OUTPUT') + target_path = d.getVar('SDKTARGETSYSROOT').strip('/') rootfs_dir = [sdk_output, os.path.join(sdk_output, target_path)][target is True] - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": arch_var = ["SDK_PACKAGE_ARCHS", None][target is True] os_var = ["SDK_OS", None][target is True] - return RpmPkgsList(d, rootfs_dir, arch_var, os_var).list_pkgs() + return RpmPkgsList(d, rootfs_dir).list_pkgs() elif img_type == "ipk": conf_file_var = ["IPKGCONF_SDK", "IPKGCONF_TARGET"][target is True] - return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var, True)).list_pkgs() + return OpkgPkgsList(d, rootfs_dir, d.getVar(conf_file_var)).list_pkgs() elif img_type == "deb": return DpkgPkgsList(d, rootfs_dir).list_pkgs() def populate_sdk(d, manifest_dir=None): env_bkp = os.environ.copy() - img_type = d.getVar('IMAGE_PKGTYPE', True) + img_type = d.getVar('IMAGE_PKGTYPE') if img_type == "rpm": RpmSdk(d, manifest_dir).populate() elif img_type == "ipk": diff --git a/import-layers/yocto-poky/meta/lib/oe/sstatesig.py b/import-layers/yocto-poky/meta/lib/oe/sstatesig.py index 8224e3a12..b8dd4c869 100644 --- a/import-layers/yocto-poky/meta/lib/oe/sstatesig.py +++ b/import-layers/yocto-poky/meta/lib/oe/sstatesig.py @@ -20,8 +20,12 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache): def isImage(fn): return "/image.bbclass" in " ".join(dataCache.inherits[fn]) - # Always include our own inter-task dependencies + # (Almost) always include our own inter-task dependencies. + # The exception is the special do_kernel_configme->do_unpack_and_patch + # dependency from archiver.bbclass. 
if recipename == depname: + if task == "do_kernel_configme" and dep.endswith(".do_unpack_and_patch"): + return False return True # Quilt (patch application) changing isn't likely to affect anything @@ -63,10 +67,10 @@ def sstate_rundepfilter(siggen, fn, recipename, task, dep, depname, dataCache): def sstate_lockedsigs(d): sigs = {} - types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES", True) or "").split() + types = (d.getVar("SIGGEN_LOCKEDSIGS_TYPES") or "").split() for t in types: siggen_lockedsigs_var = "SIGGEN_LOCKEDSIGS_%s" % t - lockedsigs = (d.getVar(siggen_lockedsigs_var, True) or "").split() + lockedsigs = (d.getVar(siggen_lockedsigs_var) or "").split() for ls in lockedsigs: pn, task, h = ls.split(":", 2) if pn not in sigs: @@ -77,8 +81,8 @@ def sstate_lockedsigs(d): class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): name = "OEBasic" def init_rundepcheck(self, data): - self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split() - self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split() + self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() + self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() pass def rundep_check(self, fn, recipename, task, dep, depname, dataCache = None): return sstate_rundepfilter(self, fn, recipename, task, dep, depname, dataCache) @@ -86,15 +90,15 @@ class SignatureGeneratorOEBasic(bb.siggen.SignatureGeneratorBasic): class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): name = "OEBasicHash" def init_rundepcheck(self, data): - self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE", True) or "").split() - self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS", True) or "").split() + self.abisaferecipes = (data.getVar("SIGGEN_EXCLUDERECIPES_ABISAFE") or "").split() + self.saferecipedeps = (data.getVar("SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS") or "").split() self.lockedsigs = sstate_lockedsigs(data) self.lockedhashes = {} self.lockedpnmap = {} self.lockedhashfn = {} - self.machine = data.getVar("MACHINE", True) + self.machine = data.getVar("MACHINE") self.mismatch_msgs = [] - self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES", True) or + self.unlockedrecipes = (data.getVar("SIGGEN_UNLOCKED_RECIPES") or "").split() self.unlockedrecipes = { k: "" for k in self.unlockedrecipes } pass @@ -197,7 +201,8 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): types[t].append(k) with open(sigfile, "w") as f: - for t in types: + l = sorted(types) + for t in l: f.write('SIGGEN_LOCKEDSIGS_%s = "\\\n' % t) types[t].sort() sortedk = sorted(types[t], key=lambda k: self.lockedpnmap[k.rsplit(".",1)[0]]) @@ -208,7 +213,17 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): continue f.write(" " + self.lockedpnmap[fn] + ":" + task + ":" + self.taskhash[k] + " \\\n") f.write(' "\n') - f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(list(types.keys())))) + f.write('SIGGEN_LOCKEDSIGS_TYPES_%s = "%s"' % (self.machine, " ".join(l))) + + def dump_siglist(self, sigfile): + with open(sigfile, "w") as f: + tasks = [] + for taskitem in self.taskhash: + (fn, task) = taskitem.rsplit(".", 1) + pn = self.lockedpnmap[fn] + tasks.append((pn, task, fn, self.taskhash[taskitem])) + for (pn, task, fn, taskhash) in sorted(tasks): + f.write('%s.%s %s %s\n' % (pn, task, fn, taskhash)) def checkhashes(self, missed, ret, sq_fn, sq_task, sq_hash, 
sq_hashfn, d): warn_msgs = [] @@ -224,13 +239,13 @@ class SignatureGeneratorOEBasicHash(bb.siggen.SignatureGeneratorBasicHash): sstate_missing_msgs.append("Locked sig is set for %s:%s (%s) yet not in sstate cache?" % (pn, sq_task[task], sq_hash[task])) - checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK", True) + checklevel = d.getVar("SIGGEN_LOCKEDSIGS_TASKSIG_CHECK") if checklevel == 'warn': warn_msgs += self.mismatch_msgs elif checklevel == 'error': error_msgs += self.mismatch_msgs - checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK", True) + checklevel = d.getVar("SIGGEN_LOCKEDSIGS_SSTATE_EXISTS_CHECK") if checklevel == 'warn': warn_msgs += sstate_missing_msgs elif checklevel == 'error': @@ -253,9 +268,6 @@ def find_siginfo(pn, taskname, taskhashlist, d): import fnmatch import glob - if taskhashlist: - hashfiles = {} - if not taskname: # We have to derive pn and taskname key = pn @@ -265,8 +277,15 @@ def find_siginfo(pn, taskname, taskhashlist, d): if key.startswith('virtual:native:'): pn = pn + '-native' + hashfiles = {} filedates = {} + def get_hashval(siginfo): + if siginfo.endswith('.siginfo'): + return siginfo.rpartition(':')[2].partition('_')[0] + else: + return siginfo.rpartition('.')[2] + # First search in stamps dir localdata = d.createCopy() localdata.setVar('MULTIMACH_TARGET_SYS', '*') @@ -274,7 +293,7 @@ def find_siginfo(pn, taskname, taskhashlist, d): localdata.setVar('PV', '*') localdata.setVar('PR', '*') localdata.setVar('EXTENDPE', '') - stamp = localdata.getVar('STAMP', True) + stamp = localdata.getVar('STAMP') if pn.startswith("gcc-source"): # gcc-source shared workdir is a special case :( stamp = localdata.expand("${STAMPS_DIR}/work-shared/gcc-${PV}-${PR}") @@ -296,6 +315,8 @@ def find_siginfo(pn, taskname, taskhashlist, d): filedates[fullpath] = os.stat(fullpath).st_mtime except OSError: continue + hashval = get_hashval(fullpath) + hashfiles[hashval] = fullpath if not taskhashlist or (len(filedates) < 2 and not foundall): # That didn't work, look in sstate-cache @@ -309,30 +330,25 @@ def find_siginfo(pn, taskname, taskhashlist, d): localdata.setVar('PV', '*') localdata.setVar('PR', '*') localdata.setVar('BB_TASKHASH', hashval) - swspec = localdata.getVar('SSTATE_SWSPEC', True) + swspec = localdata.getVar('SSTATE_SWSPEC') if taskname in ['do_fetch', 'do_unpack', 'do_patch', 'do_populate_lic', 'do_preconfigure'] and swspec: localdata.setVar('SSTATE_PKGSPEC', '${SSTATE_SWSPEC}') elif pn.endswith('-native') or "-cross-" in pn or "-crosssdk-" in pn: localdata.setVar('SSTATE_EXTRAPATH', "${NATIVELSBSTRING}/") sstatename = taskname[3:] - filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG', True), sstatename) + filespec = '%s_%s.*.siginfo' % (localdata.getVar('SSTATE_PKG'), sstatename) - if hashval != '*': - sstatedir = "%s/%s" % (d.getVar('SSTATE_DIR', True), hashval[:2]) - else: - sstatedir = d.getVar('SSTATE_DIR', True) - - for root, dirs, files in os.walk(sstatedir): - for fn in files: - fullpath = os.path.join(root, fn) - if fnmatch.fnmatch(fullpath, filespec): - if taskhashlist: - hashfiles[hashval] = fullpath - else: - try: - filedates[fullpath] = os.stat(fullpath).st_mtime - except: - continue + matchedfiles = glob.glob(filespec) + for fullpath in matchedfiles: + actual_hashval = get_hashval(fullpath) + if actual_hashval in hashfiles: + continue + hashfiles[hashval] = fullpath + if not taskhashlist: + try: + filedates[fullpath] = os.stat(fullpath).st_mtime + except: + continue if taskhashlist: return hashfiles @@ -348,7 +364,7 @@ def 
sstate_get_manifest_filename(task, d): Also returns the datastore that can be used to query related variables. """ d2 = d.createCopy() - extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info', True) + extrainf = d.getVarFlag("do_" + task, 'stamp-extra-info') if extrainf: d2.setVar("SSTATE_MANMACH", extrainf) return (d2.expand("${SSTATE_MANFILEPREFIX}.%s" % task), d2) diff --git a/import-layers/yocto-poky/meta/lib/oe/terminal.py b/import-layers/yocto-poky/meta/lib/oe/terminal.py index 3c8ef59a4..2f18ec028 100644 --- a/import-layers/yocto-poky/meta/lib/oe/terminal.py +++ b/import-layers/yocto-poky/meta/lib/oe/terminal.py @@ -11,7 +11,8 @@ class UnsupportedTerminal(Exception): pass class NoSupportedTerminals(Exception): - pass + def __init__(self, terms): + self.terms = terms class Registry(oe.classutils.ClassRegistry): @@ -61,31 +62,10 @@ class Gnome(XTerminal): # Once fixed on the gnome-terminal project, this should be removed. if os.getenv('LC_ALL'): os.putenv('LC_ALL','') - # We need to know when the command completes but gnome-terminal gives us no way - # to do this. We therefore write the pid to a file using a "phonehome" wrapper - # script, then monitor the pid until it exits. Thanks gnome! - import tempfile - pidfile = tempfile.NamedTemporaryFile(delete = False).name - try: - sh_cmd = "oe-gnome-terminal-phonehome " + pidfile + " " + sh_cmd - XTerminal.__init__(self, sh_cmd, title, env, d) - while os.stat(pidfile).st_size <= 0: - continue - with open(pidfile, "r") as f: - pid = int(f.readline()) - finally: - os.unlink(pidfile) - - import time - while True: - try: - os.kill(pid, 0) - time.sleep(0.1) - except OSError: - return + XTerminal.__init__(self, sh_cmd, title, env, d) class Mate(XTerminal): - command = 'mate-terminal -t "{title}" -x {command}' + command = 'mate-terminal --disable-factory -t "{title}" -x {command}' priority = 2 class Xfce(XTerminal): @@ -97,7 +77,7 @@ class Terminology(XTerminal): priority = 2 class Konsole(XTerminal): - command = 'konsole --nofork --workdir . -p tabtitle="{title}" -e {command}' + command = 'konsole --separate --workdir . -p tabtitle="{title}" -e {command}' priority = 2 def __init__(self, sh_cmd, title=None, env=None, d=None): @@ -106,6 +86,9 @@ class Konsole(XTerminal): if vernum and LooseVersion(vernum) < '2.0.0': # Konsole from KDE 3.x self.command = 'konsole -T "{title}" -e {command}' + elif vernum and LooseVersion(vernum) < '16.08.1': + # Konsole pre 16.08.01 Has nofork + self.command = 'konsole --nofork --workdir . 
-p tabtitle="{title}" -e {command}' XTerminal.__init__(self, sh_cmd, title, env, d) class XTerm(XTerminal): @@ -192,7 +175,7 @@ class Custom(Terminal): priority = 3 def __init__(self, sh_cmd, title=None, env=None, d=None): - self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD', True) + self.command = d and d.getVar('OE_TERMINAL_CUSTOMCMD') if self.command: if not '{command}' in self.command: self.command += ' {command}' @@ -206,6 +189,14 @@ class Custom(Terminal): def prioritized(): return Registry.prioritized() +def get_cmd_list(): + terms = Registry.prioritized() + cmds = [] + for term in terms: + if term.command: + cmds.append(term.command) + return cmds + def spawn_preferred(sh_cmd, title=None, env=None, d=None): """Spawn the first supported terminal, by priority""" for terminal in prioritized(): @@ -215,7 +206,7 @@ def spawn_preferred(sh_cmd, title=None, env=None, d=None): except UnsupportedTerminal: continue else: - raise NoSupportedTerminals() + raise NoSupportedTerminals(get_cmd_list()) def spawn(name, sh_cmd, title=None, env=None, d=None): """Spawn the specified terminal, by name""" @@ -225,12 +216,36 @@ def spawn(name, sh_cmd, title=None, env=None, d=None): except KeyError: raise UnsupportedTerminal(name) - pipe = terminal(sh_cmd, title, env, d) - output = pipe.communicate()[0] - if output: - output = output.decode("utf-8") - if pipe.returncode != 0: - raise ExecutionError(sh_cmd, pipe.returncode, output) + # We need to know when the command completes but some terminals (at least + # gnome and tmux) gives us no way to do this. We therefore write the pid + # to a file using a "phonehome" wrapper script, then monitor the pid + # until it exits. + import tempfile + import time + pidfile = tempfile.NamedTemporaryFile(delete = False).name + try: + sh_cmd = bb.utils.which(os.getenv('PATH'), "oe-gnome-terminal-phonehome") + " " + pidfile + " " + sh_cmd + pipe = terminal(sh_cmd, title, env, d) + output = pipe.communicate()[0] + if output: + output = output.decode("utf-8") + if pipe.returncode != 0: + raise ExecutionError(sh_cmd, pipe.returncode, output) + + while os.stat(pidfile).st_size <= 0: + time.sleep(0.01) + continue + with open(pidfile, "r") as f: + pid = int(f.readline()) + finally: + os.unlink(pidfile) + + while True: + try: + os.kill(pid, 0) + time.sleep(0.1) + except OSError: + return def check_tmux_pane_size(tmux): import subprocess as sub diff --git a/import-layers/yocto-poky/meta/lib/oe/tests/__init__.py b/import-layers/yocto-poky/meta/lib/oe/tests/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/import-layers/yocto-poky/meta/lib/oe/tests/test_elf.py b/import-layers/yocto-poky/meta/lib/oe/tests/test_elf.py deleted file mode 100644 index 1f59037ed..000000000 --- a/import-layers/yocto-poky/meta/lib/oe/tests/test_elf.py +++ /dev/null @@ -1,21 +0,0 @@ -import unittest -import oe.qa - -class TestElf(unittest.TestCase): - def test_machine_name(self): - """ - Test elf_machine_to_string() - """ - self.assertEqual(oe.qa.elf_machine_to_string(0x02), "SPARC") - self.assertEqual(oe.qa.elf_machine_to_string(0x03), "x86") - self.assertEqual(oe.qa.elf_machine_to_string(0x08), "MIPS") - self.assertEqual(oe.qa.elf_machine_to_string(0x14), "PowerPC") - self.assertEqual(oe.qa.elf_machine_to_string(0x28), "ARM") - self.assertEqual(oe.qa.elf_machine_to_string(0x2A), "SuperH") - self.assertEqual(oe.qa.elf_machine_to_string(0x32), "IA-64") - self.assertEqual(oe.qa.elf_machine_to_string(0x3E), "x86-64") - self.assertEqual(oe.qa.elf_machine_to_string(0xB7), "AArch64") - 
- self.assertEqual(oe.qa.elf_machine_to_string(0x00), "Unknown (0)") - self.assertEqual(oe.qa.elf_machine_to_string(0xDEADBEEF), "Unknown (3735928559)") - self.assertEqual(oe.qa.elf_machine_to_string("foobar"), "Unknown ('foobar')") diff --git a/import-layers/yocto-poky/meta/lib/oe/tests/test_license.py b/import-layers/yocto-poky/meta/lib/oe/tests/test_license.py deleted file mode 100644 index c38888618..000000000 --- a/import-layers/yocto-poky/meta/lib/oe/tests/test_license.py +++ /dev/null @@ -1,68 +0,0 @@ -import unittest -import oe.license - -class SeenVisitor(oe.license.LicenseVisitor): - def __init__(self): - self.seen = [] - oe.license.LicenseVisitor.__init__(self) - - def visit_Str(self, node): - self.seen.append(node.s) - -class TestSingleLicense(unittest.TestCase): - licenses = [ - "GPLv2", - "LGPL-2.0", - "Artistic", - "MIT", - "GPLv3+", - "FOO_BAR", - ] - invalid_licenses = ["GPL/BSD"] - - @staticmethod - def parse(licensestr): - visitor = SeenVisitor() - visitor.visit_string(licensestr) - return visitor.seen - - def test_single_licenses(self): - for license in self.licenses: - licenses = self.parse(license) - self.assertListEqual(licenses, [license]) - - def test_invalid_licenses(self): - for license in self.invalid_licenses: - with self.assertRaises(oe.license.InvalidLicense) as cm: - self.parse(license) - self.assertEqual(cm.exception.license, license) - -class TestSimpleCombinations(unittest.TestCase): - tests = { - "FOO&BAR": ["FOO", "BAR"], - "BAZ & MOO": ["BAZ", "MOO"], - "ALPHA|BETA": ["ALPHA"], - "BAZ&MOO|FOO": ["FOO"], - "FOO&BAR|BAZ": ["FOO", "BAR"], - } - preferred = ["ALPHA", "FOO", "BAR"] - - def test_tests(self): - def choose(a, b): - if all(lic in self.preferred for lic in b): - return b - else: - return a - - for license, expected in self.tests.items(): - licenses = oe.license.flattened_licenses(license, choose) - self.assertListEqual(licenses, expected) - -class TestComplexCombinations(TestSimpleCombinations): - tests = { - "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"], - "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"], - "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"], - "(GPL-2.0|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0", "BSD-4-clause", "MIT"], - } - preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0"] diff --git a/import-layers/yocto-poky/meta/lib/oe/tests/test_path.py b/import-layers/yocto-poky/meta/lib/oe/tests/test_path.py deleted file mode 100644 index 44d068143..000000000 --- a/import-layers/yocto-poky/meta/lib/oe/tests/test_path.py +++ /dev/null @@ -1,89 +0,0 @@ -import unittest -import oe, oe.path -import tempfile -import os -import errno -import shutil - -class TestRealPath(unittest.TestCase): - DIRS = [ "a", "b", "etc", "sbin", "usr", "usr/bin", "usr/binX", "usr/sbin", "usr/include", "usr/include/gdbm" ] - FILES = [ "etc/passwd", "b/file" ] - LINKS = [ - ( "bin", "/usr/bin", "/usr/bin" ), - ( "binX", "usr/binX", "/usr/binX" ), - ( "c", "broken", "/broken" ), - ( "etc/passwd-1", "passwd", "/etc/passwd" ), - ( "etc/passwd-2", "passwd-1", "/etc/passwd" ), - ( "etc/passwd-3", "/etc/passwd-1", "/etc/passwd" ), - ( "etc/shadow-1", "/etc/shadow", "/etc/shadow" ), - ( "etc/shadow-2", "/etc/shadow-1", "/etc/shadow" ), - ( "prog-A", "bin/prog-A", "/usr/bin/prog-A" ), - ( "prog-B", "/bin/prog-B", "/usr/bin/prog-B" ), - ( "usr/bin/prog-C", "../../sbin/prog-C", "/sbin/prog-C" ), - ( "usr/bin/prog-D", "/sbin/prog-D", "/sbin/prog-D" ), - ( "usr/binX/prog-E", "../sbin/prog-E", None ), - ( "usr/bin/prog-F", "../../../sbin/prog-F", "/sbin/prog-F" ), - ( "loop", 
"a/loop", None ), - ( "a/loop", "../loop", None ), - ( "b/test", "file/foo", "/b/file/foo" ), - ] - - LINKS_PHYS = [ - ( "./", "/", "" ), - ( "binX/prog-E", "/usr/sbin/prog-E", "/sbin/prog-E" ), - ] - - EXCEPTIONS = [ - ( "loop", errno.ELOOP ), - ( "b/test", errno.ENOENT ), - ] - - def __del__(self): - try: - #os.system("tree -F %s" % self.tmpdir) - shutil.rmtree(self.tmpdir) - except: - pass - - def setUp(self): - self.tmpdir = tempfile.mkdtemp(prefix = "oe-test_path") - self.root = os.path.join(self.tmpdir, "R") - - os.mkdir(os.path.join(self.tmpdir, "_real")) - os.symlink("_real", self.root) - - for d in self.DIRS: - os.mkdir(os.path.join(self.root, d)) - for f in self.FILES: - open(os.path.join(self.root, f), "w") - for l in self.LINKS: - os.symlink(l[1], os.path.join(self.root, l[0])) - - def __realpath(self, file, use_physdir, assume_dir = True): - return oe.path.realpath(os.path.join(self.root, file), self.root, - use_physdir, assume_dir = assume_dir) - - def test_norm(self): - for l in self.LINKS: - if l[2] == None: - continue - - target_p = self.__realpath(l[0], True) - target_l = self.__realpath(l[0], False) - - if l[2] != False: - self.assertEqual(target_p, target_l) - self.assertEqual(l[2], target_p[len(self.root):]) - - def test_phys(self): - for l in self.LINKS_PHYS: - target_p = self.__realpath(l[0], True) - target_l = self.__realpath(l[0], False) - - self.assertEqual(l[1], target_p[len(self.root):]) - self.assertEqual(l[2], target_l[len(self.root):]) - - def test_loop(self): - for e in self.EXCEPTIONS: - self.assertRaisesRegex(OSError, r'\[Errno %u\]' % e[1], - self.__realpath, e[0], False, False) diff --git a/import-layers/yocto-poky/meta/lib/oe/tests/test_types.py b/import-layers/yocto-poky/meta/lib/oe/tests/test_types.py deleted file mode 100644 index 367cc30e4..000000000 --- a/import-layers/yocto-poky/meta/lib/oe/tests/test_types.py +++ /dev/null @@ -1,62 +0,0 @@ -import unittest -from oe.maketype import create, factory - -class TestTypes(unittest.TestCase): - def assertIsInstance(self, obj, cls): - return self.assertTrue(isinstance(obj, cls)) - - def assertIsNot(self, obj, other): - return self.assertFalse(obj is other) - - def assertFactoryCreated(self, value, type, **flags): - cls = factory(type) - self.assertIsNot(cls, None) - self.assertIsInstance(create(value, type, **flags), cls) - -class TestBooleanType(TestTypes): - def test_invalid(self): - self.assertRaises(ValueError, create, '', 'boolean') - self.assertRaises(ValueError, create, 'foo', 'boolean') - self.assertRaises(TypeError, create, object(), 'boolean') - - def test_true(self): - self.assertTrue(create('y', 'boolean')) - self.assertTrue(create('yes', 'boolean')) - self.assertTrue(create('1', 'boolean')) - self.assertTrue(create('t', 'boolean')) - self.assertTrue(create('true', 'boolean')) - self.assertTrue(create('TRUE', 'boolean')) - self.assertTrue(create('truE', 'boolean')) - - def test_false(self): - self.assertFalse(create('n', 'boolean')) - self.assertFalse(create('no', 'boolean')) - self.assertFalse(create('0', 'boolean')) - self.assertFalse(create('f', 'boolean')) - self.assertFalse(create('false', 'boolean')) - self.assertFalse(create('FALSE', 'boolean')) - self.assertFalse(create('faLse', 'boolean')) - - def test_bool_equality(self): - self.assertEqual(create('n', 'boolean'), False) - self.assertNotEqual(create('n', 'boolean'), True) - self.assertEqual(create('y', 'boolean'), True) - self.assertNotEqual(create('y', 'boolean'), False) - -class TestList(TestTypes): - def assertListEqual(self, 
value, valid, sep=None): - obj = create(value, 'list', separator=sep) - self.assertEqual(obj, valid) - if sep is not None: - self.assertEqual(obj.separator, sep) - self.assertEqual(str(obj), obj.separator.join(obj)) - - def test_list_nosep(self): - testlist = ['alpha', 'beta', 'theta'] - self.assertListEqual('alpha beta theta', testlist) - self.assertListEqual('alpha beta\ttheta', testlist) - self.assertListEqual('alpha', ['alpha']) - - def test_list_usersep(self): - self.assertListEqual('foo:bar', ['foo', 'bar'], ':') - self.assertListEqual('foo:bar:baz', ['foo', 'bar', 'baz'], ':') diff --git a/import-layers/yocto-poky/meta/lib/oe/tests/test_utils.py b/import-layers/yocto-poky/meta/lib/oe/tests/test_utils.py deleted file mode 100644 index 5d9ac52e7..000000000 --- a/import-layers/yocto-poky/meta/lib/oe/tests/test_utils.py +++ /dev/null @@ -1,51 +0,0 @@ -import unittest -from oe.utils import packages_filter_out_system - -class TestPackagesFilterOutSystem(unittest.TestCase): - def test_filter(self): - """ - Test that oe.utils.packages_filter_out_system works. - """ - try: - import bb - except ImportError: - self.skipTest("Cannot import bb") - - d = bb.data_smart.DataSmart() - d.setVar("PN", "foo") - - d.setVar("PACKAGES", "foo foo-doc foo-dev") - pkgs = packages_filter_out_system(d) - self.assertEqual(pkgs, []) - - d.setVar("PACKAGES", "foo foo-doc foo-data foo-dev") - pkgs = packages_filter_out_system(d) - self.assertEqual(pkgs, ["foo-data"]) - - d.setVar("PACKAGES", "foo foo-locale-en-gb") - pkgs = packages_filter_out_system(d) - self.assertEqual(pkgs, []) - - d.setVar("PACKAGES", "foo foo-data foo-locale-en-gb") - pkgs = packages_filter_out_system(d) - self.assertEqual(pkgs, ["foo-data"]) - - -class TestTrimVersion(unittest.TestCase): - def test_version_exception(self): - with self.assertRaises(TypeError): - trim_version(None, 2) - with self.assertRaises(TypeError): - trim_version((1, 2, 3), 2) - - def test_num_exception(self): - with self.assertRaises(ValueError): - trim_version("1.2.3", 0) - with self.assertRaises(ValueError): - trim_version("1.2.3", -1) - - def test_valid(self): - self.assertEqual(trim_version("1.2.3", 1), "1") - self.assertEqual(trim_version("1.2.3", 2), "1.2") - self.assertEqual(trim_version("1.2.3", 3), "1.2.3") - self.assertEqual(trim_version("1.2.3", 4), "1.2.3") diff --git a/import-layers/yocto-poky/meta/lib/oe/utils.py b/import-layers/yocto-poky/meta/lib/oe/utils.py index 36cf74f29..330a5ff94 100644 --- a/import-layers/yocto-poky/meta/lib/oe/utils.py +++ b/import-layers/yocto-poky/meta/lib/oe/utils.py @@ -1,9 +1,4 @@ -try: - # Python 2 - import commands as cmdstatus -except ImportError: - # Python 3 - import subprocess as cmdstatus +import subprocess def read_file(filename): try: @@ -23,27 +18,27 @@ def ifelse(condition, iftrue = True, iffalse = False): return iffalse def conditional(variable, checkvalue, truevalue, falsevalue, d): - if d.getVar(variable, True) == checkvalue: + if d.getVar(variable) == checkvalue: return truevalue else: return falsevalue def less_or_equal(variable, checkvalue, truevalue, falsevalue, d): - if float(d.getVar(variable, True)) <= float(checkvalue): + if float(d.getVar(variable)) <= float(checkvalue): return truevalue else: return falsevalue def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d): - result = bb.utils.vercmp_string(d.getVar(variable,True), checkvalue) + result = bb.utils.vercmp_string(d.getVar(variable), checkvalue) if result <= 0: return truevalue else: return falsevalue def both_contain(variable1, 
variable2, checkvalue, d): - val1 = d.getVar(variable1, True) - val2 = d.getVar(variable2, True) + val1 = d.getVar(variable1) + val2 = d.getVar(variable2) val1 = set(val1.split()) val2 = set(val2.split()) if isinstance(checkvalue, str): @@ -66,8 +61,8 @@ def set_intersect(variable1, variable2, d): s3 = set_intersect(s1, s2) => s3 = "b c" """ - val1 = set(d.getVar(variable1, True).split()) - val2 = set(d.getVar(variable2, True).split()) + val1 = set(d.getVar(variable1).split()) + val2 = set(d.getVar(variable2).split()) return " ".join(val1 & val2) def prune_suffix(var, suffixes, d): @@ -77,7 +72,7 @@ def prune_suffix(var, suffixes, d): if var.endswith(suffix): var = var.replace(suffix, "") - prefix = d.getVar("MLPREFIX", True) + prefix = d.getVar("MLPREFIX") if prefix and var.startswith(prefix): var = var.replace(prefix, "") @@ -102,6 +97,10 @@ def param_bool(cfg, field, dflt = None): return False raise ValueError("invalid value for boolean parameter '%s': '%s'" % (field, value)) +def build_depends_string(depends, task): + """Append a taskname to a string of dependencies as used by the [depends] flag""" + return " ".join(dep + ":" + task for dep in depends.split()) + def inherits(d, *classes): """Return True if the metadata inherits any of the specified classes""" return any(bb.data.inherits_class(cls, d) for cls in classes) @@ -115,9 +114,9 @@ def features_backfill(var,d): # disturbing distributions that have already set DISTRO_FEATURES. # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED - features = (d.getVar(var, True) or "").split() - backfill = (d.getVar(var+"_BACKFILL", True) or "").split() - considered = (d.getVar(var+"_BACKFILL_CONSIDERED", True) or "").split() + features = (d.getVar(var) or "").split() + backfill = (d.getVar(var+"_BACKFILL") or "").split() + considered = (d.getVar(var+"_BACKFILL_CONSIDERED") or "").split() addfeatures = [] for feature in backfill: @@ -133,18 +132,18 @@ def packages_filter_out_system(d): Return a list of packages from PACKAGES with the "system" packages such as PN-dbg PN-doc PN-locale-eb-gb removed. 
""" - pn = d.getVar('PN', True) + pn = d.getVar('PN') blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev')] localepkg = pn + "-locale-" pkgs = [] - for pkg in d.getVar('PACKAGES', True).split(): + for pkg in d.getVar('PACKAGES').split(): if pkg not in blacklist and localepkg not in pkg: pkgs.append(pkg) return pkgs def getstatusoutput(cmd): - return cmdstatus.getstatusoutput(cmd) + return subprocess.getstatusoutput(cmd) def trim_version(version, num_parts=2): @@ -233,11 +232,10 @@ def format_pkg_list(pkg_dict, ret_format=None): def host_gcc_version(d): import re, subprocess - compiler = d.getVar("BUILD_CC", True) - + compiler = d.getVar("BUILD_CC") try: env = os.environ.copy() - env["PATH"] = d.getVar("PATH", True) + env["PATH"] = d.getVar("PATH") output = subprocess.check_output("%s --version" % compiler, shell=True, env=env).decode("utf-8") except subprocess.CalledProcessError as e: bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8"))) @@ -321,8 +319,8 @@ def write_ld_so_conf(d): bb.utils.remove(ldsoconf) bb.utils.mkdirhier(os.path.dirname(ldsoconf)) with open(ldsoconf, "w") as f: - f.write(d.getVar("base_libdir", True) + '\n') - f.write(d.getVar("libdir", True) + '\n') + f.write(d.getVar("base_libdir") + '\n') + f.write(d.getVar("libdir") + '\n') class ImageQAFailed(bb.build.FuncFailed): def __init__(self, description, name=None, logfile=None): diff --git a/import-layers/yocto-poky/meta/lib/oeqa/buildperf/base.py b/import-layers/yocto-poky/meta/lib/oeqa/buildperf/base.py index 59dd02521..6e62b279c 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/buildperf/base.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/buildperf/base.py @@ -10,21 +10,22 @@ # more details. # """Build performance test base classes and functionality""" -import glob import json import logging import os import re import resource -import shutil import socket +import shutil import time -import traceback import unittest +import xml.etree.ElementTree as ET +from collections import OrderedDict from datetime import datetime, timedelta from functools import partial from multiprocessing import Process from multiprocessing import SimpleQueue +from xml.dom import minidom import oe.path from oeqa.utils.commands import CommandError, runCmd, get_bb_vars @@ -35,7 +36,7 @@ log = logging.getLogger('build-perf') # Our own version of runCmd which does not raise AssertErrors which would cause # errors to interpreted as failures -runCmd2 = partial(runCmd, assert_error=False) +runCmd2 = partial(runCmd, assert_error=False, limit_exc_output=40) class KernelDropCaches(object): @@ -99,50 +100,34 @@ class BuildPerfTestResult(unittest.TextTestResult): super(BuildPerfTestResult, self).__init__(*args, **kwargs) self.out_dir = out_dir - # Get Git parameters - try: - self.repo = GitRepo('.') - except GitError: - self.repo = None - self.git_commit, self.git_commit_count, self.git_branch = \ - self.get_git_revision() self.hostname = socket.gethostname() self.product = os.getenv('OE_BUILDPERFTEST_PRODUCT', 'oe-core') self.start_time = self.elapsed_time = None self.successes = [] - log.info("Using Git branch:commit %s:%s (%s)", self.git_branch, - self.git_commit, self.git_commit_count) - - def get_git_revision(self): - """Get git branch and commit under testing""" - commit = os.getenv('OE_BUILDPERFTEST_GIT_COMMIT') - commit_cnt = os.getenv('OE_BUILDPERFTEST_GIT_COMMIT_COUNT') - branch = os.getenv('OE_BUILDPERFTEST_GIT_BRANCH') - if not self.repo and (not commit or not commit_cnt or 
not branch): - log.info("The current working directory doesn't seem to be a Git " - "repository clone. You can specify branch and commit " - "displayed in test results with OE_BUILDPERFTEST_GIT_BRANCH, " - "OE_BUILDPERFTEST_GIT_COMMIT and " - "OE_BUILDPERFTEST_GIT_COMMIT_COUNT environment variables") - else: - if not commit: - commit = self.repo.rev_parse('HEAD^0') - commit_cnt = self.repo.run_cmd(['rev-list', '--count', 'HEAD^0']) - if not branch: - branch = self.repo.get_current_branch() - if not branch: - log.debug('Currently on detached HEAD') - return str(commit), str(commit_cnt), str(branch) def addSuccess(self, test): """Record results from successful tests""" super(BuildPerfTestResult, self).addSuccess(test) - self.successes.append((test, None)) + self.successes.append(test) + + def addError(self, test, err): + """Record results from crashed test""" + test.err = err + super(BuildPerfTestResult, self).addError(test, err) + + def addFailure(self, test, err): + """Record results from failed test""" + test.err = err + super(BuildPerfTestResult, self).addFailure(test, err) + + def addExpectedFailure(self, test, err): + """Record results from expectedly failed test""" + test.err = err + super(BuildPerfTestResult, self).addExpectedFailure(test, err) def startTest(self, test): """Pre-test hook""" test.base_dir = self.out_dir - os.mkdir(test.out_dir) log.info("Executing test %s: %s", test.name, test.shortDescription()) self.stream.write(datetime.now().strftime("[%Y-%m-%d %H:%M:%S] ")) super(BuildPerfTestResult, self).startTest(test) @@ -154,141 +139,113 @@ class BuildPerfTestResult(unittest.TextTestResult): def stopTestRun(self): """Pre-run hook""" self.elapsed_time = datetime.utcnow() - self.start_time - self.write_results_json() def all_results(self): - result_map = {'SUCCESS': self.successes, - 'FAIL': self.failures, - 'ERROR': self.errors, - 'EXP_FAIL': self.expectedFailures, - 'UNEXP_SUCCESS': self.unexpectedSuccesses, - 'SKIPPED': self.skipped} - for status, tests in result_map.items(): - for test in tests: - yield (status, test) - - - def update_globalres_file(self, filename): - """Write results to globalres csv file""" - # Map test names to time and size columns in globalres - # The tuples represent index and length of times and sizes - # respectively - gr_map = {'test1': ((0, 1), (8, 1)), - 'test12': ((1, 1), (None, None)), - 'test13': ((2, 1), (9, 1)), - 'test2': ((3, 1), (None, None)), - 'test3': ((4, 3), (None, None)), - 'test4': ((7, 1), (10, 2))} - - if self.repo: - git_tag_rev = self.repo.run_cmd(['describe', self.git_commit]) - else: - git_tag_rev = self.git_commit + compound = [('SUCCESS', t, None) for t in self.successes] + \ + [('FAILURE', t, m) for t, m in self.failures] + \ + [('ERROR', t, m) for t, m in self.errors] + \ + [('EXPECTED_FAILURE', t, m) for t, m in self.expectedFailures] + \ + [('UNEXPECTED_SUCCESS', t, None) for t in self.unexpectedSuccesses] + \ + [('SKIPPED', t, m) for t, m in self.skipped] + return sorted(compound, key=lambda info: info[1].start_time) + + + def write_buildstats_json(self): + """Write buildstats file""" + buildstats = OrderedDict() + for _, test, _ in self.all_results(): + for key, val in test.buildstats.items(): + buildstats[test.name + '.' 
+ key] = val + with open(os.path.join(self.out_dir, 'buildstats.json'), 'w') as fobj: + json.dump(buildstats, fobj, cls=ResultsJsonEncoder) - values = ['0'] * 12 - for status, (test, msg) in self.all_results(): - if status in ['ERROR', 'SKIPPED']: - continue - (t_ind, t_len), (s_ind, s_len) = gr_map[test.name] - if t_ind is not None: - values[t_ind:t_ind + t_len] = test.times - if s_ind is not None: - values[s_ind:s_ind + s_len] = test.sizes - - log.debug("Writing globalres log to %s", filename) - with open(filename, 'a') as fobj: - fobj.write('{},{}:{},{},'.format(self.hostname, - self.git_branch, - self.git_commit, - git_tag_rev)) - fobj.write(','.join(values) + '\n') def write_results_json(self): """Write test results into a json-formatted file""" - results = {'tester_host': self.hostname, - 'git_branch': self.git_branch, - 'git_commit': self.git_commit, - 'git_commit_count': self.git_commit_count, - 'product': self.product, - 'start_time': self.start_time, - 'elapsed_time': self.elapsed_time} - - tests = {} - for status, (test, reason) in self.all_results(): - tests[test.name] = {'name': test.name, - 'description': test.shortDescription(), - 'status': status, - 'start_time': test.start_time, - 'elapsed_time': test.elapsed_time, - 'cmd_log_file': os.path.relpath(test.cmd_log_file, - self.out_dir), - 'measurements': test.measurements} - results['tests'] = tests + results = OrderedDict([('tester_host', self.hostname), + ('start_time', self.start_time), + ('elapsed_time', self.elapsed_time), + ('tests', OrderedDict())]) + + for status, test, reason in self.all_results(): + test_result = OrderedDict([('name', test.name), + ('description', test.shortDescription()), + ('status', status), + ('start_time', test.start_time), + ('elapsed_time', test.elapsed_time), + ('measurements', test.measurements)]) + if status in ('ERROR', 'FAILURE', 'EXPECTED_FAILURE'): + test_result['message'] = str(test.err[1]) + test_result['err_type'] = test.err[0].__name__ + test_result['err_output'] = reason + elif reason: + test_result['message'] = reason + + results['tests'][test.name] = test_result with open(os.path.join(self.out_dir, 'results.json'), 'w') as fobj: - json.dump(results, fobj, indent=4, sort_keys=True, + json.dump(results, fobj, indent=4, cls=ResultsJsonEncoder) - - def git_commit_results(self, repo_path, branch=None, tag=None): - """Commit results into a Git repository""" - repo = GitRepo(repo_path, is_topdir=True) - if not branch: - branch = self.git_branch - else: - # Replace keywords - branch = branch.format(git_branch=self.git_branch, - tester_host=self.hostname) - - log.info("Committing test results into %s %s", repo_path, branch) - tmp_index = os.path.join(repo_path, '.git', 'index.oe-build-perf') - try: - # Create new commit object from the new results - env_update = {'GIT_INDEX_FILE': tmp_index, - 'GIT_WORK_TREE': self.out_dir} - repo.run_cmd('add .', env_update) - tree = repo.run_cmd('write-tree', env_update) - parent = repo.rev_parse(branch) - msg = "Results of {}:{}\n".format(self.git_branch, self.git_commit) - git_cmd = ['commit-tree', tree, '-m', msg] - if parent: - git_cmd += ['-p', parent] - commit = repo.run_cmd(git_cmd, env_update) - - # Update branch head - git_cmd = ['update-ref', 'refs/heads/' + branch, commit] - if parent: - git_cmd.append(parent) - repo.run_cmd(git_cmd) - - # Update current HEAD, if we're on branch 'branch' - if repo.get_current_branch() == branch: - log.info("Updating %s HEAD to latest commit", repo_path) - repo.run_cmd('reset --hard') - - # Create (annotated) 
tag - if tag: - # Find tags matching the pattern - tag_keywords = dict(git_branch=self.git_branch, - git_commit=self.git_commit, - git_commit_count=self.git_commit_count, - tester_host=self.hostname, - tag_num='[0-9]{1,5}') - tag_re = re.compile(tag.format(**tag_keywords) + '$') - tag_keywords['tag_num'] = 0 - for existing_tag in repo.run_cmd('tag').splitlines(): - if tag_re.match(existing_tag): - tag_keywords['tag_num'] += 1 - - tag = tag.format(**tag_keywords) - msg = "Test run #{} of {}:{}\n".format(tag_keywords['tag_num'], - self.git_branch, - self.git_commit) - repo.run_cmd(['tag', '-a', '-m', msg, tag, commit]) - - finally: - if os.path.exists(tmp_index): - os.unlink(tmp_index) + def write_results_xml(self): + """Write test results into a JUnit XML file""" + top = ET.Element('testsuites') + suite = ET.SubElement(top, 'testsuite') + suite.set('name', 'oeqa.buildperf') + suite.set('timestamp', self.start_time.isoformat()) + suite.set('time', str(self.elapsed_time.total_seconds())) + suite.set('hostname', self.hostname) + suite.set('failures', str(len(self.failures) + len(self.expectedFailures))) + suite.set('errors', str(len(self.errors))) + suite.set('skipped', str(len(self.skipped))) + + test_cnt = 0 + for status, test, reason in self.all_results(): + test_cnt += 1 + testcase = ET.SubElement(suite, 'testcase') + testcase.set('classname', test.__module__ + '.' + test.__class__.__name__) + testcase.set('name', test.name) + testcase.set('description', test.shortDescription()) + testcase.set('timestamp', test.start_time.isoformat()) + testcase.set('time', str(test.elapsed_time.total_seconds())) + if status in ('ERROR', 'FAILURE', 'EXP_FAILURE'): + if status in ('FAILURE', 'EXP_FAILURE'): + result = ET.SubElement(testcase, 'failure') + else: + result = ET.SubElement(testcase, 'error') + result.set('message', str(test.err[1])) + result.set('type', test.err[0].__name__) + result.text = reason + elif status == 'SKIPPED': + result = ET.SubElement(testcase, 'skipped') + result.text = reason + elif status not in ('SUCCESS', 'UNEXPECTED_SUCCESS'): + raise TypeError("BUG: invalid test status '%s'" % status) + + for data in test.measurements.values(): + measurement = ET.SubElement(testcase, data['type']) + measurement.set('name', data['name']) + measurement.set('legend', data['legend']) + vals = data['values'] + if data['type'] == BuildPerfTestCase.SYSRES: + ET.SubElement(measurement, 'time', + timestamp=vals['start_time'].isoformat()).text = \ + str(vals['elapsed_time'].total_seconds()) + attrib = dict((k, str(v)) for k, v in vals['iostat'].items()) + ET.SubElement(measurement, 'iostat', attrib=attrib) + attrib = dict((k, str(v)) for k, v in vals['rusage'].items()) + ET.SubElement(measurement, 'rusage', attrib=attrib) + elif data['type'] == BuildPerfTestCase.DISKUSAGE: + ET.SubElement(measurement, 'size').text = str(vals['size']) + else: + raise TypeError('BUG: unsupported measurement type') + + suite.set('tests', str(test_cnt)) + + # Use minidom for pretty-printing + dom_doc = minidom.parseString(ET.tostring(top, 'utf-8')) + with open(os.path.join(self.out_dir, 'results.xml'), 'w') as fobj: + dom_doc.writexml(fobj, addindent=' ', newl='\n', encoding='utf-8') class BuildPerfTestCase(unittest.TestCase): @@ -303,7 +260,10 @@ class BuildPerfTestCase(unittest.TestCase): self.base_dir = None self.start_time = None self.elapsed_time = None - self.measurements = [] + self.measurements = OrderedDict() + self.buildstats = OrderedDict() + # self.err is supposed to be a tuple from sys.exc_info() + self.err 
= None self.bb_vars = get_bb_vars() # TODO: remove 'times' and 'sizes' arrays when globalres support is # removed @@ -311,18 +271,23 @@ class BuildPerfTestCase(unittest.TestCase): self.sizes = [] @property - def out_dir(self): - return os.path.join(self.base_dir, self.name) + def tmp_dir(self): + return os.path.join(self.base_dir, self.name + '.tmp') - @property - def cmd_log_file(self): - return os.path.join(self.out_dir, 'commands.log') + def shortDescription(self): + return super(BuildPerfTestCase, self).shortDescription() or "" def setUp(self): """Set-up fixture for each test""" + if not os.path.isdir(self.tmp_dir): + os.mkdir(self.tmp_dir) if self.build_target: - self.log_cmd_output(['bitbake', self.build_target, - '-c', 'fetchall']) + self.run_cmd(['bitbake', self.build_target, '-c', 'fetchall']) + + def tearDown(self): + """Tear-down fixture for each test""" + if os.path.isdir(self.tmp_dir): + shutil.rmtree(self.tmp_dir) def run(self, *args, **kwargs): """Run test""" @@ -330,17 +295,23 @@ class BuildPerfTestCase(unittest.TestCase): super(BuildPerfTestCase, self).run(*args, **kwargs) self.elapsed_time = datetime.now() - self.start_time - def log_cmd_output(self, cmd): - """Run a command and log it's output""" + def run_cmd(self, cmd): + """Convenience method for running a command""" cmd_str = cmd if isinstance(cmd, str) else ' '.join(cmd) log.info("Logging command: %s", cmd_str) try: - with open(self.cmd_log_file, 'a') as fobj: - runCmd2(cmd, stdout=fobj) + runCmd2(cmd) except CommandError as err: log.error("Command failed: %s", err.retcode) raise + def _append_measurement(self, measurement): + """Simple helper for adding measurements results""" + if measurement['name'] in self.measurements: + raise ValueError('BUG: two measurements with the same name in {}'.format( + self.__class__.__name__)) + self.measurements[measurement['name']] = measurement + def measure_cmd_resources(self, cmd, name, legend, save_bs=False): """Measure system resource usage of a command""" def _worker(data_q, cmd, **kwargs): @@ -350,12 +321,12 @@ class BuildPerfTestCase(unittest.TestCase): ret = runCmd2(cmd, **kwargs) etime = datetime.now() - start_time rusage_struct = resource.getrusage(resource.RUSAGE_CHILDREN) - iostat = {} + iostat = OrderedDict() with open('/proc/{}/io'.format(os.getpid())) as fobj: for line in fobj.readlines(): key, val = line.split(':') iostat[key] = int(val) - rusage = {} + rusage = OrderedDict() # Skip unused fields, (i.e. 
'ru_ixrss', 'ru_idrss', 'ru_isrss', # 'ru_nswap', 'ru_msgsnd', 'ru_msgrcv' and 'ru_nsignals') for key in ['ru_utime', 'ru_stime', 'ru_maxrss', 'ru_minflt', @@ -374,33 +345,28 @@ class BuildPerfTestCase(unittest.TestCase): log.info("Timing command: %s", cmd_str) data_q = SimpleQueue() try: - with open(self.cmd_log_file, 'a') as fobj: - proc = Process(target=_worker, args=(data_q, cmd,), - kwargs={'stdout': fobj}) - proc.start() - data = data_q.get() - proc.join() + proc = Process(target=_worker, args=(data_q, cmd,)) + proc.start() + data = data_q.get() + proc.join() if isinstance(data, Exception): raise data except CommandError: - log.error("Command '%s' failed, see %s for more details", cmd_str, - self.cmd_log_file) + log.error("Command '%s' failed", cmd_str) raise etime = data['elapsed_time'] - measurement = {'type': self.SYSRES, - 'name': name, - 'legend': legend} - measurement['values'] = {'start_time': data['start_time'], - 'elapsed_time': etime, - 'rusage': data['rusage'], - 'iostat': data['iostat']} + measurement = OrderedDict([('type', self.SYSRES), + ('name', name), + ('legend', legend)]) + measurement['values'] = OrderedDict([('start_time', data['start_time']), + ('elapsed_time', etime), + ('rusage', data['rusage']), + ('iostat', data['iostat'])]) if save_bs: - bs_file = self.save_buildstats(legend) - measurement['values']['buildstats_file'] = \ - os.path.relpath(bs_file, self.base_dir) + self.save_buildstats(name) - self.measurements.append(measurement) + self._append_measurement(measurement) # Append to 'times' array for globalres log e_sec = etime.total_seconds() @@ -418,15 +384,15 @@ class BuildPerfTestCase(unittest.TestCase): ret = runCmd2(cmd) size = int(ret.output.split()[0]) log.debug("Size of %s path is %s", path, size) - measurement = {'type': self.DISKUSAGE, - 'name': name, - 'legend': legend} - measurement['values'] = {'size': size} - self.measurements.append(measurement) + measurement = OrderedDict([('type', self.DISKUSAGE), + ('name', name), + ('legend', legend)]) + measurement['values'] = OrderedDict([('size', size)]) + self._append_measurement(measurement) # Append to 'sizes' array for globalres log self.sizes.append(str(size)) - def save_buildstats(self, label=None): + def save_buildstats(self, measurement_name): """Save buildstats""" def split_nevr(nevr): """Split name and version information from recipe "nevr" string""" @@ -445,9 +411,9 @@ class BuildPerfTestCase(unittest.TestCase): def bs_to_json(filename): """Convert (task) buildstats file into json format""" - bs_json = {'iostat': {}, - 'rusage': {}, - 'child_rusage': {}} + bs_json = OrderedDict() + iostat = OrderedDict() + rusage = OrderedDict() with open(filename) as fobj: for line in fobj.readlines(): key, val = line.split(':', 1) @@ -459,7 +425,7 @@ class BuildPerfTestCase(unittest.TestCase): end_time = datetime.utcfromtimestamp(float(val)) elif key.startswith('IO '): split = key.split() - bs_json['iostat'][split[1]] = int(val) + iostat[split[1]] = int(val) elif key.find('rusage') >= 0: split = key.split() ru_key = split[-1] @@ -467,12 +433,12 @@ class BuildPerfTestCase(unittest.TestCase): val = float(val) else: val = int(val) - ru_type = 'rusage' if split[0] == 'rusage' else \ - 'child_rusage' - bs_json[ru_type][ru_key] = val + rusage[ru_key] = rusage.get(ru_key, 0) + val elif key == 'Status': bs_json['status'] = val bs_json['elapsed_time'] = end_time - start_time + bs_json['rusage'] = rusage + bs_json['iostat'] = iostat return bs_json log.info('Saving buildstats in JSON format') @@ -488,24 +454,17 @@ 
class BuildPerfTestCase(unittest.TestCase): if not os.path.isdir(recipe_dir): continue name, epoch, version, revision = split_nevr(fname) - recipe_bs = {'name': name, - 'epoch': epoch, - 'version': version, - 'revision': revision, - 'tasks': {}} + recipe_bs = OrderedDict((('name', name), + ('epoch', epoch), + ('version', version), + ('revision', revision), + ('tasks', OrderedDict()))) for task in os.listdir(recipe_dir): recipe_bs['tasks'][task] = bs_to_json(os.path.join(recipe_dir, task)) buildstats.append(recipe_bs) - # Write buildstats into json file - postfix = '.' + str_to_fn(label) if label else '' - postfix += '.json' - outfile = os.path.join(self.out_dir, 'buildstats' + postfix) - with open(outfile, 'w') as fobj: - json.dump(buildstats, fobj, indent=4, sort_keys=True, - cls=ResultsJsonEncoder) - return outfile + self.buildstats[measurement_name] = buildstats def rm_tmp(self): """Cleanup temporary/intermediate files and directories""" @@ -547,5 +506,5 @@ class BuildPerfTestRunner(unittest.TextTestRunner): self.out_dir = out_dir def _makeResult(self): - return BuildPerfTestResult(self.out_dir, self.stream, self.descriptions, - self.verbosity) + return BuildPerfTestResult(self.out_dir, self.stream, self.descriptions, + self.verbosity) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/buildperf/test_basic.py b/import-layers/yocto-poky/meta/lib/oeqa/buildperf/test_basic.py index 7a48c1e77..a9e4a5b73 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/buildperf/test_basic.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/buildperf/test_basic.py @@ -22,7 +22,7 @@ class Test1P1(BuildPerfTestCase): build_target = 'core-image-sato' def test1(self): - """Measure wall clock of bitbake core-image-sato and size of tmp dir""" + """Build core-image-sato""" self.rm_tmp() self.rm_sstate() self.rm_cache() @@ -36,10 +36,10 @@ class Test1P2(BuildPerfTestCase): build_target = 'virtual/kernel' def test12(self): - """Measure bitbake virtual/kernel""" + """Build virtual/kernel""" # Build and cleans state in order to get all dependencies pre-built - self.log_cmd_output(['bitbake', self.build_target]) - self.log_cmd_output(['bitbake', self.build_target, '-c', 'cleansstate']) + self.run_cmd(['bitbake', self.build_target]) + self.run_cmd(['bitbake', self.build_target, '-c', 'cleansstate']) self.sync() self.measure_cmd_resources(['bitbake', self.build_target], 'build', @@ -51,30 +51,28 @@ class Test1P3(BuildPerfTestCase): def test13(self): """Build core-image-sato with rm_work enabled""" - postfile = os.path.join(self.out_dir, 'postfile.conf') + postfile = os.path.join(self.tmp_dir, 'postfile.conf') with open(postfile, 'w') as fobj: fobj.write('INHERIT += "rm_work"\n') - try: - self.rm_tmp() - self.rm_sstate() - self.rm_cache() - self.sync() - cmd = ['bitbake', '-R', postfile, self.build_target] - self.measure_cmd_resources(cmd, 'build', - 'bitbake' + self.build_target, - save_bs=True) - self.measure_disk_usage(self.bb_vars['TMPDIR'], 'tmpdir', 'tmpdir') - finally: - os.unlink(postfile) + + self.rm_tmp() + self.rm_sstate() + self.rm_cache() + self.sync() + cmd = ['bitbake', '-R', postfile, self.build_target] + self.measure_cmd_resources(cmd, 'build', + 'bitbake' + self.build_target, + save_bs=True) + self.measure_disk_usage(self.bb_vars['TMPDIR'], 'tmpdir', 'tmpdir') class Test2(BuildPerfTestCase): build_target = 'core-image-sato' def test2(self): - """Measure bitbake core-image-sato -c rootfs with sstate""" + """Run core-image-sato do_rootfs with sstate""" # Build once in order to populate sstate cache - 
self.log_cmd_output(['bitbake', self.build_target]) + self.run_cmd(['bitbake', self.build_target]) self.rm_tmp() self.rm_cache() @@ -86,7 +84,7 @@ class Test2(BuildPerfTestCase): class Test3(BuildPerfTestCase): def test3(self): - """Parsing time metrics (bitbake -p)""" + """Bitbake parsing (bitbake -p)""" # Drop all caches and parse self.rm_cache() oe.path.remove(os.path.join(self.bb_vars['TMPDIR'], 'cache'), True) @@ -106,8 +104,8 @@ class Test4(BuildPerfTestCase): def test4(self): """eSDK metrics""" - self.log_cmd_output("bitbake {} -c do_populate_sdk_ext".format( - self.build_target)) + self.run_cmd(['bitbake', '-c', 'do_populate_sdk_ext', + self.build_target]) self.bb_vars = get_bb_vars(None, self.build_target) tmp_dir = self.bb_vars['TMPDIR'] installer = os.path.join( diff --git a/import-layers/yocto-poky/meta/lib/oeqa/controllers/masterimage.py b/import-layers/yocto-poky/meta/lib/oeqa/controllers/masterimage.py index 9ce3bf803..07418fcda 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/controllers/masterimage.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/controllers/masterimage.py @@ -32,14 +32,14 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta super(MasterImageHardwareTarget, self).__init__(d) # target ip - addr = d.getVar("TEST_TARGET_IP", True) or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.') + addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.') self.ip = addr.split(":")[0] try: self.port = addr.split(":")[1] except IndexError: self.port = None bb.note("Target IP: %s" % self.ip) - self.server_ip = d.getVar("TEST_SERVER_IP", True) + self.server_ip = d.getVar("TEST_SERVER_IP") if not self.server_ip: try: self.server_ip = subprocess.check_output(['ip', 'route', 'get', self.ip ]).split("\n")[0].split()[-1] @@ -49,8 +49,8 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta # test rootfs + kernel self.image_fstype = self.get_image_fstype(d) - self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("IMAGE_LINK_NAME", True) + '.' + self.image_fstype) - self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin') + self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype) + self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin') if not os.path.isfile(self.rootfs): # we could've checked that IMAGE_FSTYPES contains tar.gz but the config for running testimage might not be # the same as the config with which the image was build, ie @@ -64,16 +64,16 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta # master ssh connection self.master = None # if the user knows what they are doing, then by all means... 
- self.user_cmds = d.getVar("TEST_DEPLOY_CMDS", True) + self.user_cmds = d.getVar("TEST_DEPLOY_CMDS") self.deploy_cmds = None # this is the name of the command that controls the power for a board # e.g: TEST_POWERCONTROL_CMD = "/home/user/myscripts/powercontrol.py ${MACHINE} what-ever-other-args-the-script-wants" # the command should take as the last argument "off" and "on" and "cycle" (off, on) - self.powercontrol_cmd = d.getVar("TEST_POWERCONTROL_CMD", True) or None + self.powercontrol_cmd = d.getVar("TEST_POWERCONTROL_CMD") or None self.powercontrol_args = d.getVar("TEST_POWERCONTROL_EXTRA_ARGS", False) or "" - self.serialcontrol_cmd = d.getVar("TEST_SERIALCONTROL_CMD", True) or None + self.serialcontrol_cmd = d.getVar("TEST_SERIALCONTROL_CMD") or None self.serialcontrol_args = d.getVar("TEST_SERIALCONTROL_EXTRA_ARGS", False) or "" self.origenv = os.environ
@@ -82,7 +82,7 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta # ssh + keys means we need the original user env bborigenv = d.getVar("BB_ORIGENV", False) or {} for key in bborigenv: - val = bborigenv.getVar(key, True) + val = bborigenv.getVar(key) if val is not None: self.origenv[key] = str(val)
@@ -159,10 +159,10 @@ class MasterImageHardwareTarget(oeqa.targetcontrol.BaseTarget, metaclass=ABCMeta self.power_cycle(self.connection) -class GummibootTarget(MasterImageHardwareTarget): +class SystemdbootTarget(MasterImageHardwareTarget): def __init__(self, d): - super(GummibootTarget, self).__init__(d) + super(SystemdbootTarget, self).__init__(d) # this the value we need to set in the LoaderEntryOneShot EFI variable # so the system boots the 'test' bootloader label and not the default # The first four bytes are EFI bits, and the rest is an utf-16le string
diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/README b/import-layers/yocto-poky/meta/lib/oeqa/core/README new file mode 100644 index 000000000..0c859fd78 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/README @@ -0,0 +1,38 @@
+= OEQA Framework =
+
+== Introduction ==
+
+This is the new OEQA framework. The base classes of the framework
+are in this module (oeqa/core); subsequent components need to
+extend these classes.
+
+A new/unique runner called oe-test was created and lives under scripts/
+oe-test; this new runner scans the oeqa module searching for test
+components that support OETestContextExecutor as implemented in the context
+module (i.e. oeqa/core/context.py).
+
+To run an example:
+
+$ source oe-init-build-env
+$ oe-test core
+
+To list the supported components:
+
+$ oe-test -h
+
+== Create new Test component ==
+
+Usually, to add a new Test component, the developer needs to extend
+OETestContext/OETestContextExecutor in context.py and OETestCase in
+case.py.
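As a rough illustration of that layout (module name, paths and values below are hypothetical, modelled on the bundled oeqa/core/cases/example and the OETestContextExecutor defaults in oeqa/core/context.py), a minimal component could look like:

# lib/oeqa/mycomponent/cases/hello.py (hypothetical path)
from oeqa.core.case import OETestCase

class HelloTest(OETestCase):
    # td_vars = ['MACHINE'] would make MACHINE a mandatory key in the test data
    def test_hello(self):
        # self.td holds the JSON test data given via --test-data-file
        self.logger.info('MACHINE: %s' % self.td.get('MACHINE', 'unknown'))

# lib/oeqa/mycomponent/context.py (hypothetical path)
import os
from oeqa.core.context import OETestContext, OETestContextExecutor

class MyTestContext(OETestContext):
    pass

class MyTestContextExecutor(OETestContextExecutor):
    _context_class = MyTestContext
    name = 'mycomponent'        # selected on the command line as: oe-test mycomponent
    help = 'my test component'
    description = 'executes the mycomponent test suite'
    default_cases = [os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                  'cases')]
    default_test_data = None    # or a path to a JSON file with default test data

_executor_class = MyTestContextExecutor

Assuming the component lives somewhere oe-test scans for executors, it should then show up in "oe-test -h" and be runnable as "oe-test mycomponent".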
+ +== How to run the testing of the OEQA framework == + +Run all tests: + +$ PATH=$PATH:../../ python3 -m unittest discover -s tests + +Run some test: + +$ cd tests/ +$ ./test_data.py diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/case.py b/import-layers/yocto-poky/meta/lib/oeqa/core/case.py new file mode 100644 index 000000000..d2dbf20f9 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/case.py @@ -0,0 +1,46 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import unittest + +from oeqa.core.exception import OEQAMissingVariable + +def _validate_td_vars(td, td_vars, type_msg): + if td_vars: + for v in td_vars: + if not v in td: + raise OEQAMissingVariable("Test %s need %s variable but"\ + " isn't into td" % (type_msg, v)) + +class OETestCase(unittest.TestCase): + # TestContext and Logger instance set by OETestLoader. + tc = None + logger = None + + # td has all the variables needed by the test cases + # is the same across all the test cases. + td = None + + # td_vars has the variables needed by a test class + # or test case instance, if some var isn't into td a + # OEMissingVariable exception is raised + td_vars = None + + @classmethod + def _oeSetUpClass(clss): + _validate_td_vars(clss.td, clss.td_vars, "class") + clss.setUpClassMethod() + + @classmethod + def _oeTearDownClass(clss): + clss.tearDownClassMethod() + + def _oeSetUp(self): + for d in self.decorators: + d.setUpDecorator() + self.setUpMethod() + + def _oeTearDown(self): + for d in self.decorators: + d.tearDownDecorator() + self.tearDownMethod() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/cases/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/core/cases/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/cases/example/data.json b/import-layers/yocto-poky/meta/lib/oeqa/core/cases/example/data.json new file mode 100644 index 000000000..21d6b16d1 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/cases/example/data.json @@ -0,0 +1 @@ +{"ARCH": "x86", "IMAGE": "core-image-minimal"} \ No newline at end of file diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/cases/example/test_basic.py b/import-layers/yocto-poky/meta/lib/oeqa/core/cases/example/test_basic.py new file mode 100644 index 000000000..11cf3800c --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/cases/example/test_basic.py @@ -0,0 +1,20 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.case import OETestCase +from oeqa.core.decorator.depends import OETestDepends + +class OETestExample(OETestCase): + def test_example(self): + self.logger.info('IMAGE: %s' % self.td.get('IMAGE')) + self.assertEqual('core-image-minimal', self.td.get('IMAGE')) + self.logger.info('ARCH: %s' % self.td.get('ARCH')) + self.assertEqual('x86', self.td.get('ARCH')) + +class OETestExampleDepend(OETestCase): + @OETestDepends(['OETestExample.test_example']) + def test_example_depends(self): + pass + + def test_example_no_depends(self): + pass diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/context.py b/import-layers/yocto-poky/meta/lib/oeqa/core/context.py new file mode 100644 index 000000000..4476750a3 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/context.py @@ 
-0,0 +1,243 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import sys +import json +import time +import logging +import collections +import re + +from oeqa.core.loader import OETestLoader +from oeqa.core.runner import OETestRunner, OEStreamLogger, xmlEnabled + +class OETestContext(object): + loaderClass = OETestLoader + runnerClass = OETestRunner + streamLoggerClass = OEStreamLogger + + files_dir = os.path.abspath(os.path.join(os.path.dirname( + os.path.abspath(__file__)), "../files")) + + def __init__(self, td=None, logger=None): + if not type(td) is dict: + raise TypeError("td isn't dictionary type") + + self.td = td + self.logger = logger + self._registry = {} + self._registry['cases'] = collections.OrderedDict() + self._results = {} + + def _read_modules_from_manifest(self, manifest): + if not os.path.exists(manifest): + raise + + modules = [] + for line in open(manifest).readlines(): + line = line.strip() + if line and not line.startswith("#"): + modules.append(line) + + return modules + + def loadTests(self, module_paths, modules=[], tests=[], + modules_manifest="", modules_required=[], filters={}): + if modules_manifest: + modules = self._read_modules_from_manifest(modules_manifest) + + self.loader = self.loaderClass(self, module_paths, modules, tests, + modules_required, filters) + self.suites = self.loader.discover() + + def runTests(self): + streamLogger = self.streamLoggerClass(self.logger) + self.runner = self.runnerClass(self, stream=streamLogger, verbosity=2) + + self._run_start_time = time.time() + result = self.runner.run(self.suites) + self._run_end_time = time.time() + + return result + + def logSummary(self, result, component, context_msg=''): + self.logger.info("SUMMARY:") + self.logger.info("%s (%s) - Ran %d test%s in %.3fs" % (component, + context_msg, result.testsRun, result.testsRun != 1 and "s" or "", + (self._run_end_time - self._run_start_time))) + + if result.wasSuccessful(): + msg = "%s - OK - All required tests passed" % component + else: + msg = "%s - FAIL - Required tests failed" % component + skipped = len(self._results['skipped']) + if skipped: + msg += " (skipped=%d)" % skipped + self.logger.info(msg) + + def _getDetailsNotPassed(self, case, type, desc): + found = False + + for (scase, msg) in self._results[type]: + # XXX: When XML reporting is enabled scase is + # xmlrunner.result._TestInfo instance instead of + # string. 
+ if xmlEnabled: + if case.id() == scase.test_id: + found = True + break + scase_str = scase.test_id + else: + if case == scase: + found = True + break + scase_str = str(scase) + + # When fails at module or class level the class name is passed as string + # so figure out to see if match + m = re.search("^setUpModule \((?P.*)\)$", scase_str) + if m: + if case.__class__.__module__ == m.group('module_name'): + found = True + break + + m = re.search("^setUpClass \((?P.*)\)$", scase_str) + if m: + class_name = "%s.%s" % (case.__class__.__module__, + case.__class__.__name__) + + if class_name == m.group('class_name'): + found = True + break + + if found: + return (found, msg) + + return (found, None) + + def logDetails(self): + self.logger.info("RESULTS:") + for case_name in self._registry['cases']: + case = self._registry['cases'][case_name] + + result_types = ['failures', 'errors', 'skipped', 'expectedFailures'] + result_desc = ['FAILED', 'ERROR', 'SKIPPED', 'EXPECTEDFAIL'] + + fail = False + desc = None + for idx, name in enumerate(result_types): + (fail, msg) = self._getDetailsNotPassed(case, result_types[idx], + result_desc[idx]) + if fail: + desc = result_desc[idx] + break + + oeid = -1 + for d in case.decorators: + if hasattr(d, 'oeid'): + oeid = d.oeid + + if fail: + self.logger.info("RESULTS - %s - Testcase %s: %s" % (case.id(), + oeid, desc)) + if msg: + self.logger.info(msg) + else: + self.logger.info("RESULTS - %s - Testcase %s: %s" % (case.id(), + oeid, 'PASSED')) + +class OETestContextExecutor(object): + _context_class = OETestContext + + name = 'core' + help = 'core test component example' + description = 'executes core test suite example' + + default_cases = [os.path.join(os.path.abspath(os.path.dirname(__file__)), + 'cases/example')] + default_test_data = os.path.join(default_cases[0], 'data.json') + default_tests = None + + def register_commands(self, logger, subparsers): + self.parser = subparsers.add_parser(self.name, help=self.help, + description=self.description, group='components') + + self.default_output_log = '%s-results-%s.log' % (self.name, + time.strftime("%Y%m%d%H%M%S")) + self.parser.add_argument('--output-log', action='store', + default=self.default_output_log, + help="results output log, default: %s" % self.default_output_log) + self.parser.add_argument('--run-tests', action='store', + default=self.default_tests, + help="tests to run in [.[.]] format. 
Just works for modules now") + + if self.default_test_data: + self.parser.add_argument('--test-data-file', action='store', + default=self.default_test_data, + help="data file to load, default: %s" % self.default_test_data) + else: + self.parser.add_argument('--test-data-file', action='store', + help="data file to load") + + if self.default_cases: + self.parser.add_argument('CASES_PATHS', action='store', + default=self.default_cases, nargs='*', + help="paths to directories with test cases, default: %s"\ + % self.default_cases) + else: + self.parser.add_argument('CASES_PATHS', action='store', + nargs='+', help="paths to directories with test cases") + + self.parser.set_defaults(func=self.run) + + def _setup_logger(self, logger, args): + formatter = logging.Formatter('%(asctime)s - ' + self.name + \ + ' - %(levelname)s - %(message)s') + sh = logger.handlers[0] + sh.setFormatter(formatter) + fh = logging.FileHandler(args.output_log) + fh.setFormatter(formatter) + logger.addHandler(fh) + + return logger + + def _process_args(self, logger, args): + self.tc_kwargs = {} + self.tc_kwargs['init'] = {} + self.tc_kwargs['load'] = {} + self.tc_kwargs['run'] = {} + + self.tc_kwargs['init']['logger'] = self._setup_logger(logger, args) + if args.test_data_file: + self.tc_kwargs['init']['td'] = json.load( + open(args.test_data_file, "r")) + else: + self.tc_kwargs['init']['td'] = {} + + + if args.run_tests: + self.tc_kwargs['load']['modules'] = args.run_tests.split() + else: + self.tc_kwargs['load']['modules'] = None + + self.module_paths = args.CASES_PATHS + + def run(self, logger, args): + self._process_args(logger, args) + + self.tc = self._context_class(**self.tc_kwargs['init']) + self.tc.loadTests(self.module_paths, **self.tc_kwargs['load']) + rc = self.tc.runTests(**self.tc_kwargs['run']) + self.tc.logSummary(rc, self.name) + self.tc.logDetails() + + output_link = os.path.join(os.path.dirname(args.output_log), + "%s-results.log" % self.name) + if os.path.exists(output_link): + os.remove(output_link) + os.symlink(args.output_log, output_link) + + return rc + +_executor_class = OETestContextExecutor diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/__init__.py new file mode 100644 index 000000000..855b6b9d2 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/__init__.py @@ -0,0 +1,71 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from functools import wraps +from abc import abstractmethod + +decoratorClasses = set() + +def registerDecorator(obj): + decoratorClasses.add(obj) + return obj + +class OETestDecorator(object): + case = None # Reference of OETestCase decorated + attrs = None # Attributes to be loaded by decorator implementation + + def __init__(self, *args, **kwargs): + if not self.attrs: + return + + for idx, attr in enumerate(self.attrs): + if attr in kwargs: + value = kwargs[attr] + else: + value = args[idx] + setattr(self, attr, value) + + def __call__(self, func): + @wraps(func) + def wrapped_f(*args, **kwargs): + self.attrs = self.attrs # XXX: Enables OETestLoader discover + return func(*args, **kwargs) + return wrapped_f + + # OETestLoader call it when is loading test cases. + # XXX: Most methods would change the registry for later + # processing; be aware that filtrate method needs to + # run later than bind, so there could be data (in the + # registry) of a cases that were filtered. 
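# For illustration only: concrete decorators built on this base class normally
# just declare 'attrs' and override setUpDecorator(); skipIfDataVar in
# oeqa/core/decorator/data.py (added later in this change) follows exactly this
# pattern:
#
#     @registerDecorator
#     class skipIfDataVar(OETestDecorator):
#         attrs = ('var', 'value', 'msg')
#         def setUpDecorator(self):
#             if self.case.td.get(self.var) == self.value:
#                 self.case.skipTest(self.msg)
#
# and a test method would use it as (values hypothetical):
#
#     @skipIfDataVar('DISTRO', 'poky-tiny', 'not applicable to poky-tiny')
#     def test_something(self): ...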
+ def bind(self, registry, case): + self.case = case + self.logger = case.tc.logger + self.case.decorators.append(self) + + # OETestRunner call this method when tries to run + # the test case. + def setUpDecorator(self): + pass + + # OETestRunner call it after a test method has been + # called even if the method raised an exception. + def tearDownDecorator(self): + pass + +class OETestDiscover(OETestDecorator): + + # OETestLoader call it after discover test cases + # needs to return the cases to be run. + @staticmethod + def discover(registry): + return registry['cases'] + +class OETestFilter(OETestDecorator): + + # OETestLoader call it while loading the tests + # in loadTestsFromTestCase method, it needs to + # return a bool, True if needs to be filtered. + # This method must consume the filter used. + @abstractmethod + def filtrate(self, filters): + return False diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/data.py b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/data.py new file mode 100644 index 000000000..ff7bdd98b --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/data.py @@ -0,0 +1,98 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.exception import OEQAMissingVariable + +from . import OETestDecorator, registerDecorator + +def has_feature(td, feature): + """ + Checks for feature in DISTRO_FEATURES or IMAGE_FEATURES. + """ + + if (feature in td.get('DISTRO_FEATURES', '') or + feature in td.get('IMAGE_FEATURES', '')): + return True + return False + +@registerDecorator +class skipIfDataVar(OETestDecorator): + """ + Skip test based on value of a data store's variable. + + It will get the info of var from the data store and will + check it against value; if are equal it will skip the test + with msg as the reason. + """ + + attrs = ('var', 'value', 'msg') + + def setUpDecorator(self): + msg = ('Checking if %r value is %r to skip test' % + (self.var, self.value)) + self.logger.debug(msg) + if self.case.td.get(self.var) == self.value: + self.case.skipTest(self.msg) + +@registerDecorator +class skipIfNotDataVar(OETestDecorator): + """ + Skip test based on value of a data store's variable. + + It will get the info of var from the data store and will + check it against value; if are not equal it will skip the + test with msg as the reason. + """ + + attrs = ('var', 'value', 'msg') + + def setUpDecorator(self): + msg = ('Checking if %r value is not %r to skip test' % + (self.var, self.value)) + self.logger.debug(msg) + if not self.case.td.get(self.var) == self.value: + self.case.skipTest(self.msg) + +@registerDecorator +class skipIfNotInDataVar(OETestDecorator): + """ + Skip test if value is not in data store's variable. + """ + + attrs = ('var', 'value', 'msg') + def setUpDecorator(self): + msg = ('Checking if %r value is in %r to run ' + 'the test' % (self.var, self.value)) + self.logger.debug(msg) + if not self.value in self.case.td.get(self.var): + self.case.skipTest(self.msg) + +@registerDecorator +class OETestDataDepends(OETestDecorator): + attrs = ('td_depends',) + + def setUpDecorator(self): + for v in self.td_depends: + try: + value = self.case.td[v] + except KeyError: + raise OEQAMissingVariable("Test case need %s variable but"\ + " isn't into td" % v) + +@registerDecorator +class skipIfNotFeature(OETestDecorator): + """ + Skip test based on DISTRO_FEATURES. + + value must be in distro features or it will skip the test + with msg as the reason. 
+ """ + + attrs = ('value', 'msg') + + def setUpDecorator(self): + msg = ('Checking if %s is in DISTRO_FEATURES ' + 'or IMAGE_FEATURES' % (self.value)) + self.logger.debug(msg) + if not has_feature(self.case.td, self.value): + self.case.skipTest(self.msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/depends.py b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/depends.py new file mode 100644 index 000000000..195711cf1 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/depends.py @@ -0,0 +1,94 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from unittest import SkipTest + +from oeqa.core.exception import OEQADependency + +from . import OETestDiscover, registerDecorator + +def _add_depends(registry, case, depends): + module_name = case.__module__ + class_name = case.__class__.__name__ + + case_id = case.id() + + for depend in depends: + dparts = depend.split('.') + + if len(dparts) == 1: + depend_id = ".".join((module_name, class_name, dparts[0])) + elif len(dparts) == 2: + depend_id = ".".join((module_name, dparts[0], dparts[1])) + else: + depend_id = depend + + if not case_id in registry: + registry[case_id] = [] + if not depend_id in registry[case_id]: + registry[case_id].append(depend_id) + +def _validate_test_case_depends(cases, depends): + for case in depends: + if not case in cases: + continue + for dep in depends[case]: + if not dep in cases: + raise OEQADependency("TestCase %s depends on %s and isn't available"\ + ", cases available %s." % (case, dep, str(cases.keys()))) + +def _order_test_case_by_depends(cases, depends): + def _dep_resolve(graph, node, resolved, seen): + seen.append(node) + for edge in graph[node]: + if edge not in resolved: + if edge in seen: + raise OEQADependency("Test cases %s and %s have a circular" \ + " dependency." % (node, edge)) + _dep_resolve(graph, edge, resolved, seen) + resolved.append(node) + + dep_graph = {} + dep_graph['__root__'] = cases.keys() + for case in cases: + if case in depends: + dep_graph[case] = depends[case] + else: + dep_graph[case] = [] + + cases_ordered = [] + _dep_resolve(dep_graph, '__root__', cases_ordered, []) + cases_ordered.remove('__root__') + + return [cases[case_id] for case_id in cases_ordered] + +def _skipTestDependency(case, depends): + results = case.tc._results + skipReasons = ['errors', 'failures', 'skipped'] + + for reason in skipReasons: + for test, _ in results[reason]: + if test.id() in depends: + raise SkipTest("Test case %s depends on %s and was in %s." 
\ + % (case.id(), test.id(), reason)) + +@registerDecorator +class OETestDepends(OETestDiscover): + attrs = ('depends',) + + def bind(self, registry, case): + super(OETestDepends, self).bind(registry, case) + if not registry.get('depends'): + registry['depends'] = {} + _add_depends(registry['depends'], case, self.depends) + + @staticmethod + def discover(registry): + if registry.get('depends'): + _validate_test_case_depends(registry['cases'], registry['depends']) + return _order_test_case_by_depends(registry['cases'], registry['depends']) + else: + return [registry['cases'][case_id] for case_id in registry['cases']] + + def setUpDecorator(self): + _skipTestDependency(self.case, self.depends) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oeid.py b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oeid.py new file mode 100644 index 000000000..ea8017a55 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oeid.py @@ -0,0 +1,23 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from . import OETestFilter, registerDecorator +from oeqa.core.utils.misc import intToList + +def _idFilter(oeid, filters): + return False if oeid in filters else True + +@registerDecorator +class OETestID(OETestFilter): + attrs = ('oeid',) + + def bind(self, registry, case): + super(OETestID, self).bind(registry, case) + + def filtrate(self, filters): + if filters.get('oeid'): + filterx = intToList(filters['oeid'], 'oeid') + del filters['oeid'] + if _idFilter(self.oeid, filterx): + return True + return False diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oetag.py b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oetag.py new file mode 100644 index 000000000..ad38ab78a --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oetag.py @@ -0,0 +1,24 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from . import OETestFilter, registerDecorator +from oeqa.core.utils.misc import strToList + +def _tagFilter(tags, filters): + return False if set(tags) & set(filters) else True + +@registerDecorator +class OETestTag(OETestFilter): + attrs = ('oetag',) + + def bind(self, registry, case): + super(OETestTag, self).bind(registry, case) + self.oetag = strToList(self.oetag, 'oetag') + + def filtrate(self, filters): + if filters.get('oetag'): + filterx = strToList(filters['oetag'], 'oetag') + del filters['oetag'] + if _tagFilter(self.oetag, filterx): + return True + return False diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oetimeout.py b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oetimeout.py new file mode 100644 index 000000000..a247583f7 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/decorator/oetimeout.py @@ -0,0 +1,25 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import signal +from . 
import OETestDecorator, registerDecorator +from oeqa.core.exception import OEQATimeoutError + +@registerDecorator +class OETimeout(OETestDecorator): + attrs = ('oetimeout',) + + def setUpDecorator(self): + timeout = self.oetimeout + def _timeoutHandler(signum, frame): + raise OEQATimeoutError("Timed out after %s " + "seconds of execution" % timeout) + + self.logger.debug("Setting up a %d second(s) timeout" % self.oetimeout) + self.alarmSignal = signal.signal(signal.SIGALRM, _timeoutHandler) + signal.alarm(self.oetimeout) + + def tearDownDecorator(self): + signal.alarm(0) + signal.signal(signal.SIGALRM, self.alarmSignal) + self.logger.debug("Removed SIGALRM handler") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/exception.py b/import-layers/yocto-poky/meta/lib/oeqa/core/exception.py new file mode 100644 index 000000000..2dfd8402c --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/exception.py @@ -0,0 +1,14 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +class OEQAException(Exception): + pass + +class OEQATimeoutError(OEQAException): + pass + +class OEQAMissingVariable(OEQAException): + pass + +class OEQADependency(OEQAException): + pass diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/loader.py b/import-layers/yocto-poky/meta/lib/oeqa/core/loader.py new file mode 100644 index 000000000..63a170353 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/loader.py @@ -0,0 +1,272 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import sys +import unittest + +from oeqa.core.utils.path import findFile +from oeqa.core.utils.test import getSuiteModules, getCaseID + +from oeqa.core.case import OETestCase +from oeqa.core.decorator import decoratorClasses, OETestDecorator, \ + OETestFilter, OETestDiscover + +def _make_failed_test(classname, methodname, exception, suiteClass): + """ + When loading tests unittest framework stores the exception in a new + class created for be displayed into run(). + + For our purposes will be better to raise the exception in loading + step instead of wait to run the test suite. 
+ """ + raise exception +unittest.loader._make_failed_test = _make_failed_test + +def _find_duplicated_modules(suite, directory): + for module in getSuiteModules(suite): + path = findFile('%s.py' % module, directory) + if path: + raise ImportError("Duplicated %s module found in %s" % (module, path)) + +class OETestLoader(unittest.TestLoader): + caseClass = OETestCase + + kwargs_names = ['testMethodPrefix', 'sortTestMethodUsing', 'suiteClass', + '_top_level_dir'] + + def __init__(self, tc, module_paths, modules, tests, modules_required, + filters, *args, **kwargs): + self.tc = tc + + self.modules = modules + self.tests = tests + self.modules_required = modules_required + + self.filters = filters + self.decorator_filters = [d for d in decoratorClasses if \ + issubclass(d, OETestFilter)] + self._validateFilters(self.filters, self.decorator_filters) + self.used_filters = [d for d in self.decorator_filters + for f in self.filters + if f in d.attrs] + + if isinstance(module_paths, str): + module_paths = [module_paths] + elif not isinstance(module_paths, list): + raise TypeError('module_paths must be a str or a list of str') + self.module_paths = module_paths + + for kwname in self.kwargs_names: + if kwname in kwargs: + setattr(self, kwname, kwargs[kwname]) + + self._patchCaseClass(self.caseClass) + + def _patchCaseClass(self, testCaseClass): + # Adds custom attributes to the OETestCase class + setattr(testCaseClass, 'tc', self.tc) + setattr(testCaseClass, 'td', self.tc.td) + setattr(testCaseClass, 'logger', self.tc.logger) + + def _validateFilters(self, filters, decorator_filters): + # Validate if filter isn't empty + for key,value in filters.items(): + if not value: + raise TypeError("Filter %s specified is empty" % key) + + # Validate unique attributes + attr_filters = [attr for clss in decorator_filters \ + for attr in clss.attrs] + dup_attr = [attr for attr in attr_filters + if attr_filters.count(attr) > 1] + if dup_attr: + raise TypeError('Detected duplicated attribute(s) %s in filter' + ' decorators' % ' ,'.join(dup_attr)) + + # Validate if filter is supported + for f in filters: + if f not in attr_filters: + classes = ', '.join([d.__name__ for d in decorator_filters]) + raise TypeError('Found "%s" filter but not declared in any of ' + '%s decorators' % (f, classes)) + + def _registerTestCase(self, case): + case_id = case.id() + self.tc._registry['cases'][case_id] = case + + def _handleTestCaseDecorators(self, case): + def _handle(obj): + if isinstance(obj, OETestDecorator): + if not obj.__class__ in decoratorClasses: + raise Exception("Decorator %s isn't registered" \ + " in decoratorClasses." % obj.__name__) + obj.bind(self.tc._registry, case) + + def _walk_closure(obj): + if hasattr(obj, '__closure__') and obj.__closure__: + for f in obj.__closure__: + obj = f.cell_contents + _handle(obj) + _walk_closure(obj) + method = getattr(case, case._testMethodName, None) + _walk_closure(method) + + def _filterTest(self, case): + """ + Returns True if test case must be filtered, False otherwise. + """ + if self.filters: + filters = self.filters.copy() + case_decorators = [cd for cd in case.decorators + if cd.__class__ in self.used_filters] + + # Iterate over case decorators to check if needs to be filtered. + for cd in case_decorators: + if cd.filtrate(filters): + return True + + # Case is missing one or more decorators for all the filters + # being used, so filter test case. 
+ if filters: + return True + + return False + + def _getTestCase(self, testCaseClass, tcName): + if not hasattr(testCaseClass, '__oeqa_loader'): + # In order to support data_vars validation + # monkey patch the default setUp/tearDown{Class} to use + # the ones provided by OETestCase + setattr(testCaseClass, 'setUpClassMethod', + getattr(testCaseClass, 'setUpClass')) + setattr(testCaseClass, 'tearDownClassMethod', + getattr(testCaseClass, 'tearDownClass')) + setattr(testCaseClass, 'setUpClass', + testCaseClass._oeSetUpClass) + setattr(testCaseClass, 'tearDownClass', + testCaseClass._oeTearDownClass) + + # In order to support decorators initialization + # monkey patch the default setUp/tearDown to use + # a setUpDecorators/tearDownDecorators that methods + # will call setUp/tearDown original methods. + setattr(testCaseClass, 'setUpMethod', + getattr(testCaseClass, 'setUp')) + setattr(testCaseClass, 'tearDownMethod', + getattr(testCaseClass, 'tearDown')) + setattr(testCaseClass, 'setUp', testCaseClass._oeSetUp) + setattr(testCaseClass, 'tearDown', testCaseClass._oeTearDown) + + setattr(testCaseClass, '__oeqa_loader', True) + + case = testCaseClass(tcName) + setattr(case, 'decorators', []) + + return case + + def loadTestsFromTestCase(self, testCaseClass): + """ + Returns a suite of all tests cases contained in testCaseClass. + """ + if issubclass(testCaseClass, unittest.suite.TestSuite): + raise TypeError("Test cases should not be derived from TestSuite." \ + " Maybe you meant to derive %s from TestCase?" \ + % testCaseClass.__name__) + if not issubclass(testCaseClass, self.caseClass): + raise TypeError("Test %s is not derived from %s" % \ + (testCaseClass.__name__, self.caseClass.__name__)) + + testCaseNames = self.getTestCaseNames(testCaseClass) + if not testCaseNames and hasattr(testCaseClass, 'runTest'): + testCaseNames = ['runTest'] + + suite = [] + for tcName in testCaseNames: + case = self._getTestCase(testCaseClass, tcName) + # Filer by case id + if not (self.tests and not 'all' in self.tests + and not getCaseID(case) in self.tests): + self._handleTestCaseDecorators(case) + + # Filter by decorators + if not self._filterTest(case): + self._registerTestCase(case) + suite.append(case) + + return self.suiteClass(suite) + + def discover(self): + big_suite = self.suiteClass() + for path in self.module_paths: + _find_duplicated_modules(big_suite, path) + suite = super(OETestLoader, self).discover(path, + pattern='*.py', top_level_dir=path) + big_suite.addTests(suite) + + cases = None + discover_classes = [clss for clss in decoratorClasses + if issubclass(clss, OETestDiscover)] + for clss in discover_classes: + cases = clss.discover(self.tc._registry) + + return self.suiteClass(cases) if cases else big_suite + + # XXX After Python 3.5, remove backward compatibility hacks for + # use_load_tests deprecation via *args and **kws. See issue 16662. + if sys.version_info >= (3,5): + def loadTestsFromModule(self, module, *args, pattern=None, **kws): + """ + Returns a suite of all tests cases contained in module. + """ + if module.__name__ in sys.builtin_module_names: + msg = 'Tried to import %s test module but is a built-in' + raise ImportError(msg % module.__name__) + + # Normal test modules are loaded if no modules were specified, + # if module is in the specified module list or if 'all' is in + # module list. + # Underscore modules are loaded only if specified in module list. 
+ load_module = True if not module.__name__.startswith('_') \ + and (not self.modules \ + or module.__name__ in self.modules \ + or 'all' in self.modules) \ + else False + + load_underscore = True if module.__name__.startswith('_') \ + and module.__name__ in self.modules \ + else False + + if load_module or load_underscore: + return super(OETestLoader, self).loadTestsFromModule( + module, *args, pattern=pattern, **kws) + else: + return self.suiteClass() + else: + def loadTestsFromModule(self, module, use_load_tests=True): + """ + Returns a suite of all tests cases contained in module. + """ + if module.__name__ in sys.builtin_module_names: + msg = 'Tried to import %s test module but is a built-in' + raise ImportError(msg % module.__name__) + + # Normal test modules are loaded if no modules were specified, + # if module is in the specified module list or if 'all' is in + # module list. + # Underscore modules are loaded only if specified in module list. + load_module = True if not module.__name__.startswith('_') \ + and (not self.modules \ + or module.__name__ in self.modules \ + or 'all' in self.modules) \ + else False + + load_underscore = True if module.__name__.startswith('_') \ + and module.__name__ in self.modules \ + else False + + if load_module or load_underscore: + return super(OETestLoader, self).loadTestsFromModule( + module, use_load_tests) + else: + return self.suiteClass() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/runner.py b/import-layers/yocto-poky/meta/lib/oeqa/core/runner.py new file mode 100644 index 000000000..44ffecb0c --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/runner.py @@ -0,0 +1,76 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import time +import unittest +import logging + +xmlEnabled = False +try: + import xmlrunner + from xmlrunner.result import _XMLTestResult as _TestResult + from xmlrunner.runner import XMLTestRunner as _TestRunner + xmlEnabled = True +except ImportError: + # use the base runner instead + from unittest import TextTestResult as _TestResult + from unittest import TextTestRunner as _TestRunner + +class OEStreamLogger(object): + def __init__(self, logger): + self.logger = logger + self.buffer = "" + + def write(self, msg): + if len(msg) > 1 and msg[0] != '\n': + self.buffer += msg + else: + self.logger.log(logging.INFO, self.buffer.rstrip("\n")) + self.buffer = "" + + def flush(self): + for handler in self.logger.handlers: + handler.flush() + +class OETestResult(_TestResult): + def __init__(self, tc, *args, **kwargs): + super(OETestResult, self).__init__(*args, **kwargs) + + self.tc = tc + + self.tc._results['failures'] = self.failures + self.tc._results['errors'] = self.errors + self.tc._results['skipped'] = self.skipped + self.tc._results['expectedFailures'] = self.expectedFailures + + def startTest(self, test): + super(OETestResult, self).startTest(test) + +class OETestRunner(_TestRunner): + def __init__(self, tc, *args, **kwargs): + if xmlEnabled: + if not kwargs.get('output'): + kwargs['output'] = os.path.join(os.getcwd(), + 'TestResults_%s_%s' % (time.strftime("%Y%m%d%H%M%S"), os.getpid())) + + super(OETestRunner, self).__init__(*args, **kwargs) + self.tc = tc + self.resultclass = OETestResult + + # XXX: The unittest-xml-reporting package defines _make_result method instead + # of _makeResult standard on unittest. 
+ if xmlEnabled: + def _make_result(self): + """ + Creates a TestResult object which will be used to store + information about the executed tests. + """ + # override in subclasses if necessary. + return self.resultclass(self.tc, + self.stream, self.descriptions, self.verbosity, self.elapsed_times + ) + else: + def _makeResult(self): + return self.resultclass(self.tc, self.stream, self.descriptions, + self.verbosity) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/target/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/core/target/__init__.py new file mode 100644 index 000000000..d2468bc25 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/target/__init__.py @@ -0,0 +1,33 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from abc import abstractmethod + +class OETarget(object): + + def __init__(self, logger, *args, **kwargs): + self.logger = logger + + @abstractmethod + def start(self): + pass + + @abstractmethod + def stop(self): + pass + + @abstractmethod + def run(self, cmd, timeout=None): + pass + + @abstractmethod + def copyTo(self, localSrc, remoteDst): + pass + + @abstractmethod + def copyFrom(self, remoteSrc, localDst): + pass + + @abstractmethod + def copyDirTo(self, localSrc, remoteDst): + pass diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/target/qemu.py b/import-layers/yocto-poky/meta/lib/oeqa/core/target/qemu.py new file mode 100644 index 000000000..2dc521c21 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/target/qemu.py @@ -0,0 +1,45 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import sys +import signal +import time + +from .ssh import OESSHTarget +from oeqa.utils.qemurunner import QemuRunner + +supported_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic', 'elf'] + +class OEQemuTarget(OESSHTarget): + def __init__(self, logger, ip, server_ip, timeout=300, user='root', + port=None, machine='', rootfs='', kernel='', kvm=False, + dump_dir='', dump_host_cmds='', display='', bootlog='', + tmpdir='', dir_image='', boottime=60, **kwargs): + + super(OEQemuTarget, self).__init__(logger, ip, server_ip, timeout, + user, port) + + self.ip = ip + self.server_ip = server_ip + self.machine = machine + self.rootfs = rootfs + self.kernel = kernel + self.kvm = kvm + + self.runner = QemuRunner(machine=machine, rootfs=rootfs, tmpdir=tmpdir, + deploy_dir_image=dir_image, display=display, + logfile=bootlog, boottime=boottime, + use_kvm=kvm, dump_dir=dump_dir, + dump_host_cmds=dump_host_cmds) + + def start(self, params=None, extra_bootparams=None): + if self.runner.start(params, extra_bootparams=extra_bootparams): + self.ip = self.runner.ip + self.server_ip = self.runner.server_ip + else: + self.stop() + raise RuntimeError("FAILED to start qemu - check the task log and the boot log") + + def stop(self): + self.runner.stop() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/target/ssh.py b/import-layers/yocto-poky/meta/lib/oeqa/core/target/ssh.py new file mode 100644 index 000000000..b80939c0e --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/target/ssh.py @@ -0,0 +1,266 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import time +import select +import logging +import subprocess + +from . 
import OETarget + +class OESSHTarget(OETarget): + def __init__(self, logger, ip, server_ip, timeout=300, user='root', + port=None, **kwargs): + if not logger: + logger = logging.getLogger('target') + logger.setLevel(logging.INFO) + filePath = os.path.join(os.getcwd(), 'remoteTarget.log') + fileHandler = logging.FileHandler(filePath, 'w', 'utf-8') + formatter = logging.Formatter( + '%(asctime)s.%(msecs)03d %(levelname)s: %(message)s', + '%H:%M:%S') + fileHandler.setFormatter(formatter) + logger.addHandler(fileHandler) + + super(OESSHTarget, self).__init__(logger) + self.ip = ip + self.server_ip = server_ip + self.timeout = timeout + self.user = user + ssh_options = [ + '-o', 'UserKnownHostsFile=/dev/null', + '-o', 'StrictHostKeyChecking=no', + '-o', 'LogLevel=ERROR' + ] + self.ssh = ['ssh', '-l', self.user ] + ssh_options + self.scp = ['scp'] + ssh_options + if port: + self.ssh = self.ssh + [ '-p', port ] + self.scp = self.scp + [ '-P', port ] + + def start(self, **kwargs): + pass + + def stop(self, **kwargs): + pass + + def _run(self, command, timeout=None, ignore_status=True): + """ + Runs command in target using SSHProcess. + """ + self.logger.debug("[Running]$ %s" % " ".join(command)) + + starttime = time.time() + status, output = SSHCall(command, self.logger, timeout) + self.logger.debug("[Command returned '%d' after %.2f seconds]" + "" % (status, time.time() - starttime)) + + if status and not ignore_status: + raise AssertionError("Command '%s' returned non-zero exit " + "status %d:\n%s" % (command, status, output)) + + return (status, output) + + def run(self, command, timeout=None): + """ + Runs command in target. + + command: Command to run on target. + timeout: : Kill command after seconds. + None: Kill command default value seconds. + 0: No timeout, runs until return. + """ + targetCmd = 'export PATH=/usr/sbin:/sbin:/usr/bin:/bin; %s' % command + sshCmd = self.ssh + [self.ip, targetCmd] + + if timeout: + processTimeout = timeout + elif timeout==0: + processTimeout = None + else: + processTimeout = self.timeout + + status, output = self._run(sshCmd, processTimeout, True) + self.logger.info('\nCommand: %s\nOutput: %s\n' % (command, output)) + return (status, output) + + def copyTo(self, localSrc, remoteDst): + """ + Copy file to target. + + If local file is symlink, recreate symlink in target. + """ + if os.path.islink(localSrc): + link = os.readlink(localSrc) + dstDir, dstBase = os.path.split(remoteDst) + sshCmd = 'cd %s; ln -s %s %s' % (dstDir, link, dstBase) + return self.run(sshCmd) + + else: + remotePath = '%s@%s:%s' % (self.user, self.ip, remoteDst) + scpCmd = self.scp + [localSrc, remotePath] + return self._run(scpCmd, ignore_status=False) + + def copyFrom(self, remoteSrc, localDst): + """ + Copy file from target. + """ + remotePath = '%s@%s:%s' % (self.user, self.ip, remoteSrc) + scpCmd = self.scp + [remotePath, localDst] + return self._run(scpCmd, ignore_status=False) + + def copyDirTo(self, localSrc, remoteDst): + """ + Copy recursively localSrc directory to remoteDst in target. 
+ """ + + for root, dirs, files in os.walk(localSrc): + # Create directories in the target as needed + for d in dirs: + tmpDir = os.path.join(root, d).replace(localSrc, "") + newDir = os.path.join(remoteDst, tmpDir.lstrip("/")) + cmd = "mkdir -p %s" % newDir + self.run(cmd) + + # Copy files into the target + for f in files: + tmpFile = os.path.join(root, f).replace(localSrc, "") + dstFile = os.path.join(remoteDst, tmpFile.lstrip("/")) + srcFile = os.path.join(root, f) + self.copyTo(srcFile, dstFile) + + def deleteFiles(self, remotePath, files): + """ + Deletes files in target's remotePath. + """ + + cmd = "rm" + if not isinstance(files, list): + files = [files] + + for f in files: + cmd = "%s %s" % (cmd, os.path.join(remotePath, f)) + + self.run(cmd) + + + def deleteDir(self, remotePath): + """ + Deletes target's remotePath directory. + """ + + cmd = "rmdir %s" % remotePath + self.run(cmd) + + + def deleteDirStructure(self, localPath, remotePath): + """ + Delete recursively localPath structure directory in target's remotePath. + + This function is very usefult to delete a package that is installed in + the DUT and the host running the test has such package extracted in tmp + directory. + + Example: + pwd: /home/user/tmp + tree: . + └── work + ├── dir1 + │   └── file1 + └── dir2 + + localpath = "/home/user/tmp" and remotepath = "/home/user" + + With the above variables this function will try to delete the + directory in the DUT in this order: + /home/user/work/dir1/file1 + /home/user/work/dir1 (if dir is empty) + /home/user/work/dir2 (if dir is empty) + /home/user/work (if dir is empty) + """ + + for root, dirs, files in os.walk(localPath, topdown=False): + # Delete files first + tmpDir = os.path.join(root).replace(localPath, "") + remoteDir = os.path.join(remotePath, tmpDir.lstrip("/")) + self.deleteFiles(remoteDir, files) + + # Remove dirs if empty + for d in dirs: + tmpDir = os.path.join(root, d).replace(localPath, "") + remoteDir = os.path.join(remotePath, tmpDir.lstrip("/")) + self.deleteDir(remoteDir) + +def SSHCall(command, logger, timeout=None, **opts): + + def run(): + nonlocal output + nonlocal process + starttime = time.time() + process = subprocess.Popen(command, **options) + if timeout: + endtime = starttime + timeout + eof = False + while time.time() < endtime and not eof: + logger.debug('time: %s, endtime: %s' % (time.time(), endtime)) + try: + if select.select([process.stdout], [], [], 5)[0] != []: + data = os.read(process.stdout.fileno(), 1024) + if not data: + process.stdout.close() + eof = True + else: + data = data.decode("utf-8") + output += data + logger.debug('Partial data from SSH call: %s' % data) + endtime = time.time() + timeout + except InterruptedError: + continue + + # process hasn't returned yet + if not eof: + process.terminate() + time.sleep(5) + try: + process.kill() + except OSError: + pass + endtime = time.time() - starttime + lastline = ("\nProcess killed - no output for %d seconds. Total" + " running time: %d seconds." 
% (timeout, endtime)) + logger.debug('Received data from SSH call %s ' % lastline) + output += lastline + + else: + output = process.communicate()[0].decode("utf-8") + logger.debug('Data from SSH call: %s' % output.rstrip()) + + options = { + "stdout": subprocess.PIPE, + "stderr": subprocess.STDOUT, + "stdin": None, + "shell": False, + "bufsize": -1, + "preexec_fn": os.setsid, + } + options.update(opts) + output = '' + process = None + + # Unset DISPLAY which means we won't trigger SSH_ASKPASS + env = os.environ.copy() + if "DISPLAY" in env: + del env['DISPLAY'] + options['env'] = env + + try: + run() + except: + # Need to guard against a SystemExit or other exception ocurring + # whilst running and ensure we don't leave a process behind. + if process.poll() is None: + process.kill() + logger.debug('Something went wrong, killing SSH process') + raise + return (process.wait(), output.rstrip()) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/data.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/data.py new file mode 100644 index 000000000..88003a6ad --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/data.py @@ -0,0 +1,20 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.case import OETestCase +from oeqa.core.decorator.oetag import OETestTag +from oeqa.core.decorator.data import OETestDataDepends + +class DataTest(OETestCase): + data_vars = ['IMAGE', 'ARCH'] + + @OETestDataDepends(['MACHINE',]) + @OETestTag('dataTestOk') + def testDataOk(self): + self.assertEqual(self.td.get('IMAGE'), 'core-image-minimal') + self.assertEqual(self.td.get('ARCH'), 'x86') + self.assertEqual(self.td.get('MACHINE'), 'qemuarm') + + @OETestTag('dataTestFail') + def testDataFail(self): + pass diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/depends.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/depends.py new file mode 100644 index 000000000..17cdd90b1 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/depends.py @@ -0,0 +1,38 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.case import OETestCase +from oeqa.core.decorator.depends import OETestDepends + +class DependsTest(OETestCase): + + def testDependsFirst(self): + self.assertTrue(True, msg='How is this possible?') + + @OETestDepends(['testDependsFirst']) + def testDependsSecond(self): + self.assertTrue(True, msg='How is this possible?') + + @OETestDepends(['testDependsSecond']) + def testDependsThird(self): + self.assertTrue(True, msg='How is this possible?') + + @OETestDepends(['testDependsSecond']) + def testDependsFourth(self): + self.assertTrue(True, msg='How is this possible?') + + @OETestDepends(['testDependsThird', 'testDependsFourth']) + def testDependsFifth(self): + self.assertTrue(True, msg='How is this possible?') + + @OETestDepends(['testDependsCircular3']) + def testDependsCircular1(self): + self.assertTrue(True, msg='How is this possible?') + + @OETestDepends(['testDependsCircular1']) + def testDependsCircular2(self): + self.assertTrue(True, msg='How is this possible?') + + @OETestDepends(['testDependsCircular2']) + def testDependsCircular3(self): + self.assertTrue(True, msg='How is this possible?') diff --git 
a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/loader/invalid/oeid.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/loader/invalid/oeid.py new file mode 100644 index 000000000..038d44593 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/loader/invalid/oeid.py @@ -0,0 +1,15 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.case import OETestCase + +class AnotherIDTest(OETestCase): + + def testAnotherIdGood(self): + self.assertTrue(True, msg='How is this possible?') + + def testAnotherIdOther(self): + self.assertTrue(True, msg='How is this possible?') + + def testAnotherIdNone(self): + self.assertTrue(True, msg='How is this possible?') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/loader/valid/another.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/loader/valid/another.py new file mode 100644 index 000000000..c9ffd1777 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/loader/valid/another.py @@ -0,0 +1,9 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.case import OETestCase + +class AnotherTest(OETestCase): + + def testAnother(self): + self.assertTrue(True, msg='How is this possible?') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/oeid.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/oeid.py new file mode 100644 index 000000000..c2d3d32f2 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/oeid.py @@ -0,0 +1,18 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.case import OETestCase +from oeqa.core.decorator.oeid import OETestID + +class IDTest(OETestCase): + + @OETestID(101) + def testIdGood(self): + self.assertTrue(True, msg='How is this possible?') + + @OETestID(102) + def testIdOther(self): + self.assertTrue(True, msg='How is this possible?') + + def testIdNone(self): + self.assertTrue(True, msg='How is this possible?') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/oetag.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/oetag.py new file mode 100644 index 000000000..0cae02e75 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/oetag.py @@ -0,0 +1,18 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.case import OETestCase +from oeqa.core.decorator.oetag import OETestTag + +class TagTest(OETestCase): + + @OETestTag('goodTag') + def testTagGood(self): + self.assertTrue(True, msg='How is this possible?') + + @OETestTag('otherTag') + def testTagOther(self): + self.assertTrue(True, msg='How is this possible?') + + def testTagNone(self): + self.assertTrue(True, msg='How is this possible?') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/timeout.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/timeout.py new file mode 100644 index 000000000..870c3157f --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/cases/timeout.py @@ -0,0 +1,18 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from time import sleep + +from oeqa.core.case import OETestCase +from oeqa.core.decorator.oetimeout import OETimeout + +class TimeoutTest(OETestCase): + + @OETimeout(1) + def testTimeoutPass(self): + self.assertTrue(True, msg='How is this 
possible?') + + @OETimeout(1) + def testTimeoutFail(self): + sleep(2) + self.assertTrue(True, msg='How is this possible?') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/common.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/common.py new file mode 100644 index 000000000..52b18a1c3 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/common.py @@ -0,0 +1,35 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import sys +import os + +import unittest +import logging +import os + +logger = logging.getLogger("oeqa") +logger.setLevel(logging.INFO) +consoleHandler = logging.StreamHandler() +formatter = logging.Formatter('OEQATest: %(message)s') +consoleHandler.setFormatter(formatter) +logger.addHandler(consoleHandler) + +def setup_sys_path(): + directory = os.path.dirname(os.path.abspath(__file__)) + oeqa_lib = os.path.realpath(os.path.join(directory, '../../../')) + if not oeqa_lib in sys.path: + sys.path.insert(0, oeqa_lib) + +class TestBase(unittest.TestCase): + def setUp(self): + self.logger = logger + directory = os.path.dirname(os.path.abspath(__file__)) + self.cases_path = os.path.join(directory, 'cases') + + def _testLoader(self, d={}, modules=[], tests=[], filters={}): + from oeqa.core.context import OETestContext + tc = OETestContext(d, self.logger) + tc.loadTests(self.cases_path, modules=modules, tests=tests, + filters=filters) + return tc diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_data.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_data.py new file mode 100755 index 000000000..320468cbe --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_data.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import unittest +import logging +import os + +from common import setup_sys_path, TestBase +setup_sys_path() + +from oeqa.core.exception import OEQAMissingVariable +from oeqa.core.utils.test import getCaseMethod, getSuiteCasesNames + +class TestData(TestBase): + modules = ['data'] + + def test_data_fail_missing_variable(self): + expectedException = "oeqa.core.exception.OEQAMissingVariable" + + tc = self._testLoader(modules=self.modules) + self.assertEqual(False, tc.runTests().wasSuccessful()) + for test, data in tc._results['errors']: + expect = False + if expectedException in data: + expect = True + + self.assertTrue(expect) + + def test_data_fail_wrong_variable(self): + expectedError = 'AssertionError' + d = {'IMAGE' : 'core-image-sato', 'ARCH' : 'arm'} + + tc = self._testLoader(d=d, modules=self.modules) + self.assertEqual(False, tc.runTests().wasSuccessful()) + for test, data in tc._results['failures']: + expect = False + if expectedError in data: + expect = True + + self.assertTrue(expect) + + def test_data_ok(self): + d = {'IMAGE' : 'core-image-minimal', 'ARCH' : 'x86', 'MACHINE' : 'qemuarm'} + + tc = self._testLoader(d=d, modules=self.modules) + self.assertEqual(True, tc.runTests().wasSuccessful()) + +if __name__ == '__main__': + unittest.main() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_decorators.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_decorators.py new file mode 100755 index 000000000..f7d11e885 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_decorators.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see 
COPYING.MIT) + +import signal +import unittest + +from common import setup_sys_path, TestBase +setup_sys_path() + +from oeqa.core.exception import OEQADependency +from oeqa.core.utils.test import getCaseMethod, getSuiteCasesNames, getSuiteCasesIDs + +class TestFilterDecorator(TestBase): + + def _runFilterTest(self, modules, filters, expect, msg): + tc = self._testLoader(modules=modules, filters=filters) + test_loaded = set(getSuiteCasesNames(tc.suites)) + self.assertEqual(expect, test_loaded, msg=msg) + + def test_oetag(self): + # Get all cases without filtering. + filter_all = {} + test_all = {'testTagGood', 'testTagOther', 'testTagNone'} + msg_all = 'Failed to get all oetag cases without filtering.' + + # Get cases with 'goodTag'. + filter_good = {'oetag':'goodTag'} + test_good = {'testTagGood'} + msg_good = 'Failed to get just one test filtering with "goodTag" oetag.' + + # Get cases with an invalid tag. + filter_invalid = {'oetag':'invalidTag'} + test_invalid = set() + msg_invalid = 'Failed to filter all test using an invalid oetag.' + + tests = ((filter_all, test_all, msg_all), + (filter_good, test_good, msg_good), + (filter_invalid, test_invalid, msg_invalid)) + + for test in tests: + self._runFilterTest(['oetag'], test[0], test[1], test[2]) + + def test_oeid(self): + # Get all cases without filtering. + filter_all = {} + test_all = {'testIdGood', 'testIdOther', 'testIdNone'} + msg_all = 'Failed to get all oeid cases without filtering.' + + # Get cases with '101' oeid. + filter_good = {'oeid': 101} + test_good = {'testIdGood'} + msg_good = 'Failed to get just one tes filtering with "101" oeid.' + + # Get cases with an invalid id. + filter_invalid = {'oeid':999} + test_invalid = set() + msg_invalid = 'Failed to filter all test using an invalid oeid.' 
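For orientation, a minimal sketch of how a filter dict such as {'oetag': 'goodTag'} reaches the loader; it mirrors the _testLoader helper defined in common.py elsewhere in this patch, and the cases directory path below is a hypothetical placeholder.

    import logging
    from oeqa.core.context import OETestContext

    logger = logging.getLogger("oeqa")
    tc = OETestContext({}, logger)  # empty test data dict plus a logger
    # Load only the 'oetag' module and keep just the cases tagged 'goodTag'
    tc.loadTests('/path/to/oeqa/core/tests/cases', modules=['oetag'],
                 tests=[], filters={'oetag': 'goodTag'})
    print(tc.runTests().wasSuccessful())
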
+ + tests = ((filter_all, test_all, msg_all), + (filter_good, test_good, msg_good), + (filter_invalid, test_invalid, msg_invalid)) + + for test in tests: + self._runFilterTest(['oeid'], test[0], test[1], test[2]) + +class TestDependsDecorator(TestBase): + modules = ['depends'] + + def test_depends_order(self): + tests = ['depends.DependsTest.testDependsFirst', + 'depends.DependsTest.testDependsSecond', + 'depends.DependsTest.testDependsThird', + 'depends.DependsTest.testDependsFourth', + 'depends.DependsTest.testDependsFifth'] + tests2 = list(tests) + tests2[2], tests2[3] = tests[3], tests[2] + tc = self._testLoader(modules=self.modules, tests=tests) + test_loaded = getSuiteCasesIDs(tc.suites) + result = True if test_loaded == tests or test_loaded == tests2 else False + msg = 'Failed to order tests using OETestDepends decorator.\nTest order:'\ + ' %s.\nExpected: %s\nOr: %s' % (test_loaded, tests, tests2) + self.assertTrue(result, msg=msg) + + def test_depends_fail_missing_dependency(self): + expect = "TestCase depends.DependsTest.testDependsSecond depends on "\ + "depends.DependsTest.testDependsFirst and isn't available" + tests = ['depends.DependsTest.testDependsSecond'] + try: + # Must throw OEQADependency because missing 'testDependsFirst' + tc = self._testLoader(modules=self.modules, tests=tests) + self.fail('Expected OEQADependency exception') + except OEQADependency as e: + result = True if expect in str(e) else False + msg = 'Expected OEQADependency exception missing testDependsFirst test' + self.assertTrue(result, msg=msg) + + def test_depends_fail_circular_dependency(self): + expect = 'have a circular dependency' + tests = ['depends.DependsTest.testDependsCircular1', + 'depends.DependsTest.testDependsCircular2', + 'depends.DependsTest.testDependsCircular3'] + try: + # Must throw OEQADependency because circular dependency + tc = self._testLoader(modules=self.modules, tests=tests) + self.fail('Expected OEQADependency exception') + except OEQADependency as e: + result = True if expect in str(e) else False + msg = 'Expected OEQADependency exception having a circular dependency' + self.assertTrue(result, msg=msg) + +class TestTimeoutDecorator(TestBase): + modules = ['timeout'] + + def test_timeout(self): + tests = ['timeout.TimeoutTest.testTimeoutPass'] + msg = 'Failed to run test using OETestTimeout' + alarm_signal = signal.getsignal(signal.SIGALRM) + tc = self._testLoader(modules=self.modules, tests=tests) + self.assertTrue(tc.runTests().wasSuccessful(), msg=msg) + msg = "OETestTimeout didn't restore SIGALRM" + self.assertIs(alarm_signal, signal.getsignal(signal.SIGALRM), msg=msg) + + def test_timeout_fail(self): + tests = ['timeout.TimeoutTest.testTimeoutFail'] + msg = "OETestTimeout test didn't timeout as expected" + alarm_signal = signal.getsignal(signal.SIGALRM) + tc = self._testLoader(modules=self.modules, tests=tests) + self.assertFalse(tc.runTests().wasSuccessful(), msg=msg) + msg = "OETestTimeout didn't restore SIGALRM" + self.assertIs(alarm_signal, signal.getsignal(signal.SIGALRM), msg=msg) + +if __name__ == '__main__': + unittest.main() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_loader.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_loader.py new file mode 100755 index 000000000..b79b8bad4 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_loader.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import unittest + 
+from common import setup_sys_path, TestBase +setup_sys_path() + +from oeqa.core.exception import OEQADependency +from oeqa.core.utils.test import getSuiteModules, getSuiteCasesIDs + +class TestLoader(TestBase): + + def test_fail_empty_filter(self): + filters = {'oetag' : ''} + expect = 'Filter oetag specified is empty' + msg = 'Expected TypeError exception for having invalid filter' + try: + # Must throw TypeError because empty filter + tc = self._testLoader(filters=filters) + self.fail(msg) + except TypeError as e: + result = True if expect in str(e) else False + self.assertTrue(result, msg=msg) + + def test_fail_invalid_filter(self): + filters = {'invalid' : 'good'} + expect = 'filter but not declared in any of' + msg = 'Expected TypeError exception for having invalid filter' + try: + # Must throw TypeError because invalid filter + tc = self._testLoader(filters=filters) + self.fail(msg) + except TypeError as e: + result = True if expect in str(e) else False + self.assertTrue(result, msg=msg) + + def test_fail_duplicated_module(self): + cases_path = self.cases_path + invalid_path = os.path.join(cases_path, 'loader', 'invalid') + self.cases_path = [self.cases_path, invalid_path] + expect = 'Duplicated oeid module found in' + msg = 'Expected ImportError exception for having duplicated module' + try: + # Must throw ImportEror because duplicated module + tc = self._testLoader() + self.fail(msg) + except ImportError as e: + result = True if expect in str(e) else False + self.assertTrue(result, msg=msg) + finally: + self.cases_path = cases_path + + def test_filter_modules(self): + expected_modules = {'oeid', 'oetag'} + tc = self._testLoader(modules=expected_modules) + modules = getSuiteModules(tc.suites) + msg = 'Expected just %s modules' % ', '.join(expected_modules) + self.assertEqual(modules, expected_modules, msg=msg) + + def test_filter_cases(self): + modules = ['oeid', 'oetag', 'data'] + expected_cases = {'data.DataTest.testDataOk', + 'oetag.TagTest.testTagGood', + 'oeid.IDTest.testIdGood'} + tc = self._testLoader(modules=modules, tests=expected_cases) + cases = set(getSuiteCasesIDs(tc.suites)) + msg = 'Expected just %s cases' % ', '.join(expected_cases) + self.assertEqual(cases, expected_cases, msg=msg) + + def test_import_from_paths(self): + cases_path = self.cases_path + cases2_path = os.path.join(cases_path, 'loader', 'valid') + expected_modules = {'oeid', 'another'} + self.cases_path = [self.cases_path, cases2_path] + tc = self._testLoader(modules=expected_modules) + modules = getSuiteModules(tc.suites) + self.cases_path = cases_path + msg = 'Expected modules from two different paths' + self.assertEqual(modules, expected_modules, msg=msg) + +if __name__ == '__main__': + unittest.main() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_runner.py b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_runner.py new file mode 100755 index 000000000..a3f3861fe --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/tests/test_runner.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python3 + +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import unittest +import logging +import tempfile + +from common import setup_sys_path, TestBase +setup_sys_path() + +from oeqa.core.runner import OEStreamLogger + +class TestRunner(TestBase): + def test_stream_logger(self): + fp = tempfile.TemporaryFile(mode='w+') + + logging.basicConfig(format='%(message)s', stream=fp) + logger = logging.getLogger() + logger.setLevel(logging.INFO) + + oeSL = 
OEStreamLogger(logger) + + lines = ['init', 'bigline_' * 65535, 'morebigline_' * 65535 * 4, 'end'] + for line in lines: + oeSL.write(line) + + fp.seek(0) + fp_lines = fp.readlines() + for i, fp_line in enumerate(fp_lines): + fp_line = fp_line.strip() + self.assertEqual(lines[i], fp_line) + + fp.close() + +if __name__ == '__main__': + unittest.main() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/utils/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/core/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/utils/misc.py b/import-layers/yocto-poky/meta/lib/oeqa/core/utils/misc.py new file mode 100644 index 000000000..0b223b5d0 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/utils/misc.py @@ -0,0 +1,44 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +def toList(obj, obj_type, obj_name="Object"): + if isinstance(obj, obj_type): + return [obj] + elif isinstance(obj, list): + return obj + else: + raise TypeError("%s must be %s or list" % (obj_name, obj_type)) + +def toSet(obj, obj_type, obj_name="Object"): + if isinstance(obj, obj_type): + return {obj} + elif isinstance(obj, list): + return set(obj) + elif isinstance(obj, set): + return obj + else: + raise TypeError("%s must be %s or set" % (obj_name, obj_type)) + +def strToList(obj, obj_name="Object"): + return toList(obj, str, obj_name) + +def strToSet(obj, obj_name="Object"): + return toSet(obj, str, obj_name) + +def intToList(obj, obj_name="Object"): + return toList(obj, int, obj_name) + +def dataStoteToDict(d, variables): + data = {} + + for v in variables: + data[v] = d.getVar(v) + + return data + +def updateTestData(d, td, variables): + """ + Updates variables with values of data store to test data. + """ + for var in variables: + td[var] = d.getVar(var) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/utils/path.py b/import-layers/yocto-poky/meta/lib/oeqa/core/utils/path.py new file mode 100644 index 000000000..a21caad5c --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/utils/path.py @@ -0,0 +1,19 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import sys + +def findFile(file_name, directory): + """ + Search for a file in directory and returns its complete path. + """ + for r, d, f in os.walk(directory): + if file_name in f: + return os.path.join(r, file_name) + return None + +def remove_safe(path): + if os.path.exists(path): + os.remove(path) + diff --git a/import-layers/yocto-poky/meta/lib/oeqa/core/utils/test.py b/import-layers/yocto-poky/meta/lib/oeqa/core/utils/test.py new file mode 100644 index 000000000..88d5d1398 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/core/utils/test.py @@ -0,0 +1,86 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import inspect +import unittest + +def getSuiteCases(suite): + """ + Returns individual test from a test suite. + """ + tests = [] + + if isinstance(suite, unittest.TestCase): + tests.append(suite) + elif isinstance(suite, unittest.suite.TestSuite): + for item in suite: + tests.extend(getSuiteCases(item)) + + return tests + +def getSuiteModules(suite): + """ + Returns modules in a test suite. + """ + modules = set() + for test in getSuiteCases(suite): + modules.add(getCaseModule(test)) + return modules + +def getSuiteCasesInfo(suite, func): + """ + Returns test case info from suite. 
Info is fetched from func. + """ + tests = [] + for test in getSuiteCases(suite): + tests.append(func(test)) + return tests + +def getSuiteCasesNames(suite): + """ + Returns test case names from suite. + """ + return getSuiteCasesInfo(suite, getCaseMethod) + +def getSuiteCasesIDs(suite): + """ + Returns test case ids from suite. + """ + return getSuiteCasesInfo(suite, getCaseID) + +def getSuiteCasesFiles(suite): + """ + Returns test case files paths from suite. + """ + return getSuiteCasesInfo(suite, getCaseFile) + +def getCaseModule(test_case): + """ + Returns test case module name. + """ + return test_case.__module__ + +def getCaseClass(test_case): + """ + Returns test case class name. + """ + return test_case.__class__.__name__ + +def getCaseID(test_case): + """ + Returns test case complete id. + """ + return test_case.id() + +def getCaseFile(test_case): + """ + Returns test case file path. + """ + return inspect.getsourcefile(test_case.__class__) + +def getCaseMethod(test_case): + """ + Returns test case method name. + """ + return getCaseID(test_case).split('.')[-1] diff --git a/import-layers/yocto-poky/meta/lib/oeqa/files/test.c b/import-layers/yocto-poky/meta/lib/oeqa/files/test.c new file mode 100644 index 000000000..2d8389c92 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/files/test.c @@ -0,0 +1,26 @@ +#include +#include +#include + +double convert(long long l) +{ + return (double)l; +} + +int main(int argc, char * argv[]) { + + long long l = 10; + double f; + double check = 10.0; + + f = convert(l); + printf("convert: %lld => %f\n", l, f); + if ( f != check ) exit(1); + + f = 1234.67; + check = 1234.0; + printf("floorf(%f) = %f\n", f, floorf(f)); + if ( floorf(f) != check) exit(1); + + return 0; +} diff --git a/import-layers/yocto-poky/meta/lib/oeqa/files/test.cpp b/import-layers/yocto-poky/meta/lib/oeqa/files/test.cpp new file mode 100644 index 000000000..9e1a76473 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/files/test.cpp @@ -0,0 +1,3 @@ +#include + +int main() {} \ No newline at end of file diff --git a/import-layers/yocto-poky/meta/lib/oeqa/files/test.pl b/import-layers/yocto-poky/meta/lib/oeqa/files/test.pl new file mode 100644 index 000000000..689c8f163 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/files/test.pl @@ -0,0 +1,2 @@ +$a = 9.01e+21 - 9.01e+21 + 0.01; +print ("the value of a is ", $a, "\n"); diff --git a/import-layers/yocto-poky/meta/lib/oeqa/files/test.py b/import-layers/yocto-poky/meta/lib/oeqa/files/test.py new file mode 100644 index 000000000..f389225d7 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/files/test.py @@ -0,0 +1,6 @@ +import os + +os.system('touch /tmp/testfile.python') + +a = 9.01e+21 - 9.01e+21 + 0.01 +print("the value of a is %s" % a) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/oetest.py b/import-layers/yocto-poky/meta/lib/oeqa/oetest.py index 95d3bf72f..f7171260e 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/oetest.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/oetest.py @@ -27,7 +27,6 @@ try: except ImportError: pass from oeqa.utils.decorators import LogResults, gettag, getResults -from oeqa.utils import avoid_paths_in_environ logger = logging.getLogger("BitBake") @@ -107,7 +106,7 @@ class oeRuntimeTest(oeTest): pass def tearDown(self): - # Unistall packages in the DUT + # Uninstall packages in the DUT self.tc.install_uninstall_packages(self.id(), False) res = getResults() @@ -129,48 +128,6 @@ class oeRuntimeTest(oeTest): def tearDownLocal(self): pass - #TODO: use 
package_manager.py to install packages on any type of image - def install_packages(self, packagelist): - for package in packagelist: - (status, result) = self.target.run("smart install -y "+package) - if status != 0: - return status - -class OETestCalledProcessError(subprocess.CalledProcessError): - def __str__(self): - if hasattr(self, "stderr"): - return "Command '%s' returned non-zero exit status %d with output %s and stderr %s" % (self.cmd, self.returncode, self.output, self.stderr) - else: - return "Command '%s' returned non-zero exit status %d with output %s" % (self.cmd, self.returncode, self.output) - -subprocess.CalledProcessError = OETestCalledProcessError - -class oeSDKTest(oeTest): - def __init__(self, methodName='runTest'): - self.sdktestdir = oeSDKTest.tc.sdktestdir - super(oeSDKTest, self).__init__(methodName) - - @classmethod - def hasHostPackage(self, pkg): - if re.search(pkg, oeTest.tc.hostpkgmanifest): - return True - return False - - def _run(self, cmd): - return subprocess.check_output(". %s > /dev/null; %s;" % (self.tc.sdkenv, cmd), shell=True, stderr=subprocess.STDOUT).decode("utf-8") - -class oeSDKExtTest(oeSDKTest): - def _run(self, cmd): - # extensible sdk shows a warning if found bitbake in the path - # because can cause contamination, i.e. use devtool from - # poky/scripts instead of eSDK one. - env = os.environ.copy() - paths_to_avoid = ['bitbake/bin', 'poky/scripts'] - env['PATH'] = avoid_paths_in_environ(paths_to_avoid) - - return subprocess.check_output(". %s > /dev/null;"\ - " %s;" % (self.tc.sdkenv, cmd), stderr=subprocess.STDOUT, shell=True, env=env).decode("utf-8") - def getmodule(pos=2): # stack returns a list of tuples containg frame information # First element of the list the is current frame, caller is 1 @@ -221,15 +178,16 @@ class TestContext(object): path = [os.path.dirname(os.path.abspath(__file__))] extrapath = "" else: - path = d.getVar("BBPATH", True).split(':') + path = d.getVar("BBPATH").split(':') extrapath = "lib/oeqa" self.testslist = self._get_tests_list(path, extrapath) self.testsrequired = self._get_test_suites_required() self.filesdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "runtime/files") - self.imagefeatures = d.getVar("IMAGE_FEATURES", True).split() - self.distrofeatures = d.getVar("DISTRO_FEATURES", True).split() + self.corefilesdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "files") + self.imagefeatures = d.getVar("IMAGE_FEATURES").split() + self.distrofeatures = d.getVar("DISTRO_FEATURES").split() # get testcase list from specified file # if path is a relative path, then relative to build/conf/ @@ -406,9 +364,9 @@ class RuntimeTestContext(TestContext): self.target = target self.pkgmanifest = {} - manifest = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), - d.getVar("IMAGE_LINK_NAME", True) + ".manifest") - nomanifest = d.getVar("IMAGE_NO_MANIFEST", True) + manifest = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), + d.getVar("IMAGE_LINK_NAME") + ".manifest") + nomanifest = d.getVar("IMAGE_NO_MANIFEST") if nomanifest is None or nomanifest != "1": try: with open(manifest) as f: @@ -424,19 +382,19 @@ class RuntimeTestContext(TestContext): def _get_test_suites(self): testsuites = [] - manifests = (self.d.getVar("TEST_SUITES_MANIFEST", True) or '').split() + manifests = (self.d.getVar("TEST_SUITES_MANIFEST") or '').split() if manifests: for manifest in manifests: testsuites.extend(self._read_testlist(manifest, - self.d.getVar("TOPDIR", True)).split()) + self.d.getVar("TOPDIR")).split()) else: - 
testsuites = self.d.getVar("TEST_SUITES", True).split() + testsuites = self.d.getVar("TEST_SUITES").split() return testsuites def _get_test_suites_required(self): - return [t for t in self.d.getVar("TEST_SUITES", True).split() if t != "auto"] + return [t for t in self.d.getVar("TEST_SUITES").split() if t != "auto"] def loadTests(self): super(RuntimeTestContext, self).loadTests() @@ -449,10 +407,10 @@ class RuntimeTestContext(TestContext): """ modules = self.getTestModules() - bbpaths = self.d.getVar("BBPATH", True).split(":") + bbpaths = self.d.getVar("BBPATH").split(":") - shutil.rmtree(self.d.getVar("TEST_EXTRACTED_DIR", True)) - shutil.rmtree(self.d.getVar("TEST_PACKAGED_DIR", True)) + shutil.rmtree(self.d.getVar("TEST_EXTRACTED_DIR")) + shutil.rmtree(self.d.getVar("TEST_PACKAGED_DIR")) for module in modules: json_file = self._getJsonFile(module) if json_file: @@ -466,8 +424,8 @@ class RuntimeTestContext(TestContext): import oe.path - extracted_path = self.d.getVar("TEST_EXTRACTED_DIR", True) - packaged_path = self.d.getVar("TEST_PACKAGED_DIR", True) + extracted_path = self.d.getVar("TEST_EXTRACTED_DIR") + packaged_path = self.d.getVar("TEST_PACKAGED_DIR") for key,value in needed_packages.items(): packages = () @@ -548,7 +506,7 @@ class RuntimeTestContext(TestContext): from oeqa.utils.package_manager import get_package_manager - pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR", True), pkg) + pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR"), pkg) pm = get_package_manager(self.d, pkg_path) extract_dir = pm.extract(pkg) shutil.rmtree(pkg_path) @@ -562,8 +520,8 @@ class RuntimeTestContext(TestContext): from oeqa.utils.package_manager import get_package_manager - pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR", True), pkg) - dst_dir = self.d.getVar("TEST_PACKAGED_DIR", True) + pkg_path = os.path.join(self.d.getVar("TEST_INSTALL_TMP_DIR"), pkg) + dst_dir = self.d.getVar("TEST_PACKAGED_DIR") pm = get_package_manager(self.d, pkg_path) pkg_info = pm.package_info(pkg) file_path = pkg_info[pkg]["filepath"] @@ -572,7 +530,7 @@ class RuntimeTestContext(TestContext): def install_uninstall_packages(self, test_id, pkg_dir, install): """ - Check if the test requires a package and Install/Unistall it in the DUT + Check if the test requires a package and Install/Uninstall it in the DUT """ test = test_id.split(".")[4] @@ -585,7 +543,7 @@ class RuntimeTestContext(TestContext): def _install_uninstall_packages(self, needed_packages, pkg_dir, install=True): """ - Install/Unistall packages in the DUT without using a package manager + Install/Uninstall packages in the DUT without using a package manager """ if isinstance(needed_packages, dict): @@ -603,7 +561,7 @@ class RuntimeTestContext(TestContext): if install and extract: self.target.connection.copy_dir_to(src_dir, "/") - # Unistall package + # Uninstall package elif not install and rm: self.target.connection.delete_dir_structure(src_dir, "/") @@ -611,7 +569,7 @@ class ImageTestContext(RuntimeTestContext): def __init__(self, d, target, host_dumper): super(ImageTestContext, self).__init__(d, target) - self.tagexp = d.getVar("TEST_SUITES_TAGS", True) + self.tagexp = d.getVar("TEST_SUITES_TAGS") self.host_dumper = host_dumper @@ -626,10 +584,10 @@ class ImageTestContext(RuntimeTestContext): def install_uninstall_packages(self, test_id, install=True): """ - Check if the test requires a package and Install/Unistall it in the DUT + Check if the test requires a package and Install/Uninstall it in the DUT """ - pkg_dir = 
self.d.getVar("TEST_EXTRACTED_DIR", True) + pkg_dir = self.d.getVar("TEST_EXTRACTED_DIR") super(ImageTestContext, self).install_uninstall_packages(test_id, pkg_dir, install) class ExportTestContext(RuntimeTestContext): @@ -643,80 +601,16 @@ class ExportTestContext(RuntimeTestContext): super(ExportTestContext, self).__init__(d, target, exported) tag = parsedArgs.get("tag", None) - self.tagexp = tag if tag != None else d.getVar("TEST_SUITES_TAGS", True) + self.tagexp = tag if tag != None else d.getVar("TEST_SUITES_TAGS") self.sigterm = None def install_uninstall_packages(self, test_id, install=True): """ - Check if the test requires a package and Install/Unistall it in the DUT + Check if the test requires a package and Install/Uninstall it in the DUT """ export_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - extracted_dir = self.d.getVar("TEST_EXPORT_EXTRACTED_DIR", True) + extracted_dir = self.d.getVar("TEST_EXPORT_EXTRACTED_DIR") pkg_dir = os.path.join(export_dir, extracted_dir) super(ExportTestContext, self).install_uninstall_packages(test_id, pkg_dir, install) - -class SDKTestContext(TestContext): - def __init__(self, d, sdktestdir, sdkenv, tcname, *args): - super(SDKTestContext, self).__init__(d) - - self.sdktestdir = sdktestdir - self.sdkenv = sdkenv - self.tcname = tcname - - if not hasattr(self, 'target_manifest'): - self.target_manifest = d.getVar("SDK_TARGET_MANIFEST", True) - try: - self.pkgmanifest = {} - with open(self.target_manifest) as f: - for line in f: - (pkg, arch, version) = line.strip().split() - self.pkgmanifest[pkg] = (version, arch) - except IOError as e: - bb.fatal("No package manifest file found. Did you build the sdk image?\n%s" % e) - - if not hasattr(self, 'host_manifest'): - self.host_manifest = d.getVar("SDK_HOST_MANIFEST", True) - try: - with open(self.host_manifest) as f: - self.hostpkgmanifest = f.read() - except IOError as e: - bb.fatal("No host package manifest file found. 
Did you build the sdk image?\n%s" % e) - - def _get_test_namespace(self): - return "sdk" - - def _get_test_suites(self): - return (self.d.getVar("TEST_SUITES_SDK", True) or "auto").split() - - def _get_test_suites_required(self): - return [t for t in (self.d.getVar("TEST_SUITES_SDK", True) or \ - "auto").split() if t != "auto"] - -class SDKExtTestContext(SDKTestContext): - def __init__(self, d, sdktestdir, sdkenv, tcname, *args): - self.target_manifest = d.getVar("SDK_EXT_TARGET_MANIFEST", True) - self.host_manifest = d.getVar("SDK_EXT_HOST_MANIFEST", True) - if args: - self.cm = args[0] # Compatibility mode for run SDK tests - else: - self.cm = False - - super(SDKExtTestContext, self).__init__(d, sdktestdir, sdkenv, tcname) - - self.sdkextfilesdir = os.path.join(os.path.dirname(os.path.abspath( - oeqa.sdkext.__file__)), "files") - - def _get_test_namespace(self): - if self.cm: - return "sdk" - else: - return "sdkext" - - def _get_test_suites(self): - return (self.d.getVar("TEST_SUITES_SDK_EXT", True) or "auto").split() - - def _get_test_suites_required(self): - return [t for t in (self.d.getVar("TEST_SUITES_SDK_EXT", True) or \ - "auto").split() if t != "auto"] diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runexported.py b/import-layers/yocto-poky/meta/lib/oeqa/runexported.py index 7e245c412..9cfea0f7a 100755 --- a/import-layers/yocto-poky/meta/lib/oeqa/runexported.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/runexported.py @@ -43,8 +43,8 @@ class FakeTarget(object): self.ip = None self.server_ip = None self.datetime = time.strftime('%Y%m%d%H%M%S',time.gmtime()) - self.testdir = d.getVar("TEST_LOG_DIR", True) - self.pn = d.getVar("PN", True) + self.testdir = d.getVar("TEST_LOG_DIR") + self.pn = d.getVar("PN") def exportStart(self): self.sshlog = os.path.join(self.testdir, "ssh_target_log.%s" % self.datetime) @@ -130,8 +130,8 @@ def extract_sdk(d): """ export_dir = os.path.dirname(os.path.realpath(__file__)) - tools_dir = d.getVar("TEST_EXPORT_SDK_DIR", True) - tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME", True) + tools_dir = d.getVar("TEST_EXPORT_SDK_DIR") + tarball_name = "%s.sh" % d.getVar("TEST_EXPORT_SDK_NAME") tarball_path = os.path.join(export_dir, tools_dir, tarball_name) extract_path = os.path.join(export_dir, "sysroot") if os.path.isfile(tarball_path): diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/_ptest.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/_ptest.py deleted file mode 100644 index 71324d3da..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/_ptest.py +++ /dev/null @@ -1,125 +0,0 @@ -import unittest, os, shutil -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * -from oeqa.utils.logparser import * -from oeqa.utils.httpserver import HTTPService -import bb -import glob -from oe.package_manager import RpmPkgsList -import subprocess - -def setUpModule(): - if not oeRuntimeTest.hasFeature("package-management"): - skipModule("Image doesn't have package management feature") - if not oeRuntimeTest.hasPackage("smartpm"): - skipModule("Image doesn't have smart installed") - if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]: - skipModule("Rpm is not the primary package manager") - -class PtestRunnerTest(oeRuntimeTest): - - # a ptest log parser - def parse_ptest(self, logfile): - parser = Lparser(test_0_pass_regex="^PASS:(.+)", test_0_fail_regex="^FAIL:(.+)", section_0_begin_regex="^BEGIN: .*/(.+)/ptest", section_0_end_regex="^END: .*/(.+)/ptest") - parser.init() 
- result = Result() - - with open(logfile) as f: - for line in f: - result_tuple = parser.parse_line(line) - if not result_tuple: - continue - result_tuple = line_type, category, status, name = parser.parse_line(line) - - if line_type == 'section' and status == 'begin': - current_section = name - continue - - if line_type == 'section' and status == 'end': - current_section = None - continue - - if line_type == 'test' and status == 'pass': - result.store(current_section, name, status) - continue - - if line_type == 'test' and status == 'fail': - result.store(current_section, name, status) - continue - - result.sort_tests() - return result - - @classmethod - def setUpClass(self): - #note the existing channels that are on the board before creating new ones -# self.existingchannels = set() -# (status, result) = oeRuntimeTest.tc.target.run('smart channel --show | grep "\["', 0) -# for x in result.split("\n"): -# self.existingchannels.add(x) - self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR', True), oeRuntimeTest.tc.target.server_ip) - self.repo_server.start() - - @classmethod - def tearDownClass(self): - self.repo_server.stop() - #remove created channels to be able to repeat the tests on same image -# (status, result) = oeRuntimeTest.tc.target.run('smart channel --show | grep "\["', 0) -# for x in result.split("\n"): -# if x not in self.existingchannels: -# oeRuntimeTest.tc.target.run('smart channel --remove '+x[1:-1]+' -y', 0) - - def add_smart_channel(self): - image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE', True) - deploy_url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, image_pkgtype) - pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS', True).replace("-","_").split() - for arch in os.listdir('%s/%s' % (self.repo_server.root_dir, image_pkgtype)): - if arch in pkgarchs: - self.target.run('smart channel -y --add {a} type=rpm-md baseurl={u}/{a}'.format(a=arch, u=deploy_url), 0) - self.target.run('smart update', 0) - - def install_complementary(self, globs=None): - installed_pkgs_file = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True), - "installed_pkgs.txt") - self.pkgs_list = RpmPkgsList(oeRuntimeTest.tc.d, oeRuntimeTest.tc.d.getVar('IMAGE_ROOTFS', True), oeRuntimeTest.tc.d.getVar('arch_var', True), oeRuntimeTest.tc.d.getVar('os_var', True)) - with open(installed_pkgs_file, "w+") as installed_pkgs: - installed_pkgs.write(self.pkgs_list.list("arch")) - - cmd = [bb.utils.which(os.getenv('PATH'), "oe-pkgdata-util"), - "-p", oeRuntimeTest.tc.d.getVar('PKGDATA_DIR', True), "glob", installed_pkgs_file, - globs] - try: - bb.note("Installing complementary packages ...") - complementary_pkgs = subprocess.check_output(cmd, stderr=subprocess.STDOUT) - except subprocess.CalledProcessError as e: - bb.fatal("Could not compute complementary packages list. 
Command " - "'%s' returned %d:\n%s" % - (' '.join(cmd), e.returncode, e.output)) - - return complementary_pkgs.split() - - def setUpLocal(self): - self.ptest_log = os.path.join(oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR",True), "ptest-%s.log" % oeRuntimeTest.tc.d.getVar('DATETIME', True)) - - @skipUnlessPassed('test_ssh') - def test_ptestrunner(self): - self.add_smart_channel() - (runnerstatus, result) = self.target.run('which ptest-runner', 0) - cond = oeRuntimeTest.hasPackage("ptest-runner") and oeRuntimeTest.hasFeature("ptest") and oeRuntimeTest.hasPackageMatch("-ptest") and (runnerstatus != 0) - if cond: - self.install_packages(self.install_complementary("*-ptest")) - self.install_packages(['ptest-runner']) - - (runnerstatus, result) = self.target.run('/usr/bin/ptest-runner > /tmp/ptest.log 2>&1', 0) - #exit code is !=0 even if ptest-runner executes because some ptest tests fail. - self.assertTrue(runnerstatus != 127, msg="Cannot execute ptest-runner!") - self.target.copy_from('/tmp/ptest.log', self.ptest_log) - shutil.copyfile(self.ptest_log, "ptest.log") - - result = self.parse_ptest("ptest.log") - log_results_to_location = "./results" - if os.path.exists(log_results_to_location): - shutil.rmtree(log_results_to_location) - os.makedirs(log_results_to_location) - - result.log_as_files(log_results_to_location, test_status = ['pass','fail']) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/_qemutiny.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/_qemutiny.py deleted file mode 100644 index a3c29f357..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/_qemutiny.py +++ /dev/null @@ -1,9 +0,0 @@ -import unittest -from oeqa.oetest import oeRuntimeTest -from oeqa.utils.qemutinyrunner import * - -class QemuTinyTest(oeRuntimeTest): - - def test_boot_tiny(self): - (status, output) = self.target.run_serial('uname -a') - self.assertTrue("yocto-tiny" in output, msg="Cannot detect poky tiny boot!") \ No newline at end of file diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/buildcvs.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/buildcvs.py deleted file mode 100644 index fe6cbfbcd..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/buildcvs.py +++ /dev/null @@ -1,31 +0,0 @@ -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * -from oeqa.utils.targetbuild import TargetBuildProject - -def setUpModule(): - if not oeRuntimeTest.hasFeature("tools-sdk"): - skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES") - -class BuildCvsTest(oeRuntimeTest): - - @classmethod - def setUpClass(self): - self.project = TargetBuildProject(oeRuntimeTest.tc.target, oeRuntimeTest.tc.d, - "http://ftp.gnu.org/non-gnu/cvs/source/feature/1.12.13/cvs-1.12.13.tar.bz2") - self.project.download_archive() - - @testcase(205) - @skipUnlessPassed("test_ssh") - def test_cvs(self): - self.assertEqual(self.project.run_configure(), 0, - msg="Running configure failed") - - self.assertEqual(self.project.run_make(), 0, - msg="Running make failed") - - self.assertEqual(self.project.run_install(), 0, - msg="Running make install failed") - - @classmethod - def tearDownClass(self): - self.project.clean() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/buildgalculator.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/buildgalculator.py deleted file mode 100644 index 28ba29e5c..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/buildgalculator.py +++ /dev/null @@ -1,23 +0,0 @@ -from oeqa.oetest import oeRuntimeTest, skipModule 
-from oeqa.utils.decorators import * -from oeqa.utils.targetbuild import TargetBuildProject - -def setUpModule(): - if not oeRuntimeTest.hasFeature("tools-sdk"): - skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES") - -class GalculatorTest(oeRuntimeTest): - @skipUnlessPassed("test_ssh") - def test_galculator(self): - try: - project = TargetBuildProject(oeRuntimeTest.tc.target, oeRuntimeTest.tc.d, - "http://galculator.mnim.org/downloads/galculator-2.1.4.tar.bz2") - project.download_archive() - - self.assertEqual(project.run_configure(), 0, - msg="Running configure failed") - - self.assertEqual(project.run_make(), 0, - msg="Running make failed") - finally: - project.clean() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/buildiptables.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/buildiptables.py deleted file mode 100644 index bc75d0a0c..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/buildiptables.py +++ /dev/null @@ -1,31 +0,0 @@ -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * -from oeqa.utils.targetbuild import TargetBuildProject - -def setUpModule(): - if not oeRuntimeTest.hasFeature("tools-sdk"): - skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES") - -class BuildIptablesTest(oeRuntimeTest): - - @classmethod - def setUpClass(self): - self.project = TargetBuildProject(oeRuntimeTest.tc.target, oeRuntimeTest.tc.d, - "http://downloads.yoctoproject.org/mirror/sources/iptables-1.4.13.tar.bz2") - self.project.download_archive() - - @testcase(206) - @skipUnlessPassed("test_ssh") - def test_iptables(self): - self.assertEqual(self.project.run_configure(), 0, - msg="Running configure failed") - - self.assertEqual(self.project.run_make(), 0, - msg="Running make failed") - - self.assertEqual(self.project.run_install(), 0, - msg="Running make install failed") - - @classmethod - def tearDownClass(self): - self.project.clean() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/case.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/case.py new file mode 100644 index 000000000..c1485c986 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/case.py @@ -0,0 +1,17 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.case import OETestCase +from oeqa.utils.package_manager import install_package, uninstall_package + +class OERuntimeTestCase(OETestCase): + # target instance set by OERuntimeTestLoader. 
+ target = None + + def _oeSetUp(self): + super(OERuntimeTestCase, self)._oeSetUp() + install_package(self) + + def _oeTearDown(self): + super(OERuntimeTestCase, self)._oeTearDown() + uninstall_package(self) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/_ptest.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/_ptest.py new file mode 100644 index 000000000..aaed9a535 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/_ptest.py @@ -0,0 +1,103 @@ +import os +import shutil +import subprocess + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotDataVar, skipIfNotFeature +from oeqa.runtime.decorator.package import OEHasPackage + +from oeqa.runtime.cases.dnf import DnfTest +from oeqa.utils.logparser import * +from oeqa.utils.httpserver import HTTPService + +class PtestRunnerTest(DnfTest): + + @classmethod + def setUpClass(cls): + rpm_deploy = os.path.join(cls.tc.td['DEPLOY_DIR'], 'rpm') + cls.repo_server = HTTPService(rpm_deploy, cls.tc.target.server_ip) + cls.repo_server.start() + + @classmethod + def tearDownClass(cls): + cls.repo_server.stop() + + # a ptest log parser + def parse_ptest(self, logfile): + parser = Lparser(test_0_pass_regex="^PASS:(.+)", + test_0_fail_regex="^FAIL:(.+)", + section_0_begin_regex="^BEGIN: .*/(.+)/ptest", + section_0_end_regex="^END: .*/(.+)/ptest") + parser.init() + result = Result() + + with open(logfile, errors='replace') as f: + for line in f: + result_tuple = parser.parse_line(line) + if not result_tuple: + continue + result_tuple = line_type, category, status, name = parser.parse_line(line) + + if line_type == 'section' and status == 'begin': + current_section = name + continue + + if line_type == 'section' and status == 'end': + current_section = None + continue + + if line_type == 'test' and status == 'pass': + result.store(current_section, name, status) + continue + + if line_type == 'test' and status == 'fail': + result.store(current_section, name, status) + continue + + result.sort_tests() + return result + + def _install_ptest_packages(self): + # Get ptest packages that can be installed in the image. + packages_dir = os.path.join(self.tc.td['DEPLOY_DIR'], 'rpm') + ptest_pkgs = [pkg[:pkg.find('-ptest')+6] + for _, _, filenames in os.walk(packages_dir) + for pkg in filenames + if 'ptest' in pkg + and pkg[:pkg.find('-ptest')] in self.tc.image_packages] + + repo_url = 'http://%s:%s' % (self.target.server_ip, + self.repo_server.port) + dnf_options = ('--repofrompath=oe-ptest-repo,%s ' + '--nogpgcheck ' + 'install -y' % repo_url) + self.dnf('%s %s ptest-runner' % (dnf_options, ' '.join(ptest_pkgs))) + + @skipIfNotFeature('package-management', + 'Test requires package-management to be in DISTRO_FEATURES') + @skipIfNotFeature('ptest', + 'Test requires ptest to be in DISTRO_FEATURES') + @skipIfNotDataVar('IMAGE_PKGTYPE', 'rpm', + 'RPM is not the primary package manager') + @OEHasPackage(['dnf']) + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_ptestrunner(self): + self.ptest_log = os.path.join(self.tc.td['TEST_LOG_DIR'], + 'ptest-%s.log' % self.tc.td['DATETIME']) + self._install_ptest_packages() + + (runnerstatus, result) = self.target.run('/usr/bin/ptest-runner > /tmp/ptest.log 2>&1', 0) + #exit code is !=0 even if ptest-runner executes because some ptest tests fail.
+ self.assertTrue(runnerstatus != 127, msg="Cannot execute ptest-runner!") + self.target.copyFrom('/tmp/ptest.log', self.ptest_log) + shutil.copyfile(self.ptest_log, "ptest.log") + + result = self.parse_ptest("ptest.log") + log_results_to_location = "./results" + if os.path.exists(log_results_to_location): + shutil.rmtree(log_results_to_location) + os.makedirs(log_results_to_location) + + result.log_as_files(log_results_to_location, test_status = ['pass','fail']) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/_qemutiny.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/_qemutiny.py new file mode 100644 index 000000000..7b5b48141 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/_qemutiny.py @@ -0,0 +1,8 @@ +from oeqa.runtime.case import OERuntimeTestCase + +class QemuTinyTest(OERuntimeTestCase): + + def test_boot_tiny(self): + status, output = self.target.run_serial('uname -a') + msg = "Cannot detect poky tiny boot!" + self.assertTrue("yocto-tiny" in output, msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildcpio.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildcpio.py new file mode 100644 index 000000000..59edc9c2c --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildcpio.py @@ -0,0 +1,30 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotFeature + +from oeqa.runtime.utils.targetbuildproject import TargetBuildProject + +class BuildCpioTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + uri = 'https://ftp.gnu.org/gnu/cpio' + uri = '%s/cpio-2.12.tar.bz2' % uri + cls.project = TargetBuildProject(cls.tc.target, + uri, + dl_dir = cls.tc.td['DL_DIR']) + cls.project.download_archive() + + @classmethod + def tearDownClass(cls): + cls.project.clean() + + @OETestID(205) + @skipIfNotFeature('tools-sdk', + 'Test requires tools-sdk to be in IMAGE_FEATURES') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_cpio(self): + self.project.run_configure() + self.project.run_make() + self.project.run_install() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildgalculator.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildgalculator.py new file mode 100644 index 000000000..7c9d4a392 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildgalculator.py @@ -0,0 +1,28 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotFeature + +from oeqa.runtime.utils.targetbuildproject import TargetBuildProject + +class GalculatorTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + uri = 'http://galculator.mnim.org/downloads/galculator-2.1.4.tar.bz2' + cls.project = TargetBuildProject(cls.tc.target, + uri, + dl_dir = cls.tc.td['DL_DIR']) + cls.project.download_archive() + + @classmethod + def tearDownClass(cls): + cls.project.clean() + + @OETestID(1526) + @skipIfNotFeature('tools-sdk', + 'Test requires tools-sdk to be in IMAGE_FEATURES') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_galculator(self): + self.project.run_configure() + self.project.run_make() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildlzip.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildlzip.py new file mode 100644 index 
000000000..ca3fead2e --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/buildlzip.py @@ -0,0 +1,34 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotFeature + +from oeqa.runtime.utils.targetbuildproject import TargetBuildProject + +class BuildLzipTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + uri = 'http://downloads.yoctoproject.org/mirror/sources' + uri = '%s/lzip-1.19.tar.gz' % uri + cls.project = TargetBuildProject(cls.tc.target, + uri, + dl_dir = cls.tc.td['DL_DIR']) + cls.project.download_archive() + + @classmethod + def tearDownClass(cls): + cls.project.clean() + + @OETestID(206) + @skipIfNotFeature('tools-sdk', + 'Test requires tools-sdk to be in IMAGE_FEATURES') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_lzip(self): + self.project.run_configure() + self.project.run_make() + self.project.run_install() + + @classmethod + def tearDownClass(self): + self.project.clean() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/connman.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/connman.py new file mode 100644 index 000000000..12456b417 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/connman.py @@ -0,0 +1,30 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.runtime.decorator.package import OEHasPackage + +class ConnmanTest(OERuntimeTestCase): + + def service_status(self, service): + if 'systemd' in self.tc.td['DISTRO_FEATURES']: + (_, output) = self.target.run('systemctl status -l %s' % service) + return output + else: + return "Unable to get status or logs for %s" % service + + @OETestID(961) + @OETestDepends(['ssh.SSHTest.test_ssh']) + @OEHasPackage(["connman"]) + def test_connmand_help(self): + (status, output) = self.target.run('/usr/sbin/connmand --help') + msg = 'Failed to get connman help. 
Output: %s' % output + self.assertEqual(status, 0, msg=msg) + + @OETestID(221) + @OETestDepends(['connman.ConnmanTest.test_connmand_help']) + def test_connmand_running(self): + cmd = '%s | grep [c]onnmand' % self.tc.target_cmds['ps'] + (status, output) = self.target.run(cmd) + if status != 0: + self.logger.info(self.service_status("connman")) + self.fail("No connmand process running") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/date.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/date.py new file mode 100644 index 000000000..ece7338de --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/date.py @@ -0,0 +1,38 @@ +import re + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID + +class DateTest(OERuntimeTestCase): + + def setUp(self): + if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': + self.logger.debug('Stopping systemd-timesyncd daemon') + self.target.run('systemctl stop systemd-timesyncd') + + def tearDown(self): + if self.tc.td.get('VIRTUAL-RUNTIME_init_manager') == 'systemd': + self.logger.debug('Starting systemd-timesyncd daemon') + self.target.run('systemctl start systemd-timesyncd') + + @OETestID(211) + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_date(self): + (status, output) = self.target.run('date +"%Y-%m-%d %T"') + msg = 'Failed to get initial date, output: %s' % output + self.assertEqual(status, 0, msg=msg) + oldDate = output + + sampleDate = '"2016-08-09 10:00:00"' + (status, output) = self.target.run("date -s %s" % sampleDate) + self.assertEqual(status, 0, msg='Date set failed, output: %s' % output) + + (status, output) = self.target.run("date -R") + p = re.match('Tue, 09 Aug 2016 10:00:.. \+0000', output) + msg = 'The date was not set correctly, output: %s' % output + self.assertTrue(p, msg=msg) + + (status, output) = self.target.run('date -s "%s"' % oldDate) + msg = 'Failed to reset date, output: %s' % output + self.assertEqual(status, 0, msg=msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/df.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/df.py new file mode 100644 index 000000000..aecc32d7c --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/df.py @@ -0,0 +1,13 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID + +class DfTest(OERuntimeTestCase): + + @OETestID(234) + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_df(self): + cmd = "df / | sed -n '2p' | awk '{print $4}'" + (status,output) = self.target.run(cmd) + msg = 'Not enough space on image. 
Current size is %s' % output + self.assertTrue(int(output)>5120, msg=msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/dnf.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/dnf.py new file mode 100644 index 000000000..2f87296b4 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/dnf.py @@ -0,0 +1,123 @@ +import os +import re +import subprocess +from oeqa.utils.httpserver import HTTPService + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotDataVar, skipIfNotFeature +from oeqa.runtime.decorator.package import OEHasPackage + +class DnfTest(OERuntimeTestCase): + + def dnf(self, command, expected = 0): + command = 'dnf %s' % command + status, output = self.target.run(command, 1500) + message = os.linesep.join([command, output]) + self.assertEqual(status, expected, message) + return output + +class DnfBasicTest(DnfTest): + + @skipIfNotFeature('package-management', + 'Test requires package-management to be in IMAGE_FEATURES') + @skipIfNotDataVar('IMAGE_PKGTYPE', 'rpm', + 'RPM is not the primary package manager') + @OEHasPackage(['dnf']) + @OETestDepends(['ssh.SSHTest.test_ssh']) + @OETestID(1735) + def test_dnf_help(self): + self.dnf('--help') + + @OETestDepends(['dnf.DnfBasicTest.test_dnf_help']) + @OETestID(1739) + def test_dnf_version(self): + self.dnf('--version') + + @OETestDepends(['dnf.DnfBasicTest.test_dnf_help']) + @OETestID(1737) + def test_dnf_info(self): + self.dnf('info dnf') + + @OETestDepends(['dnf.DnfBasicTest.test_dnf_help']) + @OETestID(1738) + def test_dnf_search(self): + self.dnf('search dnf') + + @OETestDepends(['dnf.DnfBasicTest.test_dnf_help']) + @OETestID(1736) + def test_dnf_history(self): + self.dnf('history') + +class DnfRepoTest(DnfTest): + + @classmethod + def setUpClass(cls): + cls.repo_server = HTTPService(os.path.join(cls.tc.td['WORKDIR'], 'oe-testimage-repo'), + cls.tc.target.server_ip) + cls.repo_server.start() + + @classmethod + def tearDownClass(cls): + cls.repo_server.stop() + + def dnf_with_repo(self, command): + pkgarchs = os.listdir(os.path.join(self.tc.td['WORKDIR'], 'oe-testimage-repo')) + deploy_url = 'http://%s:%s/' %(self.target.server_ip, self.repo_server.port) + cmdlinerepoopts = ["--repofrompath=oe-testimage-repo-%s,%s%s" %(arch, deploy_url, arch) for arch in pkgarchs] + + self.dnf(" ".join(cmdlinerepoopts) + " --nogpgcheck " + command) + + @OETestDepends(['dnf.DnfBasicTest.test_dnf_help']) + @OETestID(1744) + def test_dnf_makecache(self): + self.dnf_with_repo('makecache') + + +# Does not work when repo is specified on the command line +# @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) +# def test_dnf_repolist(self): +# self.dnf_with_repo('repolist') + + @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) + @OETestID(1746) + def test_dnf_repoinfo(self): + self.dnf_with_repo('repoinfo') + + @OETestDepends(['dnf.DnfRepoTest.test_dnf_makecache']) + @OETestID(1740) + def test_dnf_install(self): + self.dnf_with_repo('install -y run-postinsts-dev') + + @OETestDepends(['dnf.DnfRepoTest.test_dnf_install']) + @OETestID(1741) + def test_dnf_install_dependency(self): + self.dnf_with_repo('remove -y run-postinsts') + self.dnf_with_repo('install -y run-postinsts-dev') + + @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_dependency']) + @OETestID(1742) + def test_dnf_install_from_disk(self): + self.dnf_with_repo('remove -y run-postinsts-dev') + 
self.dnf_with_repo('install -y --downloadonly run-postinsts-dev') + status, output = self.target.run('find /var/cache/dnf -name run-postinsts-dev*rpm', 1500) + self.assertEqual(status, 0, output) + self.dnf_with_repo('install -y %s' % output) + + @OETestDepends(['dnf.DnfRepoTest.test_dnf_install_from_disk']) + @OETestID(1743) + def test_dnf_install_from_http(self): + output = subprocess.check_output('%s %s -name run-postinsts-dev*' % (bb.utils.which(os.getenv('PATH'), "find"), + os.path.join(self.tc.td['WORKDIR'], 'oe-testimage-repo')), shell=True).decode("utf-8") + rpm_path = output.split("/")[-2] + "/" + output.split("/")[-1] + url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, rpm_path) + self.dnf_with_repo('remove -y run-postinsts-dev') + self.dnf_with_repo('install -y %s' % url) + + @OETestDepends(['dnf.DnfRepoTest.test_dnf_install']) + @OETestID(1745) + def test_dnf_reinstall(self): + self.dnf_with_repo('reinstall -y run-postinsts-dev') + + diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/gcc.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/gcc.py new file mode 100644 index 000000000..911083156 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/gcc.py @@ -0,0 +1,73 @@ +import os + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotFeature + +class GccCompileTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + dst = '/tmp/' + src = os.path.join(cls.tc.files_dir, 'test.c') + cls.tc.target.copyTo(src, dst) + + src = os.path.join(cls.tc.runtime_files_dir, 'testmakefile') + cls.tc.target.copyTo(src, dst) + + src = os.path.join(cls.tc.files_dir, 'test.cpp') + cls.tc.target.copyTo(src, dst) + + @classmethod + def tearDownClass(cls): + files = '/tmp/test.c /tmp/test.o /tmp/test /tmp/testmakefile' + cls.tc.target.run('rm %s' % files) + + @OETestID(203) + @skipIfNotFeature('tools-sdk', + 'Test requires tools-sdk to be in IMAGE_FEATURES') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_gcc_compile(self): + status, output = self.target.run('gcc /tmp/test.c -o /tmp/test -lm') + msg = 'gcc compile failed, output: %s' % output + self.assertEqual(status, 0, msg=msg) + + status, output = self.target.run('/tmp/test') + msg = 'running compiled file failed, output: %s' % output + self.assertEqual(status, 0, msg=msg) + + @OETestID(200) + @skipIfNotFeature('tools-sdk', + 'Test requires tools-sdk to be in IMAGE_FEATURES') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_gpp_compile(self): + status, output = self.target.run('g++ /tmp/test.c -o /tmp/test -lm') + msg = 'g++ compile failed, output: %s' % output + self.assertEqual(status, 0, msg=msg) + + status, output = self.target.run('/tmp/test') + msg = 'running compiled file failed, output: %s' % output + self.assertEqual(status, 0, msg=msg) + + @OETestID(1142) + @skipIfNotFeature('tools-sdk', + 'Test requires tools-sdk to be in IMAGE_FEATURES') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_gpp2_compile(self): + status, output = self.target.run('g++ /tmp/test.cpp -o /tmp/test -lm') + msg = 'g++ compile failed, output: %s' % output + self.assertEqual(status, 0, msg=msg) + + status, output = self.target.run('/tmp/test') + msg = 'running compiled file failed, output: %s' % output + self.assertEqual(status, 0, msg=msg) + + @OETestID(204) + @skipIfNotFeature('tools-sdk', + 'Test requires tools-sdk to be in 
IMAGE_FEATURES') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_make(self): + status, output = self.target.run('cd /tmp; make -f testmakefile') + msg = 'running make failed, output %s' % output + self.assertEqual(status, 0, msg=msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/kernelmodule.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/kernelmodule.py new file mode 100644 index 000000000..11ad7b7f0 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/kernelmodule.py @@ -0,0 +1,40 @@ +import os + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotFeature + +class KernelModuleTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + src = os.path.join(cls.tc.runtime_files_dir, 'hellomod.c') + dst = '/tmp/hellomod.c' + cls.tc.target.copyTo(src, dst) + + src = os.path.join(cls.tc.runtime_files_dir, 'hellomod_makefile') + dst = '/tmp/Makefile' + cls.tc.target.copyTo(src, dst) + + @classmethod + def tearDownClass(cls): + files = '/tmp/Makefile /tmp/hellomod.c' + cls.tc.target.run('rm %s' % files) + + @OETestID(1541) + @skipIfNotFeature('tools-sdk', + 'Test requires tools-sdk to be in IMAGE_FEATURES') + @OETestDepends(['gcc.GccCompileTest.test_gcc_compile']) + def test_kernel_module(self): + cmds = [ + 'cd /usr/src/kernel && make scripts', + 'cd /tmp && make', + 'cd /tmp && insmod hellomod.ko', + 'lsmod | grep hellomod', + 'dmesg | grep Hello', + 'rmmod hellomod', 'dmesg | grep "Cleaning up hellomod"' + ] + for cmd in cmds: + status, output = self.target.run(cmd, 900) + self.assertEqual(status, 0, msg='\n'.join([cmd, output])) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ldd.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ldd.py new file mode 100644 index 000000000..c6d92fd5a --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ldd.py @@ -0,0 +1,25 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotFeature + +class LddTest(OERuntimeTestCase): + + @OETestID(962) + @skipIfNotFeature('tools-sdk', + 'Test requires tools-sdk to be in IMAGE_FEATURES') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_ldd_exists(self): + status, output = self.target.run('which ldd') + msg = 'ldd does not exist in PATH: which ldd: %s' % output + self.assertEqual(status, 0, msg=msg) + + @OETestID(239) + @OETestDepends(['ldd.LddTest.test_ldd_exists']) + def test_ldd_rtldlist_check(self): + cmd = ('for i in $(which ldd | xargs cat | grep "^RTLDLIST"| ' + 'cut -d\'=\' -f2|tr -d \'"\'); ' + 'do test -f $i && echo $i && break; done') + status, output = self.target.run(cmd) + msg = "ldd path not correct or RTLDLIST files don't exist." 
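# Editor's note, not part of the upstream patch: the shell one-liner above pulls the
# RTLDLIST variable out of the ldd wrapper script and passes if at least one listed
# dynamic loader exists on the target. A rough, hypothetical Python equivalent of the
# same check (helper name and regex are illustrative assumptions, not oeqa API):
import os
import re
import shutil

def rtldlist_candidates():
    """Return the dynamic loaders named in the ldd script's RTLDLIST that exist."""
    ldd_path = shutil.which('ldd')  # on glibc systems ldd is a readable shell script
    if not ldd_path:
        return []
    with open(ldd_path) as f:
        match = re.search(r'^RTLDLIST="?([^"\n]+)"?', f.read(), re.M)
    loaders = match.group(1).split() if match else []
    # Keep only loaders that actually exist, mirroring the `test -f $i` step above.
    return [p for p in loaders if os.path.isfile(p)]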
+ self.assertEqual(status, 0, msg=msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/logrotate.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/logrotate.py new file mode 100644 index 000000000..992fef298 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/logrotate.py @@ -0,0 +1,42 @@ +# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=289 testcase +# Note that the image under test must have logrotate installed + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.runtime.decorator.package import OEHasPackage + +class LogrotateTest(OERuntimeTestCase): + + @classmethod + def tearDownClass(cls): + cls.tc.target.run('rm -rf $HOME/logrotate_dir') + + @OETestID(1544) + @OETestDepends(['ssh.SSHTest.test_ssh']) + @OEHasPackage(['logrotate']) + def test_1_logrotate_setup(self): + status, output = self.target.run('mkdir $HOME/logrotate_dir') + msg = 'Could not create logrotate_dir. Output: %s' % output + self.assertEqual(status, 0, msg = msg) + + cmd = ('sed -i "s#wtmp {#wtmp {\\n olddir $HOME/logrotate_dir#"' + ' /etc/logrotate.conf') + status, output = self.target.run(cmd) + msg = ('Could not write to logrotate.conf file. Status and output: ' + ' %s and %s' % (status, output)) + self.assertEqual(status, 0, msg = msg) + + @OETestID(1542) + @OETestDepends(['logrotate.LogrotateTest.test_1_logrotate_setup']) + def test_2_logrotate(self): + status, output = self.target.run('logrotate -f /etc/logrotate.conf') + msg = ('logrotate service could not be reloaded. Status and output: ' + '%s and %s' % (status, output)) + self.assertEqual(status, 0, msg = msg) + + _, output = self.target.run('ls -la $HOME/logrotate_dir/ | wc -l') + msg = ('new logfile could not be created. List of files within log ' + 'directory: %s' % ( + self.target.run('ls -la $HOME/logrotate_dir')[1])) + self.assertTrue(int(output)>=3, msg = msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/multilib.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/multilib.py new file mode 100644 index 000000000..8c167f100 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/multilib.py @@ -0,0 +1,41 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotInDataVar +from oeqa.runtime.decorator.package import OEHasPackage + +class MultilibTest(OERuntimeTestCase): + + def archtest(self, binary, arch): + """ + Check that ``binary`` has the ELF class ``arch`` (e.g. ELF32/ELF64). + """ + + status, output = self.target.run('readelf -h %s' % binary) + self.assertEqual(status, 0, 'Failed to readelf %s' % binary) + + l = [l.split()[1] for l in output.split('\n') if "Class:" in l] + if l: + theclass = l[0] + else: + self.fail('Cannot parse readelf. Output:\n%s' % output) + + msg = "%s isn't %s (is %s)" % (binary, arch, theclass) + self.assertEqual(theclass, arch, msg=msg) + + @OETestID(1593) + @skipIfNotInDataVar('MULTILIBS', 'multilib:lib32', + "This isn't a multilib:lib32 image") + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_check_multilib_libc(self): + """ + Check that a multilib image has both 32-bit and 64-bit libc in. 
+ """ + self.archtest("/lib/libc.so.6", "ELF32") + self.archtest("/lib64/libc.so.6", "ELF64") + + @OETestID(279) + @OETestDepends(['multilib.MultilibTest.test_check_multilib_libc']) + @OEHasPackage(['lib32-connman']) + def test_file_connman(self): + self.archtest("/usr/sbin/connmand", "ELF32") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/oe_syslog.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/oe_syslog.py new file mode 100644 index 000000000..005b6978d --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/oe_syslog.py @@ -0,0 +1,66 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfDataVar +from oeqa.runtime.decorator.package import OEHasPackage + +class SyslogTest(OERuntimeTestCase): + + @OETestID(201) + @OETestDepends(['ssh.SSHTest.test_ssh']) + @OEHasPackage(["busybox-syslog", "sysklogd"]) + def test_syslog_running(self): + cmd = '%s | grep -i [s]yslogd' % self.tc.target_cmds['ps'] + status, output = self.target.run(cmd) + msg = "No syslogd process; ps output: %s" % output + self.assertEqual(status, 0, msg=msg) + +class SyslogTestConfig(OERuntimeTestCase): + + @OETestID(1149) + @OETestDepends(['oe_syslog.SyslogTest.test_syslog_running']) + def test_syslog_logger(self): + status, output = self.target.run('logger foobar') + msg = "Can't log into syslog. Output: %s " % output + self.assertEqual(status, 0, msg=msg) + + status, output = self.target.run('grep foobar /var/log/messages') + if status != 0: + if self.tc.td.get("VIRTUAL-RUNTIME_init_manager") == "systemd": + status, output = self.target.run('journalctl -o cat | grep foobar') + else: + status, output = self.target.run('logread | grep foobar') + msg = ('Test log string not found in /var/log/messages or logread.' + ' Output: %s ' % output) + self.assertEqual(status, 0, msg=msg) + + @OETestID(1150) + @OETestDepends(['oe_syslog.SyslogTest.test_syslog_running']) + def test_syslog_restart(self): + if "systemd" != self.tc.td.get("VIRTUAL-RUNTIME_init_manager", ""): + (_, _) = self.target.run('/etc/init.d/syslog restart') + else: + (_, _) = self.target.run('systemctl restart syslog.service') + + + @OETestID(202) + @OETestDepends(['oe_syslog.SyslogTestConfig.test_syslog_logger']) + @OEHasPackage(["!sysklogd", "busybox"]) + @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd', + 'Not appropiate for systemd image') + def test_syslog_startup_config(self): + cmd = 'echo "LOGFILE=/var/log/test" >> /etc/syslog-startup.conf' + self.target.run(cmd) + status, output = self.target.run('/etc/init.d/syslog restart') + msg = ('Could not restart syslog service. Status and output:' + ' %s and %s' % (status,output)) + self.assertEqual(status, 0, msg) + + cmd = 'logger foobar && grep foobar /var/log/test' + status,output = self.target.run(cmd) + msg = 'Test log string not found. 
Output: %s ' % output + self.assertEqual(status, 0, msg=msg) + + cmd = "sed -i 's#LOGFILE=/var/log/test##' /etc/syslog-startup.conf" + self.target.run(cmd) + self.target.run('/etc/init.d/syslog restart') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/pam.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/pam.py new file mode 100644 index 000000000..3654cdc94 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/pam.py @@ -0,0 +1,33 @@ +# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=287 testcase +# Note that the image under test must have "pam" in DISTRO_FEATURES + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotFeature + +class PamBasicTest(OERuntimeTestCase): + + @OETestID(1543) + @skipIfNotFeature('pam', 'Test requires pam to be in DISTRO_FEATURES') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_pam(self): + status, output = self.target.run('login --help') + msg = ('login command does not work as expected. ' + 'Status and output:%s and %s' % (status, output)) + self.assertEqual(status, 1, msg = msg) + + status, output = self.target.run('passwd --help') + msg = ('passwd command does not work as expected. ' + 'Status and output:%s and %s' % (status, output)) + self.assertEqual(status, 0, msg = msg) + + status, output = self.target.run('su --help') + msg = ('su command does not work as expected. ' + 'Status and output:%s and %s' % (status, output)) + self.assertEqual(status, 0, msg = msg) + + status, output = self.target.run('useradd --help') + msg = ('useradd command does not work as expected. ' + 'Status and output:%s and %s' % (status, output)) + self.assertEqual(status, 0, msg = msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/parselogs.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/parselogs.py new file mode 100644 index 000000000..6e929469c --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/parselogs.py @@ -0,0 +1,359 @@ +import os + +from subprocess import check_output +from shutil import rmtree +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfDataVar +from oeqa.runtime.decorator.package import OEHasPackage + +#in the future these lists could be moved outside of module +errors = ["error", "cannot", "can\'t", "failed"] + +common_errors = [ + "(WW) warning, (EE) error, (NI) not implemented, (??) 
unknown.", + "dma timeout", + "can\'t add hid device:", + "usbhid: probe of ", + "_OSC failed (AE_ERROR)", + "_OSC failed (AE_SUPPORT)", + "AE_ALREADY_EXISTS", + "ACPI _OSC request failed (AE_SUPPORT)", + "can\'t disable ASPM", + "Failed to load module \"vesa\"", + "Failed to load module vesa", + "Failed to load module \"modesetting\"", + "Failed to load module modesetting", + "Failed to load module \"glx\"", + "Failed to load module \"fbdev\"", + "Failed to load module fbdev", + "Failed to load module glx", + "[drm] Cannot find any crtc or sizes - going 1024x768", + "_OSC failed (AE_NOT_FOUND); disabling ASPM", + "Open ACPI failed (/var/run/acpid.socket) (No such file or directory)", + "NX (Execute Disable) protection cannot be enabled: non-PAE kernel!", + "hd.: possibly failed opcode", + 'NETLINK INITIALIZATION FAILED', + 'kernel: Cannot find map file', + 'omap_hwmod: debugss: _wait_target_disable failed', + 'VGA arbiter: cannot open kernel arbiter, no multi-card support', + 'Failed to find URL:http://ipv4.connman.net/online/status.html', + 'Online check failed for', + 'netlink init failed', + 'Fast TSC calibration', + "BAR 0-9", + "Failed to load module \"ati\"", + "controller can't do DEVSLP, turning off", + "stmmac_dvr_probe: warning: cannot get CSR clock", + "error: couldn\'t mount because of unsupported optional features", + "GPT: Use GNU Parted to correct GPT errors", + "Cannot set xattr user.Librepo.DownloadInProgress", + ] + +video_related = [ + "uvesafb", +] + +x86_common = [ + '[drm:psb_do_init] *ERROR* Debug is', + 'wrong ELF class', + 'Could not enable PowerButton event', + 'probe of LNXPWRBN:00 failed with error -22', + 'pmd_set_huge: Cannot satisfy', + 'failed to setup card detect gpio', + 'amd_nb: Cannot enumerate AMD northbridges', + 'failed to retrieve link info, disabling eDP', + 'Direct firmware load for iwlwifi', +] + common_errors + +qemux86_common = [ + 'wrong ELF class', + "fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge.", + "can't claim BAR ", + 'amd_nb: Cannot enumerate AMD northbridges', + 'uvesafb: 5000 ms task timeout, infinitely waiting', + 'tsc: HPET/PMTIMER calibration failed', +] + common_errors + +ignore_errors = { + 'default' : common_errors, + 'qemux86' : [ + 'Failed to access perfctr msr (MSR', + 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)', + ] + qemux86_common, + 'qemux86-64' : qemux86_common, + 'qemumips' : [ + 'Failed to load module "glx"', + 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)', + ] + common_errors, + 'qemumips64' : [ + 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)', + ] + common_errors, + 'qemuppc' : [ + 'PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]', + 'host side 80-wire cable detection failed, limiting max speed', + 'mode "640x480" test failed', + 'Failed to load module "glx"', + 'can\'t handle BAR above 4GB', + 'Cannot reserve Legacy IO', + ] + common_errors, + 'qemuarm' : [ + 'mmci-pl18x: probe of fpga:05 failed with error -22', + 'mmci-pl18x: probe of fpga:0b failed with error -22', + 'Failed to load module "glx"', + 'OF: amba_device_add() failed (-19) for /amba/smc@10100000', + 'OF: amba_device_add() failed (-19) for /amba/mpmc@10110000', + 'OF: amba_device_add() failed (-19) for /amba/sctl@101e0000', + 'OF: amba_device_add() failed (-19) for /amba/watchdog@101e1000', + 'OF: amba_device_add() failed (-19) for /amba/sci@101f0000', + 'OF: amba_device_add() failed (-19) for /amba/ssp@101f4000', + 
'OF: amba_device_add() failed (-19) for /amba/fpga/sci@a000', + 'Failed to initialize \'/amba/timer@101e3000\': -22', + 'jitterentropy: Initialization failed with host not compliant with requirements: 2', + ] + common_errors, + 'qemuarm64' : [ + 'Fatal server error:', + '(EE) Server terminated with error (1). Closing log file.', + 'dmi: Firmware registration failed.', + 'irq: type mismatch, failed to map hwirq-27 for /intc', + ] + common_errors, + 'emenlow' : [ + '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness', + '(EE) Failed to load module "psb"', + '(EE) Failed to load module psb', + '(EE) Failed to load module "psbdrv"', + '(EE) Failed to load module psbdrv', + '(EE) open /dev/fb0: No such file or directory', + '(EE) AIGLX: reverting to software rendering', + ] + x86_common, + 'intel-core2-32' : [ + 'ACPI: No _BQC method, cannot determine initial brightness', + '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness', + '(EE) Failed to load module "psb"', + '(EE) Failed to load module psb', + '(EE) Failed to load module "psbdrv"', + '(EE) Failed to load module psbdrv', + '(EE) open /dev/fb0: No such file or directory', + '(EE) AIGLX: reverting to software rendering', + 'dmi: Firmware registration failed.', + 'ioremap error for 0x78', + ] + x86_common, + 'intel-corei7-64' : [ + 'can\'t set Max Payload Size to 256', + 'intel_punit_ipc: can\'t request region for resource', + '[drm] parse error at position 4 in video mode \'efifb\'', + 'ACPI Error: Could not enable RealTimeClock event', + 'ACPI Warning: Could not enable fixed event - RealTimeClock', + 'hci_intel INT33E1:00: Unable to retrieve gpio', + 'hci_intel: probe of INT33E1:00 failed', + 'can\'t derive routing for PCI INT A', + 'failed to read out thermal zone', + 'Bluetooth: hci0: Setting Intel event mask failed', + 'ttyS2 - failed to request DMA', + ] + x86_common, + 'crownbay' : x86_common, + 'genericx86' : x86_common, + 'genericx86-64' : [ + 'Direct firmware load for i915', + 'Failed to load firmware i915', + 'Failed to fetch GuC', + 'Failed to initialize GuC', + 'Failed to load DMC firmware', + 'The driver is built-in, so to load the firmware you need to', + ] + x86_common, + 'edgerouter' : [ + 'Fatal server error:', + ] + common_errors, + 'jasperforest' : [ + 'Activated service \'org.bluez\' failed:', + 'Unable to find NFC netlink family', + ] + common_errors, +} + +log_locations = ["/var/log/","/var/log/dmesg", "/tmp/dmesg_output.log"] + +class ParseLogsTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + cls.errors = errors + + # When systemd is enabled we need to notice errors on + # circular dependencies in units. + if 'systemd' in cls.td.get('DISTRO_FEATURES', ''): + cls.errors.extend([ + 'Found ordering cycle on', + 'Breaking ordering cycle by deleting job', + 'deleted to break ordering cycle', + 'Ordering cycle found, skipping', + ]) + + cls.ignore_errors = ignore_errors + cls.log_locations = log_locations + cls.msg = '' + is_lsb, _ = cls.tc.target.run("which LSB_Test.sh") + if is_lsb == 0: + for machine in cls.ignore_errors: + cls.ignore_errors[machine] = cls.ignore_errors[machine] \ + + video_related + + def getMachine(self): + return self.td.get('MACHINE', '') + + def getWorkdir(self): + return self.td.get('WORKDIR', '') + + # Get some information on the CPU of the machine to display at the + # beginning of the output. This info might be useful in some cases. 
+ def getHardwareInfo(self): + hwi = "" + cmd = ('cat /proc/cpuinfo | grep "model name" | head -n1 | ' + " awk 'BEGIN{FS=\":\"}{print $2}'") + _, cpu_name = self.target.run(cmd) + + cmd = ('cat /proc/cpuinfo | grep "cpu cores" | head -n1 | ' + "awk {'print $4'}") + _, cpu_physical_cores = self.target.run(cmd) + + cmd = 'cat /proc/cpuinfo | grep "processor" | wc -l' + _, cpu_logical_cores = self.target.run(cmd) + + _, cpu_arch = self.target.run('uname -m') + + hwi += 'Machine information: \n' + hwi += '*******************************\n' + hwi += 'Machine name: ' + self.getMachine() + '\n' + hwi += 'CPU: ' + str(cpu_name) + '\n' + hwi += 'Arch: ' + str(cpu_arch)+ '\n' + hwi += 'Physical cores: ' + str(cpu_physical_cores) + '\n' + hwi += 'Logical cores: ' + str(cpu_logical_cores) + '\n' + hwi += '*******************************\n' + + return hwi + + # Go through the log locations provided and if it's a folder + # create a list with all the .log files in it, if it's a file + # just add it to that list. + def getLogList(self, log_locations): + logs = [] + for location in log_locations: + status, _ = self.target.run('test -f ' + str(location)) + if status == 0: + logs.append(str(location)) + else: + status, _ = self.target.run('test -d ' + str(location)) + if status == 0: + cmd = 'find ' + str(location) + '/*.log -maxdepth 1 -type f' + status, output = self.target.run(cmd) + if status == 0: + output = output.splitlines() + for logfile in output: + logs.append(os.path.join(location, str(logfile))) + return logs + + # Copy the log files to be parsed locally + def transfer_logs(self, log_list): + workdir = self.getWorkdir() + self.target_logs = workdir + '/' + 'target_logs' + target_logs = self.target_logs + if os.path.exists(target_logs): + rmtree(self.target_logs) + os.makedirs(target_logs) + for f in log_list: + self.target.copyFrom(str(f), target_logs) + + # Get the local list of logs + def get_local_log_list(self, log_locations): + self.transfer_logs(self.getLogList(log_locations)) + list_dir = os.listdir(self.target_logs) + dir_files = [os.path.join(self.target_logs, f) for f in list_dir] + logs = [f for f in dir_files if os.path.isfile(f)] + return logs + + # Build the grep command to be used with filters and exclusions + def build_grepcmd(self, errors, ignore_errors, log): + grepcmd = 'grep ' + grepcmd += '-Ei "' + for error in errors: + grepcmd += error + '|' + grepcmd = grepcmd[:-1] + grepcmd += '" ' + str(log) + " | grep -Eiv \'" + + try: + errorlist = ignore_errors[self.getMachine()] + except KeyError: + self.msg += 'No ignore list found for this machine, using default\n' + errorlist = ignore_errors['default'] + + for ignore_error in errorlist: + ignore_error = ignore_error.replace('(', '\(') + ignore_error = ignore_error.replace(')', '\)') + ignore_error = ignore_error.replace("'", '.') + ignore_error = ignore_error.replace('?', '\?') + ignore_error = ignore_error.replace('[', '\[') + ignore_error = ignore_error.replace(']', '\]') + ignore_error = ignore_error.replace('*', '\*') + ignore_error = ignore_error.replace('0-9', '[0-9]') + grepcmd += ignore_error + '|' + grepcmd = grepcmd[:-1] + grepcmd += "\'" + + return grepcmd + + # Grep only the errors so that their context could be collected. 
+ # Default context is 10 lines before and after the error itself + def parse_logs(self, errors, ignore_errors, logs, + lines_before = 10, lines_after = 10): + results = {} + rez = [] + grep_output = '' + + for log in logs: + result = None + thegrep = self.build_grepcmd(errors, ignore_errors, log) + + try: + result = check_output(thegrep, shell=True).decode('utf-8') + except: + pass + + if result is not None: + results[log.replace('target_logs/','')] = {} + rez = result.splitlines() + + for xrez in rez: + try: + cmd = ['grep', '-F', xrez, '-B', str(lines_before)] + cmd += ['-A', str(lines_after), log] + grep_output = check_output(cmd).decode('utf-8') + except: + pass + results[log.replace('target_logs/','')][xrez]=grep_output + + return results + + # Get the output of dmesg and write it in a file. + # This file is added to log_locations. + def write_dmesg(self): + (status, dmesg) = self.target.run('dmesg > /tmp/dmesg_output.log') + + @OETestID(1059) + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_parselogs(self): + self.write_dmesg() + log_list = self.get_local_log_list(self.log_locations) + result = self.parse_logs(self.errors, self.ignore_errors, log_list) + print(self.getHardwareInfo()) + errcount = 0 + for log in result: + self.msg += 'Log: ' + log + '\n' + self.msg += '-----------------------\n' + for error in result[log]: + errcount += 1 + self.msg += 'Central error: ' + str(error) + '\n' + self.msg += '***********************\n' + self.msg += result[str(log)][str(error)] + '\n' + self.msg += '***********************\n' + self.msg += '%s errors found in logs.' % errcount + self.assertEqual(errcount, 0, msg=self.msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/perl.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/perl.py new file mode 100644 index 000000000..d0b7e8ed9 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/perl.py @@ -0,0 +1,37 @@ +import os + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.runtime.decorator.package import OEHasPackage + +class PerlTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + src = os.path.join(cls.tc.files_dir, 'test.pl') + dst = '/tmp/test.pl' + cls.tc.target.copyTo(src, dst) + + @classmethod + def tearDownClass(cls): + dst = '/tmp/test.pl' + cls.tc.target.run('rm %s' % dst) + + @OETestID(1141) + @OETestDepends(['ssh.SSHTest.test_ssh']) + @OEHasPackage(['perl']) + def test_perl_exists(self): + status, output = self.target.run('which perl') + msg = 'Perl binary not in PATH or not on target.' + self.assertEqual(status, 0, msg=msg) + + @OETestID(208) + @OETestDepends(['perl.PerlTest.test_perl_exists']) + def test_perl_works(self): + status, output = self.target.run('perl /tmp/test.pl') + msg = 'Exit status was not 0. 
Output: %s' % output + self.assertEqual(status, 0, msg=msg) + + msg = 'Incorrect output: %s' % output + self.assertEqual(output, "the value of a is 0.01", msg=msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ping.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ping.py new file mode 100644 index 000000000..02f580abe --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ping.py @@ -0,0 +1,24 @@ +from subprocess import Popen, PIPE + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.oetimeout import OETimeout + +class PingTest(OERuntimeTestCase): + + @OETimeout(30) + @OETestID(964) + def test_ping(self): + output = '' + count = 0 + while count < 5: + cmd = 'ping -c 1 %s' % self.target.ip + proc = Popen(cmd, shell=True, stdout=PIPE) + output += proc.communicate()[0].decode('utf-8') + if proc.poll() == 0: + count += 1 + else: + count = 0 + msg = ('Expected 5 consecutive, got %d.\n' + 'ping output is:\n%s' % (count,output)) + self.assertEqual(count, 5, msg = msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/python.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/python.py new file mode 100644 index 000000000..bf3e17916 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/python.py @@ -0,0 +1,43 @@ +import os + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.runtime.decorator.package import OEHasPackage + +class PythonTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + src = os.path.join(cls.tc.files_dir, 'test.py') + dst = '/tmp/test.py' + cls.tc.target.copyTo(src, dst) + + @classmethod + def tearDownClass(cls): + dst = '/tmp/test.py' + cls.tc.target.run('rm %s' % dst) + + @OETestID(1145) + @OETestDepends(['ssh.SSHTest.test_ssh']) + @OEHasPackage(['python-core']) + def test_python_exists(self): + status, output = self.target.run('which python') + msg = 'Python binary not in PATH or not on target.' + self.assertEqual(status, 0, msg=msg) + + @OETestID(965) + @OETestDepends(['python.PythonTest.test_python_exists']) + def test_python_stdout(self): + status, output = self.target.run('python /tmp/test.py') + msg = 'Exit status was not 0. 
Output: %s' % output + self.assertEqual(status, 0, msg=msg) + + msg = 'Incorrect output: %s' % output + self.assertEqual(output, "the value of a is 0.01", msg=msg) + + @OETestID(1146) + @OETestDepends(['python.PythonTest.test_python_stdout']) + def test_python_testfile(self): + status, output = self.target.run('ls /tmp/testfile.python') + self.assertEqual(status, 0, msg='Python test file generation failed.') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/rpm.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/rpm.py new file mode 100644 index 000000000..05b94c7b4 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/rpm.py @@ -0,0 +1,142 @@ +import os +import fnmatch + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfDataVar +from oeqa.runtime.decorator.package import OEHasPackage +from oeqa.core.utils.path import findFile + +class RpmBasicTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + if cls.tc.td['PACKAGE_CLASSES'].split()[0] != 'package_rpm': + cls.skipTest('Tests require image to be built from rpm') + + @OETestID(960) + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_rpm_help(self): + status, output = self.target.run('rpm --help') + msg = 'status and output: %s and %s' % (status, output) + self.assertEqual(status, 0, msg=msg) + + @OETestID(191) + @OETestDepends(['rpm.RpmBasicTest.test_rpm_help']) + def test_rpm_query(self): + status, output = self.target.run('rpm -q rpm') + msg = 'status and output: %s and %s' % (status, output) + self.assertEqual(status, 0, msg=msg) + +class RpmInstallRemoveTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + if cls.tc.td['PACKAGE_CLASSES'].split()[0] != 'package_rpm': + cls.skipTest('Tests require image to be built from rpm') + + pkgarch = cls.td['TUNE_PKGARCH'].replace('-', '_') + rpmdir = os.path.join(cls.tc.td['DEPLOY_DIR'], 'rpm', pkgarch) + # Pick rpm-doc as a test file to get installed, because it's small + # and it will always be built for standard targets + rpm_doc = 'rpm-doc-*.%s.rpm' % pkgarch + for f in fnmatch.filter(os.listdir(rpmdir), rpm_doc): + test_file = os.path.join(rpmdir, f) + dst = '/tmp/rpm-doc.rpm' + cls.tc.target.copyTo(test_file, dst) + + @classmethod + def tearDownClass(cls): + dst = '/tmp/rpm-doc.rpm' + cls.tc.target.run('rm -f %s' % dst) + + @OETestID(192) + @OETestDepends(['rpm.RpmBasicTest.test_rpm_help']) + def test_rpm_install(self): + status, output = self.target.run('rpm -ivh /tmp/rpm-doc.rpm') + msg = 'Failed to install rpm-doc package: %s' % output + self.assertEqual(status, 0, msg=msg) + + @OETestID(194) + @OETestDepends(['rpm.RpmInstallRemoveTest.test_rpm_install']) + def test_rpm_remove(self): + status,output = self.target.run('rpm -e rpm-doc') + msg = 'Failed to remove rpm-doc package: %s' % output + self.assertEqual(status, 0, msg=msg) + + @OETestID(1096) + @OETestDepends(['rpm.RpmBasicTest.test_rpm_query']) + def test_rpm_query_nonroot(self): + + def set_up_test_user(u): + status, output = self.target.run('id -u %s' % u) + if status: + status, output = self.target.run('useradd %s' % u) + msg = 'Failed to create new user: %s' % output + self.assertTrue(status == 0, msg=msg) + + def exec_as_test_user(u): + status, output = self.target.run('su -c id %s' % u) + msg = 'Failed to execute as new user' + self.assertTrue("({0})".format(u) in output, msg=msg) + + status, output = 
self.target.run('su -c "rpm -qa" %s ' % u) + msg = 'status: %s. Cannot run rpm -qa: %s' % (status, output) + self.assertEqual(status, 0, msg=msg) + + def unset_up_test_user(u): + status, output = self.target.run('userdel -r %s' % u) + msg = 'Failed to erase user: %s' % output + self.assertTrue(status == 0, msg=msg) + + tuser = 'test1' + + try: + set_up_test_user(tuser) + exec_as_test_user(tuser) + finally: + unset_up_test_user(tuser) + + @OETestID(195) + @OETestDepends(['rpm.RpmInstallRemoveTest.test_rpm_remove']) + def test_check_rpm_install_removal_log_file_size(self): + """ + Summary: Check that rpm writes into /var/log/messages + Expected: There should be some RPM prefixed entries in the above file. + Product: BSPs + Author: Alexandru Georgescu + Author: Alexander Kanavin + AutomatedBy: Daniel Istrate + """ + db_files_cmd = 'ls /var/lib/rpm/__db.*' + check_log_cmd = "grep RPM /var/log/messages | wc -l" + + # Make sure that some database files are under /var/lib/rpm as '__db.xxx' + status, output = self.target.run(db_files_cmd) + msg = 'Failed to find database files under /var/lib/rpm/ as __db.xxx' + self.assertEqual(0, status, msg=msg) + + # Remove the package just in case + self.target.run('rpm -e rpm-doc') + + # Install/Remove a package 10 times + for i in range(10): + status, output = self.target.run('rpm -ivh /tmp/rpm-doc.rpm') + msg = 'Failed to install rpm-doc package. Reason: {}'.format(output) + self.assertEqual(0, status, msg=msg) + + status, output = self.target.run('rpm -e rpm-doc') + msg = 'Failed to remove rpm-doc package. Reason: {}'.format(output) + self.assertEqual(0, status, msg=msg) + + # if using systemd this should ensure all entries are flushed to /var + status, output = self.target.run("journalctl --sync") + # Get the amount of entries in the log file + status, output = self.target.run(check_log_cmd) + msg = 'Failed to get the final size of the log file.' 
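# Editor's note, not part of the upstream patch: the assertion below just counts the
# lines grep matches for "RPM" in /var/log/messages after ten install/remove cycles.
# A minimal, hypothetical local illustration of that counting step:
def count_rpm_entries(log_text):
    """Count log lines containing 'RPM', like `grep RPM /var/log/messages | wc -l`."""
    return sum(1 for line in log_text.splitlines() if 'RPM' in line)

# Ten install/remove cycles are expected to leave at least 80 such entries.
assert count_rpm_entries("RPM install\nRPM erase\n" * 40) >= 80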
+ self.assertEqual(0, status, msg=msg) + + # Check that there's enough of them + self.assertGreaterEqual(int(output), 80, + 'Cound not find sufficient amount of rpm entries in /var/log/messages, found {} entries'.format(output)) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/scanelf.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/scanelf.py new file mode 100644 index 000000000..3ba1f78af --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/scanelf.py @@ -0,0 +1,26 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.runtime.decorator.package import OEHasPackage + +class ScanelfTest(OERuntimeTestCase): + scancmd = 'scanelf --quiet --recursive --mount --ldpath --path' + + @OETestID(966) + @OETestDepends(['ssh.SSHTest.test_ssh']) + @OEHasPackage(['pax-utils']) + def test_scanelf_textrel(self): + # print TEXTREL information + cmd = '%s --textrel' % self.scancmd + status, output = self.target.run(cmd) + msg = '\n'.join([cmd, output]) + self.assertEqual(output.strip(), '', msg=msg) + + @OETestID(967) + @OETestDepends(['scanelf.ScanelfTest.test_scanelf_textrel']) + def test_scanelf_rpath(self): + # print RPATH information + cmd = '%s --textrel --rpath' % self.scancmd + status, output = self.target.run(cmd) + msg = '\n'.join([cmd, output]) + self.assertEqual(output.strip(), '', msg=msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/scp.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/scp.py new file mode 100644 index 000000000..f488a6175 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/scp.py @@ -0,0 +1,33 @@ +import os +from tempfile import mkstemp + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID + +class ScpTest(OERuntimeTestCase): + + @classmethod + def setUpClass(cls): + cls.tmp_fd, cls.tmp_path = mkstemp() + with os.fdopen(cls.tmp_fd, 'w') as f: + f.seek(2 ** 22 -1) + f.write(os.linesep) + + @classmethod + def tearDownClass(cls): + os.remove(cls.tmp_path) + + @OETestID(220) + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_scp_file(self): + dst = '/tmp/test_scp_file' + + (status, output) = self.target.copyTo(self.tmp_path, dst) + msg = 'File could not be copied. Output: %s' % output + self.assertEqual(status, 0, msg=msg) + + (status, output) = self.target.run('ls -la %s' % dst) + self.assertEqual(status, 0, msg = 'SCP test failed') + + self.target.run('rm %s' % dst) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/skeletoninit.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/skeletoninit.py new file mode 100644 index 000000000..4fdcf033a --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/skeletoninit.py @@ -0,0 +1,33 @@ +# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=284 +# testcase. 
Image under test must have meta-skeleton layer in bblayers and +# IMAGE_INSTALL_append = " service" in local.conf +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfDataVar +from oeqa.runtime.decorator.package import OEHasPackage + +class SkeletonBasicTest(OERuntimeTestCase): + + @OETestDepends(['ssh.SSHTest.test_ssh']) + @OEHasPackage(['service']) + @skipIfDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd', + 'Not appropiate for systemd image') + def test_skeleton_availability(self): + status, output = self.target.run('ls /etc/init.d/skeleton') + msg = 'skeleton init script not found. Output:\n%s' % output + self.assertEqual(status, 0, msg=msg) + + status, output = self.target.run('ls /usr/sbin/skeleton-test') + msg = 'skeleton-test not found. Output:\n%s' % output + self.assertEqual(status, 0, msg=msg) + + @OETestID(284) + @OETestDepends(['skeletoninit.SkeletonBasicTest.test_skeleton_availability']) + def test_skeleton_script(self): + output1 = self.target.run("/etc/init.d/skeleton start")[1] + cmd = '%s | grep [s]keleton-test' % self.tc.target_cmds['ps'] + status, output2 = self.target.run(cmd) + msg = ('Skeleton script could not be started:' + '\n%s\n%s' % (output1, output2)) + self.assertEqual(status, 0, msg=msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ssh.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ssh.py new file mode 100644 index 000000000..eca167969 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/ssh.py @@ -0,0 +1,15 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID + +class SSHTest(OERuntimeTestCase): + + @OETestID(224) + @OETestDepends(['ping.PingTest.test_ping']) + def test_ssh(self): + (status, output) = self.target.run('uname -a') + self.assertEqual(status, 0, msg='SSH Test failed: %s' % output) + (status, output) = self.target.run('cat /etc/masterimage') + msg = "This isn't the right image - /etc/masterimage " \ + "shouldn't be here %s" % output + self.assertEqual(status, 1, msg=msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/systemd.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/systemd.py new file mode 100644 index 000000000..db69384c8 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/systemd.py @@ -0,0 +1,181 @@ +import re +import time + +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfDataVar, skipIfNotDataVar +from oeqa.runtime.decorator.package import OEHasPackage +from oeqa.core.decorator.data import skipIfNotFeature + +class SystemdTest(OERuntimeTestCase): + + def systemctl(self, action='', target='', expected=0, verbose=False): + command = 'systemctl %s %s' % (action, target) + status, output = self.target.run(command) + message = '\n'.join([command, output]) + if status != expected and verbose: + cmd = 'systemctl status --full %s' % target + message += self.target.run(cmd)[1] + self.assertEqual(status, expected, message) + return output + + #TODO: use pyjournalctl instead + def journalctl(self, args='',l_match_units=None): + """ + Request for the journalctl output to the current target system + + Arguments: + -args, an optional argument pass through argument + 
-l_match_units, an optional list of units to filter the output + Returns: + -string output of the journalctl command + Raises: + -AssertionError, on remote commands that fail + -ValueError, on a journalctl call with filtering by l_match_units that + returned no entries + """ + + query_units='' + if l_match_units: + query_units = ['_SYSTEMD_UNIT='+unit for unit in l_match_units] + query_units = ' '.join(query_units) + command = 'journalctl %s %s' %(args, query_units) + status, output = self.target.run(command) + if status: + raise AssertionError("Command '%s' returned non-zero exit " + 'code %d:\n%s' % (command, status, output)) + if len(output) == 1 and "-- No entries --" in output: + raise ValueError('List of units to match: %s, returned no entries' + % l_match_units) + return output + +class SystemdBasicTests(SystemdTest): + + def settle(self): + """ + Block until systemd has finished activating any units being activated, + or until two minutes has elapsed. + + Returns a tuple, either (True, '') if all units have finished + activating, or (False, message string) if there are still units + activating (generally, failing units that restart). + """ + endtime = time.time() + (60 * 2) + while True: + status, output = self.target.run('systemctl --state=activating') + if "0 loaded units listed" in output: + return (True, '') + if time.time() >= endtime: + return (False, output) + time.sleep(10) + + @skipIfNotFeature('systemd', + 'Test requires systemd to be in DISTRO_FEATURES') + @skipIfNotDataVar('VIRTUAL-RUNTIME_init_manager', 'systemd', + 'systemd is not the init manager for this image') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_systemd_basic(self): + self.systemctl('--version') + + @OETestID(551) + @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic']) + def test_systemd_list(self): + self.systemctl('list-unit-files') + + @OETestID(550) + @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic']) + def test_systemd_failed(self): + settled, output = self.settle() + msg = "Timed out waiting for systemd to settle:\n%s" % output + self.assertTrue(settled, msg=msg) + + output = self.systemctl('list-units', '--failed') + match = re.search('0 loaded units listed', output) + if not match: + output += self.systemctl('status --full --failed') + self.assertTrue(match, msg='Some systemd units failed:\n%s' % output) + + +class SystemdServiceTests(SystemdTest): + + @OEHasPackage(['avahi-daemon']) + @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic']) + def test_systemd_status(self): + self.systemctl('status --full', 'avahi-daemon.service') + + @OETestID(695) + @OETestDepends(['systemd.SystemdServiceTests.test_systemd_status']) + def test_systemd_stop_start(self): + self.systemctl('stop', 'avahi-daemon.service') + self.systemctl('is-active', 'avahi-daemon.service', + expected=3, verbose=True) + self.systemctl('start','avahi-daemon.service') + self.systemctl('is-active', 'avahi-daemon.service', verbose=True) + + @OETestID(696) + @OETestDepends(['systemd.SystemdServiceTests.test_systemd_status']) + def test_systemd_disable_enable(self): + self.systemctl('disable', 'avahi-daemon.service') + self.systemctl('is-enabled', 'avahi-daemon.service', expected=1) + self.systemctl('enable', 'avahi-daemon.service') + self.systemctl('is-enabled', 'avahi-daemon.service') + +class SystemdJournalTests(SystemdTest): + + @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic']) + def test_systemd_journal(self): + status, output = self.target.run('journalctl') + 
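A minimal usage sketch for the journalctl() helper defined above; the unit name is a hypothetical placeholder, and the expanded command follows from the '_SYSTEMD_UNIT=' list comprehension in that helper:

    # Each entry in l_match_units becomes a '_SYSTEMD_UNIT=<unit>' match.
    output = self.journalctl(args='-b',
                             l_match_units=['systemd-logind.service'])
    # Effectively runs on the target:
    #   journalctl -b _SYSTEMD_UNIT=systemd-logind.service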
self.assertEqual(status, 0, output) + + @OETestDepends(['systemd.SystemdBasicTests.test_systemd_basic']) + def test_systemd_boot_time(self, systemd_TimeoutStartSec=90): + """ + Get the target boot time from journalctl and log it + + Arguments: + -systemd_TimeoutStartSec, an optional argument containing systemd's + unit start timeout to compare against + """ + + # The expression chain that uniquely identifies the time boot message. + expr_items=['Startup finished', 'kernel', 'userspace','\.$'] + try: + output = self.journalctl(args='-o cat --reverse') + except AssertionError: + self.fail('Error occurred while calling journalctl') + if not len(output): + self.fail('Error, unable to get startup time from systemd journal') + + # Check for the regular expression items that match the startup time. + for line in output.split('\n'): + check_match = ''.join(re.findall('.*'.join(expr_items), line)) + if check_match: + break + # Put the startup time in the test log + if check_match: + self.tc.logger.info('%s' % check_match) + else: + self.skipTest('Error at obtaining the boot time from journalctl') + boot_time_sec = 0 + + # Get the numeric values from the string and convert them to seconds + # same data will be placed in list and string for manipulation. + l_boot_time = check_match.split(' ')[-2:] + s_boot_time = ' '.join(l_boot_time) + try: + # Obtain the minutes it took to boot. + if l_boot_time[0].endswith('min') and l_boot_time[0][0].isdigit(): + boot_time_min = s_boot_time.split('min')[0] + # Convert to seconds and accumulate it. + boot_time_sec += int(boot_time_min) * 60 + # Obtain the seconds it took to boot and accumulate. + boot_time_sec += float(l_boot_time[1].split('s')[0]) + except ValueError: + self.skipTest('Error when parsing time from boot string') + + # Assert the target boot time against systemd's unit start timeout. + if boot_time_sec > systemd_TimeoutStartSec: + msg = ("Target boot time %s exceeds systemd's TimeoutStartSec %s" + % (boot_time_sec, systemd_TimeoutStartSec)) + self.tc.logger.info(msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/x32lib.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/x32lib.py new file mode 100644 index 000000000..8da0154e7 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/x32lib.py @@ -0,0 +1,19 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotInDataVar + +class X32libTest(OERuntimeTestCase): + + @skipIfNotInDataVar('DEFAULTTUNE', 'x86-64-x32', + 'DEFAULTTUNE is not set to x86-64-x32') + @OETestID(281) + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_x32_file(self): + cmd = 'readelf -h /bin/ls | grep Class | grep ELF32' + status1 = self.target.run(cmd)[0] + cmd = 'readelf -h /bin/ls | grep Machine | grep X86-64' + status2 = self.target.run(cmd)[0] + msg = ("/bin/ls isn't an X86-64 ELF32 binary. 
readelf says: %s" % + self.target.run("readelf -h /bin/ls")[1]) + self.assertTrue(status1 == 0 and status2 == 0, msg=msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/xorg.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/xorg.py new file mode 100644 index 000000000..2124813e3 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/cases/xorg.py @@ -0,0 +1,17 @@ +from oeqa.runtime.case import OERuntimeTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID +from oeqa.core.decorator.data import skipIfNotFeature + +class XorgTest(OERuntimeTestCase): + + @OETestID(1151) + @skipIfNotFeature('x11-base', + 'Test requires x11 to be in IMAGE_FEATURES') + @OETestDepends(['ssh.SSHTest.test_ssh']) + def test_xorg_running(self): + cmd ='%s | grep -v xinit | grep [X]org' % self.tc.target_cmds['ps'] + status, output = self.target.run(cmd) + msg = ('Xorg does not appear to be running %s' % + self.target.run(self.tc.target_cmds['ps'])[1]) + self.assertEqual(status, 0, msg=msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/connman.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/connman.py deleted file mode 100644 index 003fefe2c..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/connman.py +++ /dev/null @@ -1,31 +0,0 @@ -import unittest -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasPackage("connman"): - skipModule("No connman package in image") - - -class ConnmanTest(oeRuntimeTest): - - def service_status(self, service): - if oeRuntimeTest.hasFeature("systemd"): - (status, output) = self.target.run('systemctl status -l %s' % service) - return output - else: - return "Unable to get status or logs for %s" % service - - @testcase(961) - @skipUnlessPassed('test_ssh') - def test_connmand_help(self): - (status, output) = self.target.run('/usr/sbin/connmand --help') - self.assertEqual(status, 0, msg="status and output: %s and %s" % (status,output)) - - @testcase(221) - @skipUnlessPassed('test_connmand_help') - def test_connmand_running(self): - (status, output) = self.target.run(oeRuntimeTest.pscmd + ' | grep [c]onnmand') - if status != 0: - print(self.service_status("connman")) - self.fail("No connmand process running") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/context.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/context.py new file mode 100644 index 000000000..c4cd76cf4 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/context.py @@ -0,0 +1,220 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os + +from oeqa.core.context import OETestContext, OETestContextExecutor +from oeqa.core.target.ssh import OESSHTarget +from oeqa.core.target.qemu import OEQemuTarget +from oeqa.utils.dump import HostDumper + +from oeqa.runtime.loader import OERuntimeTestLoader + +class OERuntimeTestContext(OETestContext): + loaderClass = OERuntimeTestLoader + runtime_files_dir = os.path.join( + os.path.dirname(os.path.abspath(__file__)), "files") + + def __init__(self, td, logger, target, + host_dumper, image_packages, extract_dir): + super(OERuntimeTestContext, self).__init__(td, logger) + + self.target = target + self.image_packages = image_packages + self.host_dumper = host_dumper + self.extract_dir = extract_dir + self._set_target_cmds() + + def _set_target_cmds(self): + self.target_cmds = {} + + self.target_cmds['ps'] = 'ps' 
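As a usage sketch, runtime cases elsewhere in this patch (xorg.py and skeletoninit.py, for instance) consume this target_cmds mapping rather than hard-coding a particular ps syntax; the process name below is only an illustrative placeholder:

    # Hypothetical test body: reuse the ps invocation resolved by the context.
    cmd = '%s | grep [f]oo' % self.tc.target_cmds['ps']
    status, output = self.target.run(cmd)
    self.assertEqual(status, 0, msg=output)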
+ if 'procps' in self.image_packages: + self.target_cmds['ps'] = self.target_cmds['ps'] + ' -ef' + +class OERuntimeTestContextExecutor(OETestContextExecutor): + _context_class = OERuntimeTestContext + + name = 'runtime' + help = 'runtime test component' + description = 'executes runtime tests over targets' + + default_cases = os.path.join(os.path.abspath(os.path.dirname(__file__)), + 'cases') + default_data = None + default_test_data = 'data/testdata.json' + default_tests = '' + + default_target_type = 'simpleremote' + default_manifest = 'data/manifest' + default_server_ip = '192.168.7.1' + default_target_ip = '192.168.7.2' + default_host_dumper_dir = '/tmp/oe-saved-tests' + default_extract_dir = 'packages/extracted' + + def register_commands(self, logger, subparsers): + super(OERuntimeTestContextExecutor, self).register_commands(logger, subparsers) + + runtime_group = self.parser.add_argument_group('runtime options') + + runtime_group.add_argument('--target-type', action='store', + default=self.default_target_type, choices=['simpleremote', 'qemu'], + help="Target type of device under test, default: %s" \ + % self.default_target_type) + runtime_group.add_argument('--target-ip', action='store', + default=self.default_target_ip, + help="IP address of device under test, default: %s" \ + % self.default_target_ip) + runtime_group.add_argument('--server-ip', action='store', + default=self.default_target_ip, + help="IP address of device under test, default: %s" \ + % self.default_server_ip) + + runtime_group.add_argument('--host-dumper-dir', action='store', + default=self.default_host_dumper_dir, + help="Directory where host status is dumped, if tests fails, default: %s" \ + % self.default_host_dumper_dir) + + runtime_group.add_argument('--packages-manifest', action='store', + default=self.default_manifest, + help="Package manifest of the image under testi, default: %s" \ + % self.default_manifest) + + runtime_group.add_argument('--extract-dir', action='store', + default=self.default_extract_dir, + help='Directory where extracted packages reside, default: %s' \ + % self.default_extract_dir) + + runtime_group.add_argument('--qemu-boot', action='store', + help="Qemu boot configuration, only needed when target_type is QEMU.") + + @staticmethod + def getTarget(target_type, logger, target_ip, server_ip, **kwargs): + target = None + + if target_type == 'simpleremote': + target = OESSHTarget(logger, target_ip, server_ip, **kwargs) + elif target_type == 'qemu': + target = OEQemuTarget(logger, target_ip, server_ip, **kwargs) + else: + # XXX: This code uses the old naming convention for controllers and + # targets, the idea it is to leave just targets as the controller + # most of the time was just a wrapper. + # XXX: This code tries to import modules from lib/oeqa/controllers + # directory and treat them as controllers, it will less error prone + # to use introspection to load such modules. + # XXX: Don't base your targets on this code it will be refactored + # in the near future. + # Custom target module loading + try: + target_modules_path = kwargs.get('target_modules_path', '') + controller = OERuntimeTestContextExecutor.getControllerModule(target_type, target_modules_path) + target = controller(logger, target_ip, server_ip, **kwargs) + except ImportError as e: + raise TypeError("Failed to import %s from available controller modules" % target_type) + + return target + + # Search oeqa.controllers module directory for and return a controller + # corresponding to the given target name. 
+ # AttributeError raised if not found. + # ImportError raised if a provided module can not be imported. + @staticmethod + def getControllerModule(target, target_modules_path): + controllerslist = OERuntimeTestContextExecutor._getControllerModulenames(target_modules_path) + controller = OERuntimeTestContextExecutor._loadControllerFromName(target, controllerslist) + return controller + + # Return a list of all python modules in lib/oeqa/controllers for each + # layer in bbpath + @staticmethod + def _getControllerModulenames(target_modules_path): + + controllerslist = [] + + def add_controller_list(path): + if not os.path.exists(os.path.join(path, '__init__.py')): + raise OSError('Controllers directory %s exists but is missing __init__.py' % path) + files = sorted([f for f in os.listdir(path) if f.endswith('.py') and not f.startswith('_')]) + for f in files: + module = 'oeqa.controllers.' + f[:-3] + if module not in controllerslist: + controllerslist.append(module) + else: + raise RuntimeError("Duplicate controller module found for %s. Layers should create unique controller module names" % module) + + extpath = target_modules_path.split(':') + for p in extpath: + controllerpath = os.path.join(p, 'lib', 'oeqa', 'controllers') + if os.path.exists(controllerpath): + add_controller_list(controllerpath) + return controllerslist + + # Search for and return a controller from given target name and + # set of module names. + # Raise AttributeError if not found. + # Raise ImportError if a provided module can not be imported + @staticmethod + def _loadControllerFromName(target, modulenames): + for name in modulenames: + obj = OERuntimeTestContextExecutor._loadControllerFromModule(target, name) + if obj: + return obj + raise AttributeError("Unable to load {0} from available modules: {1}".format(target, str(modulenames))) + + # Search for and return a controller or None from given module name + @staticmethod + def _loadControllerFromModule(target, modulename): + obj = None + # import module, allowing it to raise import exception + try: + module = __import__(modulename, globals(), locals(), [target]) + except Exception as e: + return obj + # look for target class in the module, catching any exceptions as it + # is valid that a module may not have the target class. 
+ try: + obj = getattr(module, target) + except: + obj = None + return obj + + @staticmethod + def readPackagesManifest(manifest): + if not manifest or not os.path.exists(manifest): + raise OSError("Manifest file not exists: %s" % manifest) + + image_packages = set() + with open(manifest, 'r') as f: + for line in f.readlines(): + line = line.strip() + if line and not line.startswith("#"): + image_packages.add(line.split()[0]) + + return image_packages + + @staticmethod + def getHostDumper(cmds, directory): + return HostDumper(cmds, directory) + + def _process_args(self, logger, args): + if not args.packages_manifest: + raise TypeError('Manifest file not provided') + + super(OERuntimeTestContextExecutor, self)._process_args(logger, args) + + target_kwargs = {} + target_kwargs['qemuboot'] = args.qemu_boot + + self.tc_kwargs['init']['target'] = \ + OERuntimeTestContextExecutor.getTarget(args.target_type, + None, args.target_ip, args.server_ip, **target_kwargs) + self.tc_kwargs['init']['host_dumper'] = \ + OERuntimeTestContextExecutor.getHostDumper(None, + args.host_dumper_dir) + self.tc_kwargs['init']['image_packages'] = \ + OERuntimeTestContextExecutor.readPackagesManifest( + args.packages_manifest) + self.tc_kwargs['init']['extract_dir'] = args.extract_dir + +_executor_class = OERuntimeTestContextExecutor diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/date.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/date.py deleted file mode 100644 index 447987e07..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/date.py +++ /dev/null @@ -1,31 +0,0 @@ -from oeqa.oetest import oeRuntimeTest -from oeqa.utils.decorators import * -import re - -class DateTest(oeRuntimeTest): - - def setUpLocal(self): - if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True) == "systemd": - self.target.run('systemctl stop systemd-timesyncd') - - def tearDownLocal(self): - if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True) == "systemd": - self.target.run('systemctl start systemd-timesyncd') - - @testcase(211) - @skipUnlessPassed("test_ssh") - def test_date(self): - (status, output) = self.target.run('date +"%Y-%m-%d %T"') - self.assertEqual(status, 0, msg="Failed to get initial date, output: %s" % output) - oldDate = output - - sampleDate = '"2016-08-09 10:00:00"' - (status, output) = self.target.run("date -s %s" % sampleDate) - self.assertEqual(status, 0, msg="Date set failed, output: %s" % output) - - (status, output) = self.target.run("date -R") - p = re.match('Tue, 09 Aug 2016 10:00:.. \+0000', output) - self.assertTrue(p, msg="The date was not set correctly, output: %s" % output) - - (status, output) = self.target.run('date -s "%s"' % oldDate) - self.assertEqual(status, 0, msg="Failed to reset date, output: %s" % output) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/decorator/package.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/decorator/package.py new file mode 100644 index 000000000..aa6ecb68f --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/decorator/package.py @@ -0,0 +1,53 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.decorator import OETestDecorator, registerDecorator +from oeqa.core.utils.misc import strToSet + +@registerDecorator +class OEHasPackage(OETestDecorator): + """ + Checks if image has packages (un)installed. + + The argument must be a string, set, or list of packages that must be + installed or not present in the image. 
+ + The way to tell a package must not be in an image is using an + exclamation point ('!') before the name of the package. + + If test depends on pkg1 or pkg2 you need to use: + @OEHasPackage({'pkg1', 'pkg2'}) + + If test depends on pkg1 and pkg2 you need to use: + @OEHasPackage('pkg1') + @OEHasPackage('pkg2') + + If test depends on pkg1 but pkg2 must not be present use: + @OEHasPackage({'pkg1', '!pkg2'}) + """ + + attrs = ('need_pkgs',) + + def setUpDecorator(self): + need_pkgs = set() + unneed_pkgs = set() + pkgs = strToSet(self.need_pkgs) + for pkg in pkgs: + if pkg.startswith('!'): + unneed_pkgs.add(pkg[1:]) + else: + need_pkgs.add(pkg) + + if unneed_pkgs: + msg = 'Checking if %s is not installed' % ', '.join(unneed_pkgs) + self.logger.debug(msg) + if not self.case.tc.image_packages.isdisjoint(unneed_pkgs): + msg = "Test can't run with %s installed" % ', or'.join(unneed_pkgs) + self.case.skipTest(msg) + + if need_pkgs: + msg = 'Checking if at least one of %s is installed' % ', '.join(need_pkgs) + self.logger.debug(msg) + if self.case.tc.image_packages.isdisjoint(need_pkgs): + msg = "Test requires %s to be installed" % ', or'.join(need_pkgs) + self.case.skipTest(msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/df.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/df.py deleted file mode 100644 index 09569d5ff..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/df.py +++ /dev/null @@ -1,12 +0,0 @@ -import unittest -from oeqa.oetest import oeRuntimeTest -from oeqa.utils.decorators import * - - -class DfTest(oeRuntimeTest): - - @testcase(234) - @skipUnlessPassed("test_ssh") - def test_df(self): - (status,output) = self.target.run("df / | sed -n '2p' | awk '{print $4}'") - self.assertTrue(int(output)>5120, msg="Not enough space on image. 
Current size is %s" % output) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.c b/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.c deleted file mode 100644 index 2d8389c92..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.c +++ /dev/null @@ -1,26 +0,0 @@ -#include -#include -#include - -double convert(long long l) -{ - return (double)l; -} - -int main(int argc, char * argv[]) { - - long long l = 10; - double f; - double check = 10.0; - - f = convert(l); - printf("convert: %lld => %f\n", l, f); - if ( f != check ) exit(1); - - f = 1234.67; - check = 1234.0; - printf("floorf(%f) = %f\n", f, floorf(f)); - if ( floorf(f) != check) exit(1); - - return 0; -} diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.cpp b/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.cpp deleted file mode 100644 index 9e1a76473..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.cpp +++ /dev/null @@ -1,3 +0,0 @@ -#include - -int main() {} \ No newline at end of file diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.pl b/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.pl deleted file mode 100644 index 689c8f163..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.pl +++ /dev/null @@ -1,2 +0,0 @@ -$a = 9.01e+21 - 9.01e+21 + 0.01; -print ("the value of a is ", $a, "\n"); diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.py deleted file mode 100644 index f389225d7..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/test.py +++ /dev/null @@ -1,6 +0,0 @@ -import os - -os.system('touch /tmp/testfile.python') - -a = 9.01e+21 - 9.01e+21 + 0.01 -print("the value of a is %s" % a) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/testsdkmakefile b/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/testsdkmakefile deleted file mode 100644 index fb05f822f..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/files/testsdkmakefile +++ /dev/null @@ -1,5 +0,0 @@ -test: test.o - $(CC) -o test test.o -lm -test.o: test.c - $(CC) -c test.c - diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/gcc.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/gcc.py deleted file mode 100644 index d90cd1799..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/gcc.py +++ /dev/null @@ -1,47 +0,0 @@ -import unittest -import os -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasFeature("tools-sdk"): - skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES") - - -class GccCompileTest(oeRuntimeTest): - - @classmethod - def setUpClass(self): - oeRuntimeTest.tc.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "test.c"), "/tmp/test.c") - oeRuntimeTest.tc.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "testmakefile"), "/tmp/testmakefile") - oeRuntimeTest.tc.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "test.cpp"), "/tmp/test.cpp") - - @testcase(203) - def test_gcc_compile(self): - (status, output) = self.target.run('gcc /tmp/test.c -o /tmp/test -lm') - self.assertEqual(status, 0, msg="gcc compile failed, output: %s" % output) - (status, output) = self.target.run('/tmp/test') - self.assertEqual(status, 0, msg="running compiled file failed, output %s" % output) - - @testcase(200) - def test_gpp_compile(self): - (status, output) = 
self.target.run('g++ /tmp/test.c -o /tmp/test -lm') - self.assertEqual(status, 0, msg="g++ compile failed, output: %s" % output) - (status, output) = self.target.run('/tmp/test') - self.assertEqual(status, 0, msg="running compiled file failed, output %s" % output) - - @testcase(1142) - def test_gpp2_compile(self): - (status, output) = self.target.run('g++ /tmp/test.cpp -o /tmp/test -lm') - self.assertEqual(status, 0, msg="g++ compile failed, output: %s" % output) - (status, output) = self.target.run('/tmp/test') - self.assertEqual(status, 0, msg="running compiled file failed, output %s" % output) - - @testcase(204) - def test_make(self): - (status, output) = self.target.run('cd /tmp; make -f testmakefile') - self.assertEqual(status, 0, msg="running make failed, output %s" % output) - - @classmethod - def tearDownClass(self): - oeRuntimeTest.tc.target.run("rm /tmp/test.c /tmp/test.o /tmp/test /tmp/testmakefile") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/kernelmodule.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/kernelmodule.py deleted file mode 100644 index 38ca18454..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/kernelmodule.py +++ /dev/null @@ -1,34 +0,0 @@ -import unittest -import os -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasFeature("tools-sdk"): - skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES") - - -class KernelModuleTest(oeRuntimeTest): - - def setUpLocal(self): - self.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "hellomod.c"), "/tmp/hellomod.c") - self.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "hellomod_makefile"), "/tmp/Makefile") - - @testcase('316') - @skipUnlessPassed('test_ssh') - @skipUnlessPassed('test_gcc_compile') - def test_kernel_module(self): - cmds = [ - 'cd /usr/src/kernel && make scripts', - 'cd /tmp && make', - 'cd /tmp && insmod hellomod.ko', - 'lsmod | grep hellomod', - 'dmesg | grep Hello', - 'rmmod hellomod', 'dmesg | grep "Cleaning up hellomod"' - ] - for cmd in cmds: - (status, output) = self.target.run(cmd, 900) - self.assertEqual(status, 0, msg="\n".join([cmd, output])) - - def tearDownLocal(self): - self.target.run('rm -f /tmp/Makefile /tmp/hellomod.c') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/ldd.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/ldd.py deleted file mode 100644 index 47b3885df..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/ldd.py +++ /dev/null @@ -1,21 +0,0 @@ -import unittest -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasFeature("tools-sdk"): - skipModule("Image doesn't have tools-sdk in IMAGE_FEATURES") - -class LddTest(oeRuntimeTest): - - @testcase(962) - @skipUnlessPassed('test_ssh') - def test_ldd_exists(self): - (status, output) = self.target.run('which ldd') - self.assertEqual(status, 0, msg = "ldd does not exist in PATH: which ldd: %s" % output) - - @testcase(239) - @skipUnlessPassed('test_ldd_exists') - def test_ldd_rtldlist_check(self): - (status, output) = self.target.run('for i in $(which ldd | xargs cat | grep "^RTLDLIST"|cut -d\'=\' -f2|tr -d \'"\'); do test -f $i && echo $i && break; done') - self.assertEqual(status, 0, msg = "ldd path not correct or RTLDLIST files don't exist. 
") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/loader.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/loader.py new file mode 100644 index 000000000..041ef976e --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/loader.py @@ -0,0 +1,16 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.loader import OETestLoader +from oeqa.runtime.case import OERuntimeTestCase + +class OERuntimeTestLoader(OETestLoader): + caseClass = OERuntimeTestCase + + def _getTestCase(self, testCaseClass, tcName): + case = super(OERuntimeTestLoader, self)._getTestCase(testCaseClass, tcName) + + # Adds custom attributes to the OERuntimeTestCase + setattr(case, 'target', self.tc.target) + + return case diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/logrotate.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/logrotate.py deleted file mode 100644 index de300bf55..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/logrotate.py +++ /dev/null @@ -1,28 +0,0 @@ -# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=289 testcase -# Note that the image under test must have logrotate installed - -import unittest -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasPackage("logrotate"): - skipModule("No logrotate package in image") - - -class LogrotateTest(oeRuntimeTest): - - @skipUnlessPassed("test_ssh") - def test_1_logrotate_setup(self): - (status, output) = self.target.run('mkdir $HOME/logrotate_dir') - self.assertEqual(status, 0, msg = "Could not create logrotate_dir. Output: %s" % output) - (status, output) = self.target.run("sed -i \"s#wtmp {#wtmp {\\n olddir $HOME/logrotate_dir#\" /etc/logrotate.conf") - self.assertEqual(status, 0, msg = "Could not write to logrotate.conf file. Status and output: %s and %s)" % (status, output)) - - @testcase(289) - @skipUnlessPassed("test_1_logrotate_setup") - def test_2_logrotate(self): - (status, output) = self.target.run('logrotate -f /etc/logrotate.conf') - self.assertEqual(status, 0, msg = "logrotate service could not be reloaded. Status and output: %s and %s" % (status, output)) - output = self.target.run('ls -la $HOME/logrotate_dir/ | wc -l')[1] - self.assertTrue(int(output)>=3, msg = "new logfile could not be created. List of files within log directory: %s" %(self.target.run('ls -la $HOME/logrotate_dir')[1])) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/multilib.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/multilib.py deleted file mode 100644 index 593d38502..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/multilib.py +++ /dev/null @@ -1,42 +0,0 @@ -import unittest -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - multilibs = oeRuntimeTest.tc.d.getVar("MULTILIBS", True) or "" - if "multilib:lib32" not in multilibs: - skipModule("this isn't a multilib:lib32 image") - - -class MultilibTest(oeRuntimeTest): - - def archtest(self, binary, arch): - """ - Check that ``binary`` has the ELF class ``arch`` (e.g. ELF32/ELF64). 
- """ - - (status, output) = self.target.run("readelf -h %s" % binary) - self.assertEqual(status, 0, "Failed to readelf %s" % binary) - - l = [l.split()[1] for l in output.split('\n') if "Class:" in l] - if l: - theclass = l[0] - else: - self.fail("Cannot parse readelf output\n" + s) - - self.assertEqual(theclass, arch, msg="%s isn't %s (is %s)" % (binary, arch, theclass)) - - @skipUnlessPassed('test_ssh') - def test_check_multilib_libc(self): - """ - Check that a multilib image has both 32-bit and 64-bit libc in. - """ - self.archtest("/lib/libc.so.6", "ELF32") - self.archtest("/lib64/libc.so.6", "ELF64") - - @testcase('279') - @skipUnlessPassed('test_check_multilib_libc') - def test_file_connman(self): - self.assertTrue(oeRuntimeTest.hasPackage('lib32-connman'), msg="This test assumes lib32-connman is installed") - - self.archtest("/usr/sbin/connmand", "ELF32") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/pam.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/pam.py deleted file mode 100644 index c8205c9ab..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/pam.py +++ /dev/null @@ -1,25 +0,0 @@ -# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=287 testcase -# Note that the image under test must have "pam" in DISTRO_FEATURES - -import unittest -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasFeature("pam"): - skipModule("target doesn't have 'pam' in DISTRO_FEATURES") - - -class PamBasicTest(oeRuntimeTest): - - @testcase(287) - @skipUnlessPassed('test_ssh') - def test_pam(self): - (status, output) = self.target.run('login --help') - self.assertEqual(status, 1, msg = "login command does not work as expected. Status and output:%s and %s" %(status, output)) - (status, output) = self.target.run('passwd --help') - self.assertEqual(status, 0, msg = "passwd command does not work as expected. Status and output:%s and %s" %(status, output)) - (status, output) = self.target.run('su --help') - self.assertEqual(status, 0, msg = "su command does not work as expected. Status and output:%s and %s" %(status, output)) - (status, output) = self.target.run('useradd --help') - self.assertEqual(status, 0, msg = "useradd command does not work as expected. Status and output:%s and %s" %(status, output)) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/parselogs.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/parselogs.py deleted file mode 100644 index aa5008bba..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/parselogs.py +++ /dev/null @@ -1,313 +0,0 @@ -import os -import unittest -import subprocess -from oeqa.oetest import oeRuntimeTest -from oeqa.utils.decorators import * - -#in the future these lists could be moved outside of module -errors = ["error", "cannot", "can\'t", "failed"] - -common_errors = [ - "(WW) warning, (EE) error, (NI) not implemented, (??) 
unknown.", - "dma timeout", - "can\'t add hid device:", - "usbhid: probe of ", - "_OSC failed (AE_ERROR)", - "_OSC failed (AE_SUPPORT)", - "AE_ALREADY_EXISTS", - "ACPI _OSC request failed (AE_SUPPORT)", - "can\'t disable ASPM", - "Failed to load module \"vesa\"", - "Failed to load module vesa", - "Failed to load module \"modesetting\"", - "Failed to load module modesetting", - "Failed to load module \"glx\"", - "Failed to load module \"fbdev\"", - "Failed to load module fbdev", - "Failed to load module glx", - "[drm] Cannot find any crtc or sizes - going 1024x768", - "_OSC failed (AE_NOT_FOUND); disabling ASPM", - "Open ACPI failed (/var/run/acpid.socket) (No such file or directory)", - "NX (Execute Disable) protection cannot be enabled: non-PAE kernel!", - "hd.: possibly failed opcode", - 'NETLINK INITIALIZATION FAILED', - 'kernel: Cannot find map file', - 'omap_hwmod: debugss: _wait_target_disable failed', - 'VGA arbiter: cannot open kernel arbiter, no multi-card support', - 'Failed to find URL:http://ipv4.connman.net/online/status.html', - 'Online check failed for', - 'netlink init failed', - 'Fast TSC calibration', - "BAR 0-9", - "Failed to load module \"ati\"", - "controller can't do DEVSLP, turning off", - "stmmac_dvr_probe: warning: cannot get CSR clock", - "error: couldn\'t mount because of unsupported optional features", - "GPT: Use GNU Parted to correct GPT errors", - ] - -video_related = [ - "uvesafb", -] - -x86_common = [ - '[drm:psb_do_init] *ERROR* Debug is', - 'wrong ELF class', - 'Could not enable PowerButton event', - 'probe of LNXPWRBN:00 failed with error -22', - 'pmd_set_huge: Cannot satisfy', - 'failed to setup card detect gpio', - 'amd_nb: Cannot enumerate AMD northbridges', - 'failed to retrieve link info, disabling eDP', - 'Direct firmware load for iwlwifi', -] + common_errors - -qemux86_common = [ - 'wrong ELF class', - "fail to add MMCONFIG information, can't access extended PCI configuration space under this bridge.", - "can't claim BAR ", - 'amd_nb: Cannot enumerate AMD northbridges', - 'uvesafb: 5000 ms task timeout, infinitely waiting', - 'tsc: HPET/PMTIMER calibration failed', -] + common_errors - -ignore_errors = { - 'default' : common_errors, - 'qemux86' : [ - 'Failed to access perfctr msr (MSR', - 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)', - ] + qemux86_common, - 'qemux86-64' : qemux86_common, - 'qemumips' : [ - 'Failed to load module "glx"', - 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)', - ] + common_errors, - 'qemumips64' : [ - 'pci 0000:00:00.0: [Firmware Bug]: reg 0x..: invalid BAR (can\'t size)', - ] + common_errors, - 'qemuppc' : [ - 'PCI 0000:00 Cannot reserve Legacy IO [io 0x0000-0x0fff]', - 'host side 80-wire cable detection failed, limiting max speed', - 'mode "640x480" test failed', - 'Failed to load module "glx"', - 'can\'t handle BAR above 4GB', - 'Cannot reserve Legacy IO', - ] + common_errors, - 'qemuarm' : [ - 'mmci-pl18x: probe of fpga:05 failed with error -22', - 'mmci-pl18x: probe of fpga:0b failed with error -22', - 'Failed to load module "glx"', - 'OF: amba_device_add() failed (-19) for /amba/smc@10100000', - 'OF: amba_device_add() failed (-19) for /amba/mpmc@10110000', - 'OF: amba_device_add() failed (-19) for /amba/sctl@101e0000', - 'OF: amba_device_add() failed (-19) for /amba/watchdog@101e1000', - 'OF: amba_device_add() failed (-19) for /amba/sci@101f0000', - 'OF: amba_device_add() failed (-19) for /amba/ssp@101f4000', - 'OF: amba_device_add() failed (-19) for 
/amba/fpga/sci@a000', - 'Failed to initialize \'/amba/timer@101e3000\': -22', - 'jitterentropy: Initialization failed with host not compliant with requirements: 2', - ] + common_errors, - 'qemuarm64' : [ - 'Fatal server error:', - '(EE) Server terminated with error (1). Closing log file.', - 'dmi: Firmware registration failed.', - 'irq: type mismatch, failed to map hwirq-27 for /intc', - ] + common_errors, - 'emenlow' : [ - '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness', - '(EE) Failed to load module "psb"', - '(EE) Failed to load module psb', - '(EE) Failed to load module "psbdrv"', - '(EE) Failed to load module psbdrv', - '(EE) open /dev/fb0: No such file or directory', - '(EE) AIGLX: reverting to software rendering', - ] + x86_common, - 'intel-core2-32' : [ - 'ACPI: No _BQC method, cannot determine initial brightness', - '[Firmware Bug]: ACPI: No _BQC method, cannot determine initial brightness', - '(EE) Failed to load module "psb"', - '(EE) Failed to load module psb', - '(EE) Failed to load module "psbdrv"', - '(EE) Failed to load module psbdrv', - '(EE) open /dev/fb0: No such file or directory', - '(EE) AIGLX: reverting to software rendering', - 'dmi: Firmware registration failed.', - 'ioremap error for 0x78', - ] + x86_common, - 'intel-corei7-64' : x86_common, - 'crownbay' : x86_common, - 'genericx86' : x86_common, - 'genericx86-64' : [ - 'Direct firmware load for i915', - 'Failed to load firmware i915', - 'Failed to fetch GuC', - 'Failed to initialize GuC', - 'Failed to load DMC firmware', - 'The driver is built-in, so to load the firmware you need to', - ] + x86_common, - 'edgerouter' : [ - 'Fatal server error:', - ] + common_errors, - 'jasperforest' : [ - 'Activated service \'org.bluez\' failed:', - 'Unable to find NFC netlink family', - ] + common_errors, -} - -log_locations = ["/var/log/","/var/log/dmesg", "/tmp/dmesg_output.log"] - -class ParseLogsTest(oeRuntimeTest): - - @classmethod - def setUpClass(self): - self.errors = errors - - # When systemd is enabled we need to notice errors on - # circular dependencies in units. - if self.hasFeature("systemd"): - self.errors.extend([ - 'Found ordering cycle on', - 'Breaking ordering cycle by deleting job', - 'deleted to break ordering cycle', - 'Ordering cycle found, skipping', - ]) - - self.ignore_errors = ignore_errors - self.log_locations = log_locations - self.msg = "" - (is_lsb, location) = oeRuntimeTest.tc.target.run("which LSB_Test.sh") - if is_lsb == 0: - for machine in self.ignore_errors: - self.ignore_errors[machine] = self.ignore_errors[machine] + video_related - - def getMachine(self): - return oeRuntimeTest.tc.d.getVar("MACHINE", True) - - def getWorkdir(self): - return oeRuntimeTest.tc.d.getVar("WORKDIR", True) - - #get some information on the CPU of the machine to display at the beginning of the output. This info might be useful in some cases. 
- def getHardwareInfo(self): - hwi = "" - (status, cpu_name) = self.target.run("cat /proc/cpuinfo | grep \"model name\" | head -n1 | awk 'BEGIN{FS=\":\"}{print $2}'") - (status, cpu_physical_cores) = self.target.run("cat /proc/cpuinfo | grep \"cpu cores\" | head -n1 | awk {'print $4'}") - (status, cpu_logical_cores) = self.target.run("cat /proc/cpuinfo | grep \"processor\" | wc -l") - (status, cpu_arch) = self.target.run("uname -m") - hwi += "Machine information: \n" - hwi += "*******************************\n" - hwi += "Machine name: "+self.getMachine()+"\n" - hwi += "CPU: "+str(cpu_name)+"\n" - hwi += "Arch: "+str(cpu_arch)+"\n" - hwi += "Physical cores: "+str(cpu_physical_cores)+"\n" - hwi += "Logical cores: "+str(cpu_logical_cores)+"\n" - hwi += "*******************************\n" - return hwi - - #go through the log locations provided and if it's a folder create a list with all the .log files in it, if it's a file just add - #it to that list - def getLogList(self, log_locations): - logs = [] - for location in log_locations: - (status, output) = self.target.run("test -f "+str(location)) - if (status == 0): - logs.append(str(location)) - else: - (status, output) = self.target.run("test -d "+str(location)) - if (status == 0): - (status, output) = self.target.run("find "+str(location)+"/*.log -maxdepth 1 -type f") - if (status == 0): - output = output.splitlines() - for logfile in output: - logs.append(os.path.join(location,str(logfile))) - return logs - - #copy the log files to be parsed locally - def transfer_logs(self, log_list): - workdir = self.getWorkdir() - self.target_logs = workdir + '/' + 'target_logs' - target_logs = self.target_logs - if not os.path.exists(target_logs): - os.makedirs(target_logs) - bb.utils.remove(self.target_logs + "/*") - for f in log_list: - self.target.copy_from(f, target_logs) - - #get the local list of logs - def get_local_log_list(self, log_locations): - self.transfer_logs(self.getLogList(log_locations)) - logs = [ os.path.join(self.target_logs, f) for f in os.listdir(self.target_logs) if os.path.isfile(os.path.join(self.target_logs, f)) ] - return logs - - #build the grep command to be used with filters and exclusions - def build_grepcmd(self, errors, ignore_errors, log): - grepcmd = "grep " - grepcmd +="-Ei \"" - for error in errors: - grepcmd += error+"|" - grepcmd = grepcmd[:-1] - grepcmd += "\" "+str(log)+" | grep -Eiv \'" - try: - errorlist = ignore_errors[self.getMachine()] - except KeyError: - self.msg += "No ignore list found for this machine, using default\n" - errorlist = ignore_errors['default'] - for ignore_error in errorlist: - ignore_error = ignore_error.replace("(", "\(") - ignore_error = ignore_error.replace(")", "\)") - ignore_error = ignore_error.replace("'", ".") - ignore_error = ignore_error.replace("?", "\?") - ignore_error = ignore_error.replace("[", "\[") - ignore_error = ignore_error.replace("]", "\]") - ignore_error = ignore_error.replace("*", "\*") - ignore_error = ignore_error.replace("0-9", "[0-9]") - grepcmd += ignore_error+"|" - grepcmd = grepcmd[:-1] - grepcmd += "\'" - return grepcmd - - #grep only the errors so that their context could be collected. 
Default context is 10 lines before and after the error itself - def parse_logs(self, errors, ignore_errors, logs, lines_before = 10, lines_after = 10): - results = {} - rez = [] - grep_output = '' - for log in logs: - result = None - thegrep = self.build_grepcmd(errors, ignore_errors, log) - try: - result = subprocess.check_output(thegrep, shell=True).decode("utf-8") - except: - pass - if (result is not None): - results[log.replace('target_logs/','')] = {} - rez = result.splitlines() - for xrez in rez: - try: - grep_output = subprocess.check_output(['grep', '-F', xrez, '-B', str(lines_before), '-A', str(lines_after), log]).decode("utf-8") - except: - pass - results[log.replace('target_logs/','')][xrez]=grep_output - return results - - #get the output of dmesg and write it in a file. This file is added to log_locations. - def write_dmesg(self): - (status, dmesg) = self.target.run("dmesg > /tmp/dmesg_output.log") - - @testcase(1059) - @skipUnlessPassed('test_ssh') - def test_parselogs(self): - self.write_dmesg() - log_list = self.get_local_log_list(self.log_locations) - result = self.parse_logs(self.errors, self.ignore_errors, log_list) - print(self.getHardwareInfo()) - errcount = 0 - for log in result: - self.msg += "Log: "+log+"\n" - self.msg += "-----------------------\n" - for error in result[log]: - errcount += 1 - self.msg += "Central error: "+str(error)+"\n" - self.msg += "***********************\n" - self.msg += result[str(log)][str(error)]+"\n" - self.msg += "***********************\n" - self.msg += "%s errors found in logs." % errcount - self.assertEqual(errcount, 0, msg=self.msg) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/perl.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/perl.py deleted file mode 100644 index e044d0a5f..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/perl.py +++ /dev/null @@ -1,30 +0,0 @@ -import unittest -import os -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasPackage("perl"): - skipModule("No perl package in the image") - - -class PerlTest(oeRuntimeTest): - - @classmethod - def setUpClass(self): - oeRuntimeTest.tc.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "test.pl"), "/tmp/test.pl") - - @testcase(1141) - def test_perl_exists(self): - (status, output) = self.target.run('which perl') - self.assertEqual(status, 0, msg="Perl binary not in PATH or not on target.") - - @testcase(208) - def test_perl_works(self): - (status, output) = self.target.run('perl /tmp/test.pl') - self.assertEqual(status, 0, msg="Exit status was not 0. 
Output: %s" % output) - self.assertEqual(output, "the value of a is 0.01", msg="Incorrect output: %s" % output) - - @classmethod - def tearDownClass(self): - oeRuntimeTest.tc.target.run("rm /tmp/test.pl") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/ping.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/ping.py deleted file mode 100644 index 0f2744792..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/ping.py +++ /dev/null @@ -1,22 +0,0 @@ -import subprocess -import unittest -import sys -import time -from oeqa.oetest import oeRuntimeTest -from oeqa.utils.decorators import * - -class PingTest(oeRuntimeTest): - - @testcase(964) - def test_ping(self): - output = '' - count = 0 - endtime = time.time() + 60 - while count < 5 and time.time() < endtime: - proc = subprocess.Popen("ping -c 1 %s" % self.target.ip, shell=True, stdout=subprocess.PIPE) - output += proc.communicate()[0].decode("utf-8") - if proc.poll() == 0: - count += 1 - else: - count = 0 - self.assertEqual(count, 5, msg = "Expected 5 consecutive replies, got %d.\nping output is:\n%s" % (count,output)) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/python.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/python.py deleted file mode 100644 index 29a231c7c..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/python.py +++ /dev/null @@ -1,35 +0,0 @@ -import unittest -import os -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasPackage("python-core"): - skipModule("No python package in the image") - - -class PythonTest(oeRuntimeTest): - - @classmethod - def setUpClass(self): - oeRuntimeTest.tc.target.copy_to(os.path.join(oeRuntimeTest.tc.filesdir, "test.py"), "/tmp/test.py") - - @testcase(1145) - def test_python_exists(self): - (status, output) = self.target.run('which python') - self.assertEqual(status, 0, msg="Python binary not in PATH or not on target.") - - @testcase(965) - def test_python_stdout(self): - (status, output) = self.target.run('python /tmp/test.py') - self.assertEqual(status, 0, msg="Exit status was not 0. 
Output: %s" % output) - self.assertEqual(output, "the value of a is 0.01", msg="Incorrect output: %s" % output) - - @testcase(1146) - def test_python_testfile(self): - (status, output) = self.target.run('ls /tmp/testfile.python') - self.assertEqual(status, 0, msg="Python test file generate failed.") - - @classmethod - def tearDownClass(self): - oeRuntimeTest.tc.target.run("rm /tmp/test.py /tmp/testfile.python") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/rpm.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/rpm.py deleted file mode 100644 index 7f514ca00..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/rpm.py +++ /dev/null @@ -1,120 +0,0 @@ -import unittest -import os -import fnmatch -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasFeature("package-management"): - skipModule("rpm module skipped: target doesn't have package-management in IMAGE_FEATURES") - if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]: - skipModule("rpm module skipped: target doesn't have rpm as primary package manager") - - -class RpmBasicTest(oeRuntimeTest): - - @testcase(960) - @skipUnlessPassed('test_ssh') - def test_rpm_help(self): - (status, output) = self.target.run('rpm --help') - self.assertEqual(status, 0, msg="status and output: %s and %s" % (status,output)) - - @testcase(191) - @skipUnlessPassed('test_rpm_help') - def test_rpm_query(self): - (status, output) = self.target.run('rpm -q rpm') - self.assertEqual(status, 0, msg="status and output: %s and %s" % (status,output)) - -class RpmInstallRemoveTest(oeRuntimeTest): - - @classmethod - def setUpClass(self): - pkgarch = oeRuntimeTest.tc.d.getVar('TUNE_PKGARCH', True).replace("-", "_") - rpmdir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR', True), "rpm", pkgarch) - # pick rpm-doc as a test file to get installed, because it's small and it will always be built for standard targets - for f in fnmatch.filter(os.listdir(rpmdir), "rpm-doc-*.%s.rpm" % pkgarch): - testrpmfile = f - oeRuntimeTest.tc.target.copy_to(os.path.join(rpmdir,testrpmfile), "/tmp/rpm-doc.rpm") - - @testcase(192) - @skipUnlessPassed('test_rpm_help') - def test_rpm_install(self): - (status, output) = self.target.run('rpm -ivh /tmp/rpm-doc.rpm') - self.assertEqual(status, 0, msg="Failed to install rpm-doc package: %s" % output) - - @testcase(194) - @skipUnlessPassed('test_rpm_install') - def test_rpm_remove(self): - (status,output) = self.target.run('rpm -e rpm-doc') - self.assertEqual(status, 0, msg="Failed to remove rpm-doc package: %s" % output) - - @testcase(1096) - @skipUnlessPassed('test_ssh') - def test_rpm_query_nonroot(self): - - def set_up_test_user(u): - (status, output) = self.target.run("id -u %s" % u) - if status == 0: - pass - else: - (status, output) = self.target.run("useradd %s" % u) - self.assertTrue(status == 0, msg="Failed to create new user: " + output) - - def exec_as_test_user(u): - (status, output) = self.target.run("su -c id %s" % u) - self.assertTrue("({0})".format(u) in output, msg="Failed to execute as new user") - (status, output) = self.target.run("su -c \"rpm -qa\" %s " % u) - self.assertEqual(status, 0, msg="status: %s. 
Cannot run rpm -qa: %s" % (status, output)) - - def unset_up_test_user(u): - (status, output) = self.target.run("userdel -r %s" % u) - self.assertTrue(status == 0, msg="Failed to erase user: %s" % output) - - tuser = 'test1' - - try: - set_up_test_user(tuser) - exec_as_test_user(tuser) - finally: - unset_up_test_user(tuser) - - @testcase(195) - @skipUnlessPassed('test_rpm_install') - def test_check_rpm_install_removal_log_file_size(self): - """ - Summary: Check rpm install/removal log file size - Expected: There should be some method to keep rpm log in a small size . - Product: BSPs - Author: Alexandru Georgescu - AutomatedBy: Daniel Istrate - """ - db_files_cmd = 'ls /var/lib/rpm/__db.*' - get_log_size_cmd = "du /var/lib/rpm/log/log.* | awk '{print $1}'" - - # Make sure that some database files are under /var/lib/rpm as '__db.xxx' - (status, output) = self.target.run(db_files_cmd) - self.assertEqual(0, status, 'Failed to find database files under /var/lib/rpm/ as __db.xxx') - - # Remove the package just in case - self.target.run('rpm -e rpm-doc') - - # Install/Remove a package 10 times - for i in range(10): - (status, output) = self.target.run('rpm -ivh /tmp/rpm-doc.rpm') - self.assertEqual(0, status, "Failed to install rpm-doc package. Reason: {}".format(output)) - - (status, output) = self.target.run('rpm -e rpm-doc') - self.assertEqual(0, status, "Failed to remove rpm-doc package. Reason: {}".format(output)) - - # Get the size of log file - (status, output) = self.target.run(get_log_size_cmd) - self.assertEqual(0, status, 'Failed to get the final size of the log file.') - - # Compare each log size - for log_file_size in output: - self.assertLessEqual(int(log_file_size), 11264, - 'Log file size is greater that expected (~10MB), found {} bytes'.format(log_file_size)) - - @classmethod - def tearDownClass(self): - oeRuntimeTest.tc.target.run('rm -f /tmp/rpm-doc.rpm') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/scanelf.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/scanelf.py deleted file mode 100644 index 67e02ff45..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/scanelf.py +++ /dev/null @@ -1,28 +0,0 @@ -import unittest -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasPackage("pax-utils"): - skipModule("pax-utils package not installed") - -class ScanelfTest(oeRuntimeTest): - - def setUpLocal(self): - self.scancmd = 'scanelf --quiet --recursive --mount --ldpath --path' - - @testcase(966) - @skipUnlessPassed('test_ssh') - def test_scanelf_textrel(self): - # print TEXTREL information - self.scancmd += " --textrel" - (status, output) = self.target.run(self.scancmd) - self.assertEqual(output.strip(), "", "\n".join([self.scancmd, output])) - - @testcase(967) - @skipUnlessPassed('test_ssh') - def test_scanelf_rpath(self): - # print RPATH information - self.scancmd += " --rpath" - (status, output) = self.target.run(self.scancmd) - self.assertEqual(output.strip(), "", "\n".join([self.scancmd, output])) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/scp.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/scp.py deleted file mode 100644 index 48e87d2d0..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/scp.py +++ /dev/null @@ -1,22 +0,0 @@ -import os -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import skipUnlessPassed, testcase - -def setUpModule(): - if not (oeRuntimeTest.hasPackage("dropbear") or 
oeRuntimeTest.hasPackage("openssh-sshd")): - skipModule("No ssh package in image") - -class ScpTest(oeRuntimeTest): - - @testcase(220) - @skipUnlessPassed('test_ssh') - def test_scp_file(self): - test_log_dir = oeRuntimeTest.tc.d.getVar("TEST_LOG_DIR", True) - test_file_path = os.path.join(test_log_dir, 'test_scp_file') - with open(test_file_path, 'w') as test_scp_file: - test_scp_file.seek(2 ** 22 - 1) - test_scp_file.write(os.linesep) - (status, output) = self.target.copy_to(test_file_path, '/tmp/test_scp_file') - self.assertEqual(status, 0, msg = "File could not be copied. Output: %s" % output) - (status, output) = self.target.run("ls -la /tmp/test_scp_file") - self.assertEqual(status, 0, msg = "SCP test failed") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/skeletoninit.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/skeletoninit.py deleted file mode 100644 index cb0cb9b4c..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/skeletoninit.py +++ /dev/null @@ -1,29 +0,0 @@ -# This test should cover https://bugzilla.yoctoproject.org/tr_show_case.cgi?case_id=284 testcase -# Note that the image under test must have meta-skeleton layer in bblayers and IMAGE_INSTALL_append = " service" in local.conf - -import unittest -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasPackage("service"): - skipModule("No service package in image") - - -class SkeletonBasicTest(oeRuntimeTest): - - @skipUnlessPassed('test_ssh') - @unittest.skipIf("systemd" == oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", False), "Not appropiate for systemd image") - def test_skeleton_availability(self): - (status, output) = self.target.run('ls /etc/init.d/skeleton') - self.assertEqual(status, 0, msg = "skeleton init script not found. Output:\n%s " % output) - (status, output) = self.target.run('ls /usr/sbin/skeleton-test') - self.assertEqual(status, 0, msg = "skeleton-test not found. 
Output:\n%s" % output) - - @testcase(284) - @skipUnlessPassed('test_skeleton_availability') - @unittest.skipIf("systemd" == oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", False), "Not appropiate for systemd image") - def test_skeleton_script(self): - output1 = self.target.run("/etc/init.d/skeleton start")[1] - (status, output2) = self.target.run(oeRuntimeTest.pscmd + ' | grep [s]keleton-test') - self.assertEqual(status, 0, msg = "Skeleton script could not be started:\n%s\n%s" % (output1, output2)) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/smart.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/smart.py deleted file mode 100644 index 6cdb10d63..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/smart.py +++ /dev/null @@ -1,218 +0,0 @@ -import unittest -import re -import oe -import subprocess -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * -from oeqa.utils.httpserver import HTTPService - -def setUpModule(): - if not oeRuntimeTest.hasFeature("package-management"): - skipModule("Image doesn't have package management feature") - if not oeRuntimeTest.hasPackage("smartpm"): - skipModule("Image doesn't have smart installed") - if "package_rpm" != oeRuntimeTest.tc.d.getVar("PACKAGE_CLASSES", True).split()[0]: - skipModule("Rpm is not the primary package manager") - -class SmartTest(oeRuntimeTest): - - @skipUnlessPassed('test_smart_help') - def smart(self, command, expected = 0): - command = 'smart %s' % command - status, output = self.target.run(command, 1500) - message = os.linesep.join([command, output]) - self.assertEqual(status, expected, message) - self.assertFalse("Cannot allocate memory" in output, message) - return output - -class SmartBasicTest(SmartTest): - - @testcase(716) - @skipUnlessPassed('test_ssh') - def test_smart_help(self): - self.smart('--help') - - @testcase(968) - def test_smart_version(self): - self.smart('--version') - - @testcase(721) - def test_smart_info(self): - self.smart('info python-smartpm') - - @testcase(421) - def test_smart_query(self): - self.smart('query python-smartpm') - - @testcase(720) - def test_smart_search(self): - self.smart('search python-smartpm') - - @testcase(722) - def test_smart_stats(self): - self.smart('stats') - -class SmartRepoTest(SmartTest): - - @classmethod - def create_index(self, arg): - index_cmd = arg - try: - bb.note("Executing '%s' ..." 
% index_cmd) - result = subprocess.check_output(index_cmd, stderr=subprocess.STDOUT, shell=True).decode("utf-8") - except subprocess.CalledProcessError as e: - return("Index creation command '%s' failed with return code %d:\n%s" % - (e.cmd, e.returncode, e.output.decode("utf-8"))) - if result: - bb.note(result) - return None - - @classmethod - def setUpClass(self): - self.repolist = [] - - # Index RPMs - rpm_createrepo = bb.utils.which(os.getenv('PATH'), "createrepo") - index_cmds = [] - rpm_dirs_found = False - archs = (oeRuntimeTest.tc.d.getVar('ALL_MULTILIB_PACKAGE_ARCHS', True) or "").replace('-', '_').split() - for arch in archs: - rpm_dir = os.path.join(oeRuntimeTest.tc.d.getVar('DEPLOY_DIR_RPM', True), arch) - idx_path = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True), 'rpm', arch) - db_path = os.path.join(oeRuntimeTest.tc.d.getVar('WORKDIR', True), 'rpmdb', arch) - if not os.path.isdir(rpm_dir): - continue - if os.path.exists(db_path): - bb.utils.remove(dbpath, True) - lockfilename = oeRuntimeTest.tc.d.getVar('DEPLOY_DIR_RPM', True) + "/rpm.lock" - lf = bb.utils.lockfile(lockfilename, False) - oe.path.copyhardlinktree(rpm_dir, idx_path) - # Full indexes overload a 256MB image so reduce the number of rpms - # in the feed. Filter to p* since we use the psplash packages and - # this leaves some allarch and machine arch packages too. - bb.utils.remove(idx_path + "*/[a-oq-z]*.rpm") - bb.utils.unlockfile(lf) - index_cmds.append("%s --dbpath %s --update -q %s" % (rpm_createrepo, db_path, idx_path)) - rpm_dirs_found = True - # Create repodata¬ - result = oe.utils.multiprocess_exec(index_cmds, self.create_index) - if result: - bb.fatal('%s' % ('\n'.join(result))) - self.repo_server = HTTPService(oeRuntimeTest.tc.d.getVar('WORKDIR', True), oeRuntimeTest.tc.target.server_ip) - self.repo_server.start() - - @classmethod - def tearDownClass(self): - self.repo_server.stop() - for i in self.repolist: - oeRuntimeTest.tc.target.run('smart channel -y --remove '+str(i)) - - @testcase(1143) - def test_smart_channel(self): - self.smart('channel', 1) - - @testcase(719) - def test_smart_channel_add(self): - image_pkgtype = self.tc.d.getVar('IMAGE_PKGTYPE', True) - deploy_url = 'http://%s:%s/%s' %(self.target.server_ip, self.repo_server.port, image_pkgtype) - pkgarchs = self.tc.d.getVar('PACKAGE_ARCHS', True).replace("-","_").split() - for arch in os.listdir('%s/%s' % (self.repo_server.root_dir, image_pkgtype)): - if arch in pkgarchs: - self.smart('channel -y --add {a} type=rpm-md baseurl={u}/{a}'.format(a=arch, u=deploy_url)) - self.repolist.append(arch) - self.smart('update') - - @testcase(969) - def test_smart_channel_help(self): - self.smart('channel --help') - - @testcase(970) - def test_smart_channel_list(self): - self.smart('channel --list') - - @testcase(971) - def test_smart_channel_show(self): - self.smart('channel --show') - - @testcase(717) - def test_smart_channel_rpmsys(self): - self.smart('channel --show rpmsys') - self.smart('channel --disable rpmsys') - self.smart('channel --enable rpmsys') - - @testcase(1144) - @skipUnlessPassed('test_smart_channel_add') - def test_smart_install(self): - self.smart('remove -y psplash-default') - self.smart('install -y psplash-default') - - @testcase(728) - @skipUnlessPassed('test_smart_install') - def test_smart_install_dependency(self): - self.smart('remove -y psplash') - self.smart('install -y psplash-default') - - @testcase(723) - @skipUnlessPassed('test_smart_channel_add') - def test_smart_install_from_disk(self): - self.smart('remove -y 
psplash-default') - self.smart('download psplash-default') - self.smart('install -y ./psplash-default*') - - @testcase(725) - @skipUnlessPassed('test_smart_channel_add') - def test_smart_install_from_http(self): - output = self.smart('download --urls psplash-default') - url = re.search('(http://.*/psplash-default.*\.rpm)', output) - self.assertTrue(url, msg="Couln't find download url in %s" % output) - self.smart('remove -y psplash-default') - self.smart('install -y %s' % url.group(0)) - - @testcase(729) - @skipUnlessPassed('test_smart_install') - def test_smart_reinstall(self): - self.smart('reinstall -y psplash-default') - - @testcase(727) - @skipUnlessPassed('test_smart_channel_add') - def test_smart_remote_repo(self): - self.smart('update') - self.smart('install -y psplash') - self.smart('remove -y psplash') - - @testcase(726) - def test_smart_local_dir(self): - self.target.run('mkdir /tmp/myrpmdir') - self.smart('channel --add myrpmdir type=rpm-dir path=/tmp/myrpmdir -y') - self.target.run('cd /tmp/myrpmdir') - self.smart('download psplash') - output = self.smart('channel --list') - for i in output.split("\n"): - if ("rpmsys" != str(i)) and ("myrpmdir" != str(i)): - self.smart('channel --disable '+str(i)) - self.target.run('cd $HOME') - self.smart('install psplash') - for i in output.split("\n"): - if ("rpmsys" != str(i)) and ("myrpmdir" != str(i)): - self.smart('channel --enable '+str(i)) - self.smart('channel --remove myrpmdir -y') - self.target.run("rm -rf /tmp/myrpmdir") - - @testcase(718) - def test_smart_add_rpmdir(self): - self.target.run('mkdir /tmp/myrpmdir') - self.smart('channel --add myrpmdir type=rpm-dir path=/tmp/myrpmdir -y') - self.smart('channel --disable myrpmdir -y') - output = self.smart('channel --show myrpmdir') - self.assertTrue("disabled = yes" in output, msg="Failed to disable rpm dir") - self.smart('channel --enable myrpmdir -y') - output = self.smart('channel --show myrpmdir') - self.assertFalse("disabled = yes" in output, msg="Failed to enable rpm dir") - self.smart('channel --remove myrpmdir -y') - self.target.run("rm -rf /tmp/myrpmdir") - - @testcase(731) - @skipUnlessPassed('test_smart_channel_add') - def test_smart_remove_package(self): - self.smart('install -y psplash') - self.smart('remove -y psplash') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/ssh.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/ssh.py deleted file mode 100644 index 0e76d5d51..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/ssh.py +++ /dev/null @@ -1,19 +0,0 @@ -import subprocess -import unittest -import sys -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not (oeRuntimeTest.hasPackage("dropbear") or oeRuntimeTest.hasPackage("openssh")): - skipModule("No ssh package in image") - -class SshTest(oeRuntimeTest): - - @testcase(224) - @skipUnlessPassed('test_ping') - def test_ssh(self): - (status, output) = self.target.run('uname -a') - self.assertEqual(status, 0, msg="SSH Test failed: %s" % output) - (status, output) = self.target.run('cat /etc/masterimage') - self.assertEqual(status, 1, msg="This isn't the right image - /etc/masterimage shouldn't be here %s" % output) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/syslog.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/syslog.py deleted file mode 100644 index 8f550329e..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/syslog.py +++ /dev/null @@ -1,52 +0,0 @@ -import unittest -from oeqa.oetest import 
oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not (oeRuntimeTest.hasPackage("busybox-syslog") or oeRuntimeTest.hasPackage("sysklogd")): - skipModule("No syslog package in image") - -class SyslogTest(oeRuntimeTest): - - @testcase(201) - def test_syslog_running(self): - (status,output) = self.target.run(oeRuntimeTest.pscmd + ' | grep -i [s]yslogd') - self.assertEqual(status, 0, msg="no syslogd process, ps output: %s" % self.target.run(oeRuntimeTest.pscmd)[1]) - -class SyslogTestConfig(oeRuntimeTest): - - @testcase(1149) - @skipUnlessPassed("test_syslog_running") - def test_syslog_logger(self): - (status, output) = self.target.run('logger foobar') - self.assertEqual(status, 0, msg="Can't log into syslog. Output: %s " % output) - - (status, output) = self.target.run('grep foobar /var/log/messages') - if status != 0: - if oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", "") == "systemd": - (status, output) = self.target.run('journalctl -o cat | grep foobar') - else: - (status, output) = self.target.run('logread | grep foobar') - self.assertEqual(status, 0, msg="Test log string not found in /var/log/messages or logread. Output: %s " % output) - - @testcase(1150) - @skipUnlessPassed("test_syslog_running") - def test_syslog_restart(self): - if "systemd" != oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", False): - (status,output) = self.target.run('/etc/init.d/syslog restart') - else: - (status,output) = self.target.run('systemctl restart syslog.service') - - @testcase(202) - @skipUnlessPassed("test_syslog_restart") - @skipUnlessPassed("test_syslog_logger") - @unittest.skipIf("systemd" == oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", False), "Not appropiate for systemd image") - @unittest.skipIf(oeRuntimeTest.hasPackage("sysklogd") or not oeRuntimeTest.hasPackage("busybox"), "Non-busybox syslog") - def test_syslog_startup_config(self): - self.target.run('echo "LOGFILE=/var/log/test" >> /etc/syslog-startup.conf') - (status,output) = self.target.run('/etc/init.d/syslog restart') - self.assertEqual(status, 0, msg="Could not restart syslog service. Status and output: %s and %s" % (status,output)) - (status,output) = self.target.run('logger foobar && grep foobar /var/log/test') - self.assertEqual(status, 0, msg="Test log string not found. 
Output: %s " % output) - self.target.run("sed -i 's#LOGFILE=/var/log/test##' /etc/syslog-startup.conf") - self.target.run('/etc/init.d/syslog restart') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/systemd.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/systemd.py deleted file mode 100644 index 8de799cd6..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/systemd.py +++ /dev/null @@ -1,178 +0,0 @@ -import unittest -import re -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasFeature("systemd"): - skipModule("target doesn't have systemd in DISTRO_FEATURES") - if "systemd" != oeRuntimeTest.tc.d.getVar("VIRTUAL-RUNTIME_init_manager", True): - skipModule("systemd is not the init manager for this image") - - -class SystemdTest(oeRuntimeTest): - - def systemctl(self, action = '', target = '', expected = 0, verbose = False): - command = 'systemctl %s %s' % (action, target) - status, output = self.target.run(command) - message = '\n'.join([command, output]) - if status != expected and verbose: - message += self.target.run('systemctl status --full %s' % target)[1] - self.assertEqual(status, expected, message) - return output - - #TODO: use pyjournalctl instead - def journalctl(self, args='',l_match_units=[]): - """ - Request for the journalctl output to the current target system - - Arguments: - -args, an optional argument pass through argument - -l_match_units, an optional list of units to filter the output - Returns: - -string output of the journalctl command - Raises: - -AssertionError, on remote commands that fail - -ValueError, on a journalctl call with filtering by l_match_units that - returned no entries - """ - query_units="" - if len(l_match_units): - query_units = ['_SYSTEMD_UNIT='+unit for unit in l_match_units] - query_units = " ".join(query_units) - command = 'journalctl %s %s' %(args, query_units) - status, output = self.target.run(command) - if status: - raise AssertionError("Command '%s' returned non-zero exit \ - code %d:\n%s" % (command, status, output)) - if len(output) == 1 and "-- No entries --" in output: - raise ValueError("List of units to match: %s, returned no entries" - % l_match_units) - return output - -class SystemdBasicTests(SystemdTest): - - @skipUnlessPassed('test_ssh') - def test_systemd_basic(self): - self.systemctl('--version') - - @testcase(551) - @skipUnlessPassed('test_systemd_basic') - def test_systemd_list(self): - self.systemctl('list-unit-files') - - def settle(self): - """ - Block until systemd has finished activating any units being activated, - or until two minutes has elapsed. - - Returns a tuple, either (True, '') if all units have finished - activating, or (False, message string) if there are still units - activating (generally, failing units that restart). 
- """ - import time - endtime = time.time() + (60 * 2) - while True: - status, output = self.target.run('systemctl --state=activating') - if "0 loaded units listed" in output: - return (True, '') - if time.time() >= endtime: - return (False, output) - time.sleep(10) - - @testcase(550) - @skipUnlessPassed('test_systemd_basic') - def test_systemd_failed(self): - settled, output = self.settle() - self.assertTrue(settled, msg="Timed out waiting for systemd to settle:\n" + output) - - output = self.systemctl('list-units', '--failed') - match = re.search("0 loaded units listed", output) - if not match: - output += self.systemctl('status --full --failed') - self.assertTrue(match, msg="Some systemd units failed:\n%s" % output) - - -class SystemdServiceTests(SystemdTest): - - def check_for_avahi(self): - if not self.hasPackage('avahi-daemon'): - raise unittest.SkipTest("Testcase dependency not met: need avahi-daemon installed on target") - - @skipUnlessPassed('test_systemd_basic') - def test_systemd_status(self): - self.check_for_avahi() - self.systemctl('status --full', 'avahi-daemon.service') - - @testcase(695) - @skipUnlessPassed('test_systemd_status') - def test_systemd_stop_start(self): - self.check_for_avahi() - self.systemctl('stop', 'avahi-daemon.service') - self.systemctl('is-active', 'avahi-daemon.service', expected=3, verbose=True) - self.systemctl('start','avahi-daemon.service') - self.systemctl('is-active', 'avahi-daemon.service', verbose=True) - - @testcase(696) - @skipUnlessPassed('test_systemd_basic') - def test_systemd_disable_enable(self): - self.check_for_avahi() - self.systemctl('disable', 'avahi-daemon.service') - self.systemctl('is-enabled', 'avahi-daemon.service', expected=1) - self.systemctl('enable', 'avahi-daemon.service') - self.systemctl('is-enabled', 'avahi-daemon.service') - -class SystemdJournalTests(SystemdTest): - @skipUnlessPassed('test_ssh') - def test_systemd_journal(self): - (status, output) = self.target.run('journalctl') - self.assertEqual(status, 0, output) - - @skipUnlessPassed('test_systemd_basic') - def test_systemd_boot_time(self, systemd_TimeoutStartSec=90): - """ - Get the target boot time from journalctl and log it - - Arguments: - -systemd_TimeoutStartSec, an optional argument containing systemd's - unit start timeout to compare against - """ - - # the expression chain that uniquely identifies the time boot message - expr_items=["Startup finished","kernel", "userspace","\.$"] - try: - output = self.journalctl(args="-o cat --reverse") - except AssertionError: - self.fail("Error occurred while calling journalctl") - if not len(output): - self.fail("Error, unable to get startup time from systemd journal") - - # check for the regular expression items that match the startup time - for line in output.split('\n'): - check_match = "".join(re.findall(".*".join(expr_items), line)) - if check_match: break - # put the startup time in the test log - if check_match: - print("%s" % check_match) - else: - self.skipTest("Error at obtaining the boot time from journalctl") - boot_time_sec = 0 - - # get the numeric values from the string and convert them to seconds - # same data will be placed in list and string for manipulation - l_boot_time = check_match.split(" ")[-2:] - s_boot_time = " ".join(l_boot_time) - try: - # Obtain the minutes it took to boot - if l_boot_time[0].endswith('min') and l_boot_time[0][0].isdigit(): - boot_time_min = s_boot_time.split("min")[0] - # convert to seconds and accumulate it - boot_time_sec += int(boot_time_min) * 60 - # Obtain the 
seconds it took to boot and accumulate - boot_time_sec += float(l_boot_time[1].split("s")[0]) - except ValueError: - self.skipTest("Error when parsing time from boot string") - #Assert the target boot time against systemd's unit start timeout - if boot_time_sec > systemd_TimeoutStartSec: - print("Target boot time %s exceeds systemd's TimeoutStartSec %s"\ - %(boot_time_sec, systemd_TimeoutStartSec)) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/utils/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/utils/targetbuildproject.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/utils/targetbuildproject.py new file mode 100644 index 000000000..5af55d736 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/runtime/utils/targetbuildproject.py @@ -0,0 +1,39 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.utils.buildproject import BuildProject + +class TargetBuildProject(BuildProject): + + def __init__(self, target, uri, foldername=None, dl_dir=None): + self.target = target + self.targetdir = "~/" + BuildProject.__init__(self, uri, foldername, dl_dir=dl_dir) + + def download_archive(self): + self._download_archive() + + status, output = self.target.copyTo(self.localarchive, self.targetdir) + if status: + raise Exception('Failed to copy archive to target, ' + 'output: %s' % output) + + cmd = 'tar xf %s%s -C %s' % (self.targetdir, + self.archive, + self.targetdir) + status, output = self.target.run(cmd) + if status: + raise Exception('Failed to extract archive, ' + 'output: %s' % output) + + # Change targetdir to project folder + self.targetdir = self.targetdir + self.fname + + # The timeout parameter of target.run is set to 0 + # to make the ssh command run with no timeout. + def _run(self, cmd): + ret = self.target.run(cmd, 0) + msg = "Command %s failed with exit code %s: %s" % (cmd, ret[0], ret[1]) + if ret[0] != 0: + raise Exception(msg) + return ret[0] diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/x32lib.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/x32lib.py deleted file mode 100644 index ce5e21403..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/x32lib.py +++ /dev/null @@ -1,18 +0,0 @@ -import unittest -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - #check if DEFAULTTUNE is set and it's value is: x86-64-x32 - defaulttune = oeRuntimeTest.tc.d.getVar("DEFAULTTUNE", True) - if "x86-64-x32" not in defaulttune: - skipModule("DEFAULTTUNE is not set to x86-64-x32") - -class X32libTest(oeRuntimeTest): - - @testcase(281) - @skipUnlessPassed("test_ssh") - def test_x32_file(self): - status1 = self.target.run("readelf -h /bin/ls | grep Class | grep ELF32")[0] - status2 = self.target.run("readelf -h /bin/ls | grep Machine | grep X86-64")[0] - self.assertTrue(status1 == 0 and status2 == 0, msg="/bin/ls isn't an X86-64 ELF32 binary. 
readelf says: %s" % self.target.run("readelf -h /bin/ls")[1]) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/runtime/xorg.py b/import-layers/yocto-poky/meta/lib/oeqa/runtime/xorg.py deleted file mode 100644 index 12bcd371a..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/runtime/xorg.py +++ /dev/null @@ -1,16 +0,0 @@ -import unittest -from oeqa.oetest import oeRuntimeTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeRuntimeTest.hasFeature("x11-base"): - skipModule("target doesn't have x11 in IMAGE_FEATURES") - - -class XorgTest(oeRuntimeTest): - - @testcase(1151) - @skipUnlessPassed('test_ssh') - def test_xorg_running(self): - (status, output) = self.target.run(oeRuntimeTest.pscmd + ' | grep -v xinit | grep [X]org') - self.assertEqual(status, 0, msg="Xorg does not appear to be running %s" % self.target.run(oeRuntimeTest.pscmd)[1]) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/__init__.py index 4cf3fa76b..e69de29bb 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/sdk/__init__.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdk/__init__.py @@ -1,3 +0,0 @@ -# Enable other layers to have tests in the same named directory -from pkgutil import extend_path -__path__ = extend_path(__path__, __name__) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/buildcvs.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/buildcvs.py deleted file mode 100644 index c7146fa4a..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/sdk/buildcvs.py +++ /dev/null @@ -1,25 +0,0 @@ -from oeqa.oetest import oeSDKTest, skipModule -from oeqa.utils.decorators import * -from oeqa.utils.targetbuild import SDKBuildProject - -class BuildCvsTest(oeSDKTest): - - @classmethod - def setUpClass(self): - self.project = SDKBuildProject(oeSDKTest.tc.sdktestdir + "/cvs/", oeSDKTest.tc.sdkenv, oeSDKTest.tc.d, - "http://ftp.gnu.org/non-gnu/cvs/source/feature/1.12.13/cvs-1.12.13.tar.bz2") - self.project.download_archive() - - def test_cvs(self): - self.assertEqual(self.project.run_configure(), 0, - msg="Running configure failed") - - self.assertEqual(self.project.run_make(), 0, - msg="Running make failed") - - self.assertEqual(self.project.run_install(), 0, - msg="Running make install failed") - - @classmethod - def tearDownClass(self): - self.project.clean() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/buildgalculator.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/buildgalculator.py deleted file mode 100644 index dc2fa9ce1..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/sdk/buildgalculator.py +++ /dev/null @@ -1,27 +0,0 @@ -from oeqa.oetest import oeSDKTest, skipModule -from oeqa.utils.decorators import * -from oeqa.utils.targetbuild import SDKBuildProject - -def setUpModule(): - if not (oeSDKTest.hasPackage("gtk+3") or oeSDKTest.hasPackage("libgtk-3.0")): - skipModule("Image doesn't have gtk+3 in manifest") - -class GalculatorTest(oeSDKTest): - def test_galculator(self): - try: - project = SDKBuildProject(oeSDKTest.tc.sdktestdir + "/galculator/", - oeSDKTest.tc.sdkenv, oeSDKTest.tc.d, - "http://galculator.mnim.org/downloads/galculator-2.1.4.tar.bz2") - - project.download_archive() - - # regenerate configure to get support for --with-libtool-sysroot - legacy_preconf=("autoreconf -i -f -I ${OECORE_TARGET_SYSROOT}/usr/share/aclocal -I m4;") - - self.assertEqual(project.run_configure(extra_cmds=legacy_preconf), - 0, msg="Running configure failed") - - self.assertEqual(project.run_make(), 0, - 
msg="Running make failed") - finally: - project.clean() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/buildiptables.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/buildiptables.py deleted file mode 100644 index f0cb8a428..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/sdk/buildiptables.py +++ /dev/null @@ -1,26 +0,0 @@ -from oeqa.oetest import oeSDKTest -from oeqa.utils.decorators import * -from oeqa.utils.targetbuild import SDKBuildProject - - -class BuildIptablesTest(oeSDKTest): - - @classmethod - def setUpClass(self): - self.project = SDKBuildProject(oeSDKTest.tc.sdktestdir + "/iptables/", oeSDKTest.tc.sdkenv, oeSDKTest.tc.d, - "http://downloads.yoctoproject.org/mirror/sources/iptables-1.4.13.tar.bz2") - self.project.download_archive() - - def test_iptables(self): - self.assertEqual(self.project.run_configure(), 0, - msg="Running configure failed") - - self.assertEqual(self.project.run_make(), 0, - msg="Running make failed") - - self.assertEqual(self.project.run_install(), 0, - msg="Running make install failed") - - @classmethod - def tearDownClass(self): - self.project.clean() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/case.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/case.py new file mode 100644 index 000000000..963aa8d35 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdk/case.py @@ -0,0 +1,12 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import subprocess + +from oeqa.core.case import OETestCase + +class OESDKTestCase(OETestCase): + def _run(self, cmd): + return subprocess.check_output(". %s > /dev/null; %s;" % \ + (self.tc.sdk_env, cmd), shell=True, + stderr=subprocess.STDOUT, universal_newlines=True) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildcpio.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildcpio.py new file mode 100644 index 000000000..333dc7c22 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildcpio.py @@ -0,0 +1,33 @@ +import unittest +from oeqa.sdk.case import OESDKTestCase +from oeqa.sdk.utils.sdkbuildproject import SDKBuildProject + +class BuildCpioTest(OESDKTestCase): + td_vars = ['DATETIME'] + + @classmethod + def setUpClass(self): + dl_dir = self.td.get('DL_DIR', None) + + self.project = SDKBuildProject(self.tc.sdk_dir + "/cpio/", self.tc.sdk_env, + "https://ftp.gnu.org/gnu/cpio/cpio-2.12.tar.gz", + self.tc.sdk_dir, self.td['DATETIME'], dl_dir=dl_dir) + self.project.download_archive() + + machine = self.td.get("MACHINE") + if not self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine): + raise unittest.SkipTest("SDK doesn't contain a cross-canadian toolchain") + + def test_cpio(self): + self.assertEqual(self.project.run_configure(), 0, + msg="Running configure failed") + + self.assertEqual(self.project.run_make(), 0, + msg="Running make failed") + + self.assertEqual(self.project.run_install(), 0, + msg="Running make install failed") + + @classmethod + def tearDownClass(self): + self.project.clean() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildgalculator.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildgalculator.py new file mode 100644 index 000000000..42e8ddb18 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildgalculator.py @@ -0,0 +1,35 @@ +import unittest + +from oeqa.sdk.case import OESDKTestCase +from oeqa.sdk.utils.sdkbuildproject import SDKBuildProject + +class GalculatorTest(OESDKTestCase): + td_vars = ['DATETIME'] + + @classmethod + def 
setUpClass(self): + if not (self.tc.hasTargetPackage("gtk+3") or\ + self.tc.hasTargetPackage("libgtk-3.0")): + raise unittest.SkipTest("GalculatorTest class: SDK don't support gtk+3") + + def test_galculator(self): + dl_dir = self.td.get('DL_DIR', None) + project = None + try: + project = SDKBuildProject(self.tc.sdk_dir + "/galculator/", + self.tc.sdk_env, + "http://galculator.mnim.org/downloads/galculator-2.1.4.tar.bz2", + self.tc.sdk_dir, self.td['DATETIME'], dl_dir=dl_dir) + + project.download_archive() + + # regenerate configure to get support for --with-libtool-sysroot + legacy_preconf=("autoreconf -i -f -I ${OECORE_TARGET_SYSROOT}/usr/share/aclocal -I m4;") + + self.assertEqual(project.run_configure(extra_cmds=legacy_preconf), + 0, msg="Running configure failed") + + self.assertEqual(project.run_make(), 0, + msg="Running make failed") + finally: + project.clean() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildlzip.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildlzip.py new file mode 100644 index 000000000..2a53b783c --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/buildlzip.py @@ -0,0 +1,35 @@ +import unittest +from oeqa.sdk.case import OESDKTestCase +from oeqa.sdk.utils.sdkbuildproject import SDKBuildProject + + +class BuildLzipTest(OESDKTestCase): + td_vars = ['DATETIME'] + + @classmethod + def setUpClass(self): + dl_dir = self.td.get('DL_DIR', None) + + self.project = SDKBuildProject(self.tc.sdk_dir + "/lzip/", self.tc.sdk_env, + "http://downloads.yoctoproject.org/mirror/sources/lzip-1.19.tar.gz", + self.tc.sdk_dir, self.td['DATETIME'], dl_dir=dl_dir) + self.project.download_archive() + + machine = self.td.get("MACHINE") + + if not self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine): + raise unittest.SkipTest("SDK doesn't contain a cross-canadian toolchain") + + def test_lzip(self): + self.assertEqual(self.project.run_configure(), 0, + msg="Running configure failed") + + self.assertEqual(self.project.run_make(), 0, + msg="Running make failed") + + self.assertEqual(self.project.run_install(), 0, + msg="Running make install failed") + + @classmethod + def tearDownClass(self): + self.project.clean() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/gcc.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/gcc.py new file mode 100644 index 000000000..74ad2a2f2 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/gcc.py @@ -0,0 +1,42 @@ +import os +import shutil +import unittest + +from oeqa.core.utils.path import remove_safe +from oeqa.sdk.case import OESDKTestCase + +class GccCompileTest(OESDKTestCase): + td_vars = ['MACHINE'] + + @classmethod + def setUpClass(self): + files = {'test.c' : self.tc.files_dir, 'test.cpp' : self.tc.files_dir, + 'testsdkmakefile' : self.tc.sdk_files_dir} + for f in files: + shutil.copyfile(os.path.join(files[f], f), + os.path.join(self.tc.sdk_dir, f)) + + def setUp(self): + machine = self.td.get("MACHINE") + if not self.tc.hasHostPackage("packagegroup-cross-canadian-%s" % machine): + raise unittest.SkipTest("GccCompileTest class: SDK doesn't contain a cross-canadian toolchain") + + def test_gcc_compile(self): + self._run('$CC %s/test.c -o %s/test -lm' % (self.tc.sdk_dir, self.tc.sdk_dir)) + + def test_gpp_compile(self): + self._run('$CXX %s/test.c -o %s/test -lm' % (self.tc.sdk_dir, self.tc.sdk_dir)) + + def test_gpp2_compile(self): + self._run('$CXX %s/test.cpp -o %s/test -lm' % (self.tc.sdk_dir, self.tc.sdk_dir)) + + def test_make(self): + self._run('cd %s; 
make -f testsdkmakefile' % self.tc.sdk_dir) + + @classmethod + def tearDownClass(self): + files = [os.path.join(self.tc.sdk_dir, f) \ + for f in ['test.c', 'test.cpp', 'test.o', 'test', + 'testsdkmakefile']] + for f in files: + remove_safe(f) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/perl.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/perl.py new file mode 100644 index 000000000..e1bded2ff --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/perl.py @@ -0,0 +1,27 @@ +import os +import shutil +import unittest + +from oeqa.core.utils.path import remove_safe +from oeqa.sdk.case import OESDKTestCase + +class PerlTest(OESDKTestCase): + @classmethod + def setUpClass(self): + if not self.tc.hasHostPackage("nativesdk-perl"): + raise unittest.SkipTest("No perl package in the SDK") + + for f in ['test.pl']: + shutil.copyfile(os.path.join(self.tc.files_dir, f), + os.path.join(self.tc.sdk_dir, f)) + self.testfile = os.path.join(self.tc.sdk_dir, "test.pl") + + def test_perl_exists(self): + self._run('which perl') + + def test_perl_works(self): + self._run('perl %s' % self.testfile) + + @classmethod + def tearDownClass(self): + remove_safe(self.testfile) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/python.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/python.py new file mode 100644 index 000000000..94a296f0e --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdk/cases/python.py @@ -0,0 +1,31 @@ +import os +import shutil +import unittest + +from oeqa.core.utils.path import remove_safe +from oeqa.sdk.case import OESDKTestCase + +class PythonTest(OESDKTestCase): + @classmethod + def setUpClass(self): + if not self.tc.hasHostPackage("nativesdk-python"): + raise unittest.SkipTest("No python package in the SDK") + + for f in ['test.py']: + shutil.copyfile(os.path.join(self.tc.files_dir, f), + os.path.join(self.tc.sdk_dir, f)) + + def test_python_exists(self): + self._run('which python') + + def test_python_stdout(self): + output = self._run('python %s/test.py' % self.tc.sdk_dir) + self.assertEqual(output.strip(), "the value of a is 0.01", msg="Incorrect output: %s" % output) + + def test_python_testfile(self): + self._run('ls /tmp/testfile.python') + + @classmethod + def tearDownClass(self): + remove_safe("%s/test.py" % self.tc.sdk_dir) + remove_safe("/tmp/testfile.python") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/context.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/context.py new file mode 100644 index 000000000..0189ed851 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdk/context.py @@ -0,0 +1,133 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import sys +import glob +import re + +from oeqa.core.context import OETestContext, OETestContextExecutor + +class OESDKTestContext(OETestContext): + sdk_files_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "files") + + def __init__(self, td=None, logger=None, sdk_dir=None, sdk_env=None, + target_pkg_manifest=None, host_pkg_manifest=None): + super(OESDKTestContext, self).__init__(td, logger) + + self.sdk_dir = sdk_dir + self.sdk_env = sdk_env + self.target_pkg_manifest = target_pkg_manifest + self.host_pkg_manifest = host_pkg_manifest + + def _hasPackage(self, manifest, pkg): + for host_pkg in manifest.keys(): + if re.search(pkg, host_pkg): + return True + return False + + def hasHostPackage(self, pkg): + return self._hasPackage(self.host_pkg_manifest, pkg) + + def hasTargetPackage(self, 
pkg): + return self._hasPackage(self.target_pkg_manifest, pkg) + +class OESDKTestContextExecutor(OETestContextExecutor): + _context_class = OESDKTestContext + + name = 'sdk' + help = 'sdk test component' + description = 'executes sdk tests' + + default_cases = [os.path.join(os.path.abspath(os.path.dirname(__file__)), + 'cases')] + default_test_data = None + + def register_commands(self, logger, subparsers): + import argparse_oe + + super(OESDKTestContextExecutor, self).register_commands(logger, subparsers) + + sdk_group = self.parser.add_argument_group('sdk options') + sdk_group.add_argument('--sdk-env', action='store', + help='sdk environment') + sdk_group.add_argument('--target-manifest', action='store', + help='sdk target manifest') + sdk_group.add_argument('--host-manifest', action='store', + help='sdk host manifest') + + sdk_dgroup = self.parser.add_argument_group('sdk display options') + sdk_dgroup.add_argument('--list-sdk-env', action='store_true', + default=False, help='sdk list available environment') + + # XXX this option is required but argparse_oe has a bug handling + # required options, seems that don't keep track of already parsed + # options + sdk_rgroup = self.parser.add_argument_group('sdk required options') + sdk_rgroup.add_argument('--sdk-dir', required=False, action='store', + help='sdk installed directory') + + @staticmethod + def _load_manifest(manifest): + pkg_manifest = {} + if manifest: + with open(manifest) as f: + for line in f: + (pkg, arch, version) = line.strip().split() + pkg_manifest[pkg] = (version, arch) + + return pkg_manifest + + def _process_args(self, logger, args): + super(OESDKTestContextExecutor, self)._process_args(logger, args) + + self.tc_kwargs['init']['sdk_dir'] = args.sdk_dir + self.tc_kwargs['init']['sdk_env'] = self.sdk_env + self.tc_kwargs['init']['target_pkg_manifest'] = \ + OESDKTestContextExecutor._load_manifest(args.target_manifest) + self.tc_kwargs['init']['host_pkg_manifest'] = \ + OESDKTestContextExecutor._load_manifest(args.host_manifest) + + @staticmethod + def _get_sdk_environs(sdk_dir): + sdk_env = {} + + environ_pattern = sdk_dir + '/environment-setup-*' + full_sdk_env = glob.glob(sdk_dir + '/environment-setup-*') + for env in full_sdk_env: + m = re.search('environment-setup-(.*)', env) + if m: + sdk_env[m.group(1)] = env + + return sdk_env + + def _display_sdk_envs(self, log, args, sdk_envs): + log("Available SDK environments at directory %s:" \ + % args.sdk_dir) + log("") + for env in sdk_envs: + log(env) + + def run(self, logger, args): + if not args.sdk_dir: + raise argparse_oe.ArgumentUsageError("No SDK directory "\ + "specified please do, --sdk-dir SDK_DIR", self.name) + + sdk_envs = OESDKTestContextExecutor._get_sdk_environs(args.sdk_dir) + if not sdk_envs: + raise argparse_oe.ArgumentUsageError("No available SDK "\ + "enviroments found at %s" % args.sdk_dir, self.name) + + if args.list_sdk_env: + self._display_sdk_envs(logger.info, args, sdk_envs) + sys.exit(0) + + if not args.sdk_env in sdk_envs: + self._display_sdk_envs(logger.error, args, sdk_envs) + raise argparse_oe.ArgumentUsageError("No valid SDK "\ + "environment (%s) specified" % args.sdk_env, self.name) + + self.sdk_env = sdk_envs[args.sdk_env] + super(OESDKTestContextExecutor, self).run(logger, args) + +_executor_class = OESDKTestContextExecutor diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/files/testsdkmakefile b/import-layers/yocto-poky/meta/lib/oeqa/sdk/files/testsdkmakefile new file mode 100644 index 000000000..fb05f822f --- /dev/null +++ 
b/import-layers/yocto-poky/meta/lib/oeqa/sdk/files/testsdkmakefile @@ -0,0 +1,5 @@ +test: test.o + $(CC) -o test test.o -lm +test.o: test.c + $(CC) -c test.c + diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/gcc.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/gcc.py deleted file mode 100644 index 8395b9b90..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/sdk/gcc.py +++ /dev/null @@ -1,36 +0,0 @@ -import unittest -import os -import shutil -from oeqa.oetest import oeSDKTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - machine = oeSDKTest.tc.d.getVar("MACHINE", True) - if not oeSDKTest.hasHostPackage("packagegroup-cross-canadian-" + machine): - skipModule("SDK doesn't contain a cross-canadian toolchain") - - -class GccCompileTest(oeSDKTest): - - @classmethod - def setUpClass(self): - for f in ['test.c', 'test.cpp', 'testsdkmakefile']: - shutil.copyfile(os.path.join(self.tc.filesdir, f), self.tc.sdktestdir + f) - - def test_gcc_compile(self): - self._run('$CC %s/test.c -o %s/test -lm' % (self.tc.sdktestdir, self.tc.sdktestdir)) - - def test_gpp_compile(self): - self._run('$CXX %s/test.c -o %s/test -lm' % (self.tc.sdktestdir, self.tc.sdktestdir)) - - def test_gpp2_compile(self): - self._run('$CXX %s/test.cpp -o %s/test -lm' % (self.tc.sdktestdir, self.tc.sdktestdir)) - - def test_make(self): - self._run('cd %s; make -f testsdkmakefile' % self.tc.sdktestdir) - - @classmethod - def tearDownClass(self): - files = [self.tc.sdktestdir + f for f in ['test.c', 'test.cpp', 'test.o', 'test', 'testsdkmakefile']] - for f in files: - bb.utils.remove(f) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/perl.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/perl.py deleted file mode 100644 index 45f422ef0..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/sdk/perl.py +++ /dev/null @@ -1,28 +0,0 @@ -import unittest -import os -import shutil -from oeqa.oetest import oeSDKTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeSDKTest.hasHostPackage("nativesdk-perl"): - skipModule("No perl package in the SDK") - - -class PerlTest(oeSDKTest): - - @classmethod - def setUpClass(self): - for f in ['test.pl']: - shutil.copyfile(os.path.join(self.tc.filesdir, f), self.tc.sdktestdir + f) - self.testfile = self.tc.sdktestdir + "test.pl" - - def test_perl_exists(self): - self._run('which perl') - - def test_perl_works(self): - self._run('perl %s/test.pl' % self.tc.sdktestdir) - - @classmethod - def tearDownClass(self): - bb.utils.remove("%s/test.pl" % self.tc.sdktestdir) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/python.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/python.py deleted file mode 100644 index 896fab4df..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/sdk/python.py +++ /dev/null @@ -1,32 +0,0 @@ -import unittest -import os -import shutil -from oeqa.oetest import oeSDKTest, skipModule -from oeqa.utils.decorators import * - -def setUpModule(): - if not oeSDKTest.hasHostPackage("nativesdk-python"): - skipModule("No python package in the SDK") - - -class PythonTest(oeSDKTest): - - @classmethod - def setUpClass(self): - for f in ['test.py']: - shutil.copyfile(os.path.join(self.tc.filesdir, f), self.tc.sdktestdir + f) - - def test_python_exists(self): - self._run('which python') - - def test_python_stdout(self): - output = self._run('python %s/test.py' % self.tc.sdktestdir) - self.assertEqual(output.strip(), "the value of a is 0.01", msg="Incorrect output: %s" % output) - - def test_python_testfile(self): - 
self._run('ls /tmp/testfile.python') - - @classmethod - def tearDownClass(self): - bb.utils.remove("%s/test.py" % self.tc.sdktestdir) - bb.utils.remove("/tmp/testfile.python") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/utils/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdk/utils/sdkbuildproject.py b/import-layers/yocto-poky/meta/lib/oeqa/sdk/utils/sdkbuildproject.py new file mode 100644 index 000000000..4e251142d --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdk/utils/sdkbuildproject.py @@ -0,0 +1,45 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import subprocess + +from oeqa.utils.buildproject import BuildProject + +class SDKBuildProject(BuildProject): + def __init__(self, testpath, sdkenv, uri, testlogdir, builddatetime, + foldername=None, dl_dir=None): + self.sdkenv = sdkenv + self.testdir = testpath + self.targetdir = testpath + os.makedirs(testpath, exist_ok=True) + self.datetime = builddatetime + self.testlogdir = testlogdir + os.makedirs(self.testlogdir, exist_ok=True) + self.logfile = os.path.join(self.testlogdir, "sdk_target_log.%s" % self.datetime) + BuildProject.__init__(self, uri, foldername, tmpdir=testpath, dl_dir=dl_dir) + + def download_archive(self): + + self._download_archive() + + cmd = 'tar xf %s%s -C %s' % (self.targetdir, self.archive, self.targetdir) + subprocess.check_output(cmd, shell=True) + + #Change targetdir to project folder + self.targetdir = os.path.join(self.targetdir, self.fname) + + def run_configure(self, configure_args='', extra_cmds=''): + return super(SDKBuildProject, self).run_configure(configure_args=(configure_args or '$CONFIGURE_FLAGS'), extra_cmds=extra_cmds) + + def run_install(self, install_args=''): + return super(SDKBuildProject, self).run_install(install_args=(install_args or "DESTDIR=%s/../install" % self.targetdir)) + + def log(self, msg): + if self.logfile: + with open(self.logfile, "a") as f: + f.write("%s\n" % msg) + + def _run(self, cmd): + self.log("Running . %s; " % self.sdkenv + cmd) + return subprocess.call(". %s; " % self.sdkenv + cmd, shell=True) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdkext/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/__init__.py index 4cf3fa76b..e69de29bb 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/sdkext/__init__.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/__init__.py @@ -1,3 +0,0 @@ -# Enable other layers to have tests in the same named directory -from pkgutil import extend_path -__path__ = extend_path(__path__, __name__) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdkext/case.py b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/case.py new file mode 100644 index 000000000..21b718831 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/case.py @@ -0,0 +1,21 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import subprocess + +from oeqa.utils import avoid_paths_in_environ +from oeqa.sdk.case import OESDKTestCase + +class OESDKExtTestCase(OESDKTestCase): + def _run(self, cmd): + # extensible sdk shows a warning if found bitbake in the path + # because can cause contamination, i.e. use devtool from + # poky/scripts instead of eSDK one. 
+ env = os.environ.copy() + paths_to_avoid = ['bitbake/bin', 'poky/scripts'] + env['PATH'] = avoid_paths_in_environ(paths_to_avoid) + + return subprocess.check_output(". %s > /dev/null;"\ + " %s;" % (self.tc.sdk_env, cmd), stderr=subprocess.STDOUT, + shell=True, env=env, universal_newlines=True) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdkext/cases/devtool.py b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/cases/devtool.py new file mode 100644 index 000000000..a01bc0bfe --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/cases/devtool.py @@ -0,0 +1,97 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import shutil +import subprocess + +from oeqa.sdkext.case import OESDKExtTestCase +from oeqa.core.decorator.depends import OETestDepends +from oeqa.core.decorator.oeid import OETestID + +class DevtoolTest(OESDKExtTestCase): + @classmethod + def setUpClass(cls): + myapp_src = os.path.join(cls.tc.esdk_files_dir, "myapp") + cls.myapp_dst = os.path.join(cls.tc.sdk_dir, "myapp") + shutil.copytree(myapp_src, cls.myapp_dst) + + myapp_cmake_src = os.path.join(cls.tc.esdk_files_dir, "myapp_cmake") + cls.myapp_cmake_dst = os.path.join(cls.tc.sdk_dir, "myapp_cmake") + shutil.copytree(myapp_cmake_src, cls.myapp_cmake_dst) + + @classmethod + def tearDownClass(cls): + shutil.rmtree(cls.myapp_dst) + shutil.rmtree(cls.myapp_cmake_dst) + + def _test_devtool_build(self, directory): + self._run('devtool add myapp %s' % directory) + try: + self._run('devtool build myapp') + finally: + self._run('devtool reset myapp') + + def _test_devtool_build_package(self, directory): + self._run('devtool add myapp %s' % directory) + try: + self._run('devtool package myapp') + finally: + self._run('devtool reset myapp') + + def test_devtool_location(self): + output = self._run('which devtool') + self.assertEqual(output.startswith(self.tc.sdk_dir), True, \ + msg="Seems that devtool isn't the eSDK one: %s" % output) + + @OETestDepends(['test_devtool_location']) + def test_devtool_add_reset(self): + self._run('devtool add myapp %s' % self.myapp_dst) + self._run('devtool reset myapp') + + @OETestID(1605) + @OETestDepends(['test_devtool_location']) + def test_devtool_build_make(self): + self._test_devtool_build(self.myapp_dst) + + @OETestID(1606) + @OETestDepends(['test_devtool_location']) + def test_devtool_build_esdk_package(self): + self._test_devtool_build_package(self.myapp_dst) + + @OETestID(1607) + @OETestDepends(['test_devtool_location']) + def test_devtool_build_cmake(self): + self._test_devtool_build(self.myapp_cmake_dst) + + @OETestID(1608) + @OETestDepends(['test_devtool_location']) + def test_extend_autotools_recipe_creation(self): + req = 'https://github.com/rdfa/librdfa' + recipe = "librdfa" + self._run('devtool sdk-install libxml2') + self._run('devtool add %s %s' % (recipe, req) ) + try: + self._run('devtool build %s' % recipe) + finally: + self._run('devtool reset %s' % recipe) + + @OETestID(1609) + @OETestDepends(['test_devtool_location']) + def test_devtool_kernelmodule(self): + docfile = 'https://github.com/umlaeute/v4l2loopback.git' + recipe = 'v4l2loopback-driver' + self._run('devtool add %s %s' % (recipe, docfile) ) + try: + self._run('devtool build %s' % recipe) + finally: + self._run('devtool reset %s' % recipe) + + @OETestID(1610) + @OETestDepends(['test_devtool_location']) + def test_recipes_for_nodejs(self): + package_nodejs = "npm://registry.npmjs.org;name=winston;version=2.2.0" + self._run('devtool add %s ' % package_nodejs) + 
try: + self._run('devtool build %s ' % package_nodejs) + finally: + self._run('devtool reset %s '% package_nodejs) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdkext/cases/sdk_update.py b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/cases/sdk_update.py new file mode 100644 index 000000000..2f8598bbe --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/cases/sdk_update.py @@ -0,0 +1,39 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import shutil +import subprocess + +from oeqa.sdkext.case import OESDKExtTestCase +from oeqa.utils.httpserver import HTTPService + +class SdkUpdateTest(OESDKExtTestCase): + @classmethod + def setUpClass(self): + self.publish_dir = os.path.join(self.tc.sdk_dir, 'esdk_publish') + if os.path.exists(self.publish_dir): + shutil.rmtree(self.publish_dir) + os.mkdir(self.publish_dir) + + base_tcname = "%s/%s" % (self.td.get("SDK_DEPLOY", ''), + self.td.get("TOOLCHAINEXT_OUTPUTNAME", '')) + tcname_new = "%s-new.sh" % base_tcname + if not os.path.exists(tcname_new): + tcname_new = "%s.sh" % base_tcname + + cmd = 'oe-publish-sdk %s %s' % (tcname_new, self.publish_dir) + subprocess.check_output(cmd, shell=True) + + self.http_service = HTTPService(self.publish_dir) + self.http_service.start() + + self.http_url = "http://127.0.0.1:%d" % self.http_service.port + + def test_sdk_update_http(self): + output = self._run("devtool sdk-update \"%s\"" % self.http_url) + + @classmethod + def tearDownClass(self): + self.http_service.stop() + shutil.rmtree(self.publish_dir) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdkext/context.py b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/context.py new file mode 100644 index 000000000..65da4c6e1 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/context.py @@ -0,0 +1,29 @@ +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +from oeqa.sdk.context import OESDKTestContext, OESDKTestContextExecutor + +class OESDKExtTestContext(OESDKTestContext): + esdk_files_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "files") + + # FIXME - We really need to do better mapping of names here, this at + # least allows some tests to run + def hasHostPackage(self, pkg): + # We force a toolchain to be installed into the eSDK even if its minimal + if pkg.startswith("packagegroup-cross-canadian-"): + return True + return self._hasPackage(self.host_pkg_manifest, pkg) + +class OESDKExtTestContextExecutor(OESDKTestContextExecutor): + _context_class = OESDKExtTestContext + + name = 'esdk' + help = 'esdk test component' + description = 'executes esdk tests' + + default_cases = OESDKTestContextExecutor.default_cases + \ + [os.path.join(os.path.abspath(os.path.dirname(__file__)), 'cases')] + default_test_data = None + +_executor_class = OESDKExtTestContextExecutor diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdkext/devtool.py b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/devtool.py deleted file mode 100644 index 65f41f687..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/sdkext/devtool.py +++ /dev/null @@ -1,108 +0,0 @@ -import shutil -import subprocess -import urllib.request -from oeqa.oetest import oeSDKExtTest -from oeqa.utils.decorators import * - -class DevtoolTest(oeSDKExtTest): - @classmethod - def setUpClass(self): - self.myapp_src = os.path.join(self.tc.sdkextfilesdir, "myapp") - self.myapp_dst = os.path.join(self.tc.sdktestdir, "myapp") - shutil.copytree(self.myapp_src, 
self.myapp_dst) - - self.myapp_cmake_src = os.path.join(self.tc.sdkextfilesdir, "myapp_cmake") - self.myapp_cmake_dst = os.path.join(self.tc.sdktestdir, "myapp_cmake") - shutil.copytree(self.myapp_cmake_src, self.myapp_cmake_dst) - - def _test_devtool_build(self, directory): - self._run('devtool add myapp %s' % directory) - try: - self._run('devtool build myapp') - except Exception as e: - print(e.output) - self._run('devtool reset myapp') - raise e - self._run('devtool reset myapp') - - def _test_devtool_build_package(self, directory): - self._run('devtool add myapp %s' % directory) - try: - self._run('devtool package myapp') - except Exception as e: - print(e.output) - self._run('devtool reset myapp') - raise e - self._run('devtool reset myapp') - - def test_devtool_location(self): - output = self._run('which devtool') - self.assertEqual(output.startswith(self.tc.sdktestdir), True, \ - msg="Seems that devtool isn't the eSDK one: %s" % output) - - @skipUnlessPassed('test_devtool_location') - def test_devtool_add_reset(self): - self._run('devtool add myapp %s' % self.myapp_dst) - self._run('devtool reset myapp') - - @testcase(1473) - @skipUnlessPassed('test_devtool_location') - def test_devtool_build_make(self): - self._test_devtool_build(self.myapp_dst) - - @testcase(1474) - @skipUnlessPassed('test_devtool_location') - def test_devtool_build_esdk_package(self): - self._test_devtool_build_package(self.myapp_dst) - - @testcase(1479) - @skipUnlessPassed('test_devtool_location') - def test_devtool_build_cmake(self): - self._test_devtool_build(self.myapp_cmake_dst) - - @testcase(1482) - @skipUnlessPassed('test_devtool_location') - def test_extend_autotools_recipe_creation(self): - req = 'https://github.com/rdfa/librdfa' - recipe = "bbexample" - self._run('devtool add %s %s' % (recipe, req) ) - try: - self._run('devtool build %s' % recipe) - except Exception as e: - print(e.output) - self._run('devtool reset %s' % recipe) - raise e - self._run('devtool reset %s' % recipe) - - @testcase(1484) - @skipUnlessPassed('test_devtool_location') - def test_devtool_kernelmodule(self): - docfile = 'https://github.com/umlaeute/v4l2loopback.git' - recipe = 'v4l2loopback-driver' - self._run('devtool add %s %s' % (recipe, docfile) ) - try: - self._run('devtool build %s' % recipe) - except Exception as e: - print(e.output) - self._run('devtool reset %s' % recipe) - raise e - self._run('devtool reset %s' % recipe) - - @testcase(1478) - @skipUnlessPassed('test_devtool_location') - def test_recipes_for_nodejs(self): - package_nodejs = "npm://registry.npmjs.org;name=winston;version=2.2.0" - self._run('devtool add %s ' % package_nodejs) - try: - self._run('devtool build %s ' % package_nodejs) - except Exception as e: - print(e.output) - self._run('devtool reset %s' % package_nodejs) - raise e - self._run('devtool reset %s '% package_nodejs) - - - @classmethod - def tearDownClass(self): - shutil.rmtree(self.myapp_dst) - shutil.rmtree(self.myapp_cmake_dst) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/sdkext/sdk_update.py b/import-layers/yocto-poky/meta/lib/oeqa/sdkext/sdk_update.py deleted file mode 100644 index 2ade839c0..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/sdkext/sdk_update.py +++ /dev/null @@ -1,36 +0,0 @@ -import os -import shutil -import subprocess - -from oeqa.oetest import oeSDKExtTest -from oeqa.utils.httpserver import HTTPService - -class SdkUpdateTest(oeSDKExtTest): - - @classmethod - def setUpClass(self): - self.publish_dir = os.path.join(self.tc.sdktestdir, 'esdk_publish') - if 
os.path.exists(self.publish_dir): - shutil.rmtree(self.publish_dir) - os.mkdir(self.publish_dir) - - tcname_new = self.tc.d.expand( - "${SDK_DEPLOY}/${TOOLCHAINEXT_OUTPUTNAME}-new.sh") - if not os.path.exists(tcname_new): - tcname_new = self.tc.tcname - - cmd = 'oe-publish-sdk %s %s' % (tcname_new, self.publish_dir) - subprocess.check_output(cmd, shell=True) - - self.http_service = HTTPService(self.publish_dir) - self.http_service.start() - - self.http_url = "http://127.0.0.1:%d" % self.http_service.port - - def test_sdk_update_http(self): - output = self._run("devtool sdk-update \"%s\"" % self.http_url) - - @classmethod - def tearDownClass(self): - self.http_service.stop() - shutil.rmtree(self.publish_dir) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/_toaster.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/_toaster.py deleted file mode 100644 index 15ea9df9e..000000000 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/_toaster.py +++ /dev/null @@ -1,320 +0,0 @@ -import unittest -import os -import sys -import shlex, subprocess -import urllib.request, urllib.parse, urllib.error, subprocess, time, getpass, re, json, shlex - -import oeqa.utils.ftools as ftools -from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import runCmd - -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../../', 'bitbake/lib/toaster'))) -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "toastermain.settings") - -import toastermain.settings -from django.db.models import Q -from orm.models import * -from oeqa.utils.decorators import testcase - -class ToasterSetup(oeSelfTest): - - def recipe_parse(self, file_path, var): - for line in open(file_path,'r'): - if line.find(var) > -1: - val = line.split(" = ")[1].replace("\"", "").strip() - return val - - def fix_file_path(self, file_path): - if ":" in file_path: - file_path=file_path.split(":")[2] - return file_path - -class Toaster_DB_Tests(ToasterSetup): - - # Check if build name is unique - tc_id=795 - @testcase(795) - def test_Build_Unique_Name(self): - all_builds = Build.objects.all().count() - distinct_builds = Build.objects.values('id').distinct().count() - self.assertEqual(distinct_builds, all_builds, msg = 'Build name is not unique') - - # Check if build coocker log path is unique - tc_id=819 - @testcase(819) - def test_Build_Unique_Cooker_Log_Path(self): - distinct_path = Build.objects.values('cooker_log_path').distinct().count() - total_builds = Build.objects.values('id').count() - self.assertEqual(distinct_path, total_builds, msg = 'Build coocker log path is not unique') - - # Check if task order is unique for one build - tc=824 - @testcase(824) - def test_Task_Unique_Order(self): - builds = Build.objects.values('id') - cnt_err = [] - for build in builds: - total_task_order = Task.objects.filter(build = build['id']).values('order').count() - distinct_task_order = Task.objects.filter(build = build['id']).values('order').distinct().count() - if (total_task_order != distinct_task_order): - cnt_err.append(build['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for build id: %s' % cnt_err) - - # Check task order sequence for one build - tc=825 - @testcase(825) - def test_Task_Order_Sequence(self): - builds = builds = Build.objects.values('id') - cnt_err = [] - for build in builds: - tasks = Task.objects.filter(Q(build = build['id']), ~Q(order = None), ~Q(task_name__contains = '_setscene')).values('id', 'order').order_by("order") - cnt_tasks = 0 - for task in tasks: - cnt_tasks += 1 - if 
(task['order'] != cnt_tasks): - cnt_err.append(task['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err) - - # Check if disk_io matches the difference between EndTimeIO and StartTimeIO in build stats - tc=828 - ### this needs to be updated ### - #def test_Task_Disk_IO_TC828(self): - - # Check if outcome = 2 (SSTATE) then sstate_result must be 3 (RESTORED) - tc=832 - @testcase(832) - def test_Task_If_Outcome_2_Sstate_Result_Must_Be_3(self): - tasks = Task.objects.filter(outcome = 2).values('id', 'sstate_result') - cnt_err = [] - for task in tasks: - if (row['sstate_result'] != 3): - cnt_err.append(task['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err) - - # Check if outcome = 1 (COVERED) or 3 (EXISTING) then sstate_result must be 0 (SSTATE_NA) - tc=833 - @testcase(833) - def test_Task_If_Outcome_1_3_Sstate_Result_Must_Be_0(self): - tasks = Task.objects.filter(outcome__in = (1, 3)).values('id', 'sstate_result') - cnt_err = [] - for task in tasks: - if (task['sstate_result'] != 0): - cnt_err.append(task['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err) - - # Check if outcome is 0 (SUCCESS) or 4 (FAILED) then sstate_result must be 0 (NA), 1 (MISS) or 2 (FAILED) - tc=834 - @testcase(834) - def test_Task_If_Outcome_0_4_Sstate_Result_Must_Be_0_1_2(self): - tasks = Task.objects.filter(outcome__in = (0, 4)).values('id', 'sstate_result') - cnt_err = [] - for task in tasks: - if (task['sstate_result'] not in [0, 1, 2]): - cnt_err.append(task['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err) - - # Check if task_executed = TRUE (1), script_type must be 0 (CODING_NA), 2 (CODING_PYTHON), 3 (CODING_SHELL) - tc=891 - @testcase(891) - def test_Task_If_Task_Executed_True_Script_Type_0_2_3(self): - tasks = Task.objects.filter(task_executed = 1).values('id', 'script_type') - cnt_err = [] - for task in tasks: - if (task['script_type'] not in [0, 2, 3]): - cnt_err.append(task['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err) - - # Check if task_executed = TRUE (1), outcome must be 0 (SUCCESS) or 4 (FAILED) - tc=836 - @testcase(836) - def test_Task_If_Task_Executed_True_Outcome_0_4(self): - tasks = Task.objects.filter(task_executed = 1).values('id', 'outcome') - cnt_err = [] - for task in tasks: - if (task['outcome'] not in [0, 4]): - cnt_err.append(task['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err) - - # Check if task_executed = FALSE (0), script_type must be 0 - tc=890 - @testcase(890) - def test_Task_If_Task_Executed_False_Script_Type_0(self): - tasks = Task.objects.filter(task_executed = 0).values('id', 'script_type') - cnt_err = [] - for task in tasks: - if (task['script_type'] != 0): - cnt_err.append(task['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err) - - # Check if task_executed = FALSE (0) and build outcome = SUCCEEDED (0), task outcome must be 1 (COVERED), 2 (CACHED), 3 (PREBUILT), 5 (EMPTY) - tc=837 - @testcase(837) - def test_Task_If_Task_Executed_False_Outcome_1_2_3_5(self): - builds = Build.objects.filter(outcome = 0).values('id') - cnt_err = [] - for build in builds: - tasks = Task.objects.filter(build = build['id'], task_executed = 0).values('id', 'outcome') - for task in tasks: - if (task['outcome'] not in [1, 2, 3, 5]): - cnt_err.append(task['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for task id: %s' % cnt_err) - - # Key verification - 
tc=888 - @testcase(888) - def test_Target_Installed_Package(self): - rows = Target_Installed_Package.objects.values('id', 'target_id', 'package_id') - cnt_err = [] - for row in rows: - target = Target.objects.filter(id = row['target_id']).values('id') - package = Package.objects.filter(id = row['package_id']).values('id') - if (not target or not package): - cnt_err.append(row['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for target installed package id: %s' % cnt_err) - - # Key verification - tc=889 - @testcase(889) - def test_Task_Dependency(self): - rows = Task_Dependency.objects.values('id', 'task_id', 'depends_on_id') - cnt_err = [] - for row in rows: - task_id = Task.objects.filter(id = row['task_id']).values('id') - depends_on_id = Task.objects.filter(id = row['depends_on_id']).values('id') - if (not task_id or not depends_on_id): - cnt_err.append(row['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for task dependency id: %s' % cnt_err) - - # Check if build target file_name is populated only if is_image=true AND orm_build.outcome=0 then if the file exists and its size matches the file_size value - ### Need to add the tc in the test run - @testcase(1037) - def test_Target_File_Name_Populated(self): - builds = Build.objects.filter(outcome = 0).values('id') - for build in builds: - targets = Target.objects.filter(build_id = build['id'], is_image = 1).values('id') - for target in targets: - target_files = Target_Image_File.objects.filter(target_id = target['id']).values('id', 'file_name', 'file_size') - cnt_err = [] - for file_info in target_files: - target_id = file_info['id'] - target_file_name = file_info['file_name'] - target_file_size = file_info['file_size'] - if (not target_file_name or not target_file_size): - cnt_err.append(target_id) - else: - if (not os.path.exists(target_file_name)): - cnt_err.append(target_id) - else: - if (os.path.getsize(target_file_name) != target_file_size): - cnt_err.append(target_id) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for target image file id: %s' % cnt_err) - - # Key verification - tc=884 - @testcase(884) - def test_Package_Dependency(self): - cnt_err = [] - deps = Package_Dependency.objects.values('id', 'package_id', 'depends_on_id') - for dep in deps: - if (dep['package_id'] == dep['depends_on_id']): - cnt_err.append(dep['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for package dependency id: %s' % cnt_err) - - # Recipe key verification, recipe name does not depends on a recipe having the same name - tc=883 - @testcase(883) - def test_Recipe_Dependency(self): - deps = Recipe_Dependency.objects.values('id', 'recipe_id', 'depends_on_id') - cnt_err = [] - for dep in deps: - if (not dep['recipe_id'] or not dep['depends_on_id']): - cnt_err.append(dep['id']) - else: - name = Recipe.objects.filter(id = dep['recipe_id']).values('name') - dep_name = Recipe.objects.filter(id = dep['depends_on_id']).values('name') - if (name == dep_name): - cnt_err.append(dep['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for recipe dependency id: %s' % cnt_err) - - # Check if package name does not start with a number (0-9) - tc=846 - @testcase(846) - def test_Package_Name_For_Number(self): - packages = Package.objects.filter(~Q(size = -1)).values('id', 'name') - cnt_err = [] - for package in packages: - if (package['name'][0].isdigit() is True): - cnt_err.append(package['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err) - - # Check if package version starts with a number (0-9) - 
tc=847 - @testcase(847) - def test_Package_Version_Starts_With_Number(self): - packages = Package.objects.filter(~Q(size = -1)).values('id', 'version') - cnt_err = [] - for package in packages: - if (package['version'][0].isdigit() is False): - cnt_err.append(package['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err) - - # Check if package revision starts with 'r' - tc=848 - @testcase(848) - def test_Package_Revision_Starts_With_r(self): - packages = Package.objects.filter(~Q(size = -1)).values('id', 'revision') - cnt_err = [] - for package in packages: - if (package['revision'][0].startswith("r") is False): - cnt_err.append(package['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err) - - # Check the validity of the package build_id - ### TC must be added in test run - @testcase(1038) - def test_Package_Build_Id(self): - packages = Package.objects.filter(~Q(size = -1)).values('id', 'build_id') - cnt_err = [] - for package in packages: - build_id = Build.objects.filter(id = package['build_id']).values('id') - if (not build_id): - cnt_err.append(package['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err) - - # Check the validity of package recipe_id - ### TC must be added in test run - @testcase(1039) - def test_Package_Recipe_Id(self): - packages = Package.objects.filter(~Q(size = -1)).values('id', 'recipe_id') - cnt_err = [] - for package in packages: - recipe_id = Recipe.objects.filter(id = package['recipe_id']).values('id') - if (not recipe_id): - cnt_err.append(package['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err) - - # Check if package installed_size field is not null - ### TC must be aded in test run - @testcase(1040) - def test_Package_Installed_Size_Not_NULL(self): - packages = Package.objects.filter(installed_size__isnull = True).values('id') - cnt_err = [] - for package in packages: - cnt_err.append(package['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for package id: %s' % cnt_err) - - # Check if all layers requests return exit code is 200 - tc=843 - @testcase(843) - def test_Layers_Requests_Exit_Code(self): - layers = Layer.objects.values('id', 'layer_index_url') - cnt_err = [] - for layer in layers: - resp = urllib.request.urlopen(layer['layer_index_url']) - if (resp.getcode() != 200): - cnt_err.append(layer['id']) - self.assertEqual(len(cnt_err), 0, msg = 'Errors for layer id: %s' % cnt_err) - - # Check if django server starts regardless of the timezone set on the machine - tc=905 - @testcase(905) - def test_Start_Django_Timezone(self): - current_path = os.getcwd() - zonefilelist = [] - ZONEINFOPATH = '/usr/share/zoneinfo/' - os.chdir("../bitbake/lib/toaster/") - cnt_err = 0 - for filename in os.listdir(ZONEINFOPATH): - if os.path.isfile(os.path.join(ZONEINFOPATH, filename)): - zonefilelist.append(filename) - for k in range(len(zonefilelist)): - if k <= 5: - files = zonefilelist[k] - os.system("export TZ="+str(files)+"; python manage.py runserver > /dev/null 2>&1 &") - time.sleep(3) - pid = subprocess.check_output("ps aux | grep '[/u]sr/bin/python manage.py runserver' | awk '{print $2}'", shell = True) - if pid: - os.system("kill -9 "+str(pid)) - else: - cnt_err.append(zonefilelist[k]) - self.assertEqual(cnt_err, 0, msg = 'Errors django server does not start with timezone: %s' % cnt_err) - os.chdir(current_path) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/archiver.py 
b/import-layers/yocto-poky/meta/lib/oeqa/selftest/archiver.py index f2030c446..7f01c36d4 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/archiver.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/archiver.py @@ -1,5 +1,5 @@ from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import bitbake, get_bb_var +from oeqa.utils.commands import bitbake, get_bb_vars from oeqa.utils.decorators import testcase import glob import os @@ -26,25 +26,94 @@ class Archiver(oeSelfTest): features += 'ARCHIVER_MODE[src] = "original"\n' features += 'COPYLEFT_PN_INCLUDE = "%s"\n' % include_recipe features += 'COPYLEFT_PN_EXCLUDE = "%s"\n' % exclude_recipe - - # Update local.conf self.write_config(features) - tmp_dir = get_bb_var('TMPDIR') - deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC') - target_sys = get_bb_var('TARGET_SYS') - src_path = os.path.join(deploy_dir_src, target_sys) - - # Delete tmp directory - shutil.rmtree(tmp_dir) + bitbake('-c clean %s %s' % (include_recipe, exclude_recipe)) + bitbake("-c deploy_archives %s %s" % (include_recipe, exclude_recipe)) - # Build core-image-minimal - bitbake('core-image-minimal') + bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS']) + src_path = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS']) # Check that include_recipe was included - is_included = len(glob.glob(src_path + '/%s*' % include_recipe)) - self.assertEqual(1, is_included, 'Recipe %s was not included.' % include_recipe) + included_present = len(glob.glob(src_path + '/%s-*' % include_recipe)) + self.assertTrue(included_present, 'Recipe %s was not included.' % include_recipe) # Check that exclude_recipe was excluded - is_excluded = len(glob.glob(src_path + '/%s*' % exclude_recipe)) - self.assertEqual(0, is_excluded, 'Recipe %s was not excluded.' % exclude_recipe) + excluded_present = len(glob.glob(src_path + '/%s-*' % exclude_recipe)) + self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % exclude_recipe) + + + def test_archiver_filters_by_type(self): + """ + Summary: The archiver is documented to filter on the recipe type. + Expected: 1. included recipe type (target) should be included + 2. other types should be excluded + Product: oe-core + Author: André Draszik + """ + + target_recipe = 'initscripts' + native_recipe = 'zlib-native' + + features = 'INHERIT += "archiver"\n' + features += 'ARCHIVER_MODE[src] = "original"\n' + features += 'COPYLEFT_RECIPE_TYPES = "target"\n' + self.write_config(features) + + bitbake('-c clean %s %s' % (target_recipe, native_recipe)) + bitbake("%s -c deploy_archives %s" % (target_recipe, native_recipe)) + + bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS', 'BUILD_SYS']) + src_path_target = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS']) + src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) + + # Check that target_recipe was included + included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipe)) + self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipe) + + # Check that native_recipe was excluded + excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipe)) + self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipe) + + def test_archiver_filters_by_type_and_name(self): + """ + Summary: Test that the archiver archives by recipe type, taking the + recipe name into account. + Expected: 1. included recipe type (target) should be included + 2. other types should be excluded + 3. 
recipe by name should be included / excluded, + overriding previous decision by type + Product: oe-core + Author: André Draszik + """ + + target_recipes = [ 'initscripts', 'zlib' ] + native_recipes = [ 'update-rc.d-native', 'zlib-native' ] + + features = 'INHERIT += "archiver"\n' + features += 'ARCHIVER_MODE[src] = "original"\n' + features += 'COPYLEFT_RECIPE_TYPES = "target"\n' + features += 'COPYLEFT_PN_INCLUDE = "%s"\n' % native_recipes[1] + features += 'COPYLEFT_PN_EXCLUDE = "%s"\n' % target_recipes[1] + self.write_config(features) + + bitbake('-c clean %s %s' % (' '.join(target_recipes), ' '.join(native_recipes))) + bitbake('-c deploy_archives %s %s' % (' '.join(target_recipes), ' '.join(native_recipes))) + + bb_vars = get_bb_vars(['DEPLOY_DIR_SRC', 'TARGET_SYS', 'BUILD_SYS']) + src_path_target = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['TARGET_SYS']) + src_path_native = os.path.join(bb_vars['DEPLOY_DIR_SRC'], bb_vars['BUILD_SYS']) + + # Check that target_recipe[0] and native_recipes[1] were included + included_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[0])) + self.assertTrue(included_present, 'Recipe %s was not included.' % target_recipes[0]) + + included_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[1])) + self.assertTrue(included_present, 'Recipe %s was not included.' % native_recipes[1]) + + # Check that native_recipes[0] and target_recipes[1] were excluded + excluded_present = len(glob.glob(src_path_native + '/%s-*' % native_recipes[0])) + self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % native_recipes[0]) + + excluded_present = len(glob.glob(src_path_target + '/%s-*' % target_recipes[1])) + self.assertFalse(excluded_present, 'Recipe %s was not excluded.' % target_recipes[1]) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/base.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/base.py index 26c93f905..47a8ea827 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/base.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/base.py @@ -163,7 +163,7 @@ be re-executed from a clean environment to ensure accurate results.") # remove data from /conf/selftest.inc def remove_config(self, data): - self.log.debug("Removing from: %s\n\%s\n" % (self.testinc_path, data)) + self.log.debug("Removing from: %s\n%s\n" % (self.testinc_path, data)) ftools.remove_from_file(self.testinc_path, data) # write to meta-sefltest/recipes-test//test_recipe.inc @@ -206,7 +206,7 @@ be re-executed from a clean environment to ensure accurate results.") # remove data from /conf/bblayers.inc def remove_bblayers_config(self, data): - self.log.debug("Removing from: %s\n\%s\n" % (self.testinc_bblayers_path, data)) + self.log.debug("Removing from: %s\n%s\n" % (self.testinc_bblayers_path, data)) ftools.remove_from_file(self.testinc_bblayers_path, data) # write to /conf/machine.inc diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/bblayers.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/bblayers.py index d23675e84..cd658c5d4 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/bblayers.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/bblayers.py @@ -71,17 +71,12 @@ class BitbakeLayers(oeSelfTest): result = runCmd('bitbake-layers show-recipes') self.assertIn('aspell:', result.output) self.assertIn('mtd-utils:', result.output) - self.assertIn('linux-yocto:', result.output) self.assertIn('core-image-minimal:', result.output) result = runCmd('bitbake-layers show-recipes mtd-utils') 
self.assertIn('mtd-utils:', result.output) self.assertNotIn('aspell:', result.output) - result = runCmd('bitbake-layers show-recipes -i kernel') - self.assertIn('linux-yocto:', result.output) - self.assertNotIn('mtd-utils:', result.output) result = runCmd('bitbake-layers show-recipes -i image') self.assertIn('core-image-minimal', result.output) - self.assertNotIn('linux-yocto:', result.output) self.assertNotIn('mtd-utils:', result.output) result = runCmd('bitbake-layers show-recipes -i cmake,pkgconfig') self.assertIn('libproxy:', result.output) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/bbtests.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/bbtests.py index 4ce935fc1..46e09f509 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/bbtests.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/bbtests.py @@ -3,7 +3,7 @@ import re import oeqa.utils.ftools as ftools from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import runCmd, bitbake, get_bb_var +from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars from oeqa.utils.decorators import testcase class BitbakeTests(oeSelfTest): @@ -78,9 +78,10 @@ class BitbakeTests(oeSelfTest): # test 1 from bug 5875 test_recipe = 'zlib' test_data = "Microsoft Made No Profit From Anyone's Zunes Yo" - image_dir = get_bb_var('D', test_recipe) - pkgsplit_dir = get_bb_var('PKGDEST', test_recipe) - man_dir = get_bb_var('mandir', test_recipe) + bb_vars = get_bb_vars(['D', 'PKGDEST', 'mandir'], test_recipe) + image_dir = bb_vars['D'] + pkgsplit_dir = bb_vars['PKGDEST'] + man_dir = bb_vars['mandir'] bitbake('-c clean %s' % test_recipe) bitbake('-c package -f %s' % test_recipe) @@ -112,17 +113,18 @@ class BitbakeTests(oeSelfTest): @testcase(167) def test_bitbake_g(self): - result = bitbake('-g core-image-full-cmdline') - for f in ['pn-buildlist', 'pn-depends.dot', 'package-depends.dot', 'task-depends.dot']: + result = bitbake('-g core-image-minimal') + for f in ['pn-buildlist', 'recipe-depends.dot', 'task-depends.dot']: self.addCleanup(os.remove, f) - self.assertTrue('NOTE: PN build list saved to \'pn-buildlist\'' in result.output, msg = "No dependency \"pn-buildlist\" file was generated for the given task target. bitbake output: %s" % result.output) - self.assertTrue('openssh' in ftools.read_file(os.path.join(self.builddir, 'pn-buildlist')), msg = "No \"openssh\" dependency found in pn-buildlist file.") + self.assertTrue('Task dependencies saved to \'task-depends.dot\'' in result.output, msg = "No task dependency \"task-depends.dot\" file was generated for the given task target. bitbake output: %s" % result.output) + self.assertTrue('busybox' in ftools.read_file(os.path.join(self.builddir, 'task-depends.dot')), msg = "No \"busybox\" dependency found in task-depends.dot file.") @testcase(899) def test_image_manifest(self): bitbake('core-image-minimal') - deploydir = get_bb_var("DEPLOY_DIR_IMAGE", target="core-image-minimal") - imagename = get_bb_var("IMAGE_LINK_NAME", target="core-image-minimal") + bb_vars = get_bb_vars(["DEPLOY_DIR_IMAGE", "IMAGE_LINK_NAME"], "core-image-minimal") + deploydir = bb_vars["DEPLOY_DIR_IMAGE"] + imagename = bb_vars["IMAGE_LINK_NAME"] manifest = os.path.join(deploydir, imagename + ".manifest") self.assertTrue(os.path.islink(manifest), msg="No manifest file created for image. It should have been created in %s" % manifest) @@ -149,19 +151,21 @@ doesn't exist, yet fetcher didn't report any error. 
bitbake output: %s" % result @testcase(171) def test_rename_downloaded_file(self): + # TODO unique dldir instead of using cleanall + # TODO: need to set sstatedir? self.write_config("""DL_DIR = \"${TOPDIR}/download-selftest\" SSTATE_DIR = \"${TOPDIR}/download-selftest\" """) self.track_for_cleanup(os.path.join(self.builddir, "download-selftest")) - data = 'SRC_URI_append = ";downloadfilename=test-aspell.tar.gz"' + data = 'SRC_URI = "${GNU_MIRROR}/aspell/aspell-${PV}.tar.gz;downloadfilename=test-aspell.tar.gz"' self.write_recipeinc('aspell', data) - bitbake('-ccleanall aspell') - result = bitbake('-c fetch aspell', ignore_status=True) + result = bitbake('-f -c fetch aspell', ignore_status=True) self.delete_recipeinc('aspell') self.assertEqual(result.status, 0, msg = "Couldn't fetch aspell. %s" % result.output) - self.assertTrue(os.path.isfile(os.path.join(get_bb_var("DL_DIR"), 'test-aspell.tar.gz')), msg = "File rename failed. No corresponding test-aspell.tar.gz file found under %s" % str(get_bb_var("DL_DIR"))) - self.assertTrue(os.path.isfile(os.path.join(get_bb_var("DL_DIR"), 'test-aspell.tar.gz.done')), "File rename failed. No corresponding test-aspell.tar.gz.done file found under %s" % str(get_bb_var("DL_DIR"))) + dl_dir = get_bb_var("DL_DIR") + self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz')), msg = "File rename failed. No corresponding test-aspell.tar.gz file found under %s" % dl_dir) + self.assertTrue(os.path.isfile(os.path.join(dl_dir, 'test-aspell.tar.gz.done')), "File rename failed. No corresponding test-aspell.tar.gz.done file found under %s" % dl_dir) @testcase(1028) def test_environment(self): @@ -227,14 +231,12 @@ INHERIT_remove = \"report-error\" @testcase(1119) def test_non_gplv3(self): - data = 'INCOMPATIBLE_LICENSE = "GPLv3"' - conf = os.path.join(self.builddir, 'conf/local.conf') - ftools.append_file(conf ,data) - self.addCleanup(ftools.remove_from_file, conf ,data) - result = bitbake('readline', ignore_status=True) + self.write_config('INCOMPATIBLE_LICENSE = "GPLv3"') + result = bitbake('selftest-ed', ignore_status=True) self.assertEqual(result.status, 0, "Bitbake failed, exit code %s, output %s" % (result.status, result.output)) - self.assertFalse(os.path.isfile(os.path.join(self.builddir, 'tmp/deploy/licenses/readline/generic_GPLv3'))) - self.assertTrue(os.path.isfile(os.path.join(self.builddir, 'tmp/deploy/licenses/readline/generic_GPLv2'))) + lic_dir = get_bb_var('LICENSE_DIRECTORY') + self.assertFalse(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv3'))) + self.assertTrue(os.path.isfile(os.path.join(lic_dir, 'selftest-ed/generic_GPLv2'))) @testcase(1422) def test_setscene_only(self): @@ -255,8 +257,9 @@ INHERIT_remove = \"report-error\" def test_bbappend_order(self): """ Bitbake should bbappend to recipe in a predictable order """ test_recipe = 'ed' - test_recipe_summary_before = get_bb_var('SUMMARY', test_recipe) - test_recipe_pv = get_bb_var('PV', test_recipe) + bb_vars = get_bb_vars(['SUMMARY', 'PV'], test_recipe) + test_recipe_summary_before = bb_vars['SUMMARY'] + test_recipe_pv = bb_vars['PV'] recipe_append_file = test_recipe + '_' + test_recipe_pv + '.bbappend' expected_recipe_summary = test_recipe_summary_before diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/buildhistory.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/buildhistory.py index 674da6205..008c39c95 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/buildhistory.py +++ 
b/import-layers/yocto-poky/meta/lib/oeqa/selftest/buildhistory.py @@ -3,14 +3,15 @@ import re import datetime from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import bitbake, get_bb_var +from oeqa.utils.commands import bitbake, get_bb_vars from oeqa.utils.decorators import testcase class BuildhistoryBase(oeSelfTest): def config_buildhistory(self, tmp_bh_location=False): - if (not 'buildhistory' in get_bb_var('USER_CLASSES')) and (not 'buildhistory' in get_bb_var('INHERIT')): + bb_vars = get_bb_vars(['USER_CLASSES', 'INHERIT']) + if (not 'buildhistory' in bb_vars['USER_CLASSES']) and (not 'buildhistory' in bb_vars['INHERIT']): add_buildhistory_config = 'INHERIT += "buildhistory"\nBUILDHISTORY_COMMIT = "1"' self.append_config(add_buildhistory_config) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/buildoptions.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/buildoptions.py index 47549550c..a6e0203f5 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/buildoptions.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/buildoptions.py @@ -5,7 +5,7 @@ import shutil import tempfile from oeqa.selftest.base import oeSelfTest from oeqa.selftest.buildhistory import BuildhistoryBase -from oeqa.utils.commands import runCmd, bitbake, get_bb_var +from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars import oeqa.utils.ftools as ftools from oeqa.utils.decorators import testcase @@ -16,32 +16,38 @@ class ImageOptionsTests(oeSelfTest): image_pkgtype = get_bb_var("IMAGE_PKGTYPE") if image_pkgtype != 'rpm': self.skipTest('Not using RPM as main package format') - bitbake("-c cleanall core-image-minimal") + bitbake("-c clean core-image-minimal") self.write_config('INC_RPM_IMAGE_GEN = "1"') self.append_config('IMAGE_FEATURES += "ssh-server-openssh"') bitbake("core-image-minimal") log_data_file = os.path.join(get_bb_var("WORKDIR", "core-image-minimal"), "temp/log.do_rootfs") log_data_created = ftools.read_file(log_data_file) - incremental_created = re.search("NOTE: load old install solution for incremental install\nNOTE: old install solution not exist\nNOTE: creating new install solution for incremental install(\n.*)*NOTE: Installing the following packages:.*packagegroup-core-ssh-openssh", log_data_created) + incremental_created = re.search("Installing : packagegroup-core-ssh-openssh", log_data_created) self.remove_config('IMAGE_FEATURES += "ssh-server-openssh"') self.assertTrue(incremental_created, msg = "Match failed in:\n%s" % log_data_created) bitbake("core-image-minimal") log_data_removed = ftools.read_file(log_data_file) - incremental_removed = re.search("NOTE: load old install solution for incremental install\nNOTE: creating new install solution for incremental install(\n.*)*NOTE: incremental removed:.*openssh-sshd-.*", log_data_removed) + incremental_removed = re.search("Erasing : packagegroup-core-ssh-openssh", log_data_removed) self.assertTrue(incremental_removed, msg = "Match failed in:\n%s" % log_data_removed) @testcase(286) def test_ccache_tool(self): bitbake("ccache-native") - self.assertTrue(os.path.isfile(os.path.join(get_bb_var('STAGING_BINDIR_NATIVE', 'ccache-native'), "ccache")), msg = "No ccache found under %s" % str(get_bb_var('STAGING_BINDIR_NATIVE', 'ccache-native'))) + bb_vars = get_bb_vars(['SYSROOT_DESTDIR', 'bindir'], 'ccache-native') + p = bb_vars['SYSROOT_DESTDIR'] + bb_vars['bindir'] + "/" + "ccache" + self.assertTrue(os.path.isfile(p), msg = "No ccache found (%s)" % p) self.write_config('INHERIT += "ccache"') 
self.add_command_to_tearDown('bitbake -c clean m4') bitbake("m4 -f -c compile") - res = runCmd("grep ccache %s" % (os.path.join(get_bb_var("WORKDIR","m4"),"temp/log.do_compile")), ignore_status=True) - self.assertEqual(0, res.status, msg="No match for ccache in m4 log.do_compile. For further details: %s" % os.path.join(get_bb_var("WORKDIR","m4"),"temp/log.do_compile")) + log_compile = os.path.join(get_bb_var("WORKDIR","m4"), "temp/log.do_compile") + res = runCmd("grep ccache %s" % log_compile, ignore_status=True) + self.assertEqual(0, res.status, msg="No match for ccache in m4 log.do_compile. For further details: %s" % log_compile) @testcase(1435) def test_read_only_image(self): + distro_features = get_bb_var('DISTRO_FEATURES') + if not ('x11' in distro_features and 'opengl' in distro_features): + self.skipTest('core-image-sato requires x11 and opengl in distro features') self.write_config('IMAGE_FEATURES += "read-only-rootfs"') bitbake("core-image-sato") # do_image will fail if there are any pending postinsts @@ -157,7 +163,6 @@ class BuildhistoryTests(BuildhistoryBase): @testcase(294) def test_buildhistory_buildtime_pr_backwards(self): - self.add_command_to_tearDown('cleanup-workdir') target = 'xcursor-transparent-theme' error = "ERROR:.*QA Issue: Package version for package %s went backwards which would break package feeds from (.*-r1.* to .*-r0.*)" % target self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True) @@ -169,11 +174,11 @@ class ArchiverTest(oeSelfTest): """ Test for archiving the work directory and exporting the source files. """ - self.add_command_to_tearDown('cleanup-workdir') self.write_config("INHERIT += \"archiver\"\nARCHIVER_MODE[src] = \"original\"\nARCHIVER_MODE[srpm] = \"1\"") res = bitbake("xcursor-transparent-theme", ignore_status=True) self.assertEqual(res.status, 0, "\nCouldn't build xcursortransparenttheme.\nbitbake output %s" % res.output) - pkgs_path = g.glob(str(self.builddir) + "/tmp/deploy/sources/allarch*/xcurs*") + deploy_dir_src = get_bb_var('DEPLOY_DIR_SRC') + pkgs_path = g.glob(str(deploy_dir_src) + "/allarch*/xcurs*") src_file_glob = str(pkgs_path[0]) + "/xcursor*.src.rpm" tar_file_glob = str(pkgs_path[0]) + "/xcursor*.tar.gz" - self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.gz files under tmp/deploy/sources/allarch*/xcursor*") + self.assertTrue((g.glob(src_file_glob) and g.glob(tar_file_glob)), "Couldn't find .src.rpm and .tar.gz files under %s/allarch*/xcursor*" % deploy_dir_src) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/containerimage.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/containerimage.py new file mode 100644 index 000000000..def481f14 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/containerimage.py @@ -0,0 +1,83 @@ +import os + +from oeqa.selftest.base import oeSelfTest +from oeqa.utils.commands import bitbake, get_bb_vars, runCmd + +# This test builds an image with using the "container" IMAGE_FSTYPE, and +# ensures that then files in the image are only the ones expected. +# +# The only package added to the image is container_image_testpkg, which +# contains one file. However, due to some other things not cleaning up during +# rootfs creation, there is some cruft. Ideally bugs will be filed and the +# cruft removed, but for now we whitelist some known set. +# +# Also for performance reasons we're only checking the cruft when using ipk. 
+# When using deb, and rpm it is a bit different and we could test all +# of them, but this test is more to catch if other packages get added by +# default other than what is in ROOTFS_BOOTSTRAP_INSTALL. +# +class ContainerImageTests(oeSelfTest): + + # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that + # the conversion type bar gets added as a dep as well + def test_expected_files(self): + + def get_each_path_part(path): + if path: + part = [ '.' + path + '/' ] + result = get_each_path_part(path.rsplit('/', 1)[0]) + if result: + return part + result + else: + return part + else: + return None + + self.write_config("""PREFERRED_PROVIDER_virtual/kernel = "linux-dummy" +IMAGE_FSTYPES = "container" +PACKAGE_CLASSES = "package_ipk" +IMAGE_FEATURES = "" +""") + + bbvars = get_bb_vars(['bindir', 'sysconfdir', 'localstatedir', + 'DEPLOY_DIR_IMAGE', 'IMAGE_LINK_NAME'], + target='container-test-image') + expected_files = [ + './', + '.{bindir}/theapp', + '.{sysconfdir}/default/', + '.{sysconfdir}/default/postinst', + '.{sysconfdir}/ld.so.cache', + '.{sysconfdir}/timestamp', + '.{sysconfdir}/version', + './run/', + '.{localstatedir}/cache/', + '.{localstatedir}/cache/ldconfig/', + '.{localstatedir}/cache/ldconfig/aux-cache', + '.{localstatedir}/cache/opkg/', + '.{localstatedir}/lib/', + '.{localstatedir}/lib/opkg/' + ] + + expected_files = [ x.format(bindir=bbvars['bindir'], + sysconfdir=bbvars['sysconfdir'], + localstatedir=bbvars['localstatedir']) + for x in expected_files ] + + # Since tar lists all directories individually, make sure each element + # from bindir, sysconfdir, etc is added + expected_files += get_each_path_part(bbvars['bindir']) + expected_files += get_each_path_part(bbvars['sysconfdir']) + expected_files += get_each_path_part(bbvars['localstatedir']) + + expected_files = sorted(expected_files) + + # Build the image of course + bitbake('container-test-image') + + image = os.path.join(bbvars['DEPLOY_DIR_IMAGE'], + bbvars['IMAGE_LINK_NAME'] + '.tar.bz2') + + # Ensure the files in the image are what we expect + result = runCmd("tar tf {} | sort".format(image), shell=True) + self.assertEqual(result.output.split('\n'), expected_files) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/devtool.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/devtool.py index 302ec5d42..57048665c 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/devtool.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/devtool.py @@ -45,9 +45,12 @@ class DevtoolBase(oeSelfTest): if var and var in checkvars: needvalue = checkvars.pop(var) if needvalue is None: - self.fail('Variable %s should not appear in recipe') + self.fail('Variable %s should not appear in recipe, but value is being set to "%s"' % (var, value)) if isinstance(needvalue, set): - value = set(value.split()) + if var == 'LICENSE': + value = set(value.split(' & ')) + else: + value = set(value.split()) self.assertEqual(value, needvalue, 'values for %s do not match' % var) @@ -210,9 +213,10 @@ class DevtoolTests(DevtoolBase): bitbake('pv -c cleansstate') # Test devtool build result = runCmd('devtool build pv') - installdir = get_bb_var('D', 'pv') + bb_vars = get_bb_vars(['D', 'bindir'], 'pv') + installdir = bb_vars['D'] self.assertTrue(installdir, 'Could not query installdir variable') - bindir = get_bb_var('bindir', 'pv') + bindir = bb_vars['bindir'] self.assertTrue(bindir, 'Could not query bindir variable') if bindir[0] == '/': bindir = bindir[1:] @@ -260,8 +264,6 @@ class DevtoolTests(DevtoolBase): 
@testcase(1162) def test_devtool_add_library(self): - # We don't have the ability to pick up this dependency automatically yet... - bitbake('libusb1') # Fetch source tempdir = tempfile.mkdtemp(prefix='devtoolqa') self.track_for_cleanup(tempdir) @@ -290,13 +292,17 @@ class DevtoolTests(DevtoolBase): result = runCmd('recipetool setvar %s EXTRA_OECMAKE -- \'-DPYTHON_BINDINGS=OFF -DLIBFTDI_CMAKE_CONFIG_DIR=${datadir}/cmake/Modules\'' % recipefile) with open(recipefile, 'a') as f: f.write('\nFILES_${PN}-dev += "${datadir}/cmake/Modules"\n') + # We don't have the ability to pick up this dependency automatically yet... + f.write('\nDEPENDS += "libusb1"\n') + f.write('\nTESTLIBOUTPUT = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}"\n') # Test devtool build result = runCmd('devtool build libftdi') - staging_libdir = get_bb_var('STAGING_LIBDIR', 'libftdi') - self.assertTrue(staging_libdir, 'Could not query STAGING_LIBDIR variable') + bb_vars = get_bb_vars(['TESTLIBOUTPUT', 'STAMP'], 'libftdi') + staging_libdir = bb_vars['TESTLIBOUTPUT'] + self.assertTrue(staging_libdir, 'Could not query TESTLIBOUTPUT variable') self.assertTrue(os.path.isfile(os.path.join(staging_libdir, 'libftdi1.so.2.1.0')), "libftdi binary not found in STAGING_LIBDIR. Output of devtool build libftdi %s" % result.output) # Test devtool reset - stampprefix = get_bb_var('STAMP', 'libftdi') + stampprefix = bb_vars['STAMP'] result = runCmd('devtool reset libftdi') result = runCmd('devtool status') self.assertNotIn('libftdi', result.output) @@ -353,12 +359,11 @@ class DevtoolTests(DevtoolBase): @testcase(1161) def test_devtool_add_fetch_git(self): - # Fetch source tempdir = tempfile.mkdtemp(prefix='devtoolqa') self.track_for_cleanup(tempdir) - url = 'git://git.yoctoproject.org/libmatchbox' - checkrev = '462f0652055d89c648ddd54fd7b03f175c2c6973' - testrecipe = 'libmatchbox2' + url = 'gitsm://git.yoctoproject.org/mraa' + checkrev = 'ae127b19a50aa54255e4330ccfdd9a5d058e581d' + testrecipe = 'mraa' srcdir = os.path.join(tempdir, testrecipe) # Test devtool add self.track_for_cleanup(self.workspacedir) @@ -366,7 +371,7 @@ class DevtoolTests(DevtoolBase): self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') result = runCmd('devtool add %s %s -a -f %s' % (testrecipe, srcdir, url)) self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created: %s' % result.output) - self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure.ac in source directory') + self.assertTrue(os.path.isfile(os.path.join(srcdir, 'imraa', 'imraa.c')), 'Unable to find imraa/imraa.c in source directory') # Test devtool status result = runCmd('devtool status') self.assertIn(testrecipe, result.output) @@ -376,7 +381,7 @@ class DevtoolTests(DevtoolBase): self.assertIn('_git.bb', recipefile, 'Recipe file incorrectly named') checkvars = {} checkvars['S'] = '${WORKDIR}/git' - checkvars['PV'] = '1.12+git${SRCPV}' + checkvars['PV'] = '1.0+git${SRCPV}' checkvars['SRC_URI'] = url checkvars['SRCREV'] = '${AUTOREV}' self._test_recipe_contents(recipefile, checkvars, []) @@ -385,7 +390,7 @@ class DevtoolTests(DevtoolBase): shutil.rmtree(srcdir) url_rev = '%s;rev=%s' % (url, checkrev) result = runCmd('devtool add %s %s -f "%s" -V 1.5' % (testrecipe, srcdir, url_rev)) - self.assertTrue(os.path.isfile(os.path.join(srcdir, 'configure.ac')), 'Unable to find configure.ac in source directory') + self.assertTrue(os.path.isfile(os.path.join(srcdir, 'imraa', 'imraa.c')), 'Unable to 
find imraa/imraa.c in source directory') # Test devtool status result = runCmd('devtool status') self.assertIn(testrecipe, result.output) @@ -430,9 +435,8 @@ class DevtoolTests(DevtoolBase): @testcase(1164) def test_devtool_modify(self): - # Clean up anything in the workdir/sysroot/sstate cache - bitbake('mdadm -c cleansstate') - # Try modifying a recipe + import oe.path + tempdir = tempfile.mkdtemp(prefix='devtoolqa') self.track_for_cleanup(tempdir) self.track_for_cleanup(self.workspacedir) @@ -443,35 +447,95 @@ class DevtoolTests(DevtoolBase): self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'conf', 'layer.conf')), 'Workspace directory not created') matches = glob.glob(os.path.join(self.workspacedir, 'appends', 'mdadm_*.bbappend')) self.assertTrue(matches, 'bbappend not created %s' % result.output) + # Test devtool status result = runCmd('devtool status') self.assertIn('mdadm', result.output) self.assertIn(tempdir, result.output) - # Check git repo self._check_src_repo(tempdir) - # Try building - bitbake('mdadm') - # Try making (minor) modifications to the source - result = runCmd("sed -i 's!^\.TH.*!.TH MDADM 8 \"\" v9.999-custom!' %s" % os.path.join(tempdir, 'mdadm.8.in')) - bitbake('mdadm -c package') - pkgd = get_bb_var('PKGD', 'mdadm') + + bitbake('mdadm -C unpack') + + def check_line(checkfile, expected, message, present=True): + # Check for $expected, on a line on its own, in checkfile. + with open(checkfile, 'r') as f: + if present: + self.assertIn(expected + '\n', f, message) + else: + self.assertNotIn(expected + '\n', f, message) + + modfile = os.path.join(tempdir, 'mdadm.8.in') + bb_vars = get_bb_vars(['PKGD', 'mandir'], 'mdadm') + pkgd = bb_vars['PKGD'] self.assertTrue(pkgd, 'Could not query PKGD variable') - mandir = get_bb_var('mandir', 'mdadm') + mandir = bb_vars['mandir'] self.assertTrue(mandir, 'Could not query mandir variable') - if mandir[0] == '/': - mandir = mandir[1:] - with open(os.path.join(pkgd, mandir, 'man8', 'mdadm.8'), 'r') as f: - for line in f: - if line.startswith('.TH'): - self.assertEqual(line.rstrip(), '.TH MDADM 8 "" v9.999-custom', 'man file not modified. man searched file path: %s' % os.path.join(pkgd, mandir, 'man8', 'mdadm.8')) - # Test devtool reset - stampprefix = get_bb_var('STAMP', 'mdadm') + manfile = oe.path.join(pkgd, mandir, 'man8', 'mdadm.8') + + check_line(modfile, 'Linux Software RAID', 'Could not find initial string') + check_line(modfile, 'antique pin sardine', 'Unexpectedly found replacement string', present=False) + + result = runCmd("sed -i 's!^Linux Software RAID$!antique pin sardine!' %s" % modfile) + check_line(modfile, 'antique pin sardine', 'mdadm.8.in file not modified (sed failed)') + + bitbake('mdadm -c package') + check_line(manfile, 'antique pin sardine', 'man file not modified. man searched file path: %s' % manfile) + + result = runCmd('git checkout -- %s' % modfile, cwd=tempdir) + check_line(modfile, 'Linux Software RAID', 'man .in file not restored (git failed)') + + bitbake('mdadm -c package') + check_line(manfile, 'Linux Software RAID', 'man file not updated. 
man searched file path: %s' % manfile) + result = runCmd('devtool reset mdadm') result = runCmd('devtool status') self.assertNotIn('mdadm', result.output) - self.assertTrue(stampprefix, 'Unable to get STAMP value for recipe mdadm') - matches = glob.glob(stampprefix + '*') - self.assertFalse(matches, 'Stamp files exist for recipe mdadm that should have been cleaned') + + def test_devtool_buildclean(self): + def assertFile(path, *paths): + f = os.path.join(path, *paths) + self.assertTrue(os.path.exists(f), "%r does not exist" % f) + def assertNoFile(path, *paths): + f = os.path.join(path, *paths) + self.assertFalse(os.path.exists(os.path.join(f)), "%r exists" % f) + + # Clean up anything in the workdir/sysroot/sstate cache + bitbake('mdadm m4 -c cleansstate') + # Try modifying a recipe + tempdir_mdadm = tempfile.mkdtemp(prefix='devtoolqa') + tempdir_m4 = tempfile.mkdtemp(prefix='devtoolqa') + builddir_m4 = tempfile.mkdtemp(prefix='devtoolqa') + self.track_for_cleanup(tempdir_mdadm) + self.track_for_cleanup(tempdir_m4) + self.track_for_cleanup(builddir_m4) + self.track_for_cleanup(self.workspacedir) + self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') + self.add_command_to_tearDown('bitbake -c clean mdadm m4') + self.write_recipeinc('m4', 'EXTERNALSRC_BUILD = "%s"\ndo_clean() {\n\t:\n}\n' % builddir_m4) + try: + runCmd('devtool modify mdadm -x %s' % tempdir_mdadm) + runCmd('devtool modify m4 -x %s' % tempdir_m4) + assertNoFile(tempdir_mdadm, 'mdadm') + assertNoFile(builddir_m4, 'src/m4') + result = bitbake('m4 -e') + result = bitbake('mdadm m4 -c compile') + self.assertEqual(result.status, 0) + assertFile(tempdir_mdadm, 'mdadm') + assertFile(builddir_m4, 'src/m4') + # Check that buildclean task exists and does call make clean + bitbake('mdadm m4 -c buildclean') + assertNoFile(tempdir_mdadm, 'mdadm') + assertNoFile(builddir_m4, 'src/m4') + bitbake('mdadm m4 -c compile') + assertFile(tempdir_mdadm, 'mdadm') + assertFile(builddir_m4, 'src/m4') + bitbake('mdadm m4 -c clean') + # Check that buildclean task is run before clean for B == S + assertNoFile(tempdir_mdadm, 'mdadm') + # Check that buildclean task is not run before clean for B != S + assertFile(builddir_m4, 'src/m4') + finally: + self.delete_recipeinc('m4') @testcase(1166) def test_devtool_modify_invalid(self): @@ -594,8 +658,8 @@ class DevtoolTests(DevtoolBase): @testcase(1378) def test_devtool_modify_virtual(self): # Try modifying a virtual recipe - virtrecipe = 'virtual/libx11' - realrecipe = 'libx11' + virtrecipe = 'virtual/make' + realrecipe = 'make' tempdir = tempfile.mkdtemp(prefix='devtoolqa') self.track_for_cleanup(tempdir) self.track_for_cleanup(self.workspacedir) @@ -618,8 +682,9 @@ class DevtoolTests(DevtoolBase): def test_devtool_update_recipe(self): # Check preconditions testrecipe = 'minicom' - recipefile = get_bb_var('FILE', testrecipe) - src_uri = get_bb_var('SRC_URI', testrecipe) + bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) + recipefile = bb_vars['FILE'] + src_uri = bb_vars['SRC_URI'] self.assertNotIn('git://', src_uri, 'This test expects the %s recipe to NOT be a git recipe' % testrecipe) self._check_repo_status(os.path.dirname(recipefile), []) # First, modify a recipe @@ -650,8 +715,9 @@ class DevtoolTests(DevtoolBase): def test_devtool_update_recipe_git(self): # Check preconditions testrecipe = 'mtd-utils' - recipefile = get_bb_var('FILE', testrecipe) - src_uri = get_bb_var('SRC_URI', testrecipe) + bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) + recipefile = bb_vars['FILE'] + 
src_uri = bb_vars['SRC_URI'] self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe) patches = [] for entry in src_uri.split(): @@ -670,7 +736,7 @@ class DevtoolTests(DevtoolBase): self._check_src_repo(tempdir) # Add a couple of commits # FIXME: this only tests adding, need to also test update and remove - result = runCmd('echo "# Additional line" >> Makefile', cwd=tempdir) + result = runCmd('echo "# Additional line" >> Makefile.am', cwd=tempdir) result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempdir) result = runCmd('echo "A new file" > devtool-new-file', cwd=tempdir) result = runCmd('git add devtool-new-file', cwd=tempdir) @@ -719,8 +785,9 @@ class DevtoolTests(DevtoolBase): def test_devtool_update_recipe_append(self): # Check preconditions testrecipe = 'mdadm' - recipefile = get_bb_var('FILE', testrecipe) - src_uri = get_bb_var('SRC_URI', testrecipe) + bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) + recipefile = bb_vars['FILE'] + src_uri = bb_vars['SRC_URI'] self.assertNotIn('git://', src_uri, 'This test expects the %s recipe to NOT be a git recipe' % testrecipe) self._check_repo_status(os.path.dirname(recipefile), []) # First, modify a recipe @@ -787,8 +854,9 @@ class DevtoolTests(DevtoolBase): def test_devtool_update_recipe_append_git(self): # Check preconditions testrecipe = 'mtd-utils' - recipefile = get_bb_var('FILE', testrecipe) - src_uri = get_bb_var('SRC_URI', testrecipe) + bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) + recipefile = bb_vars['FILE'] + src_uri = bb_vars['SRC_URI'] self.assertIn('git://', src_uri, 'This test expects the %s recipe to be a git recipe' % testrecipe) for entry in src_uri.split(): if entry.startswith('git://'): @@ -807,7 +875,7 @@ class DevtoolTests(DevtoolBase): # Check git repo self._check_src_repo(tempsrcdir) # Add a commit - result = runCmd('echo "# Additional line" >> Makefile', cwd=tempsrcdir) + result = runCmd('echo "# Additional line" >> Makefile.am', cwd=tempsrcdir) result = runCmd('git commit -a -m "Change the Makefile"', cwd=tempsrcdir) self.add_command_to_tearDown('cd %s; rm -f %s/*.patch; git checkout .' 
% (os.path.dirname(recipefile), testrecipe)) # Create a temporary layer @@ -887,6 +955,8 @@ class DevtoolTests(DevtoolBase): result = runCmd('devtool modify %s -x %s' % (testrecipe, tempdir)) # Check git repo self._check_src_repo(tempdir) + # Try building just to ensure we haven't broken that + bitbake("%s" % testrecipe) # Edit / commit local source runCmd('echo "/* Foobar */" >> oe-local-files/makedevs.c', cwd=tempdir) runCmd('echo "Foo" > oe-local-files/new-local', cwd=tempdir) @@ -943,6 +1013,78 @@ class DevtoolTests(DevtoolBase): ('??', '.*/0001-Add-new-file.patch$')] self._check_repo_status(os.path.dirname(recipefile), expected_status) + def test_devtool_update_recipe_local_files_3(self): + # First, modify the recipe + testrecipe = 'devtool-test-localonly' + bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) + recipefile = bb_vars['FILE'] + src_uri = bb_vars['SRC_URI'] + tempdir = tempfile.mkdtemp(prefix='devtoolqa') + self.track_for_cleanup(tempdir) + self.track_for_cleanup(self.workspacedir) + self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') + # (don't bother with cleaning the recipe on teardown, we won't be building it) + result = runCmd('devtool modify %s' % testrecipe) + # Modify one file + runCmd('echo "Another line" >> file2', cwd=os.path.join(self.workspacedir, 'sources', testrecipe, 'oe-local-files')) + self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) + result = runCmd('devtool update-recipe %s' % testrecipe) + expected_status = [(' M', '.*/%s/file2$' % testrecipe)] + self._check_repo_status(os.path.dirname(recipefile), expected_status) + + def test_devtool_update_recipe_local_patch_gz(self): + # First, modify the recipe + testrecipe = 'devtool-test-patch-gz' + if get_bb_var('DISTRO') == 'poky-tiny': + self.skipTest("The DISTRO 'poky-tiny' does not provide the dependencies needed by %s" % testrecipe) + bb_vars = get_bb_vars(['FILE', 'SRC_URI'], testrecipe) + recipefile = bb_vars['FILE'] + src_uri = bb_vars['SRC_URI'] + tempdir = tempfile.mkdtemp(prefix='devtoolqa') + self.track_for_cleanup(tempdir) + self.track_for_cleanup(self.workspacedir) + self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') + # (don't bother with cleaning the recipe on teardown, we won't be building it) + result = runCmd('devtool modify %s' % testrecipe) + # Modify one file + srctree = os.path.join(self.workspacedir, 'sources', testrecipe) + runCmd('echo "Another line" >> README', cwd=srctree) + runCmd('git commit -a --amend --no-edit', cwd=srctree) + self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) + result = runCmd('devtool update-recipe %s' % testrecipe) + expected_status = [(' M', '.*/%s/readme.patch.gz$' % testrecipe)] + self._check_repo_status(os.path.dirname(recipefile), expected_status) + patch_gz = os.path.join(os.path.dirname(recipefile), testrecipe, 'readme.patch.gz') + result = runCmd('file %s' % patch_gz) + if 'gzip compressed data' not in result.output: + self.fail('New patch file is not gzipped - file reports:\n%s' % result.output) + + def test_devtool_update_recipe_local_files_subdir(self): + # Try devtool extract on a recipe that has a file with subdir= set in + # SRC_URI such that it overwrites a file that was in an archive that + # was also in SRC_URI + # First, modify the recipe + testrecipe = 'devtool-test-subdir' + bb_vars = 
get_bb_vars(['FILE', 'SRC_URI'], testrecipe) + recipefile = bb_vars['FILE'] + src_uri = bb_vars['SRC_URI'] + tempdir = tempfile.mkdtemp(prefix='devtoolqa') + self.track_for_cleanup(tempdir) + self.track_for_cleanup(self.workspacedir) + self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') + # (don't bother with cleaning the recipe on teardown, we won't be building it) + result = runCmd('devtool modify %s' % testrecipe) + testfile = os.path.join(self.workspacedir, 'sources', testrecipe, 'testfile') + self.assertTrue(os.path.exists(testfile), 'Extracted source could not be found') + with open(testfile, 'r') as f: + contents = f.read().rstrip() + self.assertEqual(contents, 'Modified version', 'File has apparently not been overwritten as it should have been') + # Test devtool update-recipe without modifying any files + self.add_command_to_tearDown('cd %s; rm %s/*; git checkout %s %s' % (os.path.dirname(recipefile), testrecipe, testrecipe, os.path.basename(recipefile))) + result = runCmd('devtool update-recipe %s' % testrecipe) + expected_status = [] + self._check_repo_status(os.path.dirname(recipefile), expected_status) + @testcase(1163) def test_devtool_extract(self): tempdir = tempfile.mkdtemp(prefix='devtoolqa') @@ -960,7 +1102,7 @@ class DevtoolTests(DevtoolBase): tempdir = tempfile.mkdtemp(prefix='devtoolqa') # Try devtool extract self.track_for_cleanup(tempdir) - result = runCmd('devtool extract virtual/libx11 %s' % tempdir) + result = runCmd('devtool extract virtual/make %s' % tempdir) self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile.am')), 'Extracted source could not be found') # devtool extract shouldn't create the workspace self.assertFalse(os.path.exists(self.workspacedir)) @@ -1054,9 +1196,10 @@ class DevtoolTests(DevtoolBase): result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand)) # Check if it deployed all of the files with the right ownership/perms # First look on the host - need to do this under pseudo to get the correct ownership/perms - installdir = get_bb_var('D', testrecipe) - fakerootenv = get_bb_var('FAKEROOTENV', testrecipe) - fakerootcmd = get_bb_var('FAKEROOTCMD', testrecipe) + bb_vars = get_bb_vars(['D', 'FAKEROOTENV', 'FAKEROOTCMD'], testrecipe) + installdir = bb_vars['D'] + fakerootenv = bb_vars['FAKEROOTENV'] + fakerootcmd = bb_vars['FAKEROOTCMD'] result = runCmd('%s %s find . 
-type f -exec ls -l {} \;' % (fakerootenv, fakerootcmd), cwd=installdir) filelist1 = self._process_ls_output(result.output) @@ -1207,6 +1350,49 @@ class DevtoolTests(DevtoolBase): result = runCmd("devtool --quiet selftest-reverse \"%s\"" % s) self.assertEqual(result.output, s[::-1]) + def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths): + dstdir = basedstdir + self.assertTrue(os.path.exists(dstdir)) + for p in paths: + dstdir = os.path.join(dstdir, p) + if not os.path.exists(dstdir): + os.makedirs(dstdir) + self.track_for_cleanup(dstdir) + dstfile = os.path.join(dstdir, os.path.basename(srcfile)) + if srcfile != dstfile: + shutil.copy(srcfile, dstfile) + self.track_for_cleanup(dstfile) + + def test_devtool_load_plugin(self): + """Test that devtool loads only the first found plugin in BBPATH.""" + + self.track_for_cleanup(self.workspacedir) + self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') + + devtool = runCmd("which devtool") + fromname = runCmd("devtool --quiet pluginfile") + srcfile = fromname.output + bbpath = get_bb_var('BBPATH') + searchpath = bbpath.split(':') + [os.path.dirname(devtool.output)] + plugincontent = [] + with open(srcfile) as fh: + plugincontent = fh.readlines() + try: + self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') + for path in searchpath: + self._copy_file_with_cleanup(srcfile, path, 'lib', 'devtool') + result = runCmd("devtool --quiet count") + self.assertEqual(result.output, '1') + result = runCmd("devtool --quiet multiloaded") + self.assertEqual(result.output, "no") + for path in searchpath: + result = runCmd("devtool --quiet bbdir") + self.assertEqual(result.output, path) + os.unlink(os.path.join(result.output, 'lib', 'devtool', 'bbpath.py')) + finally: + with open(srcfile, 'w') as fh: + fh.writelines(plugincontent) + def _setup_test_devtool_finish_upgrade(self): # Check preconditions self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') @@ -1362,3 +1548,149 @@ class DevtoolTests(DevtoolBase): files.remove(foundpatch) if files: self.fail('Unexpected file(s) copied next to bbappend: %s' % ', '.join(files)) + + def test_devtool_rename(self): + # Check preconditions + self.assertTrue(not os.path.exists(self.workspacedir), 'This test cannot be run with a workspace directory under the build directory') + self.track_for_cleanup(self.workspacedir) + self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') + + # First run devtool add + # We already have this recipe in OE-Core, but that doesn't matter + recipename = 'i2c-tools' + recipever = '3.1.2' + recipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, recipever)) + url = 'http://downloads.yoctoproject.org/mirror/sources/i2c-tools-%s.tar.bz2' % recipever + def add_recipe(): + result = runCmd('devtool add %s' % url) + self.assertTrue(os.path.exists(recipefile), 'Expected recipe file not created') + self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'sources', recipename)), 'Source directory not created') + checkvars = {} + checkvars['S'] = None + checkvars['SRC_URI'] = url.replace(recipever, '${PV}') + self._test_recipe_contents(recipefile, checkvars, []) + add_recipe() + # Now rename it - change both name and version + newrecipename = 'mynewrecipe' + newrecipever = '456' + newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, newrecipever)) + result = runCmd('devtool 
rename %s %s -V %s' % (recipename, newrecipename, newrecipever)) + self.assertTrue(os.path.exists(newrecipefile), 'Recipe file not renamed') + self.assertFalse(os.path.exists(os.path.join(self.workspacedir, 'recipes', recipename)), 'Old recipe directory still exists') + newsrctree = os.path.join(self.workspacedir, 'sources', newrecipename) + self.assertTrue(os.path.exists(newsrctree), 'Source directory not renamed') + checkvars = {} + checkvars['S'] = '${WORKDIR}/%s-%s' % (recipename, recipever) + checkvars['SRC_URI'] = url + self._test_recipe_contents(newrecipefile, checkvars, []) + # Try again - change just name this time + result = runCmd('devtool reset -n %s' % newrecipename) + shutil.rmtree(newsrctree) + add_recipe() + newrecipefile = os.path.join(self.workspacedir, 'recipes', newrecipename, '%s_%s.bb' % (newrecipename, recipever)) + result = runCmd('devtool rename %s %s' % (recipename, newrecipename)) + self.assertTrue(os.path.exists(newrecipefile), 'Recipe file not renamed') + self.assertFalse(os.path.exists(os.path.join(self.workspacedir, 'recipes', recipename)), 'Old recipe directory still exists') + self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'sources', newrecipename)), 'Source directory not renamed') + checkvars = {} + checkvars['S'] = '${WORKDIR}/%s-${PV}' % recipename + checkvars['SRC_URI'] = url.replace(recipever, '${PV}') + self._test_recipe_contents(newrecipefile, checkvars, []) + # Try again - change just version this time + result = runCmd('devtool reset -n %s' % newrecipename) + shutil.rmtree(newsrctree) + add_recipe() + newrecipefile = os.path.join(self.workspacedir, 'recipes', recipename, '%s_%s.bb' % (recipename, newrecipever)) + result = runCmd('devtool rename %s -V %s' % (recipename, newrecipever)) + self.assertTrue(os.path.exists(newrecipefile), 'Recipe file not renamed') + self.assertTrue(os.path.exists(os.path.join(self.workspacedir, 'sources', recipename)), 'Source directory no longer exists') + checkvars = {} + checkvars['S'] = '${WORKDIR}/${BPN}-%s' % recipever + checkvars['SRC_URI'] = url + self._test_recipe_contents(newrecipefile, checkvars, []) + + @testcase(1577) + def test_devtool_virtual_kernel_modify(self): + """ + Summary: The purpose of this test case is to verify that + devtool modify works correctly when building + the kernel. + Dependencies: NA + Steps: 1. Build kernel with bitbake. + 2. Save the config file generated. + 3. Clean the environment. + 4. Use `devtool modify virtual/kernel` to validate following: + 4.1 The source is checked out correctly. + 4.2 The resulting configuration is the same as + what was get on step 2. + 4.3 The Kernel can be build correctly. + 4.4 Changes made on the source are reflected on the + subsequent builds. + 4.5 Changes on the configuration are reflected on the + subsequent builds + Expected: devtool modify is able to checkout the source of the kernel + and modification to the source and configurations are reflected + when building the kernel. + """ + #Set machine to qemxu86 to be able to modify the kernel and + #verify the modification. 
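# --- Editorial sketch, not part of the patch: a condensed view of the devtool
# --- kernel flow this test automates, using the same oeqa.utils.commands
# --- helpers the file already imports. virtual/kernel is only a provider
# --- alias, so the test resolves it via PREFERRED_PROVIDER_virtual/kernel and
# --- passes the concrete recipe name to `devtool build`.
import tempfile
from oeqa.utils.commands import runCmd, bitbake, get_bb_var

kernel_provider = get_bb_var('PREFERRED_PROVIDER_virtual/kernel')   # typically linux-yocto for qemux86
bitbake('%s -c configure' % kernel_provider)                        # steps 1-2: generate the baseline .config
tempdir = tempfile.mkdtemp(prefix='devtoolqa')                      # scratch source tree, as created below
runCmd('devtool modify virtual/kernel -x %s' % tempdir)             # step 4.1: check out the kernel source
runCmd('devtool build %s' % kernel_provider)                        # step 4.3: rebuild through devtool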
+ features = 'MACHINE = "qemux86"\n' + self.write_config(features) + kernel_provider = get_bb_var('PREFERRED_PROVIDER_virtual/kernel') + # Clean up the enviroment + bitbake('%s -c clean' % kernel_provider) + tempdir = tempfile.mkdtemp(prefix='devtoolqa') + self.track_for_cleanup(tempdir) + self.track_for_cleanup(self.workspacedir) + self.add_command_to_tearDown('bitbake-layers remove-layer */workspace') + self.add_command_to_tearDown('bitbake -c clean %s' % kernel_provider) + #Step 1 + #Here is just generated the config file instead of all the kernel to optimize the + #time of executing this test case. + bitbake('%s -c configure' % kernel_provider) + bbconfig = os.path.join(get_bb_var('B', kernel_provider),'.config') + buildir= get_bb_var('TOPDIR') + #Step 2 + runCmd('cp %s %s' % (bbconfig, buildir)) + self.assertTrue(os.path.exists(os.path.join(buildir, '.config')), + 'Could not copy .config file from kernel') + + tmpconfig = os.path.join(buildir, '.config') + #Step 3 + bitbake('%s -c clean' % kernel_provider) + #Step 4.1 + runCmd('devtool modify virtual/kernel -x %s' % tempdir) + self.assertTrue(os.path.exists(os.path.join(tempdir, 'Makefile')), + 'Extracted source could not be found') + #Step 4.2 + configfile = os.path.join(tempdir,'.config') + diff = runCmd('diff %s %s' % (tmpconfig, configfile)) + self.assertEqual(0,diff.status,'Kernel .config file is not the same using bitbake and devtool') + #Step 4.3 + #NOTE: virtual/kernel is mapped to kernel_provider + result = runCmd('devtool build %s' % kernel_provider) + self.assertEqual(0,result.status,'Cannot build kernel using `devtool build`') + kernelfile = os.path.join(get_bb_var('KBUILD_OUTPUT', kernel_provider), 'vmlinux') + self.assertTrue(os.path.exists(kernelfile),'Kernel was not build correctly') + + #Modify the kernel source, this is specific for qemux86 + modfile = os.path.join(tempdir,'arch/x86/boot/header.S') + modstring = "use a boot loader - Devtool kernel testing" + modapplied = runCmd("sed -i 's/boot loader/%s/' %s" % (modstring, modfile)) + self.assertEqual(0,modapplied.status,'Modification to %s on kernel source failed' % modfile) + #Modify the configuration + codeconfigfile = os.path.join(tempdir,'.config.new') + modconfopt = "CONFIG_SG_POOL=n" + modconf = runCmd("sed -i 's/CONFIG_SG_POOL=y/%s/' %s" % (modconfopt, codeconfigfile)) + self.assertEqual(0,modconf.status,'Modification to %s failed' % codeconfigfile) + #Build again kernel with devtool + rebuild = runCmd('devtool build %s' % kernel_provider) + self.assertEqual(0,rebuild.status,'Fail to build kernel after modification of source and config') + #Step 4.4 + bzimagename = 'bzImage-' + get_bb_var('KERNEL_VERSION_NAME', kernel_provider) + bzimagefile = os.path.join(get_bb_var('D', kernel_provider),'boot', bzimagename) + checkmodcode = runCmd("grep '%s' %s" % (modstring, bzimagefile)) + self.assertEqual(0,checkmodcode.status,'Modification on kernel source failed') + #Step 4.5 + checkmodconfg = runCmd("grep %s %s" % (modconfopt, codeconfigfile)) + self.assertEqual(0,checkmodconfg.status,'Modification to configuration file failed') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/eSDK.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/eSDK.py index 9d5c68094..1596c6e9d 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/eSDK.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/eSDK.py @@ -6,16 +6,15 @@ import glob import logging import subprocess import oeqa.utils.ftools as ftools -from oeqa.utils.decorators import testcase +from 
oeqa.utils.decorators import testcase from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import runCmd, bitbake, get_bb_var -from oeqa.utils.httpserver import HTTPService +from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars class oeSDKExtSelfTest(oeSelfTest): """ # Bugzilla Test Plan: 6033 # This code is planned to be part of the automation for eSDK containig - # Install libraries and headers, image generation binary feeds. + # Install libraries and headers, image generation binary feeds, sdk-update. """ @staticmethod @@ -24,7 +23,7 @@ class oeSDKExtSelfTest(oeSelfTest): # what environment load oe-selftest, i586, x86_64 pattern = os.path.join(tmpdir_eSDKQA, 'environment-setup-*') return glob.glob(pattern)[0] - + @staticmethod def run_esdk_cmd(env_eSDK, tmpdir_eSDKQA, cmd, postconfig=None, **options): if postconfig: @@ -47,53 +46,66 @@ class oeSDKExtSelfTest(oeSelfTest): def get_eSDK_toolchain(image): pn_task = '%s -c populate_sdk_ext' % image - sdk_deploy = get_bb_var('SDK_DEPLOY', pn_task) - toolchain_name = get_bb_var('TOOLCHAINEXT_OUTPUTNAME', pn_task) + bb_vars = get_bb_vars(['SDK_DEPLOY', 'TOOLCHAINEXT_OUTPUTNAME'], pn_task) + sdk_deploy = bb_vars['SDK_DEPLOY'] + toolchain_name = bb_vars['TOOLCHAINEXT_OUTPUTNAME'] return os.path.join(sdk_deploy, toolchain_name + '.sh') - - @classmethod - def setUpClass(cls): - # Start to serve sstate dir + @staticmethod + def update_configuration(cls, image, tmpdir_eSDKQA, env_eSDK, ext_sdk_path): sstate_dir = os.path.join(os.environ['BUILDDIR'], 'sstate-cache') - cls.http_service = HTTPService(sstate_dir) - cls.http_service.start() - http_url = "127.0.0.1:%d" % cls.http_service.port - - image = 'core-image-minimal' + oeSDKExtSelfTest.generate_eSDK(cls.image) + cls.ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(cls.image) + runCmd("%s -y -d \"%s\"" % (cls.ext_sdk_path, cls.tmpdir_eSDKQA)) + + cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA) + + sstate_config=""" +SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS" +SSTATE_MIRRORS = "file://.* file://%s/PATH" +CORE_IMAGE_EXTRA_INSTALL = "perl" + """ % sstate_dir + + with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f: + f.write(sstate_config) + + @classmethod + def setUpClass(cls): cls.tmpdir_eSDKQA = tempfile.mkdtemp(prefix='eSDKQA') - oeSDKExtSelfTest.generate_eSDK(image) + + sstate_dir = get_bb_var('SSTATE_DIR') + + cls.image = 'core-image-minimal' + oeSDKExtSelfTest.generate_eSDK(cls.image) # Install eSDK - ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(image) - runCmd("%s -y -d \"%s\"" % (ext_sdk_path, cls.tmpdir_eSDKQA)) + cls.ext_sdk_path = oeSDKExtSelfTest.get_eSDK_toolchain(cls.image) + runCmd("%s -y -d \"%s\"" % (cls.ext_sdk_path, cls.tmpdir_eSDKQA)) cls.env_eSDK = oeSDKExtSelfTest.get_esdk_environment('', cls.tmpdir_eSDKQA) # Configure eSDK to use sstate mirror from poky sstate_config=""" SDK_LOCAL_CONF_WHITELIST = "SSTATE_MIRRORS" -SSTATE_MIRRORS = "file://.* http://%s/PATH" - """ % http_url +SSTATE_MIRRORS = "file://.* file://%s/PATH" + """ % sstate_dir with open(os.path.join(cls.tmpdir_eSDKQA, 'conf', 'local.conf'), 'a+') as f: f.write(sstate_config) - @classmethod def tearDownClass(cls): shutil.rmtree(cls.tmpdir_eSDKQA) - cls.http_service.stop() - @testcase (1471) + @testcase (1602) def test_install_libraries_headers(self): pn_sstate = 'bc' bitbake(pn_sstate) cmd = "devtool sdk-install %s " % pn_sstate oeSDKExtSelfTest.run_esdk_cmd(self.env_eSDK, self.tmpdir_eSDKQA, cmd) - - @testcase(1472) + + 
@testcase(1603) def test_image_generation_binary_feeds(self): image = 'core-image-minimal' cmd = "devtool build-image %s" % image diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/image_typedep.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/image_typedep.py new file mode 100644 index 000000000..256142d25 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/image_typedep.py @@ -0,0 +1,51 @@ +import os + +from oeqa.selftest.base import oeSelfTest +from oeqa.utils.commands import bitbake + +class ImageTypeDepTests(oeSelfTest): + + # Verify that when specifying a IMAGE_TYPEDEP_ of the form "foo.bar" that + # the conversion type bar gets added as a dep as well + def test_conversion_typedep_added(self): + + self.write_recipeinc('emptytest', """ +# Try to empty out the default dependency list +PACKAGE_INSTALL = "" +DISTRO_EXTRA_RDEPENDS="" + +LICENSE = "MIT" +IMAGE_FSTYPES = "testfstype" + +IMAGE_TYPES_MASKED += "testfstype" +IMAGE_TYPEDEP_testfstype = "tar.bz2" + +inherit image + +""") + # First get the dependency that should exist for bz2, it will look + # like CONVERSION_DEPENDS_bz2="somedep" + result = bitbake('-e emptytest') + + for line in result.output.split('\n'): + if line.startswith('CONVERSION_DEPENDS_bz2'): + dep = line.split('=')[1].strip('"') + break + + # Now get the dependency task list and check for the expected task + # dependency + bitbake('-g emptytest') + + taskdependsfile = os.path.join(self.builddir, 'task-depends.dot') + dep = dep + ".do_populate_sysroot" + depfound = False + expectedline = '"emptytest.do_rootfs" -> "{}"'.format(dep) + + with open(taskdependsfile, "r") as f: + for line in f: + if line.strip() == expectedline: + depfound = True + break + + if not depfound: + raise AssertionError("\"{}\" not found".format(expectedline)) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/imagefeatures.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/imagefeatures.py index d015c4908..76896c798 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/imagefeatures.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/imagefeatures.py @@ -91,9 +91,9 @@ class ImageFeatures(oeSelfTest): AutomatedBy: Daniel Istrate """ - features = 'DISTRO_FEATURES_append = " wayland"\n' - features += 'CORE_IMAGE_EXTRA_INSTALL += "wayland weston"' - self.write_config(features) + distro_features = get_bb_var('DISTRO_FEATURES') + if not ('opengl' in distro_features and 'wayland' in distro_features): + self.skipTest('neither opengl nor wayland present on DISTRO_FEATURES so core-image-weston cannot be built') # Build a core-image-weston bitbake('core-image-weston') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/layerappend.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/layerappend.py index 4de5034a9..37bb32cd1 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/layerappend.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/layerappend.py @@ -55,7 +55,7 @@ SRC_URI_append += "file://appendtest.txt" @testcase(1196) def test_layer_appends(self): corebase = get_bb_var("COREBASE") - stagingdir = get_bb_var("STAGING_DIR_TARGET") + for l in ["0", "1", "2"]: layer = os.path.join(corebase, "meta-layertest" + l) self.assertFalse(os.path.exists(layer)) @@ -83,6 +83,7 @@ SRC_URI_append += "file://appendtest.txt" self.layerappend = "BBLAYERS += \"{0}/meta-layertest0 {0}/meta-layertest1 {0}/meta-layertest2\"".format(corebase) ftools.append_file(self.builddir + "/conf/bblayers.conf", self.layerappend) + stagingdir = 
get_bb_var("SYSROOT_DESTDIR", "layerappendtest") bitbake("layerappendtest") data = ftools.read_file(stagingdir + "/appendtest.txt") self.assertEqual(data, "Layer 2 test") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/liboe.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/liboe.py index 35131eb24..0b0301def 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/liboe.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/liboe.py @@ -1,11 +1,16 @@ from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import get_bb_var, bitbake, runCmd +from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake, runCmd import oe.path import glob import os import os.path class LibOE(oeSelfTest): + + @classmethod + def setUpClass(cls): + cls.tmp_dir = get_bb_var('TMPDIR') + def test_copy_tree_special(self): """ Summary: oe.path.copytree() should copy files with special character @@ -14,8 +19,7 @@ class LibOE(oeSelfTest): Product: OE-Core Author: Joshua Lock """ - tmp_dir = get_bb_var('TMPDIR') - testloc = oe.path.join(tmp_dir, 'liboetests') + testloc = oe.path.join(self.tmp_dir, 'liboetests') src = oe.path.join(testloc, 'src') dst = oe.path.join(testloc, 'dst') bb.utils.mkdirhier(testloc) @@ -40,8 +44,7 @@ class LibOE(oeSelfTest): Product: OE-Core Author: Joshua Lock """ - tmp_dir = get_bb_var('TMPDIR') - testloc = oe.path.join(tmp_dir, 'liboetests') + testloc = oe.path.join(self.tmp_dir, 'liboetests') src = oe.path.join(testloc, 'src') dst = oe.path.join(testloc, 'dst') bb.utils.mkdirhier(testloc) @@ -50,7 +53,11 @@ class LibOE(oeSelfTest): # ensure we have setfattr available bitbake("attr-native") - bindir = get_bb_var('STAGING_BINDIR_NATIVE') + + bb_vars = get_bb_vars(['SYSROOT_DESTDIR', 'bindir'], 'attr-native') + destdir = bb_vars['SYSROOT_DESTDIR'] + bindir = bb_vars['bindir'] + bindir = destdir + bindir # create a file with xattr and copy it open(oe.path.join(src, testfilename), 'w+b').close() @@ -70,8 +77,7 @@ class LibOE(oeSelfTest): Product: OE-Core Author: Joshua Lock """ - tmp_dir = get_bb_var('TMPDIR') - testloc = oe.path.join(tmp_dir, 'liboetests') + testloc = oe.path.join(self.tmp_dir, 'liboetests') src = oe.path.join(testloc, 'src') dst = oe.path.join(testloc, 'dst') bb.utils.mkdirhier(testloc) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/manifest.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/manifest.py index 44d0404c5..fe6f94964 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/manifest.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/manifest.py @@ -2,7 +2,7 @@ import unittest import os from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import get_bb_var, bitbake +from oeqa.utils.commands import get_bb_var, get_bb_vars, bitbake from oeqa.utils.decorators import testcase class ManifestEntry: @@ -84,9 +84,10 @@ class VerifyManifest(oeSelfTest): try: mdir = self.get_dir_from_bb_var('SDK_DEPLOY', self.buildtarget) for k in d_target.keys(): + bb_vars = get_bb_vars(['SDK_NAME', 'SDK_VERSION'], self.buildtarget) mfilename[k] = "{}-toolchain-{}.{}.manifest".format( - get_bb_var("SDK_NAME", self.buildtarget), - get_bb_var("SDK_VERSION", self.buildtarget), + bb_vars['SDK_NAME'], + bb_vars['SDK_VERSION'], k) mpath[k] = os.path.join(mdir, mfilename[k]) if not os.path.isfile(mpath[k]): diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git 
a/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/buildhistory.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/buildhistory.py new file mode 100644 index 000000000..5ed4b026f --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/buildhistory.py @@ -0,0 +1,88 @@ +import os +import unittest +import tempfile +from git import Repo +from oeqa.utils.commands import get_bb_var +from oe.buildhistory_analysis import blob_to_dict, compare_dict_blobs + +class TestBlobParsing(unittest.TestCase): + + def setUp(self): + import time + self.repo_path = tempfile.mkdtemp(prefix='selftest-buildhistory', + dir=get_bb_var('TOPDIR')) + + self.repo = Repo.init(self.repo_path) + self.test_file = "test" + self.var_map = {} + + def tearDown(self): + import shutil + shutil.rmtree(self.repo_path) + + def commit_vars(self, to_add={}, to_remove = [], msg="A commit message"): + if len(to_add) == 0 and len(to_remove) == 0: + return + + for k in to_remove: + self.var_map.pop(x,None) + for k in to_add: + self.var_map[k] = to_add[k] + + with open(os.path.join(self.repo_path, self.test_file), 'w') as repo_file: + for k in self.var_map: + repo_file.write("%s = %s\n" % (k, self.var_map[k])) + + self.repo.git.add("--all") + self.repo.git.commit(message=msg) + + def test_blob_to_dict(self): + """ + Test convertion of git blobs to dictionary + """ + valuesmap = { "foo" : "1", "bar" : "2" } + self.commit_vars(to_add = valuesmap) + + blob = self.repo.head.commit.tree.blobs[0] + self.assertEqual(valuesmap, blob_to_dict(blob), + "commit was not translated correctly to dictionary") + + def test_compare_dict_blobs(self): + """ + Test comparisson of dictionaries extracted from git blobs + """ + changesmap = { "foo-2" : ("2", "8"), "bar" : ("","4"), "bar-2" : ("","5")} + + self.commit_vars(to_add = { "foo" : "1", "foo-2" : "2", "foo-3" : "3" }) + blob1 = self.repo.heads.master.commit.tree.blobs[0] + + self.commit_vars(to_add = { "foo-2" : "8", "bar" : "4", "bar-2" : "5" }) + blob2 = self.repo.heads.master.commit.tree.blobs[0] + + change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file), + blob1, blob2, False, False) + + var_changes = { x.fieldname : (x.oldvalue, x.newvalue) for x in change_records} + self.assertEqual(changesmap, var_changes, "Changes not reported correctly") + + def test_compare_dict_blobs_default(self): + """ + Test default values for comparisson of git blob dictionaries + """ + defaultmap = { x : ("default", "1") for x in ["PKG", "PKGE", "PKGV", "PKGR"]} + + self.commit_vars(to_add = { "foo" : "1" }) + blob1 = self.repo.heads.master.commit.tree.blobs[0] + + self.commit_vars(to_add = { "PKG" : "1", "PKGE" : "1", "PKGV" : "1", "PKGR" : "1" }) + blob2 = self.repo.heads.master.commit.tree.blobs[0] + + change_records = compare_dict_blobs(os.path.join(self.repo_path, self.test_file), + blob1, blob2, False, False) + + var_changes = {} + for x in change_records: + oldvalue = "default" if ("default" in x.oldvalue) else x.oldvalue + var_changes[x.fieldname] = (oldvalue, x.newvalue) + + self.assertEqual(defaultmap, var_changes, "Defaults not set properly") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/elf.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/elf.py new file mode 100644 index 000000000..1f59037ed --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/elf.py @@ -0,0 +1,21 @@ +import unittest +import oe.qa + +class TestElf(unittest.TestCase): + def test_machine_name(self): + """ + Test elf_machine_to_string() + 
""" + self.assertEqual(oe.qa.elf_machine_to_string(0x02), "SPARC") + self.assertEqual(oe.qa.elf_machine_to_string(0x03), "x86") + self.assertEqual(oe.qa.elf_machine_to_string(0x08), "MIPS") + self.assertEqual(oe.qa.elf_machine_to_string(0x14), "PowerPC") + self.assertEqual(oe.qa.elf_machine_to_string(0x28), "ARM") + self.assertEqual(oe.qa.elf_machine_to_string(0x2A), "SuperH") + self.assertEqual(oe.qa.elf_machine_to_string(0x32), "IA-64") + self.assertEqual(oe.qa.elf_machine_to_string(0x3E), "x86-64") + self.assertEqual(oe.qa.elf_machine_to_string(0xB7), "AArch64") + + self.assertEqual(oe.qa.elf_machine_to_string(0x00), "Unknown (0)") + self.assertEqual(oe.qa.elf_machine_to_string(0xDEADBEEF), "Unknown (3735928559)") + self.assertEqual(oe.qa.elf_machine_to_string("foobar"), "Unknown ('foobar')") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/license.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/license.py new file mode 100644 index 000000000..c38888618 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/license.py @@ -0,0 +1,68 @@ +import unittest +import oe.license + +class SeenVisitor(oe.license.LicenseVisitor): + def __init__(self): + self.seen = [] + oe.license.LicenseVisitor.__init__(self) + + def visit_Str(self, node): + self.seen.append(node.s) + +class TestSingleLicense(unittest.TestCase): + licenses = [ + "GPLv2", + "LGPL-2.0", + "Artistic", + "MIT", + "GPLv3+", + "FOO_BAR", + ] + invalid_licenses = ["GPL/BSD"] + + @staticmethod + def parse(licensestr): + visitor = SeenVisitor() + visitor.visit_string(licensestr) + return visitor.seen + + def test_single_licenses(self): + for license in self.licenses: + licenses = self.parse(license) + self.assertListEqual(licenses, [license]) + + def test_invalid_licenses(self): + for license in self.invalid_licenses: + with self.assertRaises(oe.license.InvalidLicense) as cm: + self.parse(license) + self.assertEqual(cm.exception.license, license) + +class TestSimpleCombinations(unittest.TestCase): + tests = { + "FOO&BAR": ["FOO", "BAR"], + "BAZ & MOO": ["BAZ", "MOO"], + "ALPHA|BETA": ["ALPHA"], + "BAZ&MOO|FOO": ["FOO"], + "FOO&BAR|BAZ": ["FOO", "BAR"], + } + preferred = ["ALPHA", "FOO", "BAR"] + + def test_tests(self): + def choose(a, b): + if all(lic in self.preferred for lic in b): + return b + else: + return a + + for license, expected in self.tests.items(): + licenses = oe.license.flattened_licenses(license, choose) + self.assertListEqual(licenses, expected) + +class TestComplexCombinations(TestSimpleCombinations): + tests = { + "FOO & (BAR | BAZ)&MOO": ["FOO", "BAR", "MOO"], + "(ALPHA|(BETA&THETA)|OMEGA)&DELTA": ["OMEGA", "DELTA"], + "((ALPHA|BETA)&FOO)|BAZ": ["BETA", "FOO"], + "(GPL-2.0|Proprietary)&BSD-4-clause&MIT": ["GPL-2.0", "BSD-4-clause", "MIT"], + } + preferred = ["BAR", "OMEGA", "BETA", "GPL-2.0"] diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/path.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/path.py new file mode 100644 index 000000000..44d068143 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/path.py @@ -0,0 +1,89 @@ +import unittest +import oe, oe.path +import tempfile +import os +import errno +import shutil + +class TestRealPath(unittest.TestCase): + DIRS = [ "a", "b", "etc", "sbin", "usr", "usr/bin", "usr/binX", "usr/sbin", "usr/include", "usr/include/gdbm" ] + FILES = [ "etc/passwd", "b/file" ] + LINKS = [ + ( "bin", "/usr/bin", "/usr/bin" ), + ( "binX", "usr/binX", "/usr/binX" ), + ( "c", "broken", "/broken" ), 
+ ( "etc/passwd-1", "passwd", "/etc/passwd" ), + ( "etc/passwd-2", "passwd-1", "/etc/passwd" ), + ( "etc/passwd-3", "/etc/passwd-1", "/etc/passwd" ), + ( "etc/shadow-1", "/etc/shadow", "/etc/shadow" ), + ( "etc/shadow-2", "/etc/shadow-1", "/etc/shadow" ), + ( "prog-A", "bin/prog-A", "/usr/bin/prog-A" ), + ( "prog-B", "/bin/prog-B", "/usr/bin/prog-B" ), + ( "usr/bin/prog-C", "../../sbin/prog-C", "/sbin/prog-C" ), + ( "usr/bin/prog-D", "/sbin/prog-D", "/sbin/prog-D" ), + ( "usr/binX/prog-E", "../sbin/prog-E", None ), + ( "usr/bin/prog-F", "../../../sbin/prog-F", "/sbin/prog-F" ), + ( "loop", "a/loop", None ), + ( "a/loop", "../loop", None ), + ( "b/test", "file/foo", "/b/file/foo" ), + ] + + LINKS_PHYS = [ + ( "./", "/", "" ), + ( "binX/prog-E", "/usr/sbin/prog-E", "/sbin/prog-E" ), + ] + + EXCEPTIONS = [ + ( "loop", errno.ELOOP ), + ( "b/test", errno.ENOENT ), + ] + + def __del__(self): + try: + #os.system("tree -F %s" % self.tmpdir) + shutil.rmtree(self.tmpdir) + except: + pass + + def setUp(self): + self.tmpdir = tempfile.mkdtemp(prefix = "oe-test_path") + self.root = os.path.join(self.tmpdir, "R") + + os.mkdir(os.path.join(self.tmpdir, "_real")) + os.symlink("_real", self.root) + + for d in self.DIRS: + os.mkdir(os.path.join(self.root, d)) + for f in self.FILES: + open(os.path.join(self.root, f), "w") + for l in self.LINKS: + os.symlink(l[1], os.path.join(self.root, l[0])) + + def __realpath(self, file, use_physdir, assume_dir = True): + return oe.path.realpath(os.path.join(self.root, file), self.root, + use_physdir, assume_dir = assume_dir) + + def test_norm(self): + for l in self.LINKS: + if l[2] == None: + continue + + target_p = self.__realpath(l[0], True) + target_l = self.__realpath(l[0], False) + + if l[2] != False: + self.assertEqual(target_p, target_l) + self.assertEqual(l[2], target_p[len(self.root):]) + + def test_phys(self): + for l in self.LINKS_PHYS: + target_p = self.__realpath(l[0], True) + target_l = self.__realpath(l[0], False) + + self.assertEqual(l[1], target_p[len(self.root):]) + self.assertEqual(l[2], target_l[len(self.root):]) + + def test_loop(self): + for e in self.EXCEPTIONS: + self.assertRaisesRegex(OSError, r'\[Errno %u\]' % e[1], + self.__realpath, e[0], False, False) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/types.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/types.py new file mode 100644 index 000000000..4fe2746a3 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/types.py @@ -0,0 +1,50 @@ +import unittest +from oe.maketype import create + +class TestBooleanType(unittest.TestCase): + def test_invalid(self): + self.assertRaises(ValueError, create, '', 'boolean') + self.assertRaises(ValueError, create, 'foo', 'boolean') + self.assertRaises(TypeError, create, object(), 'boolean') + + def test_true(self): + self.assertTrue(create('y', 'boolean')) + self.assertTrue(create('yes', 'boolean')) + self.assertTrue(create('1', 'boolean')) + self.assertTrue(create('t', 'boolean')) + self.assertTrue(create('true', 'boolean')) + self.assertTrue(create('TRUE', 'boolean')) + self.assertTrue(create('truE', 'boolean')) + + def test_false(self): + self.assertFalse(create('n', 'boolean')) + self.assertFalse(create('no', 'boolean')) + self.assertFalse(create('0', 'boolean')) + self.assertFalse(create('f', 'boolean')) + self.assertFalse(create('false', 'boolean')) + self.assertFalse(create('FALSE', 'boolean')) + self.assertFalse(create('faLse', 'boolean')) + + def test_bool_equality(self): + self.assertEqual(create('n', 
'boolean'), False) + self.assertNotEqual(create('n', 'boolean'), True) + self.assertEqual(create('y', 'boolean'), True) + self.assertNotEqual(create('y', 'boolean'), False) + +class TestList(unittest.TestCase): + def assertListEqual(self, value, valid, sep=None): + obj = create(value, 'list', separator=sep) + self.assertEqual(obj, valid) + if sep is not None: + self.assertEqual(obj.separator, sep) + self.assertEqual(str(obj), obj.separator.join(obj)) + + def test_list_nosep(self): + testlist = ['alpha', 'beta', 'theta'] + self.assertListEqual('alpha beta theta', testlist) + self.assertListEqual('alpha beta\ttheta', testlist) + self.assertListEqual('alpha', ['alpha']) + + def test_list_usersep(self): + self.assertListEqual('foo:bar', ['foo', 'bar'], ':') + self.assertListEqual('foo:bar:baz', ['foo', 'bar', 'baz'], ':') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/utils.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/utils.py new file mode 100644 index 000000000..7deb10f3c --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oelib/utils.py @@ -0,0 +1,51 @@ +import unittest +from oe.utils import packages_filter_out_system, trim_version + +class TestPackagesFilterOutSystem(unittest.TestCase): + def test_filter(self): + """ + Test that oe.utils.packages_filter_out_system works. + """ + try: + import bb + except ImportError: + self.skipTest("Cannot import bb") + + d = bb.data_smart.DataSmart() + d.setVar("PN", "foo") + + d.setVar("PACKAGES", "foo foo-doc foo-dev") + pkgs = packages_filter_out_system(d) + self.assertEqual(pkgs, []) + + d.setVar("PACKAGES", "foo foo-doc foo-data foo-dev") + pkgs = packages_filter_out_system(d) + self.assertEqual(pkgs, ["foo-data"]) + + d.setVar("PACKAGES", "foo foo-locale-en-gb") + pkgs = packages_filter_out_system(d) + self.assertEqual(pkgs, []) + + d.setVar("PACKAGES", "foo foo-data foo-locale-en-gb") + pkgs = packages_filter_out_system(d) + self.assertEqual(pkgs, ["foo-data"]) + + +class TestTrimVersion(unittest.TestCase): + def test_version_exception(self): + with self.assertRaises(TypeError): + trim_version(None, 2) + with self.assertRaises(TypeError): + trim_version((1, 2, 3), 2) + + def test_num_exception(self): + with self.assertRaises(ValueError): + trim_version("1.2.3", 0) + with self.assertRaises(ValueError): + trim_version("1.2.3", -1) + + def test_valid(self): + self.assertEqual(trim_version("1.2.3", 1), "1") + self.assertEqual(trim_version("1.2.3", 2), "1.2") + self.assertEqual(trim_version("1.2.3", 3), "1.2.3") + self.assertEqual(trim_version("1.2.3", 4), "1.2.3") diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/oescripts.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oescripts.py index 28345dc6a..29547f56a 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/oescripts.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/oescripts.py @@ -10,38 +10,10 @@ from oeqa.selftest.buildhistory import BuildhistoryBase from oeqa.utils.commands import Command, runCmd, bitbake, get_bb_var, get_test_layer from oeqa.utils.decorators import testcase -class TestScripts(oeSelfTest): - - @testcase(300) - def test_cleanup_workdir(self): - path = os.path.dirname(get_bb_var('WORKDIR', 'gzip')) - old_version_recipe = os.path.join(get_bb_var('COREBASE'), 'meta/recipes-extended/gzip/gzip_1.3.12.bb') - old_version = '1.3.12' - bitbake("-c clean gzip") - bitbake("-c clean -b %s" % old_version_recipe) - - if os.path.exists(path): - initial_contents = os.listdir(path) - else: - initial_contents = [] - 
- bitbake('gzip') - intermediary_contents = os.listdir(path) - bitbake("-b %s" % old_version_recipe) - runCmd('cleanup-workdir') - remaining_contents = os.listdir(path) - - expected_contents = [x for x in intermediary_contents if x not in initial_contents] - remaining_not_expected = [x for x in remaining_contents if x not in expected_contents] - self.assertFalse(remaining_not_expected, msg="Not all necessary content has been deleted from %s: %s" % (path, ', '.join(map(str, remaining_not_expected)))) - expected_not_remaining = [x for x in expected_contents if x not in remaining_contents] - self.assertFalse(expected_not_remaining, msg="The script removed extra contents from %s: %s" % (path, ', '.join(map(str, expected_not_remaining)))) - class BuildhistoryDiffTests(BuildhistoryBase): @testcase(295) def test_buildhistory_diff(self): - self.add_command_to_tearDown('cleanup-workdir') target = 'xcursor-transparent-theme' self.run_buildhistory_operation(target, target_config="PR = \"r1\"", change_bh_location=True) self.run_buildhistory_operation(target, target_config="PR = \"r0\"", change_bh_location=False, expect_error=True) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/pkgdata.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/pkgdata.py index 5a63f89ff..d69c3c800 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/pkgdata.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/pkgdata.py @@ -6,7 +6,7 @@ import fnmatch import oeqa.utils.ftools as ftools from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import runCmd, bitbake, get_bb_var +from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars from oeqa.utils.decorators import testcase class OePkgdataUtilTests(oeSelfTest): @@ -16,21 +16,21 @@ class OePkgdataUtilTests(oeSelfTest): # Ensure we have the right data in pkgdata logger = logging.getLogger("selftest") logger.info('Running bitbake to generate pkgdata') - bitbake('glibc busybox zlib bash') + bitbake('busybox zlib m4') @testcase(1203) def test_lookup_pkg(self): # Forward tests - result = runCmd('oe-pkgdata-util lookup-pkg "glibc busybox"') - self.assertEqual(result.output, 'libc6\nbusybox') + result = runCmd('oe-pkgdata-util lookup-pkg "zlib busybox"') + self.assertEqual(result.output, 'libz1\nbusybox') result = runCmd('oe-pkgdata-util lookup-pkg zlib-dev') self.assertEqual(result.output, 'libz-dev') result = runCmd('oe-pkgdata-util lookup-pkg nonexistentpkg', ignore_status=True) self.assertEqual(result.status, 1, "Status different than 1. 
output: %s" % result.output) self.assertEqual(result.output, 'ERROR: The following packages could not be found: nonexistentpkg') # Reverse tests - result = runCmd('oe-pkgdata-util lookup-pkg -r "libc6 busybox"') - self.assertEqual(result.output, 'glibc\nbusybox') + result = runCmd('oe-pkgdata-util lookup-pkg -r "libz1 busybox"') + self.assertEqual(result.output, 'zlib\nbusybox') result = runCmd('oe-pkgdata-util lookup-pkg -r libz-dev') self.assertEqual(result.output, 'zlib-dev') result = runCmd('oe-pkgdata-util lookup-pkg -r nonexistentpkg', ignore_status=True) @@ -41,24 +41,26 @@ class OePkgdataUtilTests(oeSelfTest): def test_read_value(self): result = runCmd('oe-pkgdata-util read-value PN libz1') self.assertEqual(result.output, 'zlib') - result = runCmd('oe-pkgdata-util read-value PKGSIZE bash') + result = runCmd('oe-pkgdata-util read-value PKG libz1') + self.assertEqual(result.output, 'libz1') + result = runCmd('oe-pkgdata-util read-value PKGSIZE m4') pkgsize = int(result.output.strip()) self.assertGreater(pkgsize, 1, "Size should be greater than 1. %s" % result.output) @testcase(1198) def test_find_path(self): - result = runCmd('oe-pkgdata-util find-path /lib/libc.so.6') - self.assertEqual(result.output, 'glibc: /lib/libc.so.6') - result = runCmd('oe-pkgdata-util find-path /bin/bash') - self.assertEqual(result.output, 'bash: /bin/bash') + result = runCmd('oe-pkgdata-util find-path /lib/libz.so.1') + self.assertEqual(result.output, 'zlib: /lib/libz.so.1') + result = runCmd('oe-pkgdata-util find-path /usr/bin/m4') + self.assertEqual(result.output, 'm4: /usr/bin/m4') result = runCmd('oe-pkgdata-util find-path /not/exist', ignore_status=True) self.assertEqual(result.status, 1, "Status different than 1. output: %s" % result.output) self.assertEqual(result.output, 'ERROR: Unable to find any package producing path /not/exist') @testcase(1204) def test_lookup_recipe(self): - result = runCmd('oe-pkgdata-util lookup-recipe "libc6-staticdev busybox"') - self.assertEqual(result.output, 'glibc\nbusybox') + result = runCmd('oe-pkgdata-util lookup-recipe "libz-staticdev busybox"') + self.assertEqual(result.output, 'zlib\nbusybox') result = runCmd('oe-pkgdata-util lookup-recipe libz-dbg') self.assertEqual(result.output, 'zlib') result = runCmd('oe-pkgdata-util lookup-recipe nonexistentpkg', ignore_status=True) @@ -70,12 +72,11 @@ class OePkgdataUtilTests(oeSelfTest): # No arguments result = runCmd('oe-pkgdata-util list-pkgs') pkglist = result.output.split() - self.assertIn('glibc-utils', pkglist, "Listed packages: %s" % result.output) + self.assertIn('zlib', pkglist, "Listed packages: %s" % result.output) self.assertIn('zlib-dev', pkglist, "Listed packages: %s" % result.output) # No pkgspec, runtime result = runCmd('oe-pkgdata-util list-pkgs -r') pkglist = result.output.split() - self.assertIn('libc6-utils', pkglist, "Listed packages: %s" % result.output) self.assertIn('libz-dev', pkglist, "Listed packages: %s" % result.output) # With recipe specified result = runCmd('oe-pkgdata-util list-pkgs -p zlib') @@ -124,10 +125,11 @@ class OePkgdataUtilTests(oeSelfTest): curpkg = line.split(':')[0] files[curpkg] = [] return files - base_libdir = get_bb_var('base_libdir') - libdir = get_bb_var('libdir') - includedir = get_bb_var('includedir') - mandir = get_bb_var('mandir') + bb_vars = get_bb_vars(['base_libdir', 'libdir', 'includedir', 'mandir']) + base_libdir = bb_vars['base_libdir'] + libdir = bb_vars['libdir'] + includedir = bb_vars['includedir'] + mandir = bb_vars['mandir'] # Test recipe-space package name 
result = runCmd('oe-pkgdata-util list-pkg-files zlib-dev zlib-doc') files = splitoutput(result.output) @@ -205,11 +207,10 @@ class OePkgdataUtilTests(oeSelfTest): self.track_for_cleanup(tempdir) pkglistfile = os.path.join(tempdir, 'pkglist') with open(pkglistfile, 'w') as f: - f.write('libc6\n') f.write('libz1\n') f.write('busybox\n') result = runCmd('oe-pkgdata-util glob %s "*-dev"' % pkglistfile) - desiredresult = ['libc6-dev', 'libz-dev', 'busybox-dev'] + desiredresult = ['libz-dev', 'busybox-dev'] self.assertEqual(sorted(result.output.split()), sorted(desiredresult)) # The following should not error (because when we use this during rootfs construction, sometimes the complementary package won't exist) result = runCmd('oe-pkgdata-util glob %s "*-nonexistent"' % pkglistfile) @@ -222,5 +223,5 @@ class OePkgdataUtilTests(oeSelfTest): @testcase(1206) def test_specify_pkgdatadir(self): - result = runCmd('oe-pkgdata-util -p %s lookup-pkg glibc' % get_bb_var('PKGDATA_DIR')) - self.assertEqual(result.output, 'libc6') + result = runCmd('oe-pkgdata-util -p %s lookup-pkg zlib' % get_bb_var('PKGDATA_DIR')) + self.assertEqual(result.output, 'libz1') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/prservice.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/prservice.py index 0b2dfe649..34d419762 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/prservice.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/prservice.py @@ -12,10 +12,13 @@ from oeqa.utils.decorators import testcase from oeqa.utils.network import get_free_port class BitbakePrTests(oeSelfTest): - + + @classmethod + def setUpClass(cls): + cls.pkgdata_dir = get_bb_var('PKGDATA_DIR') + def get_pr_version(self, package_name): - pkgdata_dir = get_bb_var('PKGDATA_DIR') - package_data_file = os.path.join(pkgdata_dir, 'runtime', package_name) + package_data_file = os.path.join(self.pkgdata_dir, 'runtime', package_name) package_data = ftools.read_file(package_data_file) find_pr = re.search("PKGR: r[0-9]+\.([0-9]+)", package_data) self.assertTrue(find_pr, "No PKG revision found in %s" % package_data_file) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/recipetool.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/recipetool.py index 9b669248f..dc55a5e49 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/recipetool.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/recipetool.py @@ -1,9 +1,11 @@ import os import logging +import shutil import tempfile import urllib.parse -from oeqa.utils.commands import runCmd, bitbake, get_bb_var, create_temp_layer +from oeqa.utils.commands import runCmd, bitbake, get_bb_var +from oeqa.utils.commands import get_bb_vars, create_temp_layer from oeqa.utils.decorators import testcase from oeqa.selftest import devtool @@ -24,6 +26,7 @@ def tearDownModule(): class RecipetoolBase(devtool.DevtoolBase): + def setUpLocal(self): self.templayerdir = templayerdir self.tempdir = tempfile.mkdtemp(prefix='recipetoolqa') @@ -64,12 +67,16 @@ class RecipetoolBase(devtool.DevtoolBase): class RecipetoolTests(RecipetoolBase): + @classmethod def setUpClass(cls): # Ensure we have the right data in shlibs/pkgdata logger = logging.getLogger("selftest") logger.info('Running bitbake to generate pkgdata') bitbake('-c packagedata base-files coreutils busybox selftest-recipetool-appendfile') + bb_vars = get_bb_vars(['COREBASE', 'BBPATH']) + cls.corebase = bb_vars['COREBASE'] + cls.bbpath = bb_vars['BBPATH'] def _try_recipetool_appendfile(self, testrecipe, destfile, newfile, options, 
expectedlines, expectedfiles): cmd = 'recipetool appendfile %s %s %s %s' % (self.templayerdir, destfile, newfile, options) @@ -103,9 +110,8 @@ class RecipetoolTests(RecipetoolBase): # Now try with a file we know should be an alternative # (this is very much a fake example, but one we know is reliably an alternative) self._try_recipetool_appendfile_fail('/bin/ls', self.testfile, ['ERROR: File /bin/ls is an alternative possibly provided by the following recipes:', 'coreutils', 'busybox']) - corebase = get_bb_var('COREBASE') # Need a test file - should be executable - testfile2 = os.path.join(corebase, 'oe-init-build-env') + testfile2 = os.path.join(self.corebase, 'oe-init-build-env') testfile2name = os.path.basename(testfile2) expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', '\n', @@ -134,7 +140,6 @@ class RecipetoolTests(RecipetoolBase): @testcase(1173) def test_recipetool_appendfile_add(self): - corebase = get_bb_var('COREBASE') # Try arbitrary file add to a recipe expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', '\n', @@ -147,7 +152,7 @@ class RecipetoolTests(RecipetoolBase): self._try_recipetool_appendfile('netbase', '/usr/share/something', self.testfile, '-r netbase', expectedlines, ['testfile']) # Try adding another file, this time where the source file is executable # (so we're testing that, plus modifying an existing bbappend) - testfile2 = os.path.join(corebase, 'oe-init-build-env') + testfile2 = os.path.join(self.corebase, 'oe-init-build-env') testfile2name = os.path.basename(testfile2) expectedlines = ['FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n', '\n', @@ -363,20 +368,22 @@ class RecipetoolTests(RecipetoolBase): # Try adding a recipe tempsrc = os.path.join(self.tempdir, 'srctree') os.makedirs(tempsrc) - recipefile = os.path.join(self.tempdir, 'logrotate_3.8.7.bb') - srcuri = 'https://github.com/logrotate/logrotate/archive/r3-8-7.tar.gz' + recipefile = os.path.join(self.tempdir, 'logrotate_3.12.3.bb') + srcuri = 'https://github.com/logrotate/logrotate/releases/download/3.12.3/logrotate-3.12.3.tar.xz' result = runCmd('recipetool create -o %s %s -x %s' % (recipefile, srcuri, tempsrc)) self.assertTrue(os.path.isfile(recipefile)) checkvars = {} checkvars['LICENSE'] = 'GPLv2' - checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=18810669f13b87348459e611d31ab760' - checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/archive/r3-8-7.tar.gz' - checkvars['SRC_URI[md5sum]'] = '6b1aa0e0d07eda3c9a2526520850397a' - checkvars['SRC_URI[sha256sum]'] = 'dece4bfeb9d8374a0ecafa34be139b5a697db5c926dcc69a9b8715431a22e733' + checkvars['LIC_FILES_CHKSUM'] = 'file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263' + checkvars['SRC_URI'] = 'https://github.com/logrotate/logrotate/releases/download/${PV}/logrotate-${PV}.tar.xz' + checkvars['SRC_URI[md5sum]'] = 'a560c57fac87c45b2fc17406cdf79288' + checkvars['SRC_URI[sha256sum]'] = '2e6a401cac9024db2288297e3be1a8ab60e7401ba8e91225218aaf4a27e82a07' self._test_recipe_contents(recipefile, checkvars, []) @testcase(1194) def test_recipetool_create_git(self): + if 'x11' not in get_bb_var('DISTRO_FEATURES'): + self.skipTest('Test requires x11 as distro feature') # Ensure we have the right data in shlibs/pkgdata bitbake('libpng pango libx11 libxext jpeg libcheck') # Try adding a recipe @@ -480,6 +487,46 @@ class RecipetoolTests(RecipetoolBase): inherits = ['pkgconfig', 'autotools'] self._test_recipe_contents(recipefile, checkvars, inherits) + def _copy_file_with_cleanup(self, srcfile, basedstdir, *paths): + dstdir 
= basedstdir + self.assertTrue(os.path.exists(dstdir)) + for p in paths: + dstdir = os.path.join(dstdir, p) + if not os.path.exists(dstdir): + os.makedirs(dstdir) + self.track_for_cleanup(dstdir) + dstfile = os.path.join(dstdir, os.path.basename(srcfile)) + if srcfile != dstfile: + shutil.copy(srcfile, dstfile) + self.track_for_cleanup(dstfile) + + def test_recipetool_load_plugin(self): + """Test that recipetool loads only the first found plugin in BBPATH.""" + + recipetool = runCmd("which recipetool") + fromname = runCmd("recipetool --quiet pluginfile") + srcfile = fromname.output + searchpath = self.bbpath.split(':') + [os.path.dirname(recipetool.output)] + plugincontent = [] + with open(srcfile) as fh: + plugincontent = fh.readlines() + try: + self.assertIn('meta-selftest', srcfile, 'wrong bbpath plugin found') + for path in searchpath: + self._copy_file_with_cleanup(srcfile, path, 'lib', 'recipetool') + result = runCmd("recipetool --quiet count") + self.assertEqual(result.output, '1') + result = runCmd("recipetool --quiet multiloaded") + self.assertEqual(result.output, "no") + for path in searchpath: + result = runCmd("recipetool --quiet bbdir") + self.assertEqual(result.output, path) + os.unlink(os.path.join(result.output, 'lib', 'recipetool', 'bbpath.py')) + finally: + with open(srcfile, 'w') as fh: + fh.writelines(plugincontent) + + class RecipetoolAppendsrcBase(RecipetoolBase): def _try_recipetool_appendsrcfile(self, testrecipe, newfile, destfile, options, expectedlines, expectedfiles): cmd = 'recipetool appendsrcfile %s %s %s %s %s' % (options, self.templayerdir, testrecipe, newfile, destfile) @@ -555,20 +602,23 @@ class RecipetoolAppendsrcBase(RecipetoolBase): self._try_recipetool_appendsrcfiles(testrecipe, newfiles, expectedfiles=expectedfiles, destdir=destdir, options=options) - src_uri = get_bb_var('SRC_URI', testrecipe).split() + bb_vars = get_bb_vars(['SRC_URI', 'FILE', 'FILESEXTRAPATHS'], testrecipe) + src_uri = bb_vars['SRC_URI'].split() for f in expectedfiles: if destdir: self.assertIn('file://%s;subdir=%s' % (f, destdir), src_uri) else: self.assertIn('file://%s' % f, src_uri) - recipefile = get_bb_var('FILE', testrecipe) + recipefile = bb_vars['FILE'] bbappendfile = self._check_bbappend(testrecipe, recipefile, self.templayerdir) filesdir = os.path.join(os.path.dirname(bbappendfile), testrecipe) - filesextrapaths = get_bb_var('FILESEXTRAPATHS', testrecipe).split(':') + filesextrapaths = bb_vars['FILESEXTRAPATHS'].split(':') self.assertIn(filesdir, filesextrapaths) + + class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase): @testcase(1273) @@ -594,8 +644,9 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase): @testcase(1280) def test_recipetool_appendsrcfile_srcdir_basic(self): testrecipe = 'bash' - srcdir = get_bb_var('S', testrecipe) - workdir = get_bb_var('WORKDIR', testrecipe) + bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe) + srcdir = bb_vars['S'] + workdir = bb_vars['WORKDIR'] subdir = os.path.relpath(srcdir, workdir) self._test_appendsrcfile(testrecipe, 'a-file', srcdir=subdir) @@ -620,8 +671,9 @@ class RecipetoolAppendsrcTests(RecipetoolAppendsrcBase): def test_recipetool_appendsrcfile_replace_file_srcdir(self): testrecipe = 'bash' filepath = 'Makefile.in' - srcdir = get_bb_var('S', testrecipe) - workdir = get_bb_var('WORKDIR', testrecipe) + bb_vars = get_bb_vars(['S', 'WORKDIR'], testrecipe) + srcdir = bb_vars['S'] + workdir = bb_vars['WORKDIR'] subdir = os.path.relpath(srcdir, workdir) self._test_appendsrcfile(testrecipe, filepath, srcdir=subdir) diff 
--git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/runqemu.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/runqemu.py new file mode 100644 index 000000000..58c6f96f9 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/runqemu.py @@ -0,0 +1,140 @@ +# +# Copyright (c) 2017 Wind River Systems, Inc. +# + +import re +import logging + +from oeqa.selftest.base import oeSelfTest +from oeqa.utils.commands import bitbake, runqemu, get_bb_var +from oeqa.utils.decorators import testcase + +class RunqemuTests(oeSelfTest): + """Runqemu test class""" + + image_is_ready = False + deploy_dir_image = '' + + def setUpLocal(self): + self.recipe = 'core-image-minimal' + self.machine = 'qemux86-64' + self.fstypes = "ext4 iso hddimg vmdk qcow2 vdi" + self.cmd_common = "runqemu nographic" + + # Avoid emit the same record multiple times. + mainlogger = logging.getLogger("BitBake.Main") + mainlogger.propagate = False + + self.write_config( +""" +MACHINE = "%s" +IMAGE_FSTYPES = "%s" +# 10 means 1 second +SYSLINUX_TIMEOUT = "10" +""" +% (self.machine, self.fstypes) + ) + + if not RunqemuTests.image_is_ready: + RunqemuTests.deploy_dir_image = get_bb_var('DEPLOY_DIR_IMAGE') + bitbake(self.recipe) + RunqemuTests.image_is_ready = True + + @testcase(2001) + def test_boot_machine(self): + """Test runqemu machine""" + cmd = "%s %s" % (self.cmd_common, self.machine) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) + + @testcase(2002) + def test_boot_machine_ext4(self): + """Test runqemu machine ext4""" + cmd = "%s %s ext4" % (self.cmd_common, self.machine) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + with open(qemu.qemurunnerlog) as f: + self.assertTrue('rootfs.ext4' in f.read(), "Failed: %s" % cmd) + + @testcase(2003) + def test_boot_machine_iso(self): + """Test runqemu machine iso""" + cmd = "%s %s iso" % (self.cmd_common, self.machine) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + with open(qemu.qemurunnerlog) as f: + self.assertTrue(' -cdrom ' in f.read(), "Failed: %s" % cmd) + + @testcase(2004) + def test_boot_recipe_image(self): + """Test runqemu recipe-image""" + cmd = "%s %s" % (self.cmd_common, self.recipe) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) + + @testcase(2005) + def test_boot_recipe_image_vmdk(self): + """Test runqemu recipe-image vmdk""" + cmd = "%s %s vmdk" % (self.cmd_common, self.recipe) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + with open(qemu.qemurunnerlog) as f: + self.assertTrue('format=vmdk' in f.read(), "Failed: %s" % cmd) + + @testcase(2006) + def test_boot_recipe_image_vdi(self): + """Test runqemu recipe-image vdi""" + cmd = "%s %s vdi" % (self.cmd_common, self.recipe) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + with open(qemu.qemurunnerlog) as f: + self.assertTrue('format=vdi' in f.read(), "Failed: %s" % cmd) + + @testcase(2007) + def test_boot_deploy(self): + """Test runqemu deploy_dir_image""" + cmd = "%s %s" % (self.cmd_common, self.deploy_dir_image) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) + + @testcase(2008) + def test_boot_deploy_hddimg(self): + """Test runqemu deploy_dir_image hddimg""" + cmd = "%s %s hddimg" % (self.cmd_common, self.deploy_dir_image) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + with open(qemu.qemurunnerlog) as 
f: + self.assertTrue(re.search('file=.*.hddimg', f.read()), "Failed: %s" % cmd) + + @testcase(2009) + def test_boot_machine_slirp(self): + """Test runqemu machine slirp""" + cmd = "%s slirp %s" % (self.cmd_common, self.machine) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + with open(qemu.qemurunnerlog) as f: + self.assertTrue(' -netdev user' in f.read(), "Failed: %s" % cmd) + + @testcase(2009) + def test_boot_machine_slirp_qcow2(self): + """Test runqemu machine slirp qcow2""" + cmd = "%s slirp qcow2 %s" % (self.cmd_common, self.machine) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + with open(qemu.qemurunnerlog) as f: + self.assertTrue('format=qcow2' in f.read(), "Failed: %s" % cmd) + + @testcase(2010) + def test_boot_qemu_boot(self): + """Test runqemu /path/to/image.qemuboot.conf""" + qemuboot_conf = "%s-%s.qemuboot.conf" % (self.recipe, self.machine) + qemuboot_conf = os.path.join(self.deploy_dir_image, qemuboot_conf) + if not os.path.exists(qemuboot_conf): + self.skipTest("%s not found" % qemuboot_conf) + cmd = "%s %s" % (self.cmd_common, qemuboot_conf) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) + + @testcase(2011) + def test_boot_rootfs(self): + """Test runqemu /path/to/rootfs.ext4""" + rootfs = "%s-%s.ext4" % (self.recipe, self.machine) + rootfs = os.path.join(self.deploy_dir_image, rootfs) + if not os.path.exists(rootfs): + self.skipTest("%s not found" % rootfs) + cmd = "%s %s" % (self.cmd_common, rootfs) + with runqemu(self.recipe, ssh=False, launch_cmd=cmd) as qemu: + self.assertTrue(qemu.runner.logged, "Failed: %s" % cmd) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/runtime-test.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/runtime-test.py index c2d5b45a4..e498d046c 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/runtime-test.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/runtime-test.py @@ -1,10 +1,15 @@ from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu +from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu from oeqa.utils.decorators import testcase import os +import re class TestExport(oeSelfTest): + @classmethod + def tearDownClass(cls): + runCmd("rm -rf /tmp/sdk") + def test_testexport_basic(self): """ Summary: Check basic testexport functionality with only ping test enabled. 
@@ -26,22 +31,23 @@ class TestExport(oeSelfTest): bitbake('core-image-minimal') bitbake('-c testexport core-image-minimal') - # Verify if TEST_EXPORT_DIR was created testexport_dir = get_bb_var('TEST_EXPORT_DIR', 'core-image-minimal') + + # Verify if TEST_EXPORT_DIR was created isdir = os.path.isdir(testexport_dir) self.assertEqual(True, isdir, 'Failed to create testexport dir: %s' % testexport_dir) with runqemu('core-image-minimal') as qemu: # Attempt to run runexported.py to perform ping test - runexported_path = os.path.join(testexport_dir, "runexported.py") - testdata_path = os.path.join(testexport_dir, "testdata.json") - cmd = "%s -t %s -s %s %s" % (runexported_path, qemu.ip, qemu.server_ip, testdata_path) + test_path = os.path.join(testexport_dir, "oe-test") + data_file = os.path.join(testexport_dir, 'data', 'testdata.json') + manifest = os.path.join(testexport_dir, 'data', 'manifest') + cmd = ("%s runtime --test-data-file %s --packages-manifest %s " + "--target-ip %s --server-ip %s --quiet" + % (test_path, data_file, manifest, qemu.ip, qemu.server_ip)) result = runCmd(cmd) - self.assertEqual(0, result.status, 'runexported.py returned a non 0 status') - # Verify ping test was succesful - failure = True if 'FAIL' in result.output else False - self.assertNotEqual(True, failure, 'ping test failed') + self.assertEqual(0, result.status, 'oe-test runtime returned a non 0 status') def test_testexport_sdk(self): """ @@ -60,7 +66,6 @@ class TestExport(oeSelfTest): features += 'TEST_SERVER_IP = "192.168.7.1"\n' features += 'TEST_TARGET_IP = "192.168.7.1"\n' features += 'TEST_SUITES = "ping"\n' - features += 'TEST_SUITES_TAGS = "selftest_sdk"\n' features += 'TEST_EXPORT_SDK_ENABLED = "1"\n' features += 'TEST_EXPORT_SDK_PACKAGES = "nativesdk-tar"\n' self.write_config(features) @@ -69,19 +74,31 @@ class TestExport(oeSelfTest): bitbake('core-image-minimal') bitbake('-c testexport core-image-minimal') + needed_vars = ['TEST_EXPORT_DIR', 'TEST_EXPORT_SDK_DIR', 'TEST_EXPORT_SDK_NAME'] + bb_vars = get_bb_vars(needed_vars, 'core-image-minimal') + testexport_dir = bb_vars['TEST_EXPORT_DIR'] + sdk_dir = bb_vars['TEST_EXPORT_SDK_DIR'] + sdk_name = bb_vars['TEST_EXPORT_SDK_NAME'] + # Check for SDK - testexport_dir = get_bb_var('TEST_EXPORT_DIR', 'core-image-minimal') - sdk_dir = get_bb_var('TEST_EXPORT_SDK_DIR', 'core-image-minimal') - tarball_name = "%s.sh" % get_bb_var('TEST_EXPORT_SDK_NAME', 'core-image-minimal') + tarball_name = "%s.sh" % sdk_name tarball_path = os.path.join(testexport_dir, sdk_dir, tarball_name) - self.assertEqual(os.path.isfile(tarball_path), True, "Couldn't find SDK tarball: %s" % tarball_path) + msg = "Couldn't find SDK tarball: %s" % tarball_path + self.assertEqual(os.path.isfile(tarball_path), True, msg) + + # Extract SDK and run tar from SDK + result = runCmd("%s -y -d /tmp/sdk" % tarball_path) + self.assertEqual(0, result.status, "Couldn't extract SDK") - # Run runexported.py - runexported_path = os.path.join(testexport_dir, "runexported.py") - testdata_path = os.path.join(testexport_dir, "testdata.json") - cmd = "%s %s" % (runexported_path, testdata_path) - result = runCmd(cmd) - self.assertEqual(0, result.status, 'runexported.py returned a non 0 status') + env_script = result.output.split()[-1] + result = runCmd(". 
%s; which tar" % env_script, shell=True) + self.assertEqual(0, result.status, "Couldn't setup SDK environment") + is_sdk_tar = True if "/tmp/sdk" in result.output else False + self.assertTrue(is_sdk_tar, "Couldn't setup SDK environment") + + tar_sdk = result.output + result = runCmd("%s --version" % tar_sdk) + self.assertEqual(0, result.status, "Couldn't run tar from SDK") class TestImage(oeSelfTest): @@ -90,16 +107,131 @@ class TestImage(oeSelfTest): """ Summary: Check install packages functionality for testimage/testexport. Expected: 1. Import tests from a directory other than meta. - 2. Check install/unistall of socat. + 2. Check install/uninstall of socat. + 3. Check that remote package feeds can be accessed Product: oe-core Author: Mariano Lopez + Author: Alexander Kanavin """ + if get_bb_var('DISTRO') == 'poky-tiny': + self.skipTest('core-image-full-cmdline not buildable for poky-tiny') features = 'INHERIT += "testimage"\n' features += 'TEST_SUITES = "ping ssh selftest"\n' - features += 'TEST_SUITES_TAGS = "selftest_package_install"\n' + # We don't yet know what the server ip and port will be - they will be patched + # in at the start of the on-image test + features += 'PACKAGE_FEED_URIS = "http://bogus_ip:bogus_port"\n' + features += 'EXTRA_IMAGE_FEATURES += "package-management"\n' + features += 'PACKAGE_CLASSES = "package_rpm"' self.write_config(features) # Build core-image-sato and testimage bitbake('core-image-full-cmdline socat') bitbake('-c testimage core-image-full-cmdline') + +class Postinst(oeSelfTest): + @testcase(1540) + def test_verify_postinst(self): + """ + Summary: The purpose of this test is to verify the execution order of postinst Bugzilla ID: [5319] + Expected : + 1. Compile a minimal image. + 2. The compiled image will add the created layer with the recipes postinst[ abdpt] + 3. Run qemux86 + 4. Validate the task execution order + Author: Francisco Pedraza + """ + features = 'INHERIT += "testimage"\n' + features += 'CORE_IMAGE_EXTRA_INSTALL += "postinst-at-rootfs \ +postinst-delayed-a \ +postinst-delayed-b \ +postinst-delayed-d \ +postinst-delayed-p \ +postinst-delayed-t \ +"\n' + self.write_config(features) + + bitbake('core-image-minimal -f ') + + postinst_list = ['100-postinst-at-rootfs', + '101-postinst-delayed-a', + '102-postinst-delayed-b', + '103-postinst-delayed-d', + '104-postinst-delayed-p', + '105-postinst-delayed-t'] + path_workdir = get_bb_var('WORKDIR','core-image-minimal') + workspacedir = 'testimage/qemu_boot_log' + workspacedir = os.path.join(path_workdir, workspacedir) + rexp = re.compile("^Running postinst .*/(?P.*)\.\.\.$") + with runqemu('core-image-minimal') as qemu: + with open(workspacedir) as f: + found = False + idx = 0 + for line in f.readlines(): + line = line.strip().replace("^M","") + if not line: # To avoid empty lines + continue + m = rexp.search(line) + if m: + self.assertEqual(postinst_list[idx], m.group('postinst'), "Fail") + idx = idx+1 + found = True + elif found: + self.assertEqual(idx, len(postinst_list), "Not found all postinsts") + break + + @testcase(1545) + def test_postinst_rootfs_and_boot(self): + """ + Summary: The purpose of this test case is to verify Post-installation + scripts are called when rootfs is created and also test + that script can be delayed to run at first boot. + Dependencies: NA + Steps: 1. Add proper configuration to local.conf file + 2. Build a "core-image-minimal" image + 3. Verify that file created by postinst_rootfs recipe is + present on rootfs dir. + 4. 
Boot the image created on qemu and verify that the file
+               created by postinst_boot recipe is present on image.
+        Expected: The files are successfully created during rootfs and boot
+                  time for 3 different package managers: rpm,ipk,deb and
+                  for initialization managers: sysvinit and systemd.
+
+        """
+        file_rootfs_name = "this-was-created-at-rootfstime"
+        fileboot_name = "this-was-created-at-first-boot"
+        rootfs_pkg = 'postinst-at-rootfs'
+        boot_pkg = 'postinst-delayed-a'
+        #Step 1
+        features = 'MACHINE = "qemux86"\n'
+        features += 'CORE_IMAGE_EXTRA_INSTALL += "%s %s "\n'% (rootfs_pkg, boot_pkg)
+        features += 'IMAGE_FEATURES += "ssh-server-openssh"\n'
+        for init_manager in ("sysvinit", "systemd"):
+            #for sysvinit no extra configuration is needed,
+            if (init_manager == "systemd"):
+                features += 'DISTRO_FEATURES_append = " systemd"\n'
+                features += 'VIRTUAL-RUNTIME_init_manager = "systemd"\n'
+                features += 'DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit"\n'
+                features += 'VIRTUAL-RUNTIME_initscripts = ""\n'
+            for classes in ("package_rpm package_deb package_ipk",
+                            "package_deb package_rpm package_ipk",
+                            "package_ipk package_deb package_rpm"):
+                features += 'PACKAGE_CLASSES = "%s"\n' % classes
+                self.write_config(features)
+
+                #Step 2
+                bitbake('core-image-minimal')
+
+                #Step 3
+                file_rootfs_created = os.path.join(get_bb_var('IMAGE_ROOTFS',"core-image-minimal"),
+                                                   file_rootfs_name)
+                found = os.path.isfile(file_rootfs_created)
+                self.assertTrue(found, "File %s was not created at rootfs time by %s" % \
+                                (file_rootfs_name, rootfs_pkg))
+
+                #Step 4
+                testcommand = 'ls /etc/'+fileboot_name
+                with runqemu('core-image-minimal') as qemu:
+                    sshargs = '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+                    result = runCmd('ssh %s root@%s %s' % (sshargs, qemu.ip, testcommand))
+                    self.assertEqual(result.status, 0, 'File %s was not created at first boot'% fileboot_name)
diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/signing.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/signing.py
index 606bfd3e9..0ac3d1fac 100644
--- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/signing.py
+++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/signing.py
@@ -1,5 +1,5 @@
 from oeqa.selftest.base import oeSelfTest
-from oeqa.utils.commands import runCmd, bitbake, get_bb_var
+from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars
 import os
 import glob
 import re
@@ -27,15 +27,17 @@ class Signing(oeSelfTest):
         cls.pub_key_path = os.path.join(cls.testlayer_path, 'files', 'signing', "key.pub")
         cls.secret_key_path = os.path.join(cls.testlayer_path, 'files', 'signing', "key.secret")
-        runCmd('gpg --homedir %s --import %s %s' % (cls.gpg_dir, cls.pub_key_path, cls.secret_key_path))
+        runCmd('gpg --batch --homedir %s --import %s %s' % (cls.gpg_dir, cls.pub_key_path, cls.secret_key_path))
 
     @testcase(1362)
     def test_signing_packages(self):
         """
         Summary: Test that packages can be signed in the package feed
         Expected: Package should be signed with the correct key
+        Expected: Images can be created from signed packages
         Product: oe-core
         Author: Daniel Istrate
+        Author: Alexander Kanavin
         AutomatedBy: Daniel Istrate
         """
         import oe.packagedata
@@ -49,7 +51,6 @@ class Signing(oeSelfTest):
         feature = 'INHERIT += "sign_rpm"\n'
         feature += 'RPM_GPG_PASSPHRASE = "test123"\n'
         feature += 'RPM_GPG_NAME = "testuser"\n'
-        feature += 'RPM_GPG_PUBKEY = "%s"\n' % self.pub_key_path
         feature += 'GPG_PATH = "%s"\n' % self.gpg_dir
         self.write_config(feature)
@@ -59,30 +60,38 @@ class Signing(oeSelfTest):
self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe) - pkgdatadir = get_bb_var('PKGDATA_DIR', test_recipe) + needed_vars = ['PKGDATA_DIR', 'DEPLOY_DIR_RPM', 'PACKAGE_ARCH', 'STAGING_BINDIR_NATIVE'] + bb_vars = get_bb_vars(needed_vars, test_recipe) + pkgdatadir = bb_vars['PKGDATA_DIR'] pkgdata = oe.packagedata.read_pkgdatafile(pkgdatadir + "/runtime/ed") if 'PKGE' in pkgdata: pf = pkgdata['PN'] + "-" + pkgdata['PKGE'] + pkgdata['PKGV'] + '-' + pkgdata['PKGR'] else: pf = pkgdata['PN'] + "-" + pkgdata['PKGV'] + '-' + pkgdata['PKGR'] - deploy_dir_rpm = get_bb_var('DEPLOY_DIR_RPM', test_recipe) - package_arch = get_bb_var('PACKAGE_ARCH', test_recipe).replace('-', '_') - staging_bindir_native = get_bb_var('STAGING_BINDIR_NATIVE') + deploy_dir_rpm = bb_vars['DEPLOY_DIR_RPM'] + package_arch = bb_vars['PACKAGE_ARCH'].replace('-', '_') + staging_bindir_native = bb_vars['STAGING_BINDIR_NATIVE'] pkg_deploy = os.path.join(deploy_dir_rpm, package_arch, '.'.join((pf, package_arch, 'rpm'))) # Use a temporary rpmdb rpmdb = tempfile.mkdtemp(prefix='oeqa-rpmdb') - runCmd('%s/rpm --define "_dbpath %s" --import %s' % + runCmd('%s/rpmkeys --define "_dbpath %s" --import %s' % (staging_bindir_native, rpmdb, self.pub_key_path)) - ret = runCmd('%s/rpm --define "_dbpath %s" --checksig %s' % + ret = runCmd('%s/rpmkeys --define "_dbpath %s" --checksig %s' % (staging_bindir_native, rpmdb, pkg_deploy)) # tmp/deploy/rpm/i586/ed-1.9-r0.i586.rpm: rsa sha1 md5 OK - self.assertIn('rsa sha1 md5 OK', ret.output, 'Package signed incorrectly.') + self.assertIn('rsa sha1 (md5) pgp md5 OK', ret.output, 'Package signed incorrectly.') shutil.rmtree(rpmdb) + #Check that an image can be built from signed packages + self.add_command_to_tearDown('bitbake -c clean core-image-minimal') + bitbake('-c clean core-image-minimal') + bitbake('core-image-minimal') + + @testcase(1382) def test_signing_sstate_archive(self): """ @@ -101,13 +110,7 @@ class Signing(oeSelfTest): self.add_command_to_tearDown('bitbake -c clean %s' % test_recipe) self.add_command_to_tearDown('rm -rf %s' % sstatedir) - # Determine the pub key signature - ret = runCmd('gpg --homedir %s --list-keys' % self.gpg_dir) - pub_key = re.search(r'^pub\s+\S+/(\S+)\s+', ret.output, re.M) - self.assertIsNotNone(pub_key, 'Failed to determine the public key signature.') - pub_key = pub_key.group(1) - - feature = 'SSTATE_SIG_KEY ?= "%s"\n' % pub_key + feature = 'SSTATE_SIG_KEY ?= "testuser"\n' feature += 'SSTATE_SIG_PASSPHRASE ?= "test123"\n' feature += 'SSTATE_VERIFY_SIG ?= "1"\n' feature += 'GPG_PATH = "%s"\n' % self.gpg_dir diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/sstate.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/sstate.py index 598972443..f54bc4146 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/sstate.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/sstate.py @@ -6,16 +6,24 @@ import shutil import oeqa.utils.ftools as ftools from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_test_layer +from oeqa.utils.commands import runCmd, bitbake, get_bb_vars, get_test_layer class SStateBase(oeSelfTest): def setUpLocal(self): self.temp_sstate_location = None - self.sstate_path = get_bb_var('SSTATE_DIR') - self.distro = get_bb_var('NATIVELSBSTRING') - self.distro_specific_sstate = os.path.join(self.sstate_path, self.distro) + needed_vars = ['SSTATE_DIR', 'NATIVELSBSTRING', 'TCLIBC', 'TUNE_ARCH', + 'TOPDIR', 'TARGET_VENDOR', 'TARGET_OS'] + bb_vars = get_bb_vars(needed_vars) + 
self.sstate_path = bb_vars['SSTATE_DIR'] + self.hostdistro = bb_vars['NATIVELSBSTRING'] + self.tclibc = bb_vars['TCLIBC'] + self.tune_arch = bb_vars['TUNE_ARCH'] + self.topdir = bb_vars['TOPDIR'] + self.target_vendor = bb_vars['TARGET_VENDOR'] + self.target_os = bb_vars['TARGET_OS'] + self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro) # Creates a special sstate configuration with the option to add sstate mirrors def config_sstate(self, temp_sstate_location=False, add_local_mirrors=[]): @@ -26,9 +34,10 @@ class SStateBase(oeSelfTest): config_temp_sstate = "SSTATE_DIR = \"%s\"" % temp_sstate_path self.append_config(config_temp_sstate) self.track_for_cleanup(temp_sstate_path) - self.sstate_path = get_bb_var('SSTATE_DIR') - self.distro = get_bb_var('NATIVELSBSTRING') - self.distro_specific_sstate = os.path.join(self.sstate_path, self.distro) + bb_vars = get_bb_vars(['SSTATE_DIR', 'NATIVELSBSTRING']) + self.sstate_path = bb_vars['SSTATE_DIR'] + self.hostdistro = bb_vars['NATIVELSBSTRING'] + self.distro_specific_sstate = os.path.join(self.sstate_path, self.hostdistro) if add_local_mirrors: config_set_sstate_if_not_set = 'SSTATE_MIRRORS ?= ""' @@ -42,7 +51,7 @@ class SStateBase(oeSelfTest): def search_sstate(self, filename_regex, distro_specific=True, distro_nonspecific=True): result = [] for root, dirs, files in os.walk(self.sstate_path): - if distro_specific and re.search("%s/[a-z0-9]{2}$" % self.distro, root): + if distro_specific and re.search("%s/[a-z0-9]{2}$" % self.hostdistro, root): for f in files: if re.search(filename_regex, f): result.append(f) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/sstatetests.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/sstatetests.py index f99d74684..e35ddfff5 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/sstatetests.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/sstatetests.py @@ -41,22 +41,19 @@ class SStateTests(SStateBase): @testcase(975) def test_sstate_creation_distro_specific_pass(self): - targetarch = get_bb_var('TUNE_ARCH') - self.run_test_sstate_creation(['binutils-cross-'+ targetarch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) + self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) @testcase(1374) def test_sstate_creation_distro_specific_fail(self): - targetarch = get_bb_var('TUNE_ARCH') - self.run_test_sstate_creation(['binutils-cross-'+ targetarch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False) + self.run_test_sstate_creation(['binutils-cross-'+ self.tune_arch, 'binutils-native'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True, should_pass=False) @testcase(976) def test_sstate_creation_distro_nonspecific_pass(self): - self.run_test_sstate_creation(['glibc-initial'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True) + self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True) @testcase(1375) def test_sstate_creation_distro_nonspecific_fail(self): - self.run_test_sstate_creation(['glibc-initial'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, should_pass=False) - + self.run_test_sstate_creation(['linux-libc-headers'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True, 
should_pass=False) # Test the sstate files deletion part of the do_cleansstate task def run_test_cleansstate_task(self, targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True): @@ -77,17 +74,19 @@ class SStateTests(SStateBase): @testcase(977) def test_cleansstate_task_distro_specific_nonspecific(self): - targetarch = get_bb_var('TUNE_ARCH') - self.run_test_cleansstate_task(['binutils-cross-' + targetarch, 'binutils-native', 'glibc-initial'], distro_specific=True, distro_nonspecific=True, temp_sstate_location=True) + targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native'] + targets.append('linux-libc-headers') + self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=True, temp_sstate_location=True) @testcase(1376) def test_cleansstate_task_distro_nonspecific(self): - self.run_test_cleansstate_task(['glibc-initial'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True) + self.run_test_cleansstate_task(['linux-libc-headers'], distro_specific=False, distro_nonspecific=True, temp_sstate_location=True) @testcase(1377) def test_cleansstate_task_distro_specific(self): - targetarch = get_bb_var('TUNE_ARCH') - self.run_test_cleansstate_task(['binutils-cross-'+ targetarch, 'binutils-native', 'glibc-initial'], distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) + targets = ['binutils-cross-'+ self.tune_arch, 'binutils-native'] + targets.append('linux-libc-headers') + self.run_test_cleansstate_task(targets, distro_specific=True, distro_nonspecific=False, temp_sstate_location=True) # Test rebuilding of distro-specific sstate files @@ -124,13 +123,11 @@ class SStateTests(SStateBase): @testcase(175) def test_rebuild_distro_specific_sstate_cross_native_targets(self): - targetarch = get_bb_var('TUNE_ARCH') - self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + targetarch, 'binutils-native'], temp_sstate_location=True) + self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch, 'binutils-native'], temp_sstate_location=True) @testcase(1372) def test_rebuild_distro_specific_sstate_cross_target(self): - targetarch = get_bb_var('TUNE_ARCH') - self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + targetarch], temp_sstate_location=True) + self.run_test_rebuild_distro_specific_sstate(['binutils-cross-' + self.tune_arch], temp_sstate_location=True) @testcase(1373) def test_rebuild_distro_specific_sstate_native_target(self): @@ -145,10 +142,9 @@ class SStateTests(SStateBase): self.assertTrue(len(global_config) == len(target_config), msg='Lists global_config and target_config should have the same number of elements') self.config_sstate(temp_sstate_location=True, add_local_mirrors=[self.sstate_path]) - # If buildhistory is enabled, we need to disable version-going-backwards QA checks for this test. It may report errors otherwise. - if ('buildhistory' in get_bb_var('USER_CLASSES')) or ('buildhistory' in get_bb_var('INHERIT')): - remove_errors_config = 'ERROR_QA_remove = "version-going-backwards"' - self.append_config(remove_errors_config) + # If buildhistory is enabled, we need to disable version-going-backwards + # QA checks for this test. It may report errors otherwise. + self.append_config('ERROR_QA_remove = "version-going-backwards"') # For not this only checks if random sstate tasks are handled correctly as a group. # In the future we should add control over what tasks we check for. 
@@ -229,8 +225,6 @@ class SStateTests(SStateBase): manually and check using bitbake -S. """ - topdir = get_bb_var('TOPDIR') - targetvendor = get_bb_var('TARGET_VENDOR') self.write_config(""" MACHINE = "qemux86" TMPDIR = "${TOPDIR}/tmp-sstatesamehash" @@ -239,7 +233,7 @@ BUILD_OS = "linux" SDKMACHINE = "x86_64" PACKAGE_CLASSES = "package_rpm package_ipk package_deb" """) - self.track_for_cleanup(topdir + "/tmp-sstatesamehash") + self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") bitbake("core-image-sato -S none") self.write_config(""" MACHINE = "qemux86" @@ -249,7 +243,7 @@ BUILD_OS = "linux" SDKMACHINE = "i686" PACKAGE_CLASSES = "package_rpm package_ipk package_deb" """) - self.track_for_cleanup(topdir + "/tmp-sstatesamehash2") + self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") bitbake("core-image-sato -S none") def get_files(d): @@ -262,9 +256,9 @@ PACKAGE_CLASSES = "package_rpm package_ipk package_deb" continue f.extend(os.path.join(root, name) for name in files) return f - files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/") - files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/") - files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash").replace("i686-linux", "x86_64-linux").replace("i686" + targetvendor + "-linux", "x86_64" + targetvendor + "-linux", ) for x in files2] + files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/") + files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/") + files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash").replace("i686-linux", "x86_64-linux").replace("i686" + self.target_vendor + "-linux", "x86_64" + self.target_vendor + "-linux", ) for x in files2] self.maxDiff = None self.assertCountEqual(files1, files2) @@ -277,18 +271,17 @@ PACKAGE_CLASSES = "package_rpm package_ipk package_deb" builds, override the variables manually and check using bitbake -S. 
""" - topdir = get_bb_var('TOPDIR') self.write_config(""" TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" NATIVELSBSTRING = \"DistroA\" """) - self.track_for_cleanup(topdir + "/tmp-sstatesamehash") + self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") bitbake("core-image-sato -S none") self.write_config(""" TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" NATIVELSBSTRING = \"DistroB\" """) - self.track_for_cleanup(topdir + "/tmp-sstatesamehash2") + self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") bitbake("core-image-sato -S none") def get_files(d): @@ -296,8 +289,8 @@ NATIVELSBSTRING = \"DistroB\" for root, dirs, files in os.walk(d): f.extend(os.path.join(root, name) for name in files) return f - files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/") - files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/") + files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/") + files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/") files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2] self.maxDiff = None self.assertCountEqual(files1, files2) @@ -346,14 +339,11 @@ MULTILIBS = \"\" def sstate_allarch_samesigs(self, configA, configB): - topdir = get_bb_var('TOPDIR') - targetos = get_bb_var('TARGET_OS') - targetvendor = get_bb_var('TARGET_VENDOR') self.write_config(configA) - self.track_for_cleanup(topdir + "/tmp-sstatesamehash") + self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") bitbake("world meta-toolchain -S none") self.write_config(configB) - self.track_for_cleanup(topdir + "/tmp-sstatesamehash2") + self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") bitbake("world meta-toolchain -S none") def get_files(d): @@ -367,15 +357,15 @@ MULTILIBS = \"\" (_, task, _, shash) = name.rsplit(".", 3) f[os.path.join(os.path.basename(root), task)] = shash return f - files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/all" + targetvendor + "-" + targetos) - files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/all" + targetvendor + "-" + targetos) + files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/all" + self.target_vendor + "-" + self.target_os) + files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/all" + self.target_vendor + "-" + self.target_os) self.maxDiff = None self.assertEqual(files1, files2) - nativesdkdir = os.path.basename(glob.glob(topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")[0]) + nativesdkdir = os.path.basename(glob.glob(self.topdir + "/tmp-sstatesamehash/stamps/*-nativesdk*-linux")[0]) - files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir) - files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir) + files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/" + nativesdkdir) + files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/" + nativesdkdir) self.maxDiff = None self.assertEqual(files1, files2) @@ -387,9 +377,6 @@ MULTILIBS = \"\" qemux86copy machine to test this. Also include multilibs in the test. 
""" - topdir = get_bb_var('TOPDIR') - targetos = get_bb_var('TARGET_OS') - targetvendor = get_bb_var('TARGET_VENDOR') self.write_config(""" TMPDIR = \"${TOPDIR}/tmp-sstatesamehash\" MACHINE = \"qemux86\" @@ -397,7 +384,7 @@ require conf/multilib.conf MULTILIBS = "multilib:lib32" DEFAULTTUNE_virtclass-multilib-lib32 = "x86" """) - self.track_for_cleanup(topdir + "/tmp-sstatesamehash") + self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") bitbake("world meta-toolchain -S none") self.write_config(""" TMPDIR = \"${TOPDIR}/tmp-sstatesamehash2\" @@ -406,7 +393,7 @@ require conf/multilib.conf MULTILIBS = "multilib:lib32" DEFAULTTUNE_virtclass-multilib-lib32 = "x86" """) - self.track_for_cleanup(topdir + "/tmp-sstatesamehash2") + self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") bitbake("world meta-toolchain -S none") def get_files(d): @@ -420,8 +407,8 @@ DEFAULTTUNE_virtclass-multilib-lib32 = "x86" if "do_build" not in name and "do_populate_sdk" not in name: f.append(os.path.join(root, name)) return f - files1 = get_files(topdir + "/tmp-sstatesamehash/stamps") - files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps") + files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps") + files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps") files2 = [x.replace("tmp-sstatesamehash2", "tmp-sstatesamehash") for x in files2] self.maxDiff = None self.assertCountEqual(files1, files2) @@ -433,8 +420,6 @@ DEFAULTTUNE_virtclass-multilib-lib32 = "x86" classes inherits should be the same. """ - topdir = get_bb_var('TOPDIR') - targetvendor = get_bb_var('TARGET_VENDOR') self.write_config(""" TMPDIR = "${TOPDIR}/tmp-sstatesamehash" BB_NUMBER_THREADS = "1" @@ -445,8 +430,8 @@ DATE = "20161111" INHERIT_remove = "buildstats-summary buildhistory uninative" http_proxy = "" """) - self.track_for_cleanup(topdir + "/tmp-sstatesamehash") - self.track_for_cleanup(topdir + "/download1") + self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash") + self.track_for_cleanup(self.topdir + "/download1") bitbake("world meta-toolchain -S none") self.write_config(""" TMPDIR = "${TOPDIR}/tmp-sstatesamehash2" @@ -460,8 +445,8 @@ INHERIT_remove = "uninative" INHERIT += "buildstats-summary buildhistory" http_proxy = "http://example.com/" """) - self.track_for_cleanup(topdir + "/tmp-sstatesamehash2") - self.track_for_cleanup(topdir + "/download2") + self.track_for_cleanup(self.topdir + "/tmp-sstatesamehash2") + self.track_for_cleanup(self.topdir + "/download2") bitbake("world meta-toolchain -S none") def get_files(d): @@ -473,8 +458,8 @@ http_proxy = "http://example.com/" base = os.sep.join(root.rsplit(os.sep, 2)[-2:] + [name]) f[base] = shash return f - files1 = get_files(topdir + "/tmp-sstatesamehash/stamps/") - files2 = get_files(topdir + "/tmp-sstatesamehash2/stamps/") + files1 = get_files(self.topdir + "/tmp-sstatesamehash/stamps/") + files2 = get_files(self.topdir + "/tmp-sstatesamehash2/stamps/") # Remove items that are identical in both sets for k,v in files1.items() & files2.items(): del files1[k] @@ -487,8 +472,8 @@ http_proxy = "http://example.com/" if k in files1 and k in files2: print("%s differs:" % k) print(subprocess.check_output(("bitbake-diffsigs", - topdir + "/tmp-sstatesamehash/stamps/" + k + "." + files1[k], - topdir + "/tmp-sstatesamehash2/stamps/" + k + "." + files2[k]))) + self.topdir + "/tmp-sstatesamehash/stamps/" + k + "." + files1[k], + self.topdir + "/tmp-sstatesamehash2/stamps/" + k + "." 
+ files2[k]))) elif k in files1 and k not in files2: print("%s in files1" % k) elif k not in files1 and k in files2: diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/tinfoil.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/tinfoil.py new file mode 100644 index 000000000..73a0c3bac --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/tinfoil.py @@ -0,0 +1,190 @@ +import unittest +import os +import re +import bb.tinfoil + +from oeqa.selftest.base import oeSelfTest +from oeqa.utils.commands import runCmd +from oeqa.utils.decorators import testcase + +class TinfoilTests(oeSelfTest): + """ Basic tests for the tinfoil API """ + + @testcase(1568) + def test_getvar(self): + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(True) + machine = tinfoil.config_data.getVar('MACHINE') + if not machine: + self.fail('Unable to get MACHINE value - returned %s' % machine) + + @testcase(1569) + def test_expand(self): + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(True) + expr = '${@os.getpid()}' + pid = tinfoil.config_data.expand(expr) + if not pid: + self.fail('Unable to expand "%s" - returned %s' % (expr, pid)) + + @testcase(1570) + def test_getvar_bb_origenv(self): + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(True) + origenv = tinfoil.config_data.getVar('BB_ORIGENV', False) + if not origenv: + self.fail('Unable to get BB_ORIGENV value - returned %s' % origenv) + self.assertEqual(origenv.getVar('HOME', False), os.environ['HOME']) + + @testcase(1571) + def test_parse_recipe(self): + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=False, quiet=2) + testrecipe = 'mdadm' + best = tinfoil.find_best_provider(testrecipe) + if not best: + self.fail('Unable to find recipe providing %s' % testrecipe) + rd = tinfoil.parse_recipe_file(best[3]) + self.assertEqual(testrecipe, rd.getVar('PN')) + + @testcase(1572) + def test_parse_recipe_copy_expand(self): + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=False, quiet=2) + testrecipe = 'mdadm' + best = tinfoil.find_best_provider(testrecipe) + if not best: + self.fail('Unable to find recipe providing %s' % testrecipe) + rd = tinfoil.parse_recipe_file(best[3]) + # Check we can get variable values + self.assertEqual(testrecipe, rd.getVar('PN')) + # Check that expanding a value that includes a variable reference works + self.assertEqual(testrecipe, rd.getVar('BPN')) + # Now check that changing the referenced variable's value in a copy gives that + # value when expanding + localdata = bb.data.createCopy(rd) + localdata.setVar('PN', 'hello') + self.assertEqual('hello', localdata.getVar('BPN')) + + @testcase(1573) + def test_parse_recipe_initial_datastore(self): + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=False, quiet=2) + testrecipe = 'mdadm' + best = tinfoil.find_best_provider(testrecipe) + if not best: + self.fail('Unable to find recipe providing %s' % testrecipe) + dcopy = bb.data.createCopy(tinfoil.config_data) + dcopy.setVar('MYVARIABLE', 'somevalue') + rd = tinfoil.parse_recipe_file(best[3], config_data=dcopy) + # Check we can get variable values + self.assertEqual('somevalue', rd.getVar('MYVARIABLE')) + + @testcase(1574) + def test_list_recipes(self): + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=False, quiet=2) + # Check pkg_pn + checkpns = ['tar', 'automake', 'coreutils', 'm4-native', 'nativesdk-gcc'] + pkg_pn = tinfoil.cooker.recipecaches[''].pkg_pn + for pn in checkpns: + self.assertIn(pn, pkg_pn) + # Check pkg_fn + 
checkfns = {'nativesdk-gcc': '^virtual:nativesdk:.*', 'coreutils': '.*/coreutils_.*.bb'} + for fn, pn in tinfoil.cooker.recipecaches[''].pkg_fn.items(): + if pn in checkpns: + if pn in checkfns: + self.assertTrue(re.match(checkfns[pn], fn), 'Entry for %s: %s did not match %s' % (pn, fn, checkfns[pn])) + checkpns.remove(pn) + if checkpns: + self.fail('Unable to find pkg_fn entries for: %s' % ', '.join(checkpns)) + + @testcase(1575) + def test_wait_event(self): + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=True) + # Need to drain events otherwise events that will be masked will still be in the queue + while tinfoil.wait_event(0.25): + pass + tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted']) + pattern = 'conf' + res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine') + self.assertTrue(res) + + eventreceived = False + waitcount = 5 + while waitcount > 0: + event = tinfoil.wait_event(1) + if event: + if isinstance(event, bb.command.CommandCompleted): + break + elif isinstance(event, bb.event.FilesMatchingFound): + self.assertEqual(pattern, event._pattern) + self.assertIn('qemuarm.conf', event._matches) + eventreceived = True + else: + self.fail('Unexpected event: %s' % event) + + waitcount = waitcount - 1 + + self.assertNotEqual(waitcount, 0, 'Timed out waiting for CommandCompleted event from bitbake server') + self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server') + + @testcase(1576) + def test_setvariable_clean(self): + # First check that setVariable affects the datastore + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=True) + tinfoil.run_command('setVariable', 'TESTVAR', 'specialvalue') + self.assertEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is not reflected in client-side getVar()') + + # Now check that the setVariable's effects are no longer present + # (this may legitimately break in future if we stop reinitialising + # the datastore, in which case we'll have to reconsider use of + # setVariable entirely) + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=True) + self.assertNotEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is still present!') + + # Now check that setVar on the main datastore works (uses setVariable internally) + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=True) + tinfoil.config_data.setVar('TESTVAR', 'specialvalue') + value = tinfoil.run_command('getVariable', 'TESTVAR') + self.assertEqual(value, 'specialvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()') + + def test_datastore_operations(self): + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=True) + # Test setVarFlag() / getVarFlag() + tinfoil.config_data.setVarFlag('TESTVAR', 'flagname', 'flagval') + value = tinfoil.config_data.getVarFlag('TESTVAR', 'flagname') + self.assertEqual(value, 'flagval', 'Value set using config_data.setVarFlag() is not reflected in config_data.getVarFlag()') + # Test delVarFlag() + tinfoil.config_data.setVarFlag('TESTVAR', 'otherflag', 'othervalue') + tinfoil.config_data.delVarFlag('TESTVAR', 'flagname') + value = tinfoil.config_data.getVarFlag('TESTVAR', 'flagname') + self.assertEqual(value, None, 'Varflag deleted using config_data.delVarFlag() is not reflected in config_data.getVarFlag()') + value = tinfoil.config_data.getVarFlag('TESTVAR', 'otherflag') + 
self.assertEqual(value, 'othervalue', 'Varflag deleted using config_data.delVarFlag() caused unrelated flag to be removed') + # Test delVar() + tinfoil.config_data.setVar('TESTVAR', 'varvalue') + value = tinfoil.config_data.getVar('TESTVAR') + self.assertEqual(value, 'varvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()') + tinfoil.config_data.delVar('TESTVAR') + value = tinfoil.config_data.getVar('TESTVAR') + self.assertEqual(value, None, 'Variable deleted using config_data.delVar() appears to still have a value') + # Test renameVar() + tinfoil.config_data.setVar('TESTVAROLD', 'origvalue') + tinfoil.config_data.renameVar('TESTVAROLD', 'TESTVARNEW') + value = tinfoil.config_data.getVar('TESTVAROLD') + self.assertEqual(value, None, 'Variable renamed using config_data.renameVar() still seems to exist') + value = tinfoil.config_data.getVar('TESTVARNEW') + self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name') + # Test overrides + tinfoil.config_data.setVar('TESTVAR', 'original') + tinfoil.config_data.setVar('TESTVAR_overrideone', 'one') + tinfoil.config_data.setVar('TESTVAR_overridetwo', 'two') + tinfoil.config_data.appendVar('OVERRIDES', ':overrideone') + value = tinfoil.config_data.getVar('TESTVAR') + self.assertEqual(value, 'one', 'Variable overrides not functioning correctly') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/selftest/wic.py b/import-layers/yocto-poky/meta/lib/oeqa/selftest/wic.py index e652fad24..726af19e9 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/selftest/wic.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/selftest/wic.py @@ -24,42 +24,84 @@ """Test cases for wic.""" import os +import sys +import unittest from glob import glob from shutil import rmtree +from functools import wraps, lru_cache +from tempfile import NamedTemporaryFile from oeqa.selftest.base import oeSelfTest -from oeqa.utils.commands import runCmd, bitbake, get_bb_var, runqemu +from oeqa.utils.commands import runCmd, bitbake, get_bb_var, get_bb_vars, runqemu from oeqa.utils.decorators import testcase +@lru_cache(maxsize=32) +def get_host_arch(recipe): + """A cached call to get_bb_var('HOST_ARCH', )""" + return get_bb_var('HOST_ARCH', recipe) + + +def only_for_arch(archs, image='core-image-minimal'): + """Decorator for wrapping test cases that can be run only for specific target + architectures. A list of compatible architectures is passed in `archs`. + Current architecture will be determined by parsing bitbake output for + `image` recipe. 
+ """ + def wrapper(func): + @wraps(func) + def wrapped_f(*args, **kwargs): + arch = get_host_arch(image) + if archs and arch not in archs: + raise unittest.SkipTest("Testcase arch dependency not met: %s" % arch) + return func(*args, **kwargs) + wrapped_f.__name__ = func.__name__ + return wrapped_f + return wrapper + + class Wic(oeSelfTest): """Wic test class.""" - resultdir = "/var/tmp/wic/build/" + resultdir = "/var/tmp/wic.oe-selftest/" image_is_ready = False + native_sysroot = None + wicenv_cache = {} def setUpLocal(self): """This code is executed before each test method.""" - self.write_config('IMAGE_FSTYPES += " hddimg"\n' - 'MACHINE_FEATURES_append = " efi"\n' - 'WKS_FILE = "wic-image-minimal"\n') + if not self.native_sysroot: + Wic.native_sysroot = get_bb_var('STAGING_DIR_NATIVE', 'wic-tools') # Do this here instead of in setUpClass as the base setUp does some # clean up which can result in the native tools built earlier in # setUpClass being unavailable. if not Wic.image_is_ready: - bitbake('syslinux syslinux-native parted-native gptfdisk-native ' - 'dosfstools-native mtools-native bmap-tools-native') + if get_bb_var('USE_NLS') == 'yes': + bitbake('wic-tools') + else: + self.skipTest('wic-tools cannot be built due its (intltool|gettext)-native dependency and NLS disable') + bitbake('core-image-minimal') Wic.image_is_ready = True rmtree(self.resultdir, ignore_errors=True) + def tearDownLocal(self): + """Remove resultdir as it may contain images.""" + rmtree(self.resultdir, ignore_errors=True) + + @testcase(1552) + def test_version(self): + """Test wic --version""" + self.assertEqual(0, runCmd('wic --version').status) + @testcase(1208) def test_help(self): - """Test wic --help""" + """Test wic --help and wic -h""" self.assertEqual(0, runCmd('wic --help').status) + self.assertEqual(0, runCmd('wic -h').status) @testcase(1209) def test_createhelp(self): @@ -71,44 +113,15 @@ class Wic(oeSelfTest): """Test wic list --help""" self.assertEqual(0, runCmd('wic list --help').status) - @testcase(1211) - def test_build_image_name(self): - """Test wic create directdisk --image-name core-image-minimal""" - self.assertEqual(0, runCmd("wic create directdisk " - "--image-name core-image-minimal").status) - self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) - - @testcase(1212) - def test_build_artifacts(self): - """Test wic create directdisk providing all artifacts.""" - bbvars = dict((var.lower(), get_bb_var(var, 'core-image-minimal')) \ - for var in ('STAGING_DATADIR', 'DEPLOY_DIR_IMAGE', - 'STAGING_DIR_NATIVE', 'IMAGE_ROOTFS')) - status = runCmd("wic create directdisk " - "-b %(staging_datadir)s " - "-k %(deploy_dir_image)s " - "-n %(staging_dir_native)s " - "-r %(image_rootfs)s" % bbvars).status - self.assertEqual(0, status) - self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) - - @testcase(1157) - def test_gpt_image(self): - """Test creation of core-image-minimal with gpt table and UUID boot""" - self.assertEqual(0, runCmd("wic create directdisk-gpt " - "--image-name core-image-minimal").status) - self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) - - @testcase(1213) - def test_unsupported_subcommand(self): - """Test unsupported subcommand""" - self.assertEqual(1, runCmd('wic unsupported', - ignore_status=True).status) + @testcase(1553) + def test_help_create(self): + """Test wic help create""" + self.assertEqual(0, runCmd('wic help create').status) - @testcase(1214) - def test_no_command(self): - """Test wic without command""" - 
self.assertEqual(1, runCmd('wic', ignore_status=True).status) + @testcase(1554) + def test_help_list(self): + """Test wic help list""" + self.assertEqual(0, runCmd('wic help list').status) @testcase(1215) def test_help_overview(self): @@ -125,94 +138,418 @@ class Wic(oeSelfTest): """Test wic help kickstart""" self.assertEqual(0, runCmd('wic help kickstart').status) + @testcase(1555) + def test_list_images(self): + """Test wic list images""" + self.assertEqual(0, runCmd('wic list images').status) + + @testcase(1556) + def test_list_source_plugins(self): + """Test wic list source-plugins""" + self.assertEqual(0, runCmd('wic list source-plugins').status) + + @testcase(1557) + def test_listed_images_help(self): + """Test wic listed images help""" + output = runCmd('wic list images').output + imagelist = [line.split()[0] for line in output.splitlines()] + for image in imagelist: + self.assertEqual(0, runCmd('wic list %s help' % image).status) + + @testcase(1213) + def test_unsupported_subcommand(self): + """Test unsupported subcommand""" + self.assertEqual(1, runCmd('wic unsupported', + ignore_status=True).status) + + @testcase(1214) + def test_no_command(self): + """Test wic without command""" + self.assertEqual(1, runCmd('wic', ignore_status=True).status) + + @testcase(1211) + def test_build_image_name(self): + """Test wic create wictestdisk --image-name=core-image-minimal""" + cmd = "wic create wictestdisk --image-name=core-image-minimal -o %s" % self.resultdir + self.assertEqual(0, runCmd(cmd).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) + + @testcase(1157) + @only_for_arch(['i586', 'i686', 'x86_64']) + def test_gpt_image(self): + """Test creation of core-image-minimal with gpt table and UUID boot""" + cmd = "wic create directdisk-gpt --image-name core-image-minimal -o %s" % self.resultdir + self.assertEqual(0, runCmd(cmd).status) + self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) + + @testcase(1346) + @only_for_arch(['i586', 'i686', 'x86_64']) + def test_iso_image(self): + """Test creation of hybrid iso image with legacy and EFI boot""" + config = 'INITRAMFS_IMAGE = "core-image-minimal-initramfs"\n'\ + 'MACHINE_FEATURES_append = " efi"\n' + self.append_config(config) + bitbake('core-image-minimal') + self.remove_config(config) + cmd = "wic create mkhybridiso --image-name core-image-minimal -o %s" % self.resultdir + self.assertEqual(0, runCmd(cmd).status) + self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.direct"))) + self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.iso"))) + + @testcase(1348) + @only_for_arch(['i586', 'i686', 'x86_64']) + def test_qemux86_directdisk(self): + """Test creation of qemux-86-directdisk image""" + cmd = "wic create qemux86-directdisk -e core-image-minimal -o %s" % self.resultdir + self.assertEqual(0, runCmd(cmd).status) + self.assertEqual(1, len(glob(self.resultdir + "qemux86-directdisk-*direct"))) + + @testcase(1350) + @only_for_arch(['i586', 'i686', 'x86_64']) + def test_mkefidisk(self): + """Test creation of mkefidisk image""" + cmd = "wic create mkefidisk -e core-image-minimal -o %s" % self.resultdir + self.assertEqual(0, runCmd(cmd).status) + self.assertEqual(1, len(glob(self.resultdir + "mkefidisk-*direct"))) + + @testcase(1385) + @only_for_arch(['i586', 'i686', 'x86_64']) + def test_bootloader_config(self): + """Test creation of directdisk-bootloader-config image""" + cmd = "wic create directdisk-bootloader-config -e core-image-minimal -o %s" % self.resultdir + 
self.assertEqual(0, runCmd(cmd).status) + self.assertEqual(1, len(glob(self.resultdir + "directdisk-bootloader-config-*direct"))) + + @testcase(1560) + @only_for_arch(['i586', 'i686', 'x86_64']) + def test_systemd_bootdisk(self): + """Test creation of systemd-bootdisk image""" + config = 'MACHINE_FEATURES_append = " efi"\n' + self.append_config(config) + bitbake('core-image-minimal') + self.remove_config(config) + cmd = "wic create systemd-bootdisk -e core-image-minimal -o %s" % self.resultdir + self.assertEqual(0, runCmd(cmd).status) + self.assertEqual(1, len(glob(self.resultdir + "systemd-bootdisk-*direct"))) + + @testcase(1561) + def test_sdimage_bootpart(self): + """Test creation of sdimage-bootpart image""" + cmd = "wic create sdimage-bootpart -e core-image-minimal -o %s" % self.resultdir + kimgtype = get_bb_var('KERNEL_IMAGETYPE', 'core-image-minimal') + self.write_config('IMAGE_BOOT_FILES = "%s"\n' % kimgtype) + self.assertEqual(0, runCmd(cmd).status) + self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct"))) + + @testcase(1562) + @only_for_arch(['i586', 'i686', 'x86_64']) + def test_default_output_dir(self): + """Test default output location""" + for fname in glob("directdisk-*.direct"): + os.remove(fname) + cmd = "wic create directdisk -e core-image-minimal" + self.assertEqual(0, runCmd(cmd).status) + self.assertEqual(1, len(glob("directdisk-*.direct"))) + + @testcase(1212) + @only_for_arch(['i586', 'i686', 'x86_64']) + def test_build_artifacts(self): + """Test wic create directdisk providing all artifacts.""" + bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], + 'wic-tools') + bb_vars.update(get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_ROOTFS'], + 'core-image-minimal')) + bbvars = {key.lower(): value for key, value in bb_vars.items()} + bbvars['resultdir'] = self.resultdir + status = runCmd("wic create directdisk " + "-b %(staging_datadir)s " + "-k %(deploy_dir_image)s " + "-n %(recipe_sysroot_native)s " + "-r %(image_rootfs)s " + "-o %(resultdir)s" % bbvars).status + self.assertEqual(0, status) + self.assertEqual(1, len(glob(self.resultdir + "directdisk-*.direct"))) + @testcase(1264) def test_compress_gzip(self): """Test compressing an image with gzip""" - self.assertEqual(0, runCmd("wic create directdisk " + self.assertEqual(0, runCmd("wic create wictestdisk " "--image-name core-image-minimal " - "-c gzip").status) - self.assertEqual(1, len(glob(self.resultdir + \ - "directdisk-*.direct.gz"))) + "-c gzip -o %s" % self.resultdir).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.gz"))) @testcase(1265) def test_compress_bzip2(self): """Test compressing an image with bzip2""" - self.assertEqual(0, runCmd("wic create directdisk " - "--image-name core-image-minimal " - "-c bzip2").status) - self.assertEqual(1, len(glob(self.resultdir + \ - "directdisk-*.direct.bz2"))) + self.assertEqual(0, runCmd("wic create wictestdisk " + "--image-name=core-image-minimal " + "-c bzip2 -o %s" % self.resultdir).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct.bz2"))) @testcase(1266) def test_compress_xz(self): """Test compressing an image with xz""" - self.assertEqual(0, runCmd("wic create directdisk " - "--image-name core-image-minimal " - "-c xz").status) - self.assertEqual(1, len(glob(self.resultdir + \ - "directdisk-*.direct.xz"))) + self.assertEqual(0, runCmd("wic create wictestdisk " + "--image-name=core-image-minimal " + "--compress-with=xz -o %s" % self.resultdir).status) + self.assertEqual(1, 
len(glob(self.resultdir + "wictestdisk-*.direct.xz"))) @testcase(1267) def test_wrong_compressor(self): """Test how wic breaks if wrong compressor is provided""" - self.assertEqual(2, runCmd("wic create directdisk " - "--image-name core-image-minimal " - "-c wrong", ignore_status=True).status) + self.assertEqual(2, runCmd("wic create wictestdisk " + "--image-name=core-image-minimal " + "-c wrong -o %s" % self.resultdir, + ignore_status=True).status) + + @testcase(1558) + def test_debug_short(self): + """Test -D option""" + self.assertEqual(0, runCmd("wic create wictestdisk " + "--image-name=core-image-minimal " + "-D -o %s" % self.resultdir).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) + + def test_debug_long(self): + """Test --debug option""" + self.assertEqual(0, runCmd("wic create wictestdisk " + "--image-name=core-image-minimal " + "--debug -o %s" % self.resultdir).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) + + @testcase(1563) + def test_skip_build_check_short(self): + """Test -s option""" + self.assertEqual(0, runCmd("wic create wictestdisk " + "--image-name=core-image-minimal " + "-s -o %s" % self.resultdir).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) + + def test_skip_build_check_long(self): + """Test --skip-build-check option""" + self.assertEqual(0, runCmd("wic create wictestdisk " + "--image-name=core-image-minimal " + "--skip-build-check " + "--outdir %s" % self.resultdir).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) + + @testcase(1564) + def test_build_rootfs_short(self): + """Test -f option""" + self.assertEqual(0, runCmd("wic create wictestdisk " + "--image-name=core-image-minimal " + "-f -o %s" % self.resultdir).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) + + def test_build_rootfs_long(self): + """Test --build-rootfs option""" + self.assertEqual(0, runCmd("wic create wictestdisk " + "--image-name=core-image-minimal " + "--build-rootfs " + "--outdir %s" % self.resultdir).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*.direct"))) @testcase(1268) + @only_for_arch(['i586', 'i686', 'x86_64']) def test_rootfs_indirect_recipes(self): """Test usage of rootfs plugin with rootfs recipes""" - wks = "directdisk-multi-rootfs" - self.assertEqual(0, runCmd("wic create %s " - "--image-name core-image-minimal " - "--rootfs rootfs1=core-image-minimal " - "--rootfs rootfs2=core-image-minimal" \ - % wks).status) - self.assertEqual(1, len(glob(self.resultdir + "%s*.direct" % wks))) + status = runCmd("wic create directdisk-multi-rootfs " + "--image-name=core-image-minimal " + "--rootfs rootfs1=core-image-minimal " + "--rootfs rootfs2=core-image-minimal " + "--outdir %s" % self.resultdir).status + self.assertEqual(0, status) + self.assertEqual(1, len(glob(self.resultdir + "directdisk-multi-rootfs*.direct"))) @testcase(1269) + @only_for_arch(['i586', 'i686', 'x86_64']) def test_rootfs_artifacts(self): """Test usage of rootfs plugin with rootfs paths""" - bbvars = dict((var.lower(), get_bb_var(var, 'core-image-minimal')) \ - for var in ('STAGING_DATADIR', 'DEPLOY_DIR_IMAGE', - 'STAGING_DIR_NATIVE', 'IMAGE_ROOTFS')) + bb_vars = get_bb_vars(['STAGING_DATADIR', 'RECIPE_SYSROOT_NATIVE'], + 'wic-tools') + bb_vars.update(get_bb_vars(['DEPLOY_DIR_IMAGE', 'IMAGE_ROOTFS'], + 'core-image-minimal')) + bbvars = {key.lower(): value for key, value in bb_vars.items()} bbvars['wks'] = 
"directdisk-multi-rootfs" + bbvars['resultdir'] = self.resultdir status = runCmd("wic create %(wks)s " - "-b %(staging_datadir)s " - "-k %(deploy_dir_image)s " - "-n %(staging_dir_native)s " + "--bootimg-dir=%(staging_datadir)s " + "--kernel-dir=%(deploy_dir_image)s " + "--native-sysroot=%(recipe_sysroot_native)s " "--rootfs-dir rootfs1=%(image_rootfs)s " - "--rootfs-dir rootfs2=%(image_rootfs)s" \ - % bbvars).status + "--rootfs-dir rootfs2=%(image_rootfs)s " + "--outdir %(resultdir)s" % bbvars).status + self.assertEqual(0, status) + self.assertEqual(1, len(glob(self.resultdir + "%(wks)s-*.direct" % bbvars))) + + def test_exclude_path(self): + """Test --exclude-path wks option.""" + + oldpath = os.environ['PATH'] + os.environ['PATH'] = get_bb_var("PATH", "wic-tools") + + try: + wks_file = 'temp.wks' + with open(wks_file, 'w') as wks: + rootfs_dir = get_bb_var('IMAGE_ROOTFS', 'core-image-minimal') + wks.write(""" +part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path usr +part /usr --source rootfs --ondisk mmcblk0 --fstype=ext4 --rootfs-dir %s/usr +part /etc --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path bin/ --rootfs-dir %s/usr""" + % (rootfs_dir, rootfs_dir)) + self.assertEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \ + % (wks_file, self.resultdir)).status) + + os.remove(wks_file) + wicout = glob(self.resultdir + "%s-*direct" % 'temp') + self.assertEqual(1, len(wicout)) + + wicimg = wicout[0] + + # verify partition size with wic + res = runCmd("parted -m %s unit b p 2>/dev/null" % wicimg) + self.assertEqual(0, res.status) + + # parse parted output which looks like this: + # BYT;\n + # /var/tmp/wic/build/tmpfwvjjkf_-201611101222-hda.direct:200MiB:file:512:512:msdos::;\n + # 1:0.00MiB:200MiB:200MiB:ext4::;\n + partlns = res.output.splitlines()[2:] + + self.assertEqual(3, len(partlns)) + + for part in [1, 2, 3]: + part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part) + partln = partlns[part-1].split(":") + self.assertEqual(7, len(partln)) + start = int(partln[1].rstrip("B")) / 512 + length = int(partln[3].rstrip("B")) / 512 + self.assertEqual(0, runCmd("dd if=%s of=%s skip=%d count=%d" % + (wicimg, part_file, start, length)).status) + + def extract_files(debugfs_output): + """ + extract file names from the output of debugfs -R 'ls -p', + which looks like this: + + /2/040755/0/0/.//\n + /2/040755/0/0/..//\n + /11/040700/0/0/lost+found^M//\n + /12/040755/1002/1002/run//\n + /13/040755/1002/1002/sys//\n + /14/040755/1002/1002/bin//\n + /80/040755/1002/1002/var//\n + /92/040755/1002/1002/tmp//\n + """ + # NOTE the occasional ^M in file names + return [line.split('/')[5].strip() for line in \ + debugfs_output.strip().split('/\n')] + + # Test partition 1, should contain the normal root directories, except + # /usr. + res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ + os.path.join(self.resultdir, "selftest_img.part1")) + self.assertEqual(0, res.status) + files = extract_files(res.output) + self.assertIn("etc", files) + self.assertNotIn("usr", files) + + # Partition 2, should contain common directories for /usr, not root + # directories. + res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ + os.path.join(self.resultdir, "selftest_img.part2")) + self.assertEqual(0, res.status) + files = extract_files(res.output) + self.assertNotIn("etc", files) + self.assertNotIn("usr", files) + self.assertIn("share", files) + + # Partition 3, should contain the same as partition 2, including the bin + # directory, but not the files inside it. 
+ res = runCmd("debugfs -R 'ls -p' %s 2>/dev/null" % \ + os.path.join(self.resultdir, "selftest_img.part3")) + self.assertEqual(0, res.status) + files = extract_files(res.output) + self.assertNotIn("etc", files) + self.assertNotIn("usr", files) + self.assertIn("share", files) + self.assertIn("bin", files) + res = runCmd("debugfs -R 'ls -p bin' %s 2>/dev/null" % \ + os.path.join(self.resultdir, "selftest_img.part3")) + self.assertEqual(0, res.status) + files = extract_files(res.output) + self.assertIn(".", files) + self.assertIn("..", files) + self.assertEqual(2, len(files)) + + for part in [1, 2, 3]: + part_file = os.path.join(self.resultdir, "selftest_img.part%d" % part) + os.remove(part_file) + + finally: + os.environ['PATH'] = oldpath + + def test_exclude_path_errors(self): + """Test --exclude-path wks option error handling.""" + wks_file = 'temp.wks' + + # Absolute argument. + with open(wks_file, 'w') as wks: + wks.write("part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path /usr") + self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \ + % (wks_file, self.resultdir), ignore_status=True).status) + os.remove(wks_file) + + # Argument pointing to parent directory. + with open(wks_file, 'w') as wks: + wks.write("part / --source rootfs --ondisk mmcblk0 --fstype=ext4 --exclude-path ././..") + self.assertNotEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \ + % (wks_file, self.resultdir), ignore_status=True).status) + os.remove(wks_file) + + @testcase(1496) + def test_bmap_short(self): + """Test generation of .bmap file -m option""" + cmd = "wic create wictestdisk -e core-image-minimal -m -o %s" % self.resultdir + status = runCmd(cmd).status self.assertEqual(0, status) - self.assertEqual(1, len(glob(self.resultdir + \ - "%(wks)s-*.direct" % bbvars))) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap"))) - @testcase(1346) - def test_iso_image(self): - """Test creation of hybrid iso image with legacy and EFI boot""" - self.assertEqual(0, runCmd("wic create mkhybridiso " - "--image-name core-image-minimal").status) - self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.direct"))) - self.assertEqual(1, len(glob(self.resultdir + "HYBRID_ISO_IMG-*.iso"))) + def test_bmap_long(self): + """Test generation of .bmap file --bmap option""" + cmd = "wic create wictestdisk -e core-image-minimal --bmap -o %s" % self.resultdir + status = runCmd(cmd).status + self.assertEqual(0, status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct.bmap"))) + + def _get_image_env_path(self, image): + """Generate and obtain the path to .env""" + if image not in self.wicenv_cache: + self.assertEqual(0, bitbake('%s -c do_rootfs_wicenv' % image).status) + bb_vars = get_bb_vars(['STAGING_DIR', 'MACHINE'], image) + stdir = bb_vars['STAGING_DIR'] + machine = bb_vars['MACHINE'] + self.wicenv_cache[image] = os.path.join(stdir, machine, 'imgdata') + return self.wicenv_cache[image] @testcase(1347) def test_image_env(self): """Test generation of .env files.""" image = 'core-image-minimal' - self.assertEqual(0, bitbake('%s -c do_rootfs_wicenv' % image).status) - stdir = get_bb_var('STAGING_DIR_TARGET', image) - imgdatadir = os.path.join(stdir, 'imgdata') + imgdatadir = self._get_image_env_path(image) - basename = get_bb_var('IMAGE_BASENAME', image) + bb_vars = get_bb_vars(['IMAGE_BASENAME', 
'WICVARS'], image) + basename = bb_vars['IMAGE_BASENAME'] self.assertEqual(basename, image) path = os.path.join(imgdatadir, basename) + '.env' self.assertTrue(os.path.isfile(path)) - wicvars = set(get_bb_var('WICVARS', image).split()) + wicvars = set(bb_vars['WICVARS'].split()) # filter out optional variables - wicvars = wicvars.difference(('HDDDIR', 'IMAGE_BOOT_FILES', - 'INITRD', 'ISODIR')) + wicvars = wicvars.difference(('DEPLOY_DIR_IMAGE', 'IMAGE_BOOT_FILES', + 'INITRD', 'INITRD_LIVE', 'ISODIR')) with open(path) as envfile: content = dict(line.split("=", 1) for line in envfile) # test if variables used by wic present in the .env file @@ -220,13 +557,41 @@ class Wic(oeSelfTest): self.assertTrue(var in content, "%s is not in .env file" % var) self.assertTrue(content[var]) + @testcase(1559) + def test_image_vars_dir_short(self): + """Test image vars directory selection -v option""" + image = 'core-image-minimal' + imgenvdir = self._get_image_env_path(image) + + self.assertEqual(0, runCmd("wic create wictestdisk " + "--image-name=%s -v %s -o %s" + % (image, imgenvdir, self.resultdir)).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) + + def test_image_vars_dir_long(self): + """Test image vars directory selection --vars option""" + image = 'core-image-minimal' + imgenvdir = self._get_image_env_path(image) + self.assertEqual(0, runCmd("wic create wictestdisk " + "--image-name=%s " + "--vars %s " + "--outdir %s" + % (image, imgenvdir, self.resultdir)).status) + self.assertEqual(1, len(glob(self.resultdir + "wictestdisk-*direct"))) + @testcase(1351) + @only_for_arch(['i586', 'i686', 'x86_64']) def test_wic_image_type(self): """Test building wic images by bitbake""" + config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ + 'MACHINE_FEATURES_append = " efi"\n' + self.append_config(config) self.assertEqual(0, bitbake('wic-image-minimal').status) + self.remove_config(config) - deploy_dir = get_bb_var('DEPLOY_DIR_IMAGE') - machine = get_bb_var('MACHINE') + bb_vars = get_bb_vars(['DEPLOY_DIR_IMAGE', 'MACHINE']) + deploy_dir = bb_vars['DEPLOY_DIR_IMAGE'] + machine = bb_vars['MACHINE'] prefix = os.path.join(deploy_dir, 'wic-image-minimal-%s.' 
% machine) # check if we have result image and manifests symlinks # pointing to existing files @@ -235,68 +600,193 @@ class Wic(oeSelfTest): self.assertTrue(os.path.islink(path)) self.assertTrue(os.path.isfile(os.path.realpath(path))) - @testcase(1348) - def test_qemux86_directdisk(self): - """Test creation of qemux-86-directdisk image""" - image = "qemux86-directdisk" - self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \ - % image).status) - self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image))) - - @testcase(1349) - def test_mkgummidisk(self): - """Test creation of mkgummidisk image""" - image = "mkgummidisk" - self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \ - % image).status) - self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image))) - - @testcase(1350) - def test_mkefidisk(self): - """Test creation of mkefidisk image""" - image = "mkefidisk" - self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \ - % image).status) - self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image))) - - @testcase(1385) - def test_directdisk_bootloader_config(self): - """Test creation of directdisk-bootloader-config image""" - image = "directdisk-bootloader-config" - self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \ - % image).status) - self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image))) - @testcase(1422) + @only_for_arch(['i586', 'i686', 'x86_64']) def test_qemu(self): """Test wic-image-minimal under qemu""" + config = 'IMAGE_FSTYPES += "wic"\nWKS_FILE = "wic-image-minimal"\n'\ + 'MACHINE_FEATURES_append = " efi"\n' + self.append_config(config) self.assertEqual(0, bitbake('wic-image-minimal').status) + self.remove_config(config) with runqemu('wic-image-minimal', ssh=False) as qemu: - command = "mount |grep '^/dev/' | cut -f1,3 -d ' '" - status, output = qemu.run_serial(command) - self.assertEqual(1, status, 'Failed to run command "%s": %s' % (command, output)) - self.assertEqual(output, '/dev/root /\r\n/dev/vda3 /mnt') - - def test_bmap(self): - """Test generation of .bmap file""" - image = "directdisk" - status = runCmd("wic create %s -e core-image-minimal --bmap" % image).status - self.assertEqual(0, status) - self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image))) - self.assertEqual(1, len(glob(self.resultdir + "%s-*direct.bmap" % image))) - - def test_systemd_bootdisk(self): - """Test creation of systemd-bootdisk image""" - image = "systemd-bootdisk" - self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \ - % image).status) - self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image))) - - def test_sdimage_bootpart(self): - """Test creation of sdimage-bootpart image""" - image = "sdimage-bootpart" - self.write_config('IMAGE_BOOT_FILES = "bzImage"\n') - self.assertEqual(0, runCmd("wic create %s -e core-image-minimal" \ - % image).status) - self.assertEqual(1, len(glob(self.resultdir + "%s-*direct" % image))) + cmd = "mount |grep '^/dev/' | cut -f1,3 -d ' '" + status, output = qemu.run_serial(cmd) + self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) + self.assertEqual(output, '/dev/root /\r\n/dev/sda3 /mnt') + + @only_for_arch(['i586', 'i686', 'x86_64']) + def test_qemu_efi(self): + """Test core-image-minimal efi image under qemu""" + config = 'IMAGE_FSTYPES = "wic"\nWKS_FILE = "mkefidisk.wks"\n' + self.append_config(config) + self.assertEqual(0, bitbake('core-image-minimal ovmf').status) + self.remove_config(config) + + with 
runqemu('core-image-minimal', ssh=False, + runqemuparams='ovmf', image_fstype='wic') as qemu: + cmd = "grep sda. /proc/partitions |wc -l" + status, output = qemu.run_serial(cmd) + self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) + self.assertEqual(output, '3') + + @staticmethod + def _make_fixed_size_wks(size): + """ + Create a wks of an image with a single partition. Size of the partition is set + using --fixed-size flag. Returns a tuple: (path to wks file, wks image name) + """ + with NamedTemporaryFile("w", suffix=".wks", delete=False) as tempf: + wkspath = tempf.name + tempf.write("part " \ + "--source rootfs --ondisk hda --align 4 --fixed-size %d " + "--fstype=ext4\n" % size) + wksname = os.path.splitext(os.path.basename(wkspath))[0] + + return wkspath, wksname + + def test_fixed_size(self): + """ + Test creation of a simple image with partition size controlled through + --fixed-size flag + """ + wkspath, wksname = Wic._make_fixed_size_wks(200) + + self.assertEqual(0, runCmd("wic create %s -e core-image-minimal -o %s" \ + % (wkspath, self.resultdir)).status) + os.remove(wkspath) + wicout = glob(self.resultdir + "%s-*direct" % wksname) + self.assertEqual(1, len(wicout)) + + wicimg = wicout[0] + + # verify partition size with wic + res = runCmd("parted -m %s unit mib p 2>/dev/null" % wicimg, + ignore_status=True, + native_sysroot=self.native_sysroot) + self.assertEqual(0, res.status) + + # parse parted output which looks like this: + # BYT;\n + # /var/tmp/wic/build/tmpfwvjjkf_-201611101222-hda.direct:200MiB:file:512:512:msdos::;\n + # 1:0.00MiB:200MiB:200MiB:ext4::;\n + partlns = res.output.splitlines()[2:] + + self.assertEqual(1, len(partlns)) + self.assertEqual("1:0.00MiB:200MiB:200MiB:ext4::;", partlns[0]) + + def test_fixed_size_error(self): + """ + Test creation of a simple image with partition size controlled through + --fixed-size flag. The size of partition is intentionally set to 1MiB + in order to trigger an error in wic. + """ + wkspath, wksname = Wic._make_fixed_size_wks(1) + + self.assertEqual(1, runCmd("wic create %s -e core-image-minimal -o %s" \ + % (wkspath, self.resultdir), ignore_status=True).status) + os.remove(wkspath) + wicout = glob(self.resultdir + "%s-*direct" % wksname) + self.assertEqual(0, len(wicout)) + + @only_for_arch(['i586', 'i686', 'x86_64']) + def test_rawcopy_plugin_qemu(self): + """Test rawcopy plugin in qemu""" + # build ext4 and wic images + for fstype in ("ext4", "wic"): + config = 'IMAGE_FSTYPES = "%s"\nWKS_FILE = "test_rawcopy_plugin.wks.in"\n' % fstype + self.append_config(config) + self.assertEqual(0, bitbake('core-image-minimal').status) + self.remove_config(config) + + with runqemu('core-image-minimal', ssh=False, image_fstype='wic') as qemu: + cmd = "grep sda. 
/proc/partitions |wc -l" + status, output = qemu.run_serial(cmd) + self.assertEqual(1, status, 'Failed to run command "%s": %s' % (cmd, output)) + self.assertEqual(output, '2') + + def test_rawcopy_plugin(self): + """Test rawcopy plugin""" + img = 'core-image-minimal' + machine = get_bb_var('MACHINE', img) + with NamedTemporaryFile("w", suffix=".wks") as wks: + wks.writelines(['part /boot --active --source bootimg-pcbios\n', + 'part / --source rawcopy --sourceparams="file=%s-%s.ext4" --use-uuid\n'\ + % (img, machine), + 'bootloader --timeout=0 --append="console=ttyS0,115200n8"\n']) + wks.flush() + cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) + self.assertEqual(0, runCmd(cmd).status) + wksname = os.path.splitext(os.path.basename(wks.name))[0] + out = glob(self.resultdir + "%s-*direct" % wksname) + self.assertEqual(1, len(out)) + + def test_fs_types(self): + """Test filesystem types for empty and not empty partitions""" + img = 'core-image-minimal' + with NamedTemporaryFile("w", suffix=".wks") as wks: + wks.writelines(['part ext2 --fstype ext2 --source rootfs\n', + 'part btrfs --fstype btrfs --source rootfs --size 40M\n', + 'part squash --fstype squashfs --source rootfs\n', + 'part swap --fstype swap --size 1M\n', + 'part emptyvfat --fstype vfat --size 1M\n', + 'part emptymsdos --fstype msdos --size 1M\n', + 'part emptyext2 --fstype ext2 --size 1M\n', + 'part emptybtrfs --fstype btrfs --size 100M\n']) + wks.flush() + cmd = "wic create %s -e %s -o %s" % (wks.name, img, self.resultdir) + self.assertEqual(0, runCmd(cmd).status) + wksname = os.path.splitext(os.path.basename(wks.name))[0] + out = glob(self.resultdir + "%s-*direct" % wksname) + self.assertEqual(1, len(out)) + + def test_kickstart_parser(self): + """Test wks parser options""" + with NamedTemporaryFile("w", suffix=".wks") as wks: + wks.writelines(['part / --fstype ext3 --source rootfs --system-id 0xFF '\ + '--overhead-factor 1.2 --size 100k\n']) + wks.flush() + cmd = "wic create %s -e core-image-minimal -o %s" % (wks.name, self.resultdir) + self.assertEqual(0, runCmd(cmd).status) + wksname = os.path.splitext(os.path.basename(wks.name))[0] + out = glob(self.resultdir + "%s-*direct" % wksname) + self.assertEqual(1, len(out)) + + def test_image_bootpart_globbed(self): + """Test globbed sources with image-bootpart plugin""" + img = "core-image-minimal" + cmd = "wic create sdimage-bootpart -e %s -o %s" % (img, self.resultdir) + config = 'IMAGE_BOOT_FILES = "%s*"' % get_bb_var('KERNEL_IMAGETYPE', img) + self.append_config(config) + self.assertEqual(0, runCmd(cmd).status) + self.remove_config(config) + self.assertEqual(1, len(glob(self.resultdir + "sdimage-bootpart-*direct"))) + + def test_sparse_copy(self): + """Test sparse_copy with FIEMAP and SEEK_HOLE filemap APIs""" + libpath = os.path.join(get_bb_var('COREBASE'), 'scripts', 'lib', 'wic') + sys.path.insert(0, libpath) + from filemap import FilemapFiemap, FilemapSeek, sparse_copy, ErrorNotSupp + with NamedTemporaryFile("w", suffix=".wic-sparse") as sparse: + src_name = sparse.name + src_size = 1024 * 10 + sparse.truncate(src_size) + # write one byte to the file + with open(src_name, 'r+b') as sfile: + sfile.seek(1024 * 4) + sfile.write(b'\x00') + dest = sparse.name + '.out' + # copy src file to dest using different filemap APIs + for api in (FilemapFiemap, FilemapSeek, None): + if os.path.exists(dest): + os.unlink(dest) + try: + sparse_copy(sparse.name, dest, api=api) + except ErrorNotSupp: + continue # skip unsupported API + dest_stat = os.stat(dest) + 
self.assertEqual(dest_stat.st_size, src_size) + # 8 blocks is 4K (physical sector size) + self.assertEqual(dest_stat.st_blocks, 8) + os.unlink(dest) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/targetcontrol.py b/import-layers/yocto-poky/meta/lib/oeqa/targetcontrol.py index 24669f461..3255e3a5c 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/targetcontrol.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/targetcontrol.py @@ -18,8 +18,10 @@ from oeqa.utils.dump import TargetDumper from oeqa.controllers.testtargetloader import TestTargetLoader from abc import ABCMeta, abstractmethod +logger = logging.getLogger('BitBake.QemuRunner') + def get_target_controller(d): - testtarget = d.getVar("TEST_TARGET", True) + testtarget = d.getVar("TEST_TARGET") # old, simple names if testtarget == "qemu": return QemuTarget(d) @@ -33,7 +35,7 @@ def get_target_controller(d): except AttributeError: # nope, perhaps a layer defined one try: - bbpath = d.getVar("BBPATH", True).split(':') + bbpath = d.getVar("BBPATH").split(':') testtargetloader = TestTargetLoader() controller = testtargetloader.get_controller_module(testtarget, bbpath) except ImportError as e: @@ -51,9 +53,9 @@ class BaseTarget(object, metaclass=ABCMeta): self.connection = None self.ip = None self.server_ip = None - self.datetime = d.getVar('DATETIME', True) - self.testdir = d.getVar("TEST_LOG_DIR", True) - self.pn = d.getVar("PN", True) + self.datetime = d.getVar('DATETIME') + self.testdir = d.getVar("TEST_LOG_DIR") + self.pn = d.getVar("PN") @abstractmethod def deploy(self): @@ -63,7 +65,7 @@ class BaseTarget(object, metaclass=ABCMeta): if os.path.islink(sshloglink): os.unlink(sshloglink) os.symlink(self.sshlog, sshloglink) - bb.note("SSH log file: %s" % self.sshlog) + logger.info("SSH log file: %s" % self.sshlog) @abstractmethod def start(self, params=None, ssh=True, extra_bootparams=None): @@ -80,7 +82,7 @@ class BaseTarget(object, metaclass=ABCMeta): @classmethod def match_image_fstype(self, d, image_fstypes=None): if not image_fstypes: - image_fstypes = d.getVar('IMAGE_FSTYPES', True).split(' ') + image_fstypes = d.getVar('IMAGE_FSTYPES').split(' ') possible_image_fstypes = [fstype for fstype in self.supported_image_fstypes if fstype in image_fstypes] if possible_image_fstypes: return possible_image_fstypes[0] @@ -113,20 +115,26 @@ class QemuTarget(BaseTarget): supported_image_fstypes = ['ext3', 'ext4', 'cpio.gz', 'wic'] - def __init__(self, d): + def __init__(self, d, image_fstype=None): super(QemuTarget, self).__init__(d) - self.image_fstype = self.get_image_fstype(d) + self.rootfs = '' + self.kernel = '' + self.image_fstype = '' + + if d.getVar('FIND_ROOTFS') == '1': + self.image_fstype = image_fstype or self.get_image_fstype(d) + self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("IMAGE_LINK_NAME") + '.' + self.image_fstype) + self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE"), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin') self.qemulog = os.path.join(self.testdir, "qemu_boot_log.%s" % self.datetime) - self.rootfs = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("IMAGE_LINK_NAME", True) + '.' 
+ self.image_fstype) - self.kernel = os.path.join(d.getVar("DEPLOY_DIR_IMAGE", True), d.getVar("KERNEL_IMAGETYPE", False) + '-' + d.getVar('MACHINE', False) + '.bin') - dump_target_cmds = d.getVar("testimage_dump_target", True) - dump_host_cmds = d.getVar("testimage_dump_host", True) - dump_dir = d.getVar("TESTIMAGE_DUMP_DIR", True) - if d.getVar("QEMU_USE_KVM", False) is not None \ - and d.getVar("QEMU_USE_KVM", False) == "True" \ - and "x86" in d.getVar("MACHINE", True): + dump_target_cmds = d.getVar("testimage_dump_target") + dump_host_cmds = d.getVar("testimage_dump_host") + dump_dir = d.getVar("TESTIMAGE_DUMP_DIR") + qemu_use_kvm = d.getVar("QEMU_USE_KVM") + if qemu_use_kvm and \ + (qemu_use_kvm == "True" and "x86" in d.getVar("MACHINE") or \ + d.getVar("MACHINE") in qemu_use_kvm.split()): use_kvm = True else: use_kvm = False @@ -135,32 +143,31 @@ class QemuTarget(BaseTarget): import oe.path bb.utils.mkdirhier(self.testdir) self.qemurunnerlog = os.path.join(self.testdir, 'qemurunner_log.%s' % self.datetime) - logger = logging.getLogger('BitBake.QemuRunner') loggerhandler = logging.FileHandler(self.qemurunnerlog) loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s")) logger.addHandler(loggerhandler) oe.path.symlink(os.path.basename(self.qemurunnerlog), os.path.join(self.testdir, 'qemurunner_log'), force=True) - if d.getVar("DISTRO", True) == "poky-tiny": - self.runner = QemuTinyRunner(machine=d.getVar("MACHINE", True), + if d.getVar("DISTRO") == "poky-tiny": + self.runner = QemuTinyRunner(machine=d.getVar("MACHINE"), rootfs=self.rootfs, - tmpdir = d.getVar("TMPDIR", True), - deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE", True), - display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY", True), + tmpdir = d.getVar("TMPDIR"), + deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE"), + display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY"), logfile = self.qemulog, kernel = self.kernel, - boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT", True))) + boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT"))) else: - self.runner = QemuRunner(machine=d.getVar("MACHINE", True), + self.runner = QemuRunner(machine=d.getVar("MACHINE"), rootfs=self.rootfs, - tmpdir = d.getVar("TMPDIR", True), - deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE", True), - display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY", True), + tmpdir = d.getVar("TMPDIR"), + deploy_dir_image = d.getVar("DEPLOY_DIR_IMAGE"), + display = d.getVar("BB_ORIGENV", False).getVar("DISPLAY"), logfile = self.qemulog, - boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT", True)), + boottime = int(d.getVar("TEST_QEMUBOOT_TIMEOUT")), use_kvm = use_kvm, dump_dir = dump_dir, - dump_host_cmds = d.getVar("testimage_dump_host", True)) + dump_host_cmds = d.getVar("testimage_dump_host")) self.target_dumper = TargetDumper(dump_target_cmds, dump_dir, self.runner) @@ -172,12 +179,17 @@ class QemuTarget(BaseTarget): os.unlink(qemuloglink) os.symlink(self.qemulog, qemuloglink) - bb.note("rootfs file: %s" % self.rootfs) - bb.note("Qemu log file: %s" % self.qemulog) + logger.info("rootfs file: %s" % self.rootfs) + logger.info("Qemu log file: %s" % self.qemulog) super(QemuTarget, self).deploy() - def start(self, params=None, ssh=True, extra_bootparams=None): - if self.runner.start(params, get_ip=ssh, extra_bootparams=extra_bootparams): + def start(self, params=None, ssh=True, extra_bootparams='', runqemuparams='', launch_cmd='', discard_writes=True): + if launch_cmd: + start = self.runner.launch(get_ip=ssh, launch_cmd=launch_cmd) + else: + start = 
self.runner.start(params, get_ip=ssh, extra_bootparams=extra_bootparams, runqemuparams=runqemuparams, discard_writes=discard_writes) + + if start: if ssh: self.ip = self.runner.ip self.server_ip = self.runner.server_ip @@ -206,28 +218,28 @@ class QemuTarget(BaseTarget): else: raise bb.build.FuncFailed("%s - FAILED to re-start qemu - check the task log and the boot log" % self.pn) - def run_serial(self, command): - return self.runner.run_serial(command) + def run_serial(self, command, timeout=5): + return self.runner.run_serial(command, timeout=timeout) class SimpleRemoteTarget(BaseTarget): def __init__(self, d): super(SimpleRemoteTarget, self).__init__(d) - addr = d.getVar("TEST_TARGET_IP", True) or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.') + addr = d.getVar("TEST_TARGET_IP") or bb.fatal('Please set TEST_TARGET_IP with the IP address of the machine you want to run the tests on.') self.ip = addr.split(":")[0] try: self.port = addr.split(":")[1] except IndexError: self.port = None - bb.note("Target IP: %s" % self.ip) - self.server_ip = d.getVar("TEST_SERVER_IP", True) + logger.info("Target IP: %s" % self.ip) + self.server_ip = d.getVar("TEST_SERVER_IP") if not self.server_ip: try: self.server_ip = subprocess.check_output(['ip', 'route', 'get', self.ip ]).split("\n")[0].split()[-1] except Exception as e: bb.fatal("Failed to determine the host IP address (alternatively you can set TEST_SERVER_IP with the IP address of this machine): %s" % e) - bb.note("Server IP: %s" % self.server_ip) + logger.info("Server IP: %s" % self.server_ip) def deploy(self): super(SimpleRemoteTarget, self).deploy() diff --git a/import-layers/yocto-poky/meta/lib/oeqa/utils/__init__.py b/import-layers/yocto-poky/meta/lib/oeqa/utils/__init__.py index 8f706f363..485de031a 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/utils/__init__.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/utils/__init__.py @@ -36,3 +36,33 @@ def avoid_paths_in_environ(paths): new_path = new_path[:-1] return new_path + +def make_logger_bitbake_compatible(logger): + import logging + + """ + Bitbake logger redifines debug() in order to + set a level within debug, this breaks compatibility + with vainilla logging, so we neeed to redifine debug() + method again also add info() method with INFO + 1 level. 
+ """ + def _bitbake_log_debug(*args, **kwargs): + lvl = logging.DEBUG + + if isinstance(args[0], int): + lvl = args[0] + msg = args[1] + args = args[2:] + else: + msg = args[0] + args = args[1:] + + logger.log(lvl, msg, *args, **kwargs) + + def _bitbake_log_info(msg, *args, **kwargs): + logger.log(logging.INFO + 1, msg, *args, **kwargs) + + logger.debug = _bitbake_log_debug + logger.info = _bitbake_log_info + + return logger diff --git a/import-layers/yocto-poky/meta/lib/oeqa/utils/buildproject.py b/import-layers/yocto-poky/meta/lib/oeqa/utils/buildproject.py new file mode 100644 index 000000000..487f08be4 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/utils/buildproject.py @@ -0,0 +1,55 @@ +# Copyright (C) 2013-2016 Intel Corporation +# +# Released under the MIT license (see COPYING.MIT) + +# Provides a class for automating build tests for projects + +import os +import re +import subprocess +import shutil +import tempfile + +from abc import ABCMeta, abstractmethod + +class BuildProject(metaclass=ABCMeta): + def __init__(self, uri, foldername=None, tmpdir=None, dl_dir=None): + self.uri = uri + self.archive = os.path.basename(uri) + if not tmpdir: + tmpdir = tempfile.mkdtemp(prefix='buildproject') + self.localarchive = os.path.join(tmpdir, self.archive) + self.dl_dir = dl_dir + if foldername: + self.fname = foldername + else: + self.fname = re.sub(r'\.tar\.bz2$|\.tar\.gz$|\.tar\.xz$', '', self.archive) + + # Download self.archive to self.localarchive + def _download_archive(self): + if self.dl_dir and os.path.exists(os.path.join(self.dl_dir, self.archive)): + shutil.copyfile(os.path.join(self.dl_dir, self.archive), self.localarchive) + return + + cmd = "wget -O %s %s" % (self.localarchive, self.uri) + subprocess.check_output(cmd, shell=True) + + # This method should provide a way to run a command in the desired environment. + @abstractmethod + def _run(self, cmd): + pass + + # The timeout parameter of target.run is set to 0 to make the ssh command + # run with no timeout. 
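The new BuildProject class is abstract: it only knows how to fetch the source archive, while the _run() hook (and a targetdir attribute used by the run_configure()/run_make()/run_install() helpers that follow) is left to subclasses. A minimal host-side sketch is shown below, assuming meta/lib is on the Python path; the class name HostBuildProject and the use of subprocess are illustrative only, not part of this patch.

    import os
    import subprocess
    import tarfile

    from oeqa.utils.buildproject import BuildProject

    class HostBuildProject(BuildProject):
        """Illustrative subclass: unpack the archive and build it on the host."""

        def download_and_extract(self, workdir):
            # fetch self.archive into self.localarchive (dl_dir copy or wget)
            self._download_archive()
            with tarfile.open(self.localarchive) as tar:
                tar.extractall(workdir)
            # run_configure()/run_make() expect self.targetdir to point at the sources
            self.targetdir = os.path.join(workdir, self.fname)

        def _run(self, cmd):
            # The base class only requires that _run() executes a shell command
            # and returns its exit status.
            return subprocess.call(cmd, shell=True)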
+ def run_configure(self, configure_args='', extra_cmds=''): + return self._run('cd %s; gnu-configize; %s ./configure %s' % (self.targetdir, extra_cmds, configure_args)) + + def run_make(self, make_args=''): + return self._run('cd %s; make %s' % (self.targetdir, make_args)) + + def run_install(self, install_args=''): + return self._run('cd %s; make install %s' % (self.targetdir, install_args)) + + def clean(self): + self._run('rm -rf %s' % self.targetdir) + subprocess.call('rm -f %s' % self.localarchive, shell=True) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/utils/commands.py b/import-layers/yocto-poky/meta/lib/oeqa/utils/commands.py index 5cd0f7477..57286fcb1 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/utils/commands.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/utils/commands.py @@ -97,9 +97,17 @@ class Result(object): pass -def runCmd(command, ignore_status=False, timeout=None, assert_error=True, **options): +def runCmd(command, ignore_status=False, timeout=None, assert_error=True, + native_sysroot=None, limit_exc_output=0, **options): result = Result() + if native_sysroot: + extra_paths = "%s/sbin:%s/usr/sbin:%s/usr/bin" % \ + (native_sysroot, native_sysroot, native_sysroot) + nenv = dict(options.get('env', os.environ)) + nenv['PATH'] = extra_paths + ':' + nenv.get('PATH', '') + options['env'] = nenv + cmd = Command(command, timeout=timeout, **options) cmd.run() @@ -110,10 +118,16 @@ def runCmd(command, ignore_status=False, timeout=None, assert_error=True, **opti result.pid = cmd.process.pid if result.status and not ignore_status: + exc_output = result.output + if limit_exc_output > 0: + split = result.output.splitlines() + if len(split) > limit_exc_output: + exc_output = "\n... (last %d lines of output)\n" % limit_exc_output + \ + '\n'.join(split[-limit_exc_output:]) if assert_error: - raise AssertionError("Command '%s' returned non-zero exit status %d:\n%s" % (command, result.status, result.output)) + raise AssertionError("Command '%s' returned non-zero exit status %d:\n%s" % (command, result.status, exc_output)) else: - raise CommandError(result.status, command, result.output) + raise CommandError(result.status, command, exc_output) return result @@ -149,7 +163,9 @@ def get_bb_vars(variables=None, target=None, postconfig=None): """Get values of multiple bitbake variables""" bbenv = get_bb_env(target, postconfig=postconfig) - var_re = re.compile(r'^(export )?(?P\w+)="(?P.*)"$') + if variables is not None: + variables = variables.copy() + var_re = re.compile(r'^(export )?(?P\w+(_.*)?)="(?P.*)"$') unset_re = re.compile(r'^unset (?P\w+)$') lastline = None values = {} @@ -209,21 +225,30 @@ def create_temp_layer(templayerdir, templayername, priority=999, recipepathspec= @contextlib.contextmanager -def runqemu(pn, ssh=True): +def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True): + """ + launch_cmd means directly run the command, don't need set rootfs or env vars. 
+ """ import bb.tinfoil import bb.build tinfoil = bb.tinfoil.Tinfoil() - tinfoil.prepare(False) + tinfoil.prepare(config_only=False, quiet=True) try: tinfoil.logger.setLevel(logging.WARNING) import oeqa.targetcontrol tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage") tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000") - import oe.recipeutils - recipefile = oe.recipeutils.pn_to_recipe(tinfoil.cooker, pn) - recipedata = oe.recipeutils.parse_recipe(tinfoil.cooker, recipefile, []) + # Tell QemuTarget() whether need find rootfs/kernel or not + if launch_cmd: + tinfoil.config_data.setVar("FIND_ROOTFS", '0') + else: + tinfoil.config_data.setVar("FIND_ROOTFS", '1') + + recipedata = tinfoil.parse_recipe(pn) + for key, value in overrides.items(): + recipedata.setVar(key, value) # The QemuRunner log is saved out, but we need to ensure it is at the right # log level (and then ensure that since it's a child of the BitBake logger, @@ -231,9 +256,9 @@ def runqemu(pn, ssh=True): logger = logging.getLogger('BitBake.QemuRunner') logger.setLevel(logging.DEBUG) logger.propagate = False - logdir = recipedata.getVar("TEST_LOG_DIR", True) + logdir = recipedata.getVar("TEST_LOG_DIR") - qemu = oeqa.targetcontrol.QemuTarget(recipedata) + qemu = oeqa.targetcontrol.QemuTarget(recipedata, image_fstype) finally: # We need to shut down tinfoil early here in case we actually want # to run tinfoil-using utilities with the running QEMU instance. @@ -253,7 +278,7 @@ def runqemu(pn, ssh=True): try: qemu.deploy() try: - qemu.start(ssh=ssh) + qemu.start(params=qemuparams, ssh=ssh, runqemuparams=runqemuparams, launch_cmd=launch_cmd, discard_writes=discard_writes) except bb.build.FuncFailed: raise Exception('Failed to start QEMU - see the logs in %s' % logdir) diff --git a/import-layers/yocto-poky/meta/lib/oeqa/utils/decorators.py b/import-layers/yocto-poky/meta/lib/oeqa/utils/decorators.py index 25f9c54e6..d87689692 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/utils/decorators.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/utils/decorators.py @@ -172,18 +172,19 @@ def LogResults(original_class): #check status of tests and record it + tcid = self.id() for (name, msg) in result.errors: - if (self._testMethodName == str(name).split(' ')[0]) and (class_name in str(name).split(' ')[1]): + if tcid == name.id(): local_log.results("Testcase "+str(test_case)+": ERROR") local_log.results("Testcase "+str(test_case)+":\n"+msg) passed = False for (name, msg) in result.failures: - if (self._testMethodName == str(name).split(' ')[0]) and (class_name in str(name).split(' ')[1]): + if tcid == name.id(): local_log.results("Testcase "+str(test_case)+": FAILED") local_log.results("Testcase "+str(test_case)+":\n"+msg) passed = False for (name, msg) in result.skipped: - if (self._testMethodName == str(name).split(' ')[0]) and (class_name in str(name).split(' ')[1]): + if tcid == name.id(): local_log.results("Testcase "+str(test_case)+": SKIPPED") passed = False if passed: diff --git a/import-layers/yocto-poky/meta/lib/oeqa/utils/dump.py b/import-layers/yocto-poky/meta/lib/oeqa/utils/dump.py index 71422a9ae..5a7edc1a8 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/utils/dump.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/utils/dump.py @@ -5,12 +5,6 @@ import datetime import itertools from .commands import runCmd -def get_host_dumper(d): - cmds = d.getVar("testimage_dump_host", True) - parent_dir = d.getVar("TESTIMAGE_DUMP_DIR", True) - return HostDumper(cmds, parent_dir) - - class BaseDumper(object): """ Base 
class to dump commands from host/target """ @@ -77,13 +71,12 @@ class HostDumper(BaseDumper): result = runCmd(cmd, ignore_status=True) self._write_dump(cmd.split()[0], result.output) - class TargetDumper(BaseDumper): """ Class to get dumps from target, it only works with QemuRunner """ - def __init__(self, cmds, parent_dir, qemurunner): + def __init__(self, cmds, parent_dir, runner): super(TargetDumper, self).__init__(cmds, parent_dir) - self.runner = qemurunner + self.runner = runner def dump_target(self, dump_dir=""): if dump_dir: diff --git a/import-layers/yocto-poky/meta/lib/oeqa/utils/git.py b/import-layers/yocto-poky/meta/lib/oeqa/utils/git.py index ae85d2766..e0cb3f0db 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/utils/git.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/utils/git.py @@ -16,8 +16,17 @@ class GitError(Exception): class GitRepo(object): """Class representing a Git repository clone""" def __init__(self, path, is_topdir=False): - self.top_dir = self._run_git_cmd_at(['rev-parse', '--show-toplevel'], - path) + git_dir = self._run_git_cmd_at(['rev-parse', '--git-dir'], path) + git_dir = git_dir if os.path.isabs(git_dir) else os.path.join(path, git_dir) + self.git_dir = os.path.realpath(git_dir) + + if self._run_git_cmd_at(['rev-parse', '--is-bare-repository'], path) == 'true': + self.bare = True + self.top_dir = self.git_dir + else: + self.bare = False + self.top_dir = self._run_git_cmd_at(['rev-parse', '--show-toplevel'], + path) realpath = os.path.realpath(path) if is_topdir and realpath != self.top_dir: raise GitError("{} is not a Git top directory".format(realpath)) @@ -36,9 +45,12 @@ class GitRepo(object): return ret.output.strip() @staticmethod - def init(path): + def init(path, bare=False): """Initialize a new Git repository""" - GitRepo._run_git_cmd_at('init', cwd=path) + cmd = ['init'] + if bare: + cmd.append('--bare') + GitRepo._run_git_cmd_at(cmd, cwd=path) return GitRepo(path, is_topdir=True) def run_cmd(self, git_args, env_update=None): diff --git a/import-layers/yocto-poky/meta/lib/oeqa/utils/metadata.py b/import-layers/yocto-poky/meta/lib/oeqa/utils/metadata.py new file mode 100644 index 000000000..cb81155e5 --- /dev/null +++ b/import-layers/yocto-poky/meta/lib/oeqa/utils/metadata.py @@ -0,0 +1,118 @@ +# Copyright (C) 2016 Intel Corporation +# +# Released under the MIT license (see COPYING.MIT) +# +# Functions to get metadata from the testing host used +# for analytics of test results. + +from collections import OrderedDict +from collections.abc import MutableMapping +from xml.dom.minidom import parseString +from xml.etree.ElementTree import Element, tostring + +from oeqa.utils.commands import runCmd, get_bb_vars + +def get_os_release(): + """Get info from /etc/os-release as a dict""" + data = OrderedDict() + os_release_file = '/etc/os-release' + if not os.path.exists(os_release_file): + return None + with open(os_release_file) as fobj: + for line in fobj: + key, value = line.split('=', 1) + data[key.strip().lower()] = value.strip().strip('"') + return data + +def metadata_from_bb(): + """ Returns test's metadata as OrderedDict. + + Data will be gathered using bitbake -e thanks to get_bb_vars. 
+ """ + metadata_config_vars = ('MACHINE', 'BB_NUMBER_THREADS', 'PARALLEL_MAKE') + + info_dict = OrderedDict() + hostname = runCmd('hostname') + info_dict['hostname'] = hostname.output + data_dict = get_bb_vars() + + # Distro information + info_dict['distro'] = {'id': data_dict['DISTRO'], + 'version_id': data_dict['DISTRO_VERSION'], + 'pretty_name': '%s %s' % (data_dict['DISTRO'], data_dict['DISTRO_VERSION'])} + + # Host distro information + os_release = get_os_release() + if os_release: + info_dict['host_distro'] = OrderedDict() + for key in ('id', 'version_id', 'pretty_name'): + if key in os_release: + info_dict['host_distro'][key] = os_release[key] + + info_dict['layers'] = get_layers(data_dict['BBLAYERS']) + info_dict['bitbake'] = git_rev_info(os.path.dirname(bb.__file__)) + + info_dict['config'] = OrderedDict() + for var in sorted(metadata_config_vars): + info_dict['config'][var] = data_dict[var] + return info_dict + +def metadata_from_data_store(d): + """ Returns test's metadata as OrderedDict. + + Data will be collected from the provided data store. + """ + # TODO: Getting metadata from the data store would + # be useful when running within bitbake. + pass + +def git_rev_info(path): + """Get git revision information as a dict""" + from git import Repo, InvalidGitRepositoryError, NoSuchPathError + + info = OrderedDict() + try: + repo = Repo(path, search_parent_directories=True) + except (InvalidGitRepositoryError, NoSuchPathError): + return info + info['commit'] = repo.head.commit.hexsha + info['commit_count'] = repo.head.commit.count() + try: + info['branch'] = repo.active_branch.name + except TypeError: + info['branch'] = '(nobranch)' + return info + +def get_layers(layers): + """Returns layer information in dict format""" + layer_dict = OrderedDict() + for layer in layers.split(): + layer_name = os.path.basename(layer) + layer_dict[layer_name] = git_rev_info(layer) + return layer_dict + +def write_metadata_file(file_path, metadata): + """ Writes metadata to a XML file in directory. """ + + xml = dict_to_XML('metadata', metadata) + xml_doc = parseString(tostring(xml).decode('UTF-8')) + with open(file_path, 'w') as f: + f.write(xml_doc.toprettyxml()) + +def dict_to_XML(tag, dictionary, **kwargs): + """ Return XML element converting dicts recursively. """ + + elem = Element(tag, **kwargs) + for key, val in dictionary.items(): + if tag == 'layers': + child = (dict_to_XML('layer', val, name=key)) + elif isinstance(val, MutableMapping): + child = (dict_to_XML(key, val)) + else: + if tag == 'config': + child = Element('variable', name=key) + else: + child = Element(key) + child.text = str(val) + elem.append(child) + return elem diff --git a/import-layers/yocto-poky/meta/lib/oeqa/utils/package_manager.py b/import-layers/yocto-poky/meta/lib/oeqa/utils/package_manager.py index 099ecc972..724afb2b5 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/utils/package_manager.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/utils/package_manager.py @@ -1,29 +1,210 @@ +import os +import json +import shutil + +from oeqa.core.utils.test import getCaseFile, getCaseMethod + def get_package_manager(d, root_path): """ Returns an OE package manager that can install packages in root_path. 
""" from oe.package_manager import RpmPM, OpkgPM, DpkgPM - pkg_class = d.getVar("IMAGE_PKGTYPE", True) + pkg_class = d.getVar("IMAGE_PKGTYPE") if pkg_class == "rpm": pm = RpmPM(d, root_path, - d.getVar('TARGET_VENDOR', True)) + d.getVar('TARGET_VENDOR')) pm.create_configs() elif pkg_class == "ipk": pm = OpkgPM(d, root_path, - d.getVar("IPKGCONF_TARGET", True), - d.getVar("ALL_MULTILIB_PACKAGE_ARCHS", True)) + d.getVar("IPKGCONF_TARGET"), + d.getVar("ALL_MULTILIB_PACKAGE_ARCHS")) elif pkg_class == "deb": pm = DpkgPM(d, root_path, - d.getVar('PACKAGE_ARCHS', True), - d.getVar('DPKG_ARCH', True)) + d.getVar('PACKAGE_ARCHS'), + d.getVar('DPKG_ARCH')) pm.write_index() pm.update() return pm + +def find_packages_to_extract(test_suite): + """ + Returns packages to extract required by runtime tests. + """ + from oeqa.core.utils.test import getSuiteCasesFiles + + needed_packages = {} + files = getSuiteCasesFiles(test_suite) + + for f in set(files): + json_file = _get_json_file(f) + if json_file: + needed_packages.update(_get_needed_packages(json_file)) + + return needed_packages + +def _get_json_file(module_path): + """ + Returns the path of the JSON file for a module, empty if doesn't exitst. + """ + + json_file = '%s.json' % module_path.rsplit('.', 1)[0] + if os.path.isfile(module_path) and os.path.isfile(json_file): + return json_file + else: + return '' + +def _get_needed_packages(json_file, test=None): + """ + Returns a dict with needed packages based on a JSON file. + + If a test is specified it will return the dict just for that test. + """ + needed_packages = {} + + with open(json_file) as f: + test_packages = json.load(f) + for key,value in test_packages.items(): + needed_packages[key] = value + + if test: + if test in needed_packages: + needed_packages = needed_packages[test] + else: + needed_packages = {} + + return needed_packages + +def extract_packages(d, needed_packages): + """ + Extract packages that will be needed during runtime. + """ + + import bb + import oe.path + + extracted_path = d.getVar('TEST_EXTRACTED_DIR') + + for key,value in needed_packages.items(): + packages = () + if isinstance(value, dict): + packages = (value, ) + elif isinstance(value, list): + packages = value + else: + bb.fatal('Failed to process needed packages for %s; ' + 'Value must be a dict or list' % key) + + for package in packages: + pkg = package['pkg'] + rm = package.get('rm', False) + extract = package.get('extract', True) + + if extract: + #logger.debug(1, 'Extracting %s' % pkg) + dst_dir = os.path.join(extracted_path, pkg) + # Same package used for more than one test, + # don't need to extract again. + if os.path.exists(dst_dir): + continue + + # Extract package and copy it to TEST_EXTRACTED_DIR + pkg_dir = _extract_in_tmpdir(d, pkg) + oe.path.copytree(pkg_dir, dst_dir) + shutil.rmtree(pkg_dir) + + else: + #logger.debug(1, 'Copying %s' % pkg) + _copy_package(d, pkg) + +def _extract_in_tmpdir(d, pkg): + """" + Returns path to a temp directory where the package was + extracted without dependencies. 
+ """ + + from oeqa.utils.package_manager import get_package_manager + + pkg_path = os.path.join(d.getVar('TEST_INSTALL_TMP_DIR'), pkg) + pm = get_package_manager(d, pkg_path) + extract_dir = pm.extract(pkg) + shutil.rmtree(pkg_path) + + return extract_dir + +def _copy_package(d, pkg): + """ + Copy the RPM, DEB or IPK package to dst_dir + """ + + from oeqa.utils.package_manager import get_package_manager + + pkg_path = os.path.join(d.getVar('TEST_INSTALL_TMP_DIR'), pkg) + dst_dir = d.getVar('TEST_PACKAGED_DIR') + pm = get_package_manager(d, pkg_path) + pkg_info = pm.package_info(pkg) + file_path = pkg_info[pkg]['filepath'] + shutil.copy2(file_path, dst_dir) + shutil.rmtree(pkg_path) + +def install_package(test_case): + """ + Installs package in DUT if required. + """ + needed_packages = test_needs_package(test_case) + if needed_packages: + _install_uninstall_packages(needed_packages, test_case, True) + +def uninstall_package(test_case): + """ + Uninstalls package in DUT if required. + """ + needed_packages = test_needs_package(test_case) + if needed_packages: + _install_uninstall_packages(needed_packages, test_case, False) + +def test_needs_package(test_case): + """ + Checks if a test case requires to install/uninstall packages. + """ + test_file = getCaseFile(test_case) + json_file = _get_json_file(test_file) + + if json_file: + test_method = getCaseMethod(test_case) + needed_packages = _get_needed_packages(json_file, test_method) + if needed_packages: + return needed_packages + + return None + +def _install_uninstall_packages(needed_packages, test_case, install=True): + """ + Install/Uninstall packages in the DUT without using a package manager + """ + + if isinstance(needed_packages, dict): + packages = [needed_packages] + elif isinstance(needed_packages, list): + packages = needed_packages + + for package in packages: + pkg = package['pkg'] + rm = package.get('rm', False) + extract = package.get('extract', True) + src_dir = os.path.join(test_case.tc.extract_dir, pkg) + + # Install package + if install and extract: + test_case.tc.target.copyDirTo(src_dir, '/') + + # Uninstall package + elif not install and rm: + test_case.tc.target.deleteDirStructure(src_dir, '/') diff --git a/import-layers/yocto-poky/meta/lib/oeqa/utils/qemurunner.py b/import-layers/yocto-poky/meta/lib/oeqa/utils/qemurunner.py index 8f1b5b980..ba44b96f5 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/utils/qemurunner.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/utils/qemurunner.py @@ -7,6 +7,7 @@ import subprocess import os +import sys import time import signal import re @@ -36,10 +37,12 @@ class QemuRunner: self.runqemu = None # pid of the qemu process that runqemu will start self.qemupid = None - # target ip - from the command line + # target ip - from the command line or runqemu output self.ip = None # host ip - where qemu is running self.server_ip = None + # target ip netmask + self.netmask = None self.machine = machine self.rootfs = rootfs @@ -73,7 +76,7 @@ class QemuRunner: if self.logfile: # It is needed to sanitize the data received from qemu # because is possible to have control characters - msg = msg.decode("utf-8") + msg = msg.decode("utf-8", errors='ignore') msg = re_control_char.sub('', msg) with codecs.open(self.logfile, "a", encoding="utf-8") as f: f.write("%s" % msg) @@ -94,7 +97,7 @@ class QemuRunner: self._dump_host() raise SystemExit - def start(self, qemuparams = None, get_ip = True, extra_bootparams = None): + def start(self, qemuparams = None, get_ip = True, extra_bootparams = None, 
runqemuparams='', launch_cmd=None, discard_writes=True): if self.display: os.environ["DISPLAY"] = self.display # Set this flag so that Qemu doesn't do any grabs as SDL grabs @@ -114,6 +117,20 @@ class QemuRunner: else: os.environ["DEPLOY_DIR_IMAGE"] = self.deploy_dir_image + if not launch_cmd: + launch_cmd = 'runqemu %s %s ' % ('snapshot' if discard_writes else '', runqemuparams) + if self.use_kvm: + logger.info('Using kvm for runqemu') + launch_cmd += ' kvm' + else: + logger.info('Not using kvm for runqemu') + if not self.display: + launch_cmd += ' nographic' + launch_cmd += ' %s %s' % (self.machine, self.rootfs) + + return self.launch(launch_cmd, qemuparams=qemuparams, get_ip=get_ip, extra_bootparams=extra_bootparams) + + def launch(self, launch_cmd, get_ip = True, qemuparams = None, extra_bootparams = None): try: threadsock, threadport = self.create_socket() self.server_socket, self.serverport = self.create_socket() @@ -121,27 +138,19 @@ class QemuRunner: logger.error("Failed to create listening socket: %s" % msg[1]) return False - bootparams = 'console=tty1 console=ttyS0,115200n8 printk.time=1' if extra_bootparams: bootparams = bootparams + ' ' + extra_bootparams self.qemuparams = 'bootparams="{0}" qemuparams="-serial tcp:127.0.0.1:{1}"'.format(bootparams, threadport) - if not self.display: - self.qemuparams = 'nographic ' + self.qemuparams if qemuparams: self.qemuparams = self.qemuparams[:-1] + " " + qemuparams + " " + '\"' + launch_cmd += ' tcpserial=%s %s' % (self.serverport, self.qemuparams) + self.origchldhandler = signal.getsignal(signal.SIGCHLD) signal.signal(signal.SIGCHLD, self.handleSIGCHLD) - launch_cmd = 'runqemu snapshot ' - if self.use_kvm: - logger.info('Using kvm for runqemu') - launch_cmd += 'kvm ' - else: - logger.info('Not using kvm for runqemu') - launch_cmd += 'tcpserial=%s %s %s %s' % (self.serverport, self.machine, self.rootfs, self.qemuparams) logger.info('launchcmd=%s'%(launch_cmd)) # FIXME: We pass in stdin=subprocess.PIPE here to work around stty @@ -191,6 +200,8 @@ class QemuRunner: return False time.sleep(1) + out = self.getOutput(output) + netconf = False # network configuration is not required by default if self.is_alive(): logger.info("qemu started - qemu procces pid is %s" % self.qemupid) if get_ip: @@ -202,17 +213,27 @@ class QemuRunner: cmdline = re_control_char.sub('', cmdline) try: ips = re.findall("((?:[0-9]{1,3}\.){3}[0-9]{1,3})", cmdline.split("ip=")[1]) - if not ips or len(ips) != 3: - raise ValueError - else: - self.ip = ips[0] - self.server_ip = ips[1] + self.ip = ips[0] + self.server_ip = ips[1] + logger.info("qemu cmdline used:\n{}".format(cmdline)) except (IndexError, ValueError): - logger.info("Couldn't get ip from qemu process arguments! Here is the qemu command line used:\n%s\nand output from runqemu:\n%s" % (cmdline, self.getOutput(output))) - self._dump_host() - self.stop() - return False - logger.info("qemu cmdline used:\n{}".format(cmdline)) + # Try to get network configuration from runqemu output + match = re.match('.*Network configuration: ([0-9.]+)::([0-9.]+):([0-9.]+)$.*', + out, re.MULTILINE|re.DOTALL) + if match: + self.ip, self.server_ip, self.netmask = match.groups() + # network configuration is required as we couldn't get it + # from the runqemu command line, so qemu doesn't run kernel + # and guest networking is not configured + netconf = True + else: + logger.error("Couldn't get ip from qemu command line and runqemu output! 
" + "Here is the qemu command line used:\n%s\n" + "and output from runqemu:\n%s" % (cmdline, out)) + self._dump_host() + self.stop() + return False + logger.info("Target IP: %s" % self.ip) logger.info("Server IP: %s" % self.server_ip) @@ -221,12 +242,11 @@ class QemuRunner: if not self.thread.connection_established.wait(self.boottime): logger.error("Didn't receive a console connection from qemu. " "Here is the qemu command line used:\n%s\nand " - "output from runqemu:\n%s" % (cmdline, - self.getOutput(output))) + "output from runqemu:\n%s" % (cmdline, out)) self.stop_thread() return False - logger.info("Output from runqemu:\n%s", self.getOutput(output)) + logger.info("Output from runqemu:\n%s", out) logger.info("Waiting at most %d seconds for login banner" % self.boottime) endtime = time.time() + self.boottime socklist = [self.server_socket] @@ -236,7 +256,10 @@ class QemuRunner: bootlog = '' data = b'' while time.time() < endtime and not stopread: - sread, swrite, serror = select.select(socklist, [], [], 5) + try: + sread, swrite, serror = select.select(socklist, [], [], 5) + except InterruptedError: + continue for sock in sread: if sock is self.server_socket: qemusock, addr = self.server_socket.accept() @@ -278,6 +301,14 @@ class QemuRunner: if re.search("root@[a-zA-Z0-9\-]+:~#", output): self.logged = True logger.info("Logged as root in serial console") + if netconf: + # configure guest networking + cmd = "ifconfig eth0 %s netmask %s up\n" % (self.ip, self.netmask) + output = self.run_serial(cmd, raw=True)[1] + if re.search("root@[a-zA-Z0-9\-]+:~#", output): + logger.info("configured ip address %s", self.ip) + else: + logger.info("Couldn't configure guest networking") else: logger.info("Couldn't login into serial console" " as root using blank password") @@ -295,6 +326,7 @@ class QemuRunner: def stop(self): self.stop_thread() + self.stop_qemu_system() if hasattr(self, "origchldhandler"): signal.signal(signal.SIGCHLD, self.origchldhandler) if self.runqemu: @@ -319,6 +351,14 @@ class QemuRunner: self.qemupid = None self.ip = None + def stop_qemu_system(self): + if self.qemupid: + try: + # qemu-system behaves well and a SIGTERM is enough + os.kill(self.qemupid, signal.SIGTERM) + except ProcessLookupError as e: + logger.warn('qemu-system ended unexpectedly') + def stop_thread(self): if self.thread and self.thread.is_alive(): self.thread.stop() @@ -385,7 +425,7 @@ class QemuRunner: if "qemu-system" in basecmd and "-serial tcp" in commands[p]: return [int(p),commands[p]] - def run_serial(self, command, raw=False): + def run_serial(self, command, raw=False, timeout=5): # We assume target system have echo to get command status if not raw: command = "%s; echo $?\n" % command @@ -393,20 +433,26 @@ class QemuRunner: data = '' status = 0 self.server_socket.sendall(command.encode('utf-8')) - keepreading = True - while keepreading: - sread, _, _ = select.select([self.server_socket],[],[],5) + start = time.time() + end = start + timeout + while True: + now = time.time() + if now >= end: + data += "<<< run_serial(): command timed out after %d seconds without output >>>\r\n\r\n" % timeout + break + try: + sread, _, _ = select.select([self.server_socket],[],[], end - now) + except InterruptedError: + continue if sread: answer = self.server_socket.recv(1024) if answer: data += answer.decode('utf-8') # Search the prompt to stop if re.search("[a-zA-Z0-9]+@[a-zA-Z0-9\-]+:~#", data): - keepreading = False + break else: raise Exception("No data on serial console socket") - else: - keepreading = False if data: 
if raw: diff --git a/import-layers/yocto-poky/meta/lib/oeqa/utils/qemutinyrunner.py b/import-layers/yocto-poky/meta/lib/oeqa/utils/qemutinyrunner.py index d554f0dbc..1bf59007f 100644 --- a/import-layers/yocto-poky/meta/lib/oeqa/utils/qemutinyrunner.py +++ b/import-layers/yocto-poky/meta/lib/oeqa/utils/qemutinyrunner.py @@ -60,7 +60,7 @@ class QemuTinyRunner(QemuRunner): with open(self.logfile, "a") as f: f.write("%s" % msg) - def start(self, qemuparams = None, ssh=True, extra_bootparams=None): + def start(self, qemuparams = None, ssh=True, extra_bootparams=None, runqemuparams='', discard_writes=True): if self.display: os.environ["DISPLAY"] = self.display @@ -107,14 +107,17 @@ class QemuTinyRunner(QemuRunner): return self.is_alive() - def run_serial(self, command): + def run_serial(self, command, timeout=5): self.server_socket.sendall(command+'\n') data = '' status = 0 stopread = False - endtime = time.time()+5 + endtime = time.time()+timeout while time.time() +Date: Thu, 2 Feb 2017 13:51:27 -0500 +Subject: [PATCH] Mark our explicit fall through so -Wextra will work in gcc 7 + +gcc 7 introduces detection of fall-through behavior in switch/case +statements, and will warn if -Wimplicit-fallthrough is present and there +is no comment stating that the fall-through is intentional. This is +also triggered by -Wextra, as it enables -Wimplicit-fallthrough=1. + +This patch adds the comment in the one place we use fall-through. + +Signed-off-by: Peter Jones +--- +Upstream-Status: Pending + + lib/print.c | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/lib/print.c b/lib/print.c +index b8a9d38..cb732f0 100644 +--- a/lib/print.c ++++ b/lib/print.c +@@ -1131,6 +1131,7 @@ Returns: + case 'X': + Item.Width = Item.Long ? 16 : 8; + Item.Pad = '0'; ++ /* falls through */ + case 'x': + ValueToHex ( + Item.Scratch, +-- +2.12.2 + diff --git a/import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi/aarch64-initplat.c-fix-const-qualifier.patch b/import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi/aarch64-initplat.c-fix-const-qualifier.patch deleted file mode 100644 index 965f074eb..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi/aarch64-initplat.c-fix-const-qualifier.patch +++ /dev/null @@ -1,35 +0,0 @@ -From dc83b84dc8b4e71efce47143497aac6c126065cf Mon Sep 17 00:00:00 2001 -From: Robert Yang -Date: Mon, 18 Jul 2016 08:40:29 -0700 -Subject: [PATCH] lib/aarch64/initplat.c: fix const qualifier - -Fixed: -initplat.c:44:35: error: initialization discards 'const' qualifier from pointer target type [-Werror=discarded-qualifiers] - unsigned char *p = dest, *q = src; - ^~~ -cc1: all warnings being treated as errors - -Upstream-Status: Pending - -Signed-off-by: Robert Yang ---- - lib/aarch64/initplat.c | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) - -diff --git a/lib/aarch64/initplat.c b/lib/aarch64/initplat.c -index 2ac03a7..aae7beb 100644 ---- a/lib/aarch64/initplat.c -+++ b/lib/aarch64/initplat.c -@@ -41,7 +41,8 @@ void *memset(void *s, int c, __SIZE_TYPE__ n) - - void *memcpy(void *dest, const void *src, __SIZE_TYPE__ n) - { -- unsigned char *p = dest, *q = src; -+ unsigned char *p = dest; -+ const unsigned char *q = src; - - while (n--) - *p++ = *q++; --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.4.bb b/import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.4.bb deleted file mode 100644 index e0d8ee76d..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.4.bb +++ /dev/null @@ -1,54 +0,0 @@ 
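Stepping back to the QemuRunner/QemuTinyRunner changes above: both run_serial() implementations now take a timeout argument and poll the console socket against a deadline instead of looping on a flag. The standalone sketch below captures that pattern, reading from a connected socket until a shell prompt appears or the deadline passes; the function name is hypothetical and the prompt regex mirrors the one used by QemuRunner.

    import re
    import select
    import time

    def read_until_prompt(sock, timeout=5, prompt=r"[a-zA-Z0-9]+@[a-zA-Z0-9\-]+:~#"):
        """Read from a connected socket until 'prompt' is seen or timeout expires."""
        data = ''
        end = time.time() + timeout
        while True:
            remaining = end - time.time()
            if remaining <= 0:
                data += "<<< timed out after %d seconds without a prompt >>>\r\n" % timeout
                break
            try:
                readable, _, _ = select.select([sock], [], [], remaining)
            except InterruptedError:
                # mirrors the patch's handling of signal-interrupted select()
                continue
            if not readable:
                continue
            chunk = sock.recv(1024)
            if not chunk:
                raise Exception("No data on serial console socket")
            data += chunk.decode('utf-8')
            if re.search(prompt, data):
                break
        return data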
-SUMMARY = "Libraries for producing EFI binaries" -HOMEPAGE = "http://sourceforge.net/projects/gnu-efi/" -SECTION = "devel" -LICENSE = "GPLv2+ | BSD-2-Clause" -LIC_FILES_CHKSUM = "file://gnuefi/crt0-efi-arm.S;beginline=4;endline=16;md5=e582764a4776e60c95bf9ab617343d36 \ - file://gnuefi/crt0-efi-aarch64.S;beginline=4;endline=16;md5=e582764a4776e60c95bf9ab617343d36 \ - file://inc/efishellintf.h;beginline=13;endline=20;md5=202766b79d708eff3cc70fce15fb80c7 \ - file://inc/efishellparm.h;beginline=4;endline=11;md5=468b1231b05bbc84bae3a0d5774e3bb5 \ - file://lib/arm/math.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \ - file://lib/arm/initplat.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \ - file://lib/aarch64/math.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \ - file://lib/aarch64/initplat.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \ - " - -SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BP}.tar.bz2 \ - file://parallel-make-archives.patch \ - file://lib-Makefile-fix-parallel-issue.patch \ - file://gcc46-compatibility.patch \ - file://aarch64-initplat.c-fix-const-qualifier.patch \ - " - -SRC_URI[md5sum] = "612e0f327f31c4b8468ef55f4eeb9649" -SRC_URI[sha256sum] = "51a00428c3ccb96db24089ed8394843c4f83cf8f42c6a4dfddb4b7c23f2bf8af" - -COMPATIBLE_HOST = "(x86_64.*|i.86.*|aarch64.*|arm.*)-linux" -COMPATIBLE_HOST_armv4 = 'null' - -def gnu_efi_arch(d): - import re - tarch = d.getVar("TARGET_ARCH", True) - if re.match("i[3456789]86", tarch): - return "ia32" - return tarch - -EXTRA_OEMAKE = "'ARCH=${@gnu_efi_arch(d)}' 'CC=${CC}' 'AS=${AS}' 'LD=${LD}' 'AR=${AR}' \ - 'RANLIB=${RANLIB}' 'OBJCOPY=${OBJCOPY}' 'PREFIX=${prefix}' 'LIBDIR=${libdir}' \ - " - -# gnu-efi's Makefile treats prefix as toolchain prefix, so don't -# export it. 
-prefix[unexport] = "1" - -do_install() { - oe_runmake install INSTALLROOT="${D}" -} - -FILES_${PN} += "${libdir}/*.lds" - -BBCLASSEXTEND = "native" - -# It doesn't support sse, its make.defaults sets: -# CFLAGS += -mno-mmx -mno-sse -# So also remove -mfpmath=sse from TUNE_CCARGS -TUNE_CCARGS_remove = "-mfpmath=sse" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.5.bb b/import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.5.bb new file mode 100644 index 000000000..d6f9f536d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/gnu-efi/gnu-efi_3.0.5.bb @@ -0,0 +1,71 @@ +SUMMARY = "Libraries for producing EFI binaries" +HOMEPAGE = "http://sourceforge.net/projects/gnu-efi/" +SECTION = "devel" +LICENSE = "GPLv2+ | BSD-2-Clause" +LIC_FILES_CHKSUM = "file://gnuefi/crt0-efi-arm.S;beginline=4;endline=16;md5=e582764a4776e60c95bf9ab617343d36 \ + file://gnuefi/crt0-efi-aarch64.S;beginline=4;endline=16;md5=e582764a4776e60c95bf9ab617343d36 \ + file://inc/efishellintf.h;beginline=13;endline=20;md5=202766b79d708eff3cc70fce15fb80c7 \ + file://inc/efishellparm.h;beginline=4;endline=11;md5=468b1231b05bbc84bae3a0d5774e3bb5 \ + file://lib/arm/math.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \ + file://lib/arm/initplat.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \ + file://lib/aarch64/math.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \ + file://lib/aarch64/initplat.c;beginline=2;endline=15;md5=8ed772501da77b2b3345aa6df8744c9e \ + " + +SRC_URI = "${SOURCEFORGE_MIRROR}/${BPN}/${BP}.tar.bz2 \ + file://parallel-make-archives.patch \ + file://lib-Makefile-fix-parallel-issue.patch \ + file://gcc46-compatibility.patch \ + file://0001-Mark-our-explicit-fall-through-so-Wextra-will-work-i.patch \ + " + +SRC_URI[md5sum] = "1f719c9c135778aa6b087b89a1cc2423" +SRC_URI[sha256sum] = "bd8fcd5914f18fc0e4ba948ab03b00013e528504f529c60739b748f6ef130b22" + +COMPATIBLE_HOST = "(x86_64.*|i.86.*|aarch64.*|arm.*)-linux" +COMPATIBLE_HOST_armv4 = 'null' + +do_configure_linux-gnux32_prepend() { + cp ${STAGING_INCDIR}/gnu/stubs-x32.h ${STAGING_INCDIR}/gnu/stubs-64.h + cp ${STAGING_INCDIR}/bits/long-double-32.h ${STAGING_INCDIR}/bits/long-double-64.h +} + +def gnu_efi_arch(d): + import re + tarch = d.getVar("TARGET_ARCH") + if re.match("i[3456789]86", tarch): + return "ia32" + return tarch + +EXTRA_OEMAKE = "'ARCH=${@gnu_efi_arch(d)}' 'CC=${CC}' 'AS=${AS}' 'LD=${LD}' 'AR=${AR}' \ + 'RANLIB=${RANLIB}' 'OBJCOPY=${OBJCOPY}' 'PREFIX=${prefix}' 'LIBDIR=${libdir}' \ + " + +# gnu-efi's Makefile treats prefix as toolchain prefix, so don't +# export it. 
+prefix[unexport] = "1" + +do_install() { + oe_runmake install INSTALLROOT="${D}" +} + +FILES_${PN} += "${libdir}/*.lds" + +# 64-bit binaries are expected for EFI when targeting X32 +INSANE_SKIP_${PN}-dev_append_linux-gnux32 = " arch" + +BBCLASSEXTEND = "native" + +# It doesn't support sse, its make.defaults sets: +# CFLAGS += -mno-mmx -mno-sse +# So also remove -mfpmath=sse from TUNE_CCARGS +TUNE_CCARGS_remove = "-mfpmath=sse" + +python () { + ccargs = d.getVar('TUNE_CCARGS').split() + if '-mx32' in ccargs: + # use x86_64 EFI ABI + ccargs.remove('-mx32') + ccargs.append('-m64') + d.setVar('TUNE_CCARGS', ' '.join(ccargs)) +} diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0001-btrfs-avoid-used-uninitialized-error-with-GCC7.patch b/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0001-btrfs-avoid-used-uninitialized-error-with-GCC7.patch new file mode 100644 index 000000000..217a77560 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0001-btrfs-avoid-used-uninitialized-error-with-GCC7.patch @@ -0,0 +1,36 @@ +From 6cef7f6079550af3bf91dbff824398eaef08c3c5 Mon Sep 17 00:00:00 2001 +From: Andrei Borzenkov +Date: Tue, 4 Apr 2017 19:22:32 +0300 +Subject: [PATCH 1/4] btrfs: avoid "used uninitialized" error with GCC7 + +sblock was local and so considered new variable on every loop +iteration. + +Closes: 50597 +--- +Upstream-Status: Backport +Signed-off-by: Khem Raj + + grub-core/fs/btrfs.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/grub-core/fs/btrfs.c b/grub-core/fs/btrfs.c +index 9cffa91..4849c1c 100644 +--- a/grub-core/fs/btrfs.c ++++ b/grub-core/fs/btrfs.c +@@ -227,11 +227,11 @@ grub_btrfs_read_logical (struct grub_btrfs_data *data, + static grub_err_t + read_sblock (grub_disk_t disk, struct grub_btrfs_superblock *sb) + { ++ struct grub_btrfs_superblock sblock; + unsigned i; + grub_err_t err = GRUB_ERR_NONE; + for (i = 0; i < ARRAY_SIZE (superblock_sectors); i++) + { +- struct grub_btrfs_superblock sblock; + /* Don't try additional superblocks beyond device size. */ + if (i && (grub_le_to_cpu64 (sblock.this_device.size) + >> GRUB_DISK_SECTOR_BITS) <= superblock_sectors[i]) +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0001-build-Use-AC_HEADER_MAJOR-to-find-device-macros.patch b/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0001-build-Use-AC_HEADER_MAJOR-to-find-device-macros.patch new file mode 100644 index 000000000..f95b9ef9a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0001-build-Use-AC_HEADER_MAJOR-to-find-device-macros.patch @@ -0,0 +1,92 @@ +From 7a5b301e3adb8e054288518a325135a1883c1c6c Mon Sep 17 00:00:00 2001 +From: Mike Gilbert +Date: Tue, 19 Apr 2016 14:27:22 -0400 +Subject: [PATCH] build: Use AC_HEADER_MAJOR to find device macros + +Depending on the OS/libc, device macros are defined in different +headers. This change ensures we include the right one. + +sys/types.h - BSD +sys/mkdev.h - Sun +sys/sysmacros.h - glibc (Linux) + +glibc currently pulls sys/sysmacros.h into sys/types.h, but this may +change in a future release. 
+ +https://sourceware.org/ml/libc-alpha/2015-11/msg00253.html +--- +Upstream-Status: Backport + + configure.ac | 3 ++- + grub-core/osdep/devmapper/getroot.c | 6 ++++++ + grub-core/osdep/devmapper/hostdisk.c | 5 +++++ + grub-core/osdep/linux/getroot.c | 6 ++++++ + grub-core/osdep/unix/getroot.c | 4 +++- + 5 files changed, 22 insertions(+), 2 deletions(-) + +Index: grub-2.00/configure.ac +=================================================================== +--- grub-2.00.orig/configure.ac ++++ grub-2.00/configure.ac +@@ -326,7 +326,8 @@ fi + + # Check for functions and headers. + AC_CHECK_FUNCS(posix_memalign memalign asprintf vasprintf getextmntent) +-AC_CHECK_HEADERS(sys/param.h sys/mount.h sys/mnttab.h sys/mkdev.h limits.h) ++AC_CHECK_HEADERS(sys/param.h sys/mount.h sys/mnttab.h limits.h) ++AC_HEADER_MAJOR + + AC_CHECK_MEMBERS([struct statfs.f_fstypename],,,[$ac_includes_default + #include +Index: grub-2.00/grub-core/kern/emu/hostdisk.c +=================================================================== +--- grub-2.00.orig/grub-core/kern/emu/hostdisk.c ++++ grub-2.00/grub-core/kern/emu/hostdisk.c +@@ -41,6 +41,12 @@ + #include + #include + ++#if defined(MAJOR_IN_MKDEV) ++#include ++#elif defined(MAJOR_IN_SYSMACROS) ++#include ++#endif ++ + #ifdef __linux__ + # include /* ioctl */ + # include +Index: grub-2.00/util/getroot.c +=================================================================== +--- grub-2.00.orig/util/getroot.c ++++ grub-2.00/util/getroot.c +@@ -35,6 +35,13 @@ + #ifdef HAVE_LIMITS_H + #include + #endif ++ ++#if defined(MAJOR_IN_MKDEV) ++#include ++#elif defined(MAJOR_IN_SYSMACROS) ++#include ++#endif ++ + #include + #include + #include +Index: grub-2.00/util/raid.c +=================================================================== +--- grub-2.00.orig/util/raid.c ++++ grub-2.00/util/raid.c +@@ -29,6 +29,12 @@ + #include + #include + ++#if defined(MAJOR_IN_MKDEV) ++#include ++#elif defined(MAJOR_IN_SYSMACROS) ++#include ++#endif ++ + #include + #include + #include diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0002-i386-x86_64-ppc-fix-switch-fallthrough-cases-with-GC.patch b/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0002-i386-x86_64-ppc-fix-switch-fallthrough-cases-with-GC.patch new file mode 100644 index 000000000..94f048c28 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0002-i386-x86_64-ppc-fix-switch-fallthrough-cases-with-GC.patch @@ -0,0 +1,248 @@ +From 4bd4a88725604471fdbd86316c91967a7f4dba5a Mon Sep 17 00:00:00 2001 +From: Andrei Borzenkov +Date: Tue, 4 Apr 2017 19:23:55 +0300 +Subject: [PATCH 2/4] i386, x86_64, ppc: fix switch fallthrough cases with GCC7 + +In util/getroot and efidisk slightly modify exitsing comment to mostly +retain it but still make GCC7 compliant with respect to fall through +annotation. + +In grub-core/lib/xzembed/xz_dec_lzma2.c it adds same comments as +upstream. + +In grub-core/tests/setjmp_tets.c declare functions as "noreturn" to +suppress GCC7 warning. + +In grub-core/gnulib/regexec.c use new __attribute__, because existing +annotation is not recognized by GCC7 parser (which requires that comment +immediately precedes case statement). + +Otherwise add FALLTHROUGH comment. 
+ +Closes: 50598 +--- +Upstream-Status: Backport +Signed-off-by: Khem Raj + + grub-core/commands/hdparm.c | 1 + + grub-core/commands/nativedisk.c | 1 + + grub-core/disk/cryptodisk.c | 1 + + grub-core/disk/efi/efidisk.c | 2 +- + grub-core/efiemu/mm.c | 1 + + grub-core/gdb/cstub.c | 1 + + grub-core/gnulib/regexec.c | 3 +++ + grub-core/lib/xzembed/xz_dec_lzma2.c | 4 ++++ + grub-core/lib/xzembed/xz_dec_stream.c | 6 ++++++ + grub-core/loader/i386/linux.c | 3 +++ + grub-core/tests/setjmp_test.c | 5 ++++- + grub-core/video/ieee1275.c | 1 + + grub-core/video/readers/jpeg.c | 1 + + util/getroot.c | 2 +- + util/grub-install.c | 1 + + util/grub-mkimagexx.c | 1 + + util/grub-mount.c | 1 + + 17 files changed, 32 insertions(+), 3 deletions(-) + +Index: grub-2.00/grub-core/commands/hdparm.c +=================================================================== +--- grub-2.00.orig/grub-core/commands/hdparm.c ++++ grub-2.00/grub-core/commands/hdparm.c +@@ -328,6 +328,7 @@ grub_cmd_hdparm (grub_extcmd_context_t c + ata = ((struct grub_scsi *) disk->data)->data; + break; + } ++ /* FALLTHROUGH */ + default: + return grub_error (GRUB_ERR_IO, "not an ATA device"); + } +Index: grub-2.00/grub-core/disk/cryptodisk.c +=================================================================== +--- grub-2.00.orig/grub-core/disk/cryptodisk.c ++++ grub-2.00/grub-core/disk/cryptodisk.c +@@ -268,6 +268,7 @@ grub_cryptodisk_endecrypt (struct grub_c + break; + case GRUB_CRYPTODISK_MODE_IV_PLAIN64: + iv[1] = grub_cpu_to_le32 (sector >> 32); ++ /* FALLTHROUGH */ + case GRUB_CRYPTODISK_MODE_IV_PLAIN: + iv[0] = grub_cpu_to_le32 (sector & 0xFFFFFFFF); + break; +Index: grub-2.00/grub-core/disk/efi/efidisk.c +=================================================================== +--- grub-2.00.orig/grub-core/disk/efi/efidisk.c ++++ grub-2.00/grub-core/disk/efi/efidisk.c +@@ -262,7 +262,7 @@ name_devices (struct grub_efidisk_data * + { + case GRUB_EFI_HARD_DRIVE_DEVICE_PATH_SUBTYPE: + is_hard_drive = 1; +- /* Fall through by intention. */ ++ /* Intentionally fall through. */ + case GRUB_EFI_CDROM_DEVICE_PATH_SUBTYPE: + { + struct grub_efidisk_data *parent, *parent2; +Index: grub-2.00/grub-core/efiemu/mm.c +=================================================================== +--- grub-2.00.orig/grub-core/efiemu/mm.c ++++ grub-2.00/grub-core/efiemu/mm.c +@@ -410,6 +410,7 @@ grub_efiemu_mmap_fill (void) + default: + grub_dprintf ("efiemu", + "Unknown memory type %d. Assuming unusable\n", type); ++ /* FALLTHROUGH */ + case GRUB_MEMORY_RESERVED: + return grub_efiemu_add_to_mmap (addr, size, + GRUB_EFI_UNUSABLE_MEMORY); +Index: grub-2.00/grub-core/gdb/cstub.c +=================================================================== +--- grub-2.00.orig/grub-core/gdb/cstub.c ++++ grub-2.00/grub-core/gdb/cstub.c +@@ -336,6 +336,7 @@ grub_gdb_trap (int trap_no) + /* sAA..AA: Step one instruction from AA..AA(optional). */ + case 's': + stepping = 1; ++ /* FALLTHROUGH */ + + /* cAA..AA: Continue at address AA..AA(optional). 
*/ + case 'c': +Index: grub-2.00/grub-core/gnulib/regexec.c +=================================================================== +--- grub-2.00.orig/grub-core/gnulib/regexec.c ++++ grub-2.00/grub-core/gnulib/regexec.c +@@ -4104,6 +4104,9 @@ check_node_accept (const re_match_contex + case OP_UTF8_PERIOD: + if (ch >= ASCII_CHARS) + return false; ++#if defined __GNUC__ && __GNUC__ >= 7 ++ __attribute__ ((fallthrough)); ++#endif + /* FALLTHROUGH */ + #endif + case OP_PERIOD: +Index: grub-2.00/grub-core/lib/xzembed/xz_dec_lzma2.c +=================================================================== +--- grub-2.00.orig/grub-core/lib/xzembed/xz_dec_lzma2.c ++++ grub-2.00/grub-core/lib/xzembed/xz_dec_lzma2.c +@@ -1042,6 +1042,8 @@ enum xz_ret xz_dec_lzma2_run( + + s->lzma2.sequence = SEQ_LZMA_PREPARE; + ++ /* Fall through */ ++ + case SEQ_LZMA_PREPARE: + if (s->lzma2.compressed < RC_INIT_BYTES) + return XZ_DATA_ERROR; +@@ -1052,6 +1054,8 @@ enum xz_ret xz_dec_lzma2_run( + s->lzma2.compressed -= RC_INIT_BYTES; + s->lzma2.sequence = SEQ_LZMA_RUN; + ++ /* Fall through */ ++ + case SEQ_LZMA_RUN: + /* + * Set dictionary limit to indicate how much we want +Index: grub-2.00/grub-core/lib/xzembed/xz_dec_stream.c +=================================================================== +--- grub-2.00.orig/grub-core/lib/xzembed/xz_dec_stream.c ++++ grub-2.00/grub-core/lib/xzembed/xz_dec_stream.c +@@ -749,6 +749,7 @@ static enum xz_ret dec_main(struct xz_de + + s->sequence = SEQ_BLOCK_START; + ++ /* FALLTHROUGH */ + case SEQ_BLOCK_START: + /* We need one byte of input to continue. */ + if (b->in_pos == b->in_size) +@@ -772,6 +773,7 @@ static enum xz_ret dec_main(struct xz_de + s->temp.pos = 0; + s->sequence = SEQ_BLOCK_HEADER; + ++ /* FALLTHROUGH */ + case SEQ_BLOCK_HEADER: + if (!fill_temp(s, b)) + return XZ_OK; +@@ -782,6 +784,7 @@ static enum xz_ret dec_main(struct xz_de + + s->sequence = SEQ_BLOCK_UNCOMPRESS; + ++ /* FALLTHROUGH */ + case SEQ_BLOCK_UNCOMPRESS: + ret = dec_block(s, b); + if (ret != XZ_STREAM_END) +@@ -809,6 +812,7 @@ static enum xz_ret dec_main(struct xz_de + + s->sequence = SEQ_BLOCK_CHECK; + ++ /* FALLTHROUGH */ + case SEQ_BLOCK_CHECK: + ret = hash_validate(s, b, 0); + if (ret != XZ_STREAM_END) +@@ -863,6 +867,7 @@ static enum xz_ret dec_main(struct xz_de + + s->sequence = SEQ_INDEX_CRC32; + ++ /* FALLTHROUGH */ + case SEQ_INDEX_CRC32: + ret = hash_validate(s, b, 1); + if (ret != XZ_STREAM_END) +@@ -871,6 +876,7 @@ static enum xz_ret dec_main(struct xz_de + s->temp.size = STREAM_HEADER_SIZE; + s->sequence = SEQ_STREAM_FOOTER; + ++ /* FALLTHROUGH */ + case SEQ_STREAM_FOOTER: + if (!fill_temp(s, b)) + return XZ_OK; +Index: grub-2.00/grub-core/loader/i386/linux.c +=================================================================== +--- grub-2.00.orig/grub-core/loader/i386/linux.c ++++ grub-2.00/grub-core/loader/i386/linux.c +@@ -977,10 +977,13 @@ grub_cmd_linux (grub_command_t cmd __att + { + case 'g': + shift += 10; ++ /* FALLTHROUGH */ + case 'm': + shift += 10; ++ /* FALLTHROUGH */ + case 'k': + shift += 10; ++ /* FALLTHROUGH */ + default: + break; + } +Index: grub-2.00/grub-core/video/readers/jpeg.c +=================================================================== +--- grub-2.00.orig/grub-core/video/readers/jpeg.c ++++ grub-2.00/grub-core/video/readers/jpeg.c +@@ -701,6 +701,7 @@ grub_jpeg_decode_jpeg (struct grub_jpeg_ + case JPEG_MARKER_SOS: /* Start Of Scan. */ + if (grub_jpeg_decode_sos (data)) + break; ++ /* FALLTHROUGH */ + case JPEG_MARKER_RST0: /* Restart. 
*/ + case JPEG_MARKER_RST1: + case JPEG_MARKER_RST2: +Index: grub-2.00/util/grub-mkimagexx.c +=================================================================== +--- grub-2.00.orig/util/grub-mkimagexx.c ++++ grub-2.00/util/grub-mkimagexx.c +@@ -485,6 +485,7 @@ SUFFIX (relocate_addresses) (Elf_Ehdr *e + + sym->st_value + - image_target->vaddr_offset)); + } ++ /* FALLTHROUGH */ + case R_IA64_LTOFF_FPTR22: + *gpptr = grub_host_to_target64 (addend + sym_addr); + add_value_to_slot_21 ((grub_addr_t) target, +Index: grub-2.00/util/grub-mount.c +=================================================================== +--- grub-2.00.orig/util/grub-mount.c ++++ grub-2.00/util/grub-mount.c +@@ -487,6 +487,7 @@ argp_parser (int key, char *arg, struct + if (arg[0] != '-') + break; + ++ /* FALLTHROUGH */ + default: + if (!arg) + return 0; diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0003-Add-gnulib-fix-gcc7-fallthrough.diff.patch b/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0003-Add-gnulib-fix-gcc7-fallthrough.diff.patch new file mode 100644 index 000000000..fcfbf5cdf --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0003-Add-gnulib-fix-gcc7-fallthrough.diff.patch @@ -0,0 +1,38 @@ +From 007f0b407f72314ec832d77e15b83ea40b160037 Mon Sep 17 00:00:00 2001 +From: Andrei Borzenkov +Date: Tue, 4 Apr 2017 19:37:47 +0300 +Subject: [PATCH 3/4] Add gnulib-fix-gcc7-fallthrough.diff + +As long as the code is not upstream, add it as explicit patch for the +case of gnulib refresh. +--- +Upstream-Status: Backport +Signed-off-by: Khem Raj + + grub-core/gnulib-fix-gcc7-fallthrough.diff | 14 ++++++++++++++ + 1 file changed, 14 insertions(+) + create mode 100644 grub-core/gnulib-fix-gcc7-fallthrough.diff + +diff --git a/grub-core/gnulib-fix-gcc7-fallthrough.diff b/grub-core/gnulib-fix-gcc7-fallthrough.diff +new file mode 100644 +index 0000000..9802e2d +--- /dev/null ++++ b/grub-core/gnulib-fix-gcc7-fallthrough.diff +@@ -0,0 +1,14 @@ ++diff --git grub-core/gnulib/regexec.c grub-core/gnulib/regexec.c ++index f632cd4..a7776f0 100644 ++--- grub-core/gnulib/regexec.c +++++ grub-core/gnulib/regexec.c ++@@ -4099,6 +4099,9 @@ check_node_accept (const re_match_context_t *mctx, const re_token_t *node, ++ case OP_UTF8_PERIOD: ++ if (ch >= ASCII_CHARS) ++ return false; +++#if defined __GNUC__ && __GNUC__ >= 7 +++ __attribute__ ((fallthrough)); +++#endif ++ /* FALLTHROUGH */ ++ #endif ++ case OP_PERIOD: +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0004-Fix-remaining-cases-of-gcc-7-fallthrough-warning.patch b/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0004-Fix-remaining-cases-of-gcc-7-fallthrough-warning.patch new file mode 100644 index 000000000..78a70a2da --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/grub/files/0004-Fix-remaining-cases-of-gcc-7-fallthrough-warning.patch @@ -0,0 +1,175 @@ +From d454509bb866d4eaefbb558d94dd0ef0228830eb Mon Sep 17 00:00:00 2001 +From: Vladimir Serbinenko +Date: Wed, 12 Apr 2017 01:42:38 +0000 +Subject: [PATCH 4/4] Fix remaining cases of gcc 7 fallthrough warning. + +They are all intended, so just add the relevant comment. 
+--- +Upstream-Status: Backport +Signed-off-by: Khem Raj + + grub-core/kern/ia64/dl.c | 1 + + grub-core/kern/mips/dl.c | 1 + + grub-core/kern/sparc64/dl.c | 1 + + grub-core/loader/i386/coreboot/chainloader.c | 1 + + 4 files changed, 4 insertions(+) + +Index: grub-2.00/grub-core/kern/ia64/dl.c +=================================================================== +--- grub-2.00.orig/grub-core/kern/ia64/dl.c ++++ grub-2.00/grub-core/kern/ia64/dl.c +@@ -257,6 +257,7 @@ grub_arch_dl_relocate_symbols (grub_dl_t + case R_IA64_LTOFF22: + if (ELF_ST_TYPE (sym->st_info) == STT_FUNC) + value = *(grub_uint64_t *) sym->st_value + rel->r_addend; ++ /* Fallthrough. */ + case R_IA64_LTOFF_FPTR22: + *gpptr = value; + add_value_to_slot_21 (addr, (grub_addr_t) gpptr - (grub_addr_t) gp); +Index: grub-2.00/grub-core/disk/diskfilter.c +=================================================================== +--- grub-2.00.orig/grub-core/disk/diskfilter.c ++++ grub-2.00/grub-core/disk/diskfilter.c +@@ -71,10 +71,12 @@ is_lv_readable (struct grub_diskfilter_l + case GRUB_DISKFILTER_RAID6: + if (!easily) + need--; ++ /* Fallthrough. */ + case GRUB_DISKFILTER_RAID4: + case GRUB_DISKFILTER_RAID5: + if (!easily) + need--; ++ /* Fallthrough. */ + case GRUB_DISKFILTER_STRIPED: + break; + +@@ -507,6 +509,7 @@ read_segment (struct grub_diskfilter_seg + if (seg->node_count == 1) + return grub_diskfilter_read_node (&seg->nodes[0], + sector, size, buf); ++ /* Fallthrough. */ + case GRUB_DISKFILTER_MIRROR: + case GRUB_DISKFILTER_RAID10: + { +Index: grub-2.00/grub-core/font/font.c +=================================================================== +--- grub-2.00.orig/grub-core/font/font.c ++++ grub-2.00/grub-core/font/font.c +@@ -1297,6 +1297,7 @@ blit_comb (const struct grub_unicode_gly + - grub_font_get_xheight (combining_glyphs[i]->font) - 1; + if (space <= 0) + space = 1 + (grub_font_get_xheight (main_glyph->font)) / 8; ++ /* Fallthrough. */ + + case GRUB_UNICODE_STACK_ATTACHED_ABOVE: + do_blit (combining_glyphs[i], targetx, +@@ -1338,6 +1339,7 @@ blit_comb (const struct grub_unicode_gly + + combining_glyphs[i]->height); + if (space <= 0) + space = 1 + (grub_font_get_xheight (main_glyph->font)) / 8; ++ /* Fallthrough. */ + + case GRUB_UNICODE_STACK_ATTACHED_BELOW: + do_blit (combining_glyphs[i], targetx, -(bounds.y - space)); +Index: grub-2.00/grub-core/fs/udf.c +=================================================================== +--- grub-2.00.orig/grub-core/fs/udf.c ++++ grub-2.00/grub-core/fs/udf.c +@@ -970,6 +970,7 @@ grub_udf_read_symlink (grub_fshelp_node_ + case 1: + if (ptr[1]) + goto fail; ++ break; + case 2: + /* in 4 bytes. out: 1 byte. */ + optr = out; +Index: grub-2.00/grub-core/lib/legacy_parse.c +=================================================================== +--- grub-2.00.orig/grub-core/lib/legacy_parse.c ++++ grub-2.00/grub-core/lib/legacy_parse.c +@@ -626,6 +626,7 @@ grub_legacy_parse (const char *buf, char + { + case TYPE_FILE_NO_CONSUME: + hold_arg = 1; ++ /* Fallthrough. 
*/ + case TYPE_PARTITION: + case TYPE_FILE: + args[i] = adjust_file (curarg, curarglen); +Index: grub-2.00/grub-core/lib/libgcrypt-grub/cipher/rijndael.c +=================================================================== +--- grub-2.00.orig/grub-core/lib/libgcrypt-grub/cipher/rijndael.c ++++ grub-2.00/grub-core/lib/libgcrypt-grub/cipher/rijndael.c +@@ -96,7 +96,8 @@ do_setkey (RIJNDAEL_context *ctx, const + static int initialized = 0; + static const char *selftest_failed=0; + int ROUNDS; +- int i,j, r, t, rconpointer = 0; ++ unsigned int i, t, rconpointer = 0; ++ int j, r; + int KC; + union + { +Index: grub-2.00/grub-core/mmap/efi/mmap.c +=================================================================== +--- grub-2.00.orig/grub-core/mmap/efi/mmap.c ++++ grub-2.00/grub-core/mmap/efi/mmap.c +@@ -72,6 +72,7 @@ grub_efi_mmap_iterate (grub_memory_hook_ + GRUB_MEMORY_AVAILABLE); + break; + } ++ /* Fallthrough. */ + case GRUB_EFI_RUNTIME_SERVICES_CODE: + hook (desc->physical_start, desc->num_pages * 4096, + GRUB_MEMORY_CODE); +@@ -86,6 +87,7 @@ grub_efi_mmap_iterate (grub_memory_hook_ + grub_printf ("Unknown memory type %d, considering reserved\n", + desc->type); + ++ /* Fallthrough. */ + case GRUB_EFI_BOOT_SERVICES_DATA: + if (!avoid_efi_boot_services) + { +@@ -93,6 +95,7 @@ grub_efi_mmap_iterate (grub_memory_hook_ + GRUB_MEMORY_AVAILABLE); + break; + } ++ /* Fallthrough. */ + case GRUB_EFI_RESERVED_MEMORY_TYPE: + case GRUB_EFI_RUNTIME_SERVICES_DATA: + case GRUB_EFI_MEMORY_MAPPED_IO: +Index: grub-2.00/grub-core/normal/charset.c +=================================================================== +--- grub-2.00.orig/grub-core/normal/charset.c ++++ grub-2.00/grub-core/normal/charset.c +@@ -858,6 +858,7 @@ grub_bidi_line_logical_to_visual (const + case GRUB_BIDI_TYPE_R: + case GRUB_BIDI_TYPE_AL: + bidi_needed = 1; ++ /* Fallthrough. */ + default: + { + if (join_state == JOIN_FORCE) +Index: grub-2.00/grub-core/video/bochs.c +=================================================================== +--- grub-2.00.orig/grub-core/video/bochs.c ++++ grub-2.00/grub-core/video/bochs.c +@@ -351,6 +351,7 @@ grub_video_bochs_setup (unsigned int wid + case 32: + framebuffer.mode_info.reserved_mask_size = 8; + framebuffer.mode_info.reserved_field_pos = 24; ++ /* Fallthrough. */ + + case 24: + framebuffer.mode_info.red_mask_size = 8; +Index: grub-2.00/grub-core/video/cirrus.c +=================================================================== +--- grub-2.00.orig/grub-core/video/cirrus.c ++++ grub-2.00/grub-core/video/cirrus.c +@@ -431,6 +431,7 @@ grub_video_cirrus_setup (unsigned int wi + case 32: + framebuffer.mode_info.reserved_mask_size = 8; + framebuffer.mode_info.reserved_field_pos = 24; ++ /* Fallthrough. 
*/ + + case 24: + framebuffer.mode_info.red_mask_size = 8; diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/autohell.patch b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/autohell.patch deleted file mode 100644 index d66207ae6..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/autohell.patch +++ /dev/null @@ -1,21 +0,0 @@ -Upstream-Status: Inappropriate [configuration] - ---- - configure.ac | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) - -Index: grub-0.97/configure.ac -=================================================================== ---- grub-0.97.orig/configure.ac 2008-09-12 17:39:52.000000000 +0200 -+++ grub-0.97/configure.ac 2008-09-12 17:40:21.000000000 +0200 -@@ -60,8 +60,8 @@ AC_PROG_CC - _AM_DEPENDENCIES(CC) - - dnl Because recent automake complains about AS, set it here. --CCAS="$CC" --AC_SUBST(CCAS) -+AM_PROG_AS -+AC_SUBST(AS) - - AC_ARG_WITH(binutils, - [ --with-binutils=DIR search the directory DIR to find binutils]) diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/grub-support-256byte-inode.patch b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/grub-support-256byte-inode.patch deleted file mode 100644 index d225d13dc..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/grub-support-256byte-inode.patch +++ /dev/null @@ -1,101 +0,0 @@ -Upstream-Status: Inappropriate [No Longer Maintained] - -diff -Naur grub-0.97-800/stage2/fsys_ext2fs.c grub-0.97-810/stage2/fsys_ext2fs.c ---- grub-0.97-800/stage2/fsys_ext2fs.c 2008-07-21 00:40:21.668879475 -0600 -+++ grub-0.97-810/stage2/fsys_ext2fs.c 2008-07-21 01:01:11.063953773 -0600 -@@ -79,7 +79,52 @@ - __u32 s_rev_level; /* Revision level */ - __u16 s_def_resuid; /* Default uid for reserved blocks */ - __u16 s_def_resgid; /* Default gid for reserved blocks */ -- __u32 s_reserved[235]; /* Padding to the end of the block */ -+ /* -+ * These fields are for EXT2_DYNAMIC_REV superblocks only. -+ * -+ * Note: the difference between the compatible feature set and -+ * the incompatible feature set is that if there is a bit set -+ * in the incompatible feature set that the kernel doesn't -+ * know about, it should refuse to mount the filesystem. -+ * -+ * e2fsck's requirements are more strict; if it doesn't know -+ * about a feature in either the compatible or incompatible -+ * feature set, it must abort and not try to meddle with -+ * things it doesn't understand... -+ */ -+ __u32 s_first_ino; /* First non-reserved inode */ -+ __u16 s_inode_size; /* size of inode structure */ -+ __u16 s_block_group_nr; /* block group # of this superblock */ -+ __u32 s_feature_compat; /* compatible feature set */ -+ __u32 s_feature_incompat; /* incompatible feature set */ -+ __u32 s_feature_ro_compat; /* readonly-compatible feature set */ -+ __u8 s_uuid[16]; /* 128-bit uuid for volume */ -+ char s_volume_name[16]; /* volume name */ -+ char s_last_mounted[64]; /* directory where last mounted */ -+ __u32 s_algorithm_usage_bitmap; /* For compression */ -+ /* -+ * Performance hints. Directory preallocation should only -+ * happen if the EXT2_FEATURE_COMPAT_DIR_PREALLOC flag is on. -+ */ -+ __u8 s_prealloc_blocks; /* Nr of blocks to try to preallocate*/ -+ __u8 s_prealloc_dir_blocks; /* Nr to preallocate for dirs */ -+ __u16 s_reserved_gdt_blocks;/* Per group table for online growth */ -+ /* -+ * Journaling support valid if EXT2_FEATURE_COMPAT_HAS_JOURNAL set. 
-+ */ -+ __u8 s_journal_uuid[16]; /* uuid of journal superblock */ -+ __u32 s_journal_inum; /* inode number of journal file */ -+ __u32 s_journal_dev; /* device number of journal file */ -+ __u32 s_last_orphan; /* start of list of inodes to delete */ -+ __u32 s_hash_seed[4]; /* HTREE hash seed */ -+ __u8 s_def_hash_version; /* Default hash version to use */ -+ __u8 s_jnl_backup_type; /* Default type of journal backup */ -+ __u16 s_reserved_word_pad; -+ __u32 s_default_mount_opts; -+ __u32 s_first_meta_bg; /* First metablock group */ -+ __u32 s_mkfs_time; /* When the filesystem was created */ -+ __u32 s_jnl_blocks[17]; /* Backup of the journal inode */ -+ __u32 s_reserved[172]; /* Padding to the end of the block */ - }; - - struct ext2_group_desc -@@ -218,6 +263,14 @@ - #define EXT2_ADDR_PER_BLOCK(s) (EXT2_BLOCK_SIZE(s) / sizeof (__u32)) - #define EXT2_ADDR_PER_BLOCK_BITS(s) (log2(EXT2_ADDR_PER_BLOCK(s))) - -+#define EXT2_GOOD_OLD_REV 0 /* The good old (original) format */ -+#define EXT2_DYNAMIC_REV 1 /* V2 format w/ dynamic inode sizes */ -+#define EXT2_GOOD_OLD_INODE_SIZE 128 -+#define EXT2_INODE_SIZE(s) (((s)->s_rev_level == EXT2_GOOD_OLD_REV) ? \ -+ EXT2_GOOD_OLD_INODE_SIZE : \ -+ (s)->s_inode_size) -+#define EXT2_INODES_PER_BLOCK(s) (EXT2_BLOCK_SIZE(s)/EXT2_INODE_SIZE(s)) -+ - /* linux/ext2_fs.h */ - #define EXT2_BLOCK_SIZE_BITS(s) ((s)->s_log_block_size + 10) - /* kind of from ext2/super.c */ -@@ -553,7 +606,7 @@ - gdp = GROUP_DESC; - ino_blk = gdp[desc].bg_inode_table + - (((current_ino - 1) % (SUPERBLOCK->s_inodes_per_group)) -- >> log2 (EXT2_BLOCK_SIZE (SUPERBLOCK) / sizeof (struct ext2_inode))); -+ >> log2 (EXT2_INODES_PER_BLOCK (SUPERBLOCK))); - #ifdef E2DEBUG - printf ("inode table fsblock=%d\n", ino_blk); - #endif /* E2DEBUG */ -@@ -565,13 +618,12 @@ - /* reset indirect blocks! */ - mapblock2 = mapblock1 = -1; - -- raw_inode = INODE + -- ((current_ino - 1) -- & (EXT2_BLOCK_SIZE (SUPERBLOCK) / sizeof (struct ext2_inode) - 1)); -+ raw_inode = (struct ext2_inode *)((char *)INODE + -+ ((current_ino - 1) & (EXT2_INODES_PER_BLOCK (SUPERBLOCK) - 1)) * -+ EXT2_INODE_SIZE (SUPERBLOCK)); - #ifdef E2DEBUG - printf ("ipb=%d, sizeof(inode)=%d\n", -- (EXT2_BLOCK_SIZE (SUPERBLOCK) / sizeof (struct ext2_inode)), -- sizeof (struct ext2_inode)); -+ EXT2_INODES_PER_BLOCK (SUPERBLOCK), EXT2_INODE_SIZE (SUPERBLOCK)); - printf ("inode=%x, raw_inode=%x\n", INODE, raw_inode); - printf ("offset into inode table block=%d\n", (int) raw_inode - (int) INODE); - for (i = (unsigned char *) INODE; i <= (unsigned char *) raw_inode; diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/grub_fix_for_automake-1.12.patch b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/grub_fix_for_automake-1.12.patch deleted file mode 100644 index 0cf7dc96d..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/grub_fix_for_automake-1.12.patch +++ /dev/null @@ -1,74 +0,0 @@ -Upstream-Status: Inappropriate - -Subject: [PATCH] grub: fix for automake-1.12 - -automake 1.12 has depricated automatic de-ANSI-fication support - -this patch avoids these kinds of errors: - -| stage1/Makefile.am:2: error: 'pkglibdir' is not a legitimate directory for 'DATA' -| stage2/Makefile.am:35: error: 'pkglibdir' is not a legitimate directory for 'DATA' -| stage2/Makefile.am:46: error: 'pkglibdir' is not a legitimate directory for 'DATA' -| autoreconf: automake failed with exit status: 1 -| ERROR: autoreconf execution failed. 
- -The upstream status is marked as 'Inappropriate' because this problem is not uncommon, -it has been there for a long time and no change in upstream. - -Signed-off-by: Chen Qi - -Index: grub-0.97/stage1/Makefile.am -=================================================================== ---- a/stage1/Makefile.am -+++ b/stage1/Makefile.am -@@ -1,7 +1,7 @@ --pkglibdir = $(libdir)/$(PACKAGE)/$(host_cpu)-$(host_vendor) --nodist_pkglib_DATA = stage1 -+pkgdatadir = $(libdir)/$(PACKAGE)/$(host_cpu)-$(host_vendor) -+nodist_pkgdata_DATA = stage1 - --CLEANFILES = $(nodist_pkglib_DATA) -+CLEANFILES = $(nodist_pkgdata_DATA) - - # We can't use builtins or standard includes. - AM_CCASFLAGS = $(STAGE1_CFLAGS) -fno-builtin -nostdinc -Index: grub-0.97/stage2/Makefile.am -=================================================================== ---- a/stage2/Makefile.am -+++ b/stage2/Makefile.am -@@ -27,12 +27,12 @@ libgrub_a_CFLAGS = $(GRUB_CFLAGS) -I$(top_srcdir)/lib \ - -DUSE_MD5_PASSWORDS=1 -DSUPPORT_SERIAL=1 -DSUPPORT_HERCULES=1 - - # Stage 2 and Stage 1.5's. --pkglibdir = $(libdir)/$(PACKAGE)/$(host_cpu)-$(host_vendor) -+pkgdatadir = $(libdir)/$(PACKAGE)/$(host_cpu)-$(host_vendor) - - EXTRA_PROGRAMS = nbloader.exec pxeloader.exec diskless.exec - - if DISKLESS_SUPPORT --pkglib_DATA = stage2 stage2_eltorito e2fs_stage1_5 fat_stage1_5 \ -+pkgdata_DATA = stage2 stage2_eltorito e2fs_stage1_5 fat_stage1_5 \ - ffs_stage1_5 iso9660_stage1_5 jfs_stage1_5 minix_stage1_5 \ - reiserfs_stage1_5 ufs2_stage1_5 vstafs_stage1_5 xfs_stage1_5 \ - nbgrub pxegrub -@@ -43,7 +43,7 @@ noinst_PROGRAMS = pre_stage2.exec start.exec start_eltorito.exec \ - reiserfs_stage1_5.exec ufs2_stage1_5.exec vstafs_stage1_5.exec \ - xfs_stage1_5.exec nbloader.exec pxeloader.exec diskless.exec - else --pkglib_DATA = stage2 stage2_eltorito e2fs_stage1_5 fat_stage1_5 \ -+pkgdata_DATA = stage2 stage2_eltorito e2fs_stage1_5 fat_stage1_5 \ - ffs_stage1_5 iso9660_stage1_5 jfs_stage1_5 minix_stage1_5 \ - reiserfs_stage1_5 ufs2_stage1_5 vstafs_stage1_5 xfs_stage1_5 - noinst_DATA = pre_stage2 start start_eltorito -@@ -105,7 +105,7 @@ else - BUILT_SOURCES = stage2_size.h - endif - --CLEANFILES = $(pkglib_DATA) $(noinst_DATA) $(BUILT_SOURCES) -+CLEANFILES = $(pkgdata_DATA) $(noinst_DATA) $(BUILT_SOURCES) - - stage2_size.h: pre_stage2 - -rm -f stage2_size.h --- -1.7.9.5 - diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/no-reorder-functions.patch b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/no-reorder-functions.patch deleted file mode 100644 index 70037e47c..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/no-reorder-functions.patch +++ /dev/null @@ -1,31 +0,0 @@ -Upstream-Status: Inappropriate [disable feature] - -After the commit "tcmode-default: switch to gcc 4.6.0 for x86, x86-64 & arm", -we got bug 1099 (http://bugzilla.yoctoproject.org/show_bug.cgi?id=1099): - -Running "install --stage2=/ssd/boot/grub/stage2 /boot/grub/stage1(hd0) - /boot/grub/stage2 p /boot/grub/menu list" failed -Error 6: Mismatched or corrupt version of stage1/stage2 - -This turned out to be a gcc's bug. See -https://bugs.gentoo.org/show_bug.cgi?id=360513 -http://gcc.gnu.org/bugzilla/show_bug.cgi?id=39333 - -Upstream gcc seems uninterested in the bug, so at present we can disable the -option as a workaround. Thanks Ryan Hill for the investigation and the -workaround patch. 
- -Dexuan Cui -Wed Jun 29 20:21:39 CST 2011 - ---- grub-0.97/stage2/Makefile.am.orig -+++ grub-0.97/stage2/Makefile.am -@@ -79,7 +79,7 @@ - HERCULES_FLAGS = - endif - --STAGE2_COMPILE = $(STAGE2_CFLAGS) -fno-builtin -nostdinc \ -+STAGE2_COMPILE = $(STAGE2_CFLAGS) -fno-reorder-functions -fno-builtin -nostdinc \ - $(NETBOOT_FLAGS) $(SERIAL_FLAGS) $(HERCULES_FLAGS) - - STAGE1_5_LINK = -nostdlib -Wl,-N -Wl,-Ttext -Wl,2000 diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/objcopy-absolute.patch b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/objcopy-absolute.patch deleted file mode 100644 index bd8e0a89f..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-0.97/objcopy-absolute.patch +++ /dev/null @@ -1,40 +0,0 @@ - -This patch is from ubuntu: - * objcopy-absolute.diff (update): Remove .note, .comment, and - .note.gnu.build-id sections from images (LP: #444703). - -Upstream-Status: Inappropriate [no longer maintained] - -Index: b/acinclude.m4 -=================================================================== ---- a/acinclude.m4 -+++ b/acinclude.m4 -@@ -61,7 +61,7 @@ - else - AC_MSG_ERROR([${CC-cc} cannot link at address $link_addr]) - fi -- if AC_TRY_COMMAND([${OBJCOPY-objcopy} -O binary conftest.exec conftest]); then : -+ if AC_TRY_COMMAND([${OBJCOPY-objcopy} --only-section=.text -O binary conftest.exec conftest]); then : - else - AC_MSG_ERROR([${OBJCOPY-objcopy} cannot create binary files]) - fi -Index: b/stage1/Makefile.am -=================================================================== ---- a/stage1/Makefile.am -+++ b/stage1/Makefile.am -@@ -12,4 +12,4 @@ - - SUFFIXES = .exec - .exec: -- $(OBJCOPY) -O binary $< $@ -+ $(OBJCOPY) -O binary -R .note -R .comment -R .note.gnu.build-id $< $@ -Index: b/stage2/Makefile.am -=================================================================== ---- a/stage2/Makefile.am -+++ b/stage2/Makefile.am -@@ -293,4 +293,4 @@ - # General rule for making a raw binary. 
- SUFFIXES = .exec - .exec: -- $(OBJCOPY) -O binary $< $@ -+ $(OBJCOPY) -O binary -R .note -R .comment -R .note.gnu.build-id $< $@ diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-efi_2.00.bb b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-efi_2.00.bb index 5a0dc954a..e12f1d773 100644 --- a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-efi_2.00.bb +++ b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub-efi_2.00.bb @@ -13,13 +13,13 @@ S = "${WORKDIR}/grub-${PV}" # Determine the target arch for the grub modules python __anonymous () { import re - target = d.getVar('TARGET_ARCH', True) + target = d.getVar('TARGET_ARCH') if target == "x86_64": grubtarget = 'x86_64' - grubimage = "bootx64.efi" + grubimage = "grub-efi-bootx64.efi" elif re.match('i.86', target): grubtarget = 'i386' - grubimage = "bootia32.efi" + grubimage = "grub-efi-bootia32.efi" else: raise bb.parse.SkipPackage("grub-efi is incompatible with target %s" % target) d.setVar("GRUB_TARGET", grubtarget) @@ -31,9 +31,9 @@ inherit deploy CACHED_CONFIGUREVARS += "ac_cv_path_HELP2MAN=" EXTRA_OECONF = "--with-platform=efi --disable-grub-mkfont \ --enable-efiemu=no --program-prefix='' \ - --enable-liblzma=no --enable-device-mapper=no --enable-libzfs=no" - -EXTRA_OECONF += "${@bb.utils.contains('DISTRO_FEATURES', 'largefile', '--enable-largefile', '--disable-largefile', d)}" + --enable-liblzma=no --enable-device-mapper=no --enable-libzfs=no \ + --enable-largefile \ +" # ldm.c:114:7: error: trampoline generated for nested function 'hook' [-Werror=trampolines] # and many other places in the grub code when compiled with some native gcc compilers (specifically, gentoo) @@ -65,5 +65,8 @@ FILES_${PN} += "${libdir}/grub/${GRUB_TARGET}-efi \ ${datadir}/grub \ " -BBCLASSEXTEND = "native" +# 64-bit binaries are expected for the bootloader with an x32 userland +INSANE_SKIP_${PN}_append_linux-gnux32 = " arch" +INSANE_SKIP_${PN}-dbg_append_linux-gnux32 = " arch" +BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub2.inc b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub2.inc index b69de9f34..a93c99e6c 100644 --- a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub2.inc +++ b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub2.inc @@ -35,6 +35,11 @@ SRC_URI = "ftp://ftp.gnu.org/gnu/grub/grub-${PV}.tar.gz \ file://0001-Enforce-no-pie-if-the-compiler-supports-it.patch \ file://0001-grub-core-kern-efi-mm.c-grub_efi_finish_boot_service.patch \ file://0002-grub-core-kern-efi-mm.c-grub_efi_get_memory_map-Neve.patch \ + file://0001-build-Use-AC_HEADER_MAJOR-to-find-device-macros.patch \ + file://0001-btrfs-avoid-used-uninitialized-error-with-GCC7.patch \ + file://0002-i386-x86_64-ppc-fix-switch-fallthrough-cases-with-GC.patch \ + file://0003-Add-gnulib-fix-gcc7-fallthrough.diff.patch \ + file://0004-Fix-remaining-cases-of-gcc-7-fallthrough-warning.patch \ " DEPENDS = "flex-native bison-native autogen-native" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_0.97.bb b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_0.97.bb deleted file mode 100644 index 997a045b2..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_0.97.bb +++ /dev/null @@ -1,35 +0,0 @@ -SUMMARY = "GRUB is the GRand Unified Bootloader" -DESCRIPTION = "GRUB is a GPLed bootloader intended to unify bootloading across x86 \ -operating systems. In addition to loading the Linux kernel, it implements the Multiboot \ -standard, which allows for flexible loading of multiple boot images." 
-HOMEPAGE = "http://www.gnu.org/software/grub/" -SECTION = "bootloaders" - -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://COPYING;md5=c93c0550bd3173f4504b2cbd8991e50b \ - file://grub/main.c;beginline=3;endline=9;md5=22a5f28d2130fff9f2a17ed54be90ed6" - -RDEPENDS_${PN} = "diffutils" -PR = "r6" - -SRC_URI = "ftp://alpha.gnu.org/gnu/grub/grub-${PV}.tar.gz; \ - file://no-reorder-functions.patch \ - file://autohell.patch \ - file://grub_fix_for_automake-1.12.patch \ - file://objcopy-absolute.patch \ - file://grub-support-256byte-inode.patch \ -" - -SRC_URI[md5sum] = "cd3f3eb54446be6003156158d51f4884" -SRC_URI[sha256sum] = "4e1d15d12dbd3e9208111d6b806ad5a9857ca8850c47877d36575b904559260b" - -inherit autotools texinfo - -COMPATIBLE_HOST = "i.86.*-linux" - -EXTRA_OECONF = "--without-curses" - -do_install_append_vmware() { - mkdir -p ${D}/boot/ - ln -sf ../usr/lib/grub/{$TARGET_ARCH}{$TARGET_VENDOR}/ ${D}/boot/grub -} diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_2.00.bb b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_2.00.bb index 07e1d101b..c3829381a 100644 --- a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_2.00.bb +++ b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_2.00.bb @@ -4,9 +4,9 @@ RDEPENDS_${PN} = "diffutils freetype grub-editenv" PR = "r1" EXTRA_OECONF = "--with-platform=pc --disable-grub-mkfont --program-prefix="" \ - --enable-liblzma=no --enable-device-mapper=no --enable-libzfs=no" - -EXTRA_OECONF += "${@bb.utils.contains('DISTRO_FEATURES', 'largefile', '--enable-largefile', '--disable-largefile', d)}" + --enable-liblzma=no --enable-device-mapper=no --enable-libzfs=no \ + --enable-largefile \ +" PACKAGES =+ "grub-editenv" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_git.bb b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_git.bb index 493b6956e..0a81e530f 100644 --- a/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_git.bb +++ b/import-layers/yocto-poky/meta/recipes-bsp/grub/grub_git.bb @@ -6,7 +6,7 @@ DEFAULT_PREFERENCE_arm = "1" FILESEXTRAPATHS =. "${FILE_DIRNAME}/grub-git:" PV = "2.00+${SRCPV}" -SRCREV = "7a5b301e3adb8e054288518a325135a1883c1c6c" +SRCREV = "ce95549cc54b5d6f494608a7c390dba3aab4fba7" SRC_URI = "git://git.savannah.gnu.org/grub.git \ file://0001-Disable-mfpmath-sse-as-well-when-SSE-is-disabled.patch \ file://autogen.sh-exclude-pc.patch \ @@ -17,8 +17,7 @@ S = "${WORKDIR}/git" COMPATIBLE_HOST = '(x86_64.*|i.86.*|arm.*|aarch64.*)-(linux.*|freebsd.*)' COMPATIBLE_HOST_armv7a = 'null' - -inherit autotools gettext texinfo +COMPATIBLE_HOST_armv7ve = 'null' # configure.ac has code to set this automagically from the target tuple # but the OE freeform one (core2-foo-bar-linux) don't work with that. 
@@ -28,9 +27,9 @@ GRUBPLATFORM_aarch64 = "efi" GRUBPLATFORM ??= "pc" EXTRA_OECONF = "--with-platform=${GRUBPLATFORM} --disable-grub-mkfont --program-prefix="" \ - --enable-liblzma=no --enable-device-mapper=no --enable-libzfs=no" - -EXTRA_OECONF += "${@bb.utils.contains('DISTRO_FEATURES', 'largefile', '--enable-largefile', '--disable-largefile', d)}" + --enable-liblzma=no --enable-device-mapper=no --enable-libzfs=no \ + --enable-largefile \ +" do_install_append () { install -d ${D}${sysconfdir}/grub.d diff --git a/import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/0001-console-Fix-C-syntax-errors-for-function-declaration.patch b/import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/0001-console-Fix-C-syntax-errors-for-function-declaration.patch deleted file mode 100644 index fa50bc4a6..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/0001-console-Fix-C-syntax-errors-for-function-declaration.patch +++ /dev/null @@ -1,74 +0,0 @@ -From 55957faf1272c8f5f304909faeebf647a78e3701 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Wed, 9 Sep 2015 07:19:45 +0000 -Subject: [PATCH] console: Fix C syntax errors for function declaration - -To address this, the semicolons after the function parameters should be -replaced by commas, and the last one should be omitted - -Signed-off-by: Khem Raj ---- -Upstream-Status: Pending - - src/efi/console.c | 26 +++++++++++++------------- - 1 file changed, 13 insertions(+), 13 deletions(-) - -diff --git a/src/efi/console.c b/src/efi/console.c -index 6206c80..66aa88f 100644 ---- a/src/efi/console.c -+++ b/src/efi/console.c -@@ -27,8 +27,8 @@ - struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL; - - typedef EFI_STATUS (EFIAPI *EFI_INPUT_RESET_EX)( -- struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This; -- BOOLEAN ExtendedVerification; -+ struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This, -+ BOOLEAN ExtendedVerification - ); - - typedef UINT8 EFI_KEY_TOGGLE_STATE; -@@ -44,29 +44,29 @@ typedef struct { - } EFI_KEY_DATA; - - typedef EFI_STATUS (EFIAPI *EFI_INPUT_READ_KEY_EX)( -- struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This; -- EFI_KEY_DATA *KeyData; -+ struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This, -+ EFI_KEY_DATA *KeyData - ); - - typedef EFI_STATUS (EFIAPI *EFI_SET_STATE)( -- struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This; -- EFI_KEY_TOGGLE_STATE *KeyToggleState; -+ struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This, -+ EFI_KEY_TOGGLE_STATE *KeyToggleState - ); - - typedef EFI_STATUS (EFIAPI *EFI_KEY_NOTIFY_FUNCTION)( -- EFI_KEY_DATA *KeyData; -+ EFI_KEY_DATA *KeyData - ); - - typedef EFI_STATUS (EFIAPI *EFI_REGISTER_KEYSTROKE_NOTIFY)( -- struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This; -- EFI_KEY_DATA KeyData; -- EFI_KEY_NOTIFY_FUNCTION KeyNotificationFunction; -- VOID **NotifyHandle; -+ struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This, -+ EFI_KEY_DATA KeyData, -+ EFI_KEY_NOTIFY_FUNCTION KeyNotificationFunction, -+ VOID **NotifyHandle - ); - - typedef EFI_STATUS (EFIAPI *EFI_UNREGISTER_KEYSTROKE_NOTIFY)( -- struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This; -- VOID *NotificationHandle; -+ struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL *This, -+ VOID *NotificationHandle - ); - - typedef struct _EFI_SIMPLE_TEXT_INPUT_EX_PROTOCOL { --- -2.5.1 - diff --git a/import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/fix-objcopy.patch b/import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/fix-objcopy.patch deleted file mode 100644 index 49f55930d..000000000 --- 
a/import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot/fix-objcopy.patch +++ /dev/null @@ -1,45 +0,0 @@ -From 0f7f9e3bb1d0e1b93f3ad8a1d5d7bdd3fbf27494 Mon Sep 17 00:00:00 2001 -From: Robert Yang -Date: Thu, 27 Mar 2014 07:20:33 +0000 -Subject: [PATCH] Makefile.am: use objcopy from the env - -It uses the "objcopy" directly, which is not suitable for cross compile. - -Upstream-Status: Pending - -Signed-off-by: Robert Yang ---- - Makefile.am | 4 +++- - 1 file changed, 3 insertions(+), 1 deletion(-) - -Index: git/Makefile.am -=================================================================== ---- git.orig/Makefile.am -+++ git/Makefile.am -@@ -19,6 +19,8 @@ - ACLOCAL_AMFLAGS = -I m4 ${ACLOCAL_FLAGS} - AM_MAKEFLAGS = --no-print-directory - -+OBJCOPY ?= objcopy -+ - gummibootlibdir = $(prefix)/lib/gummiboot - - AM_CPPFLAGS = -include config.h -@@ -148,7 +150,7 @@ $(gummiboot_solib): $(gummiboot_objects) - .DELETE_ON_ERROR: $(gummboot_solib) - - $(gummiboot): $(gummiboot_solib) -- $(AM_V_GEN) objcopy -j .text -j .sdata -j .data -j .dynamic \ -+ $(AM_V_GEN) $(OBJCOPY) -j .text -j .sdata -j .data -j .dynamic \ - -j .dynsym -j .rel -j .rela -j .reloc \ - --target=efi-app-$(ARCH) $< $@ - -@@ -183,7 +185,7 @@ $(stub_solib): $(stub_objects) - .DELETE_ON_ERROR: $(gummboot_solib) - - $(stub): $(stub_solib) -- $(AM_V_GEN) objcopy -j .text -j .sdata -j .data -j .dynamic \ -+ $(AM_V_GEN) $(OBJCOPY) -j .text -j .sdata -j .data -j .dynamic \ - -j .dynsym -j .rel -j .rela -j .reloc \ - --target=efi-app-$(ARCH) $< $@ - diff --git a/import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot_git.bb b/import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot_git.bb deleted file mode 100644 index 376ab542d..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/gummiboot/gummiboot_git.bb +++ /dev/null @@ -1,37 +0,0 @@ -SUMMARY = "Gummiboot is a simple UEFI boot manager which executes configured EFI images." -HOMEPAGE = "http://freedesktop.org/wiki/Software/gummiboot" - -LICENSE = "LGPLv2.1" -LIC_FILES_CHKSUM = "file://LICENSE;md5=4fbd65380cdd255951079008b364516c" - -DEPENDS = "gnu-efi util-linux" - -inherit autotools pkgconfig -inherit deploy - -PV = "48+git${SRCPV}" -SRCREV = "2bcd919c681c952eb867ef1bdb458f1bc49c2d55" -SRC_URI = "git://anongit.freedesktop.org/gummiboot \ - file://fix-objcopy.patch \ - file://0001-console-Fix-C-syntax-errors-for-function-declaration.patch \ - " - -# Note: Add COMPATIBLE_HOST here is only because it depends on gnu-efi -# which has set the COMPATIBLE_HOST, the gummiboot itself may work on -# more hosts. 
-COMPATIBLE_HOST = "(x86_64.*|i.86.*)-linux" - -S = "${WORKDIR}/git" - -EXTRA_OECONF = "--disable-manpages --with-efi-includedir=${STAGING_INCDIR} \ - --with-efi-ldsdir=${STAGING_LIBDIR} \ - --with-efi-libdir=${STAGING_LIBDIR}" - -EXTRA_OEMAKE += "gummibootlibdir=${libdir}/gummiboot" - -TUNE_CCARGS_remove = "-mfpmath=sse" - -do_deploy () { - install ${B}/gummiboot*.efi ${DEPLOYDIR} -} -addtask deploy before do_build after do_compile diff --git a/import-layers/yocto-poky/meta/recipes-bsp/keymaps/keymaps_1.0.bb b/import-layers/yocto-poky/meta/recipes-bsp/keymaps/keymaps_1.0.bb index 5793a7652..34b208c5b 100644 --- a/import-layers/yocto-poky/meta/recipes-bsp/keymaps/keymaps_1.0.bb +++ b/import-layers/yocto-poky/meta/recipes-bsp/keymaps/keymaps_1.0.bb @@ -37,7 +37,7 @@ do_install () { fi } -DEPENDS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}" +PACKAGE_WRITE_DEPS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}" pkg_postinst_${PN} () { if ${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','true','false',d)}; then if [ -n "$D" ]; then diff --git a/import-layers/yocto-poky/meta/recipes-bsp/pciutils/pciutils_3.5.1.bb b/import-layers/yocto-poky/meta/recipes-bsp/pciutils/pciutils_3.5.1.bb deleted file mode 100644 index d32b11942..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/pciutils/pciutils_3.5.1.bb +++ /dev/null @@ -1,60 +0,0 @@ -SUMMARY = "PCI utilities" -DESCRIPTION = 'The PCI Utilities package contains a library for portable access \ -to PCI bus configuration space and several utilities based on this library.' -HOMEPAGE = "http://atrey.karlin.mff.cuni.cz/~mj/pciutils.shtml" -SECTION = "console/utils" - -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe" -DEPENDS = "zlib kmod" - -SRC_URI = "${KERNELORG_MIRROR}/software/utils/pciutils/pciutils-${PV}.tar.xz \ - file://configure.patch \ - file://guess-fix.patch \ - file://makefile.patch" - -SRC_URI[md5sum] = "0879a8f7ac51f4e874cfc6b3521a13cc" -SRC_URI[sha256sum] = "2bf3a4605a562fb6b8b7673bff85a474a5cf383ed7e4bd8886b4f0939013d42f" - -inherit multilib_header - -PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'hwdb', '', d)}" -PACKAGECONFIG[hwdb] = "HWDB=yes,HWDB=no,udev" - -PCI_CONF_FLAG = "ZLIB=yes DNS=yes SHARED=yes" - -# see configure.patch -do_configure () { - ( - cd lib && \ - # PACKAGECONFIG_CONFARGS for this recipe could only possibly contain 'HWDB=yes/no', - # so we put it before ./configure - ${PCI_CONF_FLAG} ${PACKAGECONFIG_CONFARGS} ./configure ${PV} ${datadir} ${TARGET_OS} ${TARGET_ARCH} - ) -} - -export PREFIX = "${prefix}" -export SBINDIR = "${sbindir}" -export SHAREDIR = "${datadir}" -export MANDIR = "${mandir}" - -EXTRA_OEMAKE = "-e MAKEFLAGS= ${PCI_CONF_FLAG}" - -# The configure script breaks if the HOST variable is set -HOST[unexport] = "1" - -do_install () { - oe_runmake DESTDIR=${D} install install-lib - - install -d ${D}${bindir} - ln -s ../sbin/lspci ${D}${bindir}/lspci - - oe_multilib_header pci/config.h -} - -PACKAGES =+ "${PN}-ids libpci" -FILES_${PN}-ids = "${datadir}/pci.ids*" -FILES_libpci = "${libdir}/libpci.so.*" -SUMMARY_${PN}-ids = "PCI utilities - device ID database" -DESCRIPTION_${PN}-ids = "Package providing the PCI device ID database for pciutils." 
-RDEPENDS_${PN} += "${PN}-ids" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/pciutils/pciutils_3.5.2.bb b/import-layers/yocto-poky/meta/recipes-bsp/pciutils/pciutils_3.5.2.bb new file mode 100644 index 000000000..9a7297e21 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/pciutils/pciutils_3.5.2.bb @@ -0,0 +1,60 @@ +SUMMARY = "PCI utilities" +DESCRIPTION = 'The PCI Utilities package contains a library for portable access \ +to PCI bus configuration space and several utilities based on this library.' +HOMEPAGE = "http://atrey.karlin.mff.cuni.cz/~mj/pciutils.shtml" +SECTION = "console/utils" + +LICENSE = "GPLv2+" +LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe" +DEPENDS = "zlib kmod" + +SRC_URI = "${KERNELORG_MIRROR}/software/utils/pciutils/pciutils-${PV}.tar.xz \ + file://configure.patch \ + file://guess-fix.patch \ + file://makefile.patch" + +SRC_URI[md5sum] = "1bf5b068bd9f7512e8c68b060b25a1b2" +SRC_URI[sha256sum] = "3a99141a9f40528d0a0035665a06dc37ddb1ae341658e51b50a76ecf86235efc" + +inherit multilib_header + +PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'hwdb', '', d)}" +PACKAGECONFIG[hwdb] = "HWDB=yes,HWDB=no,udev" + +PCI_CONF_FLAG = "ZLIB=yes DNS=yes SHARED=yes" + +# see configure.patch +do_configure () { + ( + cd lib && \ + # PACKAGECONFIG_CONFARGS for this recipe could only possibly contain 'HWDB=yes/no', + # so we put it before ./configure + ${PCI_CONF_FLAG} ${PACKAGECONFIG_CONFARGS} ./configure ${PV} ${datadir} ${TARGET_OS} ${TARGET_ARCH} + ) +} + +export PREFIX = "${prefix}" +export SBINDIR = "${sbindir}" +export SHAREDIR = "${datadir}" +export MANDIR = "${mandir}" + +EXTRA_OEMAKE = "-e MAKEFLAGS= ${PCI_CONF_FLAG}" + +# The configure script breaks if the HOST variable is set +HOST[unexport] = "1" + +do_install () { + oe_runmake DESTDIR=${D} install install-lib + + install -d ${D}${bindir} + ln -s ../sbin/lspci ${D}${bindir}/lspci + + oe_multilib_header pci/config.h +} + +PACKAGES =+ "${PN}-ids libpci" +FILES_${PN}-ids = "${datadir}/pci.ids*" +FILES_libpci = "${libdir}/libpci.so.*" +SUMMARY_${PN}-ids = "PCI utilities - device ID database" +DESCRIPTION_${PN}-ids = "Package providing the PCI device ID database for pciutils." 
+RDEPENDS_${PN} += "${PN}-ids" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/pcmciautils/pcmciautils.inc b/import-layers/yocto-poky/meta/recipes-bsp/pcmciautils/pcmciautils.inc index 581bff417..052498050 100644 --- a/import-layers/yocto-poky/meta/recipes-bsp/pcmciautils/pcmciautils.inc +++ b/import-layers/yocto-poky/meta/recipes-bsp/pcmciautils/pcmciautils.inc @@ -12,6 +12,8 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/utils/kernel/pcmcia/${BP}.tar.xz" S = "${WORKDIR}/pcmciautils-${PV}" +inherit pkgconfig + export HOSTCC = "${BUILD_CC}" export etcdir = "${sysconfdir}" export sbindir = "${base_sbindir}" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb b/import-layers/yocto-poky/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb index 27cb3dbe8..cac09101c 100644 --- a/import-layers/yocto-poky/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb +++ b/import-layers/yocto-poky/meta/recipes-bsp/pm-utils/pm-utils_1.4.1.bb @@ -13,7 +13,9 @@ SRC_URI = "http://pm-utils.freedesktop.org/releases/pm-utils-${PV}.tar.gz" SRC_URI[md5sum] = "1742a556089c36c3a89eb1b957da5a60" SRC_URI[sha256sum] = "8ed899032866d88b2933a1d34cc75e8ae42dcde20e1cc21836baaae3d4370c0b" -inherit pkgconfig autotools +inherit pkgconfig autotools manpages + +PACKAGECONFIG[manpages] = "--enable-doc, --disable-doc, libxslt-native xmlto-native" RDEPENDS_${PN} = "grep bash" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/systemd-boot/systemd-boot.bb b/import-layers/yocto-poky/meta/recipes-bsp/systemd-boot/systemd-boot.bb deleted file mode 100644 index 5b1164ec9..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/systemd-boot/systemd-boot.bb +++ /dev/null @@ -1,38 +0,0 @@ -require recipes-core/systemd/systemd.inc - -DEPENDS = "intltool-native libcap util-linux gnu-efi" - -SRC_URI += "file://0001-use-lnr-wrapper-instead-of-looking-for-relative-opti.patch" - -inherit autotools pkgconfig gettext -inherit deploy - -EXTRA_OECONF = " --enable-gnuefi \ - --with-efi-includedir=${STAGING_INCDIR} \ - --with-efi-ldsdir=${STAGING_LIBDIR} \ - --with-efi-libdir=${STAGING_LIBDIR} \ - --disable-manpages \ - " - -# Imported from gummiboot recipe -TUNE_CCARGS_remove = "-mfpmath=sse" -COMPATIBLE_HOST = "(x86_64.*|i.86.*)-linux" - -do_compile() { - SYSTEMD_BOOT_EFI_ARCH="ia32" - if [ "${TARGET_ARCH}" = "x86_64" ]; then - SYSTEMD_BOOT_EFI_ARCH="x64" - fi - - oe_runmake systemd-boot${SYSTEMD_BOOT_EFI_ARCH}.efi -} - -do_install() { - # Bypass systemd installation with a NOP - : -} - -do_deploy () { - install ${B}/systemd-boot*.efi ${DEPLOYDIR} -} -addtask deploy before do_build after do_compile diff --git a/import-layers/yocto-poky/meta/recipes-bsp/systemd-boot/systemd-boot_232.bb b/import-layers/yocto-poky/meta/recipes-bsp/systemd-boot/systemd-boot_232.bb new file mode 100644 index 000000000..0471ce246 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/systemd-boot/systemd-boot_232.bb @@ -0,0 +1,39 @@ +require recipes-core/systemd/systemd.inc + +DEPENDS = "intltool-native libcap util-linux gnu-efi gperf-native" + +SRC_URI += "file://0001-use-lnr-wrapper-instead-of-looking-for-relative-opti.patch" + +inherit autotools pkgconfig gettext +inherit deploy + +# Man pages are packaged through the main systemd recipe +EXTRA_OECONF = " --enable-gnuefi \ + --with-efi-includedir=${STAGING_INCDIR} \ + --with-efi-ldsdir=${STAGING_LIBDIR} \ + --with-efi-libdir=${STAGING_LIBDIR} \ + --disable-manpages \ + " + +# Imported from the old gummiboot recipe +TUNE_CCARGS_remove = "-mfpmath=sse" +COMPATIBLE_HOST = 
"(x86_64.*|i.86.*)-linux" + +do_compile() { + SYSTEMD_BOOT_EFI_ARCH="ia32" + if [ "${TARGET_ARCH}" = "x86_64" ]; then + SYSTEMD_BOOT_EFI_ARCH="x64" + fi + + oe_runmake systemd-boot${SYSTEMD_BOOT_EFI_ARCH}.efi +} + +do_install() { + # Bypass systemd installation with a NOP + : +} + +do_deploy () { + install ${B}/systemd-boot*.efi ${DEPLOYDIR} +} +addtask deploy before do_build after do_compile diff --git a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/files/default-gcc.patch b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/files/default-gcc.patch new file mode 100644 index 000000000..04184df8b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/files/default-gcc.patch @@ -0,0 +1,39 @@ +OE needs to be able to change the default compiler. If we pass in HOSTCC +through the make command, it overwrites not only this setting but also the +setting in tools/Makefile wrapped in ifneq ($(CROSS_BUILD_TOOLS),) which +breaks the build. + +We therefore use override to ensure the value of HOSTCC is overwritten when +needed. + +RP: Updated the patch to the version being submitted to upstream u-boot + +Upstream-Status: Submitted [emailed to Masahiro Yamada for discussion] +RP 2017/3/11 + +Index: git/tools/Makefile +=================================================================== +--- git.orig/tools/Makefile ++++ git/tools/Makefile +@@ -262,7 +262,7 @@ $(LICENSE_H): $(obj)/bin2header $(srctre + subdir- += env + + ifneq ($(CROSS_BUILD_TOOLS),) +-HOSTCC = $(CC) ++override HOSTCC = $(CC) + + quiet_cmd_crosstools_strip = STRIP $^ + cmd_crosstools_strip = $(STRIP) $^; touch $@ +Index: git/tools/env/Makefile +=================================================================== +--- git.orig/tools/env/Makefile ++++ git/tools/env/Makefile +@@ -8,7 +8,7 @@ + # fw_printenv is supposed to run on the target system, which means it should be + # built with cross tools. Although it may look weird, we only replace "HOSTCC" + # with "CC" here for the maximum code reuse of scripts/Makefile.host. 
+-HOSTCC = $(CC) ++override HOSTCC = $(CC) + + # Compile for a hosted environment on the target + HOST_EXTRACFLAGS = $(patsubst -I%,-idirafter%, $(filter -I%, $(UBOOTINCLUDE))) \ diff --git a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-common_2017.01.inc b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-common_2017.01.inc new file mode 100644 index 000000000..df24c853d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-common_2017.01.inc @@ -0,0 +1,14 @@ +HOMEPAGE = "http://www.denx.de/wiki/U-Boot/WebHome" +SECTION = "bootloaders" + +LICENSE = "GPLv2+" +LIC_FILES_CHKSUM = "file://Licenses/README;md5=a2c678cfd4a4d97135585cad908541c6" +PE = "1" + +# We use the revision in order to avoid having to fetch it from the +# repo during parse +SRCREV = "a705ebc81b7f91bbd0ef7c634284208342901149" + +SRC_URI = "git://git.denx.de/u-boot.git" + +S = "${WORKDIR}/git" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-fw-utils_2016.03.bb b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-fw-utils_2016.03.bb deleted file mode 100644 index 79f1548ef..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-fw-utils_2016.03.bb +++ /dev/null @@ -1,46 +0,0 @@ -SUMMARY = "U-Boot bootloader fw_printenv/setenv utilities" -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://Licenses/README;md5=a2c678cfd4a4d97135585cad908541c6" -SECTION = "bootloader" -DEPENDS = "mtd-utils" - -# This revision corresponds to the tag "v2016.03" -# We use the revision in order to avoid having to fetch it from the -# repo during parse -SRCREV = "df61a74e6845ec9bdcdd48d2aff5e9c2c6debeaa" - -PV = "v2016.03+git${SRCPV}" - -SRC_URI = "git://git.denx.de/u-boot.git;branch=master" - -S = "${WORKDIR}/git" - -INSANE_SKIP_${PN} = "already-stripped" -EXTRA_OEMAKE_class-target = 'CROSS_COMPILE=${TARGET_PREFIX} CC="${CC} ${CFLAGS} ${LDFLAGS}" V=1' -EXTRA_OEMAKE_class-cross = 'ARCH=${TARGET_ARCH} CC="${CC} ${CFLAGS} ${LDFLAGS}" V=1' - -inherit uboot-config - -do_compile () { - oe_runmake ${UBOOT_MACHINE} - oe_runmake env -} - -do_install () { - install -d ${D}${base_sbindir} - install -d ${D}${sysconfdir} - install -m 755 ${S}/tools/env/fw_printenv ${D}${base_sbindir}/fw_printenv - install -m 755 ${S}/tools/env/fw_printenv ${D}${base_sbindir}/fw_setenv - install -m 0644 ${S}/tools/env/fw_env.config ${D}${sysconfdir}/fw_env.config -} - -do_install_class-cross () { - install -d ${D}${bindir_cross} - install -m 755 ${S}/tools/env/fw_printenv ${D}${bindir_cross}/fw_printenv - install -m 755 ${S}/tools/env/fw_printenv ${D}${bindir_cross}/fw_setenv -} - -SYSROOT_DIRS_append_class-cross = " ${bindir_cross}" - -PACKAGE_ARCH = "${MACHINE_ARCH}" -BBCLASSEXTEND = "cross" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-fw-utils_2017.01.bb b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-fw-utils_2017.01.bb new file mode 100644 index 000000000..26314990b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-fw-utils_2017.01.bb @@ -0,0 +1,36 @@ +require u-boot-common_${PV}.inc + +SRC_URI += "file://default-gcc.patch" + +SUMMARY = "U-Boot bootloader fw_printenv/setenv utilities" +DEPENDS = "mtd-utils" + +INSANE_SKIP_${PN} = "already-stripped" +EXTRA_OEMAKE_class-target = 'CROSS_COMPILE=${TARGET_PREFIX} CC="${CC} ${CFLAGS} ${LDFLAGS}" HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" V=1' +EXTRA_OEMAKE_class-cross = 'ARCH=${TARGET_ARCH} CC="${CC} ${CFLAGS} ${LDFLAGS}" V=1' + +inherit uboot-config + +do_compile () { + oe_runmake 
${UBOOT_MACHINE} + oe_runmake env +} + +do_install () { + install -d ${D}${base_sbindir} + install -d ${D}${sysconfdir} + install -m 755 ${S}/tools/env/fw_printenv ${D}${base_sbindir}/fw_printenv + install -m 755 ${S}/tools/env/fw_printenv ${D}${base_sbindir}/fw_setenv + install -m 0644 ${S}/tools/env/fw_env.config ${D}${sysconfdir}/fw_env.config +} + +do_install_class-cross () { + install -d ${D}${bindir_cross} + install -m 755 ${S}/tools/env/fw_printenv ${D}${bindir_cross}/fw_printenv + install -m 755 ${S}/tools/env/fw_printenv ${D}${bindir_cross}/fw_setenv +} + +SYSROOT_DIRS_append_class-cross = " ${bindir_cross}" + +PACKAGE_ARCH = "${MACHINE_ARCH}" +BBCLASSEXTEND = "cross" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-mkimage_2016.03.bb b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-mkimage_2016.03.bb deleted file mode 100644 index d5921a044..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-mkimage_2016.03.bb +++ /dev/null @@ -1,32 +0,0 @@ -SUMMARY = "U-Boot bootloader image creation tool" -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://Licenses/README;md5=a2c678cfd4a4d97135585cad908541c6" -SECTION = "bootloader" - -DEPENDS = "openssl" - -# This revision corresponds to the tag "v2016.03" -# We use the revision in order to avoid having to fetch it from the -# repo during parse -SRCREV = "df61a74e6845ec9bdcdd48d2aff5e9c2c6debeaa" - -PV = "v2016.03+git${SRCPV}" - -SRC_URI = "git://git.denx.de/u-boot.git;branch=master" - -S = "${WORKDIR}/git" - -EXTRA_OEMAKE = 'CROSS_COMPILE="${TARGET_PREFIX}" CC="${CC} ${CFLAGS} ${LDFLAGS}" STRIP=true V=1' - -do_compile () { - oe_runmake sandbox_defconfig - oe_runmake cross_tools NO_SDL=1 -} - -do_install () { - install -d ${D}${bindir} - install -m 0755 tools/mkimage ${D}${bindir}/uboot-mkimage - ln -sf uboot-mkimage ${D}${bindir}/mkimage -} - -BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-mkimage_2017.01.bb b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-mkimage_2017.01.bb new file mode 100644 index 000000000..de999e7cd --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot-mkimage_2017.01.bb @@ -0,0 +1,29 @@ +require u-boot-common_${PV}.inc + +SRC_URI += "file://default-gcc.patch" + +SUMMARY = "U-Boot bootloader image creation tool" +DEPENDS = "openssl" + +EXTRA_OEMAKE_class-target = 'CROSS_COMPILE="${TARGET_PREFIX}" CC="${CC} ${CFLAGS} ${LDFLAGS}" HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" STRIP=true V=1' +EXTRA_OEMAKE_class-native = 'CC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" STRIP=true V=1' +EXTRA_OEMAKE_class-nativesdk = 'CROSS_COMPILE="${HOST_PREFIX}" CC="${CC} ${CFLAGS} ${LDFLAGS}" HOSTCC="${BUILD_CC} ${BUILD_CFLAGS} ${BUILD_LDFLAGS}" STRIP=true V=1' + +do_compile () { + oe_runmake sandbox_defconfig + + # Disable CONFIG_CMD_LICENSE, license.h is not used by tools and + # generating it requires bin2header tool, which for target build + # is built with target tools and thus cannot be executed on host. 
+ sed -i "s/CONFIG_CMD_LICENSE=.*/# CONFIG_CMD_LICENSE is not set/" .config + + oe_runmake cross_tools NO_SDL=1 +} + +do_install () { + install -d ${D}${bindir} + install -m 0755 tools/mkimage ${D}${bindir}/uboot-mkimage + ln -sf uboot-mkimage ${D}${bindir}/mkimage +} + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot.inc b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot.inc index 252aae945..aa21c0e55 100644 --- a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot.inc +++ b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot.inc @@ -1,14 +1,6 @@ SUMMARY = "Universal Boot Loader for embedded devices" -HOMEPAGE = "http://www.denx.de/wiki/U-Boot/WebHome" -SECTION = "bootloaders" PROVIDES = "virtual/bootloader" -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://Licenses/README;md5=a2c678cfd4a4d97135585cad908541c6" - -SRC_URI = "git://git.denx.de/u-boot.git;branch=master" - -S = "${WORKDIR}/git" B = "${WORKDIR}/build" PACKAGE_ARCH = "${MACHINE_ARCH}" @@ -50,7 +42,7 @@ UBOOT_ELF_SYMLINK ?= "u-boot-${MACHINE}.${UBOOT_ELF_SUFFIX}" # deploy directory. For those versions they can set the following variables # to allow packaging the SPL. SPL_BINARY ?= "" -SPL_BINARYNAME ?= "${@os.path.basename(d.getVar("SPL_BINARY", True))}" +SPL_BINARYNAME ?= "${@os.path.basename(d.getVar("SPL_BINARY"))}" SPL_IMAGE ?= "${SPL_BINARYNAME}-${MACHINE}-${PV}-${PR}" SPL_SYMLINK ?= "${SPL_BINARYNAME}-${MACHINE}" @@ -72,7 +64,7 @@ UBOOT_EXTLINUX_CONF_NAME ?= "extlinux.conf" UBOOT_EXTLINUX_SYMLINK ?= "${UBOOT_EXTLINUX_CONF_NAME}-${MACHINE}-${PR}" do_compile () { - if [ "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', 'ld-is-gold', '', d)}" = "ld-is-gold" ] ; then + if [ "${@bb.utils.filter('DISTRO_FEATURES', 'ld-is-gold', d)}" ]; then sed -i 's/$(CROSS_COMPILE)ld$/$(CROSS_COMPILE)ld.bfd/g' ${S}/config.mk fi @@ -312,4 +304,4 @@ do_deploy () { fi } -addtask deploy before do_build after do_compile +addtask deploy before do_build after do_install diff --git a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot_2016.03.bb b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot_2016.03.bb deleted file mode 100644 index 836b0ce03..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot_2016.03.bb +++ /dev/null @@ -1,10 +0,0 @@ -require u-boot.inc - -DEPENDS += "dtc-native" - -# This revision corresponds to the tag "v2016.03" -# We use the revision in order to avoid having to fetch it from the -# repo during parse -SRCREV = "df61a74e6845ec9bdcdd48d2aff5e9c2c6debeaa" - -PV = "v2016.03+git${SRCPV}" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot_2017.01.bb b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot_2017.01.bb new file mode 100644 index 000000000..37c21dcaa --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-bsp/u-boot/u-boot_2017.01.bb @@ -0,0 +1,4 @@ +require u-boot-common_${PV}.inc +require u-boot.inc + +DEPENDS += "bc-native dtc-native" diff --git a/import-layers/yocto-poky/meta/recipes-bsp/usbutils/usbutils_008.bb b/import-layers/yocto-poky/meta/recipes-bsp/usbutils/usbutils_008.bb index 75312c3aa..d3c5bd52a 100644 --- a/import-layers/yocto-poky/meta/recipes-bsp/usbutils/usbutils_008.bb +++ b/import-layers/yocto-poky/meta/recipes-bsp/usbutils/usbutils_008.bb @@ -21,5 +21,5 @@ inherit autotools gettext pkgconfig distro_features_check FILES_${PN}-dev += "${datadir}/pkgconfig" -RDEPENDS_${PN} = "libudev" +RRECOMMENDS_${PN} = "udev-hwdb" RDEPENDS_${PN}-ptest = "libboost-system libboost-thread" 
diff --git a/import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d/fbsetup b/import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d/fbsetup deleted file mode 100755 index 2a409cc52..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d/fbsetup +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/sh - -/sbin/modprobe uvesafb diff --git a/import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d/uvesafb.conf b/import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d/uvesafb.conf deleted file mode 100644 index 43789755d..000000000 --- a/import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d/uvesafb.conf +++ /dev/null @@ -1,2 +0,0 @@ -# Load uvesafb.ko at boot -uvesafb diff --git a/import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d_0.1.10.bb b/import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d_0.1.10.bb index 1046d6341..e5f6fff73 100644 --- a/import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d_0.1.10.bb +++ b/import-layers/yocto-poky/meta/recipes-bsp/v86d/v86d_0.1.10.bb @@ -9,23 +9,18 @@ DEPENDS = "virtual/kernel" RRECOMMENDS_${PN} = "kernel-module-uvesafb" PR = "r2" -SRC_URI = "http://distfiles.gentoo.org/distfiles/${BP}.tar.bz2 \ +SRC_URI = "${DEBIAN_MIRROR}/main/v/${BPN}/${BPN}_${PV}.orig.tar.gz \ file://Update-x86emu-from-X.org.patch \ - file://fbsetup \ - file://uvesafb.conf \ file://ar-from-env.patch \ file://aarch64-host.patch \ " -SRC_URI[md5sum] = "51c792ba7b874ad8c43f0d3da4cfabe0" -SRC_URI[sha256sum] = "634964ae18ef68c8493add2ce150e3b4502badeb0d9194b4bd81241d25e6735c" +SRC_URI[md5sum] = "889686ec8424468fe0d205742e77a4c2" +SRC_URI[sha256sum] = "93575c82e4307d8c4c370ec6b767f5cf87e527b2378146d652a6d8e25d5bdbc5" PACKAGE_ARCH = "${MACHINE_ARCH}" COMPATIBLE_HOST = '(i.86|x86_64).*-linux' -INITSCRIPT_NAME = "fbsetup" -INITSCRIPT_PARAMS = "start 0 S ." - do_configure () { ./configure --with-x86emu } @@ -37,35 +32,4 @@ do_compile () { do_install () { install -d ${D}${base_sbindir} install v86d ${D}${base_sbindir}/ - - # Only install fbsetup script if 'sysvinit' is in DISTRO_FEATURES - if ${@bb.utils.contains('DISTRO_FEATURES','sysvinit','true','false',d)}; then - install -d ${D}${sysconfdir}/init.d/ - install -m 0755 ${WORKDIR}/fbsetup ${D}${sysconfdir}/init.d/fbsetup - fi - - # Install systemd related configuration file - if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then - install -d ${D}${sysconfdir}/modules-load.d - install -m 0644 ${WORKDIR}/uvesafb.conf ${D}${sysconfdir}/modules-load.d - fi -} - -# As the recipe doesn't inherit systemd.bbclass, we need to set this variable -# manually to avoid unnecessary postinst/preinst generated. 
-python __anonymous() { - if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d): - d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1") -} - -inherit update-rc.d - -DEPENDS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}" -pkg_postinst_${PN} () { - if ${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','true','false',d)}; then - if [ -n "$D" ]; then - OPTS="--root=$D" - fi - systemctl $OPTS mask fbsetup.service - fi } diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/avahi/avahi.inc b/import-layers/yocto-poky/meta/recipes-connectivity/avahi/avahi.inc index 234646d29..faa8741dc 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/avahi/avahi.inc +++ b/import-layers/yocto-poky/meta/recipes-connectivity/avahi/avahi.inc @@ -54,7 +54,7 @@ EXTRA_OECONF = "--with-avahi-priv-access-group=adm \ --disable-qt4 \ --disable-python \ --disable-doxygen-doc \ - --disable-manpages \ + --enable-manpages \ ${EXTRA_OECONF_SYSVINIT} \ ${EXTRA_OECONF_SYSTEMD} \ " @@ -153,13 +153,3 @@ if [ -z "$D" ]; then killall -q -HUP dbus-daemon || true fi } - -pkg_postrm_avahi-daemon () { - deluser avahi || true - delgroup avahi || true -} - -pkg_postrm_avahi-autoipd () { - deluser avahi-autoipd || true - delgroup avahi-autoipd || true -} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/bind/bind_9.10.3-P3.bb b/import-layers/yocto-poky/meta/recipes-connectivity/bind/bind_9.10.3-P3.bb index 816062528..a80227482 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/bind/bind_9.10.3-P3.bb +++ b/import-layers/yocto-poky/meta/recipes-connectivity/bind/bind_9.10.3-P3.bb @@ -85,7 +85,7 @@ do_install_append() { install -d "${D}${sysconfdir}/init.d" install -m 644 ${S}/conf/* "${D}${sysconfdir}/bind/" install -m 755 "${S}/init.d" "${D}${sysconfdir}/init.d/bind" - sed -i -e '1s,#!.*python,#! /usr/bin/python3,' ${D}${sbindir}/dnssec-coverage ${D}${sbindir}/dnssec-checkds + sed -i -e '1s,#!.*python3,#! 
/usr/bin/python3,' ${D}${sbindir}/dnssec-coverage ${D}${sbindir}/dnssec-checkds # Install systemd related files install -d ${D}${sbindir} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5.inc b/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5.inc index ecefb7b59..882873a48 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5.inc +++ b/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5.inc @@ -6,15 +6,16 @@ LICENSE = "GPLv2+ & LGPLv2.1+" LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e \ file://COPYING.LIB;md5=fb504b67c50331fc78734fed90fb0e09 \ file://src/main.c;beginline=1;endline=24;md5=9bc54b93cd7e17bf03f52513f39f926e" -DEPENDS = "udev libusb dbus-glib glib-2.0 libcheck readline" +DEPENDS = "udev libusb dbus-glib glib-2.0 libcheck" PROVIDES += "bluez-hcidump" RPROVIDES_${PN} += "bluez-hcidump" RCONFLICTS_${PN} = "bluez4" -PACKAGECONFIG ??= "obex-profiles" +PACKAGECONFIG ??= "obex-profiles readline" PACKAGECONFIG[obex-profiles] = "--enable-obex,--disable-obex,libical" PACKAGECONFIG[experimental] = "--enable-experimental,--disable-experimental," +PACKAGECONFIG[readline] = "--enable-client,--disable-client,readline," SRC_URI = "\ ${KERNELORG_MIRROR}/linux/bluetooth/bluez-${PV}.tar.xz \ @@ -23,9 +24,12 @@ SRC_URI = "\ file://run-ptest \ ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', '', 'file://0001-Allow-using-obexd-without-systemd-in-the-user-sessio.patch', d)} \ file://0001-tests-add-a-target-for-building-tests-without-runnin.patch \ + file://cve-2017-1000250.patch \ " S = "${WORKDIR}/bluez-${PV}" +CVE_PRODUCT = "bluez" + inherit autotools pkgconfig systemd update-rc.d distro_features_check ptest EXTRA_OECONF = "\ @@ -42,7 +46,7 @@ EXTRA_OECONF = "\ NOINST_TOOLS_READLINE ??= "" NOINST_TOOLS_EXPERIMENTAL ??= "" NOINST_TOOLS = " \ - ${NOINST_TOOLS_READLINE} \ + ${@bb.utils.contains('PACKAGECONFIG', 'readline', '${NOINST_TOOLS_READLINE}', '', d)} \ ${@bb.utils.contains('PACKAGECONFIG', 'experimental', '${NOINST_TOOLS_EXPERIMENTAL}', '', d)} \ " @@ -95,13 +99,13 @@ FILES_${PN}-testtools = "${libdir}/bluez/test/*" def get_noinst_tools_paths (d, bb, tools): s = list() - bindir = d.getVar("bindir", True) + bindir = d.getVar("bindir") for bdp in tools.split(): f = os.path.basename(bdp) s.append("%s/%s" % (bindir, f)) return "\n".join(s) -FILES_${PN}-noinst-tools = "${@get_noinst_tools_paths(d, bb, d.getVar('NOINST_TOOLS', True))}" +FILES_${PN}-noinst-tools = "${@get_noinst_tools_paths(d, bb, d.getVar('NOINST_TOOLS'))}" RDEPENDS_${PN}-testtools += "python3 python3-dbus python3-pygobject" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5/cve-2017-1000250.patch b/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5/cve-2017-1000250.patch new file mode 100644 index 000000000..9fac961bc --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5/cve-2017-1000250.patch @@ -0,0 +1,34 @@ +All versions of the SDP server in BlueZ 5.46 and earlier are vulnerable to an +information disclosure vulnerability which allows remote attackers to obtain +sensitive information from the bluetoothd process memory. This vulnerability +lies in the processing of SDP search attribute requests. 
+ +CVE: CVE-2017-1000250 +Upstream-Status: Backport +Signed-off-by: Ross Burton + +From 9e009647b14e810e06626dde7f1bb9ea3c375d09 Mon Sep 17 00:00:00 2001 +From: Luiz Augusto von Dentz +Date: Wed, 13 Sep 2017 10:01:40 +0300 +Subject: sdp: Fix Out-of-bounds heap read in service_search_attr_req function + +Check if there is enough data to continue otherwise return an error. +--- + src/sdpd-request.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/sdpd-request.c b/src/sdpd-request.c +index 1eefdce..318d044 100644 +--- a/src/sdpd-request.c ++++ b/src/sdpd-request.c +@@ -917,7 +917,7 @@ static int service_search_attr_req(sdp_req_t *req, sdp_buf_t *buf) + } else { + /* continuation State exists -> get from cache */ + sdp_buf_t *pCache = sdp_get_cached_rsp(cstate); +- if (pCache) { ++ if (pCache && cstate->cStateValue.maxBytesSent < pCache->data_size) { + uint16_t sent = MIN(max, pCache->data_size - cstate->cStateValue.maxBytesSent); + pResponse = pCache->data; + memcpy(buf->data, pResponse + cstate->cStateValue.maxBytesSent, sent); +-- +cgit v1.1 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5_5.41.bb b/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5_5.41.bb deleted file mode 100644 index 522aab7d5..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5_5.41.bb +++ /dev/null @@ -1,55 +0,0 @@ -require bluez5.inc - -REQUIRED_DISTRO_FEATURES = "bluez5" - -SRC_URI[md5sum] = "318341b2188698130adb73236ee69244" -SRC_URI[sha256sum] = "df7dc4462494dad4e60a2943240d584f6e760235dca64f5f10eba46dbab7f5f0" - -# noinst programs in Makefile.tools that are conditional on READLINE -# support -NOINST_TOOLS_READLINE ?= " \ - attrib/gatttool \ - tools/obex-client-tool \ - tools/obex-server-tool \ - tools/bluetooth-player \ - tools/obexctl \ - tools/btmgmt \ -" - -# noinst programs in Makefile.tools that are conditional on EXPERIMENTAL -# support -NOINST_TOOLS_EXPERIMENTAL ?= " \ - emulator/btvirt \ - emulator/b1ee \ - emulator/hfp \ - tools/3dsp \ - tools/mgmt-tester \ - tools/gap-tester \ - tools/l2cap-tester \ - tools/sco-tester \ - tools/smp-tester \ - tools/hci-tester \ - tools/rfcomm-tester \ - tools/bdaddr \ - tools/avinfo \ - tools/avtest \ - tools/scotest \ - tools/amptest \ - tools/hwdb \ - tools/hcieventmask \ - tools/hcisecfilter \ - tools/btinfo \ - tools/btattach \ - tools/btsnoop \ - tools/btproxy \ - tools/btiotest \ - tools/mcaptest \ - tools/cltest \ - tools/oobtest \ - tools/seq2bseq \ - tools/ibeacon \ - tools/btgatt-client \ - tools/btgatt-server \ - tools/gatt-service \ - profiles/iap/iapd \ -" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5_5.43.bb b/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5_5.43.bb new file mode 100644 index 000000000..e10b82dd6 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/bluez5/bluez5_5.43.bb @@ -0,0 +1,55 @@ +require bluez5.inc + +REQUIRED_DISTRO_FEATURES = "bluez5" + +SRC_URI[md5sum] = "698def88df96840dfbb0858bb6d73350" +SRC_URI[sha256sum] = "16c9c05d2a1da644ce3570d975ada3643d2e60c007a955bac09c0a0efeb58d15" + +# noinst programs in Makefile.tools that are conditional on READLINE +# support +NOINST_TOOLS_READLINE ?= " \ + attrib/gatttool \ + tools/obex-client-tool \ + tools/obex-server-tool \ + tools/bluetooth-player \ + tools/obexctl \ + tools/btmgmt \ +" + +# noinst programs in Makefile.tools that are conditional on EXPERIMENTAL +# support +NOINST_TOOLS_EXPERIMENTAL ?= " \ + emulator/btvirt \ + 
emulator/b1ee \ + emulator/hfp \ + tools/3dsp \ + tools/mgmt-tester \ + tools/gap-tester \ + tools/l2cap-tester \ + tools/sco-tester \ + tools/smp-tester \ + tools/hci-tester \ + tools/rfcomm-tester \ + tools/bdaddr \ + tools/avinfo \ + tools/avtest \ + tools/scotest \ + tools/amptest \ + tools/hwdb \ + tools/hcieventmask \ + tools/hcisecfilter \ + tools/btinfo \ + tools/btattach \ + tools/btsnoop \ + tools/btproxy \ + tools/btiotest \ + tools/mcaptest \ + tools/cltest \ + tools/oobtest \ + tools/seq2bseq \ + tools/ibeacon \ + tools/btgatt-client \ + tools/btgatt-server \ + tools/gatt-service \ + profiles/iap/iapd \ +" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman.inc b/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman.inc index 35a7eed0a..64a5418c6 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman.inc +++ b/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman.inc @@ -31,10 +31,8 @@ EXTRA_OECONF += "\ " PACKAGECONFIG ??= "wispr \ - ${@bb.utils.contains('DISTRO_FEATURES', 'systemd','systemd', '', d)} \ - ${@bb.utils.contains('DISTRO_FEATURES', 'wifi','wifi', '', d)} \ + ${@bb.utils.filter('DISTRO_FEATURES', '3g systemd wifi', d)} \ ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez', '', d)} \ - ${@bb.utils.contains('DISTRO_FEATURES', '3g','3g', '', d)} \ " # If you want ConnMan to support VPN, add following statement into @@ -58,7 +56,7 @@ INITSCRIPT_PARAMS = "start 05 5 2 3 . stop 22 0 1 6 ." python __anonymous () { systemd_packages = "${PN}" - pkgconfig = d.getVar('PACKAGECONFIG', True) + pkgconfig = d.getVar('PACKAGECONFIG') if ('openvpn' or 'vpnc' or 'l2tp' or 'pptp') in pkgconfig.split(): systemd_packages += " ${PN}-vpn" d.setVar('SYSTEMD_PACKAGES', systemd_packages) @@ -116,7 +114,7 @@ def add_rdepends(bb, d, file, pkg, depmap, multilib_prefix, add_insane_skip): python populate_packages_prepend() { depmap = dict(pppd="ppp") - multilib_prefix = (d.getVar("MLPREFIX", True) or "") + multilib_prefix = (d.getVar("MLPREFIX") or "") hook = lambda file,pkg,x,y,z: \ add_rdepends(bb, d, file, pkg, depmap, multilib_prefix, False) diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman/0001-Fix-compile-on-musl-with-kernel-4.9-headers.patch b/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman/0001-Fix-compile-on-musl-with-kernel-4.9-headers.patch new file mode 100644 index 000000000..bf3b86d86 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman/0001-Fix-compile-on-musl-with-kernel-4.9-headers.patch @@ -0,0 +1,64 @@ +From c8bfad4ee9d2c505c00ccbb8b2139543b5ad6fcb Mon Sep 17 00:00:00 2001 +From: Jussi Kukkonen +Date: Mon, 23 Jan 2017 17:41:39 +0200 +Subject: [PATCH] Fix compile on musl with kernel 4.9 headers + +Kernel headers break when musl defines IFF_LOWER_UP. While +waiting for more proper fix in musl, add a hack to connman. + +Upstream-Status: Inappropriate [Workaround] +Signed-off-by: Jussi Kukkonen +--- + src/6to4.c | 4 ++++ + src/firewall.c | 4 ++++ + src/iptables.c | 4 ++++ + 3 files changed, 12 insertions(+) + +diff --git a/src/6to4.c b/src/6to4.c +index 71a2882..1938afb 100644 +--- a/src/6to4.c ++++ b/src/6to4.c +@@ -24,6 +24,10 @@ + #include + #endif + ++/* hack to make sure kernel headers understand that libc (musl) ++ does define IFF_LOWER_UP et al. 
*/ ++#define __UAPI_DEF_IF_NET_DEVICE_FLAGS_LOWER_UP_DORMANT_ECHO 0 ++ + #include + #include + #include +diff --git a/src/firewall.c b/src/firewall.c +index c440df6..c83def9 100644 +--- a/src/firewall.c ++++ b/src/firewall.c +@@ -23,6 +23,10 @@ + #include + #endif + ++/* hack to make sure kernel headers understand that libc (musl) ++ does define IFF_LOWER_UP et al. */ ++#define __UAPI_DEF_IF_NET_DEVICE_FLAGS_LOWER_UP_DORMANT_ECHO 0 ++ + #include + + #include +diff --git a/src/iptables.c b/src/iptables.c +index 82e3ac4..46ad9e2 100644 +--- a/src/iptables.c ++++ b/src/iptables.c +@@ -23,6 +23,10 @@ + #include + #endif + ++/* hack to make sure kernel headers understand that libc (musl) ++ does define IFF_LOWER_UP et al. */ ++#define __UAPI_DEF_IF_NET_DEVICE_FLAGS_LOWER_UP_DORMANT_ECHO 0 ++ + #include + #include + #include +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman_1.33.bb b/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman_1.33.bb index d8793ac8b..ee04d9b35 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman_1.33.bb +++ b/import-layers/yocto-poky/meta/recipes-connectivity/connman/connman_1.33.bb @@ -8,7 +8,8 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \ file://0003-stats-Fix-bad-file-descriptor-initialisation.patch \ file://CVE-2017-12865.patch \ " -SRC_URI_append_libc-musl = " file://0002-resolve-musl-does-not-implement-res_ninit.patch" +SRC_URI_append_libc-musl = " file://0002-resolve-musl-does-not-implement-res_ninit.patch \ + file://0001-Fix-compile-on-musl-with-kernel-4.9-headers.patch" SRC_URI[md5sum] = "c51903fd3e7a6a371d12ac5d72a1fa01" SRC_URI[sha256sum] = "bc8946036fa70124d663136f9f6b6238d897ca482782df907b07a428b09df5a0" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/dhcp/dhcp_4.3.4.bb b/import-layers/yocto-poky/meta/recipes-connectivity/dhcp/dhcp_4.3.4.bb deleted file mode 100644 index 4151eb183..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/dhcp/dhcp_4.3.4.bb +++ /dev/null @@ -1,18 +0,0 @@ -require dhcp.inc - -SRC_URI += "file://dhcp-3.0.3-dhclient-dbus.patch;striplevel=0 \ - file://link-with-lcrypto.patch \ - file://fixsepbuild.patch \ - file://dhclient-script-drop-resolv.conf.dhclient.patch \ - file://replace-ifconfig-route.patch \ - file://0001-site.h-enable-gentle-shutdown.patch \ - file://libxml2-configure-argument.patch \ - file://tweak-to-support-external-bind.patch \ - file://remove-dhclient-script-bash-dependency.patch \ - " - -SRC_URI[md5sum] = "0138319fe2b788cf4bdf34fbeaf9ff54" -SRC_URI[sha256sum] = "f5115aee3dd3e6925de4ba47b80ab732ba48b481c8364b6ebade2d43698d607e" - -PACKAGECONFIG ?= "" -PACKAGECONFIG[bind-httpstats] = "--with-libxml2,--without-libxml2,libxml2" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/dhcp/dhcp_4.3.5.bb b/import-layers/yocto-poky/meta/recipes-connectivity/dhcp/dhcp_4.3.5.bb new file mode 100644 index 000000000..678c29a28 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/dhcp/dhcp_4.3.5.bb @@ -0,0 +1,18 @@ +require dhcp.inc + +SRC_URI += "file://dhcp-3.0.3-dhclient-dbus.patch;striplevel=0 \ + file://link-with-lcrypto.patch \ + file://fixsepbuild.patch \ + file://dhclient-script-drop-resolv.conf.dhclient.patch \ + file://replace-ifconfig-route.patch \ + file://0001-site.h-enable-gentle-shutdown.patch \ + file://libxml2-configure-argument.patch \ + file://tweak-to-support-external-bind.patch \ + file://remove-dhclient-script-bash-dependency.patch \ + " + 
+SRC_URI[md5sum] = "2b5e5b2fa31c2e27e487039d86f83d3f" +SRC_URI[sha256sum] = "eb95936bf15d2393c55dd505bc527d1d4408289cec5a9fa8abb99f7577e7f954" + +PACKAGECONFIG ?= "" +PACKAGECONFIG[bind-httpstats] = "--with-libxml2,--without-libxml2,libxml2" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2.inc b/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2.inc index 63e7ca9e8..ce64888a0 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2.inc +++ b/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2.inc @@ -11,7 +11,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a \ DEPENDS = "flex-native bison-native iptables elfutils" -inherit update-alternatives bash-completion +inherit update-alternatives bash-completion pkgconfig EXTRA_OEMAKE = "CC='${CC}' KERNEL_INCLUDE=${STAGING_INCDIR} DOCDIR=${docdir}/iproute2 SUBDIRS='lib tc ip bridge misc genl' SBINDIR='${base_sbindir}' LIBDIR='${libdir}'" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2/0001-libc-compat.h-add-musl-workaround.patch b/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2/0001-libc-compat.h-add-musl-workaround.patch new file mode 100644 index 000000000..3d324c96d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2/0001-libc-compat.h-add-musl-workaround.patch @@ -0,0 +1,41 @@ +From b7d96340c55afb7023ded0041107c63dbd886196 Mon Sep 17 00:00:00 2001 +From: Baruch Siach +Date: Thu, 22 Dec 2016 15:26:30 +0200 +Subject: [PATCH] libc-compat.h: add musl workaround + +The libc-compat.h kernel header uses glibc specific macros (__GLIBC__ and +__USE_MISC) to solve conflicts with libc provided headers. This patch makes +libc-compat.h work for musl libc as well. + +Upstream-Status: Pending + +Taken From: +https://git.buildroot.net/buildroot/tree/package/iproute2/0001-Add-the-musl-workaround-to-the-libc-compat.h-copy.patch + +Signed-off-by: Baruch Siach +Signed-off-by: Maxin B. John +--- + include/linux/libc-compat.h | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/include/linux/libc-compat.h b/include/linux/libc-compat.h +index f38571d..30f0b67 100644 +--- a/include/linux/libc-compat.h ++++ b/include/linux/libc-compat.h +@@ -49,10 +49,12 @@ + #define _LIBC_COMPAT_H + + /* We have included glibc headers... */ +-#if defined(__GLIBC__) ++#if 1 ++#define __USE_MISC + + /* Coordinate with glibc net/if.h header. */ + #if defined(_NET_IF_H) && defined(__USE_MISC) ++#define __UAPI_DEF_IF_NET_DEVICE_FLAGS_LOWER_UP_DORMANT_ECHO 0 + + /* GLIBC headers included first so don't define anything + * that would already be defined. */ +-- +2.4.0 + diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2/iproute2-4.3.0-musl.patch b/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2/iproute2-4.3.0-musl.patch deleted file mode 100644 index 8c078f69d..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2/iproute2-4.3.0-musl.patch +++ /dev/null @@ -1,85 +0,0 @@ -Subject: [PATCH] Avoid in6_addr redefinition - -Due to both and being included, the -in6_addr is being redefined: once from the C library headers and once -from the kernel headers. This causes some build failures with for -example the musl C library. - -In order to fix this, use just the C library header . -Original patch taken from -http://git.alpinelinux.org/cgit/aports/tree/main/iproute2/musl-fixes.patch. 
- -(Refreshed the patch for 4.6 release) - -Upstream-Status: Pending - -Signed-off-by: Thomas Petazzoni -Signed-off-by: Maxin B. John ----- -diff -Naur iproute2-4.6.0-orig/include/libiptc/ipt_kernel_headers.h iproute2-4.6.0/include/libiptc/ipt_kernel_headers.h ---- iproute2-4.6.0-orig/include/libiptc/ipt_kernel_headers.h 2016-05-23 12:03:23.821826910 +0300 -+++ iproute2-4.6.0/include/libiptc/ipt_kernel_headers.h 2016-05-23 12:04:23.714078154 +0300 -@@ -6,7 +6,6 @@ - #include - - #include --#include - #include - #include - #include -diff -Naur iproute2-4.6.0-orig/include/linux/if_bridge.h iproute2-4.6.0/include/linux/if_bridge.h ---- iproute2-4.6.0-orig/include/linux/if_bridge.h 2016-05-23 12:03:23.821826910 +0300 -+++ iproute2-4.6.0/include/linux/if_bridge.h 2016-05-23 12:04:23.716078129 +0300 -@@ -15,7 +15,6 @@ - - #include - #include --#include - - #define SYSFS_BRIDGE_ATTR "bridge" - #define SYSFS_BRIDGE_FDB "brforward" -diff -Naur iproute2-4.6.0-orig/include/linux/netfilter.h iproute2-4.6.0/include/linux/netfilter.h ---- iproute2-4.6.0-orig/include/linux/netfilter.h 2016-05-23 12:03:23.821826910 +0300 -+++ iproute2-4.6.0/include/linux/netfilter.h 2016-05-23 12:04:23.717078117 +0300 -@@ -4,8 +4,6 @@ - #include - - #include --#include --#include - - /* Responses from hook functions. */ - #define NF_DROP 0 -diff -Naur iproute2-4.6.0-orig/include/linux/netfilter_ipv4/ip_tables.h iproute2-4.6.0/include/linux/netfilter_ipv4/ip_tables.h ---- iproute2-4.6.0-orig/include/linux/netfilter_ipv4/ip_tables.h 2016-05-18 21:56:02.000000000 +0300 -+++ iproute2-4.6.0/include/linux/netfilter_ipv4/ip_tables.h 2016-05-23 12:09:22.888337961 +0300 -@@ -17,7 +17,6 @@ - - #include - --#include - #include - - #include -diff -Naur iproute2-4.6.0-orig/include/linux/xfrm.h iproute2-4.6.0/include/linux/xfrm.h ---- iproute2-4.6.0-orig/include/linux/xfrm.h 2016-05-23 12:03:23.821826910 +0300 -+++ iproute2-4.6.0/include/linux/xfrm.h 2016-05-23 12:04:23.718078104 +0300 -@@ -1,7 +1,6 @@ - #ifndef _LINUX_XFRM_H - #define _LINUX_XFRM_H - --#include - #include - - /* All of the structures in this file may not change size as they are -diff -Naur iproute2-4.6.0-orig/include/utils.h iproute2-4.6.0/include/utils.h ---- iproute2-4.6.0-orig/include/utils.h 2016-05-23 12:03:23.821826910 +0300 -+++ iproute2-4.6.0/include/utils.h 2016-05-23 12:04:23.718078104 +0300 -@@ -1,6 +1,7 @@ - #ifndef __UTILS_H__ - #define __UTILS_H__ 1 - -+#include /* MAXPATHLEN */ - #include - #include - #include diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2_4.10.0.bb b/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2_4.10.0.bb new file mode 100644 index 000000000..a050e8737 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2_4.10.0.bb @@ -0,0 +1,14 @@ +require iproute2.inc + +SRC_URI = "${KERNELORG_MIRROR}/linux/utils/net/${BPN}/${BP}.tar.xz \ + file://configure-cross.patch \ + file://0001-iproute2-de-bash-scripts.patch \ + file://0001-libc-compat.h-add-musl-workaround.patch \ + " + +SRC_URI[md5sum] = "b94a2b0edefaeac124dc8f5d006931b9" +SRC_URI[sha256sum] = "22b1e1c1fc704ad35837e5a66103739727b8b48ac90b48c13f79b7367ff0a9a8" + +# CFLAGS are computed in Makefile and reference CCOPTS +# +EXTRA_OEMAKE_append = " CCOPTS='${CFLAGS}'" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2_4.7.0.bb b/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2_4.7.0.bb deleted file mode 100644 index 426f98916..000000000 --- 
a/import-layers/yocto-poky/meta/recipes-connectivity/iproute2/iproute2_4.7.0.bb +++ /dev/null @@ -1,13 +0,0 @@ -require iproute2.inc - -SRC_URI = "${KERNELORG_MIRROR}/linux/utils/net/${BPN}/${BP}.tar.xz \ - file://configure-cross.patch \ - file://0001-iproute2-de-bash-scripts.patch \ - file://iproute2-4.3.0-musl.patch \ - " -SRC_URI[md5sum] = "d4b205830cdc2702f8a0cbd6232129cd" -SRC_URI[sha256sum] = "8f60dbcfb33a79daae0638f53bdcaa4310c0aa59ae39af8a234020dc69bb7b92" - -# CFLAGS are computed in Makefile and reference CCOPTS -# -EXTRA_OEMAKE_append = " CCOPTS='${CFLAGS}'" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/iw/iw_4.7.bb b/import-layers/yocto-poky/meta/recipes-connectivity/iw/iw_4.7.bb deleted file mode 100644 index e9f414129..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/iw/iw_4.7.bb +++ /dev/null @@ -1,33 +0,0 @@ -SUMMARY = "nl80211 based CLI configuration utility for wireless devices" -DESCRIPTION = "iw is a new nl80211 based CLI configuration utility for \ -wireless devices. It supports almost all new drivers that have been added \ -to the kernel recently. " -HOMEPAGE = "http://wireless.kernel.org/en/users/Documentation/iw" -SECTION = "base" -LICENSE = "BSD" -LIC_FILES_CHKSUM = "file://COPYING;md5=878618a5c4af25e9b93ef0be1a93f774" - -DEPENDS = "libnl" - -SRC_URI = "http://www.kernel.org/pub/software/network/iw/${BP}.tar.gz \ - file://0001-iw-version.sh-don-t-use-git-describe-for-versioning.patch \ - file://separate-objdir.patch \ -" - -SRC_URI[md5sum] = "19d1edd276b2ac0c6cccfc7ae8d2b732" -SRC_URI[sha256sum] = "758092229f13d691968060a0ad41364ba8eb8da4503626c20233a5b1eb33b4d9" - -inherit pkgconfig - -EXTRA_OEMAKE = "\ - -f '${S}/Makefile' \ - \ - 'PREFIX=${prefix}' \ - 'SBINDIR=${sbindir}' \ - 'MANDIR=${mandir}' \ -" -B = "${WORKDIR}/build" - -do_install() { - oe_runmake 'DESTDIR=${D}' install -} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/iw/iw_4.9.bb b/import-layers/yocto-poky/meta/recipes-connectivity/iw/iw_4.9.bb new file mode 100644 index 000000000..6daeb07b7 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/iw/iw_4.9.bb @@ -0,0 +1,33 @@ +SUMMARY = "nl80211 based CLI configuration utility for wireless devices" +DESCRIPTION = "iw is a new nl80211 based CLI configuration utility for \ +wireless devices. It supports almost all new drivers that have been added \ +to the kernel recently. 
" +HOMEPAGE = "http://wireless.kernel.org/en/users/Documentation/iw" +SECTION = "base" +LICENSE = "BSD" +LIC_FILES_CHKSUM = "file://COPYING;md5=878618a5c4af25e9b93ef0be1a93f774" + +DEPENDS = "libnl" + +SRC_URI = "http://www.kernel.org/pub/software/network/iw/${BP}.tar.gz \ + file://0001-iw-version.sh-don-t-use-git-describe-for-versioning.patch \ + file://separate-objdir.patch \ +" + +SRC_URI[md5sum] = "06e96ab7a5c652f8eaed6f71533a9e0f" +SRC_URI[sha256sum] = "12f921f3dbe0f33c309f5f2891cccf5325c94bd48dceeb102de183f5f048a9e2" + +inherit pkgconfig + +EXTRA_OEMAKE = "\ + -f '${S}/Makefile' \ + \ + 'PREFIX=${prefix}' \ + 'SBINDIR=${sbindir}' \ + 'MANDIR=${mandir}' \ +" +B = "${WORKDIR}/build" + +do_install() { + oe_runmake 'DESTDIR=${D}' install +} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap.inc b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap.inc index 7b29a52dc..663577924 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap.inc +++ b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap.inc @@ -19,14 +19,14 @@ BINCONFIG = "${bindir}/pcap-config" inherit autotools binconfig-disabled pkgconfig bluetooth EXTRA_OECONF = "--with-pcap=linux" +EXTRA_AUTORECONF += "--exclude=aclocal" PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', '${BLUEZ}', '', d)} \ - ${@bb.utils.contains('DISTRO_FEATURES', 'ipv6', 'ipv6', '', d)} \ + ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \ " PACKAGECONFIG[bluez4] = "--enable-bluetooth,--disable-bluetooth,bluez4" # Add a dummy PACKAGECONFIG for bluez5 since it is not supported by libpcap. PACKAGECONFIG[bluez5] = ",," -PACKAGECONFIG[canusb] = "--enable-canusb,--enable-canusb=no,libusb" PACKAGECONFIG[dbus] = "--enable-dbus,--disable-dbus,dbus" PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6," PACKAGECONFIG[libnl] = "--with-libnl,--without-libnl,libnl" @@ -36,8 +36,5 @@ CFLAGS_prepend = "-I${S} " CXXFLAGS_prepend = "-I${S} " do_configure_prepend () { - if [ ! -e ${S}/acinclude.m4 ]; then - cat ${S}/aclocal.m4 > ${S}/acinclude.m4 - fi sed -i -e's,^V_RPATH_OPT=.*$,V_RPATH_OPT=,' ${S}/pcap-config.in } diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/0001-Fix-compiler_state_t.ai-usage-when-INET6-is-not-defi.patch b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/0001-Fix-compiler_state_t.ai-usage-when-INET6-is-not-defi.patch new file mode 100644 index 000000000..edb6ae566 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/0001-Fix-compiler_state_t.ai-usage-when-INET6-is-not-defi.patch @@ -0,0 +1,41 @@ +From 64aa033a061c43fc15c711f2490ae41d23b868c3 Mon Sep 17 00:00:00 2001 +From: Fabio Berton +Date: Thu, 17 Nov 2016 09:44:42 -0200 +Subject: [PATCH 1/2] Fix compiler_state_t.ai usage when INET6 is not defined +Organization: O.S. Systems Software LTDA. 
+ +Fix error: + +/ +| ../libpcap-1.8.1/gencode.c: In function 'pcap_compile': +| ../libpcap-1.8.1/gencode.c:693:8: error: 'compiler_state_t +| {aka struct _compiler_state}' has no member named 'ai' +| cstate.ai = NULL; +\ + +Upstream-Status: Submitted [1] + +[1] https://github.com/the-tcpdump-group/libpcap/pull/541 + +Signed-off-by: Fabio Berton +--- + gencode.c | 2 ++ + 1 file changed, 2 insertions(+) + +diff --git a/gencode.c b/gencode.c +index a887f27..e103c70 100644 +--- a/gencode.c ++++ b/gencode.c +@@ -690,7 +690,9 @@ pcap_compile(pcap_t *p, struct bpf_program *program, + } + initchunks(&cstate); + cstate.no_optimize = 0; ++#ifdef INET6 + cstate.ai = NULL; ++#endif + cstate.ic.root = NULL; + cstate.ic.cur_mark = 0; + cstate.bpf_pcap = p; +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/0002-Add-missing-compiler_state_t-parameter.patch b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/0002-Add-missing-compiler_state_t-parameter.patch new file mode 100644 index 000000000..032b265f0 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/0002-Add-missing-compiler_state_t-parameter.patch @@ -0,0 +1,67 @@ +From 50ec0a088d5924a8305b2d70dcba71b0942dee1a Mon Sep 17 00:00:00 2001 +From: Fabio Berton +Date: Thu, 17 Nov 2016 09:47:29 -0200 +Subject: [PATCH 2/2] Add missing compiler_state_t parameter +Organization: O.S. Systems Software LTDA. + +Fix error: + +/ +|../libpcap-1.8.1/gencode.c: In function 'gen_gateway': +|../libpcap-1.8.1/gencode.c:4914:13: error: 'cstate' undeclared +| (first use in this function) +| bpf_error(cstate, "direction applied to 'gateway'"); +\ + +Upstream-Status: Submitted [1] + +[1] https://github.com/the-tcpdump-group/libpcap/pull/541 + +Signed-off-by: Fabio Berton +--- + gencode.c | 15 ++++++++------- + 1 file changed, 8 insertions(+), 7 deletions(-) + +diff --git a/gencode.c b/gencode.c +index e103c70..f07c0be 100644 +--- a/gencode.c ++++ b/gencode.c +@@ -523,7 +523,7 @@ static struct block *gen_host6(compiler_state_t *, struct in6_addr *, + struct in6_addr *, int, int, int); + #endif + #ifndef INET6 +-static struct block *gen_gateway(const u_char *, bpf_u_int32 **, int, int); ++static struct block *gen_gateway(compiler_state_t *, const u_char *, bpf_u_int32 **, int, int); + #endif + static struct block *gen_ipfrag(compiler_state_t *); + static struct block *gen_portatom(compiler_state_t *, int, bpf_int32); +@@ -4904,11 +4904,12 @@ gen_host6(compiler_state_t *cstate, struct in6_addr *addr, + + #ifndef INET6 + static struct block * +-gen_gateway(eaddr, alist, proto, dir) +- const u_char *eaddr; +- bpf_u_int32 **alist; +- int proto; +- int dir; ++gen_gateway(cstate, eaddr, alist, proto, dir) ++ compiler_state_t *cstate; ++ const u_char *eaddr; ++ bpf_u_int32 **alist; ++ int proto; ++ int dir; + { + struct block *b0, *b1, *tmp; + +@@ -6472,7 +6473,7 @@ gen_scode(compiler_state_t *cstate, const char *name, struct qual q) + alist = pcap_nametoaddr(name); + if (alist == NULL || *alist == NULL) + bpf_error(cstate, "unknown host '%s'", name); +- b = gen_gateway(eaddr, alist, proto, dir); ++ b = gen_gateway(cstate, eaddr, alist, proto, dir); + free(eaddr); + return b; + #else +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/aclocal.patch b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/aclocal.patch deleted file mode 100644 index 21519825c..000000000 --- 
a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/aclocal.patch +++ /dev/null @@ -1,167 +0,0 @@ -Upstream-Status: Inappropriate [configuration] - -diff -ruN libpcap-1.1.1-orig/aclocal.m4 libpcap-1.1.1/aclocal.m4 ---- libpcap-1.1.1-orig/aclocal.m4 2010-06-29 10:46:32.815117569 +0800 -+++ libpcap-1.1.1/aclocal.m4 2010-06-29 10:49:17.150149949 +0800 -@@ -37,7 +37,7 @@ - dnl AC_LBL_C_INIT. Now, we run AC_LBL_C_INIT_BEFORE_CC, AC_PROG_CC, - dnl and AC_LBL_C_INIT at the top level. - dnl --AC_DEFUN(AC_LBL_C_INIT_BEFORE_CC, -+AC_DEFUN([AC_LBL_C_INIT_BEFORE_CC], - [ - AC_BEFORE([$0], [AC_LBL_C_INIT]) - AC_BEFORE([$0], [AC_PROG_CC]) -@@ -90,7 +90,7 @@ - dnl LDFLAGS - dnl LBL_CFLAGS - dnl --AC_DEFUN(AC_LBL_C_INIT, -+AC_DEFUN([AC_LBL_C_INIT], - [ - AC_BEFORE([$0], [AC_LBL_FIXINCLUDES]) - AC_BEFORE([$0], [AC_LBL_DEVEL]) -@@ -217,7 +217,7 @@ - dnl V_SONAME_OPT - dnl V_RPATH_OPT - dnl --AC_DEFUN(AC_LBL_SHLIBS_INIT, -+AC_DEFUN([AC_LBL_SHLIBS_INIT], - [AC_PREREQ(2.50) - if test "$GCC" = yes ; then - # -@@ -361,7 +361,7 @@ - # Make sure we use the V_CCOPT flags, because some of those might - # disable inlining. - # --AC_DEFUN(AC_LBL_C_INLINE, -+AC_DEFUN([AC_LBL_C_INLINE], - [AC_MSG_CHECKING(for inline) - save_CFLAGS="$CFLAGS" - CFLAGS="$V_CCOPT" -@@ -407,7 +407,7 @@ - dnl - dnl AC_LBL_FIXINCLUDES - dnl --AC_DEFUN(AC_LBL_FIXINCLUDES, -+AC_DEFUN([AC_LBL_FIXINCLUDES], - [if test "$GCC" = yes ; then - AC_MSG_CHECKING(for ANSI ioctl definitions) - AC_CACHE_VAL(ac_cv_lbl_gcc_fixincludes, -@@ -453,7 +453,7 @@ - dnl $2 (yacc appended) - dnl $3 (optional flex and bison -P prefix) - dnl --AC_DEFUN(AC_LBL_LEX_AND_YACC, -+AC_DEFUN([AC_LBL_LEX_AND_YACC], - [AC_ARG_WITH(flex, [ --without-flex don't use flex]) - AC_ARG_WITH(bison, [ --without-bison don't use bison]) - if test "$with_flex" = no ; then -@@ -506,7 +506,7 @@ - dnl - dnl DECLWAITSTATUS (defined) - dnl --AC_DEFUN(AC_LBL_UNION_WAIT, -+AC_DEFUN([AC_LBL_UNION_WAIT], - [AC_MSG_CHECKING(if union wait is used) - AC_CACHE_VAL(ac_cv_lbl_union_wait, - AC_TRY_COMPILE([ -@@ -535,7 +535,7 @@ - dnl - dnl HAVE_SOCKADDR_SA_LEN (defined) - dnl --AC_DEFUN(AC_LBL_SOCKADDR_SA_LEN, -+AC_DEFUN([AC_LBL_SOCKADDR_SA_LEN], - [AC_MSG_CHECKING(if sockaddr struct has the sa_len member) - AC_CACHE_VAL(ac_cv_lbl_sockaddr_has_sa_len, - AC_TRY_COMPILE([ -@@ -560,7 +560,7 @@ - dnl - dnl HAVE_SOCKADDR_STORAGE (defined) - dnl --AC_DEFUN(AC_LBL_SOCKADDR_STORAGE, -+AC_DEFUN([AC_LBL_SOCKADDR_STORAGE], - [AC_MSG_CHECKING(if sockaddr_storage struct exists) - AC_CACHE_VAL(ac_cv_lbl_has_sockaddr_storage, - AC_TRY_COMPILE([ -@@ -593,7 +593,7 @@ - dnl won't be using code that would use that member, or we wouldn't - dnl compile in any case). 
- dnl --AC_DEFUN(AC_LBL_HP_PPA_INFO_T_DL_MODULE_ID_1, -+AC_DEFUN([AC_LBL_HP_PPA_INFO_T_DL_MODULE_ID_1], - [AC_MSG_CHECKING(if dl_hp_ppa_info_t struct has dl_module_id_1 member) - AC_CACHE_VAL(ac_cv_lbl_dl_hp_ppa_info_t_has_dl_module_id_1, - AC_TRY_COMPILE([ -@@ -619,7 +619,7 @@ - dnl - dnl ac_cv_lbl_have_run_path (yes or no) - dnl --AC_DEFUN(AC_LBL_HAVE_RUN_PATH, -+AC_DEFUN([AC_LBL_HAVE_RUN_PATH], - [AC_MSG_CHECKING(for ${CC-cc} -R) - AC_CACHE_VAL(ac_cv_lbl_have_run_path, - [echo 'main(){}' > conftest.c -@@ -644,7 +644,7 @@ - dnl - dnl LBL_ALIGN (DEFINED) - dnl --AC_DEFUN(AC_LBL_UNALIGNED_ACCESS, -+AC_DEFUN([AC_LBL_UNALIGNED_ACCESS], - [AC_MSG_CHECKING(if unaligned accesses fail) - AC_CACHE_VAL(ac_cv_lbl_unaligned_fail, - [case "$host_cpu" in -@@ -749,7 +749,7 @@ - dnl HAVE_OS_PROTO_H (defined) - dnl os-proto.h (symlinked) - dnl --AC_DEFUN(AC_LBL_DEVEL, -+AC_DEFUN([AC_LBL_DEVEL], - [rm -f os-proto.h - if test "${LBL_CFLAGS+set}" = set; then - $1="$$1 ${LBL_CFLAGS}" -@@ -886,7 +886,7 @@ - dnl statically and happen to have a libresolv.a lying around (and no - dnl libnsl.a). - dnl --AC_DEFUN(AC_LBL_LIBRARY_NET, [ -+AC_DEFUN([AC_LBL_LIBRARY_NET], [ - # Most operating systems have gethostbyname() in the default searched - # libraries (i.e. libc): - # Some OSes (eg. Solaris) place it in libnsl -@@ -909,7 +909,7 @@ - dnl Test for __attribute__ - dnl - --AC_DEFUN(AC_C___ATTRIBUTE__, [ -+AC_DEFUN([AC_C___ATTRIBUTE__], [ - AC_MSG_CHECKING(for __attribute__) - AC_CACHE_VAL(ac_cv___attribute__, [ - AC_COMPILE_IFELSE( -@@ -947,7 +947,7 @@ - dnl - dnl -Scott Barron - dnl --AC_DEFUN(AC_LBL_TPACKET_STATS, -+AC_DEFUN([AC_LBL_TPACKET_STATS], - [AC_MSG_CHECKING(if if_packet.h has tpacket_stats defined) - AC_CACHE_VAL(ac_cv_lbl_tpacket_stats, - AC_TRY_COMPILE([ -@@ -976,7 +976,7 @@ - dnl doesn't have that member (which is OK, as either we won't be using - dnl code that would use that member, or we wouldn't compile in any case). - dnl --AC_DEFUN(AC_LBL_LINUX_TPACKET_AUXDATA_TP_VLAN_TCI, -+AC_DEFUN([AC_LBL_LINUX_TPACKET_AUXDATA_TP_VLAN_TCI], - [AC_MSG_CHECKING(if tpacket_auxdata struct has tp_vlan_tci member) - AC_CACHE_VAL(ac_cv_lbl_dl_hp_ppa_info_t_has_dl_module_id_1, - AC_TRY_COMPILE([ -@@ -1003,7 +1003,7 @@ - dnl - dnl HAVE_DLPI_PASSIVE (defined) - dnl --AC_DEFUN(AC_LBL_DL_PASSIVE_REQ_T, -+AC_DEFUN([AC_LBL_DL_PASSIVE_REQ_T], - [AC_MSG_CHECKING(if dl_passive_req_t struct exists) - AC_CACHE_VAL(ac_cv_lbl_has_dl_passive_req_t, - AC_TRY_COMPILE([ diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/disable-remote.patch b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/disable-remote.patch new file mode 100644 index 000000000..7e1eea6b1 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/disable-remote.patch @@ -0,0 +1,36 @@ +Disable bits of remote capture support inherited from the WinPCAP merge +which cause applications to FTBFS if they define HAVE_REMOTE. + +Patch from: +https://anonscm.debian.org/cgit/users/rfrancoise/libpcap.git/commit/? 
+id=f35949969269dfdcc3549b12fade604755e1e326 + +Upstream-Status: Pending + +--- a/pcap/pcap.h ++++ b/pcap/pcap.h +@@ -506,6 +506,11 @@ + #define MODE_STAT 1 + #define MODE_MON 2 + ++#ifdef HAVE_REMOTE ++ /* Includes most of the public stuff that is needed for the remote capture */ ++ #include ++#endif /* HAVE_REMOTE */ ++ + #elif defined(MSDOS) + + /* +@@ -526,11 +531,6 @@ + + #endif /* _WIN32/MSDOS/UN*X */ + +-#ifdef HAVE_REMOTE +- /* Includes most of the public stuff that is needed for the remote capture */ +- #include +-#endif /* HAVE_REMOTE */ +- + #ifdef __cplusplus + } + #endif + diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/fix-grammar-deps.patch b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/fix-grammar-deps.patch new file mode 100644 index 000000000..f40e655c4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/fix-grammar-deps.patch @@ -0,0 +1,29 @@ +Fix a missing dependency that can result in: + +../libpcap-1.8.1/grammar.y:78:10: fatal error: scanner.h: No such file or directory + +Upstream-Status: Backport +Signed-off-by: Ross Burton + +From 0dd90a6bdbce4dca14106859eee63ef643a106e2 Mon Sep 17 00:00:00 2001 +From: Alfredo Alvarez Fernandez +Date: Tue, 21 Feb 2017 11:41:43 +0100 +Subject: [PATCH] Makefile.in: Fix missing dependency + +--- + Makefile.in | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/Makefile.in b/Makefile.in +index 7044f043..f5d443ae 100644 +--- a/Makefile.in ++++ b/Makefile.in +@@ -465,7 +465,7 @@ grammar.h: grammar.c + $(MAKE) $(MAKEFLAGS) grammar.c; \ + fi + +-grammar.o: grammar.c ++grammar.o: grammar.c scanner.h + $(CC) $(FULL_CFLAGS) -c grammar.c + + gencode.o: $(srcdir)/gencode.c grammar.h scanner.h diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/libpcap-pkgconfig-support.patch b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/libpcap-pkgconfig-support.patch index b8615135b..afaa3bea9 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/libpcap-pkgconfig-support.patch +++ b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap/libpcap-pkgconfig-support.patch @@ -1,25 +1,27 @@ -From 8887132e85892a72a84ca3878e60f254ad2ce939 Mon Sep 17 00:00:00 2001 -From: Joe MacDonald -Date: Tue, 24 Feb 2015 15:56:06 -0500 +From 2796129af52901dd68595e5e88a639308541def9 Mon Sep 17 00:00:00 2001 +From: Fabio Berton +Date: Thu, 3 Nov 2016 17:56:29 -0200 Subject: [PATCH] libpcap: pkgconfig support +Organization: O.S. Systems Software LTDA. Adding basic structure to support pkg-config. 
Upstream-Status: Inappropriate [embedded specific] Signed-off-by: Joe MacDonald +Signed-off-by: Fabio Berton --- Makefile.in | 5 +++++ - configure.in | 1 + + configure.ac | 1 + libpcap.pc.in | 10 ++++++++++ 3 files changed, 16 insertions(+) create mode 100644 libpcap.pc.in diff --git a/Makefile.in b/Makefile.in -index 1c2d745..1f25faf 100644 +index e71d973..d7004ed 100644 --- a/Makefile.in +++ b/Makefile.in -@@ -60,6 +60,10 @@ V_RPATH_OPT = @V_RPATH_OPT@ +@@ -61,6 +61,10 @@ V_RPATH_OPT = @V_RPATH_OPT@ DEPENDENCY_CFLAG = @DEPENDENCY_CFLAG@ PROG=libpcap @@ -30,19 +32,19 @@ index 1c2d745..1f25faf 100644 # Standard CFLAGS FULL_CFLAGS = $(CCOPT) $(INCLS) $(DEFS) $(CFLAGS) -@@ -275,6 +279,7 @@ EXTRA_DIST = \ +@@ -286,6 +290,7 @@ EXTRA_DIST = \ lbl/os-solaris2.h \ lbl/os-sunos4.h \ lbl/os-ultrix4.h \ + libpcap.pc \ + missing/getopt.c \ + missing/getopt.h \ missing/snprintf.c \ - mkdep \ - msdos/bin2c.c \ -diff --git a/configure.in b/configure.in -index 8f5c86b..fb51b35 100644 ---- a/configure.in -+++ b/configure.in -@@ -1700,6 +1700,7 @@ esac +diff --git a/configure.ac b/configure.ac +index da2f940..4fc67bf 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -1805,6 +1805,7 @@ fi AC_PROG_INSTALL AC_CONFIG_HEADER(config.h) @@ -67,5 +69,5 @@ index 0000000..4f78ad8 +Libs: -L${libdir} -lpcap +Cflags: -I${includedir} -- -1.9.1 +2.1.4 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap_1.7.4.bb b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap_1.7.4.bb deleted file mode 100644 index 8d12b2521..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap_1.7.4.bb +++ /dev/null @@ -1,26 +0,0 @@ -require libpcap.inc - -SRC_URI += "file://aclocal.patch \ - file://libpcap-pkgconfig-support.patch \ - " -SRC_URI[md5sum] = "b2e13142bbaba857ab1c6894aedaf547" -SRC_URI[sha256sum] = "7ad3112187e88328b85e46dce7a9b949632af18ee74d97ffc3f2b41fe7f448b0" - -# -# make install doesn't cover the shared lib -# make install-shared is just broken (no symlinks) -# - -do_configure_prepend () { - #remove hardcoded references to /usr/include - sed 's|\([ "^'\''I]\+\)/usr/include/|\1${STAGING_INCDIR}/|g' -i ${S}/configure.in -} - -do_install_prepend () { - install -d ${D}${libdir} - install -d ${D}${bindir} - oe_runmake install-shared DESTDIR=${D} - oe_libinstall -a -so libpcap ${D}${libdir} - sed "s|@VERSION@|${PV}|" -i ${B}/libpcap.pc - install -D -m 0644 libpcap.pc ${D}${libdir}/pkgconfig/libpcap.pc -} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap_1.8.1.bb b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap_1.8.1.bb new file mode 100644 index 000000000..13dfbd67a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/libpcap/libpcap_1.8.1.bb @@ -0,0 +1,31 @@ +require libpcap.inc + +SRC_URI += " \ + file://libpcap-pkgconfig-support.patch \ + file://0001-Fix-compiler_state_t.ai-usage-when-INET6-is-not-defi.patch \ + file://0002-Add-missing-compiler_state_t-parameter.patch \ + file://disable-remote.patch \ + file://fix-grammar-deps.patch \ +" + +SRC_URI[md5sum] = "3d48f9cd171ff12b0efd9134b52f1447" +SRC_URI[sha256sum] = "673dbc69fdc3f5a86fb5759ab19899039a8e5e6c631749e48dcd9c6f0c83541e" + +# +# make install doesn't cover the shared lib +# make install-shared is just broken (no symlinks) +# + +do_configure_prepend () { + #remove hardcoded references to /usr/include + sed 's|\([ "^'\''I]\+\)/usr/include/|\1${STAGING_INCDIR}/|g' -i ${S}/configure.ac +} + +do_install_prepend () { + install -d 
${D}${libdir} + install -d ${D}${bindir} + oe_runmake install-shared DESTDIR=${D} + oe_libinstall -a -so libpcap ${D}${libdir} + sed "s|@VERSION@|${PV}|" -i ${B}/libpcap.pc + install -D -m 0644 libpcap.pc ${D}${libdir}/pkgconfig/libpcap.pc +} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/neard/neard/0001-Add-header-dependency-to-nciattach.o.patch b/import-layers/yocto-poky/meta/recipes-connectivity/neard/neard/0001-Add-header-dependency-to-nciattach.o.patch new file mode 100644 index 000000000..d8e8a5e5d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/neard/neard/0001-Add-header-dependency-to-nciattach.o.patch @@ -0,0 +1,35 @@ +From affaa2021a54c30353e4e1fee09c13a4de2196be Mon Sep 17 00:00:00 2001 +From: Jussi Kukkonen +Date: Fri, 17 Mar 2017 14:24:29 +0200 +Subject: [PATCH] Add header dependency to nciattach.o + +This can happen when compiling nciattach.o: + +| In file included from ../neard-0.16/tools/nciattach.c:47:0: +| ../neard-0.16/src/near.h:30:27: fatal error: near/nfc_copy.h: No such +file or directory +| #include + +Add the missing dependency to local headers. + +Signed-off-by: Jussi Kukkonen +Upstream-Status: Submitted [mailinglist] +--- + Makefile.am | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/Makefile.am b/Makefile.am +index fa552ee..acef6ba 100644 +--- a/Makefile.am ++++ b/Makefile.am +@@ -253,6 +253,7 @@ se/builtin.h: src/genbuiltin $(builtin_se_sources) + + $(src_neard_OBJECTS) \ + $(tools_nfctool_nfctool_OBJECTS) \ ++$(tools_nciattach_OBJECTS) \ + $(plugin_objects) \ + $(se_seeld_OBJECTS) \ + $(unit_test_ndef_parse_OBJECTS) \ +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/neard/neard_0.16.bb b/import-layers/yocto-poky/meta/recipes-connectivity/neard/neard_0.16.bb index 5433dc3c3..cc6af4e1c 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/neard/neard_0.16.bb +++ b/import-layers/yocto-poky/meta/recipes-connectivity/neard/neard_0.16.bb @@ -9,6 +9,7 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/network/nfc/${BP}.tar.xz \ file://neard.in \ file://Makefile.am-fix-parallel-issue.patch \ file://Makefile.am-do-not-ship-version.h.patch \ + file://0001-Add-header-dependency-to-nciattach.o.patch \ " SRC_URI[md5sum] = "5c691fb7872856dc0d909c298bc8cb41" SRC_URI[sha256sum] = "eae3b11c541a988ec11ca94b7deab01080cd5b58cfef3ced6ceac9b6e6e65b36" @@ -19,7 +20,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=12f884d2ae1ff87c09e5b7ccc2c4ca7e \ inherit autotools pkgconfig systemd update-rc.d bluetooth -PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd', '', d)}" +PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)}" PACKAGECONFIG[systemd] = "--enable-systemd --with-systemdsystemunitdir=${systemd_unitdir}/system/ --with-systemduserunitdir=${systemd_unitdir}/user/,--disable-systemd" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/files/nfs-utils-debianize-start-statd.patch b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/files/nfs-utils-debianize-start-statd.patch index 85002290f..ede0dcefc 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/files/nfs-utils-debianize-start-statd.patch +++ b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/files/nfs-utils-debianize-start-statd.patch @@ -9,17 +9,18 @@ Signed-off-by: Li Wang Signed-off-by: Roy Li Signed-off-by: Wenzong Fan --- - utils/statd/start-statd | 9 ++++++++- - 1 file changed, 8 insertions(+), 1 deletion(-) + utils/statd/start-statd | 10 
+++++++++- + 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/utils/statd/start-statd b/utils/statd/start-statd -index ec9383b..3969b8c 100755 +index 2fd6039..f591b34 100755 --- a/utils/statd/start-statd +++ b/utils/statd/start-statd -@@ -6,6 +6,13 @@ - # site. - PATH="/sbin:/usr/sbin:/bin:/usr/bin" - +@@ -17,6 +17,14 @@ then + # statd already running - must have been slow to respond. + exit 0 + fi ++ +# Read config +DEFAULTFILE=/etc/default/nfs-common +NEED_IDMAPD= @@ -28,14 +29,14 @@ index ec9383b..3969b8c 100755 +fi + # First try systemd if it's installed. - if systemctl --help >/dev/null 2>&1; then + if [ -d /run/systemd/system ]; then # Quit only if the call worked. -@@ -13,4 +20,4 @@ if systemctl --help >/dev/null 2>&1; then - fi +@@ -25,4 +33,4 @@ fi + cd / # Fall back to launching it ourselves. -exec rpc.statd --no-notify +exec rpc.statd --no-notify $STATDOPTS -- -1.9.1 +2.6.6 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-nfs-utils-statd-fix-a-segfault-caused-by-improper-us.patch b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-nfs-utils-statd-fix-a-segfault-caused-by-improper-us.patch deleted file mode 100644 index de0b045c8..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/0001-nfs-utils-statd-fix-a-segfault-caused-by-improper-us.patch +++ /dev/null @@ -1,113 +0,0 @@ -Upstream-Status: Pending - -Subject: nfs-utils/statd: fix a segfault caused by improper usage of RPC interface - -There is a hack which uses the bottom-level RPC improperly as below -in the current statd implementation: -insert a socket in the svc_fdset without a corresponding transport handle -and passes the socket to the svc_getreqset subroutine, this usage causes -a segfault of statd on a huge amount of sm-notifications. - -Fix the issue by separating the non-RPC-server sock from RPC dispatcher. - -Signed-off-by: Shan Hai -Signed-off-by: Chen Qi ---- - utils/statd/rmtcall.c | 1 - - utils/statd/statd.c | 5 +++-- - utils/statd/statd.h | 2 +- - utils/statd/svc_run.c | 8 ++++++-- - 4 files changed, 10 insertions(+), 6 deletions(-) - -diff --git a/utils/statd/rmtcall.c b/utils/statd/rmtcall.c -index fd576d9..cde091b 100644 ---- a/utils/statd/rmtcall.c -+++ b/utils/statd/rmtcall.c -@@ -104,7 +104,6 @@ statd_get_socket(void) - if (sockfd < 0) - return -1; - -- FD_SET(sockfd, &SVC_FDSET); - return sockfd; - } - -diff --git a/utils/statd/statd.c b/utils/statd/statd.c -index 51a016e..e21a259 100644 ---- a/utils/statd/statd.c -+++ b/utils/statd/statd.c -@@ -247,6 +247,7 @@ int main (int argc, char **argv) - int port = 0, out_port = 0; - int nlm_udp = 0, nlm_tcp = 0; - struct rlimit rlim; -+ int notify_sockfd; - - int pipefds[2] = { -1, -1}; - char status; -@@ -473,7 +474,7 @@ int main (int argc, char **argv) - } - - /* Make sure we have a privilege port for calling into the kernel */ -- if (statd_get_socket() < 0) -+ if ((notify_sockfd = statd_get_socket()) < 0) - exit(1); - - /* If sm-notify didn't take all the state files, load -@@ -528,7 +529,7 @@ int main (int argc, char **argv) - * Handle incoming requests: SM_NOTIFY socket requests, as - * well as callbacks from lockd. - */ -- my_svc_run(); /* I rolled my own, Olaf made it better... */ -+ my_svc_run(notify_sockfd); /* I rolled my own, Olaf made it better... */ - - /* Only get here when simulating a crash so we should probably - * start sm-notify running again. 
As we have already dropped -diff --git a/utils/statd/statd.h b/utils/statd/statd.h -index a1d8035..231ac7e 100644 ---- a/utils/statd/statd.h -+++ b/utils/statd/statd.h -@@ -28,7 +28,7 @@ extern _Bool statd_present_address(const struct sockaddr *sap, char *buf, - __attribute__((__malloc__)) - extern char * statd_canonical_name(const char *hostname); - --extern void my_svc_run(void); -+extern void my_svc_run(int); - extern void notify_hosts(void); - extern void shuffle_dirs(void); - extern int statd_get_socket(void); -diff --git a/utils/statd/svc_run.c b/utils/statd/svc_run.c -index d98ecee..28c1ad6 100644 ---- a/utils/statd/svc_run.c -+++ b/utils/statd/svc_run.c -@@ -78,7 +78,7 @@ my_svc_exit(void) - * The heart of the server. A crib from libc for the most part... - */ - void --my_svc_run(void) -+my_svc_run(int sockfd) - { - FD_SET_TYPE readfds; - int selret; -@@ -96,6 +96,8 @@ my_svc_run(void) - } - - readfds = SVC_FDSET; -+ /* Set notify sockfd for waiting for reply */ -+ FD_SET(sockfd, &readfds); - if (notify) { - struct timeval tv; - -@@ -125,8 +127,10 @@ my_svc_run(void) - - default: - selret -= process_reply(&readfds); -- if (selret) -+ if (selret) { -+ FD_CLR(sockfd, &readfds); - svc_getreqset(&readfds); -+ } - } - } - } --- -1.9.1 - diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/fix-protocol-minor-version-fall-back.patch b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/fix-protocol-minor-version-fall-back.patch deleted file mode 100644 index 683246c4a..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/fix-protocol-minor-version-fall-back.patch +++ /dev/null @@ -1,55 +0,0 @@ -From 78bb645a42c216b37b8d930c7c849a3fa89babf8 Mon Sep 17 00:00:00 2001 -From: Takashi Iwai -Date: Sat, 16 Jan 2016 12:02:30 -0500 -Subject: [PATCH] Fix protocol minor version fall-back - -mount.nfs currently expects mount(2) to fail with EPROTONOSUPPORT if -the kernel doesn't understand the requested NFS version. - -Unfortunately if the requested minor is not known to the kernel -it returns -EINVAL. -In kernels since 3.11 this can happen in nfs4_alloc_client(), if -compiled without NFS_V4_2. - -More generally it can happen in in nfs_validate_text_mount_data() -when nfs_parse_mount_options() returns 0 because -nfs_parse_version_string() -didn't recognise the version. - -EPROTONOSUPPORT is only returned if NFSv4 support is completely compiled -out. - -So nfs_autonegotiate needs to check for EINVAL as well as -EPROTONOSUPPORT. - -URL: https://bugzilla.opensuse.org/show_bug.cgi?id=959211 -Reported-by: Takashi Iwai -Signed-off-by: NeilBrown -Signed-off-by: Steve Dickson - - -Upstream-Status: Backport -http://git.linux-nfs.org/?p=steved/nfs-utils.git;a=patch;h=78bb645a42c216b37b8d930c7c849a3fa89babf8 - -Signed-off-by: Yi Zhao ---- - utils/mount/stropts.c | 3 +++ - 1 file changed, 3 insertions(+) - -diff --git a/utils/mount/stropts.c b/utils/mount/stropts.c -index c8f5a6d..86829a9 100644 ---- a/utils/mount/stropts.c -+++ b/utils/mount/stropts.c -@@ -841,6 +841,9 @@ check_result: - case EPROTONOSUPPORT: - /* A clear indication that the server or our - * client does not support NFS version 4 and minor */ -+ case EINVAL: -+ /* A less clear indication that our client -+ * does not support NFSv4 minor version. 
*/ - if (mi->version.v_mode == V_GENERAL && - mi->version.minor == 0) - return result; --- -2.7.4 - diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-mountd.service b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-mountd.service index 613ddc003..27ea58d36 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-mountd.service +++ b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-mountd.service @@ -1,7 +1,11 @@ [Unit] Description=NFS Mount Daemon -After=rpcbind.service nfs-server.service -Requires=rpcbind.service nfs-server.service +DefaultDependencies=no +Requires=proc-fs-nfsd.mount +After=proc-fs-nfsd.mount +After=network.target local-fs.target +BindsTo=nfs-server.service +ConditionPathExists=@SYSCONFDIR@/exports [Service] EnvironmentFile=-@SYSCONFDIR@/nfs-utils.conf diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-server.service b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-server.service index 147d7a7b5..6481377d8 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-server.service +++ b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-server.service @@ -1,7 +1,12 @@ [Unit] -Description=NFS Server -Requires=rpcbind.service nfs-mountd.service -After=rpcbind.service +Description=NFS server and services +DefaultDependencies=no +Requires=network.target proc-fs-nfsd.mount +Requires=nfs-mountd.service +Wants=rpcbind.service +After=local-fs.target +After=network.target proc-fs-nfsd.mount rpcbind.service nfs-mountd.service +ConditionPathExists=@SYSCONFDIR@/exports [Service] Type=oneshot @@ -9,6 +14,7 @@ EnvironmentFile=-@SYSCONFDIR@/nfs-utils.conf ExecStartPre=@SBINDIR@/exportfs -r ExecStart=@SBINDIR@/rpc.nfsd $NFSD_OPTS $NFSD_COUNT ExecStop=@SBINDIR@/rpc.nfsd 0 +ExecStopPost=@SBINDIR@/exportfs -au ExecStopPost=@SBINDIR@/exportfs -f ExecReload=@SBINDIR@/exportfs -r StandardError=syslog diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-statd.service b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-statd.service index 746dacf05..6e196b8c8 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-statd.service +++ b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfs-statd.service @@ -1,8 +1,9 @@ [Unit] -Description=NFS file locking service -After=rpcbind.service -Requires=rpcbind.service -Before=remote-fs-pre.target +Description=NFS status monitor for NFSv2/3 locking. 
+DefaultDependencies=no +Conflicts=umount.target +Requires=nss-lookup.target rpcbind.service +After=network.target nss-lookup.target rpcbind.service [Service] EnvironmentFile=-@SYSCONFDIR@/nfs-utils.conf diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfsserver b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfsserver index 7ed93a59d..d5e9c38a9 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfsserver +++ b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils/nfsserver @@ -40,7 +40,7 @@ test "$NFS_SERVERS" != "" && test "$NFS_SERVERS" -gt 0 && test "$NFS_SERVERS" -l #mountd start_mountd(){ echo -n 'starting mountd: ' - start-stop-daemon --start --exec "$NFS_MOUNTD" -- "-f /etc/exports $@" + start-stop-daemon --start --exec "$NFS_MOUNTD" -- "$@" echo done } stop_mountd(){ diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.3.bb b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.3.bb deleted file mode 100644 index a2bebe0ba..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.3.bb +++ /dev/null @@ -1,151 +0,0 @@ -SUMMARY = "userspace utilities for kernel nfs" -DESCRIPTION = "The nfs-utils package provides a daemon for the kernel \ -NFS server and related tools." -HOMEPAGE = "http://nfs.sourceforge.net/" -SECTION = "console/network" - -LICENSE = "MIT & GPLv2+ & BSD" -LIC_FILES_CHKSUM = "file://COPYING;md5=95f3a93a5c3c7888de623b46ea085a84" - -# util-linux for libblkid -DEPENDS = "libcap libnfsidmap libevent util-linux sqlite3 libtirpc" -RDEPENDS_${PN}-client = "rpcbind bash" -RDEPENDS_${PN} = "${PN}-client bash" -RRECOMMENDS_${PN} = "kernel-module-nfsd" - -inherit useradd - -USERADD_PACKAGES = "${PN}-client" -USERADD_PARAM_${PN}-client = "--system --home-dir /var/lib/nfs \ - --shell /bin/false --user-group rpcuser" - -SRC_URI = "${KERNELORG_MIRROR}/linux/utils/nfs-utils/${PV}/nfs-utils-${PV}.tar.xz \ - file://0001-configure-Allow-to-explicitly-disable-nfsidmap.patch \ - file://nfs-utils-1.2.3-sm-notify-res_init.patch \ - file://nfsserver \ - file://nfscommon \ - file://nfs-utils.conf \ - file://nfs-server.service \ - file://nfs-mountd.service \ - file://nfs-statd.service \ - file://proc-fs-nfsd.mount \ - file://nfs-utils-Do-not-pass-CFLAGS-to-gcc-while-building.patch \ - file://nfs-utils-debianize-start-statd.patch \ - file://0001-nfs-utils-statd-fix-a-segfault-caused-by-improper-us.patch \ - file://bugfix-adjust-statd-service-name.patch \ - file://fix-protocol-minor-version-fall-back.patch \ -" - -SRC_URI[md5sum] = "cd6b568c2e9301cc3bfac09d87fbbc0b" -SRC_URI[sha256sum] = "700d689c5622c87953c34102e5befafc4d3c811e676852238f0dd79c9c0c084d" - -# Only kernel-module-nfsd is required here (but can be built-in) - the nfsd module will -# pull in the remainder of the dependencies. 
- -INITSCRIPT_PACKAGES = "${PN} ${PN}-client" -INITSCRIPT_NAME = "nfsserver" -INITSCRIPT_PARAMS = "defaults" -INITSCRIPT_NAME_${PN}-client = "nfscommon" -INITSCRIPT_PARAMS_${PN}-client = "defaults 19 21" - -inherit autotools-brokensep update-rc.d systemd pkgconfig - -SYSTEMD_SERVICE_${PN} = "nfs-server.service nfs-mountd.service" -SYSTEMD_SERVICE_${PN}-client = "nfs-statd.service" -SYSTEMD_AUTO_ENABLE = "disable" - -# --enable-uuid is need for cross-compiling -EXTRA_OECONF = "--with-statduser=rpcuser \ - --enable-mountconfig \ - --enable-libmount-mount \ - --disable-nfsv41 \ - --enable-uuid \ - --disable-gss \ - --disable-nfsdcltrack \ - --with-statdpath=/var/lib/nfs/statd \ - " - -PACKAGECONFIG ??= "tcp-wrappers \ - ${@bb.utils.contains('DISTRO_FEATURES', 'ipv6', 'ipv6', '', d)} \ -" -PACKAGECONFIG_remove_libc-musl = "tcp-wrappers" -PACKAGECONFIG[tcp-wrappers] = "--with-tcp-wrappers,--without-tcp-wrappers,tcp-wrappers" -PACKAGECONFIG[nfsidmap] = "--enable-nfsidmap,--disable-nfsidmap,keyutils" -PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6," - -INHIBIT_AUTO_STAGE = "1" - -PACKAGES =+ "${PN}-client ${PN}-stats" - -CONFFILES_${PN}-client += "${localstatedir}/lib/nfs/etab \ - ${localstatedir}/lib/nfs/rmtab \ - ${localstatedir}/lib/nfs/xtab \ - ${localstatedir}/lib/nfs/statd/state \ - ${sysconfdir}/nfsmount.conf" - -FILES_${PN}-client = "${base_sbindir}/*mount.nfs* ${sbindir}/*statd \ - ${sbindir}/rpc.idmapd ${sbindir}/sm-notify \ - ${sbindir}/showmount ${sbindir}/nfsstat \ - ${localstatedir}/lib/nfs \ - ${sysconfdir}/nfs-utils.conf \ - ${sysconfdir}/nfsmount.conf \ - ${sysconfdir}/init.d/nfscommon \ - ${systemd_unitdir}/system/nfs-statd.service" -FILES_${PN}-stats = "${sbindir}/mountstats ${sbindir}/nfsiostat" -RDEPENDS_${PN}-stats = "python3-core" - -FILES_${PN} += "${systemd_unitdir}" - -do_configure_prepend() { - sed -i -e 's,sbindir = /sbin,sbindir = ${base_sbindir},g' \ - ${S}/utils/mount/Makefile.am - - sed -i -e 's,sbindir = /sbin,sbindir = ${base_sbindir},g' \ - ${S}/utils/osd_login/Makefile.am -} - -# Make clean needed because the package comes with -# precompiled 64-bit objects that break the build -do_compile_prepend() { - make clean -} - -do_install_append () { - install -d ${D}${sysconfdir}/init.d - install -m 0755 ${WORKDIR}/nfsserver ${D}${sysconfdir}/init.d/nfsserver - install -m 0755 ${WORKDIR}/nfscommon ${D}${sysconfdir}/init.d/nfscommon - - install -m 0755 ${WORKDIR}/nfs-utils.conf ${D}${sysconfdir} - install -m 0755 ${S}/utils/mount/nfsmount.conf ${D}${sysconfdir} - - install -d ${D}${systemd_unitdir}/system - install -m 0644 ${WORKDIR}/nfs-server.service ${D}${systemd_unitdir}/system/ - install -m 0644 ${WORKDIR}/nfs-mountd.service ${D}${systemd_unitdir}/system/ - install -m 0644 ${WORKDIR}/nfs-statd.service ${D}${systemd_unitdir}/system/ - sed -i -e 's,@SBINDIR@,${sbindir},g' \ - -e 's,@SYSCONFDIR@,${sysconfdir},g' \ - ${D}${systemd_unitdir}/system/*.service - if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then - install -d ${D}${sysconfdir}/modules-load.d - echo "nfsd" > ${D}${sysconfdir}/modules-load.d/nfsd.conf - install -m 0644 ${WORKDIR}/proc-fs-nfsd.mount ${D}${systemd_unitdir}/system/ - install -d ${D}${systemd_unitdir}/system/sysinit.target.wants/ - ln -sf ../proc-fs-nfsd.mount ${D}${systemd_unitdir}/system/sysinit.target.wants/proc-fs-nfsd.mount - fi - - # kernel code as of 3.8 hard-codes this path as a default - install -d ${D}/var/lib/nfs/v4recovery - - # chown the directories and files - chown -R rpcuser:rpcuser 
${D}${localstatedir}/lib/nfs/statd - chmod 0644 ${D}${localstatedir}/lib/nfs/statd/state - - # the following are built by CC_FOR_BUILD - rm -f ${D}${sbindir}/rpcdebug - rm -f ${D}${sbindir}/rpcgen - rm -f ${D}${sbindir}/locktest - - # Make python tools use python 3 - sed -i -e '1s,#!.*python.*,#!${bindir}/python3,' ${D}${sbindir}/mountstats ${D}${sbindir}/nfsiostat - -} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.4.bb b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.4.bb new file mode 100644 index 000000000..4ca9ab2a3 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/nfs-utils/nfs-utils_1.3.4.bb @@ -0,0 +1,148 @@ +SUMMARY = "userspace utilities for kernel nfs" +DESCRIPTION = "The nfs-utils package provides a daemon for the kernel \ +NFS server and related tools." +HOMEPAGE = "http://nfs.sourceforge.net/" +SECTION = "console/network" + +LICENSE = "MIT & GPLv2+ & BSD" +LIC_FILES_CHKSUM = "file://COPYING;md5=95f3a93a5c3c7888de623b46ea085a84" + +# util-linux for libblkid +DEPENDS = "libcap libnfsidmap libevent util-linux sqlite3 libtirpc" +RDEPENDS_${PN} = "${PN}-client bash" +RRECOMMENDS_${PN} = "kernel-module-nfsd" + +inherit useradd + +USERADD_PACKAGES = "${PN}-client" +USERADD_PARAM_${PN}-client = "--system --home-dir /var/lib/nfs \ + --shell /bin/false --user-group rpcuser" + +SRC_URI = "${KERNELORG_MIRROR}/linux/utils/nfs-utils/${PV}/nfs-utils-${PV}.tar.xz \ + file://0001-configure-Allow-to-explicitly-disable-nfsidmap.patch \ + file://nfs-utils-1.2.3-sm-notify-res_init.patch \ + file://nfsserver \ + file://nfscommon \ + file://nfs-utils.conf \ + file://nfs-server.service \ + file://nfs-mountd.service \ + file://nfs-statd.service \ + file://proc-fs-nfsd.mount \ + file://nfs-utils-Do-not-pass-CFLAGS-to-gcc-while-building.patch \ + file://nfs-utils-debianize-start-statd.patch \ + file://bugfix-adjust-statd-service-name.patch \ +" + +SRC_URI[md5sum] = "54e4119043ec8507a2a0e054cf2889a4" +SRC_URI[sha256sum] = "b42a5bc0a8d80d04650030ceb9a11f08f4acfbcb1ee297f657fb94e339c45975" + +# Only kernel-module-nfsd is required here (but can be built-in) - the nfsd module will +# pull in the remainder of the dependencies. 
+ +INITSCRIPT_PACKAGES = "${PN} ${PN}-client" +INITSCRIPT_NAME = "nfsserver" +INITSCRIPT_PARAMS = "defaults" +INITSCRIPT_NAME_${PN}-client = "nfscommon" +INITSCRIPT_PARAMS_${PN}-client = "defaults 19 21" + +inherit autotools-brokensep update-rc.d systemd pkgconfig + +SYSTEMD_PACKAGES = "${PN} ${PN}-client" +SYSTEMD_SERVICE_${PN} = "nfs-server.service nfs-mountd.service" +SYSTEMD_SERVICE_${PN}-client = "nfs-statd.service" + +# --enable-uuid is need for cross-compiling +EXTRA_OECONF = "--with-statduser=rpcuser \ + --enable-mountconfig \ + --enable-libmount-mount \ + --disable-nfsv41 \ + --enable-uuid \ + --disable-gss \ + --disable-nfsdcltrack \ + --with-statdpath=/var/lib/nfs/statd \ + " + +PACKAGECONFIG ??= "tcp-wrappers \ + ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \ +" +PACKAGECONFIG_remove_libc-musl = "tcp-wrappers" +PACKAGECONFIG[tcp-wrappers] = "--with-tcp-wrappers,--without-tcp-wrappers,tcp-wrappers" +PACKAGECONFIG[nfsidmap] = "--enable-nfsidmap,--disable-nfsidmap,keyutils" +PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6," + +PACKAGES =+ "${PN}-client ${PN}-mount ${PN}-stats" + +CONFFILES_${PN}-client += "${localstatedir}/lib/nfs/etab \ + ${localstatedir}/lib/nfs/rmtab \ + ${localstatedir}/lib/nfs/xtab \ + ${localstatedir}/lib/nfs/statd/state \ + ${sysconfdir}/nfsmount.conf" + +FILES_${PN}-client = "${sbindir}/*statd \ + ${sbindir}/rpc.idmapd ${sbindir}/sm-notify \ + ${sbindir}/showmount ${sbindir}/nfsstat \ + ${localstatedir}/lib/nfs \ + ${sysconfdir}/nfs-utils.conf \ + ${sysconfdir}/nfsmount.conf \ + ${sysconfdir}/init.d/nfscommon \ + ${systemd_unitdir}/system/nfs-statd.service" +RDEPENDS_${PN}-client = "${PN}-mount rpcbind" + +FILES_${PN}-mount = "${base_sbindir}/*mount.nfs*" + +FILES_${PN}-stats = "${sbindir}/mountstats ${sbindir}/nfsiostat" +RDEPENDS_${PN}-stats = "python3-core" + +FILES_${PN} += "${systemd_unitdir}" + +do_configure_prepend() { + sed -i -e 's,sbindir = /sbin,sbindir = ${base_sbindir},g' \ + ${S}/utils/mount/Makefile.am + + sed -i -e 's,sbindir = /sbin,sbindir = ${base_sbindir},g' \ + ${S}/utils/osd_login/Makefile.am +} + +# Make clean needed because the package comes with +# precompiled 64-bit objects that break the build +do_compile_prepend() { + make clean +} + +do_install_append () { + install -d ${D}${sysconfdir}/init.d + install -m 0755 ${WORKDIR}/nfsserver ${D}${sysconfdir}/init.d/nfsserver + install -m 0755 ${WORKDIR}/nfscommon ${D}${sysconfdir}/init.d/nfscommon + + install -m 0755 ${WORKDIR}/nfs-utils.conf ${D}${sysconfdir} + install -m 0755 ${S}/utils/mount/nfsmount.conf ${D}${sysconfdir} + + install -d ${D}${systemd_unitdir}/system + install -m 0644 ${WORKDIR}/nfs-server.service ${D}${systemd_unitdir}/system/ + install -m 0644 ${WORKDIR}/nfs-mountd.service ${D}${systemd_unitdir}/system/ + install -m 0644 ${WORKDIR}/nfs-statd.service ${D}${systemd_unitdir}/system/ + sed -i -e 's,@SBINDIR@,${sbindir},g' \ + -e 's,@SYSCONFDIR@,${sysconfdir},g' \ + ${D}${systemd_unitdir}/system/*.service + if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then + install -m 0644 ${WORKDIR}/proc-fs-nfsd.mount ${D}${systemd_unitdir}/system/ + install -d ${D}${systemd_unitdir}/system/sysinit.target.wants/ + ln -sf ../proc-fs-nfsd.mount ${D}${systemd_unitdir}/system/sysinit.target.wants/proc-fs-nfsd.mount + fi + + # kernel code as of 3.8 hard-codes this path as a default + install -d ${D}/var/lib/nfs/v4recovery + + # chown the directories and files + chown -R rpcuser:rpcuser ${D}${localstatedir}/lib/nfs/statd + chmod 0644 
${D}${localstatedir}/lib/nfs/statd/state + + # the following are built by CC_FOR_BUILD + rm -f ${D}${sbindir}/rpcdebug + rm -f ${D}${sbindir}/rpcgen + rm -f ${D}${sbindir}/locktest + + # Make python tools use python 3 + sed -i -e '1s,#!.*python.*,#!${bindir}/python3,' ${D}${sbindir}/mountstats ${D}${sbindir}/nfsiostat + +} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono.inc b/import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono.inc index 9c47c6fde..676a0c004 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono.inc +++ b/import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono.inc @@ -13,7 +13,7 @@ INITSCRIPT_NAME = "ofono" INITSCRIPT_PARAMS = "defaults 22" PACKAGECONFIG ??= "\ - ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd', '', d)} \ + ${@bb.utils.filter('DISTRO_FEATURES', 'systemd', d)} \ ${@bb.utils.contains('DISTRO_FEATURES', 'bluetooth', 'bluez', '', d)} \ " PACKAGECONFIG[systemd] = "--with-systemdunitdir=${systemd_unitdir}/system/,--with-systemdunitdir=" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono_1.18.bb b/import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono_1.18.bb deleted file mode 100644 index b0707311a..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono_1.18.bb +++ /dev/null @@ -1,10 +0,0 @@ -require ofono.inc - -SRC_URI = "\ - ${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \ - file://ofono \ -" -SRC_URI[md5sum] = "0a6b37c8ace891cb2a7ca5d121043a0a" -SRC_URI[sha256sum] = "53cdbf342913f46bce4827241c60e24255a3d43a94945edf77482ae5b312d51f" - -CFLAGS_append_libc-uclibc = " -D_GNU_SOURCE" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono_1.19.bb b/import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono_1.19.bb new file mode 100644 index 000000000..adebd71c3 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/ofono/ofono_1.19.bb @@ -0,0 +1,10 @@ +require ofono.inc + +SRC_URI = "\ + ${KERNELORG_MIRROR}/linux/network/${BPN}/${BP}.tar.xz \ + file://ofono \ +" +SRC_URI[md5sum] = "a5f8803ace110511b6ff5a2b39782e8b" +SRC_URI[sha256sum] = "a0e09bdd8b53b8d2e4b54f1863ecd9aebe4786477a6cbf8f655496e8edb31c81" + +CFLAGS_append_libc-uclibc = " -D_GNU_SOURCE" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/fix-CVE-2016-8858.patch b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/fix-CVE-2016-8858.patch deleted file mode 100644 index b26ee81b9..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/fix-CVE-2016-8858.patch +++ /dev/null @@ -1,39 +0,0 @@ -Fix CVE-2016-8858 of openssh - -Backport patch from upstream and drop the change of comment which can NOT be applied. - -Upstream-Status: Backport [ https://anongit.mindrot.org/openssh.git/commit/?id=ec165c3 ] -CVE: CVE-2016-8858 - -Signed-off-by: Kai Kang ---- -From ec165c392ca54317dbe3064a8c200de6531e89ad Mon Sep 17 00:00:00 2001 -From: "markus@openbsd.org" -Date: Mon, 10 Oct 2016 19:28:48 +0000 -Subject: [PATCH] upstream commit - -Unregister the KEXINIT handler after message has been -received. Otherwise an unauthenticated peer can repeat the KEXINIT and cause -allocation of up to 128MB -- until the connection is closed. 
Reported by -shilei-c at 360.cn - -Upstream-ID: 43649ae12a27ef94290db16d1a98294588b75c05 ---- - kex.c | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) - -diff --git a/kex.c b/kex.c -index 3f97f8c..6a94bc5 100644 ---- a/kex.c -+++ b/kex.c -@@ -481,6 +481,7 @@ kex_input_kexinit(int type, u_int32_t seq, void *ctxt) - if (kex == NULL) - return SSH_ERR_INVALID_ARGUMENT; - -+ ssh_dispatch_set(ssh, SSH2_MSG_KEXINIT, NULL); - ptr = sshpkt_ptr(ssh, &dlen); - if ((r = sshbuf_put(kex->peer, ptr, dlen)) != 0) - return r; --- -2.10.1 - diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/init b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/init index 1f63725cc..34ba0f846 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/init +++ b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/init @@ -19,11 +19,6 @@ fi [ -z "$SYSCONFDIR" ] && SYSCONFDIR=/etc/ssh mkdir -p $SYSCONFDIR -HOST_KEY_RSA=$SYSCONFDIR/ssh_host_rsa_key -HOST_KEY_DSA=$SYSCONFDIR/ssh_host_dsa_key -HOST_KEY_ECDSA=$SYSCONFDIR/ssh_host_ecdsa_key -HOST_KEY_ED25519=$SYSCONFDIR/ssh_host_ed25519_key - check_for_no_start() { # forget it if we're trying to start, and /etc/ssh/sshd_not_to_be_run exists if [ -e $SYSCONFDIR/sshd_not_to_be_run ]; then @@ -44,33 +39,13 @@ check_config() { /usr/sbin/sshd -t $SSHD_OPTS || exit 1 } -check_keys() { - # create keys if necessary - if [ ! -f $HOST_KEY_RSA ]; then - echo " generating ssh RSA key..." - ssh-keygen -q -f $HOST_KEY_RSA -N '' -t rsa - fi - if [ ! -f $HOST_KEY_ECDSA ]; then - echo " generating ssh ECDSA key..." - ssh-keygen -q -f $HOST_KEY_ECDSA -N '' -t ecdsa - fi - if [ ! -f $HOST_KEY_DSA ]; then - echo " generating ssh DSA key..." - ssh-keygen -q -f $HOST_KEY_DSA -N '' -t dsa - fi - if [ ! -f $HOST_KEY_ED25519 ]; then - echo " generating ssh ED25519 key..." - ssh-keygen -q -f $HOST_KEY_ED25519 -N '' -t ed25519 - fi -} - export PATH="${PATH:+$PATH:}/usr/sbin:/sbin" case "$1" in start) check_for_no_start echo "Starting OpenBSD Secure Shell server: sshd" - check_keys + @LIBEXECDIR@/sshd_check_keys check_privsep_dir start-stop-daemon -S -p $PIDFILE -x /usr/sbin/sshd -- $SSHD_OPTS echo "done." @@ -83,7 +58,7 @@ case "$1" in reload|force-reload) check_for_no_start - check_keys + @LIBEXECDIR@/sshd_check_keys check_config echo -n "Reloading OpenBSD Secure Shell server's configuration" start-stop-daemon -K -p $PIDFILE -s 1 -x /usr/sbin/sshd @@ -91,7 +66,7 @@ case "$1" in ;; restart) - check_keys + @LIBEXECDIR@/sshd_check_keys check_config echo -n "Restarting OpenBSD Secure Shell server: sshd" start-stop-daemon -K -p $PIDFILE --oknodo -x /usr/sbin/sshd diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/openssh-7.1p1-conditional-compile-des-in-cipher.patch b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/openssh-7.1p1-conditional-compile-des-in-cipher.patch index 2773c14e5..1098b972c 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/openssh-7.1p1-conditional-compile-des-in-cipher.patch +++ b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/openssh-7.1p1-conditional-compile-des-in-cipher.patch @@ -1,18 +1,19 @@ -From d7eb26785ad4f25fb09fae46726ab8ca3fe16921 Mon Sep 17 00:00:00 2001 -From: Haiqing Bai -Date: Mon, 22 Aug 2016 14:11:16 +0300 -Subject: [PATCH] Remove des in cipher. 
+From 27740c918fe5d78441bcf69e7d2eefb23ddeca4c Mon Sep 17 00:00:00 2001 +From: Dengke Du +Date: Thu, 19 Jan 2017 03:00:08 -0500 +Subject: [PATCH 1/3] Remove des in cipher. Upstream-Status: Pending Signed-off-by: Haiqing Bai Signed-off-by: Jussi Kukkonen +Signed-off-by: Dengke Du --- cipher.c | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/cipher.c b/cipher.c -index 031bda9..6cd667a 100644 +index 2def333..59f6792 100644 --- a/cipher.c +++ b/cipher.c @@ -53,8 +53,10 @@ @@ -25,8 +26,8 @@ index 031bda9..6cd667a 100644 +#endif /* OPENSSL_NO_DES */ #endif - struct sshcipher { -@@ -79,15 +81,19 @@ struct sshcipher { + struct sshcipher_ctx { +@@ -88,15 +90,19 @@ struct sshcipher { static const struct sshcipher ciphers[] = { #ifdef WITH_SSH1 @@ -39,14 +40,14 @@ index 031bda9..6cd667a 100644 # endif /* OPENSSL_NO_BF */ #endif /* WITH_SSH1 */ #ifdef WITH_OPENSSL - { "none", SSH_CIPHER_NONE, 8, 0, 0, 0, 0, 0, EVP_enc_null }, +#ifndef OPENSSL_NO_DES + { "none", SSH_CIPHER_NONE, 8, 0, 0, 0, 0, 0, EVP_enc_null }, { "3des-cbc", SSH_CIPHER_SSH2, 8, 24, 0, 0, 0, 1, EVP_des_ede3_cbc }, +#endif /* OPENSSL_NO_DES */ # ifndef OPENSSL_NO_BF { "blowfish-cbc", SSH_CIPHER_SSH2, 8, 16, 0, 0, 0, 1, EVP_bf_cbc }, -@@ -171,8 +177,10 @@ cipher_keylen(const struct sshcipher *c) +@@ -180,8 +186,10 @@ cipher_keylen(const struct sshcipher *c) u_int cipher_seclen(const struct sshcipher *c) { @@ -57,7 +58,7 @@ index 031bda9..6cd667a 100644 return cipher_keylen(c); } -@@ -209,11 +217,13 @@ u_int +@@ -230,11 +238,13 @@ u_int cipher_mask_ssh1(int client) { u_int mask = 0; @@ -71,7 +72,7 @@ index 031bda9..6cd667a 100644 return mask; } -@@ -553,7 +563,9 @@ cipher_get_keyiv(struct sshcipher_ctx *cc, u_char *iv, u_int len) +@@ -606,7 +616,9 @@ cipher_get_keyiv(struct sshcipher_ctx *cc, u_char *iv, u_int len) switch (c->number) { #ifdef WITH_OPENSSL case SSH_CIPHER_SSH2: @@ -79,20 +80,20 @@ index 031bda9..6cd667a 100644 case SSH_CIPHER_DES: +#endif /* OPENSSL_NO_DES */ case SSH_CIPHER_BLOWFISH: - evplen = EVP_CIPHER_CTX_iv_length(&cc->evp); + evplen = EVP_CIPHER_CTX_iv_length(cc->evp); if (evplen == 0) -@@ -576,8 +588,10 @@ cipher_get_keyiv(struct sshcipher_ctx *cc, u_char *iv, u_int len) +@@ -629,8 +641,10 @@ cipher_get_keyiv(struct sshcipher_ctx *cc, u_char *iv, u_int len) break; #endif #ifdef WITH_SSH1 +#ifndef OPENSSL_NO_DES case SSH_CIPHER_3DES: - return ssh1_3des_iv(&cc->evp, 0, iv, 24); + return ssh1_3des_iv(cc->evp, 0, iv, 24); +#endif /* OPENSSL_NO_DES */ #endif default: return SSH_ERR_INVALID_ARGUMENT; -@@ -601,7 +615,9 @@ cipher_set_keyiv(struct sshcipher_ctx *cc, const u_char *iv) +@@ -654,7 +668,9 @@ cipher_set_keyiv(struct sshcipher_ctx *cc, const u_char *iv) switch (c->number) { #ifdef WITH_OPENSSL case SSH_CIPHER_SSH2: @@ -100,19 +101,19 @@ index 031bda9..6cd667a 100644 case SSH_CIPHER_DES: +#endif /* OPENSSL_NO_DES */ case SSH_CIPHER_BLOWFISH: - evplen = EVP_CIPHER_CTX_iv_length(&cc->evp); + evplen = EVP_CIPHER_CTX_iv_length(cc->evp); if (evplen <= 0) -@@ -616,8 +632,10 @@ cipher_set_keyiv(struct sshcipher_ctx *cc, const u_char *iv) +@@ -675,8 +691,10 @@ cipher_set_keyiv(struct sshcipher_ctx *cc, const u_char *iv) break; #endif #ifdef WITH_SSH1 +#ifndef OPENSSL_NO_DES case SSH_CIPHER_3DES: - return ssh1_3des_iv(&cc->evp, 1, (u_char *)iv, 24); + return ssh1_3des_iv(cc->evp, 1, (u_char *)iv, 24); +#endif /* OPENSSL_NO_DES */ #endif default: return SSH_ERR_INVALID_ARGUMENT; -- -2.1.4 +2.8.1 diff --git 
a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/openssh-7.1p1-conditional-compile-des-in-pkcs11.patch b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/openssh-7.1p1-conditional-compile-des-in-pkcs11.patch index 815af422f..47dc73ba1 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/openssh-7.1p1-conditional-compile-des-in-pkcs11.patch +++ b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/openssh-7.1p1-conditional-compile-des-in-pkcs11.patch @@ -1,12 +1,12 @@ -From 04cfd84423f693d879dc3ffebb0f6fe2680c254f Mon Sep 17 00:00:00 2001 -From: Haiqing Bai -Date: Fri, 18 Mar 2016 15:59:21 +0800 -Subject: [PATCH 3/3] remove des in pkcs11. +From e816fc06e4f8070b09e677ead4d21768784e4c99 Mon Sep 17 00:00:00 2001 +From: Dengke Du +Date: Thu, 19 Jan 2017 03:21:40 -0500 +Subject: [PATCH 2/3] remove des in pkcs11. Upstream-Status: Pending Signed-off-by: Haiqing Bai - +Signed-off-by: Dengke Du --- pkcs11.h | 8 ++++++++ 1 file changed, 8 insertions(+) @@ -66,5 +66,5 @@ index b01d58f..98b36e6 100644 #define CKM_PBE_SHA1_RC2_40_CBC (0x3ab) #define CKM_PKCS5_PBKD2 (0x3b0) -- -1.9.1 +2.8.1 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshd_check_keys b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshd_check_keys new file mode 100644 index 000000000..f5bba53ca --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshd_check_keys @@ -0,0 +1,64 @@ +#! /bin/sh + +# /etc/default/ssh may set SYSCONFDIR and SSHD_OPTS +if test -f /etc/default/ssh; then + . /etc/default/ssh +fi + +[ -z "$SYSCONFDIR" ] && SYSCONFDIR=/etc/ssh +mkdir -p $SYSCONFDIR + +# parse sshd options +set -- ${SSHD_OPTS} -- +sshd_config=/etc/ssh/sshd_config +while true ; do + case "$1" in + -f*) if [ "$1" = "-f" ] ; then + sshd_config="$2" + shift + else + sshd_config="${1#-f}" + fi + shift + ;; + --) shift; break;; + *) shift;; + esac +done + +# parse location of keys +HOST_KEY_RSA=$(grep ^HostKey "${sshd_config}" | grep _rsa_ | tail -1 | awk ' { print $2 } ') +[ -z "${HOST_KEY_RSA}" ] && HOST_KEY_RSA=$(grep HostKey "${sshd_config}" | grep _rsa_ | tail -1 | awk ' { print $2 } ') +[ -z "${HOST_KEY_RSA}" ] && HOST_KEY_RSA=$SYSCONFDIR/ssh_host_rsa_key +HOST_KEY_DSA=$(grep ^HostKey "${sshd_config}" | grep _dsa_ | tail -1 | awk ' { print $2 } ') +[ -z "${HOST_KEY_DSA}" ] && HOST_KEY_DSA=$(grep HostKey "${sshd_config}" | grep _dsa_ | tail -1 | awk ' { print $2 } ') +[ -z "${HOST_KEY_DSA}" ] && HOST_KEY_DSA=$SYSCONFDIR/ssh_host_dsa_key +HOST_KEY_ECDSA=$(grep ^HostKey "${sshd_config}" | grep _ecdsa_ | tail -1 | awk ' { print $2 } ') +[ -z "${HOST_KEY_ECDSA}" ] && HOST_KEY_ECDSA=$(grep HostKey "${sshd_config}" | grep _ecdsa_ | tail -1 | awk ' { print $2 } ') +[ -z "${HOST_KEY_ECDSA}" ] && HOST_KEY_ECDSA=$SYSCONFDIR/ssh_host_ecdsa_key +HOST_KEY_ED25519=$(grep ^HostKey "${sshd_config}" | grep _ed25519_ | tail -1 | awk ' { print $2 } ') +[ -z "${HOST_KEY_ED25519}" ] && HOST_KEY_ED25519=$(grep HostKey "${sshd_config}" | grep _ed25519_ | tail -1 | awk ' { print $2 } ') +[ -z "${HOST_KEY_ED25519}" ] && HOST_KEY_ED25519=$SYSCONFDIR/ssh_host_ed25519_key + +# create keys if necessary +if [ ! -f $HOST_KEY_RSA ]; then + echo " generating ssh RSA key..." + mkdir -p $(dirname $HOST_KEY_RSA) + ssh-keygen -q -f $HOST_KEY_RSA -N '' -t rsa +fi +if [ ! -f $HOST_KEY_ECDSA ]; then + echo " generating ssh ECDSA key..." 
+ mkdir -p $(dirname $HOST_KEY_ECDSA) + ssh-keygen -q -f $HOST_KEY_ECDSA -N '' -t ecdsa +fi +if [ ! -f $HOST_KEY_DSA ]; then + echo " generating ssh DSA key..." + mkdir -p $(dirname $HOST_KEY_DSA) + ssh-keygen -q -f $HOST_KEY_DSA -N '' -t dsa +fi +if [ ! -f $HOST_KEY_ED25519 ]; then + echo " generating ssh ED25519 key..." + mkdir -p $(dirname $HOST_KEY_ED25519) + ssh-keygen -q -f $HOST_KEY_ED25519 -N '' -t ed25519 +fi + diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshdgenkeys.service b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshdgenkeys.service index 148e6ad63..603c33787 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshdgenkeys.service +++ b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh/sshdgenkeys.service @@ -1,22 +1,8 @@ [Unit] Description=OpenSSH Key Generation RequiresMountsFor=/var /run -ConditionPathExists=!/var/run/ssh/ssh_host_rsa_key -ConditionPathExists=!/var/run/ssh/ssh_host_dsa_key -ConditionPathExists=!/var/run/ssh/ssh_host_ecdsa_key -ConditionPathExists=!/var/run/ssh/ssh_host_ed25519_key -ConditionPathExists=!/etc/ssh/ssh_host_rsa_key -ConditionPathExists=!/etc/ssh/ssh_host_dsa_key -ConditionPathExists=!/etc/ssh/ssh_host_ecdsa_key -ConditionPathExists=!/etc/ssh/ssh_host_ed25519_key [Service] -Environment="SYSCONFDIR=/etc/ssh" -EnvironmentFile=-/etc/default/ssh -ExecStart=@BASE_BINDIR@/mkdir -p $SYSCONFDIR -ExecStart=@BINDIR@/ssh-keygen -q -f ${SYSCONFDIR}/ssh_host_rsa_key -N '' -t rsa -ExecStart=@BINDIR@/ssh-keygen -q -f ${SYSCONFDIR}/ssh_host_dsa_key -N '' -t dsa -ExecStart=@BINDIR@/ssh-keygen -q -f ${SYSCONFDIR}/ssh_host_ecdsa_key -N '' -t ecdsa -ExecStart=@BINDIR@/ssh-keygen -q -f ${SYSCONFDIR}/ssh_host_ed25519_key -N '' -t ed25519 +ExecStart=@LIBEXECDIR@/sshd_check_keys Type=oneshot RemainAfterExit=yes diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.3p1.bb b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.3p1.bb deleted file mode 100644 index 94eb0ed20..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.3p1.bb +++ /dev/null @@ -1,165 +0,0 @@ -SUMMARY = "A suite of security-related network utilities based on \ -the SSH protocol including the ssh client and sshd server" -DESCRIPTION = "Secure rlogin/rsh/rcp/telnet replacement (OpenSSH) \ -Ssh (Secure Shell) is a program for logging into a remote machine \ -and for executing commands on a remote machine." 
-HOMEPAGE = "http://www.openssh.com/" -SECTION = "console/network" -LICENSE = "BSD" -LIC_FILES_CHKSUM = "file://LICENCE;md5=e326045657e842541d3f35aada442507" - -DEPENDS = "zlib openssl" -DEPENDS += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}" - -SRC_URI = "http://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/openssh-${PV}.tar.gz \ - file://sshd_config \ - file://ssh_config \ - file://init \ - ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)} \ - file://sshd.socket \ - file://sshd@.service \ - file://sshdgenkeys.service \ - file://volatiles.99_sshd \ - file://add-test-support-for-busybox.patch \ - file://run-ptest \ - file://openssh-7.1p1-conditional-compile-des-in-cipher.patch \ - file://openssh-7.1p1-conditional-compile-des-in-pkcs11.patch \ - file://fix-potential-signed-overflow-in-pointer-arithmatic.patch \ - file://fix-CVE-2016-8858.patch \ - " - -PAM_SRC_URI = "file://sshd" - -SRC_URI[md5sum] = "dfadd9f035d38ce5d58a3bf130b86d08" -SRC_URI[sha256sum] = "3ffb989a6dcaa69594c3b550d4855a5a2e1718ccdde7f5e36387b424220fbecc" - -inherit useradd update-rc.d update-alternatives systemd - -USERADD_PACKAGES = "${PN}-sshd" -USERADD_PARAM_${PN}-sshd = "--system --no-create-home --home-dir /var/run/sshd --shell /bin/false --user-group sshd" -INITSCRIPT_PACKAGES = "${PN}-sshd" -INITSCRIPT_NAME_${PN}-sshd = "sshd" -INITSCRIPT_PARAMS_${PN}-sshd = "defaults 9" - -SYSTEMD_PACKAGES = "${PN}-sshd" -SYSTEMD_SERVICE_${PN}-sshd = "sshd.socket" - -inherit autotools-brokensep ptest - -# LFS support: -CFLAGS += "-D__FILE_OFFSET_BITS=64" - -# login path is hardcoded in sshd -EXTRA_OECONF = "'LOGIN_PROGRAM=${base_bindir}/login' \ - ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '--with-pam', '--without-pam', d)} \ - --without-zlib-version-check \ - --with-privsep-path=/var/run/sshd \ - --sysconfdir=${sysconfdir}/ssh \ - --with-xauth=/usr/bin/xauth \ - --disable-strip \ - " - -# Since we do not depend on libbsd, we do not want configure to use it -# just because it finds libutil.h. But, specifying --disable-libutil -# causes compile errors, so... -CACHED_CONFIGUREVARS += "ac_cv_header_bsd_libutil_h=no ac_cv_header_libutil_h=no" - -# passwd path is hardcoded in sshd -CACHED_CONFIGUREVARS += "ac_cv_path_PATH_PASSWD_PROG=${bindir}/passwd" - -# We don't want to depend on libblockfile -CACHED_CONFIGUREVARS += "ac_cv_header_maillock_h=no" - -# This is a workaround for uclibc because including stdio.h -# pulls in pthreads.h and causes conflicts in function prototypes. -# This results in compilation failure, so unless this is fixed, -# disable pam for uclibc. -EXTRA_OECONF_append_libc-uclibc=" --without-pam" - -do_configure_prepend () { - export LD="${CC}" - install -m 0644 ${WORKDIR}/sshd_config ${B}/ - install -m 0644 ${WORKDIR}/ssh_config ${B}/ - if [ ! -e acinclude.m4 -a -e aclocal.m4 ]; then - cp aclocal.m4 acinclude.m4 - fi -} - -do_compile_ptest() { - # skip regress/unittests/ binaries: this will silently skip - # unittests in run-ptests which is good because they are so slow. 
- oe_runmake regress/modpipe regress/setuid-allowed regress/netcat -} - -do_install_append () { - if [ "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}" = "pam" ]; then - install -D -m 0644 ${WORKDIR}/sshd ${D}${sysconfdir}/pam.d/sshd - sed -i -e 's:#UsePAM no:UsePAM yes:' ${D}${sysconfdir}/ssh/sshd_config - fi - - if [ "${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', '', d)}" = "x11" ]; then - sed -i -e 's:#X11Forwarding no:X11Forwarding yes:' ${D}${sysconfdir}/ssh/sshd_config - fi - - install -d ${D}${sysconfdir}/init.d - install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/sshd - rm -f ${D}${bindir}/slogin ${D}${datadir}/Ssh.bin - rmdir ${D}${localstatedir}/run/sshd ${D}${localstatedir}/run ${D}${localstatedir} - install -d ${D}/${sysconfdir}/default/volatiles - install -m 644 ${WORKDIR}/volatiles.99_sshd ${D}/${sysconfdir}/default/volatiles/99_sshd - install -m 0755 ${S}/contrib/ssh-copy-id ${D}${bindir} - - # Create config files for read-only rootfs - install -d ${D}${sysconfdir}/ssh - install -m 644 ${D}${sysconfdir}/ssh/sshd_config ${D}${sysconfdir}/ssh/sshd_config_readonly - sed -i '/HostKey/d' ${D}${sysconfdir}/ssh/sshd_config_readonly - echo "HostKey /var/run/ssh/ssh_host_rsa_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly - echo "HostKey /var/run/ssh/ssh_host_dsa_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly - echo "HostKey /var/run/ssh/ssh_host_ecdsa_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly - echo "HostKey /var/run/ssh/ssh_host_ed25519_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly - - install -d ${D}${systemd_unitdir}/system - install -c -m 0644 ${WORKDIR}/sshd.socket ${D}${systemd_unitdir}/system - install -c -m 0644 ${WORKDIR}/sshd@.service ${D}${systemd_unitdir}/system - install -c -m 0644 ${WORKDIR}/sshdgenkeys.service ${D}${systemd_unitdir}/system - sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \ - -e 's,@SBINDIR@,${sbindir},g' \ - -e 's,@BINDIR@,${bindir},g' \ - ${D}${systemd_unitdir}/system/sshd.socket ${D}${systemd_unitdir}/system/*.service -} - -do_install_ptest () { - sed -i -e "s|^SFTPSERVER=.*|SFTPSERVER=${libexecdir}/sftp-server|" regress/test-exec.sh - cp -r regress ${D}${PTEST_PATH} -} - -ALLOW_EMPTY_${PN} = "1" - -PACKAGES =+ "${PN}-keygen ${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-sftp ${PN}-misc ${PN}-sftp-server" -FILES_${PN}-scp = "${bindir}/scp.${BPN}" -FILES_${PN}-ssh = "${bindir}/ssh.${BPN} ${sysconfdir}/ssh/ssh_config" -FILES_${PN}-sshd = "${sbindir}/sshd ${sysconfdir}/init.d/sshd ${systemd_unitdir}/system" -FILES_${PN}-sshd += "${sysconfdir}/ssh/moduli ${sysconfdir}/ssh/sshd_config ${sysconfdir}/ssh/sshd_config_readonly ${sysconfdir}/default/volatiles/99_sshd ${sysconfdir}/pam.d/sshd" -FILES_${PN}-sftp = "${bindir}/sftp" -FILES_${PN}-sftp-server = "${libexecdir}/sftp-server" -FILES_${PN}-misc = "${bindir}/ssh* ${libexecdir}/ssh*" -FILES_${PN}-keygen = "${bindir}/ssh-keygen" - -RDEPENDS_${PN} += "${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-keygen" -RDEPENDS_${PN}-sshd += "${PN}-keygen ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam-plugin-keyinit pam-plugin-loginuid', '', d)}" -RDEPENDS_${PN}-ptest += "${PN}-sftp ${PN}-misc ${PN}-sftp-server make" - -RPROVIDES_${PN}-ssh = "ssh" -RPROVIDES_${PN}-sshd = "sshd" - -RCONFLICTS_${PN} = "dropbear" -RCONFLICTS_${PN}-sshd = "dropbear" -RCONFLICTS_${PN}-keygen = "ssh-keygen" - -CONFFILES_${PN}-sshd = "${sysconfdir}/ssh/sshd_config" -CONFFILES_${PN}-ssh = "${sysconfdir}/ssh/ssh_config" - -ALTERNATIVE_PRIORITY = "90" -ALTERNATIVE_${PN}-scp = "scp" -ALTERNATIVE_${PN}-ssh = "ssh" - 
diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.4p1.bb b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.4p1.bb new file mode 100644 index 000000000..e501eadd6 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/openssh/openssh_7.4p1.bb @@ -0,0 +1,172 @@ +SUMMARY = "A suite of security-related network utilities based on \ +the SSH protocol including the ssh client and sshd server" +DESCRIPTION = "Secure rlogin/rsh/rcp/telnet replacement (OpenSSH) \ +Ssh (Secure Shell) is a program for logging into a remote machine \ +and for executing commands on a remote machine." +HOMEPAGE = "http://www.openssh.com/" +SECTION = "console/network" +LICENSE = "BSD" +LIC_FILES_CHKSUM = "file://LICENCE;md5=e326045657e842541d3f35aada442507" + +DEPENDS = "zlib openssl" +DEPENDS += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}" + +SRC_URI = "http://ftp.openbsd.org/pub/OpenBSD/OpenSSH/portable/openssh-${PV}.tar.gz \ + file://sshd_config \ + file://ssh_config \ + file://init \ + ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '${PAM_SRC_URI}', '', d)} \ + file://sshd.socket \ + file://sshd@.service \ + file://sshdgenkeys.service \ + file://volatiles.99_sshd \ + file://add-test-support-for-busybox.patch \ + file://run-ptest \ + file://openssh-7.1p1-conditional-compile-des-in-cipher.patch \ + file://openssh-7.1p1-conditional-compile-des-in-pkcs11.patch \ + file://fix-potential-signed-overflow-in-pointer-arithmatic.patch \ + file://sshd_check_keys \ + " + +PAM_SRC_URI = "file://sshd" + +SRC_URI[md5sum] = "b2db2a83caf66a208bb78d6d287cdaa3" +SRC_URI[sha256sum] = "1b1fc4a14e2024293181924ed24872e6f2e06293f3e8926a376b8aec481f19d1" + +inherit useradd update-rc.d update-alternatives systemd + +USERADD_PACKAGES = "${PN}-sshd" +USERADD_PARAM_${PN}-sshd = "--system --no-create-home --home-dir /var/run/sshd --shell /bin/false --user-group sshd" +INITSCRIPT_PACKAGES = "${PN}-sshd" +INITSCRIPT_NAME_${PN}-sshd = "sshd" +INITSCRIPT_PARAMS_${PN}-sshd = "defaults 9" + +SYSTEMD_PACKAGES = "${PN}-sshd" +SYSTEMD_SERVICE_${PN}-sshd = "sshd.socket" + +inherit autotools-brokensep ptest + +# LFS support: +CFLAGS += "-D__FILE_OFFSET_BITS=64" + +# login path is hardcoded in sshd +EXTRA_OECONF = "'LOGIN_PROGRAM=${base_bindir}/login' \ + ${@bb.utils.contains('DISTRO_FEATURES', 'pam', '--with-pam', '--without-pam', d)} \ + --without-zlib-version-check \ + --with-privsep-path=/var/run/sshd \ + --sysconfdir=${sysconfdir}/ssh \ + --with-xauth=/usr/bin/xauth \ + --disable-strip \ + " + +# Since we do not depend on libbsd, we do not want configure to use it +# just because it finds libutil.h. But, specifying --disable-libutil +# causes compile errors, so... +CACHED_CONFIGUREVARS += "ac_cv_header_bsd_libutil_h=no ac_cv_header_libutil_h=no" + +# passwd path is hardcoded in sshd +CACHED_CONFIGUREVARS += "ac_cv_path_PATH_PASSWD_PROG=${bindir}/passwd" + +# We don't want to depend on libblockfile +CACHED_CONFIGUREVARS += "ac_cv_header_maillock_h=no" + +# This is a workaround for uclibc because including stdio.h +# pulls in pthreads.h and causes conflicts in function prototypes. +# This results in compilation failure, so unless this is fixed, +# disable pam for uclibc. +EXTRA_OECONF_append_libc-uclibc=" --without-pam" + +do_configure_prepend () { + export LD="${CC}" + install -m 0644 ${WORKDIR}/sshd_config ${B}/ + install -m 0644 ${WORKDIR}/ssh_config ${B}/ + if [ ! 
-e acinclude.m4 -a -e aclocal.m4 ]; then + cp aclocal.m4 acinclude.m4 + fi +} + +do_compile_ptest() { + # skip regress/unittests/ binaries: this will silently skip + # unittests in run-ptests which is good because they are so slow. + oe_runmake regress/modpipe regress/setuid-allowed regress/netcat +} + +do_install_append () { + if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then + install -D -m 0644 ${WORKDIR}/sshd ${D}${sysconfdir}/pam.d/sshd + sed -i -e 's:#UsePAM no:UsePAM yes:' ${D}${sysconfdir}/ssh/sshd_config + fi + + if [ "${@bb.utils.filter('DISTRO_FEATURES', 'x11', d)}" ]; then + sed -i -e 's:#X11Forwarding no:X11Forwarding yes:' ${D}${sysconfdir}/ssh/sshd_config + fi + + install -d ${D}${sysconfdir}/init.d + install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/sshd + rm -f ${D}${bindir}/slogin ${D}${datadir}/Ssh.bin + rmdir ${D}${localstatedir}/run/sshd ${D}${localstatedir}/run ${D}${localstatedir} + install -d ${D}/${sysconfdir}/default/volatiles + install -m 644 ${WORKDIR}/volatiles.99_sshd ${D}/${sysconfdir}/default/volatiles/99_sshd + install -m 0755 ${S}/contrib/ssh-copy-id ${D}${bindir} + + # Create config files for read-only rootfs + install -d ${D}${sysconfdir}/ssh + install -m 644 ${D}${sysconfdir}/ssh/sshd_config ${D}${sysconfdir}/ssh/sshd_config_readonly + sed -i '/HostKey/d' ${D}${sysconfdir}/ssh/sshd_config_readonly + echo "HostKey /var/run/ssh/ssh_host_rsa_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly + echo "HostKey /var/run/ssh/ssh_host_dsa_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly + echo "HostKey /var/run/ssh/ssh_host_ecdsa_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly + echo "HostKey /var/run/ssh/ssh_host_ed25519_key" >> ${D}${sysconfdir}/ssh/sshd_config_readonly + + install -d ${D}${systemd_unitdir}/system + install -c -m 0644 ${WORKDIR}/sshd.socket ${D}${systemd_unitdir}/system + install -c -m 0644 ${WORKDIR}/sshd@.service ${D}${systemd_unitdir}/system + install -c -m 0644 ${WORKDIR}/sshdgenkeys.service ${D}${systemd_unitdir}/system + sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \ + -e 's,@SBINDIR@,${sbindir},g' \ + -e 's,@BINDIR@,${bindir},g' \ + -e 's,@LIBEXECDIR@,${libexecdir}/${BPN},g' \ + ${D}${systemd_unitdir}/system/sshd.socket ${D}${systemd_unitdir}/system/*.service + + sed -i -e 's,@LIBEXECDIR@,${libexecdir}/${BPN},g' \ + ${D}${sysconfdir}/init.d/sshd + + install -D -m 0755 ${WORKDIR}/sshd_check_keys ${D}${libexecdir}/${BPN}/sshd_check_keys +} + +do_install_ptest () { + sed -i -e "s|^SFTPSERVER=.*|SFTPSERVER=${libexecdir}/sftp-server|" regress/test-exec.sh + cp -r regress ${D}${PTEST_PATH} +} + +ALLOW_EMPTY_${PN} = "1" + +PACKAGES =+ "${PN}-keygen ${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-sftp ${PN}-misc ${PN}-sftp-server" +FILES_${PN}-scp = "${bindir}/scp.${BPN}" +FILES_${PN}-ssh = "${bindir}/ssh.${BPN} ${sysconfdir}/ssh/ssh_config" +FILES_${PN}-sshd = "${sbindir}/sshd ${sysconfdir}/init.d/sshd ${systemd_unitdir}/system" +FILES_${PN}-sshd += "${sysconfdir}/ssh/moduli ${sysconfdir}/ssh/sshd_config ${sysconfdir}/ssh/sshd_config_readonly ${sysconfdir}/default/volatiles/99_sshd ${sysconfdir}/pam.d/sshd" +FILES_${PN}-sshd += "${libexecdir}/${BPN}/sshd_check_keys" +FILES_${PN}-sftp = "${bindir}/sftp" +FILES_${PN}-sftp-server = "${libexecdir}/sftp-server" +FILES_${PN}-misc = "${bindir}/ssh* ${libexecdir}/ssh*" +FILES_${PN}-keygen = "${bindir}/ssh-keygen" + +RDEPENDS_${PN} += "${PN}-scp ${PN}-ssh ${PN}-sshd ${PN}-keygen" +RDEPENDS_${PN}-sshd += "${PN}-keygen ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam-plugin-keyinit 
pam-plugin-loginuid', '', d)}" +RDEPENDS_${PN}-ptest += "${PN}-sftp ${PN}-misc ${PN}-sftp-server make" + +RPROVIDES_${PN}-ssh = "ssh" +RPROVIDES_${PN}-sshd = "sshd" + +RCONFLICTS_${PN} = "dropbear" +RCONFLICTS_${PN}-sshd = "dropbear" +RCONFLICTS_${PN}-keygen = "ssh-keygen" + +CONFFILES_${PN}-sshd = "${sysconfdir}/ssh/sshd_config" +CONFFILES_${PN}-ssh = "${sysconfdir}/ssh/ssh_config" + +ALTERNATIVE_PRIORITY = "90" +ALTERNATIVE_${PN}-scp = "scp" +ALTERNATIVE_${PN}-ssh = "ssh" + diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl.inc b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl.inc index 2ef8b38be..8f2a797b8 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl.inc +++ b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl.inc @@ -17,7 +17,6 @@ S = "${WORKDIR}/openssl-${PV}" PACKAGECONFIG[perl] = ",,," -AR_append = " r" TERMIO_libc-musl = "-DTERMIOS" TERMIO ?= "-DTERMIO" # Avoid binaries being marked as requiring an executable stack since it @@ -28,16 +27,17 @@ CFLAG = "${@base_conditional('SITEINFO_ENDIANNESS', 'le', '-DL_ENDIAN', '-DB_END export DIRS = "crypto ssl apps" export EX_LIBS = "-lgcc -ldl" export AS = "${CC} -c" -EXTRA_OEMAKE = "-e MAKEFLAGS=" -inherit pkgconfig siteinfo multilib_header ptest +inherit pkgconfig siteinfo multilib_header ptest relative_symlinks PACKAGES =+ "libcrypto libssl ${PN}-misc openssl-conf" FILES_libcrypto = "${libdir}/libcrypto${SOLIBS}" FILES_libssl = "${libdir}/libssl${SOLIBS}" FILES_${PN} =+ " ${libdir}/ssl/*" FILES_${PN}-misc = "${libdir}/ssl/misc" -RDEPENDS_${PN}-misc = "${@bb.utils.contains('PACKAGECONFIG', 'perl', 'perl', '', d)}" +RDEPENDS_${PN}-misc = "${@bb.utils.filter('PACKAGECONFIG', 'perl', d)}" + +PROVIDES += "openssl10" # Add the openssl.cnf file to the openssl-conf package. Make the libcrypto # package RRECOMMENDS on this package. This will enable the configuration @@ -84,7 +84,7 @@ do_configure () { target=linux-elf-armeb ;; linux-aarch64*) - target=linux-generic64 + target=linux-aarch64 ;; linux-sh3) target=debian-sh3 @@ -185,7 +185,7 @@ do_install () { sed -i -e 's,/etc/openssl,${sysconfdir}/ssl,g' ${D}${bindir}/c_rehash oe_multilib_header openssl/opensslconf.h - if [ "${@bb.utils.contains('PACKAGECONFIG', 'perl', 'perl', '', d)}" = "perl" ]; then + if [ "${@bb.utils.filter('PACKAGECONFIG', 'perl', d)}" ]; then sed -i -e '1s,.*,#!${bindir}/env perl,' ${D}${libdir}/ssl/misc/CA.pl sed -i -e '1s,.*,#!${bindir}/env perl,' ${D}${libdir}/ssl/misc/tsget else @@ -206,6 +206,10 @@ do_install () { do_install_ptest () { cp -r -L Makefile.org Makefile test ${D}${PTEST_PATH} + + # Replace the path to native perl with the path to target perl + sed -i 's,^PERL=.*,PERL=${bindir}/perl,' ${D}${PTEST_PATH}/Makefile + cp Configure config e_os.h ${D}${PTEST_PATH} cp -r -L include ${D}${PTEST_PATH} ln -sf ${libdir}/libcrypto.a ${D}${PTEST_PATH} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-CVE-2017-3731.patch b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-CVE-2017-3731.patch deleted file mode 100644 index 04ef52682..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-CVE-2017-3731.patch +++ /dev/null @@ -1,46 +0,0 @@ -From 0cde9a9645c949fd0acf657dadc747676245cfaf Mon Sep 17 00:00:00 2001 -From: Alexandru Moise -Date: Tue, 7 Feb 2017 11:13:19 +0200 -Subject: [PATCH 1/2] crypto/evp: harden RC4_MD5 cipher. 
-MIME-Version: 1.0 -Content-Type: text/plain; charset=UTF-8 -Content-Transfer-Encoding: 8bit - -Originally a crash in 32-bit build was reported CHACHA20-POLY1305 -cipher. The crash is triggered by truncated packet and is result -of excessive hashing to the edge of accessible memory (or bogus -MAC value is produced if x86 MD5 assembly module is involved). Since -hash operation is read-only it is not considered to be exploitable -beyond a DoS condition. - -Thanks to Robert Święcki for report. - -CVE-2017-3731 - -Backported from upstream commit: -8e20499629b6bcf868d0072c7011e590b5c2294d - -Upstream-Status: Backport - -Reviewed-by: Rich Salz -Signed-off-by: Alexandru Moise ---- - crypto/evp/e_rc4_hmac_md5.c | 2 ++ - 1 file changed, 2 insertions(+) - -diff --git a/crypto/evp/e_rc4_hmac_md5.c b/crypto/evp/e_rc4_hmac_md5.c -index 5e92855..3293419 100644 ---- a/crypto/evp/e_rc4_hmac_md5.c -+++ b/crypto/evp/e_rc4_hmac_md5.c -@@ -269,6 +269,8 @@ static int rc4_hmac_md5_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg, - len = p[arg - 2] << 8 | p[arg - 1]; - - if (!ctx->encrypt) { -+ if (len < MD5_DIGEST_LENGTH) -+ return -1; - len -= MD5_DIGEST_LENGTH; - p[arg - 2] = len >> 8; - p[arg - 1] = len; --- -2.10.2 - diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-Fix-build-with-clang-using-external-assembler.patch b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-Fix-build-with-clang-using-external-assembler.patch new file mode 100644 index 000000000..2270962a6 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0001-Fix-build-with-clang-using-external-assembler.patch @@ -0,0 +1,45 @@ +From 2f6026cb8b16cf00726e3c5625c023f196680f07 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 17 Mar 2017 12:52:08 -0700 +Subject: [PATCH] Fix build with clang using external assembler + +Cherry-picked from +https://github.com/openssl/openssl/commit/11208dcfb9105e8afa37233185decefd45e89e17 +https://github.com/openssl/openssl/commit/fbab8baddef8d3346ae40ff068871e2ddaf10270 +https://github.com/openssl/openssl/commit/6cf412c473d8145562b76219ce3da73b201b3255 + +Fixes + +| ghash-armv4.S: Assembler messages: +| ghash-armv4.S:81: Error: bad instruction `ldrbpl r12,[r2,r3]' +| ghash-armv4.S:91: Error: bad instruction `ldrbpl r8,[r0,r3]' +| ghash-armv4.S:137: Error: bad instruction `ldrbne r12,[r2,#15]' +| ghash-armv4.S:224: Error: bad instruction `ldrbpl r12,[r0,r3]' +| clang-4.0: error: assembler command failed with exit code 1 (use -v to see invocation) +| make[2]: *** [: ghash-armv4.o] Error 1 + +Upstream-Status: Backport + +Signed-off-by: Khem Raj +--- + crypto/modes/asm/ghash-armv4.pl | 7 +++++++ + 1 file changed, 7 insertions(+) + +diff --git a/crypto/modes/asm/ghash-armv4.pl b/crypto/modes/asm/ghash-armv4.pl +index 8ccc963ef..442fed4da 100644 +--- a/crypto/modes/asm/ghash-armv4.pl ++++ b/crypto/modes/asm/ghash-armv4.pl +@@ -124,7 +124,10 @@ $code=<<___; + #include "arm_arch.h" + + .text ++#if defined(__thumb2__) || defined(__clang__) ++.syntax unified ++#endif + .code 32 + + #ifdef __clang__ + #define ldrplb ldrbpl +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0002-CVE-2017-3731.patch b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0002-CVE-2017-3731.patch deleted file mode 100644 index b56b2d5bd..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/0002-CVE-2017-3731.patch +++ /dev/null @@ -1,53 +0,0 @@ -From 
6427f1accc54b515bb899370f1a662bfcb1caa52 Mon Sep 17 00:00:00 2001 -From: Alexandru Moise -Date: Tue, 7 Feb 2017 11:16:13 +0200 -Subject: [PATCH 2/2] crypto/evp: harden AEAD ciphers. -MIME-Version: 1.0 -Content-Type: text/plain; charset=UTF-8 -Content-Transfer-Encoding: 8bit - -Originally a crash in 32-bit build was reported CHACHA20-POLY1305 -cipher. The crash is triggered by truncated packet and is result -of excessive hashing to the edge of accessible memory. Since hash -operation is read-only it is not considered to be exploitable -beyond a DoS condition. Other ciphers were hardened. - -Thanks to Robert Święcki for report. - -CVE-2017-3731 - -Backported from upstream commit: -2198b3a55de681e1f3c23edb0586afe13f438051 - -Upstream-Status: Backport - -Reviewed-by: Rich Salz -Signed-off-by: Alexandru Moise ---- - crypto/evp/e_aes.c | 7 ++++++- - 1 file changed, 6 insertions(+), 1 deletion(-) - -diff --git a/crypto/evp/e_aes.c b/crypto/evp/e_aes.c -index 1734a82..16dcd10 100644 ---- a/crypto/evp/e_aes.c -+++ b/crypto/evp/e_aes.c -@@ -1235,10 +1235,15 @@ static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) - { - unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1]; - /* Correct length for explicit IV */ -+ if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN) -+ return 0; - len -= EVP_GCM_TLS_EXPLICIT_IV_LEN; - /* If decrypting correct for tag too */ -- if (!c->encrypt) -+ if (!c->encrypt) { -+ if (len < EVP_GCM_TLS_TAG_LEN) -+ return 0; - len -= EVP_GCM_TLS_TAG_LEN; -+ } - c->buf[arg - 2] = len >> 8; - c->buf[arg - 1] = len & 0xff; - } --- -2.10.2 - diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2016-7055.patch b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2016-7055.patch deleted file mode 100644 index 83a74cdac..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/CVE-2016-7055.patch +++ /dev/null @@ -1,43 +0,0 @@ -From 57c4b9f6a2f800b41ce2836986fe33640f6c3f8a Mon Sep 17 00:00:00 2001 -From: Andy Polyakov -Date: Sun, 6 Nov 2016 18:33:17 +0100 -Subject: [PATCH] bn/asm/x86_64-mont.pl: fix for CVE-2016-7055 (Low severity). - -Reviewed-by: Rich Salz -(cherry picked from commit 2fac86d9abeaa643677d1ffd0a139239fdf9406a) - -Upstream-Status: Backport [https://github.com/openssl/openssl/commit/57c4b9f6a2f800b41ce2836986fe33640f6c3f8a] -CVE: CVE-2016-7055 -Signed-off-by: Yi Zhao ---- - crypto/bn/asm/x86_64-mont.pl | 5 ++--- - 1 file changed, 2 insertions(+), 3 deletions(-) - -diff --git a/crypto/bn/asm/x86_64-mont.pl b/crypto/bn/asm/x86_64-mont.pl -index 044fd7e..80492d8 100755 ---- a/crypto/bn/asm/x86_64-mont.pl -+++ b/crypto/bn/asm/x86_64-mont.pl -@@ -1148,18 +1148,17 @@ $code.=<<___; - mulx 2*8($aptr),%r15,%r13 # ... 
- adox -3*8($tptr),%r11 - adcx %r15,%r12 -- adox $zero,%r12 -+ adox -2*8($tptr),%r12 - adcx $zero,%r13 -+ adox $zero,%r13 - - mov $bptr,8(%rsp) # off-load &b[i] -- .byte 0x67 - mov $mi,%r15 - imulq 24(%rsp),$mi # "t[0]"*n0 - xor %ebp,%ebp # xor $zero,$zero # cf=0, of=0 - - mulx 3*8($aptr),%rax,%r14 - mov $mi,%rdx -- adox -2*8($tptr),%r12 - adcx %rax,%r13 - adox -1*8($tptr),%r13 - adcx $zero,%r14 --- -2.7.4 - diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/debian1.0.2/soname.patch b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/debian1.0.2/soname.patch new file mode 100644 index 000000000..f9cdfec87 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl/debian1.0.2/soname.patch @@ -0,0 +1,13 @@ +Index: openssl-1.0.2d/crypto/opensslv.h +=================================================================== +--- openssl-1.0.2d.orig/crypto/opensslv.h ++++ openssl-1.0.2d/crypto/opensslv.h +@@ -88,7 +88,7 @@ extern "C" { + * should only keep the versions that are binary compatible with the current. + */ + # define SHLIB_VERSION_HISTORY "" +-# define SHLIB_VERSION_NUMBER "1.0.0" ++# define SHLIB_VERSION_NUMBER "1.0.2" + + + #ifdef __cplusplus diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2j.bb b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2j.bb deleted file mode 100644 index b6fb12634..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2j.bb +++ /dev/null @@ -1,62 +0,0 @@ -require openssl.inc - -# For target side versions of openssl enable support for OCF Linux driver -# if they are available. -DEPENDS += "cryptodev-linux" - -CFLAG += "-DHAVE_CRYPTODEV -DUSE_CRYPTODEV_DIGESTS" -CFLAG_append_class-native = " -fPIC" - -LIC_FILES_CHKSUM = "file://LICENSE;md5=27ffa5d74bb5a337056c14b2ef93fbf6" - -export DIRS = "crypto ssl apps engines" -export OE_LDFLAGS="${LDFLAGS}" - -SRC_URI += "file://find.pl;subdir=${BP}/util/ \ - file://run-ptest \ - file://openssl-c_rehash.sh \ - file://configure-targets.patch \ - file://shared-libs.patch \ - file://oe-ldflags.patch \ - file://engines-install-in-libdir-ssl.patch \ - file://debian1.0.2/block_diginotar.patch \ - file://debian1.0.2/block_digicert_malaysia.patch \ - file://debian/ca.patch \ - file://debian/c_rehash-compat.patch \ - file://debian/debian-targets.patch \ - file://debian/man-dir.patch \ - file://debian/man-section.patch \ - file://debian/no-rpath.patch \ - file://debian/no-symbolic.patch \ - file://debian/pic.patch \ - file://debian1.0.2/version-script.patch \ - file://openssl_fix_for_x32.patch \ - file://fix-cipher-des-ede3-cfb1.patch \ - file://openssl-avoid-NULL-pointer-dereference-in-EVP_DigestInit_ex.patch \ - file://openssl-fix-des.pod-error.patch \ - file://Makefiles-ptest.patch \ - file://ptest-deps.patch \ - file://openssl-1.0.2a-x32-asm.patch \ - file://ptest_makefile_deps.patch \ - file://configure-musl-target.patch \ - file://parallel.patch \ - file://openssl-util-perlpath.pl-cwd.patch \ - file://CVE-2016-7055.patch \ - file://0001-CVE-2017-3731.patch \ - file://0002-CVE-2017-3731.patch \ - " -SRC_URI[md5sum] = "96322138f0b69e61b7212bc53d5e912b" -SRC_URI[sha256sum] = "e7aff292be21c259c6af26469c7a9b3ba26e9abaaffd325e3dccc9785256c431" - -PACKAGES =+ "${PN}-engines" -FILES_${PN}-engines = "${libdir}/ssl/engines/*.so ${libdir}/engines" - -# The crypto_use_bigint patch means that perl's bignum module needs to be -# installed, but some distributions (for example 
Fedora 23) don't ship it by -# default. As the resulting error is very misleading check for bignum before -# building. -do_configure_prepend() { - if ! perl -Mbigint -e true; then - bbfatal "The perl module 'bignum' was not found but this is required to build openssl. Please install this module (often packaged as perl-bignum) and re-run bitbake." - fi -} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2k.bb b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2k.bb new file mode 100644 index 000000000..83d1a500c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/openssl/openssl_1.0.2k.bb @@ -0,0 +1,62 @@ +require openssl.inc + +# For target side versions of openssl enable support for OCF Linux driver +# if they are available. +DEPENDS += "cryptodev-linux" + +CFLAG += "-DHAVE_CRYPTODEV -DUSE_CRYPTODEV_DIGESTS" +CFLAG_append_class-native = " -fPIC" + +LIC_FILES_CHKSUM = "file://LICENSE;md5=27ffa5d74bb5a337056c14b2ef93fbf6" + +export DIRS = "crypto ssl apps engines" +export OE_LDFLAGS="${LDFLAGS}" + +SRC_URI += "file://find.pl;subdir=${BP}/util/ \ + file://run-ptest \ + file://openssl-c_rehash.sh \ + file://configure-targets.patch \ + file://shared-libs.patch \ + file://oe-ldflags.patch \ + file://engines-install-in-libdir-ssl.patch \ + file://debian1.0.2/block_diginotar.patch \ + file://debian1.0.2/block_digicert_malaysia.patch \ + file://debian/ca.patch \ + file://debian/c_rehash-compat.patch \ + file://debian/debian-targets.patch \ + file://debian/man-dir.patch \ + file://debian/man-section.patch \ + file://debian/no-rpath.patch \ + file://debian/no-symbolic.patch \ + file://debian/pic.patch \ + file://debian1.0.2/version-script.patch \ + file://debian1.0.2/soname.patch \ + file://openssl_fix_for_x32.patch \ + file://fix-cipher-des-ede3-cfb1.patch \ + file://openssl-avoid-NULL-pointer-dereference-in-EVP_DigestInit_ex.patch \ + file://openssl-fix-des.pod-error.patch \ + file://Makefiles-ptest.patch \ + file://ptest-deps.patch \ + file://openssl-1.0.2a-x32-asm.patch \ + file://ptest_makefile_deps.patch \ + file://configure-musl-target.patch \ + file://parallel.patch \ + file://openssl-util-perlpath.pl-cwd.patch \ + file://Use-SHA256-not-MD5-as-default-digest.patch \ + file://0001-Fix-build-with-clang-using-external-assembler.patch \ + " +SRC_URI[md5sum] = "f965fc0bf01bf882b31314b61391ae65" +SRC_URI[sha256sum] = "6b3977c61f2aedf0f96367dcfb5c6e578cf37e7b8d913b4ecb6643c3cb88d8c0" + +PACKAGES =+ "${PN}-engines" +FILES_${PN}-engines = "${libdir}/ssl/engines/*.so ${libdir}/engines" + +# The crypto_use_bigint patch means that perl's bignum module needs to be +# installed, but some distributions (for example Fedora 23) don't ship it by +# default. As the resulting error is very misleading check for bignum before +# building. +do_configure_prepend() { + if ! perl -Mbigint -e true; then + bbfatal "The perl module 'bignum' was not found but this is required to build openssl. Please install this module (often packaged as perl-bignum) and re-run bitbake." 
+ fi +} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/portmap/portmap_6.0.bb b/import-layers/yocto-poky/meta/recipes-connectivity/portmap/portmap_6.0.bb index 999b4a937..d9700950e 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/portmap/portmap_6.0.bb +++ b/import-layers/yocto-poky/meta/recipes-connectivity/portmap/portmap_6.0.bb @@ -4,7 +4,7 @@ DEPENDS_append_libc-musl = " libtirpc " PR = "r9" -SRC_URI = "http://www.sourcefiles.org/Networking/Tools/Miscellanenous/portmap-6.0.tgz \ +SRC_URI = "https://fossies.org/linux/misc/old/portmap-6.0.tgz \ file://destdir-no-strip.patch \ file://tcpd-config.patch \ file://portmap.init \ diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb b/import-layers/yocto-poky/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb index 51a76b429..b5f68951d 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb +++ b/import-layers/yocto-poky/meta/recipes-connectivity/ppp-dialin/ppp-dialin_0.1.bb @@ -4,8 +4,7 @@ DEPENDS = "ppp" RDEPENDS_${PN} = "ppp" PR = "r8" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" +LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" SRC_URI = "file://host-peer \ file://ppp-dialin" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp/0001-ppp-Fix-compilation-errors-in-Makefile.patch b/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp/0001-ppp-Fix-compilation-errors-in-Makefile.patch index 8aa2d2e67..ea4969b36 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp/0001-ppp-Fix-compilation-errors-in-Makefile.patch +++ b/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp/0001-ppp-Fix-compilation-errors-in-Makefile.patch @@ -3,34 +3,14 @@ From: Lu Chong Date: Tue, 5 Nov 2013 17:32:56 +0800 Subject: [PATCH] ppp: Fix compilation errors in Makefile -This patch fixes below issues: - -1. Make can't exit while compilation error occurs in subdir for plugins building. - -2. If build ppp with newer kernel (3.10.10), it will pick 'if_pppox.h' from sysroot-dir and - 'if_pppol2tp.h' from its own source dir, this cause below build errors: - - bitbake_build/tmp/sysroots/intel-x86-64/usr/include/linux/if_pppox.h:84:26: - error: field 'pppol2tp' has incomplete type - struct pppol2tpin6_addr pppol2tp; - ^ - bitbake_build/tmp/sysroots/intel-x86-64/usr/include/linux/if_pppox.h:99:28: - error: field 'pppol2tp' has incomplete type - struct pppol2tpv3in6_addr pppol2tp; - ^ - -The 'sysroot-dir/if_pppox.h' enabled ipv6 support but the 'source-dir/if_pppol2tp.h' lost -related structure definitions, we should use both header files from sysroots to fix this -build failure. +Make can't exit while compilation error occurs in subdir for plugins building. 
Upstream-Status: Pending Signed-off-by: Lu Chong --- - pppd/plugins/Makefile.linux | 2 +- - pppd/plugins/pppol2tp/Makefile.linux | 2 +- - pppd/plugins/rp-pppoe/Makefile.linux | 2 +- - 3 files changed, 3 insertions(+), 3 deletions(-) + pppd/plugins/Makefile.linux | 1 +- + 1 files changed, 1 insertions(+), 1 deletions(-) diff --git a/pppd/plugins/Makefile.linux b/pppd/plugins/Makefile.linux index 0a7ec7b..2a2c15a 100644 @@ -45,31 +25,6 @@ index 0a7ec7b..2a2c15a 100644 %.so: %.c $(CC) -o $@ $(LDFLAGS) $(CFLAGS) $^ -diff --git a/pppd/plugins/pppol2tp/Makefile.linux b/pppd/plugins/pppol2tp/Makefile.linux -index 19eff67..feb2f52 100644 ---- a/pppd/plugins/pppol2tp/Makefile.linux -+++ b/pppd/plugins/pppol2tp/Makefile.linux -@@ -1,6 +1,6 @@ - #CC = gcc - COPTS = -O2 -g --CFLAGS = $(COPTS) -I. -I../.. -I../../../include -fPIC -+CFLAGS = $(COPTS) -I. -I../.. -fPIC - LDFLAGS = -shared - INSTALL = install - -diff --git a/pppd/plugins/rp-pppoe/Makefile.linux b/pppd/plugins/rp-pppoe/Makefile.linux -index f078991..15b9118 100644 ---- a/pppd/plugins/rp-pppoe/Makefile.linux -+++ b/pppd/plugins/rp-pppoe/Makefile.linux -@@ -26,7 +26,7 @@ INSTALL = install - RP_VERSION=3.8p - - COPTS=-O2 -g --CFLAGS=$(COPTS) -I../../../include '-DRP_VERSION="$(RP_VERSION)"' -+CFLAGS=$(COPTS) '-DRP_VERSION="$(RP_VERSION)"' - all: rp-pppoe.so pppoe-discovery - - pppoe-discovery: pppoe-discovery.o debug.o -- 1.7.9.5 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp/0001-ppp-Remove-unneeded-include.patch b/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp/0001-ppp-Remove-unneeded-include.patch new file mode 100644 index 000000000..a32f89fbc --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp/0001-ppp-Remove-unneeded-include.patch @@ -0,0 +1,43 @@ +commit cd90fd147844a0cfec101f1e2db7a3c59d236621 +Author: Jussi Kukkonen +Date: Wed Dec 28 14:11:22 2016 +0200 + +pppol2tp plugin: Remove unneeded include + +The include is not required and will break compile on musl libc with + +| In file included from pppol2tp.c:34:0: +| /usr/include/linux/if.h:97:2: error: expected identifier before numeric constant +| IFF_LOWER_UP = 1<<16, /* __volatile__ */ + +Patch originally from Khem Raj. + +Upstream-Status: Pending [https://github.com/paulusmack/ppp/issues/73] +Signed-off-by: Jussi Kukkonen + +diff --git a/pppd/plugins/pppol2tp/openl2tp.c b/pppd/plugins/pppol2tp/openl2tp.c +index 9643b96..458316b 100644 +--- a/pppd/plugins/pppol2tp/openl2tp.c ++++ b/pppd/plugins/pppol2tp/openl2tp.c +@@ -47,7 +47,6 @@ + #include + #include + #include +-#include + #include + + #include "l2tp_event.h" +diff --git a/pppd/plugins/pppol2tp/pppol2tp.c b/pppd/plugins/pppol2tp/pppol2tp.c +index 0e28606..4f6d98c 100644 +--- a/pppd/plugins/pppol2tp/pppol2tp.c ++++ b/pppd/plugins/pppol2tp/pppol2tp.c +@@ -46,7 +46,6 @@ + #include + #include + #include +-#include + #include + + /* should be added to system's socket.h... 
*/ +--- + diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp_2.4.7.bb b/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp_2.4.7.bb index 56dbd98d2..b2c4d4c65 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp_2.4.7.bb +++ b/import-layers/yocto-poky/meta/recipes-connectivity/ppp/ppp_2.4.7.bb @@ -11,7 +11,7 @@ LIC_FILES_CHKSUM = "file://pppd/ccp.c;beginline=1;endline=29;md5=e2c43fe6e81ff77 file://pppd/tdb.c;beginline=1;endline=27;md5=4ca3a9991b011038d085d6675ae7c4e6 \ file://chat/chat.c;beginline=1;endline=15;md5=0d374b8545ee5c62d7aff1acbd38add2" -SRC_URI = "http://ppp.samba.org/ftp/ppp/ppp-${PV}.tar.gz \ +SRC_URI = "https://download.samba.org/pub/${BPN}/${BP}.tar.gz \ file://makefile.patch \ file://cifdefroute.patch \ file://pppd-resolv-varrun.patch \ @@ -31,6 +31,7 @@ SRC_URI = "http://ppp.samba.org/ftp/ppp/ppp-${PV}.tar.gz \ file://ppp@.service \ file://fix-CVE-2015-3310.patch \ file://ppp-fix-building-with-linux-4.8.patch \ + file://0001-ppp-Remove-unneeded-include.patch \ " SRC_URI_append_libc-musl = "\ diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.1.bb b/import-layers/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.1.bb deleted file mode 100644 index 4da6d3970..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.1.bb +++ /dev/null @@ -1,38 +0,0 @@ -SUMMARY = "Multipurpose relay for bidirectional data transfer" -DESCRIPTION = "Socat is a relay for bidirectional data \ -transfer between two independent data channels." -HOMEPAGE = "http://www.dest-unreach.org/socat/" - -SECTION = "console/network" - -DEPENDS = "openssl readline" - -LICENSE = "GPL-2.0+-with-OpenSSL-exception" -LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \ - file://README;beginline=257;endline=287;md5=338c05eadd013872abb1d6e198e10a3f" - - -SRC_URI = "http://www.dest-unreach.org/socat/download/socat-${PV}.tar.bz2 \ - file://Makefile.in-fix-for-parallel-build.patch \ - file://0001-define-NETDB_INTERNAL-to-1-if-not-available.patch \ - file://0001-Access-c_ispeed-and-c_ospeed-via-APIs.patch \ -" - -SRC_URI[md5sum] = "334e46924f2b386299c9db2ac22bcd36" -SRC_URI[sha256sum] = "d2da659540c38139f388e9437bfaae16bb458d174d056cb3228432a8f489fbaa" - -inherit autotools - -EXTRA_AUTORECONF += "--exclude=autoheader" - -EXTRA_OECONF += "ac_cv_have_z_modifier=yes \ - ac_cv_header_bsd_libutil_h=no \ -" - -PACKAGECONFIG ??= "tcp-wrappers" -PACKAGECONFIG[tcp-wrappers] = "--enable-libwrap,--disable-libwrap,tcp-wrappers" - -do_install_prepend () { - mkdir -p ${D}${bindir} - install -d ${D}${bindir} ${D}${mandir}/man1 -} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.2.bb b/import-layers/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.2.bb new file mode 100644 index 000000000..4dcb7b4ad --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/socat/socat_1.7.3.2.bb @@ -0,0 +1,41 @@ +SUMMARY = "Multipurpose relay for bidirectional data transfer" +DESCRIPTION = "Socat is a relay for bidirectional data \ +transfer between two independent data channels." 
+HOMEPAGE = "http://www.dest-unreach.org/socat/" + +SECTION = "console/network" + +DEPENDS = "openssl readline" + +LICENSE = "GPL-2.0+-with-OpenSSL-exception" +LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \ + file://README;beginline=257;endline=287;md5=338c05eadd013872abb1d6e198e10a3f" + + +SRC_URI = "http://www.dest-unreach.org/socat/download/socat-${PV}.tar.bz2 \ + file://Makefile.in-fix-for-parallel-build.patch \ + file://0001-define-NETDB_INTERNAL-to-1-if-not-available.patch \ + file://0001-Access-c_ispeed-and-c_ospeed-via-APIs.patch \ +" + +SRC_URI[md5sum] = "607a24c15bd2cb54e9328bfbbd3a1ae9" +SRC_URI[sha256sum] = "e3561f808739383eb10fada1e5d4f26883f0311b34fd0af7837d0c95ef379251" + +inherit autotools + +EXTRA_AUTORECONF += "--exclude=autoheader" + +EXTRA_OECONF += "ac_cv_have_z_modifier=yes \ + ac_cv_header_bsd_libutil_h=no \ +" + +PACKAGECONFIG_class-target ??= "tcp-wrappers" +PACKAGECONFIG ??= "" +PACKAGECONFIG[tcp-wrappers] = "--enable-libwrap,--disable-libwrap,tcp-wrappers" + +do_install_prepend () { + mkdir -p ${D}${bindir} + install -d ${D}${bindir} ${D}${mandir}/man1 +} + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/wireless-tools/wireless-tools_30.pre9.bb b/import-layers/yocto-poky/meta/recipes-connectivity/wireless-tools/wireless-tools_30.pre9.bb index c3b8f665b..0a342071e 100644 --- a/import-layers/yocto-poky/meta/recipes-connectivity/wireless-tools/wireless-tools_30.pre9.bb +++ b/import-layers/yocto-poky/meta/recipes-connectivity/wireless-tools/wireless-tools_30.pre9.bb @@ -1,5 +1,5 @@ SUMMARY = "Tools for the Linux Standard Wireless Extension Subsystem" -HOMEPAGE = "http://www.hpl.hp.com/personal/Jean_Tourrilhes/Linux/Tools.html" +HOMEPAGE = "https://hewlettpackard.github.io/wireless-tools/Tools.html" LICENSE = "GPLv2 & (LGPLv2.1 | MPL-1.1 | BSD)" LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \ file://iwconfig.c;beginline=1;endline=12;md5=cf710eb1795c376eb10ea4ff04649caf \ @@ -8,7 +8,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \ SECTION = "base" PE = "1" -SRC_URI = "http://www.hpl.hp.com/personal/Jean_Tourrilhes/Linux/wireless_tools.${PV}.tar.gz \ +SRC_URI = "https://hewlettpackard.github.io/wireless-tools/wireless_tools.${PV}.tar.gz \ file://remove.ldconfig.call.patch \ file://man.patch \ file://avoid_strip.patch \ @@ -17,7 +17,7 @@ SRC_URI = "http://www.hpl.hp.com/personal/Jean_Tourrilhes/Linux/wireless_tools.$ SRC_URI[md5sum] = "ca91ba7c7eff9bfff6926b1a34a4697d" SRC_URI[sha256sum] = "abd9c5c98abf1fdd11892ac2f8a56737544fe101e1be27c6241a564948f34c63" -UPSTREAM_CHECK_URI = "http://www.hpl.hp.com/personal/Jean_Tourrilhes/Linux/Tools.html" +UPSTREAM_CHECK_URI = "https://hewlettpackard.github.io/wireless-tools/Tools.html" UPSTREAM_CHECK_REGEX = "wireless_tools\.(?P(\d+)(\..*|))\.tar\.gz" S = "${WORKDIR}/wireless_tools.30" diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-Reject-psk-parameter-set-with-invalid-passphrase-cha.patch b/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-Reject-psk-parameter-set-with-invalid-passphrase-cha.patch deleted file mode 100644 index dd7d5f726..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-Reject-psk-parameter-set-with-invalid-passphrase-cha.patch +++ /dev/null @@ -1,55 +0,0 @@ -From 73e4abb24a936014727924d8b0b2965edfc117dd Mon Sep 17 00:00:00 2001 -From: Jouni 
Malinen -Date: Fri, 4 Mar 2016 18:46:41 +0200 -Subject: [PATCH 1/3] Reject psk parameter set with invalid passphrase - character - -WPA/WPA2-Personal passphrase is not allowed to include control -characters. Reject a passphrase configuration attempt if that passphrase -includes an invalid passphrase. - -This fixes an issue where wpa_supplicant could have updated the -configuration file psk parameter with arbitrary data from the control -interface or D-Bus interface. While those interfaces are supposed to be -accessible only for trusted users/applications, it may be possible that -an untrusted user has access to a management software component that -does not validate the passphrase value before passing it to -wpa_supplicant. - -This could allow such an untrusted user to inject up to 63 characters of -almost arbitrary data into the configuration file. Such configuration -file could result in wpa_supplicant trying to load a library (e.g., -opensc_engine_path, pkcs11_engine_path, pkcs11_module_path, -load_dynamic_eap) from user controlled location when starting again. -This would allow code from that library to be executed under the -wpa_supplicant process privileges. - -Upstream-Status: Backport - -CVE: CVE-2016-4477 - -Signed-off-by: Jouni Malinen -Signed-off-by: Zhixiong Chi ---- - wpa_supplicant/config.c | 6 ++++++ - 1 file changed, 6 insertions(+) - -diff --git a/wpa_supplicant/config.c b/wpa_supplicant/config.c -index b1c7870..fdd9643 100644 ---- a/wpa_supplicant/config.c -+++ b/wpa_supplicant/config.c -@@ -478,6 +478,12 @@ static int wpa_config_parse_psk(const struct parse_data *data, - } - wpa_hexdump_ascii_key(MSG_MSGDUMP, "PSK (ASCII passphrase)", - (u8 *) value, len); -+ if (has_ctrl_char((u8 *) value, len)) { -+ wpa_printf(MSG_ERROR, -+ "Line %d: Invalid passphrase character", -+ line); -+ return -1; -+ } - if (ssid->passphrase && os_strlen(ssid->passphrase) == len && - os_memcmp(ssid->passphrase, value, len) == 0) { - /* No change to the previously configured value */ --- -1.9.1 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-WPS-Reject-a-Credential-with-invalid-passphrase.patch b/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-WPS-Reject-a-Credential-with-invalid-passphrase.patch deleted file mode 100644 index db222e41d..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0001-WPS-Reject-a-Credential-with-invalid-passphrase.patch +++ /dev/null @@ -1,86 +0,0 @@ -From ecbb0b3dc122b0d290987cf9c84010bbe53e1022 Mon Sep 17 00:00:00 2001 -From: Jouni Malinen -Date: Fri, 4 Mar 2016 17:20:18 +0200 -Subject: [PATCH 1/2] WPS: Reject a Credential with invalid passphrase - -WPA/WPA2-Personal passphrase is not allowed to include control -characters. Reject a Credential received from a WPS Registrar both as -STA (Credential) and AP (AP Settings) if the credential is for WPAPSK or -WPA2PSK authentication type and includes an invalid passphrase. - -This fixes an issue where hostapd or wpa_supplicant could have updated -the configuration file PSK/passphrase parameter with arbitrary data from -an external device (Registrar) that may not be fully trusted. Should -such data include a newline character, the resulting configuration file -could become invalid and fail to be parsed. 
- -Upstream-Status: Backport - -CVE: CVE-2016-4476 - -Signed-off-by: Jouni Malinen -Signed-off-by: Zhixiong Chi ---- - src/utils/common.c | 12 ++++++++++++ - src/utils/common.h | 1 + - src/wps/wps_attr_process.c | 10 ++++++++++ - 3 files changed, 23 insertions(+) - -diff --git a/src/utils/common.c b/src/utils/common.c -index 450e2c6..27b7c02 100644 ---- a/src/utils/common.c -+++ b/src/utils/common.c -@@ -697,6 +697,18 @@ int is_hex(const u8 *data, size_t len) - } - - -+int has_ctrl_char(const u8 *data, size_t len) -+{ -+ size_t i; -+ -+ for (i = 0; i < len; i++) { -+ if (data[i] < 32 || data[i] == 127) -+ return 1; -+ } -+ return 0; -+} -+ -+ - size_t merge_byte_arrays(u8 *res, size_t res_len, - const u8 *src1, size_t src1_len, - const u8 *src2, size_t src2_len) -diff --git a/src/utils/common.h b/src/utils/common.h -index 701dbb2..a972240 100644 ---- a/src/utils/common.h -+++ b/src/utils/common.h -@@ -488,6 +488,7 @@ const char * wpa_ssid_txt(const u8 *ssid, size_t ssid_len); - - char * wpa_config_parse_string(const char *value, size_t *len); - int is_hex(const u8 *data, size_t len); -+int has_ctrl_char(const u8 *data, size_t len); - size_t merge_byte_arrays(u8 *res, size_t res_len, - const u8 *src1, size_t src1_len, - const u8 *src2, size_t src2_len); -diff --git a/src/wps/wps_attr_process.c b/src/wps/wps_attr_process.c -index eadb22f..e8c4579 100644 ---- a/src/wps/wps_attr_process.c -+++ b/src/wps/wps_attr_process.c -@@ -229,6 +229,16 @@ static int wps_workaround_cred_key(struct wps_credential *cred) - cred->key_len--; - #endif /* CONFIG_WPS_STRICT */ - } -+ -+ -+ if (cred->auth_type & (WPS_AUTH_WPAPSK | WPS_AUTH_WPA2PSK) && -+ (cred->key_len < 8 || has_ctrl_char(cred->key, cred->key_len))) { -+ wpa_printf(MSG_INFO, "WPS: Reject credential with invalid WPA/WPA2-Personal passphrase"); -+ wpa_hexdump_ascii_key(MSG_INFO, "WPS: Network Key", -+ cred->key, cred->key_len); -+ return -1; -+ } -+ - return 0; - } - --- -1.9.1 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0002-Reject-SET_CRED-commands-with-newline-characters-in-.patch b/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0002-Reject-SET_CRED-commands-with-newline-characters-in-.patch deleted file mode 100644 index cad7425c3..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0002-Reject-SET_CRED-commands-with-newline-characters-in-.patch +++ /dev/null @@ -1,66 +0,0 @@ -From b166cd84a77a6717be9600bf95378a0055d6f5a5 Mon Sep 17 00:00:00 2001 -From: Jouni Malinen -Date: Tue, 5 Apr 2016 23:33:10 +0300 -Subject: [PATCH 2/3] Reject SET_CRED commands with newline characters in the - string values - -Most of the cred block parameters are written as strings without -filtering and if there is an embedded newline character in the value, -unexpected configuration file data might be written. - -This fixes an issue where wpa_supplicant could have updated the -configuration file cred parameter with arbitrary data from the control -interface or D-Bus interface. While those interfaces are supposed to be -accessible only for trusted users/applications, it may be possible that -an untrusted user has access to a management software component that -does not validate the credential value before passing it to -wpa_supplicant. - -This could allow such an untrusted user to inject almost arbitrary data -into the configuration file. 
Such configuration file could result in -wpa_supplicant trying to load a library (e.g., opensc_engine_path, -pkcs11_engine_path, pkcs11_module_path, load_dynamic_eap) from user -controlled location when starting again. This would allow code from that -library to be executed under the wpa_supplicant process privileges. - -Upstream-Status: Backport - -CVE: CVE-2016-4477 - -Signed-off-by: Jouni Malinen -Signed-off-by: Zhixiong Chi ---- - wpa_supplicant/config.c | 9 ++++++++- - 1 file changed, 8 insertions(+), 1 deletion(-) - -diff --git a/wpa_supplicant/config.c b/wpa_supplicant/config.c -index eb97cd5..69152ef 100644 ---- a/wpa_supplicant/config.c -+++ b/wpa_supplicant/config.c -@@ -2896,6 +2896,8 @@ int wpa_config_set_cred(struct wpa_cred *cred, const char *var, - - if (os_strcmp(var, "password") == 0 && - os_strncmp(value, "ext:", 4) == 0) { -+ if (has_newline(value)) -+ return -1; - str_clear_free(cred->password); - cred->password = os_strdup(value); - cred->ext_password = 1; -@@ -2946,9 +2948,14 @@ int wpa_config_set_cred(struct wpa_cred *cred, const char *var, - } - - val = wpa_config_parse_string(value, &len); -- if (val == NULL) { -+ if (val == NULL || -+ (os_strcmp(var, "excluded_ssid") != 0 && -+ os_strcmp(var, "roaming_consortium") != 0 && -+ os_strcmp(var, "required_roaming_consortium") != 0 && -+ has_newline(val))) { - wpa_printf(MSG_ERROR, "Line %d: invalid field '%s' string " - "value '%s'.", line, var, value); -+ os_free(val); - return -1; - } - --- -1.9.1 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0002-Remove-newlines-from-wpa_supplicant-config-network-o.patch b/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0002-Remove-newlines-from-wpa_supplicant-config-network-o.patch deleted file mode 100644 index cc7b01ad5..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0002-Remove-newlines-from-wpa_supplicant-config-network-o.patch +++ /dev/null @@ -1,86 +0,0 @@ -From 0fe5a234240a108b294a87174ad197f6b5cb38e9 Mon Sep 17 00:00:00 2001 -From: Paul Stewart -Date: Thu, 3 Mar 2016 15:40:19 -0800 -Subject: [PATCH 2/2] Remove newlines from wpa_supplicant config network - output - -Spurious newlines output while writing the config file can corrupt the -wpa_supplicant configuration. Avoid writing these for the network block -parameters. This is a generic filter that cover cases that may not have -been explicitly addressed with a more specific commit to avoid control -characters in the psk parameter. 
- -Upstream-Status: Backport - -CVE: CVE-2016-4476 - -Signed-off-by: Paul Stewart -Signed-off-by: Zhixiong Chi ---- - src/utils/common.c | 11 +++++++++++ - src/utils/common.h | 1 + - wpa_supplicant/config.c | 15 +++++++++++++-- - 3 files changed, 25 insertions(+), 2 deletions(-) - -diff --git a/src/utils/common.c b/src/utils/common.c -index 27b7c02..9856463 100644 ---- a/src/utils/common.c -+++ b/src/utils/common.c -@@ -709,6 +709,17 @@ int has_ctrl_char(const u8 *data, size_t len) - } - - -+int has_newline(const char *str) -+{ -+ while (*str) { -+ if (*str == '\n' || *str == '\r') -+ return 1; -+ str++; -+ } -+ return 0; -+} -+ -+ - size_t merge_byte_arrays(u8 *res, size_t res_len, - const u8 *src1, size_t src1_len, - const u8 *src2, size_t src2_len) -diff --git a/src/utils/common.h b/src/utils/common.h -index a972240..d19927b 100644 ---- a/src/utils/common.h -+++ b/src/utils/common.h -@@ -489,6 +489,7 @@ const char * wpa_ssid_txt(const u8 *ssid, size_t ssid_len); - char * wpa_config_parse_string(const char *value, size_t *len); - int is_hex(const u8 *data, size_t len); - int has_ctrl_char(const u8 *data, size_t len); -+int has_newline(const char *str); - size_t merge_byte_arrays(u8 *res, size_t res_len, - const u8 *src1, size_t src1_len, - const u8 *src2, size_t src2_len); -diff --git a/wpa_supplicant/config.c b/wpa_supplicant/config.c -index fdd9643..eb97cd5 100644 ---- a/wpa_supplicant/config.c -+++ b/wpa_supplicant/config.c -@@ -2699,8 +2699,19 @@ char * wpa_config_get(struct wpa_ssid *ssid, const char *var) - - for (i = 0; i < NUM_SSID_FIELDS; i++) { - const struct parse_data *field = &ssid_fields[i]; -- if (os_strcmp(var, field->name) == 0) -- return field->writer(field, ssid); -+ if (os_strcmp(var, field->name) == 0) { -+ char *ret = field->writer(field, ssid); -+ -+ if (ret && has_newline(ret)) { -+ wpa_printf(MSG_ERROR, -+ "Found newline in value for %s; not returning it", -+ var); -+ os_free(ret); -+ ret = NULL; -+ } -+ -+ return ret; -+ } - } - - return NULL; --- -1.9.1 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0003-Reject-SET-commands-with-newline-characters-in-the-s.patch b/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0003-Reject-SET-commands-with-newline-characters-in-the-s.patch deleted file mode 100644 index 5375db74b..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/0003-Reject-SET-commands-with-newline-characters-in-the-s.patch +++ /dev/null @@ -1,54 +0,0 @@ -From 2a3f56502b52375c3bf113cf92adfa99bad6b488 Mon Sep 17 00:00:00 2001 -From: Jouni Malinen -Date: Tue, 5 Apr 2016 23:55:48 +0300 -Subject: [PATCH 3/3] Reject SET commands with newline characters in the - string values - -Many of the global configuration parameters are written as strings -without filtering and if there is an embedded newline character in the -value, unexpected configuration file data might be written. - -This fixes an issue where wpa_supplicant could have updated the -configuration file global parameter with arbitrary data from the control -interface or D-Bus interface. While those interfaces are supposed to be -accessible only for trusted users/applications, it may be possible that -an untrusted user has access to a management software component that -does not validate the value of a parameter before passing it to -wpa_supplicant. - -This could allow such an untrusted user to inject almost arbitrary data -into the configuration file. 
Such configuration file could result in -wpa_supplicant trying to load a library (e.g., opensc_engine_path, -pkcs11_engine_path, pkcs11_module_path, load_dynamic_eap) from user -controlled location when starting again. This would allow code from that -library to be executed under the wpa_supplicant process privileges. - -Upstream-Status: Backport - -CVE: CVE-2016-4477 - -Signed-off-by: Jouni Malinen -Signed-off-by: Zhixiong Chi ---- - wpa_supplicant/config.c | 6 ++++++ - 1 file changed, 6 insertions(+) - -diff --git a/wpa_supplicant/config.c b/wpa_supplicant/config.c -index 69152ef..d9a1603 100644 ---- a/wpa_supplicant/config.c -+++ b/wpa_supplicant/config.c -@@ -3764,6 +3764,12 @@ static int wpa_global_config_parse_str(const struct global_parse_data *data, - return -1; - } - -+ if (has_newline(pos)) { -+ wpa_printf(MSG_ERROR, "Line %d: invalid %s value with newline", -+ line, data->name); -+ return -1; -+ } -+ - tmp = os_strdup(pos); - if (tmp == NULL) - return -1; --- -1.9.1 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/key-replay-cve-multiple.patch b/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/key-replay-cve-multiple.patch new file mode 100644 index 000000000..436520fe6 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant/key-replay-cve-multiple.patch @@ -0,0 +1,1025 @@ +The WPA2 four-way handshake protocol is vulnerable to replay attacks which can +result in unauthenticated clients gaining access to the network. + +Backport a number of patches from upstream to fix this. + +CVE: CVE-2017-13077 +CVE: CVE-2017-13078 +CVE: CVE-2017-13079 +CVE: CVE-2017-13080 +CVE: CVE-2017-13081 +CVE: CVE-2017-13082 +CVE: CVE-2017-13086 +CVE: CVE-2017-13087 +CVE: CVE-2017-13088 + +Upstream-Status: Backport +Signed-off-by: Ross Burton + +From cf4cab804c7afd5c45505528a8d16e46163243a2 Mon Sep 17 00:00:00 2001 +From: Mathy Vanhoef +Date: Fri, 14 Jul 2017 15:15:35 +0200 +Subject: [PATCH 1/8] hostapd: Avoid key reinstallation in FT handshake + +Do not reinstall TK to the driver during Reassociation Response frame +processing if the first attempt of setting the TK succeeded. This avoids +issues related to clearing the TX/RX PN that could result in reusing +same PN values for transmitted frames (e.g., due to CCM nonce reuse and +also hitting replay protection on the receiver) and accepting replayed +frames on RX side. + +This issue was introduced by the commit +0e84c25434e6a1f283c7b4e62e483729085b78d2 ('FT: Fix PTK configuration in +authenticator') which allowed wpa_ft_install_ptk() to be called multiple +times with the same PTK. While the second configuration attempt is +needed with some drivers, it must be done only if the first attempt +failed. 
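A minimal sketch of the guard this patch introduces — install the pairwise key once and refuse to reprogram it when the same handshake message is processed again, so the driver's TX/RX packet numbers are never reset for a key that is already live. The struct and function names below are hypothetical, not the hostapd ones:

    #include <stdbool.h>
    #include <string.h>

    struct sta_keys {
        unsigned char tk[32];
        size_t tk_len;
        bool tk_installed;          /* true once the driver has accepted this TK */
    };

    /* Program the TK into the driver only on the first attempt; a retry with
     * the same key must be a no-op so the packet-number counters survive. */
    static int install_tk_once(struct sta_keys *s,
                               const unsigned char *tk, size_t tk_len,
                               int (*drv_set_key)(const unsigned char *, size_t))
    {
        if (tk_len > sizeof(s->tk))
            return -1;
        if (s->tk_installed)
            return 0;               /* already configured: keep existing counters */
        if (drv_set_key(tk, tk_len) < 0)
            return -1;              /* leave the flag clear so a later retry can succeed */
        memcpy(s->tk, tk, tk_len);
        s->tk_len = tk_len;
        s->tk_installed = true;
        return 0;
    }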
+ +Signed-off-by: Mathy Vanhoef +--- + src/ap/ieee802_11.c | 16 +++++++++++++--- + src/ap/wpa_auth.c | 11 +++++++++++ + src/ap/wpa_auth.h | 3 ++- + src/ap/wpa_auth_ft.c | 10 ++++++++++ + src/ap/wpa_auth_i.h | 1 + + 5 files changed, 37 insertions(+), 4 deletions(-) + +diff --git a/src/ap/ieee802_11.c b/src/ap/ieee802_11.c +index 4e04169..333035f 100644 +--- a/src/ap/ieee802_11.c ++++ b/src/ap/ieee802_11.c +@@ -1841,6 +1841,7 @@ static int add_associated_sta(struct hostapd_data *hapd, + { + struct ieee80211_ht_capabilities ht_cap; + struct ieee80211_vht_capabilities vht_cap; ++ int set = 1; + + /* + * Remove the STA entry to ensure the STA PS state gets cleared and +@@ -1848,9 +1849,18 @@ static int add_associated_sta(struct hostapd_data *hapd, + * FT-over-the-DS, where a station re-associates back to the same AP but + * skips the authentication flow, or if working with a driver that + * does not support full AP client state. ++ * ++ * Skip this if the STA has already completed FT reassociation and the ++ * TK has been configured since the TX/RX PN must not be reset to 0 for ++ * the same key. + */ +- if (!sta->added_unassoc) ++ if (!sta->added_unassoc && ++ (!(sta->flags & WLAN_STA_AUTHORIZED) || ++ !wpa_auth_sta_ft_tk_already_set(sta->wpa_sm))) { + hostapd_drv_sta_remove(hapd, sta->addr); ++ wpa_auth_sm_event(sta->wpa_sm, WPA_DRV_STA_REMOVED); ++ set = 0; ++ } + + #ifdef CONFIG_IEEE80211N + if (sta->flags & WLAN_STA_HT) +@@ -1873,11 +1883,11 @@ static int add_associated_sta(struct hostapd_data *hapd, + sta->flags & WLAN_STA_VHT ? &vht_cap : NULL, + sta->flags | WLAN_STA_ASSOC, sta->qosinfo, + sta->vht_opmode, sta->p2p_ie ? 1 : 0, +- sta->added_unassoc)) { ++ set)) { + hostapd_logger(hapd, sta->addr, + HOSTAPD_MODULE_IEEE80211, HOSTAPD_LEVEL_NOTICE, + "Could not %s STA to kernel driver", +- sta->added_unassoc ? "set" : "add"); ++ set ? 
"set" : "add"); + + if (sta->added_unassoc) { + hostapd_drv_sta_remove(hapd, sta->addr); +diff --git a/src/ap/wpa_auth.c b/src/ap/wpa_auth.c +index 3587086..707971d 100644 +--- a/src/ap/wpa_auth.c ++++ b/src/ap/wpa_auth.c +@@ -1745,6 +1745,9 @@ int wpa_auth_sm_event(struct wpa_state_machine *sm, enum wpa_event event) + #else /* CONFIG_IEEE80211R */ + break; + #endif /* CONFIG_IEEE80211R */ ++ case WPA_DRV_STA_REMOVED: ++ sm->tk_already_set = FALSE; ++ return 0; + } + + #ifdef CONFIG_IEEE80211R +@@ -3250,6 +3253,14 @@ int wpa_auth_sta_wpa_version(struct wpa_state_machine *sm) + } + + ++int wpa_auth_sta_ft_tk_already_set(struct wpa_state_machine *sm) ++{ ++ if (!sm || !wpa_key_mgmt_ft(sm->wpa_key_mgmt)) ++ return 0; ++ return sm->tk_already_set; ++} ++ ++ + int wpa_auth_sta_clear_pmksa(struct wpa_state_machine *sm, + struct rsn_pmksa_cache_entry *entry) + { +diff --git a/src/ap/wpa_auth.h b/src/ap/wpa_auth.h +index 0de8d97..97461b0 100644 +--- a/src/ap/wpa_auth.h ++++ b/src/ap/wpa_auth.h +@@ -267,7 +267,7 @@ void wpa_receive(struct wpa_authenticator *wpa_auth, + u8 *data, size_t data_len); + enum wpa_event { + WPA_AUTH, WPA_ASSOC, WPA_DISASSOC, WPA_DEAUTH, WPA_REAUTH, +- WPA_REAUTH_EAPOL, WPA_ASSOC_FT ++ WPA_REAUTH_EAPOL, WPA_ASSOC_FT, WPA_DRV_STA_REMOVED + }; + void wpa_remove_ptk(struct wpa_state_machine *sm); + int wpa_auth_sm_event(struct wpa_state_machine *sm, enum wpa_event event); +@@ -280,6 +280,7 @@ int wpa_auth_pairwise_set(struct wpa_state_machine *sm); + int wpa_auth_get_pairwise(struct wpa_state_machine *sm); + int wpa_auth_sta_key_mgmt(struct wpa_state_machine *sm); + int wpa_auth_sta_wpa_version(struct wpa_state_machine *sm); ++int wpa_auth_sta_ft_tk_already_set(struct wpa_state_machine *sm); + int wpa_auth_sta_clear_pmksa(struct wpa_state_machine *sm, + struct rsn_pmksa_cache_entry *entry); + struct rsn_pmksa_cache_entry * +diff --git a/src/ap/wpa_auth_ft.c b/src/ap/wpa_auth_ft.c +index 42242a5..e63b99a 100644 +--- a/src/ap/wpa_auth_ft.c ++++ b/src/ap/wpa_auth_ft.c +@@ -780,6 +780,14 @@ void wpa_ft_install_ptk(struct wpa_state_machine *sm) + return; + } + ++ if (sm->tk_already_set) { ++ /* Must avoid TK reconfiguration to prevent clearing of TX/RX ++ * PN in the driver */ ++ wpa_printf(MSG_DEBUG, ++ "FT: Do not re-install same PTK to the driver"); ++ return; ++ } ++ + /* FIX: add STA entry to kernel/driver here? The set_key will fail + * most likely without this.. At the moment, STA entry is added only + * after association has been completed. 
This function will be called +@@ -792,6 +800,7 @@ void wpa_ft_install_ptk(struct wpa_state_machine *sm) + + /* FIX: MLME-SetProtection.Request(TA, Tx_Rx) */ + sm->pairwise_set = TRUE; ++ sm->tk_already_set = TRUE; + } + + +@@ -898,6 +907,7 @@ static int wpa_ft_process_auth_req(struct wpa_state_machine *sm, + + sm->pairwise = pairwise; + sm->PTK_valid = TRUE; ++ sm->tk_already_set = FALSE; + wpa_ft_install_ptk(sm); + + buflen = 2 + sizeof(struct rsn_mdie) + 2 + sizeof(struct rsn_ftie) + +diff --git a/src/ap/wpa_auth_i.h b/src/ap/wpa_auth_i.h +index 72b7eb3..7fd8f05 100644 +--- a/src/ap/wpa_auth_i.h ++++ b/src/ap/wpa_auth_i.h +@@ -65,6 +65,7 @@ struct wpa_state_machine { + struct wpa_ptk PTK; + Boolean PTK_valid; + Boolean pairwise_set; ++ Boolean tk_already_set; + int keycount; + Boolean Pair; + struct wpa_key_replay_counter { +-- +2.7.4 + +From 927f891007c402fefd1ff384645b3f07597c3ede Mon Sep 17 00:00:00 2001 +From: Mathy Vanhoef +Date: Wed, 12 Jul 2017 16:03:24 +0200 +Subject: [PATCH 2/8] Prevent reinstallation of an already in-use group key + +Track the current GTK and IGTK that is in use and when receiving a +(possibly retransmitted) Group Message 1 or WNM-Sleep Mode Response, do +not install the given key if it is already in use. This prevents an +attacker from trying to trick the client into resetting or lowering the +sequence counter associated to the group key. + +Signed-off-by: Mathy Vanhoef +--- + src/common/wpa_common.h | 11 +++++ + src/rsn_supp/wpa.c | 116 ++++++++++++++++++++++++++++++------------------ + src/rsn_supp/wpa_i.h | 4 ++ + 3 files changed, 87 insertions(+), 44 deletions(-) + +diff --git a/src/common/wpa_common.h b/src/common/wpa_common.h +index af1d0f0..d200285 100644 +--- a/src/common/wpa_common.h ++++ b/src/common/wpa_common.h +@@ -217,6 +217,17 @@ struct wpa_ptk { + size_t tk_len; + }; + ++struct wpa_gtk { ++ u8 gtk[WPA_GTK_MAX_LEN]; ++ size_t gtk_len; ++}; ++ ++#ifdef CONFIG_IEEE80211W ++struct wpa_igtk { ++ u8 igtk[WPA_IGTK_MAX_LEN]; ++ size_t igtk_len; ++}; ++#endif /* CONFIG_IEEE80211W */ + + /* WPA IE version 1 + * 00-50-f2:1 (OUI:OUI type) +diff --git a/src/rsn_supp/wpa.c b/src/rsn_supp/wpa.c +index 3c47879..95bd7be 100644 +--- a/src/rsn_supp/wpa.c ++++ b/src/rsn_supp/wpa.c +@@ -714,6 +714,15 @@ static int wpa_supplicant_install_gtk(struct wpa_sm *sm, + const u8 *_gtk = gd->gtk; + u8 gtk_buf[32]; + ++ /* Detect possible key reinstallation */ ++ if (sm->gtk.gtk_len == (size_t) gd->gtk_len && ++ os_memcmp(sm->gtk.gtk, gd->gtk, sm->gtk.gtk_len) == 0) { ++ wpa_dbg(sm->ctx->msg_ctx, MSG_DEBUG, ++ "WPA: Not reinstalling already in-use GTK to the driver (keyidx=%d tx=%d len=%d)", ++ gd->keyidx, gd->tx, gd->gtk_len); ++ return 0; ++ } ++ + wpa_hexdump_key(MSG_DEBUG, "WPA: Group Key", gd->gtk, gd->gtk_len); + wpa_dbg(sm->ctx->msg_ctx, MSG_DEBUG, + "WPA: Installing GTK to the driver (keyidx=%d tx=%d len=%d)", +@@ -748,6 +757,9 @@ static int wpa_supplicant_install_gtk(struct wpa_sm *sm, + } + os_memset(gtk_buf, 0, sizeof(gtk_buf)); + ++ sm->gtk.gtk_len = gd->gtk_len; ++ os_memcpy(sm->gtk.gtk, gd->gtk, sm->gtk.gtk_len); ++ + return 0; + } + +@@ -854,6 +866,48 @@ static int wpa_supplicant_pairwise_gtk(struct wpa_sm *sm, + } + + ++#ifdef CONFIG_IEEE80211W ++static int wpa_supplicant_install_igtk(struct wpa_sm *sm, ++ const struct wpa_igtk_kde *igtk) ++{ ++ size_t len = wpa_cipher_key_len(sm->mgmt_group_cipher); ++ u16 keyidx = WPA_GET_LE16(igtk->keyid); ++ ++ /* Detect possible key reinstallation */ ++ if (sm->igtk.igtk_len == len && ++ os_memcmp(sm->igtk.igtk, 
igtk->igtk, sm->igtk.igtk_len) == 0) { ++ wpa_dbg(sm->ctx->msg_ctx, MSG_DEBUG, ++ "WPA: Not reinstalling already in-use IGTK to the driver (keyidx=%d)", ++ keyidx); ++ return 0; ++ } ++ ++ wpa_dbg(sm->ctx->msg_ctx, MSG_DEBUG, ++ "WPA: IGTK keyid %d pn %02x%02x%02x%02x%02x%02x", ++ keyidx, MAC2STR(igtk->pn)); ++ wpa_hexdump_key(MSG_DEBUG, "WPA: IGTK", igtk->igtk, len); ++ if (keyidx > 4095) { ++ wpa_msg(sm->ctx->msg_ctx, MSG_WARNING, ++ "WPA: Invalid IGTK KeyID %d", keyidx); ++ return -1; ++ } ++ if (wpa_sm_set_key(sm, wpa_cipher_to_alg(sm->mgmt_group_cipher), ++ broadcast_ether_addr, ++ keyidx, 0, igtk->pn, sizeof(igtk->pn), ++ igtk->igtk, len) < 0) { ++ wpa_msg(sm->ctx->msg_ctx, MSG_WARNING, ++ "WPA: Failed to configure IGTK to the driver"); ++ return -1; ++ } ++ ++ sm->igtk.igtk_len = len; ++ os_memcpy(sm->igtk.igtk, igtk->igtk, sm->igtk.igtk_len); ++ ++ return 0; ++} ++#endif /* CONFIG_IEEE80211W */ ++ ++ + static int ieee80211w_set_keys(struct wpa_sm *sm, + struct wpa_eapol_ie_parse *ie) + { +@@ -864,30 +918,14 @@ static int ieee80211w_set_keys(struct wpa_sm *sm, + if (ie->igtk) { + size_t len; + const struct wpa_igtk_kde *igtk; +- u16 keyidx; ++ + len = wpa_cipher_key_len(sm->mgmt_group_cipher); + if (ie->igtk_len != WPA_IGTK_KDE_PREFIX_LEN + len) + return -1; ++ + igtk = (const struct wpa_igtk_kde *) ie->igtk; +- keyidx = WPA_GET_LE16(igtk->keyid); +- wpa_dbg(sm->ctx->msg_ctx, MSG_DEBUG, "WPA: IGTK keyid %d " +- "pn %02x%02x%02x%02x%02x%02x", +- keyidx, MAC2STR(igtk->pn)); +- wpa_hexdump_key(MSG_DEBUG, "WPA: IGTK", +- igtk->igtk, len); +- if (keyidx > 4095) { +- wpa_msg(sm->ctx->msg_ctx, MSG_WARNING, +- "WPA: Invalid IGTK KeyID %d", keyidx); +- return -1; +- } +- if (wpa_sm_set_key(sm, wpa_cipher_to_alg(sm->mgmt_group_cipher), +- broadcast_ether_addr, +- keyidx, 0, igtk->pn, sizeof(igtk->pn), +- igtk->igtk, len) < 0) { +- wpa_msg(sm->ctx->msg_ctx, MSG_WARNING, +- "WPA: Failed to configure IGTK to the driver"); ++ if (wpa_supplicant_install_igtk(sm, igtk) < 0) + return -1; +- } + } + + return 0; +@@ -2307,7 +2345,7 @@ void wpa_sm_deinit(struct wpa_sm *sm) + */ + void wpa_sm_notify_assoc(struct wpa_sm *sm, const u8 *bssid) + { +- int clear_ptk = 1; ++ int clear_keys = 1; + + if (sm == NULL) + return; +@@ -2333,11 +2371,11 @@ void wpa_sm_notify_assoc(struct wpa_sm *sm, const u8 *bssid) + /* Prepare for the next transition */ + wpa_ft_prepare_auth_request(sm, NULL); + +- clear_ptk = 0; ++ clear_keys = 0; + } + #endif /* CONFIG_IEEE80211R */ + +- if (clear_ptk) { ++ if (clear_keys) { + /* + * IEEE 802.11, 8.4.10: Delete PTK SA on (re)association if + * this is not part of a Fast BSS Transition. 
+@@ -2347,6 +2385,10 @@ void wpa_sm_notify_assoc(struct wpa_sm *sm, const u8 *bssid) + os_memset(&sm->ptk, 0, sizeof(sm->ptk)); + sm->tptk_set = 0; + os_memset(&sm->tptk, 0, sizeof(sm->tptk)); ++ os_memset(&sm->gtk, 0, sizeof(sm->gtk)); ++#ifdef CONFIG_IEEE80211W ++ os_memset(&sm->igtk, 0, sizeof(sm->igtk)); ++#endif /* CONFIG_IEEE80211W */ + } + + #ifdef CONFIG_TDLS +@@ -2877,6 +2919,10 @@ void wpa_sm_drop_sa(struct wpa_sm *sm) + os_memset(sm->pmk, 0, sizeof(sm->pmk)); + os_memset(&sm->ptk, 0, sizeof(sm->ptk)); + os_memset(&sm->tptk, 0, sizeof(sm->tptk)); ++ os_memset(&sm->gtk, 0, sizeof(sm->gtk)); ++#ifdef CONFIG_IEEE80211W ++ os_memset(&sm->igtk, 0, sizeof(sm->igtk)); ++#endif /* CONFIG_IEEE80211W */ + #ifdef CONFIG_IEEE80211R + os_memset(sm->xxkey, 0, sizeof(sm->xxkey)); + os_memset(sm->pmk_r0, 0, sizeof(sm->pmk_r0)); +@@ -2949,29 +2995,11 @@ int wpa_wnmsleep_install_key(struct wpa_sm *sm, u8 subelem_id, u8 *buf) + os_memset(&gd, 0, sizeof(gd)); + #ifdef CONFIG_IEEE80211W + } else if (subelem_id == WNM_SLEEP_SUBELEM_IGTK) { +- struct wpa_igtk_kde igd; +- u16 keyidx; +- +- os_memset(&igd, 0, sizeof(igd)); +- keylen = wpa_cipher_key_len(sm->mgmt_group_cipher); +- os_memcpy(igd.keyid, buf + 2, 2); +- os_memcpy(igd.pn, buf + 4, 6); +- +- keyidx = WPA_GET_LE16(igd.keyid); +- os_memcpy(igd.igtk, buf + 10, keylen); +- +- wpa_hexdump_key(MSG_DEBUG, "Install IGTK (WNM SLEEP)", +- igd.igtk, keylen); +- if (wpa_sm_set_key(sm, wpa_cipher_to_alg(sm->mgmt_group_cipher), +- broadcast_ether_addr, +- keyidx, 0, igd.pn, sizeof(igd.pn), +- igd.igtk, keylen) < 0) { +- wpa_printf(MSG_DEBUG, "Failed to install the IGTK in " +- "WNM mode"); +- os_memset(&igd, 0, sizeof(igd)); ++ const struct wpa_igtk_kde *igtk; ++ ++ igtk = (const struct wpa_igtk_kde *) (buf + 2); ++ if (wpa_supplicant_install_igtk(sm, igtk) < 0) + return -1; +- } +- os_memset(&igd, 0, sizeof(igd)); + #endif /* CONFIG_IEEE80211W */ + } else { + wpa_printf(MSG_DEBUG, "Unknown element id"); +diff --git a/src/rsn_supp/wpa_i.h b/src/rsn_supp/wpa_i.h +index f653ba6..afc9e37 100644 +--- a/src/rsn_supp/wpa_i.h ++++ b/src/rsn_supp/wpa_i.h +@@ -31,6 +31,10 @@ struct wpa_sm { + u8 rx_replay_counter[WPA_REPLAY_COUNTER_LEN]; + int rx_replay_counter_set; + u8 request_counter[WPA_REPLAY_COUNTER_LEN]; ++ struct wpa_gtk gtk; ++#ifdef CONFIG_IEEE80211W ++ struct wpa_igtk igtk; ++#endif /* CONFIG_IEEE80211W */ + + struct eapol_sm *eapol; /* EAPOL state machine from upper level code */ + +-- +2.7.4 + +From 8280294e74846ea342389a0cd17215050fa5afe8 Mon Sep 17 00:00:00 2001 +From: Jouni Malinen +Date: Sun, 1 Oct 2017 12:12:24 +0300 +Subject: [PATCH 3/8] Extend protection of GTK/IGTK reinstallation of WNM-Sleep + Mode cases + +This extends the protection to track last configured GTK/IGTK value +separately from EAPOL-Key frames and WNM-Sleep Mode frames to cover a +corner case where these two different mechanisms may get used when the +GTK/IGTK has changed and tracking a single value is not sufficient to +detect a possible key reconfiguration. 
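Put differently, a group key is treated as "already installed" if it matches the copy last programmed through either delivery path, so a replayed message cannot reset the receive counters regardless of which path carried it. A small sketch of that comparison with hypothetical names (the actual patch keeps separate EAPOL-Key and WNM-Sleep copies in the supplicant state machine):

    #include <stdbool.h>
    #include <stddef.h>
    #include <string.h>

    struct last_gtk {
        unsigned char key[32];
        size_t len;                  /* 0 when no key has been installed yet */
    };

    /* Return true if 'gtk' is already live in the driver, whether it was
     * installed from an EAPOL-Key frame or from a WNM-Sleep Mode response. */
    static bool gtk_already_installed(const struct last_gtk *eapol,
                                      const struct last_gtk *wnm_sleep,
                                      const unsigned char *gtk, size_t len)
    {
        return (eapol->len == len && memcmp(eapol->key, gtk, len) == 0) ||
               (wnm_sleep->len == len && memcmp(wnm_sleep->key, gtk, len) == 0);
    }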
+ +Signed-off-by: Jouni Malinen +--- + src/rsn_supp/wpa.c | 53 +++++++++++++++++++++++++++++++++++++--------------- + src/rsn_supp/wpa_i.h | 2 ++ + 2 files changed, 40 insertions(+), 15 deletions(-) + +diff --git a/src/rsn_supp/wpa.c b/src/rsn_supp/wpa.c +index 95bd7be..7a2c68d 100644 +--- a/src/rsn_supp/wpa.c ++++ b/src/rsn_supp/wpa.c +@@ -709,14 +709,17 @@ struct wpa_gtk_data { + + static int wpa_supplicant_install_gtk(struct wpa_sm *sm, + const struct wpa_gtk_data *gd, +- const u8 *key_rsc) ++ const u8 *key_rsc, int wnm_sleep) + { + const u8 *_gtk = gd->gtk; + u8 gtk_buf[32]; + + /* Detect possible key reinstallation */ +- if (sm->gtk.gtk_len == (size_t) gd->gtk_len && +- os_memcmp(sm->gtk.gtk, gd->gtk, sm->gtk.gtk_len) == 0) { ++ if ((sm->gtk.gtk_len == (size_t) gd->gtk_len && ++ os_memcmp(sm->gtk.gtk, gd->gtk, sm->gtk.gtk_len) == 0) || ++ (sm->gtk_wnm_sleep.gtk_len == (size_t) gd->gtk_len && ++ os_memcmp(sm->gtk_wnm_sleep.gtk, gd->gtk, ++ sm->gtk_wnm_sleep.gtk_len) == 0)) { + wpa_dbg(sm->ctx->msg_ctx, MSG_DEBUG, + "WPA: Not reinstalling already in-use GTK to the driver (keyidx=%d tx=%d len=%d)", + gd->keyidx, gd->tx, gd->gtk_len); +@@ -757,8 +760,14 @@ static int wpa_supplicant_install_gtk(struct wpa_sm *sm, + } + os_memset(gtk_buf, 0, sizeof(gtk_buf)); + +- sm->gtk.gtk_len = gd->gtk_len; +- os_memcpy(sm->gtk.gtk, gd->gtk, sm->gtk.gtk_len); ++ if (wnm_sleep) { ++ sm->gtk_wnm_sleep.gtk_len = gd->gtk_len; ++ os_memcpy(sm->gtk_wnm_sleep.gtk, gd->gtk, ++ sm->gtk_wnm_sleep.gtk_len); ++ } else { ++ sm->gtk.gtk_len = gd->gtk_len; ++ os_memcpy(sm->gtk.gtk, gd->gtk, sm->gtk.gtk_len); ++ } + + return 0; + } +@@ -852,7 +861,7 @@ static int wpa_supplicant_pairwise_gtk(struct wpa_sm *sm, + (wpa_supplicant_check_group_cipher(sm, sm->group_cipher, + gtk_len, gtk_len, + &gd.key_rsc_len, &gd.alg) || +- wpa_supplicant_install_gtk(sm, &gd, key_rsc))) { ++ wpa_supplicant_install_gtk(sm, &gd, key_rsc, 0))) { + wpa_dbg(sm->ctx->msg_ctx, MSG_DEBUG, + "RSN: Failed to install GTK"); + os_memset(&gd, 0, sizeof(gd)); +@@ -868,14 +877,18 @@ static int wpa_supplicant_pairwise_gtk(struct wpa_sm *sm, + + #ifdef CONFIG_IEEE80211W + static int wpa_supplicant_install_igtk(struct wpa_sm *sm, +- const struct wpa_igtk_kde *igtk) ++ const struct wpa_igtk_kde *igtk, ++ int wnm_sleep) + { + size_t len = wpa_cipher_key_len(sm->mgmt_group_cipher); + u16 keyidx = WPA_GET_LE16(igtk->keyid); + + /* Detect possible key reinstallation */ +- if (sm->igtk.igtk_len == len && +- os_memcmp(sm->igtk.igtk, igtk->igtk, sm->igtk.igtk_len) == 0) { ++ if ((sm->igtk.igtk_len == len && ++ os_memcmp(sm->igtk.igtk, igtk->igtk, sm->igtk.igtk_len) == 0) || ++ (sm->igtk_wnm_sleep.igtk_len == len && ++ os_memcmp(sm->igtk_wnm_sleep.igtk, igtk->igtk, ++ sm->igtk_wnm_sleep.igtk_len) == 0)) { + wpa_dbg(sm->ctx->msg_ctx, MSG_DEBUG, + "WPA: Not reinstalling already in-use IGTK to the driver (keyidx=%d)", + keyidx); +@@ -900,8 +913,14 @@ static int wpa_supplicant_install_igtk(struct wpa_sm *sm, + return -1; + } + +- sm->igtk.igtk_len = len; +- os_memcpy(sm->igtk.igtk, igtk->igtk, sm->igtk.igtk_len); ++ if (wnm_sleep) { ++ sm->igtk_wnm_sleep.igtk_len = len; ++ os_memcpy(sm->igtk_wnm_sleep.igtk, igtk->igtk, ++ sm->igtk_wnm_sleep.igtk_len); ++ } else { ++ sm->igtk.igtk_len = len; ++ os_memcpy(sm->igtk.igtk, igtk->igtk, sm->igtk.igtk_len); ++ } + + return 0; + } +@@ -924,7 +943,7 @@ static int ieee80211w_set_keys(struct wpa_sm *sm, + return -1; + + igtk = (const struct wpa_igtk_kde *) ie->igtk; +- if (wpa_supplicant_install_igtk(sm, igtk) < 0) ++ if 
(wpa_supplicant_install_igtk(sm, igtk, 0) < 0) + return -1; + } + +@@ -1574,7 +1593,7 @@ static void wpa_supplicant_process_1_of_2(struct wpa_sm *sm, + if (wpa_supplicant_rsc_relaxation(sm, key->key_rsc)) + key_rsc = null_rsc; + +- if (wpa_supplicant_install_gtk(sm, &gd, key_rsc) || ++ if (wpa_supplicant_install_gtk(sm, &gd, key_rsc, 0) || + wpa_supplicant_send_2_of_2(sm, key, ver, key_info) < 0) + goto failed; + os_memset(&gd, 0, sizeof(gd)); +@@ -2386,8 +2405,10 @@ void wpa_sm_notify_assoc(struct wpa_sm *sm, const u8 *bssid) + sm->tptk_set = 0; + os_memset(&sm->tptk, 0, sizeof(sm->tptk)); + os_memset(&sm->gtk, 0, sizeof(sm->gtk)); ++ os_memset(&sm->gtk_wnm_sleep, 0, sizeof(sm->gtk_wnm_sleep)); + #ifdef CONFIG_IEEE80211W + os_memset(&sm->igtk, 0, sizeof(sm->igtk)); ++ os_memset(&sm->igtk_wnm_sleep, 0, sizeof(sm->igtk_wnm_sleep)); + #endif /* CONFIG_IEEE80211W */ + } + +@@ -2920,8 +2941,10 @@ void wpa_sm_drop_sa(struct wpa_sm *sm) + os_memset(&sm->ptk, 0, sizeof(sm->ptk)); + os_memset(&sm->tptk, 0, sizeof(sm->tptk)); + os_memset(&sm->gtk, 0, sizeof(sm->gtk)); ++ os_memset(&sm->gtk_wnm_sleep, 0, sizeof(sm->gtk_wnm_sleep)); + #ifdef CONFIG_IEEE80211W + os_memset(&sm->igtk, 0, sizeof(sm->igtk)); ++ os_memset(&sm->igtk_wnm_sleep, 0, sizeof(sm->igtk_wnm_sleep)); + #endif /* CONFIG_IEEE80211W */ + #ifdef CONFIG_IEEE80211R + os_memset(sm->xxkey, 0, sizeof(sm->xxkey)); +@@ -2986,7 +3009,7 @@ int wpa_wnmsleep_install_key(struct wpa_sm *sm, u8 subelem_id, u8 *buf) + + wpa_hexdump_key(MSG_DEBUG, "Install GTK (WNM SLEEP)", + gd.gtk, gd.gtk_len); +- if (wpa_supplicant_install_gtk(sm, &gd, key_rsc)) { ++ if (wpa_supplicant_install_gtk(sm, &gd, key_rsc, 1)) { + os_memset(&gd, 0, sizeof(gd)); + wpa_printf(MSG_DEBUG, "Failed to install the GTK in " + "WNM mode"); +@@ -2998,7 +3021,7 @@ int wpa_wnmsleep_install_key(struct wpa_sm *sm, u8 subelem_id, u8 *buf) + const struct wpa_igtk_kde *igtk; + + igtk = (const struct wpa_igtk_kde *) (buf + 2); +- if (wpa_supplicant_install_igtk(sm, igtk) < 0) ++ if (wpa_supplicant_install_igtk(sm, igtk, 1) < 0) + return -1; + #endif /* CONFIG_IEEE80211W */ + } else { +diff --git a/src/rsn_supp/wpa_i.h b/src/rsn_supp/wpa_i.h +index afc9e37..9a54631 100644 +--- a/src/rsn_supp/wpa_i.h ++++ b/src/rsn_supp/wpa_i.h +@@ -32,8 +32,10 @@ struct wpa_sm { + int rx_replay_counter_set; + u8 request_counter[WPA_REPLAY_COUNTER_LEN]; + struct wpa_gtk gtk; ++ struct wpa_gtk gtk_wnm_sleep; + #ifdef CONFIG_IEEE80211W + struct wpa_igtk igtk; ++ struct wpa_igtk igtk_wnm_sleep; + #endif /* CONFIG_IEEE80211W */ + + struct eapol_sm *eapol; /* EAPOL state machine from upper level code */ +-- +2.7.4 + +From 8f82bc94e8697a9d47fa8774dfdaaede1084912c Mon Sep 17 00:00:00 2001 +From: Mathy Vanhoef +Date: Fri, 29 Sep 2017 04:22:51 +0200 +Subject: [PATCH 4/8] Prevent installation of an all-zero TK + +Properly track whether a PTK has already been installed to the driver +and the TK part cleared from memory. This prevents an attacker from +trying to trick the client into installing an all-zero TK. + +This fixes the earlier fix in commit +ad00d64e7d8827b3cebd665a0ceb08adabf15e1e ('Fix TK configuration to the +driver in EAPOL-Key 3/4 retry case') which did not take into account +possibility of an extra message 1/4 showing up between retries of +message 3/4. 
+ +Signed-off-by: Mathy Vanhoef +--- + src/common/wpa_common.h | 1 + + src/rsn_supp/wpa.c | 5 ++--- + src/rsn_supp/wpa_i.h | 1 - + 3 files changed, 3 insertions(+), 4 deletions(-) + +diff --git a/src/common/wpa_common.h b/src/common/wpa_common.h +index d200285..1021ccb 100644 +--- a/src/common/wpa_common.h ++++ b/src/common/wpa_common.h +@@ -215,6 +215,7 @@ struct wpa_ptk { + size_t kck_len; + size_t kek_len; + size_t tk_len; ++ int installed; /* 1 if key has already been installed to driver */ + }; + + struct wpa_gtk { +diff --git a/src/rsn_supp/wpa.c b/src/rsn_supp/wpa.c +index 7a2c68d..0550a41 100644 +--- a/src/rsn_supp/wpa.c ++++ b/src/rsn_supp/wpa.c +@@ -510,7 +510,6 @@ static void wpa_supplicant_process_1_of_4(struct wpa_sm *sm, + os_memset(buf, 0, sizeof(buf)); + } + sm->tptk_set = 1; +- sm->tk_to_set = 1; + + kde = sm->assoc_wpa_ie; + kde_len = sm->assoc_wpa_ie_len; +@@ -615,7 +614,7 @@ static int wpa_supplicant_install_ptk(struct wpa_sm *sm, + enum wpa_alg alg; + const u8 *key_rsc; + +- if (!sm->tk_to_set) { ++ if (sm->ptk.installed) { + wpa_dbg(sm->ctx->msg_ctx, MSG_DEBUG, + "WPA: Do not re-install same PTK to the driver"); + return 0; +@@ -659,7 +658,7 @@ static int wpa_supplicant_install_ptk(struct wpa_sm *sm, + + /* TK is not needed anymore in supplicant */ + os_memset(sm->ptk.tk, 0, WPA_TK_MAX_LEN); +- sm->tk_to_set = 0; ++ sm->ptk.installed = 1; + + if (sm->wpa_ptk_rekey) { + eloop_cancel_timeout(wpa_sm_rekey_ptk, sm, NULL); +diff --git a/src/rsn_supp/wpa_i.h b/src/rsn_supp/wpa_i.h +index 9a54631..41f371f 100644 +--- a/src/rsn_supp/wpa_i.h ++++ b/src/rsn_supp/wpa_i.h +@@ -24,7 +24,6 @@ struct wpa_sm { + struct wpa_ptk ptk, tptk; + int ptk_set, tptk_set; + unsigned int msg_3_of_4_ok:1; +- unsigned int tk_to_set:1; + u8 snonce[WPA_NONCE_LEN]; + u8 anonce[WPA_NONCE_LEN]; /* ANonce from the last 1/4 msg */ + int renew_snonce; +-- +2.7.4 + +From 12fac09b437a1dc8a0f253e265934a8aaf4d2f8b Mon Sep 17 00:00:00 2001 +From: Jouni Malinen +Date: Sun, 1 Oct 2017 12:32:57 +0300 +Subject: [PATCH 5/8] Fix PTK rekeying to generate a new ANonce + +The Authenticator state machine path for PTK rekeying ended up bypassing +the AUTHENTICATION2 state where a new ANonce is generated when going +directly to the PTKSTART state since there is no need to try to +determine the PMK again in such a case. This is far from ideal since the +new PTK would depend on a new nonce only from the supplicant. + +Fix this by generating a new ANonce when moving to the PTKSTART state +for the purpose of starting new 4-way handshake to rekey PTK. 
+ +Signed-off-by: Jouni Malinen +--- + src/ap/wpa_auth.c | 24 +++++++++++++++++++++--- + 1 file changed, 21 insertions(+), 3 deletions(-) + +diff --git a/src/ap/wpa_auth.c b/src/ap/wpa_auth.c +index 707971d..bf10cc1 100644 +--- a/src/ap/wpa_auth.c ++++ b/src/ap/wpa_auth.c +@@ -1901,6 +1901,21 @@ SM_STATE(WPA_PTK, AUTHENTICATION2) + } + + ++static int wpa_auth_sm_ptk_update(struct wpa_state_machine *sm) ++{ ++ if (random_get_bytes(sm->ANonce, WPA_NONCE_LEN)) { ++ wpa_printf(MSG_ERROR, ++ "WPA: Failed to get random data for ANonce"); ++ sm->Disconnect = TRUE; ++ return -1; ++ } ++ wpa_hexdump(MSG_DEBUG, "WPA: Assign new ANonce", sm->ANonce, ++ WPA_NONCE_LEN); ++ sm->TimeoutCtr = 0; ++ return 0; ++} ++ ++ + SM_STATE(WPA_PTK, INITPMK) + { + u8 msk[2 * PMK_LEN]; +@@ -2458,9 +2473,12 @@ SM_STEP(WPA_PTK) + SM_ENTER(WPA_PTK, AUTHENTICATION); + else if (sm->ReAuthenticationRequest) + SM_ENTER(WPA_PTK, AUTHENTICATION2); +- else if (sm->PTKRequest) +- SM_ENTER(WPA_PTK, PTKSTART); +- else switch (sm->wpa_ptk_state) { ++ else if (sm->PTKRequest) { ++ if (wpa_auth_sm_ptk_update(sm) < 0) ++ SM_ENTER(WPA_PTK, DISCONNECTED); ++ else ++ SM_ENTER(WPA_PTK, PTKSTART); ++ } else switch (sm->wpa_ptk_state) { + case WPA_PTK_INITIALIZE: + break; + case WPA_PTK_DISCONNECT: +-- +2.7.4 + +From 6c4bed4f47d1960ec04981a9d50e5076aea5223d Mon Sep 17 00:00:00 2001 +From: Jouni Malinen +Date: Fri, 22 Sep 2017 11:03:15 +0300 +Subject: [PATCH 6/8] TDLS: Reject TPK-TK reconfiguration + +Do not try to reconfigure the same TPK-TK to the driver after it has +been successfully configured. This is an explicit check to avoid issues +related to resetting the TX/RX packet number. There was already a check +for this for TPK M2 (retries of that message are ignored completely), so +that behavior does not get modified. + +For TPK M3, the TPK-TK could have been reconfigured, but that was +followed by immediate teardown of the link due to an issue in updating +the STA entry. Furthermore, for TDLS with any real security (i.e., +ignoring open/WEP), the TPK message exchange is protected on the AP path +and simple replay attacks are not feasible. + +As an additional corner case, make sure the local nonce gets updated if +the peer uses a very unlikely "random nonce" of all zeros. + +Signed-off-by: Jouni Malinen +--- + src/rsn_supp/tdls.c | 38 ++++++++++++++++++++++++++++++++++++-- + 1 file changed, 36 insertions(+), 2 deletions(-) + +diff --git a/src/rsn_supp/tdls.c b/src/rsn_supp/tdls.c +index e424168..9eb9738 100644 +--- a/src/rsn_supp/tdls.c ++++ b/src/rsn_supp/tdls.c +@@ -112,6 +112,7 @@ struct wpa_tdls_peer { + u8 tk[16]; /* TPK-TK; assuming only CCMP will be used */ + } tpk; + int tpk_set; ++ int tk_set; /* TPK-TK configured to the driver */ + int tpk_success; + int tpk_in_progress; + +@@ -192,6 +193,20 @@ static int wpa_tdls_set_key(struct wpa_sm *sm, struct wpa_tdls_peer *peer) + u8 rsc[6]; + enum wpa_alg alg; + ++ if (peer->tk_set) { ++ /* ++ * This same TPK-TK has already been configured to the driver ++ * and this new configuration attempt (likely due to an ++ * unexpected retransmitted frame) would result in clearing ++ * the TX/RX sequence number which can break security, so must ++ * not allow that to happen. 
++ */ ++ wpa_printf(MSG_INFO, "TDLS: TPK-TK for the peer " MACSTR ++ " has already been configured to the driver - do not reconfigure", ++ MAC2STR(peer->addr)); ++ return -1; ++ } ++ + os_memset(rsc, 0, 6); + + switch (peer->cipher) { +@@ -209,12 +224,15 @@ static int wpa_tdls_set_key(struct wpa_sm *sm, struct wpa_tdls_peer *peer) + return -1; + } + ++ wpa_printf(MSG_DEBUG, "TDLS: Configure pairwise key for peer " MACSTR, ++ MAC2STR(peer->addr)); + if (wpa_sm_set_key(sm, alg, peer->addr, -1, 1, + rsc, sizeof(rsc), peer->tpk.tk, key_len) < 0) { + wpa_printf(MSG_WARNING, "TDLS: Failed to set TPK to the " + "driver"); + return -1; + } ++ peer->tk_set = 1; + return 0; + } + +@@ -696,7 +714,7 @@ static void wpa_tdls_peer_clear(struct wpa_sm *sm, struct wpa_tdls_peer *peer) + peer->cipher = 0; + peer->qos_info = 0; + peer->wmm_capable = 0; +- peer->tpk_set = peer->tpk_success = 0; ++ peer->tk_set = peer->tpk_set = peer->tpk_success = 0; + peer->chan_switch_enabled = 0; + os_memset(&peer->tpk, 0, sizeof(peer->tpk)); + os_memset(peer->inonce, 0, WPA_NONCE_LEN); +@@ -1159,6 +1177,7 @@ skip_rsnie: + wpa_tdls_peer_free(sm, peer); + return -1; + } ++ peer->tk_set = 0; /* A new nonce results in a new TK */ + wpa_hexdump(MSG_DEBUG, "TDLS: Initiator Nonce for TPK handshake", + peer->inonce, WPA_NONCE_LEN); + os_memcpy(ftie->Snonce, peer->inonce, WPA_NONCE_LEN); +@@ -1751,6 +1770,19 @@ static int wpa_tdls_addset_peer(struct wpa_sm *sm, struct wpa_tdls_peer *peer, + } + + ++static int tdls_nonce_set(const u8 *nonce) ++{ ++ int i; ++ ++ for (i = 0; i < WPA_NONCE_LEN; i++) { ++ if (nonce[i]) ++ return 1; ++ } ++ ++ return 0; ++} ++ ++ + static int wpa_tdls_process_tpk_m1(struct wpa_sm *sm, const u8 *src_addr, + const u8 *buf, size_t len) + { +@@ -2004,7 +2036,8 @@ skip_rsn: + peer->rsnie_i_len = kde.rsn_ie_len; + peer->cipher = cipher; + +- if (os_memcmp(peer->inonce, ftie->Snonce, WPA_NONCE_LEN) != 0) { ++ if (os_memcmp(peer->inonce, ftie->Snonce, WPA_NONCE_LEN) != 0 || ++ !tdls_nonce_set(peer->inonce)) { + /* + * There is no point in updating the RNonce for every obtained + * TPK M1 frame (e.g., retransmission due to timeout) with the +@@ -2020,6 +2053,7 @@ skip_rsn: + "TDLS: Failed to get random data for responder nonce"); + goto error; + } ++ peer->tk_set = 0; /* A new nonce results in a new TK */ + } + + #if 0 +-- +2.7.4 + +From 53c5eb58e95004f86e65ee9fbfccbc291b139057 Mon Sep 17 00:00:00 2001 +From: Jouni Malinen +Date: Fri, 22 Sep 2017 11:25:02 +0300 +Subject: [PATCH 7/8] WNM: Ignore WNM-Sleep Mode Response without pending + request + +Commit 03ed0a52393710be6bdae657d1b36efa146520e5 ('WNM: Ignore WNM-Sleep +Mode Response if WNM-Sleep Mode has not been used') started ignoring the +response when no WNM-Sleep Mode Request had been used during the +association. This can be made tighter by clearing the used flag when +successfully processing a response. This adds an additional layer of +protection against unexpected retransmissions of the response frame. 
+ +Signed-off-by: Jouni Malinen +--- + wpa_supplicant/wnm_sta.c | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/wpa_supplicant/wnm_sta.c b/wpa_supplicant/wnm_sta.c +index 1b3409c..67a07ff 100644 +--- a/wpa_supplicant/wnm_sta.c ++++ b/wpa_supplicant/wnm_sta.c +@@ -260,7 +260,7 @@ static void ieee802_11_rx_wnmsleep_resp(struct wpa_supplicant *wpa_s, + + if (!wpa_s->wnmsleep_used) { + wpa_printf(MSG_DEBUG, +- "WNM: Ignore WNM-Sleep Mode Response frame since WNM-Sleep Mode has not been used in this association"); ++ "WNM: Ignore WNM-Sleep Mode Response frame since WNM-Sleep Mode operation has not been requested"); + return; + } + +@@ -299,6 +299,8 @@ static void ieee802_11_rx_wnmsleep_resp(struct wpa_supplicant *wpa_s, + return; + } + ++ wpa_s->wnmsleep_used = 0; ++ + if (wnmsleep_ie->status == WNM_STATUS_SLEEP_ACCEPT || + wnmsleep_ie->status == WNM_STATUS_SLEEP_EXIT_ACCEPT_GTK_UPDATE) { + wpa_printf(MSG_DEBUG, "Successfully recv WNM-Sleep Response " +-- +2.7.4 + +From b372ab0b7daea719749194dc554b26e6367603f2 Mon Sep 17 00:00:00 2001 +From: Jouni Malinen +Date: Fri, 22 Sep 2017 12:06:37 +0300 +Subject: [PATCH 8/8] FT: Do not allow multiple Reassociation Response frames + +The driver is expected to not report a second association event without +the station having explicitly request a new association. As such, this +case should not be reachable. However, since reconfiguring the same +pairwise or group keys to the driver could result in nonce reuse issues, +be extra careful here and do an additional state check to avoid this +even if the local driver ends up somehow accepting an unexpected +Reassociation Response frame. + +Signed-off-by: Jouni Malinen +--- + src/rsn_supp/wpa.c | 3 +++ + src/rsn_supp/wpa_ft.c | 8 ++++++++ + src/rsn_supp/wpa_i.h | 1 + + 3 files changed, 12 insertions(+) + +diff --git a/src/rsn_supp/wpa.c b/src/rsn_supp/wpa.c +index 0550a41..2a53c6f 100644 +--- a/src/rsn_supp/wpa.c ++++ b/src/rsn_supp/wpa.c +@@ -2440,6 +2440,9 @@ void wpa_sm_notify_disassoc(struct wpa_sm *sm) + #ifdef CONFIG_TDLS + wpa_tdls_disassoc(sm); + #endif /* CONFIG_TDLS */ ++#ifdef CONFIG_IEEE80211R ++ sm->ft_reassoc_completed = 0; ++#endif /* CONFIG_IEEE80211R */ + + /* Keys are not needed in the WPA state machine anymore */ + wpa_sm_drop_sa(sm); +diff --git a/src/rsn_supp/wpa_ft.c b/src/rsn_supp/wpa_ft.c +index 205793e..d45bb45 100644 +--- a/src/rsn_supp/wpa_ft.c ++++ b/src/rsn_supp/wpa_ft.c +@@ -153,6 +153,7 @@ static u8 * wpa_ft_gen_req_ies(struct wpa_sm *sm, size_t *len, + u16 capab; + + sm->ft_completed = 0; ++ sm->ft_reassoc_completed = 0; + + buf_len = 2 + sizeof(struct rsn_mdie) + 2 + sizeof(struct rsn_ftie) + + 2 + sm->r0kh_id_len + ric_ies_len + 100; +@@ -681,6 +682,11 @@ int wpa_ft_validate_reassoc_resp(struct wpa_sm *sm, const u8 *ies, + return -1; + } + ++ if (sm->ft_reassoc_completed) { ++ wpa_printf(MSG_DEBUG, "FT: Reassociation has already been completed for this FT protocol instance - ignore unexpected retransmission"); ++ return 0; ++ } ++ + if (wpa_ft_parse_ies(ies, ies_len, &parse) < 0) { + wpa_printf(MSG_DEBUG, "FT: Failed to parse IEs"); + return -1; +@@ -781,6 +787,8 @@ int wpa_ft_validate_reassoc_resp(struct wpa_sm *sm, const u8 *ies, + return -1; + } + ++ sm->ft_reassoc_completed = 1; ++ + if (wpa_ft_process_gtk_subelem(sm, parse.gtk, parse.gtk_len) < 0) + return -1; + +diff --git a/src/rsn_supp/wpa_i.h b/src/rsn_supp/wpa_i.h +index 41f371f..56f88dc 100644 +--- a/src/rsn_supp/wpa_i.h ++++ b/src/rsn_supp/wpa_i.h +@@ -128,6 +128,7 @@ struct wpa_sm { + size_t 
r0kh_id_len; + u8 r1kh_id[FT_R1KH_ID_LEN]; + int ft_completed; ++ int ft_reassoc_completed; + int over_the_ds_in_progress; + u8 target_ap[ETH_ALEN]; /* over-the-DS target AP */ + int set_ptk_after_assoc; +-- +2.7.4 diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.5.bb b/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.5.bb deleted file mode 100644 index a4160e1c5..000000000 --- a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.5.bb +++ /dev/null @@ -1,113 +0,0 @@ -SUMMARY = "Client for Wi-Fi Protected Access (WPA)" -HOMEPAGE = "http://w1.fi/wpa_supplicant/" -BUGTRACKER = "http://w1.fi/security/" -SECTION = "network" -LICENSE = "BSD" -LIC_FILES_CHKSUM = "file://COPYING;md5=36b27801447e0662ee0138d17fe93880 \ - file://README;beginline=1;endline=56;md5=7f393579f8b109fe91f3b9765d26c7d3 \ - file://wpa_supplicant/wpa_supplicant.c;beginline=1;endline=12;md5=3430fda79f2ba1dd545f0b3c4d6e4d24" -DEPENDS = "dbus libnl" -RRECOMMENDS_${PN} = "wpa-supplicant-passphrase wpa-supplicant-cli" - -PACKAGECONFIG ??= "gnutls" -PACKAGECONFIG[gnutls] = ",,gnutls libgcrypt" -PACKAGECONFIG[openssl] = ",,openssl" - -inherit systemd - -SYSTEMD_SERVICE_${PN} = "wpa_supplicant.service wpa_supplicant-nl80211@.service wpa_supplicant-wired@.service" -SYSTEMD_AUTO_ENABLE = "disable" - -SRC_URI = "http://w1.fi/releases/wpa_supplicant-${PV}.tar.gz \ - file://defconfig \ - file://wpa-supplicant.sh \ - file://wpa_supplicant.conf \ - file://wpa_supplicant.conf-sane \ - file://99_wpa_supplicant \ - file://0001-WPS-Reject-a-Credential-with-invalid-passphrase.patch \ - file://0002-Remove-newlines-from-wpa_supplicant-config-network-o.patch \ - file://0001-Reject-psk-parameter-set-with-invalid-passphrase-cha.patch \ - file://0002-Reject-SET_CRED-commands-with-newline-characters-in-.patch \ - file://0003-Reject-SET-commands-with-newline-characters-in-the-s.patch \ - " -SRC_URI[md5sum] = "96ff75c3a514f1f324560a2376f13110" -SRC_URI[sha256sum] = "cce55bae483b364eae55c35ba567c279be442ed8bab5b80a3c7fb0d057b9b316" - -S = "${WORKDIR}/wpa_supplicant-${PV}" - -PACKAGES_prepend = "wpa-supplicant-passphrase wpa-supplicant-cli " -FILES_wpa-supplicant-passphrase = "${bindir}/wpa_passphrase" -FILES_wpa-supplicant-cli = "${sbindir}/wpa_cli" -FILES_${PN} += "${datadir}/dbus-1/system-services/*" -CONFFILES_${PN} += "${sysconfdir}/wpa_supplicant.conf" - -do_configure () { - ${MAKE} -C wpa_supplicant clean - install -m 0755 ${WORKDIR}/defconfig wpa_supplicant/.config - echo "CFLAGS +=\"-I${STAGING_INCDIR}/libnl3\"" >> wpa_supplicant/.config - echo "DRV_CFLAGS +=\"-I${STAGING_INCDIR}/libnl3\"" >> wpa_supplicant/.config - - if echo "${PACKAGECONFIG}" | grep -qw "openssl"; then - ssl=openssl - elif echo "${PACKAGECONFIG}" | grep -qw "gnutls"; then - ssl=gnutls - fi - if [ -n "$ssl" ]; then - sed -i "s/%ssl%/$ssl/" wpa_supplicant/.config - fi - - # For rebuild - rm -f wpa_supplicant/*.d wpa_supplicant/dbus/*.d -} - -export EXTRA_CFLAGS = "${CFLAGS}" -export BINDIR = "${sbindir}" - -do_compile () { - unset CFLAGS CPPFLAGS CXXFLAGS - sed -e "s:CFLAGS\ =.*:& \$(EXTRA_CFLAGS):g" -i ${S}/src/lib.rules - oe_runmake -C wpa_supplicant -} - -do_install () { - install -d ${D}${sbindir} - install -m 755 wpa_supplicant/wpa_supplicant ${D}${sbindir} - install -m 755 wpa_supplicant/wpa_cli ${D}${sbindir} - - install -d ${D}${bindir} - install -m 755 wpa_supplicant/wpa_passphrase ${D}${bindir} - - install -d ${D}${docdir}/wpa_supplicant - install -m 644 
wpa_supplicant/README ${WORKDIR}/wpa_supplicant.conf ${D}${docdir}/wpa_supplicant - - install -d ${D}${sysconfdir} - install -m 600 ${WORKDIR}/wpa_supplicant.conf-sane ${D}${sysconfdir}/wpa_supplicant.conf - - install -d ${D}${sysconfdir}/network/if-pre-up.d/ - install -d ${D}${sysconfdir}/network/if-post-down.d/ - install -d ${D}${sysconfdir}/network/if-down.d/ - install -m 755 ${WORKDIR}/wpa-supplicant.sh ${D}${sysconfdir}/network/if-pre-up.d/wpa-supplicant - cd ${D}${sysconfdir}/network/ && \ - ln -sf ../if-pre-up.d/wpa-supplicant if-post-down.d/wpa-supplicant - - install -d ${D}/${sysconfdir}/dbus-1/system.d - install -m 644 ${S}/wpa_supplicant/dbus/dbus-wpa_supplicant.conf ${D}/${sysconfdir}/dbus-1/system.d - install -d ${D}/${datadir}/dbus-1/system-services - install -m 644 ${S}/wpa_supplicant/dbus/*.service ${D}/${datadir}/dbus-1/system-services - - if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then - install -d ${D}/${systemd_unitdir}/system - install -m 644 ${S}/wpa_supplicant/systemd/*.service ${D}/${systemd_unitdir}/system - fi - - install -d ${D}/etc/default/volatiles - install -m 0644 ${WORKDIR}/99_wpa_supplicant ${D}/etc/default/volatiles -} - -pkg_postinst_wpa-supplicant () { - # If we're offline, we don't need to do this. - if [ "x$D" = "x" ]; then - killall -q -HUP dbus-daemon || true - fi - -} diff --git a/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.6.bb b/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.6.bb new file mode 100644 index 000000000..d6d4206a5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-connectivity/wpa-supplicant/wpa-supplicant_2.6.bb @@ -0,0 +1,111 @@ +SUMMARY = "Client for Wi-Fi Protected Access (WPA)" +HOMEPAGE = "http://w1.fi/wpa_supplicant/" +BUGTRACKER = "http://w1.fi/security/" +SECTION = "network" +LICENSE = "BSD" +LIC_FILES_CHKSUM = "file://COPYING;md5=292eece3f2ebbaa25608eed8464018a3 \ + file://README;beginline=1;endline=56;md5=3f01d778be8f953962388307ee38ed2b \ + file://wpa_supplicant/wpa_supplicant.c;beginline=1;endline=12;md5=4061612fc5715696134e3baf933e8aba" +DEPENDS = "dbus libnl" +RRECOMMENDS_${PN} = "wpa-supplicant-passphrase wpa-supplicant-cli" + +PACKAGECONFIG ??= "gnutls" +PACKAGECONFIG[gnutls] = ",,gnutls libgcrypt" +PACKAGECONFIG[openssl] = ",,openssl" + +inherit pkgconfig systemd + +SYSTEMD_SERVICE_${PN} = "wpa_supplicant.service wpa_supplicant-nl80211@.service wpa_supplicant-wired@.service" +SYSTEMD_AUTO_ENABLE = "disable" + +SRC_URI = "http://w1.fi/releases/wpa_supplicant-${PV}.tar.gz \ + file://defconfig \ + file://wpa-supplicant.sh \ + file://wpa_supplicant.conf \ + file://wpa_supplicant.conf-sane \ + file://99_wpa_supplicant \ + file://key-replay-cve-multiple.patch \ + " +SRC_URI[md5sum] = "091569eb4440b7d7f2b4276dbfc03c3c" +SRC_URI[sha256sum] = "b4936d34c4e6cdd44954beba74296d964bc2c9668ecaa5255e499636fe2b1450" + +CVE_PRODUCT = "wpa_supplicant" + +S = "${WORKDIR}/wpa_supplicant-${PV}" + +PACKAGES_prepend = "wpa-supplicant-passphrase wpa-supplicant-cli " +FILES_wpa-supplicant-passphrase = "${bindir}/wpa_passphrase" +FILES_wpa-supplicant-cli = "${sbindir}/wpa_cli" +FILES_${PN} += "${datadir}/dbus-1/system-services/*" +CONFFILES_${PN} += "${sysconfdir}/wpa_supplicant.conf" + +do_configure () { + ${MAKE} -C wpa_supplicant clean + install -m 0755 ${WORKDIR}/defconfig wpa_supplicant/.config + echo "CFLAGS +=\"-I${STAGING_INCDIR}/libnl3\"" >> wpa_supplicant/.config + echo "DRV_CFLAGS +=\"-I${STAGING_INCDIR}/libnl3\"" >> 
wpa_supplicant/.config + + if echo "${PACKAGECONFIG}" | grep -qw "openssl"; then + ssl=openssl + elif echo "${PACKAGECONFIG}" | grep -qw "gnutls"; then + ssl=gnutls + fi + if [ -n "$ssl" ]; then + sed -i "s/%ssl%/$ssl/" wpa_supplicant/.config + fi + + # For rebuild + rm -f wpa_supplicant/*.d wpa_supplicant/dbus/*.d +} + +export EXTRA_CFLAGS = "${CFLAGS}" +export BINDIR = "${sbindir}" + +do_compile () { + unset CFLAGS CPPFLAGS CXXFLAGS + sed -e "s:CFLAGS\ =.*:& \$(EXTRA_CFLAGS):g" -i ${S}/src/lib.rules + oe_runmake -C wpa_supplicant +} + +do_install () { + install -d ${D}${sbindir} + install -m 755 wpa_supplicant/wpa_supplicant ${D}${sbindir} + install -m 755 wpa_supplicant/wpa_cli ${D}${sbindir} + + install -d ${D}${bindir} + install -m 755 wpa_supplicant/wpa_passphrase ${D}${bindir} + + install -d ${D}${docdir}/wpa_supplicant + install -m 644 wpa_supplicant/README ${WORKDIR}/wpa_supplicant.conf ${D}${docdir}/wpa_supplicant + + install -d ${D}${sysconfdir} + install -m 600 ${WORKDIR}/wpa_supplicant.conf-sane ${D}${sysconfdir}/wpa_supplicant.conf + + install -d ${D}${sysconfdir}/network/if-pre-up.d/ + install -d ${D}${sysconfdir}/network/if-post-down.d/ + install -d ${D}${sysconfdir}/network/if-down.d/ + install -m 755 ${WORKDIR}/wpa-supplicant.sh ${D}${sysconfdir}/network/if-pre-up.d/wpa-supplicant + cd ${D}${sysconfdir}/network/ && \ + ln -sf ../if-pre-up.d/wpa-supplicant if-post-down.d/wpa-supplicant + + install -d ${D}/${sysconfdir}/dbus-1/system.d + install -m 644 ${S}/wpa_supplicant/dbus/dbus-wpa_supplicant.conf ${D}/${sysconfdir}/dbus-1/system.d + install -d ${D}/${datadir}/dbus-1/system-services + install -m 644 ${S}/wpa_supplicant/dbus/*.service ${D}/${datadir}/dbus-1/system-services + + if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then + install -d ${D}/${systemd_unitdir}/system + install -m 644 ${S}/wpa_supplicant/systemd/*.service ${D}/${systemd_unitdir}/system + fi + + install -d ${D}/etc/default/volatiles + install -m 0644 ${WORKDIR}/99_wpa_supplicant ${D}/etc/default/volatiles +} + +pkg_postinst_wpa-supplicant () { + # If we're offline, we don't need to do this. + if [ "x$D" = "x" ]; then + killall -q -HUP dbus-daemon || true + fi + +} diff --git a/import-layers/yocto-poky/meta/recipes-core/base-files/base-files/profile b/import-layers/yocto-poky/meta/recipes-core/base-files/base-files/profile index c616616ce..ceaf15f79 100644 --- a/import-layers/yocto-poky/meta/recipes-core/base-files/base-files/profile +++ b/import-layers/yocto-poky/meta/recipes-core/base-files/base-files/profile @@ -6,28 +6,29 @@ EDITOR="vi" # needed for packages like cron, git-commit test -z "$TERM" && TERM="vt100" # Basic terminal capab. For screen etc. if [ "$HOME" = "ROOTHOME" ]; then - PATH=$PATH:/usr/local/sbin:/usr/sbin:/sbin + PATH=$PATH:/usr/local/sbin:/usr/sbin:/sbin fi if [ "$PS1" ]; then -# works for bash and ash (no other shells known to be in use here) - PS1='\u@\h:\w\$ ' + # works for bash and ash (no other shells known to be in use here) + PS1='\u@\h:\w\$ ' fi if [ -d /etc/profile.d ]; then - for i in /etc/profile.d/*.sh ; do - if [ -f $i -a -r $i ]; then - . $i - fi - done - unset i + for i in /etc/profile.d/*.sh; do + if [ -f $i -a -r $i ]; then + . $i + fi + done + unset i fi -if [ -x /usr/bin/resize ] && termpath="`tty`"; then - # Make sure we are on a serial console (i.e. the device used starts with /dev/tty), - # otherwise we confuse e.g. 
the eclipse launcher which tries do use ssh - case "$termpath" in - /dev/tty[A-z]*) resize >/dev/null - esac +if command -v resize >/dev/null && command -v tty >/dev/null; then + # Make sure we are on a serial console (i.e. the device used starts with + # /dev/tty[A-z]), otherwise we confuse e.g. the eclipse launcher which + # tries do use ssh + case $(tty) in + /dev/tty[A-z]*) resize >/dev/null;; + esac fi export PATH PS1 OPIEDIR QPEDIR QTDIR EDITOR TERM diff --git a/import-layers/yocto-poky/meta/recipes-core/base-files/base-files_3.0.14.bb b/import-layers/yocto-poky/meta/recipes-core/base-files/base-files_3.0.14.bb index 533311061..ca7bf0635 100644 --- a/import-layers/yocto-poky/meta/recipes-core/base-files/base-files_3.0.14.bb +++ b/import-layers/yocto-poky/meta/recipes-core/base-files/base-files_3.0.14.bb @@ -32,8 +32,9 @@ INHIBIT_DEFAULT_DEPS = "1" docdir_append = "/${P}" dirs1777 = "/tmp ${localstatedir}/volatile/tmp" dirs2775 = "" -dirs755 = "/bin /boot /dev ${sysconfdir} ${sysconfdir}/default \ - ${sysconfdir}/skel /lib /mnt /proc ${ROOT_HOME} /run /sbin \ +dirs755 = "/boot /dev ${base_bindir} ${base_sbindir} ${base_libdir} \ + ${sysconfdir} ${sysconfdir}/default \ + ${sysconfdir}/skel ${nonarch_base_libdir} /mnt /proc ${ROOT_HOME} /run \ ${prefix} ${bindir} ${docdir} /usr/games ${includedir} \ ${libdir} ${sbindir} ${datadir} \ ${datadir}/common-licenses ${datadir}/dict ${infodir} \ @@ -127,10 +128,6 @@ do_install () { install -m 0644 ${WORKDIR}/host.conf ${D}${sysconfdir}/host.conf install -m 0644 ${WORKDIR}/motd ${D}${sysconfdir}/motd - if [ "/usr/bin" != "${bindir}" ]; then - sed -i "s,/usr/bin/resize,${bindir}/resize," ${D}${sysconfdir}/profile - fi - ln -sf /proc/mounts ${D}${sysconfdir}/mtab } @@ -145,8 +142,9 @@ do_install_basefilesissue () { printf "${DISTRO_NAME} " >> ${D}${sysconfdir}/issue printf "${DISTRO_NAME} " >> ${D}${sysconfdir}/issue.net if [ -n "${DISTRO_VERSION}" ]; then - printf "${DISTRO_VERSION} " >> ${D}${sysconfdir}/issue - printf "${DISTRO_VERSION} " >> ${D}${sysconfdir}/issue.net + distro_version_nodate=${@'${DISTRO_VERSION}'.replace('snapshot-${DATE}','snapshot').replace('${DATE}','')} + printf "%s " $distro_version_nodate >> ${D}${sysconfdir}/issue + printf "%s " $distro_version_nodate >> ${D}${sysconfdir}/issue.net fi printf "\\\n \\\l\n" >> ${D}${sysconfdir}/issue echo >> ${D}${sysconfdir}/issue @@ -154,6 +152,7 @@ do_install_basefilesissue () { echo >> ${D}${sysconfdir}/issue.net fi } +do_install_basefilesissue[vardepsexclude] += "DATE" do_install_append_linuxstdbase() { for d in ${dirs755-lsb}; do @@ -173,5 +172,5 @@ FILES_${PN}-doc = "${docdir} ${datadir}/common-licenses" PACKAGE_ARCH = "${MACHINE_ARCH}" -CONFFILES_${PN} = "${sysconfdir}/fstab ${@['', '${sysconfdir}/hostname'][(d.getVar('hostname', True) != '')]} ${sysconfdir}/shells" +CONFFILES_${PN} = "${sysconfdir}/fstab ${@['', '${sysconfdir}/hostname'][(d.getVar('hostname') != '')]} ${sysconfdir}/shells" CONFFILES_${PN} += "${sysconfdir}/motd ${sysconfdir}/nsswitch.conf ${sysconfdir}/profile" diff --git a/import-layers/yocto-poky/meta/recipes-core/base-passwd/base-passwd_3.5.29.bb b/import-layers/yocto-poky/meta/recipes-core/base-passwd/base-passwd_3.5.29.bb index 10457b2de..c6be1c1d0 100644 --- a/import-layers/yocto-poky/meta/recipes-core/base-passwd/base-passwd_3.5.29.bb +++ b/import-layers/yocto-poky/meta/recipes-core/base-passwd/base-passwd_3.5.29.bb @@ -23,8 +23,6 @@ UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/b/base-passwd/" inherit autotools -SSTATEPOSTINSTFUNCS += 
"base_passwd_sstate_postinst" - do_install () { install -d -m 755 ${D}${sbindir} install -o root -g root -p -m 755 ${B}/update-passwd ${D}${sbindir}/ @@ -45,23 +43,32 @@ do_install () { install -p -m 644 ${S}/debian/copyright ${D}${docdir}/${BPN}/ } -base_passwd_sstate_postinst() { - if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] - then - # Staging does not copy ${sysconfdir} files into the - # target sysroot, so we need to do so manually. We - # put these files in the target sysroot so they can - # be used by recipes which use custom user/group - # permissions. - # Install passwd.master and group.master to sysconfdir and mv - # them to make sure they are atomically install. - install -d -m 755 ${STAGING_DIR_TARGET}${sysconfdir} - for i in passwd group; do - install -p -m 644 ${STAGING_DIR_TARGET}${datadir}/base-passwd/$i.master \ - ${STAGING_DIR_TARGET}${sysconfdir}/ - mv ${STAGING_DIR_TARGET}${sysconfdir}/$i.master ${STAGING_DIR_TARGET}${sysconfdir}/$i - done +basepasswd_sysroot_postinst() { +#!/bin/sh + +# Install passwd.master and group.master to sysconfdir +install -d -m 755 ${STAGING_DIR_TARGET}${sysconfdir} +for i in passwd group; do + install -p -m 644 ${STAGING_DIR_TARGET}${datadir}/base-passwd/\$i.master \ + ${STAGING_DIR_TARGET}${sysconfdir}/\$i +done + +# Run any useradd postinsts +for script in ${STAGING_DIR_TARGET}${bindir}/postinst-useradd-*; do + if [ -f \$script ]; then + \$script fi +done +} + +SYSROOT_DIRS += "${sysconfdir}" +SYSROOT_PREPROCESS_FUNCS += "base_passwd_tweaksysroot" + +base_passwd_tweaksysroot () { + mkdir -p ${SYSROOT_DESTDIR}${bindir} + dest=${SYSROOT_DESTDIR}${bindir}/postinst-${PN} + echo "${basepasswd_sysroot_postinst}" > $dest + chmod 0755 $dest } python populate_packages_prepend() { diff --git a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox.inc b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox.inc index b2f196022..adc6e9a71 100644 --- a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox.inc +++ b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox.inc @@ -18,7 +18,6 @@ BUSYBOX_SPLIT_SUID ?= "1" export EXTRA_CFLAGS = "${CFLAGS}" export EXTRA_LDFLAGS = "${LDFLAGS}" -# We don't want '-e MAKEFLAGS=' in EXTRA_OEMAKE EXTRA_OEMAKE = "CC='${CC}' LD='${CCLD}' V=1 ARCH=${TARGET_ARCH} CROSS_COMPILE=${TARGET_PREFIX} SKIP_STRIP=y HOSTCC='${BUILD_CC}' HOSTCPP='${BUILD_CPP}'" PACKAGES =+ "${PN}-httpd ${PN}-udhcpd ${PN}-udhcpc ${PN}-syslog ${PN}-mdev ${PN}-hwclock" @@ -63,8 +62,8 @@ def busybox_cfg(feature, tokens, cnf, rem): def features_to_busybox_settings(d): cnf, rem = ([], []) busybox_cfg(bb.utils.contains('DISTRO_FEATURES', 'ipv6', True, False, d), 'CONFIG_FEATURE_IPV6', cnf, rem) - busybox_cfg(bb.utils.contains('DISTRO_FEATURES', 'largefile', True, False, d), 'CONFIG_LFS', cnf, rem) - busybox_cfg(bb.utils.contains('DISTRO_FEATURES', 'largefile', True, False, d), 'CONFIG_FDISK_SUPPORT_LARGE_DISKS', cnf, rem) + busybox_cfg(True, 'CONFIG_LFS', cnf, rem) + busybox_cfg(True, 'CONFIG_FDISK_SUPPORT_LARGE_DISKS', cnf, rem) busybox_cfg(bb.utils.contains('DISTRO_FEATURES', 'nls', True, False, d), 'CONFIG_LOCALE_SUPPORT', cnf, rem) busybox_cfg(bb.utils.contains('DISTRO_FEATURES', 'ipv4', True, False, d), 'CONFIG_FEATURE_IFUPDOWN_IPV4', cnf, rem) busybox_cfg(bb.utils.contains('DISTRO_FEATURES', 'ipv6', True, False, d), 'CONFIG_FEATURE_IFUPDOWN_IPV6', cnf, rem) @@ -141,6 +140,10 @@ do_compile() { unset CFLAGS CPPFLAGS CXXFLAGS LDFLAGS if [ "${BUSYBOX_SPLIT_SUID}" = "1" -a x`grep 
"CONFIG_FEATURE_INDIVIDUAL=y" .config` = x ]; then # split the .config into two parts, and make two busybox binaries + if [ -e .config.orig ]; then + # Need to guard again an interrupted do_compile - restore any backup + cp .config.orig .config + fi cp .config .config.orig oe_runmake busybox.cfg.suid oe_runmake busybox.cfg.nosuid @@ -331,21 +334,21 @@ ALTERNATIVE_LINK_NAME[syslog-conf] = "${sysconfdir}/syslog.conf" python () { if bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d): - pn = d.getVar('PN', True) + pn = d.getVar('PN') d.appendVar('ALTERNATIVE_%s-syslog' % (pn), ' syslog-init') - d.setVarFlag('ALTERNATIVE_LINK_NAME', 'syslog-init', '%s/init.d/syslog' % (d.getVar('sysconfdir', True))) - d.setVarFlag('ALTERNATIVE_TARGET', 'syslog-init', '%s/init.d/syslog.%s' % (d.getVar('sysconfdir', True), d.getVar('BPN', True))) + d.setVarFlag('ALTERNATIVE_LINK_NAME', 'syslog-init', '%s/init.d/syslog' % (d.getVar('sysconfdir'))) + d.setVarFlag('ALTERNATIVE_TARGET', 'syslog-init', '%s/init.d/syslog.%s' % (d.getVar('sysconfdir'), d.getVar('BPN'))) d.appendVar('ALTERNATIVE_%s-syslog' % (pn), ' syslog-startup-conf') - d.setVarFlag('ALTERNATIVE_LINK_NAME', 'syslog-startup-conf', '%s/syslog-startup.conf' % (d.getVar('sysconfdir', True))) - d.setVarFlag('ALTERNATIVE_TARGET', 'syslog-startup-conf', '%s/syslog-startup.conf.%s' % (d.getVar('sysconfdir', True), d.getVar('BPN', True))) + d.setVarFlag('ALTERNATIVE_LINK_NAME', 'syslog-startup-conf', '%s/syslog-startup.conf' % (d.getVar('sysconfdir'))) + d.setVarFlag('ALTERNATIVE_TARGET', 'syslog-startup-conf', '%s/syslog-startup.conf.%s' % (d.getVar('sysconfdir'), d.getVar('BPN'))) } python do_package_prepend () { # We need to load the full set of busybox provides from the /etc/busybox.links # Use this to see the update-alternatives with the right information - dvar = d.getVar('D', True) - pn = d.getVar('PN', True) + dvar = d.getVar('D') + pn = d.getVar('PN') def set_alternative_vars(links, target): links = d.expand(links) target = d.expand(target) @@ -395,6 +398,9 @@ pkg_postinst_${PN} () { fi done fi + if grep -q "^${base_bindir}/bash$" $D${sysconfdir}/busybox.links*; then + grep -q "^${base_bindir}/bash$" $D${sysconfdir}/shells || echo ${base_bindir}/bash >> $D${sysconfdir}/shells + fi } pkg_prerm_${PN} () { @@ -419,6 +425,12 @@ pkg_prerm_${PN} () { export PATH=$PATH:$tmpdir } +pkg_postrm_${PN} () { + if grep -q "^${base_bindir}/bash$" $D${sysconfdir}/busybox.links* && [ ! 
-e $D${base_bindir}/bash ]; then + printf "$(grep -v "^${base_bindir}/bash$" $D${sysconfdir}/shells)\n" > $D${sysconfdir}/shells + fi +} + pkg_prerm_${PN}-syslog () { # remove syslog if test "x$D" = "x"; then diff --git a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/0001-ip-fix-an-improper-optimization-req.r.rtm_scope-may-.patch b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/0001-ip-fix-an-improper-optimization-req.r.rtm_scope-may-.patch new file mode 100644 index 000000000..812a50748 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/0001-ip-fix-an-improper-optimization-req.r.rtm_scope-may-.patch @@ -0,0 +1,33 @@ +From 34ecc3b7aefdd6c31e8691bd5485037bbabedbd4 Mon Sep 17 00:00:00 2001 +From: Denys Vlasenko +Date: Sun, 14 Aug 2016 01:30:34 +0200 +Subject: [PATCH] ip: fix an improper optimization: req.r.rtm_scope may be + nonzero here + +Signed-off-by: Denys Vlasenko +--- +Upstream-Status: Backport +Signed-off-by: André Draszik + + networking/libiproute/iproute.c | 3 +-- + 1 file changed, 1 insertion(+), 2 deletions(-) + +diff --git a/networking/libiproute/iproute.c b/networking/libiproute/iproute.c +index e674e9a0d..48dc6e3d9 100644 +--- a/networking/libiproute/iproute.c ++++ b/networking/libiproute/iproute.c +@@ -362,10 +362,9 @@ IF_FEATURE_IP_RULE(ARG_table,) + req.r.rtm_scope = RT_SCOPE_NOWHERE; + + if (cmd != RTM_DELROUTE) { ++ req.r.rtm_scope = RT_SCOPE_UNIVERSE; + if (RTPROT_BOOT != 0) + req.r.rtm_protocol = RTPROT_BOOT; +- if (RT_SCOPE_UNIVERSE != 0) +- req.r.rtm_scope = RT_SCOPE_UNIVERSE; + if (RTN_UNICAST != 0) + req.r.rtm_type = RTN_UNICAST; + } +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/0001-iproute-support-scope-.-Closes-8561.patch b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/0001-iproute-support-scope-.-Closes-8561.patch new file mode 100644 index 000000000..66bc76e65 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/0001-iproute-support-scope-.-Closes-8561.patch @@ -0,0 +1,122 @@ +From ce4bc1ed048233e89ee4cb95830bf6f01d523d1e Mon Sep 17 00:00:00 2001 +From: Denys Vlasenko +Date: Wed, 30 Dec 2015 17:32:51 +0100 +Subject: [PATCH] iproute: support "scope". 
Closes 8561 + +function old new delta +iproute_modify 1051 1120 +69 + +Signed-off-by: Denys Vlasenko + +Upstream-Status: Backport +Modified patch to build against busybox 1.24.1: +- s/invarg_1_to_2/invarg +Signed-off-by: André Draszik +--- + networking/libiproute/iproute.c | 52 ++++++++++++++++++++++++++--------------- + 1 file changed, 33 insertions(+), 19 deletions(-) + +diff --git a/networking/libiproute/iproute.c b/networking/libiproute/iproute.c +index d232ee6fd..82827488f 100644 +--- a/networking/libiproute/iproute.c ++++ b/networking/libiproute/iproute.c +@@ -313,12 +313,13 @@ static int FAST_FUNC print_route(const struct sockaddr_nl *who UNUSED_PARAM, + static int iproute_modify(int cmd, unsigned flags, char **argv) + { + static const char keywords[] ALIGN1 = +- "src\0""via\0""mtu\0""lock\0""protocol\0"IF_FEATURE_IP_RULE("table\0") ++ "src\0""via\0""mtu\0""lock\0""scope\0""protocol\0"IF_FEATURE_IP_RULE("table\0") + "dev\0""oif\0""to\0""metric\0""onlink\0"; + enum { + ARG_src, + ARG_via, + ARG_mtu, PARM_lock, ++ ARG_scope, + ARG_protocol, + IF_FEATURE_IP_RULE(ARG_table,) + ARG_dev, +@@ -344,6 +345,7 @@ IF_FEATURE_IP_RULE(ARG_table,) + unsigned mxlock = 0; + char *d = NULL; + smalluint ok = 0; ++ smalluint scope_ok = 0; + int arg; + + memset(&req, 0, sizeof(req)); +@@ -352,15 +354,18 @@ IF_FEATURE_IP_RULE(ARG_table,) + req.n.nlmsg_flags = NLM_F_REQUEST | flags; + req.n.nlmsg_type = cmd; + req.r.rtm_family = preferred_family; +- if (RT_TABLE_MAIN) /* if it is zero, memset already did it */ ++ if (RT_TABLE_MAIN != 0) /* if it is zero, memset already did it */ + req.r.rtm_table = RT_TABLE_MAIN; +- if (RT_SCOPE_NOWHERE) ++ if (RT_SCOPE_NOWHERE != 0) + req.r.rtm_scope = RT_SCOPE_NOWHERE; + + if (cmd != RTM_DELROUTE) { +- req.r.rtm_protocol = RTPROT_BOOT; +- req.r.rtm_scope = RT_SCOPE_UNIVERSE; +- req.r.rtm_type = RTN_UNICAST; ++ if (RTPROT_BOOT != 0) ++ req.r.rtm_protocol = RTPROT_BOOT; ++ if (RT_SCOPE_UNIVERSE != 0) ++ req.r.rtm_scope = RT_SCOPE_UNIVERSE; ++ if (RTN_UNICAST != 0) ++ req.r.rtm_type = RTN_UNICAST; + } + + mxrta->rta_type = RTA_METRICS; +@@ -393,6 +398,13 @@ IF_FEATURE_IP_RULE(ARG_table,) + } + mtu = get_unsigned(*argv, "mtu"); + rta_addattr32(mxrta, sizeof(mxbuf), RTAX_MTU, mtu); ++ } else if (arg == ARG_scope) { ++ uint32_t scope; ++ NEXT_ARG(); ++ if (rtnl_rtscope_a2n(&scope, *argv)) ++ invarg(*argv, "scope"); ++ req.r.rtm_scope = scope; ++ scope_ok = 1; + } else if (arg == ARG_protocol) { + uint32_t prot; + NEXT_ARG(); +@@ -469,20 +481,22 @@ IF_FEATURE_IP_RULE(ARG_table,) + addattr_l(&req.n, sizeof(req), RTA_METRICS, RTA_DATA(mxrta), RTA_PAYLOAD(mxrta)); + } + +- if (req.r.rtm_type == RTN_LOCAL || req.r.rtm_type == RTN_NAT) +- req.r.rtm_scope = RT_SCOPE_HOST; +- else +- if (req.r.rtm_type == RTN_BROADCAST +- || req.r.rtm_type == RTN_MULTICAST +- || req.r.rtm_type == RTN_ANYCAST +- ) { +- req.r.rtm_scope = RT_SCOPE_LINK; +- } +- else if (req.r.rtm_type == RTN_UNICAST || req.r.rtm_type == RTN_UNSPEC) { +- if (cmd == RTM_DELROUTE) +- req.r.rtm_scope = RT_SCOPE_NOWHERE; +- else if (!(ok & gw_ok)) ++ if (!scope_ok) { ++ if (req.r.rtm_type == RTN_LOCAL || req.r.rtm_type == RTN_NAT) ++ req.r.rtm_scope = RT_SCOPE_HOST; ++ else ++ if (req.r.rtm_type == RTN_BROADCAST ++ || req.r.rtm_type == RTN_MULTICAST ++ || req.r.rtm_type == RTN_ANYCAST ++ ) { + req.r.rtm_scope = RT_SCOPE_LINK; ++ } ++ else if (req.r.rtm_type == RTN_UNICAST || req.r.rtm_type == RTN_UNSPEC) { ++ if (cmd == RTM_DELROUTE) ++ req.r.rtm_scope = RT_SCOPE_NOWHERE; ++ else if (!(ok & gw_ok)) ++ req.r.rtm_scope = 
RT_SCOPE_LINK; ++ } + } + + if (req.r.rtm_family == AF_UNSPEC) { +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/CVE-2016-2147_2.patch b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/CVE-2016-2147_2.patch index 1473d4603..b8349c04a 100644 --- a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/CVE-2016-2147_2.patch +++ b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/CVE-2016-2147_2.patch @@ -4,7 +4,7 @@ Date: Fri, 11 Mar 2016 00:26:58 +0100 Subject: [PATCH] udhcpc: fix a warning in debug code Signed-off-by: Denys Vlasenko -Upsteam-Status: Backport +Upstream-Status: Backport CVE: CVE-2016-2147 regression fix https://git.busybox.net/busybox/commit/?id=1b7c17 diff --git a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/busybox-tar-add-IF_FEATURE_-checks.patch b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/busybox-tar-add-IF_FEATURE_-checks.patch new file mode 100644 index 000000000..0c3c9c0f4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/busybox-tar-add-IF_FEATURE_-checks.patch @@ -0,0 +1,70 @@ +From f94412f6bb49136694c5478d0aecb19118d1b08d Mon Sep 17 00:00:00 2001 +From: Ming Liu +Date: Wed, 31 May 2017 11:48:09 +0200 +Subject: [PATCH] tar: add IF_FEATURE_* checks + +A following linking error was observed: +| ========== +| archival/lib.a(tar.o): In function `tar_main': +| archival/tar.c:1168: undefined reference to `unpack_Z_stream' +| archival/tar.c:1168: undefined reference to `unpack_Z_stream' +| ld: busybox_unstripped: hidden symbol `unpack_Z_stream' isn't defined +| ld: final link failed: Bad value + +this happened with clang compiler, with the following configs: +| CONFIG_TAR=y +| # CONFIG_FEATURE_SEAMLESS_Z is not set + +which can be fixed by adding IF_FEATURE_* checks in. 
+ +Upstream-Status: Pending [ Sent to busybox upstream on 2017-06-02 ] + +Signed-off-by: Ming Liu +--- + archival/tar.c | 25 +++++++++++++++---------- + 1 file changed, 15 insertions(+), 10 deletions(-) + +diff --git a/archival/tar.c b/archival/tar.c +index b70e00a..7598b71 100644 +--- a/archival/tar.c ++++ b/archival/tar.c +@@ -1216,21 +1216,26 @@ int tar_main(int argc UNUSED_PARAM, char **argv) + USE_FOR_MMU(IF_DESKTOP(long long) int FAST_FUNC (*xformer)(transformer_state_t *xstate);) + USE_FOR_NOMMU(const char *xformer_prog;) + +- if (opt & OPT_COMPRESS) +- USE_FOR_MMU(xformer = unpack_Z_stream;) ++ if (opt & OPT_COMPRESS) { ++ USE_FOR_MMU(IF_FEATURE_SEAMLESS_Z(xformer = unpack_Z_stream;)) + USE_FOR_NOMMU(xformer_prog = "uncompress";) +- if (opt & OPT_GZIP) +- USE_FOR_MMU(xformer = unpack_gz_stream;) ++ } ++ if (opt & OPT_GZIP) { ++ USE_FOR_MMU(IF_FEATURE_SEAMLESS_GZ(xformer = unpack_gz_stream;)) + USE_FOR_NOMMU(xformer_prog = "gunzip";) +- if (opt & OPT_BZIP2) +- USE_FOR_MMU(xformer = unpack_bz2_stream;) ++ } ++ if (opt & OPT_BZIP2) { ++ USE_FOR_MMU(IF_FEATURE_SEAMLESS_BZ2(xformer = unpack_bz2_stream;)) + USE_FOR_NOMMU(xformer_prog = "bunzip2";) +- if (opt & OPT_LZMA) +- USE_FOR_MMU(xformer = unpack_lzma_stream;) ++ } ++ if (opt & OPT_LZMA) { ++ USE_FOR_MMU(IF_FEATURE_SEAMLESS_LZMA(xformer = unpack_lzma_stream;)) + USE_FOR_NOMMU(xformer_prog = "unlzma";) +- if (opt & OPT_XZ) +- USE_FOR_MMU(xformer = unpack_xz_stream;) ++ } ++ if (opt & OPT_XZ) { ++ USE_FOR_MMU(IF_FEATURE_SEAMLESS_XZ(xformer = unpack_xz_stream;)) + USE_FOR_NOMMU(xformer_prog = "unxz";) ++ } + + fork_transformer_with_sig(tar_handle->src_fd, xformer, xformer_prog); + /* Can't lseek over pipes */ +-- +2.7.4 + diff --git a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/defconfig b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/defconfig index c0459d596..8803b52ac 100644 --- a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/defconfig +++ b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox/defconfig @@ -279,7 +279,7 @@ CONFIG_SEQ=y # CONFIG_SHA3SUM is not set CONFIG_SLEEP=y CONFIG_FEATURE_FANCY_SLEEP=y -# CONFIG_FEATURE_FLOAT_SLEEP is not set +CONFIG_FEATURE_FLOAT_SLEEP=y CONFIG_SORT=y CONFIG_FEATURE_SORT_BIG=y # CONFIG_SPLIT is not set @@ -582,7 +582,7 @@ CONFIG_FEATURE_FBSET_READMODE=y # CONFIG_FDFLUSH is not set # CONFIG_FDFORMAT is not set CONFIG_FDISK=y -# CONFIG_FDISK_SUPPORT_LARGE_DISKS is not set +CONFIG_FDISK_SUPPORT_LARGE_DISKS=y CONFIG_FEATURE_FDISK_WRITABLE=y # CONFIG_FEATURE_AIX_LABEL is not set # CONFIG_FEATURE_SGI_LABEL is not set @@ -981,6 +981,10 @@ CONFIG_SV_DEFAULT_SERVICE_DIR="" # CONFIG_ENVUIDGID is not set # CONFIG_ENVDIR is not set # CONFIG_SOFTLIMIT is not set + +# +# SELinux utilities +# # CONFIG_CHCON is not set # CONFIG_FEATURE_CHCON_LONG_OPTIONS is not set # CONFIG_GETENFORCE is not set @@ -1010,7 +1014,7 @@ CONFIG_ASH_BUILTIN_ECHO=y CONFIG_ASH_BUILTIN_PRINTF=y CONFIG_ASH_BUILTIN_TEST=y CONFIG_ASH_HELP=y -# CONFIG_ASH_CMDCMD is not set +CONFIG_ASH_CMDCMD=y # CONFIG_ASH_MAIL is not set CONFIG_ASH_OPTIMIZE_FOR_SIZE=y # CONFIG_ASH_RANDOM_SUPPORT is not set diff --git a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox_1.24.1.bb b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox_1.24.1.bb index 400cdfe28..6ccbffd97 100644 --- a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox_1.24.1.bb +++ b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox_1.24.1.bb @@ -38,8 +38,8 @@ SRC_URI = 
"http://www.busybox.net/downloads/busybox-${PV}.tar.bz2;name=tarball \ file://sha256sum.cfg \ file://getopts.cfg \ file://resize.cfg \ - ${@["", "file://init.cfg"][(d.getVar('VIRTUAL-RUNTIME_init_manager', True) == 'busybox')]} \ - ${@["", "file://mdev.cfg"][(d.getVar('VIRTUAL-RUNTIME_dev_manager', True) == 'busybox-mdev')]} \ + ${@["", "file://init.cfg"][(d.getVar('VIRTUAL-RUNTIME_init_manager') == 'busybox')]} \ + ${@["", "file://mdev.cfg"][(d.getVar('VIRTUAL-RUNTIME_dev_manager') == 'busybox-mdev')]} \ file://inittab \ file://rcS \ file://rcK \ @@ -57,6 +57,9 @@ SRC_URI = "http://www.busybox.net/downloads/busybox-${PV}.tar.bz2;name=tarball \ file://0001-libiproute-handle-table-ids-larger-than-255.patch \ file://ifupdown-pass-interface-device-name-for-ipv6-route-c.patch \ file://BUG9071_buffer_overflow_arp.patch \ + file://busybox-tar-add-IF_FEATURE_-checks.patch \ + file://0001-iproute-support-scope-.-Closes-8561.patch \ + file://0001-ip-fix-an-improper-optimization-req.r.rtm_scope-may-.patch \ " SRC_URI_append_libc-musl = " file://musl.cfg " diff --git a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox_git.bb b/import-layers/yocto-poky/meta/recipes-core/busybox/busybox_git.bb deleted file mode 100644 index c29b89434..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/busybox/busybox_git.bb +++ /dev/null @@ -1,52 +0,0 @@ -require busybox.inc - -SRCREV = "1b7c17391de66502dd7a97c866e0a33681edbb1f" -# Lookout for PV bump too when SRCREV is changed -PV = "1.25.0+git${SRCPV}" - -S = "${WORKDIR}/git" - -SRC_URI = "git://busybox.net/busybox.git \ - file://busybox-udhcpc-no_deconfig.patch \ - file://find-touchscreen.sh \ - file://busybox-cron \ - file://busybox-httpd \ - file://busybox-udhcpd \ - file://default.script \ - file://simple.script \ - file://hwclock.sh \ - file://mount.busybox \ - file://syslog \ - file://syslog-startup.conf \ - file://syslog.conf \ - file://busybox-syslog.default \ - file://mdev \ - file://mdev.conf \ - file://mdev-mount.sh \ - file://umount.busybox \ - file://defconfig \ - file://busybox-syslog.service.in \ - file://busybox-klogd.service.in \ - file://fail_on_no_media.patch \ - file://run-ptest \ - file://inetd.conf \ - file://inetd \ - file://login-utilities.cfg \ - file://recognize_connmand.patch \ - file://busybox-cross-menuconfig.patch \ - file://0001-Use-CC-when-linking-instead-of-LD-and-use-CFLAGS-and.patch \ - file://mount-via-label.cfg \ - file://sha1sum.cfg \ - file://sha256sum.cfg \ - file://getopts.cfg \ - file://resize.cfg \ - ${@["", "file://init.cfg"][(d.getVar('VIRTUAL-RUNTIME_init_manager', True) == 'busybox')]} \ - ${@["", "file://mdev.cfg"][(d.getVar('VIRTUAL-RUNTIME_dev_manager', True) == 'busybox-mdev')]} \ - file://inittab \ - file://rcS \ - file://rcK \ - file://runlevel \ -" -SRC_URI_append_libc-musl = " file://musl.cfg " - -DEFAULT_PREFERENCE = "-1" diff --git a/import-layers/yocto-poky/meta/recipes-core/console-tools/console-tools-0.3.2/0001-Fix-format-security-compilation-error.patch b/import-layers/yocto-poky/meta/recipes-core/console-tools/console-tools-0.3.2/0001-Fix-format-security-compilation-error.patch new file mode 100644 index 000000000..6e72ec665 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/console-tools/console-tools-0.3.2/0001-Fix-format-security-compilation-error.patch @@ -0,0 +1,57 @@ +Subject: Fix 'format-security' compilation error. 
+ +Upstream-Status: Pending + +Signed-off-by: Chen Qi +--- + kbdtools/kbd_mode.c | 6 +++--- + kbdtools/setmetamode.c | 2 +- + 2 files changed, 4 insertions(+), 4 deletions(-) + +diff --git a/kbdtools/kbd_mode.c b/kbdtools/kbd_mode.c +index 02dca38..8dbcd39 100644 +--- a/kbdtools/kbd_mode.c ++++ b/kbdtools/kbd_mode.c +@@ -120,7 +120,7 @@ int main(int argc, char *argv[]) + /* report mode */ + if (ioctl(fd, KDGKBMODE, &mode)) + { +- fprintf(stderr, progname); ++ fprintf(stderr, "%s", progname); + perror(_(": error reading keyboard mode\n")); + exit(1); + } +@@ -151,7 +151,7 @@ int main(int argc, char *argv[]) + kbd_rep.period = rate; + if (ioctl(fd, KDKBDREP, &kbd_rep)) + { +- fprintf(stderr, progname); ++ fprintf(stderr, "%s", progname); + perror(_(": error setting keyboard repeat mode\n")); + exit(1); + } +@@ -160,7 +160,7 @@ int main(int argc, char *argv[]) + + if (ioctl(fd, KDSKBMODE, mode)) + { +- fprintf(stderr, progname); ++ fprintf(stderr, "%s", progname); + perror(_(": error setting keyboard mode\n")); + exit(1); + } +diff --git a/kbdtools/setmetamode.c b/kbdtools/setmetamode.c +index 5bed945..0ba7676 100644 +--- a/kbdtools/setmetamode.c ++++ b/kbdtools/setmetamode.c +@@ -42,7 +42,7 @@ void report(int meta) + default: + s = N_("Strange mode for Meta key?\n"); + } +- printf(_(s)); ++ printf("%s", _(s)); + } + + struct meta +-- +2.8.3 + diff --git a/import-layers/yocto-poky/meta/recipes-core/console-tools/console-tools_0.3.2.bb b/import-layers/yocto-poky/meta/recipes-core/console-tools/console-tools_0.3.2.bb index 1db8414cb..286c2a475 100644 --- a/import-layers/yocto-poky/meta/recipes-core/console-tools/console-tools_0.3.2.bb +++ b/import-layers/yocto-poky/meta/recipes-core/console-tools/console-tools_0.3.2.bb @@ -16,6 +16,7 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/lct/console-tools-${PV}.tar.gz \ file://no-dep-on-libfl.patch \ file://0001-kbdtools-Include-sys-types.h-for-u_char-and-u_short-.patch \ file://0001-Cover-the-else-with-__GLIBC__.patch \ + file://0001-Fix-format-security-compilation-error.patch \ file://lcmessage.m4 \ file://Makevars" diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-6.9-cp-i-u.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-6.9-cp-i-u.patch deleted file mode 100644 index 5452b46bb..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-6.9-cp-i-u.patch +++ /dev/null @@ -1,120 +0,0 @@ -Upstream-Status: Inappropriate [legacy version] - -This patch was imported from the Fedora Core 8 coreutils-6.9-9 package. - -The package is stated as being Licensed as GPLv2+. - -Signed-off-by: Mark Hatle - ----- - -When "cp -i --update old new" would do nothing because "new" is -newer than "old", cp would nonetheless prompt for whether it is -ok to overwrite "new". Then, regardless of the response (because -of the --update option), cp would do nothing. - -The following patch eliminates the unnecessary prompt in that case. - -diff --git a/src/copy.c b/src/copy.c -index b7bf73b..0e549d2 100644 ---- a/src/copy.c -+++ b/src/copy.c -@@ -1210,6 +1210,30 @@ copy_internal (char const *src_name, char const *dst_name, - return false; - } - -+ if (!S_ISDIR (src_mode) && x->update) -+ { -+ /* When preserving time stamps (but not moving within a file -+ system), don't worry if the destination time stamp is -+ less than the source merely because of time stamp -+ truncation. */ -+ int options = ((x->preserve_timestamps -+ && ! 
(x->move_mode -+ && dst_sb.st_dev == src_sb.st_dev)) -+ ? UTIMECMP_TRUNCATE_SOURCE -+ : 0); -+ -+ if (0 <= utimecmp (dst_name, &dst_sb, &src_sb, options)) -+ { -+ /* We're using --update and the destination is not older -+ than the source, so do not copy or move. Pretend the -+ rename succeeded, so the caller (if it's mv) doesn't -+ end up removing the source file. */ -+ if (rename_succeeded) -+ *rename_succeeded = true; -+ return true; -+ } -+ } -+ - /* When there is an existing destination file, we may end up - returning early, and hence not copying/moving the file. - This may be due to an interactive `negative' reply to the -@@ -1302,30 +1326,6 @@ copy_internal (char const *src_name, char const *dst_name, - return false; - } - } -- -- if (x->update) -- { -- /* When preserving time stamps (but not moving within a file -- system), don't worry if the destination time stamp is -- less than the source merely because of time stamp -- truncation. */ -- int options = ((x->preserve_timestamps -- && ! (x->move_mode -- && dst_sb.st_dev == src_sb.st_dev)) -- ? UTIMECMP_TRUNCATE_SOURCE -- : 0); -- -- if (0 <= utimecmp (dst_name, &dst_sb, &src_sb, options)) -- { -- /* We're using --update and the destination is not older -- than the source, so do not copy or move. Pretend the -- rename succeeded, so the caller (if it's mv) doesn't -- end up removing the source file. */ -- if (rename_succeeded) -- *rename_succeeded = true; -- return true; -- } -- } - } - - if (x->move_mode) -diff --git a/tests/mv/update b/tests/mv/update -index 0c06024..6c3d149 100755 ---- a/tests/mv/update -+++ b/tests/mv/update -@@ -1,7 +1,7 @@ - #!/bin/sh - # make sure --update works as advertised - --# Copyright (C) 2001, 2004, 2006 Free Software Foundation, Inc. -+# Copyright (C) 2001, 2004, 2006-2007 Free Software Foundation, Inc. - - # This program is free software; you can redistribute it and/or modify - # it under the terms of the GNU General Public License as published by -@@ -46,11 +46,16 @@ fi - - fail=0 - --for cp_or_mv in cp mv; do -- # This is a no-op. -- $cp_or_mv --update old new || fail=1 -- case "`cat new`" in new) ;; *) fail=1 ;; esac -- case "`cat old`" in old) ;; *) fail=1 ;; esac -+for interactive in '' -i; do -+ for cp_or_mv in cp mv; do -+ # This is a no-op, with no prompt. -+ # With coreutils-6.9 and earlier, using --update with -i would -+ # mistakenly elicit a prompt. -+ $cp_or_mv $interactive --update old new < /dev/null > out 2>&1 || fail=1 -+ test -s out && fail=1 -+ case "`cat new`" in new) ;; *) fail=1 ;; esac -+ case "`cat old`" in old) ;; *) fail=1 ;; esac -+ done - done - - # This will actually perform the rename. 
--- -1.5.3.rc1.16.g9d6f diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-fix-install.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-fix-install.patch deleted file mode 100644 index 88f61fa10..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-fix-install.patch +++ /dev/null @@ -1,101 +0,0 @@ -Upstream-Status: Inappropriate [legacy version] - -The install command doesn't over write the dangling symlink, for -example: - -$ install fileA /tmp/fileA - -If /tmp/fileA is a dangling symlink, there would be an error: - -install: cannot create regular file '/tmp/fileA': File exists - -This is because of the following code in copy.c: - - if (!new_dst) - { - if (XSTAT (x, dst_name, &dst_sb) != 0) - { - if (errno != ENOENT) - { - error (0, errno, _("cannot stat %s"), quote (dst_name)); - return false; - } - else - { - new_dst = true; - } - } - -XSTAT() use stat() for dst_name(the dangling symlink /tmp/fileA) when -install.c invokes it, and stat will set errno to ENOENT, and then -new_dst will be set to true which means that /tmp/fileA doesn't exist, -then we will create /tmp/fileA without remove it first, so the error -comes. - -This is fixed in a way which adds the member cmd_install in -struct cp_options to make sure my change only affected to the install -command and use lstat to fix the problem. - -Signed-off-by: Robert Yang -Signed-off-by: Mark Hatle - ---- - src/copy.c | 10 +++++++++- - src/copy.h | 3 +++ - src/install.c | 1 + - 3 files changed, 13 insertions(+), 1 deletions(-) - -diff --git a/src/copy.c b/src/copy.c ---- a/src/copy.c -+++ b/src/copy.c -@@ -1029,6 +1029,7 @@ copy_internal (char const *src_name, char const *dst_name, - bool delayed_ok; - bool copied_as_regular = false; - bool preserve_metadata; -+ int dst_stat_result; - - if (x->move_mode && rename_succeeded) - *rename_succeeded = false; -@@ -1069,7 +1070,14 @@ copy_internal (char const *src_name, char const *dst_name, - - if (!new_dst) - { -- if (XSTAT (x, dst_name, &dst_sb) != 0) -+ if ( x->cmd_install && ( x->backup_type == no_backups)) -+ dst_stat_result = lstat (dst_name, &dst_sb); -+ else -+ { -+ dst_stat_result = XSTAT (x, dst_name, &dst_sb); -+ } -+ -+ if (dst_stat_result != 0) - { - if (errno != ENOENT) - { -diff --git a/src/copy.h b/src/copy.h ---- a/src/copy.h -+++ b/src/copy.h -@@ -114,6 +114,9 @@ struct cp_options - If that fails, then resort to copying. */ - bool move_mode; - -+ /* For the install command */ -+ bool cmd_install; -+ - /* Whether this process has appropriate privileges to chown a file - whose owner is not the effective user ID. 
*/ - bool chown_privileges; -diff --git a/src/install.c b/src/install.c ---- a/src/install.c -+++ b/src/install.c -@@ -149,6 +149,7 @@ cp_option_init (struct cp_options *x) - x->hard_link = false; - x->interactive = I_UNSPECIFIED; - x->move_mode = false; -+ x->cmd_install = true; - x->chown_privileges = chown_privileges (); - x->one_file_system = false; - x->preserve_ownership = false; --- -1.7.0.1 - diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-fix-texinfo.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-fix-texinfo.patch deleted file mode 100644 index 3ae5a2fae..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-fix-texinfo.patch +++ /dev/null @@ -1,375 +0,0 @@ -From 170be4023bbf9e9698a709e03265945588ac8e01 Mon Sep 17 00:00:00 2001 -From: Robert Yang -Date: Tue, 26 Nov 2013 00:21:50 +0800 -Subject: [PATCH] doc/coreutils.texi: Use '@item' instead of '@itemx' - -Use '@item' instead of '@itemx' in several places, as Texinfo 5 refuses -to process an '@itemx' that is not preceded by an '@item'. Ensure that -node extended names in menus and sectioning are consistent, and that -ordering and presence of nodes in menus and in the actual text are -consistent as well. - -Upstream-Status: Backport [From: coreutils.7620.n7.nabble.com, bug#11828] - -Signed-off-by: Robert Yang ---- - doc/coreutils.texi | 82 +++++++++++++++++++++++++++--------------------------- - 1 file changed, 41 insertions(+), 41 deletions(-) - -diff --git a/doc/coreutils.texi b/doc/coreutils.texi -index 588147f..2dae3fe 100644 ---- a/doc/coreutils.texi -+++ b/doc/coreutils.texi -@@ -555,7 +555,7 @@ symbolic link to a directory. @xref{Target directory}. - @end macro - - @macro optSi --@itemx --si -+@item --si - @opindex --si - @cindex SI output - Append an SI-style abbreviation to each size, such as @samp{M} for -@@ -578,7 +578,7 @@ Use the @option{--si} option if you prefer powers of 1000. - @end macro - - @macro optStripTrailingSlashes --@itemx @w{@kbd{--strip-trailing-slashes}} -+@item @w{@kbd{--strip-trailing-slashes}} - @opindex --strip-trailing-slashes - @cindex stripping trailing slashes - Remove any trailing slashes from each @var{source} argument. -@@ -2496,7 +2496,7 @@ by 1048576. - However, if @var{n} starts with a @samp{-}, - print all but the last @var{n} bytes of each file. - --@itemx -n @var{n} -+@item -n @var{n} - @itemx --lines=@var{n} - @opindex -n - @opindex --lines -@@ -2633,7 +2633,7 @@ This option is the same as @option{--follow=name --retry}. That is, tail - will attempt to reopen a file when it is removed. Should this fail, tail - will keep trying until it becomes accessible again. - --@itemx --retry -+@item --retry - @opindex --retry - This option is useful mainly when following by name (i.e., with - @option{--follow=name}). -@@ -2641,7 +2641,7 @@ Without this option, when tail encounters a file that doesn't - exist or is otherwise inaccessible, it reports that fact and - never checks it again. - --@itemx --sleep-interval=@var{number} -+@item --sleep-interval=@var{number} - @opindex --sleep-interval - Change the number of seconds to wait between iterations (the default is 1.0). - During one iteration, every specified file is checked to see if it has -@@ -2651,7 +2651,7 @@ Historical implementations of @command{tail} have required that - an arbitrary floating point number (using a period before any - fractional digits). 
- --@itemx --pid=@var{pid} -+@item --pid=@var{pid} - @opindex --pid - When following by name or by descriptor, you may specify the process ID, - @var{pid}, of the sole writer of all @var{file} arguments. Then, shortly -@@ -2674,7 +2674,7 @@ terminate until long after the real writer has terminated. - Note that @option{--pid} cannot be supported on some systems; @command{tail} - will print a warning if this is the case. - --@itemx --max-unchanged-stats=@var{n} -+@item --max-unchanged-stats=@var{n} - @opindex --max-unchanged-stats - When tailing a file by name, if there have been @var{n} (default - n=@value{DEFAULT_MAX_N_UNCHANGED_STATS_BETWEEN_OPENS}) consecutive -@@ -2686,7 +2686,7 @@ number of seconds between when tail prints the last pre-rotation lines - and when it prints the lines that have accumulated in the new log file. - This option is meaningful only when following by name. - --@itemx -n @var{n} -+@item -n @var{n} - @itemx --lines=@var{n} - @opindex -n - @opindex --lines -@@ -2817,7 +2817,7 @@ option. - @opindex --numeric-suffixes - Use digits in suffixes rather than lower-case letters. - --@itemx --verbose -+@item --verbose - @opindex --verbose - Write a diagnostic to standard error just before each output file is opened. - -@@ -3055,7 +3055,7 @@ Print only the newline counts. - @opindex --max-line-length - Print only the maximum line lengths. - --@itemx --files0-from=@var{FILE} -+@item --files0-from=@var{FILE} - @opindex --files0-from=@var{FILE} - @cindex including files from @command{du} - Rather than processing files named on the command line, process those -@@ -3250,7 +3250,7 @@ an MD5 checksum inconsistent with the associated file, or if no valid - line is found, @command{md5sum} exits with nonzero status. Otherwise, - it exits successfully. - --@itemx --status -+@item --status - @opindex --status - @cindex verifying MD5 checksums - This option is useful only when verifying checksums. -@@ -5837,7 +5837,7 @@ command line unless the @option{--dereference-command-line} (@option{-H}), - If a command line argument specifies a symbolic link, show information - for the file the link references rather than for the link itself. - --@itemx --dereference-command-line-symlink-to-dir -+@item --dereference-command-line-symlink-to-dir - @opindex --dereference-command-line-symlink-to-dir - @cindex symbolic links, dereferencing - Do not dereference symbolic links, with one exception: -@@ -7015,15 +7015,15 @@ If specified, the @var{attribute_list} must be a comma-separated list - of one or more of the following strings: - - @table @samp --@itemx mode -+@item mode - Preserve the file mode bits and access control lists. --@itemx ownership -+@item ownership - Preserve the owner and group. On most modern systems, - only users with appropriate privileges may change the owner of a file, - and ordinary users - may preserve the group ownership of a file only if they happen to be - a member of the desired group. --@itemx timestamps -+@item timestamps - Preserve the times of last access and last modification, when possible. - In general, it is not possible to preserve these attributes - when the affected file is a symbolic link. -@@ -7031,12 +7031,12 @@ However, FreeBSD now provides the @code{lutimes} function, which makes - it possibile even for symbolic links. However, this implementation does - not yet take advantage of that. - @c FIXME: once we provide lutimes support, update the above. --@itemx links -+@item links - Preserve in the destination files - any links between corresponding source files. 
- @c Give examples illustrating how hard links are preserved. - @c Also, show how soft links map to hard links with -L and -H. --@itemx all -+@item all - Preserve all file attributes. - Equivalent to specifying all of the above. - @end table -@@ -7049,12 +7049,12 @@ mode bits of the corresponding source file, minus the bits set in the - umask and minus the set-user-ID and set-group-ID bits. - @xref{File permissions}. - --@itemx @w{@kbd{--no-preserve}=@var{attribute_list}} -+@item @w{@kbd{--no-preserve}=@var{attribute_list}} - @cindex file information, preserving - Do not preserve the specified attributes. The @var{attribute_list} - has the same form as for @option{--preserve}. - --@itemx --parents -+@item --parents - @opindex --parents - @cindex parent directories and @command{cp} - Form the name of each destination file by appending to the target -@@ -7070,7 +7070,7 @@ cp --parents a/b/c existing_dir - copies the file @file{a/b/c} to @file{existing_dir/a/b/c}, creating - any missing intermediate directories. - --@itemx @w{@kbd{--reply}=@var{how}} -+@item @w{@kbd{--reply}=@var{how}} - @opindex --reply - @cindex interactivity - @c FIXME: remove in 2008 -@@ -7742,7 +7742,7 @@ Prompt whether to overwrite each existing destination file, regardless - of its permissions. - If the response is not affirmative, the file is skipped. - --@itemx @w{@kbd{--reply}=@var{how}} -+@item @w{@kbd{--reply}=@var{how}} - @opindex --reply - @cindex interactivity - @c FIXME: remove in 2008 -@@ -7847,7 +7847,7 @@ files are named or if a recursive removal is requested. Ignore any - previous @option{--force} (@option{-f}) option. Equivalent to - @option{--interactive=once}. - --@itemx --interactive [=@var{when}] -+@item --interactive [=@var{when}] - @opindex --interactive - Specify when to issue an interactive prompt. @var{when} may be - omitted, or one of: -@@ -7866,7 +7866,7 @@ removal is requested. Equivalent to @option{-I}. - Specifying @option{--interactive} and no @var{when} is equivalent to - @option{--interactive=always}. - --@itemx --one-file-system -+@item --one-file-system - @opindex --one-file-system - @cindex one file system, restricting @command{rm} to - When removing a hierarchy recursively, skip any directory that is on a -@@ -7884,7 +7884,7 @@ warn about and skip directories on other file systems. - Of course, this will not save your @file{/home} if it and your - chroot happen to be on the same file system. - --@itemx --preserve-root -+@item --preserve-root - @opindex --preserve-root - @cindex root directory, disallow recursive destruction - Fail upon any attempt to remove the root directory, @file{/}, -@@ -7892,7 +7892,7 @@ when used with the @option{--recursive} option. - This is the default behavior. - @xref{Treating / specially}. - --@itemx --no-preserve-root -+@item --no-preserve-root - @opindex --no-preserve-root - @cindex root directory, allow recursive destruction - Do not treat @file{/} specially when removing recursively. -@@ -8874,7 +8874,7 @@ actually changes. - Do not print error messages about files whose ownership cannot be - changed. - --@itemx @w{@kbd{--from}=@var{old-owner}} -+@item @w{@kbd{--from}=@var{old-owner}} - @opindex --from - @cindex symbolic links, changing owner - Change a @var{file}'s ownership only if it has current attributes specified -@@ -8928,14 +8928,14 @@ is a symbolic link. - By default, no diagnostic is issued for symbolic links encountered - during a recursive traversal, but see @option{--verbose}. 
- --@itemx --preserve-root -+@item --preserve-root - @opindex --preserve-root - @cindex root directory, disallow recursive modification - Fail upon any attempt to recursively change the root directory, @file{/}. - Without @option{--recursive}, this option has no effect. - @xref{Treating / specially}. - --@itemx --no-preserve-root -+@item --no-preserve-root - @opindex --no-preserve-root - @cindex root directory, allow recursive modification - Cancel the effect of any preceding @option{--preserve-root} option. -@@ -9054,14 +9054,14 @@ is a symbolic link. - By default, no diagnostic is issued for symbolic links encountered - during a recursive traversal, but see @option{--verbose}. - --@itemx --preserve-root -+@item --preserve-root - @opindex --preserve-root - @cindex root directory, disallow recursive modification - Fail upon any attempt to recursively change the root directory, @file{/}. - Without @option{--recursive}, this option has no effect. - @xref{Treating / specially}. - --@itemx --no-preserve-root -+@item --no-preserve-root - @opindex --no-preserve-root - @cindex root directory, allow recursive modification - Cancel the effect of any preceding @option{--preserve-root} option. -@@ -9175,14 +9175,14 @@ actually changes. - Do not print error messages about files whose permissions cannot be - changed. - --@itemx --preserve-root -+@item --preserve-root - @opindex --preserve-root - @cindex root directory, disallow recursive modification - Fail upon any attempt to recursively change the root directory, @file{/}. - Without @option{--recursive}, this option has no effect. - @xref{Treating / specially}. - --@itemx --no-preserve-root -+@item --no-preserve-root - @opindex --no-preserve-root - @cindex root directory, allow recursive modification - Cancel the effect of any preceding @option{--preserve-root} option. -@@ -9603,7 +9603,7 @@ The program accepts the following options. Also see @ref{Common options}. - @opindex --all - Show counts for all files, not just directories. - --@itemx --apparent-size -+@item --apparent-size - @opindex --apparent-size - Print apparent sizes, rather than disk usage. The apparent size of a - file is the number of bytes reported by @code{wc -c} on regular files, -@@ -9654,7 +9654,7 @@ Does not affect other symbolic links. This is helpful for finding - out the disk usage of directories, such as @file{/usr/tmp}, which - are often symbolic links. - --@itemx --files0-from=@var{FILE} -+@item --files0-from=@var{FILE} - @opindex --files0-from=@var{FILE} - @cindex including files from @command{du} - Rather than processing files named on the command line, process those -@@ -9733,7 +9733,7 @@ Output a null byte at the end of each line, rather than a newline. - This option enables other programs to parse the output of @command{du} - even when that output would contain file names with embedded newlines. - --@itemx --si -+@item --si - @opindex --si - @cindex SI output - Append an SI-style abbreviation to each size, such as @samp{MB} for -@@ -9754,13 +9754,13 @@ Display only a total for each argument. - Report the size of each directory separately, not including the sizes - of subdirectories. - --@itemx --time -+@item --time - @opindex --time - @cindex last modified dates, displaying in @command{du} - Show time of the most recent modification of any file in the directory, - or any of its subdirectories. - --@itemx --time=ctime -+@item --time=ctime - @itemx --time=status - @itemx --time=use - @opindex --time -@@ -9770,7 +9770,7 @@ or any of its subdirectories. 
- Show the most recent status change time (the @samp{ctime} in the inode) of - any file in the directory, instead of the modification time. - --@itemx --time=atime -+@item --time=atime - @itemx --time=access - @opindex --time - @opindex atime@r{, show the most recent} -@@ -9911,7 +9911,7 @@ $ stat --format=%d:%i / /usr - 2057:2 - @end example - --@itemx --printf=@var{format} -+@item --printf=@var{format} - @opindex --printf=@var{format} - @cindex output format - Use @var{format} rather than the default format. -@@ -12240,7 +12240,7 @@ Overrides all other options. - @opindex -s - Ignored; for compatibility with other versions of @command{who}. - --@itemx -u -+@item -u - @opindex -u - @cindex idle time - After the login time, print the number of hours and minutes that the -@@ -12254,7 +12254,7 @@ user has been idle. @samp{.} means the user was active in the last minute. - List only the entries that correspond to processes via which the - system is waiting for a user to login. The user name is always @samp{LOGIN}. - --@itemx --lookup -+@item --lookup - @opindex --lookup - Attempt to canonicalize hostnames found in utmp through a DNS lookup. This - is not the default because it can cause significant delays on systems with --- -1.8.3.1 - diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-i18n.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-i18n.patch deleted file mode 100644 index 653722348..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-i18n.patch +++ /dev/null @@ -1,4051 +0,0 @@ -Upstream-Status: Inappropriate [legacy version] - -This patch was imported from the Fedora Core 8 coreutils-6.9-9 package. - -The package is stated as being Licensed as GPLv2+. - -The comment indicates that the purpose is lin18nux/lsb compliance. - -Signed-off-by: Mark Hatle - ---- /dev/null 2007-03-01 09:16:39.219409909 +0000 -+++ coreutils-6.8+/tests/sort/sort-mb-tests 2007-03-01 15:08:24.000000000 +0000 -@@ -0,0 +1,58 @@ -+#! /bin/sh -+case $# in -+ 0) xx='../../src/sort';; -+ *) xx="$1";; -+esac -+test "$VERBOSE" && echo=echo || echo=: -+$echo testing program: $xx -+errors=0 -+test "$srcdir" || srcdir=. -+test "$VERBOSE" && $xx --version 2> /dev/null -+ -+export LC_ALL=en_US.UTF-8 -+locale -k LC_CTYPE 2>&1 | grep -q charmap.*UTF-8 || exit 77 -+errors=0 -+ -+$xx -t @ -k2 -n mb1.I > mb1.O -+code=$? -+if test $code != 0; then -+ $echo "Test mb1 failed: $xx return code $code differs from expected value 0" 1>&2 -+ errors=`expr $errors + 1` -+else -+ cmp mb1.O $srcdir/mb1.X > /dev/null 2>&1 -+ case $? in -+ 0) if test "$VERBOSE"; then $echo "passed mb1"; fi;; -+ 1) $echo "Test mb1 failed: files mb1.O and $srcdir/mb1.X differ" 1>&2 -+ (diff -c mb1.O $srcdir/mb1.X) 2> /dev/null -+ errors=`expr $errors + 1`;; -+ 2) $echo "Test mb1 may have failed." 1>&2 -+ $echo The command "cmp mb1.O $srcdir/mb1.X" failed. 1>&2 -+ errors=`expr $errors + 1`;; -+ esac -+fi -+ -+$xx -t @ -k4 -n mb2.I > mb2.O -+code=$? -+if test $code != 0; then -+ $echo "Test mb2 failed: $xx return code $code differs from expected value 0" 1>&2 -+ errors=`expr $errors + 1` -+else -+ cmp mb2.O $srcdir/mb2.X > /dev/null 2>&1 -+ case $? in -+ 0) if test "$VERBOSE"; then $echo "passed mb2"; fi;; -+ 1) $echo "Test mb2 failed: files mb2.O and $srcdir/mb2.X differ" 1>&2 -+ (diff -c mb2.O $srcdir/mb2.X) 2> /dev/null -+ errors=`expr $errors + 1`;; -+ 2) $echo "Test mb2 may have failed." 1>&2 -+ $echo The command "cmp mb2.O $srcdir/mb2.X" failed. 
1>&2 -+ errors=`expr $errors + 1`;; -+ esac -+fi -+ -+if test $errors = 0; then -+ $echo Passed all 113 tests. 1>&2 -+else -+ $echo Failed $errors tests. 1>&2 -+fi -+test $errors = 0 || errors=1 -+exit $errors ---- /dev/null 2007-03-01 09:16:39.219409909 +0000 -+++ coreutils-6.8+/tests/sort/mb2.I 2007-03-01 15:08:24.000000000 +0000 -@@ -0,0 +1,4 @@ -+Apple@AA10@@20 -+Banana@AA5@@30 -+Citrus@AA20@@5 -+Cherry@AA30@@10 ---- /dev/null 2007-03-01 09:16:39.219409909 +0000 -+++ coreutils-6.8+/tests/sort/mb2.X 2007-03-01 15:08:24.000000000 +0000 -@@ -0,0 +1,4 @@ -+Citrus@AA20@@5 -+Cherry@AA30@@10 -+Apple@AA10@@20 -+Banana@AA5@@30 ---- /dev/null 2007-03-01 09:16:39.219409909 +0000 -+++ coreutils-6.8+/tests/sort/mb1.I 2007-03-01 15:08:24.000000000 +0000 -@@ -0,0 +1,4 @@ -+Apple@10 -+Banana@5 -+Citrus@20 -+Cherry@30 ---- /dev/null 2007-03-01 09:16:39.219409909 +0000 -+++ coreutils-6.8+/tests/sort/mb1.X 2007-03-01 15:08:24.000000000 +0000 -@@ -0,0 +1,4 @@ -+Banana@5 -+Apple@10 -+Citrus@20 -+Cherry@30 ---- coreutils-6.8+/tests/sort/Makefile.am.i18n 2007-01-24 07:47:37.000000000 +0000 -+++ coreutils-6.8+/tests/sort/Makefile.am 2007-03-01 15:09:59.000000000 +0000 -@@ -66,15 +66,17 @@ - bigfield.O bigfield.E - ##test-files-end - --EXTRA_DIST = Test.pm $x-tests $(explicit) $(maint_gen) --noinst_SCRIPTS = $x-tests -+run_gen += mb1.0 mb2.0 -+ -+EXTRA_DIST = Test.pm $x-tests $(explicit) $(maint_gen) mb1.I mb1.X mb2.I mb2.X -+noinst_SCRIPTS = $x-tests # $x-mb-tests - TESTS_ENVIRONMENT = \ - CU_TEST_NAME=`basename $(abs_srcdir)`,$$tst \ - PATH="$(VG_PATH_PREFIX)`pwd`/../../src$(PATH_SEPARATOR)$$PATH" - - editpl = sed -e 's,@''PERL''@,$(PERL),g' -e 's,@''srcdir''@,$(srcdir),g' - --TESTS = $x-tests -+TESTS = $x-tests $x-mb-tests - - mk_script = $(srcdir)/../mk-script - $(srcdir)/$x-tests: $(mk_script) Test.pm Makefile.am ---- coreutils-6.8+/lib/linebuffer.h.i18n 2005-05-14 07:44:24.000000000 +0100 -+++ coreutils-6.8+/lib/linebuffer.h 2007-03-01 15:08:24.000000000 +0000 -@@ -22,6 +22,11 @@ - - # include - -+/* Get mbstate_t. */ -+# if HAVE_WCHAR_H -+# include -+# endif -+ - /* A `struct linebuffer' holds a line of text. */ - - struct linebuffer -@@ -29,6 +34,9 @@ - size_t size; /* Allocated. */ - size_t length; /* Used. */ - char *buffer; -+# if HAVE_WCHAR_H -+ mbstate_t state; -+# endif - }; - - /* Initialize linebuffer LINEBUFFER for use. */ ---- coreutils-6.8+/src/expand.c.i18n 2007-01-14 15:41:28.000000000 +0000 -+++ coreutils-6.8+/src/expand.c 2007-03-01 15:08:24.000000000 +0000 -@@ -38,11 +38,28 @@ - #include - #include - #include -+ -+/* Get mbstate_t, mbrtowc(), wcwidth(). */ -+#if HAVE_WCHAR_H -+# include -+#endif -+ - #include "system.h" - #include "error.h" - #include "quote.h" - #include "xstrndup.h" - -+/* MB_LEN_MAX is incorrectly defined to be 1 in at least one GCC -+ installation; work around this configuration error. */ -+#if !defined MB_LEN_MAX || MB_LEN_MAX < 2 -+# define MB_LEN_MAX 16 -+#endif -+ -+/* Some systems, like BeOS, have multibyte encodings but lack mbstate_t. */ -+#if HAVE_MBRTOWC && defined mbstate_t -+# define mbrtowc(pwc, s, n, ps) (mbrtowc) (pwc, s, n, 0) -+#endif -+ - /* The official name of this program (e.g., no `g' prefix). */ - #define PROGRAM_NAME "expand" - -@@ -183,6 +200,7 @@ - stops = num_start + len - 1; - } - } -+ - else - { - error (0, 0, _("tab size contains invalid character(s): %s"), -@@ -365,6 +383,142 @@ - } - } - -+#if HAVE_MBRTOWC -+static void -+expand_multibyte (void) -+{ -+ FILE *fp; /* Input strem. 
*/ -+ mbstate_t i_state; /* Current shift state of the input stream. */ -+ mbstate_t i_state_bak; /* Back up the I_STATE. */ -+ mbstate_t o_state; /* Current shift state of the output stream. */ -+ char buf[MB_LEN_MAX + BUFSIZ]; /* For spooling a read byte sequence. */ -+ char *bufpos; /* Next read position of BUF. */ -+ size_t buflen = 0; /* The length of the byte sequence in buf. */ -+ wchar_t wc; /* A gotten wide character. */ -+ size_t mblength; /* The byte size of a multibyte character -+ which shows as same character as WC. */ -+ int tab_index = 0; /* Index in `tab_list' of next tabstop. */ -+ int column = 0; /* Column on screen of the next char. */ -+ int next_tab_column; /* Column the next tab stop is on. */ -+ int convert = 1; /* If nonzero, perform translations. */ -+ -+ fp = next_file ((FILE *) NULL); -+ if (fp == NULL) -+ return; -+ -+ memset (&o_state, '\0', sizeof(mbstate_t)); -+ memset (&i_state, '\0', sizeof(mbstate_t)); -+ -+ for (;;) -+ { -+ /* Refill the buffer BUF. */ -+ if (buflen < MB_LEN_MAX && !feof(fp) && !ferror(fp)) -+ { -+ memmove (buf, bufpos, buflen); -+ buflen += fread (buf + buflen, sizeof(char), BUFSIZ, fp); -+ bufpos = buf; -+ } -+ -+ /* No character is left in BUF. */ -+ if (buflen < 1) -+ { -+ fp = next_file (fp); -+ -+ if (fp == NULL) -+ break; /* No more files. */ -+ else -+ { -+ memset (&i_state, '\0', sizeof(mbstate_t)); -+ continue; -+ } -+ } -+ -+ /* Get a wide character. */ -+ i_state_bak = i_state; -+ mblength = mbrtowc (&wc, bufpos, buflen, &i_state); -+ -+ switch (mblength) -+ { -+ case (size_t)-1: /* illegal byte sequence. */ -+ case (size_t)-2: -+ mblength = 1; -+ i_state = i_state_bak; -+ if (convert) -+ { -+ ++column; -+ if (convert_entire_line == 0) -+ convert = 0; -+ } -+ putchar (*bufpos); -+ break; -+ -+ case 0: /* null. */ -+ mblength = 1; -+ if (convert && convert_entire_line == 0) -+ convert = 0; -+ putchar ('\0'); -+ break; -+ -+ default: -+ if (wc == L'\n') /* LF. */ -+ { -+ tab_index = 0; -+ column = 0; -+ convert = 1; -+ putchar ('\n'); -+ } -+ else if (wc == L'\t' && convert) /* Tab. */ -+ { -+ if (tab_size == 0) -+ { -+ /* Do not let tab_index == first_free_tab; -+ stop when it is 1 less. */ -+ while (tab_index < first_free_tab - 1 -+ && column >= tab_list[tab_index]) -+ tab_index++; -+ next_tab_column = tab_list[tab_index]; -+ if (tab_index < first_free_tab - 1) -+ tab_index++; -+ if (column >= next_tab_column) -+ next_tab_column = column + 1; -+ } -+ else -+ next_tab_column = column + tab_size - column % tab_size; -+ -+ while (column < next_tab_column) -+ { -+ putchar (' '); -+ ++column; -+ } -+ } -+ else /* Others. */ -+ { -+ if (convert) -+ { -+ if (wc == L'\b') -+ { -+ if (column > 0) -+ --column; -+ } -+ else -+ { -+ int width; /* The width of WC. */ -+ -+ width = wcwidth (wc); -+ column += (width > 0) ? width : 0; -+ if (convert_entire_line == 0) -+ convert = 0; -+ } -+ } -+ fwrite (bufpos, sizeof(char), mblength, stdout); -+ } -+ } -+ buflen -= mblength; -+ bufpos += mblength; -+ } -+} -+#endif -+ - int - main (int argc, char **argv) - { -@@ -429,7 +583,12 @@ - - file_list = (optind < argc ? 
&argv[optind] : stdin_argv); - -- expand (); -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ expand_multibyte (); -+ else -+#endif -+ expand (); - - if (have_read_stdin && fclose (stdin) != 0) - error (EXIT_FAILURE, errno, "-"); ---- coreutils-6.8+/src/join.c.i18n 2007-01-14 15:41:28.000000000 +0000 -+++ coreutils-6.8+/src/join.c 2007-03-01 15:08:24.000000000 +0000 -@@ -23,16 +23,30 @@ - #include - #include - -+/* Get mbstate_t, mbrtowc(), mbrtowc(), wcwidth(). */ -+#if HAVE_WCHAR_H -+# include -+#endif -+ -+/* Get iswblank(), towupper. */ -+#if HAVE_WCTYPE_H -+# include -+#endif -+ - #include "system.h" - #include "error.h" - #include "hard-locale.h" - #include "linebuffer.h" --#include "memcasecmp.h" - #include "quote.h" - #include "stdio--.h" - #include "xmemcoll.h" - #include "xstrtol.h" - -+/* Some systems, like BeOS, have multibyte encodings but lack mbstate_t. */ -+#if HAVE_MBRTOWC && defined mbstate_t -+# define mbrtowc(pwc, s, n, ps) (mbrtowc) (pwc, s, n, 0) -+#endif -+ - /* The official name of this program (e.g., no `g' prefix). */ - #define PROGRAM_NAME "join" - -@@ -104,10 +118,12 @@ - /* Last element in `outlist', where a new element can be added. */ - static struct outlist *outlist_end = &outlist_head; - --/* Tab character separating fields. If negative, fields are separated -- by any nonempty string of blanks, otherwise by exactly one -- tab character whose value (when cast to unsigned char) equals TAB. */ --static int tab = -1; -+/* Tab character separating fields. If NULL, fields are separated -+ by any nonempty string of blanks. */ -+static char *tab = NULL; -+ -+/* The number of bytes used for tab. */ -+static size_t tablen = 0; - - static struct option const longopts[] = - { -@@ -190,6 +206,8 @@ - - /* Fill in the `fields' structure in LINE. */ - -+/* Fill in the `fields' structure in LINE. */ -+ - static void - xfields (struct line *line) - { -@@ -199,10 +217,11 @@ - if (ptr == lim) - return; - -- if (0 <= tab) -+ if (tab != NULL) - { -+ unsigned char t = tab[0]; - char *sep; -- for (; (sep = memchr (ptr, tab, lim - ptr)) != NULL; ptr = sep + 1) -+ for (; (sep = memchr (ptr, t, lim - ptr)) != NULL; ptr = sep + 1) - extract_field (line, ptr, sep - ptr); - } - else -@@ -229,6 +248,148 @@ - extract_field (line, ptr, lim - ptr); - } - -+#if HAVE_MBRTOWC -+static void -+xfields_multibyte (struct line *line) -+{ -+ char *ptr = line->buf.buffer; -+ char const *lim = ptr + line->buf.length - 1; -+ wchar_t wc = 0; -+ size_t mblength = 1; -+ mbstate_t state, state_bak; -+ -+ memset (&state, 0, sizeof (mbstate_t)); -+ -+ if (ptr == lim) -+ return; -+ -+ if (tab != NULL) -+ { -+ unsigned char t = tab[0]; -+ char *sep = ptr; -+ for (; ptr < lim; ptr = sep + mblength) -+ { -+ sep = ptr; -+ while (sep < lim) -+ { -+ state_bak = state; -+ mblength = mbrtowc (&wc, sep, lim - sep + 1, &state); -+ -+ if (mblength == (size_t)-1 || mblength == (size_t)-2) -+ { -+ mblength = 1; -+ state = state_bak; -+ } -+ mblength = (mblength < 1) ? 1 : mblength; -+ -+ if (mblength == tablen && !memcmp (sep, tab, mblength)) -+ break; -+ else -+ { -+ sep += mblength; -+ continue; -+ } -+ } -+ -+ if (sep == lim) -+ break; -+ -+ extract_field (line, ptr, sep - ptr); -+ } -+ } -+ else -+ { -+ /* Skip leading blanks before the first field. */ -+ while(ptr < lim) -+ { -+ state_bak = state; -+ mblength = mbrtowc (&wc, ptr, lim - ptr + 1, &state); -+ -+ if (mblength == (size_t)-1 || mblength == (size_t)-2) -+ { -+ mblength = 1; -+ state = state_bak; -+ break; -+ } -+ mblength = (mblength < 1) ? 
1 : mblength; -+ -+ if (!iswblank(wc)) -+ break; -+ ptr += mblength; -+ } -+ -+ do -+ { -+ char *sep; -+ state_bak = state; -+ mblength = mbrtowc (&wc, ptr, lim - ptr + 1, &state); -+ if (mblength == (size_t)-1 || mblength == (size_t)-2) -+ { -+ mblength = 1; -+ state = state_bak; -+ break; -+ } -+ mblength = (mblength < 1) ? 1 : mblength; -+ -+ sep = ptr + mblength; -+ while (sep != lim) -+ { -+ state_bak = state; -+ mblength = mbrtowc (&wc, sep, lim - sep + 1, &state); -+ if (mblength == (size_t)-1 || mblength == (size_t)-2) -+ { -+ mblength = 1; -+ state = state_bak; -+ break; -+ } -+ mblength = (mblength < 1) ? 1 : mblength; -+ -+ if (iswblank (wc)) -+ break; -+ -+ sep += mblength; -+ } -+ -+ extract_field (line, ptr, sep - ptr); -+ if (sep == lim) -+ return; -+ -+ state_bak = state; -+ mblength = mbrtowc (&wc, sep, lim - sep + 1, &state); -+ if (mblength == (size_t)-1 || mblength == (size_t)-2) -+ { -+ mblength = 1; -+ state = state_bak; -+ break; -+ } -+ mblength = (mblength < 1) ? 1 : mblength; -+ -+ ptr = sep + mblength; -+ while (ptr != lim) -+ { -+ state_bak = state; -+ mblength = mbrtowc (&wc, ptr, lim - ptr + 1, &state); -+ if (mblength == (size_t)-1 || mblength == (size_t)-2) -+ { -+ mblength = 1; -+ state = state_bak; -+ break; -+ } -+ mblength = (mblength < 1) ? 1 : mblength; -+ -+ if (!iswblank (wc)) -+ break; -+ -+ ptr += mblength; -+ } -+ } -+ while (ptr != lim); -+ } -+ -+ extract_field (line, ptr, lim - ptr); -+} -+#endif -+ - /* Read a line from FP into LINE and split it into fields. - Return true if successful. */ - -@@ -249,6 +410,11 @@ - line->nfields_allocated = 0; - line->nfields = 0; - line->fields = NULL; -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ xfields_multibyte (line); -+ else -+#endif - xfields (line); - return true; - } -@@ -303,56 +469,114 @@ - keycmp (struct line const *line1, struct line const *line2) - { - /* Start of field to compare in each file. */ -- char *beg1; -- char *beg2; -- -- size_t len1; -- size_t len2; /* Length of fields to compare. */ -+ char *beg[2]; -+ char *copy[2]; -+ size_t len[2]; /* Length of fields to compare. */ - int diff; -+ int i, j; - - if (join_field_1 < line1->nfields) - { -- beg1 = line1->fields[join_field_1].beg; -- len1 = line1->fields[join_field_1].len; -+ beg[0] = line1->fields[join_field_1].beg; -+ len[0] = line1->fields[join_field_1].len; - } - else - { -- beg1 = NULL; -- len1 = 0; -+ beg[0] = NULL; -+ len[0] = 0; - } - - if (join_field_2 < line2->nfields) - { -- beg2 = line2->fields[join_field_2].beg; -- len2 = line2->fields[join_field_2].len; -+ beg[1] = line2->fields[join_field_2].beg; -+ len[1] = line2->fields[join_field_2].len; - } - else - { -- beg2 = NULL; -- len2 = 0; -+ beg[1] = NULL; -+ len[1] = 0; - } - -- if (len1 == 0) -- return len2 == 0 ? 0 : -1; -- if (len2 == 0) -+ if (len[0] == 0) -+ return len[1] == 0 ? 0 : -1; -+ if (len[1] == 0) - return 1; - - if (ignore_case) - { -- /* FIXME: ignore_case does not work with NLS (in particular, -- with multibyte chars). 
*/ -- diff = memcasecmp (beg1, beg2, MIN (len1, len2)); -+#ifdef HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ { -+ size_t mblength; -+ wchar_t wc, uwc; -+ mbstate_t state, state_bak; -+ -+ memset (&state, '\0', sizeof (mbstate_t)); -+ -+ for (i = 0; i < 2; i++) -+ { -+ copy[i] = alloca (len[i] + 1); -+ -+ for (j = 0; j < MIN (len[0], len[1]);) -+ { -+ state_bak = state; -+ mblength = mbrtowc (&wc, beg[i] + j, len[i] - j, &state); -+ -+ switch (mblength) -+ { -+ case (size_t) -1: -+ case (size_t) -2: -+ state = state_bak; -+ /* Fall through */ -+ case 0: -+ mblength = 1; -+ break; -+ -+ default: -+ uwc = towupper (wc); -+ -+ if (uwc != wc) -+ { -+ mbstate_t state_wc; -+ -+ memset (&state_wc, '\0', sizeof (mbstate_t)); -+ wcrtomb (copy[i] + j, uwc, &state_wc); -+ } -+ else -+ memcpy (copy[i] + j, beg[i] + j, mblength); -+ } -+ j += mblength; -+ } -+ copy[i][j] = '\0'; -+ } -+ } -+ else -+#endif -+ { -+ for (i = 0; i < 2; i++) -+ { -+ copy[i] = alloca (len[i] + 1); -+ -+ for (j = 0; j < MIN (len[0], len[1]); j++) -+ copy[i][j] = toupper (beg[i][j]); -+ -+ copy[i][j] = '\0'; -+ } -+ } - } - else - { -- if (hard_LC_COLLATE) -- return xmemcoll (beg1, len1, beg2, len2); -- diff = memcmp (beg1, beg2, MIN (len1, len2)); -+ copy[0] = (unsigned char *) beg[0]; -+ copy[1] = (unsigned char *) beg[1]; - } - -+ if (hard_LC_COLLATE) -+ return xmemcoll ((char *) copy[0], len[0], (char *) copy[1], len[1]); -+ diff = memcmp (copy[0], copy[1], MIN (len[0], len[1])); -+ - if (diff) - return diff; -- return len1 < len2 ? -1 : len1 != len2; -+ return len[0] - len[1]; - } - - /* Print field N of LINE if it exists and is nonempty, otherwise -@@ -377,11 +601,18 @@ - - /* Print the join of LINE1 and LINE2. */ - -+#define PUT_TAB_CHAR \ -+ do \ -+ { \ -+ (tab != NULL) ? \ -+ fwrite(tab, sizeof(char), tablen, stdout) : putchar (' '); \ -+ } \ -+ while (0) -+ - static void - prjoin (struct line const *line1, struct line const *line2) - { - const struct outlist *outlist; -- char output_separator = tab < 0 ? ' ' : tab; - - outlist = outlist_head.next; - if (outlist) -@@ -397,12 +628,12 @@ - if (o->file == 0) - { - if (line1 == &uni_blank) -- { -+ { - line = line2; - field = join_field_2; - } - else -- { -+ { - line = line1; - field = join_field_1; - } -@@ -416,7 +647,7 @@ - o = o->next; - if (o == NULL) - break; -- putchar (output_separator); -+ PUT_TAB_CHAR; - } - putchar ('\n'); - } -@@ -434,23 +665,23 @@ - prfield (join_field_1, line1); - for (i = 0; i < join_field_1 && i < line1->nfields; ++i) - { -- putchar (output_separator); -+ PUT_TAB_CHAR; - prfield (i, line1); - } - for (i = join_field_1 + 1; i < line1->nfields; ++i) - { -- putchar (output_separator); -+ PUT_TAB_CHAR; - prfield (i, line1); - } - - for (i = 0; i < join_field_2 && i < line2->nfields; ++i) - { -- putchar (output_separator); -+ PUT_TAB_CHAR; - prfield (i, line2); - } - for (i = join_field_2 + 1; i < line2->nfields; ++i) - { -- putchar (output_separator); -+ PUT_TAB_CHAR; - prfield (i, line2); - } - putchar ('\n'); -@@ -859,20 +1090,41 @@ - - case 't': - { -- unsigned char newtab = optarg[0]; -- if (! newtab) -+ char *newtab; -+ size_t newtablen; -+ if (! 
optarg[0]) - error (EXIT_FAILURE, 0, _("empty tab")); -- if (optarg[1]) -+ newtab = xstrdup (optarg); -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ { -+ mbstate_t state; -+ -+ memset (&state, 0, sizeof (mbstate_t)); -+ newtablen = mbrtowc (NULL, newtab, -+ strnlen (newtab, MB_LEN_MAX), -+ &state); -+ if (newtablen == (size_t) 0 -+ || newtablen == (size_t) -1 -+ || newtablen == (size_t) -2) -+ newtablen = 1; -+ } -+ else -+#endif -+ newtablen = 1; -+ -+ if (newtablen == 1 && newtab[1]) -+ { -+ if (STREQ (newtab, "\\0")) -+ newtab[0] = '\0'; -+ } -+ if (tab != NULL && strcmp (tab, newtab)) - { -- if (STREQ (optarg, "\\0")) -- newtab = '\0'; -- else -- error (EXIT_FAILURE, 0, _("multi-character tab %s"), -- quote (optarg)); -+ free (newtab); -+ error (EXIT_FAILURE, 0, _("incompatible tabs")); - } -- if (0 <= tab && tab != newtab) -- error (EXIT_FAILURE, 0, _("incompatible tabs")); - tab = newtab; -+ tablen = newtablen; - } - break; - ---- coreutils-6.8+/src/uniq.c.i18n 2007-01-14 15:41:28.000000000 +0000 -+++ coreutils-6.8+/src/uniq.c 2007-03-01 15:08:24.000000000 +0000 -@@ -23,6 +23,16 @@ - #include - #include - -+/* Get mbstate_t, mbrtowc(). */ -+#if HAVE_WCHAR_H -+# include -+#endif -+ -+/* Get isw* functions. */ -+#if HAVE_WCTYPE_H -+# include -+#endif -+ - #include "system.h" - #include "argmatch.h" - #include "linebuffer.h" -@@ -32,7 +42,19 @@ - #include "quote.h" - #include "xmemcoll.h" - #include "xstrtol.h" --#include "memcasecmp.h" -+#include "xmemcoll.h" -+ -+/* MB_LEN_MAX is incorrectly defined to be 1 in at least one GCC -+ installation; work around this configuration error. */ -+#if !defined MB_LEN_MAX || MB_LEN_MAX < 2 -+# define MB_LEN_MAX 16 -+#endif -+ -+/* Some systems, like BeOS, have multibyte encodings but lack mbstate_t. */ -+#if HAVE_MBRTOWC && defined mbstate_t -+# define mbrtowc(pwc, s, n, ps) (mbrtowc) (pwc, s, n, 0) -+#endif -+ - - /* The official name of this program (e.g., no `g' prefix). */ - #define PROGRAM_NAME "uniq" -@@ -109,6 +131,10 @@ - /* Select whether/how to delimit groups of duplicate lines. */ - static enum delimit_method delimit_groups; - -+/* Function pointers. */ -+static char * -+(*find_field) (struct linebuffer *line); -+ - static struct option const longopts[] = - { - {"count", no_argument, NULL, 'c'}, -@@ -198,7 +224,7 @@ - return a pointer to the beginning of the line's field to be compared. */ - - static char * --find_field (const struct linebuffer *line) -+find_field_uni (struct linebuffer *line) - { - size_t count; - char *lp = line->buffer; -@@ -219,6 +245,83 @@ - return lp + i; - } - -+#if HAVE_MBRTOWC -+ -+# define MBCHAR_TO_WCHAR(WC, MBLENGTH, LP, POS, SIZE, STATEP, CONVFAIL) \ -+ do \ -+ { \ -+ mbstate_t state_bak; \ -+ \ -+ CONVFAIL = 0; \ -+ state_bak = *STATEP; \ -+ \ -+ MBLENGTH = mbrtowc (&WC, LP + POS, SIZE - POS, STATEP); \ -+ \ -+ switch (MBLENGTH) \ -+ { \ -+ case (size_t)-2: \ -+ case (size_t)-1: \ -+ *STATEP = state_bak; \ -+ CONVFAIL++; \ -+ /* Fall through */ \ -+ case 0: \ -+ MBLENGTH = 1; \ -+ } \ -+ } \ -+ while (0) -+ -+static char * -+find_field_multi (struct linebuffer *line) -+{ -+ size_t count; -+ char *lp = line->buffer; -+ size_t size = line->length - 1; -+ size_t pos; -+ size_t mblength; -+ wchar_t wc; -+ mbstate_t *statep; -+ int convfail; -+ -+ pos = 0; -+ statep = &(line->state); -+ -+ /* skip fields. 
*/ -+ for (count = 0; count < skip_fields && pos < size; count++) -+ { -+ while (pos < size) -+ { -+ MBCHAR_TO_WCHAR (wc, mblength, lp, pos, size, statep, convfail); -+ -+ if (convfail || !iswblank (wc)) -+ { -+ pos += mblength; -+ break; -+ } -+ pos += mblength; -+ } -+ -+ while (pos < size) -+ { -+ MBCHAR_TO_WCHAR (wc, mblength, lp, pos, size, statep, convfail); -+ -+ if (!convfail && iswblank (wc)) -+ break; -+ -+ pos += mblength; -+ } -+ } -+ -+ /* skip fields. */ -+ for (count = 0; count < skip_chars && pos < size; count++) -+ { -+ MBCHAR_TO_WCHAR (wc, mblength, lp, pos, size, statep, convfail); -+ pos += mblength; -+ } -+ -+ return lp + pos; -+} -+#endif -+ - /* Return false if two strings OLD and NEW match, true if not. - OLD and NEW point not to the beginnings of the lines - but rather to the beginnings of the fields to compare. -@@ -227,6 +330,8 @@ - static bool - different (char *old, char *new, size_t oldlen, size_t newlen) - { -+ char *copy_old, *copy_new; -+ - if (check_chars < oldlen) - oldlen = check_chars; - if (check_chars < newlen) -@@ -234,14 +339,92 @@ - - if (ignore_case) - { -- /* FIXME: This should invoke strcoll somehow. */ -- return oldlen != newlen || memcasecmp (old, new, oldlen); -+ size_t i; -+ -+ copy_old = alloca (oldlen + 1); -+ copy_new = alloca (oldlen + 1); -+ -+ for (i = 0; i < oldlen; i++) -+ { -+ copy_old[i] = toupper (old[i]); -+ copy_new[i] = toupper (new[i]); -+ } - } -- else if (hard_LC_COLLATE) -- return xmemcoll (old, oldlen, new, newlen) != 0; - else -- return oldlen != newlen || memcmp (old, new, oldlen); -+ { -+ copy_old = (char *)old; -+ copy_new = (char *)new; -+ } -+ -+ return xmemcoll (copy_old, oldlen, copy_new, newlen); -+} -+ -+#if HAVE_MBRTOWC -+static int -+different_multi (const char *old, const char *new, size_t oldlen, size_t newlen, mbstate_t oldstate, mbstate_t newstate) -+{ -+ size_t i, j, chars; -+ const char *str[2]; -+ char *copy[2]; -+ size_t len[2]; -+ mbstate_t state[2]; -+ size_t mblength; -+ wchar_t wc, uwc; -+ mbstate_t state_bak; -+ -+ str[0] = old; -+ str[1] = new; -+ len[0] = oldlen; -+ len[1] = newlen; -+ state[0] = oldstate; -+ state[1] = newstate; -+ -+ for (i = 0; i < 2; i++) -+ { -+ copy[i] = alloca (len[i] + 1); -+ -+ for (j = 0, chars = 0; j < len[i] && chars < check_chars; chars++) -+ { -+ state_bak = state[i]; -+ mblength = mbrtowc (&wc, str[i] + j, len[i] - j, &(state[i])); -+ -+ switch (mblength) -+ { -+ case (size_t)-1: -+ case (size_t)-2: -+ state[i] = state_bak; -+ /* Fall through */ -+ case 0: -+ mblength = 1; -+ break; -+ -+ default: -+ if (ignore_case) -+ { -+ uwc = towupper (wc); -+ -+ if (uwc != wc) -+ { -+ mbstate_t state_wc; -+ -+ memset (&state_wc, '\0', sizeof(mbstate_t)); -+ wcrtomb (copy[i] + j, uwc, &state_wc); -+ } -+ else -+ memcpy (copy[i] + j, str[i] + j, mblength); -+ } -+ else -+ memcpy (copy[i] + j, str[i] + j, mblength); -+ } -+ j += mblength; -+ } -+ copy[i][j] = '\0'; -+ len[i] = j; -+ } -+ -+ return xmemcoll (copy[0], len[0], copy[1], len[1]); - } -+#endif - - /* Output the line in linebuffer LINE to standard output - provided that the switches say it should be output. 
-@@ -295,15 +478,43 @@ - { - char *prevfield IF_LINT (= NULL); - size_t prevlen IF_LINT (= 0); -+#if HAVE_MBRTOWC -+ mbstate_t prevstate; -+ -+ memset (&prevstate, '\0', sizeof (mbstate_t)); -+#endif - - while (!feof (stdin)) - { - char *thisfield; - size_t thislen; -+#if HAVE_MBRTOWC -+ mbstate_t thisstate; -+#endif -+ - if (readlinebuffer (thisline, stdin) == 0) - break; - thisfield = find_field (thisline); - thislen = thisline->length - 1 - (thisfield - thisline->buffer); -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ { -+ thisstate = thisline->state; -+ -+ if (prevline->length == 0 || different_multi -+ (thisfield, prevfield, thislen, prevlen, thisstate, prevstate)) -+ { -+ fwrite (thisline->buffer, sizeof (char), -+ thisline->length, stdout); -+ -+ SWAP_LINES (prevline, thisline); -+ prevfield = thisfield; -+ prevlen = thislen; -+ prevstate = thisstate; -+ } -+ } -+ else -+#endif - if (prevline->length == 0 - || different (thisfield, prevfield, thislen, prevlen)) - { -@@ -322,17 +533,26 @@ - size_t prevlen; - uintmax_t match_count = 0; - bool first_delimiter = true; -+#if HAVE_MBRTOWC -+ mbstate_t prevstate; -+#endif - - if (readlinebuffer (prevline, stdin) == 0) - goto closefiles; - prevfield = find_field (prevline); - prevlen = prevline->length - 1 - (prevfield - prevline->buffer); -+#if HAVE_MBRTOWC -+ prevstate = prevline->state; -+#endif - - while (!feof (stdin)) - { - bool match; - char *thisfield; - size_t thislen; -+#if HAVE_MBRTOWC -+ mbstate_t thisstate; -+#endif - if (readlinebuffer (thisline, stdin) == 0) - { - if (ferror (stdin)) -@@ -341,6 +561,15 @@ - } - thisfield = find_field (thisline); - thislen = thisline->length - 1 - (thisfield - thisline->buffer); -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ { -+ thisstate = thisline->state; -+ match = !different_multi (thisfield, prevfield, -+ thislen, prevlen, thisstate, prevstate); -+ } -+ else -+#endif - match = !different (thisfield, prevfield, thislen, prevlen); - match_count += match; - -@@ -373,6 +602,9 @@ - SWAP_LINES (prevline, thisline); - prevfield = thisfield; - prevlen = thislen; -+#if HAVE_MBRTOWC -+ prevstate = thisstate; -+#endif - if (!match) - match_count = 0; - } -@@ -417,6 +649,19 @@ - - atexit (close_stdout); - -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ { -+ find_field = find_field_multi; -+ } -+ else -+#endif -+ { -+ find_field = find_field_uni; -+ } -+ -+ -+ - skip_chars = 0; - skip_fields = 0; - check_chars = SIZE_MAX; ---- coreutils-6.8+/src/fold.c.i18n 2007-02-23 12:01:47.000000000 +0000 -+++ coreutils-6.8+/src/fold.c 2007-03-01 15:08:24.000000000 +0000 -@@ -23,11 +23,33 @@ - #include - #include - -+/* Get mbstate_t, mbrtowc(), wcwidth(). */ -+#if HAVE_WCHAR_H -+# include -+#endif -+ -+/* Get iswprint(), iswblank(), wcwidth(). */ -+#if HAVE_WCTYPE_H -+# include -+#endif -+ - #include "system.h" - #include "error.h" - #include "quote.h" - #include "xstrtol.h" - -+/* MB_LEN_MAX is incorrectly defined to be 1 in at least one GCC -+ installation; work around this configuration error. */ -+#if !defined MB_LEN_MAX || MB_LEN_MAX < 2 -+# undef MB_LEN_MAX -+# define MB_LEN_MAX 16 -+#endif -+ -+/* Some systems, like BeOS, have multibyte encodings but lack mbstate_t. */ -+#if HAVE_MBRTOWC && defined mbstate_t -+# define mbrtowc(pwc, s, n, ps) (mbrtowc) (pwc, s, n, 0) -+#endif -+ - #define TAB_WIDTH 8 - - /* The official name of this program (e.g., no `g' prefix). 
*/ -@@ -35,23 +57,44 @@ - - #define AUTHORS "David MacKenzie" - -+#define FATAL_ERROR(Message) \ -+ do \ -+ { \ -+ error (0, 0, (Message)); \ -+ usage (2); \ -+ } \ -+ while (0) -+ -+enum operating_mode -+{ -+ /* Fold texts by columns that are at the given positions. */ -+ column_mode, -+ -+ /* Fold texts by bytes that are at the given positions. */ -+ byte_mode, -+ -+ /* Fold texts by characters that are at the given positions. */ -+ character_mode, -+}; -+ - /* The name this program was run with. */ - char *program_name; - -+/* The argument shows current mode. (Default: column_mode) */ -+static enum operating_mode operating_mode; -+ - /* If nonzero, try to break on whitespace. */ - static bool break_spaces; - --/* If nonzero, count bytes, not column positions. */ --static bool count_bytes; -- - /* If nonzero, at least one of the files we read was standard input. */ - static bool have_read_stdin; - --static char const shortopts[] = "bsw:0::1::2::3::4::5::6::7::8::9::"; -+static char const shortopts[] = "bcsw:0::1::2::3::4::5::6::7::8::9::"; - - static struct option const longopts[] = - { - {"bytes", no_argument, NULL, 'b'}, -+ {"characters", no_argument, NULL, 'c'}, - {"spaces", no_argument, NULL, 's'}, - {"width", required_argument, NULL, 'w'}, - {GETOPT_HELP_OPTION_DECL}, -@@ -81,6 +124,7 @@ - "), stdout); - fputs (_("\ - -b, --bytes count bytes rather than columns\n\ -+ -c, --characters count characters rather than columns\n\ - -s, --spaces break at spaces\n\ - -w, --width=WIDTH use WIDTH columns instead of 80\n\ - "), stdout); -@@ -98,7 +142,7 @@ - static size_t - adjust_column (size_t column, char c) - { -- if (!count_bytes) -+ if (operating_mode != byte_mode) - { - if (c == '\b') - { -@@ -121,30 +165,14 @@ - to stdout, with maximum line length WIDTH. - Return true if successful. */ - --static bool --fold_file (char const *filename, size_t width) -+static void -+fold_text (FILE *istream, size_t width, int *saved_errno) - { -- FILE *istream; - int c; - size_t column = 0; /* Screen column where next char will go. */ - size_t offset_out = 0; /* Index in `line_out' for next char. */ - static char *line_out = NULL; - static size_t allocated_out = 0; -- int saved_errno; -- -- if (STREQ (filename, "-")) -- { -- istream = stdin; -- have_read_stdin = true; -- } -- else -- istream = fopen (filename, "r"); -- -- if (istream == NULL) -- { -- error (0, errno, "%s", filename); -- return false; -- } - - while ((c = getc (istream)) != EOF) - { -@@ -172,6 +200,15 @@ - bool found_blank = false; - size_t logical_end = offset_out; - -+ /* If LINE_OUT has no wide character, -+ put a new wide character in LINE_OUT -+ if column is bigger than width. */ -+ if (offset_out == 0) -+ { -+ line_out[offset_out++] = c; -+ continue; -+ } -+ - /* Look for the last blank. */ - while (logical_end) - { -@@ -218,11 +255,225 @@ - line_out[offset_out++] = c; - } - -- saved_errno = errno; -+ *saved_errno = errno; -+ -+ if (offset_out) -+ fwrite (line_out, sizeof (char), (size_t) offset_out, stdout); -+ -+ free(line_out); -+} -+ -+#if HAVE_MBRTOWC -+static void -+fold_multibyte_text (FILE *istream, size_t width, int *saved_errno) -+{ -+ char buf[MB_LEN_MAX + BUFSIZ]; /* For spooling a read byte sequence. */ -+ size_t buflen = 0; /* The length of the byte sequence in buf. */ -+ char *bufpos; /* Next read position of BUF. */ -+ wint_t wc; /* A gotten wide character. */ -+ size_t mblength; /* The byte size of a multibyte character which shows -+ as same character as WC. */ -+ mbstate_t state, state_bak; /* State of the stream. 
*/ -+ int convfail; /* 1, when conversion is failed. Otherwise 0. */ -+ -+ char *line_out = NULL; -+ size_t offset_out = 0; /* Index in `line_out' for next char. */ -+ size_t allocated_out = 0; -+ -+ int increment; -+ size_t column = 0; -+ -+ size_t last_blank_pos; -+ size_t last_blank_column; -+ int is_blank_seen; -+ int last_blank_increment; -+ int is_bs_following_last_blank; -+ size_t bs_following_last_blank_num; -+ int is_cr_after_last_blank; -+ -+#define CLEAR_FLAGS \ -+ do \ -+ { \ -+ last_blank_pos = 0; \ -+ last_blank_column = 0; \ -+ is_blank_seen = 0; \ -+ is_bs_following_last_blank = 0; \ -+ bs_following_last_blank_num = 0; \ -+ is_cr_after_last_blank = 0; \ -+ } \ -+ while (0) -+ -+#define START_NEW_LINE \ -+ do \ -+ { \ -+ putchar ('\n'); \ -+ column = 0; \ -+ offset_out = 0; \ -+ CLEAR_FLAGS; \ -+ } \ -+ while (0) -+ -+ CLEAR_FLAGS; -+ memset (&state, '\0', sizeof(mbstate_t)); -+ -+ for (;; bufpos += mblength, buflen -= mblength) -+ { -+ if (buflen < MB_LEN_MAX && !feof (istream) && !ferror (istream)) -+ { -+ memmove (buf, bufpos, buflen); -+ buflen += fread (buf + buflen, sizeof(char), BUFSIZ, istream); -+ bufpos = buf; -+ } -+ -+ if (buflen < 1) -+ break; -+ -+ /* Get a wide character. */ -+ convfail = 0; -+ state_bak = state; -+ mblength = mbrtowc ((wchar_t *)&wc, bufpos, buflen, &state); -+ -+ switch (mblength) -+ { -+ case (size_t)-1: -+ case (size_t)-2: -+ convfail++; -+ state = state_bak; -+ /* Fall through. */ -+ -+ case 0: -+ mblength = 1; -+ break; -+ } -+ -+rescan: -+ if (operating_mode == byte_mode) /* byte mode */ -+ increment = mblength; -+ else if (operating_mode == character_mode) /* character mode */ -+ increment = 1; -+ else /* column mode */ -+ { -+ if (convfail) -+ increment = 1; -+ else -+ { -+ switch (wc) -+ { -+ case L'\n': -+ fwrite (line_out, sizeof(char), offset_out, stdout); -+ START_NEW_LINE; -+ continue; -+ -+ case L'\b': -+ increment = (column > 0) ? -1 : 0; -+ break; -+ -+ case L'\r': -+ increment = -1 * column; -+ break; -+ -+ case L'\t': -+ increment = 8 - column % 8; -+ break; -+ -+ default: -+ increment = wcwidth (wc); -+ increment = (increment < 0) ? 0 : increment; -+ } -+ } -+ } -+ -+ if (column + increment > width && break_spaces && last_blank_pos) -+ { -+ fwrite (line_out, sizeof(char), last_blank_pos, stdout); -+ putchar ('\n'); -+ -+ offset_out = offset_out - last_blank_pos; -+ column = column - last_blank_column + ((is_cr_after_last_blank) -+ ? 
last_blank_increment : bs_following_last_blank_num); -+ memmove (line_out, line_out + last_blank_pos, offset_out); -+ CLEAR_FLAGS; -+ goto rescan; -+ } -+ -+ if (column + increment > width && column != 0) -+ { -+ fwrite (line_out, sizeof(char), offset_out, stdout); -+ START_NEW_LINE; -+ goto rescan; -+ } -+ -+ if (allocated_out < offset_out + mblength) -+ { -+ allocated_out += 1024; -+ line_out = xrealloc (line_out, allocated_out); -+ } -+ -+ memcpy (line_out + offset_out, bufpos, mblength); -+ offset_out += mblength; -+ column += increment; -+ -+ if (is_blank_seen && !convfail && wc == L'\r') -+ is_cr_after_last_blank = 1; -+ -+ if (is_bs_following_last_blank && !convfail && wc == L'\b') -+ ++bs_following_last_blank_num; -+ else -+ is_bs_following_last_blank = 0; -+ -+ if (break_spaces && !convfail && iswblank (wc)) -+ { -+ last_blank_pos = offset_out; -+ last_blank_column = column; -+ is_blank_seen = 1; -+ last_blank_increment = increment; -+ is_bs_following_last_blank = 1; -+ bs_following_last_blank_num = 0; -+ is_cr_after_last_blank = 0; -+ } -+ } -+ -+ *saved_errno = errno; - - if (offset_out) - fwrite (line_out, sizeof (char), (size_t) offset_out, stdout); - -+ free(line_out); -+} -+#endif -+ -+/* Fold file FILENAME, or standard input if FILENAME is "-", -+ to stdout, with maximum line length WIDTH. -+ Return 0 if successful, 1 if an error occurs. */ -+ -+static bool -+fold_file (char *filename, size_t width) -+{ -+ FILE *istream; -+ int saved_errno; -+ -+ if (STREQ (filename, "-")) -+ { -+ istream = stdin; -+ have_read_stdin = 1; -+ } -+ else -+ istream = fopen (filename, "r"); -+ -+ if (istream == NULL) -+ { -+ error (0, errno, "%s", filename); -+ return 1; -+ } -+ -+ /* Define how ISTREAM is being folded. */ -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ fold_multibyte_text (istream, width, &saved_errno); -+ else -+#endif -+ fold_text (istream, width, &saved_errno); -+ - if (ferror (istream)) - { - error (0, saved_errno, "%s", filename); -@@ -255,7 +506,8 @@ - - atexit (close_stdout); - -- break_spaces = count_bytes = have_read_stdin = false; -+ operating_mode = column_mode; -+ break_spaces = have_read_stdin = false; - - while ((optc = getopt_long (argc, argv, shortopts, longopts, NULL)) != -1) - { -@@ -264,7 +516,15 @@ - switch (optc) - { - case 'b': /* Count bytes rather than columns. */ -- count_bytes = true; -+ if (operating_mode != column_mode) -+ FATAL_ERROR (_("only one way of folding may be specified")); -+ operating_mode = byte_mode; -+ break; -+ -+ case 'c': -+ if (operating_mode != column_mode) -+ FATAL_ERROR (_("only one way of folding may be specified")); -+ operating_mode = character_mode; - break; - - case 's': /* Break at word boundaries. */ ---- coreutils-6.8+/src/sort.c.i18n 2007-02-24 11:23:23.000000000 +0000 -+++ coreutils-6.8+/src/sort.c 2007-03-01 15:10:57.000000000 +0000 -@@ -23,10 +23,19 @@ - - #include - -+#include - #include - #include - #include - #include -+#if HAVE_WCHAR_H -+# include -+#endif -+/* Get isw* functions. */ -+#if HAVE_WCTYPE_H -+# include -+#endif -+ - #include "system.h" - #include "argmatch.h" - #include "error.h" -@@ -116,14 +125,38 @@ - /* Thousands separator; if -1, then there isn't one. */ - static int thousands_sep; - -+static int force_general_numcompare = 0; -+ - /* Nonzero if the corresponding locales are hard. */ - static bool hard_LC_COLLATE; --#if HAVE_NL_LANGINFO -+#if HAVE_LANGINFO_CODESET - static bool hard_LC_TIME; - #endif - - #define NONZERO(x) ((x) != 0) - -+/* get a multibyte character's byte length. 
*/ -+#define GET_BYTELEN_OF_CHAR(LIM, PTR, MBLENGTH, STATE) \ -+ do \ -+ { \ -+ wchar_t wc; \ -+ mbstate_t state_bak; \ -+ \ -+ state_bak = STATE; \ -+ mblength = mbrtowc (&wc, PTR, LIM - PTR, &STATE); \ -+ \ -+ switch (MBLENGTH) \ -+ { \ -+ case (size_t)-1: \ -+ case (size_t)-2: \ -+ STATE = state_bak; \ -+ /* Fall through. */ \ -+ case 0: \ -+ MBLENGTH = 1; \ -+ } \ -+ } \ -+ while (0) -+ - /* The kind of blanks for '-b' to skip in various options. */ - enum blanktype { bl_start, bl_end, bl_both }; - -@@ -261,13 +294,11 @@ - they were read if all keys compare equal. */ - static bool stable; - --/* If TAB has this value, blanks separate fields. */ --enum { TAB_DEFAULT = CHAR_MAX + 1 }; -- --/* Tab character separating fields. If TAB_DEFAULT, then fields are -+/* Tab character separating fields. If tab_length is 0, then fields are - separated by the empty string between a non-blank character and a blank - character. */ --static int tab = TAB_DEFAULT; -+static char tab[MB_LEN_MAX + 1]; -+static size_t tab_length = 0; - - /* Flag to remove consecutive duplicate lines from the output. - Only the last of a sequence of equal lines will be output. */ -@@ -639,6 +670,44 @@ - update_proc (pid); - } - -+/* Function pointers. */ -+static void -+(*inittables) (void); -+static char * -+(*begfield) (const struct line*, const struct keyfield *); -+static char * -+(*limfield) (const struct line*, const struct keyfield *); -+static int -+(*getmonth) (char const *, size_t); -+static int -+(*keycompare) (const struct line *, const struct line *); -+static int -+(*numcompare) (const char *, const char *); -+ -+/* Test for white space multibyte character. -+ Set LENGTH the byte length of investigated multibyte character. */ -+#if HAVE_MBRTOWC -+static int -+ismbblank (const char *str, size_t len, size_t *length) -+{ -+ size_t mblength; -+ wchar_t wc; -+ mbstate_t state; -+ -+ memset (&state, '\0', sizeof(mbstate_t)); -+ mblength = mbrtowc (&wc, str, len, &state); -+ -+ if (mblength == (size_t)-1 || mblength == (size_t)-2) -+ { -+ *length = 1; -+ return 0; -+ } -+ -+ *length = (mblength < 1) ? 1 : mblength; -+ return iswblank (wc); -+} -+#endif -+ - /* Clean up any remaining temporary files. */ - - static void -@@ -978,7 +1047,7 @@ - free (node); - } - --#if HAVE_NL_LANGINFO -+#if HAVE_LANGINFO_CODESET - - static int - struct_month_cmp (const void *m1, const void *m2) -@@ -993,7 +1062,7 @@ - /* Initialize the character class tables. */ - - static void --inittables (void) -+inittables_uni (void) - { - size_t i; - -@@ -1005,7 +1074,7 @@ - fold_toupper[i] = toupper (i); - } - --#if HAVE_NL_LANGINFO -+#if HAVE_LANGINFO_CODESET - /* If we're not in the "C" locale, read different names for months. 
*/ - if (hard_LC_TIME) - { -@@ -1031,6 +1100,64 @@ - #endif - } - -+#if HAVE_MBRTOWC -+static void -+inittables_mb (void) -+{ -+ int i, j, k, l; -+ char *name, *s; -+ size_t s_len, mblength; -+ char mbc[MB_LEN_MAX]; -+ wchar_t wc, pwc; -+ mbstate_t state_mb, state_wc; -+ -+ for (i = 0; i < MONTHS_PER_YEAR; i++) -+ { -+ s = (char *) nl_langinfo (ABMON_1 + i); -+ s_len = strlen (s); -+ monthtab[i].name = name = (char *) xmalloc (s_len + 1); -+ monthtab[i].val = i + 1; -+ -+ memset (&state_mb, '\0', sizeof (mbstate_t)); -+ memset (&state_wc, '\0', sizeof (mbstate_t)); -+ -+ for (j = 0; j < s_len;) -+ { -+ if (!ismbblank (s + j, s_len - j, &mblength)) -+ break; -+ j += mblength; -+ } -+ -+ for (k = 0; j < s_len;) -+ { -+ mblength = mbrtowc (&wc, (s + j), (s_len - j), &state_mb); -+ assert (mblength != (size_t)-1 && mblength != (size_t)-2); -+ if (mblength == 0) -+ break; -+ -+ pwc = towupper (wc); -+ if (pwc == wc) -+ { -+ memcpy (mbc, s + j, mblength); -+ j += mblength; -+ } -+ else -+ { -+ j += mblength; -+ mblength = wcrtomb (mbc, pwc, &state_wc); -+ assert (mblength != (size_t)0 && mblength != (size_t)-1); -+ } -+ -+ for (l = 0; l < mblength; l++) -+ name[k++] = mbc[l]; -+ } -+ name[k] = '\0'; -+ } -+ qsort ((void *) monthtab, MONTHS_PER_YEAR, -+ sizeof (struct month), struct_month_cmp); -+} -+#endif -+ - /* Specify the amount of main memory to use when sorting. */ - static void - specify_sort_size (char const *s) -@@ -1241,7 +1368,7 @@ - by KEY in LINE. */ - - static char * --begfield (const struct line *line, const struct keyfield *key) -+begfield_uni (const struct line *line, const struct keyfield *key) - { - char *ptr = line->text, *lim = ptr + line->length - 1; - size_t sword = key->sword; -@@ -1251,10 +1378,10 @@ - /* The leading field separator itself is included in a field when -t - is absent. */ - -- if (tab != TAB_DEFAULT) -+ if (tab_length) - while (ptr < lim && sword--) - { -- while (ptr < lim && *ptr != tab) -+ while (ptr < lim && *ptr != tab[0]) - ++ptr; - if (ptr < lim) - ++ptr; -@@ -1282,11 +1409,70 @@ - return ptr; - } - -+#if HAVE_MBRTOWC -+static char * -+begfield_mb (const struct line *line, const struct keyfield *key) -+{ -+ int i; -+ char *ptr = line->text, *lim = ptr + line->length - 1; -+ size_t sword = key->sword; -+ size_t schar = key->schar; -+ size_t mblength; -+ mbstate_t state; -+ -+ memset (&state, '\0', sizeof(mbstate_t)); -+ -+ if (tab_length) -+ while (ptr < lim && sword--) -+ { -+ while (ptr < lim && memcmp (ptr, tab, tab_length) != 0) -+ { -+ GET_BYTELEN_OF_CHAR (lim, ptr, mblength, state); -+ ptr += mblength; -+ } -+ if (ptr < lim) -+ { -+ GET_BYTELEN_OF_CHAR (lim, ptr, mblength, state); -+ ptr += mblength; -+ } -+ } -+ else -+ while (ptr < lim && sword--) -+ { -+ while (ptr < lim && ismbblank (ptr, lim - ptr, &mblength)) -+ ptr += mblength; -+ if (ptr < lim) -+ { -+ GET_BYTELEN_OF_CHAR (lim, ptr, mblength, state); -+ ptr += mblength; -+ } -+ while (ptr < lim && !ismbblank (ptr, lim - ptr, &mblength)) -+ ptr += mblength; -+ } -+ -+ if (key->skipsblanks) -+ while (ptr < lim && ismbblank (ptr, lim - ptr, &mblength)) -+ ptr += mblength; -+ -+ for (i = 0; i < schar; i++) -+ { -+ GET_BYTELEN_OF_CHAR (lim, ptr, mblength, state); -+ -+ if (ptr + mblength > lim) -+ break; -+ else -+ ptr += mblength; -+ } -+ -+ return ptr; -+} -+#endif -+ - /* Return the limit of (a pointer to the first character after) the field - in LINE specified by KEY. 
*/ - - static char * --limfield (const struct line *line, const struct keyfield *key) -+limfield_uni (const struct line *line, const struct keyfield *key) - { - char *ptr = line->text, *lim = ptr + line->length - 1; - size_t eword = key->eword, echar = key->echar; -@@ -1299,10 +1485,10 @@ - `beginning' is the first character following the delimiting TAB. - Otherwise, leave PTR pointing at the first `blank' character after - the preceding field. */ -- if (tab != TAB_DEFAULT) -+ if (tab_length) - while (ptr < lim && eword--) - { -- while (ptr < lim && *ptr != tab) -+ while (ptr < lim && *ptr != tab[0]) - ++ptr; - if (ptr < lim && (eword | echar)) - ++ptr; -@@ -1348,10 +1534,10 @@ - */ - - /* Make LIM point to the end of (one byte past) the current field. */ -- if (tab != TAB_DEFAULT) -+ if (tab_length) - { - char *newlim; -- newlim = memchr (ptr, tab, lim - ptr); -+ newlim = memchr (ptr, tab[0], lim - ptr); - if (newlim) - lim = newlim; - } -@@ -1384,6 +1570,107 @@ - return ptr; - } - -+#if HAVE_MBRTOWC -+static char * -+limfield_mb (const struct line *line, const struct keyfield *key) -+{ -+ char *ptr = line->text, *lim = ptr + line->length - 1; -+ size_t eword = key->eword, echar = key->echar; -+ int i; -+ size_t mblength; -+ mbstate_t state; -+ -+ memset (&state, '\0', sizeof(mbstate_t)); -+ -+ if (tab_length) -+ while (ptr < lim && eword--) -+ { -+ while (ptr < lim && memcmp (ptr, tab, tab_length) != 0) -+ { -+ GET_BYTELEN_OF_CHAR (lim, ptr, mblength, state); -+ ptr += mblength; -+ } -+ if (ptr < lim && (eword | echar)) -+ { -+ GET_BYTELEN_OF_CHAR (lim, ptr, mblength, state); -+ ptr += mblength; -+ } -+ } -+ else -+ while (ptr < lim && eword--) -+ { -+ while (ptr < lim && ismbblank (ptr, lim - ptr, &mblength)) -+ ptr += mblength; -+ if (ptr < lim) -+ { -+ GET_BYTELEN_OF_CHAR (lim, ptr, mblength, state); -+ ptr += mblength; -+ } -+ while (ptr < lim && !ismbblank (ptr, lim - ptr, &mblength)) -+ ptr += mblength; -+ } -+ -+ -+# ifdef POSIX_UNSPECIFIED -+ /* Make LIM point to the end of (one byte past) the current field. */ -+ if (tab_length) -+ { -+ char *newlim, *p; -+ -+ newlim = NULL; -+ for (p = ptr; p < lim;) -+ { -+ if (memcmp (p, tab, tab_length) == 0) -+ { -+ newlim = p; -+ break; -+ } -+ -+ GET_BYTELEN_OF_CHAR (lim, ptr, mblength, state); -+ p += mblength; -+ } -+ } -+ else -+ { -+ char *newlim; -+ newlim = ptr; -+ -+ while (newlim < lim && ismbblank (newlim, lim - newlim, &mblength)) -+ newlim += mblength; -+ if (ptr < lim) -+ { -+ GET_BYTELEN_OF_CHAR (lim, ptr, mblength, state); -+ ptr += mblength; -+ } -+ while (newlim < lim && !ismbblank (newlim, lim - newlim, &mblength)) -+ newlim += mblength; -+ lim = newlim; -+ } -+# endif -+ -+ /* If we're skipping leading blanks, don't start counting characters -+ * until after skipping past any leading blanks. */ -+ if (key->skipsblanks) -+ while (ptr < lim && ismbblank (ptr, lim - ptr, &mblength)) -+ ptr += mblength; -+ -+ memset (&state, '\0', sizeof(mbstate_t)); -+ -+ /* Advance PTR by ECHAR (if possible), but no further than LIM. */ -+ for (i = 0; i < echar; i++) -+ { -+ GET_BYTELEN_OF_CHAR (lim, ptr, mblength, state); -+ -+ if (ptr + mblength > lim) -+ break; -+ else -+ ptr += mblength; -+ } -+ -+ return ptr; -+} -+#endif -+ - /* Fill BUF reading from FP, moving buf->left bytes from the end - of buf->buf to the beginning first. If EOF is reached and the - file wasn't terminated by a newline, supply one. 
Set up BUF's line -@@ -1466,8 +1753,24 @@ - else - { - if (key->skipsblanks) -- while (blanks[to_uchar (*line_start)]) -- line_start++; -+ { -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ { -+ size_t mblength; -+ mbstate_t state; -+ memset (&state, '\0', sizeof(mbstate_t)); -+ while (line_start < line->keylim && -+ ismbblank (line_start, -+ line->keylim - line_start, -+ &mblength)) -+ line_start += mblength; -+ } -+ else -+#endif -+ while (blanks[to_uchar (*line_start)]) -+ line_start++; -+ } - line->keybeg = line_start; - } - } -@@ -1500,7 +1803,7 @@ - hideously fast. */ - - static int --numcompare (const char *a, const char *b) -+numcompare_uni (const char *a, const char *b) - { - while (blanks[to_uchar (*a)]) - a++; -@@ -1510,6 +1813,25 @@ - return strnumcmp (a, b, decimal_point, thousands_sep); - } - -+#if HAVE_MBRTOWC -+static int -+numcompare_mb (const char *a, const char *b) -+{ -+ size_t mblength, len; -+ len = strlen (a); /* okay for UTF-8 */ -+ while (*a && ismbblank (a, len > MB_CUR_MAX ? MB_CUR_MAX : len, &mblength)) -+ { -+ a += mblength; -+ len -= mblength; -+ } -+ len = strlen (b); /* okay for UTF-8 */ -+ while (*b && ismbblank (b, len > MB_CUR_MAX ? MB_CUR_MAX : len, &mblength)) -+ b += mblength; -+ -+ return strnumcmp (a, b, decimal_point, thousands_sep); -+} -+#endif /* HAV_EMBRTOWC */ -+ - static int - general_numcompare (const char *sa, const char *sb) - { -@@ -1543,7 +1865,7 @@ - Return 0 if the name in S is not recognized. */ - - static int --getmonth (char const *month, size_t len) -+getmonth_uni (char const *month, size_t len) - { - size_t lo = 0; - size_t hi = MONTHS_PER_YEAR; -@@ -1698,11 +2020,79 @@ - return diff; - } - -+#if HAVE_MBRTOWC -+static int -+getmonth_mb (const char *s, size_t len) -+{ -+ char *month; -+ register size_t i; -+ register int lo = 0, hi = MONTHS_PER_YEAR, result; -+ char *tmp; -+ size_t wclength, mblength; -+ const char **pp; -+ const wchar_t **wpp; -+ wchar_t *month_wcs; -+ mbstate_t state; -+ -+ while (len > 0 && ismbblank (s, len, &mblength)) -+ { -+ s += mblength; -+ len -= mblength; -+ } -+ -+ if (len == 0) -+ return 0; -+ -+ month = (char *) alloca (len + 1); -+ -+ tmp = (char *) alloca (len + 1); -+ memcpy (tmp, s, len); -+ tmp[len] = '\0'; -+ pp = (const char **)&tmp; -+ month_wcs = (wchar_t *) alloca ((len + 1) * sizeof (wchar_t)); -+ memset (&state, '\0', sizeof(mbstate_t)); -+ -+ wclength = mbsrtowcs (month_wcs, pp, len + 1, &state); -+ assert (wclength != (size_t)-1 && *pp == NULL); -+ -+ for (i = 0; i < wclength; i++) -+ { -+ month_wcs[i] = towupper(month_wcs[i]); -+ if (iswblank (month_wcs[i])) -+ { -+ month_wcs[i] = L'\0'; -+ break; -+ } -+ } -+ -+ wpp = (const wchar_t **)&month_wcs; -+ -+ mblength = wcsrtombs (month, wpp, len + 1, &state); -+ assert (mblength != (-1) && *wpp == NULL); -+ -+ do -+ { -+ int ix = (lo + hi) / 2; -+ -+ if (strncmp (month, monthtab[ix].name, strlen (monthtab[ix].name)) < 0) -+ hi = ix; -+ else -+ lo = ix; -+ } -+ while (hi - lo > 1); -+ -+ result = (!strncmp (month, monthtab[lo].name, strlen (monthtab[lo].name)) -+ ? monthtab[lo].val : 0); -+ -+ return result; -+} -+#endif -+ - /* Compare two lines A and B trying every key in sequence until there - are no more keys or a difference is found. */ - - static int --keycompare (const struct line *a, const struct line *b) -+keycompare_uni (const struct line *a, const struct line *b) - { - struct keyfield const *key = keylist; - -@@ -1875,6 +2265,179 @@ - return key->reverse ? 
-diff : diff; - } - -+#if HAVE_MBRTOWC -+static int -+keycompare_mb (const struct line *a, const struct line *b) -+{ -+ struct keyfield *key = keylist; -+ -+ /* For the first iteration only, the key positions have been -+ precomputed for us. */ -+ char *texta = a->keybeg; -+ char *textb = b->keybeg; -+ char *lima = a->keylim; -+ char *limb = b->keylim; -+ -+ size_t mblength_a, mblength_b; -+ wchar_t wc_a, wc_b; -+ mbstate_t state_a, state_b; -+ -+ int diff; -+ -+ memset (&state_a, '\0', sizeof(mbstate_t)); -+ memset (&state_b, '\0', sizeof(mbstate_t)); -+ -+ for (;;) -+ { -+ unsigned char *translate = (unsigned char *) key->translate; -+ bool const *ignore = key->ignore; -+ -+ /* Find the lengths. */ -+ size_t lena = lima <= texta ? 0 : lima - texta; -+ size_t lenb = limb <= textb ? 0 : limb - textb; -+ -+ /* Actually compare the fields. */ -+ if (key->random) -+ diff = compare_random (texta, lena, textb, lenb); -+ else if (key->numeric | key->general_numeric) -+ { -+ char savea = *lima, saveb = *limb; -+ -+ *lima = *limb = '\0'; -+ if (force_general_numcompare) -+ diff = general_numcompare (texta, textb); -+ else -+ diff = ((key->numeric ? numcompare : general_numcompare) -+ (texta, textb)); -+ *lima = savea, *limb = saveb; -+ } -+ else if (key->month) -+ diff = getmonth (texta, lena) - getmonth (textb, lenb); -+ else -+ { -+ if (ignore || translate) -+ { -+ char *copy_a = (char *) alloca (lena + 1 + lenb + 1); -+ char *copy_b = copy_a + lena + 1; -+ size_t new_len_a, new_len_b; -+ size_t i, j; -+ -+ /* Ignore and/or translate chars before comparing. */ -+# define IGNORE_CHARS(NEW_LEN, LEN, TEXT, COPY, WC, MBLENGTH, STATE) \ -+ do \ -+ { \ -+ wchar_t uwc; \ -+ char mbc[MB_LEN_MAX]; \ -+ mbstate_t state_wc; \ -+ \ -+ for (NEW_LEN = i = 0; i < LEN;) \ -+ { \ -+ mbstate_t state_bak; \ -+ \ -+ state_bak = STATE; \ -+ MBLENGTH = mbrtowc (&WC, TEXT + i, LEN - i, &STATE); \ -+ \ -+ if (MBLENGTH == (size_t)-2 || MBLENGTH == (size_t)-1 \ -+ || MBLENGTH == 0) \ -+ { \ -+ if (MBLENGTH == (size_t)-2 || MBLENGTH == (size_t)-1) \ -+ STATE = state_bak; \ -+ if (!ignore) \ -+ COPY[NEW_LEN++] = TEXT[i++]; \ -+ continue; \ -+ } \ -+ \ -+ if (ignore) \ -+ { \ -+ if ((ignore == nonprinting && !iswprint (WC)) \ -+ || (ignore == nondictionary \ -+ && !iswalnum (WC) && !iswblank (WC))) \ -+ { \ -+ i += MBLENGTH; \ -+ continue; \ -+ } \ -+ } \ -+ \ -+ if (translate) \ -+ { \ -+ \ -+ uwc = towupper(WC); \ -+ if (WC == uwc) \ -+ { \ -+ memcpy (mbc, TEXT + i, MBLENGTH); \ -+ i += MBLENGTH; \ -+ } \ -+ else \ -+ { \ -+ i += MBLENGTH; \ -+ WC = uwc; \ -+ memset (&state_wc, '\0', sizeof (mbstate_t)); \ -+ \ -+ MBLENGTH = wcrtomb (mbc, WC, &state_wc); \ -+ assert (MBLENGTH != (size_t)-1 && MBLENGTH != 0); \ -+ } \ -+ \ -+ for (j = 0; j < MBLENGTH; j++) \ -+ COPY[NEW_LEN++] = mbc[j]; \ -+ } \ -+ else \ -+ for (j = 0; j < MBLENGTH; j++) \ -+ COPY[NEW_LEN++] = TEXT[i++]; \ -+ } \ -+ COPY[NEW_LEN] = '\0'; \ -+ } \ -+ while (0) -+ IGNORE_CHARS (new_len_a, lena, texta, copy_a, -+ wc_a, mblength_a, state_a); -+ IGNORE_CHARS (new_len_b, lenb, textb, copy_b, -+ wc_b, mblength_b, state_b); -+ diff = xmemcoll (copy_a, new_len_a, copy_b, new_len_b); -+ } -+ else if (lena == 0) -+ diff = - NONZERO (lenb); -+ else if (lenb == 0) -+ goto greater; -+ else -+ diff = xmemcoll (texta, lena, textb, lenb); -+ } -+ -+ if (diff) -+ goto not_equal; -+ -+ key = key->next; -+ if (! key) -+ break; -+ -+ /* Find the beginning and limit of the next field. 
*/ -+ if (key->eword != -1) -+ lima = limfield (a, key), limb = limfield (b, key); -+ else -+ lima = a->text + a->length - 1, limb = b->text + b->length - 1; -+ -+ if (key->sword != -1) -+ texta = begfield (a, key), textb = begfield (b, key); -+ else -+ { -+ texta = a->text, textb = b->text; -+ if (key->skipsblanks) -+ { -+ while (texta < lima && ismbblank (texta, lima - texta, &mblength_a)) -+ texta += mblength_a; -+ while (textb < limb && ismbblank (textb, limb - textb, &mblength_b)) -+ textb += mblength_b; -+ } -+ } -+ } -+ -+ return 0; -+ -+greater: -+ diff = 1; -+not_equal: -+ return key->reverse ? -diff : diff; -+} -+#endif -+ - /* Compare two lines A and B, returning negative, zero, or positive - depending on whether A compares less than, equal to, or greater than B. */ - -@@ -2744,7 +3305,7 @@ - initialize_exit_failure (SORT_FAILURE); - - hard_LC_COLLATE = hard_locale (LC_COLLATE); --#if HAVE_NL_LANGINFO -+#if HAVE_LANGINFO_CODESET - hard_LC_TIME = hard_locale (LC_TIME); - #endif - -@@ -2765,6 +3326,27 @@ - thousands_sep = -1; - } - -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ { -+ inittables = inittables_mb; -+ begfield = begfield_mb; -+ limfield = limfield_mb; -+ getmonth = getmonth_mb; -+ keycompare = keycompare_mb; -+ numcompare = numcompare_mb; -+ } -+ else -+#endif -+ { -+ inittables = inittables_uni; -+ begfield = begfield_uni; -+ limfield = limfield_uni; -+ getmonth = getmonth_uni; -+ keycompare = keycompare_uni; -+ numcompare = numcompare_uni; -+ } -+ - have_read_stdin = false; - inittables (); - -@@ -3015,13 +3597,35 @@ - - case 't': - { -- char newtab = optarg[0]; -- if (! newtab) -+ char newtab[MB_LEN_MAX + 1]; -+ size_t newtab_length = 1; -+ strncpy (newtab, optarg, MB_LEN_MAX); -+ if (! newtab[0]) - error (SORT_FAILURE, 0, _("empty tab")); -- if (optarg[1]) -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ { -+ wchar_t wc; -+ mbstate_t state; -+ size_t i; -+ -+ memset (&state, '\0', sizeof (mbstate_t)); -+ newtab_length = mbrtowc (&wc, newtab, strnlen (newtab, -+ MB_LEN_MAX), -+ &state); -+ switch (newtab_length) -+ { -+ case (size_t) -1: -+ case (size_t) -2: -+ case 0: -+ newtab_length = 1; -+ } -+ } -+#endif -+ if (newtab_length == 1 && optarg[1]) - { - if (STREQ (optarg, "\\0")) -- newtab = '\0'; -+ newtab[0] = '\0'; - else - { - /* Provoke with `sort -txx'. Complain about -@@ -3032,9 +3636,12 @@ - quote (optarg)); - } - } -- if (tab != TAB_DEFAULT && tab != newtab) -+ if (tab_length -+ && (tab_length != newtab_length -+ || memcmp (tab, newtab, tab_length) != 0)) - error (SORT_FAILURE, 0, _("incompatible tabs")); -- tab = newtab; -+ memcpy (tab, newtab, newtab_length); -+ tab_length = newtab_length; - } - break; - ---- coreutils-6.8+/src/unexpand.c.i18n 2007-01-14 15:41:28.000000000 +0000 -+++ coreutils-6.8+/src/unexpand.c 2007-03-01 15:08:24.000000000 +0000 -@@ -39,11 +39,28 @@ - #include - #include - #include -+ -+/* Get mbstate_t, mbrtowc(), wcwidth(). */ -+#if HAVE_WCHAR_H -+# include -+#endif -+ - #include "system.h" - #include "error.h" - #include "quote.h" - #include "xstrndup.h" - -+/* MB_LEN_MAX is incorrectly defined to be 1 in at least one GCC -+ installation; work around this configuration error. */ -+#if !defined MB_LEN_MAX || MB_LEN_MAX < 2 -+# define MB_LEN_MAX 16 -+#endif -+ -+/* Some systems, like BeOS, have multibyte encodings but lack mbstate_t. */ -+#if HAVE_MBRTOWC && defined mbstate_t -+# define mbrtowc(pwc, s, n, ps) (mbrtowc) (pwc, s, n, 0) -+#endif -+ - /* The official name of this program (e.g., no `g' prefix). 
*/ - #define PROGRAM_NAME "unexpand" - -@@ -110,6 +127,208 @@ - {NULL, 0, NULL, 0} - }; - -+static FILE *next_file (FILE *fp); -+ -+#if HAVE_MBRTOWC -+static void -+unexpand_multibyte (void) -+{ -+ FILE *fp; /* Input stream. */ -+ mbstate_t i_state; /* Current shift state of the input stream. */ -+ mbstate_t i_state_bak; /* Back up the I_STATE. */ -+ mbstate_t o_state; /* Current shift state of the output stream. */ -+ char buf[MB_LEN_MAX + BUFSIZ]; /* For spooling a read byte sequence. */ -+ char *bufpos; /* Next read position of BUF. */ -+ size_t buflen = 0; /* The length of the byte sequence in buf. */ -+ wint_t wc; /* A gotten wide character. */ -+ size_t mblength; /* The byte size of a multibyte character -+ which shows as same character as WC. */ -+ -+ /* Index in `tab_list' of next tabstop: */ -+ int tab_index = 0; /* For calculating width of pending tabs. */ -+ int print_tab_index = 0; /* For printing as many tabs as possible. */ -+ unsigned int column = 0; /* Column on screen of next char. */ -+ int next_tab_column; /* Column the next tab stop is on. */ -+ int convert = 1; /* If nonzero, perform translations. */ -+ unsigned int pending = 0; /* Pending columns of blanks. */ -+ -+ fp = next_file ((FILE *) NULL); -+ if (fp == NULL) -+ return; -+ -+ memset (&o_state, '\0', sizeof(mbstate_t)); -+ memset (&i_state, '\0', sizeof(mbstate_t)); -+ -+ for (;;) -+ { -+ if (buflen < MB_LEN_MAX && !feof(fp) && !ferror(fp)) -+ { -+ memmove (buf, bufpos, buflen); -+ buflen += fread (buf + buflen, sizeof(char), BUFSIZ, fp); -+ bufpos = buf; -+ } -+ -+ /* Get a wide character. */ -+ if (buflen < 1) -+ { -+ mblength = 1; -+ wc = WEOF; -+ } -+ else -+ { -+ i_state_bak = i_state; -+ mblength = mbrtowc ((wchar_t *)&wc, bufpos, buflen, &i_state); -+ } -+ -+ if (mblength == (size_t)-1 || mblength == (size_t)-2) -+ { -+ i_state = i_state_bak; -+ wc = L'\0'; -+ } -+ -+ if (wc == L' ' && convert && column < INT_MAX) -+ { -+ ++pending; -+ ++column; -+ } -+ else if (wc == L'\t' && convert) -+ { -+ if (tab_size == 0) -+ { -+ /* Do not let tab_index == first_free_tab; -+ stop when it is 1 less. */ -+ while (tab_index < first_free_tab - 1 -+ && column >= tab_list[tab_index]) -+ tab_index++; -+ next_tab_column = tab_list[tab_index]; -+ if (tab_index < first_free_tab - 1) -+ tab_index++; -+ if (column >= next_tab_column) -+ { -+ convert = 0; /* Ran out of tab stops. */ -+ goto flush_pend_mb; -+ } -+ } -+ else -+ { -+ next_tab_column = column + tab_size - column % tab_size; -+ } -+ pending += next_tab_column - column; -+ column = next_tab_column; -+ } -+ else -+ { -+flush_pend_mb: -+ /* Flush pending spaces. Print as many tabs as possible, -+ then print the rest as spaces. */ -+ if (pending == 1) -+ { -+ putchar (' '); -+ pending = 0; -+ } -+ column -= pending; -+ while (pending > 0) -+ { -+ if (tab_size == 0) -+ { -+ /* Do not let print_tab_index == first_free_tab; -+ stop when it is 1 less. 
*/ -+ while (print_tab_index < first_free_tab - 1 -+ && column >= tab_list[print_tab_index]) -+ print_tab_index++; -+ next_tab_column = tab_list[print_tab_index]; -+ if (print_tab_index < first_free_tab - 1) -+ print_tab_index++; -+ } -+ else -+ { -+ next_tab_column = -+ column + tab_size - column % tab_size; -+ } -+ if (next_tab_column - column <= pending) -+ { -+ putchar ('\t'); -+ pending -= next_tab_column - column; -+ column = next_tab_column; -+ } -+ else -+ { -+ --print_tab_index; -+ column += pending; -+ while (pending != 0) -+ { -+ putchar (' '); -+ pending--; -+ } -+ } -+ } -+ -+ if (wc == WEOF) -+ { -+ fp = next_file (fp); -+ if (fp == NULL) -+ break; /* No more files. */ -+ else -+ { -+ memset (&i_state, '\0', sizeof(mbstate_t)); -+ continue; -+ } -+ } -+ -+ if (mblength == (size_t)-1 || mblength == (size_t)-2) -+ { -+ if (convert) -+ { -+ ++column; -+ if (convert_entire_line == 0) -+ convert = 0; -+ } -+ mblength = 1; -+ putchar (buf[0]); -+ } -+ else if (mblength == 0) -+ { -+ if (convert && convert_entire_line == 0) -+ convert = 0; -+ mblength = 1; -+ putchar ('\0'); -+ } -+ else -+ { -+ if (convert) -+ { -+ if (wc == L'\b') -+ { -+ if (column > 0) -+ --column; -+ } -+ else -+ { -+ int width; /* The width of WC. */ -+ -+ width = wcwidth (wc); -+ column += (width > 0) ? width : 0; -+ if (convert_entire_line == 0) -+ convert = 0; -+ } -+ } -+ -+ if (wc == L'\n') -+ { -+ tab_index = print_tab_index = 0; -+ column = pending = 0; -+ convert = 1; -+ } -+ fwrite (bufpos, sizeof(char), mblength, stdout); -+ } -+ } -+ buflen -= mblength; -+ bufpos += mblength; -+ } -+} -+#endif -+ -+ - void - usage (int status) - { -@@ -531,7 +750,12 @@ - - file_list = (optind < argc ? &argv[optind] : stdin_argv); - -- unexpand (); -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ unexpand_multibyte (); -+ else -+#endif -+ unexpand (); - - if (have_read_stdin && fclose (stdin) != 0) - error (EXIT_FAILURE, errno, "-"); ---- coreutils-6.8+/src/pr.c.i18n 2007-01-14 15:41:28.000000000 +0000 -+++ coreutils-6.8+/src/pr.c 2007-03-01 15:08:24.000000000 +0000 -@@ -313,6 +313,32 @@ - - #include - #include -+ -+/* Get MB_LEN_MAX. */ -+#include -+/* MB_LEN_MAX is incorrectly defined to be 1 in at least one GCC -+ installation; work around this configuration error. */ -+#if !defined MB_LEN_MAX || MB_LEN_MAX == 1 -+# define MB_LEN_MAX 16 -+#endif -+ -+/* Get MB_CUR_MAX. */ -+#include -+ -+/* Solaris 2.5 has a bug: must be included before . */ -+/* Get mbstate_t, mbrtowc(), wcwidth(). */ -+#if HAVE_WCHAR_H -+# include -+#endif -+ -+/* Get iswprint(). -- for wcwidth(). */ -+#if HAVE_WCTYPE_H -+# include -+#endif -+#if !defined iswprint && !HAVE_ISWPRINT -+# define iswprint(wc) 1 -+#endif -+ - #include "system.h" - #include "error.h" - #include "hard-locale.h" -@@ -324,6 +350,18 @@ - #include "strftime.h" - #include "xstrtol.h" - -+/* Some systems, like BeOS, have multibyte encodings but lack mbstate_t. */ -+#if HAVE_MBRTOWC && defined mbstate_t -+# define mbrtowc(pwc, s, n, ps) (mbrtowc) (pwc, s, n, 0) -+#endif -+ -+#ifndef HAVE_DECL_WCWIDTH -+"this configure-time declaration test was not run" -+#endif -+#if !HAVE_DECL_WCWIDTH -+extern int wcwidth (); -+#endif -+ - /* The official name of this program (e.g., no `g' prefix). */ - #define PROGRAM_NAME "pr" - -@@ -416,7 +454,20 @@ - - #define NULLCOL (COLUMN *)0 - --static int char_to_clump (char c); -+/* Funtion pointers to switch functions for single byte locale or for -+ multibyte locale. 
If multibyte functions do not exist in your sysytem, -+ these pointers always point the function for single byte locale. */ -+static void (*print_char) (char c); -+static int (*char_to_clump) (char c); -+ -+/* Functions for single byte locale. */ -+static void print_char_single (char c); -+static int char_to_clump_single (char c); -+ -+/* Functions for multibyte locale. */ -+static void print_char_multi (char c); -+static int char_to_clump_multi (char c); -+ - static bool read_line (COLUMN *p); - static bool print_page (void); - static bool print_stored (COLUMN *p); -@@ -426,6 +477,7 @@ - static void pad_across_to (int position); - static void add_line_number (COLUMN *p); - static void getoptarg (char *arg, char switch_char, char *character, -+ int *character_length, int *character_width, - int *number); - void usage (int status); - static void print_files (int number_of_files, char **av); -@@ -440,7 +492,6 @@ - static void pad_down (int lines); - static void read_rest_of_line (COLUMN *p); - static void skip_read (COLUMN *p, int column_number); --static void print_char (char c); - static void cleanup (void); - static void print_sep_string (void); - static void separator_string (const char *optarg_S); -@@ -455,7 +506,7 @@ - we store the leftmost columns contiguously in buff. - To print a line from buff, get the index of the first character - from line_vector[i], and print up to line_vector[i + 1]. */ --static char *buff; -+static unsigned char *buff; - - /* Index of the position in buff where the next character - will be stored. */ -@@ -559,7 +610,7 @@ - static bool untabify_input = false; - - /* (-e) The input tab character. */ --static char input_tab_char = '\t'; -+static char input_tab_char[MB_LEN_MAX] = "\t"; - - /* (-e) Tabstops are at chars_per_tab, 2*chars_per_tab, 3*chars_per_tab, ... - where the leftmost column is 1. */ -@@ -569,7 +620,10 @@ - static bool tabify_output = false; - - /* (-i) The output tab character. */ --static char output_tab_char = '\t'; -+static char output_tab_char[MB_LEN_MAX] = "\t"; -+ -+/* (-i) The byte length of output tab character. */ -+static int output_tab_char_length = 1; - - /* (-i) The width of the output tab. */ - static int chars_per_output_tab = 8; -@@ -643,7 +697,13 @@ - static bool numbered_lines = false; - - /* (-n) Character which follows each line number. */ --static char number_separator = '\t'; -+static char number_separator[MB_LEN_MAX] = "\t"; -+ -+/* (-n) The byte length of the character which follows each line number. */ -+static int number_separator_length = 1; -+ -+/* (-n) The character width of the character which follows each line number. */ -+static int number_separator_width = 0; - - /* (-n) line counting starts with 1st line of input file (not with 1st - line of 1st page printed). */ -@@ -696,6 +756,7 @@ - -a|COLUMN|-m is a `space' and with the -J option a `tab'. */ - static char *col_sep_string = ""; - static int col_sep_length = 0; -+static int col_sep_width = 0; - static char *column_separator = " "; - static char *line_separator = "\t"; - -@@ -852,6 +913,13 @@ - col_sep_length = (int) strlen (optarg_S); - col_sep_string = xmalloc (col_sep_length + 1); - strcpy (col_sep_string, optarg_S); -+ -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ col_sep_width = mbswidth (col_sep_string, 0); -+ else -+#endif -+ col_sep_width = col_sep_length; - } - - int -@@ -877,6 +945,21 @@ - - atexit (close_stdout); - -+/* Define which functions are used, the ones for single byte locale or the ones -+ for multibyte locale. 
*/ -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) -+ { -+ print_char = print_char_multi; -+ char_to_clump = char_to_clump_multi; -+ } -+ else -+#endif -+ { -+ print_char = print_char_single; -+ char_to_clump = char_to_clump_single; -+ } -+ - n_files = 0; - file_names = (argc > 1 - ? xmalloc ((argc - 1) * sizeof (char *)) -@@ -949,8 +1032,12 @@ - break; - case 'e': - if (optarg) -- getoptarg (optarg, 'e', &input_tab_char, -- &chars_per_input_tab); -+ { -+ int dummy_length, dummy_width; -+ -+ getoptarg (optarg, 'e', input_tab_char, &dummy_length, -+ &dummy_width, &chars_per_input_tab); -+ } - /* Could check tab width > 0. */ - untabify_input = true; - break; -@@ -963,8 +1050,12 @@ - break; - case 'i': - if (optarg) -- getoptarg (optarg, 'i', &output_tab_char, -- &chars_per_output_tab); -+ { -+ int dummy_width; -+ -+ getoptarg (optarg, 'i', output_tab_char, &output_tab_char_length, -+ &dummy_width, &chars_per_output_tab); -+ } - /* Could check tab width > 0. */ - tabify_output = true; - break; -@@ -991,8 +1082,8 @@ - case 'n': - numbered_lines = true; - if (optarg) -- getoptarg (optarg, 'n', &number_separator, -- &chars_per_number); -+ getoptarg (optarg, 'n', number_separator, &number_separator_length, -+ &number_separator_width, &chars_per_number); - break; - case 'N': - skip_count = false; -@@ -1031,7 +1122,7 @@ - old_s = false; - /* Reset an additional input of -s, -S dominates -s */ - col_sep_string = ""; -- col_sep_length = 0; -+ col_sep_length = col_sep_width = 0; - use_col_separator = true; - if (optarg) - separator_string (optarg); -@@ -1188,10 +1279,45 @@ - a number. */ - - static void --getoptarg (char *arg, char switch_char, char *character, int *number) -+getoptarg (char *arg, char switch_char, char *character, int *character_length, -+ int *character_width, int *number) - { - if (!ISDIGIT (*arg)) -- *character = *arg++; -+ { -+#ifdef HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1) /* for multibyte locale. */ -+ { -+ wchar_t wc; -+ size_t mblength; -+ int width; -+ mbstate_t state = {'\0'}; -+ -+ mblength = mbrtowc (&wc, arg, strnlen(arg, MB_LEN_MAX), &state); -+ -+ if (mblength == (size_t)-1 || mblength == (size_t)-2) -+ { -+ *character_length = 1; -+ *character_width = 1; -+ } -+ else -+ { -+ *character_length = (mblength < 1) ? 1 : mblength; -+ width = wcwidth (wc); -+ *character_width = (width < 0) ? 0 : width; -+ } -+ -+ strncpy (character, arg, *character_length); -+ arg += *character_length; -+ } -+ else /* for single byte locale. */ -+#endif -+ { -+ *character = *arg++; -+ *character_length = 1; -+ *character_width = 1; -+ } -+ } -+ - if (*arg) - { - long int tmp_long; -@@ -1256,7 +1382,7 @@ - else - col_sep_string = column_separator; - -- col_sep_length = 1; -+ col_sep_length = col_sep_width = 1; - use_col_separator = true; - } - /* It's rather pointless to define a TAB separator with column -@@ -1288,11 +1414,11 @@ - TAB_WIDTH (chars_per_input_tab, chars_per_number); */ - - /* Estimate chars_per_text without any margin and keep it constant. */ -- if (number_separator == '\t') -+ if (number_separator[0] == '\t') - number_width = chars_per_number + - TAB_WIDTH (chars_per_default_tab, chars_per_number); - else -- number_width = chars_per_number + 1; -+ number_width = chars_per_number + number_separator_width; - - /* The number is part of the column width unless we are - printing files in parallel. 
*/ -@@ -1307,7 +1433,7 @@ - } - - chars_per_column = (chars_per_line - chars_used_by_number - -- (columns - 1) * col_sep_length) / columns; -+ (columns - 1) * col_sep_width) / columns; - - if (chars_per_column < 1) - error (EXIT_FAILURE, 0, _("page width too narrow")); -@@ -1432,7 +1558,7 @@ - - /* Enlarge p->start_position of first column to use the same form of - padding_not_printed with all columns. */ -- h = h + col_sep_length; -+ h = h + col_sep_width; - - /* This loop takes care of all but the rightmost column. */ - -@@ -1466,7 +1592,7 @@ - } - else - { -- h = h_next + col_sep_length; -+ h = h_next + col_sep_width; - h_next = h + chars_per_column; - } - } -@@ -1756,9 +1882,9 @@ - align_column (COLUMN *p) - { - padding_not_printed = p->start_position; -- if (padding_not_printed - col_sep_length > 0) -+ if (padding_not_printed - col_sep_width > 0) - { -- pad_across_to (padding_not_printed - col_sep_length); -+ pad_across_to (padding_not_printed - col_sep_width); - padding_not_printed = ANYWHERE; - } - -@@ -2029,13 +2155,13 @@ - /* May be too generous. */ - buff = X2REALLOC (buff, &buff_allocated); - } -- buff[buff_current++] = c; -+ buff[buff_current++] = (unsigned char) c; - } - - static void - add_line_number (COLUMN *p) - { -- int i; -+ int i, j; - char *s; - int left_cut; - -@@ -2058,22 +2184,24 @@ - /* Tabification is assumed for multiple columns, also for n-separators, - but `default n-separator = TAB' hasn't been given priority over - equal column_width also specified by POSIX. */ -- if (number_separator == '\t') -+ if (number_separator[0] == '\t') - { - i = number_width - chars_per_number; - while (i-- > 0) - (p->char_func) (' '); - } - else -- (p->char_func) (number_separator); -+ for (j = 0; j < number_separator_length; j++) -+ (p->char_func) (number_separator[j]); - } - else - /* To comply with POSIX, we avoid any expansion of default TAB - separator with a single column output. No column_width requirement - has to be considered. */ - { -- (p->char_func) (number_separator); -- if (number_separator == '\t') -+ for (j = 0; j < number_separator_length; j++) -+ (p->char_func) (number_separator[j]); -+ if (number_separator[0] == '\t') - output_position = POS_AFTER_TAB (chars_per_output_tab, - output_position); - } -@@ -2234,7 +2362,7 @@ - while (goal - h_old > 1 - && (h_new = POS_AFTER_TAB (chars_per_output_tab, h_old)) <= goal) - { -- putchar (output_tab_char); -+ fwrite (output_tab_char, sizeof(char), output_tab_char_length, stdout); - h_old = h_new; - } - while (++h_old <= goal) -@@ -2254,6 +2382,7 @@ - { - char *s; - int l = col_sep_length; -+ int not_space_flag; - - s = col_sep_string; - -@@ -2267,6 +2396,7 @@ - { - for (; separators_not_printed > 0; --separators_not_printed) - { -+ not_space_flag = 0; - while (l-- > 0) - { - /* 3 types of sep_strings: spaces only, spaces and chars, -@@ -2280,12 +2410,15 @@ - } - else - { -+ not_space_flag = 1; - if (spaces_not_printed > 0) - print_white_space (); - putchar (*s++); -- ++output_position; - } - } -+ if (not_space_flag) -+ output_position += col_sep_width; -+ - /* sep_string ends with some spaces */ - if (spaces_not_printed > 0) - print_white_space (); -@@ -2313,7 +2446,7 @@ - required number of tabs and spaces. 
*/ - - static void --print_char (char c) -+print_char_single (char c) - { - if (tabify_output) - { -@@ -2337,6 +2470,74 @@ - putchar (c); - } - -+#ifdef HAVE_MBRTOWC -+static void -+print_char_multi (char c) -+{ -+ static size_t mbc_pos = 0; -+ static char mbc[MB_LEN_MAX] = {'\0'}; -+ static mbstate_t state = {'\0'}; -+ mbstate_t state_bak; -+ wchar_t wc; -+ size_t mblength; -+ int width; -+ -+ if (tabify_output) -+ { -+ state_bak = state; -+ mbc[mbc_pos++] = c; -+ mblength = mbrtowc (&wc, mbc, mbc_pos, &state); -+ -+ while (mbc_pos > 0) -+ { -+ switch (mblength) -+ { -+ case (size_t)-2: -+ state = state_bak; -+ return; -+ -+ case (size_t)-1: -+ state = state_bak; -+ ++output_position; -+ putchar (mbc[0]); -+ memmove (mbc, mbc + 1, MB_CUR_MAX - 1); -+ --mbc_pos; -+ break; -+ -+ case 0: -+ mblength = 1; -+ -+ default: -+ if (wc == L' ') -+ { -+ memmove (mbc, mbc + mblength, MB_CUR_MAX - mblength); -+ --mbc_pos; -+ ++spaces_not_printed; -+ return; -+ } -+ else if (spaces_not_printed > 0) -+ print_white_space (); -+ -+ /* Nonprintables are assumed to have width 0, except L'\b'. */ -+ if ((width = wcwidth (wc)) < 1) -+ { -+ if (wc == L'\b') -+ --output_position; -+ } -+ else -+ output_position += width; -+ -+ fwrite (mbc, sizeof(char), mblength, stdout); -+ memmove (mbc, mbc + mblength, MB_CUR_MAX - mblength); -+ mbc_pos -= mblength; -+ } -+ } -+ return; -+ } -+ putchar (c); -+} -+#endif -+ - /* Skip to page PAGE before printing. - PAGE may be larger than total number of pages. */ - -@@ -2517,9 +2718,9 @@ - align_empty_cols = false; - } - -- if (padding_not_printed - col_sep_length > 0) -+ if (padding_not_printed - col_sep_width > 0) - { -- pad_across_to (padding_not_printed - col_sep_length); -+ pad_across_to (padding_not_printed - col_sep_width); - padding_not_printed = ANYWHERE; - } - -@@ -2620,9 +2821,9 @@ - } - } - -- if (padding_not_printed - col_sep_length > 0) -+ if (padding_not_printed - col_sep_width > 0) - { -- pad_across_to (padding_not_printed - col_sep_length); -+ pad_across_to (padding_not_printed - col_sep_width); - padding_not_printed = ANYWHERE; - } - -@@ -2635,8 +2836,8 @@ - if (spaces_not_printed == 0) - { - output_position = p->start_position + end_vector[line]; -- if (p->start_position - col_sep_length == chars_per_margin) -- output_position -= col_sep_length; -+ if (p->start_position - col_sep_width == chars_per_margin) -+ output_position -= col_sep_width; - } - - return true; -@@ -2655,7 +2856,7 @@ - number of characters is 1.) 
*/ - - static int --char_to_clump (char c) -+char_to_clump_single (char c) - { - unsigned char uc = c; - char *s = clump_buff; -@@ -2665,10 +2866,10 @@ - int chars; - int chars_per_c = 8; - -- if (c == input_tab_char) -+ if (c == input_tab_char[0]) - chars_per_c = chars_per_input_tab; - -- if (c == input_tab_char || c == '\t') -+ if (c == input_tab_char[0] || c == '\t') - { - width = TAB_WIDTH (chars_per_c, input_position); - -@@ -2739,6 +2940,154 @@ - return chars; - } - -+#ifdef HAVE_MBRTOWC -+static int -+char_to_clump_multi (char c) -+{ -+ static size_t mbc_pos = 0; -+ static char mbc[MB_LEN_MAX] = {'\0'}; -+ static mbstate_t state = {'\0'}; -+ mbstate_t state_bak; -+ wchar_t wc; -+ size_t mblength; -+ int wc_width; -+ register char *s = clump_buff; -+ register int i, j; -+ char esc_buff[4]; -+ int width; -+ int chars; -+ int chars_per_c = 8; -+ -+ state_bak = state; -+ mbc[mbc_pos++] = c; -+ mblength = mbrtowc (&wc, mbc, mbc_pos, &state); -+ -+ width = 0; -+ chars = 0; -+ while (mbc_pos > 0) -+ { -+ switch (mblength) -+ { -+ case (size_t)-2: -+ state = state_bak; -+ return 0; -+ -+ case (size_t)-1: -+ state = state_bak; -+ mblength = 1; -+ -+ if (use_esc_sequence || use_cntrl_prefix) -+ { -+ width = +4; -+ chars = +4; -+ *s++ = '\\'; -+ sprintf (esc_buff, "%03o", mbc[0]); -+ for (i = 0; i <= 2; ++i) -+ *s++ = (int) esc_buff[i]; -+ } -+ else -+ { -+ width += 1; -+ chars += 1; -+ *s++ = mbc[0]; -+ } -+ break; -+ -+ case 0: -+ mblength = 1; -+ /* Fall through */ -+ -+ default: -+ if (memcmp (mbc, input_tab_char, mblength) == 0) -+ chars_per_c = chars_per_input_tab; -+ -+ if (memcmp (mbc, input_tab_char, mblength) == 0 || c == '\t') -+ { -+ int width_inc; -+ -+ width_inc = TAB_WIDTH (chars_per_c, input_position); -+ width += width_inc; -+ -+ if (untabify_input) -+ { -+ for (i = width_inc; i; --i) -+ *s++ = ' '; -+ chars += width_inc; -+ } -+ else -+ { -+ for (i = 0; i < mblength; i++) -+ *s++ = mbc[i]; -+ chars += mblength; -+ } -+ } -+ else if ((wc_width = wcwidth (wc)) < 1) -+ { -+ if (use_esc_sequence) -+ { -+ for (i = 0; i < mblength; i++) -+ { -+ width += 4; -+ chars += 4; -+ *s++ = '\\'; -+ sprintf (esc_buff, "%03o", c); -+ for (j = 0; j <= 2; ++j) -+ *s++ = (int) esc_buff[j]; -+ } -+ } -+ else if (use_cntrl_prefix) -+ { -+ if (wc < 0200) -+ { -+ width += 2; -+ chars += 2; -+ *s++ = '^'; -+ *s++ = wc ^ 0100; -+ } -+ else -+ { -+ for (i = 0; i < mblength; i++) -+ { -+ width += 4; -+ chars += 4; -+ *s++ = '\\'; -+ sprintf (esc_buff, "%03o", c); -+ for (j = 0; j <= 2; ++j) -+ *s++ = (int) esc_buff[j]; -+ } -+ } -+ } -+ else if (wc == L'\b') -+ { -+ width += -1; -+ chars += 1; -+ *s++ = c; -+ } -+ else -+ { -+ width += 0; -+ chars += mblength; -+ for (i = 0; i < mblength; i++) -+ *s++ = mbc[i]; -+ } -+ } -+ else -+ { -+ width += wc_width; -+ chars += mblength; -+ for (i = 0; i < mblength; i++) -+ *s++ = mbc[i]; -+ } -+ } -+ memmove (mbc, mbc + mblength, MB_CUR_MAX - mblength); -+ mbc_pos -= mblength; -+ } -+ -+ input_position += width; -+ return chars; -+} -+#endif -+ - /* We've just printed some files and need to clean up things before - looking for more options and printing the next batch of files. - ---- coreutils-6.8+/src/cut.c.i18n 2007-01-14 15:41:28.000000000 +0000 -+++ coreutils-6.8+/src/cut.c 2007-03-01 15:08:24.000000000 +0000 -@@ -29,6 +29,11 @@ - #include - #include - #include -+ -+/* Get mbstate_t, mbrtowc(). 
*/ -+#if HAVE_WCHAR_H -+# include -+#endif - #include "system.h" - - #include "error.h" -@@ -37,6 +42,18 @@ - #include "quote.h" - #include "xstrndup.h" - -+/* MB_LEN_MAX is incorrectly defined to be 1 in at least one GCC -+ installation; work around this configuration error. */ -+#if !defined MB_LEN_MAX || MB_LEN_MAX < 2 -+# undef MB_LEN_MAX -+# define MB_LEN_MAX 16 -+#endif -+ -+/* Some systems, like BeOS, have multibyte encodings but lack mbstate_t. */ -+#if HAVE_MBRTOWC && defined mbstate_t -+# define mbrtowc(pwc, s, n, ps) (mbrtowc) (pwc, s, n, 0) -+#endif -+ - /* The official name of this program (e.g., no `g' prefix). */ - #define PROGRAM_NAME "cut" - -@@ -67,6 +84,52 @@ - } \ - while (0) - -+/* Refill the buffer BUF to get a multibyte character. */ -+#define REFILL_BUFFER(BUF, BUFPOS, BUFLEN, STREAM) \ -+ do \ -+ { \ -+ if (BUFLEN < MB_LEN_MAX && !feof (STREAM) && !ferror (STREAM)) \ -+ { \ -+ memmove (BUF, BUFPOS, BUFLEN); \ -+ BUFLEN += fread (BUF + BUFLEN, sizeof(char), BUFSIZ, STREAM); \ -+ BUFPOS = BUF; \ -+ } \ -+ } \ -+ while (0) -+ -+/* Get wide character on BUFPOS. BUFPOS is not included after that. -+ If byte sequence is not valid as a character, CONVFAIL is 1. Otherwise 0. */ -+#define GET_NEXT_WC_FROM_BUFFER(WC, BUFPOS, BUFLEN, MBLENGTH, STATE, CONVFAIL) \ -+ do \ -+ { \ -+ mbstate_t state_bak; \ -+ \ -+ if (BUFLEN < 1) \ -+ { \ -+ WC = WEOF; \ -+ break; \ -+ } \ -+ \ -+ /* Get a wide character. */ \ -+ CONVFAIL = 0; \ -+ state_bak = STATE; \ -+ MBLENGTH = mbrtowc ((wchar_t *)&WC, BUFPOS, BUFLEN, &STATE); \ -+ \ -+ switch (MBLENGTH) \ -+ { \ -+ case (size_t)-1: \ -+ case (size_t)-2: \ -+ CONVFAIL++; \ -+ STATE = state_bak; \ -+ /* Fall througn. */ \ -+ \ -+ case 0: \ -+ MBLENGTH = 1; \ -+ break; \ -+ } \ -+ } \ -+ while (0) -+ - struct range_pair - { - size_t lo; -@@ -85,7 +148,7 @@ - /* The number of bytes allocated for FIELD_1_BUFFER. */ - static size_t field_1_bufsize; - --/* The largest field or byte index used as an endpoint of a closed -+/* The largest byte, character or field index used as an endpoint of a closed - or degenerate range specification; this doesn't include the starting - index of right-open-ended ranges. For example, with either range spec - `2-5,9-', `2-3,5,9-' this variable would be set to 5. */ -@@ -97,10 +160,11 @@ - - /* This is a bit vector. - In byte mode, which bytes to output. -+ In character mode, which characters to output. - In field mode, which DELIM-separated fields to output. -- Both bytes and fields are numbered starting with 1, -+ Bytes, characters and fields are numbered starting with 1, - so the zeroth bit of this array is unused. -- A field or byte K has been selected if -+ A byte, character or field K has been selected if - (K <= MAX_RANGE_ENDPOINT and is_printable_field(K)) - || (EOL_RANGE_START > 0 && K >= EOL_RANGE_START). */ - static unsigned char *printable_field; -@@ -109,9 +173,12 @@ - { - undefined_mode, - -- /* Output characters that are in the given bytes. */ -+ /* Output bytes that are at the given positions. */ - byte_mode, - -+ /* Output characters that are at the given positions. */ -+ character_mode, -+ - /* Output the given delimeter-separated fields. */ - field_mode - }; -@@ -121,6 +188,13 @@ - - static enum operating_mode operating_mode; - -+/* If nonzero, when in byte mode, don't split multibyte characters. */ -+static int byte_mode_character_aware; -+ -+/* If nonzero, the function for single byte locale is work -+ if this program runs on multibyte locale. 
*/ -+static int force_singlebyte_mode; -+ - /* If true do not output lines containing no delimeter characters. - Otherwise, all such lines are printed. This option is valid only - with field mode. */ -@@ -132,6 +206,9 @@ - - /* The delimeter character for field mode. */ - static unsigned char delim; -+#if HAVE_WCHAR_H -+static wchar_t wcdelim; -+#endif - - /* True if the --output-delimiter=STRING option was specified. */ - static bool output_delimiter_specified; -@@ -205,7 +282,7 @@ - -f, --fields=LIST select only these fields; also print any line\n\ - that contains no delimiter character, unless\n\ - the -s option is specified\n\ -- -n (ignored)\n\ -+ -n with -b: don't split multibyte characters\n\ - "), stdout); - fputs (_("\ - --complement complement the set of selected bytes, characters\n\ -@@ -362,7 +439,7 @@ - in_digits = false; - /* Starting a range. */ - if (dash_found) -- FATAL_ERROR (_("invalid byte or field list")); -+ FATAL_ERROR (_("invalid byte, character or field list")); - dash_found = true; - fieldstr++; - -@@ -387,14 +464,16 @@ - if (value == 0) - { - /* `n-'. From `initial' to end of line. */ -- eol_range_start = initial; -+ if (eol_range_start == 0 || -+ (eol_range_start != 0 && eol_range_start > initial)) -+ eol_range_start = initial; - field_found = true; - } - else - { - /* `m-n' or `-n' (1-n). */ - if (value < initial) -- FATAL_ERROR (_("invalid byte or field list")); -+ FATAL_ERROR (_("invalid byte, character or field list")); - - /* Is there already a range going to end of line? */ - if (eol_range_start != 0) -@@ -467,6 +546,9 @@ - if (operating_mode == byte_mode) - error (0, 0, - _("byte offset %s is too large"), quote (bad_num)); -+ else if (operating_mode == character_mode) -+ error (0, 0, -+ _("character offset %s is too large"), quote (bad_num)); - else - error (0, 0, - _("field number %s is too large"), quote (bad_num)); -@@ -477,7 +559,7 @@ - fieldstr++; - } - else -- FATAL_ERROR (_("invalid byte or field list")); -+ FATAL_ERROR (_("invalid byte, character or field list")); - } - - max_range_endpoint = 0; -@@ -570,6 +652,63 @@ - } - } - -+#if HAVE_MBRTOWC -+/* This function is in use for the following case. -+ -+ 1. Read from the stream STREAM, printing to standard output any selected -+ characters. -+ -+ 2. Read from stream STREAM, printing to standard output any selected bytes, -+ without splitting multibyte characters. */ -+ -+static void -+cut_characters_or_cut_bytes_no_split (FILE *stream) -+{ -+ int idx; /* number of bytes or characters in the line so far. */ -+ char buf[MB_LEN_MAX + BUFSIZ]; /* For spooling a read byte sequence. */ -+ char *bufpos; /* Next read position of BUF. */ -+ size_t buflen; /* The length of the byte sequence in buf. */ -+ wint_t wc; /* A gotten wide character. */ -+ size_t mblength; /* The byte size of a multibyte character which shows -+ as same character as WC. */ -+ mbstate_t state; /* State of the stream. */ -+ int convfail; /* 1, when conversion is failed. Otherwise 0. */ -+ -+ idx = 0; -+ buflen = 0; -+ bufpos = buf; -+ memset (&state, '\0', sizeof(mbstate_t)); -+ -+ while (1) -+ { -+ REFILL_BUFFER (buf, bufpos, buflen, stream); -+ -+ GET_NEXT_WC_FROM_BUFFER (wc, bufpos, buflen, mblength, state, convfail); -+ -+ if (wc == WEOF) -+ { -+ if (idx > 0) -+ putchar ('\n'); -+ break; -+ } -+ else if (wc == L'\n') -+ { -+ putchar ('\n'); -+ idx = 0; -+ } -+ else -+ { -+ idx += (operating_mode == byte_mode) ? 
mblength : 1; -+ if (print_kth (idx, NULL)) -+ fwrite (bufpos, mblength, sizeof(char), stdout); -+ } -+ -+ buflen -= mblength; -+ bufpos += mblength; -+ } -+} -+#endif -+ - /* Read from stream STREAM, printing to standard output any selected fields. */ - - static void -@@ -692,13 +831,192 @@ - } - } - -+#if HAVE_MBRTOWC -+static void -+cut_fields_mb (FILE *stream) -+{ -+ int c; -+ unsigned int field_idx; -+ int found_any_selected_field; -+ int buffer_first_field; -+ int empty_input; -+ char buf[MB_LEN_MAX + BUFSIZ]; /* For spooling a read byte sequence. */ -+ char *bufpos; /* Next read position of BUF. */ -+ size_t buflen; /* The length of the byte sequence in buf. */ -+ wint_t wc = 0; /* A gotten wide character. */ -+ size_t mblength; /* The byte size of a multibyte character which shows -+ as same character as WC. */ -+ mbstate_t state; /* State of the stream. */ -+ int convfail; /* 1, when conversion is failed. Otherwise 0. */ -+ -+ found_any_selected_field = 0; -+ field_idx = 1; -+ bufpos = buf; -+ buflen = 0; -+ memset (&state, '\0', sizeof(mbstate_t)); -+ -+ c = getc (stream); -+ empty_input = (c == EOF); -+ if (c != EOF) -+ ungetc (c, stream); -+ else -+ wc = WEOF; -+ -+ /* To support the semantics of the -s flag, we may have to buffer -+ all of the first field to determine whether it is `delimited.' -+ But that is unnecessary if all non-delimited lines must be printed -+ and the first field has been selected, or if non-delimited lines -+ must be suppressed and the first field has *not* been selected. -+ That is because a non-delimited line has exactly one field. */ -+ buffer_first_field = (suppress_non_delimited ^ !print_kth (1, NULL)); -+ -+ while (1) -+ { -+ if (field_idx == 1 && buffer_first_field) -+ { -+ int len = 0; -+ -+ while (1) -+ { -+ REFILL_BUFFER (buf, bufpos, buflen, stream); -+ -+ GET_NEXT_WC_FROM_BUFFER -+ (wc, bufpos, buflen, mblength, state, convfail); -+ -+ if (wc == WEOF) -+ break; -+ -+ field_1_buffer = xrealloc (field_1_buffer, len + mblength); -+ memcpy (field_1_buffer + len, bufpos, mblength); -+ len += mblength; -+ buflen -= mblength; -+ bufpos += mblength; -+ -+ if (!convfail && (wc == L'\n' || wc == wcdelim)) -+ break; -+ } -+ -+ if (wc == WEOF) -+ break; -+ -+ /* If the first field extends to the end of line (it is not -+ delimited) and we are printing all non-delimited lines, -+ print this one. */ -+ if (convfail || (!convfail && wc != wcdelim)) -+ { -+ if (suppress_non_delimited) -+ { -+ /* Empty. */ -+ } -+ else -+ { -+ fwrite (field_1_buffer, sizeof (char), len, stdout); -+ /* Make sure the output line is newline terminated. */ -+ if (convfail || (!convfail && wc != L'\n')) -+ putchar ('\n'); -+ } -+ continue; -+ } -+ -+ if (print_kth (1, NULL)) -+ { -+ /* Print the field, but not the trailing delimiter. 
*/ -+ fwrite (field_1_buffer, sizeof (char), len - 1, stdout); -+ found_any_selected_field = 1; -+ } -+ ++field_idx; -+ } -+ -+ if (wc != WEOF) -+ { -+ if (print_kth (field_idx, NULL)) -+ { -+ if (found_any_selected_field) -+ { -+ fwrite (output_delimiter_string, sizeof (char), -+ output_delimiter_length, stdout); -+ } -+ found_any_selected_field = 1; -+ } -+ -+ while (1) -+ { -+ REFILL_BUFFER (buf, bufpos, buflen, stream); -+ -+ GET_NEXT_WC_FROM_BUFFER -+ (wc, bufpos, buflen, mblength, state, convfail); -+ -+ if (wc == WEOF) -+ break; -+ else if (!convfail && (wc == wcdelim || wc == L'\n')) -+ { -+ buflen -= mblength; -+ bufpos += mblength; -+ break; -+ } -+ -+ if (print_kth (field_idx, NULL)) -+ fwrite (bufpos, mblength, sizeof(char), stdout); -+ -+ buflen -= mblength; -+ bufpos += mblength; -+ } -+ } -+ -+ if ((!convfail || wc == L'\n') && buflen < 1) -+ wc = WEOF; -+ -+ if (!convfail && wc == wcdelim) -+ ++field_idx; -+ else if (wc == WEOF || (!convfail && wc == L'\n')) -+ { -+ if (found_any_selected_field -+ || (!empty_input && !(suppress_non_delimited && field_idx == 1))) -+ putchar ('\n'); -+ if (wc == WEOF) -+ break; -+ field_idx = 1; -+ found_any_selected_field = 0; -+ } -+ } -+} -+#endif -+ - static void - cut_stream (FILE *stream) - { -- if (operating_mode == byte_mode) -- cut_bytes (stream); -+#if HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1 && !force_singlebyte_mode) -+ { -+ switch (operating_mode) -+ { -+ case byte_mode: -+ if (byte_mode_character_aware) -+ cut_characters_or_cut_bytes_no_split (stream); -+ else -+ cut_bytes (stream); -+ break; -+ -+ case character_mode: -+ cut_characters_or_cut_bytes_no_split (stream); -+ break; -+ -+ case field_mode: -+ cut_fields_mb (stream); -+ break; -+ -+ default: -+ abort (); -+ } -+ } - else -- cut_fields (stream); -+#endif -+ { -+ if (operating_mode == field_mode) -+ cut_fields (stream); -+ else -+ cut_bytes (stream); -+ } - } - - /* Process file FILE to standard output. -@@ -748,6 +1066,8 @@ - bool ok; - bool delim_specified = false; - char *spec_list_string IF_LINT(= NULL); -+ char mbdelim[MB_LEN_MAX + 1]; -+ size_t delimlen = 0; - - initialize_main (&argc, &argv); - program_name = argv[0]; -@@ -770,7 +1090,6 @@ - switch (optc) - { - case 'b': -- case 'c': - /* Build the byte list. */ - if (operating_mode != undefined_mode) - FATAL_ERROR (_("only one type of list may be specified")); -@@ -778,6 +1097,14 @@ - spec_list_string = optarg; - break; - -+ case 'c': -+ /* Build the character list. */ -+ if (operating_mode != undefined_mode) -+ FATAL_ERROR (_("only one type of list may be specified")); -+ operating_mode = character_mode; -+ spec_list_string = optarg; -+ break; -+ - case 'f': - /* Build the field list. */ - if (operating_mode != undefined_mode) -@@ -789,10 +1116,35 @@ - case 'd': - /* New delimiter. */ - /* Interpret -d '' to mean `use the NUL byte as the delimiter.' */ -- if (optarg[0] != '\0' && optarg[1] != '\0') -- FATAL_ERROR (_("the delimiter must be a single character")); -- delim = optarg[0]; -- delim_specified = true; -+#if HAVE_MBRTOWC -+ { -+ if(MB_CUR_MAX > 1) -+ { -+ mbstate_t state; -+ -+ memset (&state, '\0', sizeof(mbstate_t)); -+ delimlen = mbrtowc (&wcdelim, optarg, strnlen(optarg, MB_LEN_MAX), &state); -+ -+ if (delimlen == (size_t)-1 || delimlen == (size_t)-2) -+ ++force_singlebyte_mode; -+ else -+ { -+ delimlen = (delimlen < 1) ? 
1 : delimlen; -+ if (wcdelim != L'\0' && *(optarg + delimlen) != '\0') -+ FATAL_ERROR (_("the delimiter must be a single character")); -+ memcpy (mbdelim, optarg, delimlen); -+ } -+ } -+ -+ if (MB_CUR_MAX <= 1 || force_singlebyte_mode) -+#endif -+ { -+ if (optarg[0] != '\0' && optarg[1] != '\0') -+ FATAL_ERROR (_("the delimiter must be a single character")); -+ delim = (unsigned char) optarg[0]; -+ } -+ delim_specified = true; -+ } - break; - - case OUTPUT_DELIMITER_OPTION: -@@ -805,6 +1157,7 @@ - break; - - case 'n': -+ byte_mode_character_aware = 1; - break; - - case 's': -@@ -827,7 +1180,7 @@ - if (operating_mode == undefined_mode) - FATAL_ERROR (_("you must specify a list of bytes, characters, or fields")); - -- if (delim != '\0' && operating_mode != field_mode) -+ if (delim_specified && operating_mode != field_mode) - FATAL_ERROR (_("an input delimiter may be specified only\ - when operating on fields")); - -@@ -854,15 +1207,34 @@ - } - - if (!delim_specified) -- delim = '\t'; -+ { -+ delim = '\t'; -+#ifdef HAVE_MBRTOWC -+ wcdelim = L'\t'; -+ mbdelim[0] = '\t'; -+ mbdelim[1] = '\0'; -+ delimlen = 1; -+#endif -+ } - - if (output_delimiter_string == NULL) - { -- static char dummy[2]; -- dummy[0] = delim; -- dummy[1] = '\0'; -- output_delimiter_string = dummy; -- output_delimiter_length = 1; -+#ifdef HAVE_MBRTOWC -+ if (MB_CUR_MAX > 1 && !force_singlebyte_mode) -+ { -+ output_delimiter_string = xstrdup(mbdelim); -+ output_delimiter_length = delimlen; -+ } -+ -+ if (MB_CUR_MAX <= 1 || force_singlebyte_mode) -+#endif -+ { -+ static char dummy[2]; -+ dummy[0] = delim; -+ dummy[1] = '\0'; -+ output_delimiter_string = dummy; -+ output_delimiter_length = 1; -+ } - } - - if (optind == argc) diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-ls-x.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-ls-x.patch deleted file mode 100644 index aba8742f6..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-ls-x.patch +++ /dev/null @@ -1,117 +0,0 @@ -Upstream-Status: Inappropriate [legacy version] - -This patch was imported from the Fedora Core 8 coreutils-6.9-9 package. - -The package is stated as being Licensed as GPLv2+. - -Signed-off-by: Mark Hatle - ---- coreutils-6.9/src/ls.c.ls-x 2007-06-13 14:27:36.000000000 +0100 -+++ coreutils-6.9/src/ls.c 2007-06-13 14:28:42.000000000 +0100 -@@ -4151,16 +4151,16 @@ - size_t pos = 0; - size_t cols = calculate_columns (false); - struct column_info const *line_fmt = &column_info[cols - 1]; -- size_t name_length = length_of_file_name_and_frills (cwd_file); -+ struct fileinfo const *f = sorted_file[0]; -+ size_t name_length = length_of_file_name_and_frills (f); - size_t max_name_length = line_fmt->col_arr[0]; - - /* Print first entry. */ -- print_file_name_and_frills (cwd_file); -+ print_file_name_and_frills (f); - - /* Now the rest. 
*/ - for (filesno = 1; filesno < cwd_n_used; ++filesno) - { -- struct fileinfo const *f; - size_t col = filesno % cols; - - if (col == 0) ---- coreutils-6.9/tests/ls/Makefile.am.ls-x 2007-03-18 21:36:43.000000000 +0000 -+++ coreutils-6.9/tests/ls/Makefile.am 2007-06-13 14:28:42.000000000 +0100 -@@ -24,7 +24,7 @@ - stat-dtype \ - inode dangle file-type recursive dired infloop \ - rt-1 time-1 symlink-slash follow-slink no-arg m-option \ -- stat-vs-dirent -+ stat-vs-dirent x-option - - EXTRA_DIST = $(TESTS) - TESTS_ENVIRONMENT = \ ---- /dev/null 2007-06-13 08:43:51.993263382 +0100 -+++ coreutils-6.9/tests/ls/x-option 2007-06-13 14:28:42.000000000 +0100 -@@ -0,0 +1,59 @@ -+#!/bin/sh -+# Exercise the -x option. -+ -+# Copyright (C) 2007 Free Software Foundation, Inc. -+ -+# This program is free software; you can redistribute it and/or modify -+# it under the terms of the GNU General Public License as published by -+# the Free Software Foundation; either version 2 of the License, or -+# (at your option) any later version. -+ -+# This program is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+# GNU General Public License for more details. -+ -+# You should have received a copy of the GNU General Public License -+# along with this program; if not, write to the Free Software -+# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA -+# 02110-1301, USA. -+ -+if test "$VERBOSE" = yes; then -+ set -x -+ ls --version -+fi -+ -+. $srcdir/../envvar-check -+. $srcdir/../lang-default -+ -+pwd=`pwd` -+t0=`echo "$0"|sed 's,.*/,,'`.tmp; tmp=$t0/$$ -+trap 'status=$?; cd "$pwd" && chmod -R u+rwx $t0 && rm -rf $t0 && exit $status' 0 -+trap '(exit $?); exit $?' 1 2 13 15 -+ -+framework_failure=0 -+mkdir -p $tmp || framework_failure=1 -+cd $tmp || framework_failure=1 -+mkdir subdir || framework_failure=1 -+touch subdir/b || framework_failure=1 -+touch subdir/a || framework_failure=1 -+ -+if test $framework_failure = 1; then -+ echo "$0: failure in testing framework" 1>&2 -+ (exit 1); exit 1 -+fi -+ -+fail=0 -+ -+# Coreutils 6.8 and 6.9 would output this in the wrong order. -+ls -x subdir > out || fail=1 -+ls -rx subdir >> out || fail=1 -+cat <<\EOF > exp || fail=1 -+a b -+b a -+EOF -+ -+cmp out exp || fail=1 -+test $fail = 1 && diff out exp 2> /dev/null -+ -+(exit $fail); exit $fail ---- coreutils-6.9/NEWS.ls-x 2007-03-22 21:19:45.000000000 +0000 -+++ coreutils-6.9/NEWS 2007-06-13 14:28:42.000000000 +0100 -@@ -13,6 +13,11 @@ - Using pr -m -s (i.e. merging files, with TAB as the output separator) - no longer inserts extraneous spaces between output columns. - -+** Bug fixes -+ -+ ls -x DIR would sometimes output the wrong string in place of the -+ first entry. [introduced in coreutils-6.8] -+ - - * Noteworthy changes in release 6.8 (2007-02-24) [not-unstable] - diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-overflow.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-overflow.patch deleted file mode 100644 index 58074c09a..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils-overflow.patch +++ /dev/null @@ -1,19 +0,0 @@ -Upstream-Status: Inappropriate [legacy version] - -This patch was imported from the Fedora Core 8 coreutils-6.9-9 package. - -The package is stated as being Licensed as GPLv2+. 
- -Signed-off-by: Mark Hatle - ---- coreutils-5.2.1/src/who.c.overflow 2005-05-25 09:59:06.000000000 +0100 -+++ coreutils-5.2.1/src/who.c 2005-05-25 10:00:31.000000000 +0100 -@@ -75,7 +75,7 @@ - # define NEW_TIME 0 - #endif - --#define IDLESTR_LEN 6 -+#define IDLESTR_LEN 10 - - #if HAVE_STRUCT_XTMP_UT_PID - # define PIDSTR_DECL_AND_INIT(Var, Utmp_ent) \ diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils_fix_for_automake-1.12.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils_fix_for_automake-1.12.patch deleted file mode 100644 index 64e5f12ba..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/coreutils_fix_for_automake-1.12.patch +++ /dev/null @@ -1,32 +0,0 @@ -Upstream-Status: Pending - -automake 1.12 has depricated automatic de-ANSI-fication support - -this patch avoids these kinds of errors: - -| configure.ac:40: error: automatic de-ANSI-fication support has been removed -| /srv/home/nitin/builds/build-gcc47/tmp/sysroots/x86_64-linux/usr/share/aclocal-1.12/protos.m4:12: AM_C_PROTOTYPES is expanded from... -| /srv/home/nitin/builds/build-gcc47/tmp/work/x86_64-linux/coreutils-native-6.9-r2/coreutils-6.9/m4/jm-macros.m4:138: gl_CHECK_ALL_TYPES is expanded from... -| /srv/home/nitin/builds/build-gcc47/tmp/work/x86_64-linux/coreutils-native-6.9-r2/coreutils-6.9/m4/jm-macros.m4:24: coreutils_MACROS is expanded from... -| configure.ac:40: the top level -| autom4te: m4 failed with exit status: 1 - -Signed-off-by: Nitin A Kamble -2012/05/04 - -Index: coreutils-6.9/m4/jm-macros.m4 -=================================================================== ---- coreutils-6.9.orig/m4/jm-macros.m4 -+++ coreutils-6.9/m4/jm-macros.m4 -@@ -142,11 +142,6 @@ AC_DEFUN([gl_CHECK_ALL_TYPES], - dnl whether functions and headers are available, whether they work, etc. - AC_REQUIRE([AC_SYS_LARGEFILE]) - -- dnl This test must precede tests of compiler characteristics like -- dnl that for the inline keyword, since it may change the degree to -- dnl which the compiler supports such features. -- AC_REQUIRE([AM_C_PROTOTYPES]) -- - dnl Checks for typedefs, structures, and compiler characteristics. - AC_REQUIRE([AC_C_BIGENDIAN]) - AC_REQUIRE([AC_C_VOLATILE]) diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/fix_for_manpage_building.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/fix_for_manpage_building.patch deleted file mode 100644 index e0d600a39..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/fix_for_manpage_building.patch +++ /dev/null @@ -1,85 +0,0 @@ -Upstream-Status: Inappropriate [legacy version] - -Use native coreutils binaries to build manpages in cross environment. -This avoids man page build issues like this: - -| Making all in man -| make[1]: Entering directory `/build_disk/poky_build/build1/tmp/work/i586-poky-linux/coreutils-6.9-r0/coreutils-6.9/man' -| Updating man page cut.1 -| Updating man page dir.1 -| Updating man page expand.1 -| Updating man page fold.1 -| Updating man page install.1 -| Updating man page join.1 -| Updating man page pr.1 -| Updating man page ls.1 -| Updating man page sort.1 -| Updating man page unexpand.1 -| Updating man page uniq.1 -| Updating man page who.1 -| Updating man page vdir.1 -| help2man: can't get `--help' info from dir.td/dir -| help2man: can't get `--help' info from cut.td/cut -| make[1]: *** [dir.1] Error 126 -| make[1]: *** Waiting for unfinished jobs.... 
-| help2man: can't get `--help' info from fold.td/fold -| help2man: can't get `--help' info from install.td/install -| help2man: can't get `--help' info from expand.td/expand -| help2man: can't get `--help' info from join.td/join -| make[1]: *** [cut.1] Error 126 -| make[1]: *** [fold.1] Error 126 -| make[1]: *** [install.1] Error 126 -| help2man: can't get `--help' info from sort.td/sort -| make[1]: *** [expand.1] Error 126 -| help2man: can't get `--help' info from pr.td/pr -| make[1]: *** [join.1] Error 126 -| help2man: can't get `--help' info from ls.td/ls -| help2man: can't get `--help' info from unexpand.td/unexpand -| help2man: can't get `--help' info from uniq.td/uniq -| help2man: can't get `--help' info from who.td/who -| make[1]: *** [sort.1] Error 126 -| make[1]: *** [pr.1] Error 126 -| help2man: can't get `--help' info from vdir.td/vdir -| make[1]: *** [ls.1] Error 126 -| make[1]: *** [uniq.1] Error 126 -| make[1]: *** [unexpand.1] Error 126 -| make[1]: *** [who.1] Error 126 -| make[1]: *** [vdir.1] Error 126 -| make[1]: Leaving directory `/build_disk/poky_build/build1/tmp/work/i586-poky-linux/coreutils-6.9-r0/coreutils-6.9/man' -| make: *** [all-recursive] Error 1 -| FATAL: oe_runmake failed -| ERROR: Function 'do_compile' failed (see /build_disk/poky_build/build1/tmp/work/i586-poky-linux/coreutils-6.9-r0/temp/log.do_compile.12780 for further information) -NOTE: package coreutils-6.9-r0: task do_compile: Failed -ERROR: Task 8 (/home/nitin/prj/poky.git/meta/recipes-core/coreutils/coreutils_6.9.bb, do_compile) failed with exit code '1' - - -This patch is made for gplv2 coreutils the recipe -Nitin A Kamble -2011/03/17 - -Index: man/Makefile.am -=================================================================== ---- a/man.orig/Makefile.am -+++ b/man/Makefile.am -@@ -167,7 +167,7 @@ mapped_name = `echo $*|sed 's/install/gi - $(PERL) -- $(srcdir)/help2man \ - --source='$(PACKAGE_STRING)' \ - --include=$(srcdir)/$*.x \ -- --output=$t/$@ $t/$*; \ -+ --output=$t/$@ $*; \ - } \ - && sed 's|$*\.td/||g' $t/$@ > $@ \ - && chmod a-w $@ \ -Index: man/Makefile.in -=================================================================== ---- a/man.orig/Makefile.in -+++ b/man/Makefile.in -@@ -865,7 +865,7 @@ yes.1: $(common_dep) $(srcdir)/yes.x . - $(PERL) -- $(srcdir)/help2man \ - --source='$(PACKAGE_STRING)' \ - --include=$(srcdir)/$*.x \ -- --output=$t/$@ $t/$*; \ -+ --output=$t/$@ $*; \ - } \ - && sed 's|$*\.td/||g' $t/$@ > $@ \ - && chmod a-w $@ \ diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/futimens.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/futimens.patch deleted file mode 100644 index 508810623..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/futimens.patch +++ /dev/null @@ -1,63 +0,0 @@ -Upstream-Status: Inappropriate [legacy version] - -# coreutils uses gnulib which conflicts with newer libc header on futimens -# this patch simply renames coreutils futimes to avoid confliction -# -# Signed-off-by: Kevin Tian , 2010-08-18 -# (this patch is licensed under GPLv2) - -diff --git a/lib/utimens.c b/lib/utimens.c -index 71bc510..ae870b8 100644 ---- a/lib/utimens.c -+++ b/lib/utimens.c -@@ -75,7 +75,7 @@ struct utimbuf - Return 0 on success, -1 (setting errno) on failure. 
*/ - - int --futimens (int fd ATTRIBUTE_UNUSED, -+futimens_coreutils (int fd ATTRIBUTE_UNUSED, - char const *file, struct timespec const timespec[2]) - { - /* Some Linux-based NFS clients are buggy, and mishandle time stamps -@@ -185,5 +185,5 @@ futimens (int fd ATTRIBUTE_UNUSED, - int - utimens (char const *file, struct timespec const timespec[2]) - { -- return futimens (-1, file, timespec); -+ return futimens_coreutils (-1, file, timespec); - } -diff --git a/lib/utimens.h b/lib/utimens.h -index 0097aaa..13fc45a 100644 ---- a/lib/utimens.h -+++ b/lib/utimens.h -@@ -1,3 +1,3 @@ - #include --int futimens (int, char const *, struct timespec const [2]); -+int futimens_coreutils (int, char const *, struct timespec const [2]); - int utimens (char const *, struct timespec const [2]); -diff --git a/src/copy.c b/src/copy.c -index 4bdb75c..04634f1 100644 ---- a/src/copy.c -+++ b/src/copy.c -@@ -518,7 +518,7 @@ copy_reg (char const *src_name, char const *dst_name, - timespec[0] = get_stat_atime (src_sb); - timespec[1] = get_stat_mtime (src_sb); - -- if (futimens (dest_desc, dst_name, timespec) != 0) -+ if (futimens_coreutils (dest_desc, dst_name, timespec) != 0) - { - error (0, errno, _("preserving times for %s"), quote (dst_name)); - if (x->require_preserve) -diff --git a/src/touch.c b/src/touch.c -index a79c26d..6ef317d 100644 ---- a/src/touch.c -+++ b/src/touch.c -@@ -182,7 +182,7 @@ touch (const char *file) - t = timespec; - } - -- ok = (futimens (fd, (fd == STDOUT_FILENO ? NULL : file), t) == 0); -+ ok = (futimens_coreutils (fd, (fd == STDOUT_FILENO ? NULL : file), t) == 0); - - if (fd == STDIN_FILENO) - { diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/gnulib_m4.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/gnulib_m4.patch deleted file mode 100644 index 8a8ffa726..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/gnulib_m4.patch +++ /dev/null @@ -1,21 +0,0 @@ -Upstream-Status: Inappropriate [legacy version] - -# remove the line to cause recursive inclusion error from autoreconf, sicne -# newer autoconf has included this definition. Simply rename it here. -# -# Signed-off-by: Kevin Tian , 2010-08-18 -# (this patch is licensed under GPLv2) - -diff --git a/extensions.m4 b/extensions.m4 -index 143a9e5..f6558f1 100644 ---- a/m4/extensions.m4 -+++ b/m4/extensions.m4 -@@ -16,7 +16,7 @@ - # ------------------------ - # Enable extensions on systems that normally disable them, - # typically due to standards-conformance issues. 
--AC_DEFUN([AC_USE_SYSTEM_EXTENSIONS], -+AC_DEFUN([AC_USE_SYSTEM_EXTENSIONS_DUMMY], - [ - AC_BEFORE([$0], [AC_COMPILE_IFELSE]) - AC_BEFORE([$0], [AC_RUN_IFELSE]) diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/loadavg.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/loadavg.patch deleted file mode 100644 index c72efd4d3..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/loadavg.patch +++ /dev/null @@ -1,18 +0,0 @@ -Remove hardcoded paths so OE's configure QA does not detect it and fail the builds -For cross compilation is less interesting to look into host paths for target libraries anyway - -Upstream-Status: Inappropriate [OE Specific] -Signed-off-by: Khem Raj - -Index: coreutils-6.9/m4/getloadavg.m4 -=================================================================== ---- coreutils-6.9.orig/m4/getloadavg.m4 -+++ coreutils-6.9/m4/getloadavg.m4 -@@ -49,7 +49,6 @@ if test $gl_have_func = no; then - # There is a commonly available library for RS/6000 AIX. - # Since it is not a standard part of AIX, it might be installed locally. - gl_getloadavg_LIBS=$LIBS -- LIBS="-L/usr/local/lib $LIBS" - AC_CHECK_LIB(getloadavg, getloadavg, - [LIBS="-lgetloadavg $LIBS"], [LIBS=$gl_getloadavg_LIBS]) - fi diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/man-touch.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/man-touch.patch deleted file mode 100644 index 59a4fe005..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-6.9/man-touch.patch +++ /dev/null @@ -1,24 +0,0 @@ -Upstream-Status: Inappropriate [legacy version] - -# man page for 'touch' is generated differently from others. All other utilities -# are provided static man source files, while for 'touch' it requires help2man -# to invoke "touch --help" and then convert the output into the manual. Since touch -# is with target format which can't be invoked on build system, disable building -# 'touch' man page here. 
-# -# Signed-off-by: Kevin Tian , 2010-08-18 -# (this patch is licensed under GPLv2) - -diff --git a/man/Makefile.am b/man/Makefile.am -index 32df9d1..37b09e3 100644 ---- a/man/Makefile.am -+++ b/man/Makefile.am -@@ -27,7 +27,7 @@ dist_man_MANS = \ - paste.1 pathchk.1 pr.1 printenv.1 printf.1 ptx.1 pwd.1 readlink.1 \ - rm.1 rmdir.1 seq.1 sha1sum.1 sha224sum.1 sha256sum.1 sha384sum.1 sha512sum.1 \ - shred.1 shuf.1 sleep.1 sort.1 split.1 stat.1 \ -- su.1 sum.1 sync.1 tac.1 tail.1 tee.1 test.1 touch.1 tr.1 true.1 tsort.1 \ -+ su.1 sum.1 sync.1 tac.1 tail.1 tee.1 test.1 tr.1 true.1 tsort.1 \ - tty.1 unexpand.1 uniq.1 unlink.1 vdir.1 wc.1 \ - whoami.1 yes.1 $(MAN) - optional_mans = \ diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/0001-Unset-need_charset_alias-when-building-for-musl.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/0001-Unset-need_charset_alias-when-building-for-musl.patch deleted file mode 100644 index ba1a4bab4..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/0001-Unset-need_charset_alias-when-building-for-musl.patch +++ /dev/null @@ -1,33 +0,0 @@ -From b9565dc2fe0c4f7daaec91b7e83bc7313dee2f4a Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 13 Apr 2015 17:02:13 -0700 -Subject: [PATCH] Unset need_charset_alias when building for musl - -localcharset uses ac_cv_gnu_library_2_1 from glibc21.m4 -which actually shoudl be fixed in gnulib and then all downstream -projects will get it eventually. For now we apply the fix to -coreutils - -Upstream-Status: Pending - -Signed-off-by: Khem Raj ---- - lib/gnulib.mk | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/lib/gnulib.mk b/lib/gnulib.mk -index e1d74db..c0e92dd 100644 ---- a/lib/gnulib.mk -+++ b/lib/gnulib.mk -@@ -1882,7 +1882,7 @@ install-exec-localcharset: all-local - case '$(host_os)' in \ - darwin[56]*) \ - need_charset_alias=true ;; \ -- darwin* | cygwin* | mingw* | pw32* | cegcc*) \ -+ darwin* | cygwin* | mingw* | pw32* | cegcc* | linux-musl*) \ - need_charset_alias=false ;; \ - *) \ - need_charset_alias=true ;; \ --- -2.1.4 - diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/0001-uname-report-processor-and-hardware-correctly.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/0001-uname-report-processor-and-hardware-correctly.patch deleted file mode 100644 index 3c43e1d5d..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/0001-uname-report-processor-and-hardware-correctly.patch +++ /dev/null @@ -1,64 +0,0 @@ -Upstream-Status: Denied - -Subject: uname: report processor and hardware correctly - -This patch is rejected by coreutils upstream, but distros like debian and fedora -uses this patch to make `uname -i' and `uname -p' to not report 'unknown'. 
- -Signed-off-by: Chen Qi ---- - src/uname.c | 18 ++++++++++++++++-- - 1 file changed, 16 insertions(+), 2 deletions(-) - -diff --git a/src/uname.c b/src/uname.c -index 39bd28c..c84582d 100644 ---- a/src/uname.c -+++ b/src/uname.c -@@ -299,13 +299,19 @@ main (int argc, char **argv) - - if (toprint & PRINT_PROCESSOR) - { -- char const *element = unknown; -+ char *element = unknown; - #if HAVE_SYSINFO && defined SI_ARCHITECTURE - { - static char processor[257]; - if (0 <= sysinfo (SI_ARCHITECTURE, processor, sizeof processor)) - element = processor; - } -+#else -+ { -+ static struct utsname u; -+ uname(&u); -+ element = u.machine; -+ } - #endif - #ifdef UNAME_PROCESSOR - if (element == unknown) -@@ -343,7 +349,7 @@ main (int argc, char **argv) - - if (toprint & PRINT_HARDWARE_PLATFORM) - { -- char const *element = unknown; -+ char *element = unknown; - #if HAVE_SYSINFO && defined SI_PLATFORM - { - static char hardware_platform[257]; -@@ -361,6 +367,14 @@ main (int argc, char **argv) - if (sysctl (mib, 2, hardware_platform, &s, 0, 0) >= 0) - element = hardware_platform; - } -+#else -+ { -+ static struct utsname u; -+ uname(&u); -+ element = u.machine; -+ if(strlen(element)==4 && element[0]=='i' && element[2]=='8' && element[3]=='6') -+ element[1]='3'; -+ } - #endif - if (! (toprint == UINT_MAX && element == unknown)) - print_element (element); --- -1.9.1 - diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/disable-ls-output-quoting.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/disable-ls-output-quoting.patch deleted file mode 100644 index e68c21355..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/disable-ls-output-quoting.patch +++ /dev/null @@ -1,49 +0,0 @@ -Subject: revert inconsistent ls quoting - -This is a revert of upstream commit 109b9220cead6e979d22d16327c4d9f8350431cc. - -Bug-Debian: https://bugs.debian.org/813164 - -Upstream-Status: Submitted - -Originally-by: Adam Borowski -[PG: patch from https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=813164#78 ] -Signed-off-by: Paul Gortmaker - ---- - ---- coreutils-8.25.orig/NEWS -+++ coreutils-8.25/NEWS -@@ -71,9 +71,6 @@ GNU coreutils NEWS - df now prefers sources towards the root of a device when - eliding duplicate bind mounted entries. - -- ls now quotes file names unambiguously and appropriate for use in a shell, -- when outputting to a terminal. -- - join, sort, uniq with --zero-terminated, now treat '\n' as a field delimiter. - - ** Improvements ---- coreutils-8.25.orig/doc/coreutils.texi -+++ coreutils-8.25/doc/coreutils.texi -@@ -7750,8 +7750,8 @@ this"} in the default C locale. This lo - - You can specify the default value of the @option{--quoting-style} option - with the environment variable @env{QUOTING_STYLE}@. If that environment --variable is not set, the default value is @samp{shell-escape} when the --output is a terminal, and @samp{literal} otherwise. -+variable is not set, the default value is @samp{literal}, but this -+default may change to @samp{shell} in a future version of this package. - - @item --show-control-chars - @opindex --show-control-chars ---- coreutils-8.25.orig/src/ls.c -+++ coreutils-8.25/src/ls.c -@@ -1581,7 +1581,6 @@ decode_switches (int argc, char **argv) - if (isatty (STDOUT_FILENO)) - { - format = many_per_line; -- set_quoting_style (NULL, shell_escape_quoting_style); - /* See description of qmark_funny_chars, above. 
*/ - qmark_funny_chars = true; - } diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/fix-selinux-flask.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/fix-selinux-flask.patch deleted file mode 100644 index 9d1ae55d4..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/fix-selinux-flask.patch +++ /dev/null @@ -1,39 +0,0 @@ -From a1d360509fa3a4aff57eedcd528cc0347a87531d Mon Sep 17 00:00:00 2001 -From: Robert Yang -Date: Tue, 16 Sep 2014 01:59:08 -0700 -Subject: [PATCH] gnulib-comp.m4: selinux/flask.h should respect to - with_selinux - -Fixed when build with meta-selinux even when --without-selinux: -runcon.c:49:28: fatal error: selinux/flask.h: No such file or directory - # include - ^ -compilation terminated. - -Upstream-Status: Pending - -Signed-off-by: Robert Yang ---- - m4/gnulib-comp.m4 | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/m4/gnulib-comp.m4 b/m4/gnulib-comp.m4 -index 472d3a0..5f09734 100644 ---- a/m4/gnulib-comp.m4 -+++ b/m4/gnulib-comp.m4 -@@ -1730,11 +1730,11 @@ AC_DEFUN([gl_INIT], - AC_LIBOBJ([select]) - fi - gl_SYS_SELECT_MODULE_INDICATOR([select]) -- AC_CHECK_HEADERS([selinux/flask.h]) - AC_LIBOBJ([selinux-at]) - gl_HEADERS_SELINUX_SELINUX_H - gl_HEADERS_SELINUX_CONTEXT_H - if test "$with_selinux" != no && test "$ac_cv_header_selinux_selinux_h" = yes; then -+ AC_CHECK_HEADERS([selinux/flask.h]) - AC_LIBOBJ([getfilecon]) - fi - gl_SERVENT --- -1.7.9.5 - diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/man-decouple-manpages-from-build.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/man-decouple-manpages-from-build.patch deleted file mode 100644 index 3c896a11b..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/man-decouple-manpages-from-build.patch +++ /dev/null @@ -1,27 +0,0 @@ -From b4d258629f090066783c3b4c91b40f63b9d0a296 Mon Sep 17 00:00:00 2001 -From: Paul Gortmaker -Date: Sun, 8 Feb 2015 16:51:57 -0500 -Subject: [PATCH] man: decouple manpages from build - -The use of "help2man" doesn't work at all for cross compile, in -addition to the extra requirement of perl it adds. - -Just decouple the manpages from the build in order to pave the way for -importing prebuilt manpages that can be used in a cross build situation. - -Upstream-Status: Inappropriate [upstream doesn't care about x-compile case.] -Signed-off-by: Paul Gortmaker - -diff --git a/Makefile.am b/Makefile.am -index fb4af27..7576b2c 100644 ---- a/Makefile.am -+++ b/Makefile.am -@@ -214,5 +214,4 @@ AM_CPPFLAGS = -Ilib -I$(top_srcdir)/lib -Isrc -I$(top_srcdir)/src - include $(top_srcdir)/lib/local.mk - include $(top_srcdir)/src/local.mk - include $(top_srcdir)/doc/local.mk --include $(top_srcdir)/man/local.mk - include $(top_srcdir)/tests/local.mk --- -2.2.2 - diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/remove-usr-local-lib-from-m4.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/remove-usr-local-lib-from-m4.patch deleted file mode 100644 index 2ef8a548a..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.25/remove-usr-local-lib-from-m4.patch +++ /dev/null @@ -1,31 +0,0 @@ -We have problem using hardcoded directories like /usr/local here -which will be checked for cross builds. This is a special case which -is valid for AIX only. We do not have AIX as one of our supported -build host or target. 
Therefore we get rid of the hardcoded paths -and make life easier for cross compilation process. - -Signed-off-by: Khem Raj - -Upstream-Status: Inappropriate [Upstream does care for AIX while we may not] - -Index: coreutils-8.14/m4/getloadavg.m4 -=================================================================== ---- coreutils-8.14.orig/m4/getloadavg.m4 2011-09-19 08:09:24.000000000 -0700 -+++ coreutils-8.14/m4/getloadavg.m4 2011-10-19 21:42:00.385533357 -0700 -@@ -41,16 +41,6 @@ - [LIBS="-lutil $LIBS" gl_func_getloadavg_done=yes]) - fi - -- if test $gl_func_getloadavg_done = no; then -- # There is a commonly available library for RS/6000 AIX. -- # Since it is not a standard part of AIX, it might be installed locally. -- gl_getloadavg_LIBS=$LIBS -- LIBS="-L/usr/local/lib $LIBS" -- AC_CHECK_LIB([getloadavg], [getloadavg], -- [LIBS="-lgetloadavg $LIBS" gl_func_getloadavg_done=yes], -- [LIBS=$gl_getloadavg_LIBS]) -- fi -- - # Set up the replacement function if necessary. - if test $gl_func_getloadavg_done = no; then - HAVE_GETLOADAVG=0 diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-Unset-need_charset_alias-when-building-for-musl.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-Unset-need_charset_alias-when-building-for-musl.patch new file mode 100644 index 000000000..ba1a4bab4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-Unset-need_charset_alias-when-building-for-musl.patch @@ -0,0 +1,33 @@ +From b9565dc2fe0c4f7daaec91b7e83bc7313dee2f4a Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 13 Apr 2015 17:02:13 -0700 +Subject: [PATCH] Unset need_charset_alias when building for musl + +localcharset uses ac_cv_gnu_library_2_1 from glibc21.m4 +which actually shoudl be fixed in gnulib and then all downstream +projects will get it eventually. For now we apply the fix to +coreutils + +Upstream-Status: Pending + +Signed-off-by: Khem Raj +--- + lib/gnulib.mk | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/lib/gnulib.mk b/lib/gnulib.mk +index e1d74db..c0e92dd 100644 +--- a/lib/gnulib.mk ++++ b/lib/gnulib.mk +@@ -1882,7 +1882,7 @@ install-exec-localcharset: all-local + case '$(host_os)' in \ + darwin[56]*) \ + need_charset_alias=true ;; \ +- darwin* | cygwin* | mingw* | pw32* | cegcc*) \ ++ darwin* | cygwin* | mingw* | pw32* | cegcc* | linux-musl*) \ + need_charset_alias=false ;; \ + *) \ + need_charset_alias=true ;; \ +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-local.mk-fix-cross-compiling-problem.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-local.mk-fix-cross-compiling-problem.patch new file mode 100644 index 000000000..66f9a716c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-local.mk-fix-cross-compiling-problem.patch @@ -0,0 +1,26 @@ +Subject: local.mk: fix cross compiling problem + +We meet the following error when cross compiling. +| Makefile:3418: *** Recursive variable 'INSTALL' references itself (eventually). Stop. +This patch fixes this problem. 
+ +Upstream-Status: Pending + +Signed-off-by: Chen Qi +--- + src/local.mk | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/local.mk b/src/local.mk +index 36dfa4e..c5898cc 100644 +--- a/src/local.mk ++++ b/src/local.mk +@@ -649,4 +649,4 @@ cu_install_program = @INSTALL_PROGRAM@ + else + cu_install_program = src/ginstall + endif +-INSTALL = $(cu_install_program) -c ++INSTALL_PROGRAM = $(cu_install_program) +-- +2.1.0 + diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-uname-report-processor-and-hardware-correctly.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-uname-report-processor-and-hardware-correctly.patch new file mode 100644 index 000000000..3c43e1d5d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/0001-uname-report-processor-and-hardware-correctly.patch @@ -0,0 +1,64 @@ +Upstream-Status: Denied + +Subject: uname: report processor and hardware correctly + +This patch is rejected by coreutils upstream, but distros like debian and fedora +uses this patch to make `uname -i' and `uname -p' to not report 'unknown'. + +Signed-off-by: Chen Qi +--- + src/uname.c | 18 ++++++++++++++++-- + 1 file changed, 16 insertions(+), 2 deletions(-) + +diff --git a/src/uname.c b/src/uname.c +index 39bd28c..c84582d 100644 +--- a/src/uname.c ++++ b/src/uname.c +@@ -299,13 +299,19 @@ main (int argc, char **argv) + + if (toprint & PRINT_PROCESSOR) + { +- char const *element = unknown; ++ char *element = unknown; + #if HAVE_SYSINFO && defined SI_ARCHITECTURE + { + static char processor[257]; + if (0 <= sysinfo (SI_ARCHITECTURE, processor, sizeof processor)) + element = processor; + } ++#else ++ { ++ static struct utsname u; ++ uname(&u); ++ element = u.machine; ++ } + #endif + #ifdef UNAME_PROCESSOR + if (element == unknown) +@@ -343,7 +349,7 @@ main (int argc, char **argv) + + if (toprint & PRINT_HARDWARE_PLATFORM) + { +- char const *element = unknown; ++ char *element = unknown; + #if HAVE_SYSINFO && defined SI_PLATFORM + { + static char hardware_platform[257]; +@@ -361,6 +367,14 @@ main (int argc, char **argv) + if (sysctl (mib, 2, hardware_platform, &s, 0, 0) >= 0) + element = hardware_platform; + } ++#else ++ { ++ static struct utsname u; ++ uname(&u); ++ element = u.machine; ++ if(strlen(element)==4 && element[0]=='i' && element[2]=='8' && element[3]=='6') ++ element[1]='3'; ++ } + #endif + if (! (toprint == UINT_MAX && element == unknown)) + print_element (element); +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/disable-ls-output-quoting.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/disable-ls-output-quoting.patch new file mode 100644 index 000000000..e68c21355 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/disable-ls-output-quoting.patch @@ -0,0 +1,49 @@ +Subject: revert inconsistent ls quoting + +This is a revert of upstream commit 109b9220cead6e979d22d16327c4d9f8350431cc. + +Bug-Debian: https://bugs.debian.org/813164 + +Upstream-Status: Submitted + +Originally-by: Adam Borowski +[PG: patch from https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=813164#78 ] +Signed-off-by: Paul Gortmaker + +--- + +--- coreutils-8.25.orig/NEWS ++++ coreutils-8.25/NEWS +@@ -71,9 +71,6 @@ GNU coreutils NEWS + df now prefers sources towards the root of a device when + eliding duplicate bind mounted entries. 
+ +- ls now quotes file names unambiguously and appropriate for use in a shell, +- when outputting to a terminal. +- + join, sort, uniq with --zero-terminated, now treat '\n' as a field delimiter. + + ** Improvements +--- coreutils-8.25.orig/doc/coreutils.texi ++++ coreutils-8.25/doc/coreutils.texi +@@ -7750,8 +7750,8 @@ this"} in the default C locale. This lo + + You can specify the default value of the @option{--quoting-style} option + with the environment variable @env{QUOTING_STYLE}@. If that environment +-variable is not set, the default value is @samp{shell-escape} when the +-output is a terminal, and @samp{literal} otherwise. ++variable is not set, the default value is @samp{literal}, but this ++default may change to @samp{shell} in a future version of this package. + + @item --show-control-chars + @opindex --show-control-chars +--- coreutils-8.25.orig/src/ls.c ++++ coreutils-8.25/src/ls.c +@@ -1581,7 +1581,6 @@ decode_switches (int argc, char **argv) + if (isatty (STDOUT_FILENO)) + { + format = many_per_line; +- set_quoting_style (NULL, shell_escape_quoting_style); + /* See description of qmark_funny_chars, above. */ + qmark_funny_chars = true; + } diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/fix-selinux-flask.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/fix-selinux-flask.patch new file mode 100644 index 000000000..9d1ae55d4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/fix-selinux-flask.patch @@ -0,0 +1,39 @@ +From a1d360509fa3a4aff57eedcd528cc0347a87531d Mon Sep 17 00:00:00 2001 +From: Robert Yang +Date: Tue, 16 Sep 2014 01:59:08 -0700 +Subject: [PATCH] gnulib-comp.m4: selinux/flask.h should respect to + with_selinux + +Fixed when build with meta-selinux even when --without-selinux: +runcon.c:49:28: fatal error: selinux/flask.h: No such file or directory + # include + ^ +compilation terminated. + +Upstream-Status: Pending + +Signed-off-by: Robert Yang +--- + m4/gnulib-comp.m4 | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/m4/gnulib-comp.m4 b/m4/gnulib-comp.m4 +index 472d3a0..5f09734 100644 +--- a/m4/gnulib-comp.m4 ++++ b/m4/gnulib-comp.m4 +@@ -1730,11 +1730,11 @@ AC_DEFUN([gl_INIT], + AC_LIBOBJ([select]) + fi + gl_SYS_SELECT_MODULE_INDICATOR([select]) +- AC_CHECK_HEADERS([selinux/flask.h]) + AC_LIBOBJ([selinux-at]) + gl_HEADERS_SELINUX_SELINUX_H + gl_HEADERS_SELINUX_CONTEXT_H + if test "$with_selinux" != no && test "$ac_cv_header_selinux_selinux_h" = yes; then ++ AC_CHECK_HEADERS([selinux/flask.h]) + AC_LIBOBJ([getfilecon]) + fi + gl_SERVENT +-- +1.7.9.5 + diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/man-decouple-manpages-from-build.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/man-decouple-manpages-from-build.patch new file mode 100644 index 000000000..3c896a11b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/man-decouple-manpages-from-build.patch @@ -0,0 +1,27 @@ +From b4d258629f090066783c3b4c91b40f63b9d0a296 Mon Sep 17 00:00:00 2001 +From: Paul Gortmaker +Date: Sun, 8 Feb 2015 16:51:57 -0500 +Subject: [PATCH] man: decouple manpages from build + +The use of "help2man" doesn't work at all for cross compile, in +addition to the extra requirement of perl it adds. + +Just decouple the manpages from the build in order to pave the way for +importing prebuilt manpages that can be used in a cross build situation. 
+ +Upstream-Status: Inappropriate [upstream doesn't care about x-compile case.] +Signed-off-by: Paul Gortmaker + +diff --git a/Makefile.am b/Makefile.am +index fb4af27..7576b2c 100644 +--- a/Makefile.am ++++ b/Makefile.am +@@ -214,5 +214,4 @@ AM_CPPFLAGS = -Ilib -I$(top_srcdir)/lib -Isrc -I$(top_srcdir)/src + include $(top_srcdir)/lib/local.mk + include $(top_srcdir)/src/local.mk + include $(top_srcdir)/doc/local.mk +-include $(top_srcdir)/man/local.mk + include $(top_srcdir)/tests/local.mk +-- +2.2.2 + diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/remove-usr-local-lib-from-m4.patch b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/remove-usr-local-lib-from-m4.patch new file mode 100644 index 000000000..2ef8a548a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils-8.26/remove-usr-local-lib-from-m4.patch @@ -0,0 +1,31 @@ +We have problem using hardcoded directories like /usr/local here +which will be checked for cross builds. This is a special case which +is valid for AIX only. We do not have AIX as one of our supported +build host or target. Therefore we get rid of the hardcoded paths +and make life easier for cross compilation process. + +Signed-off-by: Khem Raj + +Upstream-Status: Inappropriate [Upstream does care for AIX while we may not] + +Index: coreutils-8.14/m4/getloadavg.m4 +=================================================================== +--- coreutils-8.14.orig/m4/getloadavg.m4 2011-09-19 08:09:24.000000000 -0700 ++++ coreutils-8.14/m4/getloadavg.m4 2011-10-19 21:42:00.385533357 -0700 +@@ -41,16 +41,6 @@ + [LIBS="-lutil $LIBS" gl_func_getloadavg_done=yes]) + fi + +- if test $gl_func_getloadavg_done = no; then +- # There is a commonly available library for RS/6000 AIX. +- # Since it is not a standard part of AIX, it might be installed locally. +- gl_getloadavg_LIBS=$LIBS +- LIBS="-L/usr/local/lib $LIBS" +- AC_CHECK_LIB([getloadavg], [getloadavg], +- [LIBS="-lgetloadavg $LIBS" gl_func_getloadavg_done=yes], +- [LIBS=$gl_getloadavg_LIBS]) +- fi +- + # Set up the replacement function if necessary. + if test $gl_func_getloadavg_done = no; then + HAVE_GETLOADAVG=0 diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_6.9.bb b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_6.9.bb deleted file mode 100644 index 35700a32f..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_6.9.bb +++ /dev/null @@ -1,107 +0,0 @@ -SUMMARY = "The basic file, shell and text manipulation utilities" -DESCRIPTION = "The GNU Core Utilities provide the basic file, shell and text \ -manipulation utilities. These are the core utilities which are expected to exist on \ -every system." 
- -HOMEPAGE = "http://www.gnu.org/software/coreutils/" -BUGTRACKER = "http://debbugs.gnu.org/coreutils" -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe \ - file://src/ls.c;beginline=4;endline=16;md5=15ed60f67b1db5fedd5dbc37cf8a9543" -PR = "r5" -DEPENDS = "virtual/libiconv" - -inherit autotools gettext texinfo - -SRC_URI = "${GNU_MIRROR}/coreutils/${BP}.tar.bz2 \ - file://gnulib_m4.patch \ - file://futimens.patch \ - file://coreutils-ls-x.patch \ - file://coreutils-6.9-cp-i-u.patch \ - file://coreutils-i18n.patch \ - file://coreutils-overflow.patch \ - file://coreutils-fix-install.patch \ - file://man-touch.patch \ - file://coreutils_fix_for_automake-1.12.patch \ - file://coreutils-fix-texinfo.patch \ - file://fix_for_manpage_building.patch \ - file://loadavg.patch \ - " - -SRC_URI[md5sum] = "c9607d8495f16e98906e7ed2d9751a06" -SRC_URI[sha256sum] = "89c2895ad157de50e53298b22d91db116ee4e1dd3fdf4019260254e2e31497b0" - -EXTRA_OECONF += "ac_cv_func_getgroups_works=yes \ - ac_cv_func_strcoll_works=yes" - -# acl is not a default feature -# -PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'acl', 'acl', '', d)}" - -# with, without, depends, rdepends -# -PACKAGECONFIG[acl] = "ac_cv_header_sys_acl_h=yes ac_cv_header_acl_libacl_h=yes ac_cv_search_acl_get_file=-lacl,ac_cv_header_sys_acl_h=no ac_cv_header_acl_libacl_h=no ac_cv_search_acl_get_file=,acl," - - -# [ gets a special treatment and is not included in this -bindir_progs = "base64 basename cksum comm csplit cut dir dircolors dirname du \ - env expand expr factor fmt fold groups head hostid id install \ - join link logname md5sum mkfifo nice nl nohup od paste pathchk \ - pinky pr printenv printf ptx readlink seq sha1sum sha224sum sha256sum \ - sha384sum sha512sum shred shuf sort split sum tac tail tee test \ - tr tsort tty unexpand uniq unlink users vdir wc who whoami yes uptime" - -# hostname gets a special treatment and is not included in this -base_bindir_progs = "cat chgrp chmod chown cp date dd echo false kill ln ls mkdir \ - mknod mv pwd rm rmdir sleep stty sync touch true uname hostname stat" - -sbindir_progs= "chroot" - -# Let aclocal use the relative path for the m4 file rather than the -# absolute since coreutils has a lot of m4 files, otherwise there might -# be an "Argument list too long" error when it is built in a long/deep -# directory. -acpaths = "-I ./m4" - -do_install() { - autotools_do_install - - install -d ${D}${base_bindir} - [ "${bindir}" != "${base_bindir}" ] && for i in ${base_bindir_progs}; do mv ${D}${bindir}/$i ${D}${base_bindir}/$i; done - - install -d ${D}${sbindir} - [ "${bindir}" != "${sbindir}" ] && for i in ${sbindir_progs}; do mv ${D}${bindir}/$i ${D}${sbindir}/$i; done - - # [ requires special handling because [.coreutils will cause the sed stuff - # in update-alternatives to fail, therefore use lbracket - the name used - # for the actual source file. - mv ${D}${bindir}/[ ${D}${bindir}/lbracket.${BPN} - - # Newer versions of coreutils do not include su, to mimic this behavior - # we simply remove it. 
- rm -f ${D}${bindir}/su -} - -inherit update-alternatives - -ALTERNATIVE_PRIORITY = "100" - -ALTERNATIVE_${PN} = "lbracket ${bindir_progs} ${base_bindir_progs} ${sbindir_progs}" - -ALTERNATIVE_${PN}-doc = "su.1 hostname.1" -ALTERNATIVE_LINK_NAME[su.1] = "${mandir}/man1/su.1" -ALTERNATIVE_LINK_NAME[hostname.1] = "${mandir}/man1/hostname.1" - -ALTERNATIVE_PRIORITY[uptime] = "10" -ALTERNATIVE_PRIORITY[hostname] = "10" - -ALTERNATIVE_LINK_NAME[lbracket] = "${bindir}/[" -ALTERNATIVE_TARGET[lbracket] = "${bindir}/lbracket.${BPN}" - -python __anonymous() { - for prog in d.getVar('base_bindir_progs', True).split(): - d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir', True), prog)) - - for prog in d.getVar('sbindir_progs', True).split(): - d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('sbindir', True), prog)) -} diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_8.25.bb b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_8.25.bb deleted file mode 100644 index df6ad380f..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_8.25.bb +++ /dev/null @@ -1,142 +0,0 @@ -SUMMARY = "The basic file, shell and text manipulation utilities" -DESCRIPTION = "The GNU Core Utilities provide the basic file, shell and text \ -manipulation utilities. These are the core utilities which are expected to exist on \ -every system." -HOMEPAGE = "http://www.gnu.org/software/coreutils/" -BUGTRACKER = "http://debbugs.gnu.org/coreutils" -LICENSE = "GPLv3+" -LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504\ - file://src/ls.c;beginline=5;endline=16;md5=38b79785ca88537b75871782a2a3c6b8" -DEPENDS = "gmp libcap" -DEPENDS_class-native = "" - -inherit autotools gettext texinfo - -SRC_URI = "${GNU_MIRROR}/coreutils/${BP}.tar.xz;name=tarball \ - http://distfiles.gentoo.org/distfiles/${BP}-man.tar.xz;name=manpages \ - file://man-decouple-manpages-from-build.patch \ - file://remove-usr-local-lib-from-m4.patch \ - file://fix-selinux-flask.patch \ - file://0001-Unset-need_charset_alias-when-building-for-musl.patch \ - file://0001-uname-report-processor-and-hardware-correctly.patch \ - file://disable-ls-output-quoting.patch \ - " - -SRC_URI[tarball.md5sum] = "070e43ba7f618d747414ef56ab248a48" -SRC_URI[tarball.sha256sum] = "31e67c057a5b32a582f26408c789e11c2e8d676593324849dcf5779296cdce87" -SRC_URI[manpages.md5sum] = "415cc0552bc4e480b27ce8b2aebfdeb5" -SRC_URI[manpages.sha256sum] = "2ee31c3a6d2276f49c5515375d4a0c1047580da6ac10536898e0f0de81707f29" - -EXTRA_OECONF_class-native = "--without-gmp" -EXTRA_OECONF_class-target = "--enable-install-program=arch --libexecdir=${libdir}" -EXTRA_OECONF_class-nativesdk = "--enable-install-program=arch" - -# acl and xattr are not default features -# -PACKAGECONFIG_class-target ??= "\ - ${@bb.utils.contains('DISTRO_FEATURES', 'acl', 'acl', '', d)} \ - ${@bb.utils.contains('DISTRO_FEATURES', 'xattr', 'xattr', '', d)} \ -" - -# The lib/oe/path.py requires xattr -PACKAGECONFIG_class-native ??= "xattr" - -# with, without, depends, rdepends -# -PACKAGECONFIG[acl] = "--enable-acl,--disable-acl,acl," -PACKAGECONFIG[xattr] = "--enable-xattr,--disable-xattr,attr," - -# [ df mktemp base64 gets a special treatment and is not included in this -bindir_progs = "arch basename chcon cksum comm csplit cut dir dircolors dirname du \ - env expand expr factor fmt fold groups head hostid id install \ - join link logname md5sum mkfifo nice nl nohup nproc od paste pathchk \ - pinky pr printenv printf 
ptx readlink realpath runcon seq sha1sum sha224sum sha256sum \ - sha384sum sha512sum shred shuf sort split stdbuf sum tac tail tee test timeout\ - tr truncate tsort tty unexpand uniq unlink uptime users vdir wc who whoami yes" - -# hostname gets a special treatment and is not included in this -base_bindir_progs = "cat chgrp chmod chown cp date dd echo false kill ln ls mkdir \ - mknod mv pwd rm rmdir sleep stty sync touch true uname stat" - -sbindir_progs= "chroot" - -# Let aclocal use the relative path for the m4 file rather than the -# absolute since coreutils has a lot of m4 files, otherwise there might -# be an "Argument list too long" error when it is built in a long/deep -# directory. -acpaths = "-I ./m4" - -# Deal with a separate builddir failure if src doesn't exist when creating version.c/version.h -do_compile_prepend () { - mkdir -p ${B}/src -} - -do_install_class-native() { - autotools_do_install - # remove groups to fix conflict with shadow-native - rm -f ${D}${STAGING_BINDIR_NATIVE}/groups - # The return is a must since native doesn't need the - # do_install_append() in the below. - return -} - -do_install_append() { - for i in df mktemp base64; do mv ${D}${bindir}/$i ${D}${bindir}/$i.${BPN}; done - - install -d ${D}${base_bindir} - [ "${base_bindir}" != "${bindir}" ] && for i in ${base_bindir_progs}; do mv ${D}${bindir}/$i ${D}${base_bindir}/$i.${BPN}; done - - install -d ${D}${sbindir} - [ "${sbindir}" != "${bindir}" ] && for i in ${sbindir_progs}; do mv ${D}${bindir}/$i ${D}${sbindir}/$i.${BPN}; done - - # [ requires special handling because [.coreutils will cause the sed stuff - # in update-alternatives to fail, therefore use lbracket - the name used - # for the actual source file. - mv ${D}${bindir}/[ ${D}${bindir}/lbracket.${BPN} - - # prebuilt man pages - install -d ${D}/${mandir}/man1 - install -t ${D}/${mandir}/man1 ${S}/man/*.1 - # prebuilt man pages don't do a separate man page for [ vs test. - # see comment above r.e. 
sed and update-alternatives - cp -a ${D}${mandir}/man1/test.1 ${D}${mandir}/man1/lbracket.1.${BPN} -} - -inherit update-alternatives - -ALTERNATIVE_PRIORITY = "100" -ALTERNATIVE_${PN} = "lbracket ${bindir_progs} ${base_bindir_progs} ${sbindir_progs} base64 mktemp df" -ALTERNATIVE_${PN}-doc = "base64.1 mktemp.1 df.1 lbracket.1 groups.1 kill.1 uptime.1 stat.1 hostname.1" - -ALTERNATIVE_LINK_NAME[hostname.1] = "${mandir}/man1/hostname.1" - -ALTERNATIVE_LINK_NAME[base64] = "${base_bindir}/base64" -ALTERNATIVE_TARGET[base64] = "${bindir}/base64.${BPN}" -ALTERNATIVE_LINK_NAME[base64.1] = "${mandir}/man1/base64.1" - -ALTERNATIVE_LINK_NAME[mktemp] = "${base_bindir}/mktemp" -ALTERNATIVE_TARGET[mktemp] = "${bindir}/mktemp.${BPN}" -ALTERNATIVE_LINK_NAME[mktemp.1] = "${mandir}/man1/mktemp.1" - -ALTERNATIVE_LINK_NAME[df] = "${base_bindir}/df" -ALTERNATIVE_TARGET[df] = "${bindir}/df.${BPN}" -ALTERNATIVE_LINK_NAME[df.1] = "${mandir}/man1/df.1" - -ALTERNATIVE_LINK_NAME[lbracket] = "${bindir}/[" -ALTERNATIVE_TARGET[lbracket] = "${bindir}/lbracket.${BPN}" -ALTERNATIVE_LINK_NAME[lbracket.1] = "${mandir}/man1/lbracket.1" - -ALTERNATIVE_LINK_NAME[groups.1] = "${mandir}/man1/groups.1" -ALTERNATIVE_LINK_NAME[uptime.1] = "${mandir}/man1/uptime.1" -ALTERNATIVE_LINK_NAME[kill.1] = "${mandir}/man1/kill.1" -ALTERNATIVE_LINK_NAME[stat.1] = "${mandir}/man1/stat.1" - -python __anonymous() { - for prog in d.getVar('base_bindir_progs', True).split(): - d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir', True), prog)) - - for prog in d.getVar('sbindir_progs', True).split(): - d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('sbindir', True), prog)) -} - -BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_8.26.bb b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_8.26.bb new file mode 100644 index 000000000..52ef1013c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/coreutils/coreutils_8.26.bb @@ -0,0 +1,142 @@ +SUMMARY = "The basic file, shell and text manipulation utilities" +DESCRIPTION = "The GNU Core Utilities provide the basic file, shell and text \ +manipulation utilities. These are the core utilities which are expected to exist on \ +every system." 
+HOMEPAGE = "http://www.gnu.org/software/coreutils/" +BUGTRACKER = "http://debbugs.gnu.org/coreutils" +LICENSE = "GPLv3+" +LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504\ + file://src/ls.c;beginline=5;endline=16;md5=38b79785ca88537b75871782a2a3c6b8" +DEPENDS = "gmp libcap" +DEPENDS_class-native = "" + +inherit autotools gettext texinfo + +SRC_URI = "${GNU_MIRROR}/coreutils/${BP}.tar.xz;name=tarball \ + http://distfiles.gentoo.org/distfiles/${BP}-man.tar.xz;name=manpages \ + file://man-decouple-manpages-from-build.patch \ + file://remove-usr-local-lib-from-m4.patch \ + file://fix-selinux-flask.patch \ + file://0001-Unset-need_charset_alias-when-building-for-musl.patch \ + file://0001-uname-report-processor-and-hardware-correctly.patch \ + file://disable-ls-output-quoting.patch \ + file://0001-local.mk-fix-cross-compiling-problem.patch \ + " + +SRC_URI[tarball.md5sum] = "d5aa2072f662d4118b9f4c63b94601a6" +SRC_URI[tarball.sha256sum] = "155e94d748f8e2bc327c66e0cbebdb8d6ab265d2f37c3c928f7bf6c3beba9a8e" +SRC_URI[manpages.md5sum] = "b58107f532f7beffcb2f38e2ac1f2da3" +SRC_URI[manpages.sha256sum] = "9324ec412ffca3b0431e6299720c33ac98e749e430f72a7c6e65f3635c86aa29" + +EXTRA_OECONF_class-native = "--without-gmp" +EXTRA_OECONF_class-target = "--enable-install-program=arch --libexecdir=${libdir}" +EXTRA_OECONF_class-nativesdk = "--enable-install-program=arch" + +# acl and xattr are not default features +# +PACKAGECONFIG_class-target ??= "\ + ${@bb.utils.filter('DISTRO_FEATURES', 'acl xattr', d)} \ +" + +# The lib/oe/path.py requires xattr +PACKAGECONFIG_class-native ??= "xattr" + +# with, without, depends, rdepends +# +PACKAGECONFIG[acl] = "--enable-acl,--disable-acl,acl," +PACKAGECONFIG[xattr] = "--enable-xattr,--disable-xattr,attr," + +# [ df mktemp base64 gets a special treatment and is not included in this +bindir_progs = "arch basename chcon cksum comm csplit cut dir dircolors dirname du \ + env expand expr factor fmt fold groups head hostid id install \ + join link logname md5sum mkfifo nice nl nohup nproc od paste pathchk \ + pinky pr printenv printf ptx readlink realpath runcon seq sha1sum sha224sum sha256sum \ + sha384sum sha512sum shred shuf sort split stdbuf sum tac tail tee test timeout\ + tr truncate tsort tty unexpand uniq unlink uptime users vdir wc who whoami yes" + +# hostname gets a special treatment and is not included in this +base_bindir_progs = "cat chgrp chmod chown cp date dd echo false kill ln ls mkdir \ + mknod mv pwd rm rmdir sleep stty sync touch true uname stat" + +sbindir_progs= "chroot" + +# Let aclocal use the relative path for the m4 file rather than the +# absolute since coreutils has a lot of m4 files, otherwise there might +# be an "Argument list too long" error when it is built in a long/deep +# directory. +acpaths = "-I ./m4" + +# Deal with a separate builddir failure if src doesn't exist when creating version.c/version.h +do_compile_prepend () { + mkdir -p ${B}/src +} + +do_install_class-native() { + autotools_do_install + # remove groups to fix conflict with shadow-native + rm -f ${D}${STAGING_BINDIR_NATIVE}/groups + # The return is a must since native doesn't need the + # do_install_append() in the below. 
+ return +} + +do_install_append() { + for i in df mktemp base64; do mv ${D}${bindir}/$i ${D}${bindir}/$i.${BPN}; done + + install -d ${D}${base_bindir} + [ "${base_bindir}" != "${bindir}" ] && for i in ${base_bindir_progs}; do mv ${D}${bindir}/$i ${D}${base_bindir}/$i.${BPN}; done + + install -d ${D}${sbindir} + [ "${sbindir}" != "${bindir}" ] && for i in ${sbindir_progs}; do mv ${D}${bindir}/$i ${D}${sbindir}/$i.${BPN}; done + + # [ requires special handling because [.coreutils will cause the sed stuff + # in update-alternatives to fail, therefore use lbracket - the name used + # for the actual source file. + mv ${D}${bindir}/[ ${D}${bindir}/lbracket.${BPN} + + # prebuilt man pages + install -d ${D}/${mandir}/man1 + install -t ${D}/${mandir}/man1 ${S}/man/*.1 + # prebuilt man pages don't do a separate man page for [ vs test. + # see comment above r.e. sed and update-alternatives + cp -a ${D}${mandir}/man1/test.1 ${D}${mandir}/man1/lbracket.1.${BPN} +} + +inherit update-alternatives + +ALTERNATIVE_PRIORITY = "100" +ALTERNATIVE_${PN} = "lbracket ${bindir_progs} ${base_bindir_progs} ${sbindir_progs} base64 mktemp df" +ALTERNATIVE_${PN}-doc = "base64.1 mktemp.1 df.1 lbracket.1 groups.1 kill.1 uptime.1 stat.1 hostname.1" + +ALTERNATIVE_LINK_NAME[hostname.1] = "${mandir}/man1/hostname.1" + +ALTERNATIVE_LINK_NAME[base64] = "${base_bindir}/base64" +ALTERNATIVE_TARGET[base64] = "${bindir}/base64.${BPN}" +ALTERNATIVE_LINK_NAME[base64.1] = "${mandir}/man1/base64.1" + +ALTERNATIVE_LINK_NAME[mktemp] = "${base_bindir}/mktemp" +ALTERNATIVE_TARGET[mktemp] = "${bindir}/mktemp.${BPN}" +ALTERNATIVE_LINK_NAME[mktemp.1] = "${mandir}/man1/mktemp.1" + +ALTERNATIVE_LINK_NAME[df] = "${base_bindir}/df" +ALTERNATIVE_TARGET[df] = "${bindir}/df.${BPN}" +ALTERNATIVE_LINK_NAME[df.1] = "${mandir}/man1/df.1" + +ALTERNATIVE_LINK_NAME[lbracket] = "${bindir}/[" +ALTERNATIVE_TARGET[lbracket] = "${bindir}/lbracket.${BPN}" +ALTERNATIVE_LINK_NAME[lbracket.1] = "${mandir}/man1/lbracket.1" + +ALTERNATIVE_LINK_NAME[groups.1] = "${mandir}/man1/groups.1" +ALTERNATIVE_LINK_NAME[uptime.1] = "${mandir}/man1/uptime.1" +ALTERNATIVE_LINK_NAME[kill.1] = "${mandir}/man1/kill.1" +ALTERNATIVE_LINK_NAME[stat.1] = "${mandir}/man1/stat.1" + +python __anonymous() { + for prog in d.getVar('base_bindir_progs').split(): + d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('base_bindir'), prog)) + + for prog in d.getVar('sbindir_progs').split(): + d.setVarFlag('ALTERNATIVE_LINK_NAME', prog, '%s/%s' % (d.getVar('sbindir'), prog)) +} + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-glib_0.106.bb b/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-glib_0.106.bb deleted file mode 100644 index 0ae848e40..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-glib_0.106.bb +++ /dev/null @@ -1,4 +0,0 @@ -require dbus-glib.inc - -SRC_URI[md5sum] = "2eea0b7f52b49f600a07abfd8535d4e4" -SRC_URI[sha256sum] = "b38952706dcf68bad9c302999ef0f420b8cf1a2428227123f0ac4764b689c046" diff --git a/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-glib_0.108.bb b/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-glib_0.108.bb new file mode 100644 index 000000000..7a9a69e1e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-glib_0.108.bb @@ -0,0 +1,4 @@ +require dbus-glib.inc + +SRC_URI[md5sum] = "a66a613705870752ca9786e0359aea97" +SRC_URI[sha256sum] = "9f340c7e2352e9cdf113893ca77ca9075d9f8d5e81476bf2bf361099383c602c" diff --git 
a/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-test_1.10.10.bb b/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-test_1.10.10.bb deleted file mode 100644 index 650b7ab02..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-test_1.10.10.bb +++ /dev/null @@ -1,58 +0,0 @@ -SUMMARY = "D-Bus test package (for D-bus functionality testing only)" -HOMEPAGE = "http://dbus.freedesktop.org" -SECTION = "base" -LICENSE = "AFL-2 | GPLv2+" -LIC_FILES_CHKSUM = "file://COPYING;md5=10dded3b58148f3f1fd804b26354af3e \ - file://dbus/dbus.h;beginline=6;endline=20;md5=7755c9d7abccd5dbd25a6a974538bb3c" - -DEPENDS = "dbus glib-2.0" - -RDEPENDS_${PN} += "make" -RDEPENDS_${PN}-dev = "" - -SRC_URI = "http://dbus.freedesktop.org/releases/dbus/dbus-${PV}.tar.gz \ - file://tmpdir.patch \ - file://run-ptest \ - file://python-config.patch \ - file://clear-guid_from_server-if-send_negotiate_unix_f.patch \ - " - -SRC_URI[md5sum] = "495676d240eb982921b3ad1343526849" -SRC_URI[sha256sum] = "9d8f1d069ab4d1a0255d7b400ea3bcef4430c42e729b1012abb2890e3f739a43" - -S="${WORKDIR}/dbus-${PV}" -FILESEXTRAPATHS =. "${FILE_DIRNAME}/dbus:" - -inherit autotools pkgconfig gettext ptest upstream-version-is-even - -EXTRA_OECONF_X = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', '--with-x', '--without-x', d)}" -EXTRA_OECONF_X_class-native = "--without-x" - -EXTRA_OECONF = "--enable-tests \ - --enable-modular-tests \ - --enable-installed-tests \ - --enable-checks \ - --enable-asserts \ - --enable-verbose-mode \ - --disable-xml-docs \ - --disable-doxygen-docs \ - --disable-libaudit \ - --disable-systemd \ - --without-systemdsystemunitdir \ - --with-dbus-test-dir=${PTEST_PATH} \ - ${EXTRA_OECONF_X}" - -do_install() { - : -} - -do_install_ptest() { - install -d ${D}${PTEST_PATH}/test - l="shell printf refs syslog marshal syntax corrupt dbus-daemon dbus-daemon-eavesdrop loopback relay" - for i in $l; do install ${B}/test/.libs/test-$i ${D}${PTEST_PATH}/test; done - l="bus bus-system bus-launch-helper" - for i in $l; do install ${B}/bus/.libs/test-$i ${D}${PTEST_PATH}/test; done - install ${B}/dbus/.libs/test-dbus ${D}${PTEST_PATH}/test - cp -r ${B}/test/data ${D}${PTEST_PATH}/test -} -RDEPENDS_${PN}-ptest += "bash" diff --git a/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-test_1.10.14.bb b/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-test_1.10.14.bb new file mode 100644 index 000000000..539481472 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/dbus/dbus-test_1.10.14.bb @@ -0,0 +1,58 @@ +SUMMARY = "D-Bus test package (for D-bus functionality testing only)" +HOMEPAGE = "http://dbus.freedesktop.org" +SECTION = "base" +LICENSE = "AFL-2 | GPLv2+" +LIC_FILES_CHKSUM = "file://COPYING;md5=10dded3b58148f3f1fd804b26354af3e \ + file://dbus/dbus.h;beginline=6;endline=20;md5=7755c9d7abccd5dbd25a6a974538bb3c" + +DEPENDS = "dbus glib-2.0" + +RDEPENDS_${PN} += "make" +RDEPENDS_${PN}-dev = "" + +SRC_URI = "http://dbus.freedesktop.org/releases/dbus/dbus-${PV}.tar.gz \ + file://tmpdir.patch \ + file://run-ptest \ + file://python-config.patch \ + file://clear-guid_from_server-if-send_negotiate_unix_f.patch \ + " + +SRC_URI[md5sum] = "3f7b013ce8f641cd4c897acda0ef3467" +SRC_URI[sha256sum] = "23238f70353e38ce5ca183ebc9525c0d97ac00ef640ad29cf794782af6e6a083" + +S="${WORKDIR}/dbus-${PV}" +FILESEXTRAPATHS =. 
"${FILE_DIRNAME}/dbus:" + +inherit autotools pkgconfig gettext ptest upstream-version-is-even + +EXTRA_OECONF_X = "${@bb.utils.contains('DISTRO_FEATURES', 'x11', '--with-x', '--without-x', d)}" +EXTRA_OECONF_X_class-native = "--without-x" + +EXTRA_OECONF = "--enable-tests \ + --enable-modular-tests \ + --enable-installed-tests \ + --enable-checks \ + --enable-asserts \ + --enable-verbose-mode \ + --disable-xml-docs \ + --disable-doxygen-docs \ + --disable-libaudit \ + --disable-systemd \ + --without-systemdsystemunitdir \ + --with-dbus-test-dir=${PTEST_PATH} \ + ${EXTRA_OECONF_X}" + +do_install() { + : +} + +do_install_ptest() { + install -d ${D}${PTEST_PATH}/test + l="shell printf refs syslog marshal syntax corrupt dbus-daemon dbus-daemon-eavesdrop loopback relay" + for i in $l; do install ${B}/test/.libs/test-$i ${D}${PTEST_PATH}/test; done + l="bus bus-system bus-launch-helper" + for i in $l; do install ${B}/bus/.libs/test-$i ${D}${PTEST_PATH}/test; done + install ${B}/dbus/.libs/test-dbus ${D}${PTEST_PATH}/test + cp -r ${B}/test/data ${D}${PTEST_PATH}/test +} +RDEPENDS_${PN}-ptest += "bash" diff --git a/import-layers/yocto-poky/meta/recipes-core/dbus/dbus_1.10.10.bb b/import-layers/yocto-poky/meta/recipes-core/dbus/dbus_1.10.10.bb deleted file mode 100644 index 4db0b9b0b..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/dbus/dbus_1.10.10.bb +++ /dev/null @@ -1,181 +0,0 @@ -SUMMARY = "D-Bus message bus" -DESCRIPTION = "D-Bus is a message bus system, a simple way for applications to talk to one another. In addition to interprocess communication, D-Bus helps coordinate process lifecycle; it makes it simple and reliable to code a \"single instance\" application or daemon, and to launch applications and daemons on demand when their services are needed." -HOMEPAGE = "http://dbus.freedesktop.org" -SECTION = "base" -LICENSE = "AFL-2 | GPLv2+" -LIC_FILES_CHKSUM = "file://COPYING;md5=10dded3b58148f3f1fd804b26354af3e \ - file://dbus/dbus.h;beginline=6;endline=20;md5=7755c9d7abccd5dbd25a6a974538bb3c" -DEPENDS = "expat virtual/libintl" -RDEPENDS_dbus_class-native = "" -RDEPENDS_dbus_class-nativesdk = "" -PACKAGES += "${@bb.utils.contains('DISTRO_FEATURES', 'ptest', '${PN}-ptest', '', d)}" -ALLOW_EMPTY_dbus-ptest = "1" -RDEPENDS_dbus-ptest_class-target = "dbus-test-ptest" - -SRC_URI = "http://dbus.freedesktop.org/releases/dbus/dbus-${PV}.tar.gz \ - file://tmpdir.patch \ - file://dbus-1.init \ - file://os-test.patch \ - file://clear-guid_from_server-if-send_negotiate_unix_f.patch \ - file://0001-configure.ac-explicitely-check-stdint.h.patch \ -" - -SRC_URI[md5sum] = "495676d240eb982921b3ad1343526849" -SRC_URI[sha256sum] = "9d8f1d069ab4d1a0255d7b400ea3bcef4430c42e729b1012abb2890e3f739a43" - -inherit useradd autotools pkgconfig gettext update-rc.d upstream-version-is-even - -INITSCRIPT_NAME = "dbus-1" -INITSCRIPT_PARAMS = "start 02 5 3 2 . stop 20 0 1 6 ." 
- -python __anonymous() { - if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d): - d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1") -} - -USERADD_PACKAGES = "${PN}" -GROUPADD_PARAM_${PN} = "-r netdev" -USERADD_PARAM_${PN} = "--system --home ${localstatedir}/lib/dbus \ - --no-create-home --shell /bin/false \ - --user-group messagebus" - -CONFFILES_${PN} = "${sysconfdir}/dbus-1/system.conf ${sysconfdir}/dbus-1/session.conf" - -DEBIANNAME_${PN} = "dbus-1" - -PACKAGES =+ "${PN}-lib" - -OLDPKGNAME = "dbus-x11" -OLDPKGNAME_class-nativesdk = "" - -# for compatibility -RPROVIDES_${PN} = "${OLDPKGNAME}" -RREPLACES_${PN} += "${OLDPKGNAME}" - -FILES_${PN} = "${bindir}/dbus-daemon* \ - ${bindir}/dbus-uuidgen \ - ${bindir}/dbus-cleanup-sockets \ - ${bindir}/dbus-send \ - ${bindir}/dbus-monitor \ - ${bindir}/dbus-launch \ - ${bindir}/dbus-run-session \ - ${bindir}/dbus-update-activation-environment \ - ${libexecdir}/dbus* \ - ${sysconfdir} \ - ${localstatedir} \ - ${datadir}/dbus-1/services \ - ${datadir}/dbus-1/system-services \ - ${datadir}/dbus-1/session.d \ - ${datadir}/dbus-1/session.conf \ - ${datadir}/dbus-1/system.d \ - ${datadir}/dbus-1/system.conf \ - ${systemd_system_unitdir} \ - ${systemd_user_unitdir} \ -" -FILES_${PN}-lib = "${libdir}/lib*.so.*" -RRECOMMENDS_${PN}-lib = "${PN}" -FILES_${PN}-dev += "${libdir}/dbus-1.0/include ${bindir}/dbus-test-tool" - -pkg_postinst_dbus() { - # If both systemd and sysvinit are enabled, mask the dbus-1 init script - if ${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','true','false',d)}; then - if [ -n "$D" ]; then - OPTS="--root=$D" - fi - systemctl $OPTS mask dbus-1.service - fi - - if [ -z "$D" ] && [ -e /etc/init.d/populate-volatile.sh ] ; then - /etc/init.d/populate-volatile.sh update - fi -} - -EXTRA_OECONF = "--disable-tests \ - --disable-xml-docs \ - --disable-doxygen-docs \ - --disable-libaudit \ - " - -EXTRA_OECONF_append_class-native = " --disable-selinux" - -PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'systemd', '', d)} \ - ${@bb.utils.contains('DISTRO_FEATURES', 'largefile', 'largefile', '', d)} \ - ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'x11', '', d)}" -PACKAGECONFIG_class-native = "" -PACKAGECONFIG_class-nativesdk = "" - -PACKAGECONFIG[systemd] = "--enable-systemd --with-systemdsystemunitdir=${systemd_system_unitdir},--disable-systemd --without-systemdsystemunitdir,systemd" -PACKAGECONFIG[x11] = "--with-x --enable-x11-autolaunch,--without-x --disable-x11-autolaunch, virtual/libx11 libsm" -PACKAGECONFIG[largefile] = "--enable-largefile,--disable-largefile,," -PACKAGECONFIG[user-session] = "--enable-user-session --with-systemduserunitdir=${systemd_user_unitdir},--disable-user-session" - -do_install() { - autotools_do_install - - if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then - install -d ${D}${sysconfdir}/init.d - sed 's:@bindir@:${bindir}:' < ${WORKDIR}/dbus-1.init >${WORKDIR}/dbus-1.init.sh - install -m 0755 ${WORKDIR}/dbus-1.init.sh ${D}${sysconfdir}/init.d/dbus-1 - fi - - if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then - for i in dbus.target.wants sockets.target.wants multi-user.target.wants; do \ - install -d ${D}${systemd_system_unitdir}/$i; done - install -m 0644 ${B}/bus/dbus.service ${B}/bus/dbus.socket ${D}${systemd_system_unitdir}/ - cd ${D}${systemd_system_unitdir}/dbus.target.wants/ - ln -fs ../dbus.socket ${D}${systemd_system_unitdir}/dbus.target.wants/dbus.socket - ln -fs ../dbus.socket 
${D}${systemd_system_unitdir}/sockets.target.wants/dbus.socket - ln -fs ../dbus.service ${D}${systemd_system_unitdir}/multi-user.target.wants/dbus.service - fi - - install -d ${D}${sysconfdir}/default/volatiles - echo "d messagebus messagebus 0755 ${localstatedir}/run/dbus none" \ - > ${D}${sysconfdir}/default/volatiles/99_dbus - - - mkdir -p ${D}${localstatedir}/lib/dbus - - chown messagebus:messagebus ${D}${localstatedir}/lib/dbus - - chown root:messagebus ${D}${libexecdir}/dbus-daemon-launch-helper - chmod 4755 ${D}${libexecdir}/dbus-daemon-launch-helper - - # Remove Red Hat initscript - rm -rf ${D}${sysconfdir}/rc.d - - # Remove empty testexec directory as we don't build tests - rm -rf ${D}${libdir}/dbus-1.0/test - - # Remove /var/run as it is created on startup - rm -rf ${D}${localstatedir}/run -} - -do_install_class-native() { - autotools_do_install - - # for dbus-glib-native introspection generation - install -d ${D}${STAGING_DATADIR_NATIVE}/dbus/ - # N.B. is below install actually required? - install -m 0644 bus/session.conf ${D}${STAGING_DATADIR_NATIVE}/dbus/session.conf - - # dbus-glib-native and dbus-glib need this xml file - ./bus/dbus-daemon --introspect > ${D}${STAGING_DATADIR_NATIVE}/dbus/dbus-bus-introspect.xml - - # dbus-launch has no X support so lets not install it in case the host - # has a more featured and useful version - rm -f ${D}${bindir}/dbus-launch -} - -do_install_class-nativesdk() { - autotools_do_install - - # dbus-launch has no X support so lets not install it in case the host - # has a more featured and useful version - rm -f ${D}${bindir}/dbus-launch - - # Remove /var/run to avoid QA error - rm -rf ${D}${localstatedir}/run -} -BBCLASSEXTEND = "native nativesdk" - -INSANE_SKIP_${PN}-ptest += "build-deps" diff --git a/import-layers/yocto-poky/meta/recipes-core/dbus/dbus_1.10.14.bb b/import-layers/yocto-poky/meta/recipes-core/dbus/dbus_1.10.14.bb new file mode 100644 index 000000000..e1d735630 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/dbus/dbus_1.10.14.bb @@ -0,0 +1,180 @@ +SUMMARY = "D-Bus message bus" +DESCRIPTION = "D-Bus is a message bus system, a simple way for applications to talk to one another. In addition to interprocess communication, D-Bus helps coordinate process lifecycle; it makes it simple and reliable to code a \"single instance\" application or daemon, and to launch applications and daemons on demand when their services are needed." +HOMEPAGE = "http://dbus.freedesktop.org" +SECTION = "base" +LICENSE = "AFL-2 | GPLv2+" +LIC_FILES_CHKSUM = "file://COPYING;md5=10dded3b58148f3f1fd804b26354af3e \ + file://dbus/dbus.h;beginline=6;endline=20;md5=7755c9d7abccd5dbd25a6a974538bb3c" +DEPENDS = "expat virtual/libintl" +RDEPENDS_dbus_class-native = "" +RDEPENDS_dbus_class-nativesdk = "" +PACKAGES += "${@bb.utils.contains('DISTRO_FEATURES', 'ptest', '${PN}-ptest', '', d)}" +ALLOW_EMPTY_dbus-ptest = "1" +RDEPENDS_dbus-ptest_class-target = "dbus-test-ptest" + +SRC_URI = "http://dbus.freedesktop.org/releases/dbus/dbus-${PV}.tar.gz \ + file://tmpdir.patch \ + file://dbus-1.init \ + file://os-test.patch \ + file://clear-guid_from_server-if-send_negotiate_unix_f.patch \ + file://0001-configure.ac-explicitely-check-stdint.h.patch \ +" + +SRC_URI[md5sum] = "3f7b013ce8f641cd4c897acda0ef3467" +SRC_URI[sha256sum] = "23238f70353e38ce5ca183ebc9525c0d97ac00ef640ad29cf794782af6e6a083" + +inherit useradd autotools pkgconfig gettext update-rc.d upstream-version-is-even + +INITSCRIPT_NAME = "dbus-1" +INITSCRIPT_PARAMS = "start 02 5 3 2 . 
stop 20 0 1 6 ." + +python __anonymous() { + if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d): + d.setVar("INHIBIT_UPDATERCD_BBCLASS", "1") +} + +USERADD_PACKAGES = "${PN}" +GROUPADD_PARAM_${PN} = "-r netdev" +USERADD_PARAM_${PN} = "--system --home ${localstatedir}/lib/dbus \ + --no-create-home --shell /bin/false \ + --user-group messagebus" + +CONFFILES_${PN} = "${sysconfdir}/dbus-1/system.conf ${sysconfdir}/dbus-1/session.conf" + +DEBIANNAME_${PN} = "dbus-1" + +PACKAGES =+ "${PN}-lib" + +OLDPKGNAME = "dbus-x11" +OLDPKGNAME_class-nativesdk = "" + +# for compatibility +RPROVIDES_${PN} = "${OLDPKGNAME}" +RREPLACES_${PN} += "${OLDPKGNAME}" + +FILES_${PN} = "${bindir}/dbus-daemon* \ + ${bindir}/dbus-uuidgen \ + ${bindir}/dbus-cleanup-sockets \ + ${bindir}/dbus-send \ + ${bindir}/dbus-monitor \ + ${bindir}/dbus-launch \ + ${bindir}/dbus-run-session \ + ${bindir}/dbus-update-activation-environment \ + ${libexecdir}/dbus* \ + ${sysconfdir} \ + ${localstatedir} \ + ${datadir}/dbus-1/services \ + ${datadir}/dbus-1/system-services \ + ${datadir}/dbus-1/session.d \ + ${datadir}/dbus-1/session.conf \ + ${datadir}/dbus-1/system.d \ + ${datadir}/dbus-1/system.conf \ + ${systemd_system_unitdir} \ + ${systemd_user_unitdir} \ +" +FILES_${PN}-lib = "${libdir}/lib*.so.*" +RRECOMMENDS_${PN}-lib = "${PN}" +FILES_${PN}-dev += "${libdir}/dbus-1.0/include ${bindir}/dbus-test-tool" + +PACKAGE_WRITE_DEPS += "${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','systemd-systemctl-native','',d)}" +pkg_postinst_dbus() { + # If both systemd and sysvinit are enabled, mask the dbus-1 init script + if ${@bb.utils.contains('DISTRO_FEATURES','systemd sysvinit','true','false',d)}; then + if [ -n "$D" ]; then + OPTS="--root=$D" + fi + systemctl $OPTS mask dbus-1.service + fi + + if [ -z "$D" ] && [ -e /etc/init.d/populate-volatile.sh ] ; then + /etc/init.d/populate-volatile.sh update + fi +} + +EXTRA_OECONF = "--disable-tests \ + --disable-xml-docs \ + --disable-doxygen-docs \ + --disable-libaudit \ + --enable-largefile \ + " + +EXTRA_OECONF_append_class-target = " SYSTEMCTL=${base_bindir}/systemctl" +EXTRA_OECONF_append_class-native = " --disable-selinux" + +PACKAGECONFIG ??= "${@bb.utils.filter('DISTRO_FEATURES', 'systemd x11', d)}" +PACKAGECONFIG_class-native = "" +PACKAGECONFIG_class-nativesdk = "" + +PACKAGECONFIG[systemd] = "--enable-systemd --with-systemdsystemunitdir=${systemd_system_unitdir},--disable-systemd --without-systemdsystemunitdir,systemd" +PACKAGECONFIG[x11] = "--with-x --enable-x11-autolaunch,--without-x --disable-x11-autolaunch, virtual/libx11 libsm" +PACKAGECONFIG[user-session] = "--enable-user-session --with-systemduserunitdir=${systemd_user_unitdir},--disable-user-session" + +do_install() { + autotools_do_install + + if ${@bb.utils.contains('DISTRO_FEATURES', 'sysvinit', 'true', 'false', d)}; then + install -d ${D}${sysconfdir}/init.d + sed 's:@bindir@:${bindir}:' < ${WORKDIR}/dbus-1.init >${WORKDIR}/dbus-1.init.sh + install -m 0755 ${WORKDIR}/dbus-1.init.sh ${D}${sysconfdir}/init.d/dbus-1 + fi + + if ${@bb.utils.contains('DISTRO_FEATURES', 'systemd', 'true', 'false', d)}; then + for i in dbus.target.wants sockets.target.wants multi-user.target.wants; do \ + install -d ${D}${systemd_system_unitdir}/$i; done + install -m 0644 ${B}/bus/dbus.service ${B}/bus/dbus.socket ${D}${systemd_system_unitdir}/ + ln -fs ../dbus.socket ${D}${systemd_system_unitdir}/dbus.target.wants/dbus.socket + ln -fs ../dbus.socket ${D}${systemd_system_unitdir}/sockets.target.wants/dbus.socket + ln -fs 
../dbus.service ${D}${systemd_system_unitdir}/multi-user.target.wants/dbus.service + fi + + install -d ${D}${sysconfdir}/default/volatiles + echo "d messagebus messagebus 0755 ${localstatedir}/run/dbus none" \ + > ${D}${sysconfdir}/default/volatiles/99_dbus + + + mkdir -p ${D}${localstatedir}/lib/dbus + + chown messagebus:messagebus ${D}${localstatedir}/lib/dbus + + chown root:messagebus ${D}${libexecdir}/dbus-daemon-launch-helper + chmod 4755 ${D}${libexecdir}/dbus-daemon-launch-helper + + # Remove Red Hat initscript + rm -rf ${D}${sysconfdir}/rc.d + + # Remove empty testexec directory as we don't build tests + rm -rf ${D}${libdir}/dbus-1.0/test + + # Remove /var/run as it is created on startup + rm -rf ${D}${localstatedir}/run +} + +do_install_class-native() { + autotools_do_install + + # for dbus-glib-native introspection generation + install -d ${D}${STAGING_DATADIR_NATIVE}/dbus/ + # N.B. is below install actually required? + install -m 0644 bus/session.conf ${D}${STAGING_DATADIR_NATIVE}/dbus/session.conf + + # dbus-glib-native and dbus-glib need this xml file + ./bus/dbus-daemon --introspect > ${D}${STAGING_DATADIR_NATIVE}/dbus/dbus-bus-introspect.xml + + # dbus-launch has no X support so lets not install it in case the host + # has a more featured and useful version + rm -f ${D}${bindir}/dbus-launch +} + +do_install_class-nativesdk() { + autotools_do_install + + # dbus-launch has no X support so lets not install it in case the host + # has a more featured and useful version + rm -f ${D}${bindir}/dbus-launch + + # Remove /var/run to avoid QA error + rm -rf ${D}${localstatedir}/run +} +BBCLASSEXTEND = "native nativesdk" + +INSANE_SKIP_${PN}-ptest += "build-deps" diff --git a/import-layers/yocto-poky/meta/recipes-core/dropbear/dropbear.inc b/import-layers/yocto-poky/meta/recipes-core/dropbear/dropbear.inc index bda7eb847..b6b436c58 100644 --- a/import-layers/yocto-poky/meta/recipes-core/dropbear/dropbear.inc +++ b/import-layers/yocto-poky/meta/recipes-core/dropbear/dropbear.inc @@ -8,7 +8,7 @@ LICENSE = "MIT & BSD-3-Clause & BSD-2-Clause & PD" LIC_FILES_CHKSUM = "file://LICENSE;md5=a5ec40cafba26fc4396d0b550f824e01" DEPENDS = "zlib" -RPROVIDES_${PN} = "ssh sshd" +RPROVIDES_${PN} = "ssh sshd" DEPENDS += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'libpam', '', d)}" @@ -63,7 +63,7 @@ do_install() { install -m 0755 dropbearmulti ${D}${sbindir}/ ln -s ${sbindir}/dropbearmulti ${D}${bindir}/dbclient - + for i in ${SBINCOMMANDS} do ln -s ./dropbearmulti ${D}${sbindir}/$i @@ -74,7 +74,7 @@ do_install() { -e 's,/usr/bin,${bindir},g' \ -e 's,/usr,${prefix},g' ${WORKDIR}/init > ${D}${sysconfdir}/init.d/dropbear chmod 755 ${D}${sysconfdir}/init.d/dropbear - if [ "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}" = "pam" ]; then + if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then install -d ${D}${sysconfdir}/pam.d install -m 0644 ${WORKDIR}/dropbear ${D}${sysconfdir}/pam.d/ fi diff --git a/import-layers/yocto-poky/meta/recipes-core/dropbear/dropbear/init b/import-layers/yocto-poky/meta/recipes-core/dropbear/dropbear/init index 434bd6b97..f6e1c462f 100755 --- a/import-layers/yocto-poky/meta/recipes-core/dropbear/dropbear/init +++ b/import-layers/yocto-poky/meta/recipes-core/dropbear/dropbear/init @@ -40,49 +40,28 @@ done if [ $readonly_rootfs = "1" ]; then mkdir -p /var/lib/dropbear DROPBEAR_RSAKEY_DEFAULT="/var/lib/dropbear/dropbear_rsa_host_key" - DROPBEAR_DSSKEY_DEFAULT="/var/lib/dropbear/dropbear_dss_host_key" else 
DROPBEAR_RSAKEY_DEFAULT="/etc/dropbear/dropbear_rsa_host_key" - DROPBEAR_DSSKEY_DEFAULT="/etc/dropbear/dropbear_dss_host_key" fi test -z "$DROPBEAR_BANNER" || \ DROPBEAR_EXTRA_ARGS="$DROPBEAR_EXTRA_ARGS -b $DROPBEAR_BANNER" test -n "$DROPBEAR_RSAKEY" || \ DROPBEAR_RSAKEY=$DROPBEAR_RSAKEY_DEFAULT -test -n "$DROPBEAR_DSSKEY" || \ - DROPBEAR_DSSKEY=$DROPBEAR_DSSKEY_DEFAULT -test -n "$DROPBEAR_KEYTYPES" || \ - DROPBEAR_KEYTYPES="rsa" gen_keys() { -for t in $DROPBEAR_KEYTYPES; do - case $t in - rsa) - if [ -f "$DROPBEAR_RSAKEY" -a ! -s "$DROPBEAR_RSAKEY" ]; then - rm $DROPBEAR_RSAKEY || true - fi - test -f $DROPBEAR_RSAKEY || dropbearkey -t rsa -f $DROPBEAR_RSAKEY $DROPBEAR_RSAKEY_ARGS - ;; - dsa) - if [ -f "$DROPBEAR_DSSKEY" -a ! -s "$DROPBEAR_DSSKEY" ]; then - rm $DROPBEAR_DSSKEY || true - fi - test -f $DROPBEAR_DSSKEY || dropbearkey -t dss -f $DROPBEAR_DSSKEY $DROPBEAR_DSSKEY_ARGS - ;; - esac -done + if [ -f "$DROPBEAR_RSAKEY" -a ! -s "$DROPBEAR_RSAKEY" ]; then + rm $DROPBEAR_RSAKEY || true + fi + test -f $DROPBEAR_RSAKEY || dropbearkey -t rsa -f $DROPBEAR_RSAKEY $DROPBEAR_RSAKEY_ARGS } case "$1" in start) echo -n "Starting $DESC: " gen_keys - KEY_ARGS="" - test -f $DROPBEAR_DSSKEY && KEY_ARGS="$KEY_ARGS -d $DROPBEAR_DSSKEY" - test -f $DROPBEAR_RSAKEY && KEY_ARGS="$KEY_ARGS -r $DROPBEAR_RSAKEY" start-stop-daemon -S -p $PIDFILE \ - -x "$DAEMON" -- $KEY_ARGS \ + -x "$DAEMON" -- -r $DROPBEAR_RSAKEY \ -p "$DROPBEAR_PORT" $DROPBEAR_EXTRA_ARGS echo "$NAME." ;; @@ -95,11 +74,8 @@ case "$1" in echo -n "Restarting $DESC: " start-stop-daemon -K -x "$DAEMON" -p $PIDFILE sleep 1 - KEY_ARGS="" - test -f $DROPBEAR_DSSKEY && KEY_ARGS="$KEY_ARGS -d $DROPBEAR_DSSKEY" - test -f $DROPBEAR_RSAKEY && KEY_ARGS="$KEY_ARGS -r $DROPBEAR_RSAKEY" start-stop-daemon -S -p $PIDFILE \ - -x "$DAEMON" -- $KEY_ARGS \ + -x "$DAEMON" -- -r $DROPBEAR_RSAKEY \ -p "$DROPBEAR_PORT" $DROPBEAR_EXTRA_ARGS echo "$NAME." 
;; diff --git a/import-layers/yocto-poky/meta/recipes-core/expat/expat.inc b/import-layers/yocto-poky/meta/recipes-core/expat/expat.inc index fe9d7e74f..9fa0ca2eb 100644 --- a/import-layers/yocto-poky/meta/recipes-core/expat/expat.inc +++ b/import-layers/yocto-poky/meta/recipes-core/expat/expat.inc @@ -7,12 +7,12 @@ LICENSE = "MIT" SRC_URI = "${SOURCEFORGE_MIRROR}/expat/expat-${PV}.tar.bz2 \ file://autotools.patch \ " - -inherit autotools lib_package gzipnative +inherit autotools lib_package # This package uses an archive format known to have issue with some # versions of gzip -do_unpack[depends] += "gzip-native:do_populate_sysroot" +DEPENDS += "pigz-native" +do_unpack[depends] += "pigz-native:do_populate_sysroot" do_configure_prepend () { rm -f ${S}/conftools/libtool.m4 diff --git a/import-layers/yocto-poky/meta/recipes-core/fts/fts.bb b/import-layers/yocto-poky/meta/recipes-core/fts/fts.bb index 9d8230fa3..de9297ebe 100644 --- a/import-layers/yocto-poky/meta/recipes-core/fts/fts.bb +++ b/import-layers/yocto-poky/meta/recipes-core/fts/fts.bb @@ -22,10 +22,7 @@ S = "${WORKDIR}/${BPN}" do_configure[noexec] = "1" -HASHSTYLE_mips = "sysv" -HASHSTYLE_mipsel = "sysv" -HASHSTYLE_mips64 = "sysv" -HASHSTYLE_mips64el = "sysv" +HASHSTYLE_mipsarch = "sysv" HASHSTYLE = "gnu" VER = "0" diff --git a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/disable_java.patch b/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/disable_java.patch deleted file mode 100644 index e4c5f1566..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/disable_java.patch +++ /dev/null @@ -1,39 +0,0 @@ - -# Pulled from OpenEmbedded -# -# Commented by: Saul Wold - -Upstream-Status: Inappropriate [licensing] - -Index: gettext-0.16.1/gettext-tools/configure.ac -=================================================================== ---- gettext-0.16.1.orig/gettext-tools/configure.ac 2006-11-27 09:02:01.000000000 -0800 -+++ gettext-0.16.1/gettext-tools/configure.ac 2011-03-16 16:55:36.111396557 -0700 -@@ -36,27 +36,15 @@ - gt_JAVA_CHOICE - - gt_GCJ --if test -n "$HAVE_GCJ" && test "$JAVA_CHOICE" = yes; then -- BUILDJAVAEXE=yes --else - BUILDJAVAEXE=no --fi - AC_SUBST(BUILDJAVAEXE) - - gt_JAVAEXEC - gt_JAVACOMP([1.3]) - AC_CHECK_PROG(JAR, jar, jar) --if test -n "$HAVE_JAVACOMP" && test -n "$JAR" && test "$JAVA_CHOICE" != no; then -- BUILDJAVA=yes --else - BUILDJAVA=no --fi - AC_SUBST(BUILDJAVA) --if test -n "$HAVE_JAVAEXEC" && test $BUILDJAVA = yes; then -- TESTJAVA=yes --else - TESTJAVA=no --fi - AC_SUBST(TESTJAVA) - - gt_CSHARPCOMP diff --git a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/fix_aclocal_version.patch b/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/fix_aclocal_version.patch deleted file mode 100644 index 49a10f5b1..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/fix_aclocal_version.patch +++ /dev/null @@ -1,110 +0,0 @@ - -This patch updates the required version number from 2.61 to the -current 2.65 version of aclocal, this will need to be updated -when we update aclocal - -Upstream-Status: Inappropriate [configuration] - -Signed-off-by: Saul Wold - -Index: gettext-0.16.1/autoconf-lib-link/m4/lib-link.m4 -=================================================================== ---- gettext-0.16.1.orig/autoconf-lib-link/m4/lib-link.m4 2011-03-17 02:24:36.391519775 -0700 -+++ gettext-0.16.1/autoconf-lib-link/m4/lib-link.m4 2011-03-17 02:24:45.569526073 -0700 -@@ -148,7 +148,7 @@ - 
define([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], - [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) - dnl Autoconf >= 2.61 supports dots in --with options. -- define([N_A_M_E],[m4_if(m4_version_compare(m4_defn([m4_PACKAGE_VERSION]),[2.61]),[-1],[translit([$1],[.],[_])],[$1])]) -+ define([N_A_M_E],[m4_if(m4_version_compare(m4_defn([m4_PACKAGE_VERSION]),[2.65]),[-1],[translit([$1],[.],[_])],[$1])]) - dnl By default, look in $includedir and $libdir. - use_additional=yes - AC_LIB_WITH_FINAL_PREFIX([ -Index: gettext-0.16.1/autoconf-lib-link/tests/rpathly/aclocal.m4 -=================================================================== ---- gettext-0.16.1.orig/autoconf-lib-link/tests/rpathly/aclocal.m4 2011-03-17 02:24:36.391519775 -0700 -+++ gettext-0.16.1/autoconf-lib-link/tests/rpathly/aclocal.m4 2011-03-17 02:24:45.570526904 -0700 -@@ -11,8 +11,8 @@ - # even the implied warranty of MERCHANTABILITY or FITNESS FOR A - # PARTICULAR PURPOSE. - --m4_if(m4_PACKAGE_VERSION, [2.61],, --[m4_fatal([this file was generated for autoconf 2.61. -+m4_if(m4_PACKAGE_VERSION, [2.65],, -+[m4_fatal([this file was generated for autoconf 2.65. - You have another version of autoconf. If you want to use that, - you should regenerate the build system entirely.], [63])]) - -Index: gettext-0.16.1/autoconf-lib-link/tests/rpathx/aclocal.m4 -=================================================================== ---- gettext-0.16.1.orig/autoconf-lib-link/tests/rpathx/aclocal.m4 2011-03-17 02:24:36.391519775 -0700 -+++ gettext-0.16.1/autoconf-lib-link/tests/rpathx/aclocal.m4 2011-03-17 02:24:45.572525773 -0700 -@@ -11,8 +11,8 @@ - # even the implied warranty of MERCHANTABILITY or FITNESS FOR A - # PARTICULAR PURPOSE. - --m4_if(m4_PACKAGE_VERSION, [2.61],, --[m4_fatal([this file was generated for autoconf 2.61. -+m4_if(m4_PACKAGE_VERSION, [2.65],, -+[m4_fatal([this file was generated for autoconf 2.65. - You have another version of autoconf. If you want to use that, - you should regenerate the build system entirely.], [63])]) - -Index: gettext-0.16.1/autoconf-lib-link/tests/rpathy/aclocal.m4 -=================================================================== ---- gettext-0.16.1.orig/autoconf-lib-link/tests/rpathy/aclocal.m4 2011-03-17 02:24:36.391519775 -0700 -+++ gettext-0.16.1/autoconf-lib-link/tests/rpathy/aclocal.m4 2011-03-17 02:24:45.578394916 -0700 -@@ -11,7 +11,7 @@ - # even the implied warranty of MERCHANTABILITY or FITNESS FOR A - # PARTICULAR PURPOSE. - --m4_if(m4_PACKAGE_VERSION, [2.61],, -+m4_if(m4_PACKAGE_VERSION, [2.65],, - [m4_fatal([this file was generated for autoconf 2.61. - You have another version of autoconf. If you want to use that, - you should regenerate the build system entirely.], [63])]) -Index: gettext-0.16.1/autoconf-lib-link/tests/rpathz/aclocal.m4 -=================================================================== ---- gettext-0.16.1.orig/autoconf-lib-link/tests/rpathz/aclocal.m4 2011-03-17 02:24:36.391519775 -0700 -+++ gettext-0.16.1/autoconf-lib-link/tests/rpathz/aclocal.m4 2011-03-17 02:24:45.586395416 -0700 -@@ -11,8 +11,8 @@ - # even the implied warranty of MERCHANTABILITY or FITNESS FOR A - # PARTICULAR PURPOSE. - --m4_if(m4_PACKAGE_VERSION, [2.61],, --[m4_fatal([this file was generated for autoconf 2.61. -+m4_if(m4_PACKAGE_VERSION, [2.65],, -+[m4_fatal([this file was generated for autoconf 2.65. - You have another version of autoconf. 
If you want to use that, - you should regenerate the build system entirely.], [63])]) - -Index: gettext-0.16.1/gettext-tools/aclocal.m4 -=================================================================== ---- gettext-0.16.1.orig/gettext-tools/aclocal.m4 2006-11-27 09:34:32.000000000 -0800 -+++ gettext-0.16.1/gettext-tools/aclocal.m4 2011-03-17 02:27:01.135682807 -0700 -@@ -11,8 +11,8 @@ - # even the implied warranty of MERCHANTABILITY or FITNESS FOR A - # PARTICULAR PURPOSE. - --m4_if(m4_PACKAGE_VERSION, [2.61],, --[m4_fatal([this file was generated for autoconf 2.61. -+m4_if(m4_PACKAGE_VERSION, [2.65],, -+[m4_fatal([this file was generated for autoconf 2.65. - You have another version of autoconf. If you want to use that, - you should regenerate the build system entirely.], [63])]) - -Index: gettext-0.16.1/gettext-tools/examples/aclocal.m4 -=================================================================== ---- gettext-0.16.1.orig/gettext-tools/examples/aclocal.m4 2006-11-27 09:16:23.000000000 -0800 -+++ gettext-0.16.1/gettext-tools/examples/aclocal.m4 2011-03-17 02:26:52.589396683 -0700 -@@ -11,8 +11,8 @@ - # even the implied warranty of MERCHANTABILITY or FITNESS FOR A - # PARTICULAR PURPOSE. - --m4_if(m4_PACKAGE_VERSION, [2.61],, --[m4_fatal([this file was generated for autoconf 2.61. -+m4_if(m4_PACKAGE_VERSION, [2.65],, -+[m4_fatal([this file was generated for autoconf 2.65. - You have another version of autoconf. If you want to use that, - you should regenerate the build system entirely.], [63])]) - diff --git a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/fix_gnu_source_circular.patch b/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/fix_gnu_source_circular.patch deleted file mode 100644 index fd5feebc8..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/fix_gnu_source_circular.patch +++ /dev/null @@ -1,348 +0,0 @@ - -This patch removes the circular dependency on AC_GNU_SOURCE in -AC_USE_SYSTEM_EXTENSIONS. - -Thanks to Mark Hatle for the timely pointers and fixes. - -Upstream-Status: Inappropriate [licensing] - -Signed-off-by: Saul Wold - -diff -ru gettext-0.16.1/gettext-runtime/gnulib-m4/extensions.m4 gettext.patched/gettext-runtime/gnulib-m4/extensions.m4 ---- gettext-0.16.1/gettext-runtime/gnulib-m4/extensions.m4 2006-11-27 09:14:38.000000000 -0800 -+++ gettext.patched/gettext-runtime/gnulib-m4/extensions.m4 2011-03-16 23:54:01.711923448 -0700 -@@ -16,43 +16,47 @@ - # ------------------------ - # Enable extensions on systems that normally disable them, - # typically due to standards-conformance issues. --AC_DEFUN([AC_USE_SYSTEM_EXTENSIONS], --[ -- AC_BEFORE([$0], [AC_COMPILE_IFELSE]) -- AC_BEFORE([$0], [AC_RUN_IFELSE]) -- -- AC_REQUIRE([AC_GNU_SOURCE]) -- AC_REQUIRE([AC_AIX]) -- AC_REQUIRE([AC_MINIX]) -- -- AH_VERBATIM([__EXTENSIONS__], --[/* Enable extensions on Solaris. 
*/ --#ifndef __EXTENSIONS__ --# undef __EXTENSIONS__ --#endif --#ifndef _POSIX_PTHREAD_SEMANTICS --# undef _POSIX_PTHREAD_SEMANTICS --#endif --#ifndef _TANDEM_SOURCE --# undef _TANDEM_SOURCE --#endif]) -- AC_CACHE_CHECK([whether it is safe to define __EXTENSIONS__], -- [ac_cv_safe_to_define___extensions__], -- [AC_COMPILE_IFELSE( -- [AC_LANG_PROGRAM([ --# define __EXTENSIONS__ 1 -- AC_INCLUDES_DEFAULT])], -- [ac_cv_safe_to_define___extensions__=yes], -- [ac_cv_safe_to_define___extensions__=no])]) -- test $ac_cv_safe_to_define___extensions__ = yes && -- AC_DEFINE([__EXTENSIONS__]) -- AC_DEFINE([_POSIX_PTHREAD_SEMANTICS]) -- AC_DEFINE([_TANDEM_SOURCE]) --]) -+#AC_DEFUN([AC_USE_SYSTEM_EXTENSIONS], -+#[ -+# AC_BEFORE([$0], [AC_COMPILE_IFELSE]) -+# AC_BEFORE([$0], [AC_RUN_IFELSE]) -+# -+# AC_REQUIRE([AC_GNU_SOURCE]) -+# AC_REQUIRE([AC_AIX]) -+# AC_REQUIRE([AC_MINIX]) -+# -+# AH_VERBATIM([__EXTENSIONS__], -+#[/* Enable extensions on Solaris. */ -+##ifndef __EXTENSIONS__ -+## undef __EXTENSIONS__ -+##endif -+##ifndef _POSIX_PTHREAD_SEMANTICS -+## undef _POSIX_PTHREAD_SEMANTICS -+##endif -+##ifndef _TANDEM_SOURCE -+## undef _TANDEM_SOURCE -+##endif]) -+# AC_CACHE_CHECK([whether it is safe to define __EXTENSIONS__], -+# [ac_cv_safe_to_define___extensions__], -+# [AC_COMPILE_IFELSE( -+# [AC_LANG_PROGRAM([ -+## define __EXTENSIONS__ 1 -+# AC_INCLUDES_DEFAULT])], -+# [ac_cv_safe_to_define___extensions__=yes], -+# [ac_cv_safe_to_define___extensions__=no])]) -+# test $ac_cv_safe_to_define___extensions__ = yes && -+# AC_DEFINE([__EXTENSIONS__]) -+# AC_DEFINE([_POSIX_PTHREAD_SEMANTICS]) -+# AC_DEFINE([_TANDEM_SOURCE]) -+#]) - - # gl_USE_SYSTEM_EXTENSIONS - # ------------------------ - # Enable extensions on systems that normally disable them, - # typically due to standards-conformance issues. - AC_DEFUN([gl_USE_SYSTEM_EXTENSIONS], -- [AC_REQUIRE([AC_USE_SYSTEM_EXTENSIONS])]) -+[ -+ AC_REQUIRE([AC_GNU_SOURCE]) -+ -+ AC_REQUIRE([AC_USE_SYSTEM_EXTENSIONS]) -+]) -diff -ru gettext-0.16.1/gettext-runtime/gnulib-m4/gnulib-comp.m4 gettext.patched/gettext-runtime/gnulib-m4/gnulib-comp.m4 ---- gettext-0.16.1/gettext-runtime/gnulib-m4/gnulib-comp.m4 2006-11-27 09:33:19.000000000 -0800 -+++ gettext.patched/gettext-runtime/gnulib-m4/gnulib-comp.m4 2011-03-16 23:50:17.471531838 -0700 -@@ -25,7 +25,7 @@ - m4_pattern_allow([^gl_LIBOBJS$])dnl a variable - m4_pattern_allow([^gl_LTLIBOBJS$])dnl a variable - AC_REQUIRE([AC_PROG_RANLIB]) -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - AC_REQUIRE([gl_USE_SYSTEM_EXTENSIONS]) - ]) - -diff -ru gettext-0.16.1/gettext-runtime/gnulib-m4/mbchar.m4 gettext.patched/gettext-runtime/gnulib-m4/mbchar.m4 ---- gettext-0.16.1/gettext-runtime/gnulib-m4/mbchar.m4 2006-11-27 09:14:38.000000000 -0800 -+++ gettext.patched/gettext-runtime/gnulib-m4/mbchar.m4 2011-03-16 23:50:10.814396529 -0700 -@@ -9,7 +9,7 @@ - - AC_DEFUN([gl_MBCHAR], - [ -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - dnl The following line is that so the user can test HAVE_WCHAR_H - dnl before #include "mbchar.h". - AC_CHECK_HEADERS_ONCE([wchar.h]) -diff -ru gettext-0.16.1/gettext-runtime/gnulib-m4/wcwidth.m4 gettext.patched/gettext-runtime/gnulib-m4/wcwidth.m4 ---- gettext-0.16.1/gettext-runtime/gnulib-m4/wcwidth.m4 2006-11-27 09:14:38.000000000 -0800 -+++ gettext.patched/gettext-runtime/gnulib-m4/wcwidth.m4 2011-03-16 23:50:08.357396247 -0700 -@@ -7,7 +7,7 @@ - AC_DEFUN([gl_FUNC_WCWIDTH], - [ - dnl Persuade glibc to declare wcwidth(). 
-- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - - AC_REQUIRE([AC_C_INLINE]) - AC_REQUIRE([gt_TYPE_WCHAR_T]) -diff -ru gettext-0.16.1/gettext-runtime/m4/lock.m4 gettext.patched/gettext-runtime/m4/lock.m4 ---- gettext-0.16.1/gettext-runtime/m4/lock.m4 2006-10-24 13:59:59.000000000 -0700 -+++ gettext.patched/gettext-runtime/m4/lock.m4 2011-03-16 23:50:04.355891676 -0700 -@@ -35,7 +35,7 @@ - AC_BEFORE([$0], [gl_ARGP])dnl - - AC_REQUIRE([AC_CANONICAL_HOST]) -- AC_REQUIRE([AC_GNU_SOURCE]) dnl needed for pthread_rwlock_t on glibc systems -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) dnl needed for pthread_rwlock_t on glibc systems - dnl Check for multithreading. - AC_ARG_ENABLE(threads, - AC_HELP_STRING([--enable-threads={posix|solaris|pth|win32}], [specify multithreading API]) -diff -ru gettext-0.16.1/gettext-tools/gnulib-m4/extensions.m4 gettext.patched/gettext-tools/gnulib-m4/extensions.m4 ---- gettext-0.16.1/gettext-tools/gnulib-m4/extensions.m4 2006-11-27 09:14:54.000000000 -0800 -+++ gettext.patched/gettext-tools/gnulib-m4/extensions.m4 2011-03-16 23:53:28.487671266 -0700 -@@ -16,43 +16,47 @@ - # ------------------------ - # Enable extensions on systems that normally disable them, - # typically due to standards-conformance issues. --AC_DEFUN([AC_USE_SYSTEM_EXTENSIONS], --[ -- AC_BEFORE([$0], [AC_COMPILE_IFELSE]) -- AC_BEFORE([$0], [AC_RUN_IFELSE]) -- -- AC_REQUIRE([AC_GNU_SOURCE]) -- AC_REQUIRE([AC_AIX]) -- AC_REQUIRE([AC_MINIX]) -- -- AH_VERBATIM([__EXTENSIONS__], --[/* Enable extensions on Solaris. */ --#ifndef __EXTENSIONS__ --# undef __EXTENSIONS__ --#endif --#ifndef _POSIX_PTHREAD_SEMANTICS --# undef _POSIX_PTHREAD_SEMANTICS --#endif --#ifndef _TANDEM_SOURCE --# undef _TANDEM_SOURCE --#endif]) -- AC_CACHE_CHECK([whether it is safe to define __EXTENSIONS__], -- [ac_cv_safe_to_define___extensions__], -- [AC_COMPILE_IFELSE( -- [AC_LANG_PROGRAM([ --# define __EXTENSIONS__ 1 -- AC_INCLUDES_DEFAULT])], -- [ac_cv_safe_to_define___extensions__=yes], -- [ac_cv_safe_to_define___extensions__=no])]) -- test $ac_cv_safe_to_define___extensions__ = yes && -- AC_DEFINE([__EXTENSIONS__]) -- AC_DEFINE([_POSIX_PTHREAD_SEMANTICS]) -- AC_DEFINE([_TANDEM_SOURCE]) --]) -+#AC_DEFUN([AC_USE_SYSTEM_EXTENSIONS], -+#[ -+# AC_BEFORE([$0], [AC_COMPILE_IFELSE]) -+# AC_BEFORE([$0], [AC_RUN_IFELSE]) -+# -+# AC_REQUIRE([AC_GNU_SOURCE]) -+# AC_REQUIRE([AC_AIX]) -+# AC_REQUIRE([AC_MINIX]) -+# -+# AH_VERBATIM([__EXTENSIONS__], -+#[/* Enable extensions on Solaris. */ -+##ifndef __EXTENSIONS__ -+## undef __EXTENSIONS__ -+##endif -+##ifndef _POSIX_PTHREAD_SEMANTICS -+## undef _POSIX_PTHREAD_SEMANTICS -+##endif -+##ifndef _TANDEM_SOURCE -+## undef _TANDEM_SOURCE -+##endif]) -+# AC_CACHE_CHECK([whether it is safe to define __EXTENSIONS__], -+# [ac_cv_safe_to_define___extensions__], -+# [AC_COMPILE_IFELSE( -+# [AC_LANG_PROGRAM([ -+## define __EXTENSIONS__ 1 -+# AC_INCLUDES_DEFAULT])], -+# [ac_cv_safe_to_define___extensions__=yes], -+# [ac_cv_safe_to_define___extensions__=no])]) -+# test $ac_cv_safe_to_define___extensions__ = yes && -+# AC_DEFINE([__EXTENSIONS__]) -+# AC_DEFINE([_POSIX_PTHREAD_SEMANTICS]) -+# AC_DEFINE([_TANDEM_SOURCE]) -+#]) - - # gl_USE_SYSTEM_EXTENSIONS - # ------------------------ - # Enable extensions on systems that normally disable them, - # typically due to standards-conformance issues. 
- AC_DEFUN([gl_USE_SYSTEM_EXTENSIONS], -- [AC_REQUIRE([AC_USE_SYSTEM_EXTENSIONS])]) -+[ -+ AC_REQUIRE([AC_GNU_SOURCE]) -+ -+ AC_REQUIRE([AC_USE_SYSTEM_EXTENSIONS]) -+]) -diff -ru gettext-0.16.1/gettext-tools/gnulib-m4/fnmatch.m4 gettext.patched/gettext-tools/gnulib-m4/fnmatch.m4 ---- gettext-0.16.1/gettext-tools/gnulib-m4/fnmatch.m4 2006-11-27 09:14:54.000000000 -0800 -+++ gettext.patched/gettext-tools/gnulib-m4/fnmatch.m4 2011-03-16 23:52:06.477463671 -0700 -@@ -91,7 +91,7 @@ - AC_DEFUN([gl_FUNC_FNMATCH_GNU], - [ - dnl Persuade glibc to declare FNM_CASEFOLD etc. -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - - FNMATCH_H= - _AC_FUNC_FNMATCH_IF([GNU], [ac_cv_func_fnmatch_gnu], -diff -ru gettext-0.16.1/gettext-tools/gnulib-m4/getdelim.m4 gettext.patched/gettext-tools/gnulib-m4/getdelim.m4 ---- gettext-0.16.1/gettext-tools/gnulib-m4/getdelim.m4 2006-11-27 09:14:54.000000000 -0800 -+++ gettext.patched/gettext-tools/gnulib-m4/getdelim.m4 2011-03-16 23:52:22.871674845 -0700 -@@ -12,7 +12,7 @@ - [ - - dnl Persuade glibc to declare getdelim(). -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - - AC_REPLACE_FUNCS(getdelim) - AC_CHECK_DECLS_ONCE(getdelim) -diff -ru gettext-0.16.1/gettext-tools/gnulib-m4/getline.m4 gettext.patched/gettext-tools/gnulib-m4/getline.m4 ---- gettext-0.16.1/gettext-tools/gnulib-m4/getline.m4 2006-11-27 09:14:54.000000000 -0800 -+++ gettext.patched/gettext-tools/gnulib-m4/getline.m4 2011-03-16 23:51:49.829971108 -0700 -@@ -16,7 +16,7 @@ - AC_DEFUN([gl_FUNC_GETLINE], - [ - dnl Persuade glibc to declare getline(). -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - - AC_CHECK_DECLS([getline]) - -diff -ru gettext-0.16.1/gettext-tools/gnulib-m4/gnulib-comp.m4 gettext.patched/gettext-tools/gnulib-m4/gnulib-comp.m4 ---- gettext-0.16.1/gettext-tools/gnulib-m4/gnulib-comp.m4 2006-11-27 09:33:36.000000000 -0800 -+++ gettext.patched/gettext-tools/gnulib-m4/gnulib-comp.m4 2011-03-16 23:52:18.970450488 -0700 -@@ -25,7 +25,7 @@ - m4_pattern_allow([^gl_LIBOBJS$])dnl a variable - m4_pattern_allow([^gl_LTLIBOBJS$])dnl a variable - AC_REQUIRE([AC_PROG_RANLIB]) -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - AC_REQUIRE([gl_USE_SYSTEM_EXTENSIONS]) - AC_REQUIRE([gl_LOCK_EARLY]) - ]) -diff -ru gettext-0.16.1/gettext-tools/gnulib-m4/mbchar.m4 gettext.patched/gettext-tools/gnulib-m4/mbchar.m4 ---- gettext-0.16.1/gettext-tools/gnulib-m4/mbchar.m4 2006-11-27 09:14:55.000000000 -0800 -+++ gettext.patched/gettext-tools/gnulib-m4/mbchar.m4 2011-03-16 23:51:40.844410216 -0700 -@@ -9,7 +9,7 @@ - - AC_DEFUN([gl_MBCHAR], - [ -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - dnl The following line is that so the user can test HAVE_WCHAR_H - dnl before #include "mbchar.h". - AC_CHECK_HEADERS_ONCE([wchar.h]) -diff -ru gettext-0.16.1/gettext-tools/gnulib-m4/stpcpy.m4 gettext.patched/gettext-tools/gnulib-m4/stpcpy.m4 ---- gettext-0.16.1/gettext-tools/gnulib-m4/stpcpy.m4 2006-11-27 09:14:55.000000000 -0800 -+++ gettext.patched/gettext-tools/gnulib-m4/stpcpy.m4 2011-03-16 23:52:14.691396045 -0700 -@@ -7,7 +7,7 @@ - AC_DEFUN([gl_FUNC_STPCPY], - [ - dnl Persuade glibc to declare stpcpy(). 
-- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - - AC_REPLACE_FUNCS(stpcpy) - if test $ac_cv_func_stpcpy = no; then -diff -ru gettext-0.16.1/gettext-tools/gnulib-m4/stpncpy.m4 gettext.patched/gettext-tools/gnulib-m4/stpncpy.m4 ---- gettext-0.16.1/gettext-tools/gnulib-m4/stpncpy.m4 2006-11-27 09:14:55.000000000 -0800 -+++ gettext.patched/gettext-tools/gnulib-m4/stpncpy.m4 2011-03-16 23:52:10.356641459 -0700 -@@ -7,7 +7,7 @@ - AC_DEFUN([gl_FUNC_STPNCPY], - [ - dnl Persuade glibc to declare stpncpy(). -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - - dnl Both glibc and AIX (4.3.3, 5.1) have an stpncpy() function - dnl declared in . Its side effects are the same as those -diff -ru gettext-0.16.1/gettext-tools/gnulib-m4/wcwidth.m4 gettext.patched/gettext-tools/gnulib-m4/wcwidth.m4 ---- gettext-0.16.1/gettext-tools/gnulib-m4/wcwidth.m4 2006-11-27 09:14:55.000000000 -0800 -+++ gettext.patched/gettext-tools/gnulib-m4/wcwidth.m4 2011-03-16 23:51:08.260324221 -0700 -@@ -7,7 +7,7 @@ - AC_DEFUN([gl_FUNC_WCWIDTH], - [ - dnl Persuade glibc to declare wcwidth(). -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - - AC_REQUIRE([AC_C_INLINE]) - AC_REQUIRE([gt_TYPE_WCHAR_T]) -diff -ru gettext-0.16.1/gettext-tools/libgettextpo/gnulib-m4/gnulib-comp.m4 gettext.patched/gettext-tools/libgettextpo/gnulib-m4/gnulib-comp.m4 ---- gettext-0.16.1/gettext-tools/libgettextpo/gnulib-m4/gnulib-comp.m4 2006-11-27 09:33:45.000000000 -0800 -+++ gettext.patched/gettext-tools/libgettextpo/gnulib-m4/gnulib-comp.m4 2011-03-16 23:51:02.036061317 -0700 -@@ -25,7 +25,7 @@ - m4_pattern_allow([^gl_LIBOBJS$])dnl a variable - m4_pattern_allow([^gl_LTLIBOBJS$])dnl a variable - AC_REQUIRE([AC_PROG_RANLIB]) -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - AC_REQUIRE([gl_USE_SYSTEM_EXTENSIONS]) - AC_REQUIRE([gl_LOCK_EARLY]) - ]) -diff -ru gettext-0.16.1/gettext-tools/m4/regex.m4 gettext.patched/gettext-tools/m4/regex.m4 ---- gettext-0.16.1/gettext-tools/m4/regex.m4 2006-11-27 09:02:05.000000000 -0800 -+++ gettext.patched/gettext-tools/m4/regex.m4 2011-03-16 23:50:53.533477195 -0700 -@@ -116,7 +116,7 @@ - dnl to get them. - - dnl Persuade glibc to declare mempcpy(). -- AC_REQUIRE([AC_GNU_SOURCE]) -+ dnl AC_REQUIRE([AC_GNU_SOURCE]) - - AC_REQUIRE([AC_C_RESTRICT]) - AC_REQUIRE([AC_FUNC_ALLOCA]) diff --git a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/gettext-autoconf-lib-link-no-L.patch b/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/gettext-autoconf-lib-link-no-L.patch deleted file mode 100644 index 072fe5ff6..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/gettext-autoconf-lib-link-no-L.patch +++ /dev/null @@ -1,19 +0,0 @@ - -# Pulled from OpenEmbedded -# -# Commented by: Saul Wold - -Upstream-Status: Inappropriate [licensing] - ---- gettext-0.17/autoconf-lib-link/m4/lib-link.m4~ 2009-04-17 15:12:30.000000000 -0700 -+++ gettext-0.17/autoconf-lib-link/m4/lib-link.m4 2009-04-17 15:37:39.000000000 -0700 -@@ -267,6 +267,9 @@ - fi - fi - fi -+ dnl Just let the compiler find the library, the compiler and user are smarter then this script -+ dnl when cross compiling and working with a relocated install. 
-+ found_dir="" - if test "X$found_dir" = "X"; then - for x in $LDFLAGS $LTLIB[]NAME; do - AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) diff --git a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/gettext-vpath.patch b/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/gettext-vpath.patch deleted file mode 100644 index f09e450ef..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/gettext-vpath.patch +++ /dev/null @@ -1,20 +0,0 @@ - -# Pulled from OpenEmbedded -# -# Commented by: Saul Wold - -Upstream-Status: Inappropriate [licensing] - -Index: gettext-0.16.1/gettext-runtime/intl/Makefile.in -=================================================================== ---- gettext-0.16.1.orig/gettext-runtime/intl/Makefile.in 2006-11-27 09:02:00.000000000 -0800 -+++ gettext-0.16.1/gettext-runtime/intl/Makefile.in 2011-03-16 16:04:49.175419930 -0700 -@@ -35,7 +35,7 @@ - # 'make' does the wrong thing if GNU gettext was configured with - # "./configure --srcdir=`pwd`", namely it gets confused by the .lo and .la - # files it finds in srcdir = ../../gettext-runtime/intl. --VPATH = $(srcdir) -+#VPATH = $(srcdir) - - prefix = @prefix@ - exec_prefix = @exec_prefix@ diff --git a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/hardcode_macro_version.patch b/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/hardcode_macro_version.patch deleted file mode 100644 index 4da8dd453..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/hardcode_macro_version.patch +++ /dev/null @@ -1,51 +0,0 @@ - -This patch hardcodes in version 0.17 for the GETTEXT_MACRO_VERSION, -the version check is only part of 0.17 and will not affect any 0.16.1 -operations - -Upstream-Status: Inappropriate [licensing] - -Signed-off-by: Saul Wold - -Index: gettext-0.16.1/gettext-runtime/m4/po.m4 -=================================================================== ---- gettext-0.16.1.orig/gettext-runtime/m4/po.m4 2011-03-17 02:24:28.953520231 -0700 -+++ gettext-0.16.1/gettext-runtime/m4/po.m4 2011-03-17 02:28:25.455396862 -0700 -@@ -27,6 +27,9 @@ - AC_REQUIRE([AM_PROG_MKDIR_P])dnl defined by automake - AC_REQUIRE([AM_NLS])dnl - -+ dnl Hardcode the MACRO_VERSION to 0.17 for gnutls -+ AC_SUBST([GETTEXT_MACRO_VERSION], [0.17]) -+ - dnl Perform the following tests also if --disable-nls has been given, - dnl because they are needed for "make dist" to work. 
- -Index: gettext-0.16.1/gettext-runtime/po/Makefile.in.in -=================================================================== ---- gettext-0.16.1.orig/gettext-runtime/po/Makefile.in.in 2011-03-17 02:24:28.953520231 -0700 -+++ gettext-0.16.1/gettext-runtime/po/Makefile.in.in 2011-03-17 02:28:07.574395144 -0700 -@@ -10,6 +10,9 @@ - # - # Origin: gettext-0.16 - -+# Hardcode this value for gnutls building against gplv2 code -+GETTEXT_MACRO_VERSION = 0.17 -+ - PACKAGE = @PACKAGE@ - VERSION = @VERSION@ - PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ -Index: gettext-0.16.1/gettext-tools/po/Makefile.in.in -=================================================================== ---- gettext-0.16.1.orig/gettext-tools/po/Makefile.in.in 2011-03-17 02:24:28.953520231 -0700 -+++ gettext-0.16.1/gettext-tools/po/Makefile.in.in 2011-03-17 02:28:07.574395144 -0700 -@@ -10,6 +10,9 @@ - # - # Origin: gettext-0.16 - -+# Hardcode this value for gnutls building against gplv2 code -+GETTEXT_MACRO_VERSION = 0.17 -+ - PACKAGE = @PACKAGE@ - VERSION = @VERSION@ - PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ diff --git a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/linklib_from_0.17.patch b/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/linklib_from_0.17.patch deleted file mode 100644 index d92106971..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext-0.16.1/linklib_from_0.17.patch +++ /dev/null @@ -1,720 +0,0 @@ - -# Pulled from OpenEmbedded -# -# Commented by: Saul Wold - -Upstream-Status: Inappropriate [licensing] - -Index: gettext-0.16.1/autoconf-lib-link/m4/lib-link.m4 -=================================================================== ---- gettext-0.16.1.orig/autoconf-lib-link/m4/lib-link.m4 2006-11-27 09:01:58.000000000 -0800 -+++ gettext-0.16.1/autoconf-lib-link/m4/lib-link.m4 2011-03-17 00:36:08.710836720 -0700 -@@ -6,12 +6,14 @@ - - dnl From Bruno Haible. - --AC_PREREQ(2.50) -+AC_PREREQ(2.54) - - dnl AC_LIB_LINKFLAGS(name [, dependencies]) searches for libname and - dnl the libraries corresponding to explicit and implicit dependencies. - dnl Sets and AC_SUBSTs the LIB${NAME} and LTLIB${NAME} variables and - dnl augments the CPPFLAGS variable. -+dnl Sets and AC_SUBSTs the LIB${NAME}_PREFIX variable to nonempty if libname -+dnl was found in ${LIB${NAME}_PREFIX}/$acl_libdirstem. - AC_DEFUN([AC_LIB_LINKFLAGS], - [ - AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) -@@ -24,13 +26,16 @@ - ac_cv_lib[]Name[]_libs="$LIB[]NAME" - ac_cv_lib[]Name[]_ltlibs="$LTLIB[]NAME" - ac_cv_lib[]Name[]_cppflags="$INC[]NAME" -+ ac_cv_lib[]Name[]_prefix="$LIB[]NAME[]_PREFIX" - ]) - LIB[]NAME="$ac_cv_lib[]Name[]_libs" - LTLIB[]NAME="$ac_cv_lib[]Name[]_ltlibs" - INC[]NAME="$ac_cv_lib[]Name[]_cppflags" -+ LIB[]NAME[]_PREFIX="$ac_cv_lib[]Name[]_prefix" - AC_LIB_APPENDTOVAR([CPPFLAGS], [$INC]NAME) - AC_SUBST([LIB]NAME) - AC_SUBST([LTLIB]NAME) -+ AC_SUBST([LIB]NAME[_PREFIX]) - dnl Also set HAVE_LIB[]NAME so that AC_LIB_HAVE_LINKFLAGS can reuse the - dnl results of this search when this library appears as a dependency. - HAVE_LIB[]NAME=yes -@@ -46,6 +51,8 @@ - dnl LTLIB${NAME} variables and augments the CPPFLAGS variable, and - dnl #defines HAVE_LIB${NAME} to 1. Otherwise, it sets and AC_SUBSTs - dnl HAVE_LIB${NAME}=no and LIB${NAME} and LTLIB${NAME} to empty. -+dnl Sets and AC_SUBSTs the LIB${NAME}_PREFIX variable to nonempty if libname -+dnl was found in ${LIB${NAME}_PREFIX}/$acl_libdirstem. 
- AC_DEFUN([AC_LIB_HAVE_LINKFLAGS], - [ - AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) -@@ -82,17 +89,24 @@ - CPPFLAGS="$ac_save_CPPFLAGS" - LIB[]NAME= - LTLIB[]NAME= -+ LIB[]NAME[]_PREFIX= -+ - fi - AC_SUBST([HAVE_LIB]NAME) - AC_SUBST([LIB]NAME) - AC_SUBST([LTLIB]NAME) -+ AC_SUBST([LIB]NAME[_PREFIX]) - undefine([Name]) - undefine([NAME]) - ]) - - dnl Determine the platform dependent parameters needed to use rpath: --dnl libext, shlibext, hardcode_libdir_flag_spec, hardcode_libdir_separator, --dnl hardcode_direct, hardcode_minus_L. -+dnl acl_libext, -+dnl acl_shlibext, -+dnl acl_hardcode_libdir_flag_spec, -+dnl acl_hardcode_libdir_separator, -+dnl acl_hardcode_direct, -+dnl acl_hardcode_minus_L. - AC_DEFUN([AC_LIB_RPATH], - [ - dnl Tell automake >= 1.10 to complain if config.rpath is missing. -@@ -109,12 +123,14 @@ - acl_cv_rpath=done - ]) - wl="$acl_cv_wl" -- libext="$acl_cv_libext" -- shlibext="$acl_cv_shlibext" -- hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" -- hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" -- hardcode_direct="$acl_cv_hardcode_direct" -- hardcode_minus_L="$acl_cv_hardcode_minus_L" -+ acl_libext="$acl_cv_libext" -+ acl_shlibext="$acl_cv_shlibext" -+ acl_libname_spec="$acl_cv_libname_spec" -+ acl_library_names_spec="$acl_cv_library_names_spec" -+ acl_hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" -+ acl_hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" -+ acl_hardcode_direct="$acl_cv_hardcode_direct" -+ acl_hardcode_minus_L="$acl_cv_hardcode_minus_L" - dnl Determine whether the user wants rpath handling at all. - AC_ARG_ENABLE(rpath, - [ --disable-rpath do not hardcode runtime library paths], -@@ -124,20 +140,24 @@ - dnl AC_LIB_LINKFLAGS_BODY(name [, dependencies]) searches for libname and - dnl the libraries corresponding to explicit and implicit dependencies. - dnl Sets the LIB${NAME}, LTLIB${NAME} and INC${NAME} variables. -+dnl Also, sets the LIB${NAME}_PREFIX variable to nonempty if libname was found -+dnl in ${LIB${NAME}_PREFIX}/$acl_libdirstem. - AC_DEFUN([AC_LIB_LINKFLAGS_BODY], - [ - AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) - define([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], - [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) -+ dnl Autoconf >= 2.61 supports dots in --with options. -+ define([N_A_M_E],[m4_if(m4_version_compare(m4_defn([m4_PACKAGE_VERSION]),[2.61]),[-1],[translit([$1],[.],[_])],[$1])]) - dnl By default, look in $includedir and $libdir. 
- use_additional=yes - AC_LIB_WITH_FINAL_PREFIX([ - eval additional_includedir=\"$includedir\" - eval additional_libdir=\"$libdir\" - ]) -- AC_LIB_ARG_WITH([lib$1-prefix], --[ --with-lib$1-prefix[=DIR] search for lib$1 in DIR/include and DIR/lib -- --without-lib$1-prefix don't search for lib$1 in includedir and libdir], -+ AC_LIB_ARG_WITH([lib]N_A_M_E[-prefix], -+[ --with-lib]N_A_M_E[-prefix[=DIR] search for lib$1 in DIR/include and DIR/lib -+ --without-lib]N_A_M_E[-prefix don't search for lib$1 in includedir and libdir], - [ - if test "X$withval" = "Xno"; then - use_additional=no -@@ -158,6 +178,7 @@ - LIB[]NAME= - LTLIB[]NAME= - INC[]NAME= -+ LIB[]NAME[]_PREFIX= - rpathdirs= - ltrpathdirs= - names_already_handled= -@@ -197,27 +218,53 @@ - found_la= - found_so= - found_a= -+ eval libname=\"$acl_libname_spec\" # typically: libname=lib$name -+ if test -n "$acl_shlibext"; then -+ shrext=".$acl_shlibext" # typically: shrext=.so -+ else -+ shrext= -+ fi - if test $use_additional = yes; then -- if test -n "$shlibext" \ -- && { test -f "$additional_libdir/lib$name.$shlibext" \ -- || { test "$shlibext" = dll \ -- && test -f "$additional_libdir/lib$name.dll.a"; }; }; then -- found_dir="$additional_libdir" -- if test -f "$additional_libdir/lib$name.$shlibext"; then -- found_so="$additional_libdir/lib$name.$shlibext" -+ dir="$additional_libdir" -+ dnl The same code as in the loop below: -+ dnl First look for a shared library. -+ if test -n "$acl_shlibext"; then -+ if test -f "$dir/$libname$shrext"; then -+ found_dir="$dir" -+ found_so="$dir/$libname$shrext" - else -- found_so="$additional_libdir/lib$name.dll.a" -+ if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then -+ ver=`(cd "$dir" && \ -+ for f in "$libname$shrext".*; do echo "$f"; done \ -+ | sed -e "s,^$libname$shrext\\\\.,," \ -+ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ -+ | sed 1q ) 2>/dev/null` -+ if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then -+ found_dir="$dir" -+ found_so="$dir/$libname$shrext.$ver" -+ fi -+ else -+ eval library_names=\"$acl_library_names_spec\" -+ for f in $library_names; do -+ if test -f "$dir/$f"; then -+ found_dir="$dir" -+ found_so="$dir/$f" -+ break -+ fi -+ done -+ fi - fi -- if test -f "$additional_libdir/lib$name.la"; then -- found_la="$additional_libdir/lib$name.la" -+ fi -+ dnl Then look for a static library. -+ if test "X$found_dir" = "X"; then -+ if test -f "$dir/$libname.$acl_libext"; then -+ found_dir="$dir" -+ found_a="$dir/$libname.$acl_libext" - fi -- else -- if test -f "$additional_libdir/lib$name.$libext"; then -- found_dir="$additional_libdir" -- found_a="$additional_libdir/lib$name.$libext" -- if test -f "$additional_libdir/lib$name.la"; then -- found_la="$additional_libdir/lib$name.la" -- fi -+ fi -+ if test "X$found_dir" != "X"; then -+ if test -f "$dir/$libname.la"; then -+ found_la="$dir/$libname.la" - fi - fi - fi -@@ -227,26 +274,44 @@ - case "$x" in - -L*) - dir=`echo "X$x" | sed -e 's/^X-L//'` -- if test -n "$shlibext" \ -- && { test -f "$dir/lib$name.$shlibext" \ -- || { test "$shlibext" = dll \ -- && test -f "$dir/lib$name.dll.a"; }; }; then -- found_dir="$dir" -- if test -f "$dir/lib$name.$shlibext"; then -- found_so="$dir/lib$name.$shlibext" -+ dnl First look for a shared library. 
-+ if test -n "$acl_shlibext"; then -+ if test -f "$dir/$libname$shrext"; then -+ found_dir="$dir" -+ found_so="$dir/$libname$shrext" - else -- found_so="$dir/lib$name.dll.a" -- fi -- if test -f "$dir/lib$name.la"; then -- found_la="$dir/lib$name.la" -+ if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then -+ ver=`(cd "$dir" && \ -+ for f in "$libname$shrext".*; do echo "$f"; done \ -+ | sed -e "s,^$libname$shrext\\\\.,," \ -+ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ -+ | sed 1q ) 2>/dev/null` -+ if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then -+ found_dir="$dir" -+ found_so="$dir/$libname$shrext.$ver" -+ fi -+ else -+ eval library_names=\"$acl_library_names_spec\" -+ for f in $library_names; do -+ if test -f "$dir/$f"; then -+ found_dir="$dir" -+ found_so="$dir/$f" -+ break -+ fi -+ done -+ fi - fi -- else -- if test -f "$dir/lib$name.$libext"; then -+ fi -+ dnl Then look for a static library. -+ if test "X$found_dir" = "X"; then -+ if test -f "$dir/$libname.$acl_libext"; then - found_dir="$dir" -- found_a="$dir/lib$name.$libext" -- if test -f "$dir/lib$name.la"; then -- found_la="$dir/lib$name.la" -- fi -+ found_a="$dir/$libname.$acl_libext" -+ fi -+ fi -+ if test "X$found_dir" != "X"; then -+ if test -f "$dir/$libname.la"; then -+ found_la="$dir/$libname.la" - fi - fi - ;; -@@ -282,12 +347,12 @@ - ltrpathdirs="$ltrpathdirs $found_dir" - fi - dnl The hardcoding into $LIBNAME is system dependent. -- if test "$hardcode_direct" = yes; then -+ if test "$acl_hardcode_direct" = yes; then - dnl Using DIR/libNAME.so during linking hardcodes DIR into the - dnl resulting binary. - LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" - else -- if test -n "$hardcode_libdir_flag_spec" && test "$hardcode_minus_L" = no; then -+ if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then - dnl Use an explicit option to hardcode DIR into the resulting - dnl binary. - LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" -@@ -318,13 +383,13 @@ - if test -z "$haveit"; then - LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir" - fi -- if test "$hardcode_minus_L" != no; then -+ if test "$acl_hardcode_minus_L" != no; then - dnl FIXME: Not sure whether we should use - dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" - dnl here. - LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" - else -- dnl We cannot use $hardcode_runpath_var and LD_RUN_PATH -+ dnl We cannot use $acl_hardcode_runpath_var and LD_RUN_PATH - dnl here, because this doesn't fit in flags passed to the - dnl compiler. So give up. No hardcoding. This affects only - dnl very old systems. -@@ -512,18 +577,18 @@ - done - done - if test "X$rpathdirs" != "X"; then -- if test -n "$hardcode_libdir_separator"; then -+ if test -n "$acl_hardcode_libdir_separator"; then - dnl Weird platform: only the last -rpath option counts, the user must - dnl pass all path elements in one option. We can arrange that for a - dnl single library, but not when more than one $LIBNAMEs are used. - alldirs= - for found_dir in $rpathdirs; do -- alldirs="${alldirs}${alldirs:+$hardcode_libdir_separator}$found_dir" -+ alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" - done -- dnl Note: hardcode_libdir_flag_spec uses $libdir and $wl. -+ dnl Note: acl_hardcode_libdir_flag_spec uses $libdir and $wl. 
- acl_save_libdir="$libdir" - libdir="$alldirs" -- eval flag=\"$hardcode_libdir_flag_spec\" -+ eval flag=\"$acl_hardcode_libdir_flag_spec\" - libdir="$acl_save_libdir" - LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" - else -@@ -531,7 +596,7 @@ - for found_dir in $rpathdirs; do - acl_save_libdir="$libdir" - libdir="$found_dir" -- eval flag=\"$hardcode_libdir_flag_spec\" -+ eval flag=\"$acl_hardcode_libdir_flag_spec\" - libdir="$acl_save_libdir" - LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" - done -@@ -642,3 +707,79 @@ - fi - AC_SUBST([$1]) - ]) -+ -+dnl For those cases where a variable contains several -L and -l options -+dnl referring to unknown libraries and directories, this macro determines the -+dnl necessary additional linker options for the runtime path. -+dnl AC_LIB_LINKFLAGS_FROM_LIBS([LDADDVAR], [LIBSVALUE], [USE-LIBTOOL]) -+dnl sets LDADDVAR to linker options needed together with LIBSVALUE. -+dnl If USE-LIBTOOL evaluates to non-empty, linking with libtool is assumed, -+dnl otherwise linking without libtool is assumed. -+AC_DEFUN([AC_LIB_LINKFLAGS_FROM_LIBS], -+[ -+ AC_REQUIRE([AC_LIB_RPATH]) -+ AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) -+ $1= -+ if test "$enable_rpath" != no; then -+ if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then -+ dnl Use an explicit option to hardcode directories into the resulting -+ dnl binary. -+ rpathdirs= -+ next= -+ for opt in $2; do -+ if test -n "$next"; then -+ dir="$next" -+ dnl No need to hardcode the standard /usr/lib. -+ if test "X$dir" != "X/usr/$acl_libdirstem"; then -+ rpathdirs="$rpathdirs $dir" -+ fi -+ next= -+ else -+ case $opt in -+ -L) next=yes ;; -+ -L*) dir=`echo "X$opt" | sed -e 's,^X-L,,'` -+ dnl No need to hardcode the standard /usr/lib. -+ if test "X$dir" != "X/usr/$acl_libdirstem"; then -+ rpathdirs="$rpathdirs $dir" -+ fi -+ next= ;; -+ *) next= ;; -+ esac -+ fi -+ done -+ if test "X$rpathdirs" != "X"; then -+ if test -n ""$3""; then -+ dnl libtool is used for linking. Use -R options. -+ for dir in $rpathdirs; do -+ $1="${$1}${$1:+ }-R$dir" -+ done -+ else -+ dnl The linker is used for linking directly. -+ if test -n "$acl_hardcode_libdir_separator"; then -+ dnl Weird platform: only the last -rpath option counts, the user -+ dnl must pass all path elements in one option. -+ alldirs= -+ for dir in $rpathdirs; do -+ alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$dir" -+ done -+ acl_save_libdir="$libdir" -+ libdir="$alldirs" -+ eval flag=\"$acl_hardcode_libdir_flag_spec\" -+ libdir="$acl_save_libdir" -+ $1="$flag" -+ else -+ dnl The -rpath options are cumulative. -+ for dir in $rpathdirs; do -+ acl_save_libdir="$libdir" -+ libdir="$dir" -+ eval flag=\"$acl_hardcode_libdir_flag_spec\" -+ libdir="$acl_save_libdir" -+ $1="${$1}${$1:+ }$flag" -+ done -+ fi -+ fi -+ fi -+ fi -+ fi -+ AC_SUBST([$1]) -+]) -Index: gettext-0.16.1/autoconf-lib-link/config.rpath -=================================================================== ---- gettext-0.16.1.orig/autoconf-lib-link/config.rpath 2006-11-27 09:01:58.000000000 -0800 -+++ gettext-0.16.1/autoconf-lib-link/config.rpath 2011-03-17 00:33:23.336539490 -0700 -@@ -2,7 +2,7 @@ - # Output a system dependent set of variables, describing how to set the - # run time search path of shared libraries in an executable. - # --# Copyright 1996-2006 Free Software Foundation, Inc. -+# Copyright 1996-2007 Free Software Foundation, Inc. 
- # Taken from GNU libtool, 2001 - # Originally by Gordon Matzigkeit , 1996 - # -@@ -47,6 +47,18 @@ - done - cc_basename=`echo "$cc_temp" | sed -e 's%^.*/%%'` - -+# Code taken from libtool.m4's _LT_CC_BASENAME. -+ -+for cc_temp in $CC""; do -+ case $cc_temp in -+ compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; -+ distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; -+ \-*) ;; -+ *) break;; -+ esac -+done -+cc_basename=`echo "$cc_temp" | sed -e 's%^.*/%%'` -+ - # Code taken from libtool.m4's AC_LIBTOOL_PROG_COMPILER_PIC. - - wl= -@@ -64,7 +76,14 @@ - ;; - esac - ;; -- mingw* | pw32* | os2*) -+ darwin*) -+ case $cc_basename in -+ xlc*) -+ wl='-Wl,' -+ ;; -+ esac -+ ;; -+ mingw* | cygwin* | pw32* | os2*) - ;; - hpux9* | hpux10* | hpux11*) - wl='-Wl,' -@@ -74,7 +93,7 @@ - ;; - newsos6) - ;; -- linux*) -+ linux* | k*bsd*-gnu) - case $cc_basename in - icc* | ecc*) - wl='-Wl,' -@@ -100,7 +119,7 @@ - osf3* | osf4* | osf5*) - wl='-Wl,' - ;; -- sco3.2v5*) -+ rdos*) - ;; - solaris*) - wl='-Wl,' -@@ -108,11 +127,14 @@ - sunos4*) - wl='-Qoption ld ' - ;; -- sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) -+ sysv4 | sysv4.2uw2* | sysv4.3*) - wl='-Wl,' - ;; - sysv4*MP*) - ;; -+ sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) -+ wl='-Wl,' -+ ;; - unicos*) - wl='-Wl,' - ;; -@@ -141,6 +163,10 @@ - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; -+ interix*) -+ # we just hope/assume this is gcc and not c89 (= MSVC++) -+ with_gnu_ld=yes -+ ;; - openbsd*) - with_gnu_ld=no - ;; -@@ -189,11 +215,11 @@ - ld_shlibs=no - fi - ;; -- interix3*) -+ interix[3-9]*) - hardcode_direct=no - hardcode_libdir_flag_spec='${wl}-rpath,$libdir' - ;; -- linux*) -+ gnu* | linux* | k*bsd*-gnu) - if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then - : - else -@@ -280,7 +306,7 @@ - strings "$collect2name" | grep resolve_lib_name >/dev/null - then - # We have reworked collect2 -- hardcode_direct=yes -+ : - else - # We have old collect2 - hardcode_direct=unsupported -@@ -359,7 +385,7 @@ - hardcode_direct=yes - hardcode_minus_L=yes - ;; -- freebsd* | kfreebsd*-gnu | dragonfly*) -+ freebsd* | dragonfly*) - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - ;; -@@ -412,18 +438,22 @@ - hardcode_libdir_separator=: - ;; - openbsd*) -- hardcode_direct=yes -- if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then -- hardcode_libdir_flag_spec='${wl}-rpath,$libdir' -+ if test -f /usr/libexec/ld.so; then -+ hardcode_direct=yes -+ if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then -+ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' -+ else -+ case "$host_os" in -+ openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) -+ hardcode_libdir_flag_spec='-R$libdir' -+ ;; -+ *) -+ hardcode_libdir_flag_spec='${wl}-rpath,$libdir' -+ ;; -+ esac -+ fi - else -- case "$host_os" in -- openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) -- hardcode_libdir_flag_spec='-R$libdir' -- ;; -- *) -- hardcode_libdir_flag_spec='${wl}-rpath,$libdir' -- ;; -- esac -+ ld_shlibs=no - fi - ;; - os2*) -@@ -471,7 +501,7 @@ - ld_shlibs=yes - fi - ;; -- sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7*) -+ sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* |sco3.2v5.0.[024]*) - ;; - sysv5* | sco3.2v5* | sco5v6*) - hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-R,$libdir`' -@@ -488,33 +518,51 @@ - - # Check dynamic linker characteristics - # Code taken 
from libtool.m4's AC_LIBTOOL_SYS_DYNAMIC_LINKER. -+# Unlike libtool.m4, here we don't care about _all_ names of the library, but -+# only about the one the linker finds when passed -lNAME. This is the last -+# element of library_names_spec in libtool.m4, or possibly two of them if the -+# linker has special search rules. -+library_names_spec= # the last element of library_names_spec in libtool.m4 - libname_spec='lib$name' - case "$host_os" in - aix3*) -+ library_names_spec='$libname.a' - ;; - aix4* | aix5*) -+ library_names_spec='$libname$shrext' - ;; - amigaos*) -+ library_names_spec='$libname.a' - ;; - beos*) -+ library_names_spec='$libname$shrext' - ;; - bsdi[45]*) -+ library_names_spec='$libname$shrext' - ;; - cygwin* | mingw* | pw32*) - shrext=.dll -+ library_names_spec='$libname.dll.a $libname.lib' - ;; - darwin* | rhapsody*) - shrext=.dylib -+ library_names_spec='$libname$shrext' - ;; - dgux*) -+ library_names_spec='$libname$shrext' - ;; - freebsd1*) - ;; -- kfreebsd*-gnu) -- ;; - freebsd* | dragonfly*) -+ case "$host_os" in -+ freebsd[123]*) -+ library_names_spec='$libname$shrext$versuffix' ;; -+ *) -+ library_names_spec='$libname$shrext' ;; -+ esac - ;; - gnu*) -+ library_names_spec='$libname$shrext' - ;; - hpux9* | hpux10* | hpux11*) - case $host_cpu in -@@ -528,10 +576,13 @@ - shrext=.sl - ;; - esac -+ library_names_spec='$libname$shrext' - ;; -- interix3*) -+ interix[3-9]*) -+ library_names_spec='$libname$shrext' - ;; - irix5* | irix6* | nonstopux*) -+ library_names_spec='$libname$shrext' - case "$host_os" in - irix5* | nonstopux*) - libsuff= shlibsuff= -@@ -548,33 +599,46 @@ - ;; - linux*oldld* | linux*aout* | linux*coff*) - ;; -- linux*) -+ linux* | k*bsd*-gnu) -+ library_names_spec='$libname$shrext' - ;; - knetbsd*-gnu) -+ library_names_spec='$libname$shrext' - ;; - netbsd*) -+ library_names_spec='$libname$shrext' - ;; - newsos6) -+ library_names_spec='$libname$shrext' - ;; - nto-qnx*) -+ library_names_spec='$libname$shrext' - ;; - openbsd*) -+ library_names_spec='$libname$shrext$versuffix' - ;; - os2*) - libname_spec='$name' - shrext=.dll -+ library_names_spec='$libname.a' - ;; - osf3* | osf4* | osf5*) -+ library_names_spec='$libname$shrext' - ;; - solaris*) -+ library_names_spec='$libname$shrext' - ;; - sunos4*) -+ library_names_spec='$libname$shrext$versuffix' - ;; - sysv4 | sysv4.3*) -+ library_names_spec='$libname$shrext' - ;; - sysv4*MP*) -+ library_names_spec='$libname$shrext' - ;; - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) -+ library_names_spec='$libname$shrext' - ;; - uts4*) - ;; -@@ -583,6 +647,8 @@ - sed_quote_subst='s/\(["`$\\]\)/\\\1/g' - escaped_wl=`echo "X$wl" | sed -e 's/^X//' -e "$sed_quote_subst"` - shlibext=`echo "$shrext" | sed -e 's,^\.,,'` -+escaped_libname_spec=`echo "X$libname_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` -+escaped_library_names_spec=`echo "X$library_names_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` - escaped_hardcode_libdir_flag_spec=`echo "X$hardcode_libdir_flag_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` - - LC_ALL=C sed -e 's/^\([a-zA-Z0-9_]*\)=/acl_cv_\1=/' < +Date: Sun, 19 Feb 2017 23:32:46 -0800 +Subject: [PATCH] cr-statement.c/timsort.h: fix formatting issues + +Fixed when compile with "-Wformat -Wformat-security -Werror=format-security": +| gettext-tools/gnulib-lib/libcroco/cr-statement.c: In function 'cr_statement_dump_charset': +| gettext-tools/gnulib-lib/libcroco/cr-statement.c:2661:17: error: format not a string literal and no format arguments [-Werror=format-security] +| fprintf (a_fp, 
str) ; + +And: +gettext-tools/gnulib-lib/libxml/timsort.h:326:80: warning: format '%lu' expects argument of type 'long unsigned int', but argument 3 has type 'unsigned int' [-Wformat=] + fprintf(stderr, "Error allocating temporary storage for tim sort: need %lu bytes", sizeof(SORT_TYPE) * new_size); + +Upstream-Status: Pending + +Signed-off-by: Robert Yang +--- + gettext-tools/gnulib-lib/libcroco/cr-statement.c | 10 +++++----- + gettext-tools/gnulib-lib/libxml/timsort.h | 2 +- + 2 files changed, 6 insertions(+), 6 deletions(-) + +diff --git a/gettext-tools/gnulib-lib/libcroco/cr-statement.c b/gettext-tools/gnulib-lib/libcroco/cr-statement.c +index 617520f..100104b 100644 +--- a/gettext-tools/gnulib-lib/libcroco/cr-statement.c ++++ b/gettext-tools/gnulib-lib/libcroco/cr-statement.c +@@ -2607,7 +2607,7 @@ cr_statement_dump_ruleset (CRStatement * a_this, FILE * a_fp, glong a_indent) + g_return_if_fail (a_fp && a_this); + str = cr_statement_ruleset_to_string (a_this, a_indent); + if (str) { +- fprintf (a_fp, str); ++ fprintf (a_fp, "%s", str); + g_free (str); + str = NULL; + } +@@ -2658,7 +2658,7 @@ cr_statement_dump_charset (CRStatement * a_this, FILE * a_fp, gulong a_indent) + str = cr_statement_charset_to_string (a_this, + a_indent) ; + if (str) { +- fprintf (a_fp, str) ; ++ fprintf (a_fp, "%s", str) ; + g_free (str) ; + str = NULL ; + } +@@ -2685,7 +2685,7 @@ cr_statement_dump_page (CRStatement * a_this, FILE * a_fp, gulong a_indent) + + str = cr_statement_at_page_rule_to_string (a_this, a_indent) ; + if (str) { +- fprintf (a_fp, str); ++ fprintf (a_fp, "%s", str); + g_free (str) ; + str = NULL ; + } +@@ -2711,7 +2711,7 @@ cr_statement_dump_media_rule (CRStatement * a_this, + + str = cr_statement_media_rule_to_string (a_this, a_indent) ; + if (str) { +- fprintf (a_fp, str) ; ++ fprintf (a_fp, "%s", str) ; + g_free (str) ; + str = NULL ; + } +@@ -2737,7 +2737,7 @@ cr_statement_dump_import_rule (CRStatement * a_this, FILE * a_fp, + + str = cr_statement_import_rule_to_string (a_this, a_indent) ; + if (str) { +- fprintf (a_fp, str) ; ++ fprintf (a_fp, "%s", str) ; + g_free (str) ; + str = NULL ; + } +diff --git a/gettext-tools/gnulib-lib/libxml/timsort.h b/gettext-tools/gnulib-lib/libxml/timsort.h +index 795f272..443918a 100644 +--- a/gettext-tools/gnulib-lib/libxml/timsort.h ++++ b/gettext-tools/gnulib-lib/libxml/timsort.h +@@ -323,7 +323,7 @@ static void TIM_SORT_RESIZE(TEMP_STORAGE_T *store, const size_t new_size) + SORT_TYPE *tempstore = (SORT_TYPE *)realloc(store->storage, new_size * sizeof(SORT_TYPE)); + if (tempstore == NULL) + { +- fprintf(stderr, "Error allocating temporary storage for tim sort: need %lu bytes", sizeof(SORT_TYPE) * new_size); ++ fprintf(stderr, "Error allocating temporary storage for tim sort: need %zu bytes", sizeof(SORT_TYPE) * new_size); + exit(1); + } + store->storage = tempstore; +-- +2.10.2 + diff --git a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext_0.16.1.bb b/import-layers/yocto-poky/meta/recipes-core/gettext/gettext_0.16.1.bb deleted file mode 100644 index e79f4dacf..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext_0.16.1.bb +++ /dev/null @@ -1,124 +0,0 @@ -SUMMARY = "Utilities and libraries for producing multi-lingual messages" -DESCRIPTION = "GNU gettext is a set of tools that provides a framework to help other programs produce multi-lingual messages. 
These tools include a set of conventions about how programs should be written to support message catalogs, a directory and file naming organization for the message catalogs themselves, a runtime library supporting the retrieval of translated messages, and a few stand-alone programs to massage in various ways the sets of translatable and already translated strings." -HOMEPAGE = "http://www.gnu.org/software/gettext/gettext.html" -SECTION = "libs" -LICENSE = "GPLv2" -LIC_FILES_CHKSUM = "file://COPYING;md5=9ea3144f04c41cd2eada5d3f472e6ea5" - -PR = "r6" -DEPENDS = "virtual/libiconv" -DEPENDS_class-native = "" -PROVIDES = "virtual/libintl virtual/gettext" -PROVIDES_class-native = "virtual/gettext-native" - -SRC_URI = "${GNU_MIRROR}/gettext/gettext-${PV}.tar.gz \ - file://gettext-vpath.patch \ - file://linklib_from_0.17.patch \ - file://gettext-autoconf-lib-link-no-L.patch \ - file://disable_java.patch \ - file://fix_aclocal_version.patch \ - file://fix_gnu_source_circular.patch \ - file://hardcode_macro_version.patch \ - " - -SRC_URI[md5sum] = "3d9ad24301c6d6b17ec30704a13fe127" -SRC_URI[sha256sum] = "0bf850d1a079fb5a61f0a47b1a9efd35eb44032255375e1cedb0253bc27b376d" - -PARALLEL_MAKE = "" - -LDFLAGS_prepend_libc-uclibc = " -lrt -lpthread " - -inherit autotools texinfo - -EXTRA_OECONF += "--without-lispdir \ - --disable-csharp \ - --disable-libasprintf \ - --disable-java \ - --disable-native-java \ - --disable-openmp \ - --without-emacs \ - " -EXTRA_OECONF_append_libc-musl = "\ - gt_cv_func_gnugettext1_libc=yes \ - gt_cv_func_gnugettext2_libc=yes \ - " - -acpaths = '-I ${S}/autoconf-lib-link/m4/ \ - -I ${S}/gettext-runtime/m4 \ - -I ${S}/gettext-tools/m4' - -do_configure_prepend() { - rm -f ${S}/config/m4/libtool.m4 -} - -do_install_append_libc-musl () { - rm -f ${D}${libdir}/charset.alias -} - -# these lack the .x behind the .so, but shouldn't be in the -dev package -# Otherwise you get the following results: -# 7.4M glibc/images/ep93xx/Angstrom-console-image-glibc-ipk-2008.1-test-20080104-ep93xx.rootfs.tar.gz -# 25M uclibc/images/ep93xx/Angstrom-console-image-uclibc-ipk-2008.1-test-20080104-ep93xx.rootfs.tar.gz -# because gettext depends on gettext-dev, which pulls in more -dev packages: -# 15228 KiB /ep93xx/libstdc++-dev_4.2.2-r2_ep93xx.ipk -# 1300 KiB /ep93xx/uclibc-dev_0.9.29-r8_ep93xx.ipk -# 140 KiB /armv4t/gettext-dev_0.14.1-r6_armv4t.ipk -# 4 KiB /ep93xx/libgcc-s-dev_4.2.2-r2_ep93xx.ipk - -PACKAGES =+ "libgettextlib libgettextsrc" -FILES_libgettextlib = "${libdir}/libgettextlib-*.so*" -FILES_libgettextsrc = "${libdir}/libgettextsrc-*.so*" - -PACKAGES =+ "gettext-runtime gettext-runtime-dev gettext-runtime-staticdev gettext-runtime-doc" - -FILES_${PN} += "${libdir}/${BPN}/*" - -FILES_gettext-runtime = "${bindir}/gettext \ - ${bindir}/ngettext \ - ${bindir}/envsubst \ - ${bindir}/gettext.sh \ - ${libdir}/libasprintf${SODEV} \ - ${libdir}/GNU.Gettext.dll \ - " -FILES_gettext-runtime_append_libc-uclibc = " ${libdir}/libintl.so.* \ - ${libdir}/charset.alias \ - " -FILES_gettext-runtime-staticdev += "${libdir}/libasprintf.a" -FILES_gettext-runtime-dev += "${includedir}/autosprintf.h \ - ${libdir}/libasprintf${SOLIBDEV}" -FILES_gettext-runtime-dev_append_libc-uclibc = " ${libdir}/libintl.so \ - ${includedir}/libintl.h \ - " -FILES_gettext-runtime-doc = "${mandir}/man1/gettext.* \ - ${mandir}/man1/ngettext.* \ - ${mandir}/man1/envsubst.* \ - ${mandir}/man1/.* \ - ${mandir}/man3/* \ - ${docdir}/gettext/gettext.* \ - ${docdir}/gettext/ngettext.* \ - ${docdir}/gettext/envsubst.* \ - 
${docdir}/gettext/*.3.html \ - ${datadir}/gettext/ABOUT-NLS \ - ${docdir}/gettext/csharpdoc/* \ - ${docdir}/libasprintf/autosprintf.html \ - ${infodir}/autosprintf.info \ - " - -do_install_append() { - rm -f ${D}${libdir}/preloadable_libintl.so -} - -do_install_append_class-native () { - rm ${D}${datadir}/aclocal/* - rm ${D}${datadir}/gettext/config.rpath - rm ${D}${datadir}/gettext/po/Makefile.in.in - rm ${D}${datadir}/gettext/po/remove-potcdate.sin -} - -# Anyone inheriting gettext will have both gettext-native and gettext -# available, and we don't want to use older macros from the target gettext in -# a non-gplv3 build, so kill them and let dependent recipes rely on -# gettext-native. -SYSROOT_DIRS_BLACKLIST += "${datadir}/aclocal" - -BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext_0.19.8.1.bb b/import-layers/yocto-poky/meta/recipes-core/gettext/gettext_0.19.8.1.bb index 7b8c1e8eb..83edffe53 100644 --- a/import-layers/yocto-poky/meta/recipes-core/gettext/gettext_0.19.8.1.bb +++ b/import-layers/yocto-poky/meta/recipes-core/gettext/gettext_0.19.8.1.bb @@ -1,5 +1,8 @@ SUMMARY = "Utilities and libraries for producing multi-lingual messages" -DESCRIPTION = "GNU gettext is a set of tools that provides a framework to help other programs produce multi-lingual messages. These tools include a set of conventions about how programs should be written to support message catalogs, a directory and file naming organization for the message catalogs themselves, a runtime library supporting the retrieval of translated messages, and a few stand-alone programs to massage in various ways the sets of translatable and already translated strings." +DESCRIPTION = "GNU gettext is a set of tools that provides a framework to help other programs produce multi-lingual messages. \ +These tools include a set of conventions about how programs should be written to support message catalogs, a directory and file \ +naming organization for the message catalogs themselves, a runtime library supporting the retrieval of translated messages, and \ +a few stand-alone programs to massage in various ways the sets of translatable and already translated strings." HOMEPAGE = "http://www.gnu.org/software/gettext/gettext.html" SECTION = "libs" LICENSE = "GPLv3+ & LGPL-2.1+" @@ -13,6 +16,7 @@ RCONFLICTS_${PN} = "proxy-libintl" SRC_URI = "${GNU_MIRROR}/gettext/gettext-${PV}.tar.gz \ file://parallel.patch \ file://add-with-bisonlocaledir.patch \ + file://cr-statement.c-timsort.h-fix-formatting-issues.patch \ " SRC_URI[md5sum] = "97e034cf8ce5ba73a28ff6c3c0638092" diff --git a/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/0001-Test-for-pthread_getname_np-before-using-it.patch b/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/0001-Test-for-pthread_getname_np-before-using-it.patch new file mode 100644 index 000000000..c6e4966bb --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/0001-Test-for-pthread_getname_np-before-using-it.patch @@ -0,0 +1,70 @@ +From f627fe16099a2b08d8b4e9023ae6b4f352451967 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Sun, 6 Nov 2016 08:59:08 -0800 +Subject: [PATCH] Test for pthread_getname_np before using it + +Its a GNU extention and not all libc implement it +musl e.g. implements the setname API but not getname +in any case, it seems to be safer to check for the +function before using it. 
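+A minimal standalone sketch of the guard pattern this patch introduces (illustrative only, not
+part of glib itself): it assumes a configure probe such as the AC_LINK_IFELSE test added below
+has defined HAVE_PTHREAD_GETNAME_NP, and falls back cleanly on libcs (e.g. musl) that lack the
+extension. Build with "cc -pthread".
+
+    /* Sketch: call pthread_getname_np() only when the build system detected it. */
+    #define _GNU_SOURCE
+    #include <pthread.h>
+    #include <stdio.h>
+
+    int main(void)
+    {
+    #if defined(HAVE_PTHREAD_GETNAME_NP)
+        char name[16];
+        /* GNU extension: fetch the calling thread's name into a >=16 byte buffer. */
+        if (pthread_getname_np(pthread_self(), name, sizeof name) == 0)
+            printf("thread name: %s\n", name);
+    #else
+        /* Fallback for libcs that implement the setname API but not getname. */
+        printf("pthread_getname_np not detected at configure time\n");
+    #endif
+        return 0;
+    }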
+ +Signed-off-by: Khem Raj +--- +Upstream-Status: Submitted + + config.h.in | 3 +++ + configure.ac | 10 ++++++++++ + glib/tests/thread.c | 2 +- + 3 files changed, 14 insertions(+), 1 deletion(-) + +diff --git a/config.h.in b/config.h.in +index 2c35ff1..da7ac30 100644 +--- a/config.h.in ++++ b/config.h.in +@@ -326,6 +326,9 @@ + #undef HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE_NP + + /* Have function pthread_setname_np(const char*) */ ++#undef HAVE_PTHREAD_GETNAME_NP ++ ++/* Have function pthread_setname_np(const char*) */ + #undef HAVE_PTHREAD_SETNAME_NP_WITHOUT_TID + + /* Have function pthread_setname_np(pthread_t, const char*) */ +diff --git a/configure.ac b/configure.ac +index 4309671..209770a 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -2121,6 +2121,16 @@ AS_IF([ test x"$have_threads" = xposix], [ + AC_DEFINE(HAVE_PTHREAD_COND_TIMEDWAIT_RELATIVE_NP,1, + [Have function pthread_cond_timedwait_relative_np])], + [AC_MSG_RESULT(no)]) ++ dnl gets thread names ++ AC_MSG_CHECKING(for pthread_getname_np(pthread_t, char*, size_t)) ++ AC_LINK_IFELSE( ++ [AC_LANG_PROGRAM( ++ [#include ], ++ [pthread_getname_np(pthread_self(),"example",0)])], ++ [AC_MSG_RESULT(yes) ++ AC_DEFINE(HAVE_PTHREAD_GETNAME_NP,1, ++ [Have function pthread_setname_np(const char*)])], ++ [AC_MSG_RESULT(no)]) + dnl Sets thread names on OS X 10.6, iOS 3.2 (and higher) + AC_MSG_CHECKING(for pthread_setname_np(const char*)) + AC_LINK_IFELSE( +diff --git a/glib/tests/thread.c b/glib/tests/thread.c +index 5447836..2f248a6 100644 +--- a/glib/tests/thread.c ++++ b/glib/tests/thread.c +@@ -174,7 +174,7 @@ test_thread5 (void) + static gpointer + thread6_func (gpointer data) + { +-#ifdef HAVE_PTHREAD_SETNAME_NP_WITH_TID ++#if defined(HAVE_PTHREAD_SETNAME_NP_WITH_TID) && defined(HAVE_PTHREAD_GETNAME_NP) + char name[16]; + + pthread_getname_np (pthread_self(), name, 16); +-- +2.10.2 + diff --git a/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/0002-tests-Ignore-y2k-warnings.patch b/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/0002-tests-Ignore-y2k-warnings.patch deleted file mode 100644 index f61fa0ae6..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/0002-tests-Ignore-y2k-warnings.patch +++ /dev/null @@ -1,42 +0,0 @@ -From b06b22fecc7deda8c65e28670562ca2371e4e725 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Sat, 16 Apr 2016 13:43:54 -0700 -Subject: [PATCH 2/2] tests: Ignore y2k warnings - -silences -| ../../../../../../../../workspace/sources/glib-2.0/glib/tests/gdatetime.c: In function 'test_strftime': -| ../../../../../../../../workspace/sources/glib-2.0/glib/tests/gdatetime.c:1338:3: error: '%c' yields only last 2 digits of year in some locales [-Werror=format-y2k] -| "a%a A%A b%b B%B c%c C%C d%d e%e F%F g%g G%G h%h H%H I%I j%j m%m M%M " - -Signed-off-by: Khem Raj ---- -Upstream-Status: Submitted - - glib/tests/gdatetime.c | 4 ++++ - 1 file changed, 4 insertions(+) - -diff --git a/glib/tests/gdatetime.c b/glib/tests/gdatetime.c -index 16a163c..e6062fc 100644 ---- a/glib/tests/gdatetime.c -+++ b/glib/tests/gdatetime.c -@@ -1326,6 +1326,9 @@ test_z (void) - g_time_zone_unref (tz); - } - -+#pragma GCC diagnostic push -+#pragma GCC diagnostic ignored "-Wformat-y2k" -+ - static void - test_strftime (void) - { -@@ -1351,6 +1354,7 @@ test_strftime (void) - } - #endif - } -+#pragma GCC diagnostic pop - - static void - test_find_interval (void) --- -2.8.0 - diff --git a/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/gi-exclude.patch 
b/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/gi-exclude.patch deleted file mode 100644 index dc62b92ef..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0/gi-exclude.patch +++ /dev/null @@ -1,59 +0,0 @@ -The autoptr types should be excluded from gobject-introspection parsing as -they're not user-facing. - -Upstream-Status: Submitted -Signed-off-by: Ross Burton - -diff --git a/gio/gio-autocleanups.h b/gio/gio-autocleanups.h -index a95ba65..24ccc2d 100644 ---- a/gio/gio-autocleanups.h -+++ b/gio/gio-autocleanups.h -@@ -21,6 +21,8 @@ - #error "Only can be included directly." - #endif - -+#ifndef __GI_SCANNER__ -+ - G_DEFINE_AUTOPTR_CLEANUP_FUNC(GAction, g_object_unref) - G_DEFINE_AUTOPTR_CLEANUP_FUNC(GActionMap, g_object_unref) - G_DEFINE_AUTOPTR_CLEANUP_FUNC(GAppInfo, g_object_unref) -@@ -146,3 +148,5 @@ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GVolume, g_object_unref) - G_DEFINE_AUTOPTR_CLEANUP_FUNC(GVolumeMonitor, g_object_unref) - G_DEFINE_AUTOPTR_CLEANUP_FUNC(GZlibCompressor, g_object_unref) - G_DEFINE_AUTOPTR_CLEANUP_FUNC(GZlibDecompressor, g_object_unref) -+ -+#endif -diff --git a/glib/glib-autocleanups.h b/glib/glib-autocleanups.h -index 6355f75..09d28be 100644 ---- a/glib/glib-autocleanups.h -+++ b/glib/glib-autocleanups.h -@@ -21,6 +21,8 @@ - #error "Only can be included directly." - #endif - -+#ifndef __GI_SCANNER__ -+ - static inline void - g_autoptr_cleanup_generic_gfree (void *p) - { -@@ -87,3 +89,5 @@ G_DEFINE_AUTOPTR_CLEANUP_FUNC(GVariantDict, g_variant_dict_unref) - G_DEFINE_AUTO_CLEANUP_CLEAR_FUNC(GVariantDict, g_variant_dict_clear) - G_DEFINE_AUTOPTR_CLEANUP_FUNC(GVariantType, g_variant_type_free) - G_DEFINE_AUTO_CLEANUP_FREE_FUNC(GStrv, g_strfreev, NULL) -+ -+#endif -diff --git a/gobject/gobject-autocleanups.h b/gobject/gobject-autocleanups.h -index 980203f..a1d4ba1 100644 ---- a/gobject/gobject-autocleanups.h -+++ b/gobject/gobject-autocleanups.h -@@ -21,6 +21,10 @@ - #error "Only can be included directly." 
- #endif - -+#ifndef __GI_SCANNER__ -+ - G_DEFINE_AUTOPTR_CLEANUP_FUNC(GObject, g_object_unref) - G_DEFINE_AUTOPTR_CLEANUP_FUNC(GInitiallyUnowned, g_object_unref) - G_DEFINE_AUTO_CLEANUP_CLEAR_FUNC(GValue, g_value_unset) -+ -+#endif diff --git a/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0_2.48.2.bb b/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0_2.48.2.bb deleted file mode 100644 index a45f64444..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0_2.48.2.bb +++ /dev/null @@ -1,26 +0,0 @@ -require glib.inc - -PE = "1" - -SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}" - -SRC_URI = "${GNOME_MIRROR}/glib/${SHRT_VER}/glib-${PV}.tar.xz \ - file://configure-libtool.patch \ - file://fix-conflicting-rand.patch \ - file://run-ptest \ - file://ptest-paths.patch \ - file://uclibc_musl_translation.patch \ - file://allow-run-media-sdX-drive-mount-if-username-root.patch \ - file://0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch \ - file://Enable-more-tests-while-cross-compiling.patch \ - file://gi-exclude.patch \ - file://0001-Install-gio-querymodules-as-libexec_PROGRAM.patch \ - file://0001-Do-not-ignore-return-value-of-write.patch \ - file://0002-tests-Ignore-y2k-warnings.patch \ - " - -SRC_URI_append_class-native = " file://glib-gettextize-dir.patch \ - file://relocate-modules.patch" - -SRC_URI[md5sum] = "f4ac1aa2efd4f5798c37625ea697ac57" -SRC_URI[sha256sum] = "f25e751589cb1a58826eac24fbd4186cda4518af772806b666a3f91f66e6d3f4" diff --git a/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0_2.50.3.bb b/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0_2.50.3.bb new file mode 100644 index 000000000..22ea347e1 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib-2.0_2.50.3.bb @@ -0,0 +1,25 @@ +require glib.inc + +PE = "1" + +SHRT_VER = "${@oe.utils.trim_version("${PV}", 2)}" + +SRC_URI = "${GNOME_MIRROR}/glib/${SHRT_VER}/glib-${PV}.tar.xz \ + file://configure-libtool.patch \ + file://fix-conflicting-rand.patch \ + file://run-ptest \ + file://ptest-paths.patch \ + file://uclibc_musl_translation.patch \ + file://allow-run-media-sdX-drive-mount-if-username-root.patch \ + file://0001-Remove-the-warning-about-deprecated-paths-in-schemas.patch \ + file://Enable-more-tests-while-cross-compiling.patch \ + file://0001-Install-gio-querymodules-as-libexec_PROGRAM.patch \ + file://0001-Do-not-ignore-return-value-of-write.patch \ + file://0001-Test-for-pthread_getname_np-before-using-it.patch \ + " + +SRC_URI_append_class-native = " file://glib-gettextize-dir.patch \ + file://relocate-modules.patch" + +SRC_URI[md5sum] = "381ab22934f296750d036aa55a397ded" +SRC_URI[sha256sum] = "82ee94bf4c01459b6b00cb9db0545c2237921e3060c0b74cff13fbc020cfd999" diff --git a/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib.inc b/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib.inc index 906e0d4d5..2b30e372d 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib.inc +++ b/import-layers/yocto-poky/meta/recipes-core/glib-2.0/glib.inc @@ -15,6 +15,8 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=3bf50002aefd002f49e7bb854063f7e7 \ BUGTRACKER = "http://bugzilla.gnome.org" SECTION = "libs" +CVE_PRODUCT = "glib" + BBCLASSEXTEND = "native nativesdk" DEPENDS = "virtual/libiconv libffi zlib glib-2.0-native" @@ -26,15 +28,17 @@ PACKAGES += "${PN}-codegen ${PN}-utils" LEAD_SONAME = "libglib-2.0.*" -inherit autotools gettext gtk-doc pkgconfig ptest-gnome upstream-version-is-even bash-completion 
gio-module-cache python3native +inherit autotools gettext gtk-doc pkgconfig ptest-gnome upstream-version-is-even bash-completion gio-module-cache python3native manpages S = "${WORKDIR}/glib-${PV}" -PACKAGECONFIG ??= "system-pcre" +PACKAGECONFIG ??= "system-pcre libmount" # To use the system pcre it must be configured with --enable-unicode-properties PACKAGECONFIG[system-pcre] = "--with-pcre=system,--with-pcre=internal,libpcre" +PACKAGECONFIG[libmount] = "--enable-libmount,--disable-libmount,util-linux" +PACKAGECONFIG[manpages] = "--enable-man --with-xml-catalog=${STAGING_ETCDIR_NATIVE}/xml/catalog.xml, --disable-man, libxslt-native xmlto-native" -CORECONF = "--disable-dtrace --disable-fam --disable-libelf --disable-systemtap --disable-man" +CORECONF = "--disable-dtrace --disable-fam --disable-libelf --disable-systemtap" PRINTF = "--enable-included-printf=no" PRINTF_darwin = "--enable-included-printf=yes" @@ -43,6 +47,9 @@ EXTRA_OECONF = "${PRINTF} ${CORECONF}" EXTRA_OECONF_class-native = "${CORECONF} --disable-selinux" EXTRA_OECONF_append_libc-uclibc = " --with-libiconv=gnu" +# Tell configure that we'll have dbus-daemon on the target for the tests +EXTRA_OECONF_class-target_append = " ${@bb.utils.contains('PTEST_ENABLED', '1', ' ac_cv_prog_DBUS_DAEMON=dbus-daemon', '', d)}" + do_configure_prepend() { sed -i -e '1s,#!.*,#!${USRBINPATH}/env python3,' ${S}/gio/gdbus-2.0/codegen/gdbus-codegen.in } @@ -90,23 +97,28 @@ do_install_append () { sed -i -e '1s,#!.*perl,#! ${USRBINPATH}/env perl,' ${D}${bindir}/glib-mkenums fi + if [ -e ${D}${libdir}/charset.alias ]; then + rm -f ${D}${libdir}/charset.alias + fi +} + +do_install_append_class-target () { + # Tests are only installed on targets, not native builds. Separating this out + # keeps glib-2.0-native from depending on ${DISTRO_FEATURES} if [ -f ${D}${datadir}/installed-tests/glib/gdbus-serialization.test ]; then if ${@bb.utils.contains("DISTRO_FEATURES", "x11", "false", "true", d)}; then rm ${D}${datadir}/installed-tests/glib/gdbus-serialization.test fi fi - # Make sure gio-querymodules is unique among multilibs - if test "x${MLPREFIX}" != "x"; then - mv ${D}${libexecdir}/gio-querymodules ${D}${libexecdir}/${MLPREFIX}gio-querymodules - fi -} - -do_install_append_libc-musl () { - rm -f ${D}${libdir}/charset.alias + # Make sure gio-querymodules is unique among multilibs + if test "x${MLPREFIX}" != "x"; then + mv ${D}${libexecdir}/gio-querymodules ${D}${libexecdir}/${MLPREFIX}gio-querymodules + fi } RDEPENDS_${PN}-ptest += "\ + dbus \ gnome-desktop-testing \ tzdata \ tzdata-americas \ diff --git a/import-layers/yocto-poky/meta/recipes-core/glib-networking/glib-networking_2.48.2.bb b/import-layers/yocto-poky/meta/recipes-core/glib-networking/glib-networking_2.48.2.bb deleted file mode 100644 index 50d9983fe..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glib-networking/glib-networking_2.48.2.bb +++ /dev/null @@ -1,29 +0,0 @@ -SUMMARY = "GLib networking extensions" -DESCRIPTION = "glib-networking contains the implementations of certain GLib networking features that cannot be implemented directly in GLib itself because of their dependencies." 
-HOMEPAGE = "http://git.gnome.org/browse/glib-networking/" -BUGTRACKER = "http://bugzilla.gnome.org" - -LICENSE = "LGPLv2" -LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2" - -SECTION = "libs" -DEPENDS = "glib-2.0 intltool-native" - -SRC_URI[archive.md5sum] = "d7cf81d52c856b0c66f7821021f40e08" -SRC_URI[archive.sha256sum] = "925c0c49d6b2b8b5695f2e33cd952d1dbb7d18d3f2f796413577719315bb3a84" - -PACKAGECONFIG ??= "ca-certificates gnutls" - -# No explicit dependency as it works without ca-certificates installed -PACKAGECONFIG[ca-certificates] = "--with-ca-certificates=${sysconfdir}/ssl/certs/ca-certificates.crt,--without-ca-certificates" -PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls" -PACKAGECONFIG[libproxy] = "--with-libproxy,--without-libproxy,libproxy" -PACKAGECONFIG[pkcs11] = "--with-pkcs11,--without-pkcs11,p11-kit" - -EXTRA_OECONF = "--without-gnome-proxy" - -inherit gnomebase gettext upstream-version-is-even gio-module-cache - -FILES_${PN} += "${libdir}/gio/modules/libgio*.so ${datadir}/dbus-1/services/" -FILES_${PN}-dev += "${libdir}/gio/modules/libgio*.la" -FILES_${PN}-staticdev += "${libdir}/gio/modules/libgio*.a" diff --git a/import-layers/yocto-poky/meta/recipes-core/glib-networking/glib-networking_2.50.0.bb b/import-layers/yocto-poky/meta/recipes-core/glib-networking/glib-networking_2.50.0.bb new file mode 100644 index 000000000..2782bd95c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glib-networking/glib-networking_2.50.0.bb @@ -0,0 +1,29 @@ +SUMMARY = "GLib networking extensions" +DESCRIPTION = "glib-networking contains the implementations of certain GLib networking features that cannot be implemented directly in GLib itself because of their dependencies." +HOMEPAGE = "http://git.gnome.org/browse/glib-networking/" +BUGTRACKER = "http://bugzilla.gnome.org" + +LICENSE = "LGPLv2" +LIC_FILES_CHKSUM = "file://COPYING;md5=5f30f0716dfdd0d91eb439ebec522ec2" + +SECTION = "libs" +DEPENDS = "glib-2.0" + +SRC_URI[archive.md5sum] = "4d06d0224646f274918b1cb6da9a07f6" +SRC_URI[archive.sha256sum] = "3f1a442f3c2a734946983532ce59ed49120319fdb10c938447c373d5e5286bee" + +PACKAGECONFIG ??= "ca-certificates gnutls" + +# No explicit dependency as it works without ca-certificates installed +PACKAGECONFIG[ca-certificates] = "--with-ca-certificates=${sysconfdir}/ssl/certs/ca-certificates.crt,--without-ca-certificates" +PACKAGECONFIG[gnutls] = "--with-gnutls,--without-gnutls,gnutls" +PACKAGECONFIG[libproxy] = "--with-libproxy,--without-libproxy,libproxy" +PACKAGECONFIG[pkcs11] = "--with-pkcs11,--without-pkcs11,p11-kit" + +EXTRA_OECONF = "--without-gnome-proxy" + +inherit gnomebase gettext upstream-version-is-even gio-module-cache + +FILES_${PN} += "${libdir}/gio/modules/libgio*.so ${datadir}/dbus-1/services/" +FILES_${PN}-dev += "${libdir}/gio/modules/libgio*.la" +FILES_${PN}-staticdev += "${libdir}/gio/modules/libgio*.a" diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.24.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.24.bb deleted file mode 100644 index d4cccedb4..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.24.bb +++ /dev/null @@ -1,52 +0,0 @@ -SUMMARY = "Cross locale generation tool for glibc" -HOMEPAGE = "http://www.gnu.org/software/libc/libc.html" -SECTION = "libs" -LICENSE = "LGPL-2.1" - -LIC_FILES_CHKSUM = "file://LICENSES;md5=e9a558e243b36d3209f380deb394b213 \ - file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \ - 
file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \ - file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c" - -# Tell autotools that we're working in the localedef directory -# -AUTOTOOLS_SCRIPT_PATH = "${S}/localedef" - -inherit native -inherit autotools - -FILESEXTRAPATHS =. "${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/glibc:" - -SRCBRANCH ?= "release/${PV}/master" -GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git" -UPSTREAM_CHECK_GITTAGREGEX = "(?P\d+\.\d+(\.\d+)*)" - -SRCREV_glibc ?= "ea23815a795f72035262953dad5beb03e09c17dd" -SRCREV_localedef ?= "29869b6dc11427c5bab839bdb155c85a7c644c71" - -SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \ - git://github.com/kraj/localedef;branch=master;name=localedef;destsuffix=git/localedef \ - file://0016-timezone-re-written-tzselect-as-posix-sh.patch \ - file://0017-Remove-bash-dependency-for-nscd-init-script.patch \ - file://0018-eglibc-Cross-building-and-testing-instructions.patch \ - file://0019-eglibc-Help-bootstrap-cross-toolchain.patch \ - file://0020-eglibc-cherry-picked-from.patch \ - file://0021-eglibc-Clear-cache-lines-on-ppc8xx.patch \ - file://0022-eglibc-Resolve-__fpscr_values-on-SH4.patch \ - file://0023-eglibc-Install-PIC-archives.patch \ - file://0024-eglibc-Forward-port-cross-locale-generation-support.patch \ - file://0025-Define-DUMMY_LOCALE_T-if-not-defined.patch \ -" -# Makes for a rather long rev (22 characters), but... -# -SRCREV_FORMAT = "glibc_localedef" - -S = "${WORKDIR}/git" - -EXTRA_OECONF = "--with-glibc=${S}" -CFLAGS += "-fgnu89-inline -std=gnu99 -DIS_IN\(x\)='0'" - -do_install() { - install -d ${D}${bindir} - install -m 0755 ${B}/localedef ${D}${bindir}/cross-localedef -} diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.25.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.25.bb new file mode 100644 index 000000000..fae8683ee --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/cross-localedef-native_2.25.bb @@ -0,0 +1,53 @@ +SUMMARY = "Cross locale generation tool for glibc" +HOMEPAGE = "http://www.gnu.org/software/libc/libc.html" +SECTION = "libs" +LICENSE = "LGPL-2.1" + +LIC_FILES_CHKSUM = "file://LICENSES;md5=e9a558e243b36d3209f380deb394b213 \ + file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \ + file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \ + file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c" + +# Tell autotools that we're working in the localedef directory +# +AUTOTOOLS_SCRIPT_PATH = "${S}/localedef" + +inherit native +inherit autotools + +FILESEXTRAPATHS =. 
"${FILE_DIRNAME}/${PN}:${FILE_DIRNAME}/glibc:" + +SRCBRANCH ?= "release/${PV}/master" +GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git" +UPSTREAM_CHECK_GITTAGREGEX = "(?P\d+\.\d+(\.\d+)*)" + +SRCREV_glibc ?= "db0242e3023436757bbc7c488a779e6e3343db04" +SRCREV_localedef ?= "29869b6dc11427c5bab839bdb155c85a7c644c71" + +SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \ + git://github.com/kraj/localedef;branch=master;name=localedef;destsuffix=git/localedef \ + file://0016-timezone-re-written-tzselect-as-posix-sh.patch \ + file://0017-Remove-bash-dependency-for-nscd-init-script.patch \ + file://0018-eglibc-Cross-building-and-testing-instructions.patch \ + file://0019-eglibc-Help-bootstrap-cross-toolchain.patch \ + file://0020-eglibc-cherry-picked-from.patch \ + file://0021-eglibc-Clear-cache-lines-on-ppc8xx.patch \ + file://0022-eglibc-Resolve-__fpscr_values-on-SH4.patch \ + file://0023-eglibc-Install-PIC-archives.patch \ + file://0024-eglibc-Forward-port-cross-locale-generation-support.patch \ + file://0025-Define-DUMMY_LOCALE_T-if-not-defined.patch \ + file://0001-Include-locale_t.h-compatibility-header.patch \ +" +# Makes for a rather long rev (22 characters), but... +# +SRCREV_FORMAT = "glibc_localedef" + +S = "${WORKDIR}/git" + +EXTRA_OECONF = "--with-glibc=${S}" +CFLAGS += "-fgnu89-inline -std=gnu99 -DIS_IN\(x\)='0'" + +do_install() { + install -d ${D}${bindir} + install -m 0755 ${B}/localedef ${D}${bindir}/cross-localedef +} diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-collateral.inc b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-collateral.inc index 60655eba3..37f27ca44 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-collateral.inc +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-collateral.inc @@ -9,13 +9,13 @@ HOMEPAGE = "http://www.gnu.org/software/libc/index.html" # http://lists.openembedded.org/pipermail/openembedded-core/2015-January/100679.html ARM_INSTRUCTION_SET = "arm" -do_fetch[noexec] = "1" -do_unpack[noexec] = "1" -do_patch[noexec] = "1" +deltask do_fetch +deltask do_unpack +deltask do_patch do_configure[noexec] = "1" do_compile[noexec] = "1" -do_install[depends] += "virtual/${MLPREFIX}libc:do_populate_sysroot" +do_install[depends] += "virtual/${MLPREFIX}libc:do_stash_locale" COMPATIBLE_HOST_libc-musl_class-target = "null" COMPATIBLE_HOST_libc-uclibc_class-target = "null" diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-common.inc b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-common.inc index bba1568ba..b05e162f8 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-common.inc +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-common.inc @@ -7,3 +7,4 @@ LIC_FILES_CHKSUM ?= "file://LICENSES;md5=07a394b26e0902b9ffdec03765209770 \ file://COPYING;md5=393a5ca445f6965873eca0259a17f833 \ file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \ file://COPYING.LIB;md5=bbb461211a33b134d42ed5ee802b37ff " +CVE_PRODUCT = "glibc" diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial.inc b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial.inc index 2e3bc8104..b86e2fb2d 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial.inc +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial.inc @@ -6,7 +6,6 @@ PACKAGES_DYNAMIC = "" STAGINGCC = "gcc-cross-initial-${TARGET_ARCH}" STAGINGCC_class-nativesdk = "gcc-crosssdk-initial-${SDK_SYS}" -TOOLCHAIN_OPTIONS = " 
--sysroot=${STAGING_DIR_TCBOOTSTRAP}" do_configure () { (cd ${S} && gnu-configize) || die "failure in running gnu-configize" @@ -42,13 +41,9 @@ do_install () { if [ -e ${B}/bits/stdio_lim.h ]; then cp ${B}/bits/stdio_lim.h ${D}${includedir}/bits/ fi - # add links to linux-libc-headers: final glibc build need this. - for t in linux asm asm-generic; do - ln -s ${STAGING_DIR_TARGET}${includedir}/$t ${D}${includedir}/ - done } -do_install_locale() { +do_stash_locale() { : } @@ -56,23 +51,7 @@ do_siteconfig () { : } -SSTATEPOSTINSTFUNCS += "glibcinitial_sstate_postinst" -glibcinitial_sstate_postinst() { - if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] - then - # Recreate the symlinks to ensure they point to the correct location - for t in linux asm asm-generic; do - rm -f ${STAGING_DIR_TCBOOTSTRAP}${includedir}/$t - ln -s ${STAGING_DIR_TARGET}${includedir}/$t ${STAGING_DIR_TCBOOTSTRAP}${includedir}/ - done - fi -} - -do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_TCBOOTSTRAP}/" - -# We don't install any scripts so there is nothing to evacuate -do_evacuate_scripts () { - : -} - inherit nopackages + +# We really only want this built by things that need it, not any recrdeptask +deltask do_build diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial_2.24.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial_2.24.bb deleted file mode 100644 index e86770e12..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial_2.24.bb +++ /dev/null @@ -1,9 +0,0 @@ -require glibc_${PV}.bb -require glibc-initial.inc - -# main glibc recipes muck with TARGET_CPPFLAGS to point into -# final target sysroot but we -# are not there when building glibc-initial -# so reset it here - -TARGET_CPPFLAGS = "" diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial_2.25.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial_2.25.bb new file mode 100644 index 000000000..e86770e12 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-initial_2.25.bb @@ -0,0 +1,9 @@ +require glibc_${PV}.bb +require glibc-initial.inc + +# main glibc recipes muck with TARGET_CPPFLAGS to point into +# final target sysroot but we +# are not there when building glibc-initial +# so reset it here + +TARGET_CPPFLAGS = "" diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-ld.inc b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-ld.inc index b982368d8..c1d635dc8 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-ld.inc +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-ld.inc @@ -1,27 +1,14 @@ def ld_append_if_tune_exists(d, infos, dict): - tune = d.getVar("DEFAULTTUNE", True) or "" - libdir = d.getVar("base_libdir", True) or "" + tune = d.getVar("DEFAULTTUNE") or "" + libdir = d.getVar("base_libdir") or "" if tune in dict: infos['ldconfig'].add('{"' + libdir + '/' + dict[tune][0] + '",' + dict[tune][1] + ' }') infos['lddrewrite'].add(libdir+'/'+dict[tune][0]) def glibc_dl_info(d): ld_info_all = { - "mips": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mips64-n32": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mips64": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mipsel": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mips64el-n32": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mips64el": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mips-nf": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mipsisa32r6": ["ld-linux-mipsn8.so.1", "FLAG_ELF_LIBC6"], - "mipsisa32r6el": ["ld-linux-mipsn8.so.1", "FLAG_ELF_LIBC6"], - 
"mips64-nf-n32": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mips64-nf": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mips64el-nf-n32": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mips64el-nf": ["ld.so.1", "FLAG_ELF_LIBC6"], - "mipsisa64r6": ["ld-linux-mipsn8.so.1", "FLAG_ELF_LIBC6"], - "mipsisa64r6el": ["ld-linux-mipsn8.so.1", "FLAG_ELF_LIBC6"], + "mipsarch": ["ld.so.1", "FLAG_ELF_LIBC6"], + "mipsarchr6": ["ld-linux-mipsn8.so.1", "FLAG_ELF_LIBC6"], "powerpc": ["ld.so.1", "FLAG_ELF_LIBC6"], "powerpc-nf": ["ld.so.1", "FLAG_ELF_LIBC6"], "powerpc64": ["ld64.so.1", "FLAG_ELF_LIBC6"], @@ -39,18 +26,17 @@ def glibc_dl_info(d): ld_append_if_tune_exists(d, infos, ld_info_all) #DEFAULTTUNE_MULTILIB_ORIGINAL - original_tune=d.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL",True) + original_tune=d.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL") if original_tune: localdata = bb.data.createCopy(d) localdata.setVar("DEFAULTTUNE", original_tune) ld_append_if_tune_exists(localdata, infos, ld_info_all) - variants = d.getVar("MULTILIB_VARIANTS", True) or "" + variants = d.getVar("MULTILIB_VARIANTS") or "" for item in variants.split(): localdata = bb.data.createCopy(d) overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item localdata.setVar("OVERRIDES", overrides) - bb.data.update_data(localdata) ld_append_if_tune_exists(localdata, infos, ld_info_all) infos['ldconfig'] = ','.join(infos['ldconfig']) infos['lddrewrite'] = ' '.join(infos['lddrewrite']) @@ -58,4 +44,5 @@ def glibc_dl_info(d): EGLIBC_KNOWN_INTERPRETER_NAMES = "${@glibc_dl_info(d)['ldconfig']}" RTLDLIST = "${@glibc_dl_info(d)['lddrewrite']}" +RTLDLIST_class-nativesdk = "${base_libdir}/${@bb.utils.contains('SDK_ARCH', 'x86_64', 'ld-linux-x86-64.so.2', 'ld-linux.so.2', d)}" glibc_dl_info[vardepsexclude] = "OVERRIDES" diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale.inc b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale.inc index 0a7adfcc8..75ababea6 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale.inc +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale.inc @@ -1,4 +1,4 @@ -include glibc-collateral.inc +require glibc-collateral.inc SUMMARY = "Locale data from glibc" @@ -12,6 +12,10 @@ BINUTILSDEP = "virtual/${MLPREFIX}${TARGET_PREFIX}binutils:do_populate_sysroot" BINUTILSDEP_class-nativesdk = "virtual/${TARGET_PREFIX}binutils-crosssdk:do_populate_sysroot" do_package[depends] += "${BINUTILSDEP}" +# localedef links with libc.so and glibc-collateral.incinhibits all default deps +# cannot add virtual/libc to DEPENDS, because it would conflict with libc-initial in RSS +RDEPENDS_localedef += "glibc" + # Binary locales are generated at build time if ENABLE_BINARY_LOCALE_GENERATION # is set. 
The idea is to avoid running localedef on the target (at first boot) # to decrease initial boot time and avoid localedef being killed by the OOM @@ -41,22 +45,22 @@ PACKAGES_DYNAMIC = "^locale-base-.* \ # Create a glibc-binaries package ALLOW_EMPTY_${BPN}-binaries = "1" PACKAGES += "${BPN}-binaries" -RRECOMMENDS_${BPN}-binaries = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-binary") != -1])}" +RRECOMMENDS_${BPN}-binaries = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-binary") != -1])}" # Create a glibc-charmaps package ALLOW_EMPTY_${BPN}-charmaps = "1" PACKAGES += "${BPN}-charmaps" -RRECOMMENDS_${BPN}-charmaps = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-charmap") != -1])}" +RRECOMMENDS_${BPN}-charmaps = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-charmap") != -1])}" # Create a glibc-gconvs package ALLOW_EMPTY_${BPN}-gconvs = "1" PACKAGES += "${BPN}-gconvs" -RRECOMMENDS_${BPN}-gconvs = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-gconv") != -1])}" +RRECOMMENDS_${BPN}-gconvs = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-gconv") != -1])}" # Create a glibc-localedatas package ALLOW_EMPTY_${BPN}-localedatas = "1" PACKAGES += "${BPN}-localedatas" -RRECOMMENDS_${BPN}-localedatas = "${@" ".join([p for p in d.getVar('PACKAGES', True).split() if p.find("glibc-localedata") != -1])}" +RRECOMMENDS_${BPN}-localedatas = "${@" ".join([p for p in d.getVar('PACKAGES').split() if p.find("glibc-localedata") != -1])}" DESCRIPTION_localedef = "glibc: compile locale definition files" @@ -66,7 +70,7 @@ DESCRIPTION_localedef = "glibc: compile locale definition files" FILES_${MLPREFIX}glibc-gconv = "${libdir}/gconv/*" FILES_localedef = "${bindir}/localedef" -LOCALETREESRC = "${STAGING_INCDIR}/glibc-locale-internal-${MULTIMACH_TARGET_SYS}" +LOCALETREESRC = "${COMPONENTS_DIR}/${PACKAGE_ARCH}/glibc-stash-locale" do_install () { mkdir -p ${D}${bindir} ${D}${datadir} ${D}${libdir} diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale_2.24.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale_2.24.bb deleted file mode 100644 index f7702e035..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale_2.24.bb +++ /dev/null @@ -1 +0,0 @@ -require glibc-locale.inc diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale_2.25.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale_2.25.bb new file mode 100644 index 000000000..f7702e035 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-locale_2.25.bb @@ -0,0 +1 @@ +require glibc-locale.inc diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace.inc b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace.inc index e12b079e0..d703c14bd 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace.inc +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace.inc @@ -1,11 +1,11 @@ -include glibc-collateral.inc +require glibc-collateral.inc SUMMARY = "mtrace utility provided by glibc" DESCRIPTION = "mtrace utility provided by glibc" RDEPENDS_${PN} = "perl" RPROVIDES_${PN} = "libc-mtrace" -SRC = "${STAGING_INCDIR}/glibc-scripts-internal-${MULTIMACH_TARGET_SYS}" +SRC = "${COMPONENTS_DIR}/${PACKAGE_ARCH}/glibc-stash-locale/scripts" do_install() { install -d -m 0755 ${D}${bindir} diff --git 
a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace_2.24.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace_2.24.bb deleted file mode 100644 index 0b69bad46..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace_2.24.bb +++ /dev/null @@ -1 +0,0 @@ -require glibc-mtrace.inc diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace_2.25.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace_2.25.bb new file mode 100644 index 000000000..0b69bad46 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-mtrace_2.25.bb @@ -0,0 +1 @@ +require glibc-mtrace.inc diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-package.inc b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-package.inc index bad642449..9f7fa62a3 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-package.inc +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-package.inc @@ -8,18 +8,15 @@ python __anonymous () { import bb, re - uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS', True)) != None) + uc_os = (re.match('.*uclibc*', d.getVar('TARGET_OS')) != None) if uc_os: raise bb.parse.SkipPackage("incompatible with target %s" % - d.getVar('TARGET_OS', True)) + d.getVar('TARGET_OS')) } -# Set this to zero if you don't want ldconfig in the output package -USE_LDCONFIG ?= "1" - INHIBIT_SYSROOT_STRIP = "1" -PACKAGES = "${PN}-dbg catchsegv sln nscd ldd tzcode ${PN}-utils glibc-thread-db ${PN}-pic libcidn libmemusage libsegfault ${PN}-pcprofile libsotruss ${PN} glibc-extra-nss ${PN}-dev ${PN}-staticdev ${PN}-doc" +PACKAGES = "${PN}-dbg catchsegv sln nscd ldd tzcode glibc-thread-db ${PN}-pic libcidn libmemusage libsegfault ${PN}-pcprofile libsotruss ${PN} ${PN}-utils glibc-extra-nss ${PN}-dev ${PN}-staticdev ${PN}-doc" # The ld.so in this glibc supports the GNU_HASH RPROVIDES_${PN} = "eglibc rtld(GNU_HASH)" @@ -37,7 +34,7 @@ libc_baselibs = "${base_libdir}/libcrypt*.so.* ${base_libdir}/libcrypt-*.so ${ba libc_baselibs_append_aarch64 = " /lib/ld-linux-aarch64*.so.1" INSANE_SKIP_${PN}_append_aarch64 = " libdir" -FILES_${PN} = "${libc_baselibs} ${libexecdir}/* ${@base_conditional('USE_LDCONFIG', '1', '${base_sbindir}/ldconfig ${sysconfdir}/ld.so.conf', '', d)}" +FILES_${PN} = "${libc_baselibs} ${libexecdir}/* ${base_sbindir}/ldconfig ${sysconfdir}/ld.so.conf" FILES_ldd = "${bindir}/ldd" FILES_libsegfault = "${base_libdir}/libSegFault*" FILES_libcidn = "${base_libdir}/libcidn-*.so ${base_libdir}/libcidn.so.*" @@ -85,7 +82,7 @@ do_install_append () { rmdir --ignore-fail-on-non-empty ${D}${libexecdir} fi - oe_multilib_header bits/syscall.h + oe_multilib_header bits/syscall.h bits/long-double.h if [ -f ${D}${bindir}/mtrace ]; then sed -i -e '1s,#!.*perl,#! ${USRBINPATH}/env perl,' -e '2s,exec.*perl,exec ${USRBINPATH}/env perl,' ${D}${bindir}/mtrace @@ -96,8 +93,8 @@ do_install_append () { rm -f ${D}${infodir}/dir fi - if [ "${USE_LDCONFIG}" != "1" ]; then - # We won't ship these files (see FILES above) so let's not install them + if ! ${@bb.utils.contains('DISTRO_FEATURES', 'ldconfig', 'true', 'false', d)}; then + # The distro doesn't want these files so let's not install them rm -f ${D}${sysconfdir}/ld.so.conf rm -f ${D}${base_sbindir}/ldconfig # This directory will be empty now so remove it too. 
@@ -143,10 +140,30 @@ do_install_append_aarch64 () { ${D}/lib/ld-linux-aarch64_be.so.1 fi fi + do_install_armmultilib +} + +do_install_append_arm () { + do_install_armmultilib +} + +do_install_armmultilib () { + + oe_multilib_header bits/endian.h bits/fcntl.h bits/fenv.h bits/fp-fast.h bits/hwcap.h bits/ipc.h bits/link.h bits/wordsize.h + oe_multilib_header bits/local_lim.h bits/mman.h bits/msq.h bits/pthreadtypes.h bits/sem.h bits/semaphore.h bits/setjmp.h + oe_multilib_header bits/shm.h bits/sigstack.h bits/stat.h bits/statfs.h bits/string.h bits/typesizes.h + + oe_multilib_header fpu_control.h gnu/lib-names.h gnu/stubs.h ieee754.h + + oe_multilib_header sys/elf.h sys/procfs.h sys/ptrace.h sys/ucontext.h sys/user.h } -do_install_locale () { - dest=${D}/${includedir}/glibc-locale-internal-${MULTIMACH_TARGET_SYS} + +LOCALESTASH = "${WORKDIR}/stashed-locale" +bashscripts = "mtrace sotruss xtrace" + +do_stash_locale () { + dest=${LOCALESTASH} install -d ${dest}${base_libdir} ${dest}${bindir} ${dest}${libdir} ${dest}${datadir} if [ "${base_libdir}" != "${libdir}" ]; then cp -fpPR ${D}${base_libdir}/* ${dest}${base_libdir} @@ -166,14 +183,8 @@ do_install_locale () { cp -fpPR ${D}${datadir}/* ${dest}${datadir} rm -rf ${D}${datadir}/locale/ cp -fpPR ${WORKDIR}/SUPPORTED ${dest} -} -addtask do_install_locale after do_install before do_populate_sysroot do_package - -bashscripts = "mtrace sotruss xtrace" - -do_evacuate_scripts () { - target=${D}${includedir}/glibc-scripts-internal-${MULTIMACH_TARGET_SYS} + target=${dest}/scripts mkdir -p $target for i in ${bashscripts}; do if [ -f ${D}${bindir}/$i ]; then @@ -182,22 +193,36 @@ do_evacuate_scripts () { done } -addtask evacuate_scripts after do_install before do_populate_sysroot do_package +addtask do_stash_locale after do_install before do_populate_sysroot do_package +do_stash_locale[dirs] = "${B}" +do_stash_locale[cleandirs] = "${LOCALESTASH}" +SSTATETASKS += "do_stash_locale" +do_stash_locale[sstate-inputdirs] = "${LOCALESTASH}" +do_stash_locale[sstate-outputdirs] = "${COMPONENTS_DIR}/${PACKAGE_ARCH}/glibc-stash-locale" +do_stash_locale[sstate-fixmedir] = "${COMPONENTS_DIR}/${PACKAGE_ARCH}/glibc-stash-locale" -PACKAGE_PREPROCESS_FUNCS += "glibc_package_preprocess" +python do_stash_locale_setscene () { + sstate_setscene(d) +} +addtask do_stash_locale_setscene -glibc_package_preprocess () { - rm -rf ${PKGD}/${includedir}/glibc-locale-internal-${MULTIMACH_TARGET_SYS} - rm -rf ${PKGD}/${includedir}/glibc-scripts-internal-${MULTIMACH_TARGET_SYS} +do_poststash_install_cleanup () { + # Remove all files which do_stash_locale would remove (mv) + # since that task could have come from sstate and not get run. 
for i in ${bashscripts}; do - rm -f ${PKGD}${bindir}/$i + rm -f ${D}${bindir}/$i done - rm -rf ${PKGD}/${localedir} + rm -f ${D}${bindir}/localedef + rm -rf ${D}${datadir}/i18n + rm -rf ${D}${libdir}/gconv + rm -rf ${D}/${localedir} + rm -rf ${D}${datadir}/locale if [ "${libdir}" != "${exec_prefix}/lib" ]; then # This dir only exists to hold locales - rm -rf ${PKGD}${exec_prefix}/lib + rm -rf ${D}${exec_prefix}/lib fi } +addtask do_poststash_install_cleanup after do_stash_locale do_install before do_populate_sysroot do_package pkg_postinst_nscd () { if [ -z "$D" ]; then diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts.inc b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts.inc index bce0a4210..2a2b41507 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts.inc +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts.inc @@ -1,10 +1,10 @@ -include glibc-collateral.inc +require glibc-collateral.inc SUMMARY = "utility scripts provided by glibc" DESCRIPTION = "utility scripts provided by glibc" RDEPENDS_${PN} = "bash glibc-mtrace" -SRC = "${STAGING_INCDIR}/glibc-scripts-internal-${MULTIMACH_TARGET_SYS}" +SRC = "${COMPONENTS_DIR}/${PACKAGE_ARCH}/glibc-stash-locale/scripts" bashscripts = "sotruss xtrace" diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts_2.24.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts_2.24.bb deleted file mode 100644 index 5a89bd802..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts_2.24.bb +++ /dev/null @@ -1 +0,0 @@ -require glibc-scripts.inc diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts_2.25.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts_2.25.bb new file mode 100644 index 000000000..5a89bd802 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc-scripts_2.25.bb @@ -0,0 +1 @@ +require glibc-scripts.inc diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc.inc b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc.inc index 7bae0e955..21bbdc2ad 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc.inc +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc.inc @@ -6,17 +6,15 @@ STAGINGCC = "gcc-cross-initial-${TARGET_ARCH}" STAGINGCC_class-nativesdk = "gcc-crosssdk-initial-${SDK_SYS}" PATH_prepend = "${STAGING_BINDIR_TOOLCHAIN}.${STAGINGCC}:" -TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TCBOOTSTRAP}" - python () { opt_effective = "-O" - for opt in d.getVar('SELECTED_OPTIMIZATION', True).split(): + for opt in d.getVar('SELECTED_OPTIMIZATION').split(): if opt in ("-O0", "-O", "-O1", "-O2", "-O3", "-Os"): opt_effective = opt if opt_effective == "-O0": - bb.fatal("%s can't be built with %s, try -O1 instead" % (d.getVar('PN', True), opt_effective)) + bb.fatal("%s can't be built with %s, try -O1 instead" % (d.getVar('PN'), opt_effective)) if opt_effective in ("-O", "-O1", "-Os"): - bb.note("%s doesn't build cleanly with %s, adding -Wno-error to SELECTED_OPTIMIZATION" % (d.getVar('PN', True), opt_effective)) + bb.note("%s doesn't build cleanly with %s, adding -Wno-error to SELECTED_OPTIMIZATION" % (d.getVar('PN'), opt_effective)) d.appendVar("SELECTED_OPTIMIZATION", " -Wno-error") } diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-Add-atomic_exchange_relaxed.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-Add-atomic_exchange_relaxed.patch deleted file mode 100644 index 
a33a135f7..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-Add-atomic_exchange_relaxed.patch +++ /dev/null @@ -1,58 +0,0 @@ -From ce74a620bf9e1a40b7ba06d35160e20633a4d8bb Mon Sep 17 00:00:00 2001 -From: Catalin Enache -Date: Fri, 7 Jul 2017 13:11:16 +0300 -Subject: [PATCH 1/6] Add atomic_exchange_relaxed. - -* include/atomic.h (atomic_exchange_relaxed): New - -Upstream-Status: Backport - -Author: Torvald Riegel -Signed-off-by: Catalin Enache ---- - ChangeLog | 4 ++++ - include/atomic.h | 9 +++++++++ - 2 files changed, 13 insertions(+) - -diff --git a/ChangeLog b/ChangeLog -index 0fbda90..cb87279 100644 ---- a/ChangeLog -+++ b/ChangeLog -@@ -1,3 +1,7 @@ -+2016-08-05 Torvald Riegel -+ -+ * include/atomic.h (atomic_exchange_relaxed): New. -+ - 2016-01-28 Carlos O'Donell - Alexey Makhalov - Florian Weimer -diff --git a/include/atomic.h b/include/atomic.h -index ad3db25..129ee24 100644 ---- a/include/atomic.h -+++ b/include/atomic.h -@@ -588,6 +588,9 @@ void __atomic_link_error (void); - __atomic_compare_exchange_n ((mem), (expected), (desired), 1, \ - __ATOMIC_RELEASE, __ATOMIC_RELAXED); }) - -+# define atomic_exchange_relaxed(mem, desired) \ -+ ({ __atomic_check_size((mem)); \ -+ __atomic_exchange_n ((mem), (desired), __ATOMIC_RELAXED); }) - # define atomic_exchange_acquire(mem, desired) \ - ({ __atomic_check_size((mem)); \ - __atomic_exchange_n ((mem), (desired), __ATOMIC_ACQUIRE); }) -@@ -684,6 +687,12 @@ void __atomic_link_error (void); - *(expected) == __atg103_expected; }) - # endif - -+/* XXX Fall back to acquire MO because archs do not define a weaker -+ atomic_exchange. */ -+# ifndef atomic_exchange_relaxed -+# define atomic_exchange_relaxed(mem, val) \ -+ atomic_exchange_acq ((mem), (val)) -+# endif - # ifndef atomic_exchange_acquire - # define atomic_exchange_acquire(mem, val) \ - atomic_exchange_acq ((mem), (val)) --- -2.10.2 - diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-Include-locale_t.h-compatibility-header.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-Include-locale_t.h-compatibility-header.patch new file mode 100644 index 000000000..a13c428e1 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-Include-locale_t.h-compatibility-header.patch @@ -0,0 +1,29 @@ +From abfeb0cf4e3261a66a7a23abc9aed33c034c850d Mon Sep 17 00:00:00 2001 +From: Joshua Watt +Date: Wed, 6 Dec 2017 13:26:19 -0600 +Subject: [PATCH] Include locale_t.h compatibility header + +Newer versions of glibc (since 2.26) moved the locale typedefs from +xlocale.h to bits/types/locale_t.h. 
Create a compatibility header for +these newer versions of glibc + +See f0be25b6336db7492e47d2e8e72eb8af53b5506d in glibc + +Upstream-Status: Inappropriate compatibility with newer host glibc +Signed-off-by: Joshua Watt + +--- + locale/bits/types/locale_t.h | 1 + + 1 file changed, 1 insertion(+) + create mode 100644 locale/bits/types/locale_t.h + +diff --git a/locale/bits/types/locale_t.h b/locale/bits/types/locale_t.h +new file mode 100644 +index 0000000000..b519a6c5f8 +--- /dev/null ++++ b/locale/bits/types/locale_t.h +@@ -0,0 +1 @@ ++#include +-- +2.14.3 + diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-locale-fix-hard-coded-reference-to-gcc-E.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-locale-fix-hard-coded-reference-to-gcc-E.patch deleted file mode 100644 index d5fce7371..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-locale-fix-hard-coded-reference-to-gcc-E.patch +++ /dev/null @@ -1,39 +0,0 @@ -From 2c0ab83eb54c0e0fccbf261726dc03803b236079 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?J=C3=A9r=C3=A9my=20Rosen?= -Date: Mon, 22 Aug 2016 16:09:25 +0200 -Subject: [PATCH] locale: fix hard-coded reference to gcc -E - -When new version of compilers are published, they may not be compatible with -older versions of software. This is particularly common when software is built -with -Werror. - -Autotools provides a way for a user to specify the name of his compiler using a -set of variables ($CC $CXX $CPP etc.). Those variables are used correctly when -compiling glibc but the script used to generate transliterations in the locale/ -subdirectory directly calls the gcc binary to get the output of the -preprocessor instead of using the $CPP variable provided by the build -environment. - -This patch replaces the hard-coded reference to the gcc binary with the proper -environment variable, thus allowing a user to override it. 
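As an illustrative aside (this sketch is not part of the patch), the ${CPP:-gcc -E} expansion the replacement relies on uses the exported CPP value when one is present and falls back to the literal "gcc -E" otherwise; the cross-preprocessor name below is hypothetical:

    $ echo "${CPP:-gcc -E}"                        # CPP unset: the literal default is used
    gcc -E
    $ export CPP="arm-poky-linux-gnueabi-gcc -E"   # hypothetical cross toolchain
    $ echo "${CPP:-gcc -E}"                        # CPP set: the environment value wins
    arm-poky-linux-gnueabi-gcc -E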
- -Upstream-Status : Submitted [https://sourceware.org/ml/libc-alpha/2016-08/msg00746.html] - ---- - locale/gen-translit.pl | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/locale/gen-translit.pl b/locale/gen-translit.pl -index 30d3f2f..7b287fa 100644 ---- a/locale/gen-translit.pl -+++ b/locale/gen-translit.pl -@@ -1,5 +1,5 @@ - #!/usr/bin/perl -w --open F, "cat C-translit.h.in | gcc -E - |" || die "Cannot preprocess input file"; -+open F, 'cat C-translit.h.in | ${CPP:-gcc -E} - |' || die "Cannot preprocess input file"; - - - sub cstrlen { --- -2.9.3 - diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch index ca3375805..0553f8a47 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0001-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch @@ -1,7 +1,7 @@ -From 7e11aafc3a7cb873b3f648740c8acd379597e4d1 Mon Sep 17 00:00:00 2001 +From 2727e58d1d269994de17cadb12195001b14585e7 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 01:48:24 +0000 -Subject: [PATCH 01/25] nativesdk-glibc: Look for host system ld.so.cache as +Subject: [PATCH 01/26] nativesdk-glibc: Look for host system ld.so.cache as well Upstream-Status: Inappropriate [embedded specific] @@ -31,10 +31,10 @@ Signed-off-by: Khem Raj 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/elf/dl-load.c b/elf/dl-load.c -index c0d6249..2c73105 100644 +index 51fb0d0..f503dbc 100644 --- a/elf/dl-load.c +++ b/elf/dl-load.c -@@ -2094,6 +2094,14 @@ _dl_map_object (struct link_map *loader, const char *name, +@@ -2054,6 +2054,14 @@ _dl_map_object (struct link_map *loader, const char *name, } } @@ -49,7 +49,7 @@ index c0d6249..2c73105 100644 #ifdef USE_LDCONFIG if (fd == -1 && (__glibc_likely ((mode & __RTLD_SECURE) == 0) -@@ -2152,14 +2160,6 @@ _dl_map_object (struct link_map *loader, const char *name, +@@ -2112,14 +2120,6 @@ _dl_map_object (struct link_map *loader, const char *name, } #endif @@ -65,5 +65,5 @@ index c0d6249..2c73105 100644 if (__glibc_unlikely (GLRO(dl_debug_mask) & DL_DEBUG_LIBS)) _dl_debug_printf ("\n"); -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0002-Add-atomic-operations-required-by-the-new-condition-.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0002-Add-atomic-operations-required-by-the-new-condition-.patch deleted file mode 100644 index c4747fa27..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0002-Add-atomic-operations-required-by-the-new-condition-.patch +++ /dev/null @@ -1,124 +0,0 @@ -From b85e30e655027132c4326d2fdde010c517165aaf Mon Sep 17 00:00:00 2001 -From: Catalin Enache -Date: Fri, 30 Jun 2017 14:27:34 +0300 -Subject: [PATCH 2/6] Add atomic operations required by the new condition - variable. - - * include/atomic.h (atomic_fetch_and_relaxed, - atomic_fetch_and_release, atomic_fetch_or_release, - atomic_fetch_xor_release): New. 
- -Upstream-Status: Backport - -Author: Torvald Riegel -Signed-off-by: Catalin Enache ---- - ChangeLog | 6 ++++++ - include/atomic.h | 47 +++++++++++++++++++++++++++++++++++++++++++++++ - 2 files changed, 53 insertions(+) - -diff --git a/ChangeLog b/ChangeLog -index cb87279..96b6da2 100644 ---- a/ChangeLog -+++ b/ChangeLog -@@ -1,3 +1,9 @@ -+2016-08-09 Torvald Riegel -+ -+ * include/atomic.h (atomic_fetch_and_relaxed, -+ atomic_fetch_and_release, atomic_fetch_or_release, -+ atomic_fetch_xor_release): New. -+ - 2016-08-05 Torvald Riegel - - * include/atomic.h (atomic_exchange_relaxed): New. -diff --git a/include/atomic.h b/include/atomic.h -index 129ee24..5a8e7e7 100644 ---- a/include/atomic.h -+++ b/include/atomic.h -@@ -611,9 +611,15 @@ void __atomic_link_error (void); - ({ __atomic_check_size((mem)); \ - __atomic_fetch_add ((mem), (operand), __ATOMIC_ACQ_REL); }) - -+# define atomic_fetch_and_relaxed(mem, operand) \ -+ ({ __atomic_check_size((mem)); \ -+ __atomic_fetch_and ((mem), (operand), __ATOMIC_RELAXED); }) - # define atomic_fetch_and_acquire(mem, operand) \ - ({ __atomic_check_size((mem)); \ - __atomic_fetch_and ((mem), (operand), __ATOMIC_ACQUIRE); }) -+# define atomic_fetch_and_release(mem, operand) \ -+ ({ __atomic_check_size((mem)); \ -+ __atomic_fetch_and ((mem), (operand), __ATOMIC_RELEASE); }) - - # define atomic_fetch_or_relaxed(mem, operand) \ - ({ __atomic_check_size((mem)); \ -@@ -621,6 +627,13 @@ void __atomic_link_error (void); - # define atomic_fetch_or_acquire(mem, operand) \ - ({ __atomic_check_size((mem)); \ - __atomic_fetch_or ((mem), (operand), __ATOMIC_ACQUIRE); }) -+# define atomic_fetch_or_release(mem, operand) \ -+ ({ __atomic_check_size((mem)); \ -+ __atomic_fetch_or ((mem), (operand), __ATOMIC_RELEASE); }) -+ -+# define atomic_fetch_xor_release(mem, operand) \ -+ ({ __atomic_check_size((mem)); \ -+ __atomic_fetch_xor ((mem), (operand), __ATOMIC_RELEASE); }) - - #else /* !USE_ATOMIC_COMPILER_BUILTINS */ - -@@ -724,12 +737,24 @@ void __atomic_link_error (void); - atomic_exchange_and_add_acq ((mem), (operand)); }) - # endif - -+/* XXX Fall back to acquire MO because archs do not define a weaker -+ atomic_and_val. */ -+# ifndef atomic_fetch_and_relaxed -+# define atomic_fetch_and_relaxed(mem, operand) \ -+ atomic_fetch_and_acquire ((mem), (operand)) -+# endif - /* XXX The default for atomic_and_val has acquire semantics, but this is not - documented. */ - # ifndef atomic_fetch_and_acquire - # define atomic_fetch_and_acquire(mem, operand) \ - atomic_and_val ((mem), (operand)) - # endif -+# ifndef atomic_fetch_and_release -+/* XXX This unnecessarily has acquire MO. */ -+# define atomic_fetch_and_release(mem, operand) \ -+ ({ atomic_thread_fence_release (); \ -+ atomic_and_val ((mem), (operand)); }) -+# endif - - /* XXX The default for atomic_or_val has acquire semantics, but this is not - documented. */ -@@ -743,6 +768,28 @@ void __atomic_link_error (void); - # define atomic_fetch_or_relaxed(mem, operand) \ - atomic_fetch_or_acquire ((mem), (operand)) - # endif -+/* XXX Contains an unnecessary acquire MO because archs do not define a weaker -+ atomic_or_val. 
*/ -+# ifndef atomic_fetch_or_release -+# define atomic_fetch_or_release(mem, operand) \ -+ ({ atomic_thread_fence_release (); \ -+ atomic_fetch_or_acquire ((mem), (operand)); }) -+# endif -+ -+# ifndef atomic_fetch_xor_release -+# define atomic_fetch_xor_release(mem, operand) \ -+ ({ __typeof (*(mem)) __atg104_old; \ -+ __typeof (mem) __atg104_memp = (mem); \ -+ __typeof (*(mem)) __atg104_op = (operand); \ -+ \ -+ do \ -+ __atg104_old = (*__atg104_memp); \ -+ while (__builtin_expect \ -+ (atomic_compare_and_exchange_bool_rel ( \ -+ __atg104_memp, __atg104_old ^ __atg104_op, __atg104_old), 0));\ -+ \ -+ __atg104_old; }) -+#endif - - #endif /* !USE_ATOMIC_COMPILER_BUILTINS */ - --- -2.10.2 - diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0002-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0002-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch index 5428468c5..e5ef3410e 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0002-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0002-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch @@ -1,7 +1,7 @@ -From 5fd884dc28d5d84001fae8ffdd8be698bb84143e Mon Sep 17 00:00:00 2001 +From 1578f52647ec8804186d1944d4cd2095132efc39 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 01:50:00 +0000 -Subject: [PATCH 02/25] nativesdk-glibc: Fix buffer overrun with a relocated +Subject: [PATCH 02/26] nativesdk-glibc: Fix buffer overrun with a relocated SDK When ld-linux-*.so.2 is relocated to a path that is longer than the @@ -22,10 +22,10 @@ Signed-off-by: Khem Raj 1 file changed, 12 insertions(+) diff --git a/elf/dl-load.c b/elf/dl-load.c -index 2c73105..c7b8797 100644 +index f503dbc..3a3d112 100644 --- a/elf/dl-load.c +++ b/elf/dl-load.c -@@ -1793,7 +1793,19 @@ open_path (const char *name, size_t namelen, int mode, +@@ -1753,7 +1753,19 @@ open_path (const char *name, size_t namelen, int mode, given on the command line when rtld is run directly. */ return -1; @@ -46,5 +46,5 @@ index 2c73105..c7b8797 100644 { struct r_search_path_elem *this_dir = *dirs; -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0003-Add-pretty-printers-for-the-NPTL-lock-types.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0003-Add-pretty-printers-for-the-NPTL-lock-types.patch deleted file mode 100644 index 9eb635d71..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0003-Add-pretty-printers-for-the-NPTL-lock-types.patch +++ /dev/null @@ -1,3197 +0,0 @@ -From 246fee86fc90c57738ee282a061039f82832f4ea Mon Sep 17 00:00:00 2001 -From: Catalin Enache -Date: Fri, 30 Jun 2017 13:42:04 +0300 -Subject: [PATCH 3/6] Add pretty printers for the NPTL lock types - -This patch adds pretty printers for the following NPTL types: - -- pthread_mutex_t -- pthread_mutexattr_t -- pthread_cond_t -- pthread_condattr_t -- pthread_rwlock_t -- pthread_rwlockattr_t - -To load the pretty printers into your gdb session, do the following: - -python -import sys -sys.path.insert(0, '/path/to/glibc/build/nptl/pretty-printers') -end - -source /path/to/glibc/source/pretty-printers/nptl-printers.py - -You can check which printers are registered and enabled by issuing the -'info pretty-printer' gdb command. Printers should trigger automatically when -trying to print a variable of one of the types mentioned above. 
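A minimal sketch of such a session (not taken from the patch; output abridged, and the exact layout depends on the gdb version) when the printers have been loaded through .gdbinit as described above:

    (gdb) info pretty-printer
    global pretty-printers:
      glibc-pthread-locks
        pthread_cond_t
        pthread_condattr_t
        pthread_mutex_t
        pthread_mutexattr_t
        pthread_rwlock_t
        pthread_rwlockattr_t
    (gdb) print mutex
    $1 = pthread_mutex_t = {Type = Normal, Status = Unlocked, ...}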
- -The printers are architecture-independent, and were tested on an AMD64 running -Ubuntu 14.04 and an x86 VM running Fedora 24. - -In order to work, the printers need to know the values of various flags that -are scattered throughout pthread.h and pthreadP.h as enums and #defines. Since -replicating these constants in the printers file itself would create a -maintenance burden, I wrote a script called gen-py-const.awk that Makerules uses -to extract the constants. This script is pretty much the same as gen-as-const.awk, -except it doesn't cast the constant values to 'long' and is thorougly documented. -The constants need only to be enumerated in a .pysym file, which is then referenced -by a Make variable called gen-py-const-headers. - -As for the install directory, I discussed this with Mike Frysinger and Siddhesh -Poyarekar, and we agreed that it can be handled in a separate patch, and shouldn't -block merging of this one. - -In addition, I've written a series of test cases for the pretty printers. -Each lock type (mutex, condvar and rwlock) has two test programs, one for itself -and other for its related 'attributes' object. Each test program in turn has a -PExpect-based Python script that drives gdb and compares its output to the -expected printer's. The tests run on the glibc host, which is assumed to have -both gdb and PExpect; if either is absent the tests will fail with code 77 -(UNSUPPORTED). For cross-testing you should use cross-test-ssh.sh as test-wrapper. -I've tested the printers on both native builds and a cross build using a Beaglebone -Black running Debian, with the build system's filesystem shared with the board -through NFS. - -Finally, I've written a README that explains all this and more. - - * INSTALL: Regenerated. - * Makeconfig: Add comments and whitespace to make the control flow - clearer. - (+link-printers-tests, +link-pie-printers-tests, CFLAGS-printers-tests, - installed-rtld-LDFLAGS, built-rtld-LDFLAGS, link-libc-rpath, - link-libc-tests-after-rpath-link, link-libc-printers-tests): New. - (rtld-LDFLAGS, rtld-tests-LDFLAGS, link-libc-tests-rpath-link, - link-libc-tests): Use the new variables as required. - * Makerules ($(py-const)): New rule. - generated: Add $(py-const). - * README.pretty-printers: New file. - * Rules (tests-printers-programs, tests-printers-out, py-env): New. - (others): Depend on $(py-const). - (tests): Depend on $(tests-printers-programs) or $(tests-printers-out), - as required. Pass $(tests-printers) to merge-test-results.sh. - * manual/install.texi: Add requirements for testing the pretty printers. - * nptl/Makefile (gen-py-const-headers, pretty-printers, tests-printers, - CFLAGS-test-mutexattr-printers.c CFLAGS-test-mutex-printers.c, - CFLAGS-test-condattr-printers.c, CFLAGS-test-cond-printers.c, - CFLAGS-test-rwlockattr-printers.c CFLAGS-test-rwlock-printers.c, - tests-printers-libs): Define. - * nptl/nptl-printers.py: New file. - * nptl/nptl_lock_constants.pysym: Likewise. - * nptl/test-cond-printers.c: Likewise. - * nptl/test-cond-printers.py: Likewise. - * nptl/test-condattr-printers.c: Likewise. - * nptl/test-condattr-printers.py: Likewise. - * nptl/test-mutex-printers.c: Likewise. - * nptl/test-mutex-printers.py: Likewise. - * nptl/test-mutexattr-printers.c: Likewise. - * nptl/test-mutexattr-printers.py: Likewise. - * nptl/test-rwlock-printers.c: Likewise. - * nptl/test-rwlock-printers.py: Likewise. - * nptl/test-rwlockattr-printers.c: Likewise. - * nptl/test-rwlockattr-printers.py: Likewise. 
- * scripts/gen-py-const.awk: Likewise. - * scripts/test_printers_common.py: Likewise. - * scripts/test_printers_exceptions.py: Likewise. - -Upstream-Status: Backport - -Author: Martin Galvan -Signed-off-by: Catalin Enache ---- - ChangeLog | 45 +++ - INSTALL | 27 ++ - Makeconfig | 76 ++++- - Makerules | 46 +++ - NEWS | 6 + - README.pretty-printers | 169 ++++++++++ - Rules | 44 ++- - manual/install.texi | 30 ++ - nptl/Makefile | 18 + - nptl/nptl-printers.py | 633 ++++++++++++++++++++++++++++++++++++ - nptl/nptl_lock_constants.pysym | 75 +++++ - nptl/test-cond-printers.c | 57 ++++ - nptl/test-cond-printers.py | 50 +++ - nptl/test-condattr-printers.c | 94 ++++++ - nptl/test-condattr-printers.py | 71 ++++ - nptl/test-mutex-printers.c | 151 +++++++++ - nptl/test-mutex-printers.py | 97 ++++++ - nptl/test-mutexattr-printers.c | 144 ++++++++ - nptl/test-mutexattr-printers.py | 101 ++++++ - nptl/test-rwlock-printers.c | 78 +++++ - nptl/test-rwlock-printers.py | 64 ++++ - nptl/test-rwlockattr-printers.c | 98 ++++++ - nptl/test-rwlockattr-printers.py | 73 +++++ - scripts/gen-py-const.awk | 118 +++++++ - scripts/test_printers_common.py | 364 +++++++++++++++++++++ - scripts/test_printers_exceptions.py | 61 ++++ - 26 files changed, 2770 insertions(+), 20 deletions(-) - create mode 100644 README.pretty-printers - create mode 100644 nptl/nptl-printers.py - create mode 100644 nptl/nptl_lock_constants.pysym - create mode 100644 nptl/test-cond-printers.c - create mode 100644 nptl/test-cond-printers.py - create mode 100644 nptl/test-condattr-printers.c - create mode 100644 nptl/test-condattr-printers.py - create mode 100644 nptl/test-mutex-printers.c - create mode 100644 nptl/test-mutex-printers.py - create mode 100644 nptl/test-mutexattr-printers.c - create mode 100644 nptl/test-mutexattr-printers.py - create mode 100644 nptl/test-rwlock-printers.c - create mode 100644 nptl/test-rwlock-printers.py - create mode 100644 nptl/test-rwlockattr-printers.c - create mode 100644 nptl/test-rwlockattr-printers.py - create mode 100644 scripts/gen-py-const.awk - create mode 100644 scripts/test_printers_common.py - create mode 100644 scripts/test_printers_exceptions.py - -diff --git a/ChangeLog b/ChangeLog -index 96b6da2..8036c1e 100644 ---- a/ChangeLog -+++ b/ChangeLog -@@ -1,3 +1,48 @@ -+2016-12-08 Martin Galvan -+ -+ * INSTALL: Regenerated. -+ * Makeconfig: Add comments and whitespace to make the control flow -+ clearer. -+ (+link-printers-tests, +link-pie-printers-tests, -+ CFLAGS-printers-tests, installed-rtld-LDFLAGS, -+ built-rtld-LDFLAGS, link-libc-rpath, -+ link-libc-tests-after-rpath-link, -+ link-libc-printers-tests): New. -+ (rtld-LDFLAGS, rtld-tests-LDFLAGS, link-libc-tests-rpath-link, -+ link-libc-tests): Use the new variables as required. -+ * Makerules ($(py-const)): New rule. -+ generated: Add $(py-const). -+ * README.pretty-printers: New file. -+ * Rules (tests-printers-programs, tests-printers-out, py-env): New. -+ (others): Depend on $(py-const). -+ (tests): Depend on $(tests-printers-programs) or -+ $(tests-printers-out), -+ as required. Pass $(tests-printers) to merge-test-results.sh. -+ * manual/install.texi: Add requirements for testing the pretty -+ printers. -+ * nptl/Makefile (gen-py-const-headers, pretty-printers, -+ tests-printers, CFLAGS-test-mutexattr-printers.c -+ CFLAGS-test-mutex-printers.c, CFLAGS-test-condattr-printers.c, -+ CFLAGS-test-cond-printers.c, CFLAGS-test-rwlockattr-printers.c -+ CFLAGS-test-rwlock-printers.c, tests-printers-libs): Define. -+ * nptl/nptl-printers.py: New file. 
-+ * nptl/nptl_lock_constants.pysym: Likewise. -+ * nptl/test-cond-printers.c: Likewise. -+ * nptl/test-cond-printers.py: Likewise. -+ * nptl/test-condattr-printers.c: Likewise. -+ * nptl/test-condattr-printers.py: Likewise. -+ * nptl/test-mutex-printers.c: Likewise. -+ * nptl/test-mutex-printers.py: Likewise. -+ * nptl/test-mutexattr-printers.c: Likewise. -+ * nptl/test-mutexattr-printers.py: Likewise. -+ * nptl/test-rwlock-printers.c: Likewise. -+ * nptl/test-rwlock-printers.py: Likewise. -+ * nptl/test-rwlockattr-printers.c: Likewise. -+ * nptl/test-rwlockattr-printers.py: Likewise. -+ * scripts/gen-py-const.awk: Likewise. -+ * scripts/test_printers_common.py: Likewise. -+ * scripts/test_printers_exceptions.py: Likewise. -+ - 2016-08-09 Torvald Riegel - - * include/atomic.h (atomic_fetch_and_relaxed, -diff --git a/INSTALL b/INSTALL -index ec3445f..dd62c86 100644 ---- a/INSTALL -+++ b/INSTALL -@@ -224,6 +224,33 @@ You can specify 'stop-on-test-failure=y' when running 'make check' to - make the test run stop and exit with an error status immediately when a - failure occurs. - -+ The GNU C Library pretty printers come with their own set of scripts -+for testing, which run together with the rest of the testsuite through -+'make check'. These scripts require the following tools to run -+successfully: -+ -+ * Python 2.7.6/3.4.3 or later -+ -+ Python is required for running the printers' test scripts. -+ -+ * PExpect 4.0 -+ -+ The printer tests drive GDB through test programs and compare its -+ output to the printers'. PExpect is used to capture the output of -+ GDB, and should be compatible with the Python version in your -+ system. -+ -+ * GDB 7.8 or later with support for Python 2.7.6/3.4.3 or later -+ -+ GDB itself needs to be configured with Python support in order to -+ use the pretty printers. Notice that your system having Python -+ available doesn't imply that GDB supports it, nor that your -+ system's Python and GDB's have the same version. -+ -+If these tools are absent, the printer tests will report themselves as -+'UNSUPPORTED'. Notice that some of the printer tests require the GNU C -+Library to be compiled with debugging symbols. -+ - To format the 'GNU C Library Reference Manual' for printing, type - 'make dvi'. You need a working TeX installation to do this. The - distribution builds the on-line formatted version of the manual, as Info -diff --git a/Makeconfig b/Makeconfig -index 03fd89c..2d92d94 100644 ---- a/Makeconfig -+++ b/Makeconfig -@@ -416,6 +416,11 @@ $(+link-pie-before-libc) $(rtld-tests-LDFLAGS) $(link-libc-tests) \ - $(+link-pie-after-libc) - $(call after-link,$@) - endef -+define +link-pie-printers-tests -+$(+link-pie-before-libc) $(built-rtld-LDFLAGS) $(link-libc-printers-tests) \ -+ $(+link-pie-after-libc) -+$(call after-link,$@) -+endef - endif - # Command for statically linking programs with the C library. 
- ifndef +link-static -@@ -445,7 +450,8 @@ ifeq (yes,$(build-pie-default)) - no-pie-ldflag = -no-pie - +link = $(+link-pie) - +link-tests = $(+link-pie-tests) --else -++link-printers-tests = $(+link-pie-printers-tests) -+else # not build-pie-default - +link-before-libc = $(CC) -nostdlib -nostartfiles -o $@ \ - $(sysdep-LDFLAGS) $(LDFLAGS) $(LDFLAGS-$(@F)) \ - $(combreloc-LDFLAGS) $(relro-LDFLAGS) $(hashstyle-LDFLAGS) \ -@@ -466,51 +472,87 @@ $(+link-before-libc) $(rtld-tests-LDFLAGS) $(link-libc-tests) \ - $(+link-after-libc) - $(call after-link,$@) - endef --endif --else -+define +link-printers-tests -+$(+link-before-libc) $(built-rtld-LDFLAGS) $(link-libc-printers-tests) \ -+ $(+link-after-libc) -+$(call after-link,$@) -+endef -+endif # build-pie-default -+else # build-static - +link = $(+link-static) - +link-tests = $(+link-static-tests) --endif --endif -++link-printers-tests = $(+link-static-tests) -+endif # build-shared -+endif # +link -+ -+# The pretty printer test programs need to be compiled without optimizations -+# so they won't confuse gdb. We could use either the 'GCC optimize' pragma -+# or the 'optimize' function attribute to achieve this; however, at least on -+# ARM, gcc always produces different debugging symbols when invoked with -+# a -O greater than 0 than when invoked with -O0, regardless of anything else -+# we're using to suppress optimizations. Therefore, we need to explicitly pass -+# -O0 to it through CFLAGS. -+# Additionally, the build system will try to -include $(common-objpfx)/config.h -+# when compiling the tests, which will throw an error if some special macros -+# (such as __OPTIMIZE__ and IS_IN_build) aren't defined. To avoid this, we -+# tell gcc to define IS_IN_build. -+CFLAGS-printers-tests := -O0 -ggdb3 -DIS_IN_build -+ - ifeq (yes,$(build-shared)) -+# These indicate whether to link using the built ld.so or the installed one. -+installed-rtld-LDFLAGS = -Wl,-dynamic-linker=$(rtlddir)/$(rtld-installed-name) -+built-rtld-LDFLAGS = -Wl,-dynamic-linker=$(elf-objpfx)ld.so -+ - ifndef rtld-LDFLAGS --rtld-LDFLAGS = -Wl,-dynamic-linker=$(rtlddir)/$(rtld-installed-name) -+rtld-LDFLAGS = $(installed-rtld-LDFLAGS) - endif -+ - ifndef rtld-tests-LDFLAGS - ifeq (yes,$(build-hardcoded-path-in-tests)) --rtld-tests-LDFLAGS = -Wl,-dynamic-linker=$(elf-objpfx)ld.so -+rtld-tests-LDFLAGS = $(built-rtld-LDFLAGS) - else --rtld-tests-LDFLAGS = $(rtld-LDFLAGS) --endif --endif --endif -+rtld-tests-LDFLAGS = $(installed-rtld-LDFLAGS) -+endif # build-hardcoded-path-in-tests -+endif # rtld-tests-LDFLAGS -+ -+endif # build-shared -+ - ifndef link-libc - ifeq (yes,$(build-shared)) - # We need the versioned name of libc.so in the deps of $(others) et al - # so that the symlink to libc.so is created before anything tries to - # run the linked programs. 
-+link-libc-rpath = -Wl,-rpath=$(rpath-link) - link-libc-rpath-link = -Wl,-rpath-link=$(rpath-link) -+ - ifeq (yes,$(build-hardcoded-path-in-tests)) --link-libc-tests-rpath-link = -Wl,-rpath=$(rpath-link) -+link-libc-tests-rpath-link = $(link-libc-rpath) - else - link-libc-tests-rpath-link = $(link-libc-rpath-link) --endif -+endif # build-hardcoded-path-in-tests -+ - link-libc-before-gnulib = $(common-objpfx)libc.so$(libc.so-version) \ - $(common-objpfx)$(patsubst %,$(libtype.oS),c) \ - $(as-needed) $(elf-objpfx)ld.so \ - $(no-as-needed) - link-libc = $(link-libc-rpath-link) $(link-libc-before-gnulib) $(gnulib) -+ -+link-libc-tests-after-rpath-link = $(link-libc-before-gnulib) $(gnulib-tests) - link-libc-tests = $(link-libc-tests-rpath-link) \ -- $(link-libc-before-gnulib) $(gnulib-tests) -+ $(link-libc-tests-after-rpath-link) -+# Pretty printer test programs always require rpath instead of rpath-link. -+link-libc-printers-tests = $(link-libc-rpath) \ -+ $(link-libc-tests-after-rpath-link) -+ - # This is how to find at build-time things that will be installed there. - rpath-dirs = math elf dlfcn nss nis rt resolv crypt mathvec - rpath-link = \ - $(common-objdir):$(subst $(empty) ,:,$(patsubst ../$(subdir),.,$(rpath-dirs:%=$(common-objpfx)%))) --else -+else # build-static - link-libc = $(common-objpfx)libc.a $(otherlibs) $(gnulib) $(common-objpfx)libc.a $(gnulib) - link-libc-tests = $(common-objpfx)libc.a $(otherlibs) $(gnulib-tests) $(common-objpfx)libc.a $(gnulib-tests) --endif --endif -+endif # build-shared -+endif # link-libc - - # Differences in the linkers on the various platforms. - LDFLAGS-rpath-ORIGIN = -Wl,-rpath,'$$ORIGIN' -diff --git a/Makerules b/Makerules -index be3c11b..b7e0f59 100644 ---- a/Makerules -+++ b/Makerules -@@ -214,6 +214,52 @@ sed-remove-dotdot := -e 's@ *\([^ \/$$][^ \]*\)@ $$(..)\1@g' \ - -e 's@^\([^ \/$$][^ \]*\)@$$(..)\1@g' - endif - -+ifdef gen-py-const-headers -+# We'll use a static pattern rule to match .pysym files with their -+# corresponding generated .py files. -+# The generated .py files go in the submodule's dir in the glibc build dir. -+py-const-files := $(patsubst %.pysym,%.py,$(gen-py-const-headers)) -+py-const-dir := $(objpfx) -+py-const := $(addprefix $(py-const-dir),$(py-const-files)) -+py-const-script := $(..)scripts/gen-py-const.awk -+ -+# This is a hack we use to generate .py files with constants for Python -+# pretty printers. It works the same way as gen-as-const. -+# See scripts/gen-py-const.awk for details on how the awk | gcc mechanism -+# works. -+# -+# $@.tmp and $@.tmp2 are temporary files we use to store the partial contents -+# of the target file. We do this instead of just writing on $@ because, if the -+# build process terminates prematurely, re-running Make wouldn't run this rule -+# since Make would see that the target file already exists (despite it being -+# incomplete). -+# -+# The sed line replaces "@name@SOME_NAME@value@SOME_VALUE@" strings from the -+# output of 'gcc -S' with "SOME_NAME = SOME_VALUE" strings. -+# The '-n' option, combined with the '/p' command, makes sed output only the -+# modified lines instead of the whole input file. The output is redirected -+# to a .py file; we'll import it in the pretty printers file to read -+# the constants generated by gen-py-const.awk. -+# The regex has two capturing groups, for SOME_NAME and SOME_VALUE -+# respectively. Notice SOME_VALUE may be prepended by a special character, -+# depending on the assembly syntax (e.g. 
immediates are prefixed by a '$' -+# in AT&T x86, and by a '#' in ARM). We discard it using a complemented set -+# before the second capturing group. -+$(py-const): $(py-const-dir)%.py: %.pysym $(py-const-script) \ -+ $(common-before-compile) -+ $(make-target-directory) -+ $(AWK) -f $(py-const-script) $< \ -+ | $(CC) -S -o $@.tmp $(CFLAGS) $(CPPFLAGS) -x c - -+ echo '# GENERATED FILE\n' > $@.tmp2 -+ echo '# Constant definitions for pretty printers.' >> $@.tmp2 -+ echo '# See gen-py-const.awk for details.\n' >> $@.tmp2 -+ sed -n -r 's/^.*@name@([^@]+)@value@[^[:xdigit:]Xx-]*([[:xdigit:]Xx-]+)@.*/\1 = \2/p' \ -+ $@.tmp >> $@.tmp2 -+ mv -f $@.tmp2 $@ -+ rm -f $@.tmp -+ -+generated += $(py-const) -+endif # gen-py-const-headers - - ifdef gen-as-const-headers - # Generating headers for assembly constants. -diff --git a/NEWS b/NEWS -index b0447e7..3002773 100644 ---- a/NEWS -+++ b/NEWS -@@ -5,6 +5,12 @@ See the end for copying conditions. - Please send GNU C library bug reports via - using `glibc' in the "product" field. - -+ -+* GDB pretty printers have been added for mutex and condition variable -+ structures in POSIX Threads. When installed and loaded in gdb these pretty -+ printers show various pthread variables in human-readable form when read -+ using the 'print' or 'display' commands in gdb. -+ - Version 2.24 - - * The minimum Linux kernel version that this version of the GNU C Library -diff --git a/README.pretty-printers b/README.pretty-printers -new file mode 100644 -index 0000000..8662900 ---- /dev/null -+++ b/README.pretty-printers -@@ -0,0 +1,169 @@ -+README for the glibc Python pretty printers -+=========================================== -+ -+Pretty printers are gdb extensions that allow it to print useful, human-readable -+information about a program's variables. For example, for a pthread_mutex_t -+gdb would usually output something like this: -+ -+(gdb) print mutex -+$1 = { -+ __data = { -+ __lock = 22020096, -+ __count = 0, -+ __owner = 0, -+ __nusers = 0, -+ __kind = 576, -+ __spins = 0, -+ __elision = 0, -+ __list = { -+ __prev = 0x0, -+ __next = 0x0 -+ } -+ }, -+ __size = "\000\000P\001", '\000' , "@\002", '\000' , -+ __align = 22020096 -+} -+ -+However, with a pretty printer gdb will output something like this: -+ -+(gdb) print mutex -+$1 = pthread_mutex_t = { -+ Type = Normal, -+ Status = Unlocked, -+ Robust = No, -+ Shared = No, -+ Protocol = Priority protect, -+ Priority ceiling = 42 -+} -+ -+Before printing a value, gdb will first check if there's a pretty printer -+registered for it. If there is, it'll use it, otherwise it'll print the value -+as usual. Pretty printers can be registered in various ways; for our purposes -+we register them for the current objfile by calling -+gdb.printing.register_pretty_printer(). -+ -+Currently our printers are based on gdb.RegexpCollectionPrettyPrinter, which -+means they'll be triggered if the type of the variable we're printing matches -+a given regular expression. For example, MutexPrinter will be triggered if -+our variable's type matches the regexp '^pthread_mutex_t$'. -+ -+Besides the printers themselves, each module may have a constants file which the -+printers will import. These constants are generated from C headers during the -+build process, and need to be in the Python search path when loading the -+printers. 
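For orientation only (not part of the patch; the values shown are indicative and the exact contents depend on the target architecture), the generated nptl constants module is plain Python of the form:

    # GENERATED FILE
    # Constant definitions for pretty printers.
    # See gen-py-const.awk for details.
    PTHREAD_MUTEX_NORMAL = 0
    PTHREAD_MUTEX_RECURSIVE = 1
    PTHREAD_MUTEX_ERRORCHECK = 2

and the printers pick it up with a plain "from nptl_lock_constants import *" once its directory is on sys.path.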
-+ -+ -+Installing and loading -+---------------------- -+ -+The pretty printers and their constant files may be installed in different paths -+for each distro, though gdb should be able to automatically load them by itself. -+When in doubt, you can use the 'info pretty-printer' gdb command to list the -+loaded pretty printers. -+ -+If the printers aren't automatically loaded for some reason, you should add the -+following to your .gdbinit: -+ -+python -+import sys -+sys.path.insert(0, '/path/to/constants/file/directory') -+end -+ -+source /path/to/printers.py -+ -+If you're building glibc manually, '/path/to/constants/file/directory' should be -+'/path/to/glibc-build/submodule', where 'submodule' is e.g. nptl. -+ -+ -+Testing -+------- -+ -+The pretty printers come with a small test suite based on PExpect, which is a -+Python module with Expect-like features for spawning and controlling interactive -+programs. Each printer has a corresponding C program and a Python script -+that uses PExpect to drive gdb through the program and compare its output to -+the expected printer's. -+ -+The tests run on the glibc host, which is assumed to have both gdb and PExpect; -+if any of those is absent the tests will fail with code 77 (UNSUPPORTED). -+Native builds can be tested simply by doing 'make check'; cross builds must use -+cross-test-ssh.sh as test-wrapper, like this: -+ -+make test-wrapper='/path/to/scripts/cross-test-ssh.sh user@host' check -+ -+(Remember to share the build system's filesystem with the glibc host's through -+NFS or something similar). -+ -+Running 'make check' on a cross build will only compile the test programs, -+without running the scripts. -+ -+ -+Adding new pretty printers -+-------------------------- -+ -+Adding new pretty printers to glibc requires following these steps: -+ -+1. Identify which constants must be generated from C headers, and write the -+corresponding .pysym file. See scripts/gen-py-const.awk for more information -+on how this works. The name of the .pysym file must be added to the -+'gen-py-const-headers' variable in your submodule's Makefile (without the .pysym -+extension). -+ -+2. Write the pretty printer code itself. For this you can follow the gdb -+Python API documentation, and use the existing printers as examples. The printer -+code must import the generated constants file (which will have the same name -+as your .pysym file). The names of the pretty printer files must be added -+to the 'pretty-printers' variable in your submodule's Makefile (without the .py -+extension). -+ -+3. Write the unit tests for your pretty printers. The build system calls each -+test script passing it the paths to the test program source, the test program -+binary, and the printer files you added to 'pretty-printers' in the previous -+step. The test scripts, in turn, must import scripts/test_printers_common -+and call the init_test function passing it, among other things, the name of the -+set of pretty printers to enable (as seen by running 'info pretty-printer'). -+You can use the existing unit tests as examples. -+ -+4. Add the names of the pretty printer tests to the 'tests-printers' variable -+in your submodule's Makefile (without extensions). In addition, for each test -+program you must define a corresponding CFLAGS-* variable and set it to -+$(CFLAGS-printers-tests) to ensure they're compiled correctly. 
For example, -+test-foo-printer.c requires the following: -+ -+CFLAGS-test-foo-printer.c := $(CFLAGS-printers-tests) -+ -+Finally, if your programs need to be linked with a specific library, you can add -+its name to the 'tests-printers-libs' variable in your submodule's Makefile. -+ -+ -+Known issues -+------------ -+ -+* Pretty printers are inherently coupled to the code they're targetting, thus -+any changes to the target code must also update the corresponding printers. -+On the plus side, the printer code itself may serve as a kind of documentation -+for the target code. -+ -+* Older versions of the gdb Python API have a bug where -+gdb.RegexpCollectionPrettyPrinter would not be able to get a value's real type -+if it was typedef'd. This would cause gdb to ignore the pretty printers for -+types like pthread_mutex_t, which is defined as: -+ -+typedef union -+{ -+ ... -+} pthread_mutex_t; -+ -+This was fixed in commit 1b588015839caafc608a6944a78aea170f5fb2f6, and released -+as part of gdb 7.8. However, typedef'ing an already typedef'd type may cause -+a similar issue, e.g.: -+ -+typedef pthread_mutex_t mutex; -+mutex a_mutex; -+ -+Here, trying to print a_mutex won't trigger the pthread_mutex_t printer. -+ -+* The test programs must be compiled without optimizations. This is necessary -+because the test scripts rely on the C code structure being preserved when -+stepping through the programs. Things like aggressive instruction reordering -+or optimizing variables out may make this kind of testing impossible. -diff --git a/Rules b/Rules -index 8306d36..10a6479 100644 ---- a/Rules -+++ b/Rules -@@ -85,16 +85,27 @@ common-generated += dummy.o dummy.c - - .PHONY: others tests bench bench-build - -+# Test programs for the pretty printers. -+tests-printers-programs := $(addprefix $(objpfx),$(tests-printers)) -+ -+# .out files with the output of running the pretty printer tests. -+tests-printers-out := $(patsubst %,$(objpfx)%.out,$(tests-printers)) -+ - ifeq ($(build-programs),yes) - others: $(addprefix $(objpfx),$(others) $(sysdep-others) $(extra-objs)) - else - others: $(addprefix $(objpfx),$(extra-objs)) - endif -+ -+# Generate constant files for Python pretty printers if required. -+others: $(py-const) -+ - ifeq ($(run-built-tests),no) --tests: $(addprefix $(objpfx),$(tests) $(test-srcs)) $(tests-special) -+tests: $(addprefix $(objpfx),$(tests) $(test-srcs)) $(tests-special) \ -+ $(tests-printers-programs) - xtests: tests $(xtests-special) - else --tests: $(tests:%=$(objpfx)%.out) $(tests-special) -+tests: $(tests:%=$(objpfx)%.out) $(tests-special) $(tests-printers-out) - xtests: tests $(xtests:%=$(objpfx)%.out) $(xtests-special) - endif - -@@ -102,7 +113,8 @@ tests-special-notdir = $(patsubst $(objpfx)%, %, $(tests-special)) - xtests-special-notdir = $(patsubst $(objpfx)%, %, $(xtests-special)) - tests: - $(..)scripts/merge-test-results.sh -s $(objpfx) $(subdir) \ -- $(sort $(tests) $(tests-special-notdir:.out=)) \ -+ $(sort $(tests) $(tests-special-notdir:.out=) \ -+ $(tests-printers)) \ - > $(objpfx)subdir-tests.sum - xtests: - $(..)scripts/merge-test-results.sh -s $(objpfx) $(subdir) \ -@@ -212,6 +224,32 @@ endif - - endif # tests - -+ifneq "$(strip $(tests-printers))" "" -+# We're defining this here for now; later it'll be defined at configure time -+# inside Makeconfig. -+PYTHON := python -+ -+# Static pattern rule for building the test programs for the pretty printers. 
-+$(tests-printers-programs): %: %.o $(tests-printers-libs) \ -+ $(sort $(filter $(common-objpfx)lib%,$(link-libc-static-tests))) \ -+ $(addprefix $(csu-objpfx),start.o) $(+preinit) $(+postinit) -+ $(+link-printers-tests) -+ -+# Add the paths to the generated constants file and test_common_printers.py -+# to PYTHONPATH so the test scripts can find them. -+py-env := PYTHONPATH=$(py-const-dir):$(..)scripts:$${PYTHONPATH} -+ -+# Static pattern rule that matches the test-* targets to their .c and .py -+# prerequisites. It'll run the corresponding test script for each test program -+# we compiled and place its output in the corresponding .out file. -+# The pretty printer files and test_common_printers.py must be present for all. -+$(tests-printers-out): $(objpfx)%.out: $(objpfx)% %.py %.c $(pretty-printers) \ -+ $(..)scripts/test_printers_common.py -+ $(test-wrapper-env) $(py-env) \ -+ $(PYTHON) $*.py $*.c $(objpfx)$* $(pretty-printers) > $@; \ -+ $(evaluate-test) -+endif -+ - - .PHONY: distclean realclean subdir_distclean subdir_realclean \ - subdir_clean subdir_mostlyclean subdir_testclean -diff --git a/manual/install.texi b/manual/install.texi -index 79ee45f..468479e 100644 ---- a/manual/install.texi -+++ b/manual/install.texi -@@ -256,6 +256,36 @@ occurred. You can specify @samp{stop-on-test-failure=y} when running - @code{make check} to make the test run stop and exit with an error - status immediately when a failure occurs. - -+The @glibcadj{} pretty printers come with their own set of scripts for testing, -+which run together with the rest of the testsuite through @code{make check}. -+These scripts require the following tools to run successfully: -+ -+@itemize @bullet -+@item -+Python 2.7.6/3.4.3 or later -+ -+Python is required for running the printers' test scripts. -+ -+@item PExpect 4.0 -+ -+The printer tests drive GDB through test programs and compare its output -+to the printers'. PExpect is used to capture the output of GDB, and should be -+compatible with the Python version in your system. -+ -+@item -+GDB 7.8 or later with support for Python 2.7.6/3.4.3 or later -+ -+GDB itself needs to be configured with Python support in order to use the -+pretty printers. Notice that your system having Python available doesn't imply -+that GDB supports it, nor that your system's Python and GDB's have the same -+version. -+@end itemize -+ -+@noindent -+If these tools are absent, the printer tests will report themselves as -+@code{UNSUPPORTED}. Notice that some of the printer tests require @theglibc{} -+to be compiled with debugging symbols. -+ - To format the @cite{GNU C Library Reference Manual} for printing, type - @w{@code{make dvi}}. You need a working @TeX{} installation to do - this. 
The distribution builds the on-line formatted version of the -diff --git a/nptl/Makefile b/nptl/Makefile -index 7dec4ed..49f6ba6 100644 ---- a/nptl/Makefile -+++ b/nptl/Makefile -@@ -308,6 +308,24 @@ gen-as-const-headers = pthread-errnos.sym \ - unwindbuf.sym \ - lowlevelrobustlock.sym pthread-pi-defines.sym - -+gen-py-const-headers := nptl_lock_constants.pysym -+pretty-printers := nptl-printers.py -+tests-printers := test-mutexattr-printers test-mutex-printers \ -+ test-condattr-printers test-cond-printers \ -+ test-rwlockattr-printers test-rwlock-printers -+ -+CFLAGS-test-mutexattr-printers.c := $(CFLAGS-printers-tests) -+CFLAGS-test-mutex-printers.c := $(CFLAGS-printers-tests) -+CFLAGS-test-condattr-printers.c := $(CFLAGS-printers-tests) -+CFLAGS-test-cond-printers.c := $(CFLAGS-printers-tests) -+CFLAGS-test-rwlockattr-printers.c := $(CFLAGS-printers-tests) -+CFLAGS-test-rwlock-printers.c := $(CFLAGS-printers-tests) -+ -+ifeq ($(build-shared),yes) -+tests-printers-libs := $(shared-thread-library) -+else -+tests-printers-libs := $(static-thread-library) -+endif - - LDFLAGS-pthread.so = -Wl,--enable-new-dtags,-z,nodelete,-z,initfirst - -diff --git a/nptl/nptl-printers.py b/nptl/nptl-printers.py -new file mode 100644 -index 0000000..e402f23 ---- /dev/null -+++ b/nptl/nptl-printers.py -@@ -0,0 +1,633 @@ -+# Pretty printers for the NPTL lock types. -+# -+# Copyright (C) 2016 Free Software Foundation, Inc. -+# This file is part of the GNU C Library. -+# -+# The GNU C Library is free software; you can redistribute it and/or -+# modify it under the terms of the GNU Lesser General Public -+# License as published by the Free Software Foundation; either -+# version 2.1 of the License, or (at your option) any later version. -+# -+# The GNU C Library is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+# Lesser General Public License for more details. -+# -+# You should have received a copy of the GNU Lesser General Public -+# License along with the GNU C Library; if not, see -+# . -+ -+"""This file contains the gdb pretty printers for the following types: -+ -+ * pthread_mutex_t -+ * pthread_mutexattr_t -+ * pthread_cond_t -+ * pthread_condattr_t -+ * pthread_rwlock_t -+ * pthread_rwlockattr_t -+ -+You can check which printers are registered and enabled by issuing the -+'info pretty-printer' gdb command. Printers should trigger automatically when -+trying to print a variable of one of the types mentioned above. -+""" -+ -+from __future__ import print_function -+ -+import gdb -+import gdb.printing -+from nptl_lock_constants import * -+ -+MUTEX_TYPES = { -+ PTHREAD_MUTEX_NORMAL: ('Type', 'Normal'), -+ PTHREAD_MUTEX_RECURSIVE: ('Type', 'Recursive'), -+ PTHREAD_MUTEX_ERRORCHECK: ('Type', 'Error check'), -+ PTHREAD_MUTEX_ADAPTIVE_NP: ('Type', 'Adaptive') -+} -+ -+class MutexPrinter(object): -+ """Pretty printer for pthread_mutex_t.""" -+ -+ def __init__(self, mutex): -+ """Initialize the printer's internal data structures. -+ -+ Args: -+ mutex: A gdb.value representing a pthread_mutex_t. -+ """ -+ -+ data = mutex['__data'] -+ self.lock = data['__lock'] -+ self.count = data['__count'] -+ self.owner = data['__owner'] -+ self.kind = data['__kind'] -+ self.values = [] -+ self.read_values() -+ -+ def to_string(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_mutex_t. 
-+ """ -+ -+ return 'pthread_mutex_t' -+ -+ def children(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_mutex_t. -+ """ -+ -+ return self.values -+ -+ def read_values(self): -+ """Read the mutex's info and store it in self.values. -+ -+ The data contained in self.values will be returned by the Iterator -+ created in self.children. -+ """ -+ -+ self.read_type() -+ self.read_status() -+ self.read_attributes() -+ self.read_misc_info() -+ -+ def read_type(self): -+ """Read the mutex's type.""" -+ -+ mutex_type = self.kind & PTHREAD_MUTEX_KIND_MASK -+ -+ # mutex_type must be casted to int because it's a gdb.Value -+ self.values.append(MUTEX_TYPES[int(mutex_type)]) -+ -+ def read_status(self): -+ """Read the mutex's status. -+ -+ For architectures which support lock elision, this method reads -+ whether the mutex appears as locked in memory (i.e. it may show it as -+ unlocked even after calling pthread_mutex_lock). -+ """ -+ -+ if self.kind == PTHREAD_MUTEX_DESTROYED: -+ self.values.append(('Status', 'Destroyed')) -+ elif self.kind & PTHREAD_MUTEX_ROBUST_NORMAL_NP: -+ self.read_status_robust() -+ else: -+ self.read_status_no_robust() -+ -+ def read_status_robust(self): -+ """Read the status of a robust mutex. -+ -+ In glibc robust mutexes are implemented in a very different way than -+ non-robust ones. This method reads their locking status, -+ whether it may have waiters, their registered owner (if any), -+ whether the owner is alive or not, and the status of the state -+ they're protecting. -+ """ -+ -+ if self.lock == PTHREAD_MUTEX_UNLOCKED: -+ self.values.append(('Status', 'Unlocked')) -+ else: -+ if self.lock & FUTEX_WAITERS: -+ self.values.append(('Status', 'Locked, possibly with waiters')) -+ else: -+ self.values.append(('Status', -+ 'Locked, possibly with no waiters')) -+ -+ if self.lock & FUTEX_OWNER_DIED: -+ self.values.append(('Owner ID', '%d (dead)' % self.owner)) -+ else: -+ self.values.append(('Owner ID', self.lock & FUTEX_TID_MASK)) -+ -+ if self.owner == PTHREAD_MUTEX_INCONSISTENT: -+ self.values.append(('State protected by this mutex', -+ 'Inconsistent')) -+ elif self.owner == PTHREAD_MUTEX_NOTRECOVERABLE: -+ self.values.append(('State protected by this mutex', -+ 'Not recoverable')) -+ -+ def read_status_no_robust(self): -+ """Read the status of a non-robust mutex. -+ -+ Read info on whether the mutex is locked, if it may have waiters -+ and its owner (if any). 
-+ """ -+ -+ lock_value = self.lock -+ -+ if self.kind & PTHREAD_MUTEX_PRIO_PROTECT_NP: -+ lock_value &= ~(PTHREAD_MUTEX_PRIO_CEILING_MASK) -+ -+ if lock_value == PTHREAD_MUTEX_UNLOCKED: -+ self.values.append(('Status', 'Unlocked')) -+ else: -+ if self.kind & PTHREAD_MUTEX_PRIO_INHERIT_NP: -+ waiters = self.lock & FUTEX_WAITERS -+ owner = self.lock & FUTEX_TID_MASK -+ else: -+ # Mutex protocol is PP or none -+ waiters = (self.lock != PTHREAD_MUTEX_LOCKED_NO_WAITERS) -+ owner = self.owner -+ -+ if waiters: -+ self.values.append(('Status', 'Locked, possibly with waiters')) -+ else: -+ self.values.append(('Status', -+ 'Locked, possibly with no waiters')) -+ -+ self.values.append(('Owner ID', owner)) -+ -+ def read_attributes(self): -+ """Read the mutex's attributes.""" -+ -+ if self.kind != PTHREAD_MUTEX_DESTROYED: -+ if self.kind & PTHREAD_MUTEX_ROBUST_NORMAL_NP: -+ self.values.append(('Robust', 'Yes')) -+ else: -+ self.values.append(('Robust', 'No')) -+ -+ # In glibc, robust mutexes always have their pshared flag set to -+ # 'shared' regardless of what the pshared flag of their -+ # mutexattr was. Therefore a robust mutex will act as shared -+ # even if it was initialized with a 'private' mutexattr. -+ if self.kind & PTHREAD_MUTEX_PSHARED_BIT: -+ self.values.append(('Shared', 'Yes')) -+ else: -+ self.values.append(('Shared', 'No')) -+ -+ if self.kind & PTHREAD_MUTEX_PRIO_INHERIT_NP: -+ self.values.append(('Protocol', 'Priority inherit')) -+ elif self.kind & PTHREAD_MUTEX_PRIO_PROTECT_NP: -+ prio_ceiling = ((self.lock & PTHREAD_MUTEX_PRIO_CEILING_MASK) -+ >> PTHREAD_MUTEX_PRIO_CEILING_SHIFT) -+ -+ self.values.append(('Protocol', 'Priority protect')) -+ self.values.append(('Priority ceiling', prio_ceiling)) -+ else: -+ # PTHREAD_PRIO_NONE -+ self.values.append(('Protocol', 'None')) -+ -+ def read_misc_info(self): -+ """Read miscellaneous info on the mutex. -+ -+ For now this reads the number of times a recursive mutex was locked -+ by the same thread. -+ """ -+ -+ mutex_type = self.kind & PTHREAD_MUTEX_KIND_MASK -+ -+ if mutex_type == PTHREAD_MUTEX_RECURSIVE and self.count > 1: -+ self.values.append(('Times locked recursively', self.count)) -+ -+class MutexAttributesPrinter(object): -+ """Pretty printer for pthread_mutexattr_t. -+ -+ In the NPTL this is a type that's always casted to struct pthread_mutexattr -+ which has a single 'mutexkind' field containing the actual attributes. -+ """ -+ -+ def __init__(self, mutexattr): -+ """Initialize the printer's internal data structures. -+ -+ Args: -+ mutexattr: A gdb.value representing a pthread_mutexattr_t. -+ """ -+ -+ self.values = [] -+ -+ try: -+ mutexattr_struct = gdb.lookup_type('struct pthread_mutexattr') -+ self.mutexattr = mutexattr.cast(mutexattr_struct)['mutexkind'] -+ self.read_values() -+ except gdb.error: -+ # libpthread doesn't have debug symbols, thus we can't find the -+ # real struct type. Just print the union members. -+ self.values.append(('__size', mutexattr['__size'])) -+ self.values.append(('__align', mutexattr['__align'])) -+ -+ def to_string(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_mutexattr_t. -+ """ -+ -+ return 'pthread_mutexattr_t' -+ -+ def children(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_mutexattr_t. -+ """ -+ -+ return self.values -+ -+ def read_values(self): -+ """Read the mutexattr's info and store it in self.values. 
-+ -+ The data contained in self.values will be returned by the Iterator -+ created in self.children. -+ """ -+ -+ mutexattr_type = (self.mutexattr -+ & ~PTHREAD_MUTEXATTR_FLAG_BITS -+ & ~PTHREAD_MUTEX_NO_ELISION_NP) -+ -+ # mutexattr_type must be casted to int because it's a gdb.Value -+ self.values.append(MUTEX_TYPES[int(mutexattr_type)]) -+ -+ if self.mutexattr & PTHREAD_MUTEXATTR_FLAG_ROBUST: -+ self.values.append(('Robust', 'Yes')) -+ else: -+ self.values.append(('Robust', 'No')) -+ -+ if self.mutexattr & PTHREAD_MUTEXATTR_FLAG_PSHARED: -+ self.values.append(('Shared', 'Yes')) -+ else: -+ self.values.append(('Shared', 'No')) -+ -+ protocol = ((self.mutexattr & PTHREAD_MUTEXATTR_PROTOCOL_MASK) >> -+ PTHREAD_MUTEXATTR_PROTOCOL_SHIFT) -+ -+ if protocol == PTHREAD_PRIO_NONE: -+ self.values.append(('Protocol', 'None')) -+ elif protocol == PTHREAD_PRIO_INHERIT: -+ self.values.append(('Protocol', 'Priority inherit')) -+ elif protocol == PTHREAD_PRIO_PROTECT: -+ self.values.append(('Protocol', 'Priority protect')) -+ -+CLOCK_IDS = { -+ CLOCK_REALTIME: 'CLOCK_REALTIME', -+ CLOCK_MONOTONIC: 'CLOCK_MONOTONIC', -+ CLOCK_PROCESS_CPUTIME_ID: 'CLOCK_PROCESS_CPUTIME_ID', -+ CLOCK_THREAD_CPUTIME_ID: 'CLOCK_THREAD_CPUTIME_ID', -+ CLOCK_MONOTONIC_RAW: 'CLOCK_MONOTONIC_RAW', -+ CLOCK_REALTIME_COARSE: 'CLOCK_REALTIME_COARSE', -+ CLOCK_MONOTONIC_COARSE: 'CLOCK_MONOTONIC_COARSE' -+} -+ -+class ConditionVariablePrinter(object): -+ """Pretty printer for pthread_cond_t.""" -+ -+ def __init__(self, cond): -+ """Initialize the printer's internal data structures. -+ -+ Args: -+ cond: A gdb.value representing a pthread_cond_t. -+ """ -+ -+ # Since PTHREAD_COND_SHARED is an integer, we need to cast it to void * -+ # to be able to compare it to the condvar's __data.__mutex member. -+ # -+ # While it looks like self.shared_value should be a class variable, -+ # that would result in it having an incorrect size if we're loading -+ # these printers through .gdbinit for a 64-bit objfile in AMD64. -+ # This is because gdb initially assumes the pointer size to be 4 bytes, -+ # and only sets it to 8 after loading the 64-bit objfiles. Since -+ # .gdbinit runs before any objfiles are loaded, this would effectively -+ # make self.shared_value have a size of 4, thus breaking later -+ # comparisons with pointers whose types are looked up at runtime. -+ void_ptr_type = gdb.lookup_type('void').pointer() -+ self.shared_value = gdb.Value(PTHREAD_COND_SHARED).cast(void_ptr_type) -+ -+ data = cond['__data'] -+ self.total_seq = data['__total_seq'] -+ self.mutex = data['__mutex'] -+ self.nwaiters = data['__nwaiters'] -+ self.values = [] -+ -+ self.read_values() -+ -+ def to_string(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_cond_t. -+ """ -+ -+ return 'pthread_cond_t' -+ -+ def children(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_cond_t. -+ """ -+ -+ return self.values -+ -+ def read_values(self): -+ """Read the condvar's info and store it in self.values. -+ -+ The data contained in self.values will be returned by the Iterator -+ created in self.children. -+ """ -+ -+ self.read_status() -+ self.read_attributes() -+ self.read_mutex_info() -+ -+ def read_status(self): -+ """Read the status of the condvar. -+ -+ This method reads whether the condvar is destroyed and how many threads -+ are waiting for it. 
-+ """ -+ -+ if self.total_seq == PTHREAD_COND_DESTROYED: -+ self.values.append(('Status', 'Destroyed')) -+ -+ self.values.append(('Threads waiting for this condvar', -+ self.nwaiters >> COND_NWAITERS_SHIFT)) -+ -+ def read_attributes(self): -+ """Read the condvar's attributes.""" -+ -+ clock_id = self.nwaiters & ((1 << COND_NWAITERS_SHIFT) - 1) -+ -+ # clock_id must be casted to int because it's a gdb.Value -+ self.values.append(('Clock ID', CLOCK_IDS[int(clock_id)])) -+ -+ shared = (self.mutex == self.shared_value) -+ -+ if shared: -+ self.values.append(('Shared', 'Yes')) -+ else: -+ self.values.append(('Shared', 'No')) -+ -+ def read_mutex_info(self): -+ """Read the data of the mutex this condvar is bound to. -+ -+ A pthread_cond_t's __data.__mutex member is a void * which -+ must be casted to pthread_mutex_t *. For shared condvars, this -+ member isn't recorded and has a special value instead. -+ """ -+ -+ if self.mutex and self.mutex != self.shared_value: -+ mutex_type = gdb.lookup_type('pthread_mutex_t') -+ mutex = self.mutex.cast(mutex_type.pointer()).dereference() -+ -+ self.values.append(('Mutex', mutex)) -+ -+class ConditionVariableAttributesPrinter(object): -+ """Pretty printer for pthread_condattr_t. -+ -+ In the NPTL this is a type that's always casted to struct pthread_condattr, -+ which has a single 'value' field containing the actual attributes. -+ """ -+ -+ def __init__(self, condattr): -+ """Initialize the printer's internal data structures. -+ -+ Args: -+ condattr: A gdb.value representing a pthread_condattr_t. -+ """ -+ -+ self.values = [] -+ -+ try: -+ condattr_struct = gdb.lookup_type('struct pthread_condattr') -+ self.condattr = condattr.cast(condattr_struct)['value'] -+ self.read_values() -+ except gdb.error: -+ # libpthread doesn't have debug symbols, thus we can't find the -+ # real struct type. Just print the union members. -+ self.values.append(('__size', condattr['__size'])) -+ self.values.append(('__align', condattr['__align'])) -+ -+ def to_string(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_condattr_t. -+ """ -+ -+ return 'pthread_condattr_t' -+ -+ def children(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_condattr_t. -+ """ -+ -+ return self.values -+ -+ def read_values(self): -+ """Read the condattr's info and store it in self.values. -+ -+ The data contained in self.values will be returned by the Iterator -+ created in self.children. -+ """ -+ -+ clock_id = self.condattr & ((1 << COND_NWAITERS_SHIFT) - 1) -+ -+ # clock_id must be casted to int because it's a gdb.Value -+ self.values.append(('Clock ID', CLOCK_IDS[int(clock_id)])) -+ -+ if self.condattr & 1: -+ self.values.append(('Shared', 'Yes')) -+ else: -+ self.values.append(('Shared', 'No')) -+ -+class RWLockPrinter(object): -+ """Pretty printer for pthread_rwlock_t.""" -+ -+ def __init__(self, rwlock): -+ """Initialize the printer's internal data structures. -+ -+ Args: -+ rwlock: A gdb.value representing a pthread_rwlock_t. -+ """ -+ -+ data = rwlock['__data'] -+ self.readers = data['__nr_readers'] -+ self.queued_readers = data['__nr_readers_queued'] -+ self.queued_writers = data['__nr_writers_queued'] -+ self.writer_id = data['__writer'] -+ self.shared = data['__shared'] -+ self.prefers_writers = data['__flags'] -+ self.values = [] -+ self.read_values() -+ -+ def to_string(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_rwlock_t. 
-+ """ -+ -+ return 'pthread_rwlock_t' -+ -+ def children(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_rwlock_t. -+ """ -+ -+ return self.values -+ -+ def read_values(self): -+ """Read the rwlock's info and store it in self.values. -+ -+ The data contained in self.values will be returned by the Iterator -+ created in self.children. -+ """ -+ -+ self.read_status() -+ self.read_attributes() -+ -+ def read_status(self): -+ """Read the status of the rwlock.""" -+ -+ # Right now pthread_rwlock_destroy doesn't do anything, so there's no -+ # way to check if an rwlock is destroyed. -+ -+ if self.writer_id: -+ self.values.append(('Status', 'Locked (Write)')) -+ self.values.append(('Writer ID', self.writer_id)) -+ elif self.readers: -+ self.values.append(('Status', 'Locked (Read)')) -+ self.values.append(('Readers', self.readers)) -+ else: -+ self.values.append(('Status', 'Unlocked')) -+ -+ self.values.append(('Queued readers', self.queued_readers)) -+ self.values.append(('Queued writers', self.queued_writers)) -+ -+ def read_attributes(self): -+ """Read the attributes of the rwlock.""" -+ -+ if self.shared: -+ self.values.append(('Shared', 'Yes')) -+ else: -+ self.values.append(('Shared', 'No')) -+ -+ if self.prefers_writers: -+ self.values.append(('Prefers', 'Writers')) -+ else: -+ self.values.append(('Prefers', 'Readers')) -+ -+class RWLockAttributesPrinter(object): -+ """Pretty printer for pthread_rwlockattr_t. -+ -+ In the NPTL this is a type that's always casted to -+ struct pthread_rwlockattr, which has two fields ('lockkind' and 'pshared') -+ containing the actual attributes. -+ """ -+ -+ def __init__(self, rwlockattr): -+ """Initialize the printer's internal data structures. -+ -+ Args: -+ rwlockattr: A gdb.value representing a pthread_rwlockattr_t. -+ """ -+ -+ self.values = [] -+ -+ try: -+ rwlockattr_struct = gdb.lookup_type('struct pthread_rwlockattr') -+ self.rwlockattr = rwlockattr.cast(rwlockattr_struct) -+ self.read_values() -+ except gdb.error: -+ # libpthread doesn't have debug symbols, thus we can't find the -+ # real struct type. Just print the union members. -+ self.values.append(('__size', rwlockattr['__size'])) -+ self.values.append(('__align', rwlockattr['__align'])) -+ -+ def to_string(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_rwlockattr_t. -+ """ -+ -+ return 'pthread_rwlockattr_t' -+ -+ def children(self): -+ """gdb API function. -+ -+ This is called from gdb when we try to print a pthread_rwlockattr_t. -+ """ -+ -+ return self.values -+ -+ def read_values(self): -+ """Read the rwlockattr's info and store it in self.values. -+ -+ The data contained in self.values will be returned by the Iterator -+ created in self.children. -+ """ -+ -+ rwlock_type = self.rwlockattr['lockkind'] -+ shared = self.rwlockattr['pshared'] -+ -+ if shared == PTHREAD_PROCESS_SHARED: -+ self.values.append(('Shared', 'Yes')) -+ else: -+ # PTHREAD_PROCESS_PRIVATE -+ self.values.append(('Shared', 'No')) -+ -+ if (rwlock_type == PTHREAD_RWLOCK_PREFER_READER_NP or -+ rwlock_type == PTHREAD_RWLOCK_PREFER_WRITER_NP): -+ # This is a known bug. Using PTHREAD_RWLOCK_PREFER_WRITER_NP will -+ # still make the rwlock prefer readers. 
-+ self.values.append(('Prefers', 'Readers')) -+ elif rwlock_type == PTHREAD_RWLOCK_PREFER_WRITER_NONRECURSIVE_NP: -+ self.values.append(('Prefers', 'Writers')) -+ -+def register(objfile): -+ """Register the pretty printers within the given objfile.""" -+ -+ printer = gdb.printing.RegexpCollectionPrettyPrinter('glibc-pthread-locks') -+ -+ printer.add_printer('pthread_mutex_t', r'^pthread_mutex_t$', -+ MutexPrinter) -+ printer.add_printer('pthread_mutexattr_t', r'^pthread_mutexattr_t$', -+ MutexAttributesPrinter) -+ printer.add_printer('pthread_cond_t', r'^pthread_cond_t$', -+ ConditionVariablePrinter) -+ printer.add_printer('pthread_condattr_t', r'^pthread_condattr_t$', -+ ConditionVariableAttributesPrinter) -+ printer.add_printer('pthread_rwlock_t', r'^pthread_rwlock_t$', -+ RWLockPrinter) -+ printer.add_printer('pthread_rwlockattr_t', r'^pthread_rwlockattr_t$', -+ RWLockAttributesPrinter) -+ -+ if objfile == None: -+ objfile = gdb -+ -+ gdb.printing.register_pretty_printer(objfile, printer) -+ -+register(gdb.current_objfile()) -diff --git a/nptl/nptl_lock_constants.pysym b/nptl/nptl_lock_constants.pysym -new file mode 100644 -index 0000000..303ec61 ---- /dev/null -+++ b/nptl/nptl_lock_constants.pysym -@@ -0,0 +1,75 @@ -+#include -+ -+-- Mutex types -+PTHREAD_MUTEX_KIND_MASK PTHREAD_MUTEX_KIND_MASK_NP -+PTHREAD_MUTEX_NORMAL -+PTHREAD_MUTEX_RECURSIVE PTHREAD_MUTEX_RECURSIVE_NP -+PTHREAD_MUTEX_ERRORCHECK PTHREAD_MUTEX_ERRORCHECK_NP -+PTHREAD_MUTEX_ADAPTIVE_NP -+ -+-- Mutex status -+-- These are hardcoded all over the code; there are no enums/macros for them. -+PTHREAD_MUTEX_DESTROYED -1 -+PTHREAD_MUTEX_UNLOCKED 0 -+PTHREAD_MUTEX_LOCKED_NO_WAITERS 1 -+ -+-- For robust mutexes -+PTHREAD_MUTEX_INCONSISTENT -+PTHREAD_MUTEX_NOTRECOVERABLE -+FUTEX_OWNER_DIED -+ -+-- For robust and PI mutexes -+FUTEX_WAITERS -+FUTEX_TID_MASK -+ -+-- Mutex attributes -+PTHREAD_MUTEX_ROBUST_NORMAL_NP -+PTHREAD_MUTEX_PRIO_INHERIT_NP -+PTHREAD_MUTEX_PRIO_PROTECT_NP -+PTHREAD_MUTEX_PSHARED_BIT -+PTHREAD_MUTEX_PRIO_CEILING_SHIFT -+PTHREAD_MUTEX_PRIO_CEILING_MASK -+ -+-- Mutex attribute flags -+PTHREAD_MUTEXATTR_PROTOCOL_SHIFT -+PTHREAD_MUTEXATTR_PROTOCOL_MASK -+PTHREAD_MUTEXATTR_PRIO_CEILING_MASK -+PTHREAD_MUTEXATTR_FLAG_ROBUST -+PTHREAD_MUTEXATTR_FLAG_PSHARED -+PTHREAD_MUTEXATTR_FLAG_BITS -+PTHREAD_MUTEX_NO_ELISION_NP -+ -+-- Priority protocols -+PTHREAD_PRIO_NONE -+PTHREAD_PRIO_INHERIT -+PTHREAD_PRIO_PROTECT -+ -+-- These values are hardcoded as well: -+-- Value of __mutex for shared condvars. -+PTHREAD_COND_SHARED (void *)~0l -+ -+-- Value of __total_seq for destroyed condvars. -+PTHREAD_COND_DESTROYED -1ull -+ -+-- __nwaiters encodes the number of threads waiting on a condvar -+-- and the clock ID. -+-- __nwaiters >> COND_NWAITERS_SHIFT gives us the number of waiters. -+COND_NWAITERS_SHIFT -+ -+-- Condvar clock IDs -+CLOCK_REALTIME -+CLOCK_MONOTONIC -+CLOCK_PROCESS_CPUTIME_ID -+CLOCK_THREAD_CPUTIME_ID -+CLOCK_MONOTONIC_RAW -+CLOCK_REALTIME_COARSE -+CLOCK_MONOTONIC_COARSE -+ -+-- Rwlock attributes -+PTHREAD_RWLOCK_PREFER_READER_NP -+PTHREAD_RWLOCK_PREFER_WRITER_NP -+PTHREAD_RWLOCK_PREFER_WRITER_NONRECURSIVE_NP -+ -+-- 'Shared' attribute values -+PTHREAD_PROCESS_PRIVATE -+PTHREAD_PROCESS_SHARED -diff --git a/nptl/test-cond-printers.c b/nptl/test-cond-printers.c -new file mode 100644 -index 0000000..0f2a5f4 ---- /dev/null -+++ b/nptl/test-cond-printers.c -@@ -0,0 +1,57 @@ -+/* Helper program for testing the pthread_cond_t pretty printer. -+ -+ Copyright (C) 2016 Free Software Foundation, Inc. 
-+ This file is part of the GNU C Library. -+ -+ The GNU C Library is free software; you can redistribute it and/or -+ modify it under the terms of the GNU Lesser General Public -+ License as published by the Free Software Foundation; either -+ version 2.1 of the License, or (at your option) any later version. -+ -+ The GNU C Library is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public -+ License along with the GNU C Library; if not, see -+ . */ -+ -+/* Keep the calls to the pthread_* functions on separate lines to make it easy -+ to advance through the program using the gdb 'next' command. */ -+ -+#include -+#include -+ -+#define PASS 0 -+#define FAIL 1 -+ -+static int test_status_destroyed (pthread_cond_t *condvar); -+ -+int -+main (void) -+{ -+ pthread_cond_t condvar; -+ pthread_condattr_t attr; -+ int result = FAIL; -+ -+ if (pthread_condattr_init (&attr) == 0 -+ && test_status_destroyed (&condvar) == PASS) -+ result = PASS; -+ /* Else, one of the pthread_cond* functions failed. */ -+ -+ return result; -+} -+ -+/* Initializes CONDVAR, then destroys it. */ -+static int -+test_status_destroyed (pthread_cond_t *condvar) -+{ -+ int result = FAIL; -+ -+ if (pthread_cond_init (condvar, NULL) == 0 -+ && pthread_cond_destroy (condvar) == 0) -+ result = PASS; /* Test status (destroyed). */ -+ -+ return result; -+} -diff --git a/nptl/test-cond-printers.py b/nptl/test-cond-printers.py -new file mode 100644 -index 0000000..af0e12e ---- /dev/null -+++ b/nptl/test-cond-printers.py -@@ -0,0 +1,50 @@ -+# Common tests for the ConditionVariablePrinter class. -+# -+# Copyright (C) 2016 Free Software Foundation, Inc. -+# This file is part of the GNU C Library. -+# -+# The GNU C Library is free software; you can redistribute it and/or -+# modify it under the terms of the GNU Lesser General Public -+# License as published by the Free Software Foundation; either -+# version 2.1 of the License, or (at your option) any later version. -+# -+# The GNU C Library is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+# Lesser General Public License for more details. -+# -+# You should have received a copy of the GNU Lesser General Public -+# License along with the GNU C Library; if not, see -+# . 
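All of the printer classes added by this patch follow the same gdb pretty-printer protocol: __init__ receives the gdb.Value being printed, to_string() supplies the summary line, and children() returns an iterable of (name, value) pairs that gdb renders as 'name = value'; a RegexpCollectionPrettyPrinter then maps type names to printer classes. A minimal sketch of that protocol and of the registration step, using a hypothetical example_lock_t type that is not part of glibc or of this patch, might look like this:

import gdb
import gdb.printing

class ExampleLockPrinter(object):
    """Sketch of a printer for a hypothetical 'example_lock_t' type."""

    def __init__(self, lock):
        # 'lock' is the gdb.Value being printed; cache the fields we report.
        self.owner = lock['owner']

    def to_string(self):
        # Summary line shown before the children.
        return 'example_lock_t'

    def children(self):
        # Each (name, value) pair is rendered by gdb as 'name = value'.
        return [('Owner ID', self.owner)]

def register(objfile):
    printer = gdb.printing.RegexpCollectionPrettyPrinter('example-locks')
    printer.add_printer('example_lock_t', r'^example_lock_t$',
                        ExampleLockPrinter)
    # When loaded outside auto-loading, current_objfile() is None, so fall
    # back to registering the printer globally, as the patch itself does.
    if objfile is None:
        objfile = gdb
    gdb.printing.register_pretty_printer(objfile, printer)

register(gdb.current_objfile())

Loading such a file with gdb's 'source' command (which is what init_test() in the test harness below does) makes 'print' show the structured view instead of the raw union contents.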
-+ -+import sys -+ -+from test_printers_common import * -+ -+test_source = sys.argv[1] -+test_bin = sys.argv[2] -+printer_files = sys.argv[3:] -+printer_names = ['global glibc-pthread-locks'] -+ -+try: -+ init_test(test_bin, printer_files, printer_names) -+ go_to_main() -+ -+ var = 'condvar' -+ to_string = 'pthread_cond_t' -+ -+ break_at(test_source, 'Test status (destroyed)') -+ continue_cmd() # Go to test_status_destroyed -+ test_printer(var, to_string, {'Status': 'Destroyed'}) -+ -+ continue_cmd() # Exit -+ -+except (NoLineError, pexpect.TIMEOUT) as exception: -+ print('Error: {0}'.format(exception)) -+ result = FAIL -+ -+else: -+ print('Test succeeded.') -+ result = PASS -+ -+exit(result) -diff --git a/nptl/test-condattr-printers.c b/nptl/test-condattr-printers.c -new file mode 100644 -index 0000000..4db4098 ---- /dev/null -+++ b/nptl/test-condattr-printers.c -@@ -0,0 +1,94 @@ -+/* Helper program for testing the pthread_cond_t and pthread_condattr_t -+ pretty printers. -+ -+ Copyright (C) 2016 Free Software Foundation, Inc. -+ This file is part of the GNU C Library. -+ -+ The GNU C Library is free software; you can redistribute it and/or -+ modify it under the terms of the GNU Lesser General Public -+ License as published by the Free Software Foundation; either -+ version 2.1 of the License, or (at your option) any later version. -+ -+ The GNU C Library is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public -+ License along with the GNU C Library; if not, see -+ . */ -+ -+/* Keep the calls to the pthread_* functions on separate lines to make it easy -+ to advance through the program using the gdb 'next' command. */ -+ -+#include -+#include -+ -+#define PASS 0 -+#define FAIL 1 -+ -+static int condvar_reinit (pthread_cond_t *condvar, -+ const pthread_condattr_t *attr); -+static int test_setclock (pthread_cond_t *condvar, pthread_condattr_t *attr); -+static int test_setpshared (pthread_cond_t *condvar, pthread_condattr_t *attr); -+ -+/* Need these so we don't have lines longer than 79 chars. */ -+#define SET_SHARED(attr, shared) pthread_condattr_setpshared (attr, shared) -+ -+int -+main (void) -+{ -+ pthread_cond_t condvar; -+ pthread_condattr_t attr; -+ int result = FAIL; -+ -+ if (pthread_condattr_init (&attr) == 0 -+ && pthread_cond_init (&condvar, NULL) == 0 -+ && test_setclock (&condvar, &attr) == PASS -+ && test_setpshared (&condvar, &attr) == PASS) -+ result = PASS; -+ /* Else, one of the pthread_cond* functions failed. */ -+ -+ return result; -+} -+ -+/* Destroys CONDVAR and re-initializes it using ATTR. */ -+static int -+condvar_reinit (pthread_cond_t *condvar, const pthread_condattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (pthread_cond_destroy (condvar) == 0 -+ && pthread_cond_init (condvar, attr) == 0) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests setting the clock ID attribute. */ -+static int -+test_setclock (pthread_cond_t *condvar, pthread_condattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (pthread_condattr_setclock (attr, CLOCK_REALTIME) == 0 /* Set clock. */ -+ && condvar_reinit (condvar, attr) == PASS) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests setting whether the condvar can be shared between processes. 
*/ -+static int -+test_setpshared (pthread_cond_t *condvar, pthread_condattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (SET_SHARED (attr, PTHREAD_PROCESS_SHARED) == 0 /* Set shared. */ -+ && condvar_reinit (condvar, attr) == PASS -+ && SET_SHARED (attr, PTHREAD_PROCESS_PRIVATE) == 0 -+ && condvar_reinit (condvar, attr) == PASS) -+ result = PASS; -+ -+ return result; -+} -diff --git a/nptl/test-condattr-printers.py b/nptl/test-condattr-printers.py -new file mode 100644 -index 0000000..7ea01db ---- /dev/null -+++ b/nptl/test-condattr-printers.py -@@ -0,0 +1,71 @@ -+# Common tests for the ConditionVariablePrinter and -+# ConditionVariableAttributesPrinter classes. -+# -+# Copyright (C) 2016 Free Software Foundation, Inc. -+# This file is part of the GNU C Library. -+# -+# The GNU C Library is free software; you can redistribute it and/or -+# modify it under the terms of the GNU Lesser General Public -+# License as published by the Free Software Foundation; either -+# version 2.1 of the License, or (at your option) any later version. -+# -+# The GNU C Library is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+# Lesser General Public License for more details. -+# -+# You should have received a copy of the GNU Lesser General Public -+# License along with the GNU C Library; if not, see -+# . -+ -+import sys -+ -+from test_printers_common import * -+ -+test_source = sys.argv[1] -+test_bin = sys.argv[2] -+printer_files = sys.argv[3:] -+printer_names = ['global glibc-pthread-locks'] -+ -+try: -+ init_test(test_bin, printer_files, printer_names) -+ go_to_main() -+ -+ check_debug_symbol('struct pthread_condattr') -+ -+ condvar_var = 'condvar' -+ condvar_to_string = 'pthread_cond_t' -+ -+ attr_var = 'attr' -+ attr_to_string = 'pthread_condattr_t' -+ -+ break_at(test_source, 'Set clock') -+ continue_cmd() # Go to test_setclock -+ next_cmd(2) -+ test_printer(condvar_var, condvar_to_string, {'Clock ID': 'CLOCK_REALTIME'}) -+ test_printer(attr_var, attr_to_string, {'Clock ID': 'CLOCK_REALTIME'}) -+ -+ break_at(test_source, 'Set shared') -+ continue_cmd() # Go to test_setpshared -+ next_cmd(2) -+ test_printer(condvar_var, condvar_to_string, {'Shared': 'Yes'}) -+ test_printer(attr_var, attr_to_string, {'Shared': 'Yes'}) -+ next_cmd(2) -+ test_printer(condvar_var, condvar_to_string, {'Shared': 'No'}) -+ test_printer(attr_var, attr_to_string, {'Shared': 'No'}) -+ -+ continue_cmd() # Exit -+ -+except (NoLineError, pexpect.TIMEOUT) as exception: -+ print('Error: {0}'.format(exception)) -+ result = FAIL -+ -+except DebugError as exception: -+ print(exception) -+ result = UNSUPPORTED -+ -+else: -+ print('Test succeeded.') -+ result = PASS -+ -+exit(result) -diff --git a/nptl/test-mutex-printers.c b/nptl/test-mutex-printers.c -new file mode 100644 -index 0000000..b973e82 ---- /dev/null -+++ b/nptl/test-mutex-printers.c -@@ -0,0 +1,151 @@ -+/* Helper program for testing the pthread_mutex_t pretty printer. -+ -+ Copyright (C) 2016 Free Software Foundation, Inc. -+ This file is part of the GNU C Library. -+ -+ The GNU C Library is free software; you can redistribute it and/or -+ modify it under the terms of the GNU Lesser General Public -+ License as published by the Free Software Foundation; either -+ version 2.1 of the License, or (at your option) any later version. 
-+ -+ The GNU C Library is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public -+ License along with the GNU C Library; if not, see -+ . */ -+ -+/* Keep the calls to the pthread_* functions on separate lines to make it easy -+ to advance through the program using the gdb 'next' command. */ -+ -+#include -+#include -+#include -+ -+#define PASS 0 -+#define FAIL 1 -+ -+static int test_status_destroyed (pthread_mutex_t *mutex); -+static int test_status_no_robust (pthread_mutex_t *mutex, -+ pthread_mutexattr_t *attr); -+static int test_status_robust (pthread_mutex_t *mutex, -+ pthread_mutexattr_t *attr); -+static int test_locking_state_robust (pthread_mutex_t *mutex); -+static void *thread_func (void *arg); -+static int test_recursive_locks (pthread_mutex_t *mutex, -+ pthread_mutexattr_t *attr); -+ -+int -+main (void) -+{ -+ pthread_mutex_t mutex; -+ pthread_mutexattr_t attr; -+ int result = FAIL; -+ -+ if (pthread_mutexattr_init (&attr) == 0 -+ && test_status_destroyed (&mutex) == PASS -+ && test_status_no_robust (&mutex, &attr) == PASS -+ && test_status_robust (&mutex, &attr) == PASS -+ && test_recursive_locks (&mutex, &attr) == PASS) -+ result = PASS; -+ /* Else, one of the pthread_mutex* functions failed. */ -+ -+ return result; -+} -+ -+/* Initializes MUTEX, then destroys it. */ -+static int -+test_status_destroyed (pthread_mutex_t *mutex) -+{ -+ int result = FAIL; -+ -+ if (pthread_mutex_init (mutex, NULL) == 0 -+ && pthread_mutex_destroy (mutex) == 0) -+ result = PASS; /* Test status (destroyed). */ -+ -+ return result; -+} -+ -+/* Tests locking of non-robust mutexes. */ -+static int -+test_status_no_robust (pthread_mutex_t *mutex, pthread_mutexattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (pthread_mutexattr_setrobust (attr, PTHREAD_MUTEX_STALLED) == 0 -+ && pthread_mutex_init (mutex, attr) == 0 -+ && pthread_mutex_lock (mutex) == 0 /* Test status (non-robust). */ -+ && pthread_mutex_unlock (mutex) == 0 -+ && pthread_mutex_destroy (mutex) == 0) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests locking of robust mutexes. */ -+static int -+test_status_robust (pthread_mutex_t *mutex, pthread_mutexattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (pthread_mutexattr_setrobust (attr, PTHREAD_MUTEX_ROBUST) == 0 -+ && pthread_mutex_init (mutex, attr) == 0 -+ && test_locking_state_robust (mutex) == PASS /* Test status (robust). */ -+ && pthread_mutex_destroy (mutex) == 0) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests locking and state corruption of robust mutexes. We'll mark it as -+ inconsistent, then not recoverable. */ -+static int -+test_locking_state_robust (pthread_mutex_t *mutex) -+{ -+ int result = FAIL; -+ pthread_t thread; -+ -+ if (pthread_create (&thread, NULL, thread_func, mutex) == 0 /* Create. */ -+ && pthread_join (thread, NULL) == 0 -+ && pthread_mutex_lock (mutex) == EOWNERDEAD /* Test locking (robust). */ -+ && pthread_mutex_unlock (mutex) == 0) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Function to be called by the child thread when testing robust mutexes. */ -+static void * -+thread_func (void *arg) -+{ -+ pthread_mutex_t *mutex = (pthread_mutex_t *)arg; -+ -+ if (pthread_mutex_lock (mutex) != 0) /* Thread function. 
*/ -+ exit (FAIL); -+ -+ /* Thread terminates without unlocking the mutex, thus marking it as -+ inconsistent. */ -+ return NULL; -+} -+ -+/* Tests locking the mutex multiple times in a row. */ -+static int -+test_recursive_locks (pthread_mutex_t *mutex, pthread_mutexattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (pthread_mutexattr_settype (attr, PTHREAD_MUTEX_RECURSIVE) == 0 -+ && pthread_mutex_init (mutex, attr) == 0 -+ && pthread_mutex_lock (mutex) == 0 -+ && pthread_mutex_lock (mutex) == 0 -+ && pthread_mutex_lock (mutex) == 0 /* Test recursive locks. */ -+ && pthread_mutex_unlock (mutex) == 0 -+ && pthread_mutex_unlock (mutex) == 0 -+ && pthread_mutex_unlock (mutex) == 0 -+ && pthread_mutex_destroy (mutex) == 0) -+ result = PASS; -+ -+ return result; -+} -diff --git a/nptl/test-mutex-printers.py b/nptl/test-mutex-printers.py -new file mode 100644 -index 0000000..7f542ad ---- /dev/null -+++ b/nptl/test-mutex-printers.py -@@ -0,0 +1,97 @@ -+# Tests for the MutexPrinter class. -+# -+# Copyright (C) 2016 Free Software Foundation, Inc. -+# This file is part of the GNU C Library. -+# -+# The GNU C Library is free software; you can redistribute it and/or -+# modify it under the terms of the GNU Lesser General Public -+# License as published by the Free Software Foundation; either -+# version 2.1 of the License, or (at your option) any later version. -+# -+# The GNU C Library is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+# Lesser General Public License for more details. -+# -+# You should have received a copy of the GNU Lesser General Public -+# License along with the GNU C Library; if not, see -+# . -+ -+import sys -+ -+from test_printers_common import * -+ -+test_source = sys.argv[1] -+test_bin = sys.argv[2] -+printer_files = sys.argv[3:] -+printer_names = ['global glibc-pthread-locks'] -+ -+try: -+ init_test(test_bin, printer_files, printer_names) -+ go_to_main() -+ -+ var = 'mutex' -+ to_string = 'pthread_mutex_t' -+ -+ break_at(test_source, 'Test status (destroyed)') -+ continue_cmd() # Go to test_status_destroyed -+ test_printer(var, to_string, {'Status': 'Destroyed'}) -+ -+ break_at(test_source, 'Test status (non-robust)') -+ continue_cmd() # Go to test_status_no_robust -+ test_printer(var, to_string, {'Status': 'Unlocked'}) -+ next_cmd() -+ thread_id = get_current_thread_lwpid() -+ test_printer(var, to_string, {'Status': 'Locked, possibly with no waiters', -+ 'Owner ID': thread_id}) -+ -+ break_at(test_source, 'Test status (robust)') -+ continue_cmd() # Go to test_status_robust -+ test_printer(var, to_string, {'Status': 'Unlocked'}) -+ -+ # We'll now test the robust mutex locking states. We'll create a new -+ # thread that will lock a robust mutex and exit without unlocking it. -+ break_at(test_source, 'Create') -+ continue_cmd() # Go to test_locking_state_robust -+ # Set a breakpoint for the new thread to hit. -+ break_at(test_source, 'Thread function') -+ continue_cmd() -+ # By now the new thread is created and has hit its breakpoint. -+ set_scheduler_locking(True) -+ parent = 1 -+ child = 2 -+ select_thread(child) -+ child_id = get_current_thread_lwpid() -+ # We've got the new thread's ID. -+ select_thread(parent) -+ # Make the new thread finish its function while we wait. -+ continue_cmd(thread=child) -+ # The new thread should be dead by now. 
-+ break_at(test_source, 'Test locking (robust)') -+ continue_cmd() -+ test_printer(var, to_string, {'Owner ID': r'{0} \(dead\)'.format(child_id)}) -+ # Try to lock and unlock the mutex. -+ next_cmd() -+ test_printer(var, to_string, {'Owner ID': thread_id, -+ 'State protected by this mutex': 'Inconsistent'}) -+ next_cmd() -+ test_printer(var, to_string, {'Status': 'Unlocked', -+ 'State protected by this mutex': 'Not recoverable'}) -+ set_scheduler_locking(False) -+ -+ break_at(test_source, 'Test recursive locks') -+ continue_cmd() # Go to test_recursive_locks -+ test_printer(var, to_string, {'Times locked recursively': '2'}) -+ next_cmd() -+ test_printer(var, to_string, {'Times locked recursively': '3'}) -+ continue_cmd() # Exit -+ -+except (NoLineError, pexpect.TIMEOUT) as exception: -+ print('Error: {0}'.format(exception)) -+ result = FAIL -+ -+else: -+ print('Test succeeded.') -+ result = PASS -+ -+exit(result) -diff --git a/nptl/test-mutexattr-printers.c b/nptl/test-mutexattr-printers.c -new file mode 100644 -index 0000000..9ecfff7 ---- /dev/null -+++ b/nptl/test-mutexattr-printers.c -@@ -0,0 +1,144 @@ -+/* Helper program for testing the pthread_mutex_t and pthread_mutexattr_t -+ pretty printers. -+ -+ Copyright (C) 2016 Free Software Foundation, Inc. -+ This file is part of the GNU C Library. -+ -+ The GNU C Library is free software; you can redistribute it and/or -+ modify it under the terms of the GNU Lesser General Public -+ License as published by the Free Software Foundation; either -+ version 2.1 of the License, or (at your option) any later version. -+ -+ The GNU C Library is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public -+ License along with the GNU C Library; if not, see -+ . */ -+ -+/* Keep the calls to the pthread_* functions on separate lines to make it easy -+ to advance through the program using the gdb 'next' command. */ -+ -+#include -+ -+#define PASS 0 -+#define FAIL 1 -+#define PRIOCEILING 42 -+ -+/* Need these so we don't have lines longer than 79 chars. 
*/ -+#define SET_TYPE(attr, type) pthread_mutexattr_settype (attr, type) -+#define SET_ROBUST(attr, robust) pthread_mutexattr_setrobust (attr, robust) -+#define SET_SHARED(attr, shared) pthread_mutexattr_setpshared (attr, shared) -+#define SET_PROTOCOL(attr, protocol) \ -+ pthread_mutexattr_setprotocol (attr, protocol) -+#define SET_PRIOCEILING(mutex, prioceiling, old_ceiling) \ -+ pthread_mutex_setprioceiling (mutex, prioceiling, old_ceiling) -+ -+static int mutex_reinit (pthread_mutex_t *mutex, -+ const pthread_mutexattr_t *attr); -+static int test_settype (pthread_mutex_t *mutex, pthread_mutexattr_t *attr); -+static int test_setrobust (pthread_mutex_t *mutex, pthread_mutexattr_t *attr); -+static int test_setpshared (pthread_mutex_t *mutex, pthread_mutexattr_t *attr); -+static int test_setprotocol (pthread_mutex_t *mutex, -+ pthread_mutexattr_t *attr); -+ -+int -+main (void) -+{ -+ pthread_mutex_t mutex; -+ pthread_mutexattr_t attr; -+ int result = FAIL; -+ -+ if (pthread_mutexattr_init (&attr) == 0 -+ && pthread_mutex_init (&mutex, NULL) == 0 -+ && test_settype (&mutex, &attr) == PASS -+ && test_setrobust (&mutex, &attr) == PASS -+ && test_setpshared (&mutex, &attr) == PASS -+ && test_setprotocol (&mutex, &attr) == PASS) -+ result = PASS; -+ /* Else, one of the pthread_mutex* functions failed. */ -+ -+ return result; -+} -+ -+/* Destroys MUTEX and re-initializes it using ATTR. */ -+static int -+mutex_reinit (pthread_mutex_t *mutex, const pthread_mutexattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (pthread_mutex_destroy (mutex) == 0 -+ && pthread_mutex_init (mutex, attr) == 0) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests setting the mutex type. */ -+static int -+test_settype (pthread_mutex_t *mutex, pthread_mutexattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (SET_TYPE (attr, PTHREAD_MUTEX_ERRORCHECK) == 0 /* Set type. */ -+ && mutex_reinit (mutex, attr) == 0 -+ && SET_TYPE (attr, PTHREAD_MUTEX_RECURSIVE) == 0 -+ && mutex_reinit (mutex, attr) == 0 -+ && SET_TYPE (attr, PTHREAD_MUTEX_NORMAL) == 0 -+ && mutex_reinit (mutex, attr) == 0) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests setting whether the mutex is robust. */ -+static int -+test_setrobust (pthread_mutex_t *mutex, pthread_mutexattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (SET_ROBUST (attr, PTHREAD_MUTEX_ROBUST) == 0 /* Set robust. */ -+ && mutex_reinit (mutex, attr) == 0 -+ && SET_ROBUST (attr, PTHREAD_MUTEX_STALLED) == 0 -+ && mutex_reinit (mutex, attr) == 0) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests setting whether the mutex can be shared between processes. */ -+static int -+test_setpshared (pthread_mutex_t *mutex, pthread_mutexattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (SET_SHARED (attr, PTHREAD_PROCESS_SHARED) == 0 /* Set shared. */ -+ && mutex_reinit (mutex, attr) == 0 -+ && SET_SHARED (attr, PTHREAD_PROCESS_PRIVATE) == 0 -+ && mutex_reinit (mutex, attr) == 0) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests setting the mutex protocol and, for Priority Protect, the Priority -+ Ceiling. */ -+static int -+test_setprotocol (pthread_mutex_t *mutex, pthread_mutexattr_t *attr) -+{ -+ int result = FAIL; -+ int old_prioceiling; -+ -+ if (SET_PROTOCOL (attr, PTHREAD_PRIO_INHERIT) == 0 /* Set protocol. 
*/ -+ && mutex_reinit (mutex, attr) == 0 -+ && SET_PROTOCOL (attr, PTHREAD_PRIO_PROTECT) == 0 -+ && mutex_reinit (mutex, attr) == 0 -+ && SET_PRIOCEILING(mutex, PRIOCEILING, &old_prioceiling) == 0 -+ && SET_PROTOCOL (attr, PTHREAD_PRIO_NONE) == 0 -+ && mutex_reinit (mutex, attr) == 0) -+ result = PASS; -+ -+ return result; -+} -diff --git a/nptl/test-mutexattr-printers.py b/nptl/test-mutexattr-printers.py -new file mode 100644 -index 0000000..4464723 ---- /dev/null -+++ b/nptl/test-mutexattr-printers.py -@@ -0,0 +1,101 @@ -+# Common tests for the MutexPrinter and MutexAttributesPrinter classes. -+# -+# Copyright (C) 2016 Free Software Foundation, Inc. -+# This file is part of the GNU C Library. -+# -+# The GNU C Library is free software; you can redistribute it and/or -+# modify it under the terms of the GNU Lesser General Public -+# License as published by the Free Software Foundation; either -+# version 2.1 of the License, or (at your option) any later version. -+# -+# The GNU C Library is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+# Lesser General Public License for more details. -+# -+# You should have received a copy of the GNU Lesser General Public -+# License along with the GNU C Library; if not, see -+# . -+ -+import sys -+ -+from test_printers_common import * -+ -+test_source = sys.argv[1] -+test_bin = sys.argv[2] -+printer_files = sys.argv[3:] -+printer_names = ['global glibc-pthread-locks'] -+PRIOCEILING = 42 -+ -+try: -+ init_test(test_bin, printer_files, printer_names) -+ go_to_main() -+ -+ check_debug_symbol('struct pthread_mutexattr') -+ -+ mutex_var = 'mutex' -+ mutex_to_string = 'pthread_mutex_t' -+ -+ attr_var = 'attr' -+ attr_to_string = 'pthread_mutexattr_t' -+ -+ break_at(test_source, 'Set type') -+ continue_cmd() # Go to test_settype -+ next_cmd(2) -+ test_printer(attr_var, attr_to_string, {'Type': 'Error check'}) -+ test_printer(mutex_var, mutex_to_string, {'Type': 'Error check'}) -+ next_cmd(2) -+ test_printer(attr_var, attr_to_string, {'Type': 'Recursive'}) -+ test_printer(mutex_var, mutex_to_string, {'Type': 'Recursive'}) -+ next_cmd(2) -+ test_printer(attr_var, attr_to_string, {'Type': 'Normal'}) -+ test_printer(mutex_var, mutex_to_string, {'Type': 'Normal'}) -+ -+ break_at(test_source, 'Set robust') -+ continue_cmd() # Go to test_setrobust -+ next_cmd(2) -+ test_printer(attr_var, attr_to_string, {'Robust': 'Yes'}) -+ test_printer(mutex_var, mutex_to_string, {'Robust': 'Yes'}) -+ next_cmd(2) -+ test_printer(attr_var, attr_to_string, {'Robust': 'No'}) -+ test_printer(mutex_var, mutex_to_string, {'Robust': 'No'}) -+ -+ break_at(test_source, 'Set shared') -+ continue_cmd() # Go to test_setpshared -+ next_cmd(2) -+ test_printer(attr_var, attr_to_string, {'Shared': 'Yes'}) -+ test_printer(mutex_var, mutex_to_string, {'Shared': 'Yes'}) -+ next_cmd(2) -+ test_printer(attr_var, attr_to_string, {'Shared': 'No'}) -+ test_printer(mutex_var, mutex_to_string, {'Shared': 'No'}) -+ -+ break_at(test_source, 'Set protocol') -+ continue_cmd() # Go to test_setprotocol -+ next_cmd(2) -+ test_printer(attr_var, attr_to_string, {'Protocol': 'Priority inherit'}) -+ test_printer(mutex_var, mutex_to_string, {'Protocol': 'Priority inherit'}) -+ next_cmd(2) -+ test_printer(attr_var, attr_to_string, {'Protocol': 'Priority protect'}) -+ test_printer(mutex_var, mutex_to_string, {'Protocol': 'Priority protect'}) -+ next_cmd(2) -+ test_printer(mutex_var, 
mutex_to_string, {'Priority ceiling': -+ str(PRIOCEILING)}) -+ next_cmd() -+ test_printer(attr_var, attr_to_string, {'Protocol': 'None'}) -+ test_printer(mutex_var, mutex_to_string, {'Protocol': 'None'}) -+ -+ continue_cmd() # Exit -+ -+except (NoLineError, pexpect.TIMEOUT) as exception: -+ print('Error: {0}'.format(exception)) -+ result = FAIL -+ -+except DebugError as exception: -+ print(exception) -+ result = UNSUPPORTED -+ -+else: -+ print('Test succeeded.') -+ result = PASS -+ -+exit(result) -diff --git a/nptl/test-rwlock-printers.c b/nptl/test-rwlock-printers.c -new file mode 100644 -index 0000000..dbbe9b8 ---- /dev/null -+++ b/nptl/test-rwlock-printers.c -@@ -0,0 +1,78 @@ -+/* Helper program for testing the pthread_rwlock_t pretty printer. -+ -+ Copyright (C) 2016 Free Software Foundation, Inc. -+ This file is part of the GNU C Library. -+ -+ The GNU C Library is free software; you can redistribute it and/or -+ modify it under the terms of the GNU Lesser General Public -+ License as published by the Free Software Foundation; either -+ version 2.1 of the License, or (at your option) any later version. -+ -+ The GNU C Library is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public -+ License along with the GNU C Library; if not, see -+ . */ -+ -+/* Keep the calls to the pthread_* functions on separate lines to make it easy -+ to advance through the program using the gdb 'next' command. */ -+ -+#include -+ -+#define PASS 0 -+#define FAIL 1 -+ -+static int test_locking_reader (pthread_rwlock_t *rwlock); -+static int test_locking_writer (pthread_rwlock_t *rwlock); -+ -+int -+main (void) -+{ -+ pthread_rwlock_t rwlock; -+ -+ int result = FAIL; -+ -+ if (test_locking_reader (&rwlock) == PASS -+ && test_locking_writer (&rwlock) == PASS) -+ result = PASS; -+ /* Else, one of the pthread_rwlock* functions failed. */ -+ -+ return result; -+} -+ -+/* Tests locking the rwlock multiple times as a reader. */ -+static int -+test_locking_reader (pthread_rwlock_t *rwlock) -+{ -+ int result = FAIL; -+ -+ if (pthread_rwlock_init (rwlock, NULL) == 0 -+ && pthread_rwlock_rdlock (rwlock) == 0 /* Test locking (reader). */ -+ && pthread_rwlock_rdlock (rwlock) == 0 -+ && pthread_rwlock_rdlock (rwlock) == 0 -+ && pthread_rwlock_unlock (rwlock) == 0 -+ && pthread_rwlock_unlock (rwlock) == 0 -+ && pthread_rwlock_unlock (rwlock) == 0 -+ && pthread_rwlock_destroy (rwlock) == 0) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests locking the rwlock as a writer. */ -+static int -+test_locking_writer (pthread_rwlock_t *rwlock) -+{ -+ int result = FAIL; -+ -+ if (pthread_rwlock_init (rwlock, NULL) == 0 -+ && pthread_rwlock_wrlock (rwlock) == 0 /* Test locking (writer). */ -+ && pthread_rwlock_unlock (rwlock) == 0 -+ && pthread_rwlock_destroy (rwlock) == 0) -+ result = PASS; -+ -+ return result; -+} -diff --git a/nptl/test-rwlock-printers.py b/nptl/test-rwlock-printers.py -new file mode 100644 -index 0000000..b972fa6 ---- /dev/null -+++ b/nptl/test-rwlock-printers.py -@@ -0,0 +1,64 @@ -+# Common tests for the RWLockPrinter class. -+# -+# Copyright (C) 2016 Free Software Foundation, Inc. -+# This file is part of the GNU C Library. 
-+# -+# The GNU C Library is free software; you can redistribute it and/or -+# modify it under the terms of the GNU Lesser General Public -+# License as published by the Free Software Foundation; either -+# version 2.1 of the License, or (at your option) any later version. -+# -+# The GNU C Library is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+# Lesser General Public License for more details. -+# -+# You should have received a copy of the GNU Lesser General Public -+# License along with the GNU C Library; if not, see -+# . -+ -+import sys -+ -+from test_printers_common import * -+ -+test_source = sys.argv[1] -+test_bin = sys.argv[2] -+printer_files = sys.argv[3:] -+printer_names = ['global glibc-pthread-locks'] -+ -+try: -+ init_test(test_bin, printer_files, printer_names) -+ go_to_main() -+ -+ var = 'rwlock' -+ to_string = 'pthread_rwlock_t' -+ -+ break_at(test_source, 'Test locking (reader)') -+ continue_cmd() # Go to test_locking_reader -+ test_printer(var, to_string, {'Status': 'Unlocked'}) -+ next_cmd() -+ test_printer(var, to_string, {'Status': r'Locked \(Read\)', 'Readers': '1'}) -+ next_cmd() -+ test_printer(var, to_string, {'Readers': '2'}) -+ next_cmd() -+ test_printer(var, to_string, {'Readers': '3'}) -+ -+ break_at(test_source, 'Test locking (writer)') -+ continue_cmd() # Go to test_locking_writer -+ test_printer(var, to_string, {'Status': 'Unlocked'}) -+ next_cmd() -+ thread_id = get_current_thread_lwpid() -+ test_printer(var, to_string, {'Status': r'Locked \(Write\)', -+ 'Writer ID': thread_id}) -+ -+ continue_cmd() # Exit -+ -+except (NoLineError, pexpect.TIMEOUT) as exception: -+ print('Error: {0}'.format(exception)) -+ result = FAIL -+ -+else: -+ print('Test succeeded.') -+ result = PASS -+ -+exit(result) -diff --git a/nptl/test-rwlockattr-printers.c b/nptl/test-rwlockattr-printers.c -new file mode 100644 -index 0000000..d12facf ---- /dev/null -+++ b/nptl/test-rwlockattr-printers.c -@@ -0,0 +1,98 @@ -+/* Helper program for testing the pthread_rwlock_t and pthread_rwlockattr_t -+ pretty printers. -+ -+ Copyright (C) 2016 Free Software Foundation, Inc. -+ This file is part of the GNU C Library. -+ -+ The GNU C Library is free software; you can redistribute it and/or -+ modify it under the terms of the GNU Lesser General Public -+ License as published by the Free Software Foundation; either -+ version 2.1 of the License, or (at your option) any later version. -+ -+ The GNU C Library is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public -+ License along with the GNU C Library; if not, see -+ . */ -+ -+/* Keep the calls to the pthread_* functions on separate lines to make it easy -+ to advance through the program using the gdb 'next' command. */ -+ -+#include -+ -+#define PASS 0 -+#define FAIL 1 -+ -+/* Need these so we don't have lines longer than 79 chars. 
*/ -+#define SET_KIND(attr, kind) pthread_rwlockattr_setkind_np (attr, kind) -+#define SET_SHARED(attr, shared) pthread_rwlockattr_setpshared (attr, shared) -+ -+static int rwlock_reinit (pthread_rwlock_t *rwlock, -+ const pthread_rwlockattr_t *attr); -+static int test_setkind_np (pthread_rwlock_t *rwlock, -+ pthread_rwlockattr_t *attr); -+static int test_setpshared (pthread_rwlock_t *rwlock, -+ pthread_rwlockattr_t *attr); -+ -+int -+main (void) -+{ -+ pthread_rwlock_t rwlock; -+ pthread_rwlockattr_t attr; -+ int result = FAIL; -+ -+ if (pthread_rwlockattr_init (&attr) == 0 -+ && pthread_rwlock_init (&rwlock, NULL) == 0 -+ && test_setkind_np (&rwlock, &attr) == PASS -+ && test_setpshared (&rwlock, &attr) == PASS) -+ result = PASS; -+ /* Else, one of the pthread_rwlock* functions failed. */ -+ -+ return result; -+} -+ -+/* Destroys RWLOCK and re-initializes it using ATTR. */ -+static int -+rwlock_reinit (pthread_rwlock_t *rwlock, const pthread_rwlockattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (pthread_rwlock_destroy (rwlock) == 0 -+ && pthread_rwlock_init (rwlock, attr) == 0) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests setting whether the rwlock prefers readers or writers. */ -+static int -+test_setkind_np (pthread_rwlock_t *rwlock, pthread_rwlockattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (SET_KIND (attr, PTHREAD_RWLOCK_PREFER_READER_NP) == 0 /* Set kind. */ -+ && rwlock_reinit (rwlock, attr) == PASS -+ && SET_KIND (attr, PTHREAD_RWLOCK_PREFER_WRITER_NONRECURSIVE_NP) == 0 -+ && rwlock_reinit (rwlock, attr) == PASS) -+ result = PASS; -+ -+ return result; -+} -+ -+/* Tests setting whether the rwlock can be shared between processes. */ -+static int -+test_setpshared (pthread_rwlock_t *rwlock, pthread_rwlockattr_t *attr) -+{ -+ int result = FAIL; -+ -+ if (SET_SHARED (attr, PTHREAD_PROCESS_SHARED) == 0 /* Set shared. */ -+ && rwlock_reinit (rwlock, attr) == PASS -+ && SET_SHARED (attr, PTHREAD_PROCESS_PRIVATE) == 0 -+ && rwlock_reinit (rwlock, attr) == PASS) -+ result = PASS; -+ -+ return result; -+} -diff --git a/nptl/test-rwlockattr-printers.py b/nptl/test-rwlockattr-printers.py -new file mode 100644 -index 0000000..1ca2dc6 ---- /dev/null -+++ b/nptl/test-rwlockattr-printers.py -@@ -0,0 +1,73 @@ -+# Common tests for the RWLockPrinter and RWLockAttributesPrinter classes. -+# -+# Copyright (C) 2016 Free Software Foundation, Inc. -+# This file is part of the GNU C Library. -+# -+# The GNU C Library is free software; you can redistribute it and/or -+# modify it under the terms of the GNU Lesser General Public -+# License as published by the Free Software Foundation; either -+# version 2.1 of the License, or (at your option) any later version. -+# -+# The GNU C Library is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+# Lesser General Public License for more details. -+# -+# You should have received a copy of the GNU Lesser General Public -+# License along with the GNU C Library; if not, see -+# . 
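Each of the test-*-printers.py drivers in this patch has the same shape: it is invoked with the C helper's source file, the compiled helper binary, and the pretty-printer scripts as arguments, then drives gdb to an inline marker comment in the helper and checks the printer output at that point. A condensed sketch of that shared skeleton, with a hypothetical marker comment and variable name rather than ones taken from the patch, assuming the helpers from scripts/test_printers_common.py shown further below:

import sys

from test_printers_common import *

test_source = sys.argv[1]     # C helper source, scanned for marker comments
test_bin = sys.argv[2]        # compiled helper binary to load into gdb
printer_files = sys.argv[3:]  # pretty-printer scripts passed to 'source'
printer_names = ['global glibc-pthread-locks']

try:
    init_test(test_bin, printer_files, printer_names)
    go_to_main()

    # 'Set example attribute' stands in for an inline marker comment in the
    # C helper; break_at() places a breakpoint on the line containing it.
    break_at(test_source, 'Set example attribute')
    continue_cmd()   # Run until the marker is hit.
    next_cmd(2)      # Step past the setter and the re-initialization.
    test_printer('lock', 'pthread_mutex_t', {'Shared': 'Yes'})

    continue_cmd()   # Let the helper run to completion.

except (NoLineError, pexpect.TIMEOUT) as exception:
    print('Error: {0}'.format(exception))
    result = FAIL
else:
    print('Test succeeded.')
    result = PASS

exit(result)

The drivers that also inspect attribute structures additionally catch DebugError and exit with UNSUPPORTED when libpthread has no debug symbols, which is the pattern visible in the condattr, mutexattr and rwlockattr tests.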
-+ -+import sys -+ -+from test_printers_common import * -+ -+test_source = sys.argv[1] -+test_bin = sys.argv[2] -+printer_files = sys.argv[3:] -+printer_names = ['global glibc-pthread-locks'] -+ -+try: -+ init_test(test_bin, printer_files, printer_names) -+ go_to_main() -+ -+ check_debug_symbol('struct pthread_rwlockattr') -+ -+ rwlock_var = 'rwlock' -+ rwlock_to_string = 'pthread_rwlock_t' -+ -+ attr_var = 'attr' -+ attr_to_string = 'pthread_rwlockattr_t' -+ -+ break_at(test_source, 'Set kind') -+ continue_cmd() # Go to test_setkind_np -+ next_cmd(2) -+ test_printer(rwlock_var, rwlock_to_string, {'Prefers': 'Readers'}) -+ test_printer(attr_var, attr_to_string, {'Prefers': 'Readers'}) -+ next_cmd(2) -+ test_printer(rwlock_var, rwlock_to_string, {'Prefers': 'Writers'}) -+ test_printer(attr_var, attr_to_string, {'Prefers': 'Writers'}) -+ -+ break_at(test_source, 'Set shared') -+ continue_cmd() # Go to test_setpshared -+ next_cmd(2) -+ test_printer(rwlock_var, rwlock_to_string, {'Shared': 'Yes'}) -+ test_printer(attr_var, attr_to_string, {'Shared': 'Yes'}) -+ next_cmd(2) -+ test_printer(rwlock_var, rwlock_to_string, {'Shared': 'No'}) -+ test_printer(attr_var, attr_to_string, {'Shared': 'No'}) -+ -+ continue_cmd() # Exit -+ -+except (NoLineError, pexpect.TIMEOUT) as exception: -+ print('Error: {0}'.format(exception)) -+ result = FAIL -+ -+except DebugError as exception: -+ print(exception) -+ result = UNSUPPORTED -+ -+else: -+ print('Test succeeded.') -+ result = PASS -+ -+exit(result) -diff --git a/scripts/gen-py-const.awk b/scripts/gen-py-const.awk -new file mode 100644 -index 0000000..4586f59 ---- /dev/null -+++ b/scripts/gen-py-const.awk -@@ -0,0 +1,118 @@ -+# Script to generate constants for Python pretty printers. -+# -+# Copyright (C) 2016 Free Software Foundation, Inc. -+# This file is part of the GNU C Library. -+# -+# The GNU C Library is free software; you can redistribute it and/or -+# modify it under the terms of the GNU Lesser General Public -+# License as published by the Free Software Foundation; either -+# version 2.1 of the License, or (at your option) any later version. -+# -+# The GNU C Library is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+# Lesser General Public License for more details. -+# -+# You should have received a copy of the GNU Lesser General Public -+# License along with the GNU C Library; if not, see -+# . -+ -+# This script is a smaller version of the clever gen-asm-const.awk hack used to -+# generate ASM constants from .sym files. We'll use this to generate constants -+# for Python pretty printers. -+# -+# The input to this script are .pysym files that look like: -+# #C_Preprocessor_Directive... -+# NAME1 -+# NAME2 expression... -+# -+# A line giving just a name implies an expression consisting of just that name. -+# Comments start with '--'. -+# -+# The output of this script is a 'dummy' function containing 'asm' declarations -+# for each non-preprocessor line in the .pysym file. The expression values -+# will appear as input operands to the 'asm' declaration. 
For example, if we -+# have: -+# -+# /* header.h */ -+# #define MACRO 42 -+# -+# struct S { -+# char c1; -+# char c2; -+# char c3; -+# }; -+# -+# enum E { -+# ZERO, -+# ONE -+# }; -+# -+# /* symbols.pysym */ -+# #include -+# #include "header.h" -+# -- This is a comment -+# MACRO -+# C3_OFFSET offsetof(struct S, c3) -+# E_ONE ONE -+# -+# the output will be: -+# -+# #include -+# #include "header.h" -+# void dummy(void) -+# { -+# asm ("@name@MACRO@value@%0@" : : "i" (MACRO)); -+# asm ("@name@C3_OFFSET@value@%0@" : : "i" (offsetof(struct S, c3))); -+# asm ("@name@E_ONE@value@%0@" : : "i" (ONE)); -+# } -+# -+# We'll later feed this output to gcc -S. Since '-S' tells gcc to compile but -+# not assemble, gcc will output something like: -+# -+# dummy: -+# ... -+# @name@MACRO@value@$42@ -+# @name@C3_OFFSET@value@$2@ -+# @name@E_ONE@value@$1@ -+# -+# Finally, we can process that output to extract the constant values. -+# Notice gcc may prepend a special character such as '$' to each value. -+ -+# found_symbol indicates whether we found a non-comment, non-preprocessor line. -+BEGIN { found_symbol = 0 } -+ -+# C preprocessor directives go straight through. -+/^#/ { print; next; } -+ -+# Skip comments. -+/--/ { next; } -+ -+# Trim leading whitespace. -+{ sub(/^[[:blank:]]*/, ""); } -+ -+# If we found a non-comment, non-preprocessor line, print the 'dummy' function -+# header. -+NF > 0 && !found_symbol { -+ print "void dummy(void)\n{"; -+ found_symbol = 1; -+} -+ -+# If the line contains just a name, duplicate it so we can use that name -+# as the value of the expression. -+NF == 1 { sub(/^.*$/, "& &"); } -+ -+# If a line contains a name and an expression... -+NF > 1 { -+ name = $1; -+ -+ # Remove any characters before the second field. -+ sub(/^[^[:blank:]]+[[:blank:]]+/, ""); -+ -+ # '$0' ends up being everything that appeared after the first field -+ # separator. -+ printf " asm (\"@name@%s@value@%0@\" : : \"i\" (%s));\n", name, $0; -+} -+ -+# Close the 'dummy' function. -+END { if (found_symbol) print "}"; } -diff --git a/scripts/test_printers_common.py b/scripts/test_printers_common.py -new file mode 100644 -index 0000000..c79d7e3 ---- /dev/null -+++ b/scripts/test_printers_common.py -@@ -0,0 +1,364 @@ -+# Common functions and variables for testing the Python pretty printers. -+# -+# Copyright (C) 2016 Free Software Foundation, Inc. -+# This file is part of the GNU C Library. -+# -+# The GNU C Library is free software; you can redistribute it and/or -+# modify it under the terms of the GNU Lesser General Public -+# License as published by the Free Software Foundation; either -+# version 2.1 of the License, or (at your option) any later version. -+# -+# The GNU C Library is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+# Lesser General Public License for more details. -+# -+# You should have received a copy of the GNU Lesser General Public -+# License along with the GNU C Library; if not, see -+# . -+ -+"""These tests require PExpect 4.0 or newer. -+ -+Exported constants: -+ PASS, FAIL, UNSUPPORTED (int): Test exit codes, as per evaluate-test.sh. 
-+""" -+ -+import os -+import re -+from test_printers_exceptions import * -+ -+PASS = 0 -+FAIL = 1 -+UNSUPPORTED = 77 -+ -+gdb_bin = 'gdb' -+gdb_options = '-q -nx' -+gdb_invocation = '{0} {1}'.format(gdb_bin, gdb_options) -+pexpect_min_version = 4 -+gdb_min_version = (7, 8) -+encoding = 'utf-8' -+ -+try: -+ import pexpect -+except ImportError: -+ print('PExpect 4.0 or newer must be installed to test the pretty printers.') -+ exit(UNSUPPORTED) -+ -+pexpect_version = pexpect.__version__.split('.')[0] -+ -+if int(pexpect_version) < pexpect_min_version: -+ print('PExpect 4.0 or newer must be installed to test the pretty printers.') -+ exit(UNSUPPORTED) -+ -+if not pexpect.which(gdb_bin): -+ print('gdb 7.8 or newer must be installed to test the pretty printers.') -+ exit(UNSUPPORTED) -+ -+timeout = 5 -+TIMEOUTFACTOR = os.environ.get('TIMEOUTFACTOR') -+ -+if TIMEOUTFACTOR: -+ timeout = int(TIMEOUTFACTOR) -+ -+try: -+ # Check the gdb version. -+ version_cmd = '{0} --version'.format(gdb_invocation, timeout=timeout) -+ gdb_version_out = pexpect.run(version_cmd, encoding=encoding) -+ -+ # The gdb version string is "GNU gdb ", where -+ # PKGVERSION can be any text. We assume that there'll always be a space -+ # between PKGVERSION and the version number for the sake of the regexp. -+ version_match = re.search(r'GNU gdb .* ([1-9]+)\.([0-9]+)', gdb_version_out) -+ -+ if not version_match: -+ print('The gdb version string (gdb -v) is incorrectly formatted.') -+ exit(UNSUPPORTED) -+ -+ gdb_version = (int(version_match.group(1)), int(version_match.group(2))) -+ -+ if gdb_version < gdb_min_version: -+ print('gdb 7.8 or newer must be installed to test the pretty printers.') -+ exit(UNSUPPORTED) -+ -+ # Check if gdb supports Python. -+ gdb_python_cmd = '{0} -ex "python import os" -batch'.format(gdb_invocation, -+ timeout=timeout) -+ gdb_python_error = pexpect.run(gdb_python_cmd, encoding=encoding) -+ -+ if gdb_python_error: -+ print('gdb must have python support to test the pretty printers.') -+ exit(UNSUPPORTED) -+ -+ # If everything's ok, spawn the gdb process we'll use for testing. -+ gdb = pexpect.spawn(gdb_invocation, echo=False, timeout=timeout, -+ encoding=encoding) -+ gdb_prompt = u'\(gdb\)' -+ gdb.expect(gdb_prompt) -+ -+except pexpect.ExceptionPexpect as exception: -+ print('Error: {0}'.format(exception)) -+ exit(FAIL) -+ -+def test(command, pattern=None): -+ """Sends 'command' to gdb and expects the given 'pattern'. -+ -+ If 'pattern' is None, simply consumes everything up to and including -+ the gdb prompt. -+ -+ Args: -+ command (string): The command we'll send to gdb. -+ pattern (raw string): A pattern the gdb output should match. -+ -+ Returns: -+ string: The string that matched 'pattern', or an empty string if -+ 'pattern' was None. -+ """ -+ -+ match = '' -+ -+ gdb.sendline(command) -+ -+ if pattern: -+ # PExpect does a non-greedy match for '+' and '*'. Since it can't look -+ # ahead on the gdb output stream, if 'pattern' ends with a '+' or a '*' -+ # we may end up matching only part of the required output. -+ # To avoid this, we'll consume 'pattern' and anything that follows it -+ # up to and including the gdb prompt, then extract 'pattern' later. -+ index = gdb.expect([u'{0}.+{1}'.format(pattern, gdb_prompt), -+ pexpect.TIMEOUT]) -+ -+ if index == 0: -+ # gdb.after now contains the whole match. Extract the text that -+ # matches 'pattern'. -+ match = re.match(pattern, gdb.after, re.DOTALL).group() -+ elif index == 1: -+ # We got a timeout exception. 
Print information on what caused it -+ # and bail out. -+ error = ('Response does not match the expected pattern.\n' -+ 'Command: {0}\n' -+ 'Expected pattern: {1}\n' -+ 'Response: {2}'.format(command, pattern, gdb.before)) -+ -+ raise pexpect.TIMEOUT(error) -+ else: -+ # Consume just the the gdb prompt. -+ gdb.expect(gdb_prompt) -+ -+ return match -+ -+def init_test(test_bin, printer_files, printer_names): -+ """Loads the test binary file and the required pretty printers to gdb. -+ -+ Args: -+ test_bin (string): The name of the test binary file. -+ pretty_printers (list of strings): A list with the names of the pretty -+ printer files. -+ """ -+ -+ # Load all the pretty printer files. We're assuming these are safe. -+ for printer_file in printer_files: -+ test('source {0}'.format(printer_file)) -+ -+ # Disable all the pretty printers. -+ test('disable pretty-printer', r'0 of [0-9]+ printers enabled') -+ -+ # Enable only the required printers. -+ for printer in printer_names: -+ test('enable pretty-printer {0}'.format(printer), -+ r'[1-9][0-9]* of [1-9]+ printers enabled') -+ -+ # Finally, load the test binary. -+ test('file {0}'.format(test_bin)) -+ -+def go_to_main(): -+ """Executes a gdb 'start' command, which takes us to main.""" -+ -+ test('start', r'main') -+ -+def get_line_number(file_name, string): -+ """Returns the number of the line in which 'string' appears within a file. -+ -+ Args: -+ file_name (string): The name of the file we'll search through. -+ string (string): The string we'll look for. -+ -+ Returns: -+ int: The number of the line in which 'string' appears, starting from 1. -+ """ -+ number = -1 -+ -+ with open(file_name) as src_file: -+ for i, line in enumerate(src_file): -+ if string in line: -+ number = i + 1 -+ break -+ -+ if number == -1: -+ raise NoLineError(file_name, string) -+ -+ return number -+ -+def break_at(file_name, string, temporary=True, thread=None): -+ """Places a breakpoint on the first line in 'file_name' containing 'string'. -+ -+ 'string' is usually a comment like "Stop here". Notice this may fail unless -+ the comment is placed inline next to actual code, e.g.: -+ -+ ... -+ /* Stop here */ -+ ... -+ -+ may fail, while: -+ -+ ... -+ some_func(); /* Stop here */ -+ ... -+ -+ will succeed. -+ -+ If 'thread' isn't None, the breakpoint will be set for all the threads. -+ Otherwise, it'll be set only for 'thread'. -+ -+ Args: -+ file_name (string): The name of the file we'll place the breakpoint in. -+ string (string): A string we'll look for inside the file. -+ We'll place a breakpoint on the line which contains it. -+ temporary (bool): Whether the breakpoint should be automatically deleted -+ after we reach it. -+ thread (int): The number of the thread we'll place the breakpoint for, -+ as seen by gdb. If specified, it should be greater than zero. -+ """ -+ -+ if not thread: -+ thread_str = '' -+ else: -+ thread_str = 'thread {0}'.format(thread) -+ -+ if temporary: -+ command = 'tbreak' -+ break_type = 'Temporary breakpoint' -+ else: -+ command = 'break' -+ break_type = 'Breakpoint' -+ -+ line_number = str(get_line_number(file_name, string)) -+ -+ test('{0} {1}:{2} {3}'.format(command, file_name, line_number, thread_str), -+ r'{0} [0-9]+ at 0x[a-f0-9]+: file {1}, line {2}\.'.format(break_type, -+ file_name, -+ line_number)) -+ -+def continue_cmd(thread=None): -+ """Executes a gdb 'continue' command. -+ -+ If 'thread' isn't None, the command will be applied to all the threads. -+ Otherwise, it'll be applied only to 'thread'. 
-+ -+ Args: -+ thread (int): The number of the thread we'll apply the command to, -+ as seen by gdb. If specified, it should be greater than zero. -+ """ -+ -+ if not thread: -+ command = 'continue' -+ else: -+ command = 'thread apply {0} continue'.format(thread) -+ -+ test(command) -+ -+def next_cmd(count=1, thread=None): -+ """Executes a gdb 'next' command. -+ -+ If 'thread' isn't None, the command will be applied to all the threads. -+ Otherwise, it'll be applied only to 'thread'. -+ -+ Args: -+ count (int): The 'count' argument of the 'next' command. -+ thread (int): The number of the thread we'll apply the command to, -+ as seen by gdb. If specified, it should be greater than zero. -+ """ -+ -+ if not thread: -+ command = 'next' -+ else: -+ command = 'thread apply {0} next' -+ -+ test('{0} {1}'.format(command, count)) -+ -+def select_thread(thread): -+ """Selects the thread indicated by 'thread'. -+ -+ Args: -+ thread (int): The number of the thread we'll switch to, as seen by gdb. -+ This should be greater than zero. -+ """ -+ -+ if thread > 0: -+ test('thread {0}'.format(thread)) -+ -+def get_current_thread_lwpid(): -+ """Gets the current thread's Lightweight Process ID. -+ -+ Returns: -+ string: The current thread's LWP ID. -+ """ -+ -+ # It's easier to get the LWP ID through the Python API than the gdb CLI. -+ command = 'python print(gdb.selected_thread().ptid[1])' -+ -+ return test(command, r'[0-9]+') -+ -+def set_scheduler_locking(mode): -+ """Executes the gdb 'set scheduler-locking' command. -+ -+ Args: -+ mode (bool): Whether the scheduler locking mode should be 'on'. -+ """ -+ modes = { -+ True: 'on', -+ False: 'off' -+ } -+ -+ test('set scheduler-locking {0}'.format(modes[mode])) -+ -+def test_printer(var, to_string, children=None, is_ptr=True): -+ """ Tests the output of a pretty printer. -+ -+ For a variable called 'var', this tests whether its associated printer -+ outputs the expected 'to_string' and children (if any). -+ -+ Args: -+ var (string): The name of the variable we'll print. -+ to_string (raw string): The expected output of the printer's 'to_string' -+ method. -+ children (map {raw string->raw string}): A map with the expected output -+ of the printer's children' method. -+ is_ptr (bool): Whether 'var' is a pointer, and thus should be -+ dereferenced. -+ """ -+ -+ if is_ptr: -+ var = '*{0}'.format(var) -+ -+ test('print {0}'.format(var), to_string) -+ -+ if children: -+ for name, value in children.items(): -+ # Children are shown as 'name = value'. -+ test('print {0}'.format(var), r'{0} = {1}'.format(name, value)) -+ -+def check_debug_symbol(symbol): -+ """ Tests whether a given debugging symbol exists. -+ -+ If the symbol doesn't exist, raises a DebugError. -+ -+ Args: -+ symbol (string): The symbol we're going to check for. -+ """ -+ -+ try: -+ test('ptype {0}'.format(symbol), r'type = {0}'.format(symbol)) -+ -+ except pexpect.TIMEOUT: -+ # The symbol doesn't exist. -+ raise DebugError(symbol) -diff --git a/scripts/test_printers_exceptions.py b/scripts/test_printers_exceptions.py -new file mode 100644 -index 0000000..17034b5 ---- /dev/null -+++ b/scripts/test_printers_exceptions.py -@@ -0,0 +1,61 @@ -+# Exception classes used when testing the Python pretty printers. -+# -+# Copyright (C) 2016 Free Software Foundation, Inc. -+# This file is part of the GNU C Library. 
-+# -+# The GNU C Library is free software; you can redistribute it and/or -+# modify it under the terms of the GNU Lesser General Public -+# License as published by the Free Software Foundation; either -+# version 2.1 of the License, or (at your option) any later version. -+# -+# The GNU C Library is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+# Lesser General Public License for more details. -+# -+# You should have received a copy of the GNU Lesser General Public -+# License along with the GNU C Library; if not, see -+# . -+ -+class NoLineError(Exception): -+ """Custom exception to indicate that a test file doesn't contain -+ the requested string. -+ """ -+ -+ def __init__(self, file_name, string): -+ """Constructor. -+ -+ Args: -+ file_name (string): The name of the test file. -+ string (string): The string that was requested. -+ """ -+ -+ super(NoLineError, self).__init__() -+ self.file_name = file_name -+ self.string = string -+ -+ def __str__(self): -+ """Shows a readable representation of the exception.""" -+ -+ return ('File {0} has no line containing the following string: {1}' -+ .format(self.file_name, self.string)) -+ -+class DebugError(Exception): -+ """Custom exception to indicate that a required debugging symbol is missing. -+ """ -+ -+ def __init__(self, symbol): -+ """Constructor. -+ -+ Args: -+ symbol (string): The name of the entity whose debug info is missing. -+ """ -+ -+ super(DebugError, self).__init__() -+ self.symbol = symbol -+ -+ def __str__(self): -+ """Shows a readable representation of the exception.""" -+ -+ return ('The required debugging information for {0} is missing.' -+ .format(self.symbol)) --- -2.10.2 - diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch index 7cba6cc36..9e207e44d 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0003-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch @@ -1,7 +1,7 @@ -From c99892f2018cd7fa0f37b53e6cebec99fa036472 Mon Sep 17 00:00:00 2001 +From e53968d61804b6bab32ec6e13cc0b3cd57214796 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 01:51:38 +0000 -Subject: [PATCH 03/25] nativesdk-glibc: Raise the size of arrays containing dl +Subject: [PATCH 03/26] nativesdk-glibc: Raise the size of arrays containing dl paths This patch puts the dynamic loader path in the binaries, SYSTEM_DIRS strings @@ -21,13 +21,14 @@ Signed-off-by: Khem Raj elf/interp.c | 2 +- elf/ldconfig.c | 3 +++ elf/rtld.c | 5 +++-- + iconv/gconv_conf.c | 2 +- sysdeps/generic/dl-cache.h | 4 ---- - 6 files changed, 13 insertions(+), 9 deletions(-) + 7 files changed, 14 insertions(+), 10 deletions(-) -Index: git/elf/dl-cache.c -=================================================================== ---- git.orig/elf/dl-cache.c -+++ git/elf/dl-cache.c +diff --git a/elf/dl-cache.c b/elf/dl-cache.c +index cfa335e..daa12ec 100644 +--- a/elf/dl-cache.c ++++ b/elf/dl-cache.c @@ -132,6 +132,10 @@ do \ while (0) @@ -39,11 +40,11 @@ Index: git/elf/dl-cache.c int internal_function _dl_cache_libcmp (const char *p1, const char *p2) -Index: git/elf/dl-load.c 
-=================================================================== ---- git.orig/elf/dl-load.c -+++ git/elf/dl-load.c -@@ -106,8 +106,8 @@ static size_t max_capstrlen attribute_re +diff --git a/elf/dl-load.c b/elf/dl-load.c +index 3a3d112..a1410e4 100644 +--- a/elf/dl-load.c ++++ b/elf/dl-load.c +@@ -106,8 +106,8 @@ static size_t max_capstrlen attribute_relro; /* Get the generated information about the trusted directories. */ #include "trusted-dirs.h" @@ -54,10 +55,10 @@ Index: git/elf/dl-load.c { SYSTEM_DIRS_LEN }; -Index: git/elf/interp.c -=================================================================== ---- git.orig/elf/interp.c -+++ git/elf/interp.c +diff --git a/elf/interp.c b/elf/interp.c +index 9448802..e7e8c70 100644 +--- a/elf/interp.c ++++ b/elf/interp.c @@ -18,5 +18,5 @@ #include @@ -65,10 +66,10 @@ Index: git/elf/interp.c -const char __invoke_dynamic_linker__[] __attribute__ ((section (".interp"))) +const char __invoke_dynamic_linker__[4096] __attribute__ ((section (".interp"))) = RUNTIME_LINKER; -Index: git/elf/ldconfig.c -=================================================================== ---- git.orig/elf/ldconfig.c -+++ git/elf/ldconfig.c +diff --git a/elf/ldconfig.c b/elf/ldconfig.c +index 467ca82..631a2a9 100644 +--- a/elf/ldconfig.c ++++ b/elf/ldconfig.c @@ -168,6 +168,9 @@ static struct argp argp = options, parse_opt, NULL, doc, NULL, more_help, NULL }; @@ -79,10 +80,10 @@ Index: git/elf/ldconfig.c /* Check if string corresponds to an important hardware capability or a platform. */ static int -Index: git/elf/rtld.c -=================================================================== ---- git.orig/elf/rtld.c -+++ git/elf/rtld.c +diff --git a/elf/rtld.c b/elf/rtld.c +index 4ec25d7..e159c12 100644 +--- a/elf/rtld.c ++++ b/elf/rtld.c @@ -99,6 +99,7 @@ uintptr_t __pointer_chk_guard_local strong_alias (__pointer_chk_guard_local, __pointer_chk_guard) #endif @@ -91,7 +92,7 @@ Index: git/elf/rtld.c /* List of auditing DSOs. */ static struct audit_list -@@ -873,12 +874,12 @@ of this helper program; chances are you +@@ -854,12 +855,12 @@ of this helper program; chances are you did not intend to run this program.\n\ --list list all dependencies and how they are resolved\n\ --verify verify that given object really is a dynamically linked\n\ object we can handle\n\ @@ -106,10 +107,23 @@ Index: git/elf/rtld.c ++_dl_skip_args; --_dl_argc; -Index: git/sysdeps/generic/dl-cache.h -=================================================================== ---- git.orig/sysdeps/generic/dl-cache.h -+++ git/sysdeps/generic/dl-cache.h +diff --git a/iconv/gconv_conf.c b/iconv/gconv_conf.c +index e235188..569f72e 100644 +--- a/iconv/gconv_conf.c ++++ b/iconv/gconv_conf.c +@@ -36,7 +36,7 @@ + + + /* This is the default path where we look for module lists. */ +-static const char default_gconv_path[] = GCONV_PATH; ++static char default_gconv_path[4096] __attribute__ ((section (".gccrelocprefix"))) = GCONV_PATH; + + /* The path elements, as determined by the __gconv_get_path function. + All path elements end in a slash. 
*/ +diff --git a/sysdeps/generic/dl-cache.h b/sysdeps/generic/dl-cache.h +index eb2f900..505804e 100644 +--- a/sysdeps/generic/dl-cache.h ++++ b/sysdeps/generic/dl-cache.h @@ -27,10 +27,6 @@ ((flags) == 1 || (flags) == _DL_CACHE_DEFAULT_ID) #endif @@ -121,51 +135,6 @@ Index: git/sysdeps/generic/dl-cache.h #ifndef add_system_dir # define add_system_dir(dir) add_dir (dir) #endif -Index: git/iconv/gconv_conf.c -=================================================================== ---- git.orig/iconv/gconv_conf.c -+++ git/iconv/gconv_conf.c -@@ -36,7 +36,7 @@ - - - /* This is the default path where we look for module lists. */ --static const char default_gconv_path[] = GCONV_PATH; -+static char default_gconv_path[4096] __attribute__ ((section (".gccrelocprefix"))) = GCONV_PATH; - - /* The path elements, as determined by the __gconv_get_path function. - All path elements end in a slash. */ -Index: git/locale/findlocale.c -=================================================================== ---- git.orig/locale/findlocale.c -+++ git/locale/findlocale.c -@@ -56,7 +56,7 @@ struct __locale_data *const _nl_C[] attr - which are somehow addressed. */ - struct loaded_l10nfile *_nl_locale_file_list[__LC_LAST]; - --const char _nl_default_locale_path[] attribute_hidden = COMPLOCALEDIR; -+char _nl_default_locale_path[4096] __attribute__ ((section (".gccrelocprefix"))) attribute_hidden = COMPLOCALEDIR; - - /* Checks if the name is actually present, that is, not NULL and not - empty. */ -@@ -168,7 +168,7 @@ _nl_find_locale (const char *locale_path - - /* Nothing in the archive. Set the default path to search below. */ - locale_path = _nl_default_locale_path; -- locale_path_len = sizeof _nl_default_locale_path; -+ locale_path_len = strlen(_nl_default_locale_path) + 1; - } - else - /* We really have to load some data. First see whether the name is -Index: git/locale/localeinfo.h -=================================================================== ---- git.orig/locale/localeinfo.h -+++ git/locale/localeinfo.h -@@ -322,7 +322,7 @@ _nl_lookup_word (locale_t l, int categor - } - - /* Default search path if no LOCPATH environment variable. */ --extern const char _nl_default_locale_path[] attribute_hidden; -+extern char _nl_default_locale_path[] attribute_hidden; - - /* Load the locale data for CATEGORY from the file specified by *NAME. - If *NAME is "", use environment variables as specified by POSIX, and +-- +2.10.2 + diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0004-New-condvar-implementation-that-provides-stronger-or.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0004-New-condvar-implementation-that-provides-stronger-or.patch deleted file mode 100644 index 3c7bfa160..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0004-New-condvar-implementation-that-provides-stronger-or.patch +++ /dev/null @@ -1,7171 +0,0 @@ -From 27af8689a6ba8d182f3cbe6ba42cc654ceed0351 Mon Sep 17 00:00:00 2001 -From: Catalin Enache -Date: Fri, 30 Jun 2017 11:56:41 +0300 -Subject: [PATCH 4/6] New condvar implementation that provides stronger - ordering guarantees. - -This is a new implementation for condition variables, required -after http://austingroupbugs.net/view.php?id=609 to fix bug 13165. In -essence, we need to be stricter in which waiters a signal or broadcast -is required to wake up; this couldn't be solved using the old algorithm. -ISO C++ made a similar clarification, so this also fixes a bug in -current libstdc++, for example. 
- -We can't use the old algorithm anymore because futexes do not guarantee -to wake in FIFO order. Thus, when we wake, we can't simply let any -waiter grab a signal, but we need to ensure that one of the waiters -happening before the signal is woken up. This is something the previous -algorithm violated (see bug 13165). - -There's another issue specific to condvars: ABA issues on the underlying -futexes. Unlike mutexes that have just three states, or semaphores that -have no tokens or a limited number of them, the state of a condvar is -the *order* of the waiters. A waiter on a semaphore can grab a token -whenever one is available; a condvar waiter must only consume a signal -if it is eligible to do so as determined by the relative order of the -waiter and the signal. -Therefore, this new algorithm maintains two groups of waiters: Those -eligible to consume signals (G1), and those that have to wait until -previous waiters have consumed signals (G2). Once G1 is empty, G2 -becomes the new G1. 64b counters are used to avoid ABA issues. - -This condvar doesn't yet use a requeue optimization (ie, on a broadcast, -waking just one thread and requeueing all others on the futex of the -mutex supplied by the program). I don't think doing the requeue is -necessarily the right approach (but I haven't done real measurements -yet): -* If a program expects to wake many threads at the same time and make -that scalable, a condvar isn't great anyway because of how it requires -waiters to operate mutually exclusive (due to the mutex usage). Thus, a -thundering herd problem is a scalability problem with or without the -optimization. Using something like a semaphore might be more -appropriate in such a case. -* The scalability problem is actually at the mutex side; the condvar -could help (and it tries to with the requeue optimization), but it -should be the mutex who decides how that is done, and whether it is done -at all. -* Forcing all but one waiter into the kernel-side wait queue of the -mutex prevents/avoids the use of lock elision on the mutex. Thus, it -prevents the only cure against the underlying scalability problem -inherent to condvars. -* If condvars use short critical sections (ie, hold the mutex just to -check a binary flag or such), which they should do ideally, then forcing -all those waiter to proceed serially with kernel-based hand-off (ie, -futex ops in the mutex' contended state, via the futex wait queues) will -be less efficient than just letting a scalable mutex implementation take -care of it. Our current mutex impl doesn't employ spinning at all, but -if critical sections are short, spinning can be much better. -* Doing the requeue stuff requires all waiters to always drive the mutex -into the contended state. This leads to each waiter having to call -futex_wake after lock release, even if this wouldn't be necessary. - - [BZ #13165] - * nptl/pthread_cond_broadcast.c (__pthread_cond_broadcast): Rewrite to - use new algorithm. - * nptl/pthread_cond_destroy.c (__pthread_cond_destroy): Likewise. - * nptl/pthread_cond_init.c (__pthread_cond_init): Likewise. - * nptl/pthread_cond_signal.c (__pthread_cond_signal): Likewise. - * nptl/pthread_cond_wait.c (__pthread_cond_wait): Likewise. - (__pthread_cond_timedwait): Move here from pthread_cond_timedwait.c. - (__condvar_confirm_wakeup, __condvar_cancel_waiting, - __condvar_cleanup_waiting, __condvar_dec_grefs, - __pthread_cond_wait_common): New. - (__condvar_cleanup): Remove. 
- * npt/pthread_condattr_getclock.c (pthread_condattr_getclock): Adapt. - * npt/pthread_condattr_setclock.c (pthread_condattr_setclock): - Likewise. - * npt/pthread_condattr_getpshared.c (pthread_condattr_getpshared): - Likewise. - * npt/pthread_condattr_init.c (pthread_condattr_init): Likewise. - * nptl/tst-cond1.c: Add comment. - * nptl/tst-cond20.c (do_test): Adapt. - * nptl/tst-cond22.c (do_test): Likewise. - * sysdeps/aarch64/nptl/bits/pthreadtypes.h (pthread_cond_t): Adapt - structure. - * sysdeps/arm/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. - * sysdeps/ia64/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. - * sysdeps/m68k/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. - * sysdeps/microblaze/nptl/bits/pthreadtypes.h (pthread_cond_t): - Likewise. - * sysdeps/mips/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. - * sysdeps/nios2/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. - * sysdeps/s390/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. - * sysdeps/sh/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. - * sysdeps/tile/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. - * sysdeps/unix/sysv/linux/alpha/bits/pthreadtypes.h (pthread_cond_t): - Likewise. - * sysdeps/unix/sysv/linux/powerpc/bits/pthreadtypes.h (pthread_cond_t): - Likewise. - * sysdeps/x86/bits/pthreadtypes.h (pthread_cond_t): Likewise. - * sysdeps/nptl/internaltypes.h (COND_NWAITERS_SHIFT): Remove. - (COND_CLOCK_BITS): Adapt. - * sysdeps/nptl/pthread.h (PTHREAD_COND_INITIALIZER): Adapt. - * nptl/pthreadP.h (__PTHREAD_COND_CLOCK_MONOTONIC_MASK, - __PTHREAD_COND_SHARED_MASK): New. - * nptl/nptl-printers.py (CLOCK_IDS): Remove. - (ConditionVariablePrinter, ConditionVariableAttributesPrinter): Adapt. - * nptl/nptl_lock_constants.pysym: Adapt. - * nptl/test-cond-printers.py: Adapt. - * sysdeps/unix/sysv/linux/hppa/internaltypes.h (cond_compat_clear, - cond_compat_check_and_clear): Adapt. - * sysdeps/unix/sysv/linux/hppa/pthread_cond_timedwait.c: Remove file ... - * sysdeps/unix/sysv/linux/hppa/pthread_cond_wait.c - (__pthread_cond_timedwait): ... and move here. - * nptl/DESIGN-condvar.txt: Remove file. - * nptl/lowlevelcond.sym: Likewise. - * nptl/pthread_cond_timedwait.c: Likewise. - * sysdeps/unix/sysv/linux/i386/i486/pthread_cond_broadcast.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i486/pthread_cond_signal.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i486/pthread_cond_timedwait.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i486/pthread_cond_wait.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i586/pthread_cond_broadcast.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i586/pthread_cond_signal.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i586/pthread_cond_timedwait.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i586/pthread_cond_wait.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i686/pthread_cond_broadcast.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i686/pthread_cond_signal.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i686/pthread_cond_timedwait.S: Likewise. - * sysdeps/unix/sysv/linux/i386/i686/pthread_cond_wait.S: Likewise. - * sysdeps/unix/sysv/linux/x86_64/pthread_cond_broadcast.S: Likewise. - * sysdeps/unix/sysv/linux/x86_64/pthread_cond_signal.S: Likewise. - * sysdeps/unix/sysv/linux/x86_64/pthread_cond_timedwait.S: Likewise. - * sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S: Likewise. 
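Since the two-group idea is easier to see in code than in the ChangeLog, here is a deliberately simplified, mutex-based model of the eligibility rule that G1/G2 enforce. All toy_* names are invented for illustration; there are no futexes, no atomics and no actual group switching, and blocking is delegated to an ordinary condvar purely to keep the sketch short.

    #include <pthread.h>
    #include <stdint.h>

    /* Toy model: a signal may only be consumed by a waiter whose position in
       the waiter sequence predates the signal, never by a later arrival.
       Initialize lock/sleeper with the usual PTHREAD_*_INITIALIZERs and the
       counters with zero. */
    struct toy_cond
    {
      pthread_mutex_t lock;        /* protects the fields below */
      pthread_cond_t sleeper;      /* used only to block and unblock threads */
      uint64_t wseq;               /* next waiter position to hand out */
      uint64_t eligible;           /* positions below this may consume a signal */
    };

    void
    toy_wait (struct toy_cond *c, pthread_mutex_t *user_lock)
    {
      pthread_mutex_lock (&c->lock);
      uint64_t my_pos = c->wseq++;          /* join the waiter sequence */
      pthread_mutex_unlock (user_lock);
      while (my_pos >= c->eligible)         /* not signalled yet: keep waiting */
        pthread_cond_wait (&c->sleeper, &c->lock);
      pthread_mutex_unlock (&c->lock);
      pthread_mutex_lock (user_lock);       /* reacquire, like pthread_cond_wait */
    }

    void
    toy_signal (struct toy_cond *c)
    {
      pthread_mutex_lock (&c->lock);
      if (c->eligible < c->wseq)            /* at least one waiter not yet eligible */
        {
          c->eligible++;                    /* make exactly one older waiter eligible */
          pthread_cond_broadcast (&c->sleeper); /* wake all; eligibility decides who leaves */
        }
      pthread_mutex_unlock (&c->lock);
    }

A real implementation cannot afford to wake every blocked thread on each signal just to re-check eligibility, which is where the G1/G2 split comes in: waiters are partitioned into only two groups, so signalers need to track just the size of the older group rather than every waiter's position, and the 64b __wseq/__g1_start counters keep those groups from being confused across reuse (the ABA problem mentioned above).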
- -Upstream-Status: Backport - -Author: Torvald Riegel -Signed-off-by: Catalin Enache ---- - ChangeLog | 74 ++ - nptl/DESIGN-condvar.txt | 134 --- - nptl/Makefile | 6 +- - nptl/lowlevelcond.sym | 16 - - nptl/nptl-printers.py | 70 +- - nptl/nptl_lock_constants.pysym | 27 +- - nptl/pthreadP.h | 7 + - nptl/pthread_cond_broadcast.c | 99 ++- - nptl/pthread_cond_common.c | 466 ++++++++++ - nptl/pthread_cond_destroy.c | 82 +- - nptl/pthread_cond_init.c | 28 +- - nptl/pthread_cond_signal.c | 99 ++- - nptl/pthread_cond_timedwait.c | 268 ------ - nptl/pthread_cond_wait.c | 754 ++++++++++++---- - nptl/pthread_condattr_getclock.c | 2 +- - nptl/pthread_condattr_getpshared.c | 3 +- - nptl/pthread_condattr_init.c | 4 +- - nptl/pthread_condattr_setclock.c | 11 +- - nptl/test-cond-printers.py | 2 +- - nptl/tst-cond1.c | 3 + - nptl/tst-cond20.c | 5 +- - nptl/tst-cond22.c | 18 +- - sysdeps/aarch64/nptl/bits/pthreadtypes.h | 31 +- - sysdeps/arm/nptl/bits/pthreadtypes.h | 29 +- - sysdeps/ia64/nptl/bits/pthreadtypes.h | 31 +- - sysdeps/m68k/nptl/bits/pthreadtypes.h | 32 +- - sysdeps/microblaze/nptl/bits/pthreadtypes.h | 29 +- - sysdeps/mips/nptl/bits/pthreadtypes.h | 31 +- - sysdeps/nios2/nptl/bits/pthreadtypes.h | 31 +- - sysdeps/nptl/internaltypes.h | 17 +- - sysdeps/nptl/pthread.h | 2 +- - sysdeps/s390/nptl/bits/pthreadtypes.h | 29 +- - sysdeps/sh/nptl/bits/pthreadtypes.h | 29 +- - sysdeps/tile/nptl/bits/pthreadtypes.h | 29 +- - sysdeps/unix/sysv/linux/alpha/bits/pthreadtypes.h | 31 +- - sysdeps/unix/sysv/linux/hppa/internaltypes.h | 40 +- - .../unix/sysv/linux/hppa/pthread_cond_timedwait.c | 41 - - sysdeps/unix/sysv/linux/hppa/pthread_cond_wait.c | 13 + - .../sysv/linux/i386/i686/pthread_cond_timedwait.S | 20 - - .../unix/sysv/linux/i386/pthread_cond_broadcast.S | 241 ----- - sysdeps/unix/sysv/linux/i386/pthread_cond_signal.S | 216 ----- - .../unix/sysv/linux/i386/pthread_cond_timedwait.S | 974 --------------------- - sysdeps/unix/sysv/linux/i386/pthread_cond_wait.S | 642 -------------- - .../unix/sysv/linux/powerpc/bits/pthreadtypes.h | 31 +- - .../sysv/linux/x86_64/pthread_cond_broadcast.S | 177 ---- - .../unix/sysv/linux/x86_64/pthread_cond_signal.S | 161 ---- - .../sysv/linux/x86_64/pthread_cond_timedwait.S | 623 ------------- - sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S | 555 ------------ - sysdeps/x86/bits/pthreadtypes.h | 29 +- - 49 files changed, 1671 insertions(+), 4621 deletions(-) - delete mode 100644 nptl/DESIGN-condvar.txt - delete mode 100644 nptl/lowlevelcond.sym - create mode 100644 nptl/pthread_cond_common.c - delete mode 100644 nptl/pthread_cond_timedwait.c - delete mode 100644 sysdeps/unix/sysv/linux/hppa/pthread_cond_timedwait.c - delete mode 100644 sysdeps/unix/sysv/linux/i386/i686/pthread_cond_timedwait.S - delete mode 100644 sysdeps/unix/sysv/linux/i386/pthread_cond_broadcast.S - delete mode 100644 sysdeps/unix/sysv/linux/i386/pthread_cond_signal.S - delete mode 100644 sysdeps/unix/sysv/linux/i386/pthread_cond_timedwait.S - delete mode 100644 sysdeps/unix/sysv/linux/i386/pthread_cond_wait.S - delete mode 100644 sysdeps/unix/sysv/linux/x86_64/pthread_cond_broadcast.S - delete mode 100644 sysdeps/unix/sysv/linux/x86_64/pthread_cond_signal.S - delete mode 100644 sysdeps/unix/sysv/linux/x86_64/pthread_cond_timedwait.S - delete mode 100644 sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S - -diff --git a/ChangeLog b/ChangeLog -index 8036c1e..c94db7b 100644 ---- a/ChangeLog -+++ b/ChangeLog -@@ -1,3 +1,77 @@ -+2016-12-31 Torvald Riegel -+ -+ [BZ #13165] -+ * 
nptl/pthread_cond_broadcast.c (__pthread_cond_broadcast): Rewrite to -+ use new algorithm. -+ * nptl/pthread_cond_destroy.c (__pthread_cond_destroy): Likewise. -+ * nptl/pthread_cond_init.c (__pthread_cond_init): Likewise. -+ * nptl/pthread_cond_signal.c (__pthread_cond_signal): Likewise. -+ * nptl/pthread_cond_wait.c (__pthread_cond_wait): Likewise. -+ (__pthread_cond_timedwait): Move here from pthread_cond_timedwait.c. -+ (__condvar_confirm_wakeup, __condvar_cancel_waiting, -+ __condvar_cleanup_waiting, __condvar_dec_grefs, -+ __pthread_cond_wait_common): New. -+ (__condvar_cleanup): Remove. -+ * npt/pthread_condattr_getclock.c (pthread_condattr_getclock): Adapt. -+ * npt/pthread_condattr_setclock.c (pthread_condattr_setclock): -+ Likewise. -+ * npt/pthread_condattr_getpshared.c (pthread_condattr_getpshared): -+ Likewise. -+ * npt/pthread_condattr_init.c (pthread_condattr_init): Likewise. -+ * nptl/tst-cond1.c: Add comment. -+ * nptl/tst-cond20.c (do_test): Adapt. -+ * nptl/tst-cond22.c (do_test): Likewise. -+ * sysdeps/aarch64/nptl/bits/pthreadtypes.h (pthread_cond_t): Adapt -+ structure. -+ * sysdeps/arm/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. -+ * sysdeps/ia64/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. -+ * sysdeps/m68k/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. -+ * sysdeps/microblaze/nptl/bits/pthreadtypes.h (pthread_cond_t): -+ Likewise. -+ * sysdeps/mips/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. -+ * sysdeps/nios2/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. -+ * sysdeps/s390/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. -+ * sysdeps/sh/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. -+ * sysdeps/tile/nptl/bits/pthreadtypes.h (pthread_cond_t): Likewise. -+ * sysdeps/unix/sysv/linux/alpha/bits/pthreadtypes.h (pthread_cond_t): -+ Likewise. -+ * sysdeps/unix/sysv/linux/powerpc/bits/pthreadtypes.h (pthread_cond_t): -+ Likewise. -+ * sysdeps/x86/bits/pthreadtypes.h (pthread_cond_t): Likewise. -+ * sysdeps/nptl/internaltypes.h (COND_NWAITERS_SHIFT): Remove. -+ (COND_CLOCK_BITS): Adapt. -+ * sysdeps/nptl/pthread.h (PTHREAD_COND_INITIALIZER): Adapt. -+ * nptl/pthreadP.h (__PTHREAD_COND_CLOCK_MONOTONIC_MASK, -+ __PTHREAD_COND_SHARED_MASK): New. -+ * nptl/nptl-printers.py (CLOCK_IDS): Remove. -+ (ConditionVariablePrinter, ConditionVariableAttributesPrinter): Adapt. -+ * nptl/nptl_lock_constants.pysym: Adapt. -+ * nptl/test-cond-printers.py: Adapt. -+ * sysdeps/unix/sysv/linux/hppa/internaltypes.h (cond_compat_clear, -+ cond_compat_check_and_clear): Adapt. -+ * sysdeps/unix/sysv/linux/hppa/pthread_cond_timedwait.c: Remove file ... -+ * sysdeps/unix/sysv/linux/hppa/pthread_cond_wait.c -+ (__pthread_cond_timedwait): ... and move here. -+ * nptl/DESIGN-condvar.txt: Remove file. -+ * nptl/lowlevelcond.sym: Likewise. -+ * nptl/pthread_cond_timedwait.c: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i486/pthread_cond_broadcast.S: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i486/pthread_cond_signal.S: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i486/pthread_cond_timedwait.S: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i486/pthread_cond_wait.S: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i586/pthread_cond_broadcast.S: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i586/pthread_cond_signal.S: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i586/pthread_cond_timedwait.S: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i586/pthread_cond_wait.S: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i686/pthread_cond_broadcast.S: Likewise. 
-+ * sysdeps/unix/sysv/linux/i386/i686/pthread_cond_signal.S: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i686/pthread_cond_timedwait.S: Likewise. -+ * sysdeps/unix/sysv/linux/i386/i686/pthread_cond_wait.S: Likewise. -+ * sysdeps/unix/sysv/linux/x86_64/pthread_cond_broadcast.S: Likewise. -+ * sysdeps/unix/sysv/linux/x86_64/pthread_cond_signal.S: Likewise. -+ * sysdeps/unix/sysv/linux/x86_64/pthread_cond_timedwait.S: Likewise. -+ * sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S: Likewise. -+ - 2016-12-08 Martin Galvan - - * INSTALL: Regenerated. -diff --git a/nptl/DESIGN-condvar.txt b/nptl/DESIGN-condvar.txt -deleted file mode 100644 -index 4845251..0000000 ---- a/nptl/DESIGN-condvar.txt -+++ /dev/null -@@ -1,134 +0,0 @@ --Conditional Variable pseudocode. --================================ -- -- int pthread_cond_timedwait (pthread_cond_t *cv, pthread_mutex_t *mutex); -- int pthread_cond_signal (pthread_cond_t *cv); -- int pthread_cond_broadcast (pthread_cond_t *cv); -- --struct pthread_cond_t { -- -- unsigned int cond_lock; -- -- internal mutex -- -- uint64_t total_seq; -- -- Total number of threads using the conditional variable. -- -- uint64_t wakeup_seq; -- -- sequence number for next wakeup. -- -- uint64_t woken_seq; -- -- sequence number of last woken thread. -- -- uint32_t broadcast_seq; -- --} -- -- --struct cv_data { -- -- pthread_cond_t *cv; -- -- uint32_t bc_seq -- --} -- -- -- --cleanup_handler(cv_data) --{ -- cv = cv_data->cv; -- lll_lock(cv->lock); -- -- if (cv_data->bc_seq == cv->broadcast_seq) { -- ++cv->wakeup_seq; -- ++cv->woken_seq; -- } -- -- /* make sure no signal gets lost. */ -- FUTEX_WAKE(cv->wakeup_seq, ALL); -- -- lll_unlock(cv->lock); --} -- -- --cond_timedwait(cv, mutex, timeout): --{ -- lll_lock(cv->lock); -- mutex_unlock(mutex); -- -- cleanup_push -- -- ++cv->total_seq; -- val = seq = cv->wakeup_seq; -- cv_data.bc = cv->broadcast_seq; -- cv_data.cv = cv; -- -- while (1) { -- -- lll_unlock(cv->lock); -- -- enable_async(&cv_data); -- -- ret = FUTEX_WAIT(cv->wakeup_seq, val, timeout); -- -- restore_async -- -- lll_lock(cv->lock); -- -- if (bc != cv->broadcast_seq) -- goto bc_out; -- -- val = cv->wakeup_seq; -- -- if (val != seq && cv->woken_seq != val) { -- ret = 0; -- break; -- } -- -- if (ret == TIMEDOUT) { -- ++cv->wakeup_seq; -- break; -- } -- } -- -- ++cv->woken_seq; -- -- bc_out: -- lll_unlock(cv->lock); -- -- cleanup_pop -- -- mutex_lock(mutex); -- -- return ret; --} -- --cond_signal(cv) --{ -- lll_lock(cv->lock); -- -- if (cv->total_seq > cv->wakeup_seq) { -- ++cv->wakeup_seq; -- FUTEX_WAKE(cv->wakeup_seq, 1); -- } -- -- lll_unlock(cv->lock); --} -- --cond_broadcast(cv) --{ -- lll_lock(cv->lock); -- -- if (cv->total_seq > cv->wakeup_seq) { -- cv->wakeup_seq = cv->total_seq; -- cv->woken_seq = cv->total_seq; -- ++cv->broadcast_seq; -- FUTEX_WAKE(cv->wakeup_seq, ALL); -- } -- -- lll_unlock(cv->lock); --} -diff --git a/nptl/Makefile b/nptl/Makefile -index 49f6ba6..1f0674c 100644 ---- a/nptl/Makefile -+++ b/nptl/Makefile -@@ -71,7 +71,7 @@ libpthread-routines = nptl-init vars events version pt-interp \ - pthread_rwlockattr_getkind_np \ - pthread_rwlockattr_setkind_np \ - pthread_cond_init pthread_cond_destroy \ -- pthread_cond_wait pthread_cond_timedwait \ -+ pthread_cond_wait \ - pthread_cond_signal pthread_cond_broadcast \ - old_pthread_cond_init old_pthread_cond_destroy \ - old_pthread_cond_wait old_pthread_cond_timedwait \ -@@ -181,7 +181,6 @@ CFLAGS-pthread_timedjoin.c = -fexceptions -fasynchronous-unwind-tables - CFLAGS-pthread_once.c = 
$(uses-callbacks) -fexceptions \ - -fasynchronous-unwind-tables - CFLAGS-pthread_cond_wait.c = -fexceptions -fasynchronous-unwind-tables --CFLAGS-pthread_cond_timedwait.c = -fexceptions -fasynchronous-unwind-tables - CFLAGS-sem_wait.c = -fexceptions -fasynchronous-unwind-tables - CFLAGS-sem_timedwait.c = -fexceptions -fasynchronous-unwind-tables - -@@ -303,8 +302,7 @@ test-xfail-tst-once5 = yes - # Files which must not be linked with libpthread. - tests-nolibpthread = tst-unload - --gen-as-const-headers = pthread-errnos.sym \ -- lowlevelcond.sym lowlevelrwlock.sym \ -+gen-as-const-headers = pthread-errnos.sym lowlevelrwlock.sym \ - unwindbuf.sym \ - lowlevelrobustlock.sym pthread-pi-defines.sym - -diff --git a/nptl/lowlevelcond.sym b/nptl/lowlevelcond.sym -deleted file mode 100644 -index 18e1ada..0000000 ---- a/nptl/lowlevelcond.sym -+++ /dev/null -@@ -1,16 +0,0 @@ --#include --#include --#include --#include -- ---- -- --cond_lock offsetof (pthread_cond_t, __data.__lock) --cond_futex offsetof (pthread_cond_t, __data.__futex) --cond_nwaiters offsetof (pthread_cond_t, __data.__nwaiters) --total_seq offsetof (pthread_cond_t, __data.__total_seq) --wakeup_seq offsetof (pthread_cond_t, __data.__wakeup_seq) --woken_seq offsetof (pthread_cond_t, __data.__woken_seq) --dep_mutex offsetof (pthread_cond_t, __data.__mutex) --broadcast_seq offsetof (pthread_cond_t, __data.__broadcast_seq) --nwaiters_shift COND_NWAITERS_SHIFT -diff --git a/nptl/nptl-printers.py b/nptl/nptl-printers.py -index e402f23..76adadd 100644 ---- a/nptl/nptl-printers.py -+++ b/nptl/nptl-printers.py -@@ -293,16 +293,6 @@ class MutexAttributesPrinter(object): - elif protocol == PTHREAD_PRIO_PROTECT: - self.values.append(('Protocol', 'Priority protect')) - --CLOCK_IDS = { -- CLOCK_REALTIME: 'CLOCK_REALTIME', -- CLOCK_MONOTONIC: 'CLOCK_MONOTONIC', -- CLOCK_PROCESS_CPUTIME_ID: 'CLOCK_PROCESS_CPUTIME_ID', -- CLOCK_THREAD_CPUTIME_ID: 'CLOCK_THREAD_CPUTIME_ID', -- CLOCK_MONOTONIC_RAW: 'CLOCK_MONOTONIC_RAW', -- CLOCK_REALTIME_COARSE: 'CLOCK_REALTIME_COARSE', -- CLOCK_MONOTONIC_COARSE: 'CLOCK_MONOTONIC_COARSE' --} -- - class ConditionVariablePrinter(object): - """Pretty printer for pthread_cond_t.""" - -@@ -313,24 +303,8 @@ class ConditionVariablePrinter(object): - cond: A gdb.value representing a pthread_cond_t. - """ - -- # Since PTHREAD_COND_SHARED is an integer, we need to cast it to void * -- # to be able to compare it to the condvar's __data.__mutex member. -- # -- # While it looks like self.shared_value should be a class variable, -- # that would result in it having an incorrect size if we're loading -- # these printers through .gdbinit for a 64-bit objfile in AMD64. -- # This is because gdb initially assumes the pointer size to be 4 bytes, -- # and only sets it to 8 after loading the 64-bit objfiles. Since -- # .gdbinit runs before any objfiles are loaded, this would effectively -- # make self.shared_value have a size of 4, thus breaking later -- # comparisons with pointers whose types are looked up at runtime. 
-- void_ptr_type = gdb.lookup_type('void').pointer() -- self.shared_value = gdb.Value(PTHREAD_COND_SHARED).cast(void_ptr_type) -- - data = cond['__data'] -- self.total_seq = data['__total_seq'] -- self.mutex = data['__mutex'] -- self.nwaiters = data['__nwaiters'] -+ self.wrefs = data['__wrefs'] - self.values = [] - - self.read_values() -@@ -360,7 +334,6 @@ class ConditionVariablePrinter(object): - - self.read_status() - self.read_attributes() -- self.read_mutex_info() - - def read_status(self): - """Read the status of the condvar. -@@ -369,41 +342,22 @@ class ConditionVariablePrinter(object): - are waiting for it. - """ - -- if self.total_seq == PTHREAD_COND_DESTROYED: -- self.values.append(('Status', 'Destroyed')) -- -- self.values.append(('Threads waiting for this condvar', -- self.nwaiters >> COND_NWAITERS_SHIFT)) -+ self.values.append(('Threads known to still execute a wait function', -+ self.wrefs >> PTHREAD_COND_WREFS_SHIFT)) - - def read_attributes(self): - """Read the condvar's attributes.""" - -- clock_id = self.nwaiters & ((1 << COND_NWAITERS_SHIFT) - 1) -- -- # clock_id must be casted to int because it's a gdb.Value -- self.values.append(('Clock ID', CLOCK_IDS[int(clock_id)])) -+ if (self.wrefs & PTHREAD_COND_CLOCK_MONOTONIC_MASK) != 0: -+ self.values.append(('Clock ID', 'CLOCK_MONOTONIC')) -+ else: -+ self.values.append(('Clock ID', 'CLOCK_REALTIME')) - -- shared = (self.mutex == self.shared_value) -- -- if shared: -+ if (self.wrefs & PTHREAD_COND_SHARED_MASK) != 0: - self.values.append(('Shared', 'Yes')) - else: - self.values.append(('Shared', 'No')) - -- def read_mutex_info(self): -- """Read the data of the mutex this condvar is bound to. -- -- A pthread_cond_t's __data.__mutex member is a void * which -- must be casted to pthread_mutex_t *. For shared condvars, this -- member isn't recorded and has a special value instead. -- """ -- -- if self.mutex and self.mutex != self.shared_value: -- mutex_type = gdb.lookup_type('pthread_mutex_t') -- mutex = self.mutex.cast(mutex_type.pointer()).dereference() -- -- self.values.append(('Mutex', mutex)) -- - class ConditionVariableAttributesPrinter(object): - """Pretty printer for pthread_condattr_t. - -@@ -453,10 +407,12 @@ class ConditionVariableAttributesPrinter(object): - created in self.children. - """ - -- clock_id = self.condattr & ((1 << COND_NWAITERS_SHIFT) - 1) -+ clock_id = (self.condattr >> 1) & ((1 << COND_CLOCK_BITS) - 1) - -- # clock_id must be casted to int because it's a gdb.Value -- self.values.append(('Clock ID', CLOCK_IDS[int(clock_id)])) -+ if clock_id != 0: -+ self.values.append(('Clock ID', 'CLOCK_MONOTONIC')) -+ else: -+ self.values.append(('Clock ID', 'CLOCK_REALTIME')) - - if self.condattr & 1: - self.values.append(('Shared', 'Yes')) -diff --git a/nptl/nptl_lock_constants.pysym b/nptl/nptl_lock_constants.pysym -index 303ec61..2ab3179 100644 ---- a/nptl/nptl_lock_constants.pysym -+++ b/nptl/nptl_lock_constants.pysym -@@ -44,26 +44,13 @@ PTHREAD_PRIO_NONE - PTHREAD_PRIO_INHERIT - PTHREAD_PRIO_PROTECT - ---- These values are hardcoded as well: ---- Value of __mutex for shared condvars. --PTHREAD_COND_SHARED (void *)~0l -- ---- Value of __total_seq for destroyed condvars. --PTHREAD_COND_DESTROYED -1ull -- ---- __nwaiters encodes the number of threads waiting on a condvar ---- and the clock ID. ---- __nwaiters >> COND_NWAITERS_SHIFT gives us the number of waiters. 
--COND_NWAITERS_SHIFT -- ---- Condvar clock IDs --CLOCK_REALTIME --CLOCK_MONOTONIC --CLOCK_PROCESS_CPUTIME_ID --CLOCK_THREAD_CPUTIME_ID --CLOCK_MONOTONIC_RAW --CLOCK_REALTIME_COARSE --CLOCK_MONOTONIC_COARSE -+-- Condition variable -+-- FIXME Why do macros prefixed with __ cannot be used directly? -+PTHREAD_COND_SHARED_MASK __PTHREAD_COND_SHARED_MASK -+PTHREAD_COND_CLOCK_MONOTONIC_MASK __PTHREAD_COND_CLOCK_MONOTONIC_MASK -+COND_CLOCK_BITS -+-- These values are hardcoded: -+PTHREAD_COND_WREFS_SHIFT 3 - - -- Rwlock attributes - PTHREAD_RWLOCK_PREFER_READER_NP -diff --git a/nptl/pthreadP.h b/nptl/pthreadP.h -index 4edc74b..e9992bc 100644 ---- a/nptl/pthreadP.h -+++ b/nptl/pthreadP.h -@@ -167,6 +167,13 @@ enum - #define __PTHREAD_ONCE_FORK_GEN_INCR 4 - - -+/* Condition variable definitions. See __pthread_cond_wait_common. -+ Need to be defined here so there is one place from which -+ nptl_lock_constants can grab them. */ -+#define __PTHREAD_COND_CLOCK_MONOTONIC_MASK 2 -+#define __PTHREAD_COND_SHARED_MASK 1 -+ -+ - /* Internal variables. */ - - -diff --git a/nptl/pthread_cond_broadcast.c b/nptl/pthread_cond_broadcast.c -index 552fd42..87c0755 100644 ---- a/nptl/pthread_cond_broadcast.c -+++ b/nptl/pthread_cond_broadcast.c -@@ -19,72 +19,71 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -+#include - - #include --#include - -+#include "pthread_cond_common.c" - -+ -+/* We do the following steps from __pthread_cond_signal in one critical -+ section: (1) signal all waiters in G1, (2) close G1 so that it can become -+ the new G2 and make G2 the new G1, and (3) signal all waiters in the new -+ G1. We don't need to do all these steps if there are no waiters in G1 -+ and/or G2. See __pthread_cond_signal for further details. */ - int - __pthread_cond_broadcast (pthread_cond_t *cond) - { - LIBC_PROBE (cond_broadcast, 1, cond); - -- int pshared = (cond->__data.__mutex == (void *) ~0l) -- ? LLL_SHARED : LLL_PRIVATE; -- /* Make sure we are alone. */ -- lll_lock (cond->__data.__lock, pshared); -+ unsigned int wrefs = atomic_load_relaxed (&cond->__data.__wrefs); -+ if (wrefs >> 3 == 0) -+ return 0; -+ int private = __condvar_get_private (wrefs); -+ -+ __condvar_acquire_lock (cond, private); - -- /* Are there any waiters to be woken? */ -- if (cond->__data.__total_seq > cond->__data.__wakeup_seq) -+ unsigned long long int wseq = __condvar_load_wseq_relaxed (cond); -+ unsigned int g2 = wseq & 1; -+ unsigned int g1 = g2 ^ 1; -+ wseq >>= 1; -+ bool do_futex_wake = false; -+ -+ /* Step (1): signal all waiters remaining in G1. */ -+ if (cond->__data.__g_size[g1] != 0) - { -- /* Yes. Mark them all as woken. */ -- cond->__data.__wakeup_seq = cond->__data.__total_seq; -- cond->__data.__woken_seq = cond->__data.__total_seq; -- cond->__data.__futex = (unsigned int) cond->__data.__total_seq * 2; -- int futex_val = cond->__data.__futex; -- /* Signal that a broadcast happened. */ -- ++cond->__data.__broadcast_seq; -- -- /* We are done. */ -- lll_unlock (cond->__data.__lock, pshared); -- -- /* Wake everybody. */ -- pthread_mutex_t *mut = (pthread_mutex_t *) cond->__data.__mutex; -- -- /* Do not use requeue for pshared condvars. 
*/ -- if (mut == (void *) ~0l -- || PTHREAD_MUTEX_PSHARED (mut) & PTHREAD_MUTEX_PSHARED_BIT) -- goto wake_all; -- --#if (defined lll_futex_cmp_requeue_pi \ -- && defined __ASSUME_REQUEUE_PI) -- if (USE_REQUEUE_PI (mut)) -- { -- if (lll_futex_cmp_requeue_pi (&cond->__data.__futex, 1, INT_MAX, -- &mut->__data.__lock, futex_val, -- LLL_PRIVATE) == 0) -- return 0; -- } -- else --#endif -- /* lll_futex_requeue returns 0 for success and non-zero -- for errors. */ -- if (!__builtin_expect (lll_futex_requeue (&cond->__data.__futex, 1, -- INT_MAX, &mut->__data.__lock, -- futex_val, LLL_PRIVATE), 0)) -- return 0; -- --wake_all: -- lll_futex_wake (&cond->__data.__futex, INT_MAX, pshared); -- return 0; -+ /* Add as many signals as the remaining size of the group. */ -+ atomic_fetch_add_relaxed (cond->__data.__g_signals + g1, -+ cond->__data.__g_size[g1] << 1); -+ cond->__data.__g_size[g1] = 0; -+ -+ /* We need to wake G1 waiters before we quiesce G1 below. */ -+ /* TODO Only set it if there are indeed futex waiters. We could -+ also try to move this out of the critical section in cases when -+ G2 is empty (and we don't need to quiesce). */ -+ futex_wake (cond->__data.__g_signals + g1, INT_MAX, private); - } - -- /* We are done. */ -- lll_unlock (cond->__data.__lock, pshared); -+ /* G1 is complete. Step (2) is next unless there are no waiters in G2, in -+ which case we can stop. */ -+ if (__condvar_quiesce_and_switch_g1 (cond, wseq, &g1, private)) -+ { -+ /* Step (3): Send signals to all waiters in the old G2 / new G1. */ -+ atomic_fetch_add_relaxed (cond->__data.__g_signals + g1, -+ cond->__data.__g_size[g1] << 1); -+ cond->__data.__g_size[g1] = 0; -+ /* TODO Only set it if there are indeed futex waiters. */ -+ do_futex_wake = true; -+ } -+ -+ __condvar_release_lock (cond, private); -+ -+ if (do_futex_wake) -+ futex_wake (cond->__data.__g_signals + g1, INT_MAX, private); - - return 0; - } -diff --git a/nptl/pthread_cond_common.c b/nptl/pthread_cond_common.c -new file mode 100644 -index 0000000..b374396 ---- /dev/null -+++ b/nptl/pthread_cond_common.c -@@ -0,0 +1,466 @@ -+/* pthread_cond_common -- shared code for condition variable. -+ Copyright (C) 2016 Free Software Foundation, Inc. -+ This file is part of the GNU C Library. -+ -+ The GNU C Library is free software; you can redistribute it and/or -+ modify it under the terms of the GNU Lesser General Public -+ License as published by the Free Software Foundation; either -+ version 2.1 of the License, or (at your option) any later version. -+ -+ The GNU C Library is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public -+ License along with the GNU C Library; if not, see -+ . */ -+ -+#include -+#include -+#include -+#include -+ -+/* We need 3 least-significant bits on __wrefs for something else. 
*/ -+#define __PTHREAD_COND_MAX_GROUP_SIZE ((unsigned) 1 << 29) -+ -+#if __HAVE_64B_ATOMICS == 1 -+ -+static uint64_t __attribute__ ((unused)) -+__condvar_load_wseq_relaxed (pthread_cond_t *cond) -+{ -+ return atomic_load_relaxed (&cond->__data.__wseq); -+} -+ -+static uint64_t __attribute__ ((unused)) -+__condvar_fetch_add_wseq_acquire (pthread_cond_t *cond, unsigned int val) -+{ -+ return atomic_fetch_add_acquire (&cond->__data.__wseq, val); -+} -+ -+static uint64_t __attribute__ ((unused)) -+__condvar_fetch_xor_wseq_release (pthread_cond_t *cond, unsigned int val) -+{ -+ return atomic_fetch_xor_release (&cond->__data.__wseq, val); -+} -+ -+static uint64_t __attribute__ ((unused)) -+__condvar_load_g1_start_relaxed (pthread_cond_t *cond) -+{ -+ return atomic_load_relaxed (&cond->__data.__g1_start); -+} -+ -+static void __attribute__ ((unused)) -+__condvar_add_g1_start_relaxed (pthread_cond_t *cond, unsigned int val) -+{ -+ atomic_store_relaxed (&cond->__data.__g1_start, -+ atomic_load_relaxed (&cond->__data.__g1_start) + val); -+} -+ -+#else -+ -+/* We use two 64b counters: __wseq and __g1_start. They are monotonically -+ increasing and single-writer-multiple-readers counters, so we can implement -+ load, fetch-and-add, and fetch-and-xor operations even when we just have -+ 32b atomics. Values we add or xor are less than or equal to 1<<31 (*), -+ so we only have to make overflow-and-addition atomic wrt. to concurrent -+ load operations and xor operations. To do that, we split each counter into -+ two 32b values of which we reserve the MSB of each to represent an -+ overflow from the lower-order half to the higher-order half. -+ -+ In the common case, the state is (higher-order / lower-order half, and . is -+ basically concatenation of the bits): -+ 0.h / 0.l = h.l -+ -+ When we add a value of x that overflows (i.e., 0.l + x == 1.L), we run the -+ following steps S1-S4 (the values these represent are on the right-hand -+ side): -+ S1: 0.h / 1.L == (h+1).L -+ S2: 1.(h+1) / 1.L == (h+1).L -+ S3: 1.(h+1) / 0.L == (h+1).L -+ S4: 0.(h+1) / 0.L == (h+1).L -+ If the LSB of the higher-order half is set, readers will ignore the -+ overflow bit in the lower-order half. -+ -+ To get an atomic snapshot in load operations, we exploit that the -+ higher-order half is monotonically increasing; if we load a value V from -+ it, then read the lower-order half, and then read the higher-order half -+ again and see the same value V, we know that both halves have existed in -+ the sequence of values the full counter had. This is similar to the -+ validated reads in the time-based STMs in GCC's libitm (e.g., -+ method_ml_wt). -+ -+ The xor operation needs to be an atomic read-modify-write. The write -+ itself is not an issue as it affects just the lower-order half but not bits -+ used in the add operation. To make the full fetch-and-xor atomic, we -+ exploit that concurrently, the value can increase by at most 1<<31 (*): The -+ xor operation is only called while having acquired the lock, so not more -+ than __PTHREAD_COND_MAX_GROUP_SIZE waiters can enter concurrently and thus -+ increment __wseq. Therefore, if the xor operation observes a value of -+ __wseq, then the value it applies the modification to later on can be -+ derived (see below). -+ -+ One benefit of this scheme is that this makes load operations -+ obstruction-free because unlike if we would just lock the counter, readers -+ can almost always interpret a snapshot of each halves. 
Readers can be -+ forced to read a new snapshot when the read is concurrent with an overflow. -+ However, overflows will happen infrequently, so load operations are -+ practically lock-free. -+ -+ (*) The highest value we add is __PTHREAD_COND_MAX_GROUP_SIZE << 2 to -+ __g1_start (the two extra bits are for the lock in the two LSBs of -+ __g1_start). */ -+ -+typedef struct -+{ -+ unsigned int low; -+ unsigned int high; -+} _condvar_lohi; -+ -+static uint64_t -+__condvar_fetch_add_64_relaxed (_condvar_lohi *lh, unsigned int op) -+{ -+ /* S1. Note that this is an atomic read-modify-write so it extends the -+ release sequence of release MO store at S3. */ -+ unsigned int l = atomic_fetch_add_relaxed (&lh->low, op); -+ unsigned int h = atomic_load_relaxed (&lh->high); -+ uint64_t result = ((uint64_t) h << 31) | l; -+ l += op; -+ if ((l >> 31) > 0) -+ { -+ /* Overflow. Need to increment higher-order half. Note that all -+ add operations are ordered in happens-before. */ -+ h++; -+ /* S2. Release MO to synchronize with the loads of the higher-order half -+ in the load operation. See __condvar_load_64_relaxed. */ -+ atomic_store_release (&lh->high, h | ((unsigned int) 1 << 31)); -+ l ^= (unsigned int) 1 << 31; -+ /* S3. See __condvar_load_64_relaxed. */ -+ atomic_store_release (&lh->low, l); -+ /* S4. Likewise. */ -+ atomic_store_release (&lh->high, h); -+ } -+ return result; -+} -+ -+static uint64_t -+__condvar_load_64_relaxed (_condvar_lohi *lh) -+{ -+ unsigned int h, l, h2; -+ do -+ { -+ /* This load and the second one below to the same location read from the -+ stores in the overflow handling of the add operation or the -+ initializing stores (which is a simple special case because -+ initialization always completely happens before further use). -+ Because no two stores to the higher-order half write the same value, -+ the loop ensures that if we continue to use the snapshot, this load -+ and the second one read from the same store operation. All candidate -+ store operations have release MO. -+ If we read from S2 in the first load, then we will see the value of -+ S1 on the next load (because we synchronize with S2), or a value -+ later in modification order. We correctly ignore the lower-half's -+ overflow bit in this case. If we read from S4, then we will see the -+ value of S3 in the next load (or a later value), which does not have -+ the overflow bit set anymore. -+ */ -+ h = atomic_load_acquire (&lh->high); -+ /* This will read from the release sequence of S3 (i.e, either the S3 -+ store or the read-modify-writes at S1 following S3 in modification -+ order). Thus, the read synchronizes with S3, and the following load -+ of the higher-order half will read from the matching S2 (or a later -+ value). -+ Thus, if we read a lower-half value here that already overflowed and -+ belongs to an increased higher-order half value, we will see the -+ latter and h and h2 will not be equal. */ -+ l = atomic_load_acquire (&lh->low); -+ /* See above. 
*/ -+ h2 = atomic_load_relaxed (&lh->high); -+ } -+ while (h != h2); -+ if (((l >> 31) > 0) && ((h >> 31) > 0)) -+ l ^= (unsigned int) 1 << 31; -+ return ((uint64_t) (h & ~((unsigned int) 1 << 31)) << 31) + l; -+} -+ -+static uint64_t __attribute__ ((unused)) -+__condvar_load_wseq_relaxed (pthread_cond_t *cond) -+{ -+ return __condvar_load_64_relaxed ((_condvar_lohi *) &cond->__data.__wseq32); -+} -+ -+static uint64_t __attribute__ ((unused)) -+__condvar_fetch_add_wseq_acquire (pthread_cond_t *cond, unsigned int val) -+{ -+ uint64_t r = __condvar_fetch_add_64_relaxed -+ ((_condvar_lohi *) &cond->__data.__wseq32, val); -+ atomic_thread_fence_acquire (); -+ return r; -+} -+ -+static uint64_t __attribute__ ((unused)) -+__condvar_fetch_xor_wseq_release (pthread_cond_t *cond, unsigned int val) -+{ -+ _condvar_lohi *lh = (_condvar_lohi *) &cond->__data.__wseq32; -+ /* First, get the current value. See __condvar_load_64_relaxed. */ -+ unsigned int h, l, h2; -+ do -+ { -+ h = atomic_load_acquire (&lh->high); -+ l = atomic_load_acquire (&lh->low); -+ h2 = atomic_load_relaxed (&lh->high); -+ } -+ while (h != h2); -+ if (((l >> 31) > 0) && ((h >> 31) == 0)) -+ h++; -+ h &= ~((unsigned int) 1 << 31); -+ l &= ~((unsigned int) 1 << 31); -+ -+ /* Now modify. Due to the coherence rules, the prior load will read a value -+ earlier in modification order than the following fetch-xor. -+ This uses release MO to make the full operation have release semantics -+ (all other operations access the lower-order half). */ -+ unsigned int l2 = atomic_fetch_xor_release (&lh->low, val) -+ & ~((unsigned int) 1 << 31); -+ if (l2 < l) -+ /* The lower-order half overflowed in the meantime. This happened exactly -+ once due to the limit on concurrent waiters (see above). */ -+ h++; -+ return ((uint64_t) h << 31) + l2; -+} -+ -+static uint64_t __attribute__ ((unused)) -+__condvar_load_g1_start_relaxed (pthread_cond_t *cond) -+{ -+ return __condvar_load_64_relaxed -+ ((_condvar_lohi *) &cond->__data.__g1_start32); -+} -+ -+static void __attribute__ ((unused)) -+__condvar_add_g1_start_relaxed (pthread_cond_t *cond, unsigned int val) -+{ -+ ignore_value (__condvar_fetch_add_64_relaxed -+ ((_condvar_lohi *) &cond->__data.__g1_start32, val)); -+} -+ -+#endif /* !__HAVE_64B_ATOMICS */ -+ -+ -+/* The lock that signalers use. See pthread_cond_wait_common for uses. -+ The lock is our normal three-state lock: not acquired (0) / acquired (1) / -+ acquired-with-futex_wake-request (2). However, we need to preserve the -+ other bits in the unsigned int used for the lock, and therefore it is a -+ little more complex. */ -+static void __attribute__ ((unused)) -+__condvar_acquire_lock (pthread_cond_t *cond, int private) -+{ -+ unsigned int s = atomic_load_relaxed (&cond->__data.__g1_orig_size); -+ while ((s & 3) == 0) -+ { -+ if (atomic_compare_exchange_weak_acquire (&cond->__data.__g1_orig_size, -+ &s, s | 1)) -+ return; -+ /* TODO Spinning and back-off. */ -+ } -+ /* We can't change from not acquired to acquired, so try to change to -+ acquired-with-futex-wake-request and do a futex wait if we cannot change -+ from not acquired. */ -+ while (1) -+ { -+ while ((s & 3) != 2) -+ { -+ if (atomic_compare_exchange_weak_acquire -+ (&cond->__data.__g1_orig_size, &s, (s & ~(unsigned int) 3) | 2)) -+ { -+ if ((s & 3) == 0) -+ return; -+ break; -+ } -+ /* TODO Back off. */ -+ } -+ futex_wait_simple (&cond->__data.__g1_orig_size, -+ (s & ~(unsigned int) 3) | 2, private); -+ /* Reload so we see a recent value. 
*/ -+ s = atomic_load_relaxed (&cond->__data.__g1_orig_size); -+ } -+} -+ -+/* See __condvar_acquire_lock. */ -+static void __attribute__ ((unused)) -+__condvar_release_lock (pthread_cond_t *cond, int private) -+{ -+ if ((atomic_fetch_and_release (&cond->__data.__g1_orig_size, -+ ~(unsigned int) 3) & 3) -+ == 2) -+ futex_wake (&cond->__data.__g1_orig_size, 1, private); -+} -+ -+/* Only use this when having acquired the lock. */ -+static unsigned int __attribute__ ((unused)) -+__condvar_get_orig_size (pthread_cond_t *cond) -+{ -+ return atomic_load_relaxed (&cond->__data.__g1_orig_size) >> 2; -+} -+ -+/* Only use this when having acquired the lock. */ -+static void __attribute__ ((unused)) -+__condvar_set_orig_size (pthread_cond_t *cond, unsigned int size) -+{ -+ /* We have acquired the lock, but might get one concurrent update due to a -+ lock state change from acquired to acquired-with-futex_wake-request. -+ The store with relaxed MO is fine because there will be no further -+ changes to the lock bits nor the size, and we will subsequently release -+ the lock with release MO. */ -+ unsigned int s; -+ s = (atomic_load_relaxed (&cond->__data.__g1_orig_size) & 3) -+ | (size << 2); -+ if ((atomic_exchange_relaxed (&cond->__data.__g1_orig_size, s) & 3) -+ != (s & 3)) -+ atomic_store_relaxed (&cond->__data.__g1_orig_size, (size << 2) | 2); -+} -+ -+/* Returns FUTEX_SHARED or FUTEX_PRIVATE based on the provided __wrefs -+ value. */ -+static int __attribute__ ((unused)) -+__condvar_get_private (int flags) -+{ -+ if ((flags & __PTHREAD_COND_SHARED_MASK) == 0) -+ return FUTEX_PRIVATE; -+ else -+ return FUTEX_SHARED; -+} -+ -+/* This closes G1 (whose index is in G1INDEX), waits for all futex waiters to -+ leave G1, converts G1 into a fresh G2, and then switches group roles so that -+ the former G2 becomes the new G1 ending at the current __wseq value when we -+ eventually make the switch (WSEQ is just an observation of __wseq by the -+ signaler). -+ If G2 is empty, it will not switch groups because then it would create an -+ empty G1 which would require switching groups again on the next signal. -+ Returns false iff groups were not switched because G2 was empty. */ -+static bool __attribute__ ((unused)) -+__condvar_quiesce_and_switch_g1 (pthread_cond_t *cond, uint64_t wseq, -+ unsigned int *g1index, int private) -+{ -+ const unsigned int maxspin = 0; -+ unsigned int g1 = *g1index; -+ -+ /* If there is no waiter in G2, we don't do anything. The expression may -+ look odd but remember that __g_size might hold a negative value, so -+ putting the expression this way avoids relying on implementation-defined -+ behavior. -+ Note that this works correctly for a zero-initialized condvar too. */ -+ unsigned int old_orig_size = __condvar_get_orig_size (cond); -+ uint64_t old_g1_start = __condvar_load_g1_start_relaxed (cond) >> 1; -+ if (((unsigned) (wseq - old_g1_start - old_orig_size) -+ + cond->__data.__g_size[g1 ^ 1]) == 0) -+ return false; -+ -+ /* Now try to close and quiesce G1. We have to consider the following kinds -+ of waiters: -+ * Waiters from less recent groups than G1 are not affected because -+ nothing will change for them apart from __g1_start getting larger. -+ * New waiters arriving concurrently with the group switching will all go -+ into G2 until we atomically make the switch. Waiters existing in G2 -+ are not affected. 
-+ * Waiters in G1 will be closed out immediately by setting a flag in -+ __g_signals, which will prevent waiters from blocking using a futex on -+ __g_signals and also notifies them that the group is closed. As a -+ result, they will eventually remove their group reference, allowing us -+ to close switch group roles. */ -+ -+ /* First, set the closed flag on __g_signals. This tells waiters that are -+ about to wait that they shouldn't do that anymore. This basically -+ serves as an advance notificaton of the upcoming change to __g1_start; -+ waiters interpret it as if __g1_start was larger than their waiter -+ sequence position. This allows us to change __g1_start after waiting -+ for all existing waiters with group references to leave, which in turn -+ makes recovery after stealing a signal simpler because it then can be -+ skipped if __g1_start indicates that the group is closed (otherwise, -+ we would have to recover always because waiters don't know how big their -+ groups are). Relaxed MO is fine. */ -+ atomic_fetch_or_relaxed (cond->__data.__g_signals + g1, 1); -+ -+ /* Wait until there are no group references anymore. The fetch-or operation -+ injects us into the modification order of __g_refs; release MO ensures -+ that waiters incrementing __g_refs after our fetch-or see the previous -+ changes to __g_signals and to __g1_start that had to happen before we can -+ switch this G1 and alias with an older group (we have two groups, so -+ aliasing requires switching group roles twice). Note that nobody else -+ can have set the wake-request flag, so we do not have to act upon it. -+ -+ Also note that it is harmless if older waiters or waiters from this G1 -+ get a group reference after we have quiesced the group because it will -+ remain closed for them either because of the closed flag in __g_signals -+ or the later update to __g1_start. New waiters will never arrive here -+ but instead continue to go into the still current G2. */ -+ unsigned r = atomic_fetch_or_release (cond->__data.__g_refs + g1, 0); -+ while ((r >> 1) > 0) -+ { -+ for (unsigned int spin = maxspin; ((r >> 1) > 0) && (spin > 0); spin--) -+ { -+ /* TODO Back off. */ -+ r = atomic_load_relaxed (cond->__data.__g_refs + g1); -+ } -+ if ((r >> 1) > 0) -+ { -+ /* There is still a waiter after spinning. Set the wake-request -+ flag and block. Relaxed MO is fine because this is just about -+ this futex word. */ -+ r = atomic_fetch_or_relaxed (cond->__data.__g_refs + g1, 1); -+ -+ if ((r >> 1) > 0) -+ futex_wait_simple (cond->__data.__g_refs + g1, r, private); -+ /* Reload here so we eventually see the most recent value even if we -+ do not spin. */ -+ r = atomic_load_relaxed (cond->__data.__g_refs + g1); -+ } -+ } -+ /* Acquire MO so that we synchronize with the release operation that waiters -+ use to decrement __g_refs and thus happen after the waiters we waited -+ for. */ -+ atomic_thread_fence_acquire (); -+ -+ /* Update __g1_start, which finishes closing this group. The value we add -+ will never be negative because old_orig_size can only be zero when we -+ switch groups the first time after a condvar was initialized, in which -+ case G1 will be at index 1 and we will add a value of 1. See above for -+ why this takes place after waiting for quiescence of the group. -+ Relaxed MO is fine because the change comes with no additional -+ constraints that others would have to observe. */ -+ __condvar_add_g1_start_relaxed (cond, -+ (old_orig_size << 1) + (g1 == 1 ? 
1 : - 1)); -+ -+ /* Now reopen the group, thus enabling waiters to again block using the -+ futex controlled by __g_signals. Release MO so that observers that see -+ no signals (and thus can block) also see the write __g1_start and thus -+ that this is now a new group (see __pthread_cond_wait_common for the -+ matching acquire MO loads). */ -+ atomic_store_release (cond->__data.__g_signals + g1, 0); -+ -+ /* At this point, the old G1 is now a valid new G2 (but not in use yet). -+ No old waiter can neither grab a signal nor acquire a reference without -+ noticing that __g1_start is larger. -+ We can now publish the group switch by flipping the G2 index in __wseq. -+ Release MO so that this synchronizes with the acquire MO operation -+ waiters use to obtain a position in the waiter sequence. */ -+ wseq = __condvar_fetch_xor_wseq_release (cond, 1) >> 1; -+ g1 ^= 1; -+ *g1index ^= 1; -+ -+ /* These values are just observed by signalers, and thus protected by the -+ lock. */ -+ unsigned int orig_size = wseq - (old_g1_start + old_orig_size); -+ __condvar_set_orig_size (cond, orig_size); -+ /* Use and addition to not loose track of cancellations in what was -+ previously G2. */ -+ cond->__data.__g_size[g1] += orig_size; -+ -+ /* The new G1's size may be zero because of cancellations during its time -+ as G2. If this happens, there are no waiters that have to receive a -+ signal, so we do not need to add any and return false. */ -+ if (cond->__data.__g_size[g1] == 0) -+ return false; -+ -+ return true; -+} -diff --git a/nptl/pthread_cond_destroy.c b/nptl/pthread_cond_destroy.c -index 1acd804..5845c6a 100644 ---- a/nptl/pthread_cond_destroy.c -+++ b/nptl/pthread_cond_destroy.c -@@ -20,66 +20,42 @@ - #include - #include "pthreadP.h" - #include -- -- -+#include -+#include -+ -+#include "pthread_cond_common.c" -+ -+/* See __pthread_cond_wait for a high-level description of the algorithm. -+ -+ A correct program must make sure that no waiters are blocked on the condvar -+ when it is destroyed, and that there are no concurrent signals or -+ broadcasts. To wake waiters reliably, the program must signal or -+ broadcast while holding the mutex or after having held the mutex. It must -+ also ensure that no signal or broadcast are still pending to unblock -+ waiters; IOW, because waiters can wake up spuriously, the program must -+ effectively ensure that destruction happens after the execution of those -+ signal or broadcast calls. -+ Thus, we can assume that all waiters that are still accessing the condvar -+ have been woken. We wait until they have confirmed to have woken up by -+ decrementing __wrefs. */ - int - __pthread_cond_destroy (pthread_cond_t *cond) - { -- int pshared = (cond->__data.__mutex == (void *) ~0l) -- ? LLL_SHARED : LLL_PRIVATE; -- - LIBC_PROBE (cond_destroy, 1, cond); - -- /* Make sure we are alone. */ -- lll_lock (cond->__data.__lock, pshared); -- -- if (cond->__data.__total_seq > cond->__data.__wakeup_seq) -- { -- /* If there are still some waiters which have not been -- woken up, this is an application bug. */ -- lll_unlock (cond->__data.__lock, pshared); -- return EBUSY; -- } -- -- /* Tell pthread_cond_*wait that this condvar is being destroyed. */ -- cond->__data.__total_seq = -1ULL; -- -- /* If there are waiters which have been already signalled or -- broadcasted, but still are using the pthread_cond_t structure, -- pthread_cond_destroy needs to wait for them. 
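
pthread_cond_destroy, as rewritten in this hunk, only sets a wake-request flag and then waits for the waiter reference count kept in the upper bits of __wrefs to drain to zero (the loop appears just below). The following stand-alone sketch models that protocol with C11 atomics; the names wrefs_model, waiter_enter, waiter_exit and destroy_wait are invented for illustration, and a yield loop stands in for the futex_wait/futex_wake pair, so this is a simplified model rather than the glibc code.

    #include <sched.h>
    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_uint wrefs_model;   /* stands in for cond->__data.__wrefs */

    static void waiter_enter (void)
    {
      /* A waiter holds one reference; the count starts at bit 3. */
      atomic_fetch_add_explicit (&wrefs_model, 8, memory_order_relaxed);
    }

    static void waiter_exit (void)
    {
      /* Drop the reference.  If destruction is pending (bit 2) and this was
         the last waiter, the real code calls futex_wake on __wrefs here. */
      unsigned int prev = atomic_fetch_sub_explicit (&wrefs_model, 8,
                                                     memory_order_release);
      if ((prev >> 2) == 3)
        puts ("last waiter: wake the destroyer (futex_wake in the real code)");
    }

    static void destroy_wait (void)
    {
      /* Announce destruction (bit 2) and wait for the count to reach zero;
         the real code blocks on a futex instead of yielding. */
      unsigned int w = atomic_fetch_or_explicit (&wrefs_model, 4,
                                                 memory_order_acquire);
      while ((w >> 3) != 0)
        {
          sched_yield ();
          w = atomic_load_explicit (&wrefs_model, memory_order_acquire);
        }
    }

    int main (void)
    {
      waiter_enter ();
      waiter_exit ();
      destroy_wait ();
      puts ("all references dropped; the condvar memory may be reused");
      return 0;
    }
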
*/ -- unsigned int nwaiters = cond->__data.__nwaiters; -- -- if (nwaiters >= (1 << COND_NWAITERS_SHIFT)) -+ /* Set the wake request flag. We could also spin, but destruction that is -+ concurrent with still-active waiters is probably neither common nor -+ performance critical. Acquire MO to synchronize with waiters confirming -+ that they finished. */ -+ unsigned int wrefs = atomic_fetch_or_acquire (&cond->__data.__wrefs, 4); -+ int private = __condvar_get_private (wrefs); -+ while (wrefs >> 3 != 0) - { -- /* Wake everybody on the associated mutex in case there are -- threads that have been requeued to it. -- Without this, pthread_cond_destroy could block potentially -- for a long time or forever, as it would depend on other -- thread's using the mutex. -- When all threads waiting on the mutex are woken up, pthread_cond_wait -- only waits for threads to acquire and release the internal -- condvar lock. */ -- if (cond->__data.__mutex != NULL -- && cond->__data.__mutex != (void *) ~0l) -- { -- pthread_mutex_t *mut = (pthread_mutex_t *) cond->__data.__mutex; -- lll_futex_wake (&mut->__data.__lock, INT_MAX, -- PTHREAD_MUTEX_PSHARED (mut)); -- } -- -- do -- { -- lll_unlock (cond->__data.__lock, pshared); -- -- lll_futex_wait (&cond->__data.__nwaiters, nwaiters, pshared); -- -- lll_lock (cond->__data.__lock, pshared); -- -- nwaiters = cond->__data.__nwaiters; -- } -- while (nwaiters >= (1 << COND_NWAITERS_SHIFT)); -+ futex_wait_simple (&cond->__data.__wrefs, wrefs, private); -+ /* See above. */ -+ wrefs = atomic_load_acquire (&cond->__data.__wrefs); - } -- -+ /* The memory the condvar occupies can now be reused. */ - return 0; - } - versioned_symbol (libpthread, __pthread_cond_destroy, -diff --git a/nptl/pthread_cond_init.c b/nptl/pthread_cond_init.c -index 9023370..c1eac5f 100644 ---- a/nptl/pthread_cond_init.c -+++ b/nptl/pthread_cond_init.c -@@ -19,25 +19,29 @@ - #include - #include "pthreadP.h" - #include -+#include - - -+/* See __pthread_cond_wait for details. */ - int - __pthread_cond_init (pthread_cond_t *cond, const pthread_condattr_t *cond_attr) - { - struct pthread_condattr *icond_attr = (struct pthread_condattr *) cond_attr; - -- cond->__data.__lock = LLL_LOCK_INITIALIZER; -- cond->__data.__futex = 0; -- cond->__data.__nwaiters = (icond_attr != NULL -- ? ((icond_attr->value >> 1) -- & ((1 << COND_NWAITERS_SHIFT) - 1)) -- : CLOCK_REALTIME); -- cond->__data.__total_seq = 0; -- cond->__data.__wakeup_seq = 0; -- cond->__data.__woken_seq = 0; -- cond->__data.__mutex = (icond_attr == NULL || (icond_attr->value & 1) == 0 -- ? NULL : (void *) ~0l); -- cond->__data.__broadcast_seq = 0; -+ memset (cond, 0, sizeof (pthread_cond_t)); -+ -+ /* Update the pretty printers if the internal representation of icond_attr -+ is changed. */ -+ -+ /* Iff not equal to ~0l, this is a PTHREAD_PROCESS_PRIVATE condvar. */ -+ if (icond_attr != NULL && (icond_attr->value & 1) != 0) -+ cond->__data.__wrefs |= __PTHREAD_COND_SHARED_MASK; -+ int clockid = (icond_attr != NULL -+ ? ((icond_attr->value >> 1) & ((1 << COND_CLOCK_BITS) - 1)) -+ : CLOCK_REALTIME); -+ /* If 0, CLOCK_REALTIME is used; CLOCK_MONOTONIC otherwise. 
*/ -+ if (clockid != CLOCK_REALTIME) -+ cond->__data.__wrefs |= __PTHREAD_COND_CLOCK_MONOTONIC_MASK; - - LIBC_PROBE (cond_init, 2, cond, cond_attr); - -diff --git a/nptl/pthread_cond_signal.c b/nptl/pthread_cond_signal.c -index b3a6d3d..a95d569 100644 ---- a/nptl/pthread_cond_signal.c -+++ b/nptl/pthread_cond_signal.c -@@ -19,62 +19,79 @@ - #include - #include - #include --#include -+#include - #include - #include -+#include -+#include - - #include --#include - #include - -+#include "pthread_cond_common.c" - -+/* See __pthread_cond_wait for a high-level description of the algorithm. */ - int - __pthread_cond_signal (pthread_cond_t *cond) - { -- int pshared = (cond->__data.__mutex == (void *) ~0l) -- ? LLL_SHARED : LLL_PRIVATE; -- - LIBC_PROBE (cond_signal, 1, cond); - -- /* Make sure we are alone. */ -- lll_lock (cond->__data.__lock, pshared); -- -- /* Are there any waiters to be woken? */ -- if (cond->__data.__total_seq > cond->__data.__wakeup_seq) -+ /* First check whether there are waiters. Relaxed MO is fine for that for -+ the same reasons that relaxed MO is fine when observing __wseq (see -+ below). */ -+ unsigned int wrefs = atomic_load_relaxed (&cond->__data.__wrefs); -+ if (wrefs >> 3 == 0) -+ return 0; -+ int private = __condvar_get_private (wrefs); -+ -+ __condvar_acquire_lock (cond, private); -+ -+ /* Load the waiter sequence number, which represents our relative ordering -+ to any waiters. Relaxed MO is sufficient for that because: -+ 1) We can pick any position that is allowed by external happens-before -+ constraints. In particular, if another __pthread_cond_wait call -+ happened before us, this waiter must be eligible for being woken by -+ us. The only way do establish such a happens-before is by signaling -+ while having acquired the mutex associated with the condvar and -+ ensuring that the signal's critical section happens after the waiter. -+ Thus, the mutex ensures that we see that waiter's __wseq increase. -+ 2) Once we pick a position, we do not need to communicate this to the -+ program via a happens-before that we set up: First, any wake-up could -+ be a spurious wake-up, so the program must not interpret a wake-up as -+ an indication that the waiter happened before a particular signal; -+ second, a program cannot detect whether a waiter has not yet been -+ woken (i.e., it cannot distinguish between a non-woken waiter and one -+ that has been woken but hasn't resumed execution yet), and thus it -+ cannot try to deduce that a signal happened before a particular -+ waiter. */ -+ unsigned long long int wseq = __condvar_load_wseq_relaxed (cond); -+ unsigned int g1 = (wseq & 1) ^ 1; -+ wseq >>= 1; -+ bool do_futex_wake = false; -+ -+ /* If G1 is still receiving signals, we put the signal there. If not, we -+ check if G2 has waiters, and if so, quiesce and switch G1 to the former -+ G2; if this results in a new G1 with waiters (G2 might have cancellations -+ already, see __condvar_quiesce_and_switch_g1), we put the signal in the -+ new G1. */ -+ if ((cond->__data.__g_size[g1] != 0) -+ || __condvar_quiesce_and_switch_g1 (cond, wseq, &g1, private)) - { -- /* Yes. Mark one of them as woken. */ -- ++cond->__data.__wakeup_seq; -- ++cond->__data.__futex; -- --#if (defined lll_futex_cmp_requeue_pi \ -- && defined __ASSUME_REQUEUE_PI) -- pthread_mutex_t *mut = cond->__data.__mutex; -- -- if (USE_REQUEUE_PI (mut) -- /* This can only really fail with a ENOSYS, since nobody can modify -- futex while we have the cond_lock. 
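
In the new scheme, __pthread_cond_signal (shown above) publishes a signal by adding 2 to the group's __g_signals word and waking one futex waiter, while waiters in __pthread_cond_wait (further below) grab a signal with a compare-and-swap that subtracts 2; bit 0 of the word marks a closed group. The minimal stand-alone model below illustrates only that producer/consumer step, with invented names and without the blocking, reference counting and group switching:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    /* Model of one group's __g_signals word: bit 0 means "group closed",
       the remaining bits count available signals in units of 2. */
    static atomic_uint g_signals_model;

    static void model_post_signal (void)
    {
      /* Adding 2 leaves the closed flag in bit 0 untouched. */
      atomic_fetch_add_explicit (&g_signals_model, 2, memory_order_relaxed);
    }

    static bool model_consume_signal (void)
    {
      unsigned int s = atomic_load_explicit (&g_signals_model,
                                             memory_order_acquire);
      for (;;)
        {
          if ((s & 1) != 0)
            return true;      /* group closed: no signal needs to be grabbed */
          if (s == 0)
            return false;     /* nothing available; the real code blocks here */
          if (atomic_compare_exchange_weak_explicit (&g_signals_model, &s,
                                                     s - 2,
                                                     memory_order_acquire,
                                                     memory_order_acquire))
            return true;      /* consumed one signal */
        }
    }

    int main (void)
    {
      model_post_signal ();
      printf ("consumed: %d\n", model_consume_signal ());
      printf ("consumed: %d\n", model_consume_signal ());
      return 0;
    }
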
*/ -- && lll_futex_cmp_requeue_pi (&cond->__data.__futex, 1, 0, -- &mut->__data.__lock, -- cond->__data.__futex, pshared) == 0) -- { -- lll_unlock (cond->__data.__lock, pshared); -- return 0; -- } -- else --#endif -- /* Wake one. */ -- if (! __builtin_expect (lll_futex_wake_unlock (&cond->__data.__futex, -- 1, 1, -- &cond->__data.__lock, -- pshared), 0)) -- return 0; -- -- /* Fallback if neither of them work. */ -- lll_futex_wake (&cond->__data.__futex, 1, pshared); -+ /* Add a signal. Relaxed MO is fine because signaling does not need to -+ establish a happens-before relation (see above). We do not mask the -+ release-MO store when initializing a group in -+ __condvar_quiesce_and_switch_g1 because we use an atomic -+ read-modify-write and thus extend that store's release sequence. */ -+ atomic_fetch_add_relaxed (cond->__data.__g_signals + g1, 2); -+ cond->__data.__g_size[g1]--; -+ /* TODO Only set it if there are indeed futex waiters. */ -+ do_futex_wake = true; - } - -- /* We are done. */ -- lll_unlock (cond->__data.__lock, pshared); -+ __condvar_release_lock (cond, private); -+ -+ if (do_futex_wake) -+ futex_wake (cond->__data.__g_signals + g1, 1, private); - - return 0; - } -diff --git a/nptl/pthread_cond_timedwait.c b/nptl/pthread_cond_timedwait.c -deleted file mode 100644 -index 711a51d..0000000 ---- a/nptl/pthread_cond_timedwait.c -+++ /dev/null -@@ -1,268 +0,0 @@ --/* Copyright (C) 2003-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Martin Schwidefsky , 2003. -- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. -- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library; if not, see -- . */ -- --#include --#include --#include --#include --#include --#include --#include --#include -- --#include -- --#ifndef HAVE_CLOCK_GETTIME_VSYSCALL --# undef INTERNAL_VSYSCALL --# define INTERNAL_VSYSCALL INTERNAL_SYSCALL --# undef INLINE_VSYSCALL --# define INLINE_VSYSCALL INLINE_SYSCALL --#else --# include --#endif -- --/* Cleanup handler, defined in pthread_cond_wait.c. */ --extern void __condvar_cleanup (void *arg) -- __attribute__ ((visibility ("hidden"))); -- --struct _condvar_cleanup_buffer --{ -- int oldtype; -- pthread_cond_t *cond; -- pthread_mutex_t *mutex; -- unsigned int bc_seq; --}; -- --int --__pthread_cond_timedwait (pthread_cond_t *cond, pthread_mutex_t *mutex, -- const struct timespec *abstime) --{ -- struct _pthread_cleanup_buffer buffer; -- struct _condvar_cleanup_buffer cbuffer; -- int result = 0; -- -- /* Catch invalid parameters. */ -- if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000) -- return EINVAL; -- -- int pshared = (cond->__data.__mutex == (void *) ~0l) -- ? LLL_SHARED : LLL_PRIVATE; -- --#if (defined lll_futex_timed_wait_requeue_pi \ -- && defined __ASSUME_REQUEUE_PI) -- int pi_flag = 0; --#endif -- -- /* Make sure we are alone. */ -- lll_lock (cond->__data.__lock, pshared); -- -- /* Now we can release the mutex. 
*/ -- int err = __pthread_mutex_unlock_usercnt (mutex, 0); -- if (err) -- { -- lll_unlock (cond->__data.__lock, pshared); -- return err; -- } -- -- /* We have one new user of the condvar. */ -- ++cond->__data.__total_seq; -- ++cond->__data.__futex; -- cond->__data.__nwaiters += 1 << COND_NWAITERS_SHIFT; -- -- /* Work around the fact that the kernel rejects negative timeout values -- despite them being valid. */ -- if (__glibc_unlikely (abstime->tv_sec < 0)) -- goto timeout; -- -- /* Remember the mutex we are using here. If there is already a -- different address store this is a bad user bug. Do not store -- anything for pshared condvars. */ -- if (cond->__data.__mutex != (void *) ~0l) -- cond->__data.__mutex = mutex; -- -- /* Prepare structure passed to cancellation handler. */ -- cbuffer.cond = cond; -- cbuffer.mutex = mutex; -- -- /* Before we block we enable cancellation. Therefore we have to -- install a cancellation handler. */ -- __pthread_cleanup_push (&buffer, __condvar_cleanup, &cbuffer); -- -- /* The current values of the wakeup counter. The "woken" counter -- must exceed this value. */ -- unsigned long long int val; -- unsigned long long int seq; -- val = seq = cond->__data.__wakeup_seq; -- /* Remember the broadcast counter. */ -- cbuffer.bc_seq = cond->__data.__broadcast_seq; -- -- while (1) -- { --#if (!defined __ASSUME_FUTEX_CLOCK_REALTIME \ -- || !defined lll_futex_timed_wait_bitset) -- struct timespec rt; -- { --# ifdef __NR_clock_gettime -- INTERNAL_SYSCALL_DECL (err); -- (void) INTERNAL_VSYSCALL (clock_gettime, err, 2, -- (cond->__data.__nwaiters -- & ((1 << COND_NWAITERS_SHIFT) - 1)), -- &rt); -- /* Convert the absolute timeout value to a relative timeout. */ -- rt.tv_sec = abstime->tv_sec - rt.tv_sec; -- rt.tv_nsec = abstime->tv_nsec - rt.tv_nsec; --# else -- /* Get the current time. So far we support only one clock. */ -- struct timeval tv; -- (void) __gettimeofday (&tv, NULL); -- -- /* Convert the absolute timeout value to a relative timeout. */ -- rt.tv_sec = abstime->tv_sec - tv.tv_sec; -- rt.tv_nsec = abstime->tv_nsec - tv.tv_usec * 1000; --# endif -- } -- if (rt.tv_nsec < 0) -- { -- rt.tv_nsec += 1000000000; -- --rt.tv_sec; -- } -- /* Did we already time out? */ -- if (__glibc_unlikely (rt.tv_sec < 0)) -- { -- if (cbuffer.bc_seq != cond->__data.__broadcast_seq) -- goto bc_out; -- -- goto timeout; -- } --#endif -- -- unsigned int futex_val = cond->__data.__futex; -- -- /* Prepare to wait. Release the condvar futex. */ -- lll_unlock (cond->__data.__lock, pshared); -- -- /* Enable asynchronous cancellation. Required by the standard. */ -- cbuffer.oldtype = __pthread_enable_asynccancel (); -- --/* REQUEUE_PI was implemented after FUTEX_CLOCK_REALTIME, so it is sufficient -- to check just the former. */ --#if (defined lll_futex_timed_wait_requeue_pi \ -- && defined __ASSUME_REQUEUE_PI) -- /* If pi_flag remained 1 then it means that we had the lock and the mutex -- but a spurious waker raced ahead of us. Give back the mutex before -- going into wait again. */ -- if (pi_flag) -- { -- __pthread_mutex_cond_lock_adjust (mutex); -- __pthread_mutex_unlock_usercnt (mutex, 0); -- } -- pi_flag = USE_REQUEUE_PI (mutex); -- -- if (pi_flag) -- { -- unsigned int clockbit = (cond->__data.__nwaiters & 1 -- ? 
0 : FUTEX_CLOCK_REALTIME); -- err = lll_futex_timed_wait_requeue_pi (&cond->__data.__futex, -- futex_val, abstime, clockbit, -- &mutex->__data.__lock, -- pshared); -- pi_flag = (err == 0); -- } -- else --#endif -- -- { --#if (!defined __ASSUME_FUTEX_CLOCK_REALTIME \ -- || !defined lll_futex_timed_wait_bitset) -- /* Wait until woken by signal or broadcast. */ -- err = lll_futex_timed_wait (&cond->__data.__futex, -- futex_val, &rt, pshared); --#else -- unsigned int clockbit = (cond->__data.__nwaiters & 1 -- ? 0 : FUTEX_CLOCK_REALTIME); -- err = lll_futex_timed_wait_bitset (&cond->__data.__futex, futex_val, -- abstime, clockbit, pshared); --#endif -- } -- -- /* Disable asynchronous cancellation. */ -- __pthread_disable_asynccancel (cbuffer.oldtype); -- -- /* We are going to look at shared data again, so get the lock. */ -- lll_lock (cond->__data.__lock, pshared); -- -- /* If a broadcast happened, we are done. */ -- if (cbuffer.bc_seq != cond->__data.__broadcast_seq) -- goto bc_out; -- -- /* Check whether we are eligible for wakeup. */ -- val = cond->__data.__wakeup_seq; -- if (val != seq && cond->__data.__woken_seq != val) -- break; -- -- /* Not woken yet. Maybe the time expired? */ -- if (__glibc_unlikely (err == -ETIMEDOUT)) -- { -- timeout: -- /* Yep. Adjust the counters. */ -- ++cond->__data.__wakeup_seq; -- ++cond->__data.__futex; -- -- /* The error value. */ -- result = ETIMEDOUT; -- break; -- } -- } -- -- /* Another thread woken up. */ -- ++cond->__data.__woken_seq; -- -- bc_out: -- -- cond->__data.__nwaiters -= 1 << COND_NWAITERS_SHIFT; -- -- /* If pthread_cond_destroy was called on this variable already, -- notify the pthread_cond_destroy caller all waiters have left -- and it can be successfully destroyed. */ -- if (cond->__data.__total_seq == -1ULL -- && cond->__data.__nwaiters < (1 << COND_NWAITERS_SHIFT)) -- lll_futex_wake (&cond->__data.__nwaiters, 1, pshared); -- -- /* We are done with the condvar. */ -- lll_unlock (cond->__data.__lock, pshared); -- -- /* The cancellation handling is back to normal, remove the handler. */ -- __pthread_cleanup_pop (&buffer, 0); -- -- /* Get the mutex before returning. */ --#if (defined lll_futex_timed_wait_requeue_pi \ -- && defined __ASSUME_REQUEUE_PI) -- if (pi_flag) -- { -- __pthread_mutex_cond_lock_adjust (mutex); -- err = 0; -- } -- else --#endif -- err = __pthread_mutex_cond_lock (mutex); -- -- return err ?: result; --} -- --versioned_symbol (libpthread, __pthread_cond_timedwait, pthread_cond_timedwait, -- GLIBC_2_3_2); -diff --git a/nptl/pthread_cond_wait.c b/nptl/pthread_cond_wait.c -index 3f62acc..2b43402 100644 ---- a/nptl/pthread_cond_wait.c -+++ b/nptl/pthread_cond_wait.c -@@ -19,219 +19,655 @@ - #include - #include - #include --#include -+#include - #include - #include --#include -+#include -+#include -+#include -+#include - - #include - #include -+#include -+ -+#include "pthread_cond_common.c" -+ - - struct _condvar_cleanup_buffer - { -- int oldtype; -+ uint64_t wseq; - pthread_cond_t *cond; - pthread_mutex_t *mutex; -- unsigned int bc_seq; -+ int private; - }; - - --void --__attribute__ ((visibility ("hidden"))) --__condvar_cleanup (void *arg) -+/* Decrease the waiter reference count. */ -+static void -+__condvar_confirm_wakeup (pthread_cond_t *cond, int private) - { -- struct _condvar_cleanup_buffer *cbuffer = -- (struct _condvar_cleanup_buffer *) arg; -- unsigned int destroying; -- int pshared = (cbuffer->cond->__data.__mutex == (void *) ~0l) -- ? 
LLL_SHARED : LLL_PRIVATE; -+ /* If destruction is pending (i.e., the wake-request flag is nonzero) and we -+ are the last waiter (prior value of __wrefs was 1 << 3), then wake any -+ threads waiting in pthread_cond_destroy. Release MO to synchronize with -+ these threads. Don't bother clearing the wake-up request flag. */ -+ if ((atomic_fetch_add_release (&cond->__data.__wrefs, -8) >> 2) == 3) -+ futex_wake (&cond->__data.__wrefs, INT_MAX, private); -+} -+ - -- /* We are going to modify shared data. */ -- lll_lock (cbuffer->cond->__data.__lock, pshared); -+/* Cancel waiting after having registered as a waiter previously. SEQ is our -+ position and G is our group index. -+ The goal of cancellation is to make our group smaller if that is still -+ possible. If we are in a closed group, this is not possible anymore; in -+ this case, we need to send a replacement signal for the one we effectively -+ consumed because the signal should have gotten consumed by another waiter -+ instead; we must not both cancel waiting and consume a signal. -+ -+ Must not be called while still holding a reference on the group. -+ -+ Returns true iff we consumed a signal. -+ -+ On some kind of timeouts, we may be able to pretend that a signal we -+ effectively consumed happened before the timeout (i.e., similarly to first -+ spinning on signals before actually checking whether the timeout has -+ passed already). Doing this would allow us to skip sending a replacement -+ signal, but this case might happen rarely because the end of the timeout -+ must race with someone else sending a signal. Therefore, we don't bother -+ trying to optimize this. */ -+static void -+__condvar_cancel_waiting (pthread_cond_t *cond, uint64_t seq, unsigned int g, -+ int private) -+{ -+ bool consumed_signal = false; - -- if (cbuffer->bc_seq == cbuffer->cond->__data.__broadcast_seq) -+ /* No deadlock with group switching is possible here because we have do -+ not hold a reference on the group. */ -+ __condvar_acquire_lock (cond, private); -+ -+ uint64_t g1_start = __condvar_load_g1_start_relaxed (cond) >> 1; -+ if (g1_start > seq) -+ { -+ /* Our group is closed, so someone provided enough signals for it. -+ Thus, we effectively consumed a signal. */ -+ consumed_signal = true; -+ } -+ else - { -- /* This thread is not waiting anymore. Adjust the sequence counters -- appropriately. We do not increment WAKEUP_SEQ if this would -- bump it over the value of TOTAL_SEQ. This can happen if a thread -- was woken and then canceled. */ -- if (cbuffer->cond->__data.__wakeup_seq -- < cbuffer->cond->__data.__total_seq) -+ if (g1_start + __condvar_get_orig_size (cond) <= seq) -+ { -+ /* We are in the current G2 and thus cannot have consumed a signal. -+ Reduce its effective size or handle overflow. Remember that in -+ G2, unsigned int size is zero or a negative value. */ -+ if (cond->__data.__g_size[g] + __PTHREAD_COND_MAX_GROUP_SIZE > 0) -+ { -+ cond->__data.__g_size[g]--; -+ } -+ else -+ { -+ /* Cancellations would overflow the maximum group size. Just -+ wake up everyone spuriously to create a clean state. This -+ also means we do not consume a signal someone else sent. */ -+ __condvar_release_lock (cond, private); -+ __pthread_cond_broadcast (cond); -+ return; -+ } -+ } -+ else - { -- ++cbuffer->cond->__data.__wakeup_seq; -- ++cbuffer->cond->__data.__futex; -+ /* We are in current G1. If the group's size is zero, someone put -+ a signal in the group that nobody else but us can consume. 
*/ -+ if (cond->__data.__g_size[g] == 0) -+ consumed_signal = true; -+ else -+ { -+ /* Otherwise, we decrease the size of the group. This is -+ equivalent to atomically putting in a signal just for us and -+ consuming it right away. We do not consume a signal sent -+ by someone else. We also cannot have consumed a futex -+ wake-up because if we were cancelled or timed out in a futex -+ call, the futex will wake another waiter. */ -+ cond->__data.__g_size[g]--; -+ } - } -- ++cbuffer->cond->__data.__woken_seq; - } - -- cbuffer->cond->__data.__nwaiters -= 1 << COND_NWAITERS_SHIFT; -+ __condvar_release_lock (cond, private); - -- /* If pthread_cond_destroy was called on this variable already, -- notify the pthread_cond_destroy caller all waiters have left -- and it can be successfully destroyed. */ -- destroying = 0; -- if (cbuffer->cond->__data.__total_seq == -1ULL -- && cbuffer->cond->__data.__nwaiters < (1 << COND_NWAITERS_SHIFT)) -+ if (consumed_signal) - { -- lll_futex_wake (&cbuffer->cond->__data.__nwaiters, 1, pshared); -- destroying = 1; -+ /* We effectively consumed a signal even though we didn't want to. -+ Therefore, we need to send a replacement signal. -+ If we would want to optimize this, we could do what -+ pthread_cond_signal does right in the critical section above. */ -+ __pthread_cond_signal (cond); - } -+} - -- /* We are done. */ -- lll_unlock (cbuffer->cond->__data.__lock, pshared); -- -- /* Wake everybody to make sure no condvar signal gets lost. */ -- if (! destroying) -- lll_futex_wake (&cbuffer->cond->__data.__futex, INT_MAX, pshared); -- -- /* Get the mutex before returning unless asynchronous cancellation -- is in effect. We don't try to get the mutex if we already own it. */ -- if (!(USE_REQUEUE_PI (cbuffer->mutex)) -- || ((cbuffer->mutex->__data.__lock & FUTEX_TID_MASK) -- != THREAD_GETMEM (THREAD_SELF, tid))) -- { -- __pthread_mutex_cond_lock (cbuffer->mutex); -- } -- else -- __pthread_mutex_cond_lock_adjust (cbuffer->mutex); -+/* Wake up any signalers that might be waiting. */ -+static void -+__condvar_dec_grefs (pthread_cond_t *cond, unsigned int g, int private) -+{ -+ /* Release MO to synchronize-with the acquire load in -+ __condvar_quiesce_and_switch_g1. */ -+ if (atomic_fetch_add_release (cond->__data.__g_refs + g, -2) == 3) -+ { -+ /* Clear the wake-up request flag before waking up. We do not need more -+ than relaxed MO and it doesn't matter if we apply this for an aliased -+ group because we wake all futex waiters right after clearing the -+ flag. */ -+ atomic_fetch_and_relaxed (cond->__data.__g_refs + g, ~(unsigned int) 1); -+ futex_wake (cond->__data.__g_refs + g, INT_MAX, private); -+ } - } - -+/* Clean-up for cancellation of waiters waiting for normal signals. We cancel -+ our registration as a waiter, confirm we have woken up, and re-acquire the -+ mutex. */ -+static void -+__condvar_cleanup_waiting (void *arg) -+{ -+ struct _condvar_cleanup_buffer *cbuffer = -+ (struct _condvar_cleanup_buffer *) arg; -+ pthread_cond_t *cond = cbuffer->cond; -+ unsigned g = cbuffer->wseq & 1; - --int --__pthread_cond_wait (pthread_cond_t *cond, pthread_mutex_t *mutex) -+ __condvar_dec_grefs (cond, g, cbuffer->private); -+ -+ __condvar_cancel_waiting (cond, cbuffer->wseq >> 1, g, cbuffer->private); -+ /* FIXME With the current cancellation implementation, it is possible that -+ a thread is cancelled after it has returned from a syscall. 
This could -+ result in a cancelled waiter consuming a futex wake-up that is then -+ causing another waiter in the same group to not wake up. To work around -+ this issue until we have fixed cancellation, just add a futex wake-up -+ conservatively. */ -+ futex_wake (cond->__data.__g_signals + g, 1, cbuffer->private); -+ -+ __condvar_confirm_wakeup (cond, cbuffer->private); -+ -+ /* XXX If locking the mutex fails, should we just stop execution? This -+ might be better than silently ignoring the error. */ -+ __pthread_mutex_cond_lock (cbuffer->mutex); -+} -+ -+/* This condvar implementation guarantees that all calls to signal and -+ broadcast and all of the three virtually atomic parts of each call to wait -+ (i.e., (1) releasing the mutex and blocking, (2) unblocking, and (3) re- -+ acquiring the mutex) happen in some total order that is consistent with the -+ happens-before relations in the calling program. However, this order does -+ not necessarily result in additional happens-before relations being -+ established (which aligns well with spurious wake-ups being allowed). -+ -+ All waiters acquire a certain position in a 64b waiter sequence (__wseq). -+ This sequence determines which waiters are allowed to consume signals. -+ A broadcast is equal to sending as many signals as are unblocked waiters. -+ When a signal arrives, it samples the current value of __wseq with a -+ relaxed-MO load (i.e., the position the next waiter would get). (This is -+ sufficient because it is consistent with happens-before; the caller can -+ enforce stronger ordering constraints by calling signal while holding the -+ mutex.) Only waiters with a position less than the __wseq value observed -+ by the signal are eligible to consume this signal. -+ -+ This would be straight-forward to implement if waiters would just spin but -+ we need to let them block using futexes. Futexes give no guarantee of -+ waking in FIFO order, so we cannot reliably wake eligible waiters if we -+ just use a single futex. Also, futex words are 32b in size, but we need -+ to distinguish more than 1<<32 states because we need to represent the -+ order of wake-up (and thus which waiters are eligible to consume signals); -+ blocking in a futex is not atomic with a waiter determining its position in -+ the waiter sequence, so we need the futex word to reliably notify waiters -+ that they should not attempt to block anymore because they have been -+ already signaled in the meantime. While an ABA issue on a 32b value will -+ be rare, ignoring it when we are aware of it is not the right thing to do -+ either. -+ -+ Therefore, we use a 64b counter to represent the waiter sequence (on -+ architectures which only support 32b atomics, we use a few bits less). -+ To deal with the blocking using futexes, we maintain two groups of waiters: -+ * Group G1 consists of waiters that are all eligible to consume signals; -+ incoming signals will always signal waiters in this group until all -+ waiters in G1 have been signaled. -+ * Group G2 consists of waiters that arrive when a G1 is present and still -+ contains waiters that have not been signaled. When all waiters in G1 -+ are signaled and a new signal arrives, the new signal will convert G2 -+ into the new G1 and create a new G2 for future waiters. -+ -+ We cannot allocate new memory because of process-shared condvars, so we -+ have just two slots of groups that change their role between G1 and G2. 
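
Before the field-by-field description continues below, here is a small sequential model of the two-group bookkeeping sketched so far: waiters take positions from a 64-bit sequence and join the current G2, and a signal either shrinks G1 or, once G1 is exhausted, turns G2 into the new G1. All atomics, futexes, reference counts and flag bits are omitted and the names are invented, so this only illustrates the accounting, not the synchronization.

    #include <stdint.h>
    #include <stdio.h>

    struct condvar_model
    {
      uint64_t wseq;          /* next free waiter position */
      uint64_t g2_start;      /* first position in the current G2 (the real
                                 code derives this from __g1_start plus
                                 __g1_orig_size) */
      unsigned int g2;        /* which of the two slots collects new waiters */
      unsigned int g_size[2]; /* unsignaled waiters per slot (used for G1) */
    };

    /* A waiter takes the next position and conceptually joins the current G2. */
    static uint64_t model_wait (struct condvar_model *c)
    {
      return c->wseq++;
    }

    static void model_signal (struct condvar_model *c)
    {
      unsigned int g1 = c->g2 ^ 1;
      if (c->g_size[g1] == 0)
        {
          /* G1 is fully signaled; turn the current G2 into the new G1. */
          unsigned int new_size = (unsigned int) (c->wseq - c->g2_start);
          if (new_size == 0)
            return;              /* G2 is empty as well: nothing to signal */
          c->g_size[c->g2] = new_size;
          c->g2_start = c->wseq; /* the new, empty G2 starts here */
          c->g2 ^= 1;
          g1 ^= 1;
        }
      c->g_size[g1]--;           /* one more waiter in G1 may wake up */
    }

    int main (void)
    {
      struct condvar_model c = { 0, 0, 0, { 0, 0 } };
      uint64_t a = model_wait (&c);
      uint64_t b = model_wait (&c);
      model_signal (&c);         /* switches groups, then covers waiter a */
      model_signal (&c);         /* covers waiter b from the same G1 */
      printf ("waiters %llu and %llu are both eligible to wake\n",
              (unsigned long long) a, (unsigned long long) b);
      return 0;
    }
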
-+ Each has a separate futex word, a number of signals available for -+ consumption, a size (number of waiters in the group that have not been -+ signaled), and a reference count. -+ -+ The group reference count is used to maintain the number of waiters that -+ are using the group's futex. Before a group can change its role, the -+ reference count must show that no waiters are using the futex anymore; this -+ prevents ABA issues on the futex word. -+ -+ To represent which intervals in the waiter sequence the groups cover (and -+ thus also which group slot contains G1 or G2), we use a 64b counter to -+ designate the start position of G1 (inclusive), and a single bit in the -+ waiter sequence counter to represent which group slot currently contains -+ G2. This allows us to switch group roles atomically wrt. waiters obtaining -+ a position in the waiter sequence. The G1 start position allows waiters to -+ figure out whether they are in a group that has already been completely -+ signaled (i.e., if the current G1 starts at a later position that the -+ waiter's position). Waiters cannot determine whether they are currently -+ in G2 or G1 -- but they do not have too because all they are interested in -+ is whether there are available signals, and they always start in G2 (whose -+ group slot they know because of the bit in the waiter sequence. Signalers -+ will simply fill the right group until it is completely signaled and can -+ be closed (they do not switch group roles until they really have to to -+ decrease the likelihood of having to wait for waiters still holding a -+ reference on the now-closed G1). -+ -+ Signalers maintain the initial size of G1 to be able to determine where -+ G2 starts (G2 is always open-ended until it becomes G1). They track the -+ remaining size of a group; when waiters cancel waiting (due to PThreads -+ cancellation or timeouts), they will decrease this remaining size as well. -+ -+ To implement condvar destruction requirements (i.e., that -+ pthread_cond_destroy can be called as soon as all waiters have been -+ signaled), waiters increment a reference count before starting to wait and -+ decrement it after they stopped waiting but right before they acquire the -+ mutex associated with the condvar. -+ -+ pthread_cond_t thus consists of the following (bits that are used for -+ flags and are not part of the primary value of each field but necessary -+ to make some things atomic or because there was no space for them -+ elsewhere in the data structure): -+ -+ __wseq: Waiter sequence counter -+ * LSB is index of current G2. -+ * Waiters fetch-add while having acquire the mutex associated with the -+ condvar. Signalers load it and fetch-xor it concurrently. -+ __g1_start: Starting position of G1 (inclusive) -+ * LSB is index of current G2. -+ * Modified by signalers while having acquired the condvar-internal lock -+ and observed concurrently by waiters. -+ __g1_orig_size: Initial size of G1 -+ * The two least-significant bits represent the condvar-internal lock. -+ * Only accessed while having acquired the condvar-internal lock. -+ __wrefs: Waiter reference counter. -+ * Bit 2 is true if waiters should run futex_wake when they remove the -+ last reference. pthread_cond_destroy uses this as futex word. -+ * Bit 1 is the clock ID (0 == CLOCK_REALTIME, 1 == CLOCK_MONOTONIC). -+ * Bit 0 is true iff this is a process-shared condvar. -+ * Simple reference count used by both waiters and pthread_cond_destroy. 
-+ (If the format of __wrefs is changed, update nptl_lock_constants.pysym -+ and the pretty printers.) -+ For each of the two groups, we have: -+ __g_refs: Futex waiter reference count. -+ * LSB is true if waiters should run futex_wake when they remove the -+ last reference. -+ * Reference count used by waiters concurrently with signalers that have -+ acquired the condvar-internal lock. -+ __g_signals: The number of signals that can still be consumed. -+ * Used as a futex word by waiters. Used concurrently by waiters and -+ signalers. -+ * LSB is true iff this group has been completely signaled (i.e., it is -+ closed). -+ __g_size: Waiters remaining in this group (i.e., which have not been -+ signaled yet. -+ * Accessed by signalers and waiters that cancel waiting (both do so only -+ when having acquired the condvar-internal lock. -+ * The size of G2 is always zero because it cannot be determined until -+ the group becomes G1. -+ * Although this is of unsigned type, we rely on using unsigned overflow -+ rules to make this hold effectively negative values too (in -+ particular, when waiters in G2 cancel waiting). -+ -+ A PTHREAD_COND_INITIALIZER condvar has all fields set to zero, which yields -+ a condvar that has G2 starting at position 0 and a G1 that is closed. -+ -+ Because waiters do not claim ownership of a group right when obtaining a -+ position in __wseq but only reference count the group when using futexes -+ to block, it can happen that a group gets closed before a waiter can -+ increment the reference count. Therefore, waiters have to check whether -+ their group is already closed using __g1_start. They also have to perform -+ this check when spinning when trying to grab a signal from __g_signals. -+ Note that for these checks, using relaxed MO to load __g1_start is -+ sufficient because if a waiter can see a sufficiently large value, it could -+ have also consume a signal in the waiters group. -+ -+ Waiters try to grab a signal from __g_signals without holding a reference -+ count, which can lead to stealing a signal from a more recent group after -+ their own group was already closed. They cannot always detect whether they -+ in fact did because they do not know when they stole, but they can -+ conservatively add a signal back to the group they stole from; if they -+ did so unnecessarily, all that happens is a spurious wake-up. To make this -+ even less likely, __g1_start contains the index of the current g2 too, -+ which allows waiters to check if there aliasing on the group slots; if -+ there wasn't, they didn't steal from the current G1, which means that the -+ G1 they stole from must have been already closed and they do not need to -+ fix anything. -+ -+ It is essential that the last field in pthread_cond_t is __g_signals[1]: -+ The previous condvar used a pointer-sized field in pthread_cond_t, so a -+ PTHREAD_COND_INITIALIZER from that condvar implementation might only -+ initialize 4 bytes to zero instead of the 8 bytes we need (i.e., 44 bytes -+ in total instead of the 48 we need). __g_signals[1] is not accessed before -+ the first group switch (G2 starts at index 0), which will set its value to -+ zero after a harmless fetch-or whose return value is ignored. This -+ effectively completes initialization. -+ -+ -+ Limitations: -+ * This condvar isn't designed to allow for more than -+ __PTHREAD_COND_MAX_GROUP_SIZE * (1 << 31) calls to __pthread_cond_wait. -+ * More than __PTHREAD_COND_MAX_GROUP_SIZE concurrent waiters are not -+ supported. 
-+ * Beyond what is allowed as errors by POSIX or documented, we can also -+ return the following errors: -+ * EPERM if MUTEX is a recursive mutex and the caller doesn't own it. -+ * EOWNERDEAD or ENOTRECOVERABLE when using robust mutexes. Unlike -+ for other errors, this can happen when we re-acquire the mutex; this -+ isn't allowed by POSIX (which requires all errors to virtually happen -+ before we release the mutex or change the condvar state), but there's -+ nothing we can do really. -+ * When using PTHREAD_MUTEX_PP_* mutexes, we can also return all errors -+ returned by __pthread_tpp_change_priority. We will already have -+ released the mutex in such cases, so the caller cannot expect to own -+ MUTEX. -+ -+ Other notes: -+ * Instead of the normal mutex unlock / lock functions, we use -+ __pthread_mutex_unlock_usercnt(m, 0) / __pthread_mutex_cond_lock(m) -+ because those will not change the mutex-internal users count, so that it -+ can be detected when a condvar is still associated with a particular -+ mutex because there is a waiter blocked on this condvar using this mutex. -+*/ -+static __always_inline int -+__pthread_cond_wait_common (pthread_cond_t *cond, pthread_mutex_t *mutex, -+ const struct timespec *abstime) - { -- struct _pthread_cleanup_buffer buffer; -- struct _condvar_cleanup_buffer cbuffer; -+ const int maxspin = 0; - int err; -- int pshared = (cond->__data.__mutex == (void *) ~0l) -- ? LLL_SHARED : LLL_PRIVATE; -- --#if (defined lll_futex_wait_requeue_pi \ -- && defined __ASSUME_REQUEUE_PI) -- int pi_flag = 0; --#endif -+ int result = 0; - - LIBC_PROBE (cond_wait, 2, cond, mutex); - -- /* Make sure we are alone. */ -- lll_lock (cond->__data.__lock, pshared); -- -- /* Now we can release the mutex. */ -+ /* Acquire a position (SEQ) in the waiter sequence (WSEQ). We use an -+ atomic operation because signals and broadcasts may update the group -+ switch without acquiring the mutex. We do not need release MO here -+ because we do not need to establish any happens-before relation with -+ signalers (see __pthread_cond_signal); modification order alone -+ establishes a total order of waiters/signals. We do need acquire MO -+ to synchronize with group reinitialization in -+ __condvar_quiesce_and_switch_g1. */ -+ uint64_t wseq = __condvar_fetch_add_wseq_acquire (cond, 2); -+ /* Find our group's index. We always go into what was G2 when we acquired -+ our position. */ -+ unsigned int g = wseq & 1; -+ uint64_t seq = wseq >> 1; -+ -+ /* Increase the waiter reference count. Relaxed MO is sufficient because -+ we only need to synchronize when decrementing the reference count. */ -+ unsigned int flags = atomic_fetch_add_relaxed (&cond->__data.__wrefs, 8); -+ int private = __condvar_get_private (flags); -+ -+ /* Now that we are registered as a waiter, we can release the mutex. -+ Waiting on the condvar must be atomic with releasing the mutex, so if -+ the mutex is used to establish a happens-before relation with any -+ signaler, the waiter must be visible to the latter; thus, we release the -+ mutex after registering as waiter. -+ If releasing the mutex fails, we just cancel our registration as a -+ waiter and confirm that we have woken up. */ - err = __pthread_mutex_unlock_usercnt (mutex, 0); -- if (__glibc_unlikely (err)) -+ if (__glibc_unlikely (err != 0)) - { -- lll_unlock (cond->__data.__lock, pshared); -+ __condvar_cancel_waiting (cond, seq, g, private); -+ __condvar_confirm_wakeup (cond, private); - return err; - } - -- /* We have one new user of the condvar. 
*/ -- ++cond->__data.__total_seq; -- ++cond->__data.__futex; -- cond->__data.__nwaiters += 1 << COND_NWAITERS_SHIFT; -- -- /* Remember the mutex we are using here. If there is already a -- different address store this is a bad user bug. Do not store -- anything for pshared condvars. */ -- if (cond->__data.__mutex != (void *) ~0l) -- cond->__data.__mutex = mutex; -- -- /* Prepare structure passed to cancellation handler. */ -- cbuffer.cond = cond; -- cbuffer.mutex = mutex; -- -- /* Before we block we enable cancellation. Therefore we have to -- install a cancellation handler. */ -- __pthread_cleanup_push (&buffer, __condvar_cleanup, &cbuffer); -- -- /* The current values of the wakeup counter. The "woken" counter -- must exceed this value. */ -- unsigned long long int val; -- unsigned long long int seq; -- val = seq = cond->__data.__wakeup_seq; -- /* Remember the broadcast counter. */ -- cbuffer.bc_seq = cond->__data.__broadcast_seq; -+ /* Now wait until a signal is available in our group or it is closed. -+ Acquire MO so that if we observe a value of zero written after group -+ switching in __condvar_quiesce_and_switch_g1, we synchronize with that -+ store and will see the prior update of __g1_start done while switching -+ groups too. */ -+ unsigned int signals = atomic_load_acquire (cond->__data.__g_signals + g); - - do - { -- unsigned int futex_val = cond->__data.__futex; -- /* Prepare to wait. Release the condvar futex. */ -- lll_unlock (cond->__data.__lock, pshared); -- -- /* Enable asynchronous cancellation. Required by the standard. */ -- cbuffer.oldtype = __pthread_enable_asynccancel (); -- --#if (defined lll_futex_wait_requeue_pi \ -- && defined __ASSUME_REQUEUE_PI) -- /* If pi_flag remained 1 then it means that we had the lock and the mutex -- but a spurious waker raced ahead of us. Give back the mutex before -- going into wait again. */ -- if (pi_flag) -+ while (1) - { -- __pthread_mutex_cond_lock_adjust (mutex); -- __pthread_mutex_unlock_usercnt (mutex, 0); -+ /* Spin-wait first. -+ Note that spinning first without checking whether a timeout -+ passed might lead to what looks like a spurious wake-up even -+ though we should return ETIMEDOUT (e.g., if the caller provides -+ an absolute timeout that is clearly in the past). However, -+ (1) spurious wake-ups are allowed, (2) it seems unlikely that a -+ user will (ab)use pthread_cond_wait as a check for whether a -+ point in time is in the past, and (3) spinning first without -+ having to compare against the current time seems to be the right -+ choice from a performance perspective for most use cases. */ -+ unsigned int spin = maxspin; -+ while (signals == 0 && spin > 0) -+ { -+ /* Check that we are not spinning on a group that's already -+ closed. */ -+ if (seq < (__condvar_load_g1_start_relaxed (cond) >> 1)) -+ goto done; -+ -+ /* TODO Back off. */ -+ -+ /* Reload signals. See above for MO. */ -+ signals = atomic_load_acquire (cond->__data.__g_signals + g); -+ spin--; -+ } -+ -+ /* If our group will be closed as indicated by the flag on signals, -+ don't bother grabbing a signal. */ -+ if (signals & 1) -+ goto done; -+ -+ /* If there is an available signal, don't block. */ -+ if (signals != 0) -+ break; -+ -+ /* No signals available after spinning, so prepare to block. -+ We first acquire a group reference and use acquire MO for that so -+ that we synchronize with the dummy read-modify-write in -+ __condvar_quiesce_and_switch_g1 if we read from that. 
In turn, -+ in this case this will make us see the closed flag on __g_signals -+ that designates a concurrent attempt to reuse the group's slot. -+ We use acquire MO for the __g_signals check to make the -+ __g1_start check work (see spinning above). -+ Note that the group reference acquisition will not mask the -+ release MO when decrementing the reference count because we use -+ an atomic read-modify-write operation and thus extend the release -+ sequence. */ -+ atomic_fetch_add_acquire (cond->__data.__g_refs + g, 2); -+ if (((atomic_load_acquire (cond->__data.__g_signals + g) & 1) != 0) -+ || (seq < (__condvar_load_g1_start_relaxed (cond) >> 1))) -+ { -+ /* Our group is closed. Wake up any signalers that might be -+ waiting. */ -+ __condvar_dec_grefs (cond, g, private); -+ goto done; -+ } -+ -+ // Now block. -+ struct _pthread_cleanup_buffer buffer; -+ struct _condvar_cleanup_buffer cbuffer; -+ cbuffer.wseq = wseq; -+ cbuffer.cond = cond; -+ cbuffer.mutex = mutex; -+ cbuffer.private = private; -+ __pthread_cleanup_push (&buffer, __condvar_cleanup_waiting, &cbuffer); -+ -+ if (abstime == NULL) -+ { -+ /* Block without a timeout. */ -+ err = futex_wait_cancelable ( -+ cond->__data.__g_signals + g, 0, private); -+ } -+ else -+ { -+ /* Block, but with a timeout. -+ Work around the fact that the kernel rejects negative timeout -+ values despite them being valid. */ -+ if (__glibc_unlikely (abstime->tv_sec < 0)) -+ err = ETIMEDOUT; -+ -+ else if ((flags & __PTHREAD_COND_CLOCK_MONOTONIC_MASK) != 0) -+ { -+ /* CLOCK_MONOTONIC is requested. */ -+ struct timespec rt; -+ if (__clock_gettime (CLOCK_MONOTONIC, &rt) != 0) -+ __libc_fatal ("clock_gettime does not support " -+ "CLOCK_MONOTONIC"); -+ /* Convert the absolute timeout value to a relative -+ timeout. */ -+ rt.tv_sec = abstime->tv_sec - rt.tv_sec; -+ rt.tv_nsec = abstime->tv_nsec - rt.tv_nsec; -+ if (rt.tv_nsec < 0) -+ { -+ rt.tv_nsec += 1000000000; -+ --rt.tv_sec; -+ } -+ /* Did we already time out? */ -+ if (__glibc_unlikely (rt.tv_sec < 0)) -+ err = ETIMEDOUT; -+ else -+ err = futex_reltimed_wait_cancelable -+ (cond->__data.__g_signals + g, 0, &rt, private); -+ } -+ else -+ { -+ /* Use CLOCK_REALTIME. */ -+ err = futex_abstimed_wait_cancelable -+ (cond->__data.__g_signals + g, 0, abstime, private); -+ } -+ } -+ -+ __pthread_cleanup_pop (&buffer, 0); -+ -+ if (__glibc_unlikely (err == ETIMEDOUT)) -+ { -+ __condvar_dec_grefs (cond, g, private); -+ /* If we timed out, we effectively cancel waiting. Note that -+ we have decremented __g_refs before cancellation, so that a -+ deadlock between waiting for quiescence of our group in -+ __condvar_quiesce_and_switch_g1 and us trying to acquire -+ the lock during cancellation is not possible. */ -+ __condvar_cancel_waiting (cond, seq, g, private); -+ result = ETIMEDOUT; -+ goto done; -+ } -+ else -+ __condvar_dec_grefs (cond, g, private); -+ -+ /* Reload signals. See above for MO. */ -+ signals = atomic_load_acquire (cond->__data.__g_signals + g); - } -- pi_flag = USE_REQUEUE_PI (mutex); - -- if (pi_flag) -+ } -+ /* Try to grab a signal. Use acquire MO so that we see an up-to-date value -+ of __g1_start below (see spinning above for a similar case). In -+ particular, if we steal from a more recent group, we will also see a -+ more recent __g1_start below. 
*/ -+ while (!atomic_compare_exchange_weak_acquire (cond->__data.__g_signals + g, -+ &signals, signals - 2)); -+ -+ /* We consumed a signal but we could have consumed from a more recent group -+ that aliased with ours due to being in the same group slot. If this -+ might be the case our group must be closed as visible through -+ __g1_start. */ -+ uint64_t g1_start = __condvar_load_g1_start_relaxed (cond); -+ if (seq < (g1_start >> 1)) -+ { -+ /* We potentially stole a signal from a more recent group but we do not -+ know which group we really consumed from. -+ We do not care about groups older than current G1 because they are -+ closed; we could have stolen from these, but then we just add a -+ spurious wake-up for the current groups. -+ We will never steal a signal from current G2 that was really intended -+ for G2 because G2 never receives signals (until it becomes G1). We -+ could have stolen a signal from G2 that was conservatively added by a -+ previous waiter that also thought it stole a signal -- but given that -+ that signal was added unnecessarily, it's not a problem if we steal -+ it. -+ Thus, the remaining case is that we could have stolen from the current -+ G1, where "current" means the __g1_start value we observed. However, -+ if the current G1 does not have the same slot index as we do, we did -+ not steal from it and do not need to undo that. This is the reason -+ for putting a bit with G2's index into__g1_start as well. */ -+ if (((g1_start & 1) ^ 1) == g) - { -- err = lll_futex_wait_requeue_pi (&cond->__data.__futex, -- futex_val, &mutex->__data.__lock, -- pshared); -- -- pi_flag = (err == 0); -+ /* We have to conservatively undo our potential mistake of stealing -+ a signal. We can stop trying to do that when the current G1 -+ changes because other spinning waiters will notice this too and -+ __condvar_quiesce_and_switch_g1 has checked that there are no -+ futex waiters anymore before switching G1. -+ Relaxed MO is fine for the __g1_start load because we need to -+ merely be able to observe this fact and not have to observe -+ something else as well. -+ ??? Would it help to spin for a little while to see whether the -+ current G1 gets closed? This might be worthwhile if the group is -+ small or close to being closed. */ -+ unsigned int s = atomic_load_relaxed (cond->__data.__g_signals + g); -+ while (__condvar_load_g1_start_relaxed (cond) == g1_start) -+ { -+ /* Try to add a signal. We don't need to acquire the lock -+ because at worst we can cause a spurious wake-up. If the -+ group is in the process of being closed (LSB is true), this -+ has an effect similar to us adding a signal. */ -+ if (((s & 1) != 0) -+ || atomic_compare_exchange_weak_relaxed -+ (cond->__data.__g_signals + g, &s, s + 2)) -+ { -+ /* If we added a signal, we also need to add a wake-up on -+ the futex. We also need to do that if we skipped adding -+ a signal because the group is being closed because -+ while __condvar_quiesce_and_switch_g1 could have closed -+ the group, it might stil be waiting for futex waiters to -+ leave (and one of those waiters might be the one we stole -+ the signal from, which cause it to block using the -+ futex). */ -+ futex_wake (cond->__data.__g_signals + g, 1, private); -+ break; -+ } -+ /* TODO Back off. */ -+ } - } -- else --#endif -- /* Wait until woken by signal or broadcast. */ -- lll_futex_wait (&cond->__data.__futex, futex_val, pshared); -- -- /* Disable asynchronous cancellation. 
*/ -- __pthread_disable_asynccancel (cbuffer.oldtype); -- -- /* We are going to look at shared data again, so get the lock. */ -- lll_lock (cond->__data.__lock, pshared); -- -- /* If a broadcast happened, we are done. */ -- if (cbuffer.bc_seq != cond->__data.__broadcast_seq) -- goto bc_out; -- -- /* Check whether we are eligible for wakeup. */ -- val = cond->__data.__wakeup_seq; - } -- while (val == seq || cond->__data.__woken_seq == val); - -- /* Another thread woken up. */ -- ++cond->__data.__woken_seq; -+ done: - -- bc_out: -+ /* Confirm that we have been woken. We do that before acquiring the mutex -+ to allow for execution of pthread_cond_destroy while having acquired the -+ mutex. */ -+ __condvar_confirm_wakeup (cond, private); - -- cond->__data.__nwaiters -= 1 << COND_NWAITERS_SHIFT; -- -- /* If pthread_cond_destroy was called on this varaible already, -- notify the pthread_cond_destroy caller all waiters have left -- and it can be successfully destroyed. */ -- if (cond->__data.__total_seq == -1ULL -- && cond->__data.__nwaiters < (1 << COND_NWAITERS_SHIFT)) -- lll_futex_wake (&cond->__data.__nwaiters, 1, pshared); -+ /* Woken up; now re-acquire the mutex. If this doesn't fail, return RESULT, -+ which is set to ETIMEDOUT if a timeout occured, or zero otherwise. */ -+ err = __pthread_mutex_cond_lock (mutex); -+ /* XXX Abort on errors that are disallowed by POSIX? */ -+ return (err != 0) ? err : result; -+} - -- /* We are done with the condvar. */ -- lll_unlock (cond->__data.__lock, pshared); - -- /* The cancellation handling is back to normal, remove the handler. */ -- __pthread_cleanup_pop (&buffer, 0); -+/* See __pthread_cond_wait_common. */ -+int -+__pthread_cond_wait (pthread_cond_t *cond, pthread_mutex_t *mutex) -+{ -+ return __pthread_cond_wait_common (cond, mutex, NULL); -+} - -- /* Get the mutex before returning. Not needed for PI. */ --#if (defined lll_futex_wait_requeue_pi \ -- && defined __ASSUME_REQUEUE_PI) -- if (pi_flag) -- { -- __pthread_mutex_cond_lock_adjust (mutex); -- return 0; -- } -- else --#endif -- return __pthread_mutex_cond_lock (mutex); -+/* See __pthread_cond_wait_common. */ -+int -+__pthread_cond_timedwait (pthread_cond_t *cond, pthread_mutex_t *mutex, -+ const struct timespec *abstime) -+{ -+ /* Check parameter validity. This should also tell the compiler that -+ it can assume that abstime is not NULL. 
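
The timed path validates abstime (the check follows this note) and, for CLOCK_MONOTONIC condvars, converts the absolute deadline into a relative timeout before calling futex_reltimed_wait_cancelable, as shown earlier in __pthread_cond_wait_common. A stand-alone sketch of that conversion; the helper name abs_to_rel_monotonic is invented for this example.

    #include <errno.h>
    #include <stdio.h>
    #include <time.h>

    /* Compute *rel = *abstime - now (CLOCK_MONOTONIC), normalizing tv_nsec.
       Returns EINVAL for a malformed timespec, ETIMEDOUT if the deadline has
       already passed, and 0 otherwise. */
    static int abs_to_rel_monotonic (const struct timespec *abstime,
                                     struct timespec *rel)
    {
      if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000)
        return EINVAL;
      struct timespec now;
      if (clock_gettime (CLOCK_MONOTONIC, &now) != 0)
        return errno;
      rel->tv_sec = abstime->tv_sec - now.tv_sec;
      rel->tv_nsec = abstime->tv_nsec - now.tv_nsec;
      if (rel->tv_nsec < 0)
        {
          rel->tv_nsec += 1000000000;
          --rel->tv_sec;
        }
      return rel->tv_sec < 0 ? ETIMEDOUT : 0;
    }

    int main (void)
    {
      struct timespec deadline, rel;
      clock_gettime (CLOCK_MONOTONIC, &deadline);
      deadline.tv_sec += 1;                    /* one second from now */
      int err = abs_to_rel_monotonic (&deadline, &rel);
      printf ("err=%d rel=%lld.%09lds\n", err,
              (long long) rel.tv_sec, (long) rel.tv_nsec);
      return 0;
    }
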
*/ -+ if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000) -+ return EINVAL; -+ return __pthread_cond_wait_common (cond, mutex, abstime); - } - - versioned_symbol (libpthread, __pthread_cond_wait, pthread_cond_wait, - GLIBC_2_3_2); -+versioned_symbol (libpthread, __pthread_cond_timedwait, pthread_cond_timedwait, -+ GLIBC_2_3_2); -diff --git a/nptl/pthread_condattr_getclock.c b/nptl/pthread_condattr_getclock.c -index d156302..cecb4aa 100644 ---- a/nptl/pthread_condattr_getclock.c -+++ b/nptl/pthread_condattr_getclock.c -@@ -23,6 +23,6 @@ int - pthread_condattr_getclock (const pthread_condattr_t *attr, clockid_t *clock_id) - { - *clock_id = (((((const struct pthread_condattr *) attr)->value) >> 1) -- & ((1 << COND_NWAITERS_SHIFT) - 1)); -+ & ((1 << COND_CLOCK_BITS) - 1)); - return 0; - } -diff --git a/nptl/pthread_condattr_getpshared.c b/nptl/pthread_condattr_getpshared.c -index 5a10f3e..8147966 100644 ---- a/nptl/pthread_condattr_getpshared.c -+++ b/nptl/pthread_condattr_getpshared.c -@@ -22,7 +22,8 @@ - int - pthread_condattr_getpshared (const pthread_condattr_t *attr, int *pshared) - { -- *pshared = ((const struct pthread_condattr *) attr)->value & 1; -+ *pshared = (((const struct pthread_condattr *) attr)->value & 1 -+ ? PTHREAD_PROCESS_SHARED : PTHREAD_PROCESS_PRIVATE); - - return 0; - } -diff --git a/nptl/pthread_condattr_init.c b/nptl/pthread_condattr_init.c -index 0ce42e5..6e5168d 100644 ---- a/nptl/pthread_condattr_init.c -+++ b/nptl/pthread_condattr_init.c -@@ -23,7 +23,9 @@ - int - __pthread_condattr_init (pthread_condattr_t *attr) - { -- memset (attr, '\0', sizeof (*attr)); -+ struct pthread_condattr *iattr = (struct pthread_condattr *) attr; -+ /* Default is not pshared and CLOCK_REALTIME. */ -+ iattr-> value = CLOCK_REALTIME << 1; - - return 0; - } -diff --git a/nptl/pthread_condattr_setclock.c b/nptl/pthread_condattr_setclock.c -index 25e2a17..3cfad84 100644 ---- a/nptl/pthread_condattr_setclock.c -+++ b/nptl/pthread_condattr_setclock.c -@@ -18,7 +18,7 @@ - - #include - #include --#include -+#include - #include - #include - #include "pthreadP.h" -@@ -33,12 +33,17 @@ pthread_condattr_setclock (pthread_condattr_t *attr, clockid_t clock_id) - in the pthread_cond_t structure needs to be adjusted. */ - return EINVAL; - -+ /* If we do not support waiting using CLOCK_MONOTONIC, return an error. */ -+ if (clock_id == CLOCK_MONOTONIC -+ && !futex_supports_exact_relative_timeouts()) -+ return ENOTSUP; -+ - /* Make sure the value fits in the bits we reserved. 
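
The condattr hunks in this region all operate on a single int value: bit 0 holds the process-shared flag and the next COND_CLOCK_BITS bits hold the clock ID. A small round-trip illustration of that packing follows; COND_CLOCK_BITS is assumed to be 1 here (only CLOCK_REALTIME and CLOCK_MONOTONIC are distinguished) and the helper name attr_pack is invented.

    #include <assert.h>
    #include <stdio.h>
    #include <time.h>

    /* Assumed value for this sketch; see the setclock hunk above. */
    #define COND_CLOCK_BITS 1

    /* Pack a process-shared flag (0 or 1) and a clock ID the same way the
       condattr value is laid out in the hunks above. */
    static int attr_pack (int pshared, clockid_t clock_id)
    {
      assert (clock_id < (1 << COND_CLOCK_BITS));
      return (pshared & 1) | ((int) clock_id << 1);
    }

    int main (void)
    {
      int value = attr_pack (1, CLOCK_MONOTONIC);
      int pshared = value & 1;
      int clock_id = (value >> 1) & ((1 << COND_CLOCK_BITS) - 1);
      printf ("value=%d pshared=%d clock=%d\n", value, pshared, clock_id);
      return 0;
    }
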
*/ -- assert (clock_id < (1 << COND_NWAITERS_SHIFT)); -+ assert (clock_id < (1 << COND_CLOCK_BITS)); - - int *valuep = &((struct pthread_condattr *) attr)->value; - -- *valuep = ((*valuep & ~(((1 << COND_NWAITERS_SHIFT) - 1) << 1)) -+ *valuep = ((*valuep & ~(((1 << COND_CLOCK_BITS) - 1) << 1)) - | (clock_id << 1)); - - return 0; -diff --git a/nptl/test-cond-printers.py b/nptl/test-cond-printers.py -index af0e12e..9e807c9 100644 ---- a/nptl/test-cond-printers.py -+++ b/nptl/test-cond-printers.py -@@ -35,7 +35,7 @@ try: - - break_at(test_source, 'Test status (destroyed)') - continue_cmd() # Go to test_status_destroyed -- test_printer(var, to_string, {'Status': 'Destroyed'}) -+ test_printer(var, to_string, {'Threads known to still execute a wait function': '0'}) - - continue_cmd() # Exit - -diff --git a/nptl/tst-cond1.c b/nptl/tst-cond1.c -index 75ab9c8..509bbd0 100644 ---- a/nptl/tst-cond1.c -+++ b/nptl/tst-cond1.c -@@ -73,6 +73,9 @@ do_test (void) - - puts ("parent: wait for condition"); - -+ /* This test will fail on spurious wake-ups, which are allowed; however, -+ the current implementation shouldn't produce spurious wake-ups in the -+ scenario we are testing here. */ - err = pthread_cond_wait (&cond, &mut); - if (err != 0) - error (EXIT_FAILURE, err, "parent: cannot wait fir signal"); -diff --git a/nptl/tst-cond20.c b/nptl/tst-cond20.c -index 918c4ad..665a66a 100644 ---- a/nptl/tst-cond20.c -+++ b/nptl/tst-cond20.c -@@ -96,7 +96,10 @@ do_test (void) - - for (i = 0; i < ROUNDS; ++i) - { -- pthread_cond_wait (&cond2, &mut); -+ /* Make sure we discard spurious wake-ups. */ -+ do -+ pthread_cond_wait (&cond2, &mut); -+ while (count != N); - - if (i & 1) - pthread_mutex_unlock (&mut); -diff --git a/nptl/tst-cond22.c b/nptl/tst-cond22.c -index bd978e5..64f19ea 100644 ---- a/nptl/tst-cond22.c -+++ b/nptl/tst-cond22.c -@@ -106,10 +106,11 @@ do_test (void) - status = 1; - } - -- printf ("cond = { %d, %x, %lld, %lld, %lld, %p, %u, %u }\n", -- c.__data.__lock, c.__data.__futex, c.__data.__total_seq, -- c.__data.__wakeup_seq, c.__data.__woken_seq, c.__data.__mutex, -- c.__data.__nwaiters, c.__data.__broadcast_seq); -+ printf ("cond = { %llu, %llu, %u/%u/%u, %u/%u/%u, %u, %u }\n", -+ c.__data.__wseq, c.__data.__g1_start, -+ c.__data.__g_signals[0], c.__data.__g_refs[0], c.__data.__g_size[0], -+ c.__data.__g_signals[1], c.__data.__g_refs[1], c.__data.__g_size[1], -+ c.__data.__g1_orig_size, c.__data.__wrefs); - - if (pthread_create (&th, NULL, tf, (void *) 1l) != 0) - { -@@ -148,10 +149,11 @@ do_test (void) - status = 1; - } - -- printf ("cond = { %d, %x, %lld, %lld, %lld, %p, %u, %u }\n", -- c.__data.__lock, c.__data.__futex, c.__data.__total_seq, -- c.__data.__wakeup_seq, c.__data.__woken_seq, c.__data.__mutex, -- c.__data.__nwaiters, c.__data.__broadcast_seq); -+ printf ("cond = { %llu, %llu, %u/%u/%u, %u/%u/%u, %u, %u }\n", -+ c.__data.__wseq, c.__data.__g1_start, -+ c.__data.__g_signals[0], c.__data.__g_refs[0], c.__data.__g_size[0], -+ c.__data.__g_signals[1], c.__data.__g_refs[1], c.__data.__g_size[1], -+ c.__data.__g1_orig_size, c.__data.__wrefs); - - return status; - } -diff --git a/sysdeps/aarch64/nptl/bits/pthreadtypes.h b/sysdeps/aarch64/nptl/bits/pthreadtypes.h -index 13984a7..c6fa632 100644 ---- a/sysdeps/aarch64/nptl/bits/pthreadtypes.h -+++ b/sysdeps/aarch64/nptl/bits/pthreadtypes.h -@@ -90,17 +90,30 @@ typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- 
__extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; -- long int __align; -+ __extension__ long long int __align; - } pthread_cond_t; - - typedef union -diff --git a/sysdeps/arm/nptl/bits/pthreadtypes.h b/sysdeps/arm/nptl/bits/pthreadtypes.h -index afb5392..53518c6 100644 ---- a/sysdeps/arm/nptl/bits/pthreadtypes.h -+++ b/sysdeps/arm/nptl/bits/pthreadtypes.h -@@ -93,14 +93,27 @@ typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; -diff --git a/sysdeps/ia64/nptl/bits/pthreadtypes.h b/sysdeps/ia64/nptl/bits/pthreadtypes.h -index f2e6dac..e72dbfd 100644 ---- a/sysdeps/ia64/nptl/bits/pthreadtypes.h -+++ b/sysdeps/ia64/nptl/bits/pthreadtypes.h -@@ -90,17 +90,30 @@ typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; -- long int __align; -+ __extension__ long long int __align; - } pthread_cond_t; - - typedef union -diff --git a/sysdeps/m68k/nptl/bits/pthreadtypes.h b/sysdeps/m68k/nptl/bits/pthreadtypes.h -index d8faa7a..c5e9021 100644 ---- a/sysdeps/m68k/nptl/bits/pthreadtypes.h -+++ b/sysdeps/m68k/nptl/bits/pthreadtypes.h -@@ -88,19 +88,33 @@ typedef union - - - /* Data structure for conditional variable handling. The structure of -- the attribute type is deliberately not exposed. */ -+ the attribute type is not exposed on purpose. 
*/ - typedef union - { - struct - { -- int __lock __attribute__ ((__aligned__ (4))); -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ /* Enforce proper alignment of fields used as futex words. */ -+ unsigned int __g_refs[2] __attribute__ ((__aligned__ (4))); -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; -diff --git a/sysdeps/microblaze/nptl/bits/pthreadtypes.h b/sysdeps/microblaze/nptl/bits/pthreadtypes.h -index 9e9e307..b6623c2 100644 ---- a/sysdeps/microblaze/nptl/bits/pthreadtypes.h -+++ b/sysdeps/microblaze/nptl/bits/pthreadtypes.h -@@ -91,14 +91,27 @@ typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; -diff --git a/sysdeps/mips/nptl/bits/pthreadtypes.h b/sysdeps/mips/nptl/bits/pthreadtypes.h -index 68ed94b..7ddc7bf 100644 ---- a/sysdeps/mips/nptl/bits/pthreadtypes.h -+++ b/sysdeps/mips/nptl/bits/pthreadtypes.h -@@ -117,19 +117,32 @@ typedef union - - - /* Data structure for conditional variable handling. The structure of -- the attribute type is deliberately not exposed. */ -+ the attribute type is not exposed on purpose. 
*/ - typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; -diff --git a/sysdeps/nios2/nptl/bits/pthreadtypes.h b/sysdeps/nios2/nptl/bits/pthreadtypes.h -index 76076d0..3995e26 100644 ---- a/sysdeps/nios2/nptl/bits/pthreadtypes.h -+++ b/sysdeps/nios2/nptl/bits/pthreadtypes.h -@@ -88,19 +88,32 @@ typedef union - - - /* Data structure for conditional variable handling. The structure of -- the attribute type is deliberately not exposed. */ -+ the attribute type is not exposed on purpose. */ - typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; -diff --git a/sysdeps/nptl/internaltypes.h b/sysdeps/nptl/internaltypes.h -index 203c548..31e5a43 100644 ---- a/sysdeps/nptl/internaltypes.h -+++ b/sysdeps/nptl/internaltypes.h -@@ -68,20 +68,13 @@ struct pthread_condattr - { - /* Combination of values: - -- Bit 0 : flag whether conditional variable will be sharable between -- processes. -- -- Bit 1-7: clock ID. */ -+ Bit 0 : flag whether conditional variable will be -+ sharable between processes. -+ Bit 1-COND_CLOCK_BITS: Clock ID. COND_CLOCK_BITS is the number of bits -+ needed to represent the ID of the clock. */ - int value; - }; -- -- --/* The __NWAITERS field is used as a counter and to house the number -- of bits for other purposes. COND_CLOCK_BITS is the number -- of bits needed to represent the ID of the clock. COND_NWAITERS_SHIFT -- is the number of bits reserved for other purposes like the clock. */ --#define COND_CLOCK_BITS 1 --#define COND_NWAITERS_SHIFT 1 -+#define COND_CLOCK_BITS 1 - - - /* Read-write lock variable attribute data structure. */ -diff --git a/sysdeps/nptl/pthread.h b/sysdeps/nptl/pthread.h -index fd0894e..c122446 100644 ---- a/sysdeps/nptl/pthread.h -+++ b/sysdeps/nptl/pthread.h -@@ -183,7 +183,7 @@ enum - - - /* Conditional variable handling. 
*/ --#define PTHREAD_COND_INITIALIZER { { 0, 0, 0, 0, 0, (void *) 0, 0, 0 } } -+#define PTHREAD_COND_INITIALIZER { { {0}, {0}, {0, 0}, {0, 0}, 0, 0, {0, 0} } } - - - /* Cleanup buffers */ -diff --git a/sysdeps/s390/nptl/bits/pthreadtypes.h b/sysdeps/s390/nptl/bits/pthreadtypes.h -index 40d10fe..4e455ab 100644 ---- a/sysdeps/s390/nptl/bits/pthreadtypes.h -+++ b/sysdeps/s390/nptl/bits/pthreadtypes.h -@@ -142,14 +142,27 @@ typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; -diff --git a/sysdeps/sh/nptl/bits/pthreadtypes.h b/sysdeps/sh/nptl/bits/pthreadtypes.h -index 13fbd73..065dd11 100644 ---- a/sysdeps/sh/nptl/bits/pthreadtypes.h -+++ b/sysdeps/sh/nptl/bits/pthreadtypes.h -@@ -93,14 +93,27 @@ typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; -diff --git a/sysdeps/tile/nptl/bits/pthreadtypes.h b/sysdeps/tile/nptl/bits/pthreadtypes.h -index 7d68650..c12737f 100644 ---- a/sysdeps/tile/nptl/bits/pthreadtypes.h -+++ b/sysdeps/tile/nptl/bits/pthreadtypes.h -@@ -122,14 +122,27 @@ typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; -diff --git a/sysdeps/unix/sysv/linux/alpha/bits/pthreadtypes.h 
b/sysdeps/unix/sysv/linux/alpha/bits/pthreadtypes.h -index 1a1779b..d88b045 100644 ---- a/sysdeps/unix/sysv/linux/alpha/bits/pthreadtypes.h -+++ b/sysdeps/unix/sysv/linux/alpha/bits/pthreadtypes.h -@@ -84,19 +84,32 @@ typedef union - - - /* Data structure for conditional variable handling. The structure of -- the attribute type is deliberately not exposed. */ -+ the attribute type is not exposed on purpose. */ - typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; -diff --git a/sysdeps/unix/sysv/linux/hppa/internaltypes.h b/sysdeps/unix/sysv/linux/hppa/internaltypes.h -index 651ce2e..d649657 100644 ---- a/sysdeps/unix/sysv/linux/hppa/internaltypes.h -+++ b/sysdeps/unix/sysv/linux/hppa/internaltypes.h -@@ -46,32 +46,38 @@ fails because __initializer is zero, and the structure will be used as - is correctly. */ - - #define cond_compat_clear(var) \ --({ \ -- int tmp = 0; \ -- var->__data.__lock = 0; \ -- var->__data.__futex = 0; \ -- var->__data.__mutex = NULL; \ -- /* Clear __initializer last, to indicate initialization is done. */ \ -- __asm__ __volatile__ ("stw,ma %1,0(%0)" \ -- : : "r" (&var->__data.__initializer), "r" (tmp) : "memory"); \ -+({ \ -+ int tmp = 0; \ -+ var->__data.__wseq = 0; \ -+ var->__data.__signals_sent = 0; \ -+ var->__data.__confirmed = 0; \ -+ var->__data.__generation = 0; \ -+ var->__data.__mutex = NULL; \ -+ var->__data.__quiescence_waiters = 0; \ -+ var->__data.__clockid = 0; \ -+ /* Clear __initializer last, to indicate initialization is done. */ \ -+ /* This synchronizes-with the acquire load below. */ \ -+ atomic_store_release (&var->__data.__initializer, 0); \ - }) - - #define cond_compat_check_and_clear(var) \ - ({ \ -- int ret; \ -- volatile int *value = &var->__data.__initializer; \ -- if ((ret = atomic_compare_and_exchange_val_acq(value, 2, 1))) \ -+ int v; \ -+ int *value = &var->__data.__initializer; \ -+ /* This synchronizes-with the release store above. */ \ -+ while ((v = atomic_load_acquire (value)) != 0) \ - { \ -- if (ret == 1) \ -+ if (v == 1 \ -+ /* Relaxed MO is fine; it only matters who's first. */ \ -+ && atomic_compare_exchange_acquire_weak_relaxed (value, 1, 2)) \ - { \ -- /* Initialize structure. */ \ -+ /* We're first; initialize structure. */ \ - cond_compat_clear (var); \ -+ break; \ - } \ - else \ -- { \ -- /* Yield until structure is initialized. */ \ -- while (*value == 2) sched_yield (); \ -- } \ -+ /* Yield before we re-check initialization status. 
*/ \ -+ sched_yield (); \ - } \ - }) - -diff --git a/sysdeps/unix/sysv/linux/hppa/pthread_cond_timedwait.c b/sysdeps/unix/sysv/linux/hppa/pthread_cond_timedwait.c -deleted file mode 100644 -index ec6fd23..0000000 ---- a/sysdeps/unix/sysv/linux/hppa/pthread_cond_timedwait.c -+++ /dev/null -@@ -1,41 +0,0 @@ --/* Copyright (C) 2009-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Carlos O'Donell , 2009. -- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. -- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library. If not, see -- . */ -- --#ifndef INCLUDED_SELF --# define INCLUDED_SELF --# include --#else --# include --# include --# include --# include --int --__pthread_cond_timedwait (pthread_cond_t *cond, pthread_mutex_t *mutex, -- const struct timespec *abstime) --{ -- cond_compat_check_and_clear (cond); -- return __pthread_cond_timedwait_internal (cond, mutex, abstime); --} --versioned_symbol (libpthread, __pthread_cond_timedwait, pthread_cond_timedwait, -- GLIBC_2_3_2); --# undef versioned_symbol --# define versioned_symbol(lib, local, symbol, version) --# undef __pthread_cond_timedwait --# define __pthread_cond_timedwait __pthread_cond_timedwait_internal --# include_next --#endif -diff --git a/sysdeps/unix/sysv/linux/hppa/pthread_cond_wait.c b/sysdeps/unix/sysv/linux/hppa/pthread_cond_wait.c -index 8f02831..0611f7d 100644 ---- a/sysdeps/unix/sysv/linux/hppa/pthread_cond_wait.c -+++ b/sysdeps/unix/sysv/linux/hppa/pthread_cond_wait.c -@@ -32,9 +32,22 @@ __pthread_cond_wait (pthread_cond_t *cond, pthread_mutex_t *mutex) - } - versioned_symbol (libpthread, __pthread_cond_wait, pthread_cond_wait, - GLIBC_2_3_2); -+int -+__pthread_cond_timedwait (cond, mutex, abstime) -+ pthread_cond_t *cond; -+ pthread_mutex_t *mutex; -+ const struct timespec *abstime; -+{ -+ cond_compat_check_and_clear (cond); -+ return __pthread_cond_timedwait_internal (cond, mutex, abstime); -+} -+versioned_symbol (libpthread, __pthread_cond_timedwait, pthread_cond_timedwait, -+ GLIBC_2_3_2); - # undef versioned_symbol - # define versioned_symbol(lib, local, symbol, version) - # undef __pthread_cond_wait - # define __pthread_cond_wait __pthread_cond_wait_internal -+# undef __pthread_cond_timedwait -+# define __pthread_cond_timedwait __pthread_cond_timedwait_internal - # include_next - #endif -diff --git a/sysdeps/unix/sysv/linux/i386/i686/pthread_cond_timedwait.S b/sysdeps/unix/sysv/linux/i386/i686/pthread_cond_timedwait.S -deleted file mode 100644 -index f697e5b..0000000 ---- a/sysdeps/unix/sysv/linux/i386/i686/pthread_cond_timedwait.S -+++ /dev/null -@@ -1,20 +0,0 @@ --/* Copyright (C) 2003-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Ulrich Drepper , 2003. 
-- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. -- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library; if not, see -- . */ -- --#define HAVE_CMOV 1 --#include "../pthread_cond_timedwait.S" -diff --git a/sysdeps/unix/sysv/linux/i386/pthread_cond_broadcast.S b/sysdeps/unix/sysv/linux/i386/pthread_cond_broadcast.S -deleted file mode 100644 -index 5996688..0000000 ---- a/sysdeps/unix/sysv/linux/i386/pthread_cond_broadcast.S -+++ /dev/null -@@ -1,241 +0,0 @@ --/* Copyright (C) 2002-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Ulrich Drepper , 2002. -- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. -- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library; if not, see -- . */ -- --#include --#include --#include --#include --#include --#include --#include --#include -- -- .text -- -- /* int pthread_cond_broadcast (pthread_cond_t *cond) */ -- .globl __pthread_cond_broadcast -- .type __pthread_cond_broadcast, @function -- .align 16 --__pthread_cond_broadcast: -- cfi_startproc -- pushl %ebx -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%ebx, 0) -- pushl %esi -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%esi, 0) -- pushl %edi -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%edi, 0) -- pushl %ebp -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%ebp, 0) -- cfi_remember_state -- -- movl 20(%esp), %ebx -- -- LIBC_PROBE (cond_broadcast, 1, %edx) -- -- /* Get internal lock. */ -- movl $1, %edx -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %edx, (%ebx) --#else -- cmpxchgl %edx, cond_lock(%ebx) --#endif -- jnz 1f -- --2: addl $cond_futex, %ebx -- movl total_seq+4-cond_futex(%ebx), %eax -- movl total_seq-cond_futex(%ebx), %ebp -- cmpl wakeup_seq+4-cond_futex(%ebx), %eax -- ja 3f -- jb 4f -- cmpl wakeup_seq-cond_futex(%ebx), %ebp -- jna 4f -- -- /* Cause all currently waiting threads to recognize they are -- woken up. */ --3: movl %ebp, wakeup_seq-cond_futex(%ebx) -- movl %eax, wakeup_seq-cond_futex+4(%ebx) -- movl %ebp, woken_seq-cond_futex(%ebx) -- movl %eax, woken_seq-cond_futex+4(%ebx) -- addl %ebp, %ebp -- addl $1, broadcast_seq-cond_futex(%ebx) -- movl %ebp, (%ebx) -- -- /* Get the address of the mutex used. */ -- movl dep_mutex-cond_futex(%ebx), %edi -- -- /* Unlock. */ -- LOCK -- subl $1, cond_lock-cond_futex(%ebx) -- jne 7f -- -- /* Don't use requeue for pshared condvars. */ --8: cmpl $-1, %edi -- je 9f -- -- /* Do not use requeue for pshared condvars. 
*/ -- testl $PS_BIT, MUTEX_KIND(%edi) -- jne 9f -- -- /* Requeue to a non-robust PI mutex if the PI bit is set and -- the robust bit is not set. */ -- movl MUTEX_KIND(%edi), %eax -- andl $(ROBUST_BIT|PI_BIT), %eax -- cmpl $PI_BIT, %eax -- je 81f -- -- /* Wake up all threads. */ --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $(FUTEX_CMP_REQUEUE|FUTEX_PRIVATE_FLAG), %ecx --#else -- movl %gs:PRIVATE_FUTEX, %ecx -- orl $FUTEX_CMP_REQUEUE, %ecx --#endif -- movl $SYS_futex, %eax -- movl $0x7fffffff, %esi -- movl $1, %edx -- /* Get the address of the futex involved. */ --# if MUTEX_FUTEX != 0 -- addl $MUTEX_FUTEX, %edi --# endif --/* FIXME: Until Ingo fixes 4G/4G vDSO, 6 arg syscalls are broken for sysenter. -- ENTER_KERNEL */ -- int $0x80 -- -- /* For any kind of error, which mainly is EAGAIN, we try again -- with WAKE. The general test also covers running on old -- kernels. */ -- cmpl $0xfffff001, %eax -- jae 9f -- --6: xorl %eax, %eax -- popl %ebp -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%ebp) -- popl %edi -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%edi) -- popl %esi -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%esi) -- popl %ebx -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%ebx) -- ret -- -- cfi_restore_state -- --81: movl $(FUTEX_CMP_REQUEUE_PI|FUTEX_PRIVATE_FLAG), %ecx -- movl $SYS_futex, %eax -- movl $0x7fffffff, %esi -- movl $1, %edx -- /* Get the address of the futex involved. */ --# if MUTEX_FUTEX != 0 -- addl $MUTEX_FUTEX, %edi --# endif -- int $0x80 -- -- /* For any kind of error, which mainly is EAGAIN, we try again -- with WAKE. The general test also covers running on old -- kernels. */ -- cmpl $0xfffff001, %eax -- jb 6b -- jmp 9f -- -- /* Initial locking failed. */ --1: --#if cond_lock == 0 -- movl %ebx, %edx --#else -- leal cond_lock(%ebx), %edx --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_lock_wait -- jmp 2b -- -- .align 16 -- /* Unlock. */ --4: LOCK -- subl $1, cond_lock-cond_futex(%ebx) -- je 6b -- -- /* Unlock in loop requires wakeup. */ --5: leal cond_lock-cond_futex(%ebx), %eax --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_futex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- jmp 6b -- -- /* Unlock in loop requires wakeup. */ --7: leal cond_lock-cond_futex(%ebx), %eax --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_futex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- jmp 8b -- --9: /* The futex requeue functionality is not available. 
*/ -- movl $0x7fffffff, %edx --#if FUTEX_PRIVATE_FLAG > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_futex(%ebx) -- sete %cl -- subl $1, %ecx --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif -- addl $FUTEX_WAKE, %ecx -- movl $SYS_futex, %eax -- ENTER_KERNEL -- jmp 6b -- cfi_endproc -- .size __pthread_cond_broadcast, .-__pthread_cond_broadcast --versioned_symbol (libpthread, __pthread_cond_broadcast, pthread_cond_broadcast, -- GLIBC_2_3_2) -diff --git a/sysdeps/unix/sysv/linux/i386/pthread_cond_signal.S b/sysdeps/unix/sysv/linux/i386/pthread_cond_signal.S -deleted file mode 100644 -index 0038775..0000000 ---- a/sysdeps/unix/sysv/linux/i386/pthread_cond_signal.S -+++ /dev/null -@@ -1,216 +0,0 @@ --/* Copyright (C) 2002-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Ulrich Drepper , 2002. -- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. -- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library; if not, see -- . */ -- --#include --#include --#include --#include --#include --#include --#include --#include -- -- .text -- -- /* int pthread_cond_signal (pthread_cond_t *cond) */ -- .globl __pthread_cond_signal -- .type __pthread_cond_signal, @function -- .align 16 --__pthread_cond_signal: -- -- cfi_startproc -- pushl %ebx -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%ebx, 0) -- pushl %edi -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%edi, 0) -- cfi_remember_state -- -- movl 12(%esp), %edi -- -- LIBC_PROBE (cond_signal, 1, %edi) -- -- /* Get internal lock. */ -- movl $1, %edx -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %edx, (%edi) --#else -- cmpxchgl %edx, cond_lock(%edi) --#endif -- jnz 1f -- --2: leal cond_futex(%edi), %ebx -- movl total_seq+4(%edi), %eax -- movl total_seq(%edi), %ecx -- cmpl wakeup_seq+4(%edi), %eax --#if cond_lock != 0 -- /* Must use leal to preserve the flags. */ -- leal cond_lock(%edi), %edi --#endif -- ja 3f -- jb 4f -- cmpl wakeup_seq-cond_futex(%ebx), %ecx -- jbe 4f -- -- /* Bump the wakeup number. */ --3: addl $1, wakeup_seq-cond_futex(%ebx) -- adcl $0, wakeup_seq-cond_futex+4(%ebx) -- addl $1, (%ebx) -- -- /* Wake up one thread. */ -- pushl %esi -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%esi, 0) -- pushl %ebp -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%ebp, 0) -- --#if FUTEX_PRIVATE_FLAG > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_futex(%ebx) -- sete %cl -- je 8f -- -- movl dep_mutex-cond_futex(%ebx), %edx -- /* Requeue to a non-robust PI mutex if the PI bit is set and -- the robust bit is not set. 
*/ -- movl MUTEX_KIND(%edx), %eax -- andl $(ROBUST_BIT|PI_BIT), %eax -- cmpl $PI_BIT, %eax -- je 9f -- --8: subl $1, %ecx --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif -- addl $FUTEX_WAKE_OP, %ecx -- movl $SYS_futex, %eax -- movl $1, %edx -- movl $1, %esi -- movl $FUTEX_OP_CLEAR_WAKE_IF_GT_ONE, %ebp -- /* FIXME: Until Ingo fixes 4G/4G vDSO, 6 arg syscalls are broken for -- sysenter. -- ENTER_KERNEL */ -- int $0x80 -- popl %ebp -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%ebp) -- popl %esi -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%esi) -- -- /* For any kind of error, we try again with WAKE. -- The general test also covers running on old kernels. */ -- cmpl $-4095, %eax -- jae 7f -- --6: xorl %eax, %eax -- popl %edi -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%edi) -- popl %ebx -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%ebx) -- ret -- -- cfi_restore_state -- --9: movl $(FUTEX_CMP_REQUEUE_PI|FUTEX_PRIVATE_FLAG), %ecx -- movl $SYS_futex, %eax -- movl $1, %edx -- xorl %esi, %esi -- movl dep_mutex-cond_futex(%ebx), %edi -- movl (%ebx), %ebp -- /* FIXME: Until Ingo fixes 4G/4G vDSO, 6 arg syscalls are broken for -- sysenter. -- ENTER_KERNEL */ -- int $0x80 -- popl %ebp -- popl %esi -- -- leal -cond_futex(%ebx), %edi -- -- /* For any kind of error, we try again with WAKE. -- The general test also covers running on old kernels. */ -- cmpl $-4095, %eax -- jb 4f -- --7: --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif -- orl $FUTEX_WAKE, %ecx -- -- movl $SYS_futex, %eax -- /* %edx should be 1 already from $FUTEX_WAKE_OP syscall. -- movl $1, %edx */ -- ENTER_KERNEL -- -- /* Unlock. Note that at this point %edi always points to -- cond_lock. */ --4: LOCK -- subl $1, (%edi) -- je 6b -- -- /* Unlock in loop requires wakeup. */ --5: movl %edi, %eax --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_futex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- jmp 6b -- -- /* Initial locking failed. */ --1: --#if cond_lock == 0 -- movl %edi, %edx --#else -- leal cond_lock(%edi), %edx --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%edi) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_lock_wait -- jmp 2b -- -- cfi_endproc -- .size __pthread_cond_signal, .-__pthread_cond_signal --versioned_symbol (libpthread, __pthread_cond_signal, pthread_cond_signal, -- GLIBC_2_3_2) -diff --git a/sysdeps/unix/sysv/linux/i386/pthread_cond_timedwait.S b/sysdeps/unix/sysv/linux/i386/pthread_cond_timedwait.S -deleted file mode 100644 -index 6256376..0000000 ---- a/sysdeps/unix/sysv/linux/i386/pthread_cond_timedwait.S -+++ /dev/null -@@ -1,974 +0,0 @@ --/* Copyright (C) 2002-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Ulrich Drepper , 2002. -- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. 
-- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library; if not, see -- . */ -- --#include --#include --#include --#include --#include --#include --#include --#include -- -- .text -- --/* int pthread_cond_timedwait (pthread_cond_t *cond, pthread_mutex_t *mutex, -- const struct timespec *abstime) */ -- .globl __pthread_cond_timedwait -- .type __pthread_cond_timedwait, @function -- .align 16 --__pthread_cond_timedwait: --.LSTARTCODE: -- cfi_startproc --#ifdef SHARED -- cfi_personality(DW_EH_PE_pcrel | DW_EH_PE_sdata4 | DW_EH_PE_indirect, -- DW.ref.__gcc_personality_v0) -- cfi_lsda(DW_EH_PE_pcrel | DW_EH_PE_sdata4, .LexceptSTART) --#else -- cfi_personality(DW_EH_PE_udata4, __gcc_personality_v0) -- cfi_lsda(DW_EH_PE_udata4, .LexceptSTART) --#endif -- -- pushl %ebp -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%ebp, 0) -- pushl %edi -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%edi, 0) -- pushl %esi -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%esi, 0) -- pushl %ebx -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%ebx, 0) -- -- movl 20(%esp), %ebx -- movl 28(%esp), %ebp -- -- LIBC_PROBE (cond_timedwait, 3, %ebx, 24(%esp), %ebp) -- -- cmpl $1000000000, 4(%ebp) -- movl $EINVAL, %eax -- jae 18f -- -- /* Stack frame: -- -- esp + 32 -- +--------------------------+ -- esp + 24 | timeout value | -- +--------------------------+ -- esp + 20 | futex pointer | -- +--------------------------+ -- esp + 16 | pi-requeued flag | -- +--------------------------+ -- esp + 12 | old broadcast_seq value | -- +--------------------------+ -- esp + 4 | old wake_seq value | -- +--------------------------+ -- esp + 0 | old cancellation mode | -- +--------------------------+ -- */ -- --#ifndef __ASSUME_FUTEX_CLOCK_REALTIME --# ifdef PIC -- LOAD_PIC_REG (cx) -- cmpl $0, __have_futex_clock_realtime@GOTOFF(%ecx) --# else -- cmpl $0, __have_futex_clock_realtime --# endif -- je .Lreltmo --#endif -- -- /* Get internal lock. */ -- movl $1, %edx -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %edx, (%ebx) --#else -- cmpxchgl %edx, cond_lock(%ebx) --#endif -- jnz 1f -- -- /* Store the reference to the mutex. If there is already a -- different value in there this is a bad user bug. */ --2: cmpl $-1, dep_mutex(%ebx) -- movl 24(%esp), %eax -- je 17f -- movl %eax, dep_mutex(%ebx) -- -- /* Unlock the mutex. */ --17: xorl %edx, %edx -- call __pthread_mutex_unlock_usercnt -- -- testl %eax, %eax -- jne 16f -- -- addl $1, total_seq(%ebx) -- adcl $0, total_seq+4(%ebx) -- addl $1, cond_futex(%ebx) -- addl $(1 << nwaiters_shift), cond_nwaiters(%ebx) -- --#ifdef __ASSUME_FUTEX_CLOCK_REALTIME --# define FRAME_SIZE 24 --#else --# define FRAME_SIZE 32 --#endif -- subl $FRAME_SIZE, %esp -- cfi_adjust_cfa_offset(FRAME_SIZE) -- cfi_remember_state -- -- /* Get and store current wakeup_seq value. */ -- movl wakeup_seq(%ebx), %edi -- movl wakeup_seq+4(%ebx), %edx -- movl broadcast_seq(%ebx), %eax -- movl %edi, 4(%esp) -- movl %edx, 8(%esp) -- movl %eax, 12(%esp) -- -- /* Reset the pi-requeued flag. */ -- movl $0, 16(%esp) -- -- cmpl $0, (%ebp) -- movl $-ETIMEDOUT, %esi -- js 6f -- --8: movl cond_futex(%ebx), %edi -- movl %edi, 20(%esp) -- -- /* Unlock. 
*/ -- LOCK --#if cond_lock == 0 -- subl $1, (%ebx) --#else -- subl $1, cond_lock(%ebx) --#endif -- jne 3f -- --.LcleanupSTART: --4: call __pthread_enable_asynccancel -- movl %eax, (%esp) -- -- leal (%ebp), %esi --#if FUTEX_PRIVATE_FLAG > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- sete %cl -- je 40f -- -- movl dep_mutex(%ebx), %edi -- /* Requeue to a non-robust PI mutex if the PI bit is set and -- the robust bit is not set. */ -- movl MUTEX_KIND(%edi), %eax -- andl $(ROBUST_BIT|PI_BIT), %eax -- cmpl $PI_BIT, %eax -- jne 40f -- -- movl $(FUTEX_WAIT_REQUEUE_PI|FUTEX_PRIVATE_FLAG), %ecx -- /* The following only works like this because we only support -- two clocks, represented using a single bit. */ -- testl $1, cond_nwaiters(%ebx) -- /* XXX Need to implement using sete instead of a jump. */ -- jne 42f -- orl $FUTEX_CLOCK_REALTIME, %ecx -- --42: movl 20(%esp), %edx -- addl $cond_futex, %ebx --.Ladd_cond_futex_pi: -- movl $SYS_futex, %eax -- ENTER_KERNEL -- subl $cond_futex, %ebx --.Lsub_cond_futex_pi: -- movl %eax, %esi -- /* Set the pi-requeued flag only if the kernel has returned 0. The -- kernel does not hold the mutex on ETIMEDOUT or any other error. */ -- cmpl $0, %eax -- sete 16(%esp) -- je 41f -- -- /* When a futex syscall with FUTEX_WAIT_REQUEUE_PI returns -- successfully, it has already locked the mutex for us and the -- pi_flag (16(%esp)) is set to denote that fact. However, if another -- thread changed the futex value before we entered the wait, the -- syscall may return an EAGAIN and the mutex is not locked. We go -- ahead with a success anyway since later we look at the pi_flag to -- decide if we got the mutex or not. The sequence numbers then make -- sure that only one of the threads actually wake up. We retry using -- normal FUTEX_WAIT only if the kernel returned ENOSYS, since normal -- and PI futexes don't mix. -- -- Note that we don't check for EAGAIN specifically; we assume that the -- only other error the futex function could return is EAGAIN (barring -- the ETIMEOUT of course, for the timeout case in futex) since -- anything else would mean an error in our function. It is too -- expensive to do that check for every call (which is quite common in -- case of a large number of threads), so it has been skipped. */ -- cmpl $-ENOSYS, %eax -- jne 41f -- xorl %ecx, %ecx -- --40: subl $1, %ecx -- movl $0, 16(%esp) --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif -- addl $FUTEX_WAIT_BITSET, %ecx -- /* The following only works like this because we only support -- two clocks, represented using a single bit. */ -- testl $1, cond_nwaiters(%ebx) -- jne 30f -- orl $FUTEX_CLOCK_REALTIME, %ecx --30: -- movl 20(%esp), %edx -- movl $0xffffffff, %ebp -- addl $cond_futex, %ebx --.Ladd_cond_futex: -- movl $SYS_futex, %eax -- ENTER_KERNEL -- subl $cond_futex, %ebx --.Lsub_cond_futex: -- movl 28+FRAME_SIZE(%esp), %ebp -- movl %eax, %esi -- --41: movl (%esp), %eax -- call __pthread_disable_asynccancel --.LcleanupEND: -- -- /* Lock. 
*/ -- movl $1, %edx -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %edx, (%ebx) --#else -- cmpxchgl %edx, cond_lock(%ebx) --#endif -- jnz 5f -- --6: movl broadcast_seq(%ebx), %eax -- cmpl 12(%esp), %eax -- jne 23f -- -- movl woken_seq(%ebx), %eax -- movl woken_seq+4(%ebx), %ecx -- -- movl wakeup_seq(%ebx), %edi -- movl wakeup_seq+4(%ebx), %edx -- -- cmpl 8(%esp), %edx -- jne 7f -- cmpl 4(%esp), %edi -- je 15f -- --7: cmpl %ecx, %edx -- jne 9f -- cmp %eax, %edi -- jne 9f -- --15: cmpl $-ETIMEDOUT, %esi -- je 28f -- -- /* We need to go back to futex_wait. If we're using requeue_pi, then -- release the mutex we had acquired and go back. */ -- movl 16(%esp), %edx -- test %edx, %edx -- jz 8b -- -- /* Adjust the mutex values first and then unlock it. The unlock -- should always succeed or else the kernel did not lock the mutex -- correctly. */ -- movl dep_mutex(%ebx), %eax -- call __pthread_mutex_cond_lock_adjust -- movl dep_mutex(%ebx), %eax -- xorl %edx, %edx -- call __pthread_mutex_unlock_usercnt -- jmp 8b -- --28: addl $1, wakeup_seq(%ebx) -- adcl $0, wakeup_seq+4(%ebx) -- addl $1, cond_futex(%ebx) -- movl $ETIMEDOUT, %esi -- jmp 14f -- --23: xorl %esi, %esi -- jmp 24f -- --9: xorl %esi, %esi --14: addl $1, woken_seq(%ebx) -- adcl $0, woken_seq+4(%ebx) -- --24: subl $(1 << nwaiters_shift), cond_nwaiters(%ebx) -- -- /* Wake up a thread which wants to destroy the condvar object. */ -- movl total_seq(%ebx), %eax -- andl total_seq+4(%ebx), %eax -- cmpl $0xffffffff, %eax -- jne 25f -- movl cond_nwaiters(%ebx), %eax -- andl $~((1 << nwaiters_shift) - 1), %eax -- jne 25f -- -- addl $cond_nwaiters, %ebx -- movl $SYS_futex, %eax --#if FUTEX_PRIVATE_FLAG > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_nwaiters(%ebx) -- sete %cl -- subl $1, %ecx --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif -- addl $FUTEX_WAKE, %ecx -- movl $1, %edx -- ENTER_KERNEL -- subl $cond_nwaiters, %ebx -- --25: LOCK --#if cond_lock == 0 -- subl $1, (%ebx) --#else -- subl $1, cond_lock(%ebx) --#endif -- jne 10f -- --11: movl 24+FRAME_SIZE(%esp), %eax -- /* With requeue_pi, the mutex lock is held in the kernel. */ -- movl 16(%esp), %ecx -- testl %ecx, %ecx -- jnz 27f -- -- call __pthread_mutex_cond_lock --26: addl $FRAME_SIZE, %esp -- cfi_adjust_cfa_offset(-FRAME_SIZE) -- -- /* We return the result of the mutex_lock operation if it failed. */ -- testl %eax, %eax --#ifdef HAVE_CMOV -- cmovel %esi, %eax --#else -- jne 22f -- movl %esi, %eax --22: --#endif -- --18: popl %ebx -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%ebx) -- popl %esi -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%esi) -- popl %edi -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%edi) -- popl %ebp -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%ebp) -- -- ret -- -- cfi_restore_state -- --27: call __pthread_mutex_cond_lock_adjust -- xorl %eax, %eax -- jmp 26b -- -- cfi_adjust_cfa_offset(-FRAME_SIZE); -- /* Initial locking failed. */ --1: --#if cond_lock == 0 -- movl %ebx, %edx --#else -- leal cond_lock(%ebx), %edx --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_lock_wait -- jmp 2b -- -- /* The initial unlocking of the mutex failed. 
*/ --16: -- LOCK --#if cond_lock == 0 -- subl $1, (%ebx) --#else -- subl $1, cond_lock(%ebx) --#endif -- jne 18b -- -- movl %eax, %esi --#if cond_lock == 0 -- movl %ebx, %eax --#else -- leal cond_lock(%ebx), %eax --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- -- movl %esi, %eax -- jmp 18b -- -- cfi_adjust_cfa_offset(FRAME_SIZE) -- -- /* Unlock in loop requires wakeup. */ --3: --#if cond_lock == 0 -- movl %ebx, %eax --#else -- leal cond_lock(%ebx), %eax --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- jmp 4b -- -- /* Locking in loop failed. */ --5: --#if cond_lock == 0 -- movl %ebx, %edx --#else -- leal cond_lock(%ebx), %edx --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_lock_wait -- jmp 6b -- -- /* Unlock after loop requires wakeup. */ --10: --#if cond_lock == 0 -- movl %ebx, %eax --#else -- leal cond_lock(%ebx), %eax --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- jmp 11b -- --#ifndef __ASSUME_FUTEX_CLOCK_REALTIME -- cfi_adjust_cfa_offset(-FRAME_SIZE) --.Lreltmo: -- /* Get internal lock. */ -- movl $1, %edx -- xorl %eax, %eax -- LOCK --# if cond_lock == 0 -- cmpxchgl %edx, (%ebx) --# else -- cmpxchgl %edx, cond_lock(%ebx) --# endif -- jnz 101f -- -- /* Store the reference to the mutex. If there is already a -- different value in there this is a bad user bug. */ --102: cmpl $-1, dep_mutex(%ebx) -- movl 24(%esp), %eax -- je 117f -- movl %eax, dep_mutex(%ebx) -- -- /* Unlock the mutex. */ --117: xorl %edx, %edx -- call __pthread_mutex_unlock_usercnt -- -- testl %eax, %eax -- jne 16b -- -- addl $1, total_seq(%ebx) -- adcl $0, total_seq+4(%ebx) -- addl $1, cond_futex(%ebx) -- addl $(1 << nwaiters_shift), cond_nwaiters(%ebx) -- -- subl $FRAME_SIZE, %esp -- cfi_adjust_cfa_offset(FRAME_SIZE) -- -- /* Get and store current wakeup_seq value. */ -- movl wakeup_seq(%ebx), %edi -- movl wakeup_seq+4(%ebx), %edx -- movl broadcast_seq(%ebx), %eax -- movl %edi, 4(%esp) -- movl %edx, 8(%esp) -- movl %eax, 12(%esp) -- -- /* Reset the pi-requeued flag. */ -- movl $0, 16(%esp) -- -- /* Get the current time. */ --108: movl %ebx, %edx --# ifdef __NR_clock_gettime -- /* Get the clock number. */ -- movl cond_nwaiters(%ebx), %ebx -- andl $((1 << nwaiters_shift) - 1), %ebx -- /* Only clocks 0 and 1 are allowed so far. Both are handled in the -- kernel. */ -- leal 24(%esp), %ecx -- movl $__NR_clock_gettime, %eax -- ENTER_KERNEL -- movl %edx, %ebx -- -- /* Compute relative timeout. */ -- movl (%ebp), %ecx -- movl 4(%ebp), %edx -- subl 24(%esp), %ecx -- subl 28(%esp), %edx --# else -- /* Get the current time. */ -- leal 24(%esp), %ebx -- xorl %ecx, %ecx -- movl $__NR_gettimeofday, %eax -- ENTER_KERNEL -- movl %edx, %ebx -- -- /* Compute relative timeout. 
*/ -- movl 28(%esp), %eax -- movl $1000, %edx -- mul %edx /* Milli seconds to nano seconds. */ -- movl (%ebp), %ecx -- movl 4(%ebp), %edx -- subl 24(%esp), %ecx -- subl %eax, %edx --# endif -- jns 112f -- addl $1000000000, %edx -- subl $1, %ecx --112: testl %ecx, %ecx -- movl $-ETIMEDOUT, %esi -- js 106f -- -- /* Store relative timeout. */ --121: movl %ecx, 24(%esp) -- movl %edx, 28(%esp) -- -- movl cond_futex(%ebx), %edi -- movl %edi, 20(%esp) -- -- /* Unlock. */ -- LOCK --# if cond_lock == 0 -- subl $1, (%ebx) --# else -- subl $1, cond_lock(%ebx) --# endif -- jne 103f -- --.LcleanupSTART2: --104: call __pthread_enable_asynccancel -- movl %eax, (%esp) -- -- leal 24(%esp), %esi --# if FUTEX_PRIVATE_FLAG > 255 -- xorl %ecx, %ecx --# endif -- cmpl $-1, dep_mutex(%ebx) -- sete %cl -- subl $1, %ecx --# ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --# else -- andl %gs:PRIVATE_FUTEX, %ecx --# endif --# if FUTEX_WAIT != 0 -- addl $FUTEX_WAIT, %ecx --# endif -- movl 20(%esp), %edx -- addl $cond_futex, %ebx --.Ladd_cond_futex2: -- movl $SYS_futex, %eax -- ENTER_KERNEL -- subl $cond_futex, %ebx --.Lsub_cond_futex2: -- movl %eax, %esi -- --141: movl (%esp), %eax -- call __pthread_disable_asynccancel --.LcleanupEND2: -- -- -- /* Lock. */ -- movl $1, %edx -- xorl %eax, %eax -- LOCK --# if cond_lock == 0 -- cmpxchgl %edx, (%ebx) --# else -- cmpxchgl %edx, cond_lock(%ebx) --# endif -- jnz 105f -- --106: movl broadcast_seq(%ebx), %eax -- cmpl 12(%esp), %eax -- jne 23b -- -- movl woken_seq(%ebx), %eax -- movl woken_seq+4(%ebx), %ecx -- -- movl wakeup_seq(%ebx), %edi -- movl wakeup_seq+4(%ebx), %edx -- -- cmpl 8(%esp), %edx -- jne 107f -- cmpl 4(%esp), %edi -- je 115f -- --107: cmpl %ecx, %edx -- jne 9b -- cmp %eax, %edi -- jne 9b -- --115: cmpl $-ETIMEDOUT, %esi -- je 28b -- -- jmp 8b -- -- cfi_adjust_cfa_offset(-FRAME_SIZE) -- /* Initial locking failed. */ --101: --# if cond_lock == 0 -- movl %ebx, %edx --# else -- leal cond_lock(%ebx), %edx --# endif --# if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --# endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --# if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --# endif -- call __lll_lock_wait -- jmp 102b -- -- cfi_adjust_cfa_offset(FRAME_SIZE) -- -- /* Unlock in loop requires wakeup. */ --103: --# if cond_lock == 0 -- movl %ebx, %eax --# else -- leal cond_lock(%ebx), %eax --# endif --# if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --# endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --# if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --# endif -- call __lll_unlock_wake -- jmp 104b -- -- /* Locking in loop failed. */ --105: --# if cond_lock == 0 -- movl %ebx, %edx --# else -- leal cond_lock(%ebx), %edx --# endif --# if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --# endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --# if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --# endif -- call __lll_lock_wait -- jmp 106b --#endif -- -- .size __pthread_cond_timedwait, .-__pthread_cond_timedwait --versioned_symbol (libpthread, __pthread_cond_timedwait, pthread_cond_timedwait, -- GLIBC_2_3_2) -- -- -- .type __condvar_tw_cleanup2, @function --__condvar_tw_cleanup2: -- subl $cond_futex, %ebx -- .size __condvar_tw_cleanup2, .-__condvar_tw_cleanup2 -- .type __condvar_tw_cleanup, @function --__condvar_tw_cleanup: -- movl %eax, %esi -- -- /* Get internal lock. 
*/ -- movl $1, %edx -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %edx, (%ebx) --#else -- cmpxchgl %edx, cond_lock(%ebx) --#endif -- jz 1f -- --#if cond_lock == 0 -- movl %ebx, %edx --#else -- leal cond_lock(%ebx), %edx --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_lock_wait -- --1: movl broadcast_seq(%ebx), %eax -- cmpl 12(%esp), %eax -- jne 3f -- -- /* We increment the wakeup_seq counter only if it is lower than -- total_seq. If this is not the case the thread was woken and -- then canceled. In this case we ignore the signal. */ -- movl total_seq(%ebx), %eax -- movl total_seq+4(%ebx), %edi -- cmpl wakeup_seq+4(%ebx), %edi -- jb 6f -- ja 7f -- cmpl wakeup_seq(%ebx), %eax -- jbe 7f -- --6: addl $1, wakeup_seq(%ebx) -- adcl $0, wakeup_seq+4(%ebx) -- addl $1, cond_futex(%ebx) -- --7: addl $1, woken_seq(%ebx) -- adcl $0, woken_seq+4(%ebx) -- --3: subl $(1 << nwaiters_shift), cond_nwaiters(%ebx) -- -- /* Wake up a thread which wants to destroy the condvar object. */ -- xorl %edi, %edi -- movl total_seq(%ebx), %eax -- andl total_seq+4(%ebx), %eax -- cmpl $0xffffffff, %eax -- jne 4f -- movl cond_nwaiters(%ebx), %eax -- andl $~((1 << nwaiters_shift) - 1), %eax -- jne 4f -- -- addl $cond_nwaiters, %ebx -- movl $SYS_futex, %eax --#if FUTEX_PRIVATE_FLAG > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_nwaiters(%ebx) -- sete %cl -- subl $1, %ecx --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif -- addl $FUTEX_WAKE, %ecx -- movl $1, %edx -- ENTER_KERNEL -- subl $cond_nwaiters, %ebx -- movl $1, %edi -- --4: LOCK --#if cond_lock == 0 -- subl $1, (%ebx) --#else -- subl $1, cond_lock(%ebx) --#endif -- je 2f -- --#if cond_lock == 0 -- movl %ebx, %eax --#else -- leal cond_lock(%ebx), %eax --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- -- /* Wake up all waiters to make sure no signal gets lost. */ --2: testl %edi, %edi -- jnz 5f -- addl $cond_futex, %ebx --#if FUTEX_PRIVATE_FLAG > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_futex(%ebx) -- sete %cl -- subl $1, %ecx --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif -- addl $FUTEX_WAKE, %ecx -- movl $SYS_futex, %eax -- movl $0x7fffffff, %edx -- ENTER_KERNEL -- -- /* Lock the mutex only if we don't own it already. This only happens -- in case of PI mutexes, if we got cancelled after a successful -- return of the futex syscall and before disabling async -- cancellation. */ --5: movl 24+FRAME_SIZE(%esp), %eax -- movl MUTEX_KIND(%eax), %ebx -- andl $(ROBUST_BIT|PI_BIT), %ebx -- cmpl $PI_BIT, %ebx -- jne 8f -- -- movl (%eax), %ebx -- andl $TID_MASK, %ebx -- cmpl %ebx, %gs:TID -- jne 8f -- /* We managed to get the lock. Fix it up before returning. 
*/ -- call __pthread_mutex_cond_lock_adjust -- jmp 9f -- --8: call __pthread_mutex_cond_lock -- --9: movl %esi, (%esp) --.LcallUR: -- call _Unwind_Resume -- hlt --.LENDCODE: -- cfi_endproc -- .size __condvar_tw_cleanup, .-__condvar_tw_cleanup -- -- -- .section .gcc_except_table,"a",@progbits --.LexceptSTART: -- .byte DW_EH_PE_omit # @LPStart format (omit) -- .byte DW_EH_PE_omit # @TType format (omit) -- .byte DW_EH_PE_sdata4 # call-site format -- # DW_EH_PE_sdata4 -- .uleb128 .Lcstend-.Lcstbegin --.Lcstbegin: -- .long .LcleanupSTART-.LSTARTCODE -- .long .Ladd_cond_futex_pi-.LcleanupSTART -- .long __condvar_tw_cleanup-.LSTARTCODE -- .uleb128 0 -- .long .Ladd_cond_futex_pi-.LSTARTCODE -- .long .Lsub_cond_futex_pi-.Ladd_cond_futex_pi -- .long __condvar_tw_cleanup2-.LSTARTCODE -- .uleb128 0 -- .long .Lsub_cond_futex_pi-.LSTARTCODE -- .long .Ladd_cond_futex-.Lsub_cond_futex_pi -- .long __condvar_tw_cleanup-.LSTARTCODE -- .uleb128 0 -- .long .Ladd_cond_futex-.LSTARTCODE -- .long .Lsub_cond_futex-.Ladd_cond_futex -- .long __condvar_tw_cleanup2-.LSTARTCODE -- .uleb128 0 -- .long .Lsub_cond_futex-.LSTARTCODE -- .long .LcleanupEND-.Lsub_cond_futex -- .long __condvar_tw_cleanup-.LSTARTCODE -- .uleb128 0 --#ifndef __ASSUME_FUTEX_CLOCK_REALTIME -- .long .LcleanupSTART2-.LSTARTCODE -- .long .Ladd_cond_futex2-.LcleanupSTART2 -- .long __condvar_tw_cleanup-.LSTARTCODE -- .uleb128 0 -- .long .Ladd_cond_futex2-.LSTARTCODE -- .long .Lsub_cond_futex2-.Ladd_cond_futex2 -- .long __condvar_tw_cleanup2-.LSTARTCODE -- .uleb128 0 -- .long .Lsub_cond_futex2-.LSTARTCODE -- .long .LcleanupEND2-.Lsub_cond_futex2 -- .long __condvar_tw_cleanup-.LSTARTCODE -- .uleb128 0 --#endif -- .long .LcallUR-.LSTARTCODE -- .long .LENDCODE-.LcallUR -- .long 0 -- .uleb128 0 --.Lcstend: -- -- --#ifdef SHARED -- .hidden DW.ref.__gcc_personality_v0 -- .weak DW.ref.__gcc_personality_v0 -- .section .gnu.linkonce.d.DW.ref.__gcc_personality_v0,"aw",@progbits -- .align 4 -- .type DW.ref.__gcc_personality_v0, @object -- .size DW.ref.__gcc_personality_v0, 4 --DW.ref.__gcc_personality_v0: -- .long __gcc_personality_v0 --#endif -diff --git a/sysdeps/unix/sysv/linux/i386/pthread_cond_wait.S b/sysdeps/unix/sysv/linux/i386/pthread_cond_wait.S -deleted file mode 100644 -index 5016718..0000000 ---- a/sysdeps/unix/sysv/linux/i386/pthread_cond_wait.S -+++ /dev/null -@@ -1,642 +0,0 @@ --/* Copyright (C) 2002-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Ulrich Drepper , 2002. -- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. -- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library; if not, see -- . 
*/ -- --#include --#include --#include --#include --#include --#include --#include --#include --#include -- -- -- .text -- --/* int pthread_cond_wait (pthread_cond_t *cond, pthread_mutex_t *mutex) */ -- .globl __pthread_cond_wait -- .type __pthread_cond_wait, @function -- .align 16 --__pthread_cond_wait: --.LSTARTCODE: -- cfi_startproc --#ifdef SHARED -- cfi_personality(DW_EH_PE_pcrel | DW_EH_PE_sdata4 | DW_EH_PE_indirect, -- DW.ref.__gcc_personality_v0) -- cfi_lsda(DW_EH_PE_pcrel | DW_EH_PE_sdata4, .LexceptSTART) --#else -- cfi_personality(DW_EH_PE_udata4, __gcc_personality_v0) -- cfi_lsda(DW_EH_PE_udata4, .LexceptSTART) --#endif -- -- pushl %ebp -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%ebp, 0) -- pushl %edi -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%edi, 0) -- pushl %esi -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%esi, 0) -- pushl %ebx -- cfi_adjust_cfa_offset(4) -- cfi_rel_offset(%ebx, 0) -- -- xorl %esi, %esi -- movl 20(%esp), %ebx -- -- LIBC_PROBE (cond_wait, 2, 24(%esp), %ebx) -- -- /* Get internal lock. */ -- movl $1, %edx -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %edx, (%ebx) --#else -- cmpxchgl %edx, cond_lock(%ebx) --#endif -- jnz 1f -- -- /* Store the reference to the mutex. If there is already a -- different value in there this is a bad user bug. */ --2: cmpl $-1, dep_mutex(%ebx) -- movl 24(%esp), %eax -- je 15f -- movl %eax, dep_mutex(%ebx) -- -- /* Unlock the mutex. */ --15: xorl %edx, %edx -- call __pthread_mutex_unlock_usercnt -- -- testl %eax, %eax -- jne 12f -- -- addl $1, total_seq(%ebx) -- adcl $0, total_seq+4(%ebx) -- addl $1, cond_futex(%ebx) -- addl $(1 << nwaiters_shift), cond_nwaiters(%ebx) -- --#define FRAME_SIZE 20 -- subl $FRAME_SIZE, %esp -- cfi_adjust_cfa_offset(FRAME_SIZE) -- cfi_remember_state -- -- /* Get and store current wakeup_seq value. */ -- movl wakeup_seq(%ebx), %edi -- movl wakeup_seq+4(%ebx), %edx -- movl broadcast_seq(%ebx), %eax -- movl %edi, 4(%esp) -- movl %edx, 8(%esp) -- movl %eax, 12(%esp) -- -- /* Reset the pi-requeued flag. */ --8: movl $0, 16(%esp) -- movl cond_futex(%ebx), %ebp -- -- /* Unlock. */ -- LOCK --#if cond_lock == 0 -- subl $1, (%ebx) --#else -- subl $1, cond_lock(%ebx) --#endif -- jne 3f -- --.LcleanupSTART: --4: call __pthread_enable_asynccancel -- movl %eax, (%esp) -- -- xorl %ecx, %ecx -- cmpl $-1, dep_mutex(%ebx) -- sete %cl -- je 18f -- -- movl dep_mutex(%ebx), %edi -- /* Requeue to a non-robust PI mutex if the PI bit is set and -- the robust bit is not set. */ -- movl MUTEX_KIND(%edi), %eax -- andl $(ROBUST_BIT|PI_BIT), %eax -- cmpl $PI_BIT, %eax -- jne 18f -- -- movl $(FUTEX_WAIT_REQUEUE_PI|FUTEX_PRIVATE_FLAG), %ecx -- movl %ebp, %edx -- xorl %esi, %esi -- addl $cond_futex, %ebx --.Ladd_cond_futex_pi: -- movl $SYS_futex, %eax -- ENTER_KERNEL -- subl $cond_futex, %ebx --.Lsub_cond_futex_pi: -- /* Set the pi-requeued flag only if the kernel has returned 0. The -- kernel does not hold the mutex on error. */ -- cmpl $0, %eax -- sete 16(%esp) -- je 19f -- -- /* When a futex syscall with FUTEX_WAIT_REQUEUE_PI returns -- successfully, it has already locked the mutex for us and the -- pi_flag (16(%esp)) is set to denote that fact. However, if another -- thread changed the futex value before we entered the wait, the -- syscall may return an EAGAIN and the mutex is not locked. We go -- ahead with a success anyway since later we look at the pi_flag to -- decide if we got the mutex or not. The sequence numbers then make -- sure that only one of the threads actually wake up. 
We retry using -- normal FUTEX_WAIT only if the kernel returned ENOSYS, since normal -- and PI futexes don't mix. -- -- Note that we don't check for EAGAIN specifically; we assume that the -- only other error the futex function could return is EAGAIN since -- anything else would mean an error in our function. It is too -- expensive to do that check for every call (which is quite common in -- case of a large number of threads), so it has been skipped. */ -- cmpl $-ENOSYS, %eax -- jne 19f -- xorl %ecx, %ecx -- --18: subl $1, %ecx --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif --#if FUTEX_WAIT != 0 -- addl $FUTEX_WAIT, %ecx --#endif -- movl %ebp, %edx -- addl $cond_futex, %ebx --.Ladd_cond_futex: -- movl $SYS_futex, %eax -- ENTER_KERNEL -- subl $cond_futex, %ebx --.Lsub_cond_futex: -- --19: movl (%esp), %eax -- call __pthread_disable_asynccancel --.LcleanupEND: -- -- /* Lock. */ -- movl $1, %edx -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %edx, (%ebx) --#else -- cmpxchgl %edx, cond_lock(%ebx) --#endif -- jnz 5f -- --6: movl broadcast_seq(%ebx), %eax -- cmpl 12(%esp), %eax -- jne 16f -- -- movl woken_seq(%ebx), %eax -- movl woken_seq+4(%ebx), %ecx -- -- movl wakeup_seq(%ebx), %edi -- movl wakeup_seq+4(%ebx), %edx -- -- cmpl 8(%esp), %edx -- jne 7f -- cmpl 4(%esp), %edi -- je 22f -- --7: cmpl %ecx, %edx -- jne 9f -- cmp %eax, %edi -- je 22f -- --9: addl $1, woken_seq(%ebx) -- adcl $0, woken_seq+4(%ebx) -- -- /* Unlock */ --16: subl $(1 << nwaiters_shift), cond_nwaiters(%ebx) -- -- /* Wake up a thread which wants to destroy the condvar object. */ -- movl total_seq(%ebx), %eax -- andl total_seq+4(%ebx), %eax -- cmpl $0xffffffff, %eax -- jne 17f -- movl cond_nwaiters(%ebx), %eax -- andl $~((1 << nwaiters_shift) - 1), %eax -- jne 17f -- -- addl $cond_nwaiters, %ebx -- movl $SYS_futex, %eax --#if FUTEX_PRIVATE_FLAG > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_nwaiters(%ebx) -- sete %cl -- subl $1, %ecx --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif -- addl $FUTEX_WAKE, %ecx -- movl $1, %edx -- ENTER_KERNEL -- subl $cond_nwaiters, %ebx -- --17: LOCK --#if cond_lock == 0 -- subl $1, (%ebx) --#else -- subl $1, cond_lock(%ebx) --#endif -- jne 10f -- -- /* With requeue_pi, the mutex lock is held in the kernel. */ --11: movl 24+FRAME_SIZE(%esp), %eax -- movl 16(%esp), %ecx -- testl %ecx, %ecx -- jnz 21f -- -- call __pthread_mutex_cond_lock --20: addl $FRAME_SIZE, %esp -- cfi_adjust_cfa_offset(-FRAME_SIZE); -- --14: popl %ebx -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%ebx) -- popl %esi -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%esi) -- popl %edi -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%edi) -- popl %ebp -- cfi_adjust_cfa_offset(-4) -- cfi_restore(%ebp) -- -- /* We return the result of the mutex_lock operation. */ -- ret -- -- cfi_restore_state -- --21: call __pthread_mutex_cond_lock_adjust -- xorl %eax, %eax -- jmp 20b -- -- cfi_adjust_cfa_offset(-FRAME_SIZE); -- -- /* We need to go back to futex_wait. If we're using requeue_pi, then -- release the mutex we had acquired and go back. */ --22: movl 16(%esp), %edx -- test %edx, %edx -- jz 8b -- -- /* Adjust the mutex values first and then unlock it. The unlock -- should always succeed or else the kernel did not lock the mutex -- correctly. 
*/ -- movl dep_mutex(%ebx), %eax -- call __pthread_mutex_cond_lock_adjust -- movl dep_mutex(%ebx), %eax -- xorl %edx, %edx -- call __pthread_mutex_unlock_usercnt -- jmp 8b -- -- /* Initial locking failed. */ --1: --#if cond_lock == 0 -- movl %ebx, %edx --#else -- leal cond_lock(%ebx), %edx --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_lock_wait -- jmp 2b -- -- /* The initial unlocking of the mutex failed. */ --12: -- LOCK --#if cond_lock == 0 -- subl $1, (%ebx) --#else -- subl $1, cond_lock(%ebx) --#endif -- jne 14b -- -- movl %eax, %esi --#if cond_lock == 0 -- movl %ebx, %eax --#else -- leal cond_lock(%ebx), %eax --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- -- movl %esi, %eax -- jmp 14b -- -- cfi_adjust_cfa_offset(FRAME_SIZE) -- -- /* Unlock in loop requires wakeup. */ --3: --#if cond_lock == 0 -- movl %ebx, %eax --#else -- leal cond_lock(%ebx), %eax --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- jmp 4b -- -- /* Locking in loop failed. */ --5: --#if cond_lock == 0 -- movl %ebx, %edx --#else -- leal cond_lock(%ebx), %edx --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_lock_wait -- jmp 6b -- -- /* Unlock after loop requires wakeup. */ --10: --#if cond_lock == 0 -- movl %ebx, %eax --#else -- leal cond_lock(%ebx), %eax --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- jmp 11b -- -- .size __pthread_cond_wait, .-__pthread_cond_wait --versioned_symbol (libpthread, __pthread_cond_wait, pthread_cond_wait, -- GLIBC_2_3_2) -- -- -- .type __condvar_w_cleanup2, @function --__condvar_w_cleanup2: -- subl $cond_futex, %ebx -- .size __condvar_w_cleanup2, .-__condvar_w_cleanup2 --.LSbl4: -- .type __condvar_w_cleanup, @function --__condvar_w_cleanup: -- movl %eax, %esi -- -- /* Get internal lock. */ -- movl $1, %edx -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %edx, (%ebx) --#else -- cmpxchgl %edx, cond_lock(%ebx) --#endif -- jz 1f -- --#if cond_lock == 0 -- movl %ebx, %edx --#else -- leal cond_lock(%ebx), %edx --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_lock_wait -- --1: movl broadcast_seq(%ebx), %eax -- cmpl 12(%esp), %eax -- jne 3f -- -- /* We increment the wakeup_seq counter only if it is lower than -- total_seq. If this is not the case the thread was woken and -- then canceled. In this case we ignore the signal. 
*/ -- movl total_seq(%ebx), %eax -- movl total_seq+4(%ebx), %edi -- cmpl wakeup_seq+4(%ebx), %edi -- jb 6f -- ja 7f -- cmpl wakeup_seq(%ebx), %eax -- jbe 7f -- --6: addl $1, wakeup_seq(%ebx) -- adcl $0, wakeup_seq+4(%ebx) -- addl $1, cond_futex(%ebx) -- --7: addl $1, woken_seq(%ebx) -- adcl $0, woken_seq+4(%ebx) -- --3: subl $(1 << nwaiters_shift), cond_nwaiters(%ebx) -- -- /* Wake up a thread which wants to destroy the condvar object. */ -- xorl %edi, %edi -- movl total_seq(%ebx), %eax -- andl total_seq+4(%ebx), %eax -- cmpl $0xffffffff, %eax -- jne 4f -- movl cond_nwaiters(%ebx), %eax -- andl $~((1 << nwaiters_shift) - 1), %eax -- jne 4f -- -- addl $cond_nwaiters, %ebx -- movl $SYS_futex, %eax --#if FUTEX_PRIVATE_FLAG > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_nwaiters(%ebx) -- sete %cl -- subl $1, %ecx --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif -- addl $FUTEX_WAKE, %ecx -- movl $1, %edx -- ENTER_KERNEL -- subl $cond_nwaiters, %ebx -- movl $1, %edi -- --4: LOCK --#if cond_lock == 0 -- subl $1, (%ebx) --#else -- subl $1, cond_lock(%ebx) --#endif -- je 2f -- --#if cond_lock == 0 -- movl %ebx, %eax --#else -- leal cond_lock(%ebx), %eax --#endif --#if (LLL_SHARED-LLL_PRIVATE) > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex(%ebx) -- setne %cl -- subl $1, %ecx -- andl $(LLL_SHARED-LLL_PRIVATE), %ecx --#if LLL_PRIVATE != 0 -- addl $LLL_PRIVATE, %ecx --#endif -- call __lll_unlock_wake -- -- /* Wake up all waiters to make sure no signal gets lost. */ --2: testl %edi, %edi -- jnz 5f -- addl $cond_futex, %ebx --#if FUTEX_PRIVATE_FLAG > 255 -- xorl %ecx, %ecx --#endif -- cmpl $-1, dep_mutex-cond_futex(%ebx) -- sete %cl -- subl $1, %ecx --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %ecx --#else -- andl %gs:PRIVATE_FUTEX, %ecx --#endif -- addl $FUTEX_WAKE, %ecx -- movl $SYS_futex, %eax -- movl $0x7fffffff, %edx -- ENTER_KERNEL -- -- /* Lock the mutex only if we don't own it already. This only happens -- in case of PI mutexes, if we got cancelled after a successful -- return of the futex syscall and before disabling async -- cancellation. */ --5: movl 24+FRAME_SIZE(%esp), %eax -- movl MUTEX_KIND(%eax), %ebx -- andl $(ROBUST_BIT|PI_BIT), %ebx -- cmpl $PI_BIT, %ebx -- jne 8f -- -- movl (%eax), %ebx -- andl $TID_MASK, %ebx -- cmpl %ebx, %gs:TID -- jne 8f -- /* We managed to get the lock. Fix it up before returning. 
*/ -- call __pthread_mutex_cond_lock_adjust -- jmp 9f -- --8: call __pthread_mutex_cond_lock -- --9: movl %esi, (%esp) --.LcallUR: -- call _Unwind_Resume -- hlt --.LENDCODE: -- cfi_endproc -- .size __condvar_w_cleanup, .-__condvar_w_cleanup -- -- -- .section .gcc_except_table,"a",@progbits --.LexceptSTART: -- .byte DW_EH_PE_omit # @LPStart format (omit) -- .byte DW_EH_PE_omit # @TType format (omit) -- .byte DW_EH_PE_sdata4 # call-site format -- # DW_EH_PE_sdata4 -- .uleb128 .Lcstend-.Lcstbegin --.Lcstbegin: -- .long .LcleanupSTART-.LSTARTCODE -- .long .Ladd_cond_futex_pi-.LcleanupSTART -- .long __condvar_w_cleanup-.LSTARTCODE -- .uleb128 0 -- .long .Ladd_cond_futex_pi-.LSTARTCODE -- .long .Lsub_cond_futex_pi-.Ladd_cond_futex_pi -- .long __condvar_w_cleanup2-.LSTARTCODE -- .uleb128 0 -- .long .Lsub_cond_futex_pi-.LSTARTCODE -- .long .Ladd_cond_futex-.Lsub_cond_futex_pi -- .long __condvar_w_cleanup-.LSTARTCODE -- .uleb128 0 -- .long .Ladd_cond_futex-.LSTARTCODE -- .long .Lsub_cond_futex-.Ladd_cond_futex -- .long __condvar_w_cleanup2-.LSTARTCODE -- .uleb128 0 -- .long .Lsub_cond_futex-.LSTARTCODE -- .long .LcleanupEND-.Lsub_cond_futex -- .long __condvar_w_cleanup-.LSTARTCODE -- .uleb128 0 -- .long .LcallUR-.LSTARTCODE -- .long .LENDCODE-.LcallUR -- .long 0 -- .uleb128 0 --.Lcstend: -- --#ifdef SHARED -- .hidden DW.ref.__gcc_personality_v0 -- .weak DW.ref.__gcc_personality_v0 -- .section .gnu.linkonce.d.DW.ref.__gcc_personality_v0,"aw",@progbits -- .align 4 -- .type DW.ref.__gcc_personality_v0, @object -- .size DW.ref.__gcc_personality_v0, 4 --DW.ref.__gcc_personality_v0: -- .long __gcc_personality_v0 --#endif -diff --git a/sysdeps/unix/sysv/linux/powerpc/bits/pthreadtypes.h b/sysdeps/unix/sysv/linux/powerpc/bits/pthreadtypes.h -index 345e79a..371bc3c 100644 ---- a/sysdeps/unix/sysv/linux/powerpc/bits/pthreadtypes.h -+++ b/sysdeps/unix/sysv/linux/powerpc/bits/pthreadtypes.h -@@ -123,19 +123,32 @@ typedef union - - - /* Data structure for conditional variable handling. The structure of -- the attribute type is deliberately not exposed. */ -+ the attribute type is not exposed on purpose. */ - typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; -diff --git a/sysdeps/unix/sysv/linux/x86_64/pthread_cond_broadcast.S b/sysdeps/unix/sysv/linux/x86_64/pthread_cond_broadcast.S -deleted file mode 100644 -index de455dd..0000000 ---- a/sysdeps/unix/sysv/linux/x86_64/pthread_cond_broadcast.S -+++ /dev/null -@@ -1,177 +0,0 @@ --/* Copyright (C) 2002-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Ulrich Drepper , 2002. 
-- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. -- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library; if not, see -- . */ -- --#include --#include --#include --#include --#include --#include --#include --#include -- -- .text -- -- /* int pthread_cond_broadcast (pthread_cond_t *cond) */ --ENTRY(__pthread_cond_broadcast) -- -- LIBC_PROBE (cond_broadcast, 1, %rdi) -- -- /* Get internal lock. */ -- movl $1, %esi -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %esi, (%rdi) --#else -- cmpxchgl %esi, cond_lock(%rdi) --#endif -- jnz 1f -- --2: addq $cond_futex, %rdi -- movq total_seq-cond_futex(%rdi), %r9 -- cmpq wakeup_seq-cond_futex(%rdi), %r9 -- jna 4f -- -- /* Cause all currently waiting threads to recognize they are -- woken up. */ -- movq %r9, wakeup_seq-cond_futex(%rdi) -- movq %r9, woken_seq-cond_futex(%rdi) -- addq %r9, %r9 -- movl %r9d, (%rdi) -- incl broadcast_seq-cond_futex(%rdi) -- -- /* Get the address of the mutex used. */ -- mov dep_mutex-cond_futex(%rdi), %R8_LP -- -- /* Unlock. */ -- LOCK -- decl cond_lock-cond_futex(%rdi) -- jne 7f -- --8: cmp $-1, %R8_LP -- je 9f -- -- /* Do not use requeue for pshared condvars. */ -- testl $PS_BIT, MUTEX_KIND(%r8) -- jne 9f -- -- /* Requeue to a PI mutex if the PI bit is set. */ -- movl MUTEX_KIND(%r8), %eax -- andl $(ROBUST_BIT|PI_BIT), %eax -- cmpl $PI_BIT, %eax -- je 81f -- -- /* Wake up all threads. */ --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $(FUTEX_CMP_REQUEUE|FUTEX_PRIVATE_FLAG), %esi --#else -- movl %fs:PRIVATE_FUTEX, %esi -- orl $FUTEX_CMP_REQUEUE, %esi --#endif -- movl $SYS_futex, %eax -- movl $1, %edx -- movl $0x7fffffff, %r10d -- syscall -- -- /* For any kind of error, which mainly is EAGAIN, we try again -- with WAKE. The general test also covers running on old -- kernels. */ -- cmpq $-4095, %rax -- jae 9f -- --10: xorl %eax, %eax -- retq -- -- /* Wake up all threads. */ --81: movl $(FUTEX_CMP_REQUEUE_PI|FUTEX_PRIVATE_FLAG), %esi -- movl $SYS_futex, %eax -- movl $1, %edx -- movl $0x7fffffff, %r10d -- syscall -- -- /* For any kind of error, which mainly is EAGAIN, we try again -- with WAKE. The general test also covers running on old -- kernels. */ -- cmpq $-4095, %rax -- jb 10b -- jmp 9f -- -- .align 16 -- /* Unlock. */ --4: LOCK -- decl cond_lock-cond_futex(%rdi) -- jne 5f -- --6: xorl %eax, %eax -- retq -- -- /* Initial locking failed. */ --1: --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_lock_wait --#if cond_lock != 0 -- subq $cond_lock, %rdi --#endif -- jmp 2b -- -- /* Unlock in loop requires wakeup. */ --5: addq $cond_lock-cond_futex, %rdi -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_unlock_wake -- jmp 6b -- -- /* Unlock in loop requires wakeup. 
*/ --7: addq $cond_lock-cond_futex, %rdi -- cmp $-1, %R8_LP -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_unlock_wake -- subq $cond_lock-cond_futex, %rdi -- jmp 8b -- --9: /* The futex requeue functionality is not available. */ -- cmp $-1, %R8_LP -- movl $0x7fffffff, %edx --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $FUTEX_WAKE, %eax -- movl $(FUTEX_WAKE|FUTEX_PRIVATE_FLAG), %esi -- cmove %eax, %esi --#else -- movl $0, %eax -- movl %fs:PRIVATE_FUTEX, %esi -- cmove %eax, %esi -- orl $FUTEX_WAKE, %esi --#endif -- movl $SYS_futex, %eax -- syscall -- jmp 10b --END(__pthread_cond_broadcast) -- --versioned_symbol (libpthread, __pthread_cond_broadcast, pthread_cond_broadcast, -- GLIBC_2_3_2) -diff --git a/sysdeps/unix/sysv/linux/x86_64/pthread_cond_signal.S b/sysdeps/unix/sysv/linux/x86_64/pthread_cond_signal.S -deleted file mode 100644 -index da14bc3..0000000 ---- a/sysdeps/unix/sysv/linux/x86_64/pthread_cond_signal.S -+++ /dev/null -@@ -1,161 +0,0 @@ --/* Copyright (C) 2002-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Ulrich Drepper , 2002. -- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. -- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library; if not, see -- . */ -- --#include --#include --#include --#include --#include --#include --#include --#include -- -- -- .text -- --ENTRY(__pthread_cond_signal) -- -- LIBC_PROBE (cond_signal, 1, %rdi) -- -- /* Get internal lock. */ -- movq %rdi, %r8 -- movl $1, %esi -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %esi, (%rdi) --#else -- cmpxchgl %esi, cond_lock(%rdi) --#endif -- jnz 1f -- --2: addq $cond_futex, %rdi -- movq total_seq(%r8), %rcx -- cmpq wakeup_seq(%r8), %rcx -- jbe 4f -- -- /* Bump the wakeup number. */ -- addq $1, wakeup_seq(%r8) -- addl $1, (%rdi) -- -- /* Wake up one thread. */ -- LP_OP(cmp) $-1, dep_mutex(%r8) -- movl $FUTEX_WAKE_OP, %esi -- movl $1, %edx -- movl $SYS_futex, %eax -- je 8f -- -- /* Get the address of the mutex used. */ -- mov dep_mutex(%r8), %RCX_LP -- movl MUTEX_KIND(%rcx), %r11d -- andl $(ROBUST_BIT|PI_BIT), %r11d -- cmpl $PI_BIT, %r11d -- je 9f -- --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $(FUTEX_WAKE_OP|FUTEX_PRIVATE_FLAG), %esi --#else -- orl %fs:PRIVATE_FUTEX, %esi --#endif -- --8: movl $1, %r10d --#if cond_lock != 0 -- addq $cond_lock, %r8 --#endif -- movl $FUTEX_OP_CLEAR_WAKE_IF_GT_ONE, %r9d -- syscall --#if cond_lock != 0 -- subq $cond_lock, %r8 --#endif -- /* For any kind of error, we try again with WAKE. -- The general test also covers running on old kernels. */ -- cmpq $-4095, %rax -- jae 7f -- -- xorl %eax, %eax -- retq -- -- /* Wake up one thread and requeue none in the PI Mutex case. */ --9: movl $(FUTEX_CMP_REQUEUE_PI|FUTEX_PRIVATE_FLAG), %esi -- movq %rcx, %r8 -- xorq %r10, %r10 -- movl (%rdi), %r9d // XXX Can this be right? -- syscall -- -- leaq -cond_futex(%rdi), %r8 -- -- /* For any kind of error, we try again with WAKE. -- The general test also covers running on old kernels. 
*/ -- cmpq $-4095, %rax -- jb 4f -- --7: --#ifdef __ASSUME_PRIVATE_FUTEX -- andl $FUTEX_PRIVATE_FLAG, %esi --#else -- andl %fs:PRIVATE_FUTEX, %esi --#endif -- orl $FUTEX_WAKE, %esi -- movl $SYS_futex, %eax -- /* %rdx should be 1 already from $FUTEX_WAKE_OP syscall. -- movl $1, %edx */ -- syscall -- -- /* Unlock. */ --4: LOCK --#if cond_lock == 0 -- decl (%r8) --#else -- decl cond_lock(%r8) --#endif -- jne 5f -- --6: xorl %eax, %eax -- retq -- -- /* Initial locking failed. */ --1: --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_lock_wait --#if cond_lock != 0 -- subq $cond_lock, %rdi --#endif -- jmp 2b -- -- /* Unlock in loop requires wakeup. */ --5: -- movq %r8, %rdi --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_unlock_wake -- jmp 6b --END(__pthread_cond_signal) -- --versioned_symbol (libpthread, __pthread_cond_signal, pthread_cond_signal, -- GLIBC_2_3_2) -diff --git a/sysdeps/unix/sysv/linux/x86_64/pthread_cond_timedwait.S b/sysdeps/unix/sysv/linux/x86_64/pthread_cond_timedwait.S -deleted file mode 100644 -index 82ffa1a..0000000 ---- a/sysdeps/unix/sysv/linux/x86_64/pthread_cond_timedwait.S -+++ /dev/null -@@ -1,623 +0,0 @@ --/* Copyright (C) 2002-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Ulrich Drepper , 2002. -- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. -- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library; if not, see -- . 
*/ -- --#include --#include --#include --#include --#include --#include --#include -- --#include -- -- -- .text -- -- --/* int pthread_cond_timedwait (pthread_cond_t *cond, pthread_mutex_t *mutex, -- const struct timespec *abstime) */ -- .globl __pthread_cond_timedwait -- .type __pthread_cond_timedwait, @function -- .align 16 --__pthread_cond_timedwait: --.LSTARTCODE: -- cfi_startproc --#ifdef SHARED -- cfi_personality(DW_EH_PE_pcrel | DW_EH_PE_sdata4 | DW_EH_PE_indirect, -- DW.ref.__gcc_personality_v0) -- cfi_lsda(DW_EH_PE_pcrel | DW_EH_PE_sdata4, .LexceptSTART) --#else -- cfi_personality(DW_EH_PE_udata4, __gcc_personality_v0) -- cfi_lsda(DW_EH_PE_udata4, .LexceptSTART) --#endif -- -- pushq %r12 -- cfi_adjust_cfa_offset(8) -- cfi_rel_offset(%r12, 0) -- pushq %r13 -- cfi_adjust_cfa_offset(8) -- cfi_rel_offset(%r13, 0) -- pushq %r14 -- cfi_adjust_cfa_offset(8) -- cfi_rel_offset(%r14, 0) -- pushq %r15 -- cfi_adjust_cfa_offset(8) -- cfi_rel_offset(%r15, 0) --#define FRAME_SIZE (32+8) -- subq $FRAME_SIZE, %rsp -- cfi_adjust_cfa_offset(FRAME_SIZE) -- cfi_remember_state -- -- LIBC_PROBE (cond_timedwait, 3, %rdi, %rsi, %rdx) -- -- cmpq $1000000000, 8(%rdx) -- movl $EINVAL, %eax -- jae 48f -- -- /* Stack frame: -- -- rsp + 48 -- +--------------------------+ -- rsp + 32 | timeout value | -- +--------------------------+ -- rsp + 24 | old wake_seq value | -- +--------------------------+ -- rsp + 16 | mutex pointer | -- +--------------------------+ -- rsp + 8 | condvar pointer | -- +--------------------------+ -- rsp + 4 | old broadcast_seq value | -- +--------------------------+ -- rsp + 0 | old cancellation mode | -- +--------------------------+ -- */ -- -- LP_OP(cmp) $-1, dep_mutex(%rdi) -- -- /* Prepare structure passed to cancellation handler. */ -- movq %rdi, 8(%rsp) -- movq %rsi, 16(%rsp) -- movq %rdx, %r13 -- -- je 22f -- mov %RSI_LP, dep_mutex(%rdi) -- --22: -- xorb %r15b, %r15b -- -- /* Get internal lock. */ -- movl $1, %esi -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %esi, (%rdi) --#else -- cmpxchgl %esi, cond_lock(%rdi) --#endif -- jnz 31f -- -- /* Unlock the mutex. */ --32: movq 16(%rsp), %rdi -- xorl %esi, %esi -- callq __pthread_mutex_unlock_usercnt -- -- testl %eax, %eax -- jne 46f -- -- movq 8(%rsp), %rdi -- incq total_seq(%rdi) -- incl cond_futex(%rdi) -- addl $(1 << nwaiters_shift), cond_nwaiters(%rdi) -- -- /* Get and store current wakeup_seq value. */ -- movq 8(%rsp), %rdi -- movq wakeup_seq(%rdi), %r9 -- movl broadcast_seq(%rdi), %edx -- movq %r9, 24(%rsp) -- movl %edx, 4(%rsp) -- -- cmpq $0, (%r13) -- movq $-ETIMEDOUT, %r14 -- js 36f -- --38: movl cond_futex(%rdi), %r12d -- -- /* Unlock. */ -- LOCK --#if cond_lock == 0 -- decl (%rdi) --#else -- decl cond_lock(%rdi) --#endif -- jne 33f -- --.LcleanupSTART1: --34: callq __pthread_enable_asynccancel -- movl %eax, (%rsp) -- -- movq %r13, %r10 -- movl $FUTEX_WAIT_BITSET, %esi -- LP_OP(cmp) $-1, dep_mutex(%rdi) -- je 60f -- -- mov dep_mutex(%rdi), %R8_LP -- /* Requeue to a non-robust PI mutex if the PI bit is set and -- the robust bit is not set. */ -- movl MUTEX_KIND(%r8), %eax -- andl $(ROBUST_BIT|PI_BIT), %eax -- cmpl $PI_BIT, %eax -- jne 61f -- -- movl $(FUTEX_WAIT_REQUEUE_PI|FUTEX_PRIVATE_FLAG), %esi -- xorl %eax, %eax -- /* The following only works like this because we only support -- two clocks, represented using a single bit. 
*/ -- testl $1, cond_nwaiters(%rdi) -- movl $FUTEX_CLOCK_REALTIME, %edx -- cmove %edx, %eax -- orl %eax, %esi -- movq %r12, %rdx -- addq $cond_futex, %rdi -- movl $SYS_futex, %eax -- syscall -- -- cmpl $0, %eax -- sete %r15b -- --#ifdef __ASSUME_REQUEUE_PI -- jmp 62f --#else -- je 62f -- -- /* When a futex syscall with FUTEX_WAIT_REQUEUE_PI returns -- successfully, it has already locked the mutex for us and the -- pi_flag (%r15b) is set to denote that fact. However, if another -- thread changed the futex value before we entered the wait, the -- syscall may return an EAGAIN and the mutex is not locked. We go -- ahead with a success anyway since later we look at the pi_flag to -- decide if we got the mutex or not. The sequence numbers then make -- sure that only one of the threads actually wake up. We retry using -- normal FUTEX_WAIT only if the kernel returned ENOSYS, since normal -- and PI futexes don't mix. -- -- Note that we don't check for EAGAIN specifically; we assume that the -- only other error the futex function could return is EAGAIN (barring -- the ETIMEOUT of course, for the timeout case in futex) since -- anything else would mean an error in our function. It is too -- expensive to do that check for every call (which is quite common in -- case of a large number of threads), so it has been skipped. */ -- cmpl $-ENOSYS, %eax -- jne 62f -- -- subq $cond_futex, %rdi --#endif -- --61: movl $(FUTEX_WAIT_BITSET|FUTEX_PRIVATE_FLAG), %esi --60: xorb %r15b, %r15b -- xorl %eax, %eax -- /* The following only works like this because we only support -- two clocks, represented using a single bit. */ -- testl $1, cond_nwaiters(%rdi) -- movl $FUTEX_CLOCK_REALTIME, %edx -- movl $0xffffffff, %r9d -- cmove %edx, %eax -- orl %eax, %esi -- movq %r12, %rdx -- addq $cond_futex, %rdi -- movl $SYS_futex, %eax -- syscall --62: movq %rax, %r14 -- -- movl (%rsp), %edi -- callq __pthread_disable_asynccancel --.LcleanupEND1: -- -- /* Lock. */ -- movq 8(%rsp), %rdi -- movl $1, %esi -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %esi, (%rdi) --#else -- cmpxchgl %esi, cond_lock(%rdi) --#endif -- jne 35f -- --36: movl broadcast_seq(%rdi), %edx -- -- movq woken_seq(%rdi), %rax -- -- movq wakeup_seq(%rdi), %r9 -- -- cmpl 4(%rsp), %edx -- jne 53f -- -- cmpq 24(%rsp), %r9 -- jbe 45f -- -- cmpq %rax, %r9 -- ja 39f -- --45: cmpq $-ETIMEDOUT, %r14 -- je 99f -- -- /* We need to go back to futex_wait. If we're using requeue_pi, then -- release the mutex we had acquired and go back. */ -- test %r15b, %r15b -- jz 38b -- -- /* Adjust the mutex values first and then unlock it. The unlock -- should always succeed or else the kernel did not lock the -- mutex correctly. */ -- movq %r8, %rdi -- callq __pthread_mutex_cond_lock_adjust -- xorl %esi, %esi -- callq __pthread_mutex_unlock_usercnt -- /* Reload cond_var. */ -- movq 8(%rsp), %rdi -- jmp 38b -- --99: incq wakeup_seq(%rdi) -- incl cond_futex(%rdi) -- movl $ETIMEDOUT, %r14d -- jmp 44f -- --53: xorq %r14, %r14 -- jmp 54f -- --39: xorq %r14, %r14 --44: incq woken_seq(%rdi) -- --54: subl $(1 << nwaiters_shift), cond_nwaiters(%rdi) -- -- /* Wake up a thread which wants to destroy the condvar object. 
*/ -- cmpq $0xffffffffffffffff, total_seq(%rdi) -- jne 55f -- movl cond_nwaiters(%rdi), %eax -- andl $~((1 << nwaiters_shift) - 1), %eax -- jne 55f -- -- addq $cond_nwaiters, %rdi -- LP_OP(cmp) $-1, dep_mutex-cond_nwaiters(%rdi) -- movl $1, %edx --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $FUTEX_WAKE, %eax -- movl $(FUTEX_WAKE|FUTEX_PRIVATE_FLAG), %esi -- cmove %eax, %esi --#else -- movl $0, %eax -- movl %fs:PRIVATE_FUTEX, %esi -- cmove %eax, %esi -- orl $FUTEX_WAKE, %esi --#endif -- movl $SYS_futex, %eax -- syscall -- subq $cond_nwaiters, %rdi -- --55: LOCK --#if cond_lock == 0 -- decl (%rdi) --#else -- decl cond_lock(%rdi) --#endif -- jne 40f -- -- /* If requeue_pi is used the kernel performs the locking of the -- mutex. */ --41: movq 16(%rsp), %rdi -- testb %r15b, %r15b -- jnz 64f -- -- callq __pthread_mutex_cond_lock -- --63: testq %rax, %rax -- cmoveq %r14, %rax -- --48: addq $FRAME_SIZE, %rsp -- cfi_adjust_cfa_offset(-FRAME_SIZE) -- popq %r15 -- cfi_adjust_cfa_offset(-8) -- cfi_restore(%r15) -- popq %r14 -- cfi_adjust_cfa_offset(-8) -- cfi_restore(%r14) -- popq %r13 -- cfi_adjust_cfa_offset(-8) -- cfi_restore(%r13) -- popq %r12 -- cfi_adjust_cfa_offset(-8) -- cfi_restore(%r12) -- -- retq -- -- cfi_restore_state -- --64: callq __pthread_mutex_cond_lock_adjust -- movq %r14, %rax -- jmp 48b -- -- /* Initial locking failed. */ --31: --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_lock_wait -- jmp 32b -- -- /* Unlock in loop requires wakeup. */ --33: --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_unlock_wake -- jmp 34b -- -- /* Locking in loop failed. */ --35: --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_lock_wait --#if cond_lock != 0 -- subq $cond_lock, %rdi --#endif -- jmp 36b -- -- /* Unlock after loop requires wakeup. */ --40: --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_unlock_wake -- jmp 41b -- -- /* The initial unlocking of the mutex failed. 
*/ --46: movq 8(%rsp), %rdi -- movq %rax, (%rsp) -- LOCK --#if cond_lock == 0 -- decl (%rdi) --#else -- decl cond_lock(%rdi) --#endif -- jne 47f -- --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_unlock_wake -- --47: movq (%rsp), %rax -- jmp 48b -- -- .size __pthread_cond_timedwait, .-__pthread_cond_timedwait --versioned_symbol (libpthread, __pthread_cond_timedwait, pthread_cond_timedwait, -- GLIBC_2_3_2) -- -- -- .align 16 -- .type __condvar_cleanup2, @function --__condvar_cleanup2: -- /* Stack frame: -- -- rsp + 72 -- +--------------------------+ -- rsp + 64 | %r12 | -- +--------------------------+ -- rsp + 56 | %r13 | -- +--------------------------+ -- rsp + 48 | %r14 | -- +--------------------------+ -- rsp + 24 | unused | -- +--------------------------+ -- rsp + 16 | mutex pointer | -- +--------------------------+ -- rsp + 8 | condvar pointer | -- +--------------------------+ -- rsp + 4 | old broadcast_seq value | -- +--------------------------+ -- rsp + 0 | old cancellation mode | -- +--------------------------+ -- */ -- -- movq %rax, 24(%rsp) -- -- /* Get internal lock. */ -- movq 8(%rsp), %rdi -- movl $1, %esi -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %esi, (%rdi) --#else -- cmpxchgl %esi, cond_lock(%rdi) --#endif -- jz 1f -- --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_lock_wait --#if cond_lock != 0 -- subq $cond_lock, %rdi --#endif -- --1: movl broadcast_seq(%rdi), %edx -- cmpl 4(%rsp), %edx -- jne 3f -- -- /* We increment the wakeup_seq counter only if it is lower than -- total_seq. If this is not the case the thread was woken and -- then canceled. In this case we ignore the signal. */ -- movq total_seq(%rdi), %rax -- cmpq wakeup_seq(%rdi), %rax -- jbe 6f -- incq wakeup_seq(%rdi) -- incl cond_futex(%rdi) --6: incq woken_seq(%rdi) -- --3: subl $(1 << nwaiters_shift), cond_nwaiters(%rdi) -- -- /* Wake up a thread which wants to destroy the condvar object. */ -- xorq %r12, %r12 -- cmpq $0xffffffffffffffff, total_seq(%rdi) -- jne 4f -- movl cond_nwaiters(%rdi), %eax -- andl $~((1 << nwaiters_shift) - 1), %eax -- jne 4f -- -- LP_OP(cmp) $-1, dep_mutex(%rdi) -- leaq cond_nwaiters(%rdi), %rdi -- movl $1, %edx --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $FUTEX_WAKE, %eax -- movl $(FUTEX_WAKE|FUTEX_PRIVATE_FLAG), %esi -- cmove %eax, %esi --#else -- movl $0, %eax -- movl %fs:PRIVATE_FUTEX, %esi -- cmove %eax, %esi -- orl $FUTEX_WAKE, %esi --#endif -- movl $SYS_futex, %eax -- syscall -- subq $cond_nwaiters, %rdi -- movl $1, %r12d -- --4: LOCK --#if cond_lock == 0 -- decl (%rdi) --#else -- decl cond_lock(%rdi) --#endif -- je 2f --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_unlock_wake -- -- /* Wake up all waiters to make sure no signal gets lost. 
*/ --2: testq %r12, %r12 -- jnz 5f -- addq $cond_futex, %rdi -- LP_OP(cmp) $-1, dep_mutex-cond_futex(%rdi) -- movl $0x7fffffff, %edx --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $FUTEX_WAKE, %eax -- movl $(FUTEX_WAKE|FUTEX_PRIVATE_FLAG), %esi -- cmove %eax, %esi --#else -- movl $0, %eax -- movl %fs:PRIVATE_FUTEX, %esi -- cmove %eax, %esi -- orl $FUTEX_WAKE, %esi --#endif -- movl $SYS_futex, %eax -- syscall -- -- /* Lock the mutex only if we don't own it already. This only happens -- in case of PI mutexes, if we got cancelled after a successful -- return of the futex syscall and before disabling async -- cancellation. */ --5: movq 16(%rsp), %rdi -- movl MUTEX_KIND(%rdi), %eax -- andl $(ROBUST_BIT|PI_BIT), %eax -- cmpl $PI_BIT, %eax -- jne 7f -- -- movl (%rdi), %eax -- andl $TID_MASK, %eax -- cmpl %eax, %fs:TID -- jne 7f -- /* We managed to get the lock. Fix it up before returning. */ -- callq __pthread_mutex_cond_lock_adjust -- jmp 8f -- --7: callq __pthread_mutex_cond_lock -- --8: movq 24(%rsp), %rdi -- movq FRAME_SIZE(%rsp), %r15 -- movq FRAME_SIZE+8(%rsp), %r14 -- movq FRAME_SIZE+16(%rsp), %r13 -- movq FRAME_SIZE+24(%rsp), %r12 --.LcallUR: -- call _Unwind_Resume -- hlt --.LENDCODE: -- cfi_endproc -- .size __condvar_cleanup2, .-__condvar_cleanup2 -- -- -- .section .gcc_except_table,"a",@progbits --.LexceptSTART: -- .byte DW_EH_PE_omit # @LPStart format -- .byte DW_EH_PE_omit # @TType format -- .byte DW_EH_PE_uleb128 # call-site format -- .uleb128 .Lcstend-.Lcstbegin --.Lcstbegin: -- .uleb128 .LcleanupSTART1-.LSTARTCODE -- .uleb128 .LcleanupEND1-.LcleanupSTART1 -- .uleb128 __condvar_cleanup2-.LSTARTCODE -- .uleb128 0 -- .uleb128 .LcallUR-.LSTARTCODE -- .uleb128 .LENDCODE-.LcallUR -- .uleb128 0 -- .uleb128 0 --.Lcstend: -- -- --#ifdef SHARED -- .hidden DW.ref.__gcc_personality_v0 -- .weak DW.ref.__gcc_personality_v0 -- .section .gnu.linkonce.d.DW.ref.__gcc_personality_v0,"aw",@progbits -- .align LP_SIZE -- .type DW.ref.__gcc_personality_v0, @object -- .size DW.ref.__gcc_personality_v0, LP_SIZE --DW.ref.__gcc_personality_v0: -- ASM_ADDR __gcc_personality_v0 --#endif -diff --git a/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S b/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S -deleted file mode 100644 -index c82f37b..0000000 ---- a/sysdeps/unix/sysv/linux/x86_64/pthread_cond_wait.S -+++ /dev/null -@@ -1,555 +0,0 @@ --/* Copyright (C) 2002-2016 Free Software Foundation, Inc. -- This file is part of the GNU C Library. -- Contributed by Ulrich Drepper , 2002. -- -- The GNU C Library is free software; you can redistribute it and/or -- modify it under the terms of the GNU Lesser General Public -- License as published by the Free Software Foundation; either -- version 2.1 of the License, or (at your option) any later version. -- -- The GNU C Library is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -- Lesser General Public License for more details. -- -- You should have received a copy of the GNU Lesser General Public -- License along with the GNU C Library; if not, see -- . 
*/ -- --#include --#include --#include --#include --#include --#include --#include --#include -- --#include -- -- -- .text -- --/* int pthread_cond_wait (pthread_cond_t *cond, pthread_mutex_t *mutex) */ -- .globl __pthread_cond_wait -- .type __pthread_cond_wait, @function -- .align 16 --__pthread_cond_wait: --.LSTARTCODE: -- cfi_startproc --#ifdef SHARED -- cfi_personality(DW_EH_PE_pcrel | DW_EH_PE_sdata4 | DW_EH_PE_indirect, -- DW.ref.__gcc_personality_v0) -- cfi_lsda(DW_EH_PE_pcrel | DW_EH_PE_sdata4, .LexceptSTART) --#else -- cfi_personality(DW_EH_PE_udata4, __gcc_personality_v0) -- cfi_lsda(DW_EH_PE_udata4, .LexceptSTART) --#endif -- --#define FRAME_SIZE (32+8) -- leaq -FRAME_SIZE(%rsp), %rsp -- cfi_adjust_cfa_offset(FRAME_SIZE) -- -- /* Stack frame: -- -- rsp + 32 -- +--------------------------+ -- rsp + 24 | old wake_seq value | -- +--------------------------+ -- rsp + 16 | mutex pointer | -- +--------------------------+ -- rsp + 8 | condvar pointer | -- +--------------------------+ -- rsp + 4 | old broadcast_seq value | -- +--------------------------+ -- rsp + 0 | old cancellation mode | -- +--------------------------+ -- */ -- -- LIBC_PROBE (cond_wait, 2, %rdi, %rsi) -- -- LP_OP(cmp) $-1, dep_mutex(%rdi) -- -- /* Prepare structure passed to cancellation handler. */ -- movq %rdi, 8(%rsp) -- movq %rsi, 16(%rsp) -- -- je 15f -- mov %RSI_LP, dep_mutex(%rdi) -- -- /* Get internal lock. */ --15: movl $1, %esi -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %esi, (%rdi) --#else -- cmpxchgl %esi, cond_lock(%rdi) --#endif -- jne 1f -- -- /* Unlock the mutex. */ --2: movq 16(%rsp), %rdi -- xorl %esi, %esi -- callq __pthread_mutex_unlock_usercnt -- -- testl %eax, %eax -- jne 12f -- -- movq 8(%rsp), %rdi -- incq total_seq(%rdi) -- incl cond_futex(%rdi) -- addl $(1 << nwaiters_shift), cond_nwaiters(%rdi) -- -- /* Get and store current wakeup_seq value. */ -- movq 8(%rsp), %rdi -- movq wakeup_seq(%rdi), %r9 -- movl broadcast_seq(%rdi), %edx -- movq %r9, 24(%rsp) -- movl %edx, 4(%rsp) -- -- /* Unlock. */ --8: movl cond_futex(%rdi), %edx -- LOCK --#if cond_lock == 0 -- decl (%rdi) --#else -- decl cond_lock(%rdi) --#endif -- jne 3f -- --.LcleanupSTART: --4: callq __pthread_enable_asynccancel -- movl %eax, (%rsp) -- -- xorq %r10, %r10 -- LP_OP(cmp) $-1, dep_mutex(%rdi) -- leaq cond_futex(%rdi), %rdi -- movl $FUTEX_WAIT, %esi -- je 60f -- -- mov dep_mutex-cond_futex(%rdi), %R8_LP -- /* Requeue to a non-robust PI mutex if the PI bit is set and -- the robust bit is not set. */ -- movl MUTEX_KIND(%r8), %eax -- andl $(ROBUST_BIT|PI_BIT), %eax -- cmpl $PI_BIT, %eax -- jne 61f -- -- movl $(FUTEX_WAIT_REQUEUE_PI|FUTEX_PRIVATE_FLAG), %esi -- movl $SYS_futex, %eax -- syscall -- -- cmpl $0, %eax -- sete %r8b -- --#ifdef __ASSUME_REQUEUE_PI -- jmp 62f --#else -- je 62f -- -- /* When a futex syscall with FUTEX_WAIT_REQUEUE_PI returns -- successfully, it has already locked the mutex for us and the -- pi_flag (%r8b) is set to denote that fact. However, if another -- thread changed the futex value before we entered the wait, the -- syscall may return an EAGAIN and the mutex is not locked. We go -- ahead with a success anyway since later we look at the pi_flag to -- decide if we got the mutex or not. The sequence numbers then make -- sure that only one of the threads actually wake up. We retry using -- normal FUTEX_WAIT only if the kernel returned ENOSYS, since normal -- and PI futexes don't mix. 
-- -- Note that we don't check for EAGAIN specifically; we assume that the -- only other error the futex function could return is EAGAIN since -- anything else would mean an error in our function. It is too -- expensive to do that check for every call (which is quite common in -- case of a large number of threads), so it has been skipped. */ -- cmpl $-ENOSYS, %eax -- jne 62f -- --# ifndef __ASSUME_PRIVATE_FUTEX -- movl $FUTEX_WAIT, %esi --# endif --#endif -- --61: --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $(FUTEX_WAIT|FUTEX_PRIVATE_FLAG), %esi --#else -- orl %fs:PRIVATE_FUTEX, %esi --#endif --60: xorb %r8b, %r8b -- movl $SYS_futex, %eax -- syscall -- --62: movl (%rsp), %edi -- callq __pthread_disable_asynccancel --.LcleanupEND: -- -- /* Lock. */ -- movq 8(%rsp), %rdi -- movl $1, %esi -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %esi, (%rdi) --#else -- cmpxchgl %esi, cond_lock(%rdi) --#endif -- jnz 5f -- --6: movl broadcast_seq(%rdi), %edx -- -- movq woken_seq(%rdi), %rax -- -- movq wakeup_seq(%rdi), %r9 -- -- cmpl 4(%rsp), %edx -- jne 16f -- -- cmpq 24(%rsp), %r9 -- jbe 19f -- -- cmpq %rax, %r9 -- jna 19f -- -- incq woken_seq(%rdi) -- -- /* Unlock */ --16: subl $(1 << nwaiters_shift), cond_nwaiters(%rdi) -- -- /* Wake up a thread which wants to destroy the condvar object. */ -- cmpq $0xffffffffffffffff, total_seq(%rdi) -- jne 17f -- movl cond_nwaiters(%rdi), %eax -- andl $~((1 << nwaiters_shift) - 1), %eax -- jne 17f -- -- addq $cond_nwaiters, %rdi -- LP_OP(cmp) $-1, dep_mutex-cond_nwaiters(%rdi) -- movl $1, %edx --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $FUTEX_WAKE, %eax -- movl $(FUTEX_WAKE|FUTEX_PRIVATE_FLAG), %esi -- cmove %eax, %esi --#else -- movl $0, %eax -- movl %fs:PRIVATE_FUTEX, %esi -- cmove %eax, %esi -- orl $FUTEX_WAKE, %esi --#endif -- movl $SYS_futex, %eax -- syscall -- subq $cond_nwaiters, %rdi -- --17: LOCK --#if cond_lock == 0 -- decl (%rdi) --#else -- decl cond_lock(%rdi) --#endif -- jne 10f -- -- /* If requeue_pi is used the kernel performs the locking of the -- mutex. */ --11: movq 16(%rsp), %rdi -- testb %r8b, %r8b -- jnz 18f -- -- callq __pthread_mutex_cond_lock -- --14: leaq FRAME_SIZE(%rsp), %rsp -- cfi_adjust_cfa_offset(-FRAME_SIZE) -- -- /* We return the result of the mutex_lock operation. */ -- retq -- -- cfi_adjust_cfa_offset(FRAME_SIZE) -- --18: callq __pthread_mutex_cond_lock_adjust -- xorl %eax, %eax -- jmp 14b -- -- /* We need to go back to futex_wait. If we're using requeue_pi, then -- release the mutex we had acquired and go back. */ --19: testb %r8b, %r8b -- jz 8b -- -- /* Adjust the mutex values first and then unlock it. The unlock -- should always succeed or else the kernel did not lock the mutex -- correctly. */ -- movq 16(%rsp), %rdi -- callq __pthread_mutex_cond_lock_adjust -- movq %rdi, %r8 -- xorl %esi, %esi -- callq __pthread_mutex_unlock_usercnt -- /* Reload cond_var. */ -- movq 8(%rsp), %rdi -- jmp 8b -- -- /* Initial locking failed. */ --1: --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_lock_wait -- jmp 2b -- -- /* Unlock in loop requires wakeup. */ --3: --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- /* The call preserves %rdx. */ -- callq __lll_unlock_wake --#if cond_lock != 0 -- subq $cond_lock, %rdi --#endif -- jmp 4b -- -- /* Locking in loop failed. 
*/ --5: --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_lock_wait --#if cond_lock != 0 -- subq $cond_lock, %rdi --#endif -- jmp 6b -- -- /* Unlock after loop requires wakeup. */ --10: --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_unlock_wake -- jmp 11b -- -- /* The initial unlocking of the mutex failed. */ --12: movq %rax, %r10 -- movq 8(%rsp), %rdi -- LOCK --#if cond_lock == 0 -- decl (%rdi) --#else -- decl cond_lock(%rdi) --#endif -- je 13f -- --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_unlock_wake -- --13: movq %r10, %rax -- jmp 14b -- -- .size __pthread_cond_wait, .-__pthread_cond_wait --versioned_symbol (libpthread, __pthread_cond_wait, pthread_cond_wait, -- GLIBC_2_3_2) -- -- -- .align 16 -- .type __condvar_cleanup1, @function -- .globl __condvar_cleanup1 -- .hidden __condvar_cleanup1 --__condvar_cleanup1: -- /* Stack frame: -- -- rsp + 32 -- +--------------------------+ -- rsp + 24 | unused | -- +--------------------------+ -- rsp + 16 | mutex pointer | -- +--------------------------+ -- rsp + 8 | condvar pointer | -- +--------------------------+ -- rsp + 4 | old broadcast_seq value | -- +--------------------------+ -- rsp + 0 | old cancellation mode | -- +--------------------------+ -- */ -- -- movq %rax, 24(%rsp) -- -- /* Get internal lock. */ -- movq 8(%rsp), %rdi -- movl $1, %esi -- xorl %eax, %eax -- LOCK --#if cond_lock == 0 -- cmpxchgl %esi, (%rdi) --#else -- cmpxchgl %esi, cond_lock(%rdi) --#endif -- jz 1f -- --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- callq __lll_lock_wait --#if cond_lock != 0 -- subq $cond_lock, %rdi --#endif -- --1: movl broadcast_seq(%rdi), %edx -- cmpl 4(%rsp), %edx -- jne 3f -- -- /* We increment the wakeup_seq counter only if it is lower than -- total_seq. If this is not the case the thread was woken and -- then canceled. In this case we ignore the signal. */ -- movq total_seq(%rdi), %rax -- cmpq wakeup_seq(%rdi), %rax -- jbe 6f -- incq wakeup_seq(%rdi) -- incl cond_futex(%rdi) --6: incq woken_seq(%rdi) -- --3: subl $(1 << nwaiters_shift), cond_nwaiters(%rdi) -- -- /* Wake up a thread which wants to destroy the condvar object. */ -- xorl %ecx, %ecx -- cmpq $0xffffffffffffffff, total_seq(%rdi) -- jne 4f -- movl cond_nwaiters(%rdi), %eax -- andl $~((1 << nwaiters_shift) - 1), %eax -- jne 4f -- -- LP_OP(cmp) $-1, dep_mutex(%rdi) -- leaq cond_nwaiters(%rdi), %rdi -- movl $1, %edx --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $FUTEX_WAKE, %eax -- movl $(FUTEX_WAKE|FUTEX_PRIVATE_FLAG), %esi -- cmove %eax, %esi --#else -- movl $0, %eax -- movl %fs:PRIVATE_FUTEX, %esi -- cmove %eax, %esi -- orl $FUTEX_WAKE, %esi --#endif -- movl $SYS_futex, %eax -- syscall -- subq $cond_nwaiters, %rdi -- movl $1, %ecx -- --4: LOCK --#if cond_lock == 0 -- decl (%rdi) --#else -- decl cond_lock(%rdi) --#endif -- je 2f --#if cond_lock != 0 -- addq $cond_lock, %rdi --#endif -- LP_OP(cmp) $-1, dep_mutex-cond_lock(%rdi) -- movl $LLL_PRIVATE, %eax -- movl $LLL_SHARED, %esi -- cmovne %eax, %esi -- /* The call preserves %rcx. 
*/ -- callq __lll_unlock_wake -- -- /* Wake up all waiters to make sure no signal gets lost. */ --2: testl %ecx, %ecx -- jnz 5f -- addq $cond_futex, %rdi -- LP_OP(cmp) $-1, dep_mutex-cond_futex(%rdi) -- movl $0x7fffffff, %edx --#ifdef __ASSUME_PRIVATE_FUTEX -- movl $FUTEX_WAKE, %eax -- movl $(FUTEX_WAKE|FUTEX_PRIVATE_FLAG), %esi -- cmove %eax, %esi --#else -- movl $0, %eax -- movl %fs:PRIVATE_FUTEX, %esi -- cmove %eax, %esi -- orl $FUTEX_WAKE, %esi --#endif -- movl $SYS_futex, %eax -- syscall -- -- /* Lock the mutex only if we don't own it already. This only happens -- in case of PI mutexes, if we got cancelled after a successful -- return of the futex syscall and before disabling async -- cancellation. */ --5: movq 16(%rsp), %rdi -- movl MUTEX_KIND(%rdi), %eax -- andl $(ROBUST_BIT|PI_BIT), %eax -- cmpl $PI_BIT, %eax -- jne 7f -- -- movl (%rdi), %eax -- andl $TID_MASK, %eax -- cmpl %eax, %fs:TID -- jne 7f -- /* We managed to get the lock. Fix it up before returning. */ -- callq __pthread_mutex_cond_lock_adjust -- jmp 8f -- -- --7: callq __pthread_mutex_cond_lock -- --8: movq 24(%rsp), %rdi --.LcallUR: -- call _Unwind_Resume -- hlt --.LENDCODE: -- cfi_endproc -- .size __condvar_cleanup1, .-__condvar_cleanup1 -- -- -- .section .gcc_except_table,"a",@progbits --.LexceptSTART: -- .byte DW_EH_PE_omit # @LPStart format -- .byte DW_EH_PE_omit # @TType format -- .byte DW_EH_PE_uleb128 # call-site format -- .uleb128 .Lcstend-.Lcstbegin --.Lcstbegin: -- .uleb128 .LcleanupSTART-.LSTARTCODE -- .uleb128 .LcleanupEND-.LcleanupSTART -- .uleb128 __condvar_cleanup1-.LSTARTCODE -- .uleb128 0 -- .uleb128 .LcallUR-.LSTARTCODE -- .uleb128 .LENDCODE-.LcallUR -- .uleb128 0 -- .uleb128 0 --.Lcstend: -- -- --#ifdef SHARED -- .hidden DW.ref.__gcc_personality_v0 -- .weak DW.ref.__gcc_personality_v0 -- .section .gnu.linkonce.d.DW.ref.__gcc_personality_v0,"aw",@progbits -- .align LP_SIZE -- .type DW.ref.__gcc_personality_v0, @object -- .size DW.ref.__gcc_personality_v0, LP_SIZE --DW.ref.__gcc_personality_v0: -- ASM_ADDR __gcc_personality_v0 --#endif -diff --git a/sysdeps/x86/bits/pthreadtypes.h b/sysdeps/x86/bits/pthreadtypes.h -index 16b8f4f..a3a738f 100644 ---- a/sysdeps/x86/bits/pthreadtypes.h -+++ b/sysdeps/x86/bits/pthreadtypes.h -@@ -140,14 +140,27 @@ typedef union - { - struct - { -- int __lock; -- unsigned int __futex; -- __extension__ unsigned long long int __total_seq; -- __extension__ unsigned long long int __wakeup_seq; -- __extension__ unsigned long long int __woken_seq; -- void *__mutex; -- unsigned int __nwaiters; -- unsigned int __broadcast_seq; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __wseq; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __wseq32; -+ }; -+ __extension__ union -+ { -+ __extension__ unsigned long long int __g1_start; -+ struct { -+ unsigned int __low; -+ unsigned int __high; -+ } __g1_start32; -+ }; -+ unsigned int __g_refs[2]; -+ unsigned int __g_size[2]; -+ unsigned int __g1_orig_size; -+ unsigned int __wrefs; -+ unsigned int __g_signals[2]; - } __data; - char __size[__SIZEOF_PTHREAD_COND_T]; - __extension__ long long int __align; --- -2.10.2 - diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch index 3127de68e..b981f7bf7 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch +++ 
b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0004-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch @@ -1,7 +1,7 @@ -From a56e7aa2fd10b178b9498f88a7a2ca8aad671a53 Mon Sep 17 00:00:00 2001 +From 0b95f34207ffed3aa53fa949662bfbccc7c864a4 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Thu, 31 Dec 2015 14:35:35 -0800 -Subject: [PATCH 04/25] nativesdk-glibc: Allow 64 bit atomics for x86 +Subject: [PATCH 04/26] nativesdk-glibc: Allow 64 bit atomics for x86 The fix consist of allowing 64bit atomic ops for x86. This should be safe for i586 and newer CPUs. @@ -11,8 +11,6 @@ Signed-off-by: Juro Bystricky Signed-off-by: Richard Purdie Signed-off-by: Khem Raj --- -Upstream-Status: Pending - sysdeps/i386/atomic-machine.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) @@ -30,5 +28,5 @@ index ce62b33..4fe44ea 100644 -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0005-Remove-__ASSUME_REQUEUE_PI.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0005-Remove-__ASSUME_REQUEUE_PI.patch deleted file mode 100644 index 8d4ba4107..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0005-Remove-__ASSUME_REQUEUE_PI.patch +++ /dev/null @@ -1,149 +0,0 @@ -From 27b7131d3d8133bf3a5ce72d4e4ff4dfadd71f20 Mon Sep 17 00:00:00 2001 -From: Catalin Enache -Date: Fri, 30 Jun 2017 12:08:29 +0300 -Subject: [PATCH 5/6] Remove __ASSUME_REQUEUE_PI - -The new cond var implementation (ed19993b5b0d) removed all the -__ASSUME_{REQUEUE_PI,FUTEX_LOCK_PI} internal usage so there is no -need to keep defining it. This patch removes all USE_REQUEUE_PI -and __ASSUME_REQUEUE_PI. It is as follow up from BZ#18463. - -Checked with a build for x86_64-linux-gnu, arm-linux-gnueabhf, -m68-linux-gnu, mips64-linux-gnu, and sparc64-linux-gnu. - - * nptl/pthreadP.h (USE_REQUEUE_PI): Remove ununsed macro. - * sysdeps/unix/sysv/linux/arm/kernel-features.h - (__ASSUME_REQUEUE_PI): Likewise. - * sysdeps/unix/sysv/linux/kernel-features.h - (__ASSUME_REQUEUE_PI): Likewise. - * sysdeps/unix/sysv/linux/m68k/kernel-features.h - (__ASSUME_REQUEUE_PI): Likewise. - * sysdeps/unix/sysv/linux/mips/kernel-features.h - (__ASSUME_REQUEUE_PI): Likewise. - * sysdeps/unix/sysv/linux/sparc/kernel-features.h - (__ASSUME_REQUEUE_PI): Likewise. - -Upstream-Status: Backport - -Author: Adhemerval Zanella -Signed-off-by: Catalin Enache ---- - ChangeLog | 14 ++++++++++++++ - nptl/pthreadP.h | 12 ------------ - sysdeps/unix/sysv/linux/arm/kernel-features.h | 1 - - sysdeps/unix/sysv/linux/kernel-features.h | 5 ----- - sysdeps/unix/sysv/linux/m68k/kernel-features.h | 1 - - sysdeps/unix/sysv/linux/mips/kernel-features.h | 1 - - sysdeps/unix/sysv/linux/sparc/kernel-features.h | 1 - - 7 files changed, 14 insertions(+), 21 deletions(-) - -diff --git a/ChangeLog b/ChangeLog -index c94db7b..44c518b 100644 ---- a/ChangeLog -+++ b/ChangeLog -@@ -1,3 +1,17 @@ -+2017-04-04 Adhemerval Zanella -+ -+ * nptl/pthreadP.h (USE_REQUEUE_PI): Remove ununsed macro. -+ * sysdeps/unix/sysv/linux/arm/kernel-features.h -+ (__ASSUME_REQUEUE_PI): Likewise. -+ * sysdeps/unix/sysv/linux/kernel-features.h -+ (__ASSUME_REQUEUE_PI): Likewise. -+ * sysdeps/unix/sysv/linux/m68k/kernel-features.h -+ (__ASSUME_REQUEUE_PI): Likewise. -+ * sysdeps/unix/sysv/linux/mips/kernel-features.h -+ (__ASSUME_REQUEUE_PI): Likewise. -+ * sysdeps/unix/sysv/linux/sparc/kernel-features.h -+ (__ASSUME_REQUEUE_PI): Likewise. 
-+ - 2016-12-31 Torvald Riegel - - [BZ #13165] -diff --git a/nptl/pthreadP.h b/nptl/pthreadP.h -index e9992bc..730c4ad 100644 ---- a/nptl/pthreadP.h -+++ b/nptl/pthreadP.h -@@ -594,18 +594,6 @@ extern void __wait_lookup_done (void) attribute_hidden; - # define PTHREAD_STATIC_FN_REQUIRE(name) __asm (".globl " #name); - #endif - --/* Test if the mutex is suitable for the FUTEX_WAIT_REQUEUE_PI operation. */ --#if (defined lll_futex_wait_requeue_pi \ -- && defined __ASSUME_REQUEUE_PI) --# define USE_REQUEUE_PI(mut) \ -- ((mut) && (mut) != (void *) ~0l \ -- && (((mut)->__data.__kind \ -- & (PTHREAD_MUTEX_PRIO_INHERIT_NP | PTHREAD_MUTEX_ROBUST_NORMAL_NP)) \ -- == PTHREAD_MUTEX_PRIO_INHERIT_NP)) --#else --# define USE_REQUEUE_PI(mut) 0 --#endif -- - /* Returns 0 if POL is a valid scheduling policy. */ - static inline int - check_sched_policy_attr (int pol) -diff --git a/sysdeps/unix/sysv/linux/arm/kernel-features.h b/sysdeps/unix/sysv/linux/arm/kernel-features.h -index 6ca607e..339ad45 100644 ---- a/sysdeps/unix/sysv/linux/arm/kernel-features.h -+++ b/sysdeps/unix/sysv/linux/arm/kernel-features.h -@@ -23,7 +23,6 @@ - futex_atomic_cmpxchg_inatomic, depending on kernel - configuration. */ - #if __LINUX_KERNEL_VERSION < 0x030E03 --# undef __ASSUME_REQUEUE_PI - # undef __ASSUME_SET_ROBUST_LIST - #endif - -diff --git a/sysdeps/unix/sysv/linux/kernel-features.h b/sysdeps/unix/sysv/linux/kernel-features.h -index 1d3b554..9f2cf9f 100644 ---- a/sysdeps/unix/sysv/linux/kernel-features.h -+++ b/sysdeps/unix/sysv/linux/kernel-features.h -@@ -101,11 +101,6 @@ - #define __ASSUME_PREADV 1 - #define __ASSUME_PWRITEV 1 - --/* Support for FUTEX_*_REQUEUE_PI was added in 2.6.31 (but some -- architectures lack futex_atomic_cmpxchg_inatomic in some -- configurations). */ --#define __ASSUME_REQUEUE_PI 1 -- - /* Support for recvmmsg functionality was added in 2.6.33. The macros - defined correspond to those for accept4. */ - #if __LINUX_KERNEL_VERSION >= 0x020621 -diff --git a/sysdeps/unix/sysv/linux/m68k/kernel-features.h b/sysdeps/unix/sysv/linux/m68k/kernel-features.h -index 46ec601..174c1c6 100644 ---- a/sysdeps/unix/sysv/linux/m68k/kernel-features.h -+++ b/sysdeps/unix/sysv/linux/m68k/kernel-features.h -@@ -51,6 +51,5 @@ - - /* No support for PI futexes or robust mutexes before 3.10 for m68k. */ - #if __LINUX_KERNEL_VERSION < 0x030a00 --# undef __ASSUME_REQUEUE_PI - # undef __ASSUME_SET_ROBUST_LIST - #endif -diff --git a/sysdeps/unix/sysv/linux/mips/kernel-features.h b/sysdeps/unix/sysv/linux/mips/kernel-features.h -index b486d90..a795911c 100644 ---- a/sysdeps/unix/sysv/linux/mips/kernel-features.h -+++ b/sysdeps/unix/sysv/linux/mips/kernel-features.h -@@ -24,7 +24,6 @@ - /* The MIPS kernel does not support futex_atomic_cmpxchg_inatomic if - emulating LL/SC. */ - #if __mips == 1 || defined _MIPS_ARCH_R5900 --# undef __ASSUME_REQUEUE_PI - # undef __ASSUME_SET_ROBUST_LIST - #endif - -diff --git a/sysdeps/unix/sysv/linux/sparc/kernel-features.h b/sysdeps/unix/sysv/linux/sparc/kernel-features.h -index 69c9c7c..dd3ddf0 100644 ---- a/sysdeps/unix/sysv/linux/sparc/kernel-features.h -+++ b/sysdeps/unix/sysv/linux/sparc/kernel-features.h -@@ -34,6 +34,5 @@ - /* 32-bit SPARC kernels do not support - futex_atomic_cmpxchg_inatomic. 
*/ - #if !defined __arch64__ && !defined __sparc_v9__ --# undef __ASSUME_REQUEUE_PI - # undef __ASSUME_SET_ROBUST_LIST - #endif --- -2.10.2 - diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0005-fsl-e500-e5500-e6500-603e-fsqrt-implementation.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0005-fsl-e500-e5500-e6500-603e-fsqrt-implementation.patch index dc0dfad9a..ee5000031 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0005-fsl-e500-e5500-e6500-603e-fsqrt-implementation.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0005-fsl-e500-e5500-e6500-603e-fsqrt-implementation.patch @@ -1,7 +1,7 @@ -From 450473ce27b47cebec2c5c5b611a8c8bed41e6e8 Mon Sep 17 00:00:00 2001 +From 77a7495376c7d0c5507c0ec99bf1568150339ef4 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:01:50 +0000 -Subject: [PATCH 05/25] fsl e500/e5500/e6500/603e fsqrt implementation +Subject: [PATCH 05/26] fsl e500/e5500/e6500/603e fsqrt implementation Upstream-Status: Pending Signed-off-by: Edmar Wienskoski @@ -1580,5 +1580,5 @@ index 0000000..04ff8cc @@ -0,0 +1 @@ +powerpc/powerpc64/e6500/fpu -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0006-Fix-atomic_fetch_xor_release.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0006-Fix-atomic_fetch_xor_release.patch deleted file mode 100644 index 7616efa18..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0006-Fix-atomic_fetch_xor_release.patch +++ /dev/null @@ -1,81 +0,0 @@ -From b671f20cc160238b62894d032a55baf85867106e Mon Sep 17 00:00:00 2001 -From: Catalin Enache -Date: Fri, 30 Jun 2017 19:12:43 +0300 -Subject: [PATCH 6/6] Fix atomic_fetch_xor_release. - -No code uses atomic_fetch_xor_release except for the upcoming -conditional variable rewrite. Therefore there is no user -visible bug here. The use of atomic_compare_and_exchange_bool_rel -is removed (since it doesn't exist anymore), and is replaced -by atomic_compare_exchange_weak_release. - -We use weak_release because it provides better performance in -the loop (the weak semantic) and because the xor is release MO -(the release semantic). We don't reload expected in the loop -because atomic_compare_and_exchange_weak_release does this for -us as part of the CAS failure. - -It is otherwise a fairly plain conversion that fixes building -the new condvar for 32-bit x86. Passes all regression tests -for x86. - -Upstream-Status: Backport - -Author: Carlos O'Donell -Signed-off-by: Catalin Enache ---- - ChangeLog | 6 ++++++ - include/atomic.h | 19 +++++++++++-------- - 2 files changed, 17 insertions(+), 8 deletions(-) - -diff --git a/ChangeLog b/ChangeLog -index 44c518b..893262d 100644 ---- a/ChangeLog -+++ b/ChangeLog -@@ -1,3 +1,9 @@ -+2016-10-26 Carlos O'Donell -+ -+ * include/atomic.h -+ [USE_COMPILER_ATOMIC_BUILTINS && !atomic_fetch_xor_release] -+ (atomic_fetch_xor_release): Use atomic_compare_exchange_weak_release. -+ - 2017-04-04 Adhemerval Zanella - - * nptl/pthreadP.h (USE_REQUEUE_PI): Remove ununsed macro. -diff --git a/include/atomic.h b/include/atomic.h -index 5a8e7e7..c8b4664 100644 ---- a/include/atomic.h -+++ b/include/atomic.h -@@ -777,18 +777,21 @@ void __atomic_link_error (void); - # endif - - # ifndef atomic_fetch_xor_release -+/* Failing the atomic_compare_exchange_weak_release reloads the value in -+ __atg104_expected, so we need only do the XOR again and retry. 
*/ - # define atomic_fetch_xor_release(mem, operand) \ -- ({ __typeof (*(mem)) __atg104_old; \ -- __typeof (mem) __atg104_memp = (mem); \ -+ ({ __typeof (mem) __atg104_memp = (mem); \ -+ __typeof (*(mem)) __atg104_expected = (*__atg104_memp); \ -+ __typeof (*(mem)) __atg104_desired; \ - __typeof (*(mem)) __atg104_op = (operand); \ - \ - do \ -- __atg104_old = (*__atg104_memp); \ -- while (__builtin_expect \ -- (atomic_compare_and_exchange_bool_rel ( \ -- __atg104_memp, __atg104_old ^ __atg104_op, __atg104_old), 0));\ -- \ -- __atg104_old; }) -+ __atg104_desired = __atg104_expected ^ __atg104_op; \ -+ while (__glibc_unlikely \ -+ (atomic_compare_exchange_weak_release ( \ -+ __atg104_memp, &__atg104_expected, __atg104_desired) \ -+ == 0)); \ -+ __atg104_expected; }) - #endif - - #endif /* !USE_ATOMIC_COMPILER_BUILTINS */ --- -2.10.2 - diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0006-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0006-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch index f267f66cd..9088d29c4 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0006-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0006-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch @@ -1,7 +1,7 @@ -From bb065b78e9e0d3301e64a26e37550a657c885ebc Mon Sep 17 00:00:00 2001 +From 520cb9e746af637cf01fea385b7f4ee4aadbdfdd Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:11:22 +0000 -Subject: [PATCH 06/25] readlib: Add OECORE_KNOWN_INTERPRETER_NAMES to known +Subject: [PATCH 06/26] readlib: Add OECORE_KNOWN_INTERPRETER_NAMES to known names This bolts in a hook for OE to pass its own version of interpreter @@ -29,5 +29,5 @@ index 8a66ffe..08d56fc 100644 static struct known_names known_libs[] = -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0007-ppc-sqrt-Fix-undefined-reference-to-__sqrt_finite.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0007-ppc-sqrt-Fix-undefined-reference-to-__sqrt_finite.patch index 612e892e8..f33defe07 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0007-ppc-sqrt-Fix-undefined-reference-to-__sqrt_finite.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0007-ppc-sqrt-Fix-undefined-reference-to-__sqrt_finite.patch @@ -1,7 +1,7 @@ -From 4a91bbdb11a15f93094695bba4e0849c421fad48 Mon Sep 17 00:00:00 2001 +From 64130262787d54e2e6695ae4ed8783bfec14ffef Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:15:07 +0000 -Subject: [PATCH 07/25] ppc/sqrt: Fix undefined reference to `__sqrt_finite' +Subject: [PATCH 07/26] ppc/sqrt: Fix undefined reference to `__sqrt_finite' on ppc fixes the errors like below | ./.libs/libpulsecore-1.1.so: undefined reference to `__sqrt_finite' @@ -204,5 +204,5 @@ index 26fa067..9d17512 100644 } +strong_alias (__ieee754_sqrtf, __sqrtf_finite) -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0008-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0008-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch index 9e222b7b4..26f65c5b1 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0008-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch +++ 
b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0008-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch @@ -1,7 +1,7 @@ -From ce2746a1560b4430d89d114e1b65c7be225b4c2a Mon Sep 17 00:00:00 2001 +From 5afb0147e3e49c3b474404524014efe51b2bca5a Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:16:38 +0000 -Subject: [PATCH 08/25] __ieee754_sqrt{,f} are now inline functions and call +Subject: [PATCH 08/26] __ieee754_sqrt{,f} are now inline functions and call out __slow versions Upstream-Status: Pending @@ -383,5 +383,5 @@ index 9d17512..10de1f0 100644 + strong_alias (__ieee754_sqrtf, __sqrtf_finite) -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0009-Quote-from-bug-1443-which-explains-what-the-patch-do.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0009-Quote-from-bug-1443-which-explains-what-the-patch-do.patch index 7aa74df23..d416acde3 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0009-Quote-from-bug-1443-which-explains-what-the-patch-do.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0009-Quote-from-bug-1443-which-explains-what-the-patch-do.patch @@ -1,7 +1,7 @@ -From 1b61649e545de76dd79a5e2c39bb16d0c4623160 Mon Sep 17 00:00:00 2001 +From ddd51bb4e005432cb3c0f8f33822954408a9fee1 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:20:09 +0000 -Subject: [PATCH 09/25] Quote from bug 1443 which explains what the patch does +Subject: [PATCH 09/26] Quote from bug 1443 which explains what the patch does : We build some random program and link it with -lust. When we run it, @@ -58,5 +58,5 @@ index 60eee00..7d54d5e 100644 case R_ARM_TLS_TPOFF32: -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0010-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0010-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch index 14963a357..276f1fa4c 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0010-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0010-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch @@ -1,7 +1,7 @@ -From 91af416408b8718b004c83d93a6daa1f34cba48d Mon Sep 17 00:00:00 2001 +From d7e74670825330f5421a55f5aa2a1ce6fda7d7fb Mon Sep 17 00:00:00 2001 From: Ting Liu Date: Wed, 19 Dec 2012 04:39:57 -0600 -Subject: [PATCH 10/25] eglibc: run libm-err-tab.pl with specific dirs in ${S} +Subject: [PATCH 10/26] eglibc: run libm-err-tab.pl with specific dirs in ${S} libm-err-tab.pl will parse all the files named "libm-test-ulps" in the given dir recursively. 
To avoid parsing the one in @@ -32,5 +32,5 @@ index f2f694f..e062833 100644 touch $@ -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0011-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0011-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch index e92d04c62..096dab547 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0011-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0011-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch @@ -1,7 +1,7 @@ -From 6a71f688471fe0c85f8ad9afc4ee0723a20aae1d Mon Sep 17 00:00:00 2001 +From d6e2076571263e45c48889896d3d94ff576df2be Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:24:46 +0000 -Subject: [PATCH 11/25] __ieee754_sqrt{,f} are now inline functions and call +Subject: [PATCH 11/26] __ieee754_sqrt{,f} are now inline functions and call out __slow versions Upstream-Status: Pending @@ -57,5 +57,5 @@ index 8126535..10de1f0 100644 #endif { -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0012-Make-ld-version-output-matching-grok-gold-s-output.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0012-Make-ld-version-output-matching-grok-gold-s-output.patch index 4cba03f23..7728c61a9 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0012-Make-ld-version-output-matching-grok-gold-s-output.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0012-Make-ld-version-output-matching-grok-gold-s-output.patch @@ -1,7 +1,7 @@ -From f9d024edb158659e403b51cc9d93da9bca6e5d7b Mon Sep 17 00:00:00 2001 +From c0974c746e026650bef5d1940eb3f519765c77af Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:25:45 +0000 -Subject: [PATCH 12/25] Make ld --version output matching grok gold's output +Subject: [PATCH 12/26] Make ld --version output matching grok gold's output adapted from from upstream branch roland/gold-vs-libc @@ -14,10 +14,10 @@ Signed-off-by: Khem Raj 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/configure b/configure -index 8fe5937..7888a3f 100755 +index 5cf3230..391f29d 100755 --- a/configure +++ b/configure -@@ -4482,7 +4482,7 @@ else +@@ -4555,7 +4555,7 @@ else # Found it, now check the version. { $as_echo "$as_me:${as_lineno-$LINENO}: checking version of $LD" >&5 $as_echo_n "checking version of $LD... " >&6; } @@ -27,10 +27,10 @@ index 8fe5937..7888a3f 100755 '') ac_prog_version="v. 
?.??, bad"; ac_verc_fail=yes;; 2.1[0-9][0-9]*|2.2[2-9]*|2.[3-9][0-9]*|[3-9].*|[1-9][0-9]*) diff --git a/configure.ac b/configure.ac -index 3c766b7..6908a99 100644 +index d719fad..5b5877c 100644 --- a/configure.ac +++ b/configure.ac -@@ -944,7 +944,7 @@ AC_CHECK_PROG_VER(AS, $AS, --version, +@@ -990,7 +990,7 @@ AC_CHECK_PROG_VER(AS, $AS, --version, [2.1[0-9][0-9]*|2.2[2-9]*|2.[3-9][0-9]*|[3-9].*|[1-9][0-9]*], AS=: critic_missing="$critic_missing as") AC_CHECK_PROG_VER(LD, $LD, --version, @@ -40,5 +40,5 @@ index 3c766b7..6908a99 100644 LD=: critic_missing="$critic_missing ld") -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0013-sysdeps-gnu-configure.ac-handle-correctly-libc_cv_ro.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0013-sysdeps-gnu-configure.ac-handle-correctly-libc_cv_ro.patch index 6a82f8d93..1c81c729d 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0013-sysdeps-gnu-configure.ac-handle-correctly-libc_cv_ro.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0013-sysdeps-gnu-configure.ac-handle-correctly-libc_cv_ro.patch @@ -1,7 +1,7 @@ -From 2ece12093c8ba4cf688d235f35d36f1feb02324c Mon Sep 17 00:00:00 2001 +From 2a12eadfd7940b6b0913de8e95d851254cce7953 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:27:10 +0000 -Subject: [PATCH 13/25] sysdeps/gnu/configure.ac: handle correctly +Subject: [PATCH 13/26] sysdeps/gnu/configure.ac: handle correctly $libc_cv_rootsbindir Upstream-Status:Pending @@ -38,5 +38,5 @@ index 634fe4d..3db1697 100644 ;; esac -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0014-Add-unused-attribute.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0014-Add-unused-attribute.patch index d14a6d4cd..b23e104ff 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0014-Add-unused-attribute.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0014-Add-unused-attribute.patch @@ -1,7 +1,7 @@ -From 9fec083dc99d2b524090e1d098e03709eed64a72 Mon Sep 17 00:00:00 2001 +From ec4f7763b30603b7ba0b70bd7750e34d442821b3 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:28:41 +0000 -Subject: [PATCH 14/25] Add unused attribute +Subject: [PATCH 14/26] Add unused attribute Helps in avoiding gcc warning when header is is included in a source file which does not use both functions @@ -30,5 +30,5 @@ index 95cbce7..191a0dd 100644 { int slash_count = 0; -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0015-yes-within-the-path-sets-wrong-config-variables.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0015-yes-within-the-path-sets-wrong-config-variables.patch index 8ceccec41..98d425a7b 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0015-yes-within-the-path-sets-wrong-config-variables.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0015-yes-within-the-path-sets-wrong-config-variables.patch @@ -1,7 +1,7 @@ -From 1794a97cba0b09b726eebc565c783c7b7c22dfba Mon Sep 17 00:00:00 2001 +From 18d64951cbb68d8d75e8ef347cbd0e0a5c14604b Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:31:06 +0000 -Subject: [PATCH 15/25] 'yes' within the path sets wrong config variables +Subject: [PATCH 15/26] 'yes' within the path sets wrong config variables It seems that the 'AC_EGREP_CPP(yes...' 
example is quite popular but being such a short word to grep it is likely to produce @@ -259,5 +259,5 @@ index 0822915..9a32fdd 100644 ], libc_cv_ppc64_def_call_elf=yes, libc_cv_ppc64_def_call_elf=no)]) if test $libc_cv_ppc64_def_call_elf = no; then -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0016-timezone-re-written-tzselect-as-posix-sh.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0016-timezone-re-written-tzselect-as-posix-sh.patch index e5b508330..426a2c0c2 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0016-timezone-re-written-tzselect-as-posix-sh.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0016-timezone-re-written-tzselect-as-posix-sh.patch @@ -1,7 +1,7 @@ -From fdb5c9c88e3d1bc5ae6ef9ebcfcf560d3c42f0a6 Mon Sep 17 00:00:00 2001 +From 2bed515b9f9f613ae0db9b9607d8fa60a4afca5b Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:33:03 +0000 -Subject: [PATCH 16/25] timezone: re-written tzselect as posix sh +Subject: [PATCH 16/26] timezone: re-written tzselect as posix sh To avoid the bash dependency. @@ -41,5 +41,5 @@ index 2c3b2f4..0c04a61 100755 # Output one argument as-is to standard output. # Safer than 'echo', which can mishandle '\' or leading '-'. -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0017-Remove-bash-dependency-for-nscd-init-script.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0017-Remove-bash-dependency-for-nscd-init-script.patch index c8da05dc6..6c2506ccc 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0017-Remove-bash-dependency-for-nscd-init-script.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0017-Remove-bash-dependency-for-nscd-init-script.patch @@ -1,7 +1,7 @@ -From 88ad88d014a5ee68b0e044d2def5ab13743c8ae0 Mon Sep 17 00:00:00 2001 +From c8814875b362efbfd778345d0d2777478bf11a30 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Thu, 31 Dec 2015 14:33:02 -0800 -Subject: [PATCH 17/25] Remove bash dependency for nscd init script +Subject: [PATCH 17/26] Remove bash dependency for nscd init script The nscd init script uses #! /bin/bash but only really uses one bashism (translated strings), so remove them and switch the shell to #!/bin/sh. @@ -9,8 +9,6 @@ The nscd init script uses #! 
/bin/bash but only really uses one bashism Signed-off-by: Ross Burton Signed-off-by: Khem Raj --- -Upstream-Status: Pending - nscd/nscd.init | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) @@ -71,5 +69,5 @@ index a882da7..b02986e 100644 ;; esac -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0018-eglibc-Cross-building-and-testing-instructions.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0018-eglibc-Cross-building-and-testing-instructions.patch index 175076524..2ec01f05c 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0018-eglibc-Cross-building-and-testing-instructions.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0018-eglibc-Cross-building-and-testing-instructions.patch @@ -1,7 +1,7 @@ -From 49c0ad7dc2e02e808ed150296a109b586c34115a Mon Sep 17 00:00:00 2001 +From df96d6b61bb60f13cd3d4989d1afc56d705f4a33 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:42:58 +0000 -Subject: [PATCH 18/25] eglibc: Cross building and testing instructions +Subject: [PATCH 18/26] eglibc: Cross building and testing instructions Ported from eglibc Upstream-Status: Pending @@ -615,5 +615,5 @@ index 0000000..b67b468 + simply place copies of these libraries in the top GLIBC build + directory. -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0019-eglibc-Help-bootstrap-cross-toolchain.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0019-eglibc-Help-bootstrap-cross-toolchain.patch index f983d86a9..f5921bb7b 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0019-eglibc-Help-bootstrap-cross-toolchain.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0019-eglibc-Help-bootstrap-cross-toolchain.patch @@ -1,7 +1,7 @@ -From e881face7a10354612bf634b44e09e3bc6462c67 Mon Sep 17 00:00:00 2001 +From 2cb7e3cae4020f431d426ad1740bb25506cde899 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:49:28 +0000 -Subject: [PATCH 19/25] eglibc: Help bootstrap cross toolchain +Subject: [PATCH 19/26] eglibc: Help bootstrap cross toolchain Taken from EGLIBC, r1484 + r1525 @@ -29,10 +29,10 @@ Signed-off-by: Khem Raj create mode 100644 include/stubs-bootstrap.h diff --git a/Makefile b/Makefile -index 32748b3..6d055ba 100644 +index 1ae3281..26ab7bf 100644 --- a/Makefile +++ b/Makefile -@@ -69,9 +69,18 @@ subdir-dirs = include +@@ -70,9 +70,18 @@ subdir-dirs = include vpath %.h $(subdir-dirs) # What to install. @@ -52,7 +52,7 @@ index 32748b3..6d055ba 100644 ifeq (yes,$(build-shared)) headers += gnu/lib-names.h endif -@@ -151,6 +160,16 @@ others: $(common-objpfx)testrun.sh +@@ -152,6 +161,16 @@ others: $(common-objpfx)testrun.sh subdir-stubs := $(foreach dir,$(subdirs),$(common-objpfx)$(dir)/stubs) @@ -69,7 +69,7 @@ index 32748b3..6d055ba 100644 ifndef abi-variants installed-stubs = $(inst_includedir)/gnu/stubs.h else -@@ -177,6 +196,7 @@ $(inst_includedir)/gnu/stubs.h: $(+force) +@@ -178,6 +197,7 @@ $(inst_includedir)/gnu/stubs.h: $(+force) install-others-nosubdir: $(installed-stubs) endif @@ -96,5 +96,5 @@ index 0000000..1d2b669 + EGLIBC subdir 'stubs' make targets, on every .o file in EGLIBC, but + an empty stubs.h like this will do fine for GCC. 
*/ -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0020-eglibc-cherry-picked-from.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0020-eglibc-cherry-picked-from.patch index 30c4c6987..43445739b 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0020-eglibc-cherry-picked-from.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0020-eglibc-cherry-picked-from.patch @@ -1,7 +1,7 @@ -From 723a31d3e2627211b39fbcc08f75b3c23c4096c5 Mon Sep 17 00:00:00 2001 +From b2ed906ec864583b43379ef9ad2b5630c1232565 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Thu, 31 Dec 2015 15:10:33 -0800 -Subject: [PATCH 20/25] eglibc: cherry-picked from +Subject: [PATCH 20/26] eglibc: cherry-picked from http://www.eglibc.org/archives/patches/msg00772.html @@ -60,5 +60,5 @@ index a4b376f..3256e12 100644 if (resp->nscount > 0) __res_iclose (resp, true); -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0021-eglibc-Clear-cache-lines-on-ppc8xx.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0021-eglibc-Clear-cache-lines-on-ppc8xx.patch index 8931c9e25..a9a73916f 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0021-eglibc-Clear-cache-lines-on-ppc8xx.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0021-eglibc-Clear-cache-lines-on-ppc8xx.patch @@ -1,7 +1,7 @@ -From 9699873820d0347c2f377f0d86bce615d87a5e47 Mon Sep 17 00:00:00 2001 +From 000ab518aa1269714bc0a9a4633b0a538fae91d9 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Thu, 31 Dec 2015 15:15:09 -0800 -Subject: [PATCH 21/25] eglibc: Clear cache lines on ppc8xx +Subject: [PATCH 21/26] eglibc: Clear cache lines on ppc8xx 2007-06-13 Nathan Sidwell Mark Shinwell @@ -79,5 +79,5 @@ index 0efd297..8cc0ef8 100644 break; #ifndef SHARED -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0022-eglibc-Resolve-__fpscr_values-on-SH4.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0022-eglibc-Resolve-__fpscr_values-on-SH4.patch index f53f5ff87..c0cd5b0d1 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0022-eglibc-Resolve-__fpscr_values-on-SH4.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0022-eglibc-Resolve-__fpscr_values-on-SH4.patch @@ -1,7 +1,7 @@ -From 8904f4249c930d187e19c7e9d3e73c835d11e18f Mon Sep 17 00:00:00 2001 +From a50c6e80543fb4cbc589978c11fe846bf4a94492 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 00:55:53 +0000 -Subject: [PATCH 22/25] eglibc: Resolve __fpscr_values on SH4 +Subject: [PATCH 22/26] eglibc: Resolve __fpscr_values on SH4 2010-09-29 Nobuhiro Iwamatsu Andrew Stubbs @@ -52,5 +52,5 @@ index 0024d79..d1db7e4 100644 +weak_alias (___fpscr_values, __fpscr_values) + -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0023-eglibc-Install-PIC-archives.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0023-eglibc-Install-PIC-archives.patch index f985f3ca1..c3e571f8a 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0023-eglibc-Install-PIC-archives.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0023-eglibc-Install-PIC-archives.patch @@ -1,7 +1,7 @@ -From 1947f211fab4001e84bb52868cddbd401e597889 Mon Sep 17 00:00:00 2001 +From 101568daf48d99e71b280a2fdd85460fe740d583 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 01:57:01 +0000 -Subject: [PATCH 23/25] eglibc: Install PIC archives +Subject: [PATCH 23/26] 
eglibc: Install PIC archives Forward port from eglibc @@ -29,10 +29,10 @@ Signed-off-by: Khem Raj 1 file changed, 40 insertions(+), 2 deletions(-) diff --git a/Makerules b/Makerules -index 53eabfa..1cd2a53 100644 +index 61a0240..373e628 100644 --- a/Makerules +++ b/Makerules -@@ -694,6 +694,9 @@ ifeq ($(build-shared),yes) +@@ -762,6 +762,9 @@ ifeq ($(build-shared),yes) $(common-objpfx)libc.so: $(common-objpfx)libc.map endif common-generated += libc.so libc_pic.os @@ -42,7 +42,7 @@ index 53eabfa..1cd2a53 100644 ifdef libc.so-version $(common-objpfx)libc.so$(libc.so-version): $(common-objpfx)libc.so $(make-link) -@@ -936,6 +939,7 @@ endif +@@ -1004,6 +1007,7 @@ endif install: check-install-supported @@ -50,7 +50,7 @@ index 53eabfa..1cd2a53 100644 install: $(installed-libcs) $(installed-libcs): $(inst_libdir)/lib$(libprefix)%: lib $(+force) $(make-target-directory) -@@ -964,6 +968,22 @@ versioned := $(strip $(foreach so,$(install-lib.so),\ +@@ -1032,6 +1036,22 @@ versioned := $(strip $(foreach so,$(install-lib.so),\ install-lib.so-versioned := $(filter $(versioned), $(install-lib.so)) install-lib.so-unversioned := $(filter-out $(versioned), $(install-lib.so)) @@ -73,7 +73,7 @@ index 53eabfa..1cd2a53 100644 # For versioned libraries, we install three files: # $(inst_libdir)/libfoo.so -- for linking, symlink or ld script # $(inst_slibdir)/libfoo.so.NN -- for loading by SONAME, symlink -@@ -1206,9 +1226,22 @@ $(addprefix $(inst_includedir)/,$(headers-nonh)): $(inst_includedir)/%: \ +@@ -1275,9 +1295,22 @@ $(addprefix $(inst_includedir)/,$(headers-nonh)): $(inst_includedir)/%: \ endif # headers-nonh endif # headers @@ -97,7 +97,7 @@ index 53eabfa..1cd2a53 100644 install-bin-nosubdir: $(addprefix $(inst_bindir)/,$(install-bin)) install-bin-script-nosubdir: $(addprefix $(inst_bindir)/,$(install-bin-script)) install-rootsbin-nosubdir: \ -@@ -1221,6 +1254,10 @@ install-data-nosubdir: $(addprefix $(inst_datadir)/,$(install-data)) +@@ -1290,6 +1323,10 @@ install-data-nosubdir: $(addprefix $(inst_datadir)/,$(install-data)) install-headers-nosubdir: $(addprefix $(inst_includedir)/,$(headers)) install-others-nosubdir: $(install-others) install-others-programs-nosubdir: $(install-others-programs) @@ -108,7 +108,7 @@ index 53eabfa..1cd2a53 100644 # We need all the `-nosubdir' targets so that `install' in the parent # doesn't depend on several things which each iterate over the subdirs. 
-@@ -1230,7 +1267,8 @@ install-%:: install-%-nosubdir ; +@@ -1299,7 +1336,8 @@ install-%:: install-%-nosubdir ; .PHONY: install install-no-libc.a-nosubdir install-no-libc.a-nosubdir: install-headers-nosubdir install-data-nosubdir \ @@ -119,5 +119,5 @@ index 53eabfa..1cd2a53 100644 install-no-libc.a-nosubdir: install-bin-nosubdir install-bin-script-nosubdir \ install-rootsbin-nosubdir install-sbin-nosubdir \ -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0024-eglibc-Forward-port-cross-locale-generation-support.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0024-eglibc-Forward-port-cross-locale-generation-support.patch index c45b557ab..3399890de 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0024-eglibc-Forward-port-cross-locale-generation-support.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0024-eglibc-Forward-port-cross-locale-generation-support.patch @@ -1,7 +1,7 @@ -From 11d7633178301add26a24657e2f1596a2f7dddce Mon Sep 17 00:00:00 2001 +From 82516e3ed372f618c886a2de4f9498f597aa8a8b Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 18 Mar 2015 01:33:49 +0000 -Subject: [PATCH 24/25] eglibc: Forward port cross locale generation support +Subject: [PATCH 24/26] eglibc: Forward port cross locale generation support Upstream-Status: Pending @@ -91,7 +91,7 @@ index 0000000..9fad357 + [LC_ALL] = sizeof ("LC_ALL") - 1 + }; diff --git a/locale/localeinfo.h b/locale/localeinfo.h -index 94627f3..d0db77b 100644 +index 1f4da92..7f68935 100644 --- a/locale/localeinfo.h +++ b/locale/localeinfo.h @@ -224,7 +224,7 @@ __libc_tsd_define (extern, __locale_t, LOCALE) @@ -363,10 +363,10 @@ index b885f65..0afb631 100644 size_t bufmax = 56; diff --git a/locale/programs/localedef.c b/locale/programs/localedef.c -index 6becd9a..8ddd080 100644 +index b4c48f1..ed08d48 100644 --- a/locale/programs/localedef.c +++ b/locale/programs/localedef.c -@@ -114,6 +114,7 @@ void (*argp_program_version_hook) (FILE *, struct argp_state *) = print_version; +@@ -108,6 +108,7 @@ void (*argp_program_version_hook) (FILE *, struct argp_state *) = print_version; #define OPT_LIST_ARCHIVE 309 #define OPT_LITTLE_ENDIAN 400 #define OPT_BIG_ENDIAN 401 @@ -374,7 +374,7 @@ index 6becd9a..8ddd080 100644 /* Definitions of arguments for argp functions. */ static const struct argp_option options[] = -@@ -150,6 +151,8 @@ static const struct argp_option options[] = +@@ -143,6 +144,8 @@ static const struct argp_option options[] = N_("Generate little-endian output") }, { "big-endian", OPT_BIG_ENDIAN, NULL, 0, N_("Generate big-endian output") }, @@ -383,7 +383,7 @@ index 6becd9a..8ddd080 100644 { NULL, 0, NULL, 0, NULL } }; -@@ -239,12 +242,14 @@ main (int argc, char *argv[]) +@@ -232,12 +235,14 @@ main (int argc, char *argv[]) ctype locale. (P1003.2 4.35.5.2) */ setlocale (LC_CTYPE, "POSIX"); @@ -398,7 +398,7 @@ index 6becd9a..8ddd080 100644 /* Process charmap file. 
*/ charmap = charmap_read (charmap_file, verbose, 1, be_quiet, 1); -@@ -338,6 +343,9 @@ parse_opt (int key, char *arg, struct argp_state *state) +@@ -328,6 +333,9 @@ parse_opt (int key, char *arg, struct argp_state *state) case OPT_BIG_ENDIAN: set_big_endian (true); break; @@ -562,5 +562,5 @@ index 69b3141..1cef0be 100644 # define WEAK_POSTLOAD(postload) weak_extern (postload) #else -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0025-Define-DUMMY_LOCALE_T-if-not-defined.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0025-Define-DUMMY_LOCALE_T-if-not-defined.patch index 9d60d0282..1f0f5d4da 100644 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0025-Define-DUMMY_LOCALE_T-if-not-defined.patch +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0025-Define-DUMMY_LOCALE_T-if-not-defined.patch @@ -1,7 +1,7 @@ -From 89ec25290d34413ce5c8ba6c378e31dbae3a37c0 Mon Sep 17 00:00:00 2001 +From c2d8cdeab116caacdfedb35eeb3e743b44807bec Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 20 Apr 2016 21:11:00 -0700 -Subject: [PATCH 25/25] Define DUMMY_LOCALE_T if not defined +Subject: [PATCH 25/26] Define DUMMY_LOCALE_T if not defined This is a hack to fix building the locale bits on an older CentOs 5.X machine @@ -28,5 +28,5 @@ index f606365..0e5f8c3 100644 #define PACKAGE _libc_intl_domainname #ifndef VERSION -- -2.8.2 +2.10.2 diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0026-build_local_scope.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0026-build_local_scope.patch deleted file mode 100644 index 8e0cb8319..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0026-build_local_scope.patch +++ /dev/null @@ -1,56 +0,0 @@ -From 6e4ec5a3c5fe63b6458036f18d43124de4a7e724 Mon Sep 17 00:00:00 2001 -From: Mark Hatle -Date: Thu, 18 Aug 2016 14:07:58 -0500 -Subject: [PATCH] elf/dl-deps.c: Make _dl_build_local_scope breadth first - -According to the ELF specification: - -When resolving symbolic references, the dynamic linker examines the symbol -tables with a breadth-first search. - -This function was using a depth first search. By doing so the conflict -resolution reported to the prelinker (when LD_TRACE_PRELINKING=1 is set) -was incorrect. This caused problems when their were various circular -dependencies between libraries. The problem usually manifested itself by -the wrong IFUNC being executed. - -[BZ# 20488] - -Upstream-Status: Submitted [libc-alpha] - -Signed-off-by: Mark Hatle ---- - elf/dl-deps.c | 14 ++++++++++---- - 1 file changed, 10 insertions(+), 4 deletions(-) - -diff --git a/elf/dl-deps.c b/elf/dl-deps.c -index 6a82987..fc37c87 100644 ---- a/elf/dl-deps.c -+++ b/elf/dl-deps.c -@@ -73,13 +73,19 @@ _dl_build_local_scope (struct link_map **list, struct link_map *map) - { - struct link_map **p = list; - struct link_map **q; -+ struct link_map **r; - - *p++ = map; - map->l_reserved = 1; -- if (map->l_initfini) -- for (q = map->l_initfini + 1; *q; ++q) -- if (! (*q)->l_reserved) -- p += _dl_build_local_scope (p, *q); -+ -+ for (r = list; r < p; ++r) -+ if ((*r)->l_initfini) -+ for (q = (*r)->l_initfini + 1; *q; ++q) -+ if (! 
(*q)->l_reserved) -+ { -+ *p++ = *q; -+ (*q)->l_reserved = 1; -+ } - return p - list; - } - --- -2.5.5 - diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0026-elf-dl-deps.c-Make-_dl_build_local_scope-breadth-fir.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0026-elf-dl-deps.c-Make-_dl_build_local_scope-breadth-fir.patch new file mode 100644 index 000000000..852f5304c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0026-elf-dl-deps.c-Make-_dl_build_local_scope-breadth-fir.patch @@ -0,0 +1,56 @@ +From fb315c197cca61299a6f6588ea3460145c255d06 Mon Sep 17 00:00:00 2001 +From: Mark Hatle +Date: Thu, 18 Aug 2016 14:07:58 -0500 +Subject: [PATCH 26/26] elf/dl-deps.c: Make _dl_build_local_scope breadth first + +According to the ELF specification: + +When resolving symbolic references, the dynamic linker examines the symbol +tables with a breadth-first search. + +This function was using a depth first search. By doing so the conflict +resolution reported to the prelinker (when LD_TRACE_PRELINKING=1 is set) +was incorrect. This caused problems when their were various circular +dependencies between libraries. The problem usually manifested itself by +the wrong IFUNC being executed. + +[BZ# 20488] + +Upstream-Status: Submitted [libc-alpha] + +Signed-off-by: Mark Hatle +--- + elf/dl-deps.c | 14 ++++++++++---- + 1 file changed, 10 insertions(+), 4 deletions(-) + +diff --git a/elf/dl-deps.c b/elf/dl-deps.c +index 6a82987..53be824 100644 +--- a/elf/dl-deps.c ++++ b/elf/dl-deps.c +@@ -73,13 +73,19 @@ _dl_build_local_scope (struct link_map **list, struct link_map *map) + { + struct link_map **p = list; + struct link_map **q; ++ struct link_map **r; + + *p++ = map; + map->l_reserved = 1; +- if (map->l_initfini) +- for (q = map->l_initfini + 1; *q; ++q) +- if (! (*q)->l_reserved) +- p += _dl_build_local_scope (p, *q); ++ ++ for (r = list; r < p; ++r) ++ if ((*r)->l_initfini) ++ for (q = (*r)->l_initfini + 1; *q; ++q) ++ if (! (*q)->l_reserved) ++ { ++ *p++ = *q; ++ (*q)->l_reserved = 1; ++ } + return p - list; + } + +-- +2.10.2 + diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0027-locale-fix-hard-coded-reference-to-gcc-E.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0027-locale-fix-hard-coded-reference-to-gcc-E.patch new file mode 100644 index 000000000..71c0bdcae --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0027-locale-fix-hard-coded-reference-to-gcc-E.patch @@ -0,0 +1,38 @@ +From a2fc86cb8d0366171f100ebd033aeb9609fa40de Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?J=C3=A9r=C3=A9my=20Rosen?= +Date: Mon, 22 Aug 2016 16:09:25 +0200 +Subject: [PATCH 27/27] locale: fix hard-coded reference to gcc -E + +When new version of compilers are published, they may not be compatible with +older versions of software. This is particularly common when software is built +with -Werror. + +Autotools provides a way for a user to specify the name of his compiler using a +set of variables ($CC $CXX $CPP etc.). Those variables are used correctly when +compiling glibc but the script used to generate transliterations in the locale/ +subdirectory directly calls the gcc binary to get the output of the +preprocessor instead of using the $CPP variable provided by the build +environment. + +This patch replaces the hard-coded reference to the gcc binary with the proper +environment variable, thus allowing a user to override it. 
+ +Upstream-Status : Submitted [https://sourceware.org/ml/libc-alpha/2016-08/msg00746.html] +--- + locale/gen-translit.pl | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/locale/gen-translit.pl b/locale/gen-translit.pl +index 30d3f2f..e976530 100644 +--- a/locale/gen-translit.pl ++++ b/locale/gen-translit.pl +@@ -1,5 +1,5 @@ + #!/usr/bin/perl -w +-open F, "cat C-translit.h.in | gcc -E - |" || die "Cannot preprocess input file"; ++open F, 'cat C-translit.h.in | ${CPP:-gcc -E} - |' || die "Cannot preprocess input file"; + + + sub cstrlen { +-- +2.10.2 + diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0028-Bug-20116-Fix-use-after-free-in-pthread_create.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0028-Bug-20116-Fix-use-after-free-in-pthread_create.patch deleted file mode 100644 index 66f1fcd0f..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0028-Bug-20116-Fix-use-after-free-in-pthread_create.patch +++ /dev/null @@ -1,668 +0,0 @@ -From e7ba24f05d86acf7072e066ea6d7b235a106688c Mon Sep 17 00:00:00 2001 -From: Carlos O'Donell -Date: Sat, 28 Jan 2017 19:13:34 -0500 -Subject: [PATCH] Bug 20116: Fix use after free in pthread_create() - -The commit documents the ownership rules around 'struct pthread' and -when a thread can read or write to the descriptor. With those ownership -rules in place it becomes obvious that pd->stopped_start should not be -touched in several of the paths during thread startup, particularly so -for detached threads. In the case of detached threads, between the time -the thread is created by the OS kernel and the creating thread checks -pd->stopped_start, the detached thread might have already exited and the -memory for pd unmapped. As a regression test we add a simple test which -exercises this exact case by quickly creating detached threads with -large enough stacks to ensure the thread stack cache is bypassed and the -stacks are unmapped. Before the fix the testcase segfaults, after the -fix it works correctly and completes without issue. - -For a detailed discussion see: -https://www.sourceware.org/ml/libc-alpha/2017-01/msg00505.html - -(cherry-picked from commit f8bf15febcaf137bbec5a61101e88cd5a9d56ca8) - -Upstream-Status: Backport [master] -Signed-off-by: Yuanjie Huang ---- - ChangeLog | 23 ++++ - nptl/Makefile | 3 +- - nptl/createthread.c | 10 +- - nptl/pthread_create.c | 207 +++++++++++++++++++++++++++------ - nptl/pthread_getschedparam.c | 1 + - nptl/pthread_setschedparam.c | 1 + - nptl/pthread_setschedprio.c | 1 + - nptl/tpp.c | 2 + - nptl/tst-create-detached.c | 137 ++++++++++++++++++++++ - sysdeps/nacl/createthread.c | 10 +- - sysdeps/unix/sysv/linux/createthread.c | 16 ++- - 11 files changed, 356 insertions(+), 55 deletions(-) - create mode 100644 nptl/tst-create-detached.c - -diff --git a/ChangeLog b/ChangeLog -index 84ae7a7af8..0fbda9020e 100644 ---- a/ChangeLog -+++ b/ChangeLog -@@ -1,3 +1,26 @@ -+2016-01-28 Carlos O'Donell -+ Alexey Makhalov -+ Florian Weimer -+ -+ [BZ #20116] -+ * nptl/pthread_create.c: Document concurrency notes. -+ Enhance thread creation notes. -+ (create_thread): Use bool *stopped_start. -+ (START_THREAD_DEFN): Comment ownership of PD. -+ (__pthread_create_2_1): Add local bool stopped_start and use -+ that instead of pd->stopped_start where appropriate. -+ * nptl/createthread.c (create_thread): Use bool *stopped_start. -+ * sysdeps/nacl/createthread.c (create_thread): Use bool *stopped_start. 
-+ * sysdeps/unix/sysv/linux/createthread.c (create_thread): Likewise. -+ * nptl/tst-create-detached.c: New file. -+ * nptl/Makefile (tests): Add tst-create-detached. -+ * nptl/pthread_getschedparam.c (__pthread_getschedparam): -+ Reference the enhanced thread creation notes. -+ * nptl/pthread_setschedparam.c (__pthread_setschedparam): Likewise. -+ * nptl/pthread_setschedprio.c (pthread_setschedprio): Likewise. -+ * nptl/tpp.c (__pthread_tpp_change_priority): Likewise. -+ (__pthread_current_priority): Likewise. -+ - 2016-08-04 Carlos O'Donell - - * po/de.po: Update from Translation Project. -diff --git a/nptl/Makefile b/nptl/Makefile -index 0d8aadebed..7dec4edb53 100644 ---- a/nptl/Makefile -+++ b/nptl/Makefile -@@ -290,7 +290,8 @@ tests = tst-typesizes \ - tst-initializers1 $(addprefix tst-initializers1-,\ - c89 gnu89 c99 gnu99 c11 gnu11) \ - tst-bad-schedattr \ -- tst-thread_local1 tst-mutex-errorcheck tst-robust10 -+ tst-thread_local1 tst-mutex-errorcheck tst-robust10 \ -+ tst-create-detached \ - xtests = tst-setuid1 tst-setuid1-static tst-setuid2 \ - tst-mutexpp1 tst-mutexpp6 tst-mutexpp10 - test-srcs = tst-oddstacklimit -diff --git a/nptl/createthread.c b/nptl/createthread.c -index ba2f9c7167..328f85865d 100644 ---- a/nptl/createthread.c -+++ b/nptl/createthread.c -@@ -25,16 +25,14 @@ - - static int - create_thread (struct pthread *pd, const struct pthread_attr *attr, -- bool stopped_start, STACK_VARIABLES_PARMS, bool *thread_ran) -+ bool *stopped_start, STACK_VARIABLES_PARMS, bool *thread_ran) - { - /* If the implementation needs to do some tweaks to the thread after - it has been created at the OS level, it can set STOPPED_START here. */ - -- pd->stopped_start = stopped_start; -- if (__glibc_unlikely (stopped_start)) -- /* We make sure the thread does not run far by forcing it to get a -- lock. We lock it here too so that the new thread cannot continue -- until we tell it to. */ -+ pd->stopped_start = *stopped_start; -+ if (__glibc_unlikely (*stopped_start)) -+ /* See CONCURRENCY NOTES in nptl/pthread_create.c. */ - lll_lock (pd->lock, LLL_PRIVATE); - - return ENOSYS; -diff --git a/nptl/pthread_create.c b/nptl/pthread_create.c -index a834063ad5..44b17bec86 100644 ---- a/nptl/pthread_create.c -+++ b/nptl/pthread_create.c -@@ -54,25 +54,141 @@ unsigned int __nptl_nthreads = 1; - /* Code to allocate and deallocate a stack. */ - #include "allocatestack.c" - --/* createthread.c defines this function, and two macros: -+/* CONCURRENCY NOTES: -+ -+ Understanding who is the owner of the 'struct pthread' or 'PD' -+ (refers to the value of the 'struct pthread *pd' function argument) -+ is critically important in determining exactly which operations are -+ allowed and which are not and when, particularly when it comes to the -+ implementation of pthread_create, pthread_join, pthread_detach, and -+ other functions which all operate on PD. -+ -+ The owner of PD is responsible for freeing the final resources -+ associated with PD, and may examine the memory underlying PD at any -+ point in time until it frees it back to the OS or to reuse by the -+ runtime. -+ -+ The thread which calls pthread_create is called the creating thread. -+ The creating thread begins as the owner of PD. -+ -+ During startup the new thread may examine PD in coordination with the -+ owner thread (which may be itself). -+ -+ The four cases of ownership transfer are: -+ -+ (1) Ownership of PD is released to the process (all threads may use it) -+ after the new thread starts in a joinable state -+ i.e. 
pthread_create returns a usable pthread_t. -+ -+ (2) Ownership of PD is released to the new thread starting in a detached -+ state. -+ -+ (3) Ownership of PD is dynamically released to a running thread via -+ pthread_detach. -+ -+ (4) Ownership of PD is acquired by the thread which calls pthread_join. -+ -+ Implementation notes: -+ -+ The PD->stopped_start and thread_ran variables are used to determine -+ exactly which of the four ownership states we are in and therefore -+ what actions can be taken. For example after (2) we cannot read or -+ write from PD anymore since the thread may no longer exist and the -+ memory may be unmapped. The most complicated cases happen during -+ thread startup: -+ -+ (a) If the created thread is in a detached (PTHREAD_CREATE_DETACHED), -+ or joinable (default PTHREAD_CREATE_JOINABLE) state and -+ STOPPED_START is true, then the creating thread has ownership of -+ PD until the PD->lock is released by pthread_create. If any -+ errors occur we are in states (c), (d), or (e) below. -+ -+ (b) If the created thread is in a detached state -+ (PTHREAD_CREATED_DETACHED), and STOPPED_START is false, then the -+ creating thread has ownership of PD until it invokes the OS -+ kernel's thread creation routine. If this routine returns -+ without error, then the created thread owns PD; otherwise, see -+ (c) and (e) below. -+ -+ (c) If the detached thread setup failed and THREAD_RAN is true, then -+ the creating thread releases ownership to the new thread by -+ sending a cancellation signal. All threads set THREAD_RAN to -+ true as quickly as possible after returning from the OS kernel's -+ thread creation routine. -+ -+ (d) If the joinable thread setup failed and THREAD_RAN is true, then -+ then the creating thread retains ownership of PD and must cleanup -+ state. Ownership cannot be released to the process via the -+ return of pthread_create since a non-zero result entails PD is -+ undefined and therefore cannot be joined to free the resources. -+ We privately call pthread_join on the thread to finish handling -+ the resource shutdown (Or at least we should, see bug 19511). -+ -+ (e) If the thread creation failed and THREAD_RAN is false, then the -+ creating thread retains ownership of PD and must cleanup state. -+ No waiting for the new thread is required because it never -+ started. -+ -+ The nptl_db interface: -+ -+ The interface with nptl_db requires that we enqueue PD into a linked -+ list and then call a function which the debugger will trap. The PD -+ will then be dequeued and control returned to the thread. The caller -+ at the time must have ownership of PD and such ownership remains -+ after control returns to thread. The enqueued PD is removed from the -+ linked list by the nptl_db callback td_thr_event_getmsg. The debugger -+ must ensure that the thread does not resume execution, otherwise -+ ownership of PD may be lost and examining PD will not be possible. -+ -+ Note that the GNU Debugger as of (December 10th 2015) commit -+ c2c2a31fdb228d41ce3db62b268efea04bd39c18 no longer uses -+ td_thr_event_getmsg and several other related nptl_db interfaces. The -+ principal reason for this is that nptl_db does not support non-stop -+ mode where other threads can run concurrently and modify runtime -+ structures currently in use by the debugger and the nptl_db -+ interface. -+ -+ Axioms: -+ -+ * The create_thread function can never set stopped_start to false. -+ * The created thread can read stopped_start but never write to it. 
-+ * The variable thread_ran is set some time after the OS thread -+ creation routine returns, how much time after the thread is created -+ is unspecified, but it should be as quickly as possible. -+ -+*/ -+ -+/* CREATE THREAD NOTES: -+ -+ createthread.c defines the create_thread function, and two macros: - START_THREAD_DEFN and START_THREAD_SELF (see below). - -- create_thread is obliged to initialize PD->stopped_start. It -- should be true if the STOPPED_START parameter is true, or if -- create_thread needs the new thread to synchronize at startup for -- some other implementation reason. If PD->stopped_start will be -- true, then create_thread is obliged to perform the operation -- "lll_lock (PD->lock, LLL_PRIVATE)" before starting the thread. -+ create_thread must initialize PD->stopped_start. It should be true -+ if the STOPPED_START parameter is true, or if create_thread needs the -+ new thread to synchronize at startup for some other implementation -+ reason. If STOPPED_START will be true, then create_thread is obliged -+ to lock PD->lock before starting the thread. Then pthread_create -+ unlocks PD->lock which synchronizes-with START_THREAD_DEFN in the -+ child thread which does an acquire/release of PD->lock as the last -+ action before calling the user entry point. The goal of all of this -+ is to ensure that the required initial thread attributes are applied -+ (by the creating thread) before the new thread runs user code. Note -+ that the the functions pthread_getschedparam, pthread_setschedparam, -+ pthread_setschedprio, __pthread_tpp_change_priority, and -+ __pthread_current_priority reuse the same lock, PD->lock, for a -+ similar purpose e.g. synchronizing the setting of similar thread -+ attributes. These functions are never called before the thread is -+ created, so don't participate in startup syncronization, but given -+ that the lock is present already and in the unlocked state, reusing -+ it saves space. - - The return value is zero for success or an errno code for failure. - If the return value is ENOMEM, that will be translated to EAGAIN, - so create_thread need not do that. On failure, *THREAD_RAN should - be set to true iff the thread actually started up and then got -- cancelled before calling user code (*PD->start_routine), in which -- case it is responsible for doing its own cleanup. */ -- -+ canceled before calling user code (*PD->start_routine). */ - static int create_thread (struct pthread *pd, const struct pthread_attr *attr, -- bool stopped_start, STACK_VARIABLES_PARMS, -+ bool *stopped_start, STACK_VARIABLES_PARMS, - bool *thread_ran); - - #include -@@ -314,12 +430,19 @@ START_THREAD_DEFN - /* Store the new cleanup handler info. */ - THREAD_SETMEM (pd, cleanup_jmp_buf, &unwind_buf); - -+ /* We are either in (a) or (b), and in either case we either own -+ PD already (2) or are about to own PD (1), and so our only -+ restriction would be that we can't free PD until we know we -+ have ownership (see CONCURRENCY NOTES above). */ - if (__glibc_unlikely (pd->stopped_start)) - { - int oldtype = CANCEL_ASYNC (); - - /* Get the lock the parent locked to force synchronization. */ - lll_lock (pd->lock, LLL_PRIVATE); -+ -+ /* We have ownership of PD now. */ -+ - /* And give it up right away. */ - lll_unlock (pd->lock, LLL_PRIVATE); - -@@ -378,7 +501,8 @@ START_THREAD_DEFN - pd, pd->nextevent)); - } - -- /* Now call the function to signal the event. */ -+ /* Now call the function which signals the event. See -+ CONCURRENCY NOTES for the nptl_db interface comments. 
*/ - __nptl_death_event (); - } - } -@@ -642,19 +766,28 @@ __pthread_create_2_1 (pthread_t *newthread, const pthread_attr_t *attr, - that cares whether the thread count is correct. */ - atomic_increment (&__nptl_nthreads); - -- bool thread_ran = false; -+ /* Our local value of stopped_start and thread_ran can be accessed at -+ any time. The PD->stopped_start may only be accessed if we have -+ ownership of PD (see CONCURRENCY NOTES above). */ -+ bool stopped_start = false; bool thread_ran = false; - - /* Start the thread. */ - if (__glibc_unlikely (report_thread_creation (pd))) - { -- /* Create the thread. We always create the thread stopped -- so that it does not get far before we tell the debugger. */ -- retval = create_thread (pd, iattr, true, STACK_VARIABLES_ARGS, -- &thread_ran); -+ stopped_start = true; -+ -+ /* We always create the thread stopped at startup so we can -+ notify the debugger. */ -+ retval = create_thread (pd, iattr, &stopped_start, -+ STACK_VARIABLES_ARGS, &thread_ran); - if (retval == 0) - { -- /* create_thread should have set this so that the logic below can -- test it. */ -+ /* We retain ownership of PD until (a) (see CONCURRENCY NOTES -+ above). */ -+ -+ /* Assert stopped_start is true in both our local copy and the -+ PD copy. */ -+ assert (stopped_start); - assert (pd->stopped_start); - - /* Now fill in the information about the new thread in -@@ -671,26 +804,30 @@ __pthread_create_2_1 (pthread_t *newthread, const pthread_attr_t *attr, - pd, pd->nextevent) - != 0); - -- /* Now call the function which signals the event. */ -+ /* Now call the function which signals the event. See -+ CONCURRENCY NOTES for the nptl_db interface comments. */ - __nptl_create_event (); - } - } - else -- retval = create_thread (pd, iattr, false, STACK_VARIABLES_ARGS, -- &thread_ran); -+ retval = create_thread (pd, iattr, &stopped_start, -+ STACK_VARIABLES_ARGS, &thread_ran); - - if (__glibc_unlikely (retval != 0)) - { -- /* If thread creation "failed", that might mean that the thread got -- created and ran a little--short of running user code--but then -- create_thread cancelled it. In that case, the thread will do all -- its own cleanup just like a normal thread exit after a successful -- creation would do. */ -- - if (thread_ran) -- assert (pd->stopped_start); -+ /* State (c) or (d) and we may not have PD ownership (see -+ CONCURRENCY NOTES above). We can assert that STOPPED_START -+ must have been true because thread creation didn't fail, but -+ thread attribute setting did. */ -+ /* See bug 19511 which explains why doing nothing here is a -+ resource leak for a joinable thread. */ -+ assert (stopped_start); - else - { -+ /* State (e) and we have ownership of PD (see CONCURRENCY -+ NOTES above). */ -+ - /* Oops, we lied for a second. */ - atomic_decrement (&__nptl_nthreads); - -@@ -710,10 +847,14 @@ __pthread_create_2_1 (pthread_t *newthread, const pthread_attr_t *attr, - } - else - { -- if (pd->stopped_start) -- /* The thread blocked on this lock either because we're doing TD_CREATE -- event reporting, or for some other reason that create_thread chose. -- Now let it run free. */ -+ /* We don't know if we have PD ownership. Once we check the local -+ stopped_start we'll know if we're in state (a) or (b) (see -+ CONCURRENCY NOTES above). */ -+ if (stopped_start) -+ /* State (a), we own PD. The thread blocked on this lock either -+ because we're doing TD_CREATE event reporting, or for some -+ other reason that create_thread chose. Now let it run -+ free. 
*/ - lll_unlock (pd->lock, LLL_PRIVATE); - - /* We now have for sure more than one thread. The main thread might -diff --git a/nptl/pthread_getschedparam.c b/nptl/pthread_getschedparam.c -index b887881baf..de71171a08 100644 ---- a/nptl/pthread_getschedparam.c -+++ b/nptl/pthread_getschedparam.c -@@ -35,6 +35,7 @@ __pthread_getschedparam (pthread_t threadid, int *policy, - - int result = 0; - -+ /* See CREATE THREAD NOTES in nptl/pthread_create.c. */ - lll_lock (pd->lock, LLL_PRIVATE); - - /* The library is responsible for maintaining the values at all -diff --git a/nptl/pthread_setschedparam.c b/nptl/pthread_setschedparam.c -index dfb52b9dbf..dcb520f1c8 100644 ---- a/nptl/pthread_setschedparam.c -+++ b/nptl/pthread_setschedparam.c -@@ -36,6 +36,7 @@ __pthread_setschedparam (pthread_t threadid, int policy, - - int result = 0; - -+ /* See CREATE THREAD NOTES in nptl/pthread_create.c. */ - lll_lock (pd->lock, LLL_PRIVATE); - - struct sched_param p; -diff --git a/nptl/pthread_setschedprio.c b/nptl/pthread_setschedprio.c -index cefc6481d6..8134b50560 100644 ---- a/nptl/pthread_setschedprio.c -+++ b/nptl/pthread_setschedprio.c -@@ -38,6 +38,7 @@ pthread_setschedprio (pthread_t threadid, int prio) - struct sched_param param; - param.sched_priority = prio; - -+ /* See CREATE THREAD NOTES in nptl/pthread_create.c. */ - lll_lock (pd->lock, LLL_PRIVATE); - - /* If the thread should have higher priority because of some -diff --git a/nptl/tpp.c b/nptl/tpp.c -index e175bf4d53..223bd6bbee 100644 ---- a/nptl/tpp.c -+++ b/nptl/tpp.c -@@ -114,6 +114,7 @@ __pthread_tpp_change_priority (int previous_prio, int new_prio) - if (priomax == newpriomax) - return 0; - -+ /* See CREATE THREAD NOTES in nptl/pthread_create.c. */ - lll_lock (self->lock, LLL_PRIVATE); - - tpp->priomax = newpriomax; -@@ -165,6 +166,7 @@ __pthread_current_priority (void) - - int result = 0; - -+ /* See CREATE THREAD NOTES in nptl/pthread_create.c. */ - lll_lock (self->lock, LLL_PRIVATE); - - if ((self->flags & ATTR_FLAG_SCHED_SET) == 0) -diff --git a/nptl/tst-create-detached.c b/nptl/tst-create-detached.c -new file mode 100644 -index 0000000000..ea93e441c7 ---- /dev/null -+++ b/nptl/tst-create-detached.c -@@ -0,0 +1,137 @@ -+/* Bug 20116: Test rapid creation of detached threads. -+ Copyright (C) 2017 Free Software Foundation, Inc. -+ This file is part of the GNU C Library. -+ -+ The GNU C Library is free software; you can redistribute it and/or -+ modify it under the terms of the GNU Lesser General Public -+ License as published by the Free Software Foundation; either -+ version 2.1 of the License, or (at your option) any later version. -+ -+ The GNU C Library is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ Lesser General Public License for more details. -+ -+ You should have received a copy of the GNU Lesser General Public -+ License along with the GNU C Library; see the file COPYING.LIB. If -+ not, see . */ -+ -+/* The goal of the test is to trigger a failure if the parent touches -+ any part of the thread descriptor after the detached thread has -+ exited. We test this by creating many detached threads with large -+ stacks. The stacks quickly fill the the stack cache and subsequent -+ threads will start to cause the thread stacks to be immediately -+ unmapped to satisfy the stack cache max. 
With the stacks being -+ unmapped the parent's read of any part of the thread descriptor will -+ trigger a segfault. That segfault is what we are trying to cause, -+ since any segfault is a defect in the implementation. */ -+ -+#include -+#include -+#include -+#include -+#include -+#include -+#include -+ -+/* Number of threads to create. */ -+enum { threads_to_create = 100000 }; -+ -+/* Number of threads which should spawn other threads. */ -+enum { creator_threads = 2 }; -+ -+/* Counter of threads created so far. This is incremented by all the -+ running creator threads. */ -+static unsigned threads_created; -+ -+/* Thread callback which does nothing, so that the thread exits -+ immediatedly. */ -+static void * -+do_nothing (void *arg) -+{ -+ return NULL; -+} -+ -+/* Attribute indicating that the thread should be created in a detached -+ fashion. */ -+static pthread_attr_t detached; -+ -+/* Barrier to synchronize initialization. */ -+static pthread_barrier_t barrier; -+ -+static void * -+creator_thread (void *arg) -+{ -+ int ret; -+ xpthread_barrier_wait (&barrier); -+ -+ while (true) -+ { -+ pthread_t thr; -+ /* Thread creation will fail if the kernel does not free old -+ threads quickly enough, so we do not report errors. */ -+ ret = pthread_create (&thr, &detached, do_nothing, NULL); -+ if (ret == 0 && __atomic_add_fetch (&threads_created, 1, __ATOMIC_SEQ_CST) -+ >= threads_to_create) -+ break; -+ } -+ -+ return NULL; -+} -+ -+static int -+do_test (void) -+{ -+ /* Limit the size of the process, so that memory allocation will -+ fail without impacting the entire system. */ -+ { -+ struct rlimit limit; -+ if (getrlimit (RLIMIT_AS, &limit) != 0) -+ { -+ printf ("FAIL: getrlimit (RLIMIT_AS) failed: %m\n"); -+ return 1; -+ } -+ /* This limit, 800MB, is just a heuristic. Any value can be -+ picked. */ -+ long target = 800 * 1024 * 1024; -+ if (limit.rlim_cur == RLIM_INFINITY || limit.rlim_cur > target) -+ { -+ limit.rlim_cur = target; -+ if (setrlimit (RLIMIT_AS, &limit) != 0) -+ { -+ printf ("FAIL: setrlimit (RLIMIT_AS) failed: %m\n"); -+ return 1; -+ } -+ } -+ } -+ -+ xpthread_attr_init (&detached); -+ -+ xpthread_attr_setdetachstate (&detached, PTHREAD_CREATE_DETACHED); -+ -+ /* A large thread stack seems beneficial for reproducing a race -+ condition in detached thread creation. The goal is to reach the -+ limit of the runtime thread stack cache such that the detached -+ thread's stack is unmapped after exit and causes a segfault when -+ the parent reads the thread descriptor data stored on the the -+ unmapped stack. 
*/ -+ xpthread_attr_setstacksize (&detached, 16 * 1024 * 1024); -+ -+ xpthread_barrier_init (&barrier, NULL, creator_threads); -+ -+ pthread_t threads[creator_threads]; -+ -+ for (int i = 0; i < creator_threads; ++i) -+ threads[i] = xpthread_create (NULL, creator_thread, NULL); -+ -+ for (int i = 0; i < creator_threads; ++i) -+ xpthread_join (threads[i]); -+ -+ xpthread_attr_destroy (&detached); -+ -+ xpthread_barrier_destroy (&barrier); -+ -+ return 0; -+} -+ -+#include -diff --git a/sysdeps/nacl/createthread.c b/sysdeps/nacl/createthread.c -index 7b571c34e2..5465558cc1 100644 ---- a/sysdeps/nacl/createthread.c -+++ b/sysdeps/nacl/createthread.c -@@ -32,15 +32,13 @@ static void start_thread (void) __attribute__ ((noreturn)); - - static int - create_thread (struct pthread *pd, const struct pthread_attr *attr, -- bool stopped_start, STACK_VARIABLES_PARMS, bool *thread_ran) -+ bool *stopped_start, STACK_VARIABLES_PARMS, bool *thread_ran) - { - pd->tid = __nacl_get_tid (pd); - -- pd->stopped_start = stopped_start; -- if (__glibc_unlikely (stopped_start)) -- /* We make sure the thread does not run far by forcing it to get a -- lock. We lock it here too so that the new thread cannot continue -- until we tell it to. */ -+ pd->stopped_start = *stopped_start; -+ if (__glibc_unlikely (*stopped_start)) -+ /* See CONCURRENCY NOTES in nptl/pthread_create.c. */ - lll_lock (pd->lock, LLL_PRIVATE); - - TLS_DEFINE_INIT_TP (tp, pd); -diff --git a/sysdeps/unix/sysv/linux/createthread.c b/sysdeps/unix/sysv/linux/createthread.c -index 6d32cece48..66ddae61d4 100644 ---- a/sysdeps/unix/sysv/linux/createthread.c -+++ b/sysdeps/unix/sysv/linux/createthread.c -@@ -46,7 +46,7 @@ static int start_thread (void *arg) __attribute__ ((noreturn)); - - static int - create_thread (struct pthread *pd, const struct pthread_attr *attr, -- bool stopped_start, STACK_VARIABLES_PARMS, bool *thread_ran) -+ bool *stopped_start, STACK_VARIABLES_PARMS, bool *thread_ran) - { - /* Determine whether the newly created threads has to be started - stopped since we have to set the scheduling parameters or set the -@@ -54,13 +54,11 @@ create_thread (struct pthread *pd, const struct pthread_attr *attr, - if (attr != NULL - && (__glibc_unlikely (attr->cpuset != NULL) - || __glibc_unlikely ((attr->flags & ATTR_FLAG_NOTINHERITSCHED) != 0))) -- stopped_start = true; -+ *stopped_start = true; - -- pd->stopped_start = stopped_start; -- if (__glibc_unlikely (stopped_start)) -- /* We make sure the thread does not run far by forcing it to get a -- lock. We lock it here too so that the new thread cannot continue -- until we tell it to. */ -+ pd->stopped_start = *stopped_start; -+ if (__glibc_unlikely (*stopped_start)) -+ /* See CONCURRENCY NOTES in nptl/pthread_creat.c. */ - lll_lock (pd->lock, LLL_PRIVATE); - - /* We rely heavily on various flags the CLONE function understands: -@@ -117,7 +115,7 @@ create_thread (struct pthread *pd, const struct pthread_attr *attr, - /* Set the affinity mask if necessary. */ - if (attr->cpuset != NULL) - { -- assert (stopped_start); -+ assert (*stopped_start); - - res = INTERNAL_SYSCALL (sched_setaffinity, err, 3, pd->tid, - attr->cpusetsize, attr->cpuset); -@@ -140,7 +138,7 @@ create_thread (struct pthread *pd, const struct pthread_attr *attr, - /* Set the scheduling parameters. 
*/ - if ((attr->flags & ATTR_FLAG_NOTINHERITSCHED) != 0) - { -- assert (stopped_start); -+ assert (*stopped_start); - - res = INTERNAL_SYSCALL (sched_setscheduler, err, 3, pd->tid, - pd->schedpolicy, &pd->schedparam); --- -2.11.0 - diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0028-Rework-fno-omit-frame-pointer-support-on-i386.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0028-Rework-fno-omit-frame-pointer-support-on-i386.patch new file mode 100644 index 000000000..7ed2b902d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/0028-Rework-fno-omit-frame-pointer-support-on-i386.patch @@ -0,0 +1,268 @@ +From 1ea003d4fccc4646fd1848a182405a1c7000ab18 Mon Sep 17 00:00:00 2001 +From: Adhemerval Zanella +Date: Sun, 8 Jan 2017 11:38:23 -0200 +Subject: [PATCH 28/28] Rework -fno-omit-frame-pointer support on i386 + +Commit 6b1df8b27f fixed the -OS build issue on i386 (BZ#20729) by +expliciting disabling frame pointer (-fomit-frame-pointer) on the +faulty objects. Although it does fix the issue, it is a subpar +workaround that adds complexity in build process (a rule for each +object to add the required compiler option and pontentially more +rules for objects that call {INLINE,INTERNAL}_SYSCALL) and does not +allow the implementations to get all the possible debug/calltrack +information possible (used mainly in debuggers and performance +measurement tools). + +This patch fixes it by adding an explicit configure check to see +if -fno-omit-frame-pointer is set and to act accordingly (set or +not OPTIMIZE_FOR_GCC_5). The make rules is simplified and only +one is required: to add libc-do-syscall on loader due mmap +(which will be empty anyway for default build with +-fomit-frame-pointer). + +Checked on i386-linux-gnu with GCC 6.2.1 with CFLAGS sets as +'-Os', '-O2 -fno-omit-frame-pointer', and '-O2 -fomit-frame-pointer'. +For '-Os' the testsuite issues described by BZ#19463 and BZ#15105 +still applied. + +It fixes BZ #21029, although it is marked as duplicated of #20729 +(I reopened to track this cleanup). + + [BZ #21029] + * config.h.in [CAN_USE_REGISTER_ASM_EBP]: New define. + * sysdeps/unix/sysv/linux/i386/Makefile + [$(subdir) = elf] (sysdep-dl-routines): Add libc-do-syscall. + (uses-6-syscall-arguments): Remove. + [$(subdir) = misc] (CFLAGS-epoll_pwait.o): Likewise. + [$(subdir) = misc] (CFLAGS-epoll_pwait.os): Likewise. + [$(subdir) = misc] (CFLAGS-mmap.o): Likewise. + [$(subdir) = misc] (CFLAGS-mmap.os): Likewise. + [$(subdir) = misc] (CFLAGS-mmap64.o): Likewise. + [$(subdir) = misc] (CFLAGS-mmap64.os): Likewise. + [$(subdir) = misc] (CFLAGS-pselect.o): Likewise. + [$(subdir) = misc] (cflags-pselect.o): Likewise. + [$(subdir) = misc] (cflags-pselect.os): Likewise. + [$(subdir) = misc] (cflags-rtld-mmap.os): Likewise. + [$(subdir) = sysvipc] (cflags-semtimedop.o): Likewise. + [$(subdir) = sysvipc] (cflags-semtimedop.os): Likewise. + [$(subdir) = io] (CFLAGS-posix_fadvise64.o): Likewise. + [$(subdir) = io] (CFLAGS-posix_fadvise64.os): Likewise. + [$(subdir) = io] (CFLAGS-posix_fallocate.o): Likewise. + [$(subdir) = io] (CFLAGS-posix_fallocate.os): Likewise. + [$(subdir) = io] (CFLAGS-posix_fallocate64.o): Likewise. + [$(subdir) = io] (CFLAGS-posix_fallocate64.os): Likewise. + [$(subdir) = io] (CFLAGS-sync_file_range.o): Likewise. + [$(subdir) = io] (CFLAGS-sync_file_range.os): Likewise. + [$(subdir) = io] (CFLAGS-fallocate.o): Likewise. + [$(subdir) = io] (CFLAGS-fallocate.os): Likewise. + [$(subdir) = io] (CFLAGS-fallocate64.o): Likewise. 
+ [$(subdir) = io] (CFLAGS-fallocate64.os): Likewise. + [$(subdir) = nptl] (CFLAGS-pthread_rwlock_timedrdlock.o): + Likewise. + [$(subdir) = nptl] (CFLAGS-pthread_rwlock_timedrdlock.os): + Likewise. + [$(subdir) = nptl] (CFLAGS-pthread_rwlock_timedrwlock.o): + Likewise. + [$(subdir) = nptl] (CFLAGS-pthread_rwlock_timedrwlock.os): + Likewise. + [$(subdir) = nptl] (CFLAGS-sem_wait.o): Likewise. + [$(subdir) = nptl] (CFLAGS-sem_wait.os): Likewise. + [$(subdir) = nptl] (CFLAGS-sem_timedwait.o): Likewise. + [$(subdir) = nptl] (CFLAGS-sem_timedwait.os): Likewise. + * sysdeps/unix/sysv/linux/i386/configure.ac: Add check if compiler allows + ebp on inline assembly. + * sysdeps/unix/sysv/linux/i386/configure: Regenerate. + * sysdeps/unix/sysv/linux/i386/sysdep.h (OPTIMIZE_FOR_GCC_5): + Set if CAN_USE_REGISTER_ASM_EBP is set. + (check_consistency): Likewise. + +Upstream-Status: Backport + + https://sourceware.org/git/?p=glibc.git;a=commitdiff;h=3b33d6ed6096c1d20d05a650b06026d673f7399a + +Signed-off-by: Andre McCurdy +--- + config.h.in | 4 ++++ + sysdeps/unix/sysv/linux/i386/Makefile | 39 +------------------------------ + sysdeps/unix/sysv/linux/i386/configure | 39 +++++++++++++++++++++++++++++++ + sysdeps/unix/sysv/linux/i386/configure.ac | 17 ++++++++++++++ + sysdeps/unix/sysv/linux/i386/sysdep.h | 6 ++--- + 5 files changed, 64 insertions(+), 41 deletions(-) + +diff --git a/config.h.in b/config.h.in +index 7bfe923..fb2cc51 100644 +--- a/config.h.in ++++ b/config.h.in +@@ -259,4 +259,8 @@ + /* Build glibc with tunables support. */ + #define HAVE_TUNABLES 0 + ++/* Some compiler options may now allow to use ebp in __asm__ (used mainly ++ in i386 6 argument syscall issue). */ ++#define CAN_USE_REGISTER_ASM_EBP 0 ++ + #endif +diff --git a/sysdeps/unix/sysv/linux/i386/Makefile b/sysdeps/unix/sysv/linux/i386/Makefile +index 9609752..6aac0df 100644 +--- a/sysdeps/unix/sysv/linux/i386/Makefile ++++ b/sysdeps/unix/sysv/linux/i386/Makefile +@@ -1,47 +1,18 @@ + # The default ABI is 32. + default-abi := 32 + +-# %ebp is used to pass the 6th argument to system calls, so these +-# system calls are incompatible with a frame pointer. 
+-uses-6-syscall-arguments = -fomit-frame-pointer +- + ifeq ($(subdir),misc) + sysdep_routines += ioperm iopl vm86 +-CFLAGS-epoll_pwait.o += $(uses-6-syscall-arguments) +-CFLAGS-epoll_pwait.os += $(uses-6-syscall-arguments) +-CFLAGS-mmap.o += $(uses-6-syscall-arguments) +-CFLAGS-mmap.os += $(uses-6-syscall-arguments) +-CFLAGS-mmap64.o += $(uses-6-syscall-arguments) +-CFLAGS-mmap64.os += $(uses-6-syscall-arguments) +-CFLAGS-pselect.o += $(uses-6-syscall-arguments) +-CFLAGS-pselect.os += $(uses-6-syscall-arguments) +-CFLAGS-rtld-mmap.os += $(uses-6-syscall-arguments) +-endif +- +-ifeq ($(subdir),sysvipc) +-CFLAGS-semtimedop.o += $(uses-6-syscall-arguments) +-CFLAGS-semtimedop.os += $(uses-6-syscall-arguments) + endif + + ifeq ($(subdir),elf) ++sysdep-dl-routines += libc-do-syscall + sysdep-others += lddlibc4 + install-bin += lddlibc4 + endif + + ifeq ($(subdir),io) + sysdep_routines += libc-do-syscall +-CFLAGS-posix_fadvise64.o += $(uses-6-syscall-arguments) +-CFLAGS-posix_fadvise64.os += $(uses-6-syscall-arguments) +-CFLAGS-posix_fallocate.o += $(uses-6-syscall-arguments) +-CFLAGS-posix_fallocate.os += $(uses-6-syscall-arguments) +-CFLAGS-posix_fallocate64.o += $(uses-6-syscall-arguments) +-CFLAGS-posix_fallocate64.os += $(uses-6-syscall-arguments) +-CFLAGS-sync_file_range.o += $(uses-6-syscall-arguments) +-CFLAGS-sync_file_range.os += $(uses-6-syscall-arguments) +-CFLAGS-fallocate.o += $(uses-6-syscall-arguments) +-CFLAGS-fallocate.os += $(uses-6-syscall-arguments) +-CFLAGS-fallocate64.o += $(uses-6-syscall-arguments) +-CFLAGS-fallocate64.os += $(uses-6-syscall-arguments) + endif + + ifeq ($(subdir),nptl) +@@ -61,14 +32,6 @@ ifeq ($(subdir),nptl) + # pull in __syscall_error routine + libpthread-routines += sysdep + libpthread-shared-only-routines += sysdep +-CFLAGS-pthread_rwlock_timedrdlock.o += $(uses-6-syscall-arguments) +-CFLAGS-pthread_rwlock_timedrdlock.os += $(uses-6-syscall-arguments) +-CFLAGS-pthread_rwlock_timedwrlock.o += $(uses-6-syscall-arguments) +-CFLAGS-pthread_rwlock_timedwrlock.os += $(uses-6-syscall-arguments) +-CFLAGS-sem_wait.o += $(uses-6-syscall-arguments) +-CFLAGS-sem_wait.os += $(uses-6-syscall-arguments) +-CFLAGS-sem_timedwait.o += $(uses-6-syscall-arguments) +-CFLAGS-sem_timedwait.os += $(uses-6-syscall-arguments) + endif + + ifeq ($(subdir),rt) +diff --git a/sysdeps/unix/sysv/linux/i386/configure b/sysdeps/unix/sysv/linux/i386/configure +index eb72659..ae2c356 100644 +--- a/sysdeps/unix/sysv/linux/i386/configure ++++ b/sysdeps/unix/sysv/linux/i386/configure +@@ -3,5 +3,44 @@ + + arch_minimum_kernel=2.6.32 + ++# Check if CFLAGS allows compiler to use ebp register in inline assembly. ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if compiler flags allows ebp in inline assembly" >&5 ++$as_echo_n "checking if compiler flags allows ebp in inline assembly... " >&6; } ++if ${libc_cv_can_use_register_asm_ebp+:} false; then : ++ $as_echo_n "(cached) " >&6 ++else ++ ++cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++ ++ void foo (int i) ++ { ++ register int reg asm ("ebp") = i; ++ asm ("# %0" : : "r" (reg)); ++ } ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ libc_cv_can_use_register_asm_ebp=yes ++else ++ libc_cv_can_use_register_asm_ebp=no ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $libc_cv_can_use_register_asm_ebp" >&5 ++$as_echo "$libc_cv_can_use_register_asm_ebp" >&6; } ++if test $libc_cv_can_use_register_asm_ebp = yes; then ++ $as_echo "#define CAN_USE_REGISTER_ASM_EBP 1" >>confdefs.h ++ ++fi ++ + libc_cv_gcc_unwind_find_fde=yes + ldd_rewrite_script=sysdeps/unix/sysv/linux/ldd-rewrite.sed +diff --git a/sysdeps/unix/sysv/linux/i386/configure.ac b/sysdeps/unix/sysv/linux/i386/configure.ac +index 1a11da6..1cd632e 100644 +--- a/sysdeps/unix/sysv/linux/i386/configure.ac ++++ b/sysdeps/unix/sysv/linux/i386/configure.ac +@@ -3,5 +3,22 @@ GLIBC_PROVIDES dnl See aclocal.m4 in the top level source directory. + + arch_minimum_kernel=2.6.32 + ++# Check if CFLAGS allows compiler to use ebp register in inline assembly. ++AC_CACHE_CHECK([if compiler flags allows ebp in inline assembly], ++ libc_cv_can_use_register_asm_ebp, [ ++AC_COMPILE_IFELSE( ++ [AC_LANG_PROGRAM([ ++ void foo (int i) ++ { ++ register int reg asm ("ebp") = i; ++ asm ("# %0" : : "r" (reg)); ++ }])], ++ [libc_cv_can_use_register_asm_ebp=yes], ++ [libc_cv_can_use_register_asm_ebp=no]) ++]) ++if test $libc_cv_can_use_register_asm_ebp = yes; then ++ AC_DEFINE(CAN_USE_REGISTER_ASM_EBP) ++fi ++ + libc_cv_gcc_unwind_find_fde=yes + ldd_rewrite_script=sysdeps/unix/sysv/linux/ldd-rewrite.sed +diff --git a/sysdeps/unix/sysv/linux/i386/sysdep.h b/sysdeps/unix/sysv/linux/i386/sysdep.h +index baf4642..449b23e 100644 +--- a/sysdeps/unix/sysv/linux/i386/sysdep.h ++++ b/sysdeps/unix/sysv/linux/i386/sysdep.h +@@ -44,9 +44,9 @@ + /* Since GCC 5 and above can properly spill %ebx with PIC when needed, + we can inline syscalls with 6 arguments if GCC 5 or above is used + to compile glibc. Disable GCC 5 optimization when compiling for +- profiling since asm ("ebp") can't be used to put the 6th argument +- in %ebp for syscall. */ +-#if __GNUC_PREREQ (5,0) && !defined PROF ++ profiling or when -fno-omit-frame-pointer is used since asm ("ebp") ++ can't be used to put the 6th argument in %ebp for syscall. */ ++#if __GNUC_PREREQ (5,0) && !defined PROF && CAN_USE_REGISTER_ASM_EBP + # define OPTIMIZE_FOR_GCC_5 + #endif + +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2016-6323.patch b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2016-6323.patch deleted file mode 100644 index f9b9fa50d..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc/CVE-2016-6323.patch +++ /dev/null @@ -1,39 +0,0 @@ -glibc-2.24: Fix CVE-2016-6323 - -[No upstream tracking] -- https://sourceware.org/bugzilla/show_bug.cgi?id=20435 - -arm: mark __startcontext as .cantunwind, GNU - -Glibc bug where the makecontext function would create -an execution context which is incompatible with the unwinder, -causing it to hang when the generation of a backtrace is attempted. 
- -Upstream-Status: Backport [https://sourceware.org/git/gitweb.cgi?p=glibc.git;h=9e2ff6c9cc54c0b4402b8d49e4abe7000fde7617] -CVE: CVE-2016-6323 -Signed-off-by: Andrej Valek -Signed-off-by: Pascal Bach - -diff --git a/sysdeps/unix/sysv/linux/arm/setcontext.S b/sysdeps/unix/sysv/linux/arm/setcontext.S -index 603e508..d1f168f 100644 ---- a/sysdeps/unix/sysv/linux/arm/setcontext.S -+++ b/sysdeps/unix/sysv/linux/arm/setcontext.S -@@ -86,12 +86,19 @@ weak_alias(__setcontext, setcontext) - - /* Called when a makecontext() context returns. Start the - context in R4 or fall through to exit(). */ -+ /* Unwind descriptors are looked up based on PC - 2, so we have to -+ make sure to mark the instruction preceding the __startcontext -+ label as .cantunwind. */ -+ .fnstart -+ .cantunwind -+ nop - ENTRY(__startcontext) - movs r0, r4 - bne PLTJMP(__setcontext) - - @ New context was 0 - exit - b PLTJMP(HIDDEN_JUMPTARGET(exit)) -+ .fnend - END(__startcontext) - - #ifdef PIC diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc_2.24.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc_2.24.bb deleted file mode 100644 index e723e03dc..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc_2.24.bb +++ /dev/null @@ -1,148 +0,0 @@ -require glibc.inc - -LIC_FILES_CHKSUM = "file://LICENSES;md5=e9a558e243b36d3209f380deb394b213 \ - file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \ - file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \ - file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c" - -DEPENDS += "gperf-native" - -SRCREV ?= "ea23815a795f72035262953dad5beb03e09c17dd" - -SRCBRANCH ?= "release/${PV}/master" - -GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git" -UPSTREAM_CHECK_GITTAGREGEX = "(?P\d+\.\d+(\.\d+)*)" - -SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \ - file://0005-fsl-e500-e5500-e6500-603e-fsqrt-implementation.patch \ - file://0006-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch \ - file://0007-ppc-sqrt-Fix-undefined-reference-to-__sqrt_finite.patch \ - file://0008-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch \ - file://0009-Quote-from-bug-1443-which-explains-what-the-patch-do.patch \ - file://0010-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch \ - file://0011-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch \ - file://0012-Make-ld-version-output-matching-grok-gold-s-output.patch \ - file://0013-sysdeps-gnu-configure.ac-handle-correctly-libc_cv_ro.patch \ - file://0014-Add-unused-attribute.patch \ - file://0015-yes-within-the-path-sets-wrong-config-variables.patch \ - file://0016-timezone-re-written-tzselect-as-posix-sh.patch \ - file://0017-Remove-bash-dependency-for-nscd-init-script.patch \ - file://0018-eglibc-Cross-building-and-testing-instructions.patch \ - file://0019-eglibc-Help-bootstrap-cross-toolchain.patch \ - file://0020-eglibc-cherry-picked-from.patch \ - file://0021-eglibc-Clear-cache-lines-on-ppc8xx.patch \ - file://0022-eglibc-Resolve-__fpscr_values-on-SH4.patch \ - file://0023-eglibc-Install-PIC-archives.patch \ - file://0024-eglibc-Forward-port-cross-locale-generation-support.patch \ - file://0025-Define-DUMMY_LOCALE_T-if-not-defined.patch \ - file://0026-build_local_scope.patch \ - file://0028-Bug-20116-Fix-use-after-free-in-pthread_create.patch \ - file://CVE-2016-6323.patch \ - file://0001-Add-atomic_exchange_relaxed.patch \ - file://0002-Add-atomic-operations-required-by-the-new-condition-.patch \ - 
file://0003-Add-pretty-printers-for-the-NPTL-lock-types.patch \ - file://0004-New-condvar-implementation-that-provides-stronger-or.patch \ - file://0005-Remove-__ASSUME_REQUEUE_PI.patch \ - file://0006-Fix-atomic_fetch_xor_release.patch \ -" - -SRC_URI += "\ - file://etc/ld.so.conf \ - file://generate-supported.mk \ - file://0001-locale-fix-hard-coded-reference-to-gcc-E.patch \ - " - -SRC_URI_append_class-nativesdk = "\ - file://0001-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch \ - file://0002-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch \ - file://0003-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch \ - file://0004-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch \ -" - -S = "${WORKDIR}/git" -B = "${WORKDIR}/build-${TARGET_SYS}" - -PACKAGES_DYNAMIC = "" - -# the -isystem in bitbake.conf screws up glibc do_stage -BUILD_CPPFLAGS = "-I${STAGING_INCDIR_NATIVE}" -TARGET_CPPFLAGS = "-I${STAGING_DIR_TARGET}${includedir}" - -GLIBC_BROKEN_LOCALES = "" -# -# We will skip parsing glibc when target system C library selection is not glibc -# this helps in easing out parsing for non-glibc system libraries -# -COMPATIBLE_HOST_libc-musl_class-target = "null" -COMPATIBLE_HOST_libc-uclibc_class-target = "null" - -EXTRA_OECONF = "--enable-kernel=${OLDEST_KERNEL} \ - --without-cvs --disable-profile \ - --disable-debug --without-gd \ - --enable-clocale=gnu \ - --enable-add-ons \ - --with-headers=${STAGING_INCDIR} \ - --without-selinux \ - --enable-obsolete-rpc \ - ${GLIBC_EXTRA_OECONF}" - -EXTRA_OECONF += "${@get_libc_fpu_setting(bb, d)}" -EXTRA_OECONF += "${@bb.utils.contains('DISTRO_FEATURES', 'libc-inet-anl', '--enable-nscd', '--disable-nscd', d)}" - - -do_patch_append() { - bb.build.exec_func('do_fix_readlib_c', d) -} - -do_fix_readlib_c () { - sed -i -e 's#OECORE_KNOWN_INTERPRETER_NAMES#${EGLIBC_KNOWN_INTERPRETER_NAMES}#' ${S}/elf/readlib.c -} - -do_configure () { -# override this function to avoid the autoconf/automake/aclocal/autoheader -# calls for now -# don't pass CPPFLAGS into configure, since it upsets the kernel-headers -# version check and doesn't really help with anything - (cd ${S} && gnu-configize) || die "failure in running gnu-configize" - find ${S} -name "configure" | xargs touch - CPPFLAGS="" oe_runconf -} - -rpcsvc = "bootparam_prot.x nlm_prot.x rstat.x \ - yppasswd.x klm_prot.x rex.x sm_inter.x mount.x \ - rusers.x spray.x nfs_prot.x rquota.x key_prot.x" - -do_compile () { - # -Wl,-rpath-link /lib in LDFLAGS can cause breakage if another glibc is in staging - unset LDFLAGS - base_do_compile - ( - cd ${S}/sunrpc/rpcsvc - for r in ${rpcsvc}; do - h=`echo $r|sed -e's,\.x$,.h,'` - rm -f $h - ${B}/sunrpc/cross-rpcgen -h $r -o $h || bbwarn "${PN}: unable to generate header for $r" - done - ) - echo "Adjust ldd script" - if [ -n "${RTLDLIST}" ] - then - prevrtld=`cat ${B}/elf/ldd | grep "^RTLDLIST=" | sed 's#^RTLDLIST="\?\([^"]*\)"\?$#\1#'` - if [ "${prevrtld}" != "${RTLDLIST}" ] - then - sed -i ${B}/elf/ldd -e "s#^RTLDLIST=.*\$#RTLDLIST=\"${prevrtld} ${RTLDLIST}\"#" - fi - fi - -} - -# Use the host locale archive when built for nativesdk so that we don't need to -# ship a complete (100MB) locale set. 
-do_compile_prepend_class-nativesdk() { - echo "complocaledir=/usr/lib/locale" >> ${S}/configparms -} - -require glibc-package.inc - -BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-core/glibc/glibc_2.25.bb b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc_2.25.bb new file mode 100644 index 000000000..0f1ec0c14 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/glibc/glibc_2.25.bb @@ -0,0 +1,141 @@ +require glibc.inc + +LIC_FILES_CHKSUM = "file://LICENSES;md5=e9a558e243b36d3209f380deb394b213 \ + file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \ + file://posix/rxspencer/COPYRIGHT;md5=dc5485bb394a13b2332ec1c785f5d83a \ + file://COPYING.LIB;md5=4fbd65380cdd255951079008b364516c" + +DEPENDS += "gperf-native" + +SRCREV ?= "db0242e3023436757bbc7c488a779e6e3343db04" + +SRCBRANCH ?= "release/${PV}/master" + +GLIBC_GIT_URI ?= "git://sourceware.org/git/glibc.git" +UPSTREAM_CHECK_GITTAGREGEX = "(?P\d+\.\d+(\.\d+)*)" + +SRC_URI = "${GLIBC_GIT_URI};branch=${SRCBRANCH};name=glibc \ + file://etc/ld.so.conf \ + file://generate-supported.mk \ + \ + ${NATIVESDKFIXES} \ + file://0005-fsl-e500-e5500-e6500-603e-fsqrt-implementation.patch \ + file://0006-readlib-Add-OECORE_KNOWN_INTERPRETER_NAMES-to-known-.patch \ + file://0007-ppc-sqrt-Fix-undefined-reference-to-__sqrt_finite.patch \ + file://0008-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch \ + file://0009-Quote-from-bug-1443-which-explains-what-the-patch-do.patch \ + file://0010-eglibc-run-libm-err-tab.pl-with-specific-dirs-in-S.patch \ + file://0011-__ieee754_sqrt-f-are-now-inline-functions-and-call-o.patch \ + file://0012-Make-ld-version-output-matching-grok-gold-s-output.patch \ + file://0013-sysdeps-gnu-configure.ac-handle-correctly-libc_cv_ro.patch \ + file://0014-Add-unused-attribute.patch \ + file://0015-yes-within-the-path-sets-wrong-config-variables.patch \ + file://0016-timezone-re-written-tzselect-as-posix-sh.patch \ + file://0017-Remove-bash-dependency-for-nscd-init-script.patch \ + file://0018-eglibc-Cross-building-and-testing-instructions.patch \ + file://0019-eglibc-Help-bootstrap-cross-toolchain.patch \ + file://0020-eglibc-cherry-picked-from.patch \ + file://0021-eglibc-Clear-cache-lines-on-ppc8xx.patch \ + file://0022-eglibc-Resolve-__fpscr_values-on-SH4.patch \ + file://0023-eglibc-Install-PIC-archives.patch \ + file://0024-eglibc-Forward-port-cross-locale-generation-support.patch \ + file://0025-Define-DUMMY_LOCALE_T-if-not-defined.patch \ + file://0026-elf-dl-deps.c-Make-_dl_build_local_scope-breadth-fir.patch \ + file://0027-locale-fix-hard-coded-reference-to-gcc-E.patch \ + file://0028-Rework-fno-omit-frame-pointer-support-on-i386.patch \ +" + +NATIVESDKFIXES ?= "" +NATIVESDKFIXES_class-nativesdk = "\ + file://0001-nativesdk-glibc-Look-for-host-system-ld.so.cache-as-.patch \ + file://0002-nativesdk-glibc-Fix-buffer-overrun-with-a-relocated-.patch \ + file://0003-nativesdk-glibc-Raise-the-size-of-arrays-containing-.patch \ + file://0004-nativesdk-glibc-Allow-64-bit-atomics-for-x86.patch \ +" + +S = "${WORKDIR}/git" +B = "${WORKDIR}/build-${TARGET_SYS}" + +PACKAGES_DYNAMIC = "" + +# the -isystem in bitbake.conf screws up glibc do_stage +BUILD_CPPFLAGS = "-I${STAGING_INCDIR_NATIVE}" +TARGET_CPPFLAGS = "-I${STAGING_DIR_TARGET}${includedir}" + +GLIBC_BROKEN_LOCALES = "" +# +# We will skip parsing glibc when target system C library selection is not glibc +# this helps in easing out parsing for non-glibc system libraries +# +COMPATIBLE_HOST_libc-musl_class-target = "null" 
+COMPATIBLE_HOST_libc-uclibc_class-target = "null" + +EXTRA_OECONF = "--enable-kernel=${OLDEST_KERNEL} \ + --without-cvs --disable-profile \ + --disable-debug --without-gd \ + --enable-clocale=gnu \ + --enable-add-ons \ + --with-headers=${STAGING_INCDIR} \ + --without-selinux \ + --enable-obsolete-rpc \ + ${GLIBC_EXTRA_OECONF}" + +EXTRA_OECONF += "${@get_libc_fpu_setting(bb, d)}" +EXTRA_OECONF += "${@bb.utils.contains('DISTRO_FEATURES', 'libc-inet-anl', '--enable-nscd', '--disable-nscd', d)}" + + +do_patch_append() { + bb.build.exec_func('do_fix_readlib_c', d) +} + +do_fix_readlib_c () { + sed -i -e 's#OECORE_KNOWN_INTERPRETER_NAMES#${EGLIBC_KNOWN_INTERPRETER_NAMES}#' ${S}/elf/readlib.c +} + +do_configure () { +# override this function to avoid the autoconf/automake/aclocal/autoheader +# calls for now +# don't pass CPPFLAGS into configure, since it upsets the kernel-headers +# version check and doesn't really help with anything + (cd ${S} && gnu-configize) || die "failure in running gnu-configize" + find ${S} -name "configure" | xargs touch + CPPFLAGS="" oe_runconf +} + +rpcsvc = "bootparam_prot.x nlm_prot.x rstat.x \ + yppasswd.x klm_prot.x rex.x sm_inter.x mount.x \ + rusers.x spray.x nfs_prot.x rquota.x key_prot.x" + +do_compile () { + # -Wl,-rpath-link /lib in LDFLAGS can cause breakage if another glibc is in staging + unset LDFLAGS + base_do_compile + ( + cd ${S}/sunrpc/rpcsvc + for r in ${rpcsvc}; do + h=`echo $r|sed -e's,\.x$,.h,'` + rm -f $h + ${B}/sunrpc/cross-rpcgen -h $r -o $h || bbwarn "${PN}: unable to generate header for $r" + done + ) + echo "Adjust ldd script" + if [ -n "${RTLDLIST}" ] + then + prevrtld=`cat ${B}/elf/ldd | grep "^RTLDLIST=" | sed 's#^RTLDLIST="\?\([^"]*\)"\?$#\1#'` + if [ "${prevrtld}" != "${RTLDLIST}" ] + then + sed -i ${B}/elf/ldd -e "s#^RTLDLIST=.*\$#RTLDLIST=\"${prevrtld} ${RTLDLIST}\"#" + fi + fi + +} + +# Use the host locale archive when built for nativesdk so that we don't need to +# ship a complete (100MB) locale set. +do_compile_prepend_class-nativesdk() { + echo "complocaledir=/usr/lib/locale" >> ${S}/configparms +} + +require glibc-package.inc + +BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-core/ifupdown/files/inet-6-.defn-fix-inverted-checks-for-loopback.patch b/import-layers/yocto-poky/meta/recipes-core/ifupdown/files/inet-6-.defn-fix-inverted-checks-for-loopback.patch index bff352e3a..2013933b6 100644 --- a/import-layers/yocto-poky/meta/recipes-core/ifupdown/files/inet-6-.defn-fix-inverted-checks-for-loopback.patch +++ b/import-layers/yocto-poky/meta/recipes-core/ifupdown/files/inet-6-.defn-fix-inverted-checks-for-loopback.patch @@ -1,7 +1,7 @@ -From 74152ac74a3e1ea0f3be292aa1eeca5ad1fe69c0 Mon Sep 17 00:00:00 2001 -From: Paul Gortmaker -Date: Wed, 6 Aug 2014 15:12:11 -0400 -Subject: [PATCH 2/2] inet[6].defn: fix inverted checks for loopback +From d88af5aa0312ea18aac791d66661da79b7bcd032 Mon Sep 17 00:00:00 2001 +From: "Maxin B. 
John" +Date: Wed, 21 Dec 2016 15:32:07 +0200 +Subject: [PATCH] inet[6].defn: fix inverted checks for loopback Compared to the hurd link.defn for loopback, we see these are inverted, meaning that you would only be able to configure @@ -11,60 +11,335 @@ The result was that we'd update /run/network/ifstate for "lo" but never actually do anything for up/down, as shown below: root@localhost:~# ifconfig -s -Iface MTU Met RX-OK RX-ERR RX-DRP RX-OVR TX-OK TX-ERR TX-DRP TX-OVR Flg -eth0 1500 0 7736329 0 2016 0 5289422 0 0 0 BMRU -lo 65536 0 18 0 0 0 18 0 0 0 LRU +Iface MTU Met RX-OK RX-ERR RX-DRP RX-OVR TX-OK TX-ERR TX-DRP TX-OVR Flg +eth0 1500 0 7736329 0 2016 0 5289422 0 0 0 BMRU +lo 65536 0 18 0 0 0 18 0 0 0 LRU root@localhost:~# ifdown lo root@localhost:~# echo $? 0 root@localhost:~# ifconfig -s -Iface MTU Met RX-OK RX-ERR RX-DRP RX-OVR TX-OK TX-ERR TX-DRP TX-OVR Flg -eth0 1500 0 7736406 0 2016 0 5289455 0 0 0 BMRU -lo 65536 0 18 0 0 0 18 0 0 0 LRU +Iface MTU Met RX-OK RX-ERR RX-DRP RX-OVR TX-OK TX-ERR TX-DRP TX-OVR Flg +eth0 1500 0 7736406 0 2016 0 5289455 0 0 0 BMRU +lo 65536 0 18 0 0 0 18 0 0 0 LRU root@localhost:~# ifconfig lo down root@localhost:~# ifconfig -s Iface MTU Met RX-OK RX-ERR RX-DRP RX-OVR TX-OK TX-ERR TX-DRP TX-OVR Flg -eth0 1500 0 7736474 0 2016 0 5289481 0 0 0 BMRU +eth0 1500 0 7736474 0 2016 0 5289481 0 0 0 BMRU root@localhost:~# +Also reverted the commit: +commit 80b878497663dae08f70b4d3cffe127b57a3cfc +which uses absolute paths to binaries called by ifup/ifdown. + +Upstream-Status: Inappropriate [OE specific] + Signed-off-by: Paul Gortmaker +Signed-off-by: Maxin B. John --- - inet.defn | 12 ++++++------ - inet6.defn | 8 ++++---- - 2 files changed, 10 insertions(+), 10 deletions(-) + inet.defn | 148 +++++++++++++++++++++++++++++++------------------------------- + 1 file changed, 74 insertions(+), 74 deletions(-) diff --git a/inet.defn b/inet.defn -index b176ab4ed03e..5fdfb14a0e1c 100644 +index 75e6744..b5f5da2 100644 --- a/inet.defn +++ b/inet.defn @@ -6,10 +6,10 @@ method loopback This method may be used to define the IPv4 loopback interface. 
up -- ip link set dev %iface% up if (!iface_is_lo()) +- /bin/ip link set dev %iface% up if (!iface_is_lo()) + ip link set dev %iface% up if (iface_is_lo()) down -- ip link set dev %iface% down if (!iface_is_lo()) +- /bin/ip link set dev %iface% down if (!iface_is_lo()) + ip link set dev %iface% down if (iface_is_lo()) method static description -@@ -212,11 +212,11 @@ method loopback +@@ -36,17 +36,17 @@ method static + broadcast compute_v4_broadcast + + up +- /bin/ip addr add %address%[[/%netmask%]] [[broadcast %broadcast%]] \ ++ ip addr add %address%[[/%netmask%]] [[broadcast %broadcast%]] \ + [[peer %pointopoint%]] [[scope %scope%]] dev %iface% label %iface% +- /bin/ip link set dev %iface% [[mtu %mtu%]] [[address %hwaddress%]] up ++ ip link set dev %iface% [[mtu %mtu%]] [[address %hwaddress%]] up + +- [[ /bin/ip route add default via %gateway% [[metric %metric%]] dev %iface% onlink ]] ++ [[ ip route add default via %gateway% [[metric %metric%]] dev %iface% onlink ]] + + down +- [[ /bin/ip route del default via %gateway% [[metric %metric%]] dev %iface% 2>&1 1>/dev/null || true ]] +- /bin/ip addr del %address%[[/%netmask%]] [[broadcast %broadcast%]] \ ++ [[ ip route del default via %gateway% [[metric %metric%]] dev %iface% 2>&1 1>/dev/null || true ]] ++ ip addr del %address%[[/%netmask%]] [[broadcast %broadcast%]] \ + [[peer %pointopoint%]] [[scope %scope%]] dev %iface% label %iface% +- /bin/ip link set dev %iface% down \ ++ ip link set dev %iface% down \ + if (iface_is_link()) + + method manual +@@ -63,12 +63,12 @@ method manual + hwaddress cleanup_hwaddress + + up +- [[/bin/ip link set dev %iface% mtu %mtu%]] +- [[/bin/ip link set dev %iface% address %hwaddress%]] +- /bin/ip link set dev %iface% up 2>/dev/null || true ++ [[ip link set dev %iface% mtu %mtu%]] ++ [[ip link set dev %iface% address %hwaddress%]] ++ ip link set dev %iface% up 2>/dev/null || true + + down +- /bin/ip link set dev %iface% down 2>/dev/null || true \ ++ ip link set dev %iface% down 2>/dev/null || true \ + if (iface_is_link() && !do_all) + + method dhcp +@@ -93,33 +93,33 @@ method dhcp + hwaddress cleanup_hwaddress + + up +- [[/bin/ip link set dev %iface% address %hwaddress%]] +- /sbin/dhclient -v -pf /run/dhclient.%iface%.pid -lf /var/lib/dhcp/dhclient.%iface%.leases -I -df /var/lib/dhcp/dhclient6.%iface%.leases %iface% \ ++ [[ip link set dev %iface% address %hwaddress%]] ++ dhclient -v -pf /run/dhclient.%iface%.pid -lf /var/lib/dhcp/dhclient.%iface%.leases -I -df /var/lib/dhcp/dhclient6.%iface%.leases %iface% \ + [[-e IF_METRIC=%metric%]] \ + if (execable("/sbin/dhclient")) +- /sbin/pump -i %iface% [[-h %hostname%]] [[-l %leasehours%]] \ ++ pump -i %iface% [[-h %hostname%]] [[-l %leasehours%]] \ + elsif (execable("/sbin/pump")) +- /sbin/udhcpc -n -p /run/udhcpc.%iface%.pid -i %iface% [[-x hostname:%hostname%]] \ ++ udhcpc -n -p /run/udhcpc.%iface%.pid -i %iface% [[-x hostname:%hostname%]] \ + elsif (execable("/sbin/udhcpc")) +- /sbin/dhcpcd [[-h %hostname%]] [[-i %vendor%]] [[-I %client%]] \ ++ dhcpcd [[-h %hostname%]] [[-i %vendor%]] [[-I %client%]] \ + [[-l %leasetime%]] [[-m %metric%]] %iface% \ + elsif (execable("/sbin/dhcpcd")) + echo 'No DHCP client software found!' 
>/dev/stderr; false \ + elsif (1) + + down +- /sbin/dhclient -v -r -pf /run/dhclient.%iface%.pid -lf /var/lib/dhcp/dhclient.%iface%.leases -I -df /var/lib/dhcp/dhclient6.%iface%.leases %iface% \ ++ dhclient -v -r -pf /run/dhclient.%iface%.pid -lf /var/lib/dhcp/dhclient.%iface%.leases -I -df /var/lib/dhcp/dhclient6.%iface%.leases %iface% \ + if (execable("/sbin/dhclient")) +- /sbin/pump -i %iface% -r \ ++ pump -i %iface% -r \ + elsif (execable("/sbin/pump")) +- if test -f /run/udhcpc.%iface%.pid; then kill -USR2 $(/bin/cat /run/udhcpc.%iface%.pid); kill -TERM $(/bin/cat /run/udhcpc.%iface%.pid); fi \ ++ if test -f /run/udhcpc.%iface%.pid; then kill -USR2 $(cat /run/udhcpc.%iface%.pid); kill -TERM $(cat /run/udhcpc.%iface%.pid); fi \ + elsif (execable("/sbin/udhcpc")) +- /sbin/dhcpcd -k %iface% \ ++ dhcpcd -k %iface% \ + elsif (execable("/sbin/dhcpcd")) + echo 'No DHCP client software found!' >/dev/stderr; false \ + elsif (1) + +- /bin/ip link set dev %iface% down \ ++ ip link set dev %iface% down \ + if (iface_is_link()) + + method bootp +@@ -134,11 +134,11 @@ method bootp + whatever it really is. + + up +- /sbin/bootpc [[--bootfile %bootfile%]] --dev %iface% [[--server %server%]] \ ++ bootpc [[--bootfile %bootfile%]] --dev %iface% [[--server %server%]] \ + [[--hwaddr %hwaddr%]] --returniffail --serverbcast + + down +- /bin/ip link set dev %iface% down \ ++ ip link set dev %iface% down \ + if (iface_is_link()) + + method tunnel +@@ -158,13 +158,13 @@ method tunnel + ttl time -- TTL setting + mtu size -- MTU size + up +- /bin/ip tunnel add %iface% mode %mode% remote %endpoint% [[local %local%]] \ ++ ip tunnel add %iface% mode %mode% remote %endpoint% [[local %local%]] \ + [[ttl %ttl%]] +- /bin/ip link set %iface% up [[mtu %mtu%]] +- /bin/ip addr add %address%/%netmask% dev %iface% [[peer %dstaddr%]] +- [[ /bin/ip route add default via %gateway% [[metric %metric%]] dev %iface% onlink ]] ++ ip link set %iface% up [[mtu %mtu%]] ++ ip addr add %address%/%netmask% dev %iface% [[peer %dstaddr%]] ++ [[ ip route add default via %gateway% [[metric %metric%]] dev %iface% onlink ]] + down +- /bin/ip tunnel del %iface% ++ ip tunnel del %iface% + + method ppp + description +@@ -175,9 +175,9 @@ method ppp + unit number -- Use /number/ as the ppp unit number. + options string -- Pass /string/ as additional options to pon. + up +- /usr/bin/pon [[%provider%]] [[unit %unit%]] [[%options%]] ++ pon [[%provider%]] [[unit %unit%]] [[%options%]] + down +- /usr/bin/poff [[%provider%]] ++ poff [[%provider%]] + + method wvdial + description +@@ -186,10 +186,10 @@ method wvdial + options + provider name -- Use /name/ as the provider (from /etc/wvdial.conf). + up +- /sbin/start-stop-daemon --start -x /usr/bin/wvdial \ ++ start-stop-daemon --start -x /usr/bin/wvdial \ + -p /run/wvdial.%iface%.pid -b -m -- [[ %provider% ]] + down +- /sbin/start-stop-daemon --stop -x /usr/bin/wvdial \ ++ start-stop-daemon --stop -x /usr/bin/wvdial \ + -p /run/wvdial.%iface%.pid -s 2 + + +@@ -200,9 +200,9 @@ method ipv4ll + known as APIPA or IPAC, and often colloquially referred to + as "Zeroconf address". + up +- /usr/sbin/avahi-autoipd -D %iface% ++ avahi-autoipd -D %iface% + down +- /usr/sbin/avahi-autoipd --kill %iface% ++ avahi-autoipd --kill %iface% + + architecture kfreebsd + +@@ -211,12 +211,12 @@ method loopback + This method may be used to define the IPv4 loopback interface. 
up - ifconfig %iface% 127.0.0.1 up \ +- /sbin/ifconfig %iface% 127.0.0.1 up \ - if (!iface_is_lo()) ++ ifconfig %iface% 127.0.0.1 up \ + if (iface_is_lo()) down - ifconfig %iface% down \ +- /sbin/ifconfig %iface% down \ - if (!iface_is_lo()) ++ ifconfig %iface% down \ + if (iface_is_lo()) method static description -@@ -371,11 +371,11 @@ method loopback +@@ -238,15 +238,15 @@ method static + hwaddress cleanup_hwaddress + + up +- [[ /sbin/ifconfig %iface% link %hwaddress%]] +- /sbin/ifconfig %iface% %address% [[netmask %netmask%]] [[broadcast %broadcast%]] \ ++ [[ ifconfig %iface% link %hwaddress%]] ++ ifconfig %iface% %address% [[netmask %netmask%]] [[broadcast %broadcast%]] \ + [[pointopoint %pointopoint%]] [[media %media%]] [[mtu %mtu%]] \ + up +- [[ /sbin/route add default %gateway% ]] ++ [[ route add default %gateway% ]] + + down +- [[ /sbin/route del default %gateway% 2>&1 1>/dev/null || true ]] +- /sbin/ifconfig %iface% down ++ [[ route del default %gateway% 2>&1 1>/dev/null || true ]] ++ ifconfig %iface% down + + method manual + description +@@ -279,30 +279,30 @@ method dhcp + hwaddress cleanup_hwaddress + + up +- [[/sbin/ifconfig %iface% link %hwaddress%]] +- /sbin/dhclient -v -pf /run/dhclient.%iface%.pid -lf /var/lib/dhcp/dhclient.%iface%.leases -I -df /var/lib/dhcp/dhclient6.%iface%.leases %iface% \ ++ [[ifconfig %iface% link %hwaddress%]] ++ dhclient -v -pf /run/dhclient.%iface%.pid -lf /var/lib/dhcp/dhclient.%iface%.leases -I -df /var/lib/dhcp/dhclient6.%iface%.leases %iface% \ + [[-e IF_METRIC=%metric%]] \ + if (execable("/sbin/dhclient")) +- /sbin/udhcpc -n -p /run/udhcpc.%iface%.pid -i %iface% [[-H %hostname%]] \ ++ udhcpc -n -p /run/udhcpc.%iface%.pid -i %iface% [[-H %hostname%]] \ + [[-c %client%]] \ + elsif (execable("/sbin/udhcpc")) +- /sbin/dhcpcd [[-h %hostname%]] [[-i %vendor%]] [[-I %client%]] \ ++ dhcpcd [[-h %hostname%]] [[-i %vendor%]] [[-I %client%]] \ + [[-l %leasetime%]] %iface% \ + elsif (execable("/sbin/dhcpcd")) + echo 'No DHCP client software found!' >/dev/stderr; false \ + elsif (1) + + down +- /sbin/dhclient -v -r -pf /run/dhclient.%iface%.pid -lf /var/lib/dhcp/dhclient.%iface%.leases -I -df /var/lib/dhcp/dhclient6.%iface%.leases %iface% \ ++ dhclient -v -r -pf /run/dhclient.%iface%.pid -lf /var/lib/dhcp/dhclient.%iface%.leases -I -df /var/lib/dhcp/dhclient6.%iface%.leases %iface% \ + if (execable("/sbin/dhclient")) +- if test -f /run/udhcpc.%iface%.pid; then kill -USR2 $(/bin/cat /run/udhcpc.%iface%.pid); kill -TERM $(/bin/cat /run/udhcpc.%iface%.pid); fi \ ++ if test -f /run/udhcpc.%iface%.pid; then kill -USR2 $(cat /run/udhcpc.%iface%.pid); kill -TERM $(cat /run/udhcpc.%iface%.pid); fi \ + elsif (execable("/sbin/udhcpc")) +- /sbin/dhcpcd -k %iface% \ ++ dhcpcd -k %iface% \ + elsif (execable("/sbin/dhcpcd")) + echo 'No DHCP client software found!' >/dev/stderr; false \ + elsif (1) + +- /sbin/ifconfig %iface% down ++ ifconfig %iface% down + + method bootp + description +@@ -316,11 +316,11 @@ method bootp + whatever it really is. + + up +- /sbin/bootpc [[--bootfile %bootfile%]] --dev %iface% [[--server %server%]] \ ++ bootpc [[--bootfile %bootfile%]] --dev %iface% [[--server %server%]] \ + [[--hwaddr %hwaddr%]] --returniffail --serverbcast + + down +- /sbin/ifconfig %iface% down ++ ifconfig %iface% down + + method ppp + description +@@ -331,9 +331,9 @@ method ppp + unit number -- Use /number/ as the ppp unit number. + options string -- Pass /string/ as additional options to pon. 
+ up +- /usr/bin/pon [[%provider%]] [[unit %unit%]] [[%options%]] ++ pon [[%provider%]] [[unit %unit%]] [[%options%]] + down +- /usr/bin/poff [[%provider%]] ++ poff [[%provider%]] + + method wvdial + description +@@ -342,10 +342,10 @@ method wvdial + options + provider name -- Use /name/ as the provider (from /etc/wvdial.conf). + up +- /sbin/start-stop-daemon --start -x /usr/bin/wvdial \ ++ start-stop-daemon --start -x /usr/bin/wvdial \ + -p /run/wvdial.%iface%.pid -b -m -- [[ %provider% ]] + down +- /sbin/start-stop-daemon --stop -x /usr/bin/wvdial \ ++ start-stop-daemon --stop -x /usr/bin/wvdial \ + -p /run/wvdial.%iface%.pid -s 2 + + +@@ -356,9 +356,9 @@ method ipv4ll + known as APIPA or IPAC, and often colloquially referred to + as "Zeroconf address". + up +- /usr/sbin/avahi-autoipd -D %iface% ++ avahi-autoipd -D %iface% + down +- /usr/sbin/avahi-autoipd --kill %iface% ++ avahi-autoipd --kill %iface% + architecture hurd + + method loopback +@@ -367,11 +367,11 @@ method loopback up inetutils-ifconfig --interface %iface% --address 127.0.0.1 --up \ @@ -78,26 +353,70 @@ index b176ab4ed03e..5fdfb14a0e1c 100644 method static description -diff --git a/inet6.defn b/inet6.defn -index 09325539cd01..4df64aff38cc 100644 ---- a/inet6.defn -+++ b/inet6.defn -@@ -33,11 +33,11 @@ method loopback - description - This method may be used to define the IPv6 loopback interface. +@@ -432,23 +432,23 @@ method dhcp + up -- -ip link set dev %iface% up 2>/dev/null if (!iface_is_lo()) -- -ip addr add dev %iface% ::1 2>/dev/null if (!iface_is_lo()) -+ -ip link set dev %iface% up 2>/dev/null if (iface_is_lo()) -+ -ip addr add dev %iface% ::1 2>/dev/null if (iface_is_lo()) + [[Warning: Option hwaddress: %hwaddress% not yet supported]] +- /sbin/dhclient -v -pf /run/dhclient.%iface///.%.pid -lf /var/lib/dhcp/dhclient.%iface///.%.leases -I -df /var/lib/dhcp/dhclient6.%iface///.%.leases %iface% \ ++ dhclient -v -pf /run/dhclient.%iface///.%.pid -lf /var/lib/dhcp/dhclient.%iface///.%.leases -I -df /var/lib/dhcp/dhclient6.%iface///.%.leases %iface% \ + if (execable("/sbin/dhclient")) +- /sbin/udhcpc -n -p /run/udhcpc.%iface///.%.pid -i %iface% [[-H %hostname%]] \ ++ udhcpc -n -p /run/udhcpc.%iface///.%.pid -i %iface% [[-H %hostname%]] \ + [[-c %client%]] \ + elsif (execable("/sbin/udhcpc")) +- /sbin/dhcpcd [[-h %hostname%]] [[-i %vendor%]] [[-I %client%]] \ ++ dhcpcd [[-h %hostname%]] [[-i %vendor%]] [[-I %client%]] \ + [[-l %leasetime%]] %iface% \ + elsif (execable("/sbin/dhcpcd")) + echo 'No DHCP client software found!' >/dev/stderr; false \ + elsif (1) + + down +- /sbin/dhclient -v -r -pf /run/dhclient.%iface///.%.pid -lf /var/lib/dhcp/dhclient.%iface///.%.leases -I -df /var/lib/dhcp/dhclient6.%iface///.%.leases %iface% \ ++ dhclient -v -r -pf /run/dhclient.%iface///.%.pid -lf /var/lib/dhcp/dhclient.%iface///.%.leases -I -df /var/lib/dhcp/dhclient6.%iface///.%.leases %iface% \ + if (execable("/sbin/dhclient")) +- if test -f /run/udhcpc.%iface///.%.pid; then kill -USR2 $(/bin/cat /run/udhcpc.%iface///.%.pid); kill -TERM $(/bin/cat /run/udhcpc.%iface///.%.pid); fi \ ++ if test -f /run/udhcpc.%iface///.%.pid; then kill -USR2 $(cat /run/udhcpc.%iface///.%.pid); kill -TERM $(cat /run/udhcpc.%iface///.%.pid); fi \ + elsif (execable("/sbin/udhcpc")) +- /sbin/dhcpcd -k %iface% \ ++ dhcpcd -k %iface% \ + elsif (execable("/sbin/dhcpcd")) + echo 'No DHCP client software found!' >/dev/stderr; false \ + elsif (1) +@@ -482,9 +482,9 @@ method ppp + unit number -- Use /number/ as the ppp unit number. 
+ options string -- Pass /string/ as additional options to pon. + up +- /usr/bin/pon [[%provider%]] [[unit %unit%]] [[%options%]] ++ pon [[%provider%]] [[unit %unit%]] [[%options%]] down -- -ip addr del dev %iface% ::1 2>/dev/null if (!iface_is_lo()) -- -ip link set dev %iface% down 2>/dev/null if (!iface_is_lo()) -+ -ip addr del dev %iface% ::1 2>/dev/null if (iface_is_lo()) -+ -ip link set dev %iface% down 2>/dev/null if (iface_is_lo()) +- /usr/bin/poff [[%provider%]] ++ poff [[%provider%]] - method static + method wvdial description +@@ -493,10 +493,10 @@ method wvdial + options + provider name -- Use /name/ as the provider (from /etc/wvdial.conf). + up +- /sbin/start-stop-daemon --start -x /usr/bin/wvdial \ ++ start-stop-daemon --start -x /usr/bin/wvdial \ + -p /run/wvdial.%iface///.%.pid -b -m -- [[ %provider% ]] + down +- /sbin/start-stop-daemon --stop -x /usr/bin/wvdial \ ++ start-stop-daemon --stop -x /usr/bin/wvdial \ + -p /run/wvdial.%iface///.%.pid -s 2 + + +@@ -507,6 +507,6 @@ method ipv4ll + known as APIPA or IPAC, and often colloquially referred to + as "Zeroconf address". + up +- /usr/sbin/avahi-autoipd -D %iface% ++ avahi-autoipd -D %iface% + down +- /usr/sbin/avahi-autoipd --kill %iface% ++ avahi-autoipd --kill %iface% -- -1.9.1 +2.4.0 diff --git a/import-layers/yocto-poky/meta/recipes-core/ifupdown/ifupdown_0.8.16.bb b/import-layers/yocto-poky/meta/recipes-core/ifupdown/ifupdown_0.8.16.bb new file mode 100644 index 000000000..5654528ae --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ifupdown/ifupdown_0.8.16.bb @@ -0,0 +1,46 @@ +SUMMARY = "ifupdown: basic ifup and ifdown used by initscripts" +DESCRIPTION = "High level tools to configure network interfaces \ +This package provides the tools ifup and ifdown which may be used to \ +configure (or, respectively, deconfigure) network interfaces, based on \ +the file /etc/network/interfaces." +LICENSE = "GPLv2" +LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f" + +SRC_URI = "git://anonscm.debian.org/git/collab-maint/ifupdown.git \ + file://defn2-c-man-don-t-rely-on-dpkg-architecture-to-set-a.patch \ + file://inet-6-.defn-fix-inverted-checks-for-loopback.patch \ + file://99_network \ + " +SRCREV = "11b9f99f7ecc7052497e6786156cfed531f11823" + +S = "${WORKDIR}/git" + + +inherit update-alternatives + +do_compile () { + chmod a+rx *.pl *.sh + oe_runmake 'CC=${CC}' "CFLAGS=${CFLAGS} -Wall -W -D'IFUPDOWN_VERSION=\"${PV}\"'" +} + +do_install () { + install -d ${D}${mandir}/man8 \ + ${D}${mandir}/man5 \ + ${D}${base_sbindir} + + # If volatiles are used, then we'll also need /run/network there too. 
+ install -d ${D}/etc/default/volatiles + install -m 0644 ${WORKDIR}/99_network ${D}/etc/default/volatiles + + install -m 0755 ifup ${D}${base_sbindir}/ + ln ${D}${base_sbindir}/ifup ${D}${base_sbindir}/ifdown + install -m 0644 ifup.8 ${D}${mandir}/man8 + install -m 0644 interfaces.5 ${D}${mandir}/man5 + cd ${D}${mandir}/man8 && ln -s ifup.8 ifdown.8 +} + +ALTERNATIVE_PRIORITY = "100" +ALTERNATIVE_${PN} = "ifup ifdown" + +ALTERNATIVE_LINK_NAME[ifup] = "${base_sbindir}/ifup" +ALTERNATIVE_LINK_NAME[ifdown] = "${base_sbindir}/ifdown" diff --git a/import-layers/yocto-poky/meta/recipes-core/ifupdown/ifupdown_0.8.2.bb b/import-layers/yocto-poky/meta/recipes-core/ifupdown/ifupdown_0.8.2.bb deleted file mode 100644 index 9de01f961..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/ifupdown/ifupdown_0.8.2.bb +++ /dev/null @@ -1,49 +0,0 @@ -SUMMARY = "ifupdown: basic ifup and ifdown used by initscripts" -DESCRIPTION = "High level tools to configure network interfaces \ -This package provides the tools ifup and ifdown which may be used to \ -configure (or, respectively, deconfigure) network interfaces, based on \ -the file /etc/network/interfaces." -LICENSE = "GPLv2" -LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f" - -SRC_URI = "git://anonscm.debian.org/git/collab-maint/ifupdown.git;protocol=https \ - file://defn2-c-man-don-t-rely-on-dpkg-architecture-to-set-a.patch \ - file://inet-6-.defn-fix-inverted-checks-for-loopback.patch \ - file://99_network \ - " -SRCREV = "05ea2fd4f49bb1201aeef2a42efbcff8f336112f" - -S = "${WORKDIR}/git" - - -inherit update-rc.d update-alternatives - -do_compile () { - chmod a+rx *.pl *.sh - oe_runmake 'CC=${CC}' "CFLAGS=${CFLAGS} -Wall -W -D'IFUPDOWN_VERSION=\"${PV}\"'" -} - -do_install () { - install -d ${D}${mandir}/man8 \ - ${D}${mandir}/man5 \ - ${D}${base_sbindir} - - # If volatiles are used, then we'll also need /run/network there too. - install -d ${D}/etc/default/volatiles - install -m 0644 ${WORKDIR}/99_network ${D}/etc/default/volatiles - - install -m 0755 ifup ${D}${base_sbindir}/ - ln ${D}${base_sbindir}/ifup ${D}${base_sbindir}/ifdown - install -m 0644 ifup.8 ${D}${mandir}/man8 - install -m 0644 interfaces.5 ${D}${mandir}/man5 - cd ${D}${mandir}/man8 && ln -s ifup.8 ifdown.8 -} - -ALTERNATIVE_PRIORITY = "100" -ALTERNATIVE_${PN} = "ifup ifdown" - -ALTERNATIVE_LINK_NAME[ifup] = "${base_sbindir}/ifup" -ALTERNATIVE_LINK_NAME[ifdown] = "${base_sbindir}/ifdown" - -INITSCRIPT_NAME = "ifup" -INITSCRIPT_PARAMS = "start 39 S . stop 39 0 6 1 ." diff --git a/import-layers/yocto-poky/meta/recipes-core/images/build-appliance-image/README_VirtualBox_Toaster.txt b/import-layers/yocto-poky/meta/recipes-core/images/build-appliance-image/README_VirtualBox_Toaster.txt new file mode 100644 index 000000000..a0aede2fb --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/images/build-appliance-image/README_VirtualBox_Toaster.txt @@ -0,0 +1,78 @@ + +Running Toaster in VirtualBox +============================= + +Toaster is launched via the command in VM: + + $ source toaster start webport= + +The interaction with Toaster web server is done via a host internet +browser. +The particular steps depend on the actual networking being used +by the VirtualBox. + + +Bridged Network +=============== + +Find out your VM network IP address: + + $ ifconfig + +IP address is listed under eth0 inet addr. 
+It should be something like: + inet addr:192.168.1.18 + +Launch the Toaster web server in VM: + + $ source toaster start webport=192.168.1.18:8000 + +Interact with the Toaster web server with your host browser using URL: + + http://192.168.1.18:8000 + + +NAT Network +=========== +Find out your VM network IP address: + + $ ifconfig + +IP address is listed under eth0 inet addr. +For NAT network it should be something like: + inet addr:10.0.2.15 + +When using NAT network, the VM web server can be accessed using +Port Forwarding. + +Using the VirtualBox GUI, navigate to: + Settings->Network->Adapter1 + +You should set: + Attached to: NAT + +Select "Advanced", click on "Port Forwarding" + +This will open a new dialog box "Port Forwarding Rules". +Create a new rule that looks like this: + +| Name | Protocol | Host IP | Host Port | Guest IP | Guest Port | ++-------+----------+---------+-----------+----------+------------+ +| Rule1 | TCP | | 8000 | | 8000 | +------------------------------------------------------------------ + +Now we can launch the Toaster web server in VM: + + $ source toaster start webport=10.0.2.15:8000 + +Interact with the Toaster web server with your host browser using URL: + + http://127.0.0.1:8000 + + + + + + + + diff --git a/import-layers/yocto-poky/meta/recipes-core/images/build-appliance-image_15.0.0.bb b/import-layers/yocto-poky/meta/recipes-core/images/build-appliance-image_15.0.0.bb index 0803ddb65..045781c21 100644 --- a/import-layers/yocto-poky/meta/recipes-core/images/build-appliance-image_15.0.0.bb +++ b/import-layers/yocto-poky/meta/recipes-core/images/build-appliance-image_15.0.0.bb @@ -1,5 +1,5 @@ SUMMARY = "An image containing the build system itself" -DESCRIPTION = "An image containing the build system that you can boot and run using either VMware Player or VMware Workstation." +DESCRIPTION = "An image containing the build system that you can boot and run using either VirtualBox, VMware Player or VMware Workstation." HOMEPAGE = "http://www.yoctoproject.org/documentation/build-appliance" LICENSE = "MIT" @@ -7,7 +7,8 @@ LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" IMAGE_INSTALL = "packagegroup-core-boot packagegroup-core-ssh-openssh packagegroup-self-hosted \ - kernel-dev kernel-devsrc connman connman-plugin-ethernet dhcp-client" + kernel-dev kernel-devsrc connman connman-plugin-ethernet dhcp-client \ + tzdata python3-pip perl-misc" IMAGE_FEATURES += "x11-base package-management splash" @@ -17,16 +18,17 @@ IMAGE_ROOTFS_EXTRA_SPACE = "41943040" # Do a quiet boot with limited console messages APPEND += "rootfstype=ext4 quiet" -DEPENDS = "zip-native" +DEPENDS = "zip-native python3-pip-native" IMAGE_FSTYPES = "vmdk" -inherit core-image module-base +inherit core-image module-base setuptools3 -SRCREV ?= "e92165f5cea1c345672dd866df6a44d1cd8b97ce" -SRC_URI = "git://git.yoctoproject.org/poky;branch=morty \ +SRCREV ?= "b859272ad4053185d4980cac05481b430e05345f" +SRC_URI = "git://git.yoctoproject.org/poky;branch=pyro \ file://Yocto_Build_Appliance.vmx \ file://Yocto_Build_Appliance.vmxf \ file://README_VirtualBox_Guest_Additions.txt \ + file://README_VirtualBox_Toaster.txt \ " BA_INCLUDE_SOURCES ??= "0" @@ -54,7 +56,11 @@ fakeroot do_populate_poky_src () { # Place the README_VirtualBox_Guest_Additions file in builders home folder. 
cp ${WORKDIR}/README_VirtualBox_Guest_Additions.txt ${IMAGE_ROOTFS}/home/builder/ + # Place the README_VirtualBox_Toaster file in builders home folder. + cp ${WORKDIR}/README_VirtualBox_Toaster.txt ${IMAGE_ROOTFS}/home/builder/ + # Create a symlink, needed for out-of-tree kernel modules build + rm -f ${IMAGE_ROOTFS}/lib/modules/${KERNEL_VERSION}/build lnr ${IMAGE_ROOTFS}${KERNEL_SRC_PATH} ${IMAGE_ROOTFS}/lib/modules/${KERNEL_VERSION}/build echo "INHERIT += \"rm_work\"" >> ${IMAGE_ROOTFS}/home/builder/poky/build/conf/auto.conf @@ -82,12 +88,22 @@ fakeroot do_populate_poky_src () { echo "builder ALL=(ALL) NOPASSWD: ALL" >> ${IMAGE_ROOTFS}/etc/sudoers # Load tap/tun at startup + rm -f ${IMAGE_ROOTFS}/sbin/iptables lnr ${IMAGE_ROOTFS}/usr/sbin/iptables ${IMAGE_ROOTFS}/sbin/iptables echo "tun" >> ${IMAGE_ROOTFS}/etc/modules # Use Clearlooks GTK+ theme mkdir -p ${IMAGE_ROOTFS}/etc/gtk-2.0 echo 'gtk-theme-name = "Clearlooks"' > ${IMAGE_ROOTFS}/etc/gtk-2.0/gtkrc + + # Install modules needed for toaster + export STAGING_LIBDIR=${STAGING_LIBDIR_NATIVE} + export STAGING_INCDIR=${STAGING_INCDIR_NATIVE} + export HOME=${IMAGE_ROOTFS}/home/builder + mkdir -p ${IMAGE_ROOTFS}/home/builder/.cache/pip + pip3 install --user -I -U -v -r ${IMAGE_ROOTFS}/home/builder/poky/bitbake/toaster-requirements.txt + chown -R builder.builder ${IMAGE_ROOTFS}/home/builder/.local + chown -R builder.builder ${IMAGE_ROOTFS}/home/builder/.cache } IMAGE_PREPROCESS_COMMAND += "do_populate_poky_src; " diff --git a/import-layers/yocto-poky/meta/recipes-core/images/core-image-minimal.bb b/import-layers/yocto-poky/meta/recipes-core/images/core-image-minimal.bb index 8dd77b347..4630026aa 100644 --- a/import-layers/yocto-poky/meta/recipes-core/images/core-image-minimal.bb +++ b/import-layers/yocto-poky/meta/recipes-core/images/core-image-minimal.bb @@ -1,6 +1,6 @@ SUMMARY = "A small image just capable of allowing a device to boot." -IMAGE_INSTALL = "packagegroup-core-boot ${ROOTFS_PKGMANAGE_BOOTSTRAP} ${CORE_IMAGE_EXTRA_INSTALL}" +IMAGE_INSTALL = "packagegroup-core-boot ${CORE_IMAGE_EXTRA_INSTALL}" IMAGE_LINGUAS = " " diff --git a/import-layers/yocto-poky/meta/recipes-core/images/core-image-tiny-initramfs.bb b/import-layers/yocto-poky/meta/recipes-core/images/core-image-tiny-initramfs.bb new file mode 100644 index 000000000..184727ddf --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/images/core-image-tiny-initramfs.bb @@ -0,0 +1,42 @@ +# Simple initramfs image artifact generation for tiny images. +DESCRIPTION = "Tiny image capable of booting a device. The kernel includes \ +the Minimal RAM-based Initial Root Filesystem (initramfs), which finds the \ +first 'init' program more efficiently. core-image-tiny-initramfs doesn't \ +actually generate an image but rather generates boot and rootfs artifacts \ +into a common location that can subsequently be picked up by external image \ +generation tools such as wic." 
+ +PACKAGE_INSTALL = "initramfs-live-boot packagegroup-core-boot dropbear ${VIRTUAL-RUNTIME_base-utils} udev base-passwd ${ROOTFS_BOOTSTRAP_INSTALL}" + +# Do not pollute the initrd image with rootfs features +IMAGE_FEATURES = "" + +export IMAGE_BASENAME = "core-image-tiny-initramfs" +IMAGE_LINGUAS = "" + +LICENSE = "MIT" + +# don't actually generate an image, just the artifacts needed for one +IMAGE_FSTYPES = "${INITRAMFS_FSTYPES} wic" + +inherit core-image + +IMAGE_ROOTFS_SIZE = "8192" +IMAGE_ROOTFS_EXTRA_SPACE = "0" + +BAD_RECOMMENDATIONS += "busybox-syslog" + +# Use the same restriction as initramfs-live-install +COMPATIBLE_HOST = "(i.86|x86_64).*-linux" + +python tinyinitrd () { + # Modify our init file so the user knows we drop to shell prompt on purpose + newinit = None + with open(d.expand('${IMAGE_ROOTFS}/init'), 'r') as init: + newinit = init.read() + newinit = newinit.replace('Cannot find $ROOT_IMAGE file in /run/media/* , dropping to a shell ', 'Poky Tiny Reference Distribution:') + with open(d.expand('${IMAGE_ROOTFS}/init'), 'w') as init: + init.write(newinit) +} + +IMAGE_PREPROCESS_COMMAND += "tinyinitrd;" diff --git a/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi-testfs.sh b/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi-testfs.sh index b56210915..9c4b263d5 100644 --- a/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi-testfs.sh +++ b/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi-testfs.sh @@ -171,19 +171,19 @@ if [ -f /run/media/$1/EFI/BOOT/grub.cfg ]; then fi if [ -d /run/media/$1/loader ]; then - GUMMIBOOT_CFGS="/ssd/loader/entries/*.conf" - # copy config files for gummiboot + SYSTEMDBOOT_CFGS="/ssd/loader/entries/*.conf" + # copy config files for systemd-boot cp -dr /run/media/$1/loader /ssd # delete the install entry rm -f /ssd/loader/entries/install.conf # delete the initrd lines - sed -i "/initrd /d" $GUMMIBOOT_CFGS + sed -i "/initrd /d" $SYSTEMDBOOT_CFGS # delete any LABEL= strings - sed -i "s/ LABEL=[^ ]*/ /" $GUMMIBOOT_CFGS + sed -i "s/ LABEL=[^ ]*/ /" $SYSTEMDBOOT_CFGS # delete any root= strings - sed -i "s/ root=[^ ]*/ /" $GUMMIBOOT_CFGS + sed -i "s/ root=[^ ]*/ /" $SYSTEMDBOOT_CFGS # add the root= and other standard boot options - sed -i "s@options *@options root=$rootfs rw $rootwait quiet @" $GUMMIBOOT_CFGS + sed -i "s@options *@options root=$rootfs rw $rootwait quiet @" $SYSTEMDBOOT_CFGS # Add the test label echo -ne "title test\nlinux /test-kernel\noptions root=$testfs rw $rootwait quiet\n" > /ssd/loader/entries/test.conf fi diff --git a/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi.sh b/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi.sh index 441e25238..5ad3a60c0 100644 --- a/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi.sh +++ b/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install-efi.sh @@ -22,6 +22,8 @@ live_dev_name=${live_dev_name#\/dev/} case $live_dev_name in mmcblk*) ;; + nvme*) + ;; *) live_dev_name=${live_dev_name%%[0-9]*} ;; @@ -146,7 +148,8 @@ swap_start=$((rootfs_end)) # 2) they are detected asynchronously (need rootwait) rootwait="" part_prefix="" -if [ ! "${device#/dev/mmcblk}" = "${device}" ]; then +if [ ! "${device#/dev/mmcblk}" = "${device}" ] || \ + [ ! 
"${device#/dev/nvme}" = "${device}" ]; then part_prefix="p" rootwait="rootwait" fi @@ -242,19 +245,19 @@ fi if [ -d /run/media/$1/loader ]; then rootuuid=$(blkid -o value -s PARTUUID ${rootfs}) - GUMMIBOOT_CFGS="/boot/loader/entries/*.conf" - # copy config files for gummiboot + SYSTEMDBOOT_CFGS="/boot/loader/entries/*.conf" + # copy config files for systemd-boot cp -dr /run/media/$1/loader /boot # delete the install entry rm -f /boot/loader/entries/install.conf # delete the initrd lines - sed -i "/initrd /d" $GUMMIBOOT_CFGS + sed -i "/initrd /d" $SYSTEMDBOOT_CFGS # delete any LABEL= strings - sed -i "s/ LABEL=[^ ]*/ /" $GUMMIBOOT_CFGS + sed -i "s/ LABEL=[^ ]*/ /" $SYSTEMDBOOT_CFGS # delete any root= strings - sed -i "s/ root=[^ ]*/ /" $GUMMIBOOT_CFGS + sed -i "s/ root=[^ ]*/ /" $SYSTEMDBOOT_CFGS # add the root= and other standard boot options - sed -i "s@options *@options root=PARTUUID=$rootuuid rw $rootwait quiet @" $GUMMIBOOT_CFGS + sed -i "s@options *@options root=PARTUUID=$rootuuid rw $rootwait quiet @" $SYSTEMDBOOT_CFGS fi umount /tgt_root diff --git a/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install.sh b/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install.sh index 04ce5fb4b..572613ecd 100644 --- a/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install.sh +++ b/import-layers/yocto-poky/meta/recipes-core/initrdscripts/files/init-install.sh @@ -21,6 +21,8 @@ live_dev_name=${live_dev_name#\/dev/} case $live_dev_name in mmcblk*) ;; + nvme*) + ;; *) live_dev_name=${live_dev_name%%[0-9]*} ;; @@ -153,7 +155,8 @@ swap_start=$((rootfs_end)) # 2) they are detected asynchronously (need rootwait) rootwait="" part_prefix="" -if [ ! "${device#/dev/mmcblk}" = "${device}" ]; then +if [ ! "${device#/dev/mmcblk}" = "${device}" ] || \ + [ ! "${device#/dev/nvme}" = "${device}" ]; then part_prefix="p" rootwait="rootwait" fi diff --git a/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/populate-volatile.sh b/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/populate-volatile.sh index ce4622a5e..22a71ecaa 100755 --- a/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/populate-volatile.sh +++ b/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/populate-volatile.sh @@ -36,7 +36,7 @@ create_file() { [ "${VERBOSE}" != "no" ] && echo "Target already exists. Skipping." } || { if [ -z "$ROOT_DIR" ]; then - eval $EXEC & + eval $EXEC else # Creating some files at rootfs time may fail and should fail, # but these failures should not be logged to make sure the do_rootfs @@ -70,7 +70,7 @@ mk_dir() { link_file() { EXEC=" if [ -L \"$2\" ]; then - [ \"\$(readlink -f \"$2\")\" != \"\$(readlink -f \"$1\")\" ] && { rm -f \"$2\"; ln -sf \"$1\" \"$2\"; }; + [ \"\$(readlink -f \"$2\")\" != \"$1\" ] && { rm -f \"$2\"; ln -sf \"$1\" \"$2\"; }; elif [ -d \"$2\" ]; then if awk '\$2 == \"$2\" {exit 1}' /proc/mounts; then cp -a $2/* $1 2>/dev/null; @@ -86,7 +86,7 @@ link_file() { test "$VOLATILE_ENABLE_CACHE" = yes && echo " $EXEC" >> /etc/volatile.cache.build if [ -z "$ROOT_DIR" ]; then - eval $EXEC & + eval $EXEC else # For the same reason with create_file(), failures should # not be logged. 
@@ -150,9 +150,9 @@ apply_cfgfile() { return 1 } - cat ${CFGFILE} | grep -v "^#" | \ - while read LINE; do - eval `echo "$LINE" | sed -n "s/\(.*\)\ \(.*\) \(.*\)\ \(.*\)\ \(.*\)\ \(.*\)/TTYPE=\1 ; TUSER=\2; TGROUP=\3; TMODE=\4; TNAME=\5 TLTARGET=\6/p"` + cat ${CFGFILE} | sed 's/#.*//' | \ + while read TTYPE TUSER TGROUP TMODE TNAME TLTARGET; do + test -z "${TLTARGET}" && continue TNAME=${ROOT_DIR}${TNAME} [ "${VERBOSE}" != "no" ] && echo "Checking for -${TNAME}-." @@ -187,7 +187,7 @@ apply_cfgfile() { case "${TTYPE}" in "f") [ "${VERBOSE}" != "no" ] && echo "Creating file -${TNAME}-." - create_file "${TNAME}" & + create_file "${TNAME}" ;; "d") [ "${VERBOSE}" != "no" ] && echo "Creating directory -${TNAME}-." mk_dir "${TNAME}" diff --git a/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/sysfs.sh b/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/sysfs.sh index 0a52c90da..f5b5b9904 100644 --- a/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/sysfs.sh +++ b/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/sysfs.sh @@ -22,6 +22,10 @@ if [ -e /sys/kernel/debug ] && grep -q debugfs /proc/filesystems; then mount -t debugfs debugfs /sys/kernel/debug fi +if [ -e /sys/kernel/config ] && grep -q configfs /proc/filesystems; then + mount -t configfs configfs /sys/kernel/config +fi + if ! [ -e /dev/zero ] && [ -e /dev ] && grep -q devtmpfs /proc/filesystems; then mount -n -t devtmpfs devtmpfs /dev fi diff --git a/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/volatiles b/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/volatiles index 297245d0e..bc17c4553 100644 --- a/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/volatiles +++ b/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts-1.0/volatiles @@ -1,11 +1,13 @@ # This configuration file lists filesystem objects that should get verified # during startup and be created if missing. # -# Every line must either be a comment starting with # -# or a definition of format: +# Entries have the following format: # # where the items are separated by whitespace ! # +# The # character introduces a comment lasting until end of line. +# Blank lines are ignored. 
+# # : d|f|l : (d)irectory|(f)ile|(l)ink # # A linking example: diff --git a/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts_1.0.bb b/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts_1.0.bb index 8f110b005..2e4f7e466 100644 --- a/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts_1.0.bb +++ b/import-layers/yocto-poky/meta/recipes-core/initscripts/initscripts_1.0.bb @@ -44,7 +44,7 @@ KERNEL_VERSION = "" inherit update-alternatives DEPENDS_append = " update-rc.d-native" -DEPENDS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}" +PACKAGE_WRITE_DEPS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}" PACKAGES =+ "${PN}-functions" RDEPENDS_${PN} = "${PN}-functions \ diff --git a/import-layers/yocto-poky/meta/recipes-core/kbd/kbd_2.0.3.bb b/import-layers/yocto-poky/meta/recipes-core/kbd/kbd_2.0.3.bb deleted file mode 100644 index 54d16835f..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/kbd/kbd_2.0.3.bb +++ /dev/null @@ -1,53 +0,0 @@ -SUMMARY = "Keytable files and keyboard utilities" -# everything minus console-fonts is GPLv2+ -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://COPYING;md5=a5fcc36121d93e1f69d96a313078c8b5" -DEPENDS = "libcheck" - -inherit autotools gettext ptest pkgconfig - -RREPLACES_${PN} = "console-tools" -RPROVIDES_${PN} = "console-tools" -RCONFLICTS_${PN} = "console-tools" - -SRC_URI = "${KERNELORG_MIRROR}/linux/utils/${BPN}/${BP}.tar.xz \ - file://run-ptest \ - ${@bb.utils.contains('DISTRO_FEATURES', 'ptest', 'file://set-proper-path-of-resources.patch', '', d)} \ - " - -SRC_URI[md5sum] = "231b46e7142eb41ea3ae06d2ded3c208" -SRC_URI[sha256sum] = "7a899de1c0eb75f3aea737095a736f2375e1cbfbe693fc14a3fe0bfb4649fb5e" - -PACKAGECONFIG ?= "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}" -PACKAGECONFIG[pam] = "--enable-vlock, --disable-vlock, libpam," - -do_compile_ptest() { - oe_runmake -C ${B}/tests dumpkeys-fulltable alt-is-meta -} - -do_install_ptest() { - install -D ${B}/tests/Makefile ${D}${PTEST_PATH}/tests/Makefile - sed -i -e '/Makefile:/,/^$/d' -e '/%: %.in/,/^$/d' \ - -e '/libkeymap_.*_SOURCES =/d' -e '/$(EXEEXT):/,/^$/d' ${D}${PTEST_PATH}/tests/Makefile - - find ${B}/tests -executable -exec install {} ${D}${PTEST_PATH}/tests \; - find ${S}/tests \( -name \*.map -o -name \*.bin -o -name \*.output \) -exec install {} ${D}${PTEST_PATH}/tests \; - - install -D -m 755 ${S}/config/test-driver ${D}${PTEST_PATH}/config/test-driver -} - -PACKAGES += "${PN}-consolefonts ${PN}-keymaps ${PN}-unimaps ${PN}-consoletrans" - -FILES_${PN}-consolefonts = "${datadir}/consolefonts" -FILES_${PN}-consoletrans = "${datadir}/consoletrans" -FILES_${PN}-keymaps = "${datadir}/keymaps" -FILES_${PN}-unimaps = "${datadir}/unimaps" - -RDEPENDS_${PN}-ptest = "make" - -inherit update-alternatives - -ALTERNATIVE_${PN} = "chvt deallocvt fgconsole openvt" -ALTERNATIVE_PRIORITY = "100" - -BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-core/kbd/kbd_2.0.4.bb b/import-layers/yocto-poky/meta/recipes-core/kbd/kbd_2.0.4.bb new file mode 100644 index 000000000..65325c0ea --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/kbd/kbd_2.0.4.bb @@ -0,0 +1,53 @@ +SUMMARY = "Keytable files and keyboard utilities" +# everything minus console-fonts is GPLv2+ +LICENSE = "GPLv2+" +LIC_FILES_CHKSUM = "file://COPYING;md5=a5fcc36121d93e1f69d96a313078c8b5" +DEPENDS = "libcheck" + +inherit autotools gettext ptest pkgconfig + 
+RREPLACES_${PN} = "console-tools" +RPROVIDES_${PN} = "console-tools" +RCONFLICTS_${PN} = "console-tools" + +SRC_URI = "${KERNELORG_MIRROR}/linux/utils/${BPN}/${BP}.tar.xz \ + file://run-ptest \ + ${@bb.utils.contains('DISTRO_FEATURES', 'ptest', 'file://set-proper-path-of-resources.patch', '', d)} \ + " + +SRC_URI[md5sum] = "c1635a5a83b63aca7f97a3eab39ebaa6" +SRC_URI[sha256sum] = "5fd90af6beb225a9bb9b9fb414c090fba53c9a55793e172f508cd43652e59a88" + +PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" +PACKAGECONFIG[pam] = "--enable-vlock, --disable-vlock, libpam," + +do_compile_ptest() { + oe_runmake -C ${B}/tests dumpkeys-fulltable alt-is-meta +} + +do_install_ptest() { + install -D ${B}/tests/Makefile ${D}${PTEST_PATH}/tests/Makefile + sed -i -e '/Makefile:/,/^$/d' -e '/%: %.in/,/^$/d' \ + -e '/libkeymap_.*_SOURCES =/d' -e '/$(EXEEXT):/,/^$/d' ${D}${PTEST_PATH}/tests/Makefile + + find ${B}/tests -executable -exec install {} ${D}${PTEST_PATH}/tests \; + find ${S}/tests \( -name \*.map -o -name \*.bin -o -name \*.output \) -exec install {} ${D}${PTEST_PATH}/tests \; + + install -D -m 755 ${S}/config/test-driver ${D}${PTEST_PATH}/config/test-driver +} + +PACKAGES += "${PN}-consolefonts ${PN}-keymaps ${PN}-unimaps ${PN}-consoletrans" + +FILES_${PN}-consolefonts = "${datadir}/consolefonts" +FILES_${PN}-consoletrans = "${datadir}/consoletrans" +FILES_${PN}-keymaps = "${datadir}/keymaps" +FILES_${PN}-unimaps = "${datadir}/unimaps" + +RDEPENDS_${PN}-ptest = "make" + +inherit update-alternatives + +ALTERNATIVE_${PN} = "chvt deallocvt fgconsole openvt" +ALTERNATIVE_PRIORITY = "100" + +BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/0001-Make-ptest-run-the-python-tests-if-python-is-enabled.patch b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/0001-Make-ptest-run-the-python-tests-if-python-is-enabled.patch new file mode 100644 index 000000000..d1ad55116 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/0001-Make-ptest-run-the-python-tests-if-python-is-enabled.patch @@ -0,0 +1,99 @@ +From 78dbd4c09d617a9cb730d796f94ee4d93840d3cc Mon Sep 17 00:00:00 2001 +From: Peter Kjellerstedt +Date: Fri, 9 Jun 2017 17:50:46 +0200 +Subject: [PATCH] Make ptest run the python tests if python is enabled + +One of the tests (tstLastError.py) needed a minor correction. It might +be due to the fact that the tests are forced to run with Python 3. + +Upstream-Status: Inappropriate [OE specific] +Signed-off-by: Peter Kjellerstedt +--- + Makefile.am | 2 +- + python/Makefile.am | 9 +++++++++ + python/tests/Makefile.am | 12 ++++++++++-- + python/tests/tstLastError.py | 2 +- + 4 files changed, 21 insertions(+), 4 deletions(-) + +diff --git a/Makefile.am b/Makefile.am +index e181ee0..7960e7d 100644 +--- a/Makefile.am ++++ b/Makefile.am +@@ -207,9 +207,9 @@ install-ptest: + install $(noinst_PROGRAMS) $(DESTDIR)) + cp -r $(srcdir)/test $(DESTDIR) + cp -r $(srcdir)/result $(DESTDIR) +- cp -r $(srcdir)/python $(DESTDIR) + cp Makefile $(DESTDIR) + sed -i -e 's|^Makefile:|_Makefile:|' $(DESTDIR)/Makefile ++ $(MAKE) -C python install-ptest + + runtests: + [ -d test ] || $(LN_S) $(srcdir)/test . 
+diff --git a/python/Makefile.am b/python/Makefile.am +index 34aed96..ba3ec6a 100644 +--- a/python/Makefile.am ++++ b/python/Makefile.am +@@ -48,7 +48,16 @@ GENERATED = libxml2class.py libxml2class.txt $(BUILT_SOURCES) + + $(GENERATED): $(srcdir)/generator.py $(API_DESC) + $(PYTHON) $(srcdir)/generator.py $(srcdir) ++ ++install-ptest: ++ cp -r $(srcdir) $(DESTDIR) ++ sed -e 's|^Makefile:|_Makefile:|' \ ++ -e 's|^\(tests test:\) all|\1|' Makefile >$(DESTDIR)/python/Makefile ++ $(MAKE) -C tests install-ptest ++else ++install-ptest: + endif + ++.PHONY: tests test + tests test: all + cd tests && $(MAKE) tests +diff --git a/python/tests/Makefile.am b/python/tests/Makefile.am +index 95ebead..418e21e 100644 +--- a/python/tests/Makefile.am ++++ b/python/tests/Makefile.am +@@ -59,6 +59,11 @@ XMLS= \ + CLEANFILES = core tmp.xml *.pyc + + if WITH_PYTHON ++install-ptest: ++ cp -r $(srcdir) $(DESTDIR)/python ++ sed -e 's|^Makefile:|_Makefile:|' \ ++ -e 's|^\(srcdir = \).*|\1.|' Makefile >$(DESTDIR)/python/tests/Makefile ++ + tests: $(PYTESTS) + @for f in $(XMLS) ; do test -f $$f || $(LN_S) $(srcdir)/$$f . ; done + @echo "## running Python regression tests" +@@ -68,8 +73,11 @@ tests: $(PYTESTS) + export LD_LIBRARY_PATH; \ + for test in $(PYTESTS) ; \ + do log=`$(PYTHON) $(srcdir)/$$test` ; \ +- if [ "`echo $$log | grep OK`" = "" ] ; then \ +- echo "-- $$test" ; echo "$$log" ; fi ; done) ++ if [ "`echo $$log | grep OK`" ]; then \ ++ echo "PASS: $$test"; else \ ++ echo "$$log"; echo "FAIL: $$test"; fi; done) + else ++install-ptest: ++ + tests: + endif +diff --git a/python/tests/tstLastError.py b/python/tests/tstLastError.py +index d5f9be7..3e5bef8 100755 +--- a/python/tests/tstLastError.py ++++ b/python/tests/tstLastError.py +@@ -25,7 +25,7 @@ class TestCase(unittest.TestCase): + when the exception is raised, check the libxml2.lastError for + expected values.""" + # disable the default error handler +- libxml2.registerErrorHandler(None,None) ++ libxml2.registerErrorHandler(lambda ctx,str: None,None) + try: + f(*args) + except exc: +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2016-9318.patch b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2016-9318.patch deleted file mode 100644 index 3581ab83d..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/CVE-2016-9318.patch +++ /dev/null @@ -1,207 +0,0 @@ -From 7fa1cd31552d52d50a9101f07c816ff6dd2d9f19 Mon Sep 17 00:00:00 2001 -From: Doran Moppert -Date: Fri, 7 Apr 2017 16:45:56 +0200 -Subject: [PATCH] Add an XML_PARSE_NOXXE flag to block all entities loading - even local - -For https://bugzilla.gnome.org/show_bug.cgi?id=772726 - -* include/libxml/parser.h: Add a new parser flag XML_PARSE_NOXXE -* elfgcchack.h, xmlIO.h, xmlIO.c: associated loading routine -* include/libxml/xmlerror.h: new error raised -* xmllint.c: adds --noxxe flag to activate the option - -Upstream-Status: Backport -CVE: CVE-2016-9318 - -Signed-off-by: Catalin Enache ---- - elfgcchack.h | 10 ++++++++++ - include/libxml/parser.h | 3 ++- - include/libxml/xmlIO.h | 8 ++++++++ - include/libxml/xmlerror.h | 1 + - parser.c | 4 ++++ - xmlIO.c | 40 +++++++++++++++++++++++++++++++++++----- - xmllint.c | 5 +++++ - 7 files changed, 65 insertions(+), 6 deletions(-) - -diff --git a/elfgcchack.h b/elfgcchack.h -index 8c52884..1b81dcd 100644 ---- a/elfgcchack.h -+++ b/elfgcchack.h -@@ -6547,6 +6547,16 @@ extern __typeof (xmlNoNetExternalEntityLoader) xmlNoNetExternalEntityLoader__int - #endif - #endif - -+#ifdef bottom_xmlIO 
-+#undef xmlNoXxeExternalEntityLoader -+extern __typeof (xmlNoXxeExternalEntityLoader) xmlNoXxeExternalEntityLoader __attribute((alias("xmlNoXxeExternalEntityLoader__internal_alias"))); -+#else -+#ifndef xmlNoXxeExternalEntityLoader -+extern __typeof (xmlNoXxeExternalEntityLoader) xmlNoXxeExternalEntityLoader__internal_alias __attribute((visibility("hidden"))); -+#define xmlNoXxeExternalEntityLoader xmlNoXxeExternalEntityLoader__internal_alias -+#endif -+#endif -+ - #ifdef bottom_tree - #undef xmlNodeAddContent - extern __typeof (xmlNodeAddContent) xmlNodeAddContent __attribute((alias("xmlNodeAddContent__internal_alias"))); -diff --git a/include/libxml/parser.h b/include/libxml/parser.h -index 47fbec0..63ca1b9 100644 ---- a/include/libxml/parser.h -+++ b/include/libxml/parser.h -@@ -1111,7 +1111,8 @@ typedef enum { - XML_PARSE_HUGE = 1<<19,/* relax any hardcoded limit from the parser */ - XML_PARSE_OLDSAX = 1<<20,/* parse using SAX2 interface before 2.7.0 */ - XML_PARSE_IGNORE_ENC= 1<<21,/* ignore internal document encoding hint */ -- XML_PARSE_BIG_LINES = 1<<22 /* Store big lines numbers in text PSVI field */ -+ XML_PARSE_BIG_LINES = 1<<22,/* Store big lines numbers in text PSVI field */ -+ XML_PARSE_NOXXE = 1<<23 /* Forbid any external entity loading */ - } xmlParserOption; - - XMLPUBFUN void XMLCALL -diff --git a/include/libxml/xmlIO.h b/include/libxml/xmlIO.h -index 3e41744..8d3fdef 100644 ---- a/include/libxml/xmlIO.h -+++ b/include/libxml/xmlIO.h -@@ -300,6 +300,14 @@ XMLPUBFUN xmlParserInputPtr XMLCALL - xmlParserCtxtPtr ctxt); - - /* -+ * A predefined entity loader external entity expansion -+ */ -+XMLPUBFUN xmlParserInputPtr XMLCALL -+ xmlNoXxeExternalEntityLoader (const char *URL, -+ const char *ID, -+ xmlParserCtxtPtr ctxt); -+ -+/* - * xmlNormalizeWindowsPath is obsolete, don't use it. - * Check xmlCanonicPath in uri.h for a better alternative. 
- */ -diff --git a/include/libxml/xmlerror.h b/include/libxml/xmlerror.h -index 037c16d..3036062 100644 ---- a/include/libxml/xmlerror.h -+++ b/include/libxml/xmlerror.h -@@ -470,6 +470,7 @@ typedef enum { - XML_IO_EADDRINUSE, /* 1554 */ - XML_IO_EALREADY, /* 1555 */ - XML_IO_EAFNOSUPPORT, /* 1556 */ -+ XML_IO_ILLEGAL_XXE, /* 1557 */ - XML_XINCLUDE_RECURSION=1600, - XML_XINCLUDE_PARSE_VALUE, /* 1601 */ - XML_XINCLUDE_ENTITY_DEF_MISMATCH, /* 1602 */ -diff --git a/parser.c b/parser.c -index 53a6b7f..609a270 100644 ---- a/parser.c -+++ b/parser.c -@@ -15350,6 +15350,10 @@ xmlCtxtUseOptionsInternal(xmlParserCtxtPtr ctxt, int options, const char *encodi - ctxt->options |= XML_PARSE_NONET; - options -= XML_PARSE_NONET; - } -+ if (options & XML_PARSE_NOXXE) { -+ ctxt->options |= XML_PARSE_NOXXE; -+ options -= XML_PARSE_NOXXE; -+ } - if (options & XML_PARSE_COMPACT) { - ctxt->options |= XML_PARSE_COMPACT; - options -= XML_PARSE_COMPACT; -diff --git a/xmlIO.c b/xmlIO.c -index 1a79c09..304f822 100644 ---- a/xmlIO.c -+++ b/xmlIO.c -@@ -210,6 +210,7 @@ static const char *IOerr[] = { - "adddress in use", /* EADDRINUSE */ - "already in use", /* EALREADY */ - "unknown address familly", /* EAFNOSUPPORT */ -+ "Attempt to load external entity %s", /* XML_IO_ILLEGAL_XXE */ - }; - - #if defined(_WIN32) || defined (__DJGPP__) && !defined (__CYGWIN__) -@@ -4053,13 +4054,22 @@ xmlDefaultExternalEntityLoader(const char *URL, const char *ID, - xmlGenericError(xmlGenericErrorContext, - "xmlDefaultExternalEntityLoader(%s, xxx)\n", URL); - #endif -- if ((ctxt != NULL) && (ctxt->options & XML_PARSE_NONET)) { -+ if (ctxt != NULL) { - int options = ctxt->options; - -- ctxt->options -= XML_PARSE_NONET; -- ret = xmlNoNetExternalEntityLoader(URL, ID, ctxt); -- ctxt->options = options; -- return(ret); -+ if (options & XML_PARSE_NOXXE) { -+ ctxt->options -= XML_PARSE_NOXXE; -+ ret = xmlNoXxeExternalEntityLoader(URL, ID, ctxt); -+ ctxt->options = options; -+ return(ret); -+ } -+ -+ if (options & XML_PARSE_NONET) { -+ ctxt->options -= XML_PARSE_NONET; -+ ret = xmlNoNetExternalEntityLoader(URL, ID, ctxt); -+ ctxt->options = options; -+ return(ret); -+ } - } - #ifdef LIBXML_CATALOG_ENABLED - resource = xmlResolveResourceFromCatalog(URL, ID, ctxt); -@@ -4160,6 +4170,13 @@ xmlNoNetExternalEntityLoader(const char *URL, const char *ID, - xmlParserInputPtr input = NULL; - xmlChar *resource = NULL; - -+ if (ctxt == NULL) { -+ return(NULL); -+ } -+ if (ctxt->input_id == 1) { -+ return xmlDefaultExternalEntityLoader((const char *) URL, ID, ctxt); -+ } -+ - #ifdef LIBXML_CATALOG_ENABLED - resource = xmlResolveResourceFromCatalog(URL, ID, ctxt); - #endif -@@ -4182,5 +4199,18 @@ xmlNoNetExternalEntityLoader(const char *URL, const char *ID, - return(input); - } - -+xmlParserInputPtr -+xmlNoXxeExternalEntityLoader(const char *URL, const char *ID, -+ xmlParserCtxtPtr ctxt) { -+ if (ctxt == NULL) { -+ return(NULL); -+ } -+ if (ctxt->input_id == 1) { -+ return xmlDefaultExternalEntityLoader((const char *) URL, ID, ctxt); -+ } -+ xmlIOErr(XML_IO_ILLEGAL_XXE, (const char *) URL); -+ return(NULL); -+} -+ - #define bottom_xmlIO - #include "elfgcchack.h" -diff --git a/xmllint.c b/xmllint.c -index 67f7adb..d9368c1 100644 ---- a/xmllint.c -+++ b/xmllint.c -@@ -3019,6 +3019,7 @@ static void usage(const char *name) { - printf("\t--path 'paths': provide a set of paths for resources\n"); - printf("\t--load-trace : print trace of all external entities loaded\n"); - printf("\t--nonet : refuse to fetch DTDs or entities over network\n"); -+ printf("\t--noxxe 
: forbid any external entity loading\n"); - printf("\t--nocompact : do not generate compact text nodes\n"); - printf("\t--htmlout : output results as HTML\n"); - printf("\t--nowrap : do not put HTML doc wrapper\n"); -@@ -3461,6 +3462,10 @@ main(int argc, char **argv) { - (!strcmp(argv[i], "--nonet"))) { - options |= XML_PARSE_NONET; - xmlSetExternalEntityLoader(xmlNoNetExternalEntityLoader); -+ } else if ((!strcmp(argv[i], "-noxxe")) || -+ (!strcmp(argv[i], "--noxxe"))) { -+ options |= XML_PARSE_NOXXE; -+ xmlSetExternalEntityLoader(xmlNoXxeExternalEntityLoader); - } else if ((!strcmp(argv[i], "-nocompact")) || - (!strcmp(argv[i], "--nocompact"))) { - options &= ~XML_PARSE_COMPACT; --- -2.10.2 - diff --git a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-0663.patch b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-0663.patch new file mode 100644 index 000000000..010826585 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-0663.patch @@ -0,0 +1,40 @@ +libxml2: Fix CVE-2017-0663 + +[No upstream tracking] -- https://bugzilla.gnome.org/show_bug.cgi?id=780228 + +valid: Fix type confusion in xmlValidateOneNamespace + +Comment out code that casts xmlNsPtr to xmlAttrPtr. ID types +on namespace declarations make no practical sense anyway. + +Fixes bug 780228 + +Upstream-Status: Backport [https://git.gnome.org/browse/libxml2/commit/?id=92b9e8c8b3787068565a1820ba575d042f9eec66] +CVE: CVE-2017-0663 +Signed-off-by: Andrej Valek + +diff --git a/valid.c b/valid.c +index 19f84b8..e03d35e 100644 +--- a/valid.c ++++ b/valid.c +@@ -4621,6 +4621,12 @@ xmlNodePtr elem, const xmlChar *prefix, xmlNsPtr ns, const xmlChar *value) { + } + } + ++ /* ++ * Casting ns to xmlAttrPtr is wrong. We'd need separate functions ++ * xmlAddID and xmlAddRef for namespace declarations, but it makes ++ * no practical sense to use ID types anyway. ++ */ ++#if 0 + /* Validity Constraint: ID uniqueness */ + if (attrDecl->atype == XML_ATTRIBUTE_ID) { + if (xmlAddID(ctxt, doc, value, (xmlAttrPtr) ns) == NULL) +@@ -4632,6 +4638,7 @@ xmlNodePtr elem, const xmlChar *prefix, xmlNsPtr ns, const xmlChar *value) { + if (xmlAddRef(ctxt, doc, value, (xmlAttrPtr) ns) == NULL) + ret = 0; + } ++#endif + + /* Validity Constraint: Notation Attributes */ + if (attrDecl->atype == XML_ATTRIBUTE_NOTATION) { diff --git a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-5969.patch b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-5969.patch new file mode 100644 index 000000000..571b05c08 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-5969.patch @@ -0,0 +1,62 @@ +libxml2-2.9.4: Fix CVE-2017-5969 + +[No upstream tracking] -- https://bugzilla.gnome.org/show_bug.cgi?id=758422 + +valid: Fix NULL pointer deref in xmlDumpElementContent + +Can only be triggered in recovery mode. 
+ +Fixes bug 758422 + +Upstream-Status: Backport - [https://git.gnome.org/browse/libxml2/commit/?id=94691dc884d1a8ada39f073408b4bb92fe7fe882] +CVE: CVE-2017-5969 +Signed-off-by: Andrej Valek + +diff --git a/valid.c b/valid.c +index 19f84b8..0a8e58a 100644 +--- a/valid.c ++++ b/valid.c +@@ -1172,29 +1172,33 @@ xmlDumpElementContent(xmlBufferPtr buf, xmlElementContentPtr content, int glob) + xmlBufferWriteCHAR(buf, content->name); + break; + case XML_ELEMENT_CONTENT_SEQ: +- if ((content->c1->type == XML_ELEMENT_CONTENT_OR) || +- (content->c1->type == XML_ELEMENT_CONTENT_SEQ)) ++ if ((content->c1 != NULL) && ++ ((content->c1->type == XML_ELEMENT_CONTENT_OR) || ++ (content->c1->type == XML_ELEMENT_CONTENT_SEQ))) + xmlDumpElementContent(buf, content->c1, 1); + else + xmlDumpElementContent(buf, content->c1, 0); + xmlBufferWriteChar(buf, " , "); +- if ((content->c2->type == XML_ELEMENT_CONTENT_OR) || +- ((content->c2->type == XML_ELEMENT_CONTENT_SEQ) && +- (content->c2->ocur != XML_ELEMENT_CONTENT_ONCE))) ++ if ((content->c2 != NULL) && ++ ((content->c2->type == XML_ELEMENT_CONTENT_OR) || ++ ((content->c2->type == XML_ELEMENT_CONTENT_SEQ) && ++ (content->c2->ocur != XML_ELEMENT_CONTENT_ONCE)))) + xmlDumpElementContent(buf, content->c2, 1); + else + xmlDumpElementContent(buf, content->c2, 0); + break; + case XML_ELEMENT_CONTENT_OR: +- if ((content->c1->type == XML_ELEMENT_CONTENT_OR) || +- (content->c1->type == XML_ELEMENT_CONTENT_SEQ)) ++ if ((content->c1 != NULL) && ++ ((content->c1->type == XML_ELEMENT_CONTENT_OR) || ++ (content->c1->type == XML_ELEMENT_CONTENT_SEQ))) + xmlDumpElementContent(buf, content->c1, 1); + else + xmlDumpElementContent(buf, content->c1, 0); + xmlBufferWriteChar(buf, " | "); +- if ((content->c2->type == XML_ELEMENT_CONTENT_SEQ) || +- ((content->c2->type == XML_ELEMENT_CONTENT_OR) && +- (content->c2->ocur != XML_ELEMENT_CONTENT_ONCE))) ++ if ((content->c2 != NULL) && ++ ((content->c2->type == XML_ELEMENT_CONTENT_SEQ) || ++ ((content->c2->type == XML_ELEMENT_CONTENT_OR) && ++ (content->c2->ocur != XML_ELEMENT_CONTENT_ONCE)))) + xmlDumpElementContent(buf, content->c2, 1); + else + xmlDumpElementContent(buf, content->c2, 0); diff --git a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-8872.patch b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-8872.patch new file mode 100644 index 000000000..26779aa57 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-8872.patch @@ -0,0 +1,37 @@ +From d2f873a541c72b0f67e15562819bf98b884b30b7 Mon Sep 17 00:00:00 2001 +From: Hongxu Jia +Date: Wed, 23 Aug 2017 16:04:49 +0800 +Subject: [PATCH] fix CVE-2017-8872 + +this makes xmlHaltParser "empty" the buffer, as it resets cur and ava +il too here. 
+ +this seems to cure this specific issue, and also passes the testsuite + +Signed-off-by: Marcus Meissner + +https://bugzilla.gnome.org/show_bug.cgi?id=775200 +Upstream-Status: Backport [https://bugzilla.gnome.org/attachment.cgi?id=355527&action=diff] +Signed-off-by: Hongxu Jia +--- + parser.c | 4 ++++ + 1 file changed, 4 insertions(+) + +diff --git a/parser.c b/parser.c +index 9506ead..6c07ffd 100644 +--- a/parser.c ++++ b/parser.c +@@ -12664,6 +12664,10 @@ xmlHaltParser(xmlParserCtxtPtr ctxt) { + } + ctxt->input->cur = BAD_CAST""; + ctxt->input->base = ctxt->input->cur; ++ if (ctxt->input->buf) { ++ xmlBufEmpty (ctxt->input->buf->buffer); ++ } else ++ ctxt->input->length = 0; + } + } + +-- +2.7.4 + diff --git a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-9047_CVE-2017-9048.patch b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-9047_CVE-2017-9048.patch new file mode 100644 index 000000000..8b034560f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-9047_CVE-2017-9048.patch @@ -0,0 +1,103 @@ +libxml2-2.9.4: Fix CVE-2017-9047 and CVE-2017-9048 + +[No upstream tracking] -- https://bugzilla.gnome.org/show_bug.cgi?id=781333 + -- https://bugzilla.gnome.org/show_bug.cgi?id=781701 + +valid: Fix buffer size checks in xmlSnprintfElementContent + +xmlSnprintfElementContent failed to correctly check the available +buffer space in two locations. + +Fixes bug 781333 and bug 781701 + +Upstream-Status: Backport [https://git.gnome.org/browse/libxml2/commit/?id=932cc9896ab41475d4aa429c27d9afd175959d74] +CVE: CVE-2017-9047 CVE-2017-9048 +Signed-off-by: Andrej Valek + +diff --git a/result/valid/781333.xml b/result/valid/781333.xml +new file mode 100644 +index 0000000..01baf11 +--- /dev/null ++++ b/result/valid/781333.xml +@@ -0,0 +1,5 @@ ++ ++ ++]> ++ +diff --git a/result/valid/781333.xml.err b/result/valid/781333.xml.err +new file mode 100644 +index 0000000..2176200 +--- /dev/null ++++ b/result/valid/781333.xml.err +@@ -0,0 +1,3 @@ ++./test/valid/781333.xml:4: element a: validity error : Element a content does not follow the DTD, expecting ( ..., got ++ ++ ^ +diff --git a/result/valid/781333.xml.err.rdr b/result/valid/781333.xml.err.rdr +new file mode 100644 +index 0000000..1195a04 +--- /dev/null ++++ b/result/valid/781333.xml.err.rdr +@@ -0,0 +1,6 @@ ++./test/valid/781333.xml:4: element a: validity error : Element a content does not follow the DTD, expecting ( ..., got ++ ++ ^ ++./test/valid/781333.xml:5: element a: validity error : Element a content does not follow the DTD, Expecting more child ++ ++^ +diff --git a/test/valid/781333.xml b/test/valid/781333.xml +new file mode 100644 +index 0000000..bceac9c +--- /dev/null ++++ b/test/valid/781333.xml +@@ -0,0 +1,4 @@ ++ ++]> ++ +diff --git a/valid.c b/valid.c +index 19f84b8..aaa30f6 100644 +--- a/valid.c ++++ b/valid.c +@@ -1262,22 +1262,23 @@ xmlSnprintfElementContent(char *buf, int size, xmlElementContentPtr content, int + case XML_ELEMENT_CONTENT_PCDATA: + strcat(buf, "#PCDATA"); + break; +- case XML_ELEMENT_CONTENT_ELEMENT: ++ case XML_ELEMENT_CONTENT_ELEMENT: { ++ int qnameLen = xmlStrlen(content->name); ++ ++ if (content->prefix != NULL) ++ qnameLen += xmlStrlen(content->prefix) + 1; ++ if (size - len < qnameLen + 10) { ++ strcat(buf, " ..."); ++ return; ++ } + if (content->prefix != NULL) { +- if (size - len < xmlStrlen(content->prefix) + 10) { +- strcat(buf, " ..."); +- return; +- } + strcat(buf, (char *) content->prefix); + strcat(buf, ":"); + } 
+- if (size - len < xmlStrlen(content->name) + 10) { +- strcat(buf, " ..."); +- return; +- } + if (content->name != NULL) + strcat(buf, (char *) content->name); + break; ++ } + case XML_ELEMENT_CONTENT_SEQ: + if ((content->c1->type == XML_ELEMENT_CONTENT_OR) || + (content->c1->type == XML_ELEMENT_CONTENT_SEQ)) +@@ -1319,6 +1320,7 @@ xmlSnprintfElementContent(char *buf, int size, xmlElementContentPtr content, int + xmlSnprintfElementContent(buf, size, content->c2, 0); + break; + } ++ if (size - strlen(buf) <= 2) return; + if (englob) + strcat(buf, ")"); + switch (content->ocur) { diff --git a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-9049_CVE-2017-9050.patch b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-9049_CVE-2017-9050.patch new file mode 100644 index 000000000..591075de3 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-CVE-2017-9049_CVE-2017-9050.patch @@ -0,0 +1,291 @@ +libxml2-2.9.4: Fix CVE-2017-9049 and CVE-2017-9050 + +[No upstream tracking] -- https://bugzilla.gnome.org/show_bug.cgi?id=781205 + -- https://bugzilla.gnome.org/show_bug.cgi?id=781361 + +parser: Fix handling of parameter-entity references + +There were two bugs where parameter-entity references could lead to an +unexpected change of the input buffer in xmlParseNameComplex and +xmlDictLookup being called with an invalid pointer. + +Percent sign in DTD Names +========================= + +The NEXTL macro used to call xmlParserHandlePEReference. When parsing +"complex" names inside the DTD, this could result in entity expansion +which created a new input buffer. The fix is to simply remove the call +to xmlParserHandlePEReference from the NEXTL macro. This is safe because +no users of the macro require expansion of parameter entities. + +- xmlParseNameComplex +- xmlParseNCNameComplex +- xmlParseNmtoken + +The percent sign is not allowed in names, which are grammatical tokens. + +- xmlParseEntityValue + +Parameter-entity references in entity values are expanded but this +happens in a separate step in this function. + +- xmlParseSystemLiteral + +Parameter-entity references are ignored in the system literal. + +- xmlParseAttValueComplex +- xmlParseCharDataComplex +- xmlParseCommentComplex +- xmlParsePI +- xmlParseCDSect + +Parameter-entity references are ignored outside the DTD. + +- xmlLoadEntityContent + +This function is only called from xmlStringLenDecodeEntities and +entities are replaced in a separate step immediately after the function +call. + +This bug could also be triggered with an internal subset and double +entity expansion. + +This fixes bug 766956 initially reported by Wei Lei and independently by +Chromium's ClusterFuzz, Hanno Böck, and Marco Grassi. Thanks to everyone +involved. + +xmlParseNameComplex with XML_PARSE_OLD10 +======================================== + +When parsing Names inside an expanded parameter entity with the +XML_PARSE_OLD10 option, xmlParseNameComplex would call xmlGROW via the +GROW macro if the input buffer was exhausted. At the end of the +parameter entity's replacement text, this function would then call +xmlPopInput which invalidated the input buffer. + +There should be no need to invoke GROW in this situation because the +buffer is grown periodically every XML_PARSER_CHUNK_SIZE characters and, +at least for UTF-8, in xmlCurrentChar. This also matches the code path +executed when XML_PARSE_OLD10 is not set. + +This fixes bugs 781205 (CVE-2017-9049) and 781361 (CVE-2017-9050). 
+Thanks to Marcel Böhme and Thuan Pham for the report. + +Additional hardening +==================== + +A separate check was added in xmlParseNameComplex to validate the +buffer size. + +Fixes bug 781205 and bug 781361 + +Upstream-Status: Backport [https://git.gnome.org/browse/libxml2/commit/?id=932cc9896ab41475d4aa429c27d9afd175959d74] +CVE: CVE-2017-9049 CVE-2017-9050 +Signed-off-by: Andrej Valek + +diff --git a/Makefile.am b/Makefile.am +index 9f988b0..dab15a4 100644 +--- a/Makefile.am ++++ b/Makefile.am +@@ -422,6 +422,24 @@ Errtests : xmllint$(EXEEXT) + if [ -n "$$log" ] ; then echo $$name result ; echo $$log ; fi ; \ + rm result.$$name error.$$name ; \ + fi ; fi ; done) ++ @echo "## Error cases regression tests (old 1.0)" ++ -@(for i in $(srcdir)/test/errors10/*.xml ; do \ ++ name=`basename $$i`; \ ++ if [ ! -d $$i ] ; then \ ++ if [ ! -f $(srcdir)/result/errors10/$$name ] ; then \ ++ echo New test file $$name ; \ ++ $(CHECKER) $(top_builddir)/xmllint --oldxml10 $$i \ ++ 2> $(srcdir)/result/errors10/$$name.err \ ++ > $(srcdir)/result/errors10/$$name ; \ ++ grep "MORY ALLO" .memdump | grep -v "MEMORY ALLOCATED : 0"; \ ++ else \ ++ log=`$(CHECKER) $(top_builddir)/xmllint --oldxml10 $$i 2> error.$$name > result.$$name ; \ ++ grep "MORY ALLO" .memdump | grep -v "MEMORY ALLOCATED : 0"; \ ++ diff $(srcdir)/result/errors10/$$name result.$$name ; \ ++ diff $(srcdir)/result/errors10/$$name.err error.$$name` ; \ ++ if [ -n "$$log" ] ; then echo $$name result ; echo "$$log" ; fi ; \ ++ rm result.$$name error.$$name ; \ ++ fi ; fi ; done) + @echo "## Error cases stream regression tests" + -@(for i in $(srcdir)/test/errors/*.xml ; do \ + name=`basename $$i`; \ +diff --git a/parser.c b/parser.c +index 609a270..8e11c12 100644 +--- a/parser.c ++++ b/parser.c +@@ -2115,7 +2115,6 @@ static void xmlGROW (xmlParserCtxtPtr ctxt) { + ctxt->input->line++; ctxt->input->col = 1; \ + } else ctxt->input->col++; \ + ctxt->input->cur += l; \ +- if (*ctxt->input->cur == '%') xmlParserHandlePEReference(ctxt); \ + } while (0) + + #define CUR_CHAR(l) xmlCurrentChar(ctxt, &l) +@@ -3406,13 +3405,6 @@ xmlParseNameComplex(xmlParserCtxtPtr ctxt) { + len += l; + NEXTL(l); + c = CUR_CHAR(l); +- if (c == 0) { +- count = 0; +- GROW; +- if (ctxt->instate == XML_PARSER_EOF) +- return(NULL); +- c = CUR_CHAR(l); +- } + } + } + if ((len > XML_MAX_NAME_LENGTH) && +@@ -3420,6 +3412,16 @@ xmlParseNameComplex(xmlParserCtxtPtr ctxt) { + xmlFatalErr(ctxt, XML_ERR_NAME_TOO_LONG, "Name"); + return(NULL); + } ++ if (ctxt->input->cur - ctxt->input->base < len) { ++ /* ++ * There were a couple of bugs where PERefs lead to to a change ++ * of the buffer. Check the buffer size to avoid passing an invalid ++ * pointer to xmlDictLookup. 
++ */ ++ xmlFatalErr(ctxt, XML_ERR_INTERNAL_ERROR, ++ "unexpected change of input buffer"); ++ return (NULL); ++ } + if ((*ctxt->input->cur == '\n') && (ctxt->input->cur[-1] == '\r')) + return(xmlDictLookup(ctxt->dict, ctxt->input->cur - (len + 1), len)); + return(xmlDictLookup(ctxt->dict, ctxt->input->cur - len, len)); +diff --git a/result/errors10/781205.xml b/result/errors10/781205.xml +new file mode 100644 +index 0000000..e69de29 +diff --git a/result/errors10/781205.xml.err b/result/errors10/781205.xml.err +new file mode 100644 +index 0000000..da15c3f +--- /dev/null ++++ b/result/errors10/781205.xml.err +@@ -0,0 +1,21 @@ ++Entity: line 1: parser error : internal error: xmlParseInternalSubset: error detected in Markup declaration ++ ++ %a; ++ ^ ++Entity: line 1: ++<:0000 ++^ ++Entity: line 1: parser error : DOCTYPE improperly terminated ++ %a; ++ ^ ++Entity: line 1: ++<:0000 ++^ ++namespace error : Failed to parse QName ':0000' ++ %a; ++ ^ ++<:0000 ++ ^ ++./test/errors10/781205.xml:4: parser error : Couldn't find end of Start Tag :0000 line 1 ++ ++^ +diff --git a/result/errors10/781361.xml b/result/errors10/781361.xml +new file mode 100644 +index 0000000..e69de29 +diff --git a/result/errors10/781361.xml.err b/result/errors10/781361.xml.err +new file mode 100644 +index 0000000..655f41a +--- /dev/null ++++ b/result/errors10/781361.xml.err +@@ -0,0 +1,13 @@ ++./test/errors10/781361.xml:4: parser error : xmlParseElementDecl: 'EMPTY', 'ANY' or '(' expected ++ ++^ ++./test/errors10/781361.xml:4: parser error : internal error: xmlParseInternalSubset: error detected in Markup declaration ++ ++ ++^ ++./test/errors10/781361.xml:4: parser error : DOCTYPE improperly terminated ++ ++^ ++./test/errors10/781361.xml:4: parser error : Start tag expected, '<' not found ++ ++^ +diff --git a/result/valid/766956.xml b/result/valid/766956.xml +new file mode 100644 +index 0000000..e69de29 +diff --git a/result/valid/766956.xml.err b/result/valid/766956.xml.err +new file mode 100644 +index 0000000..34b1dae +--- /dev/null ++++ b/result/valid/766956.xml.err +@@ -0,0 +1,9 @@ ++test/valid/dtds/766956.dtd:2: parser error : PEReference: expecting ';' ++%ä%ent; ++ ^ ++Entity: line 1: parser error : Content error in the external subset ++ %ent; ++ ^ ++Entity: line 1: ++value ++^ +diff --git a/result/valid/766956.xml.err.rdr b/result/valid/766956.xml.err.rdr +new file mode 100644 +index 0000000..7760346 +--- /dev/null ++++ b/result/valid/766956.xml.err.rdr +@@ -0,0 +1,10 @@ ++test/valid/dtds/766956.dtd:2: parser error : PEReference: expecting ';' ++%ä%ent; ++ ^ ++Entity: line 1: parser error : Content error in the external subset ++ %ent; ++ ^ ++Entity: line 1: ++value ++^ ++./test/valid/766956.xml : failed to parse +diff --git a/runtest.c b/runtest.c +index bb74d2a..63e8c20 100644 +--- a/runtest.c ++++ b/runtest.c +@@ -4202,6 +4202,9 @@ testDesc testDescriptions[] = { + { "Error cases regression tests", + errParseTest, "./test/errors/*.xml", "result/errors/", "", ".err", + 0 }, ++ { "Error cases regression tests (old 1.0)", ++ errParseTest, "./test/errors10/*.xml", "result/errors10/", "", ".err", ++ XML_PARSE_OLD10 }, + #ifdef LIBXML_READER_ENABLED + { "Error cases stream regression tests", + streamParseTest, "./test/errors/*.xml", "result/errors/", NULL, ".str", +diff --git a/test/errors10/781205.xml b/test/errors10/781205.xml +new file mode 100644 +index 0000000..d9e9e83 +--- /dev/null ++++ b/test/errors10/781205.xml +@@ -0,0 +1,3 @@ ++ ++ %a; +diff --git a/test/errors10/781361.xml b/test/errors10/781361.xml +new 
file mode 100644 +index 0000000..67476bc +--- /dev/null ++++ b/test/errors10/781361.xml +@@ -0,0 +1,3 @@ ++ ++ %elem; +diff --git a/test/valid/766956.xml b/test/valid/766956.xml +new file mode 100644 +index 0000000..19a95a0 +--- /dev/null ++++ b/test/valid/766956.xml +@@ -0,0 +1,2 @@ ++ ++ +diff --git a/test/valid/dtds/766956.dtd b/test/valid/dtds/766956.dtd +new file mode 100644 +index 0000000..dddde68 +--- /dev/null ++++ b/test/valid/dtds/766956.dtd +@@ -0,0 +1,2 @@ ++ ++%ä%ent; diff --git a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-fix_and_simplify_xmlParseStartTag2.patch b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-fix_and_simplify_xmlParseStartTag2.patch new file mode 100644 index 000000000..faa57701f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2/libxml2-fix_and_simplify_xmlParseStartTag2.patch @@ -0,0 +1,590 @@ +libxml2-2.9.4: Avoid reparsing and simplify control flow in xmlParseStartTag2 + +[No upstream tracking] + +parser: Avoid reparsing in xmlParseStartTag2 + +The code in xmlParseStartTag2 must handle the case that the input +buffer was grown and reallocated which can invalidate pointers to +attribute values. Before, this was handled by detecting changes of +the input buffer "base" pointer and, in case of a change, jumping +back to the beginning of the function and reparsing the start tag. + +The major problem of this approach is that whether an input buffer is +reallocated is nondeterministic, resulting in seemingly random test +failures. See the mailing list thread "runtest mystery bug: name2.xml +error case regression test" from 2012, for example. + +If a reallocation was detected, the code also made no attempts to +continue parsing in case of errors which makes a difference in +the lax "recover" mode. + +Now we store the current input buffer "base" pointer for each (not +separately allocated) attribute in the namespace URI field, which isn't +used until later. After the whole start tag was parsed, the pointers to +the attribute values are reconstructed using the offset between the +new and the old input buffer. This relies on arithmetic on dangling +pointers which is technically undefined behavior. But it seems like +the easiest and most efficient fix and a similar approach is used in +xmlParserInputGrow. + +This changes the error output of several tests, typically making it +more verbose because we try harder to continue parsing in case of errors. + +(Another possible solution is to check not only the "base" pointer +but the size of the input buffer as well. But this would result in +even more reparsing.) + +Remove some goto labels and deduplicate a bit of code after handling +namespaces. + +There were two bugs where parameter-entity references could lead to an +unexpected change of the input buffer in xmlParseNameComplex and +xmlDictLookup being called with an invalid pointer. 
+ + +Upstream-Status: Backport + - [https://git.gnome.org/browse/libxml2/commit/?id=07b7428b69c368611d215a140fe630b2d1e61349] + - [https://git.gnome.org/browse/libxml2/commit/?id=855c19efb7cd30d927d673b3658563c4959ca6f0] +Signed-off-by: Andrej Valek + +diff --git a/parser.c b/parser.c +index 609a270..74016e3 100644 +--- a/parser.c ++++ b/parser.c +@@ -43,6 +43,7 @@ + #include + #include + #include ++#include + #include + #include + #include +@@ -9377,8 +9378,7 @@ xmlParseStartTag2(xmlParserCtxtPtr ctxt, const xmlChar **pref, + const xmlChar **atts = ctxt->atts; + int maxatts = ctxt->maxatts; + int nratts, nbatts, nbdef; +- int i, j, nbNs, attval, oldline, oldcol, inputNr; +- const xmlChar *base; ++ int i, j, nbNs, attval; + unsigned long cur; + int nsNr = ctxt->nsNr; + +@@ -9392,13 +9392,8 @@ xmlParseStartTag2(xmlParserCtxtPtr ctxt, const xmlChar **pref, + * The Shrinking is only possible once the full set of attribute + * callbacks have been done. + */ +-reparse: + SHRINK; +- base = ctxt->input->base; + cur = ctxt->input->cur - ctxt->input->base; +- inputNr = ctxt->inputNr; +- oldline = ctxt->input->line; +- oldcol = ctxt->input->col; + nbatts = 0; + nratts = 0; + nbdef = 0; +@@ -9422,8 +9417,6 @@ reparse: + */ + SKIP_BLANKS; + GROW; +- if ((ctxt->input->base != base) || (inputNr != ctxt->inputNr)) +- goto base_changed; + + while (((RAW != '>') && + ((RAW != '/') || (NXT(1) != '>')) && +@@ -9434,203 +9427,174 @@ reparse: + + attname = xmlParseAttribute2(ctxt, prefix, localname, + &aprefix, &attvalue, &len, &alloc); +- if ((ctxt->input->base != base) || (inputNr != ctxt->inputNr)) { +- if ((attvalue != NULL) && (alloc != 0)) +- xmlFree(attvalue); +- attvalue = NULL; +- goto base_changed; +- } +- if ((attname != NULL) && (attvalue != NULL)) { +- if (len < 0) len = xmlStrlen(attvalue); +- if ((attname == ctxt->str_xmlns) && (aprefix == NULL)) { +- const xmlChar *URL = xmlDictLookup(ctxt->dict, attvalue, len); +- xmlURIPtr uri; +- +- if (URL == NULL) { +- xmlErrMemory(ctxt, "dictionary allocation failure"); +- if ((attvalue != NULL) && (alloc != 0)) +- xmlFree(attvalue); +- return(NULL); +- } +- if (*URL != 0) { +- uri = xmlParseURI((const char *) URL); +- if (uri == NULL) { +- xmlNsErr(ctxt, XML_WAR_NS_URI, +- "xmlns: '%s' is not a valid URI\n", +- URL, NULL, NULL); +- } else { +- if (uri->scheme == NULL) { +- xmlNsWarn(ctxt, XML_WAR_NS_URI_RELATIVE, +- "xmlns: URI %s is not absolute\n", +- URL, NULL, NULL); +- } +- xmlFreeURI(uri); +- } +- if (URL == ctxt->str_xml_ns) { +- if (attname != ctxt->str_xml) { +- xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, +- "xml namespace URI cannot be the default namespace\n", +- NULL, NULL, NULL); +- } +- goto skip_default_ns; +- } +- if ((len == 29) && +- (xmlStrEqual(URL, +- BAD_CAST "http://www.w3.org/2000/xmlns/"))) { +- xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, +- "reuse of the xmlns namespace name is forbidden\n", +- NULL, NULL, NULL); +- goto skip_default_ns; +- } +- } +- /* +- * check that it's not a defined namespace +- */ +- for (j = 1;j <= nbNs;j++) +- if (ctxt->nsTab[ctxt->nsNr - 2 * j] == NULL) +- break; +- if (j <= nbNs) +- xmlErrAttributeDup(ctxt, NULL, attname); +- else +- if (nsPush(ctxt, NULL, URL) > 0) nbNs++; +-skip_default_ns: +- if ((attvalue != NULL) && (alloc != 0)) { +- xmlFree(attvalue); +- attvalue = NULL; +- } +- if ((RAW == '>') || (((RAW == '/') && (NXT(1) == '>')))) +- break; +- if (!IS_BLANK_CH(RAW)) { +- xmlFatalErrMsg(ctxt, XML_ERR_SPACE_REQUIRED, +- "attributes construct error\n"); +- break; +- } +- SKIP_BLANKS; +- if 
((ctxt->input->base != base) || (inputNr != ctxt->inputNr)) +- goto base_changed; +- continue; +- } +- if (aprefix == ctxt->str_xmlns) { +- const xmlChar *URL = xmlDictLookup(ctxt->dict, attvalue, len); +- xmlURIPtr uri; +- +- if (attname == ctxt->str_xml) { +- if (URL != ctxt->str_xml_ns) { +- xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, +- "xml namespace prefix mapped to wrong URI\n", +- NULL, NULL, NULL); +- } +- /* +- * Do not keep a namespace definition node +- */ +- goto skip_ns; +- } ++ if ((attname == NULL) || (attvalue == NULL)) ++ goto next_attr; ++ if (len < 0) len = xmlStrlen(attvalue); ++ ++ if ((attname == ctxt->str_xmlns) && (aprefix == NULL)) { ++ const xmlChar *URL = xmlDictLookup(ctxt->dict, attvalue, len); ++ xmlURIPtr uri; ++ ++ if (URL == NULL) { ++ xmlErrMemory(ctxt, "dictionary allocation failure"); ++ if ((attvalue != NULL) && (alloc != 0)) ++ xmlFree(attvalue); ++ return(NULL); ++ } ++ if (*URL != 0) { ++ uri = xmlParseURI((const char *) URL); ++ if (uri == NULL) { ++ xmlNsErr(ctxt, XML_WAR_NS_URI, ++ "xmlns: '%s' is not a valid URI\n", ++ URL, NULL, NULL); ++ } else { ++ if (uri->scheme == NULL) { ++ xmlNsWarn(ctxt, XML_WAR_NS_URI_RELATIVE, ++ "xmlns: URI %s is not absolute\n", ++ URL, NULL, NULL); ++ } ++ xmlFreeURI(uri); ++ } + if (URL == ctxt->str_xml_ns) { +- if (attname != ctxt->str_xml) { +- xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, +- "xml namespace URI mapped to wrong prefix\n", +- NULL, NULL, NULL); +- } +- goto skip_ns; +- } +- if (attname == ctxt->str_xmlns) { +- xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, +- "redefinition of the xmlns prefix is forbidden\n", +- NULL, NULL, NULL); +- goto skip_ns; +- } +- if ((len == 29) && +- (xmlStrEqual(URL, +- BAD_CAST "http://www.w3.org/2000/xmlns/"))) { +- xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, +- "reuse of the xmlns namespace name is forbidden\n", +- NULL, NULL, NULL); +- goto skip_ns; +- } +- if ((URL == NULL) || (URL[0] == 0)) { +- xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, +- "xmlns:%s: Empty XML namespace is not allowed\n", +- attname, NULL, NULL); +- goto skip_ns; +- } else { +- uri = xmlParseURI((const char *) URL); +- if (uri == NULL) { +- xmlNsErr(ctxt, XML_WAR_NS_URI, +- "xmlns:%s: '%s' is not a valid URI\n", +- attname, URL, NULL); +- } else { +- if ((ctxt->pedantic) && (uri->scheme == NULL)) { +- xmlNsWarn(ctxt, XML_WAR_NS_URI_RELATIVE, +- "xmlns:%s: URI %s is not absolute\n", +- attname, URL, NULL); +- } +- xmlFreeURI(uri); +- } +- } +- +- /* +- * check that it's not a defined namespace +- */ +- for (j = 1;j <= nbNs;j++) +- if (ctxt->nsTab[ctxt->nsNr - 2 * j] == attname) +- break; +- if (j <= nbNs) +- xmlErrAttributeDup(ctxt, aprefix, attname); +- else +- if (nsPush(ctxt, attname, URL) > 0) nbNs++; +-skip_ns: +- if ((attvalue != NULL) && (alloc != 0)) { +- xmlFree(attvalue); +- attvalue = NULL; +- } +- if ((RAW == '>') || (((RAW == '/') && (NXT(1) == '>')))) +- break; +- if (!IS_BLANK_CH(RAW)) { +- xmlFatalErrMsg(ctxt, XML_ERR_SPACE_REQUIRED, +- "attributes construct error\n"); +- break; +- } +- SKIP_BLANKS; +- if ((ctxt->input->base != base) || (inputNr != ctxt->inputNr)) +- goto base_changed; +- continue; +- } ++ if (attname != ctxt->str_xml) { ++ xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, ++ "xml namespace URI cannot be the default namespace\n", ++ NULL, NULL, NULL); ++ } ++ goto next_attr; ++ } ++ if ((len == 29) && ++ (xmlStrEqual(URL, ++ BAD_CAST "http://www.w3.org/2000/xmlns/"))) { ++ xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, ++ "reuse of the xmlns namespace name is forbidden\n", ++ NULL, NULL, NULL); ++ 
goto next_attr; ++ } ++ } ++ /* ++ * check that it's not a defined namespace ++ */ ++ for (j = 1;j <= nbNs;j++) ++ if (ctxt->nsTab[ctxt->nsNr - 2 * j] == NULL) ++ break; ++ if (j <= nbNs) ++ xmlErrAttributeDup(ctxt, NULL, attname); ++ else ++ if (nsPush(ctxt, NULL, URL) > 0) nbNs++; ++ ++ } else if (aprefix == ctxt->str_xmlns) { ++ const xmlChar *URL = xmlDictLookup(ctxt->dict, attvalue, len); ++ xmlURIPtr uri; ++ ++ if (attname == ctxt->str_xml) { ++ if (URL != ctxt->str_xml_ns) { ++ xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, ++ "xml namespace prefix mapped to wrong URI\n", ++ NULL, NULL, NULL); ++ } ++ /* ++ * Do not keep a namespace definition node ++ */ ++ goto next_attr; ++ } ++ if (URL == ctxt->str_xml_ns) { ++ if (attname != ctxt->str_xml) { ++ xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, ++ "xml namespace URI mapped to wrong prefix\n", ++ NULL, NULL, NULL); ++ } ++ goto next_attr; ++ } ++ if (attname == ctxt->str_xmlns) { ++ xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, ++ "redefinition of the xmlns prefix is forbidden\n", ++ NULL, NULL, NULL); ++ goto next_attr; ++ } ++ if ((len == 29) && ++ (xmlStrEqual(URL, ++ BAD_CAST "http://www.w3.org/2000/xmlns/"))) { ++ xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, ++ "reuse of the xmlns namespace name is forbidden\n", ++ NULL, NULL, NULL); ++ goto next_attr; ++ } ++ if ((URL == NULL) || (URL[0] == 0)) { ++ xmlNsErr(ctxt, XML_NS_ERR_XML_NAMESPACE, ++ "xmlns:%s: Empty XML namespace is not allowed\n", ++ attname, NULL, NULL); ++ goto next_attr; ++ } else { ++ uri = xmlParseURI((const char *) URL); ++ if (uri == NULL) { ++ xmlNsErr(ctxt, XML_WAR_NS_URI, ++ "xmlns:%s: '%s' is not a valid URI\n", ++ attname, URL, NULL); ++ } else { ++ if ((ctxt->pedantic) && (uri->scheme == NULL)) { ++ xmlNsWarn(ctxt, XML_WAR_NS_URI_RELATIVE, ++ "xmlns:%s: URI %s is not absolute\n", ++ attname, URL, NULL); ++ } ++ xmlFreeURI(uri); ++ } ++ } + +- /* +- * Add the pair to atts +- */ +- if ((atts == NULL) || (nbatts + 5 > maxatts)) { +- if (xmlCtxtGrowAttrs(ctxt, nbatts + 5) < 0) { +- if (attvalue[len] == 0) +- xmlFree(attvalue); +- goto failed; +- } +- maxatts = ctxt->maxatts; +- atts = ctxt->atts; +- } +- ctxt->attallocs[nratts++] = alloc; +- atts[nbatts++] = attname; +- atts[nbatts++] = aprefix; +- atts[nbatts++] = NULL; /* the URI will be fetched later */ +- atts[nbatts++] = attvalue; +- attvalue += len; +- atts[nbatts++] = attvalue; +- /* +- * tag if some deallocation is needed +- */ +- if (alloc != 0) attval = 1; +- } else { +- if ((attvalue != NULL) && (attvalue[len] == 0)) +- xmlFree(attvalue); +- } ++ /* ++ * check that it's not a defined namespace ++ */ ++ for (j = 1;j <= nbNs;j++) ++ if (ctxt->nsTab[ctxt->nsNr - 2 * j] == attname) ++ break; ++ if (j <= nbNs) ++ xmlErrAttributeDup(ctxt, aprefix, attname); ++ else ++ if (nsPush(ctxt, attname, URL) > 0) nbNs++; ++ ++ } else { ++ /* ++ * Add the pair to atts ++ */ ++ if ((atts == NULL) || (nbatts + 5 > maxatts)) { ++ if (xmlCtxtGrowAttrs(ctxt, nbatts + 5) < 0) { ++ goto next_attr; ++ } ++ maxatts = ctxt->maxatts; ++ atts = ctxt->atts; ++ } ++ ctxt->attallocs[nratts++] = alloc; ++ atts[nbatts++] = attname; ++ atts[nbatts++] = aprefix; ++ /* ++ * The namespace URI field is used temporarily to point at the ++ * base of the current input buffer for non-alloced attributes. ++ * When the input buffer is reallocated, all the pointers become ++ * invalid, but they can be reconstructed later. 
++ */ ++ if (alloc) ++ atts[nbatts++] = NULL; ++ else ++ atts[nbatts++] = ctxt->input->base; ++ atts[nbatts++] = attvalue; ++ attvalue += len; ++ atts[nbatts++] = attvalue; ++ /* ++ * tag if some deallocation is needed ++ */ ++ if (alloc != 0) attval = 1; ++ attvalue = NULL; /* moved into atts */ ++ } + +-failed: ++next_attr: ++ if ((attvalue != NULL) && (alloc != 0)) { ++ xmlFree(attvalue); ++ attvalue = NULL; ++ } + + GROW + if (ctxt->instate == XML_PARSER_EOF) + break; +- if ((ctxt->input->base != base) || (inputNr != ctxt->inputNr)) +- goto base_changed; + if ((RAW == '>') || (((RAW == '/') && (NXT(1) == '>')))) + break; + if (!IS_BLANK_CH(RAW)) { +@@ -9646,8 +9610,20 @@ failed: + break; + } + GROW; +- if ((ctxt->input->base != base) || (inputNr != ctxt->inputNr)) +- goto base_changed; ++ } ++ ++ /* Reconstruct attribute value pointers. */ ++ for (i = 0, j = 0; j < nratts; i += 5, j++) { ++ if (atts[i+2] != NULL) { ++ /* ++ * Arithmetic on dangling pointers is technically undefined ++ * behavior, but well... ++ */ ++ ptrdiff_t offset = ctxt->input->base - atts[i+2]; ++ atts[i+2] = NULL; /* Reset repurposed namespace URI */ ++ atts[i+3] += offset; /* value */ ++ atts[i+4] += offset; /* valuend */ ++ } + } + + /* +@@ -9804,34 +9780,6 @@ failed: + } + + return(localname); +- +-base_changed: +- /* +- * the attribute strings are valid iif the base didn't changed +- */ +- if (attval != 0) { +- for (i = 3,j = 0; j < nratts;i += 5,j++) +- if ((ctxt->attallocs[j] != 0) && (atts[i] != NULL)) +- xmlFree((xmlChar *) atts[i]); +- } +- +- /* +- * We can't switch from one entity to another in the middle +- * of a start tag +- */ +- if (inputNr != ctxt->inputNr) { +- xmlFatalErrMsg(ctxt, XML_ERR_ENTITY_BOUNDARY, +- "Start tag doesn't start and stop in the same entity\n"); +- return(NULL); +- } +- +- ctxt->input->cur = ctxt->input->base + cur; +- ctxt->input->line = oldline; +- ctxt->input->col = oldcol; +- if (ctxt->wellFormed == 1) { +- goto reparse; +- } +- return(NULL); + } + + /** +diff --git a/result/errors/759398.xml.err b/result/errors/759398.xml.err +index e08d9bf..f6036a3 100644 +--- a/result/errors/759398.xml.err ++++ b/result/errors/759398.xml.err +@@ -1,9 +1,12 @@ + ./test/errors/759398.xml:210: parser error : StartTag: invalid element name + need to worry about parsers whi ++ ^ ++./test/errors/759398.xml:316: parser error : Extra content at the end of the document ++ ++^ +diff --git a/result/errors/attr1.xml.err b/result/errors/attr1.xml.err +index 4f08538..c4c4fc8 100644 +--- a/result/errors/attr1.xml.err ++++ b/result/errors/attr1.xml.err +@@ -1,6 +1,9 @@ + ./test/errors/attr1.xml:2: parser error : AttValue: ' expected + + ^ +-./test/errors/attr1.xml:1: parser error : Extra content at the end of the document +-ooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo +- ^ ++./test/errors/attr2.xml:2: parser error : attributes construct error ++ ++^ ++./test/errors/attr2.xml:2: parser error : Couldn't find end of Start Tag foo line 1 ++ ++^ +diff --git a/result/errors/name2.xml.err b/result/errors/name2.xml.err +index a6649a1..8a6acee 100644 +--- a/result/errors/name2.xml.err ++++ b/result/errors/name2.xml.err +@@ -1,6 +1,9 @@ + ./test/errors/name2.xml:2: parser error : Specification mandate value for attribute 
foooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo + + ^ +-./test/errors/name2.xml:1: parser error : Extra content at the end of the document +- -Upstream-Status: Pending +Signed-off-by: Andrej Valek +Upstream-Status: Backport diff -uNr a/Makefile.am b/Makefile.am ---- a/Makefile.am 2013-04-17 14:51:42.633386477 +0200 -+++ b/Makefile.am 2013-04-19 14:47:51.544720568 +0200 -@@ -202,10 +202,19 @@ +--- a/Makefile.am 2016-05-22 03:49:02.000000000 +0200 ++++ b/Makefile.am 2017-06-14 10:38:43.381305385 +0200 +@@ -202,10 +202,24 @@ #testOOM_DEPENDENCIES = $(DEPS) #testOOM_LDADD= $(LDADDS) @@ -24,10 +25,24 @@ diff -uNr a/Makefile.am b/Makefile.am [ -d test ] || $(LN_S) $(srcdir)/test . [ -d result ] || $(LN_S) $(srcdir)/result . 
- $(CHECKER) ./runtest$(EXEEXT) && $(CHECKER) ./testrecurse$(EXEEXT) &&$(CHECKER) ./testapi$(EXEEXT) && $(CHECKER) ./testchar$(EXEEXT)&& $(CHECKER) ./testdict$(EXEEXT) && $(CHECKER) ./runxmlconf$(EXEEXT) -+ ./runtest$(EXEEXT) ; ./testrecurse$(EXEEXT) ; ./testapi$(EXEEXT) ; ./testchar$(EXEEXT) ; ./testdict$(EXEEXT) ; ./runxmlconf$(EXEEXT) ++ $(CHECKER) ./runtest$(EXEEXT) && \ ++ $(CHECKER) ./testrecurse$(EXEEXT) && \ ++ ASAN_OPTIONS="$$ASAN_OPTIONS:detect_leaks=0" $(CHECKER) ./testapi$(EXEEXT) && \ ++ $(CHECKER) ./testchar$(EXEEXT) && \ ++ $(CHECKER) ./testdict$(EXEEXT) && \ ++ $(CHECKER) ./runxmlconf$(EXEEXT) @(if [ "$(PYTHON_SUBDIR)" != "" ] ; then cd python ; \ $(MAKE) tests ; fi) +@@ -229,7 +243,7 @@ + + APItests: testapi$(EXEEXT) + @echo "## Running the API regression tests this may take a little while" +- -@($(CHECKER) $(top_builddir)/testapi -q) ++ -@(ASAN_OPTIONS="$$ASAN_OPTIONS:detect_leaks=0" $(CHECKER) $(top_builddir)/testapi -q) + + HTMLtests : testHTML$(EXEEXT) + @(echo > .memdump) diff -uNr a/runsuite.c b/runsuite.c --- a/runsuite.c 2013-04-12 16:17:11.462823238 +0200 +++ b/runsuite.c 2013-04-17 14:07:24.352693211 +0200 diff --git a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2_2.9.4.bb b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2_2.9.4.bb index c2c3c9326..107539b50 100644 --- a/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2_2.9.4.bb +++ b/import-layers/yocto-poky/meta/recipes-core/libxml/libxml2_2.9.4.bb @@ -23,8 +23,14 @@ SRC_URI = "ftp://xmlsoft.org/libxml2/libxml2-${PV}.tar.gz;name=libtar \ file://libxml2-CVE-2016-5131.patch \ file://libxml2-CVE-2016-4658.patch \ file://libxml2-fix_NULL_pointer_derefs.patch \ - file://CVE-2016-9318.patch \ - " + file://libxml2-fix_and_simplify_xmlParseStartTag2.patch \ + file://libxml2-CVE-2017-9047_CVE-2017-9048.patch \ + file://libxml2-CVE-2017-9049_CVE-2017-9050.patch \ + file://libxml2-CVE-2017-5969.patch \ + file://libxml2-CVE-2017-0663.patch \ + file://libxml2-CVE-2017-8872.patch \ + file://0001-Make-ptest-run-the-python-tests-if-python-is-enabled.patch \ + " SRC_URI[libtar.md5sum] = "ae249165c173b1ff386ee8ad676815f5" SRC_URI[libtar.sha256sum] = "ffb911191e509b966deb55de705387f14156e1a56b21824357cdf0053233633c" @@ -33,22 +39,24 @@ SRC_URI[testtar.sha256sum] = "96151685cec997e1f9f3387e3626d61e6284d4d6e66e0e440c BINCONFIG = "${bindir}/xml2-config" -inherit autotools pkgconfig binconfig-disabled pythonnative ptest +PACKAGECONFIG ??= "python \ + ${@bb.utils.filter('DISTRO_FEATURES', 'ipv6', d)} \ +" +PACKAGECONFIG[python] = "--with-python=${PYTHON},--without-python,python3" +PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6," -RDEPENDS_${PN}-ptest += "python-core" +inherit autotools pkgconfig binconfig-disabled ptest -RDEPENDS_${PN}-python += "python-core" +inherit ${@bb.utils.contains('PACKAGECONFIG', 'python', 'python3native', '', d)} + +RDEPENDS_${PN}-ptest += "make ${@bb.utils.contains('PACKAGECONFIG', 'python', 'libgcc python3-core python3-argparse python3-logging python3-shell python3-signal python3-stringold python3-threading python3-unittest ${PN}-python', '', d)}" + +RDEPENDS_${PN}-python += "${@bb.utils.contains('PACKAGECONFIG', 'python', 'python3-core', '', d)}" RDEPENDS_${PN}-ptest_append_libc-glibc = " glibc-gconv-ebcdic-us glibc-gconv-ibm1141" export PYTHON_SITE_PACKAGES="${PYTHON_SITEPACKAGES_DIR}" -PACKAGECONFIG ??= "python \ - ${@bb.utils.contains('DISTRO_FEATURES', 'ipv6', 'ipv6', '', d)} \ -" -PACKAGECONFIG[python] = "--with-python=${PYTHON},--without-python,python" 
-PACKAGECONFIG[ipv6] = "--enable-ipv6,--disable-ipv6," - # WARNING: zlib is require for RPM use EXTRA_OECONF = "--without-debug --without-legacy --with-catalog --without-docbook --with-c14n --without-lzma --with-fexceptions" EXTRA_OECONF_class-native = "--without-legacy --without-docbook --with-c14n --without-lzma --with-zlib" @@ -57,7 +65,7 @@ EXTRA_OECONF_linuxstdbase = "--with-debug --with-legacy --with-docbook --with-c1 python populate_packages_prepend () { # autonamer would call this libxml2-2, but we don't want that - if d.getVar('DEBIAN_NAMES', True): + if d.getVar('DEBIAN_NAMES'): d.setVar('PKG_libxml2', '${MLPREFIX}libxml2') } @@ -75,6 +83,17 @@ do_configure_prepend () { do_install_ptest () { cp -r ${WORKDIR}/xmlconf ${D}${PTEST_PATH} + if [ "${@bb.utils.filter('PACKAGECONFIG', 'python', d)}" ]; then + sed -i -e 's|^\(PYTHON = \).*|\1${USRBINPATH}/${PYTHON_PN}|' \ + ${D}${PTEST_PATH}/python/tests/Makefile + grep -lrZ '#!/usr/bin/python' ${D}${PTEST_PATH}/python | + xargs -0 sed -i -e 's|/usr/bin/python|${USRBINPATH}/${PYTHON_PN}|' + fi +} + +do_install_append_class-native () { + # Docs are not needed in the native case + rm ${D}${datadir}/gtk-doc -rf } BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/build-sysroots.bb b/import-layers/yocto-poky/meta/recipes-core/meta/build-sysroots.bb new file mode 100644 index 000000000..7a712e2f3 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/meta/build-sysroots.bb @@ -0,0 +1,38 @@ +INHIBIT_DEFAULT_DEPS = "1" +LICENSE = "MIT" + +STANDALONE_SYSROOT = "${STAGING_DIR}/${MACHINE}" +STANDALONE_SYSROOT_NATIVE = "${STAGING_DIR}/${BUILD_ARCH}" +PACKAGE_ARCH = "${MACHINE_ARCH}" +EXCLUDE_FROM_WORLD = "1" + +inherit nopackages +deltask fetch +deltask unpack +deltask patch +deltask prepare_recipe_sysroot +deltask populate_lic +deltask configure +deltask compile +deltask install +deltask populate_sysroot + +python do_build_native_sysroot () { + targetsysroot = d.getVar("STANDALONE_SYSROOT") + nativesysroot = d.getVar("STANDALONE_SYSROOT_NATIVE") + staging_populate_sysroot_dir(targetsysroot, nativesysroot, True, d) +} +do_build_native_sysroot[cleandirs] = "${STANDALONE_SYSROOT_NATIVE}" +do_build_native_sysroot[nostamp] = "1" +addtask do_build_native_sysroot before do_build + +python do_build_target_sysroot () { + targetsysroot = d.getVar("STANDALONE_SYSROOT") + nativesysroot = d.getVar("STANDALONE_SYSROOT_NATIVE") + staging_populate_sysroot_dir(targetsysroot, nativesysroot, False, d) +} +do_build_target_sysroot[cleandirs] = "${STANDALONE_SYSROOT}" +do_build_target_sysroot[nostamp] = "1" +addtask do_build_target_sysroot before do_build + +do_clean[cleandirs] += "${STANDALONE_SYSROOT} ${STANDALONE_SYSROOT_NATIVE}" diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/buildtools-tarball.bb b/import-layers/yocto-poky/meta/recipes-core/meta/buildtools-tarball.bb index 5808c95d8..abdc7feeb 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/buildtools-tarball.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/buildtools-tarball.bb @@ -2,8 +2,6 @@ DESCRIPTION = "SDK type target for building a standalone tarball containing pyth tarball can be used to run bitbake builds on systems which don't meet the usual version requirements." 
SUMMARY = "Standalone tarball for running builds on systems with inadequate software" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" TOOLCHAIN_TARGET_TASK ?= "" diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/meta-environment-extsdk.bb b/import-layers/yocto-poky/meta/recipes-core/meta/meta-environment-extsdk.bb index d9e596143..2076b56f2 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/meta-environment-extsdk.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/meta-environment-extsdk.bb @@ -5,8 +5,8 @@ require meta-environment.bb PN = "meta-environment-extsdk-${MACHINE}" create_sdk_files_append() { - local sysroot=${SDKPATH}/${@os.path.relpath(d.getVar('STAGING_DIR_TARGET', True), d.getVar('TOPDIR', True))} - local sdkpathnative=${SDKPATH}/${@os.path.relpath(d.getVar('STAGING_DIR_NATIVE',True), d.getVar('TOPDIR', True))} + local sysroot=${SDKPATH}/tmp/${@os.path.relpath(d.getVar('STAGING_DIR'), d.getVar('TMPDIR'))}/${MACHINE} + local sdkpathnative=${SDKPATH}/tmp/${@os.path.relpath(d.getVar('STAGING_DIR'), d.getVar('TMPDIR'))}/${BUILD_ARCH} toolchain_create_sdk_env_script '' '' $sysroot '' ${bindir_native} ${prefix_native} $sdkpathnative } diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/meta-environment.bb b/import-layers/yocto-poky/meta/recipes-core/meta/meta-environment.bb index d9e045f04..29da121a3 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/meta-environment.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/meta-environment.bb @@ -1,6 +1,4 @@ SUMMARY = "Package of environment files for SDK" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" LICENSE = "MIT" PR = "r8" @@ -23,27 +21,28 @@ inherit cross-canadian do_generate_content[cleandirs] = "${SDK_OUTPUT}" do_generate_content[dirs] = "${SDK_OUTPUT}/${SDKPATH}" +# Need to ensure we have the virtual mappings and site files for all multtilib variants +do_generate_content[depends] = "${@oe.utils.build_depends_string(all_multilib_tune_values(d, 'TOOLCHAIN_NEED_CONFIGSITE_CACHE'), 'do_populate_sysroot')}" python do_generate_content() { # Handle multilibs in the SDK environment, siteconfig, etc files... localdata = bb.data.createCopy(d) # make sure we only use the WORKDIR value from 'd', or it can change - localdata.setVar('WORKDIR', d.getVar('WORKDIR', True)) + localdata.setVar('WORKDIR', d.getVar('WORKDIR')) # make sure we only use the SDKTARGETSYSROOT value from 'd' - localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT', True)) + localdata.setVar('SDKTARGETSYSROOT', d.getVar('SDKTARGETSYSROOT')) localdata.setVar('libdir', d.getVar('target_libdir', False)) # Process DEFAULTTUNE bb.build.exec_func("create_sdk_files", localdata) - variants = d.getVar("MULTILIB_VARIANTS", True) or "" + variants = d.getVar("MULTILIB_VARIANTS") or "" for item in variants.split(): # Load overrides from 'd' to avoid having to reset the value... 
overrides = d.getVar("OVERRIDES", False) + ":virtclass-multilib-" + item localdata.setVar("OVERRIDES", overrides) localdata.setVar("MLPREFIX", item + "-") - bb.data.update_data(localdata) bb.build.exec_func("create_sdk_files", localdata) } addtask generate_content before do_install after do_compile @@ -69,9 +68,9 @@ FILES_${PN}= " \ ${SDKPATH}/* \ " -do_fetch[noexec] = "1" -do_unpack[noexec] = "1" -do_patch[noexec] = "1" -do_configure[noexec] = "1" -do_compile[noexec] = "1" -do_populate_sysroot[noexec] = "1" +deltask do_fetch +deltask do_unpack +deltask do_patch +deltask do_configure +deltask do_compile +deltask do_populate_sysroot diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/meta-extsdk-toolchain.bb b/import-layers/yocto-poky/meta/recipes-core/meta/meta-extsdk-toolchain.bb index 2bd9a5b32..235d6ecc0 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/meta-extsdk-toolchain.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/meta-extsdk-toolchain.bb @@ -1,8 +1,6 @@ SUMMARY = "Extensible SDK toolchain meta-recipe" DESCRIPTION = "Meta-recipe for ensuring the build directory contains all appropriate toolchain packages for using an IDE" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" DEPENDS = "virtual/libc gdb-cross-${TARGET_ARCH} qemu-native qemu-helper-native unfs3-native" @@ -12,16 +10,16 @@ do_populate_sysroot[deptask] = "do_populate_sysroot" # within sstate.bbclass, so if you copy or rename this and expect the same # functionality you'll need to modify that as well. -LOCKED_SIGS_INDIR = "${D}/locked-sigs" +LOCKED_SIGS_INDIR = "${WORKDIR}/locked-sigs" addtask do_locked_sigs after do_populate_sysroot SSTATETASKS += "do_locked_sigs" do_locked_sigs[sstate-inputdirs] = "${LOCKED_SIGS_INDIR}" -do_locked_sigs[sstate-outputdirs] = "${STAGING_DIR_HOST}/locked-sigs" +do_locked_sigs[sstate-outputdirs] = "${STAGING_DIR}/${PACKAGE_ARCH}/${PN}/locked-sigs" python do_locked_sigs() { import oe.copy_buildsystem - outdir = os.path.join(d.getVar('LOCKED_SIGS_INDIR', True)) + outdir = os.path.join(d.getVar('LOCKED_SIGS_INDIR')) bb.utils.mkdirhier(outdir) sigfile = os.path.join(outdir, 'locked-sigs-extsdk-toolchain.inc') oe.copy_buildsystem.generate_locked_sigs(sigfile, d) diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/meta-ide-support.bb b/import-layers/yocto-poky/meta/recipes-core/meta/meta-ide-support.bb index c4ddcfcb4..0692ec8b9 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/meta-ide-support.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/meta-ide-support.bb @@ -1,11 +1,10 @@ SUMMARY = "Integrated Development Environment support" DESCRIPTION = "Meta package for ensuring the build directory contains all appropriate toolchain packages for using an IDE" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" DEPENDS = "virtual/libc gdb-cross-${TARGET_ARCH} qemu-native qemu-helper-native unfs3-native" PR = "r3" +RM_WORK_EXCLUDE += "${PN}" inherit meta toolchain-scripts nopackages diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/meta-toolchain.bb b/import-layers/yocto-poky/meta/recipes-core/meta/meta-toolchain.bb index ba9fd8880..b02b0665e 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/meta-toolchain.bb +++ 
b/import-layers/yocto-poky/meta/recipes-core/meta/meta-toolchain.bb @@ -3,7 +3,4 @@ LICENSE = "MIT" PR = "r7" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" - inherit populate_sdk diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/meta-world-pkgdata.bb b/import-layers/yocto-poky/meta/recipes-core/meta/meta-world-pkgdata.bb index 81c8647fa..02abd8aff 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/meta-world-pkgdata.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/meta-world-pkgdata.bb @@ -19,20 +19,20 @@ do_collect_packagedata[sstate-outputdirs] = "${STAGING_DIR_HOST}/world-pkgdata" python do_collect_packagedata() { import oe.copy_buildsystem - outdir = os.path.join(d.getVar('WORLD_PKGDATADIR', True)) + outdir = os.path.join(d.getVar('WORLD_PKGDATADIR')) bb.utils.mkdirhier(outdir) sigfile = os.path.join(outdir, 'locked-sigs-pkgdata.inc') oe.copy_buildsystem.generate_locked_sigs(sigfile, d) } -do_fetch[noexec] = "1" -do_unpack[noexec] = "1" -do_patch[noexec] = "1" -do_configure[noexec] = "1" -do_compile[noexec] = "1" -do_install[noexec] = "1" +deltask do_fetch +deltask do_unpack +deltask do_patch +deltask do_configure +deltask do_compile +deltask do_install -do_configure[deptask] = "" +do_prepare_recipe_sysroot[deptask] = "" WORLD_PKGDATA_EXCLUDE ?= "" diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb b/import-layers/yocto-poky/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb index 2b9611160..0b58a027f 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/nativesdk-buildtools-perl-dummy.bb @@ -1,6 +1,5 @@ SUMMARY = "Dummy package which ensures perl is excluded from buildtools" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COMMON_LICENSE_DIR}/MIT;md5=0835ade698e0bcf8506ecda2f7b4f302" inherit allarch diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/package-index.bb b/import-layers/yocto-poky/meta/recipes-core/meta/package-index.bb index 3c46b4997..fe022ffeb 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/package-index.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/package-index.bb @@ -4,18 +4,18 @@ LICENSE = "MIT" INHIBIT_DEFAULT_DEPS = "1" PACKAGES = "" -do_fetch[noexec] = "1" -do_unpack[noexec] = "1" -do_patch[noexec] = "1" -do_configure[noexec] = "1" -do_compile[noexec] = "1" -do_install[noexec] = "1" -do_package[noexec] = "1" -do_packagedata[noexec] = "1" -do_package_write_ipk[noexec] = "1" -do_package_write_rpm[noexec] = "1" -do_package_write_deb[noexec] = "1" -do_populate_sysroot[noexec] = "1" +deltask do_fetch +deltask do_unpack +deltask do_patch +deltask do_configure +deltask do_compile +deltask do_install +deltask do_package +deltask do_packagedata +deltask do_package_write_ipk +deltask do_package_write_rpm +deltask do_package_write_deb +deltask do_populate_sysroot do_package_index[nostamp] = "1" do_package_index[depends] += "${PACKAGEINDEXDEPS}" diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/signing-keys.bb b/import-layers/yocto-poky/meta/recipes-core/meta/signing-keys.bb index 37790373a..aaa01d0c3 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/signing-keys.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/signing-keys.bb @@ -3,8 +3,6 @@ SUMMARY = "Makes public keys of the signing keys available" LICENSE = "MIT" 
-LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" inherit allarch deploy @@ -23,23 +21,23 @@ FILES_${PN}-packagefeed = "${sysconfdir}/pki/packagefeed-gpg" python do_get_public_keys () { from oe.gpg_sign import get_signer - if d.getVar("RPM_SIGN_PACKAGES", True): + if d.getVar("RPM_SIGN_PACKAGES"): # Export public key of the rpm signing key - signer = get_signer(d, d.getVar('RPM_GPG_BACKEND', True)) + signer = get_signer(d, d.getVar('RPM_GPG_BACKEND')) signer.export_pubkey(os.path.join(d.expand('${B}'), 'rpm-key'), - d.getVar('RPM_GPG_NAME', True)) + d.getVar('RPM_GPG_NAME')) - if d.getVar("IPK_SIGN_PACKAGES", True): + if d.getVar("IPK_SIGN_PACKAGES"): # Export public key of the ipk signing key - signer = get_signer(d, d.getVar('IPK_GPG_BACKEND', True)) + signer = get_signer(d, d.getVar('IPK_GPG_BACKEND')) signer.export_pubkey(os.path.join(d.expand('${B}'), 'ipk-key'), - d.getVar('IPK_GPG_NAME', True)) + d.getVar('IPK_GPG_NAME')) - if d.getVar('PACKAGE_FEED_SIGN', True) == '1': + if d.getVar('PACKAGE_FEED_SIGN') == '1': # Export public key of the feed signing key - signer = get_signer(d, d.getVar('PACKAGE_FEED_GPG_BACKEND', True)) + signer = get_signer(d, d.getVar('PACKAGE_FEED_GPG_BACKEND')) signer.export_pubkey(os.path.join(d.expand('${B}'), 'pf-key'), - d.getVar('PACKAGE_FEED_GPG_NAME', True)) + d.getVar('PACKAGE_FEED_GPG_NAME')) } do_get_public_keys[cleandirs] = "${B}" addtask get_public_keys before do_install diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/testexport-tarball.bb b/import-layers/yocto-poky/meta/recipes-core/meta/testexport-tarball.bb index d9861c4da..72f5531c5 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/testexport-tarball.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/testexport-tarball.bb @@ -3,8 +3,6 @@ DESCRIPTION = "SDK type target for standalone tarball containing packages define This recipe is almost the same as buildtools-tarball" SUMMARY = "Standalone tarball for test systems with missing software" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" TEST_EXPORT_SDK_PACKAGES ??= "" diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/uninative-tarball.bb b/import-layers/yocto-poky/meta/recipes-core/meta/uninative-tarball.bb index e203a8322..f3fc1ebe6 100644 --- a/import-layers/yocto-poky/meta/recipes-core/meta/uninative-tarball.bb +++ b/import-layers/yocto-poky/meta/recipes-core/meta/uninative-tarball.bb @@ -1,16 +1,17 @@ SUMMARY = "libc and patchelf tarball for use with uninative.bbclass" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" TOOLCHAIN_TARGET_TASK = "" # ibm850 - mcopy from mtools # iso8859-1 - guile +# utf-16, cp1252 - binutils-windres TOOLCHAIN_HOST_TASK = "\ nativesdk-glibc \ nativesdk-glibc-gconv-ibm850 \ nativesdk-glibc-gconv-iso8859-1 \ + nativesdk-glibc-gconv-utf-16 \ + nativesdk-glibc-gconv-cp1252 \ nativesdk-patchelf \ " diff --git a/import-layers/yocto-poky/meta/recipes-core/meta/wic-tools.bb b/import-layers/yocto-poky/meta/recipes-core/meta/wic-tools.bb new file mode 100644 index 000000000..cd494ec23 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/meta/wic-tools.bb @@ -0,0 +1,32 @@ +SUMMARY = "A meta recipe to build 
native tools used by wic." + +LICENSE = "MIT" + +DEPENDS = "\ + parted-native syslinux-native gptfdisk-native dosfstools-native \ + mtools-native bmap-tools-native grub-efi-native cdrtools-native \ + btrfs-tools-native squashfs-tools-native \ + " +DEPENDS_append_x86 = " syslinux grub-efi systemd-boot" +DEPENDS_append_x86-64 = " syslinux grub-efi systemd-boot" + +INHIBIT_DEFAULT_DEPS = "1" +inherit nopackages + +# The sysroot of wic-tools is needed for wic, but if rm_work is enabled, it will +# be removed before wic has a chance to use it, hence the exclusion below. +RM_WORK_EXCLUDE += "${PN}" + +python do_build_sysroot () { + bb.build.exec_func("extend_recipe_sysroot", d) + + # Write environment variables used by wic + # to tmp/sysroots//imgdata/wictools.env + outdir = os.path.join(d.getVar('STAGING_DIR'), d.getVar('MACHINE'), 'imgdata') + bb.utils.mkdirhier(outdir) + with open(os.path.join(outdir, "wic-tools.env"), 'w') as envf: + for var in ('RECIPE_SYSROOT_NATIVE', 'STAGING_DATADIR', 'STAGING_LIBDIR'): + envf.write('%s="%s"\n' % (var, d.getVar(var).strip())) + +} +addtask do_build_sysroot after do_prepare_recipe_sysroot before do_build diff --git a/import-layers/yocto-poky/meta/recipes-core/musl/files/0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch b/import-layers/yocto-poky/meta/recipes-core/musl/files/0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch index 5490b1cba..462d338b9 100644 --- a/import-layers/yocto-poky/meta/recipes-core/musl/files/0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch +++ b/import-layers/yocto-poky/meta/recipes-core/musl/files/0001-Make-dynamic-linker-a-relative-symlink-to-libc.patch @@ -1,6 +1,6 @@ -From 94c0b97b62125d8bbc92dce0694e387d5b2ad181 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Sun, 10 Jan 2016 12:14:02 -0800 +From 0ec74744a4cba7c5fdfaa2685995119a4fca0260 Mon Sep 17 00:00:00 2001 +From: Amarnath Valluri +Date: Wed, 18 Jan 2017 16:14:37 +0200 Subject: [PATCH] Make dynamic linker a relative symlink to libc absolute symlink into $(libdir) fails to load in a cross build @@ -9,26 +9,68 @@ applications, which cross build systems often do, since not everything can be computed during cross builds, qemu in usermode often comes to aid in such situations to feed into cross builds. 
+V2: + Make use of 'ln -r' to create relative symlinks, as most fo the distros + shipping coreutils 8.16+ + Signed-off-by: Khem Raj +Signed-off-by: Amarnath Valluri --- Upstream-Status: Pending - - Makefile | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) +--- + Makefile | 2 +- + tools/install.sh | 8 +++++--- + 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile -index b2226fa..0d71f7f 100644 +index 8246b78..d1dbe39 100644 --- a/Makefile +++ b/Makefile -@@ -189,7 +189,7 @@ $(DESTDIR)$(includedir)/%: include/% +@@ -215,7 +215,7 @@ $(DESTDIR)$(includedir)/%: $(srcdir)/include/% $(INSTALL) -D -m 644 $< $@ $(DESTDIR)$(LDSO_PATHNAME): $(DESTDIR)$(libdir)/libc.so - $(INSTALL) -D -l $(libdir)/libc.so $@ || true -+ $(INSTALL) -D -l ..$(libdir)/libc.so $@ || true ++ $(INSTALL) -D -r $(DESTDIR)$(libdir)/libc.so $@ || true install-libs: $(ALL_LIBS:lib/%=$(DESTDIR)$(libdir)/%) $(if $(SHARED_LIBS),$(DESTDIR)$(LDSO_PATHNAME),) +diff --git a/tools/install.sh b/tools/install.sh +index d913b60..b6a7f79 100755 +--- a/tools/install.sh ++++ b/tools/install.sh +@@ -6,18 +6,20 @@ + # + + usage() { +-printf "usage: %s [-D] [-l] [-m mode] src dest\n" "$0" 1>&2 ++printf "usage: %s [-D] [-l] [-r] [-m mode] src dest\n" "$0" 1>&2 + exit 1 + } + + mkdirp= + symlink= ++symlinkflags="-s" + mode=755 + +-while getopts Dlm: name ; do ++while getopts Dlrm: name ; do + case "$name" in + D) mkdirp=yes ;; + l) symlink=yes ;; ++r) symlink=yes; symlinkflags="$symlinkflags -r" ;; + m) mode=$OPTARG ;; + ?) usage ;; + esac +@@ -48,7 +50,7 @@ trap 'rm -f "$tmp"' EXIT INT QUIT TERM HUP + umask 077 + + if test "$symlink" ; then +-ln -s "$1" "$tmp" ++ln $symlinkflags "$1" "$tmp" + else + cat < "$1" > "$tmp" + chmod "$mode" "$tmp" -- -2.7.0 +2.7.4 diff --git a/import-layers/yocto-poky/meta/recipes-core/musl/musl.inc b/import-layers/yocto-poky/meta/recipes-core/musl/musl.inc index 7ed931c6d..56c9d7fe1 100644 --- a/import-layers/yocto-poky/meta/recipes-core/musl/musl.inc +++ b/import-layers/yocto-poky/meta/recipes-core/musl/musl.inc @@ -12,7 +12,6 @@ SECTION = "libs" LIC_FILES_CHKSUM = "file://COPYRIGHT;md5=7928b7ad32ceda04932478e330e52f7f" PATH_prepend = "${STAGING_BINDIR_TOOLCHAIN}.${STAGINGCC}:" -TOOLCHAIN_OPTIONS = " --sysroot=${STAGING_DIR_TCBOOTSTRAP}" INHIBIT_DEFAULT_DEPS = "1" diff --git a/import-layers/yocto-poky/meta/recipes-core/musl/musl_git.bb b/import-layers/yocto-poky/meta/recipes-core/musl/musl_git.bb index c07101b74..a88bc4d42 100644 --- a/import-layers/yocto-poky/meta/recipes-core/musl/musl_git.bb +++ b/import-layers/yocto-poky/meta/recipes-core/musl/musl_git.bb @@ -3,9 +3,9 @@ require musl.inc -SRCREV = "39494a273eaa6b714e0fa0c59ce7a1f5fbc80a1e" +SRCREV = "54807d47acecab778498ced88ce8f62bfa16e379" -PV = "1.1.15+git${SRCPV}" +PV = "1.1.16+git${SRCPV}" # mirror is at git://github.com/kraj/musl.git @@ -49,7 +49,7 @@ do_install() { oe_runmake install DESTDIR='${D}' install -d ${D}${bindir} - ln -s ../../${libdir}/libc.so ${D}${bindir}/ldd + lnr ${D}${libdir}/libc.so ${D}${bindir}/ldd for l in crypt dl m pthread resolv rt util xnet do ln -s libc.so ${D}${libdir}/lib$l.so diff --git a/import-layers/yocto-poky/meta/recipes-core/ncurses/files/configure-reproducible.patch b/import-layers/yocto-poky/meta/recipes-core/ncurses/files/configure-reproducible.patch new file mode 100644 index 000000000..54a8bdc6b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ncurses/files/configure-reproducible.patch @@ -0,0 +1,20 @@ +"configure" enforces -U for ar flags, breaking deterministic 
builds. +The flag was added to fix some vaguely specified "recent POSIX binutil +build problems" in 2015. + +Upstream-Status: Pending +Signed-off-by: Juro Bystricky + +diff --git a/configure b/configure +index 7f31208..aa80911 100755 +--- a/configure ++++ b/configure +@@ -4428,7 +4428,7 @@ if test "${cf_cv_ar_flags+set}" = set; then + else + + cf_cv_ar_flags=unknown +- for cf_ar_flags in -curvU -curv curv -crv crv -cqv cqv -rv rv ++ for cf_ar_flags in -curv curv -crv crv -cqv cqv -rv rv + do + + # check if $ARFLAGS already contains this choice diff --git a/import-layers/yocto-poky/meta/recipes-core/ncurses/files/fix-cflags-mangle.patch b/import-layers/yocto-poky/meta/recipes-core/ncurses/files/fix-cflags-mangle.patch new file mode 100644 index 000000000..e9447c5b8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ncurses/files/fix-cflags-mangle.patch @@ -0,0 +1,18 @@ +configure has a piece of logic to detect users "abusing" CC to hold compiler +flags (which we do). It also has logic to "correct" this by moving the flags +from CC to CFLAGS, but the sed only handles a single argument in CC. + +Replace the sed with awk to filter out all words that start with a hyphen. + +Upstream-Status: Pending +Signed-off-by: Ross Burton + +diff --git a/configure b/configure +index 7f31208..1a29cfc 100755 +--- a/configure ++++ b/configure +@@ -2191,2 +2191,2 @@ echo "$as_me: WARNING: your environment misuses the CC variable to hold CFLAGS/C +- cf_flags=`echo "$CC" | sed -e 's/^.*[ ]\(-[^ ]\)/\1/'` +- CC=`echo "$CC " | sed -e 's/[ ]-[^ ].*$//' -e 's/[ ]*$//'` ++ cf_flags=`echo "$CC" | awk 'BEGIN{ORS=" ";RS=" "} /^-.+/ {print $1}'` ++ CC=`echo "$CC " | awk 'BEGIN{ORS=" ";RS=" "} /^[^-].+/ {print $1}'` diff --git a/import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses.inc b/import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses.inc index ff0117b82..1f21cd413 100644 --- a/import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses.inc +++ b/import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses.inc @@ -7,7 +7,8 @@ SECTION = "libs" DEPENDS = "ncurses-native" DEPENDS_class-native = "" -BINCONFIG = "${bindir}/ncurses5-config ${bindir}/ncursesw5-config" +BINCONFIG = "${bindir}/ncurses5-config ${bindir}/ncursesw5-config \ + ${bindir}/ncurses6-config ${bindir}/ncursesw6-config" inherit autotools binconfig-disabled multilib_header pkgconfig @@ -17,6 +18,8 @@ SRC_URI = "git://anonscm.debian.org/collab-maint/ncurses.git" EXTRA_AUTORECONF = "-I m4" CONFIG_SITE =+ "${WORKDIR}/config.cache" +EXTRASITECONFIG = "CFLAGS='${CFLAGS} -I${SYSROOT_DESTDIR}${includedir}'" + # Whether to enable separate widec libraries; must be 'true' or 'false' # # TODO: remove this variable when widec is supported in every setup? 
@@ -65,7 +68,6 @@ ncurses_configure() { cd $1 shift oe_runconf \ - --disable-static \ --without-debug \ --without-ada \ --without-gpm \ @@ -278,6 +280,8 @@ FILES_${PN} = "\ ${bindir}/tset \ ${bindir}/ncurses5-config \ ${bindir}/ncursesw5-config \ + ${bindir}/ncurses6-config \ + ${bindir}/ncursesw6-config \ ${datadir}/tabset \ " diff --git a/import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses_6.0+20160625.bb b/import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses_6.0+20160625.bb deleted file mode 100644 index 6514613fe..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses_6.0+20160625.bb +++ /dev/null @@ -1,10 +0,0 @@ -require ncurses.inc - -SRC_URI += "file://tic-hang.patch \ - file://config.cache \ -" -# commit id corresponds to the revision in package version -SRCREV = "63dd558cb8e888d6fab5f00bbf7842736a2356b9" -S = "${WORKDIR}/git" -EXTRA_OECONF += "--with-abi-version=5" -UPSTREAM_CHECK_GITTAGREGEX = "(?P\d+(\.\d+)+(\+\d+)*)" diff --git a/import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses_6.0+20161126.bb b/import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses_6.0+20161126.bb new file mode 100644 index 000000000..ace310800 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ncurses/ncurses_6.0+20161126.bb @@ -0,0 +1,12 @@ +require ncurses.inc + +SRC_URI += "file://tic-hang.patch \ + file://fix-cflags-mangle.patch \ + file://config.cache \ + file://configure-reproducible.patch \ +" +# commit id corresponds to the revision in package version +SRCREV = "3db0bd19cb50e3d9b4f2cf15b7a102fe11302068" +S = "${WORKDIR}/git" +EXTRA_OECONF += "--with-abi-version=5" +UPSTREAM_CHECK_GITTAGREGEX = "(?P\d+(\.\d+)+(\+\d+)*)" diff --git a/import-layers/yocto-poky/meta/recipes-core/netbase/netbase/netbase-add-rpcbind-as-an-alias-to-sunrpc.patch b/import-layers/yocto-poky/meta/recipes-core/netbase/netbase/netbase-add-rpcbind-as-an-alias-to-sunrpc.patch index 35ce21e36..56c8d5b5c 100644 --- a/import-layers/yocto-poky/meta/recipes-core/netbase/netbase/netbase-add-rpcbind-as-an-alias-to-sunrpc.patch +++ b/import-layers/yocto-poky/meta/recipes-core/netbase/netbase/netbase-add-rpcbind-as-an-alias-to-sunrpc.patch @@ -1,4 +1,7 @@ -netbase: add rpcbind as an alias to sunrpc +From 76989205a1411f16d7ab09ff9d279539a73dc259 Mon Sep 17 00:00:00 2001 +From: "Maxin B. John" +Date: Thu, 12 Jan 2017 16:50:58 +0200 +Subject: [PATCH] netbase: add rpcbind as an alias to sunrpc the patch comes from: https://bugs.archlinux.org/task/20273 @@ -6,9 +9,10 @@ https://bugs.archlinux.org/task/20273 Upstream-Status: Pending Signed-off-by: Li Wang +Signed-off-by: Maxin B. 
John --- - etc-rpc | 2 +- - etc-services | 4 ++-- + etc-rpc | 2 +- + etc-services | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/etc-rpc b/etc-rpc @@ -25,20 +29,20 @@ index 1b30625..9a9a81a 100644 rusersd 100002 rusers nfs 100003 nfsprog diff --git a/etc-services b/etc-services -index 9d64a52..a19f7c8 100644 +index e3202ec..a039d7e 100644 --- a/etc-services +++ b/etc-services -@@ -72,8 +72,8 @@ pop2 109/tcp postoffice pop-2 # POP version 2 - pop2 109/udp pop-2 +@@ -64,8 +64,8 @@ csnet-ns 105/udp cso-ns + rtelnet 107/tcp # Remote Telnet + rtelnet 107/udp pop3 110/tcp pop-3 # POP version 3 - pop3 110/udp pop-3 -sunrpc 111/tcp portmapper # RPC 4.0 portmapper -sunrpc 111/udp portmapper -+sunrpc 111/tcp portmapper rpcbind # RPC 4.0 portmapper ++sunrpc 111/tcp portmapper rpcbind # RPC 4.0 portmapper +sunrpc 111/udp portmapper rpcbind auth 113/tcp authentication tap ident sftp 115/tcp - uucp-path 117/tcp + nntp 119/tcp readnews untp # USENET News Transfer Protocol -- -1.7.9.5 +2.4.0 diff --git a/import-layers/yocto-poky/meta/recipes-core/netbase/netbase_5.3.bb b/import-layers/yocto-poky/meta/recipes-core/netbase/netbase_5.3.bb deleted file mode 100644 index 543596a3d..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/netbase/netbase_5.3.bb +++ /dev/null @@ -1,25 +0,0 @@ -SUMMARY = "Basic TCP/IP networking support" -DESCRIPTION = "This package provides the necessary infrastructure for basic TCP/IP based networking" -HOMEPAGE = "http://packages.debian.org/netbase" -SECTION = "base" -LICENSE = "GPLv2" -LIC_FILES_CHKSUM = "file://debian/copyright;md5=3dd6192d306f582dee7687da3d8748ab" -PE = "1" - -SRC_URI = "http://snapshot.debian.org/archive/debian/20160728T043443Z/pool/main/n/${BPN}/${BPN}_${PV}.tar.xz \ - file://netbase-add-rpcbind-as-an-alias-to-sunrpc.patch \ - file://hosts" - -SRC_URI[md5sum] = "2637a27fd3de02a278d2b5be7e6558c1" -SRC_URI[sha256sum] = "81f6c69795044d62b8ad959cf9daf049d0545fd466c52860ad3f933b1e97b88b" - -UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/n/netbase/" -do_install () { - install -d ${D}/${mandir}/man8 ${D}${sysconfdir} - install -m 0644 ${WORKDIR}/hosts ${D}${sysconfdir}/hosts - install -m 0644 etc-rpc ${D}${sysconfdir}/rpc - install -m 0644 etc-protocols ${D}${sysconfdir}/protocols - install -m 0644 etc-services ${D}${sysconfdir}/services -} - -CONFFILES_${PN} = "${sysconfdir}/hosts" diff --git a/import-layers/yocto-poky/meta/recipes-core/netbase/netbase_5.4.bb b/import-layers/yocto-poky/meta/recipes-core/netbase/netbase_5.4.bb new file mode 100644 index 000000000..5ab0c58f8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/netbase/netbase_5.4.bb @@ -0,0 +1,25 @@ +SUMMARY = "Basic TCP/IP networking support" +DESCRIPTION = "This package provides the necessary infrastructure for basic TCP/IP based networking" +HOMEPAGE = "http://packages.debian.org/netbase" +SECTION = "base" +LICENSE = "GPLv2" +LIC_FILES_CHKSUM = "file://debian/copyright;md5=3dd6192d306f582dee7687da3d8748ab" +PE = "1" + +SRC_URI = "http://snapshot.debian.org/archive/debian/20170112T093812Z/pool/main/n/${BPN}/${BPN}_${PV}.tar.xz \ + file://netbase-add-rpcbind-as-an-alias-to-sunrpc.patch \ + file://hosts" + +SRC_URI[md5sum] = "117cb70c55ef3c1c002f127812b114c1" +SRC_URI[sha256sum] = "66ff73d2d162e2d49db43988d8b8cd328cf7fffca042db73397f14c71825e80d" + +UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/n/netbase/" +do_install () { + install -d ${D}/${mandir}/man8 ${D}${sysconfdir} + install -m 0644 ${WORKDIR}/hosts ${D}${sysconfdir}/hosts + install -m 0644 etc-rpc 
${D}${sysconfdir}/rpc + install -m 0644 etc-protocols ${D}${sysconfdir}/protocols + install -m 0644 etc-services ${D}${sysconfdir}/services +} + +CONFFILES_${PN} = "${sysconfdir}/hosts" diff --git a/import-layers/yocto-poky/meta/recipes-core/os-release/os-release.bb b/import-layers/yocto-poky/meta/recipes-core/os-release/os-release.bb index f519addd8..f98870475 100644 --- a/import-layers/yocto-poky/meta/recipes-core/os-release/os-release.bb +++ b/import-layers/yocto-poky/meta/recipes-core/os-release/os-release.bb @@ -3,7 +3,6 @@ inherit allarch SUMMARY = "Operating system identification" DESCRIPTION = "The /etc/os-release file contains operating system identification data." LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" INHIBIT_DEFAULT_DEPS = "1" do_fetch[noexec] = "1" @@ -33,8 +32,8 @@ def sanitise_version(ver): python do_compile () { import shutil with open(d.expand('${B}/os-release'), 'w') as f: - for field in d.getVar('OS_RELEASE_FIELDS', True).split(): - value = d.getVar(field, True) + for field in d.getVar('OS_RELEASE_FIELDS').split(): + value = d.getVar(field) if value and field == 'VERSION_ID': value = sanitise_version(value) if value: diff --git a/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf-shell-image.bb b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf-shell-image.bb new file mode 100644 index 000000000..029547b94 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf-shell-image.bb @@ -0,0 +1,17 @@ +DESCRIPTION = "boot image with UEFI shell and tools" + +# For this image recipe, only the wic format with a +# single vfat partition makes sense. +IMAGE_FSTYPES_forcevariable = 'wic' + +WKS_FILE = "ovmf/ovmf-shell-image.wks" +inherit image + +# We want a minimal image with just ovmf-shell-efi unpacked in it. We +# avoid installing unnecessary stuff as much as possible, but some +# things still get through and need to be removed. +PACKAGE_INSTALL = "ovmf-shell-efi" +LINGUAS_INSTALL = "" +do_image () { + rm -rf `ls -d ${IMAGE_ROOTFS}/* | grep -v efi` +} diff --git a/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0001-MdeModulePkg-UefiHiiLib-Fix-incorrect-comparison-exp.patch b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0001-MdeModulePkg-UefiHiiLib-Fix-incorrect-comparison-exp.patch new file mode 100644 index 000000000..fcd7a4690 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0001-MdeModulePkg-UefiHiiLib-Fix-incorrect-comparison-exp.patch @@ -0,0 +1,39 @@ +From 73692710d50da1f421b0e6ddff784ca3135389b3 Mon Sep 17 00:00:00 2001 +From: Dandan Bi +Date: Sat, 1 Apr 2017 10:31:14 +0800 +Subject: [PATCH] MdeModulePkg/UefiHiiLib:Fix incorrect comparison expression + +Fix the incorrect comparison between pointer and constant zero character. + +https://bugzilla.tianocore.org/show_bug.cgi?id=416 + +V2: The pointer StringPtr points to a string returned +by ExtractConfig/ExportConfig, if it is NULL, function +InternalHiiIfrValueAction will return FALSE. So in +current usage model, the StringPtr can not be NULL before +using it, so we can add ASSERT here. 
+ +Cc: Eric Dong +Cc: Liming Gao +Contributed-under: TianoCore Contribution Agreement 1.0 +Signed-off-by: Dandan Bi +Reviewed-by: Eric Dong +--- +Upstream-Status: Backport + + MdeModulePkg/Library/UefiHiiLib/HiiLib.c | 5 +++-- + 1 file changed, 3 insertions(+), 2 deletions(-) + +Index: git/MdeModulePkg/Library/UefiHiiLib/HiiLib.c +=================================================================== +--- git.orig/MdeModulePkg/Library/UefiHiiLib/HiiLib.c ++++ git/MdeModulePkg/Library/UefiHiiLib/HiiLib.c +@@ -2180,6 +2180,8 @@ InternalHiiIfrValueAction ( + } + + StringPtr = ConfigAltResp; ++ ++ ASSERT (StringPtr != NULL); + + while (StringPtr != L'\0') { + // diff --git a/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0002-ovmf-update-path-to-native-BaseTools.patch b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0002-ovmf-update-path-to-native-BaseTools.patch new file mode 100644 index 000000000..94029a562 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0002-ovmf-update-path-to-native-BaseTools.patch @@ -0,0 +1,32 @@ +From 9e632e3f9edd09632cc877dff6ea57608f979aab Mon Sep 17 00:00:00 2001 +From: Ricardo Neri +Date: Thu, 9 Jun 2016 02:23:01 -0700 +Subject: [PATCH] ovmf: update path to native BaseTools + +BaseTools is a set of utilities to build EDK-based firmware. These utilities +are used during the build process. Thus, they need to be built natively. +When cross-compiling, we need to provide a path to the location of these +tools. The BBAKE_EDK_TOOLS_PATH string is used as a pattern to be replaced +with the appropriate location before building. + +Signed-off-by: Ricardo Neri +--- + OvmfPkg/build.sh | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/OvmfPkg/build.sh b/OvmfPkg/build.sh +index eb5eb73..9058fca 100755 +--- a/OvmfPkg/build.sh ++++ b/OvmfPkg/build.sh +@@ -30,7 +30,7 @@ then + # this assumes svn pulls have the same root dir + # export EDK_TOOLS_PATH=`pwd`/../BaseTools + # This version is for the tools source in edk2 +- export EDK_TOOLS_PATH=`pwd`/BaseTools ++ export EDK_TOOLS_PATH=BBAKE_EDK_TOOLS_PATH/BaseTools + echo $EDK_TOOLS_PATH + source edksetup.sh BaseTools + else +-- +2.8.1 + diff --git a/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0003-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0003-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch new file mode 100644 index 000000000..0fdc278ce --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0003-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch @@ -0,0 +1,39 @@ +From 2320650c6d381b914fe91b2dedaa5870279a8bcf Mon Sep 17 00:00:00 2001 +From: Ricardo Neri +Date: Sun, 27 Nov 2016 18:42:55 -0800 +Subject: [PATCH] BaseTools: makefile: adjust to build in under bitbake + +Prepend the build flags with those of bitbake. This is to build +using the bitbake native sysroot include and library directories. 
+ +Signed-off-by: Ricardo Neri +--- + BaseTools/Source/C/Makefiles/header.makefile | 8 ++++---- + 1 file changed, 4 insertions(+), 4 deletions(-) + +diff --git a/BaseTools/Source/C/Makefiles/header.makefile b/BaseTools/Source/C/Makefiles/header.makefile +index 821d114..fe0f08b 100644 +--- a/BaseTools/Source/C/Makefiles/header.makefile ++++ b/BaseTools/Source/C/Makefiles/header.makefile +@@ -44,14 +44,14 @@ ARCH_INCLUDE = -I $(MAKEROOT)/Include/AArch64/ + endif + + INCLUDE = $(TOOL_INCLUDE) -I $(MAKEROOT) -I $(MAKEROOT)/Include/Common -I $(MAKEROOT)/Include/ -I $(MAKEROOT)/Include/IndustryStandard -I $(MAKEROOT)/Common/ -I .. -I . $(ARCH_INCLUDE) +-BUILD_CPPFLAGS = $(INCLUDE) -O2 ++BUILD_CPPFLAGS := $(BUILD_CPPFLAGS) $(INCLUDE) -O2 + ifeq ($(DARWIN),Darwin) + # assume clang or clang compatible flags on OS X +-BUILD_CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-deprecated-declarations -Wno-self-assign -Wno-unused-result -nostdlib -c -g ++BUILD_CFLAGS := $(BUILD_CFLAGS) -MD -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-deprecated-declarations -Wno-self-assign -Wno-unused-result -nostdlib -c -g + else +-BUILD_CFLAGS = -MD -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-deprecated-declarations -Wno-unused-result -nostdlib -c -g ++BUILD_CFLAGS := $(BUILD_CFLAGS) -MD -fshort-wchar -fno-strict-aliasing -Wall -Werror -Wno-deprecated-declarations -Wno-unused-result -nostdlib -c -g + endif +-BUILD_LFLAGS = ++BUILD_LFLAGS := $(LDFLAGS) + BUILD_CXXFLAGS = + + ifeq ($(ARCH), IA32) +-- +2.9.3 + diff --git a/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0007-OvmfPkg-EnrollDefaultKeys-application-for-enrolling-.patch b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0007-OvmfPkg-EnrollDefaultKeys-application-for-enrolling-.patch new file mode 100644 index 000000000..3aa6cc4ac --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/0007-OvmfPkg-EnrollDefaultKeys-application-for-enrolling-.patch @@ -0,0 +1,1124 @@ +From: Laszlo Ersek +Date: Mon, 6 Jul 2015 20:22:02 +0200 +Subject: [PATCH] OvmfPkg: EnrollDefaultKeys: application for enrolling default + keys + +(A port of the patch +to Gerd's public RPMs.) + +This application is meant to be invoked by the management layer, after +booting the UEFI shell and getting a shell prompt on the serial console. +The app enrolls a number of certificates (see below), and then reports +status to the serial console as well. The expected output is "info: +success": + +> Shell> EnrollDefaultKeys.efi +> info: SetupMode=1 SecureBoot=0 SecureBootEnable=0 CustomMode=0 VendorKeys=1 +> info: SetupMode=0 SecureBoot=1 SecureBootEnable=1 CustomMode=0 VendorKeys=0 +> info: success +> Shell> + +In case of success, the management layer can force off or reboot the VM +(for example with the "reset -s" or "reset -c" UEFI shell commands, +respectively), and start the guest installation with SecureBoot enabled. + +PK: +- A unique, static, ad-hoc certificate whose private half has been + destroyed (more precisely, never saved) and is therefore unusable for + signing. (The command for creating this certificate is saved in the + source code.) + +KEK: +- same ad-hoc certificate as used for the PK, +- "Microsoft Corporation KEK CA 2011" -- the dbx data in Fedora's dbxtool + package is signed (indirectly, through a chain) with this; enrolling + such a KEK should allow guests to install those updates. 
+ +DB: +- "Microsoft Windows Production PCA 2011" -- to load Windows 8 and Windows + Server 2012 R2, +- "Microsoft Corporation UEFI CA 2011" -- to load Linux and signed PCI + oproms. + +Contributed-under: TianoCore Contribution Agreement 1.0 +Signed-off-by: Laszlo Ersek +Upstream-Status: Inappropriate [not author] +Signed-off-by: Patrick Ohly +--- + OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.c | 960 ++++++++++++++++++++++++ + OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.inf | 51 ++ + OvmfPkg/OvmfPkgIa32.dsc | 4 + + OvmfPkg/OvmfPkgIa32X64.dsc | 4 + + OvmfPkg/OvmfPkgX64.dsc | 4 + + 5 files changed, 1023 insertions(+) + create mode 100644 OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.c + create mode 100644 OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.inf + +diff --git a/OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.c b/OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.c +new file mode 100644 +index 0000000..081212b +--- /dev/null ++++ b/OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.c +@@ -0,0 +1,960 @@ ++/** @file ++ Enroll default PK, KEK, DB. ++ ++ Copyright (C) 2014, Red Hat, Inc. ++ ++ This program and the accompanying materials are licensed and made available ++ under the terms and conditions of the BSD License which accompanies this ++ distribution. The full text of the license may be found at ++ http://opensource.org/licenses/bsd-license. ++ ++ THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, WITHOUT ++ WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. ++**/ ++#include // gEfiCustomModeEnableGuid ++#include // EFI_SETUP_MODE_NAME ++#include // EFI_IMAGE_SECURITY_DATABASE ++#include // CopyGuid() ++#include // ASSERT() ++#include // FreePool() ++#include // ShellAppMain() ++#include // AsciiPrint() ++#include // gRT ++ ++// ++// The example self-signed certificate below, which we'll use for both Platform ++// Key, and first Key Exchange Key, has been generated with the following ++// non-interactive openssl command. The passphrase is read from /dev/urandom, ++// and not saved, and the private key is written to /dev/null. In other words, ++// we can't sign anything else against this certificate, which is our purpose. 
++// ++/* ++ openssl req \ ++ -passout file:<(head -c 16 /dev/urandom) \ ++ -x509 \ ++ -newkey rsa:2048 \ ++ -keyout /dev/null \ ++ -outform DER \ ++ -subj $( ++ printf /C=US ++ printf /ST=TestStateOrProvince ++ printf /L=TestLocality ++ printf /O=TestOrganization ++ printf /OU=TestOrganizationalUnit ++ printf /CN=TestCommonName ++ printf /emailAddress=test@example.com ++ ) \ ++ 2>/dev/null \ ++ | xxd -i ++*/ ++STATIC CONST UINT8 ExampleCert[] = { ++ 0x30, 0x82, 0x04, 0x45, 0x30, 0x82, 0x03, 0x2d, 0xa0, 0x03, 0x02, 0x01, 0x02, ++ 0x02, 0x09, 0x00, 0xcf, 0x9f, 0x51, 0xa3, 0x07, 0xdb, 0x54, 0xa1, 0x30, 0x0d, ++ 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x0b, 0x05, 0x00, ++ 0x30, 0x81, 0xb8, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, ++ 0x02, 0x55, 0x53, 0x31, 0x1c, 0x30, 0x1a, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0c, ++ 0x13, 0x54, 0x65, 0x73, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x4f, 0x72, 0x50, ++ 0x72, 0x6f, 0x76, 0x69, 0x6e, 0x63, 0x65, 0x31, 0x15, 0x30, 0x13, 0x06, 0x03, ++ 0x55, 0x04, 0x07, 0x0c, 0x0c, 0x54, 0x65, 0x73, 0x74, 0x4c, 0x6f, 0x63, 0x61, ++ 0x6c, 0x69, 0x74, 0x79, 0x31, 0x19, 0x30, 0x17, 0x06, 0x03, 0x55, 0x04, 0x0a, ++ 0x0c, 0x10, 0x54, 0x65, 0x73, 0x74, 0x4f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, ++ 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x31, 0x1f, 0x30, 0x1d, 0x06, 0x03, 0x55, 0x04, ++ 0x0b, 0x0c, 0x16, 0x54, 0x65, 0x73, 0x74, 0x4f, 0x72, 0x67, 0x61, 0x6e, 0x69, ++ 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x55, 0x6e, 0x69, 0x74, 0x31, ++ 0x17, 0x30, 0x15, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, 0x0e, 0x54, 0x65, 0x73, ++ 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x31, 0x1f, ++ 0x30, 0x1d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x09, 0x01, ++ 0x16, 0x10, 0x74, 0x65, 0x73, 0x74, 0x40, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, ++ 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x30, 0x1e, 0x17, 0x0d, 0x31, 0x34, 0x31, 0x30, ++ 0x30, 0x39, 0x31, 0x33, 0x32, 0x38, 0x32, 0x32, 0x5a, 0x17, 0x0d, 0x31, 0x34, ++ 0x31, 0x31, 0x30, 0x38, 0x31, 0x33, 0x32, 0x38, 0x32, 0x32, 0x5a, 0x30, 0x81, ++ 0xb8, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, 0x55, ++ 0x53, 0x31, 0x1c, 0x30, 0x1a, 0x06, 0x03, 0x55, 0x04, 0x08, 0x0c, 0x13, 0x54, ++ 0x65, 0x73, 0x74, 0x53, 0x74, 0x61, 0x74, 0x65, 0x4f, 0x72, 0x50, 0x72, 0x6f, ++ 0x76, 0x69, 0x6e, 0x63, 0x65, 0x31, 0x15, 0x30, 0x13, 0x06, 0x03, 0x55, 0x04, ++ 0x07, 0x0c, 0x0c, 0x54, 0x65, 0x73, 0x74, 0x4c, 0x6f, 0x63, 0x61, 0x6c, 0x69, ++ 0x74, 0x79, 0x31, 0x19, 0x30, 0x17, 0x06, 0x03, 0x55, 0x04, 0x0a, 0x0c, 0x10, ++ 0x54, 0x65, 0x73, 0x74, 0x4f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, ++ 0x69, 0x6f, 0x6e, 0x31, 0x1f, 0x30, 0x1d, 0x06, 0x03, 0x55, 0x04, 0x0b, 0x0c, ++ 0x16, 0x54, 0x65, 0x73, 0x74, 0x4f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, ++ 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x55, 0x6e, 0x69, 0x74, 0x31, 0x17, 0x30, ++ 0x15, 0x06, 0x03, 0x55, 0x04, 0x03, 0x0c, 0x0e, 0x54, 0x65, 0x73, 0x74, 0x43, ++ 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x31, 0x1f, 0x30, 0x1d, ++ 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x09, 0x01, 0x16, 0x10, ++ 0x74, 0x65, 0x73, 0x74, 0x40, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x2e, ++ 0x63, 0x6f, 0x6d, 0x30, 0x82, 0x01, 0x22, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, ++ 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x01, 0x05, 0x00, 0x03, 0x82, 0x01, 0x0f, ++ 0x00, 0x30, 0x82, 0x01, 0x0a, 0x02, 0x82, 0x01, 0x01, 0x00, 0xbf, 0xf1, 0xce, ++ 0x17, 0x32, 0xac, 0xc4, 0x4b, 0xb2, 0xed, 0x84, 0x76, 0xe5, 0xd0, 0xf8, 0x21, ++ 0xac, 0x10, 0xf8, 
0x18, 0x09, 0x0e, 0x07, 0x13, 0x76, 0x21, 0x5c, 0xc4, 0xcc, ++ 0xd5, 0xe6, 0x25, 0xa7, 0x26, 0x53, 0x79, 0x2f, 0x16, 0x4b, 0x85, 0xbd, 0xae, ++ 0x42, 0x64, 0x58, 0xcb, 0x5e, 0xe8, 0x6e, 0x5a, 0xd0, 0xc4, 0x0f, 0x38, 0x16, ++ 0xbe, 0xd3, 0x22, 0xa7, 0x3c, 0x9b, 0x8b, 0x5e, 0xcb, 0x62, 0x35, 0xc5, 0x9b, ++ 0xe2, 0x8e, 0x4c, 0x65, 0x57, 0x4f, 0xcb, 0x27, 0xad, 0xe7, 0x63, 0xa7, 0x77, ++ 0x2b, 0xd5, 0x02, 0x42, 0x70, 0x46, 0xac, 0xba, 0xb6, 0x60, 0x57, 0xd9, 0xce, ++ 0x31, 0xc5, 0x12, 0x03, 0x4a, 0xf7, 0x2a, 0x2b, 0x40, 0x06, 0xb4, 0xdb, 0x31, ++ 0xb7, 0x83, 0x6c, 0x67, 0x87, 0x98, 0x8b, 0xce, 0x1b, 0x30, 0x7a, 0xfa, 0x35, ++ 0x6c, 0x86, 0x20, 0x74, 0xc5, 0x7d, 0x32, 0x31, 0x18, 0xeb, 0x69, 0xf7, 0x2d, ++ 0x20, 0xc4, 0xf0, 0xd2, 0xfa, 0x67, 0x81, 0xc1, 0xbb, 0x23, 0xbb, 0x75, 0x1a, ++ 0xe4, 0xb4, 0x49, 0x99, 0xdf, 0x12, 0x4c, 0xe3, 0x6d, 0x76, 0x24, 0x85, 0x24, ++ 0xae, 0x5a, 0x9e, 0xbd, 0x54, 0x1c, 0xf9, 0x0e, 0xed, 0x96, 0xb5, 0xd8, 0xa2, ++ 0x0d, 0x2a, 0x38, 0x5d, 0x12, 0x97, 0xb0, 0x4d, 0x75, 0x85, 0x1e, 0x47, 0x6d, ++ 0xe1, 0x25, 0x59, 0xcb, 0xe9, 0x33, 0x86, 0x6a, 0xef, 0x98, 0x24, 0xa0, 0x2b, ++ 0x02, 0x7b, 0xc0, 0x9f, 0x88, 0x03, 0xb0, 0xbe, 0x22, 0x65, 0x83, 0x77, 0xb3, ++ 0x30, 0xba, 0xe0, 0x3b, 0x54, 0x31, 0x3a, 0x45, 0x81, 0x9c, 0x48, 0xaf, 0xc1, ++ 0x11, 0x5b, 0xf2, 0x3a, 0x1e, 0x33, 0x1b, 0x8f, 0x0e, 0x04, 0xa4, 0x16, 0xd4, ++ 0x6b, 0x57, 0xee, 0xe7, 0xba, 0xf5, 0xee, 0xaf, 0xe2, 0x4c, 0x50, 0xf8, 0x68, ++ 0x57, 0x88, 0xfb, 0x7f, 0xa3, 0xcf, 0x02, 0x03, 0x01, 0x00, 0x01, 0xa3, 0x50, ++ 0x30, 0x4e, 0x30, 0x1d, 0x06, 0x03, 0x55, 0x1d, 0x0e, 0x04, 0x16, 0x04, 0x14, ++ 0x1e, 0x44, 0xe5, 0xef, 0xcd, 0x6e, 0x1f, 0xdb, 0xcb, 0x4f, 0x94, 0x8f, 0xe3, ++ 0x3b, 0x1a, 0x8c, 0xe6, 0x95, 0x29, 0x61, 0x30, 0x1f, 0x06, 0x03, 0x55, 0x1d, ++ 0x23, 0x04, 0x18, 0x30, 0x16, 0x80, 0x14, 0x1e, 0x44, 0xe5, 0xef, 0xcd, 0x6e, ++ 0x1f, 0xdb, 0xcb, 0x4f, 0x94, 0x8f, 0xe3, 0x3b, 0x1a, 0x8c, 0xe6, 0x95, 0x29, ++ 0x61, 0x30, 0x0c, 0x06, 0x03, 0x55, 0x1d, 0x13, 0x04, 0x05, 0x30, 0x03, 0x01, ++ 0x01, 0xff, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, ++ 0x01, 0x0b, 0x05, 0x00, 0x03, 0x82, 0x01, 0x01, 0x00, 0x12, 0x9c, 0x3e, 0x38, ++ 0xfc, 0x26, 0xea, 0x6d, 0xb7, 0x5c, 0x29, 0x3c, 0x76, 0x20, 0x0c, 0xb2, 0xa9, ++ 0x0f, 0xdf, 0xc0, 0x85, 0xfe, 0xeb, 0xec, 0x1d, 0x5d, 0x73, 0x84, 0xac, 0x8a, ++ 0xb4, 0x2a, 0x86, 0x38, 0x30, 0xaf, 0xd2, 0x2d, 0x2a, 0xde, 0x54, 0xc8, 0x5c, ++ 0x29, 0x90, 0x24, 0xf2, 0x39, 0xc1, 0xa5, 0x00, 0xb4, 0xb7, 0xd8, 0xdc, 0x59, ++ 0x64, 0x50, 0x62, 0x5f, 0x54, 0xf1, 0x73, 0x02, 0x4d, 0x43, 0xc5, 0xc3, 0xc4, ++ 0x0e, 0x62, 0x60, 0x8c, 0x53, 0x66, 0x57, 0x77, 0xb5, 0x81, 0xda, 0x1f, 0x81, ++ 0xda, 0xe9, 0xd6, 0x5e, 0x82, 0xce, 0xa7, 0x5c, 0xc0, 0xa6, 0xbe, 0x9c, 0x5c, ++ 0x7b, 0xa5, 0x15, 0xc8, 0xd7, 0x14, 0x53, 0xd3, 0x5c, 0x1c, 0x9f, 0x8a, 0x9f, ++ 0x66, 0x15, 0xd5, 0xd3, 0x2a, 0x27, 0x0c, 0xee, 0x9f, 0x80, 0x39, 0x88, 0x7b, ++ 0x24, 0xde, 0x0c, 0x61, 0xa3, 0x44, 0xd8, 0x8d, 0x2e, 0x79, 0xf8, 0x1e, 0x04, ++ 0x5a, 0xcb, 0xd6, 0x9c, 0xa3, 0x22, 0x8f, 0x09, 0x32, 0x1e, 0xe1, 0x65, 0x8f, ++ 0x10, 0x5f, 0xd8, 0x52, 0x56, 0xd5, 0x77, 0xac, 0x58, 0x46, 0x60, 0xba, 0x2e, ++ 0xe2, 0x3f, 0x58, 0x7d, 0x60, 0xfc, 0x31, 0x4a, 0x3a, 0xaf, 0x61, 0x55, 0x5f, ++ 0xfb, 0x68, 0x14, 0x74, 0xda, 0xdc, 0x42, 0x78, 0xcc, 0xee, 0xff, 0x5c, 0x03, ++ 0x24, 0x26, 0x2c, 0xb8, 0x3a, 0x81, 0xad, 0xdb, 0xe7, 0xed, 0xe1, 0x62, 0x84, ++ 0x07, 0x1a, 0xc8, 0xa4, 0x4e, 0xb0, 0x87, 0xf7, 0x96, 0xd8, 0x33, 0x9b, 0x0d, ++ 0xa7, 0x77, 0xae, 0x5b, 0xaf, 0xad, 0xe6, 0x5a, 0xc9, 0xfa, 0xa4, 0xe4, 0xe5, ++ 0x57, 
0xbb, 0x97, 0xdd, 0x92, 0x85, 0xd8, 0x03, 0x45, 0xfe, 0xd8, 0x6b, 0xb1, ++ 0xdb, 0x85, 0x36, 0xb9, 0xd9, 0x28, 0xbf, 0x17, 0xae, 0x11, 0xde, 0x10, 0x19, ++ 0x26, 0x5b, 0xc0, 0x3d, 0xc7 ++}; ++ ++// ++// Second KEK: "Microsoft Corporation KEK CA 2011". ++// SHA1: 31:59:0b:fd:89:c9:d7:4e:d0:87:df:ac:66:33:4b:39:31:25:4b:30 ++// ++// "dbx" updates in "dbxtool" are signed with a key derived from this KEK. ++// ++STATIC CONST UINT8 MicrosoftKEK[] = { ++ 0x30, 0x82, 0x05, 0xe8, 0x30, 0x82, 0x03, 0xd0, 0xa0, 0x03, 0x02, 0x01, 0x02, ++ 0x02, 0x0a, 0x61, 0x0a, 0xd1, 0x88, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x30, ++ 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x0b, 0x05, ++ 0x00, 0x30, 0x81, 0x91, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, ++ 0x13, 0x02, 0x55, 0x53, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, ++ 0x13, 0x0a, 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x74, 0x6f, 0x6e, 0x31, ++ 0x10, 0x30, 0x0e, 0x06, 0x03, 0x55, 0x04, 0x07, 0x13, 0x07, 0x52, 0x65, 0x64, ++ 0x6d, 0x6f, 0x6e, 0x64, 0x31, 0x1e, 0x30, 0x1c, 0x06, 0x03, 0x55, 0x04, 0x0a, ++ 0x13, 0x15, 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, 0x20, 0x43, ++ 0x6f, 0x72, 0x70, 0x6f, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x31, 0x3b, 0x30, ++ 0x39, 0x06, 0x03, 0x55, 0x04, 0x03, 0x13, 0x32, 0x4d, 0x69, 0x63, 0x72, 0x6f, ++ 0x73, 0x6f, 0x66, 0x74, 0x20, 0x43, 0x6f, 0x72, 0x70, 0x6f, 0x72, 0x61, 0x74, ++ 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x68, 0x69, 0x72, 0x64, 0x20, 0x50, 0x61, 0x72, ++ 0x74, 0x79, 0x20, 0x4d, 0x61, 0x72, 0x6b, 0x65, 0x74, 0x70, 0x6c, 0x61, 0x63, ++ 0x65, 0x20, 0x52, 0x6f, 0x6f, 0x74, 0x30, 0x1e, 0x17, 0x0d, 0x31, 0x31, 0x30, ++ 0x36, 0x32, 0x34, 0x32, 0x30, 0x34, 0x31, 0x32, 0x39, 0x5a, 0x17, 0x0d, 0x32, ++ 0x36, 0x30, 0x36, 0x32, 0x34, 0x32, 0x30, 0x35, 0x31, 0x32, 0x39, 0x5a, 0x30, ++ 0x81, 0x80, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, ++ 0x55, 0x53, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x13, 0x0a, ++ 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x74, 0x6f, 0x6e, 0x31, 0x10, 0x30, ++ 0x0e, 0x06, 0x03, 0x55, 0x04, 0x07, 0x13, 0x07, 0x52, 0x65, 0x64, 0x6d, 0x6f, ++ 0x6e, 0x64, 0x31, 0x1e, 0x30, 0x1c, 0x06, 0x03, 0x55, 0x04, 0x0a, 0x13, 0x15, ++ 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, 0x20, 0x43, 0x6f, 0x72, ++ 0x70, 0x6f, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x31, 0x2a, 0x30, 0x28, 0x06, ++ 0x03, 0x55, 0x04, 0x03, 0x13, 0x21, 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, ++ 0x66, 0x74, 0x20, 0x43, 0x6f, 0x72, 0x70, 0x6f, 0x72, 0x61, 0x74, 0x69, 0x6f, ++ 0x6e, 0x20, 0x4b, 0x45, 0x4b, 0x20, 0x43, 0x41, 0x20, 0x32, 0x30, 0x31, 0x31, ++ 0x30, 0x82, 0x01, 0x22, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, ++ 0x0d, 0x01, 0x01, 0x01, 0x05, 0x00, 0x03, 0x82, 0x01, 0x0f, 0x00, 0x30, 0x82, ++ 0x01, 0x0a, 0x02, 0x82, 0x01, 0x01, 0x00, 0xc4, 0xe8, 0xb5, 0x8a, 0xbf, 0xad, ++ 0x57, 0x26, 0xb0, 0x26, 0xc3, 0xea, 0xe7, 0xfb, 0x57, 0x7a, 0x44, 0x02, 0x5d, ++ 0x07, 0x0d, 0xda, 0x4a, 0xe5, 0x74, 0x2a, 0xe6, 0xb0, 0x0f, 0xec, 0x6d, 0xeb, ++ 0xec, 0x7f, 0xb9, 0xe3, 0x5a, 0x63, 0x32, 0x7c, 0x11, 0x17, 0x4f, 0x0e, 0xe3, ++ 0x0b, 0xa7, 0x38, 0x15, 0x93, 0x8e, 0xc6, 0xf5, 0xe0, 0x84, 0xb1, 0x9a, 0x9b, ++ 0x2c, 0xe7, 0xf5, 0xb7, 0x91, 0xd6, 0x09, 0xe1, 0xe2, 0xc0, 0x04, 0xa8, 0xac, ++ 0x30, 0x1c, 0xdf, 0x48, 0xf3, 0x06, 0x50, 0x9a, 0x64, 0xa7, 0x51, 0x7f, 0xc8, ++ 0x85, 0x4f, 0x8f, 0x20, 0x86, 0xce, 0xfe, 0x2f, 0xe1, 0x9f, 0xff, 0x82, 0xc0, ++ 0xed, 0xe9, 0xcd, 0xce, 0xf4, 0x53, 0x6a, 0x62, 0x3a, 0x0b, 0x43, 0xb9, 0xe2, ++ 0x25, 0xfd, 0xfe, 0x05, 
0xf9, 0xd4, 0xc4, 0x14, 0xab, 0x11, 0xe2, 0x23, 0x89, ++ 0x8d, 0x70, 0xb7, 0xa4, 0x1d, 0x4d, 0xec, 0xae, 0xe5, 0x9c, 0xfa, 0x16, 0xc2, ++ 0xd7, 0xc1, 0xcb, 0xd4, 0xe8, 0xc4, 0x2f, 0xe5, 0x99, 0xee, 0x24, 0x8b, 0x03, ++ 0xec, 0x8d, 0xf2, 0x8b, 0xea, 0xc3, 0x4a, 0xfb, 0x43, 0x11, 0x12, 0x0b, 0x7e, ++ 0xb5, 0x47, 0x92, 0x6c, 0xdc, 0xe6, 0x04, 0x89, 0xeb, 0xf5, 0x33, 0x04, 0xeb, ++ 0x10, 0x01, 0x2a, 0x71, 0xe5, 0xf9, 0x83, 0x13, 0x3c, 0xff, 0x25, 0x09, 0x2f, ++ 0x68, 0x76, 0x46, 0xff, 0xba, 0x4f, 0xbe, 0xdc, 0xad, 0x71, 0x2a, 0x58, 0xaa, ++ 0xfb, 0x0e, 0xd2, 0x79, 0x3d, 0xe4, 0x9b, 0x65, 0x3b, 0xcc, 0x29, 0x2a, 0x9f, ++ 0xfc, 0x72, 0x59, 0xa2, 0xeb, 0xae, 0x92, 0xef, 0xf6, 0x35, 0x13, 0x80, 0xc6, ++ 0x02, 0xec, 0xe4, 0x5f, 0xcc, 0x9d, 0x76, 0xcd, 0xef, 0x63, 0x92, 0xc1, 0xaf, ++ 0x79, 0x40, 0x84, 0x79, 0x87, 0x7f, 0xe3, 0x52, 0xa8, 0xe8, 0x9d, 0x7b, 0x07, ++ 0x69, 0x8f, 0x15, 0x02, 0x03, 0x01, 0x00, 0x01, 0xa3, 0x82, 0x01, 0x4f, 0x30, ++ 0x82, 0x01, 0x4b, 0x30, 0x10, 0x06, 0x09, 0x2b, 0x06, 0x01, 0x04, 0x01, 0x82, ++ 0x37, 0x15, 0x01, 0x04, 0x03, 0x02, 0x01, 0x00, 0x30, 0x1d, 0x06, 0x03, 0x55, ++ 0x1d, 0x0e, 0x04, 0x16, 0x04, 0x14, 0x62, 0xfc, 0x43, 0xcd, 0xa0, 0x3e, 0xa4, ++ 0xcb, 0x67, 0x12, 0xd2, 0x5b, 0xd9, 0x55, 0xac, 0x7b, 0xcc, 0xb6, 0x8a, 0x5f, ++ 0x30, 0x19, 0x06, 0x09, 0x2b, 0x06, 0x01, 0x04, 0x01, 0x82, 0x37, 0x14, 0x02, ++ 0x04, 0x0c, 0x1e, 0x0a, 0x00, 0x53, 0x00, 0x75, 0x00, 0x62, 0x00, 0x43, 0x00, ++ 0x41, 0x30, 0x0b, 0x06, 0x03, 0x55, 0x1d, 0x0f, 0x04, 0x04, 0x03, 0x02, 0x01, ++ 0x86, 0x30, 0x0f, 0x06, 0x03, 0x55, 0x1d, 0x13, 0x01, 0x01, 0xff, 0x04, 0x05, ++ 0x30, 0x03, 0x01, 0x01, 0xff, 0x30, 0x1f, 0x06, 0x03, 0x55, 0x1d, 0x23, 0x04, ++ 0x18, 0x30, 0x16, 0x80, 0x14, 0x45, 0x66, 0x52, 0x43, 0xe1, 0x7e, 0x58, 0x11, ++ 0xbf, 0xd6, 0x4e, 0x9e, 0x23, 0x55, 0x08, 0x3b, 0x3a, 0x22, 0x6a, 0xa8, 0x30, ++ 0x5c, 0x06, 0x03, 0x55, 0x1d, 0x1f, 0x04, 0x55, 0x30, 0x53, 0x30, 0x51, 0xa0, ++ 0x4f, 0xa0, 0x4d, 0x86, 0x4b, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x63, ++ 0x72, 0x6c, 0x2e, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, 0x2e, ++ 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x6b, 0x69, 0x2f, 0x63, 0x72, 0x6c, 0x2f, 0x70, ++ 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x73, 0x2f, 0x4d, 0x69, 0x63, 0x43, 0x6f, ++ 0x72, 0x54, 0x68, 0x69, 0x50, 0x61, 0x72, 0x4d, 0x61, 0x72, 0x52, 0x6f, 0x6f, ++ 0x5f, 0x32, 0x30, 0x31, 0x30, 0x2d, 0x31, 0x30, 0x2d, 0x30, 0x35, 0x2e, 0x63, ++ 0x72, 0x6c, 0x30, 0x60, 0x06, 0x08, 0x2b, 0x06, 0x01, 0x05, 0x05, 0x07, 0x01, ++ 0x01, 0x04, 0x54, 0x30, 0x52, 0x30, 0x50, 0x06, 0x08, 0x2b, 0x06, 0x01, 0x05, ++ 0x05, 0x07, 0x30, 0x02, 0x86, 0x44, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, ++ 0x77, 0x77, 0x77, 0x2e, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, ++ 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x6b, 0x69, 0x2f, 0x63, 0x65, 0x72, 0x74, ++ 0x73, 0x2f, 0x4d, 0x69, 0x63, 0x43, 0x6f, 0x72, 0x54, 0x68, 0x69, 0x50, 0x61, ++ 0x72, 0x4d, 0x61, 0x72, 0x52, 0x6f, 0x6f, 0x5f, 0x32, 0x30, 0x31, 0x30, 0x2d, ++ 0x31, 0x30, 0x2d, 0x30, 0x35, 0x2e, 0x63, 0x72, 0x74, 0x30, 0x0d, 0x06, 0x09, ++ 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x0b, 0x05, 0x00, 0x03, 0x82, ++ 0x02, 0x01, 0x00, 0xd4, 0x84, 0x88, 0xf5, 0x14, 0x94, 0x18, 0x02, 0xca, 0x2a, ++ 0x3c, 0xfb, 0x2a, 0x92, 0x1c, 0x0c, 0xd7, 0xa0, 0xd1, 0xf1, 0xe8, 0x52, 0x66, ++ 0xa8, 0xee, 0xa2, 0xb5, 0x75, 0x7a, 0x90, 0x00, 0xaa, 0x2d, 0xa4, 0x76, 0x5a, ++ 0xea, 0x79, 0xb7, 0xb9, 0x37, 0x6a, 0x51, 0x7b, 0x10, 0x64, 0xf6, 0xe1, 0x64, ++ 0xf2, 0x02, 0x67, 0xbe, 0xf7, 0xa8, 0x1b, 0x78, 0xbd, 0xba, 0xce, 0x88, 0x58, ++ 0x64, 0x0c, 
0xd6, 0x57, 0xc8, 0x19, 0xa3, 0x5f, 0x05, 0xd6, 0xdb, 0xc6, 0xd0, ++ 0x69, 0xce, 0x48, 0x4b, 0x32, 0xb7, 0xeb, 0x5d, 0xd2, 0x30, 0xf5, 0xc0, 0xf5, ++ 0xb8, 0xba, 0x78, 0x07, 0xa3, 0x2b, 0xfe, 0x9b, 0xdb, 0x34, 0x56, 0x84, 0xec, ++ 0x82, 0xca, 0xae, 0x41, 0x25, 0x70, 0x9c, 0x6b, 0xe9, 0xfe, 0x90, 0x0f, 0xd7, ++ 0x96, 0x1f, 0xe5, 0xe7, 0x94, 0x1f, 0xb2, 0x2a, 0x0c, 0x8d, 0x4b, 0xff, 0x28, ++ 0x29, 0x10, 0x7b, 0xf7, 0xd7, 0x7c, 0xa5, 0xd1, 0x76, 0xb9, 0x05, 0xc8, 0x79, ++ 0xed, 0x0f, 0x90, 0x92, 0x9c, 0xc2, 0xfe, 0xdf, 0x6f, 0x7e, 0x6c, 0x0f, 0x7b, ++ 0xd4, 0xc1, 0x45, 0xdd, 0x34, 0x51, 0x96, 0x39, 0x0f, 0xe5, 0x5e, 0x56, 0xd8, ++ 0x18, 0x05, 0x96, 0xf4, 0x07, 0xa6, 0x42, 0xb3, 0xa0, 0x77, 0xfd, 0x08, 0x19, ++ 0xf2, 0x71, 0x56, 0xcc, 0x9f, 0x86, 0x23, 0xa4, 0x87, 0xcb, 0xa6, 0xfd, 0x58, ++ 0x7e, 0xd4, 0x69, 0x67, 0x15, 0x91, 0x7e, 0x81, 0xf2, 0x7f, 0x13, 0xe5, 0x0d, ++ 0x8b, 0x8a, 0x3c, 0x87, 0x84, 0xeb, 0xe3, 0xce, 0xbd, 0x43, 0xe5, 0xad, 0x2d, ++ 0x84, 0x93, 0x8e, 0x6a, 0x2b, 0x5a, 0x7c, 0x44, 0xfa, 0x52, 0xaa, 0x81, 0xc8, ++ 0x2d, 0x1c, 0xbb, 0xe0, 0x52, 0xdf, 0x00, 0x11, 0xf8, 0x9a, 0x3d, 0xc1, 0x60, ++ 0xb0, 0xe1, 0x33, 0xb5, 0xa3, 0x88, 0xd1, 0x65, 0x19, 0x0a, 0x1a, 0xe7, 0xac, ++ 0x7c, 0xa4, 0xc1, 0x82, 0x87, 0x4e, 0x38, 0xb1, 0x2f, 0x0d, 0xc5, 0x14, 0x87, ++ 0x6f, 0xfd, 0x8d, 0x2e, 0xbc, 0x39, 0xb6, 0xe7, 0xe6, 0xc3, 0xe0, 0xe4, 0xcd, ++ 0x27, 0x84, 0xef, 0x94, 0x42, 0xef, 0x29, 0x8b, 0x90, 0x46, 0x41, 0x3b, 0x81, ++ 0x1b, 0x67, 0xd8, 0xf9, 0x43, 0x59, 0x65, 0xcb, 0x0d, 0xbc, 0xfd, 0x00, 0x92, ++ 0x4f, 0xf4, 0x75, 0x3b, 0xa7, 0xa9, 0x24, 0xfc, 0x50, 0x41, 0x40, 0x79, 0xe0, ++ 0x2d, 0x4f, 0x0a, 0x6a, 0x27, 0x76, 0x6e, 0x52, 0xed, 0x96, 0x69, 0x7b, 0xaf, ++ 0x0f, 0xf7, 0x87, 0x05, 0xd0, 0x45, 0xc2, 0xad, 0x53, 0x14, 0x81, 0x1f, 0xfb, ++ 0x30, 0x04, 0xaa, 0x37, 0x36, 0x61, 0xda, 0x4a, 0x69, 0x1b, 0x34, 0xd8, 0x68, ++ 0xed, 0xd6, 0x02, 0xcf, 0x6c, 0x94, 0x0c, 0xd3, 0xcf, 0x6c, 0x22, 0x79, 0xad, ++ 0xb1, 0xf0, 0xbc, 0x03, 0xa2, 0x46, 0x60, 0xa9, 0xc4, 0x07, 0xc2, 0x21, 0x82, ++ 0xf1, 0xfd, 0xf2, 0xe8, 0x79, 0x32, 0x60, 0xbf, 0xd8, 0xac, 0xa5, 0x22, 0x14, ++ 0x4b, 0xca, 0xc1, 0xd8, 0x4b, 0xeb, 0x7d, 0x3f, 0x57, 0x35, 0xb2, 0xe6, 0x4f, ++ 0x75, 0xb4, 0xb0, 0x60, 0x03, 0x22, 0x53, 0xae, 0x91, 0x79, 0x1d, 0xd6, 0x9b, ++ 0x41, 0x1f, 0x15, 0x86, 0x54, 0x70, 0xb2, 0xde, 0x0d, 0x35, 0x0f, 0x7c, 0xb0, ++ 0x34, 0x72, 0xba, 0x97, 0x60, 0x3b, 0xf0, 0x79, 0xeb, 0xa2, 0xb2, 0x1c, 0x5d, ++ 0xa2, 0x16, 0xb8, 0x87, 0xc5, 0xe9, 0x1b, 0xf6, 0xb5, 0x97, 0x25, 0x6f, 0x38, ++ 0x9f, 0xe3, 0x91, 0xfa, 0x8a, 0x79, 0x98, 0xc3, 0x69, 0x0e, 0xb7, 0xa3, 0x1c, ++ 0x20, 0x05, 0x97, 0xf8, 0xca, 0x14, 0xae, 0x00, 0xd7, 0xc4, 0xf3, 0xc0, 0x14, ++ 0x10, 0x75, 0x6b, 0x34, 0xa0, 0x1b, 0xb5, 0x99, 0x60, 0xf3, 0x5c, 0xb0, 0xc5, ++ 0x57, 0x4e, 0x36, 0xd2, 0x32, 0x84, 0xbf, 0x9e ++}; ++ ++// ++// First DB entry: "Microsoft Windows Production PCA 2011" ++// SHA1: 58:0a:6f:4c:c4:e4:b6:69:b9:eb:dc:1b:2b:3e:08:7b:80:d0:67:8d ++// ++// Windows 8 and Windows Server 2012 R2 boot loaders are signed with a chain ++// rooted in this certificate. 
++// ++STATIC CONST UINT8 MicrosoftPCA[] = { ++ 0x30, 0x82, 0x05, 0xd7, 0x30, 0x82, 0x03, 0xbf, 0xa0, 0x03, 0x02, 0x01, 0x02, ++ 0x02, 0x0a, 0x61, 0x07, 0x76, 0x56, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, 0x30, ++ 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x0b, 0x05, ++ 0x00, 0x30, 0x81, 0x88, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, ++ 0x13, 0x02, 0x55, 0x53, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, ++ 0x13, 0x0a, 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x74, 0x6f, 0x6e, 0x31, ++ 0x10, 0x30, 0x0e, 0x06, 0x03, 0x55, 0x04, 0x07, 0x13, 0x07, 0x52, 0x65, 0x64, ++ 0x6d, 0x6f, 0x6e, 0x64, 0x31, 0x1e, 0x30, 0x1c, 0x06, 0x03, 0x55, 0x04, 0x0a, ++ 0x13, 0x15, 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, 0x20, 0x43, ++ 0x6f, 0x72, 0x70, 0x6f, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x31, 0x32, 0x30, ++ 0x30, 0x06, 0x03, 0x55, 0x04, 0x03, 0x13, 0x29, 0x4d, 0x69, 0x63, 0x72, 0x6f, ++ 0x73, 0x6f, 0x66, 0x74, 0x20, 0x52, 0x6f, 0x6f, 0x74, 0x20, 0x43, 0x65, 0x72, ++ 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x20, 0x41, 0x75, 0x74, 0x68, ++ 0x6f, 0x72, 0x69, 0x74, 0x79, 0x20, 0x32, 0x30, 0x31, 0x30, 0x30, 0x1e, 0x17, ++ 0x0d, 0x31, 0x31, 0x31, 0x30, 0x31, 0x39, 0x31, 0x38, 0x34, 0x31, 0x34, 0x32, ++ 0x5a, 0x17, 0x0d, 0x32, 0x36, 0x31, 0x30, 0x31, 0x39, 0x31, 0x38, 0x35, 0x31, ++ 0x34, 0x32, 0x5a, 0x30, 0x81, 0x84, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, ++ 0x04, 0x06, 0x13, 0x02, 0x55, 0x53, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, ++ 0x04, 0x08, 0x13, 0x0a, 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x74, 0x6f, ++ 0x6e, 0x31, 0x10, 0x30, 0x0e, 0x06, 0x03, 0x55, 0x04, 0x07, 0x13, 0x07, 0x52, ++ 0x65, 0x64, 0x6d, 0x6f, 0x6e, 0x64, 0x31, 0x1e, 0x30, 0x1c, 0x06, 0x03, 0x55, ++ 0x04, 0x0a, 0x13, 0x15, 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, ++ 0x20, 0x43, 0x6f, 0x72, 0x70, 0x6f, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x31, ++ 0x2e, 0x30, 0x2c, 0x06, 0x03, 0x55, 0x04, 0x03, 0x13, 0x25, 0x4d, 0x69, 0x63, ++ 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, 0x20, 0x57, 0x69, 0x6e, 0x64, 0x6f, 0x77, ++ 0x73, 0x20, 0x50, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x20, ++ 0x50, 0x43, 0x41, 0x20, 0x32, 0x30, 0x31, 0x31, 0x30, 0x82, 0x01, 0x22, 0x30, ++ 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x01, 0x05, ++ 0x00, 0x03, 0x82, 0x01, 0x0f, 0x00, 0x30, 0x82, 0x01, 0x0a, 0x02, 0x82, 0x01, ++ 0x01, 0x00, 0xdd, 0x0c, 0xbb, 0xa2, 0xe4, 0x2e, 0x09, 0xe3, 0xe7, 0xc5, 0xf7, ++ 0x96, 0x69, 0xbc, 0x00, 0x21, 0xbd, 0x69, 0x33, 0x33, 0xef, 0xad, 0x04, 0xcb, ++ 0x54, 0x80, 0xee, 0x06, 0x83, 0xbb, 0xc5, 0x20, 0x84, 0xd9, 0xf7, 0xd2, 0x8b, ++ 0xf3, 0x38, 0xb0, 0xab, 0xa4, 0xad, 0x2d, 0x7c, 0x62, 0x79, 0x05, 0xff, 0xe3, ++ 0x4a, 0x3f, 0x04, 0x35, 0x20, 0x70, 0xe3, 0xc4, 0xe7, 0x6b, 0xe0, 0x9c, 0xc0, ++ 0x36, 0x75, 0xe9, 0x8a, 0x31, 0xdd, 0x8d, 0x70, 0xe5, 0xdc, 0x37, 0xb5, 0x74, ++ 0x46, 0x96, 0x28, 0x5b, 0x87, 0x60, 0x23, 0x2c, 0xbf, 0xdc, 0x47, 0xa5, 0x67, ++ 0xf7, 0x51, 0x27, 0x9e, 0x72, 0xeb, 0x07, 0xa6, 0xc9, 0xb9, 0x1e, 0x3b, 0x53, ++ 0x35, 0x7c, 0xe5, 0xd3, 0xec, 0x27, 0xb9, 0x87, 0x1c, 0xfe, 0xb9, 0xc9, 0x23, ++ 0x09, 0x6f, 0xa8, 0x46, 0x91, 0xc1, 0x6e, 0x96, 0x3c, 0x41, 0xd3, 0xcb, 0xa3, ++ 0x3f, 0x5d, 0x02, 0x6a, 0x4d, 0xec, 0x69, 0x1f, 0x25, 0x28, 0x5c, 0x36, 0xff, ++ 0xfd, 0x43, 0x15, 0x0a, 0x94, 0xe0, 0x19, 0xb4, 0xcf, 0xdf, 0xc2, 0x12, 0xe2, ++ 0xc2, 0x5b, 0x27, 0xee, 0x27, 0x78, 0x30, 0x8b, 0x5b, 0x2a, 0x09, 0x6b, 0x22, ++ 0x89, 0x53, 0x60, 0x16, 0x2c, 0xc0, 0x68, 0x1d, 0x53, 0xba, 0xec, 0x49, 0xf3, ++ 0x9d, 0x61, 0x8c, 0x85, 
0x68, 0x09, 0x73, 0x44, 0x5d, 0x7d, 0xa2, 0x54, 0x2b, ++ 0xdd, 0x79, 0xf7, 0x15, 0xcf, 0x35, 0x5d, 0x6c, 0x1c, 0x2b, 0x5c, 0xce, 0xbc, ++ 0x9c, 0x23, 0x8b, 0x6f, 0x6e, 0xb5, 0x26, 0xd9, 0x36, 0x13, 0xc3, 0x4f, 0xd6, ++ 0x27, 0xae, 0xb9, 0x32, 0x3b, 0x41, 0x92, 0x2c, 0xe1, 0xc7, 0xcd, 0x77, 0xe8, ++ 0xaa, 0x54, 0x4e, 0xf7, 0x5c, 0x0b, 0x04, 0x87, 0x65, 0xb4, 0x43, 0x18, 0xa8, ++ 0xb2, 0xe0, 0x6d, 0x19, 0x77, 0xec, 0x5a, 0x24, 0xfa, 0x48, 0x03, 0x02, 0x03, ++ 0x01, 0x00, 0x01, 0xa3, 0x82, 0x01, 0x43, 0x30, 0x82, 0x01, 0x3f, 0x30, 0x10, ++ 0x06, 0x09, 0x2b, 0x06, 0x01, 0x04, 0x01, 0x82, 0x37, 0x15, 0x01, 0x04, 0x03, ++ 0x02, 0x01, 0x00, 0x30, 0x1d, 0x06, 0x03, 0x55, 0x1d, 0x0e, 0x04, 0x16, 0x04, ++ 0x14, 0xa9, 0x29, 0x02, 0x39, 0x8e, 0x16, 0xc4, 0x97, 0x78, 0xcd, 0x90, 0xf9, ++ 0x9e, 0x4f, 0x9a, 0xe1, 0x7c, 0x55, 0xaf, 0x53, 0x30, 0x19, 0x06, 0x09, 0x2b, ++ 0x06, 0x01, 0x04, 0x01, 0x82, 0x37, 0x14, 0x02, 0x04, 0x0c, 0x1e, 0x0a, 0x00, ++ 0x53, 0x00, 0x75, 0x00, 0x62, 0x00, 0x43, 0x00, 0x41, 0x30, 0x0b, 0x06, 0x03, ++ 0x55, 0x1d, 0x0f, 0x04, 0x04, 0x03, 0x02, 0x01, 0x86, 0x30, 0x0f, 0x06, 0x03, ++ 0x55, 0x1d, 0x13, 0x01, 0x01, 0xff, 0x04, 0x05, 0x30, 0x03, 0x01, 0x01, 0xff, ++ 0x30, 0x1f, 0x06, 0x03, 0x55, 0x1d, 0x23, 0x04, 0x18, 0x30, 0x16, 0x80, 0x14, ++ 0xd5, 0xf6, 0x56, 0xcb, 0x8f, 0xe8, 0xa2, 0x5c, 0x62, 0x68, 0xd1, 0x3d, 0x94, ++ 0x90, 0x5b, 0xd7, 0xce, 0x9a, 0x18, 0xc4, 0x30, 0x56, 0x06, 0x03, 0x55, 0x1d, ++ 0x1f, 0x04, 0x4f, 0x30, 0x4d, 0x30, 0x4b, 0xa0, 0x49, 0xa0, 0x47, 0x86, 0x45, ++ 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, 0x63, 0x72, 0x6c, 0x2e, 0x6d, 0x69, ++ 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, ++ 0x6b, 0x69, 0x2f, 0x63, 0x72, 0x6c, 0x2f, 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, ++ 0x74, 0x73, 0x2f, 0x4d, 0x69, 0x63, 0x52, 0x6f, 0x6f, 0x43, 0x65, 0x72, 0x41, ++ 0x75, 0x74, 0x5f, 0x32, 0x30, 0x31, 0x30, 0x2d, 0x30, 0x36, 0x2d, 0x32, 0x33, ++ 0x2e, 0x63, 0x72, 0x6c, 0x30, 0x5a, 0x06, 0x08, 0x2b, 0x06, 0x01, 0x05, 0x05, ++ 0x07, 0x01, 0x01, 0x04, 0x4e, 0x30, 0x4c, 0x30, 0x4a, 0x06, 0x08, 0x2b, 0x06, ++ 0x01, 0x05, 0x05, 0x07, 0x30, 0x02, 0x86, 0x3e, 0x68, 0x74, 0x74, 0x70, 0x3a, ++ 0x2f, 0x2f, 0x77, 0x77, 0x77, 0x2e, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, ++ 0x66, 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x6b, 0x69, 0x2f, 0x63, 0x65, ++ 0x72, 0x74, 0x73, 0x2f, 0x4d, 0x69, 0x63, 0x52, 0x6f, 0x6f, 0x43, 0x65, 0x72, ++ 0x41, 0x75, 0x74, 0x5f, 0x32, 0x30, 0x31, 0x30, 0x2d, 0x30, 0x36, 0x2d, 0x32, ++ 0x33, 0x2e, 0x63, 0x72, 0x74, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, ++ 0xf7, 0x0d, 0x01, 0x01, 0x0b, 0x05, 0x00, 0x03, 0x82, 0x02, 0x01, 0x00, 0x14, ++ 0xfc, 0x7c, 0x71, 0x51, 0xa5, 0x79, 0xc2, 0x6e, 0xb2, 0xef, 0x39, 0x3e, 0xbc, ++ 0x3c, 0x52, 0x0f, 0x6e, 0x2b, 0x3f, 0x10, 0x13, 0x73, 0xfe, 0xa8, 0x68, 0xd0, ++ 0x48, 0xa6, 0x34, 0x4d, 0x8a, 0x96, 0x05, 0x26, 0xee, 0x31, 0x46, 0x90, 0x61, ++ 0x79, 0xd6, 0xff, 0x38, 0x2e, 0x45, 0x6b, 0xf4, 0xc0, 0xe5, 0x28, 0xb8, 0xda, ++ 0x1d, 0x8f, 0x8a, 0xdb, 0x09, 0xd7, 0x1a, 0xc7, 0x4c, 0x0a, 0x36, 0x66, 0x6a, ++ 0x8c, 0xec, 0x1b, 0xd7, 0x04, 0x90, 0xa8, 0x18, 0x17, 0xa4, 0x9b, 0xb9, 0xe2, ++ 0x40, 0x32, 0x36, 0x76, 0xc4, 0xc1, 0x5a, 0xc6, 0xbf, 0xe4, 0x04, 0xc0, 0xea, ++ 0x16, 0xd3, 0xac, 0xc3, 0x68, 0xef, 0x62, 0xac, 0xdd, 0x54, 0x6c, 0x50, 0x30, ++ 0x58, 0xa6, 0xeb, 0x7c, 0xfe, 0x94, 0xa7, 0x4e, 0x8e, 0xf4, 0xec, 0x7c, 0x86, ++ 0x73, 0x57, 0xc2, 0x52, 0x21, 0x73, 0x34, 0x5a, 0xf3, 0xa3, 0x8a, 0x56, 0xc8, ++ 0x04, 0xda, 0x07, 0x09, 0xed, 0xf8, 0x8b, 0xe3, 0xce, 0xf4, 0x7e, 0x8e, 0xae, ++ 0xf0, 0xf6, 
0x0b, 0x8a, 0x08, 0xfb, 0x3f, 0xc9, 0x1d, 0x72, 0x7f, 0x53, 0xb8, ++ 0xeb, 0xbe, 0x63, 0xe0, 0xe3, 0x3d, 0x31, 0x65, 0xb0, 0x81, 0xe5, 0xf2, 0xac, ++ 0xcd, 0x16, 0xa4, 0x9f, 0x3d, 0xa8, 0xb1, 0x9b, 0xc2, 0x42, 0xd0, 0x90, 0x84, ++ 0x5f, 0x54, 0x1d, 0xff, 0x89, 0xea, 0xba, 0x1d, 0x47, 0x90, 0x6f, 0xb0, 0x73, ++ 0x4e, 0x41, 0x9f, 0x40, 0x9f, 0x5f, 0xe5, 0xa1, 0x2a, 0xb2, 0x11, 0x91, 0x73, ++ 0x8a, 0x21, 0x28, 0xf0, 0xce, 0xde, 0x73, 0x39, 0x5f, 0x3e, 0xab, 0x5c, 0x60, ++ 0xec, 0xdf, 0x03, 0x10, 0xa8, 0xd3, 0x09, 0xe9, 0xf4, 0xf6, 0x96, 0x85, 0xb6, ++ 0x7f, 0x51, 0x88, 0x66, 0x47, 0x19, 0x8d, 0xa2, 0xb0, 0x12, 0x3d, 0x81, 0x2a, ++ 0x68, 0x05, 0x77, 0xbb, 0x91, 0x4c, 0x62, 0x7b, 0xb6, 0xc1, 0x07, 0xc7, 0xba, ++ 0x7a, 0x87, 0x34, 0x03, 0x0e, 0x4b, 0x62, 0x7a, 0x99, 0xe9, 0xca, 0xfc, 0xce, ++ 0x4a, 0x37, 0xc9, 0x2d, 0xa4, 0x57, 0x7c, 0x1c, 0xfe, 0x3d, 0xdc, 0xb8, 0x0f, ++ 0x5a, 0xfa, 0xd6, 0xc4, 0xb3, 0x02, 0x85, 0x02, 0x3a, 0xea, 0xb3, 0xd9, 0x6e, ++ 0xe4, 0x69, 0x21, 0x37, 0xde, 0x81, 0xd1, 0xf6, 0x75, 0x19, 0x05, 0x67, 0xd3, ++ 0x93, 0x57, 0x5e, 0x29, 0x1b, 0x39, 0xc8, 0xee, 0x2d, 0xe1, 0xcd, 0xe4, 0x45, ++ 0x73, 0x5b, 0xd0, 0xd2, 0xce, 0x7a, 0xab, 0x16, 0x19, 0x82, 0x46, 0x58, 0xd0, ++ 0x5e, 0x9d, 0x81, 0xb3, 0x67, 0xaf, 0x6c, 0x35, 0xf2, 0xbc, 0xe5, 0x3f, 0x24, ++ 0xe2, 0x35, 0xa2, 0x0a, 0x75, 0x06, 0xf6, 0x18, 0x56, 0x99, 0xd4, 0x78, 0x2c, ++ 0xd1, 0x05, 0x1b, 0xeb, 0xd0, 0x88, 0x01, 0x9d, 0xaa, 0x10, 0xf1, 0x05, 0xdf, ++ 0xba, 0x7e, 0x2c, 0x63, 0xb7, 0x06, 0x9b, 0x23, 0x21, 0xc4, 0xf9, 0x78, 0x6c, ++ 0xe2, 0x58, 0x17, 0x06, 0x36, 0x2b, 0x91, 0x12, 0x03, 0xcc, 0xa4, 0xd9, 0xf2, ++ 0x2d, 0xba, 0xf9, 0x94, 0x9d, 0x40, 0xed, 0x18, 0x45, 0xf1, 0xce, 0x8a, 0x5c, ++ 0x6b, 0x3e, 0xab, 0x03, 0xd3, 0x70, 0x18, 0x2a, 0x0a, 0x6a, 0xe0, 0x5f, 0x47, ++ 0xd1, 0xd5, 0x63, 0x0a, 0x32, 0xf2, 0xaf, 0xd7, 0x36, 0x1f, 0x2a, 0x70, 0x5a, ++ 0xe5, 0x42, 0x59, 0x08, 0x71, 0x4b, 0x57, 0xba, 0x7e, 0x83, 0x81, 0xf0, 0x21, ++ 0x3c, 0xf4, 0x1c, 0xc1, 0xc5, 0xb9, 0x90, 0x93, 0x0e, 0x88, 0x45, 0x93, 0x86, ++ 0xe9, 0xb1, 0x20, 0x99, 0xbe, 0x98, 0xcb, 0xc5, 0x95, 0xa4, 0x5d, 0x62, 0xd6, ++ 0xa0, 0x63, 0x08, 0x20, 0xbd, 0x75, 0x10, 0x77, 0x7d, 0x3d, 0xf3, 0x45, 0xb9, ++ 0x9f, 0x97, 0x9f, 0xcb, 0x57, 0x80, 0x6f, 0x33, 0xa9, 0x04, 0xcf, 0x77, 0xa4, ++ 0x62, 0x1c, 0x59, 0x7e ++}; ++ ++// ++// Second DB entry: "Microsoft Corporation UEFI CA 2011" ++// SHA1: 46:de:f6:3b:5c:e6:1c:f8:ba:0d:e2:e6:63:9c:10:19:d0:ed:14:f3 ++// ++// To verify the "shim" binary and PCI expansion ROMs with. 
++// ++STATIC CONST UINT8 MicrosoftUefiCA[] = { ++ 0x30, 0x82, 0x06, 0x10, 0x30, 0x82, 0x03, 0xf8, 0xa0, 0x03, 0x02, 0x01, 0x02, ++ 0x02, 0x0a, 0x61, 0x08, 0xd3, 0xc4, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x30, ++ 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x0b, 0x05, ++ 0x00, 0x30, 0x81, 0x91, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, ++ 0x13, 0x02, 0x55, 0x53, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, ++ 0x13, 0x0a, 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x74, 0x6f, 0x6e, 0x31, ++ 0x10, 0x30, 0x0e, 0x06, 0x03, 0x55, 0x04, 0x07, 0x13, 0x07, 0x52, 0x65, 0x64, ++ 0x6d, 0x6f, 0x6e, 0x64, 0x31, 0x1e, 0x30, 0x1c, 0x06, 0x03, 0x55, 0x04, 0x0a, ++ 0x13, 0x15, 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, 0x20, 0x43, ++ 0x6f, 0x72, 0x70, 0x6f, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x31, 0x3b, 0x30, ++ 0x39, 0x06, 0x03, 0x55, 0x04, 0x03, 0x13, 0x32, 0x4d, 0x69, 0x63, 0x72, 0x6f, ++ 0x73, 0x6f, 0x66, 0x74, 0x20, 0x43, 0x6f, 0x72, 0x70, 0x6f, 0x72, 0x61, 0x74, ++ 0x69, 0x6f, 0x6e, 0x20, 0x54, 0x68, 0x69, 0x72, 0x64, 0x20, 0x50, 0x61, 0x72, ++ 0x74, 0x79, 0x20, 0x4d, 0x61, 0x72, 0x6b, 0x65, 0x74, 0x70, 0x6c, 0x61, 0x63, ++ 0x65, 0x20, 0x52, 0x6f, 0x6f, 0x74, 0x30, 0x1e, 0x17, 0x0d, 0x31, 0x31, 0x30, ++ 0x36, 0x32, 0x37, 0x32, 0x31, 0x32, 0x32, 0x34, 0x35, 0x5a, 0x17, 0x0d, 0x32, ++ 0x36, 0x30, 0x36, 0x32, 0x37, 0x32, 0x31, 0x33, 0x32, 0x34, 0x35, 0x5a, 0x30, ++ 0x81, 0x81, 0x31, 0x0b, 0x30, 0x09, 0x06, 0x03, 0x55, 0x04, 0x06, 0x13, 0x02, ++ 0x55, 0x53, 0x31, 0x13, 0x30, 0x11, 0x06, 0x03, 0x55, 0x04, 0x08, 0x13, 0x0a, ++ 0x57, 0x61, 0x73, 0x68, 0x69, 0x6e, 0x67, 0x74, 0x6f, 0x6e, 0x31, 0x10, 0x30, ++ 0x0e, 0x06, 0x03, 0x55, 0x04, 0x07, 0x13, 0x07, 0x52, 0x65, 0x64, 0x6d, 0x6f, ++ 0x6e, 0x64, 0x31, 0x1e, 0x30, 0x1c, 0x06, 0x03, 0x55, 0x04, 0x0a, 0x13, 0x15, ++ 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, 0x20, 0x43, 0x6f, 0x72, ++ 0x70, 0x6f, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x31, 0x2b, 0x30, 0x29, 0x06, ++ 0x03, 0x55, 0x04, 0x03, 0x13, 0x22, 0x4d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, ++ 0x66, 0x74, 0x20, 0x43, 0x6f, 0x72, 0x70, 0x6f, 0x72, 0x61, 0x74, 0x69, 0x6f, ++ 0x6e, 0x20, 0x55, 0x45, 0x46, 0x49, 0x20, 0x43, 0x41, 0x20, 0x32, 0x30, 0x31, ++ 0x31, 0x30, 0x82, 0x01, 0x22, 0x30, 0x0d, 0x06, 0x09, 0x2a, 0x86, 0x48, 0x86, ++ 0xf7, 0x0d, 0x01, 0x01, 0x01, 0x05, 0x00, 0x03, 0x82, 0x01, 0x0f, 0x00, 0x30, ++ 0x82, 0x01, 0x0a, 0x02, 0x82, 0x01, 0x01, 0x00, 0xa5, 0x08, 0x6c, 0x4c, 0xc7, ++ 0x45, 0x09, 0x6a, 0x4b, 0x0c, 0xa4, 0xc0, 0x87, 0x7f, 0x06, 0x75, 0x0c, 0x43, ++ 0x01, 0x54, 0x64, 0xe0, 0x16, 0x7f, 0x07, 0xed, 0x92, 0x7d, 0x0b, 0xb2, 0x73, ++ 0xbf, 0x0c, 0x0a, 0xc6, 0x4a, 0x45, 0x61, 0xa0, 0xc5, 0x16, 0x2d, 0x96, 0xd3, ++ 0xf5, 0x2b, 0xa0, 0xfb, 0x4d, 0x49, 0x9b, 0x41, 0x80, 0x90, 0x3c, 0xb9, 0x54, ++ 0xfd, 0xe6, 0xbc, 0xd1, 0x9d, 0xc4, 0xa4, 0x18, 0x8a, 0x7f, 0x41, 0x8a, 0x5c, ++ 0x59, 0x83, 0x68, 0x32, 0xbb, 0x8c, 0x47, 0xc9, 0xee, 0x71, 0xbc, 0x21, 0x4f, ++ 0x9a, 0x8a, 0x7c, 0xff, 0x44, 0x3f, 0x8d, 0x8f, 0x32, 0xb2, 0x26, 0x48, 0xae, ++ 0x75, 0xb5, 0xee, 0xc9, 0x4c, 0x1e, 0x4a, 0x19, 0x7e, 0xe4, 0x82, 0x9a, 0x1d, ++ 0x78, 0x77, 0x4d, 0x0c, 0xb0, 0xbd, 0xf6, 0x0f, 0xd3, 0x16, 0xd3, 0xbc, 0xfa, ++ 0x2b, 0xa5, 0x51, 0x38, 0x5d, 0xf5, 0xfb, 0xba, 0xdb, 0x78, 0x02, 0xdb, 0xff, ++ 0xec, 0x0a, 0x1b, 0x96, 0xd5, 0x83, 0xb8, 0x19, 0x13, 0xe9, 0xb6, 0xc0, 0x7b, ++ 0x40, 0x7b, 0xe1, 0x1f, 0x28, 0x27, 0xc9, 0xfa, 0xef, 0x56, 0x5e, 0x1c, 0xe6, ++ 0x7e, 0x94, 0x7e, 0xc0, 0xf0, 0x44, 0xb2, 0x79, 0x39, 0xe5, 0xda, 0xb2, 0x62, ++ 0x8b, 0x4d, 0xbf, 
0x38, 0x70, 0xe2, 0x68, 0x24, 0x14, 0xc9, 0x33, 0xa4, 0x08, ++ 0x37, 0xd5, 0x58, 0x69, 0x5e, 0xd3, 0x7c, 0xed, 0xc1, 0x04, 0x53, 0x08, 0xe7, ++ 0x4e, 0xb0, 0x2a, 0x87, 0x63, 0x08, 0x61, 0x6f, 0x63, 0x15, 0x59, 0xea, 0xb2, ++ 0x2b, 0x79, 0xd7, 0x0c, 0x61, 0x67, 0x8a, 0x5b, 0xfd, 0x5e, 0xad, 0x87, 0x7f, ++ 0xba, 0x86, 0x67, 0x4f, 0x71, 0x58, 0x12, 0x22, 0x04, 0x22, 0x22, 0xce, 0x8b, ++ 0xef, 0x54, 0x71, 0x00, 0xce, 0x50, 0x35, 0x58, 0x76, 0x95, 0x08, 0xee, 0x6a, ++ 0xb1, 0xa2, 0x01, 0xd5, 0x02, 0x03, 0x01, 0x00, 0x01, 0xa3, 0x82, 0x01, 0x76, ++ 0x30, 0x82, 0x01, 0x72, 0x30, 0x12, 0x06, 0x09, 0x2b, 0x06, 0x01, 0x04, 0x01, ++ 0x82, 0x37, 0x15, 0x01, 0x04, 0x05, 0x02, 0x03, 0x01, 0x00, 0x01, 0x30, 0x23, ++ 0x06, 0x09, 0x2b, 0x06, 0x01, 0x04, 0x01, 0x82, 0x37, 0x15, 0x02, 0x04, 0x16, ++ 0x04, 0x14, 0xf8, 0xc1, 0x6b, 0xb7, 0x7f, 0x77, 0x53, 0x4a, 0xf3, 0x25, 0x37, ++ 0x1d, 0x4e, 0xa1, 0x26, 0x7b, 0x0f, 0x20, 0x70, 0x80, 0x30, 0x1d, 0x06, 0x03, ++ 0x55, 0x1d, 0x0e, 0x04, 0x16, 0x04, 0x14, 0x13, 0xad, 0xbf, 0x43, 0x09, 0xbd, ++ 0x82, 0x70, 0x9c, 0x8c, 0xd5, 0x4f, 0x31, 0x6e, 0xd5, 0x22, 0x98, 0x8a, 0x1b, ++ 0xd4, 0x30, 0x19, 0x06, 0x09, 0x2b, 0x06, 0x01, 0x04, 0x01, 0x82, 0x37, 0x14, ++ 0x02, 0x04, 0x0c, 0x1e, 0x0a, 0x00, 0x53, 0x00, 0x75, 0x00, 0x62, 0x00, 0x43, ++ 0x00, 0x41, 0x30, 0x0b, 0x06, 0x03, 0x55, 0x1d, 0x0f, 0x04, 0x04, 0x03, 0x02, ++ 0x01, 0x86, 0x30, 0x0f, 0x06, 0x03, 0x55, 0x1d, 0x13, 0x01, 0x01, 0xff, 0x04, ++ 0x05, 0x30, 0x03, 0x01, 0x01, 0xff, 0x30, 0x1f, 0x06, 0x03, 0x55, 0x1d, 0x23, ++ 0x04, 0x18, 0x30, 0x16, 0x80, 0x14, 0x45, 0x66, 0x52, 0x43, 0xe1, 0x7e, 0x58, ++ 0x11, 0xbf, 0xd6, 0x4e, 0x9e, 0x23, 0x55, 0x08, 0x3b, 0x3a, 0x22, 0x6a, 0xa8, ++ 0x30, 0x5c, 0x06, 0x03, 0x55, 0x1d, 0x1f, 0x04, 0x55, 0x30, 0x53, 0x30, 0x51, ++ 0xa0, 0x4f, 0xa0, 0x4d, 0x86, 0x4b, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, 0x2f, ++ 0x63, 0x72, 0x6c, 0x2e, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, 0x74, ++ 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x6b, 0x69, 0x2f, 0x63, 0x72, 0x6c, 0x2f, ++ 0x70, 0x72, 0x6f, 0x64, 0x75, 0x63, 0x74, 0x73, 0x2f, 0x4d, 0x69, 0x63, 0x43, ++ 0x6f, 0x72, 0x54, 0x68, 0x69, 0x50, 0x61, 0x72, 0x4d, 0x61, 0x72, 0x52, 0x6f, ++ 0x6f, 0x5f, 0x32, 0x30, 0x31, 0x30, 0x2d, 0x31, 0x30, 0x2d, 0x30, 0x35, 0x2e, ++ 0x63, 0x72, 0x6c, 0x30, 0x60, 0x06, 0x08, 0x2b, 0x06, 0x01, 0x05, 0x05, 0x07, ++ 0x01, 0x01, 0x04, 0x54, 0x30, 0x52, 0x30, 0x50, 0x06, 0x08, 0x2b, 0x06, 0x01, ++ 0x05, 0x05, 0x07, 0x30, 0x02, 0x86, 0x44, 0x68, 0x74, 0x74, 0x70, 0x3a, 0x2f, ++ 0x2f, 0x77, 0x77, 0x77, 0x2e, 0x6d, 0x69, 0x63, 0x72, 0x6f, 0x73, 0x6f, 0x66, ++ 0x74, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x6b, 0x69, 0x2f, 0x63, 0x65, 0x72, ++ 0x74, 0x73, 0x2f, 0x4d, 0x69, 0x63, 0x43, 0x6f, 0x72, 0x54, 0x68, 0x69, 0x50, ++ 0x61, 0x72, 0x4d, 0x61, 0x72, 0x52, 0x6f, 0x6f, 0x5f, 0x32, 0x30, 0x31, 0x30, ++ 0x2d, 0x31, 0x30, 0x2d, 0x30, 0x35, 0x2e, 0x63, 0x72, 0x74, 0x30, 0x0d, 0x06, ++ 0x09, 0x2a, 0x86, 0x48, 0x86, 0xf7, 0x0d, 0x01, 0x01, 0x0b, 0x05, 0x00, 0x03, ++ 0x82, 0x02, 0x01, 0x00, 0x35, 0x08, 0x42, 0xff, 0x30, 0xcc, 0xce, 0xf7, 0x76, ++ 0x0c, 0xad, 0x10, 0x68, 0x58, 0x35, 0x29, 0x46, 0x32, 0x76, 0x27, 0x7c, 0xef, ++ 0x12, 0x41, 0x27, 0x42, 0x1b, 0x4a, 0xaa, 0x6d, 0x81, 0x38, 0x48, 0x59, 0x13, ++ 0x55, 0xf3, 0xe9, 0x58, 0x34, 0xa6, 0x16, 0x0b, 0x82, 0xaa, 0x5d, 0xad, 0x82, ++ 0xda, 0x80, 0x83, 0x41, 0x06, 0x8f, 0xb4, 0x1d, 0xf2, 0x03, 0xb9, 0xf3, 0x1a, ++ 0x5d, 0x1b, 0xf1, 0x50, 0x90, 0xf9, 0xb3, 0x55, 0x84, 0x42, 0x28, 0x1c, 0x20, ++ 0xbd, 0xb2, 0xae, 0x51, 0x14, 0xc5, 0xc0, 0xac, 0x97, 0x95, 0x21, 0x1c, 0x90, ++ 0xdb, 
0x0f, 0xfc, 0x77, 0x9e, 0x95, 0x73, 0x91, 0x88, 0xca, 0xbd, 0xbd, 0x52, ++ 0xb9, 0x05, 0x50, 0x0d, 0xdf, 0x57, 0x9e, 0xa0, 0x61, 0xed, 0x0d, 0xe5, 0x6d, ++ 0x25, 0xd9, 0x40, 0x0f, 0x17, 0x40, 0xc8, 0xce, 0xa3, 0x4a, 0xc2, 0x4d, 0xaf, ++ 0x9a, 0x12, 0x1d, 0x08, 0x54, 0x8f, 0xbd, 0xc7, 0xbc, 0xb9, 0x2b, 0x3d, 0x49, ++ 0x2b, 0x1f, 0x32, 0xfc, 0x6a, 0x21, 0x69, 0x4f, 0x9b, 0xc8, 0x7e, 0x42, 0x34, ++ 0xfc, 0x36, 0x06, 0x17, 0x8b, 0x8f, 0x20, 0x40, 0xc0, 0xb3, 0x9a, 0x25, 0x75, ++ 0x27, 0xcd, 0xc9, 0x03, 0xa3, 0xf6, 0x5d, 0xd1, 0xe7, 0x36, 0x54, 0x7a, 0xb9, ++ 0x50, 0xb5, 0xd3, 0x12, 0xd1, 0x07, 0xbf, 0xbb, 0x74, 0xdf, 0xdc, 0x1e, 0x8f, ++ 0x80, 0xd5, 0xed, 0x18, 0xf4, 0x2f, 0x14, 0x16, 0x6b, 0x2f, 0xde, 0x66, 0x8c, ++ 0xb0, 0x23, 0xe5, 0xc7, 0x84, 0xd8, 0xed, 0xea, 0xc1, 0x33, 0x82, 0xad, 0x56, ++ 0x4b, 0x18, 0x2d, 0xf1, 0x68, 0x95, 0x07, 0xcd, 0xcf, 0xf0, 0x72, 0xf0, 0xae, ++ 0xbb, 0xdd, 0x86, 0x85, 0x98, 0x2c, 0x21, 0x4c, 0x33, 0x2b, 0xf0, 0x0f, 0x4a, ++ 0xf0, 0x68, 0x87, 0xb5, 0x92, 0x55, 0x32, 0x75, 0xa1, 0x6a, 0x82, 0x6a, 0x3c, ++ 0xa3, 0x25, 0x11, 0xa4, 0xed, 0xad, 0xd7, 0x04, 0xae, 0xcb, 0xd8, 0x40, 0x59, ++ 0xa0, 0x84, 0xd1, 0x95, 0x4c, 0x62, 0x91, 0x22, 0x1a, 0x74, 0x1d, 0x8c, 0x3d, ++ 0x47, 0x0e, 0x44, 0xa6, 0xe4, 0xb0, 0x9b, 0x34, 0x35, 0xb1, 0xfa, 0xb6, 0x53, ++ 0xa8, 0x2c, 0x81, 0xec, 0xa4, 0x05, 0x71, 0xc8, 0x9d, 0xb8, 0xba, 0xe8, 0x1b, ++ 0x44, 0x66, 0xe4, 0x47, 0x54, 0x0e, 0x8e, 0x56, 0x7f, 0xb3, 0x9f, 0x16, 0x98, ++ 0xb2, 0x86, 0xd0, 0x68, 0x3e, 0x90, 0x23, 0xb5, 0x2f, 0x5e, 0x8f, 0x50, 0x85, ++ 0x8d, 0xc6, 0x8d, 0x82, 0x5f, 0x41, 0xa1, 0xf4, 0x2e, 0x0d, 0xe0, 0x99, 0xd2, ++ 0x6c, 0x75, 0xe4, 0xb6, 0x69, 0xb5, 0x21, 0x86, 0xfa, 0x07, 0xd1, 0xf6, 0xe2, ++ 0x4d, 0xd1, 0xda, 0xad, 0x2c, 0x77, 0x53, 0x1e, 0x25, 0x32, 0x37, 0xc7, 0x6c, ++ 0x52, 0x72, 0x95, 0x86, 0xb0, 0xf1, 0x35, 0x61, 0x6a, 0x19, 0xf5, 0xb2, 0x3b, ++ 0x81, 0x50, 0x56, 0xa6, 0x32, 0x2d, 0xfe, 0xa2, 0x89, 0xf9, 0x42, 0x86, 0x27, ++ 0x18, 0x55, 0xa1, 0x82, 0xca, 0x5a, 0x9b, 0xf8, 0x30, 0x98, 0x54, 0x14, 0xa6, ++ 0x47, 0x96, 0x25, 0x2f, 0xc8, 0x26, 0xe4, 0x41, 0x94, 0x1a, 0x5c, 0x02, 0x3f, ++ 0xe5, 0x96, 0xe3, 0x85, 0x5b, 0x3c, 0x3e, 0x3f, 0xbb, 0x47, 0x16, 0x72, 0x55, ++ 0xe2, 0x25, 0x22, 0xb1, 0xd9, 0x7b, 0xe7, 0x03, 0x06, 0x2a, 0xa3, 0xf7, 0x1e, ++ 0x90, 0x46, 0xc3, 0x00, 0x0d, 0xd6, 0x19, 0x89, 0xe3, 0x0e, 0x35, 0x27, 0x62, ++ 0x03, 0x71, 0x15, 0xa6, 0xef, 0xd0, 0x27, 0xa0, 0xa0, 0x59, 0x37, 0x60, 0xf8, ++ 0x38, 0x94, 0xb8, 0xe0, 0x78, 0x70, 0xf8, 0xba, 0x4c, 0x86, 0x87, 0x94, 0xf6, ++ 0xe0, 0xae, 0x02, 0x45, 0xee, 0x65, 0xc2, 0xb6, 0xa3, 0x7e, 0x69, 0x16, 0x75, ++ 0x07, 0x92, 0x9b, 0xf5, 0xa6, 0xbc, 0x59, 0x83, 0x58 ++}; ++ ++// ++// The most important thing about the variable payload is that it is a list of ++// lists, where the element size of any given *inner* list is constant. ++// ++// Since X509 certificates vary in size, each of our *inner* lists will contain ++// one element only (one X.509 certificate). This is explicitly mentioned in ++// the UEFI specification, in "28.4.1 Signature Database", in a Note. 
++// ++// The list structure looks as follows: ++// ++// struct EFI_VARIABLE_AUTHENTICATION_2 { | ++// struct EFI_TIME { | ++// UINT16 Year; | ++// UINT8 Month; | ++// UINT8 Day; | ++// UINT8 Hour; | ++// UINT8 Minute; | ++// UINT8 Second; | ++// UINT8 Pad1; | ++// UINT32 Nanosecond; | ++// INT16 TimeZone; | ++// UINT8 Daylight; | ++// UINT8 Pad2; | ++// } TimeStamp; | ++// | ++// struct WIN_CERTIFICATE_UEFI_GUID { | | ++// struct WIN_CERTIFICATE { | | ++// UINT32 dwLength; ----------------------------------------+ | ++// UINT16 wRevision; | | ++// UINT16 wCertificateType; | | ++// } Hdr; | +- DataSize ++// | | ++// EFI_GUID CertType; | | ++// UINT8 CertData[1] = { <--- "struct hack" | | ++// struct EFI_SIGNATURE_LIST { | | | ++// EFI_GUID SignatureType; | | | ++// UINT32 SignatureListSize; -------------------------+ | | ++// UINT32 SignatureHeaderSize; | | | ++// UINT32 SignatureSize; ---------------------------+ | | | ++// UINT8 SignatureHeader[SignatureHeaderSize]; | | | | ++// v | | | ++// struct EFI_SIGNATURE_DATA { | | | | ++// EFI_GUID SignatureOwner; | | | | ++// UINT8 SignatureData[1] = { <--- "struct hack" | | | | ++// X.509 payload | | | | ++// } | | | | ++// } Signatures[]; | | | ++// } SigLists[]; | | ++// }; | | ++// } AuthInfo; | | ++// }; | ++// ++// Given that the "struct hack" invokes undefined behavior (which is why C99 ++// introduced the flexible array member), and because subtracting those pesky ++// sizes of 1 is annoying, and because the format is fully specified in the ++// UEFI specification, we'll introduce two matching convenience structures that ++// are customized for our X.509 purposes. ++// ++#pragma pack(1) ++typedef struct { ++ EFI_TIME TimeStamp; ++ ++ // ++ // dwLength covers data below ++ // ++ UINT32 dwLength; ++ UINT16 wRevision; ++ UINT16 wCertificateType; ++ EFI_GUID CertType; ++} SINGLE_HEADER; ++ ++typedef struct { ++ // ++ // SignatureListSize covers data below ++ // ++ EFI_GUID SignatureType; ++ UINT32 SignatureListSize; ++ UINT32 SignatureHeaderSize; // constant 0 ++ UINT32 SignatureSize; ++ ++ // ++ // SignatureSize covers data below ++ // ++ EFI_GUID SignatureOwner; ++ ++ // ++ // X.509 certificate follows ++ // ++} REPEATING_HEADER; ++#pragma pack() ++ ++/** ++ Enroll a set of DER-formatted X.509 certificates in a global variable, ++ overwriting it. ++ ++ The variable will be rewritten with NV+BS+RT+AT attributes. ++ ++ @param[in] VariableName The name of the variable to overwrite. ++ ++ @param[in] VendorGuid The namespace (ie. vendor GUID) of the variable to ++ overwrite. ++ ++ @param[in] ... A list of ++ ++ IN CONST UINT8 *Cert, ++ IN UINTN CertSize, ++ IN CONST EFI_GUID *OwnerGuid ++ ++ triplets. If the first component of a triplet is ++ NULL, then the other two components are not ++ accessed, and processing is terminated. The list of ++ X.509 certificates is enrolled in the variable ++ specified, overwriting it. The OwnerGuid component ++ identifies the agent installing the certificate. ++ ++ @retval EFI_INVALID_PARAMETER The triplet list is empty (ie. the first Cert ++ value is NULL), or one of the CertSize values ++ is 0, or one of the CertSize values would ++ overflow the accumulated UINT32 data size. ++ ++ @retval EFI_OUT_OF_RESOURCES Out of memory while formatting variable ++ payload. ++ ++ @retval EFI_SUCCESS Enrollment successful; the variable has been ++ overwritten (or created). ++ ++ @return Error codes from gRT->GetTime() and ++ gRT->SetVariable(). 
++**/ ++STATIC ++EFI_STATUS ++EFIAPI ++EnrollListOfX509Certs ( ++ IN CHAR16 *VariableName, ++ IN EFI_GUID *VendorGuid, ++ ... ++ ) ++{ ++ UINTN DataSize; ++ SINGLE_HEADER *SingleHeader; ++ REPEATING_HEADER *RepeatingHeader; ++ VA_LIST Marker; ++ CONST UINT8 *Cert; ++ EFI_STATUS Status = EFI_SUCCESS; ++ UINT8 *Data; ++ UINT8 *Position; ++ ++ // ++ // compute total size first, for UINT32 range check, and allocation ++ // ++ DataSize = sizeof *SingleHeader; ++ VA_START (Marker, VendorGuid); ++ for (Cert = VA_ARG (Marker, CONST UINT8 *); ++ Cert != NULL; ++ Cert = VA_ARG (Marker, CONST UINT8 *)) { ++ UINTN CertSize; ++ ++ CertSize = VA_ARG (Marker, UINTN); ++ (VOID)VA_ARG (Marker, CONST EFI_GUID *); ++ ++ if (CertSize == 0 || ++ CertSize > MAX_UINT32 - sizeof *RepeatingHeader || ++ DataSize > MAX_UINT32 - sizeof *RepeatingHeader - CertSize) { ++ Status = EFI_INVALID_PARAMETER; ++ break; ++ } ++ DataSize += sizeof *RepeatingHeader + CertSize; ++ } ++ VA_END (Marker); ++ ++ if (DataSize == sizeof *SingleHeader) { ++ Status = EFI_INVALID_PARAMETER; ++ } ++ if (EFI_ERROR (Status)) { ++ goto Out; ++ } ++ ++ Data = AllocatePool (DataSize); ++ if (Data == NULL) { ++ Status = EFI_OUT_OF_RESOURCES; ++ goto Out; ++ } ++ ++ Position = Data; ++ ++ SingleHeader = (SINGLE_HEADER *)Position; ++ Status = gRT->GetTime (&SingleHeader->TimeStamp, NULL); ++ if (EFI_ERROR (Status)) { ++ goto FreeData; ++ } ++ SingleHeader->TimeStamp.Pad1 = 0; ++ SingleHeader->TimeStamp.Nanosecond = 0; ++ SingleHeader->TimeStamp.TimeZone = 0; ++ SingleHeader->TimeStamp.Daylight = 0; ++ SingleHeader->TimeStamp.Pad2 = 0; ++#if 0 ++ SingleHeader->dwLength = DataSize - sizeof SingleHeader->TimeStamp; ++#else ++ // ++ // This looks like a bug in edk2. According to the UEFI specification, ++ // dwLength is "The length of the entire certificate, including the length of ++ // the header, in bytes". That shouldn't stop right after CertType -- it ++ // should include everything below it. 
++ // ++ SingleHeader->dwLength = sizeof *SingleHeader ++ - sizeof SingleHeader->TimeStamp; ++#endif ++ SingleHeader->wRevision = 0x0200; ++ SingleHeader->wCertificateType = WIN_CERT_TYPE_EFI_GUID; ++ CopyGuid (&SingleHeader->CertType, &gEfiCertPkcs7Guid); ++ Position += sizeof *SingleHeader; ++ ++ VA_START (Marker, VendorGuid); ++ for (Cert = VA_ARG (Marker, CONST UINT8 *); ++ Cert != NULL; ++ Cert = VA_ARG (Marker, CONST UINT8 *)) { ++ UINTN CertSize; ++ CONST EFI_GUID *OwnerGuid; ++ ++ CertSize = VA_ARG (Marker, UINTN); ++ OwnerGuid = VA_ARG (Marker, CONST EFI_GUID *); ++ ++ RepeatingHeader = (REPEATING_HEADER *)Position; ++ CopyGuid (&RepeatingHeader->SignatureType, &gEfiCertX509Guid); ++ RepeatingHeader->SignatureListSize = sizeof *RepeatingHeader + CertSize; ++ RepeatingHeader->SignatureHeaderSize = 0; ++ RepeatingHeader->SignatureSize = ++ sizeof RepeatingHeader->SignatureOwner + CertSize; ++ CopyGuid (&RepeatingHeader->SignatureOwner, OwnerGuid); ++ Position += sizeof *RepeatingHeader; ++ ++ CopyMem (Position, Cert, CertSize); ++ Position += CertSize; ++ } ++ VA_END (Marker); ++ ++ ASSERT (Data + DataSize == Position); ++ ++ Status = gRT->SetVariable (VariableName, VendorGuid, ++ (EFI_VARIABLE_NON_VOLATILE | ++ EFI_VARIABLE_BOOTSERVICE_ACCESS | ++ EFI_VARIABLE_RUNTIME_ACCESS | ++ EFI_VARIABLE_TIME_BASED_AUTHENTICATED_WRITE_ACCESS), ++ DataSize, Data); ++ ++FreeData: ++ FreePool (Data); ++ ++Out: ++ if (EFI_ERROR (Status)) { ++ AsciiPrint ("error: %a(\"%s\", %g): %r\n", __FUNCTION__, VariableName, ++ VendorGuid, Status); ++ } ++ return Status; ++} ++ ++ ++STATIC ++EFI_STATUS ++EFIAPI ++GetExact ( ++ IN CHAR16 *VariableName, ++ IN EFI_GUID *VendorGuid, ++ OUT VOID *Data, ++ IN UINTN DataSize, ++ IN BOOLEAN AllowMissing ++ ) ++{ ++ UINTN Size; ++ EFI_STATUS Status; ++ ++ Size = DataSize; ++ Status = gRT->GetVariable (VariableName, VendorGuid, NULL, &Size, Data); ++ if (EFI_ERROR (Status)) { ++ if (Status == EFI_NOT_FOUND && AllowMissing) { ++ ZeroMem (Data, DataSize); ++ return EFI_SUCCESS; ++ } ++ ++ AsciiPrint ("error: GetVariable(\"%s\", %g): %r\n", VariableName, ++ VendorGuid, Status); ++ return Status; ++ } ++ ++ if (Size != DataSize) { ++ AsciiPrint ("error: GetVariable(\"%s\", %g): expected size 0x%Lx, " ++ "got 0x%Lx\n", VariableName, VendorGuid, (UINT64)DataSize, (UINT64)Size); ++ return EFI_PROTOCOL_ERROR; ++ } ++ ++ return EFI_SUCCESS; ++} ++ ++typedef struct { ++ UINT8 SetupMode; ++ UINT8 SecureBoot; ++ UINT8 SecureBootEnable; ++ UINT8 CustomMode; ++ UINT8 VendorKeys; ++} SETTINGS; ++ ++STATIC ++EFI_STATUS ++EFIAPI ++GetSettings ( ++ OUT SETTINGS *Settings ++ ) ++{ ++ EFI_STATUS Status; ++ ++ Status = GetExact (EFI_SETUP_MODE_NAME, &gEfiGlobalVariableGuid, ++ &Settings->SetupMode, sizeof Settings->SetupMode, FALSE); ++ if (EFI_ERROR (Status)) { ++ return Status; ++ } ++ ++ Status = GetExact (EFI_SECURE_BOOT_MODE_NAME, &gEfiGlobalVariableGuid, ++ &Settings->SecureBoot, sizeof Settings->SecureBoot, FALSE); ++ if (EFI_ERROR (Status)) { ++ return Status; ++ } ++ ++ Status = GetExact (EFI_SECURE_BOOT_ENABLE_NAME, ++ &gEfiSecureBootEnableDisableGuid, &Settings->SecureBootEnable, ++ sizeof Settings->SecureBootEnable, TRUE); ++ if (EFI_ERROR (Status)) { ++ return Status; ++ } ++ ++ Status = GetExact (EFI_CUSTOM_MODE_NAME, &gEfiCustomModeEnableGuid, ++ &Settings->CustomMode, sizeof Settings->CustomMode, FALSE); ++ if (EFI_ERROR (Status)) { ++ return Status; ++ } ++ ++ Status = GetExact (EFI_VENDOR_KEYS_VARIABLE_NAME, &gEfiGlobalVariableGuid, ++ &Settings->VendorKeys, sizeof 
Settings->VendorKeys, FALSE); ++ return Status; ++} ++ ++STATIC ++VOID ++EFIAPI ++PrintSettings ( ++ IN CONST SETTINGS *Settings ++ ) ++{ ++ AsciiPrint ("info: SetupMode=%d SecureBoot=%d SecureBootEnable=%d " ++ "CustomMode=%d VendorKeys=%d\n", Settings->SetupMode, Settings->SecureBoot, ++ Settings->SecureBootEnable, Settings->CustomMode, Settings->VendorKeys); ++} ++ ++ ++INTN ++EFIAPI ++ShellAppMain ( ++ IN UINTN Argc, ++ IN CHAR16 **Argv ++ ) ++{ ++ EFI_STATUS Status; ++ SETTINGS Settings; ++ ++ Status = GetSettings (&Settings); ++ if (EFI_ERROR (Status)) { ++ return 1; ++ } ++ PrintSettings (&Settings); ++ ++ if (Settings.SetupMode != 1) { ++ AsciiPrint ("error: already in User Mode\n"); ++ return 1; ++ } ++ ++ if (Settings.CustomMode != CUSTOM_SECURE_BOOT_MODE) { ++ Settings.CustomMode = CUSTOM_SECURE_BOOT_MODE; ++ Status = gRT->SetVariable (EFI_CUSTOM_MODE_NAME, &gEfiCustomModeEnableGuid, ++ (EFI_VARIABLE_NON_VOLATILE | ++ EFI_VARIABLE_BOOTSERVICE_ACCESS), ++ sizeof Settings.CustomMode, &Settings.CustomMode); ++ if (EFI_ERROR (Status)) { ++ AsciiPrint ("error: SetVariable(\"%s\", %g): %r\n", EFI_CUSTOM_MODE_NAME, ++ &gEfiCustomModeEnableGuid, Status); ++ return 1; ++ } ++ } ++ ++ Status = EnrollListOfX509Certs ( ++ EFI_IMAGE_SECURITY_DATABASE, ++ &gEfiImageSecurityDatabaseGuid, ++ MicrosoftPCA, sizeof MicrosoftPCA, &gEfiCallerIdGuid, ++ MicrosoftUefiCA, sizeof MicrosoftUefiCA, &gEfiCallerIdGuid, ++ NULL); ++ if (EFI_ERROR (Status)) { ++ return 1; ++ } ++ ++ Status = EnrollListOfX509Certs ( ++ EFI_KEY_EXCHANGE_KEY_NAME, ++ &gEfiGlobalVariableGuid, ++ ExampleCert, sizeof ExampleCert, &gEfiCallerIdGuid, ++ MicrosoftKEK, sizeof MicrosoftKEK, &gEfiCallerIdGuid, ++ NULL); ++ if (EFI_ERROR (Status)) { ++ return 1; ++ } ++ ++ Status = EnrollListOfX509Certs ( ++ EFI_PLATFORM_KEY_NAME, ++ &gEfiGlobalVariableGuid, ++ ExampleCert, sizeof ExampleCert, &gEfiGlobalVariableGuid, ++ NULL); ++ if (EFI_ERROR (Status)) { ++ return 1; ++ } ++ ++ Settings.CustomMode = STANDARD_SECURE_BOOT_MODE; ++ Status = gRT->SetVariable (EFI_CUSTOM_MODE_NAME, &gEfiCustomModeEnableGuid, ++ EFI_VARIABLE_NON_VOLATILE | EFI_VARIABLE_BOOTSERVICE_ACCESS, ++ sizeof Settings.CustomMode, &Settings.CustomMode); ++ if (EFI_ERROR (Status)) { ++ AsciiPrint ("error: SetVariable(\"%s\", %g): %r\n", EFI_CUSTOM_MODE_NAME, ++ &gEfiCustomModeEnableGuid, Status); ++ return 1; ++ } ++ ++ Status = GetSettings (&Settings); ++ if (EFI_ERROR (Status)) { ++ return 1; ++ } ++ PrintSettings (&Settings); ++ ++ if (Settings.SetupMode != 0 || Settings.SecureBoot != 1 || ++ Settings.SecureBootEnable != 1 || Settings.CustomMode != 0 || ++ Settings.VendorKeys != 0) { ++ AsciiPrint ("error: unexpected\n"); ++ return 1; ++ } ++ ++ AsciiPrint ("info: success\n"); ++ return 0; ++} +diff --git a/OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.inf b/OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.inf +new file mode 100644 +index 0000000..ac919bb +--- /dev/null ++++ b/OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.inf +@@ -0,0 +1,51 @@ ++## @file ++# Enroll default PK, KEK, DB. ++# ++# Copyright (C) 2014, Red Hat, Inc. ++# ++# This program and the accompanying materials are licensed and made available ++# under the terms and conditions of the BSD License which accompanies this ++# distribution. The full text of the license may be found at ++# http://opensource.org/licenses/bsd-license. ++# ++# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, ++# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR ++# IMPLIED. 
++## ++ ++[Defines] ++ INF_VERSION = 0x00010006 ++ BASE_NAME = EnrollDefaultKeys ++ FILE_GUID = D5C1DF0B-1BAC-4EDF-BA48-08834009CA5A ++ MODULE_TYPE = UEFI_APPLICATION ++ VERSION_STRING = 0.1 ++ ENTRY_POINT = ShellCEntryLib ++ ++# ++# VALID_ARCHITECTURES = IA32 X64 ++# ++ ++[Sources] ++ EnrollDefaultKeys.c ++ ++[Packages] ++ MdePkg/MdePkg.dec ++ MdeModulePkg/MdeModulePkg.dec ++ SecurityPkg/SecurityPkg.dec ++ ShellPkg/ShellPkg.dec ++ ++[Guids] ++ gEfiCertPkcs7Guid ++ gEfiCertX509Guid ++ gEfiCustomModeEnableGuid ++ gEfiGlobalVariableGuid ++ gEfiImageSecurityDatabaseGuid ++ gEfiSecureBootEnableDisableGuid ++ ++[LibraryClasses] ++ BaseMemoryLib ++ DebugLib ++ MemoryAllocationLib ++ ShellCEntryLib ++ UefiLib ++ UefiRuntimeServicesTableLib +diff --git a/OvmfPkg/OvmfPkgIa32.dsc b/OvmfPkg/OvmfPkgIa32.dsc +index fa9661c..e2e6ba3 100644 +--- a/OvmfPkg/OvmfPkgIa32.dsc ++++ b/OvmfPkg/OvmfPkgIa32.dsc +@@ -764,6 +764,10 @@ + + !if $(SECURE_BOOT_ENABLE) == TRUE + SecurityPkg/VariableAuthenticated/SecureBootConfigDxe/SecureBootConfigDxe.inf ++ OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.inf { ++ ++ ShellCEntryLib|ShellPkg/Library/UefiShellCEntryLib/UefiShellCEntryLib.inf ++ } + !endif + + OvmfPkg/PlatformDxe/Platform.inf +diff --git a/OvmfPkg/OvmfPkgIa32X64.dsc b/OvmfPkg/OvmfPkgIa32X64.dsc +index 667584a..a0ae1aa 100644 +--- a/OvmfPkg/OvmfPkgIa32X64.dsc ++++ b/OvmfPkg/OvmfPkgIa32X64.dsc +@@ -773,6 +773,10 @@ + + !if $(SECURE_BOOT_ENABLE) == TRUE + SecurityPkg/VariableAuthenticated/SecureBootConfigDxe/SecureBootConfigDxe.inf ++ OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.inf { ++ ++ ShellCEntryLib|ShellPkg/Library/UefiShellCEntryLib/UefiShellCEntryLib.inf ++ } + !endif + + OvmfPkg/PlatformDxe/Platform.inf +diff --git a/OvmfPkg/OvmfPkgX64.dsc b/OvmfPkg/OvmfPkgX64.dsc +index 5ae8469..87cee52 100644 +--- a/OvmfPkg/OvmfPkgX64.dsc ++++ b/OvmfPkg/OvmfPkgX64.dsc +@@ -771,6 +771,10 @@ + + !if $(SECURE_BOOT_ENABLE) == TRUE + SecurityPkg/VariableAuthenticated/SecureBootConfigDxe/SecureBootConfigDxe.inf ++ OvmfPkg/EnrollDefaultKeys/EnrollDefaultKeys.inf { ++ ++ ShellCEntryLib|ShellPkg/Library/UefiShellCEntryLib/UefiShellCEntryLib.inf ++ } + !endif + + OvmfPkg/PlatformDxe/Platform.inf diff --git a/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/VfrCompile-increase-path-length-limit.patch b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/VfrCompile-increase-path-length-limit.patch new file mode 100644 index 000000000..bb12d8beb --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/VfrCompile-increase-path-length-limit.patch @@ -0,0 +1,33 @@ +From c7722d10c7bcf6be0adcf54abb1d406599dd7914 Mon Sep 17 00:00:00 2001 +From: Patrick Ohly +Date: Fri, 24 Feb 2017 01:40:02 +0100 +Subject: [PATCH] VfrCompile: increase path length limit + +The VfrCompile tool has a hard-coded maximum length for path names +which turned out to be too small by around 20 characters in the Yocto +autobuilder setup. Increasing the maximum by a factor of 4 is +relatively easy and makes the problem less likely. + +Upstream-Status: Inappropriate [configuration] + +Signed-off-by: Patrick Ohly +--- + BaseTools/Source/C/VfrCompile/EfiVfr.h | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/BaseTools/Source/C/VfrCompile/EfiVfr.h b/BaseTools/Source/C/VfrCompile/EfiVfr.h +index d187902..9ad4a7b 100644 +--- a/BaseTools/Source/C/VfrCompile/EfiVfr.h ++++ b/BaseTools/Source/C/VfrCompile/EfiVfr.h +@@ -19,7 +19,7 @@ WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. 
+ #include "Common/UefiInternalFormRepresentation.h" + #include "Common/MdeModuleHii.h" + +-#define MAX_PATH 255 ++#define MAX_PATH 1023 + #define MAX_VFR_LINE_LEN 4096 + + #define EFI_IFR_MAX_LENGTH 0xFF +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/ovmf-shell-image.wks b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/ovmf-shell-image.wks new file mode 100644 index 000000000..1d2f16bd3 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf/ovmf-shell-image.wks @@ -0,0 +1,4 @@ +# short-description: Create an EFI disk image with just the EFI system partition + +part / --source rootfs --ondisk sda --fstype=vfat --align 1024 +bootloader --ptable gpt --timeout=5 diff --git a/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf_git.bb b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf_git.bb new file mode 100644 index 000000000..9d988e9d4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/ovmf/ovmf_git.bb @@ -0,0 +1,243 @@ +DESCRIPTION = "OVMF - UEFI firmware for Qemu and KVM" +HOMEPAGE = "http://sourceforge.net/apps/mediawiki/tianocore/index.php?title=OVMF" +LICENSE = "BSD" +LICENSE_class-target = "${@bb.utils.contains('PACKAGECONFIG', 'secureboot', 'BSD & OpenSSL', 'BSD', d)}" +LIC_FILES_CHKSUM = "file://OvmfPkg/License.txt;md5=343dc88e82ff33d042074f62050c3496" + +# Enabling Secure Boot adds a dependency on OpenSSL and implies +# compiling OVMF twice, so it is disabled by default. Distros +# may change that default. +PACKAGECONFIG ??= "" +PACKAGECONFIG[secureboot] = ",,," + +SRC_URI = "git://github.com/tianocore/edk2.git;branch=master \ + file://0002-ovmf-update-path-to-native-BaseTools.patch \ + file://0003-BaseTools-makefile-adjust-to-build-in-under-bitbake.patch \ + file://VfrCompile-increase-path-length-limit.patch \ + file://0001-MdeModulePkg-UefiHiiLib-Fix-incorrect-comparison-exp.patch \ + " + +SRC_URI_append_class-target = " \ + ${@bb.utils.contains('PACKAGECONFIG', 'secureboot', 'http://www.openssl.org/source/openssl-1.0.2j.tar.gz;name=openssl;subdir=${S}/CryptoPkg/Library/OpensslLib', '', d)} \ + file://0007-OvmfPkg-EnrollDefaultKeys-application-for-enrolling-.patch \ +" + +SRCREV="4575a602ca6072ee9d04150b38bfb143cbff8588" +SRC_URI[openssl.md5sum] = "96322138f0b69e61b7212bc53d5e912b" +SRC_URI[openssl.sha256sum] = "e7aff292be21c259c6af26469c7a9b3ba26e9abaaffd325e3dccc9785256c431" + +inherit deploy + +PARALLEL_MAKE_class-native = "" + +S = "${WORKDIR}/git" + +DEPENDS_class-native="util-linux-native iasl-native ossp-uuid-native qemu-native" + +DEPENDS_class-target="ovmf-native" + +DEPENDS_append = " nasm-native" + +EDK_TOOLS_DIR="edk2_basetools" + +# OVMF has trouble building with the default optimization of -O2. +BUILD_OPTIMIZATION="-pipe" + +# OVMF supports IA only, although it could conceivably support ARM someday. +COMPATIBLE_HOST='(i.86|x86_64).*' + +# Additional build flags for OVMF with Secure Boot. +# Fedora also uses "-D SMM_REQUIRE -D EXCLUDE_SHELL_FROM_FD". +OVMF_SECURE_BOOT_EXTRA_FLAGS ??= "" +OVMF_SECURE_BOOT_FLAGS = "-DSECURE_BOOT_ENABLE=TRUE ${OVMF_SECURE_BOOT_EXTRA_FLAGS}" + +do_patch[postfuncs] += "fix_basetools_location" +fix_basetools_location () { +} +fix_basetools_location_class-target() { + # Replaces the fake path inserted by 0002-ovmf-update-path-to-native-BaseTools.patch. + # Necessary for finding the actual BaseTools from ovmf-native. 
+ sed -i -e 's#BBAKE_EDK_TOOLS_PATH#${STAGING_BINDIR_NATIVE}/${EDK_TOOLS_DIR}#' ${S}/OvmfPkg/build.sh +} + +do_patch[postfuncs] += "fix_iasl" +fix_iasl() { +} +fix_iasl_class-native() { + # iasl is not installed under /usr/bin when building with OE. + sed -i -e 's#/usr/bin/iasl#${STAGING_BINDIR_NATIVE}/iasl#' ${S}/BaseTools/Conf/tools_def.template +} + +# Inject CC and friends into the build. LINKER already is in GNUmakefile. +# Must be idempotent and thus remove old assignments that were inserted +# earlier. +do_patch[postfuncs] += "fix_toolchain" +fix_toolchain() { + sed -i \ + -e '/^\(CC\|CXX\|AS\|AR\|LD\|LINKER\) =/d' \ + -e '/^APPLICATION/a CC = ${CC}\nCXX = ${CXX}\nAS = ${AS}\nAR = ${AR}\nLD = ${LD}\nLINKER = $(CC)' \ + ${S}/BaseTools/Source/C/Makefiles/app.makefile + sed -i \ + -e '/^\(CC\|CXX\|AS\|AR\|LD\)/d' \ + -e '/^VFR_CPPFLAGS/a CC = ${CC}\nCXX = ${CXX}\nAS = ${AS}\nAR = ${AR}\nLD = ${LD}' \ + ${S}/BaseTools/Source/C/VfrCompile/GNUmakefile +} +fix_toolchain_append_class-native() { + # This tools_def.template is going to be used by the target ovmf and + # defines which compilers to use. For the GCC toolchain definitions, + # that will be ${HOST_PREFIX}gcc. However, "make" doesn't need that + # prefix. + # + # Injecting ENV(HOST_PREFIX) matches exporting that value as env + # variable in do_compile_class-target. + sed -i \ + -e 's#\(ENV\|DEF\)(GCC.*_PREFIX)#ENV(HOST_PREFIX)#' \ + -e 's#ENV(HOST_PREFIX)make#make#' \ + ${S}/BaseTools/Conf/tools_def.template + sed -i \ + -e '/^\(LFLAGS\|CFLAGS\) +=/d' \ + -e '/^LINKER/a LFLAGS += ${BUILD_LDFLAGS}\nCFLAGS += ${BUILD_CFLAGS}' \ + ${S}/BaseTools/Source/C/Makefiles/app.makefile \ + ${S}/BaseTools/Source/C/VfrCompile/GNUmakefile + # Linking with gold fails: + # internal error in do_layout, at ../../gold/object.cc:1821 + # make: *** [.../OUTPUT/Facs.acpi] Error 1 + # We intentionally hard-code the use of ld.bfd regardless of DISTRO_FEATURES + # to make ovmf-native reusable across distros. + sed -i \ + -e 's#^\(DEFINE GCC.*DLINK.*FLAGS *=\)#\1 -fuse-ld=bfd#' \ + ${S}/BaseTools/Conf/tools_def.template +} + +GCC_VER="$(${CC} -v 2>&1 | tail -n1 | awk '{print $3}')" + +fixup_target_tools() { + case ${1} in + 4.4.*) + FIXED_GCCVER=GCC44 + ;; + 4.5.*) + FIXED_GCCVER=GCC45 + ;; + 4.6.*) + FIXED_GCCVER=GCC46 + ;; + 4.7.*) + FIXED_GCCVER=GCC47 + ;; + 4.8.*) + FIXED_GCCVER=GCC48 + ;; + 4.9.*) + FIXED_GCCVER=GCC49 + ;; + *) + FIXED_GCCVER=GCC5 + ;; + esac + echo ${FIXED_GCCVER} +} + +do_compile_class-native() { + oe_runmake -C ${S}/BaseTools +} + +do_compile_class-target() { + export LFLAGS="${LDFLAGS}" + PARALLEL_JOBS="${@ '${PARALLEL_MAKE}'.replace('-j', '-n')}" + OVMF_ARCH="X64" + if [ "${TARGET_ARCH}" != "x86_64" ] ; then + OVMF_ARCH="IA32" + fi + + # The build for the target uses BaseTools/Conf/tools_def.template + # from ovmf-native to find the compiler, which depends on + # exporting HOST_PREFIX. + export HOST_PREFIX="${HOST_PREFIX}" + + # BaseTools/Conf gets copied to Conf, but only if that does not + # exist yet. To ensure that an updated template gets used during + # incremental builds, we need to remove the copy before we start. + rm -f `ls ${S}/Conf/*.txt | grep -v ReadMe.txt` + + # ${WORKDIR}/ovmf is a well-known location where do_install and + # do_deploy will be able to find the files. 
+ rm -rf ${WORKDIR}/ovmf + mkdir ${WORKDIR}/ovmf + OVMF_DIR_SUFFIX="X64" + if [ "${TARGET_ARCH}" != "x86_64" ] ; then + OVMF_DIR_SUFFIX="Ia32" # Note the different capitalization + fi + FIXED_GCCVER=$(fixup_target_tools ${GCC_VER}) + bbnote FIXED_GCCVER is ${FIXED_GCCVER} + build_dir="${S}/Build/Ovmf$OVMF_DIR_SUFFIX/RELEASE_${FIXED_GCCVER}" + + bbnote "Building without Secure Boot." + rm -rf ${S}/Build/Ovmf$OVMF_DIR_SUFFIX + ${S}/OvmfPkg/build.sh $PARALLEL_JOBS -a $OVMF_ARCH -b RELEASE -t ${FIXED_GCCVER} + ln ${build_dir}/FV/OVMF.fd ${WORKDIR}/ovmf/ovmf.fd + ln ${build_dir}/FV/OVMF_CODE.fd ${WORKDIR}/ovmf/ovmf.code.fd + ln ${build_dir}/FV/OVMF_VARS.fd ${WORKDIR}/ovmf/ovmf.vars.fd + ln ${build_dir}/${OVMF_ARCH}/Shell.efi ${WORKDIR}/ovmf/ + + if ${@bb.utils.contains('PACKAGECONFIG', 'secureboot', 'true', 'false', d)}; then + # See CryptoPkg/Library/OpensslLib/Patch-HOWTO.txt and + # https://src.fedoraproject.org/cgit/rpms/edk2.git/tree/ for + # building with Secure Boot enabled. + bbnote "Building with Secure Boot." + rm -rf ${S}/Build/Ovmf$OVMF_DIR_SUFFIX + if ! [ -f ${S}/CryptoPkg/Library/OpensslLib/openssl-*/edk2-patch-applied ]; then + ( cd ${S}/CryptoPkg/Library/OpensslLib/openssl-* && patch -p1 <$(echo ../EDKII_openssl-*.patch) && touch edk2-patch-applied ) + fi + ( cd ${S}/CryptoPkg/Library/OpensslLib/ && ./Install.sh ) + ${S}/OvmfPkg/build.sh $PARALLEL_JOBS -a $OVMF_ARCH -b RELEASE -t ${FIXED_GCCVER} ${OVMF_SECURE_BOOT_FLAGS} + ln ${build_dir}/FV/OVMF.fd ${WORKDIR}/ovmf/ovmf.secboot.fd + ln ${build_dir}/FV/OVMF_CODE.fd ${WORKDIR}/ovmf/ovmf.secboot.code.fd + ln ${build_dir}/${OVMF_ARCH}/EnrollDefaultKeys.efi ${WORKDIR}/ovmf/ + fi +} + +do_install_class-native() { + install -d ${D}/${bindir}/edk2_basetools + cp -r ${S}/BaseTools ${D}/${bindir}/${EDK_TOOLS_DIR} +} + +do_install_class-target() { + # Content for UEFI shell iso. We install the EFI shell as + # bootx64/ia32.efi because then it can be started even when the + # firmware itself does not contain it. + install -d ${D}/efi/boot + install ${WORKDIR}/ovmf/Shell.efi ${D}/efi/boot/boot${@ "ia32" if "${TARGET_ARCH}" != "x86_64" else "x64"}.efi + if ${@bb.utils.contains('PACKAGECONFIG', 'secureboot', 'true', 'false', d)}; then + install ${WORKDIR}/ovmf/EnrollDefaultKeys.efi ${D} + fi +} + +# This always gets packaged because ovmf-shell-image depends on it. +# This allows testing that recipe in all configurations because it +# can always be part of a world build. +# +# However, EnrollDefaultKeys.efi is only included when Secure Boot is enabled. +PACKAGES =+ "ovmf-shell-efi" +FILES_ovmf-shell-efi = " \ + EnrollDefaultKeys.efi \ + efi/ \ +" + +do_deploy() { +} +do_deploy[cleandirs] = "${DEPLOYDIR}" +do_deploy_class-target() { + # For use with "runqemu ovmf". 
+ for i in \ + ovmf \ + ovmf.code \ + ovmf.vars \ + ${@bb.utils.contains('PACKAGECONFIG', 'secureboot', 'ovmf.secboot ovmf.secboot.code', '', d)} \ + ; do + qemu-img convert -f raw -O qcow2 ${WORKDIR}/ovmf/$i.fd ${DEPLOYDIR}/$i.qcow2 + done +} +addtask do_deploy after do_compile before do_build + +BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-core/packagegroups/nativesdk-packagegroup-sdk-host.bb b/import-layers/yocto-poky/meta/recipes-core/packagegroups/nativesdk-packagegroup-sdk-host.bb index 2c2abed33..2ca639294 100644 --- a/import-layers/yocto-poky/meta/recipes-core/packagegroups/nativesdk-packagegroup-sdk-host.bb +++ b/import-layers/yocto-poky/meta/recipes-core/packagegroups/nativesdk-packagegroup-sdk-host.bb @@ -22,7 +22,8 @@ RDEPENDS_${PN} = "\ nativesdk-automake \ nativesdk-shadow \ nativesdk-makedevs \ - nativesdk-smartpm \ + nativesdk-dnf \ + nativesdk-cmake \ nativesdk-postinst-intercept \ ${@bb.utils.contains('DISTRO_FEATURES', 'wayland', 'nativesdk-wayland', '', d)} \ " diff --git a/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-base.bb b/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-base.bb index 2e94fdefb..0069e3e0f 100644 --- a/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-base.bb +++ b/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-base.bb @@ -110,8 +110,8 @@ python __anonymous () { # If Distro want wifi and machine feature wifi/pci/pcmcia/usbhost (one of them) # then include packagegroup-base-wifi in packagegroup-base - distro_features = set(d.getVar("DISTRO_FEATURES", True).split()) - machine_features= set(d.getVar("MACHINE_FEATURES", True).split()) + distro_features = set(d.getVar("DISTRO_FEATURES").split()) + machine_features= set(d.getVar("MACHINE_FEATURES").split()) if "bluetooth" in distro_features and not "bluetooth" in machine_features and ("pcmcia" in machine_features or "pci" in machine_features or "usbhost" in machine_features): d.setVar("ADD_BT", "packagegroup-base-bluetooth") diff --git a/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb b/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb index aceba78de..7d6d41473 100644 --- a/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb +++ b/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-sdk.bb @@ -28,10 +28,7 @@ RDEPENDS_packagegroup-core-sdk = "\ SANITIZERS = "libasan-dev libubsan-dev" SANITIZERS_aarch64 = "" -SANITIZERS_mips = "" -SANITIZERS_mipsel = "" -SANITIZERS_mips64 = "" -SANITIZERS_mips64n32 = "" +SANITIZERS_mipsarch = "" SANITIZERS_nios2 = "" SANITIZERS_powerpc64 = "" SANITIZERS_sparc = "" @@ -45,7 +42,7 @@ RRECOMMENDS_packagegroup-core-sdk = "\ #python generate_sdk_pkgs () { # poky_pkgs = read_pkgdata('packagegroup-core', d)['PACKAGES'] -# pkgs = d.getVar('PACKAGES', True).split() +# pkgs = d.getVar('PACKAGES').split() # for pkg in poky_pkgs.split(): # newpkg = pkg.replace('packagegroup-core', 'packagegroup-core-sdk') # diff --git a/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb b/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb index dd9844504..946c947c8 100644 --- a/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb +++ b/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-core-tools-profile.bb @@ -36,10 +36,7 @@ 
PERF_libc-musl = "" SYSTEMTAP = "systemtap" SYSTEMTAP_libc-uclibc = "" SYSTEMTAP_libc-musl = "" -SYSTEMTAP_mips = "" -SYSTEMTAP_mipsel = "" -SYSTEMTAP_mips64 = "" -SYSTEMTAP_mips64n32 = "" +SYSTEMTAP_mipsarch = "" SYSTEMTAP_nios2 = "" SYSTEMTAP_aarch64 = "" @@ -65,15 +62,13 @@ BABELTRACE = "babeltrace" VALGRIND = "valgrind" VALGRIND_libc-uclibc = "" VALGRIND_libc-musl = "" -VALGRIND_mips = "" -VALGRIND_mipsel = "" -VALGRIND_mips64 = "" -VALGRIND_mips64n32 = "" +VALGRIND_mipsarch = "" VALGRIND_nios2 = "" VALGRIND_armv4 = "" VALGRIND_armv5 = "" VALGRIND_armv6 = "" VALGRIND_aarch64 = "" +VALGRIND_linux-gnux32 = "" RDEPENDS_${PN} = "\ ${PROFILETOOLS} \ diff --git a/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb b/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb index 73430b86c..c1bbdfcf3 100644 --- a/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb +++ b/import-layers/yocto-poky/meta/recipes-core/packagegroups/packagegroup-self-hosted.bb @@ -10,6 +10,9 @@ inherit packagegroup distro_features_check # rdepends on libx11-dev REQUIRED_DISTRO_FEATURES = "x11" +# rdepends on libgl +REQUIRED_DISTRO_FEATURES += "opengl" + PACKAGES = "\ packagegroup-self-hosted \ packagegroup-self-hosted-debug \ diff --git a/import-layers/yocto-poky/meta/recipes-core/psplash/psplash_git.bb b/import-layers/yocto-poky/meta/recipes-core/psplash/psplash_git.bb index 053742651..44297e1a6 100644 --- a/import-layers/yocto-poky/meta/recipes-core/psplash/psplash_git.bb +++ b/import-layers/yocto-poky/meta/recipes-core/psplash/psplash_git.bb @@ -16,8 +16,8 @@ SRC_URI = "git://git.yoctoproject.org/${BPN} \ SPLASH_IMAGES = "file://psplash-poky-img.h;outsuffix=default" python __anonymous() { - oldpkgs = d.getVar("PACKAGES", True).split() - splashfiles = d.getVar('SPLASH_IMAGES', True).split() + oldpkgs = d.getVar("PACKAGES").split() + splashfiles = d.getVar('SPLASH_IMAGES').split() pkgs = [] localpaths = [] haspng = False @@ -50,8 +50,8 @@ python __anonymous() { d.appendVar("DEPENDS", " gdk-pixbuf-native") d.prependVar("PACKAGES", "%s " % (" ".join(pkgs))) - mlprefix = d.getVar('MLPREFIX', True) or '' - pn = d.getVar('PN', True) or '' + mlprefix = d.getVar('MLPREFIX') or '' + pn = d.getVar('PN') or '' for p in pkgs: ep = '%s%s' % (mlprefix, p) epsplash = '%s%s' % (mlprefix, 'psplash') @@ -74,11 +74,11 @@ python do_compile () { import shutil # Build a separate executable for each splash image - workdir = d.getVar('WORKDIR', True) - convertscript = "%s/make-image-header.sh" % d.getVar('S', True) - destfile = "%s/psplash-poky-img.h" % d.getVar('S', True) - localfiles = d.getVar('SPLASH_LOCALPATHS', True).split() - outputfiles = d.getVar('SPLASH_INSTALL', True).split() + workdir = d.getVar('WORKDIR') + convertscript = "%s/make-image-header.sh" % d.getVar('S') + destfile = "%s/psplash-poky-img.h" % d.getVar('S') + localfiles = d.getVar('SPLASH_LOCALPATHS').split() + outputfiles = d.getVar('SPLASH_INSTALL').split() for localfile, outputfile in zip(localfiles, outputfiles): if localfile.endswith(".png"): outp = oe.utils.getstatusoutput('%s %s POKY' % (convertscript, os.path.join(workdir, localfile))) @@ -89,7 +89,7 @@ python do_compile () { shutil.copyfile(os.path.join(workdir, localfile), destfile) # For some reason just updating the header is not enough, we have to touch the .c # file in order to get it to rebuild - os.utime("%s/psplash.c" % d.getVar('S', True), None) + os.utime("%s/psplash.c" % d.getVar('S'), None) 
bb.build.exec_func("oe_runmake", d) shutil.copyfile("psplash", outputfile) } @@ -110,7 +110,7 @@ FILES_${PN} += "/mnt/.psplash" INITSCRIPT_NAME = "psplash.sh" INITSCRIPT_PARAMS = "start 0 S . stop 20 0 1 6 ." -DEPENDS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}" +PACKAGE_WRITE_DEPS_append = " ${@bb.utils.contains('DISTRO_FEATURES','systemd','systemd-systemctl-native','',d)}" pkg_postinst_${PN} () { if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)}; then if [ -n "$D" ]; then diff --git a/import-layers/yocto-poky/meta/recipes-core/readline/files/config-dirent-symbols.patch b/import-layers/yocto-poky/meta/recipes-core/readline/files/config-dirent-symbols.patch deleted file mode 100644 index cfff03c0f..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/readline/files/config-dirent-symbols.patch +++ /dev/null @@ -1,34 +0,0 @@ -Fix the names of the STRUCT_DIRENT_D symbols to match the names used by both -configure and the source code. - -Upstream-Status: Submitted (http://lists.gnu.org/archive/html/bug-readline/2014-05/msg00008.html) -Signed-off-by: Ross Burton - -From e48656811550774892fd4e0f4b3c7d418422da52 Mon Sep 17 00:00:00 2001 -From: Ross Burton -Date: Tue, 27 May 2014 22:32:31 +0100 -Subject: [PATCH] config.h.in: fix names of STRUCT_DIRENT_D symbols - ---- - config.h.in | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) - -diff --git a/config.h.in b/config.h.in -index a67e43e..a42acc5 100644 ---- a/config.h.in -+++ b/config.h.in -@@ -244,9 +244,9 @@ - - #undef HAVE_GETPW_DECLS - --#undef STRUCT_DIRENT_HAS_D_INO -+#undef HAVE_STRUCT_DIRENT_D_INO - --#undef STRUCT_DIRENT_HAS_D_FILENO -+#undef HAVE_STRUCT_DIRENT_D_FILENO - - #undef HAVE_BSD_SIGNALS - --- -1.7.10.4 - diff --git a/import-layers/yocto-poky/meta/recipes-core/readline/readline-5.2/configure-fix.patch b/import-layers/yocto-poky/meta/recipes-core/readline/readline-5.2/configure-fix.patch deleted file mode 100644 index be60a9861..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/readline/readline-5.2/configure-fix.patch +++ /dev/null @@ -1,26 +0,0 @@ -Upstream-Status: Pending - -Without this it fails to link against libtermcap causing various missing symbols -issues. - -RP - 8/10/08 - -Index: readline-5.2/configure.in -=================================================================== ---- readline-5.2.orig/configure.in 2008-10-08 09:58:52.000000000 +0100 -+++ readline-5.2/configure.in 2008-10-08 09:59:03.000000000 +0100 -@@ -211,10 +211,10 @@ - AC_MSG_CHECKING(configuration for building shared libraries) - eval `TERMCAP_LIB=$TERMCAP_LIB ${CONFIG_SHELL-/bin/sh} ${srcdir}/support/shobj-conf -C "${CC}" -c ${host_cpu} -o ${host_os} -v ${host_vendor}` - --# case "$SHLIB_LIBS" in --# *curses*|*termcap*|*termlib*) ;; --# *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;; --# esac -+ case "$SHLIB_LIBS" in -+ *curses*|*termcap*|*termlib*) ;; -+ *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;; -+ esac - - AC_SUBST(SHOBJ_CC) - AC_SUBST(SHOBJ_CFLAGS) diff --git a/import-layers/yocto-poky/meta/recipes-core/readline/readline-5.2/fix-redundant-rpath.patch b/import-layers/yocto-poky/meta/recipes-core/readline/readline-5.2/fix-redundant-rpath.patch deleted file mode 100644 index 3166b470f..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/readline/readline-5.2/fix-redundant-rpath.patch +++ /dev/null @@ -1,21 +0,0 @@ -This support script ends up hardcoding unnecessary rpaths into the libraries. 
We -will search $libdir automatically so this is just wastes space. There may be some -cases this is necessary but our use cases aren't one of them. - -Upstream-Status: Inappropriate - -Signed-off-by: Saul Wold - -Index: readline-5.2/support/shobj-conf -=================================================================== ---- readline-5.2.orig/support/shobj-conf -+++ readline-5.2/support/shobj-conf -@@ -110,7 +110,7 @@ linux*-*|gnu*-*|k*bsd*-gnu-*) - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - -- SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir) -Wl,-soname,`basename $@ $(SHLIB_MINOR)`' -+ SHLIB_XLDFLAGS='-Wl,-soname,`basename $@ $(SHLIB_MINOR)`' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - diff --git a/import-layers/yocto-poky/meta/recipes-core/readline/readline-6.3/configure-fix.patch b/import-layers/yocto-poky/meta/recipes-core/readline/readline-6.3/configure-fix.patch deleted file mode 100644 index ef3104f8a..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/readline/readline-6.3/configure-fix.patch +++ /dev/null @@ -1,35 +0,0 @@ -Upstream-Status: Pending - -Without this it fails to link against libtermcap causing various missing -symbols issues. - -RP - 8/10/08 - -Support 6.3 which uses configure.ac rather than configure.in. -Signed-off-by: Hongxu Jia ---- - configure.ac | 8 ++++---- - 1 file changed, 4 insertions(+), 4 deletions(-) - -diff --git a/configure.ac b/configure.ac -index cea8f91..9075b8f 100644 ---- a/configure.ac -+++ b/configure.ac -@@ -218,10 +218,10 @@ if test -f ${srcdir}/support/shobj-conf; then - AC_MSG_CHECKING(configuration for building shared libraries) - eval `TERMCAP_LIB=$TERMCAP_LIB ${CONFIG_SHELL-/bin/sh} ${srcdir}/support/shobj-conf -C "${CC}" -c ${host_cpu} -o ${host_os} -v ${host_vendor}` - --# case "$SHLIB_LIBS" in --# *curses*|*termcap*|*termlib*) ;; --# *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;; --# esac -+ case "$SHLIB_LIBS" in -+ *curses*|*termcap*|*termlib*) ;; -+ *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;; -+ esac - - AC_SUBST(SHOBJ_CC) - AC_SUBST(SHOBJ_CFLAGS) --- -1.8.1.2 - diff --git a/import-layers/yocto-poky/meta/recipes-core/readline/readline-6.3/norpath.patch b/import-layers/yocto-poky/meta/recipes-core/readline/readline-6.3/norpath.patch deleted file mode 100644 index 5d71582b7..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/readline/readline-6.3/norpath.patch +++ /dev/null @@ -1,21 +0,0 @@ -This support script ends up hardcoding unnecessary rpaths into the libraries. We -will search $libdir automatically so this is just wastes space. There may be some -cases this is necessary but our use cases aren't one of them. 
- -Upstream-Status: Inappropriate - -RP 2012/2/23 - -Index: readline-6.2/support/shobj-conf -=================================================================== ---- readline-6.2.orig/support/shobj-conf 2012-02-23 11:06:37.193179379 +0000 -+++ readline-6.2/support/shobj-conf 2012-02-23 11:06:50.049178918 +0000 -@@ -114,7 +114,7 @@ - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - -- SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir) -Wl,-soname,`basename $@ $(SHLIB_MINOR)`' -+ SHLIB_XLDFLAGS='-Wl,-soname,`basename $@ $(SHLIB_MINOR)`' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - diff --git a/import-layers/yocto-poky/meta/recipes-core/readline/readline-7.0/configure-fix.patch b/import-layers/yocto-poky/meta/recipes-core/readline/readline-7.0/configure-fix.patch new file mode 100644 index 000000000..ef3104f8a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/readline/readline-7.0/configure-fix.patch @@ -0,0 +1,35 @@ +Upstream-Status: Pending + +Without this it fails to link against libtermcap causing various missing +symbols issues. + +RP - 8/10/08 + +Support 6.3 which uses configure.ac rather than configure.in. +Signed-off-by: Hongxu Jia +--- + configure.ac | 8 ++++---- + 1 file changed, 4 insertions(+), 4 deletions(-) + +diff --git a/configure.ac b/configure.ac +index cea8f91..9075b8f 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -218,10 +218,10 @@ if test -f ${srcdir}/support/shobj-conf; then + AC_MSG_CHECKING(configuration for building shared libraries) + eval `TERMCAP_LIB=$TERMCAP_LIB ${CONFIG_SHELL-/bin/sh} ${srcdir}/support/shobj-conf -C "${CC}" -c ${host_cpu} -o ${host_os} -v ${host_vendor}` + +-# case "$SHLIB_LIBS" in +-# *curses*|*termcap*|*termlib*) ;; +-# *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;; +-# esac ++ case "$SHLIB_LIBS" in ++ *curses*|*termcap*|*termlib*) ;; ++ *) SHLIB_LIBS="$SHLIB_LIBS $TERMCAP_LIB" ;; ++ esac + + AC_SUBST(SHOBJ_CC) + AC_SUBST(SHOBJ_CFLAGS) +-- +1.8.1.2 + diff --git a/import-layers/yocto-poky/meta/recipes-core/readline/readline-7.0/norpath.patch b/import-layers/yocto-poky/meta/recipes-core/readline/readline-7.0/norpath.patch new file mode 100644 index 000000000..5d71582b7 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/readline/readline-7.0/norpath.patch @@ -0,0 +1,21 @@ +This support script ends up hardcoding unnecessary rpaths into the libraries. We +will search $libdir automatically so this is just wastes space. There may be some +cases this is necessary but our use cases aren't one of them. 
+ +Upstream-Status: Inappropriate + +RP 2012/2/23 + +Index: readline-6.2/support/shobj-conf +=================================================================== +--- readline-6.2.orig/support/shobj-conf 2012-02-23 11:06:37.193179379 +0000 ++++ readline-6.2/support/shobj-conf 2012-02-23 11:06:50.049178918 +0000 +@@ -114,7 +114,7 @@ + SHOBJ_LD='${CC}' + SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' + +- SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir) -Wl,-soname,`basename $@ $(SHLIB_MINOR)`' ++ SHLIB_XLDFLAGS='-Wl,-soname,`basename $@ $(SHLIB_MINOR)`' + SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' + ;; + diff --git a/import-layers/yocto-poky/meta/recipes-core/readline/readline_5.2.bb b/import-layers/yocto-poky/meta/recipes-core/readline/readline_5.2.bb deleted file mode 100644 index c1007a231..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/readline/readline_5.2.bb +++ /dev/null @@ -1,84 +0,0 @@ -SUMMARY = "Library for editing typed command lines" -DESCRIPTION = "The GNU Readline library provides a set of functions for use by applications that allow users to edit \ -command lines as they are typed in. Both Emacs and vi editing modes are available. The Readline library includes \ -additional functions to maintain a list of previously-entered command lines, to recall and perhaps reedit those \ -lines, and perform csh-like history expansion on previous commands." -SECTION = "libs" - -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://COPYING;md5=03b36fdd84f74b8d8189a202b980b67f" - -DEPENDS += "ncurses" - -PR = "r9" - -SRC_URI = "${GNU_MIRROR}/readline/${BPN}-${PV}.tar.gz;name=archive \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-001;name=patch1;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-002;name=patch2;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-003;name=patch3;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-004;name=patch4;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-005;name=patch5;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-006;name=patch6;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-007;name=patch7;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-008;name=patch8;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-009;name=patch9;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-010;name=patch10;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-011;name=patch11;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-012;name=patch12;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-013;name=patch13;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-5.2-patches/readline52-014;name=patch14;apply=yes;striplevel=0 \ - file://configure-fix.patch \ - file://config-dirent-symbols.patch \ - file://fix-redundant-rpath.patch" - -SRC_URI[archive.md5sum] = "e39331f32ad14009b9ff49cc10c5e751" -SRC_URI[archive.sha256sum] = "12e88d96aee2cd1192500356f0535540db499282ca7f76339fb4228c31249f45" - -SRC_URI[patch1.md5sum] = "9d4d41622aa9b230c57f68548ce87d8f" -SRC_URI[patch1.sha256sum] = "eac304c369154059f93049ada328739faaf40338d3cb1fb4b544c93d5ce3f8d5" -SRC_URI[patch2.md5sum] = "f03e512d14206e37f7d6a748b56b9476" -SRC_URI[patch2.sha256sum] = 
"9deacaef25507a0c2ae0b661bf9342559b59a2954d66ea3c5f5bcd900fdfcf78" -SRC_URI[patch3.md5sum] = "252b42d8750f1a94b6bdf086612dceb2" -SRC_URI[patch3.sha256sum] = "2a55d2ecb1c9b0147aeb193a6323616ab31c1c525a83b2db3a994b15594ba934" -SRC_URI[patch4.md5sum] = "a32333c2e603a3ed250514e91050e552" -SRC_URI[patch4.sha256sum] = "a03b65633781efa7c3aae5d57162985e7b7a3c10acf0f2621be610e16f27e5f2" -SRC_URI[patch5.md5sum] = "8106796c09b789523a3a78ab69c04b6d" -SRC_URI[patch5.sha256sum] = "06001896514148a757ea6edbbd40c4fc4331dc653847244386c37b138b150f64" -SRC_URI[patch6.md5sum] = "512188e2bf0837f7eca19dbf71f182ae" -SRC_URI[patch6.sha256sum] = "dfef3e982c0adf8bb5a9b7d0468ec8f5f18138b325e28759437464de5be71013" -SRC_URI[patch7.md5sum] = "ac17aca62eb6fb398c9f2fe9de540aff" -SRC_URI[patch7.sha256sum] = "775b028c7b761397ac6ae1bdfbac7e896dc3b9b3adc2f91312499180ca13bdd1" -SRC_URI[patch8.md5sum] = "2484c392db021905f112cf97a94dfd4c" -SRC_URI[patch8.sha256sum] = "a21b4e0bf0530b878bad24d5be23d18a9e03a75a31ae30844dc0933bb3d77ecd" -SRC_URI[patch9.md5sum] = "fc6eb35d07914fae5c57d49c12483ff7" -SRC_URI[patch9.sha256sum] = "138d5e0f0709a47a2d1621295a3dd5e3cc73b63b5cc28dab03abc4e94fe95ecf" -SRC_URI[patch10.md5sum] = "7a2bf3dc7ac7680b1461a5701100e91b" -SRC_URI[patch10.sha256sum] = "83f8c1aadb86b1a2fad8821a9c6be72a8de5afd7fd9fde58a30b3b57d939693e" -SRC_URI[patch11.md5sum] = "ef6cef6822663470f6ac8c517c5a7ec6" -SRC_URI[patch11.sha256sum] = "08ad3384ab0906e6fa4cc417eb8c43ff59375bcead15fd5c8e31730f0413b3d6" -SRC_URI[patch12.md5sum] = "e3e9f441c8111589855bc363e5640f6c" -SRC_URI[patch12.sha256sum] = "20f0243be2299c23213492cc2c19cfd15cc528d2b566a76a2de58306bb9e4c9e" -SRC_URI[patch13.md5sum] = "3e2e5f543ed268a68fd1fa839faade1a" -SRC_URI[patch13.sha256sum] = "0cc649516a5bdfa61c5e56937407570288b6972d75aa1bd060ad30ebe98144d5" -SRC_URI[patch14.md5sum] = "a1be30e1c6f1099bb5fcef00a2631fb8" -SRC_URI[patch14.sha256sum] = "6f1a68320d01522ca1ea5a737124ecc8739f3dcbfea2dee21e3ccf839a21a817" - -inherit autotools - -EXTRA_AUTORECONF += "--exclude=autoheader" - -LEAD_SONAME = "libreadline.so" - -do_configure_prepend () { - if [ ! 
-e ${S}/acinclude.m4 ]; then - cat ${S}/aclocal.m4 > ${S}/acinclude.m4 - fi -} - -do_install_append () { - # Make install doesn't properly install these - oe_libinstall -so -C shlib libhistory ${D}${libdir} - oe_libinstall -so -C shlib libreadline ${D}${libdir} -} - -BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-core/readline/readline_6.3.bb b/import-layers/yocto-poky/meta/recipes-core/readline/readline_6.3.bb deleted file mode 100644 index 8ec7c4ada..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/readline/readline_6.3.bb +++ /dev/null @@ -1,34 +0,0 @@ -require readline.inc - -SRC_URI += "${GNU_MIRROR}/readline/readline-6.3-patches/readline63-001;name=patch1;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-6.3-patches/readline63-002;name=patch2;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-6.3-patches/readline63-003;name=patch3;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-6.3-patches/readline63-004;name=patch4;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-6.3-patches/readline63-005;name=patch5;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-6.3-patches/readline63-006;name=patch6;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-6.3-patches/readline63-007;name=patch7;apply=yes;striplevel=0 \ - ${GNU_MIRROR}/readline/readline-6.3-patches/readline63-008;name=patch8;apply=yes;striplevel=0 \ - file://configure-fix.patch \ - file://config-dirent-symbols.patch \ - file://norpath.patch" - -SRC_URI[archive.md5sum] = "33c8fb279e981274f485fd91da77e94a" -SRC_URI[archive.sha256sum] = "56ba6071b9462f980c5a72ab0023893b65ba6debb4eeb475d7a563dc65cafd43" - -SRC_URI[patch1.md5sum] = "4343f5ea9b0f42447f102fb61576b398" -SRC_URI[patch1.sha256sum] = "1a79bbb6eaee750e0d6f7f3d059b30a45fc54e8e388a8e05e9c3ae598590146f" -SRC_URI[patch2.md5sum] = "700295212f7e2978577feaee584afddb" -SRC_URI[patch2.sha256sum] = "39e304c7a526888f9e112e733848215736fb7b9d540729b9e31f3347b7a1e0a5" -SRC_URI[patch3.md5sum] = "af4963862f5156fbf9111c2c6fa86ed7" -SRC_URI[patch3.sha256sum] = "ec41bdd8b00fd884e847708513df41d51b1243cecb680189e31b7173d01ca52f" -SRC_URI[patch4.md5sum] = "11f9def89803a5052db3ba72394ce14f" -SRC_URI[patch4.sha256sum] = "4547b906fb2570866c21887807de5dee19838a60a1afb66385b272155e4355cc" -SRC_URI[patch5.md5sum] = "93721c31cd225393f80cb3aadb165544" -SRC_URI[patch5.sha256sum] = "877788f9228d1a9907a4bcfe3d6dd0439c08d728949458b41208d9bf9060274b" -SRC_URI[patch6.md5sum] = "71dc6ecce66d1489b96595f55d142a52" -SRC_URI[patch6.sha256sum] = "5c237ab3c6c97c23cf52b2a118adc265b7fb411b57c93a5f7c221d50fafbe556" -SRC_URI[patch7.md5sum] = "062a08ed60679d3c4878710b3d595b65" -SRC_URI[patch7.sha256sum] = "4d79b5a2adec3c2e8114cbd3d63c1771f7c6cf64035368624903d257014f5bea" -SRC_URI[patch8.md5sum] = "ee1c04072154826870848d8b218d7b04" -SRC_URI[patch8.sha256sum] = "3bc093cf526ceac23eb80256b0ec87fa1735540d659742107b6284d635c43787" - diff --git a/import-layers/yocto-poky/meta/recipes-core/readline/readline_7.0.bb b/import-layers/yocto-poky/meta/recipes-core/readline/readline_7.0.bb new file mode 100644 index 000000000..85dfaa629 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/readline/readline_7.0.bb @@ -0,0 +1,7 @@ +require readline.inc + +SRC_URI += "file://configure-fix.patch \ + file://norpath.patch" + +SRC_URI[archive.md5sum] = "205b03a87fc83dab653b628c59b9fc91" +SRC_URI[archive.sha256sum] = "750d437185286f40a369e1e4f4764eda932b9459b5ec9a731628393dd3d32334" diff --git 
a/import-layers/yocto-poky/meta/recipes-core/systemd/systemd-compat-units.bb b/import-layers/yocto-poky/meta/recipes-core/systemd/systemd-compat-units.bb index 421fc06df..fe9a521d7 100644 --- a/import-layers/yocto-poky/meta/recipes-core/systemd/systemd-compat-units.bb +++ b/import-layers/yocto-poky/meta/recipes-core/systemd/systemd-compat-units.bb @@ -1,11 +1,10 @@ SUMMARY = "Enhances systemd compatilibity with existing SysVinit scripts" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690" PR = "r29" -DEPENDS = "systemd-systemctl-native" +PACKAGE_WRITE_DEPS += "systemd-systemctl-native" S = "${WORKDIR}" diff --git a/import-layers/yocto-poky/meta/recipes-core/systemd/systemd-systemctl-native.bb b/import-layers/yocto-poky/meta/recipes-core/systemd/systemd-systemctl-native.bb index fbdc9c0a1..fadc8433d 100644 --- a/import-layers/yocto-poky/meta/recipes-core/systemd/systemd-systemctl-native.bb +++ b/import-layers/yocto-poky/meta/recipes-core/systemd/systemd-systemctl-native.bb @@ -1,7 +1,7 @@ SUMMARY = "Wrapper for enabling systemd services" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690" +LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" PR = "r6" diff --git a/import-layers/yocto-poky/meta/recipes-core/systemd/systemd/0001-core-load-fragment-refuse-units-with-errors-in-certa.patch b/import-layers/yocto-poky/meta/recipes-core/systemd/systemd/0001-core-load-fragment-refuse-units-with-errors-in-certa.patch new file mode 100644 index 000000000..80948b2ce --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/systemd/systemd/0001-core-load-fragment-refuse-units-with-errors-in-certa.patch @@ -0,0 +1,329 @@ +If a user is created with a strictly-speaking invalid name such as '0day' and a +unit created to run as that user, systemd rejects the username and runs the unit +as root. + +CVE: CVE-2017-1000082 +Upstream-Status: Backport +Signed-off-by: Ross Burton + +From d8e1310e1ed7b6f122bc7eb8ba061fbd088783c0 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?Zbigniew=20J=C4=99drzejewski-Szmek?= +Date: Thu, 6 Jul 2017 13:28:19 -0400 +Subject: [PATCH] core/load-fragment: refuse units with errors in certain + directives + +If an error is encountered in any of the Exec* lines, WorkingDirectory, +SELinuxContext, ApparmorProfile, SmackProcessLabel, Service (in .socket +units), User, or Group, refuse to load the unit. If the config stanza +has support, ignore the failure if '-' is present. + +For those configuration directives, even if we started the unit, it's +pretty likely that it'll do something unexpected (like write files +in a wrong place, or with a wrong context, or run with wrong permissions, +etc). It seems better to refuse to start the unit and have the admin +clean up the configuration without giving the service a chance to mess +up stuff. + +Note that all "security" options that restrict what the unit can do +(Capabilities, AmbientCapabilities, Restrict*, SystemCallFilter, Limit*, +PrivateDevices, Protect*, etc) are _not_ treated like this. Such options are +only supplementary, and are not always available depending on the architecture +and compilation options, so unit authors have to make sure that the service +runs correctly without them anyway. + +Fixes #6237, #6277. 
+ +Signed-off-by: Ross Burton +--- + src/core/load-fragment.c | 104 ++++++++++++++++++++++++++++------------------ + src/test/test-unit-file.c | 14 +++---- + 2 files changed, 70 insertions(+), 48 deletions(-) + +diff --git a/src/core/load-fragment.c b/src/core/load-fragment.c +index cbc826809..2047974f4 100644 +--- a/src/core/load-fragment.c ++++ b/src/core/load-fragment.c +@@ -630,20 +630,28 @@ int config_parse_exec( + + if (isempty(f)) { + /* First word is either "-" or "@" with no command. */ +- log_syntax(unit, LOG_ERR, filename, line, 0, "Empty path in command line, ignoring: \"%s\"", rvalue); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, 0, ++ "Empty path in command line%s: \"%s\"", ++ ignore ? ", ignoring" : "", rvalue); ++ return ignore ? 0 : -ENOEXEC; + } + if (!string_is_safe(f)) { +- log_syntax(unit, LOG_ERR, filename, line, 0, "Executable path contains special characters, ignoring: %s", rvalue); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, 0, ++ "Executable path contains special characters%s: %s", ++ ignore ? ", ignoring" : "", rvalue); ++ return ignore ? 0 : -ENOEXEC; + } + if (!path_is_absolute(f)) { +- log_syntax(unit, LOG_ERR, filename, line, 0, "Executable path is not absolute, ignoring: %s", rvalue); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, 0, ++ "Executable path is not absolute%s: %s", ++ ignore ? ", ignoring" : "", rvalue); ++ return ignore ? 0 : -ENOEXEC; + } + if (endswith(f, "/")) { +- log_syntax(unit, LOG_ERR, filename, line, 0, "Executable path specifies a directory, ignoring: %s", rvalue); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, 0, ++ "Executable path specifies a directory%s: %s", ++ ignore ? ", ignoring" : "", rvalue); ++ return ignore ? 0 : -ENOEXEC; + } + + if (f == firstword) { +@@ -699,7 +707,7 @@ int config_parse_exec( + if (r == 0) + break; + else if (r < 0) +- return 0; ++ return ignore ? 0 : -ENOEXEC; + + if (!GREEDY_REALLOC(n, nbufsize, nlen + 2)) + return log_oom(); +@@ -709,8 +717,10 @@ int config_parse_exec( + } + + if (!n || !n[0]) { +- log_syntax(unit, LOG_ERR, filename, line, 0, "Empty executable name or zeroeth argument, ignoring: %s", rvalue); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, 0, ++ "Empty executable name or zeroeth argument%s: %s", ++ ignore ? ", ignoring" : "", rvalue); ++ return ignore ? 0 : -ENOEXEC; + } + + nce = new0(ExecCommand, 1); +@@ -1315,8 +1325,10 @@ int config_parse_exec_selinux_context( + + r = unit_name_printf(u, rvalue, &k); + if (r < 0) { +- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve specifiers, ignoring: %m"); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, r, ++ "Failed to resolve specifiers%s: %m", ++ ignore ? ", ignoring" : ""); ++ return ignore ? 0 : -ENOEXEC; + } + + free(c->selinux_context); +@@ -1363,8 +1375,10 @@ int config_parse_exec_apparmor_profile( + + r = unit_name_printf(u, rvalue, &k); + if (r < 0) { +- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve specifiers, ignoring: %m"); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, r, ++ "Failed to resolve specifiers%s: %m", ++ ignore ? ", ignoring" : ""); ++ return ignore ? 
0 : -ENOEXEC; + } + + free(c->apparmor_profile); +@@ -1411,8 +1425,10 @@ int config_parse_exec_smack_process_label( + + r = unit_name_printf(u, rvalue, &k); + if (r < 0) { +- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve specifiers, ignoring: %m"); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, r, ++ "Failed to resolve specifiers%s: %m", ++ ignore ? ", ignoring" : ""); ++ return ignore ? 0 : -ENOEXEC; + } + + free(c->smack_process_label); +@@ -1630,19 +1646,19 @@ int config_parse_socket_service( + + r = unit_name_printf(UNIT(s), rvalue, &p); + if (r < 0) { +- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve specifiers, ignoring: %s", rvalue); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve specifiers: %s", rvalue); ++ return -ENOEXEC; + } + + if (!endswith(p, ".service")) { +- log_syntax(unit, LOG_ERR, filename, line, 0, "Unit must be of type service, ignoring: %s", rvalue); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, 0, "Unit must be of type service: %s", rvalue); ++ return -ENOEXEC; + } + + r = manager_load_unit(UNIT(s)->manager, p, NULL, &error, &x); + if (r < 0) { +- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to load unit %s, ignoring: %s", rvalue, bus_error_message(&error, r)); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, r, "Failed to load unit %s: %s", rvalue, bus_error_message(&error, r)); ++ return -ENOEXEC; + } + + unit_ref_set(&s->service, x); +@@ -1893,13 +1909,13 @@ int config_parse_user_group( + + r = unit_full_printf(u, rvalue, &k); + if (r < 0) { +- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in %s, ignoring: %m", rvalue); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in %s: %m", rvalue); ++ return -ENOEXEC; + } + + if (!valid_user_group_name_or_id(k)) { +- log_syntax(unit, LOG_ERR, filename, line, 0, "Invalid user/group name or numeric ID, ignoring: %s", k); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, 0, "Invalid user/group name or numeric ID: %s", k); ++ return -ENOEXEC; + } + + n = k; +@@ -1957,19 +1973,19 @@ int config_parse_user_group_strv( + if (r == -ENOMEM) + return log_oom(); + if (r < 0) { +- log_syntax(unit, LOG_ERR, filename, line, r, "Invalid syntax, ignoring: %s", rvalue); +- break; ++ log_syntax(unit, LOG_ERR, filename, line, r, "Invalid syntax: %s", rvalue); ++ return -ENOEXEC; + } + + r = unit_full_printf(u, word, &k); + if (r < 0) { +- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in %s, ignoring: %m", word); +- continue; ++ log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in %s: %m", word); ++ return -ENOEXEC; + } + + if (!valid_user_group_name_or_id(k)) { +- log_syntax(unit, LOG_ERR, filename, line, 0, "Invalid user/group name or numeric ID, ignoring: %s", k); +- continue; ++ log_syntax(unit, LOG_ERR, filename, line, 0, "Invalid user/group name or numeric ID: %s", k); ++ return -ENOEXEC; + } + + r = strv_push(users, k); +@@ -2128,25 +2144,28 @@ int config_parse_working_directory( + + r = unit_full_printf(u, rvalue, &k); + if (r < 0) { +- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in working directory path '%s', ignoring: %m", rvalue); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, r, ++ "Failed to resolve unit specifiers in working directory path '%s'%s: %m", ++ rvalue, missing_ok ? ", ignoring" : ""); ++ return missing_ok ? 
0 : -ENOEXEC; + } + + path_kill_slashes(k); + + if (!utf8_is_valid(k)) { + log_syntax_invalid_utf8(unit, LOG_ERR, filename, line, rvalue); +- return 0; ++ return missing_ok ? 0 : -ENOEXEC; + } + + if (!path_is_absolute(k)) { +- log_syntax(unit, LOG_ERR, filename, line, 0, "Working directory path '%s' is not absolute, ignoring.", rvalue); +- return 0; ++ log_syntax(unit, LOG_ERR, filename, line, 0, ++ "Working directory path '%s' is not absolute%s.", ++ rvalue, missing_ok ? ", ignoring" : ""); ++ return missing_ok ? 0 : -ENOEXEC; + } + +- free_and_replace(c->working_directory, k); +- + c->working_directory_home = false; ++ free_and_replace(c->working_directory, k); + } + + c->working_directory_missing_ok = missing_ok; +@@ -4228,8 +4247,11 @@ int unit_load_fragment(Unit *u) { + return r; + + r = load_from_path(u, k); +- if (r < 0) ++ if (r < 0) { ++ if (r == -ENOEXEC) ++ log_unit_notice(u, "Unit configuration has fatal error, unit will not be started."); + return r; ++ } + + if (u->load_state == UNIT_STUB) { + SET_FOREACH(t, u->names, i) { +diff --git a/src/test/test-unit-file.c b/src/test/test-unit-file.c +index 12f48bf43..fd797b587 100644 +--- a/src/test/test-unit-file.c ++++ b/src/test/test-unit-file.c +@@ -146,7 +146,7 @@ static void test_config_parse_exec(void) { + r = config_parse_exec(NULL, "fake", 4, "section", 1, + "LValue", 0, "/RValue/ argv0 r1", + &c, u); +- assert_se(r == 0); ++ assert_se(r == -ENOEXEC); + assert_se(c1->command_next == NULL); + + log_info("/* honour_argv0 */"); +@@ -161,7 +161,7 @@ static void test_config_parse_exec(void) { + r = config_parse_exec(NULL, "fake", 3, "section", 1, + "LValue", 0, "@/RValue", + &c, u); +- assert_se(r == 0); ++ assert_se(r == -ENOEXEC); + assert_se(c1->command_next == NULL); + + log_info("/* no command, whitespace only, reset */"); +@@ -220,7 +220,7 @@ static void test_config_parse_exec(void) { + "-@/RValue argv0 r1 ; ; " + "/goo/goo boo", + &c, u); +- assert_se(r >= 0); ++ assert_se(r == -ENOEXEC); + c1 = c1->command_next; + check_execcommand(c1, "/RValue", "argv0", "r1", NULL, true); + +@@ -374,7 +374,7 @@ static void test_config_parse_exec(void) { + r = config_parse_exec(NULL, "fake", 4, "section", 1, + "LValue", 0, path, + &c, u); +- assert_se(r == 0); ++ assert_se(r == -ENOEXEC); + assert_se(c1->command_next == NULL); + } + +@@ -401,21 +401,21 @@ static void test_config_parse_exec(void) { + r = config_parse_exec(NULL, "fake", 4, "section", 1, + "LValue", 0, "/path\\", + &c, u); +- assert_se(r == 0); ++ assert_se(r == -ENOEXEC); + assert_se(c1->command_next == NULL); + + log_info("/* missing ending ' */"); + r = config_parse_exec(NULL, "fake", 4, "section", 1, + "LValue", 0, "/path 'foo", + &c, u); +- assert_se(r == 0); ++ assert_se(r == -ENOEXEC); + assert_se(c1->command_next == NULL); + + log_info("/* missing ending ' with trailing backslash */"); + r = config_parse_exec(NULL, "fake", 4, "section", 1, + "LValue", 0, "/path 'foo\\", + &c, u); +- assert_se(r == 0); ++ assert_se(r == -ENOEXEC); + assert_se(c1->command_next == NULL); + + log_info("/* invalid space between modifiers */"); +-- +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-core/systemd/systemd/validate-user.patch b/import-layers/yocto-poky/meta/recipes-core/systemd/systemd/validate-user.patch deleted file mode 100644 index 8e0e0c1b9..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/systemd/systemd/validate-user.patch +++ /dev/null @@ -1,856 +0,0 @@ -If a user is created with a strictly-speaking invalid name such as '0day' and a -unit created to 
run as that user, systemd rejects the username and runs the unit -as root. - -CVE: CVE-2017-1000082 -Upstream-Status: Backport -Signed-off-by: Ross Burton - -From e0c4eb1435d50cb3797cf94100d4886dc2022bce Mon Sep 17 00:00:00 2001 -From: Lennart Poettering -Date: Thu, 14 Jul 2016 12:23:39 +0200 -Subject: [PATCH 1/3] sysusers: move various user credential validity checks to - src/basic/ - -This way we can reuse them for validating User=/Group= settings in unit files -(to be added in a later commit). - -Also, add some tests for them. ---- - src/basic/user-util.c | 93 +++++++++++++++++++++++++++++++++++++++++++++++ - src/basic/user-util.h | 5 +++ - src/sysusers/sysusers.c | 75 -------------------------------------- - src/test/test-user-util.c | 87 ++++++++++++++++++++++++++++++++++++++++++++ - 4 files changed, 185 insertions(+), 75 deletions(-) - -diff --git a/src/basic/user-util.c b/src/basic/user-util.c -index f65ca3eda..c85b5c6a8 100644 ---- a/src/basic/user-util.c -+++ b/src/basic/user-util.c -@@ -29,6 +29,7 @@ - #include - #include - #include -+#include - - #include "missing.h" - #include "alloc-util.h" -@@ -39,6 +40,7 @@ - #include "path-util.h" - #include "string-util.h" - #include "user-util.h" -+#include "utf8.h" - - bool uid_is_valid(uid_t uid) { - -@@ -479,3 +481,94 @@ int take_etc_passwd_lock(const char *root) { - - return fd; - } -+ -+bool valid_user_group_name(const char *u) { -+ const char *i; -+ long sz; -+ -+ /* Checks if the specified name is a valid user/group name. */ -+ -+ if (isempty(u)) -+ return false; -+ -+ if (!(u[0] >= 'a' && u[0] <= 'z') && -+ !(u[0] >= 'A' && u[0] <= 'Z') && -+ u[0] != '_') -+ return false; -+ -+ for (i = u+1; *i; i++) { -+ if (!(*i >= 'a' && *i <= 'z') && -+ !(*i >= 'A' && *i <= 'Z') && -+ !(*i >= '0' && *i <= '9') && -+ *i != '_' && -+ *i != '-') -+ return false; -+ } -+ -+ sz = sysconf(_SC_LOGIN_NAME_MAX); -+ assert_se(sz > 0); -+ -+ if ((size_t) (i-u) > (size_t) sz) -+ return false; -+ -+ if ((size_t) (i-u) > UT_NAMESIZE - 1) -+ return false; -+ -+ return true; -+} -+ -+bool valid_user_group_name_or_id(const char *u) { -+ -+ /* Similar as above, but is also fine with numeric UID/GID specifications, as long as they are in the right -+ * range, and not the invalid user ids. 
*/ -+ -+ if (isempty(u)) -+ return false; -+ -+ if (valid_user_group_name(u)) -+ return true; -+ -+ return parse_uid(u, NULL) >= 0; -+} -+ -+bool valid_gecos(const char *d) { -+ -+ if (!d) -+ return false; -+ -+ if (!utf8_is_valid(d)) -+ return false; -+ -+ if (string_has_cc(d, NULL)) -+ return false; -+ -+ /* Colons are used as field separators, and hence not OK */ -+ if (strchr(d, ':')) -+ return false; -+ -+ return true; -+} -+ -+bool valid_home(const char *p) { -+ -+ if (isempty(p)) -+ return false; -+ -+ if (!utf8_is_valid(p)) -+ return false; -+ -+ if (string_has_cc(p, NULL)) -+ return false; -+ -+ if (!path_is_absolute(p)) -+ return false; -+ -+ if (!path_is_safe(p)) -+ return false; -+ -+ /* Colons are used as field separators, and hence not OK */ -+ if (strchr(p, ':')) -+ return false; -+ -+ return true; -+} -diff --git a/src/basic/user-util.h b/src/basic/user-util.h -index 8026eca3f..36f71fb00 100644 ---- a/src/basic/user-util.h -+++ b/src/basic/user-util.h -@@ -68,3 +68,8 @@ int take_etc_passwd_lock(const char *root); - static inline bool userns_supported(void) { - return access("/proc/self/uid_map", F_OK) >= 0; - } -+ -+bool valid_user_group_name(const char *u); -+bool valid_user_group_name_or_id(const char *u); -+bool valid_gecos(const char *d); -+bool valid_home(const char *p); -diff --git a/src/sysusers/sysusers.c b/src/sysusers/sysusers.c -index 4377f1b91..df3b7de30 100644 ---- a/src/sysusers/sysusers.c -+++ b/src/sysusers/sysusers.c -@@ -1299,81 +1299,6 @@ static bool item_equal(Item *a, Item *b) { - return true; - } - --static bool valid_user_group_name(const char *u) { -- const char *i; -- long sz; -- -- if (isempty(u)) -- return false; -- -- if (!(u[0] >= 'a' && u[0] <= 'z') && -- !(u[0] >= 'A' && u[0] <= 'Z') && -- u[0] != '_') -- return false; -- -- for (i = u+1; *i; i++) { -- if (!(*i >= 'a' && *i <= 'z') && -- !(*i >= 'A' && *i <= 'Z') && -- !(*i >= '0' && *i <= '9') && -- *i != '_' && -- *i != '-') -- return false; -- } -- -- sz = sysconf(_SC_LOGIN_NAME_MAX); -- assert_se(sz > 0); -- -- if ((size_t) (i-u) > (size_t) sz) -- return false; -- -- if ((size_t) (i-u) > UT_NAMESIZE - 1) -- return false; -- -- return true; --} -- --static bool valid_gecos(const char *d) { -- -- if (!d) -- return false; -- -- if (!utf8_is_valid(d)) -- return false; -- -- if (string_has_cc(d, NULL)) -- return false; -- -- /* Colons are used as field separators, and hence not OK */ -- if (strchr(d, ':')) -- return false; -- -- return true; --} -- --static bool valid_home(const char *p) { -- -- if (isempty(p)) -- return false; -- -- if (!utf8_is_valid(p)) -- return false; -- -- if (string_has_cc(p, NULL)) -- return false; -- -- if (!path_is_absolute(p)) -- return false; -- -- if (!path_is_safe(p)) -- return false; -- -- /* Colons are used as field separators, and hence not OK */ -- if (strchr(p, ':')) -- return false; -- -- return true; --} -- - static int parse_line(const char *fname, unsigned line, const char *buffer) { - - static const Specifier specifier_table[] = { -diff --git a/src/test/test-user-util.c b/src/test/test-user-util.c -index 8d1ec19f1..2a344a9f9 100644 ---- a/src/test/test-user-util.c -+++ b/src/test/test-user-util.c -@@ -61,6 +61,88 @@ static void test_uid_ptr(void) { - assert_se(PTR_TO_UID(UID_TO_PTR(1000)) == 1000); - } - -+static void test_valid_user_group_name(void) { -+ assert_se(!valid_user_group_name(NULL)); -+ assert_se(!valid_user_group_name("")); -+ assert_se(!valid_user_group_name("1")); -+ assert_se(!valid_user_group_name("65535")); -+ 
assert_se(!valid_user_group_name("-1")); -+ assert_se(!valid_user_group_name("-kkk")); -+ assert_se(!valid_user_group_name("rööt")); -+ assert_se(!valid_user_group_name(".")); -+ assert_se(!valid_user_group_name("eff.eff")); -+ assert_se(!valid_user_group_name("foo\nbar")); -+ assert_se(!valid_user_group_name("0123456789012345678901234567890123456789")); -+ assert_se(!valid_user_group_name_or_id("aaa:bbb")); -+ -+ assert_se(valid_user_group_name("root")); -+ assert_se(valid_user_group_name("lennart")); -+ assert_se(valid_user_group_name("LENNART")); -+ assert_se(valid_user_group_name("_kkk")); -+ assert_se(valid_user_group_name("kkk-")); -+ assert_se(valid_user_group_name("kk-k")); -+ -+ assert_se(valid_user_group_name("some5")); -+ assert_se(!valid_user_group_name("5some")); -+ assert_se(valid_user_group_name("INNER5NUMBER")); -+} -+ -+static void test_valid_user_group_name_or_id(void) { -+ assert_se(!valid_user_group_name_or_id(NULL)); -+ assert_se(!valid_user_group_name_or_id("")); -+ assert_se(valid_user_group_name_or_id("0")); -+ assert_se(valid_user_group_name_or_id("1")); -+ assert_se(valid_user_group_name_or_id("65534")); -+ assert_se(!valid_user_group_name_or_id("65535")); -+ assert_se(valid_user_group_name_or_id("65536")); -+ assert_se(!valid_user_group_name_or_id("-1")); -+ assert_se(!valid_user_group_name_or_id("-kkk")); -+ assert_se(!valid_user_group_name_or_id("rööt")); -+ assert_se(!valid_user_group_name_or_id(".")); -+ assert_se(!valid_user_group_name_or_id("eff.eff")); -+ assert_se(!valid_user_group_name_or_id("foo\nbar")); -+ assert_se(!valid_user_group_name_or_id("0123456789012345678901234567890123456789")); -+ assert_se(!valid_user_group_name_or_id("aaa:bbb")); -+ -+ assert_se(valid_user_group_name_or_id("root")); -+ assert_se(valid_user_group_name_or_id("lennart")); -+ assert_se(valid_user_group_name_or_id("LENNART")); -+ assert_se(valid_user_group_name_or_id("_kkk")); -+ assert_se(valid_user_group_name_or_id("kkk-")); -+ assert_se(valid_user_group_name_or_id("kk-k")); -+ -+ assert_se(valid_user_group_name_or_id("some5")); -+ assert_se(!valid_user_group_name_or_id("5some")); -+ assert_se(valid_user_group_name_or_id("INNER5NUMBER")); -+} -+ -+static void test_valid_gecos(void) { -+ -+ assert_se(!valid_gecos(NULL)); -+ assert_se(valid_gecos("")); -+ assert_se(valid_gecos("test")); -+ assert_se(valid_gecos("Ümläüt")); -+ assert_se(!valid_gecos("In\nvalid")); -+ assert_se(!valid_gecos("In:valid")); -+} -+ -+static void test_valid_home(void) { -+ -+ assert_se(!valid_home(NULL)); -+ assert_se(!valid_home("")); -+ assert_se(!valid_home(".")); -+ assert_se(!valid_home("/home/..")); -+ assert_se(!valid_home("/home/../")); -+ assert_se(!valid_home("/home\n/foo")); -+ assert_se(!valid_home("./piep")); -+ assert_se(!valid_home("piep")); -+ assert_se(!valid_home("/home/user:lennart")); -+ -+ assert_se(valid_home("/")); -+ assert_se(valid_home("/home")); -+ assert_se(valid_home("/home/foo")); -+} -+ - int main(int argc, char*argv[]) { - - test_uid_to_name_one(0, "root"); -@@ -75,5 +157,10 @@ int main(int argc, char*argv[]) { - test_parse_uid(); - test_uid_ptr(); - -+ test_valid_user_group_name(); -+ test_valid_user_group_name_or_id(); -+ test_valid_gecos(); -+ test_valid_home(); -+ - return 0; - } --- -2.11.0 - - -From 1affacaaf6eff93e53563a644567cc5c3930cb28 Mon Sep 17 00:00:00 2001 -From: Lennart Poettering -Date: Thu, 14 Jul 2016 12:28:06 +0200 -Subject: [PATCH 2/3] core: be stricter when parsing User=/Group= fields - -Let's verify the validity of the syntax of the user/group 
names set. ---- - src/core/load-fragment-gperf.gperf.m4 | 10 +-- - src/core/load-fragment.c | 118 ++++++++++++++++++++++++++++++++++ - src/core/load-fragment.h | 2 + - 3 files changed, 125 insertions(+), 5 deletions(-) - -diff --git a/src/core/load-fragment-gperf.gperf.m4 b/src/core/load-fragment-gperf.gperf.m4 -index 819341898..110089696 100644 ---- a/src/core/load-fragment-gperf.gperf.m4 -+++ b/src/core/load-fragment-gperf.gperf.m4 -@@ -19,9 +19,9 @@ m4_dnl Define the context options only once - m4_define(`EXEC_CONTEXT_CONFIG_ITEMS', - `$1.WorkingDirectory, config_parse_working_directory, 0, offsetof($1, exec_context) - $1.RootDirectory, config_parse_unit_path_printf, 0, offsetof($1, exec_context.root_directory) --$1.User, config_parse_unit_string_printf, 0, offsetof($1, exec_context.user) --$1.Group, config_parse_unit_string_printf, 0, offsetof($1, exec_context.group) --$1.SupplementaryGroups, config_parse_strv, 0, offsetof($1, exec_context.supplementary_groups) -+$1.User, config_parse_user_group, 0, offsetof($1, exec_context.user) -+$1.Group, config_parse_user_group, 0, offsetof($1, exec_context.group) -+$1.SupplementaryGroups, config_parse_user_group_strv, 0, offsetof($1, exec_context.supplementary_groups) - $1.Nice, config_parse_exec_nice, 0, offsetof($1, exec_context) - $1.OOMScoreAdjust, config_parse_exec_oom_score_adjust, 0, offsetof($1, exec_context) - $1.IOSchedulingClass, config_parse_exec_io_class, 0, offsetof($1, exec_context) -@@ -275,8 +275,8 @@ Socket.ExecStartPost, config_parse_exec, SOCKET_EXEC - Socket.ExecStopPre, config_parse_exec, SOCKET_EXEC_STOP_PRE, offsetof(Socket, exec_command) - Socket.ExecStopPost, config_parse_exec, SOCKET_EXEC_STOP_POST, offsetof(Socket, exec_command) - Socket.TimeoutSec, config_parse_sec, 0, offsetof(Socket, timeout_usec) --Socket.SocketUser, config_parse_unit_string_printf, 0, offsetof(Socket, user) --Socket.SocketGroup, config_parse_unit_string_printf, 0, offsetof(Socket, group) -+Socket.SocketUser, config_parse_user_group, 0, offsetof(Socket, user) -+Socket.SocketGroup, config_parse_user_group, 0, offsetof(Socket, group) - Socket.SocketMode, config_parse_mode, 0, offsetof(Socket, socket_mode) - Socket.DirectoryMode, config_parse_mode, 0, offsetof(Socket, directory_mode) - Socket.Accept, config_parse_bool, 0, offsetof(Socket, accept) -diff --git a/src/core/load-fragment.c b/src/core/load-fragment.c -index 86b4fb071..f43781803 100644 ---- a/src/core/load-fragment.c -+++ b/src/core/load-fragment.c -@@ -64,6 +64,7 @@ - #include "unit-name.h" - #include "unit-printf.h" - #include "unit.h" -+#include "user-util.h" - #include "utf8.h" - #include "web-util.h" - -@@ -1758,6 +1759,123 @@ int config_parse_sec_fix_0( - return 0; - } - -+int config_parse_user_group( -+ const char *unit, -+ const char *filename, -+ unsigned line, -+ const char *section, -+ unsigned section_line, -+ const char *lvalue, -+ int ltype, -+ const char *rvalue, -+ void *data, -+ void *userdata) { -+ -+ char **user = data, *n; -+ Unit *u = userdata; -+ int r; -+ -+ assert(filename); -+ assert(lvalue); -+ assert(rvalue); -+ assert(u); -+ -+ if (isempty(rvalue)) -+ n = NULL; -+ else { -+ _cleanup_free_ char *k = NULL; -+ -+ r = unit_full_printf(u, rvalue, &k); -+ if (r < 0) { -+ log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in %s, ignoring: %m", rvalue); -+ return 0; -+ } -+ -+ if (!valid_user_group_name_or_id(k)) { -+ log_syntax(unit, LOG_ERR, filename, line, 0, "Invalid user/group name or numeric ID, ignoring: %s", k); -+ return 0; -+ } -+ -+ 
n = k; -+ k = NULL; -+ } -+ -+ free(*user); -+ *user = n; -+ -+ return 0; -+} -+ -+int config_parse_user_group_strv( -+ const char *unit, -+ const char *filename, -+ unsigned line, -+ const char *section, -+ unsigned section_line, -+ const char *lvalue, -+ int ltype, -+ const char *rvalue, -+ void *data, -+ void *userdata) { -+ -+ char ***users = data; -+ Unit *u = userdata; -+ const char *p; -+ int r; -+ -+ assert(filename); -+ assert(lvalue); -+ assert(rvalue); -+ assert(u); -+ -+ if (isempty(rvalue)) { -+ char **empty; -+ -+ empty = new0(char*, 1); -+ if (!empty) -+ return log_oom(); -+ -+ strv_free(*users); -+ *users = empty; -+ -+ return 0; -+ } -+ -+ p = rvalue; -+ for (;;) { -+ _cleanup_free_ char *word = NULL, *k = NULL; -+ -+ r = extract_first_word(&p, &word, WHITESPACE, 0); -+ if (r == 0) -+ break; -+ if (r == -ENOMEM) -+ return log_oom(); -+ if (r < 0) { -+ log_syntax(unit, LOG_ERR, filename, line, r, "Invalid syntax, ignoring: %s", rvalue); -+ break; -+ } -+ -+ r = unit_full_printf(u, word, &k); -+ if (r < 0) { -+ log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in %s, ignoring: %m", word); -+ continue; -+ } -+ -+ if (!valid_user_group_name_or_id(k)) { -+ log_syntax(unit, LOG_ERR, filename, line, 0, "Invalid user/group name or numeric ID, ignoring: %s", k); -+ continue; -+ } -+ -+ r = strv_push(users, k); -+ if (r < 0) -+ return log_oom(); -+ -+ k = NULL; -+ } -+ -+ return 0; -+} -+ - int config_parse_busname_service( - const char *unit, - const char *filename, -diff --git a/src/core/load-fragment.h b/src/core/load-fragment.h -index b36a2e3a0..213bce55a 100644 ---- a/src/core/load-fragment.h -+++ b/src/core/load-fragment.h -@@ -111,6 +111,8 @@ int config_parse_exec_utmp_mode(const char *unit, const char *filename, unsigned - int config_parse_working_directory(const char *unit, const char *filename, unsigned line, const char *section, unsigned section_line, const char *lvalue, int ltype, const char *rvalue, void *data, void *userdata); - int config_parse_fdname(const char *unit, const char *filename, unsigned line, const char *section, unsigned section_line, const char *lvalue, int ltype, const char *rvalue, void *data, void *userdata); - int config_parse_sec_fix_0(const char *unit, const char *filename, unsigned line, const char *section, unsigned section_line, const char *lvalue, int ltype, const char *rvalue, void *data, void *userdata); -+int config_parse_user_group(const char *unit, const char *filename, unsigned line, const char *section, unsigned section_line, const char *lvalue, int ltype, const char *rvalue, void *data, void *userdata); -+int config_parse_user_group_strv(const char *unit, const char *filename, unsigned line, const char *section, unsigned section_line, const char *lvalue, int ltype, const char *rvalue, void *data, void *userdata); - - /* gperf prototypes */ - const struct ConfigPerfItem* load_fragment_gperf_lookup(const char *key, unsigned length); --- -2.11.0 - - -From 97e0456384ed5c930394062d340237ea6130ece0 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Zbigniew=20J=C4=99drzejewski-Szmek?= -Date: Thu, 6 Jul 2017 13:28:19 -0400 -Subject: [PATCH 3/3] core/load-fragment: refuse units with errors in certain - directives - -If an error is encountered in any of the Exec* lines, WorkingDirectory, -SELinuxContext, ApparmorProfile, SmackProcessLabel, Service (in .socket -units), User, or Group, refuse to load the unit. If the config stanza -has support, ignore the failure if '-' is present. 
- -For those configuration directives, even if we started the unit, it's -pretty likely that it'll do something unexpected (like write files -in a wrong place, or with a wrong context, or run with wrong permissions, -etc). It seems better to refuse to start the unit and have the admin -clean up the configuration without giving the service a chance to mess -up stuff. - -Note that all "security" options that restrict what the unit can do -(Capabilities, AmbientCapabilities, Restrict*, SystemCallFilter, Limit*, -PrivateDevices, Protect*, etc) are _not_ treated like this. Such options are -only supplementary, and are not always available depending on the architecture -and compilation options, so unit authors have to make sure that the service -runs correctly without them anyway. - -Fixes #6237, #6277. - -Signed-off-by: Ross Burton ---- - src/core/load-fragment.c | 101 ++++++++++++++++++++++++++++------------------ - src/test/test-unit-file.c | 14 +++---- - 2 files changed, 69 insertions(+), 46 deletions(-) - -diff --git a/src/core/load-fragment.c b/src/core/load-fragment.c -index f43781803..b1fb1d407 100644 ---- a/src/core/load-fragment.c -+++ b/src/core/load-fragment.c -@@ -626,20 +626,28 @@ int config_parse_exec( - - if (isempty(f)) { - /* First word is either "-" or "@" with no command. */ -- log_syntax(unit, LOG_ERR, filename, line, 0, "Empty path in command line, ignoring: \"%s\"", rvalue); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, 0, -+ "Empty path in command line%s: \"%s\"", -+ ignore ? ", ignoring" : "", rvalue); -+ return ignore ? 0 : -ENOEXEC; - } - if (!string_is_safe(f)) { -- log_syntax(unit, LOG_ERR, filename, line, 0, "Executable path contains special characters, ignoring: %s", rvalue); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, 0, -+ "Executable path contains special characters%s: %s", -+ ignore ? ", ignoring" : "", rvalue); -+ return ignore ? 0 : -ENOEXEC; - } - if (!path_is_absolute(f)) { -- log_syntax(unit, LOG_ERR, filename, line, 0, "Executable path is not absolute, ignoring: %s", rvalue); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, 0, -+ "Executable path is not absolute%s: %s", -+ ignore ? ", ignoring" : "", rvalue); -+ return ignore ? 0 : -ENOEXEC; - } - if (endswith(f, "/")) { -- log_syntax(unit, LOG_ERR, filename, line, 0, "Executable path specifies a directory, ignoring: %s", rvalue); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, 0, -+ "Executable path specifies a directory%s: %s", -+ ignore ? ", ignoring" : "", rvalue); -+ return ignore ? 0 : -ENOEXEC; - } - - if (f == firstword) { -@@ -695,7 +703,7 @@ int config_parse_exec( - if (r == 0) - break; - else if (r < 0) -- return 0; -+ return ignore ? 0 : -ENOEXEC; - - if (!GREEDY_REALLOC(n, nbufsize, nlen + 2)) - return log_oom(); -@@ -705,8 +713,10 @@ int config_parse_exec( - } - - if (!n || !n[0]) { -- log_syntax(unit, LOG_ERR, filename, line, 0, "Empty executable name or zeroeth argument, ignoring: %s", rvalue); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, 0, -+ "Empty executable name or zeroeth argument%s: %s", -+ ignore ? ", ignoring" : "", rvalue); -+ return ignore ? 0 : -ENOEXEC; - } - - nce = new0(ExecCommand, 1); -@@ -1214,8 +1224,10 @@ int config_parse_exec_selinux_context( - - r = unit_name_printf(u, rvalue, &k); - if (r < 0) { -- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve specifiers, ignoring: %m"); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, r, -+ "Failed to resolve specifiers%s: %m", -+ ignore ? 
", ignoring" : ""); -+ return ignore ? 0 : -ENOEXEC; - } - - free(c->selinux_context); -@@ -1262,8 +1274,10 @@ int config_parse_exec_apparmor_profile( - - r = unit_name_printf(u, rvalue, &k); - if (r < 0) { -- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve specifiers, ignoring: %m"); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, r, -+ "Failed to resolve specifiers%s: %m", -+ ignore ? ", ignoring" : ""); -+ return ignore ? 0 : -ENOEXEC; - } - - free(c->apparmor_profile); -@@ -1310,8 +1324,10 @@ int config_parse_exec_smack_process_label( - - r = unit_name_printf(u, rvalue, &k); - if (r < 0) { -- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve specifiers, ignoring: %m"); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, r, -+ "Failed to resolve specifiers%s: %m", -+ ignore ? ", ignoring" : ""); -+ return ignore ? 0 : -ENOEXEC; - } - - free(c->smack_process_label); -@@ -1520,19 +1536,19 @@ int config_parse_socket_service( - - r = unit_name_printf(UNIT(s), rvalue, &p); - if (r < 0) { -- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve specifiers, ignoring: %s", rvalue); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve specifiers: %s", rvalue); -+ return -ENOEXEC; - } - - if (!endswith(p, ".service")) { -- log_syntax(unit, LOG_ERR, filename, line, 0, "Unit must be of type service, ignoring: %s", rvalue); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, 0, "Unit must be of type service: %s", rvalue); -+ return -ENOEXEC; - } - - r = manager_load_unit(UNIT(s)->manager, p, NULL, &error, &x); - if (r < 0) { -- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to load unit %s, ignoring: %s", rvalue, bus_error_message(&error, r)); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, r, "Failed to load unit %s: %s", rvalue, bus_error_message(&error, r)); -+ return -ENOEXEC; - } - - unit_ref_set(&s->service, x); -@@ -1787,13 +1803,13 @@ int config_parse_user_group( - - r = unit_full_printf(u, rvalue, &k); - if (r < 0) { -- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in %s, ignoring: %m", rvalue); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in %s: %m", rvalue); -+ return -ENOEXEC; - } - - if (!valid_user_group_name_or_id(k)) { -- log_syntax(unit, LOG_ERR, filename, line, 0, "Invalid user/group name or numeric ID, ignoring: %s", k); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, 0, "Invalid user/group name or numeric ID: %s", k); -+ return -ENOEXEC; - } - - n = k; -@@ -1851,19 +1867,19 @@ int config_parse_user_group_strv( - if (r == -ENOMEM) - return log_oom(); - if (r < 0) { -- log_syntax(unit, LOG_ERR, filename, line, r, "Invalid syntax, ignoring: %s", rvalue); -- break; -+ log_syntax(unit, LOG_ERR, filename, line, r, "Invalid syntax: %s", rvalue); -+ return -ENOEXEC; - } - - r = unit_full_printf(u, word, &k); - if (r < 0) { -- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in %s, ignoring: %m", word); -- continue; -+ log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in %s: %m", word); -+ return -ENOEXEC; - } - - if (!valid_user_group_name_or_id(k)) { -- log_syntax(unit, LOG_ERR, filename, line, 0, "Invalid user/group name or numeric ID, ignoring: %s", k); -- continue; -+ log_syntax(unit, LOG_ERR, filename, line, 0, "Invalid user/group name or numeric ID: %s", k); -+ return -ENOEXEC; - } - - r = strv_push(users, k); -@@ -2022,20 
+2038,24 @@ int config_parse_working_directory( - - r = unit_full_printf(u, rvalue, &k); - if (r < 0) { -- log_syntax(unit, LOG_ERR, filename, line, r, "Failed to resolve unit specifiers in working directory path '%s', ignoring: %m", rvalue); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, r, -+ "Failed to resolve unit specifiers in working directory path '%s'%s: %m", -+ rvalue, missing_ok ? ", ignoring" : ""); -+ return missing_ok ? 0 : -ENOEXEC; - } - - path_kill_slashes(k); - - if (!utf8_is_valid(k)) { - log_syntax_invalid_utf8(unit, LOG_ERR, filename, line, rvalue); -- return 0; -+ return missing_ok ? 0 : -ENOEXEC; - } - - if (!path_is_absolute(k)) { -- log_syntax(unit, LOG_ERR, filename, line, 0, "Working directory path '%s' is not absolute, ignoring.", rvalue); -- return 0; -+ log_syntax(unit, LOG_ERR, filename, line, 0, -+ "Working directory path '%s' is not absolute%s.", -+ rvalue, missing_ok ? ", ignoring" : ""); -+ return missing_ok ? 0 : -ENOEXEC; - } - - free(c->working_directory); -@@ -4043,8 +4063,11 @@ int unit_load_fragment(Unit *u) { - return r; - - r = load_from_path(u, k); -- if (r < 0) -+ if (r < 0) { -+ if (r == -ENOEXEC) -+ log_unit_notice(u, "Unit configuration has fatal error, unit will not be started."); - return r; -+ } - - if (u->load_state == UNIT_STUB) { - SET_FOREACH(t, u->names, i) { -diff --git a/src/test/test-unit-file.c b/src/test/test-unit-file.c -index ade0ff2a6..fe1969570 100644 ---- a/src/test/test-unit-file.c -+++ b/src/test/test-unit-file.c -@@ -146,7 +146,7 @@ static void test_config_parse_exec(void) { - r = config_parse_exec(NULL, "fake", 4, "section", 1, - "LValue", 0, "/RValue/ argv0 r1", - &c, u); -- assert_se(r == 0); -+ assert_se(r == -ENOEXEC); - assert_se(c1->command_next == NULL); - - log_info("/* honour_argv0 */"); -@@ -161,7 +161,7 @@ static void test_config_parse_exec(void) { - r = config_parse_exec(NULL, "fake", 3, "section", 1, - "LValue", 0, "@/RValue", - &c, u); -- assert_se(r == 0); -+ assert_se(r == -ENOEXEC); - assert_se(c1->command_next == NULL); - - log_info("/* no command, whitespace only, reset */"); -@@ -220,7 +220,7 @@ static void test_config_parse_exec(void) { - "-@/RValue argv0 r1 ; ; " - "/goo/goo boo", - &c, u); -- assert_se(r >= 0); -+ assert_se(r == -ENOEXEC); - c1 = c1->command_next; - check_execcommand(c1, "/RValue", "argv0", "r1", NULL, true); - -@@ -374,7 +374,7 @@ static void test_config_parse_exec(void) { - r = config_parse_exec(NULL, "fake", 4, "section", 1, - "LValue", 0, path, - &c, u); -- assert_se(r == 0); -+ assert_se(r == -ENOEXEC); - assert_se(c1->command_next == NULL); - } - -@@ -401,21 +401,21 @@ static void test_config_parse_exec(void) { - r = config_parse_exec(NULL, "fake", 4, "section", 1, - "LValue", 0, "/path\\", - &c, u); -- assert_se(r == 0); -+ assert_se(r == -ENOEXEC); - assert_se(c1->command_next == NULL); - - log_info("/* missing ending ' */"); - r = config_parse_exec(NULL, "fake", 4, "section", 1, - "LValue", 0, "/path 'foo", - &c, u); -- assert_se(r == 0); -+ assert_se(r == -ENOEXEC); - assert_se(c1->command_next == NULL); - - log_info("/* missing ending ' with trailing backslash */"); - r = config_parse_exec(NULL, "fake", 4, "section", 1, - "LValue", 0, "/path 'foo\\", - &c, u); -- assert_se(r == 0); -+ assert_se(r == -ENOEXEC); - assert_se(c1->command_next == NULL); - - log_info("/* invalid space between modifiers */"); --- -2.11.0 - diff --git a/import-layers/yocto-poky/meta/recipes-core/systemd/systemd_232.bb b/import-layers/yocto-poky/meta/recipes-core/systemd/systemd_232.bb 
index df1d9e137..25fe49641 100644 --- a/import-layers/yocto-poky/meta/recipes-core/systemd/systemd_232.bb +++ b/import-layers/yocto-poky/meta/recipes-core/systemd/systemd_232.bb @@ -4,11 +4,11 @@ PROVIDES = "udev" PE = "1" -DEPENDS = "kmod docbook-sgml-dtd-4.1-native intltool-native gperf-native acl readline libcap libcgroup qemu-native util-linux" +DEPENDS = "kmod intltool-native gperf-native acl readline libcap libcgroup util-linux" SECTION = "base/shell" -inherit useradd pkgconfig autotools perlnative update-rc.d update-alternatives qemu systemd ptest gettext bash-completion +inherit useradd pkgconfig autotools perlnative update-rc.d update-alternatives qemu systemd ptest gettext bash-completion manpages SRC_URI += " \ file://touchscreen.rules \ @@ -33,6 +33,7 @@ SRC_URI += " \ file://0018-check-for-uchar.h-in-configure.patch \ file://0019-socket-util-don-t-fail-if-libc-doesn-t-support-IDN.patch \ file://0020-back-port-233-don-t-use-the-unified-hierarchy-for-the-systemd.patch \ + file://0001-core-load-fragment-refuse-units-with-errors-in-certa.patch \ " SRC_URI_append_libc-uclibc = "\ file://0002-units-Prefer-getty-to-agetty-in-console-setup-system.patch \ @@ -40,16 +41,14 @@ SRC_URI_append_libc-uclibc = "\ SRC_URI_append_qemuall = " file://0001-core-device.c-Change-the-default-device-timeout-to-2.patch" PACKAGECONFIG ??= "xz \ - ldconfig \ - ${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)} \ + ${@bb.utils.filter('DISTRO_FEATURES', 'efi pam selinux ldconfig', d)} \ ${@bb.utils.contains('DISTRO_FEATURES', 'x11', 'xkbcommon', '', d)} \ - ${@bb.utils.contains('DISTRO_FEATURES', 'selinux', 'selinux', '', d)} \ ${@bb.utils.contains('DISTRO_FEATURES', 'wifi', 'rfkill', '', d)} \ - ${@bb.utils.contains('MACHINE_FEATURES', 'efi', 'efi', '', d)} \ binfmt \ randomseed \ machined \ backlight \ + vconsole \ quotacheck \ hostnamed \ ${@bb.utils.contains('TCLIBC', 'glibc', 'myhostname sysusers', '', d)} \ @@ -82,6 +81,7 @@ PACKAGECONFIG[resolved] = "--enable-resolved,--disable-resolved" PACKAGECONFIG[networkd] = "--enable-networkd,--disable-networkd" PACKAGECONFIG[machined] = "--enable-machined,--disable-machined" PACKAGECONFIG[backlight] = "--enable-backlight,--disable-backlight" +PACKAGECONFIG[vconsole] = "--enable-vconsole,--disable-vconsole,,${PN}-vconsole-setup" PACKAGECONFIG[quotacheck] = "--enable-quotacheck,--disable-quotacheck" PACKAGECONFIG[hostnamed] = "--enable-hostnamed,--disable-hostnamed" PACKAGECONFIG[myhostname] = "--enable-myhostname,--disable-myhostname" @@ -161,6 +161,9 @@ CFLAGS .= "${@bb.utils.contains('PACKAGECONFIG', 'valgrind', ' -DVALGRIND=1', '' # disable problematic GCC 5.2 optimizations [YOCTO #8291] FULL_OPTIMIZATION_append_arm = " -fno-schedule-insns -fno-schedule-insns2" +# Avoid login failure on qemumips64 when pam is enabled +FULL_OPTIMIZATION_append_mips64 = " -fno-tree-switch-conversion -fno-tree-tail-merge" + do_configure_prepend() { export NM="${HOST_PREFIX}gcc-nm" export AR="${HOST_PREFIX}gcc-ar" @@ -274,7 +277,7 @@ do_install_ptest () { } python populate_packages_prepend (){ - systemdlibdir = d.getVar("rootlibdir", True) + systemdlibdir = d.getVar("rootlibdir") do_split_packages(d, systemdlibdir, '^lib(.*)\.so\.*', 'lib%s', 'Systemd %s library', extra_depends='', allow_links=True) } PACKAGES_DYNAMIC += "^lib(udev|systemd|nss).*" @@ -365,6 +368,7 @@ FILES_${PN}-container = "${sysconfdir}/dbus-1/system.d/org.freedesktop.import1.c ${systemd_system_unitdir}/org.freedesktop.machine1.busname \ ${systemd_system_unitdir}/systemd-importd.service \ 
${systemd_system_unitdir}/systemd-machined.service \ + ${systemd_system_unitdir}/dbus-org.freedesktop.machine1.service \ ${systemd_system_unitdir}/var-lib-machines.mount \ ${rootlibexecdir}/systemd/systemd-import \ ${rootlibexecdir}/systemd/systemd-importd \ @@ -450,7 +454,6 @@ FILES_${PN} = " ${base_bindir}/* \ ${rootlibexecdir}/systemd/* \ ${systemd_unitdir}/* \ ${base_libdir}/security/*.so \ - ${libdir}/libnss_* \ /cgroup \ ${bindir}/systemd* \ ${bindir}/busctl \ @@ -478,7 +481,6 @@ RDEPENDS_${PN} += "kmod dbus util-linux-mount udev (= ${EXTENDPKGV})" RDEPENDS_${PN} += "volatile-binds update-rc.d" RRECOMMENDS_${PN} += "${@bb.utils.contains('PACKAGECONFIG', 'serial-getty-generator', '', 'systemd-serialgetty', d)} \ - systemd-vconsole-setup \ systemd-extra-utils \ systemd-compat-units udev-hwdb \ util-linux-agetty util-linux-fsck e2fsprogs-e2fsck \ @@ -570,6 +572,7 @@ pkg_prerm_${PN} () { -i $D${sysconfdir}/nsswitch.conf } +PACKAGE_WRITE_DEPS += "qemu-native" pkg_postinst_udev-hwdb () { if test -n "$D"; then ${@qemu_run_binary(d, '$D', '${base_bindir}/udevadm')} hwdb --update \ @@ -591,6 +594,6 @@ python () { raise bb.parse.SkipPackage("'systemd' not in DISTRO_FEATURES") import re - if re.match('.*musl*', d.getVar('TARGET_OS', True)) != None: + if re.match('.*musl*', d.getVar('TARGET_OS')) != None: raise bb.parse.SkipPackage("Not _yet_ supported on musl based targets") } diff --git a/import-layers/yocto-poky/meta/recipes-core/sysvinit/sysvinit-inittab/start_getty b/import-layers/yocto-poky/meta/recipes-core/sysvinit/sysvinit-inittab/start_getty index 6eaeb7a2b..e3d052a84 100644 --- a/import-layers/yocto-poky/meta/recipes-core/sysvinit/sysvinit-inittab/start_getty +++ b/import-layers/yocto-poky/meta/recipes-core/sysvinit/sysvinit-inittab/start_getty @@ -1,5 +1,5 @@ #!/bin/sh if [ -c /dev/$2 ] then - /sbin/getty -L $1 $2 + /sbin/getty -L $1 $2 $3 fi diff --git a/import-layers/yocto-poky/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb b/import-layers/yocto-poky/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb index f136ad821..5b9c422ca 100644 --- a/import-layers/yocto-poky/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb +++ b/import-layers/yocto-poky/meta/recipes-core/sysvinit/sysvinit-inittab_2.88dsf.bb @@ -28,7 +28,7 @@ do_install() { j=`echo ${i} | sed s/\;/\ /g` l=`echo ${i} | sed -e 's/tty//' -e 's/^.*;//' -e 's/;.*//'` label=`echo $l | sed 's/.*\(....\)/\1/'` - echo "$label:12345:respawn:${base_bindir}/start_getty ${j}" >> ${D}${sysconfdir}/inittab + echo "$label:12345:respawn:${base_bindir}/start_getty ${j} vt102" >> ${D}${sysconfdir}/inittab done if [ "${USE_VT}" = "1" ]; then @@ -62,7 +62,7 @@ if [ "x$D" = "x" ] && [ -e /proc/consoles ]; then k=`echo ${i} | sed s/^.*\://g` if [ -z "`grep ${j} /proc/consoles`" ]; then if [ -z "${k}" ] || [ -z "`grep ${k} /proc/consoles`" ] || [ ! 
-e /dev/${j} ]; then - sed -i /^.*${j}$/d /etc/inittab + sed -i -e /^.*${j}\ /d -e /^.*${j}$/d /etc/inittab fi fi done diff --git a/import-layers/yocto-poky/meta/recipes-core/udev/eudev/0014-Revert-rules-remove-firmware-loading-rules.patch b/import-layers/yocto-poky/meta/recipes-core/udev/eudev/0014-Revert-rules-remove-firmware-loading-rules.patch new file mode 100644 index 000000000..fe2ba5328 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/udev/eudev/0014-Revert-rules-remove-firmware-loading-rules.patch @@ -0,0 +1,28 @@ +From 4f0a722489154da99e7f6b3051afde984eed2f74 Mon Sep 17 00:00:00 2001 +From: Jonathan Liu +Date: Thu, 19 Mar 2015 15:01:29 +1100 +Subject: [PATCH] Revert "rules: remove firmware loading rules" + +This reverts commit 70e7d754ddb356fb1a2942b262f8cee9650e2a19. +Userspace firmware loading support is needed for Linux < 3.7. + +Upstream-Status: Inappropriate [OE specific] + +Signed-off-by: Jonathan Liu +--- + rules/50-firmware.rules | 3 +++ + 1 file changed, 3 insertions(+) + create mode 100644 rules/50-firmware.rules + +diff --git a/rules/50-firmware.rules b/rules/50-firmware.rules +new file mode 100644 +index 0000000..f0ae684 +--- /dev/null ++++ b/rules/50-firmware.rules +@@ -0,0 +1,3 @@ ++# do not edit this file, it will be overwritten on update ++ ++SUBSYSTEM=="firmware", ACTION=="add", RUN{builtin}="firmware" +-- +2.3.3 + diff --git a/import-layers/yocto-poky/meta/recipes-core/udev/eudev/Revert-udev-remove-userspace-firmware-loading-suppor.patch b/import-layers/yocto-poky/meta/recipes-core/udev/eudev/Revert-udev-remove-userspace-firmware-loading-suppor.patch new file mode 100644 index 000000000..db333e937 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/udev/eudev/Revert-udev-remove-userspace-firmware-loading-suppor.patch @@ -0,0 +1,364 @@ +From e415372cc7a2f52e70e1cfa8c6c1f633b411355d Mon Sep 17 00:00:00 2001 +From: Lauren Post +Date: Wed, 8 Jun 2016 06:51:56 -0500 +Subject: [PATCH] Revert "udev: remove userspace firmware loading support" + +This reverts commit 3b717594600fa717cdf9bcfd0c7c1b703b245482. 
+ +Conflicts: + configure.ac + src/udev/udevd.c + +Upstream-Status: Inappropriate [OE specific] + +Signed-off-by: Lauren Post +--- + configure.ac | 39 +++++++++- + rules/Makefile.am | 5 ++ + src/udev/Makefile.am | 10 +++ + src/udev/udev-builtin-firmware.c | 154 +++++++++++++++++++++++++++++++++++++++ + src/udev/udev-builtin.c | 3 + + src/udev/udev.h | 6 ++ + src/udev/udevd.c | 13 ++++ + 7 files changed, 227 insertions(+), 3 deletions(-) + create mode 100644 src/udev/udev-builtin-firmware.c + +diff --git a/configure.ac b/configure.ac +index 8691891..65028c2 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -238,9 +238,42 @@ AC_CHECK_DECL([unshare], + [#include ]) + + # ------------------------------------------------------------------------------ +-AC_PATH_TOOL(GPERF, gperf) +-if test -z "$GPERF" ; then +- AC_MSG_ERROR([*** gperf not found]) ++AC_ARG_WITH(firmware-path, ++ AS_HELP_STRING([--with-firmware-path=DIR[[[:DIR[...]]]]], ++ [Firmware search path (default=ROOTPREFIX/lib/firmware/updates:ROOTPREFIX/lib/firmware)]), ++ [], [with_firmware_path="$rootprefix/lib/firmware/updates:$rootprefix/lib/firmware"]) ++OLD_IFS=$IFS ++IFS=: ++for i in $with_firmware_path; do ++ if test "x${FIRMWARE_PATH}" = "x"; then ++ FIRMWARE_PATH="\\\"${i}/\\\"" ++ else ++ FIRMWARE_PATH="${FIRMWARE_PATH}, \\\"${i}/\\\"" ++ fi ++done ++IFS=$OLD_IFS ++AC_SUBST(FIRMWARE_PATH) ++AS_IF([test "x${FIRMWARE_PATH}" != "x"], [ AC_DEFINE(HAVE_FIRMWARE, 1, [Define if FIRMWARE is available]) ]) ++AM_CONDITIONAL(ENABLE_FIRMWARE, [test "x${FIRMWARE_PATH}" != "x"]) ++ ++# ------------------------------------------------------------------------------ ++AC_ARG_ENABLE([gudev], ++ AS_HELP_STRING([--disable-gudev], [disable Gobject libudev support @<:@default=enabled@:>@]), ++ [], [enable_gudev=yes]) ++AS_IF([test "x$enable_gudev" = "xyes"], [ PKG_CHECK_MODULES([GLIB], [glib-2.0 >= 2.22.0 gobject-2.0 >= 2.22.0]) ]) ++AM_CONDITIONAL([ENABLE_GUDEV], [test "x$enable_gudev" = "xyes"]) ++ ++# ------------------------------------------------------------------------------ ++AC_ARG_ENABLE([keymap], ++ AS_HELP_STRING([--disable-keymap], [disable keymap fixup support @<:@default=enabled@:>@]), ++ [], [enable_keymap=yes]) ++ ++if test "x$enable_keymap" = "xyes"; then ++ AC_PATH_TOOL(GPERF, gperf) ++ if test -z "$GPERF" ; then ++ AC_MSG_ERROR([*** gperf not found]) ++ fi ++ AC_DEFINE([ENABLE_KEYMAP], [1], [Define if we are enabling rule generator]) + fi + + # ------------------------------------------------------------------------------ +diff --git a/rules/Makefile.am b/rules/Makefile.am +index 24c099c..d714ae3 100644 +--- a/rules/Makefile.am ++++ b/rules/Makefile.am +@@ -22,6 +22,11 @@ dist_udevrules_DATA += \ + 80-net-name-slot.rules + endif + ++if ENABLE_FIRMWARE ++dist_udevrules_DATA += \ ++ 50-firmware.rules ++endif ++ + if HAVE_BLKID + dist_udevrules_DATA += \ + 60-persistent-storage.rules +diff --git a/src/udev/Makefile.am b/src/udev/Makefile.am +index 401af01..742fbc8 100644 +--- a/src/udev/Makefile.am ++++ b/src/udev/Makefile.am +@@ -53,12 +53,18 @@ libudev_core_la_SOURCES = \ + udev-ctrl.c \ + udev-builtin.c \ + udev-builtin-btrfs.c \ ++ udev-builtin-firmware.c \ + udev-builtin-hwdb.c \ + udev-builtin-input_id.c \ + udev-builtin-net_id.c \ + udev-builtin-path_id.c \ + udev-builtin-usb_id.c + ++if ENABLE_FIRMWARE ++libudev_core_la_SOURCES += \ ++ udev-builtin-firmware.c ++endif ++ + include_HEADERS = \ + udev.h + +@@ -86,6 +92,10 @@ libudev_core_la_LIBADD += \ + $(KMOD_LIBS) + endif + ++libudev_core_la_CPPFLAGS = \ ++ $(AM_CPPFLAGS) \ 
++ -DFIRMWARE_PATH="$(FIRMWARE_PATH)" ++ + # + # Extras + # +diff --git a/src/udev/udev-builtin-firmware.c b/src/udev/udev-builtin-firmware.c +new file mode 100644 +index 0000000..bd8c2fb +--- /dev/null ++++ b/src/udev/udev-builtin-firmware.c +@@ -0,0 +1,154 @@ ++/* ++ * firmware - Kernel firmware loader ++ * ++ * Copyright (C) 2009 Piter Punk ++ * Copyright (C) 2009-2011 Kay Sievers ++ * ++ * This program is free software; you can redistribute it and/or ++ * modify it under the terms of the GNU General Public License as ++ * published by the Free Software Foundation; either version 2 of the ++ * License, or (at your option) any later version. ++ * ++ * This program is distributed in the hope that it will be useful, but ++ * WITHOUT ANY WARRANTY; without even the implied warranty of ++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ * General Public License for more details:* ++ */ ++ ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++#include ++ ++#include "udev.h" ++ ++static bool set_loading(struct udev *udev, char *loadpath, const char *state) { ++ FILE *ldfile; ++ ++ ldfile = fopen(loadpath, "we"); ++ if (ldfile == NULL) { ++ log_error("error: can not open '%s'", loadpath); ++ return false; ++ }; ++ fprintf(ldfile, "%s\n", state); ++ fclose(ldfile); ++ return true; ++} ++ ++static bool copy_firmware(struct udev *udev, const char *source, const char *target, size_t size) { ++ char *buf; ++ FILE *fsource = NULL, *ftarget = NULL; ++ bool ret = false; ++ ++ buf = malloc(size); ++ if (buf == NULL) { ++ log_error("No memory available to load firmware file"); ++ return false; ++ } ++ ++ log_debug("writing '%s' (%zi) to '%s'", source, size, target); ++ ++ fsource = fopen(source, "re"); ++ if (fsource == NULL) ++ goto exit; ++ ftarget = fopen(target, "we"); ++ if (ftarget == NULL) ++ goto exit; ++ if (fread(buf, size, 1, fsource) != 1) ++ goto exit; ++ if (fwrite(buf, size, 1, ftarget) == 1) ++ ret = true; ++exit: ++ if (ftarget != NULL) ++ fclose(ftarget); ++ if (fsource != NULL) ++ fclose(fsource); ++ free(buf); ++ return ret; ++} ++ ++static int builtin_firmware(struct udev_device *dev, int argc, char *argv[], bool test) { ++ struct udev *udev = udev_device_get_udev(dev); ++ static const char *searchpath[] = { FIRMWARE_PATH }; ++ char loadpath[UTIL_PATH_SIZE]; ++ char datapath[UTIL_PATH_SIZE]; ++ char fwpath[UTIL_PATH_SIZE]; ++ const char *firmware; ++ FILE *fwfile = NULL; ++ struct utsname kernel; ++ struct stat statbuf; ++ unsigned int i; ++ int rc = EXIT_SUCCESS; ++ ++ firmware = udev_device_get_property_value(dev, "FIRMWARE"); ++ if (firmware == NULL) { ++ log_error("firmware parameter missing"); ++ rc = EXIT_FAILURE; ++ goto exit; ++ } ++ ++ /* lookup firmware file */ ++ uname(&kernel); ++ for (i = 0; i < ELEMENTSOF(searchpath); i++) { ++ strscpyl(fwpath, sizeof(fwpath), searchpath[i], kernel.release, "/", firmware, NULL); ++ fwfile = fopen(fwpath, "re"); ++ if (fwfile != NULL) ++ break; ++ ++ strscpyl(fwpath, sizeof(fwpath), searchpath[i], firmware, NULL); ++ fwfile = fopen(fwpath, "re"); ++ if (fwfile != NULL) ++ break; ++ } ++ ++ strscpyl(loadpath, sizeof(loadpath), udev_device_get_syspath(dev), "/loading", NULL); ++ ++ if (fwfile == NULL) { ++ log_debug("did not find firmware file '%s'", firmware); ++ rc = EXIT_FAILURE; ++ /* ++ * Do not cancel the request in the initrd, the real root might have ++ * the firmware file and the 'coldplug' run in the real root will find ++ * this pending request and fulfill or cancel it. 
++ * */ ++ if (!in_initrd()) ++ set_loading(udev, loadpath, "-1"); ++ goto exit; ++ } ++ ++ if (stat(fwpath, &statbuf) < 0 || statbuf.st_size == 0) { ++ if (!in_initrd()) ++ set_loading(udev, loadpath, "-1"); ++ rc = EXIT_FAILURE; ++ goto exit; ++ } ++ ++ if (!set_loading(udev, loadpath, "1")) ++ goto exit; ++ ++ strscpyl(datapath, sizeof(datapath), udev_device_get_syspath(dev), "/data", NULL); ++ if (!copy_firmware(udev, fwpath, datapath, statbuf.st_size)) { ++ log_error("error sending firmware '%s' to device", firmware); ++ set_loading(udev, loadpath, "-1"); ++ rc = EXIT_FAILURE; ++ goto exit; ++ }; ++ ++ set_loading(udev, loadpath, "0"); ++exit: ++ if (fwfile) ++ fclose(fwfile); ++ return rc; ++} ++ ++const struct udev_builtin udev_builtin_firmware = { ++ .name = "firmware", ++ .cmd = builtin_firmware, ++ .help = "kernel firmware loader", ++ .run_once = true, ++}; +diff --git a/src/udev/udev-builtin.c b/src/udev/udev-builtin.c +index 74b3bdc..3657106 100644 +--- a/src/udev/udev-builtin.c ++++ b/src/udev/udev-builtin.c +@@ -34,6 +34,9 @@ static const struct udev_builtin *builtins[] = { + [UDEV_BUILTIN_BLKID] = &udev_builtin_blkid, + #endif + [UDEV_BUILTIN_BTRFS] = &udev_builtin_btrfs, ++#ifdef HAVE_FIRMWARE ++ [UDEV_BUILTIN_FIRMWARE] = &udev_builtin_firmware, ++#endif + [UDEV_BUILTIN_HWDB] = &udev_builtin_hwdb, + [UDEV_BUILTIN_INPUT_ID] = &udev_builtin_input_id, + [UDEV_BUILTIN_KEYBOARD] = &udev_builtin_keyboard, +diff --git a/src/udev/udev.h b/src/udev/udev.h +index 198cb2c..01a1e9c 100644 +--- a/src/udev/udev.h ++++ b/src/udev/udev.h +@@ -149,6 +149,9 @@ enum udev_builtin_cmd { + UDEV_BUILTIN_BLKID, + #endif + UDEV_BUILTIN_BTRFS, ++#ifdef HAVE_FIRMWARE ++ UDEV_BUILTIN_FIRMWARE, ++#endif + UDEV_BUILTIN_HWDB, + UDEV_BUILTIN_INPUT_ID, + UDEV_BUILTIN_KEYBOARD, +@@ -173,6 +176,9 @@ struct udev_builtin { + extern const struct udev_builtin udev_builtin_blkid; + #endif + extern const struct udev_builtin udev_builtin_btrfs; ++#ifdef HAVE_FIRMWARE ++extern const struct udev_builtin udev_builtin_firmware; ++#endif + extern const struct udev_builtin udev_builtin_hwdb; + extern const struct udev_builtin udev_builtin_input_id; + extern const struct udev_builtin udev_builtin_keyboard; +diff --git a/src/udev/udevd.c b/src/udev/udevd.c +index b1de97a..35655d8 100644 +--- a/src/udev/udevd.c ++++ b/src/udev/udevd.c +@@ -101,6 +101,9 @@ struct event { + bool is_block; + usec_t start_usec; + bool warned; ++#ifdef HAVE_FIRMWARE ++ bool nodelay; ++#endif + }; + + static inline struct event *node_to_event(struct udev_list_node *node) { +@@ -491,6 +494,10 @@ static int event_queue_insert(struct udev_device *dev) { + event->devnum = udev_device_get_devnum(dev); + event->is_block = streq("block", udev_device_get_subsystem(dev)); + event->ifindex = udev_device_get_ifindex(dev); ++#ifdef HAVE_FIRMWARE ++ if (streq(udev_device_get_subsystem(dev), "firmware")) ++ event->nodelay = true; ++#endif + + log_debug("seq %llu queued, '%s' '%s'", udev_device_get_seqnum(dev), + udev_device_get_action(dev), udev_device_get_subsystem(dev)); +@@ -566,6 +573,12 @@ static bool is_devpath_busy(struct event *event) { + return true; + } + ++#ifdef HAVE_FIRMWARE ++ /* allow to bypass the dependency tracking */ ++ if (event->nodelay) ++ continue; ++#endif ++ + /* parent device event found */ + if (event->devpath[common] == '/') { + event->delaying_seqnum = loop_event->seqnum; +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-core/udev/eudev_3.2.1.bb b/import-layers/yocto-poky/meta/recipes-core/udev/eudev_3.2.1.bb new file 
mode 100644 index 000000000..bdfb5441a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/udev/eudev_3.2.1.bb @@ -0,0 +1,106 @@ +SUMMARY = "eudev is a fork of systemd's udev" +HOMEPAGE = "https://wiki.gentoo.org/wiki/Eudev" +LICENSE = "GPLv2.0+ & LGPL-2.1+" +LICENSE_libudev = "LGPL-2.1+" +LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe" + +DEPENDS = "glib-2.0 glib-2.0-native gperf-native kmod libxslt-native util-linux" + +PROVIDES = "udev" + +SRC_URI = "https://github.com/gentoo/${BPN}/archive/v${PV}.tar.gz;downloadfilename=${BP}.tar.gz \ + file://0014-Revert-rules-remove-firmware-loading-rules.patch \ + file://Revert-udev-remove-userspace-firmware-loading-suppor.patch \ + file://devfs-udev.rules \ + file://init \ + file://links.conf \ + file://local.rules \ + file://permissions.rules \ + file://run.rules \ + file://udev-cache \ + file://udev-cache.default \ + file://udev.rules \ +" +UPSTREAM_CHECK_URI = "https://github.com/gentoo/eudev/releases" + +SRC_URI[md5sum] = "a2aae16bc432eac0e71c1267c384e295" +SRC_URI[sha256sum] = "88f530c1540750e6daa91b5eaeebf88e761e6f0c86515c1c28eedfd871f027c6" + +inherit autotools update-rc.d qemu pkgconfig + +EXTRA_OECONF = " \ + --sbindir=${base_sbindir} \ + --with-rootlibdir=${base_libdir} \ + --with-rootprefix= \ +" + +PACKAGECONFIG ??= "hwdb" +PACKAGECONFIG[hwdb] = "--enable-hwdb,--disable-hwdb" + +do_install_append() { + install -d ${D}${sysconfdir}/init.d + install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/udev + install -m 0755 ${WORKDIR}/udev-cache ${D}${sysconfdir}/init.d/udev-cache + sed -i s%@UDEVD@%${base_sbindir}/udevd% ${D}${sysconfdir}/init.d/udev + sed -i s%@UDEVD@%${base_sbindir}/udevd% ${D}${sysconfdir}/init.d/udev-cache + + install -d ${D}${sysconfdir}/default + install -m 0755 ${WORKDIR}/udev-cache.default ${D}${sysconfdir}/default/udev-cache + + touch ${D}${sysconfdir}/udev/cache.data + + install -d ${D}${sysconfdir}/udev/rules.d + install -m 0644 ${WORKDIR}/local.rules ${D}${sysconfdir}/udev/rules.d/local.rules + + # Use classic network interface naming scheme + touch ${D}${sysconfdir}/udev/rules.d/80-net-name-slot.rules + + # hid2hci has moved to bluez4. removed in udev as of version 169 + rm -f ${D}${base_libdir}/udev/hid2hci +} + +INITSCRIPT_PACKAGES = "eudev udev-cache" +INITSCRIPT_NAME_eudev = "udev" +INITSCRIPT_PARAMS_eudev = "start 04 S ." +INITSCRIPT_NAME_udev-cache = "udev-cache" +INITSCRIPT_PARAMS_udev-cache = "start 36 S ." 
+ +PACKAGES =+ "libudev" +PACKAGES =+ "udev-cache" +PACKAGES =+ "eudev-hwdb" + + +FILES_${PN} += "${libexecdir} ${base_libdir}/udev ${bindir}/udevadm" +FILES_${PN}-dev = "${datadir}/pkgconfig/udev.pc \ + ${includedir}/libudev.h ${libdir}/libudev.so \ + ${includedir}/udev.h ${libdir}/libudev.la \ + ${libdir}/libudev.a ${libdir}/pkgconfig/libudev.pc" +FILES_libudev = "${base_libdir}/libudev.so.*" +FILES_udev-cache = "${sysconfdir}/init.d/udev-cache ${sysconfdir}/default/udev-cache" +FILES_eudev-hwdb = "${sysconfdir}/udev/hwdb.d" + +RDEPENDS_eudev-hwdb += "eudev" + +RRECOMMENDS_${PN} += "udev-cache" + +RPROVIDES_${PN} = "hotplug udev" +RPROVIDES_eudev-hwdb += "udev-hwdb" + +python () { + if bb.utils.contains ('DISTRO_FEATURES', 'systemd', True, False, d): + raise bb.parse.SkipPackage("'systemd' in DISTRO_FEATURES") +} + +PACKAGE_WRITE_DEPS += "qemu-native" +pkg_postinst_eudev-hwdb () { + if test -n "$D"; then + ${@qemu_run_binary(d, '$D', '${bindir}/udevadm')} hwdb --update --root $D + chown root:root $D${sysconfdir}/udev/hwdb.bin + else + udevadm hwdb --update + fi +} + +pkg_prerm_eudev-hwdb () { + rm -f $D${sysconfdir}/udev/hwdb.bin +} diff --git a/import-layers/yocto-poky/meta/recipes-core/udev/eudev_3.2.bb b/import-layers/yocto-poky/meta/recipes-core/udev/eudev_3.2.bb deleted file mode 100644 index 857d20db7..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/udev/eudev_3.2.bb +++ /dev/null @@ -1,102 +0,0 @@ -SUMMARY = "eudev is a fork of systemd's udev" -HOMEPAGE = "https://wiki.gentoo.org/wiki/Eudev" -LICENSE = "GPLv2.0+ & LGPL-2.1+" -LICENSE_libudev = "LGPL-2.1+" -LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe" - -DEPENDS = "glib-2.0 glib-2.0-native gperf-native kmod libxslt-native util-linux" - -PROVIDES = "udev" - -SRC_URI = "https://github.com/gentoo/${BPN}/archive/v${PV}.tar.gz \ - file://devfs-udev.rules \ - file://init \ - file://links.conf \ - file://local.rules \ - file://permissions.rules \ - file://run.rules \ - file://udev-cache \ - file://udev-cache.default \ - file://udev.rules \ -" -UPSTREAM_CHECK_URI = "https://github.com/gentoo/eudev/releases" - -SRC_URI[md5sum] = "9eb477cc8718db272b5d24dff1126b04" -SRC_URI[sha256sum] = "37829d37f4beb7e358ca33abc1ad0907d87f917ce157777aeaeebeacae24efdc" - -inherit autotools update-rc.d qemu pkgconfig - -EXTRA_OECONF = " \ - --sbindir=${base_sbindir} \ - --with-rootlibdir=${base_libdir} \ - --with-rootprefix= \ -" - -PACKAGECONFIG ??= "hwdb" -PACKAGECONFIG[hwdb] = "--enable-hwdb,--disable-hwdb" - -do_install_append() { - install -d ${D}${sysconfdir}/init.d - install -m 0755 ${WORKDIR}/init ${D}${sysconfdir}/init.d/udev - install -m 0755 ${WORKDIR}/udev-cache ${D}${sysconfdir}/init.d/udev-cache - sed -i s%@UDEVD@%${base_sbindir}/udevd% ${D}${sysconfdir}/init.d/udev - sed -i s%@UDEVD@%${base_sbindir}/udevd% ${D}${sysconfdir}/init.d/udev-cache - - install -d ${D}${sysconfdir}/default - install -m 0755 ${WORKDIR}/udev-cache.default ${D}${sysconfdir}/default/udev-cache - - touch ${D}${sysconfdir}/udev/cache.data - - install -d ${D}${sysconfdir}/udev/rules.d - install -m 0644 ${WORKDIR}/local.rules ${D}${sysconfdir}/udev/rules.d/local.rules - - # Use classic network interface naming scheme - touch ${D}${sysconfdir}/udev/rules.d/80-net-name-slot.rules - - # hid2hci has moved to bluez4. removed in udev as of version 169 - rm -f ${D}${base_libdir}/udev/hid2hci -} - -INITSCRIPT_PACKAGES = "eudev udev-cache" -INITSCRIPT_NAME_eudev = "udev" -INITSCRIPT_PARAMS_eudev = "start 04 S ." 
-INITSCRIPT_NAME_udev-cache = "udev-cache" -INITSCRIPT_PARAMS_udev-cache = "start 36 S ." - -PACKAGES =+ "libudev" -PACKAGES =+ "udev-cache" -PACKAGES =+ "eudev-hwdb" - - -FILES_${PN} += "${libexecdir} ${base_libdir}/udev ${bindir}/udevadm" -FILES_${PN}-dev = "${datadir}/pkgconfig/udev.pc \ - ${includedir}/libudev.h ${libdir}/libudev.so \ - ${includedir}/udev.h ${libdir}/libudev.la \ - ${libdir}/libudev.a ${libdir}/pkgconfig/libudev.pc" -FILES_libudev = "${base_libdir}/libudev.so.*" -FILES_udev-cache = "${sysconfdir}/init.d/udev-cache ${sysconfdir}/default/udev-cache" -FILES_eudev-hwdb = "${sysconfdir}/udev/hwdb.d" - -RDEPENDS_eudev-hwdb += "eudev" - -RRECOMMENDS_${PN} += "udev-cache" - -RPROVIDES_${PN} = "hotplug udev" - -python () { - if bb.utils.contains ('DISTRO_FEATURES', 'systemd', True, False, d): - raise bb.parse.SkipPackage("'systemd' in DISTRO_FEATURES") -} - -pkg_postinst_eudev-hwdb () { - if test -n "$D"; then - ${@qemu_run_binary(d, '$D', '${bindir}/udevadm')} hwdb --update --root $D - chown root:root $D${sysconfdir}/udev/hwdb.bin - else - udevadm hwdb --update - fi -} - -pkg_prerm_eudev-hwdb () { - rm -f $D${sysconfdir}/udev/hwdb.bin -} diff --git a/import-layers/yocto-poky/meta/recipes-core/udev/udev-extraconf_1.1.bb b/import-layers/yocto-poky/meta/recipes-core/udev/udev-extraconf_1.1.bb index ecd4a8aab..ae125507b 100644 --- a/import-layers/yocto-poky/meta/recipes-core/udev/udev-extraconf_1.1.bb +++ b/import-layers/yocto-poky/meta/recipes-core/udev/udev-extraconf_1.1.bb @@ -1,8 +1,7 @@ SUMMARY = "Extra machine specific configuration files" DESCRIPTION = "Extra machine specific configuration files for udev, specifically blacklist information." LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" +LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" SRC_URI = " \ file://automount.rules \ diff --git a/import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux.inc b/import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux.inc index 70cba6b59..63302a9fe 100644 --- a/import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux.inc +++ b/import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux.inc @@ -29,9 +29,9 @@ SRC_URI = "${KERNELORG_MIRROR}/linux/utils/util-linux/v${MAJOR_VERSION}/util-lin PACKAGES =+ "util-linux-agetty util-linux-fdisk util-linux-cfdisk util-linux-sfdisk \ util-linux-swaponoff util-linux-losetup util-linux-umount \ util-linux-mount util-linux-readprofile util-linux-uuidd \ - util-linux-uuidgen util-linux-lscpu util-linux-fsck util-linux-blkid \ - util-linux-mkfs util-linux-mcookie util-linux-reset \ - util-linux-mkfs.cramfs util-linux-fsck.cramfs util-linux-fstrim \ + util-linux-uuidgen util-linux-lscpu util-linux-fsck.cramfs util-linux-fsck \ + util-linux-blkid util-linux-mkfs util-linux-mcookie util-linux-reset \ + util-linux-lsblk util-linux-mkfs.cramfs util-linux-fstrim \ util-linux-partx util-linux-hwclock util-linux-mountpoint \ util-linux-findfs util-linux-getopt util-linux-sulogin util-linux-prlimit" PACKAGES += "${@bb.utils.contains('PACKAGECONFIG', 'pylibmount', 'util-linux-pylibmount', '', d)}" @@ -52,7 +52,7 @@ SHARED_EXTRA_OECONF = "--disable-use-tty-group \ EXTRA_OECONF = "${SHARED_EXTRA_OECONF} --libdir=${base_libdir}" -PACKAGECONFIG_class-target ?= "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}" +PACKAGECONFIG_class-target ?= 
"${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" PACKAGECONFIG[pam] = "--enable-su --enable-runuser,--disable-su --disable-runuser, libpam," # Respect the systemd feature for uuidd @@ -84,7 +84,7 @@ FILES_util-linux-umount = "${base_bindir}/umount.${BPN}" FILES_util-linux-readprofile = "${sbindir}/readprofile.${BPN}" FILES_util-linux-uuidgen = "${bindir}/uuidgen" FILES_util-linux-uuidd = "${sbindir}/uuidd" -FILES_util-linux-reset = "${base_bindir}/reset" +FILES_util-linux-reset = "${base_bindir}/reset*" FILES_util-linux-partx = "${sbindir}/partx" FILES_util-linux-hwclock = "${base_sbindir}/hwclock.${BPN}" FILES_util-linux-findfs = "${sbindir}/findfs" @@ -95,6 +95,7 @@ FILES_util-linux-prlimit = "${bindir}/prlimit" FILES_util-linux-pylibmount = "${PYTHON_SITEPACKAGES_DIR}/libmount/pylibmount.so \ ${PYTHON_SITEPACKAGES_DIR}/libmount/__init__.* \ ${PYTHON_SITEPACKAGES_DIR}/libmount/__pycache__/*" +FILES_util-linux-lsblk = "${bindir}/lsblk" FILES_util-linux-lscpu = "${bindir}/lscpu" FILES_util-linux-fsck = "${base_sbindir}/fsck*" @@ -116,7 +117,7 @@ RDEPENDS_util-linux-reset += "ncurses" RDEPENDS_util-linux-runuser += "libpam" -RDEPENDS_${PN} = "util-linux-umount util-linux-swaponoff util-linux-losetup util-linux-sulogin" +RDEPENDS_${PN} = "util-linux-umount util-linux-swaponoff util-linux-losetup util-linux-sulogin util-linux-lsblk" RDEPENDS_${PN} += "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'util-linux-runuser', '', d)}" RRECOMMENDS_${PN} = "util-linux-fdisk util-linux-cfdisk util-linux-sfdisk util-linux-mount util-linux-readprofile util-linux-mkfs util-linux-mountpoint util-linux-prlimit" @@ -150,7 +151,7 @@ do_install () { mkdir -p ${D}${base_bindir} sbinprogs="agetty ctrlaltdel cfdisk vipw vigr" - sbinprogs_a="pivot_root hwclock mkswap mkfs.minix fsck.minix losetup swapon swapoff fdisk fsck blkid blockdev fstrim sulogin switch_root" + sbinprogs_a="pivot_root hwclock mkswap mkfs.minix fsck.minix losetup swapon swapoff fdisk fsck blkid blockdev fstrim sulogin switch_root nologin" binprogs_a="dmesg getopt kill more umount mount login reset su mountpoint" if [ "${base_sbindir}" != "${sbindir}" ]; then @@ -176,7 +177,7 @@ do_install () { rm -f ${D}${bindir}/chkdupexe - if [ "${@bb.utils.contains('DISTRO_FEATURES', 'pam', 'pam', '', d)}" = "pam" ]; then + if [ "${@bb.utils.filter('DISTRO_FEATURES', 'pam', d)}" ]; then install -d ${D}${sysconfdir}/pam.d install -m 0644 ${WORKDIR}/runuser.pamd ${D}${sysconfdir}/pam.d/runuser install -m 0644 ${WORKDIR}/runuser-l.pamd ${D}${sysconfdir}/pam.d/runuser-l @@ -193,7 +194,7 @@ ALTERNATIVE_PRIORITY = "80" ALTERNATIVE_${PN} = "dmesg kill more mkswap blockdev pivot_root switch_root" ALTERNATIVE_${PN} += "hexdump last lastb logger mesg renice wall" -ALTERNATIVE_${PN} += "setsid chrt flock utmpdump eject" +ALTERNATIVE_${PN} += "setsid chrt flock utmpdump eject nologin" ALTERNATIVE_LINK_NAME[dmesg] = "${base_bindir}/dmesg" ALTERNATIVE_LINK_NAME[kill] = "${base_bindir}/kill" @@ -203,8 +204,9 @@ ALTERNATIVE_LINK_NAME[blockdev] = "${base_sbindir}/blockdev" ALTERNATIVE_LINK_NAME[pivot_root] = "${base_sbindir}/pivot_root" ALTERNATIVE_LINK_NAME[switch_root] = "${base_sbindir}/switch_root" ALTERNATIVE_LINK_NAME[eject] = "${bindir}/eject" +ALTERNATIVE_LINK_NAME[nologin] = "${base_sbindir}/nologin" -ALTERNATIVE_${PN}-doc = "mountpoint.1 last.1 lastb.1 mesg.1 wall.1 nologin.8 sulogin.8 utmpdump.1 reset.1 kill.1 libblkid.3 blkid.8 findfs.8 fsck.8 uuid.3" +ALTERNATIVE_${PN}-doc = "mountpoint.1 last.1 lastb.1 mesg.1 wall.1 nologin.8 sulogin.8 utmpdump.1 reset.1 
kill.1 libblkid.3 blkid.8 findfs.8 fsck.8 uuid.3 eject.1 logger.1" ALTERNATIVE_LINK_NAME[last.1] = "${mandir}/man1/last.1" ALTERNATIVE_LINK_NAME[lastb.1] = "${mandir}/man1/lastb.1" @@ -221,6 +223,8 @@ ALTERNATIVE_LINK_NAME[blkid.8] = "${mandir}/man8/blkid.8" ALTERNATIVE_LINK_NAME[findfs.8] = "${mandir}/man8/findfs.8" ALTERNATIVE_LINK_NAME[fsck.8] = "${mandir}/man8/fsck.8" ALTERNATIVE_LINK_NAME[uuid.3] = "${mandir}/man3/uuid.3" +ALTERNATIVE_LINK_NAME[eject.1] = "${mandir}/man1/eject.1" +ALTERNATIVE_LINK_NAME[logger.1] = "${mandir}/man1/logger.1" ALTERNATIVE_util-linux-hwclock = "hwclock" ALTERNATIVE_LINK_NAME[hwclock] = "${base_sbindir}/hwclock" @@ -270,7 +274,7 @@ ALTERNATIVE_LINK_NAME[mountpoint] = "${base_bindir}/mountpoint" BBCLASSEXTEND = "native nativesdk" python do_package_prepend () { - if '--enable-su' in d.getVar('EXTRA_OECONF', True).split(): + if '--enable-su' in d.getVar('EXTRA_OECONF').split(): d.appendVar(d.expand('ALTERNATIVE_${PN}'), ' su') d.appendVar(d.expand('ALTERNATIVE_${PN}-doc'), ' su.1') @@ -285,6 +289,7 @@ python populate_packages_prepend() { extra_depends='', prepend=True, allow_links=True) } +RDEPENDS_${PN}-bash-completion += "util-linux-lsblk" RDEPENDS_${PN}-ptest = "bash grep coreutils" do_compile_ptest() { diff --git a/import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux_2.28.1.bb b/import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux_2.28.1.bb deleted file mode 100644 index f232cb9f3..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux_2.28.1.bb +++ /dev/null @@ -1,31 +0,0 @@ -MAJOR_VERSION = "2.28" -require util-linux.inc - -# To support older hosts, we need to patch and/or revert -# some upstream changes. Only do this for native packages. -OLDHOST = "" -OLDHOST_class-native = "file://util-linux-native-qsort.patch" - -SRC_URI += "file://configure-sbindir.patch \ - file://runuser.pamd \ - file://runuser-l.pamd \ - ${OLDHOST} \ - file://ptest.patch \ - file://run-ptest \ - file://display_testname_for_subtest.patch \ - file://avoid_parallel_tests.patch \ - file://uuid-test-error-api.patch \ -" -SRC_URI[md5sum] = "e2d863efaf4fd330a42c5efe9f1b02b4" -SRC_URI[sha256sum] = "3ece4ea4a34ef786b68f5c415e848390424232abd1ee00f7ee5bddc30657b60f" - -CACHED_CONFIGUREVARS += "scanf_cv_alloc_modifier=ms" - -EXTRA_OECONF_class-native = "${SHARED_EXTRA_OECONF} \ - --disable-fallocate \ - --disable-use-tty-group \ -" -EXTRA_OECONF_class-nativesdk = "${SHARED_EXTRA_OECONF} \ - --disable-fallocate \ - --disable-use-tty-group \ -" diff --git a/import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux_2.29.1.bb b/import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux_2.29.1.bb new file mode 100644 index 000000000..1395b473f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/util-linux/util-linux_2.29.1.bb @@ -0,0 +1,31 @@ +MAJOR_VERSION = "2.29" +require util-linux.inc + +# To support older hosts, we need to patch and/or revert +# some upstream changes. Only do this for native packages. 
+OLDHOST = "" +OLDHOST_class-native = "file://util-linux-native-qsort.patch" + +SRC_URI += "file://configure-sbindir.patch \ + file://runuser.pamd \ + file://runuser-l.pamd \ + ${OLDHOST} \ + file://ptest.patch \ + file://run-ptest \ + file://display_testname_for_subtest.patch \ + file://avoid_parallel_tests.patch \ + file://uuid-test-error-api.patch \ +" +SRC_URI[md5sum] = "0cbb6d16ab9c5736e5649ef1264bee6e" +SRC_URI[sha256sum] = "0ce40600b934ec2fecfa6bfc4efe6982d051ba96c2832b05201347aec582f54f" + +CACHED_CONFIGUREVARS += "scanf_cv_alloc_modifier=ms" + +EXTRA_OECONF_class-native = "${SHARED_EXTRA_OECONF} \ + --disable-fallocate \ + --disable-use-tty-group \ +" +EXTRA_OECONF_class-nativesdk = "${SHARED_EXTRA_OECONF} \ + --disable-fallocate \ + --disable-use-tty-group \ +" diff --git a/import-layers/yocto-poky/meta/recipes-core/volatile-binds/volatile-binds.bb b/import-layers/yocto-poky/meta/recipes-core/volatile-binds/volatile-binds.bb index 130ab55f0..a6e325493 100644 --- a/import-layers/yocto-poky/meta/recipes-core/volatile-binds/volatile-binds.bb +++ b/import-layers/yocto-poky/meta/recipes-core/volatile-binds/volatile-binds.bb @@ -49,7 +49,7 @@ do_compile () { -e "s#@whatparent@#${spec%/*}#g; s#@whereparent@#${mountpoint%/*}#g" \ volatile-binds.service.in >$servicefile done < +Upstream-Status: Pending +--- +diff -uNr a/Makefile.in b/Makefile.in +--- a/Makefile.in 2013-06-10 13:48:14.321959162 +0200 ++++ b/Makefile.in 2013-06-10 13:49:36.686476448 +0200 +@@ -83,6 +83,9 @@ + test: all teststatic testshared + + teststatic: static ++ @make runteststatic ++ ++runteststatic: + @TMPST=tmpst_$$; \ + if echo hello world | ./minigzip | ./minigzip -d && ./example $$TMPST ; then \ + echo ' *** zlib test OK ***'; \ +@@ -92,6 +95,9 @@ + rm -f $$TMPST + + testshared: shared ++ @make runtestshared ++ ++runtestshared: + @LD_LIBRARY_PATH=`pwd`:$(LD_LIBRARY_PATH) ; export LD_LIBRARY_PATH; \ + LD_LIBRARYN32_PATH=`pwd`:$(LD_LIBRARYN32_PATH) ; export LD_LIBRARYN32_PATH; \ + DYLD_LIBRARY_PATH=`pwd`:$(DYLD_LIBRARY_PATH) ; export DYLD_LIBRARY_PATH; \ +@@ -105,6 +111,9 @@ + rm -f $$TMPSH + + test64: all64 ++ @make runtestall64 ++ ++runtestall64: + @TMP64=tmp64_$$; \ + if echo hello world | ./minigzip64 | ./minigzip64 -d && ./example64 $$TMP64; then \ + echo ' *** zlib 64-bit test OK ***'; \ diff --git a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/ldflags-tests.patch b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/ldflags-tests.patch new file mode 100644 index 000000000..19c40b745 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/ldflags-tests.patch @@ -0,0 +1,45 @@ +Obey LDFLAGS for tests + +Signed-off-by: Christopher Larson +Upstream-Status: Pending + +--- zlib-1.2.8.orig/Makefile.in ++++ zlib-1.2.8/Makefile.in +@@ -26,7 +26,7 @@ CFLAGS=-O + + SFLAGS=-O + LDFLAGS= +-TEST_LDFLAGS=-L. libz.a ++TEST_LDFLAGS=-L. $(LDFLAGS) + LDSHARED=$(CC) + CPP=$(CC) -E + +@@ -176,22 +176,22 @@ placebo $(SHAREDLIBV): $(PIC_OBJS) libz. + -@rmdir objs + + example$(EXE): example.o $(STATICLIB) +- $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS) ++ $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS) $(STATICLIB) + + minigzip$(EXE): minigzip.o $(STATICLIB) +- $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS) ++ $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS) $(STATICLIB) + + examplesh$(EXE): example.o $(SHAREDLIBV) +- $(CC) $(CFLAGS) -o $@ example.o -L. 
$(SHAREDLIBV) ++ $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS) $(SHAREDLIBV) + + minigzipsh$(EXE): minigzip.o $(SHAREDLIBV) +- $(CC) $(CFLAGS) -o $@ minigzip.o -L. $(SHAREDLIBV) ++ $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS) $(SHAREDLIBV) + + example64$(EXE): example64.o $(STATICLIB) +- $(CC) $(CFLAGS) -o $@ example64.o $(TEST_LDFLAGS) ++ $(CC) $(CFLAGS) -o $@ example64.o $(TEST_LDFLAGS) $(STATICLIB) + + minigzip64$(EXE): minigzip64.o $(STATICLIB) +- $(CC) $(CFLAGS) -o $@ minigzip64.o $(TEST_LDFLAGS) ++ $(CC) $(CFLAGS) -o $@ minigzip64.o $(TEST_LDFLAGS) $(STATICLIB) + + install-libs: $(LIBS) + -@if [ ! -d $(DESTDIR)$(exec_prefix) ]; then mkdir -p $(DESTDIR)$(exec_prefix); fi diff --git a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/remove.ldconfig.call.patch b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/remove.ldconfig.call.patch new file mode 100644 index 000000000..403b4825c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/remove.ldconfig.call.patch @@ -0,0 +1,20 @@ + +When /etc/ld.so.cache is writeable by user running bitbake then it creates invalid cache +(in my case libstdc++.so cannot be found after building zlib(-native) and I have to call +touch */libstdc++.so && /sbin/ldconfig to fix it. + +So remove ldconfig call from make install-libs + +Upstream-Status: Inappropriate [disable feature] + +diff -uNr zlib-1.2.6.orig/Makefile.in zlib-1.2.6/Makefile.in +--- zlib-1.2.6.orig/Makefile.in 2012-01-28 23:48:50.000000000 +0100 ++++ zlib-1.2.6/Makefile.in 2012-02-13 15:38:20.577700723 +0100 +@@ -199,7 +199,6 @@ + rm -f $(DESTDIR)$(sharedlibdir)/$(SHAREDLIB) $(DESTDIR)$(sharedlibdir)/$(SHAREDLIBM); \ + ln -s $(SHAREDLIBV) $(DESTDIR)$(sharedlibdir)/$(SHAREDLIB); \ + ln -s $(SHAREDLIBV) $(DESTDIR)$(sharedlibdir)/$(SHAREDLIBM); \ +- ($(LDCONFIG) || true) >/dev/null 2>&1; \ + fi + cp zlib.3 $(DESTDIR)$(man3dir) + chmod 644 $(DESTDIR)$(man3dir)/zlib.3 diff --git a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/run-ptest b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/run-ptest new file mode 100644 index 000000000..884d9dc69 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.11/run-ptest @@ -0,0 +1,2 @@ +#!/bin/sh +make -k runteststatic runtestshared | sed -r -e 's/^(\s+\*+ (.+?) test OK \*+)/\1\nPASS: \2/' -e 's/^(\s+\*+ (.+?) test FAILED \*+)/\1\nFAIL: \2/' diff --git a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/Makefile-runtests.patch b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/Makefile-runtests.patch deleted file mode 100644 index 61eea8238..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/Makefile-runtests.patch +++ /dev/null @@ -1,38 +0,0 @@ -Add 'ptest' target to Makefile, to run tests without checking dependencies. 
- -Signed-off-by: Anders Roxell -Upstream-Status: Pending ---- -diff -uNr a/Makefile.in b/Makefile.in ---- a/Makefile.in 2013-06-10 13:48:14.321959162 +0200 -+++ b/Makefile.in 2013-06-10 13:49:36.686476448 +0200 -@@ -83,6 +83,9 @@ - test: all teststatic testshared - - teststatic: static -+ @make runteststatic -+ -+runteststatic: - @TMPST=tmpst_$$; \ - if echo hello world | ./minigzip | ./minigzip -d && ./example $$TMPST ; then \ - echo ' *** zlib test OK ***'; \ -@@ -92,6 +95,9 @@ - rm -f $$TMPST - - testshared: shared -+ @make runtestshared -+ -+runtestshared: - @LD_LIBRARY_PATH=`pwd`:$(LD_LIBRARY_PATH) ; export LD_LIBRARY_PATH; \ - LD_LIBRARYN32_PATH=`pwd`:$(LD_LIBRARYN32_PATH) ; export LD_LIBRARYN32_PATH; \ - DYLD_LIBRARY_PATH=`pwd`:$(DYLD_LIBRARY_PATH) ; export DYLD_LIBRARY_PATH; \ -@@ -105,6 +111,9 @@ - rm -f $$TMPSH - - test64: all64 -+ @make runtestall64 -+ -+runtestall64: - @TMP64=tmp64_$$; \ - if echo hello world | ./minigzip64 | ./minigzip64 -d && ./example64 $$TMP64; then \ - echo ' *** zlib 64-bit test OK ***'; \ diff --git a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/ldflags-tests.patch b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/ldflags-tests.patch deleted file mode 100644 index 19c40b745..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/ldflags-tests.patch +++ /dev/null @@ -1,45 +0,0 @@ -Obey LDFLAGS for tests - -Signed-off-by: Christopher Larson -Upstream-Status: Pending - ---- zlib-1.2.8.orig/Makefile.in -+++ zlib-1.2.8/Makefile.in -@@ -26,7 +26,7 @@ CFLAGS=-O - - SFLAGS=-O - LDFLAGS= --TEST_LDFLAGS=-L. libz.a -+TEST_LDFLAGS=-L. $(LDFLAGS) - LDSHARED=$(CC) - CPP=$(CC) -E - -@@ -176,22 +176,22 @@ placebo $(SHAREDLIBV): $(PIC_OBJS) libz. - -@rmdir objs - - example$(EXE): example.o $(STATICLIB) -- $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS) -+ $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS) $(STATICLIB) - - minigzip$(EXE): minigzip.o $(STATICLIB) -- $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS) -+ $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS) $(STATICLIB) - - examplesh$(EXE): example.o $(SHAREDLIBV) -- $(CC) $(CFLAGS) -o $@ example.o -L. $(SHAREDLIBV) -+ $(CC) $(CFLAGS) -o $@ example.o $(TEST_LDFLAGS) $(SHAREDLIBV) - - minigzipsh$(EXE): minigzip.o $(SHAREDLIBV) -- $(CC) $(CFLAGS) -o $@ minigzip.o -L. $(SHAREDLIBV) -+ $(CC) $(CFLAGS) -o $@ minigzip.o $(TEST_LDFLAGS) $(SHAREDLIBV) - - example64$(EXE): example64.o $(STATICLIB) -- $(CC) $(CFLAGS) -o $@ example64.o $(TEST_LDFLAGS) -+ $(CC) $(CFLAGS) -o $@ example64.o $(TEST_LDFLAGS) $(STATICLIB) - - minigzip64$(EXE): minigzip64.o $(STATICLIB) -- $(CC) $(CFLAGS) -o $@ minigzip64.o $(TEST_LDFLAGS) -+ $(CC) $(CFLAGS) -o $@ minigzip64.o $(TEST_LDFLAGS) $(STATICLIB) - - install-libs: $(LIBS) - -@if [ ! -d $(DESTDIR)$(exec_prefix) ]; then mkdir -p $(DESTDIR)$(exec_prefix); fi diff --git a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/remove.ldconfig.call.patch b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/remove.ldconfig.call.patch deleted file mode 100644 index 403b4825c..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/remove.ldconfig.call.patch +++ /dev/null @@ -1,20 +0,0 @@ - -When /etc/ld.so.cache is writeable by user running bitbake then it creates invalid cache -(in my case libstdc++.so cannot be found after building zlib(-native) and I have to call -touch */libstdc++.so && /sbin/ldconfig to fix it. 
- -So remove ldconfig call from make install-libs - -Upstream-Status: Inappropriate [disable feature] - -diff -uNr zlib-1.2.6.orig/Makefile.in zlib-1.2.6/Makefile.in ---- zlib-1.2.6.orig/Makefile.in 2012-01-28 23:48:50.000000000 +0100 -+++ zlib-1.2.6/Makefile.in 2012-02-13 15:38:20.577700723 +0100 -@@ -199,7 +199,6 @@ - rm -f $(DESTDIR)$(sharedlibdir)/$(SHAREDLIB) $(DESTDIR)$(sharedlibdir)/$(SHAREDLIBM); \ - ln -s $(SHAREDLIBV) $(DESTDIR)$(sharedlibdir)/$(SHAREDLIB); \ - ln -s $(SHAREDLIBV) $(DESTDIR)$(sharedlibdir)/$(SHAREDLIBM); \ -- ($(LDCONFIG) || true) >/dev/null 2>&1; \ - fi - cp zlib.3 $(DESTDIR)$(man3dir) - chmod 644 $(DESTDIR)$(man3dir)/zlib.3 diff --git a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/run-ptest b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/run-ptest deleted file mode 100644 index 884d9dc69..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib-1.2.8/run-ptest +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -make -k runteststatic runtestshared | sed -r -e 's/^(\s+\*+ (.+?) test OK \*+)/\1\nPASS: \2/' -e 's/^(\s+\*+ (.+?) test FAILED \*+)/\1\nFAIL: \2/' diff --git a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib_1.2.11.bb b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib_1.2.11.bb new file mode 100644 index 000000000..ba216f679 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib_1.2.11.bb @@ -0,0 +1,62 @@ +SUMMARY = "Zlib Compression Library" +DESCRIPTION = "Zlib is a general-purpose, patent-free, lossless data compression \ +library which is used by many different programs." +HOMEPAGE = "http://zlib.net/" +SECTION = "libs" +LICENSE = "Zlib" +LIC_FILES_CHKSUM = "file://zlib.h;beginline=6;endline=23;md5=5377232268e952e9ef63bc555f7aa6c0" + +SRC_URI = "${SOURCEFORGE_MIRROR}/libpng/${BPN}/${PV}/${BPN}-${PV}.tar.xz \ + file://remove.ldconfig.call.patch \ + file://Makefile-runtests.patch \ + file://ldflags-tests.patch \ + file://run-ptest \ + " +UPSTREAM_CHECK_URI = "http://zlib.net/" + +SRC_URI[md5sum] = "85adef240c5f370b308da8c938951a68" +SRC_URI[sha256sum] = "4ff941449631ace0d4d203e3483be9dbc9da454084111f97ea0a2114e19bf066" + +CFLAGS += "-D_REENTRANT" + +RDEPENDS_${PN}-ptest += "make" + +inherit ptest + +do_configure() { + uname=GNU ./configure --prefix=${prefix} --shared --libdir=${libdir} +} + +do_compile() { + oe_runmake shared +} + +do_compile_ptest() { + oe_runmake test +} + +do_install() { + oe_runmake DESTDIR=${D} install +} + +do_install_ptest() { + install ${B}/Makefile ${D}${PTEST_PATH} + install ${B}/example ${D}${PTEST_PATH} + install ${B}/minigzip ${D}${PTEST_PATH} + install ${B}/examplesh ${D}${PTEST_PATH} + install ${B}/minigzipsh ${D}${PTEST_PATH} +} + +# Move zlib shared libraries for target builds to $base_libdir so the library +# can be used in early boot before $prefix is mounted. 
+do_install_append_class-target() { + if [ ${base_libdir} != ${libdir} ] + then + mkdir -p ${D}/${base_libdir} + mv ${D}/${libdir}/libz.so.* ${D}/${base_libdir} + libname=`readlink ${D}/${libdir}/libz.so` + ln -sf ${@oe.path.relative("${libdir}", "${base_libdir}")}/$libname ${D}${libdir}/libz.so + fi +} + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib_1.2.8.bb b/import-layers/yocto-poky/meta/recipes-core/zlib/zlib_1.2.8.bb deleted file mode 100644 index 913c7033d..000000000 --- a/import-layers/yocto-poky/meta/recipes-core/zlib/zlib_1.2.8.bb +++ /dev/null @@ -1,59 +0,0 @@ -SUMMARY = "Zlib Compression Library" -DESCRIPTION = "Zlib is a general-purpose, patent-free, lossless data compression \ -library which is used by many different programs." -HOMEPAGE = "http://zlib.net/" -SECTION = "libs" -LICENSE = "Zlib" -LIC_FILES_CHKSUM = "file://zlib.h;beginline=4;endline=23;md5=fde612df1e5933c428b73844a0c494fd" - -SRC_URI = "${SOURCEFORGE_MIRROR}/libpng/${BPN}/${PV}/${BPN}-${PV}.tar.xz \ - file://remove.ldconfig.call.patch \ - file://Makefile-runtests.patch \ - file://ldflags-tests.patch \ - file://run-ptest \ - " - -SRC_URI[md5sum] = "28f1205d8dd2001f26fec1e8c2cebe37" -SRC_URI[sha256sum] = "831df043236df8e9a7667b9e3bb37e1fcb1220a0f163b6de2626774b9590d057" - -RDEPENDS_${PN}-ptest += "make" - -inherit ptest - -do_configure (){ - ./configure --prefix=${prefix} --shared --libdir=${libdir} -} - -do_compile (){ - oe_runmake -} - -do_compile_ptest() { - oe_runmake static shared -} - -do_install() { - oe_runmake DESTDIR=${D} install -} - -do_install_ptest() { - install ${B}/Makefile ${D}${PTEST_PATH} - install ${B}/example ${D}${PTEST_PATH} - install ${B}/minigzip ${D}${PTEST_PATH} - install ${B}/examplesh ${D}${PTEST_PATH} - install ${B}/minigzipsh ${D}${PTEST_PATH} -} - -# Move zlib shared libraries for target builds to $base_libdir so the library -# can be used in early boot before $prefix is mounted. 
-do_install_append_class-target() { - if [ ${base_libdir} != ${libdir} ] - then - mkdir -p ${D}/${base_libdir} - mv ${D}/${libdir}/libz.so.* ${D}/${base_libdir} - libname=`readlink ${D}/${libdir}/libz.so` - ln -sf ${@oe.path.relative("${libdir}", "${base_libdir}")}/$libname ${D}${libdir}/libz.so - fi -} - -BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/apt/apt-native.inc b/import-layers/yocto-poky/meta/recipes-devtools/apt/apt-native.inc index 9fa206efe..68f1b3ce2 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/apt/apt-native.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/apt/apt-native.inc @@ -17,14 +17,14 @@ python do_install () { } python do_install_config () { - indir = os.path.dirname(d.getVar('FILE', True)) + indir = os.path.dirname(d.getVar('FILE')) infile = open(oe.path.join(indir, 'files', 'apt.conf'), 'r') data = infile.read() infile.close() data = d.expand(data) - outdir = oe.path.join(d.getVar('D', True), d.getVar('sysconfdir', True), 'apt') + outdir = oe.path.join(d.getVar('D'), d.getVar('sysconfdir'), 'apt') if not os.path.exists(outdir): os.makedirs(outdir) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/apt/apt-package.inc b/import-layers/yocto-poky/meta/recipes-devtools/apt/apt-package.inc index a553aa21f..6a01f99c6 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/apt/apt-package.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/apt/apt-package.inc @@ -32,13 +32,19 @@ FILES_${PN} = "${bindir}/apt-cdrom ${bindir}/apt-get \ ${bindir}/apt-config ${bindir}/apt-cache \ ${libdir}/apt ${libdir}/libapt*.so.* \ ${localstatedir} ${sysconfdir} \ - ${libdir}/dpkg" + ${libdir}/dpkg \ + ${systemd_unitdir}/system \ + " FILES_${PN}-utils = "${bindir}/apt-sortpkgs ${bindir}/apt-extracttemplates" -FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages', True))} \ +FILES_${PN}-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-manpages'))} \ ${docdir}/apt" -FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages', True))}" +FILES_${PN}-utils-doc = "${@get_files_apt_doc(d, bb, d.getVar('apt-utils-manpages'))}" FILES_${PN}-dev = "${libdir}/libapt*.so ${includedir}" +inherit systemd + +SYSTEMD_SERVICE_${PN} = "apt-daily.timer" + do_install () { set -x install -d ${D}${bindir} @@ -75,4 +81,13 @@ do_install () { install -d ${D}${includedir}/apt-pkg/ install -m 0644 include/apt-pkg/*.h ${D}${includedir}/apt-pkg/ + + install -d ${D}${systemd_unitdir}/system/ + install -m 0644 ${S}/debian/apt.systemd.daily ${D}${libdir}/apt/ + install -m 0644 ${S}/debian/apt-daily.service ${D}${systemd_unitdir}/system/ + sed -i 's#/usr/lib/apt/#${libdir}/apt/#g' ${D}${systemd_unitdir}/system/apt-daily.service + install -m 0644 ${S}/debian/apt-daily.timer ${D}${systemd_unitdir}/system/ + install -d ${D}${sysconfdir}/cron.daily/ + install -m 0755 ${S}/debian/apt.apt-compat.cron.daily ${D}${sysconfdir}/cron.daily/ + sed -i 's#/usr/lib/apt/#${libdir}/apt/#g' ${D}${sysconfdir}/cron.daily/apt.apt-compat.cron.daily } diff --git a/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf.inc b/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf.inc index b4e33569a..f1b2dfca7 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf.inc @@ -27,13 +27,18 @@ RDEPENDS_${PN} = "m4 gnu-config \ perl-module-data-dumper \ " RDEPENDS_${PN}_class-native = "m4-native gnu-config-native" 
+RDEPENDS_${PN}_class-nativesdk = "nativesdk-m4 nativesdk-gnu-config" SRC_URI = "${GNU_MIRROR}/autoconf/autoconf-${PV}.tar.gz \ file://program_prefix.patch" inherit autotools texinfo -CACHED_CONFIGUREVARS += "ac_cv_path_PERL=${USRBINPATH}/perl" +PERL = "${USRBINPATH}/perl" +PERL_class-native = "/usr/bin/env perl" +PERL_class-nativesdk = "/usr/bin/env perl" + +CACHED_CONFIGUREVARS += "ac_cv_path_PERL='${PERL}'" do_configure() { oe_runconf diff --git a/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf/AC_HEADER_MAJOR-port-to-glibc-2.25.patch b/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf/AC_HEADER_MAJOR-port-to-glibc-2.25.patch new file mode 100644 index 000000000..fc37236bf --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf/AC_HEADER_MAJOR-port-to-glibc-2.25.patch @@ -0,0 +1,162 @@ +From e17a30e987d7ee695fb4294a82d987ec3dc9b974 Mon Sep 17 00:00:00 2001 +From: Eric Blake +Date: Wed, 14 Sep 2016 08:17:06 -0500 +Subject: [PATCH] AC_HEADER_MAJOR: port to glibc 2.25 + +glibc 2.25 is deprecating the namespace pollution of +injecting major(), minor(), and makedev() into the compilation +environment, with a warning that insists that users include + instead. However, because the expansion of +AC_HEADER_MAJOR didn't bother checking sys/sysmacros.h until +after probing whether sys/types.h pollutes the namespace, it was +not defining MAJOR_IN_SYSMACROS, with the result that code +compiled with -Werror chokes on the deprecation warnings because +it was not including sysmacros.h. + +In addition to fixing autoconf (which only benefits projects +that rebuild configure after this fix is released), we can also +give a hint to distros on how they can populate config.site with +a cache variable to force pre-existing configure scripts without +the updated macro to behave sanely in the presence of glibc 2.25 +(the documentation is especially useful since that cache variable +is no longer present in autoconf after this patch). + +Note that mingw lacks major/minor/makedev in any of its standard +headers; for that platform, the behavior of this macro is unchanged +(code using the recommended include formula will get a compile error +when trying to use major(), whether before or after this patch); but +for now, it is assumed that programs actually concerned with +creating devices are not worried about portability to mingw. If +desired, a later patch could tighten AC_HEADER_MAJOR to fail at +configure time if the macros are unavailable in any of the three +system headers, but that semantic change is not worth mixing into +this patch. + +* lib/autoconf/headers.m4 (AC_HEADER_MAJOR): Drop check for +major within sys/types.h; it interferes with the need to check +sysmacros.h first. +* doc/autoconf.texi (Particular Headers) : Expand +details on usage, and on workarounds for non-updated projects. + +Signed-off-by: Eric Blake +--- +Upstream-Status: Backport + + doc/autoconf.texi | 35 +++++++++++++++++++++++++++++++---- + lib/autoconf/headers.m4 | 30 ++++++++++++++---------------- + 2 files changed, 45 insertions(+), 20 deletions(-) + +Index: autoconf-2.69/doc/autoconf.texi +=================================================================== +--- autoconf-2.69.orig/doc/autoconf.texi ++++ autoconf-2.69/doc/autoconf.texi +@@ -15,7 +15,7 @@ + @c The ARG is an optional argument. To be used for macro arguments in + @c their documentation (@defmac). 
+ @macro ovar{varname} +-@r{[}@var{\varname\}@r{]}@c ++@r{[}@var{\varname\}@r{]} + @end macro + + @c @dvar(ARG, DEFAULT) +@@ -23,7 +23,7 @@ + @c The ARG is an optional argument, defaulting to DEFAULT. To be used + @c for macro arguments in their documentation (@defmac). + @macro dvar{varname, default} +-@r{[}@var{\varname\} = @samp{\default\}@r{]}@c ++@r{[}@var{\varname\} = @samp{\default\}@r{]} + @end macro + + @c Handling the indexes with Texinfo yields several different problems. +@@ -5926,10 +5926,37 @@ Also see @code{AC_STRUCT_DIRENT_D_INO} a + @cvindex MAJOR_IN_SYSMACROS + @hdrindex{sys/mkdev.h} + @hdrindex{sys/sysmacros.h} +-If @file{sys/types.h} does not define @code{major}, @code{minor}, and +-@code{makedev}, but @file{sys/mkdev.h} does, define +-@code{MAJOR_IN_MKDEV}; otherwise, if @file{sys/sysmacros.h} does, define +-@code{MAJOR_IN_SYSMACROS}. ++Detect the headers required to use @code{makedev}, @code{major}, and ++@code{minor}. These functions may be defined by @file{sys/mkdev.h}, ++@code{sys/sysmacros.h}, or @file{sys/types.h}. ++ ++@code{AC_HEADER_MAJOR} defines @code{MAJOR_IN_MKDEV} if they are in ++@file{sys/mkdev.h}, or @code{MAJOR_IN_SYSMACROS} if they are in ++@file{sys/sysmacros.h}. If neither macro is defined, they are either in ++@file{sys/types.h} or unavailable. ++ ++To properly use these functions, your code should contain something ++like: ++ ++@verbatim ++#include ++#ifdef MAJOR_IN_MKDEV ++# include ++#elif defined MAJOR_IN_SYSMACROS ++# include ++#endif ++@end verbatim ++ ++Note: Configure scripts built with Autoconf 2.69 or earlier will not ++detect a problem if @file{sys/types.h} contains definitions of ++@code{major}, @code{minor}, and/or @code{makedev} that trigger compiler ++warnings upon use. This is known to occur with GNU libc 2.25, where ++those definitions are being deprecated to reduce namespace pollution. ++If it is not practical to use Autoconf 2.70 to regenerate the configure ++script of affected software, you can work around the problem by setting ++@samp{ac_cv_header_sys_types_h_makedev=no}, as an argument to ++@command{configure} or as part of a @file{config.site} site default file ++(@pxref{Site Defaults}). + @end defmac + + @defmac AC_HEADER_RESOLV +Index: autoconf-2.69/lib/autoconf/headers.m4 +=================================================================== +--- autoconf-2.69.orig/lib/autoconf/headers.m4 ++++ autoconf-2.69/lib/autoconf/headers.m4 +@@ -502,31 +502,29 @@ fi + + # AC_HEADER_MAJOR + # --------------- ++# Thanks to glibc 2.25 deprecating macros in sys/types.h, coupled with ++# back-compat to autoconf 2.69, we need the following logic: ++# Check whether compiles. ++# If compiles, assume it provides major/minor/makedev. ++# Otherwise, if compiles, assume it provides the macros. ++# Otherwise, either the macros were provided by , or do ++# not exist on the platform. Code trying to use these three macros is ++# assumed to not care about platforms that lack the macros. 
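(For context, not part of the patch: a minimal sketch of the workaround this backport documents for configure scripts generated with Autoconf 2.69 or earlier when built against glibc 2.25, using the cache variable named in the texinfo hunk above.)

    # Pre-seed the cache so AC_HEADER_MAJOR no longer trusts sys/types.h and
    # falls through to probing sys/mkdev.h / sys/sysmacros.h; the same
    # assignment can also be placed in a config.site site-defaults file.
    ./configure ac_cv_header_sys_types_h_makedev=no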
+ AN_FUNCTION([major], [AC_HEADER_MAJOR]) + AN_FUNCTION([makedev], [AC_HEADER_MAJOR]) + AN_FUNCTION([minor], [AC_HEADER_MAJOR]) + AN_HEADER([sys/mkdev.h], [AC_HEADER_MAJOR]) + AC_DEFUN([AC_HEADER_MAJOR], +-[AC_CACHE_CHECK(whether sys/types.h defines makedev, +- ac_cv_header_sys_types_h_makedev, +-[AC_LINK_IFELSE([AC_LANG_PROGRAM([[@%:@include ]], +- [[return makedev(0, 0);]])], +- [ac_cv_header_sys_types_h_makedev=yes], +- [ac_cv_header_sys_types_h_makedev=no]) +-]) +- +-if test $ac_cv_header_sys_types_h_makedev = no; then ++[AC_CHECK_HEADERS_ONCE([sys/types.h]) + AC_CHECK_HEADER(sys/mkdev.h, + [AC_DEFINE(MAJOR_IN_MKDEV, 1, + [Define to 1 if `major', `minor', and `makedev' are + declared in .])]) +- +- if test $ac_cv_header_sys_mkdev_h = no; then +- AC_CHECK_HEADER(sys/sysmacros.h, +- [AC_DEFINE(MAJOR_IN_SYSMACROS, 1, +- [Define to 1 if `major', `minor', and `makedev' +- are declared in .])]) +- fi ++if test $ac_cv_header_sys_mkdev_h = no; then ++ AC_CHECK_HEADER(sys/sysmacros.h, ++ [AC_DEFINE(MAJOR_IN_SYSMACROS, 1, ++ [Define to 1 if `major', `minor', and `makedev' ++ are declared in .])]) + fi + ])# AC_HEADER_MAJOR + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf/autoconf-replace-w-option-in-shebangs-with-modern-use-warnings.patch b/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf/autoconf-replace-w-option-in-shebangs-with-modern-use-warnings.patch new file mode 100644 index 000000000..ae0e3825f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf/autoconf-replace-w-option-in-shebangs-with-modern-use-warnings.patch @@ -0,0 +1,120 @@ +From 236552ff5b9f1ebf666d8d0e9850007dcce03d26 Mon Sep 17 00:00:00 2001 +From: Serhii Popovych +Date: Wed, 10 Feb 2016 16:32:44 +0000 +Subject: [PATCH] perl: Replace -w option in shebangs with modern "use + warnings" + +In some builds we might provide ac_cv_path_PERL as /usr/bin/env perl +to use newer version of the perl from users PATH rather than +older from standard system path. + +However using /usr/bin/env perl -w from shebang line isn't +possible because it translates to something like +/usr/bin/env -w perl and env complains about illegal option. + +To address this we can remove -w option from perl shebang +line and add "use warnings" statement. + +Upstream-Status: Pending +Signed-off-by: Serhii Popovych +--- + bin/autom4te.in | 3 ++- + bin/autoreconf.in | 3 ++- + bin/autoscan.in | 3 ++- + bin/autoupdate.in | 3 ++- + bin/ifnames.in | 3 ++- + 5 files changed, 10 insertions(+), 5 deletions(-) + +diff --git a/bin/autom4te.in b/bin/autom4te.in +index 11773c9..a8f5e41 100644 +--- a/bin/autom4te.in ++++ b/bin/autom4te.in +@@ -1,4 +1,4 @@ +-#! @PERL@ -w ++#! @PERL@ + # -*- perl -*- + # @configure_input@ + +@@ -42,6 +42,7 @@ use Autom4te::General; + use Autom4te::XFile; + use File::Basename; + use strict; ++use warnings; + + # Data directory. + my $pkgdatadir = $ENV{'AC_MACRODIR'} || '@pkgdatadir@'; +diff --git a/bin/autoreconf.in b/bin/autoreconf.in +index e245db4..1a318cb 100644 +--- a/bin/autoreconf.in ++++ b/bin/autoreconf.in +@@ -1,4 +1,4 @@ +-#! @PERL@ -w ++#! @PERL@ + # -*- perl -*- + # @configure_input@ + +@@ -45,6 +45,7 @@ use Autom4te::XFile; + # Do not use Cwd::chdir, since it might hang. + use Cwd 'cwd'; + use strict; ++use warnings; + + ## ----------- ## + ## Variables. ## +diff --git a/bin/autoscan.in b/bin/autoscan.in +index a67c48d..b931249 100644 +--- a/bin/autoscan.in ++++ b/bin/autoscan.in +@@ -1,4 +1,4 @@ +-#! @PERL@ -w ++#! 
@PERL@ + # -*- perl -*- + # @configure_input@ + +@@ -43,6 +43,7 @@ use Autom4te::XFile; + use File::Basename; + use File::Find; + use strict; ++use warnings; + + use vars qw(@cfiles @makefiles @shfiles @subdirs %printed); + +diff --git a/bin/autoupdate.in b/bin/autoupdate.in +index 9737d49..92cb147 100644 +--- a/bin/autoupdate.in ++++ b/bin/autoupdate.in +@@ -1,4 +1,4 @@ +-#! @PERL@ -w ++#! @PERL@ + # -*- perl -*- + # @configure_input@ + +@@ -44,6 +44,7 @@ use Autom4te::General; + use Autom4te::XFile; + use File::Basename; + use strict; ++use warnings; + + # Lib files. + my $autom4te = $ENV{'AUTOM4TE'} || '@bindir@/@autom4te-name@'; +diff --git a/bin/ifnames.in b/bin/ifnames.in +index ba2cd05..74b0278 100644 +--- a/bin/ifnames.in ++++ b/bin/ifnames.in +@@ -1,4 +1,4 @@ +-#! @PERL@ -w ++#! @PERL@ + # -*- perl -*- + # @configure_input@ + +@@ -44,6 +44,7 @@ BEGIN + use Autom4te::General; + use Autom4te::XFile; + use Autom4te::FileUtils; ++use warnings; + + # $HELP + # ----- +-- +2.3.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf_2.69.bb b/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf_2.69.bb index aa1877a1f..8e67f4b82 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf_2.69.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/autoconf/autoconf_2.69.bb @@ -14,6 +14,8 @@ SRC_URI += "file://check-automake-cross-warning.patch \ file://autotest-automake-result-format.patch \ file://add_musl_config.patch \ file://performance.patch \ + file://AC_HEADER_MAJOR-port-to-glibc-2.25.patch \ + file://autoconf-replace-w-option-in-shebangs-with-modern-use-warnings.patch \ " SRC_URI[md5sum] = "82d05e03b93e45f5a39b828dc9c6c29b" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen-native_5.18.10.bb b/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen-native_5.18.10.bb deleted file mode 100644 index 274427c7e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen-native_5.18.10.bb +++ /dev/null @@ -1,36 +0,0 @@ -SUMMARY = "Automated text and program generation tool" -DESCRIPTION = "AutoGen is a tool designed to simplify the creation and\ - maintenance of programs that contain large amounts of repetitious text.\ - It is especially valuable in programs that have several blocks of text\ - that must be kept synchronized." 
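(For context, not part of the patch: a throwaway demonstration of why "-w" cannot remain on an env-style shebang line, which is what the shebang patches above and below address by switching to "use warnings".)

    printf '#!/usr/bin/env perl -w\nprint 42;\n' > /tmp/demo.pl
    chmod +x /tmp/demo.pl
    /tmp/demo.pl   # on Linux the kernel hands env the single argument "perl -w",
                   # so env fails instead of starting perl with warnings enabled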
-HOMEPAGE = "http://www.gnu.org/software/autogen/" -SECTION = "devel" -LICENSE = "GPLv3" -LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" - -SRC_URI = "${GNU_MIRROR}/autogen/rel${PV}/autogen-${PV}.tar.gz \ - file://increase-timeout-limit.patch \ - file://mk-tpl-config.sh-force-exit-value-to-be-0-in-subproc.patch \ - file://fix-script-err-when-processing-libguile.patch \ -" - -SRC_URI[md5sum] = "2d1b5226e1929d0680011df631289571" -SRC_URI[sha256sum] = "0b8681d9724c481d3b726b5a9e81d3d09dc7f307d1a801c76d0a30d8f843d20a" - -UPSTREAM_CHECK_URI = "http://ftp.gnu.org/gnu/autogen/" -UPSTREAM_CHECK_REGEX = "rel(?P\d+(\.\d+)+)/" - -DEPENDS = "guile-native libtool-native libxml2-native" - -inherit autotools texinfo native pkgconfig - -# autogen-native links against libguile which may have been relocated with sstate -# these environment variables ensure there isn't a relocation issue -export GUILE_LOAD_PATH = "${STAGING_DATADIR_NATIVE}/guile/2.0" -export GUILE_LOAD_COMPILED_PATH = "${STAGING_LIBDIR_NATIVE}/guile/2.0/ccache" - -do_install_append () { - create_wrapper ${D}/${bindir}/autogen \ - GUILE_LOAD_PATH=${STAGING_DATADIR_NATIVE}/guile/2.0 \ - GUILE_LOAD_COMPILED_PATH=${STAGING_LIBDIR_NATIVE}/guile/2.0/ccache -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen-native_5.18.12.bb b/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen-native_5.18.12.bb new file mode 100644 index 000000000..853477cf7 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen-native_5.18.12.bb @@ -0,0 +1,40 @@ +SUMMARY = "Automated text and program generation tool" +DESCRIPTION = "AutoGen is a tool designed to simplify the creation and\ + maintenance of programs that contain large amounts of repetitious text.\ + It is especially valuable in programs that have several blocks of text\ + that must be kept synchronized." 
+HOMEPAGE = "http://www.gnu.org/software/autogen/" +SECTION = "devel" +LICENSE = "GPLv3" +LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" + +SRC_URI = "${GNU_MIRROR}/autogen/rel${PV}/autogen-${PV}.tar.gz \ + file://increase-timeout-limit.patch \ + file://mk-tpl-config.sh-force-exit-value-to-be-0-in-subproc.patch \ + file://fix-script-err-when-processing-libguile.patch \ + file://0001-config-libopts.m4-regenerate-it-from-config-libopts..patch \ + file://0002-autoopts-mk-tpl-config.sh-fix-perl-path.patch \ +" + +SRC_URI[md5sum] = "551d15ccbf5b5fc5658da375d5003389" +SRC_URI[sha256sum] = "805c20182f3cb0ebf1571d3b01972851c56fb34348dfdc38799fd0ec3b2badbe" + +UPSTREAM_CHECK_URI = "http://ftp.gnu.org/gnu/autogen/" +UPSTREAM_CHECK_REGEX = "rel(?P\d+(\.\d+)+)/" + +DEPENDS = "guile-native libtool-native libxml2-native" + +inherit autotools texinfo native pkgconfig + +# autogen-native links against libguile which may have been relocated with sstate +# these environment variables ensure there isn't a relocation issue +export GUILE_LOAD_PATH = "${STAGING_DATADIR_NATIVE}/guile/2.0" +export GUILE_LOAD_COMPILED_PATH = "${STAGING_LIBDIR_NATIVE}/guile/2.0/ccache" + +export POSIX_SHELL = "/usr/bin/env sh" + +do_install_append () { + create_wrapper ${D}/${bindir}/autogen \ + GUILE_LOAD_PATH=${STAGING_DATADIR_NATIVE}/guile/2.0 \ + GUILE_LOAD_COMPILED_PATH=${STAGING_LIBDIR_NATIVE}/guile/2.0/ccache +} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen/0001-config-libopts.m4-regenerate-it-from-config-libopts..patch b/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen/0001-config-libopts.m4-regenerate-it-from-config-libopts..patch new file mode 100644 index 000000000..a14018e33 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen/0001-config-libopts.m4-regenerate-it-from-config-libopts..patch @@ -0,0 +1,39 @@ +From 45040e7d268329ebc40e6cb237c64a6637cfab5c Mon Sep 17 00:00:00 2001 +From: Robert Yang +Date: Mon, 13 Mar 2017 20:22:10 -0700 +Subject: [PATCH] config/libopts.m4: regenerate it from config/libopts.def + +It was out of date compared to config/libopts.def, so regenerate it via +"autogen config/libopts.def" command. 
+ +Upstream-Status: Pending + +Signed-off-by: Robert Yang +--- + config/libopts.m4 | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) + +diff --git a/config/libopts.m4 b/config/libopts.m4 +index c7ba4f3..51e6a39 100644 +--- a/config/libopts.m4 ++++ b/config/libopts.m4 +@@ -2,7 +2,7 @@ dnl -*- buffer-read-only: t -*- vi: set ro: + dnl + dnl DO NOT EDIT THIS FILE (libopts.m4) + dnl +-dnl It has been AutoGen-ed ++dnl It has been AutoGen-ed March 13, 2017 at 08:21:21 PM by AutoGen 5.18 + dnl From the definitions libopts.def + dnl and the template file conftest.tpl + dnl +@@ -114,6 +114,7 @@ AC_DEFUN([INVOKE_LIBOPTS_MACROS_FIRST],[ + AC_PROG_SED + [while : + do ++ test -x "$POSIX_SHELL" && break + POSIX_SHELL=`which bash` + test -x "$POSIX_SHELL" && break + POSIX_SHELL=`which dash` +-- +2.10.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen/0002-autoopts-mk-tpl-config.sh-fix-perl-path.patch b/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen/0002-autoopts-mk-tpl-config.sh-fix-perl-path.patch new file mode 100644 index 000000000..d5fe143ce --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/autogen/autogen/0002-autoopts-mk-tpl-config.sh-fix-perl-path.patch @@ -0,0 +1,32 @@ +From 9f69f3f5ef22bf1bcffb0e651efc260889cfaa46 Mon Sep 17 00:00:00 2001 +From: Robert Yang +Date: Mon, 13 Mar 2017 20:33:30 -0700 +Subject: [PATCH] autoopts/mk-tpl-config.sh: fix perl path + +Use "which perl" as shebang doesn't work when it is longer than +BINPRM_BUF_SIZE which is 128 usually. So use "/usr/bin/env perl" to +instead of. + +Upstream-Status: Pending + +Signed-off-by: Robert Yang +--- + autoopts/mk-tpl-config.sh | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/autoopts/mk-tpl-config.sh b/autoopts/mk-tpl-config.sh +index 093e808..8dfc6dd 100755 +--- a/autoopts/mk-tpl-config.sh ++++ b/autoopts/mk-tpl-config.sh +@@ -98,7 +98,7 @@ fix_scripts() { + st=`sed 1q $f` + + case "$st" in +- *perl ) echo '#!' `which perl` ++ *perl ) echo '#!/usr/bin/env perl' + sed 1d $f + ;; + +-- +2.10.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/automake/automake.inc b/import-layers/yocto-poky/meta/recipes-devtools/automake/automake.inc index 4f9b5f774..87cedc983 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/automake/automake.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/automake/automake.inc @@ -16,6 +16,6 @@ do_configure() { oe_runconf } -export AUTOMAKE = "${@bb.utils.which('automake', d.getVar('PATH', True))}" +export AUTOMAKE = "${@bb.utils.which('automake', d.getVar('PATH'))}" FILES_${PN} += "${datadir}/automake* ${datadir}/aclocal*" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/automake/automake/automake-replace-w-option-in-shebangs-with-modern-use-warnings.patch b/import-layers/yocto-poky/meta/recipes-devtools/automake/automake/automake-replace-w-option-in-shebangs-with-modern-use-warnings.patch new file mode 100644 index 000000000..64eb253a6 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/automake/automake/automake-replace-w-option-in-shebangs-with-modern-use-warnings.patch @@ -0,0 +1,99 @@ +From 41e06b7a354774913dcd2e32a35440e407843357 Mon Sep 17 00:00:00 2001 +From: Serhii Popovych +Date: Wed, 10 Feb 2016 17:07:32 +0000 +Subject: [PATCH] perl: Replace -w option in shebangs with modern "use + warnings" In some builds we might provide ac_cv_path_PERL as /usr/bin/env + perl to use newer version of the perl from users PATH rather than older from + standard system path. 
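(For context, not part of the patch: a quick way to see the shebang length problem that the mk-tpl-config.sh fix above works around, assuming the usual 128-byte BINPRM_BUF_SIZE limit it mentions.)

    printf '#!%s\n' "$(which perl)" | wc -c   # a deep sysroot perl path can push this past 128 bytes,
                                              # at which point the kernel truncates the interpreter path
    # "#!/usr/bin/env perl" stays short regardless of where the real perl binary lives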
+ +However using /usr/bin/env perl -w from shebang line isn't +possible because it translates to something like +/usr/bin/env -w perl and env complains about illegal option. + +To address this we can remove -w option from perl shebang +line and add "use warnings" statement. + +Upstream-Status: Pending +Signed-off-by: Serhii Popovych +--- + bin/aclocal.in | 3 ++- + bin/automake.in | 3 ++- + t/wrap/aclocal.in | 3 ++- + t/wrap/automake.in | 3 ++- + 4 files changed, 8 insertions(+), 4 deletions(-) + +diff --git a/bin/aclocal.in b/bin/aclocal.in +index 349f24a..50cb8d3 100644 +--- a/bin/aclocal.in ++++ b/bin/aclocal.in +@@ -1,4 +1,4 @@ +-#!@PERL@ -w ++#!@PERL@ + # -*- perl -*- + # @configure_input@ + +@@ -33,6 +33,7 @@ BEGIN + } + + use strict; ++use warnings; + + use Automake::Config; + use Automake::General; +diff --git a/bin/automake.in b/bin/automake.in +index eedc8bc..e0a01cf 100644 +--- a/bin/automake.in ++++ b/bin/automake.in +@@ -1,4 +1,4 @@ +-#!@PERL@ -w ++#!@PERL@ + # -*- perl -*- + # @configure_input@ + +@@ -28,6 +28,7 @@ eval 'case $# in 0) exec @PERL@ -S "$0";; *) exec @PERL@ -S "$0" "$@";; esac' + package Automake; + + use strict; ++use warnings; + + BEGIN + { +diff --git a/t/wrap/aclocal.in b/t/wrap/aclocal.in +index e64b546..9996899 100644 +--- a/t/wrap/aclocal.in ++++ b/t/wrap/aclocal.in +@@ -1,4 +1,4 @@ +-#!@PERL@ -w ++#!@PERL@ + # @configure_input@ + + # Copyright (C) 2012-2014 Free Software Foundation, Inc. +@@ -19,6 +19,7 @@ + BEGIN + { + use strict; ++ use warnings; + @Aclocal::perl_libdirs = ('@abs_top_srcdir@/lib'); + unshift @Aclocal::perl_libdirs, '@abs_top_builddir@/lib' + if '@srcdir@' ne '.'; +diff --git a/t/wrap/automake.in b/t/wrap/automake.in +index 8b943b1..be61226 100644 +--- a/t/wrap/automake.in ++++ b/t/wrap/automake.in +@@ -1,4 +1,4 @@ +-#!@PERL@ -w ++#!@PERL@ + # @configure_input@ + + # Copyright (C) 2012-2014 Free Software Foundation, Inc. 
+@@ -19,6 +19,7 @@ + BEGIN + { + use strict; ++ use warnings; + @Automake::perl_libdirs = ('@abs_top_srcdir@/lib'); + unshift @Automake::perl_libdirs, '@abs_top_builddir@/lib' + if '@srcdir@' ne '.'; +-- +2.3.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/automake/automake_1.15.bb b/import-layers/yocto-poky/meta/recipes-devtools/automake/automake_1.15.bb index 61ae3ba3e..902dd63db 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/automake/automake_1.15.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/automake/automake_1.15.bb @@ -17,18 +17,24 @@ RDEPENDS_${PN} += "\ perl-module-vars " RDEPENDS_${PN}_class-native = "autoconf-native hostperl-runtime-native" +RDEPENDS_${PN}_class-nativesdk = "nativesdk-autoconf" -SRC_URI += " file://python-libdir.patch \ +SRC_URI += "file://python-libdir.patch \ file://buildtest.patch \ file://performance.patch \ file://new_rt_path_for_test-driver.patch \ + file://automake-replace-w-option-in-shebangs-with-modern-use-warnings.patch \ file://0001-automake-port-to-Perl-5.22-and-later.patch \ " SRC_URI[md5sum] = "716946a105ca228ab545fc37a70df3a3" SRC_URI[sha256sum] = "7946e945a96e28152ba5a6beb0625ca715c6e32ac55f2e353ef54def0c8ed924" -CACHED_CONFIGUREVARS += "ac_cv_path_PERL=${USRBINPATH}/perl" +PERL = "${USRBINPATH}/perl" +PERL_class-native = "${USRBINPATH}/env perl" +PERL_class-nativesdk = "${USRBINPATH}/env perl" + +CACHED_CONFIGUREVARS += "ac_cv_path_PERL='${PERL}'" do_install_append () { install -d ${D}${datadir} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-2.27.inc b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-2.27.inc deleted file mode 100644 index 0936d974d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-2.27.inc +++ /dev/null @@ -1,53 +0,0 @@ -LIC_FILES_CHKSUM="\ - file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552\ - file://COPYING.LIB;md5=9f604d8a4f8e74f4f5140845a21b6674\ - file://COPYING3;md5=d32239bcb673463ab874e80d47fae504\ - file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6\ - file://gas/COPYING;md5=d32239bcb673463ab874e80d47fae504\ - file://include/COPYING;md5=59530bdf33659b29e73d4adb9f9f6552\ - file://include/COPYING3;md5=d32239bcb673463ab874e80d47fae504\ - file://libiberty/COPYING.LIB;md5=a916467b91076e631dd8edb7424769c7\ - file://bfd/COPYING;md5=d32239bcb673463ab874e80d47fae504\ - " - -def binutils_branch_version(d): - pvsplit = d.getVar('PV', True).split('.') - return pvsplit[0] + "_" + pvsplit[1] - -BINUPV = "${@binutils_branch_version(d)}" - -UPSTREAM_CHECK_GITTAGREGEX = "binutils-(?P\d+_(\d_?)*)" - -SRCREV = "5fe53ce37f9d9dd544ec8cc9e2863b68e1a61f4c" -SRC_URI = "\ - git://sourceware.org/git/binutils-gdb.git;branch=binutils-${BINUPV}-branch;protocol=git \ - file://0002-configure-widen-the-regexp-for-SH-architectures.patch \ - file://0003-Point-scripts-location-to-libdir.patch \ - file://0004-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch \ - file://0005-Explicitly-link-with-libm-on-uclibc.patch \ - file://0006-Use-libtool-2.4.patch \ - file://0007-Add-the-armv5e-architecture-to-binutils.patch \ - file://0008-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch \ - file://0009-warn-for-uses-of-system-directories-when-cross-linki.patch \ - file://0010-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch \ - file://0011-Change-default-emulation-for-mips64-linux.patch \ - file://0012-Add-support-for-Netlogic-XLP.patch \ - file://0013-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch \ - 
file://0014-libtool-remove-rpath.patch \ - file://0015-binutils-mips-gas-pic-relax-linkonce.diff \ - file://0015-Refine-.cfi_sections-check-to-only-consider-compact-.patch \ - file://0016-Fix-seg-fault-in-ARM-linker-when-trying-to-parse-a-b.patch \ - file://0017-Fix-the-generation-of-alignment-frags-in-code-sectio.patch \ - file://0001-ppc-apuinfo-for-spe-parsed-incorrectly.patch \ - file://CVE-2017-6965.patch \ - file://CVE-2017-6966.patch \ - file://CVE-2017-6969.patch \ - file://CVE-2017-6969_2.patch \ - file://CVE-2017-7209.patch \ - file://CVE-2017-7210.patch \ -" -S = "${WORKDIR}/git" - -do_configure_prepend () { - rm -rf ${S}/gdb ${S}/libdecnumber ${S}/readline ${S}/sim -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-2.28.inc b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-2.28.inc new file mode 100644 index 000000000..1784c52ff --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-2.28.inc @@ -0,0 +1,85 @@ +LIC_FILES_CHKSUM="\ + file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552\ + file://COPYING.LIB;md5=9f604d8a4f8e74f4f5140845a21b6674\ + file://COPYING3;md5=d32239bcb673463ab874e80d47fae504\ + file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6\ + file://gas/COPYING;md5=d32239bcb673463ab874e80d47fae504\ + file://include/COPYING;md5=59530bdf33659b29e73d4adb9f9f6552\ + file://include/COPYING3;md5=d32239bcb673463ab874e80d47fae504\ + file://libiberty/COPYING.LIB;md5=a916467b91076e631dd8edb7424769c7\ + file://bfd/COPYING;md5=d32239bcb673463ab874e80d47fae504\ + " + +def binutils_branch_version(d): + pvsplit = d.getVar('PV').split('.') + return pvsplit[0] + "_" + pvsplit[1] + +BINUPV = "${@binutils_branch_version(d)}" + +UPSTREAM_CHECK_GITTAGREGEX = "binutils-(?P\d+_(\d_?)*)" + +SRCREV = "354199c7692c1bed53a2a15f0e4d531457e95f17" +SRC_URI = "\ + git://sourceware.org/git/binutils-gdb.git;branch=binutils-${BINUPV}-branch;protocol=git \ + file://0003-gprof-add-uclibc-support-to-configure.patch \ + file://0004-Point-scripts-location-to-libdir.patch \ + file://0005-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch \ + file://0006-Explicitly-link-with-libm-on-uclibc.patch \ + file://0007-Use-libtool-2.4.patch \ + file://0008-Add-the-armv5e-architecture-to-binutils.patch \ + file://0009-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch \ + file://0010-warn-for-uses-of-system-directories-when-cross-linki.patch \ + file://0011-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch \ + file://0012-Change-default-emulation-for-mips64-linux.patch \ + file://0013-Add-support-for-Netlogic-XLP.patch \ + file://0014-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch \ + file://0015-sync-with-OE-libtool-changes.patch \ + file://0016-Detect-64-bit-MIPS-targets.patch \ + file://CVE-2017-6965.patch \ + file://CVE-2017-6966.patch \ + file://0017-bfd-Improve-lookup-of-file-line-information-for-erro.patch \ + file://0018-PR-21409-segfault-in-_bfd_dwarf2_find_nearest_line.patch \ + file://CVE-2017-6969.patch \ + file://CVE-2017-6969_2.patch \ + file://CVE-2017-7209.patch \ + file://CVE-2017-7210.patch \ + file://CVE-2017-7223.patch \ + file://CVE-2017-7614.patch \ + file://CVE-2017-8393.patch \ + file://CVE-2017-8394.patch \ + file://CVE-2017-8395.patch \ + file://CVE-2017-8396_8397.patch \ + file://CVE-2017-8398.patch \ + file://CVE-2017-8421.patch \ + file://CVE-2017-9038_9044.patch \ + file://CVE-2017-9039.patch \ + file://CVE-2017-9040_9042.patch \ + file://CVE-2017-9742.patch \ + 
file://CVE-2017-9744.patch \ + file://CVE-2017-9745.patch \ + file://CVE-2017-9746.patch \ + file://CVE-2017-9747.patch \ + file://CVE-2017-9748.patch \ + file://CVE-2017-9749.patch \ + file://CVE-2017-9750.patch \ + file://CVE-2017-9751.patch \ + file://CVE-2017-9752.patch \ + file://CVE-2017-9753.patch \ + file://CVE-2017-9755.patch \ + file://CVE-2017-9756.patch \ + file://CVE-2017-9954.patch \ + file://CVE-2017-9955_1.patch \ + file://CVE-2017-9955_2.patch \ + file://CVE-2017-9955_3.patch \ + file://CVE-2017-9955_4.patch \ + file://CVE-2017-9955_5.patch \ + file://CVE-2017-9955_6.patch \ + file://CVE-2017-9955_7.patch \ + file://CVE-2017-9955_8.patch \ + file://CVE-2017-9955_9.patch \ +" +S = "${WORKDIR}/git" + +do_configure_prepend () { + rm -rf ${S}/gdb ${S}/libdecnumber ${S}/readline ${S}/sim +} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian.inc b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian.inc index ae1464227..e98f31f9c 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian.inc @@ -6,12 +6,15 @@ BPN = "binutils" DEPENDS = "flex-native bison-native virtual/${HOST_PREFIX}gcc-crosssdk virtual/nativesdk-libc nativesdk-zlib nativesdk-gettext nativesdk-flex" EXTRA_OECONF += "--with-sysroot=${SDKPATH}/sysroots/${TUNE_PKGARCH}${TARGET_VENDOR}-${TARGET_OS} \ + --enable-poison-system-directories \ " # We have to point binutils at a sysroot but we don't need to rebuild if this changes # e.g. we switch between different machines with different tunes. EXTRA_OECONF[vardepsexclude] = "TUNE_PKGARCH" +LDGOLD_sdkmingw32 = "" + do_install () { autotools_do_install diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian_2.27.bb b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian_2.27.bb deleted file mode 100644 index 5dbaa0301..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian_2.27.bb +++ /dev/null @@ -1,3 +0,0 @@ -require binutils.inc -require binutils-${PV}.inc -require binutils-cross-canadian.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian_2.28.bb b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian_2.28.bb new file mode 100644 index 000000000..5dbaa0301 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross-canadian_2.28.bb @@ -0,0 +1,3 @@ +require binutils.inc +require binutils-${PV}.inc +require binutils-cross-canadian.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross.inc b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross.inc index fd3d80149..02ec89160 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross.inc @@ -4,12 +4,20 @@ PROVIDES = "virtual/${TARGET_PREFIX}binutils" PN = "binutils-cross-${TARGET_ARCH}" BPN = "binutils" +# Ignore how TARGET_ARCH is computed. +TARGET_ARCH[vardepvalue] = "${TARGET_ARCH}" + INHIBIT_DEFAULT_DEPS = "1" INHIBIT_AUTOTOOLS_DEPS = "1" +SRC_URI += "file://0002-binutils-cross-Do-not-generate-linker-script-directo.patch" + +# Specify lib-path else we use a load of search dirs which we don't use +# and mean the linker scripts have to be relocated. 
EXTRA_OECONF += "--with-sysroot=${STAGING_DIR_TARGET} \ --disable-install-libbfd \ --enable-poison-system-directories \ + --with-lib-path==${target_base_libdir}:=${target_libdir} \ " do_install () { oe_runmake 'DESTDIR=${D}' install diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross_2.27.bb b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross_2.27.bb deleted file mode 100644 index fbd1f7d25..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross_2.27.bb +++ /dev/null @@ -1,3 +0,0 @@ -require binutils.inc -require binutils-${PV}.inc -require binutils-cross.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross_2.28.bb b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross_2.28.bb new file mode 100644 index 000000000..fbd1f7d25 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-cross_2.28.bb @@ -0,0 +1,3 @@ +require binutils.inc +require binutils-${PV}.inc +require binutils-cross.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-crosssdk_2.27.bb b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-crosssdk_2.27.bb deleted file mode 100644 index 62c03d724..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-crosssdk_2.27.bb +++ /dev/null @@ -1,13 +0,0 @@ -require binutils-cross_${PV}.bb - -inherit crosssdk - -PN = "binutils-crosssdk-${SDK_SYS}" - -PROVIDES = "virtual/${TARGET_PREFIX}binutils-crosssdk" - -SRC_URI += "file://0001-Generate-relocatable-SDKs.patch" - -do_configure_prepend () { - sed -i 's#/usr/local/lib /lib /usr/lib#${SDKPATHNATIVE}/lib ${SDKPATHNATIVE}/usr/lib /usr/local/lib /lib /usr/lib#' ${S}/ld/configure.tgt -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-crosssdk_2.28.bb b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-crosssdk_2.28.bb new file mode 100644 index 000000000..37f4d6d2e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils-crosssdk_2.28.bb @@ -0,0 +1,13 @@ +require binutils-cross_${PV}.bb + +inherit crosssdk + +PN = "binutils-crosssdk-${SDK_SYS}" + +PROVIDES = "virtual/${TARGET_PREFIX}binutils-crosssdk" + +SRC_URI += "file://0001-binutils-crosssdk-Generate-relocatable-SDKs.patch" + +do_configure_prepend () { + sed -i 's#/usr/local/lib /lib /usr/lib#${SDKPATHNATIVE}/lib ${SDKPATHNATIVE}/usr/lib /usr/local/lib /lib /usr/lib#' ${S}/ld/configure.tgt +} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils.inc b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils.inc index 75d190f80..37813dd86 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils.inc @@ -59,9 +59,9 @@ USE_ALTERNATIVES_FOR = " \ " python do_package_prepend() { - make_alts = d.getVar("USE_ALTERNATIVES_FOR", True) or "" - prefix = d.getVar("TARGET_PREFIX", True) - bindir = d.getVar("bindir", True) + make_alts = d.getVar("USE_ALTERNATIVES_FOR") or "" + prefix = d.getVar("TARGET_PREFIX") + bindir = d.getVar("bindir") for alt in make_alts.split(): d.setVarFlag('ALTERNATIVE_TARGET', alt, bindir + "/" + prefix + alt) d.setVarFlag('ALTERNATIVE_LINK_NAME', alt, bindir + "/" + alt) @@ -78,7 +78,7 @@ EXTRA_OECONF = "--program-prefix=${TARGET_PREFIX} \ LDGOLD_class-native = "" LDGOLD_class-crosssdk = "" -LDGOLD ?= "${@bb.utils.contains('DISTRO_FEATURES', 
'ld-is-gold', '--enable-gold=default --enable-threads', '--enable-gold --enable-ld=default', d)}" +LDGOLD ?= "${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', '--enable-gold=default --enable-threads', '--enable-gold --enable-ld=default --enable-threads', d)}" # This is necessary due to a bug in the binutils Makefiles # EXTRA_OEMAKE = "configure-build-libiberty all" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-Generate-relocatable-SDKs.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-Generate-relocatable-SDKs.patch deleted file mode 100644 index cad704a72..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-Generate-relocatable-SDKs.patch +++ /dev/null @@ -1,62 +0,0 @@ -From 2dae6b137ce609fd008f2063034a4403d95484c5 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 2 Mar 2015 01:58:54 +0000 -Subject: [PATCH 01/13] Generate relocatable SDKs - -This patch will modify the ELF linker scripts so that the crosssdk -linker will generate binaries with a 4096 bytes PT_INTERP section. When the binaries -will be relocated, at SDK install time, the interpreter path can be easily -changed by the relocating script. - -Upstream-Status: Inappropriate [SDK specific] - -Signed-off-by: Laurentiu Palcu -Signed-off-by: Khem Raj ---- - ld/genscripts.sh | 3 +++ - ld/scripttempl/elf.sc | 4 ++-- - 2 files changed, 5 insertions(+), 2 deletions(-) - -diff --git a/ld/genscripts.sh b/ld/genscripts.sh -index eb262e7..1a3b8bc 100755 ---- a/ld/genscripts.sh -+++ b/ld/genscripts.sh -@@ -277,6 +277,7 @@ DATA_ALIGNMENT_u="${DATA_ALIGNMENT_u-${DATA_ALIGNMENT_r}}" - LD_FLAG=r - DATA_ALIGNMENT=${DATA_ALIGNMENT_r} - DEFAULT_DATA_ALIGNMENT="ALIGN(${SEGMENT_SIZE})" -+PARTIAL_LINKING=" " - ( echo "/* Script for ld -r: link without relocation */" - . ${CUSTOMIZER_SCRIPT} - . ${srcdir}/scripttempl/${SCRIPT_NAME}.sc -@@ -285,10 +286,12 @@ DEFAULT_DATA_ALIGNMENT="ALIGN(${SEGMENT_SIZE})" - LD_FLAG=u - DATA_ALIGNMENT=${DATA_ALIGNMENT_u} - CONSTRUCTING=" " -+PARTIAL_LINKING=" " - ( echo "/* Script for ld -Ur: link w/out relocation, do create constructors */" - . ${CUSTOMIZER_SCRIPT} - . ${srcdir}/scripttempl/${SCRIPT_NAME}.sc - ) | sed -e '/^ *$/d;s/[ ]*$//' > ldscripts/${EMULATION_NAME}.xu -+unset PARTIAL_LINKING - - LD_FLAG= - DATA_ALIGNMENT=${DATA_ALIGNMENT_} -diff --git a/ld/scripttempl/elf.sc b/ld/scripttempl/elf.sc -index f0c6439..223c55f 100644 ---- a/ld/scripttempl/elf.sc -+++ b/ld/scripttempl/elf.sc -@@ -136,8 +136,8 @@ if test -n "${COMMONPAGESIZE}"; then - DATA_SEGMENT_END=". = DATA_SEGMENT_END (.);" - DATA_SEGMENT_RELRO_END=". = DATA_SEGMENT_RELRO_END (${SEPARATE_GOTPLT-0}, .);" - fi --if test -z "${INITIAL_READONLY_SECTIONS}${CREATE_SHLIB}"; then -- INITIAL_READONLY_SECTIONS=".interp ${RELOCATING-0} : { *(.interp) }" -+if test -z "${INITIAL_READONLY_SECTIONS}${CREATE_SHLIB}${PARTIAL_LINKING}"; then -+ INITIAL_READONLY_SECTIONS=".interp ${RELOCATING-0} : { *(.interp); . 
= 0x1000; }" - fi - if test -z "$PLT"; then - IPLT=".iplt ${RELOCATING-0} : { *(.iplt) }" --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-binutils-crosssdk-Generate-relocatable-SDKs.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-binutils-crosssdk-Generate-relocatable-SDKs.patch new file mode 100644 index 000000000..8fb1b4ede --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-binutils-crosssdk-Generate-relocatable-SDKs.patch @@ -0,0 +1,62 @@ +From 689d011688b5ff9481d4367bef3dea7a7b2867fb Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 2 Mar 2015 01:58:54 +0000 +Subject: [PATCH 01/15] binutils-crosssdk: Generate relocatable SDKs + +This patch will modify the ELF linker scripts so that the crosssdk +linker will generate binaries with a 4096 bytes PT_INTERP section. When the binaries +will be relocated, at SDK install time, the interpreter path can be easily +changed by the relocating script. + +Upstream-Status: Inappropriate [SDK specific] + +Signed-off-by: Laurentiu Palcu +Signed-off-by: Khem Raj +--- + ld/genscripts.sh | 3 +++ + ld/scripttempl/elf.sc | 4 ++-- + 2 files changed, 5 insertions(+), 2 deletions(-) + +diff --git a/ld/genscripts.sh b/ld/genscripts.sh +index b6940d376d..a42c4d7a4b 100755 +--- a/ld/genscripts.sh ++++ b/ld/genscripts.sh +@@ -277,6 +277,7 @@ DATA_ALIGNMENT_u="${DATA_ALIGNMENT_u-${DATA_ALIGNMENT_r}}" + LD_FLAG=r + DATA_ALIGNMENT=${DATA_ALIGNMENT_r} + DEFAULT_DATA_ALIGNMENT="ALIGN(${SEGMENT_SIZE})" ++PARTIAL_LINKING=" " + ( echo "/* Script for ld -r: link without relocation */" + . ${CUSTOMIZER_SCRIPT} + . ${srcdir}/scripttempl/${SCRIPT_NAME}.sc +@@ -285,10 +286,12 @@ DEFAULT_DATA_ALIGNMENT="ALIGN(${SEGMENT_SIZE})" + LD_FLAG=u + DATA_ALIGNMENT=${DATA_ALIGNMENT_u} + CONSTRUCTING=" " ++PARTIAL_LINKING=" " + ( echo "/* Script for ld -Ur: link w/out relocation, do create constructors */" + . ${CUSTOMIZER_SCRIPT} + . ${srcdir}/scripttempl/${SCRIPT_NAME}.sc + ) | sed -e '/^ *$/d;s/[ ]*$//' > ldscripts/${EMULATION_NAME}.xu ++unset PARTIAL_LINKING + + LD_FLAG= + DATA_ALIGNMENT=${DATA_ALIGNMENT_} +diff --git a/ld/scripttempl/elf.sc b/ld/scripttempl/elf.sc +index e65f9a3ccf..d99d2c1d2a 100644 +--- a/ld/scripttempl/elf.sc ++++ b/ld/scripttempl/elf.sc +@@ -138,8 +138,8 @@ if test -z "$DATA_SEGMENT_ALIGN"; then + DATA_SEGMENT_RELRO_END=". = DATA_SEGMENT_RELRO_END (${SEPARATE_GOTPLT-0}, .);" + fi + fi +-if test -z "${INITIAL_READONLY_SECTIONS}${CREATE_SHLIB}"; then +- INITIAL_READONLY_SECTIONS=".interp ${RELOCATING-0} : { *(.interp) }" ++if test -z "${INITIAL_READONLY_SECTIONS}${CREATE_SHLIB}${PARTIAL_LINKING}"; then ++ INITIAL_READONLY_SECTIONS=".interp ${RELOCATING-0} : { *(.interp); . = 0x1000; }" + fi + if test -z "$PLT"; then + IPLT=".iplt ${RELOCATING-0} : { *(.iplt) }" +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-ppc-apuinfo-for-spe-parsed-incorrectly.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-ppc-apuinfo-for-spe-parsed-incorrectly.patch deleted file mode 100644 index d82a0b694..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0001-ppc-apuinfo-for-spe-parsed-incorrectly.patch +++ /dev/null @@ -1,37 +0,0 @@ -From 8941017bc0226b60ce306d5271df15820ce66a53 Mon Sep 17 00:00:00 2001 -From: Alan Modra -Date: Tue, 30 Aug 2016 20:57:32 +0930 -Subject: [PATCH] ppc apuinfo for spe parsed incorrectly -Organization: O.S. Systems Software LTDA. 
- -apuinfo saying SPE resulted in mach = bfd_mach_ppc_vle due to a -missing break. - - PR 20531 - * elf32-ppc.c (_bfd_elf_ppc_set_arch): Add missing "break". - - -Backport from : -https://sourceware.org/git/gitweb.cgi?p=binutils-gdb.git;a=commitdiff;h=8941017b - -Upstream-Status: Backport -Signed-off-by: Fabio Berton ---- - bfd/elf32-ppc.c | 1 + - 1 file changed, 1 insertion(+) - -diff --git a/bfd/elf32-ppc.c b/bfd/elf32-ppc.c -index 95ce1dc..e42ef1c 100644 ---- a/bfd/elf32-ppc.c -+++ b/bfd/elf32-ppc.c -@@ -2246,6 +2246,7 @@ _bfd_elf_ppc_set_arch (bfd *abfd) - case PPC_APUINFO_BRLOCK: - if (mach != bfd_mach_ppc_vle) - mach = bfd_mach_ppc_e500; -+ break; - - case PPC_APUINFO_VLE: - mach = bfd_mach_ppc_vle; --- -2.1.4 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0002-binutils-cross-Do-not-generate-linker-script-directo.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0002-binutils-cross-Do-not-generate-linker-script-directo.patch new file mode 100644 index 000000000..14299fd1a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0002-binutils-cross-Do-not-generate-linker-script-directo.patch @@ -0,0 +1,61 @@ +From 7c7de107b4b0a507d2aeca3e3a86d01cb4b51360 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 6 Mar 2017 23:37:05 -0800 +Subject: [PATCH 02/15] binutils-cross: Do not generate linker script + directories + +We don't place target libraries within ${exec_prefix}, we'd always place these +within the target sysroot within the standard library directories. Worse, the +append_to_lib_path code prefixes these paths with the sysroot which makes even +less sense. + +These directories therefore don't make sense in our case and mean we have to +relocate all the linker scripts if they're present. Dropping them +gives a reasonable performance improvement/simplification. + +Upstream-Status: Inappropriate + +RP 2017/01/30 + +Signed-off-by: Khem Raj +--- + ld/genscripts.sh | 23 ----------------------- + 1 file changed, 23 deletions(-) + +diff --git a/ld/genscripts.sh b/ld/genscripts.sh +index a42c4d7a4b..d727b4d07e 100755 +--- a/ld/genscripts.sh ++++ b/ld/genscripts.sh +@@ -189,29 +189,6 @@ append_to_lib_path() + fi + } + +-# Always search $(tooldir)/lib, aka /usr/local/TARGET/lib when native +-# except when LIBPATH=":". +-if [ "${LIB_PATH}" != ":" ] ; then +- libs= +- if [ "x${TOOL_LIB}" = "x" ] ; then +- if [ "x${NATIVE}" = "xyes" ] ; then +- libs="${exec_prefix}/${target_alias}/lib" +- fi +- else +- # For multilib'ed targets, ensure both ${target_alias}/lib${LIBPATH_SUFFIX} +- # and ${TOOL_LIB}/lib${LIBPATH_SUFFIX} are in the default search path, +- # because 64bit libraries may be in both places, depending on +- # cross-development setup method (e.g.: /usr/s390x-linux/lib64 +- # vs. 
/usr/s390-linux/lib64) +- case "${NATIVE}:${LIBPATH_SUFFIX}:${TOOL_LIB}" in +- :* | *::* | *:*:*${LIBPATH_SUFFIX}) ;; +- *) libs="${exec_prefix}/${target_alias}/lib${LIBPATH_SUFFIX}" ;; +- esac +- libs="${exec_prefix}/${TOOL_LIB}/lib ${libs}" +- fi +- append_to_lib_path ${libs} +-fi +- + if [ "x${LIB_PATH}" = "x" ] && [ "x${USE_LIBPATH}" = xyes ] ; then + libs=${NATIVE_LIB_DIRS} + if [ "x${NATIVE}" = "xyes" ] ; then +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0002-configure-widen-the-regexp-for-SH-architectures.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0002-configure-widen-the-regexp-for-SH-architectures.patch deleted file mode 100644 index 8d7cf00d4..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0002-configure-widen-the-regexp-for-SH-architectures.patch +++ /dev/null @@ -1,56 +0,0 @@ -From e4cd40722f597dff39c7214fab1742dd5bcc67eb Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 2 Mar 2015 01:07:33 +0000 -Subject: [PATCH 02/13] configure: widen the regexp for SH architectures - -gprof needs to know about uclibc - -Upstream-Status: Pending - -Signed-off-by: Khem Raj ---- - configure | 4 ++-- - gprof/configure | 5 +++++ - 2 files changed, 7 insertions(+), 2 deletions(-) - -diff --git a/configure b/configure -index ad3dfb7..2a74a77 100755 ---- a/configure -+++ b/configure -@@ -3444,7 +3444,7 @@ case "${target}" in - ;; - s390-*-* | s390x-*-*) - ;; -- sh-*-* | sh[34]*-*-*) -+ sh*-*-* | sh[34]*-*-*) - ;; - sh64-*-* | sh5*-*-*) - ;; -@@ -3946,7 +3946,7 @@ case "${target}" in - or1k*-*-*) - noconfigdirs="$noconfigdirs gdb" - ;; -- sh-*-* | sh64-*-*) -+ sh*-*-* | sh64-*-*) - case "${target}" in - sh*-*-elf) - ;; -diff --git a/gprof/configure b/gprof/configure -index bf3ecaa..626e1c7 100755 ---- a/gprof/configure -+++ b/gprof/configure -@@ -5873,6 +5873,11 @@ linux* | k*bsd*-gnu | kopensolaris*-gnu) - lt_cv_deplibs_check_method=pass_all - ;; - -+linux-uclibc*) -+ lt_cv_deplibs_check_method=pass_all -+ lt_cv_file_magic_test_file=`echo /lib/libuClibc-*.so` -+ ;; -+ - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0003-Point-scripts-location-to-libdir.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0003-Point-scripts-location-to-libdir.patch deleted file mode 100644 index acc198b3e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0003-Point-scripts-location-to-libdir.patch +++ /dev/null @@ -1,42 +0,0 @@ -From 93c49f6197bd59b2abbf04b3296bf10029b80f48 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 2 Mar 2015 01:09:58 +0000 -Subject: [PATCH 03/13] Point scripts location to libdir - -Upstream-Status: Inappropriate [debian patch] - -Signed-off-by: Khem Raj ---- - ld/Makefile.am | 2 +- - ld/Makefile.in | 2 +- - 2 files changed, 2 insertions(+), 2 deletions(-) - -diff --git a/ld/Makefile.am b/ld/Makefile.am -index 0598923..b478ba7 100644 ---- a/ld/Makefile.am -+++ b/ld/Makefile.am -@@ -57,7 +57,7 @@ endif - # We put the scripts in the directory $(scriptdir)/ldscripts. - # We can't put the scripts in $(datadir) because the SEARCH_DIR - # directives need to be different for native and cross linkers. 
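(For context, not part of the patch: the genscripts.sh change above and the --with-lib-path option added in binutils-cross.inc both control which SEARCH_DIR entries get baked into the generated linker scripts; a quick way to inspect the result in a built cross binutils, assuming its ldscripts directory is at hand.)

    grep -h SEARCH_DIR ldscripts/*.x | sort -u   # should list only the target library dirs,
                                                 # not ${exec_prefix}/${target_alias}/lib style paths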
--scriptdir = $(tooldir)/lib -+scriptdir = $(libdir) - - EMUL = @EMUL@ - EMULATION_OFILES = @EMULATION_OFILES@ -diff --git a/ld/Makefile.in b/ld/Makefile.in -index 7c78198..5eb67e0 100644 ---- a/ld/Makefile.in -+++ b/ld/Makefile.in -@@ -451,7 +451,7 @@ AM_CFLAGS = $(WARN_CFLAGS) $(ELF_CLFAGS) - # We put the scripts in the directory $(scriptdir)/ldscripts. - # We can't put the scripts in $(datadir) because the SEARCH_DIR - # directives need to be different for native and cross linkers. --scriptdir = $(tooldir)/lib -+scriptdir = $(libdir) - BASEDIR = $(srcdir)/.. - BFDDIR = $(BASEDIR)/bfd - INCDIR = $(BASEDIR)/include --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0003-gprof-add-uclibc-support-to-configure.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0003-gprof-add-uclibc-support-to-configure.patch new file mode 100644 index 000000000..eddb42b4e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0003-gprof-add-uclibc-support-to-configure.patch @@ -0,0 +1,31 @@ +From 7893d2b24d0303bda3a0049846489619ffd1387b Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 2 Mar 2015 01:07:33 +0000 +Subject: [PATCH 03/15] gprof: add uclibc support to configure + +Upstream-Status: Pending + +Signed-off-by: Khem Raj +--- + gprof/configure | 5 +++++ + 1 file changed, 5 insertions(+) + +diff --git a/gprof/configure b/gprof/configure +index 9e6b8f3525..38a4c0b0e5 100755 +--- a/gprof/configure ++++ b/gprof/configure +@@ -5874,6 +5874,11 @@ linux* | k*bsd*-gnu | kopensolaris*-gnu) + lt_cv_deplibs_check_method=pass_all + ;; + ++linux-uclibc*) ++ lt_cv_deplibs_check_method=pass_all ++ lt_cv_file_magic_test_file=`echo /lib/libuClibc-*.so` ++ ;; ++ + netbsd*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0004-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0004-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch deleted file mode 100644 index 28a17f386..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0004-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch +++ /dev/null @@ -1,41 +0,0 @@ -From 4ff83779de8621715c6c2a79cf7ed8d74ff2d7c5 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 2 Mar 2015 01:27:17 +0000 -Subject: [PATCH 04/13] Only generate an RPATH entry if LD_RUN_PATH is not - empty - -for cases where -rpath isn't specified. 
debian (#151024) - -Upstream-Status: Pending - -Signed-off-by: Chris Chimelis -Signed-off-by: Khem Raj ---- - ld/emultempl/elf32.em | 4 ++++ - 1 file changed, 4 insertions(+) - -diff --git a/ld/emultempl/elf32.em b/ld/emultempl/elf32.em -index 47fa549..f4c640e 100644 ---- a/ld/emultempl/elf32.em -+++ b/ld/emultempl/elf32.em -@@ -1244,6 +1244,8 @@ fragment <link.next) - if (bfd_get_flavour (abfd) == bfd_target_elf_flavour) --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0004-Point-scripts-location-to-libdir.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0004-Point-scripts-location-to-libdir.patch new file mode 100644 index 000000000..c6b9de799 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0004-Point-scripts-location-to-libdir.patch @@ -0,0 +1,42 @@ +From e34650c50574a8a39d694567ed607a63006b6f99 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 2 Mar 2015 01:09:58 +0000 +Subject: [PATCH 04/15] Point scripts location to libdir + +Upstream-Status: Inappropriate [debian patch] + +Signed-off-by: Khem Raj +--- + ld/Makefile.am | 2 +- + ld/Makefile.in | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/ld/Makefile.am b/ld/Makefile.am +index 15beaa7021..bbf9c671d8 100644 +--- a/ld/Makefile.am ++++ b/ld/Makefile.am +@@ -57,7 +57,7 @@ endif + # We put the scripts in the directory $(scriptdir)/ldscripts. + # We can't put the scripts in $(datadir) because the SEARCH_DIR + # directives need to be different for native and cross linkers. +-scriptdir = $(tooldir)/lib ++scriptdir = $(libdir) + + EMUL = @EMUL@ + EMULATION_OFILES = @EMULATION_OFILES@ +diff --git a/ld/Makefile.in b/ld/Makefile.in +index 042b690ed6..37e7b25e9a 100644 +--- a/ld/Makefile.in ++++ b/ld/Makefile.in +@@ -452,7 +452,7 @@ AM_CFLAGS = $(WARN_CFLAGS) $(ELF_CLFAGS) + # We put the scripts in the directory $(scriptdir)/ldscripts. + # We can't put the scripts in $(datadir) because the SEARCH_DIR + # directives need to be different for native and cross linkers. +-scriptdir = $(tooldir)/lib ++scriptdir = $(libdir) + BASEDIR = $(srcdir)/.. + BFDDIR = $(BASEDIR)/bfd + INCDIR = $(BASEDIR)/include +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0005-Explicitly-link-with-libm-on-uclibc.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0005-Explicitly-link-with-libm-on-uclibc.patch deleted file mode 100644 index 331f78221..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0005-Explicitly-link-with-libm-on-uclibc.patch +++ /dev/null @@ -1,52 +0,0 @@ -From 18a7ae169e8d55ace683357d1de6b07e4fdf797a Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 2 Mar 2015 01:32:49 +0000 -Subject: [PATCH 05/13] Explicitly link with libm on uclibc - -Description: - -We do not need to have the libtool patch anymore for binutils after -libtool has been updated upstream it include support for it. However -for building gas natively on uclibc systems we have to link it with --lm so that it picks up missing symbols. 
- -/local/build_area/BUILD/arm_v5t_le_uclibc/binutils-2.17.50/objdir/libiberty/pic/libiberty.a(floatformat.o): -In function `floatformat_from_double': -floatformat.c:(.text+0x1ec): undefined reference to `frexp' -floatformat.c:(.text+0x2f8): undefined reference to `ldexp' -/local/build_area/BUILD/arm_v5t_le_uclibc/binutils-2.17.50/objdir/libiberty/pic/libiberty.a(floatformat.o): -In function `floatformat_to_double': -floatformat.c:(.text+0x38a): undefined reference to `ldexp' -floatformat.c:(.text+0x3d2): undefined reference to `ldexp' -floatformat.c:(.text+0x43e): undefined reference to `ldexp' -floatformat.c:(.text+0x4e2): undefined reference to `ldexp' -collect2: ld returned 1 exit status -make[4]: *** [as-new] Error 1 - -Upstream-Status: Pending - -Signed-off-by: Khem Raj ---- - gas/configure.tgt | 6 ++++++ - 1 file changed, 6 insertions(+) - -diff --git a/gas/configure.tgt b/gas/configure.tgt -index e2df659..1b9fd99 100644 ---- a/gas/configure.tgt -+++ b/gas/configure.tgt -@@ -487,6 +487,12 @@ case ${generic_target} in - *-*-netware) fmt=elf em=netware ;; - esac - -+case ${generic_target} in -+ arm-*-*uclibc*) -+ need_libm=yes -+ ;; -+esac -+ - case ${cpu_type} in - aarch64 | alpha | arm | i386 | ia64 | microblaze | mips | ns32k | or1k | or1knd | pdp11 | ppc | sparc | z80 | z8k) - bfd_gas=yes --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0005-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0005-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch new file mode 100644 index 000000000..726f7020d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0005-Only-generate-an-RPATH-entry-if-LD_RUN_PATH-is-not-e.patch @@ -0,0 +1,41 @@ +From 42292f5533bca904f230a8e03ceee1f84ef0c4ec Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 2 Mar 2015 01:27:17 +0000 +Subject: [PATCH 05/15] Only generate an RPATH entry if LD_RUN_PATH is not + empty + +for cases where -rpath isn't specified. debian (#151024) + +Upstream-Status: Pending + +Signed-off-by: Chris Chimelis +Signed-off-by: Khem Raj +--- + ld/emultempl/elf32.em | 4 ++++ + 1 file changed, 4 insertions(+) + +diff --git a/ld/emultempl/elf32.em b/ld/emultempl/elf32.em +index 84adaef6df..ab8c74257e 100644 +--- a/ld/emultempl/elf32.em ++++ b/ld/emultempl/elf32.em +@@ -1411,6 +1411,8 @@ fragment <link.next) + if (bfd_get_flavour (abfd) == bfd_target_elf_flavour) +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0006-Explicitly-link-with-libm-on-uclibc.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0006-Explicitly-link-with-libm-on-uclibc.patch new file mode 100644 index 000000000..9770ca7f0 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0006-Explicitly-link-with-libm-on-uclibc.patch @@ -0,0 +1,52 @@ +From 6a46bf151d7e53df8b5e7645a2d241967688368a Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 2 Mar 2015 01:32:49 +0000 +Subject: [PATCH 06/15] Explicitly link with libm on uclibc + +Description: + +We do not need to have the libtool patch anymore for binutils after +libtool has been updated upstream it include support for it. However +for building gas natively on uclibc systems we have to link it with +-lm so that it picks up missing symbols. 
+ +/local/build_area/BUILD/arm_v5t_le_uclibc/binutils-2.17.50/objdir/libiberty/pic/libiberty.a(floatformat.o): +In function `floatformat_from_double': +floatformat.c:(.text+0x1ec): undefined reference to `frexp' +floatformat.c:(.text+0x2f8): undefined reference to `ldexp' +/local/build_area/BUILD/arm_v5t_le_uclibc/binutils-2.17.50/objdir/libiberty/pic/libiberty.a(floatformat.o): +In function `floatformat_to_double': +floatformat.c:(.text+0x38a): undefined reference to `ldexp' +floatformat.c:(.text+0x3d2): undefined reference to `ldexp' +floatformat.c:(.text+0x43e): undefined reference to `ldexp' +floatformat.c:(.text+0x4e2): undefined reference to `ldexp' +collect2: ld returned 1 exit status +make[4]: *** [as-new] Error 1 + +Upstream-Status: Pending + +Signed-off-by: Khem Raj +--- + gas/configure.tgt | 6 ++++++ + 1 file changed, 6 insertions(+) + +diff --git a/gas/configure.tgt b/gas/configure.tgt +index 711d537e95..7cd2dc176a 100644 +--- a/gas/configure.tgt ++++ b/gas/configure.tgt +@@ -494,6 +494,12 @@ case ${generic_target} in + *-*-netware) fmt=elf em=netware ;; + esac + ++case ${generic_target} in ++ arm-*-*uclibc*) ++ need_libm=yes ++ ;; ++esac ++ + case ${cpu_type} in + aarch64 | alpha | arm | i386 | ia64 | microblaze | mips | ns32k | or1k | or1knd | pdp11 | ppc | riscv | sparc | z80 | z8k) + bfd_gas=yes +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0006-Use-libtool-2.4.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0006-Use-libtool-2.4.patch deleted file mode 100644 index f0fdf46ae..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0006-Use-libtool-2.4.patch +++ /dev/null @@ -1,21177 +0,0 @@ -From 71adcde3648db47f7fa155db12810591ffd3ae34 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Sun, 14 Feb 2016 17:04:07 +0000 -Subject: [PATCH 06/13] Use libtool 2.4 - -get libtool sysroot support - -Signed-off-by: Khem Raj ---- -Upstream-Status: Pending - - bfd/configure | 1318 +++++++++++++++++------ - bfd/configure.ac | 2 +- - binutils/configure | 1316 +++++++++++++++++------ - configure | 4 +- - gas/configure | 1314 +++++++++++++++++------ - gprof/configure | 1321 +++++++++++++++++------ - ld/configure | 1691 +++++++++++++++++++++-------- - libtool.m4 | 1080 +++++++++++++------ - ltmain.sh | 2925 +++++++++++++++++++++++++++++++++----------------- - ltoptions.m4 | 2 +- - ltversion.m4 | 12 +- - lt~obsolete.m4 | 2 +- - opcodes/configure | 1318 +++++++++++++++++------ - opcodes/configure.ac | 2 +- - zlib/configure | 1320 +++++++++++++++++------ - 15 files changed, 9930 insertions(+), 3697 deletions(-) - -diff --git a/bfd/configure b/bfd/configure -index a437569..575a257 100755 ---- a/bfd/configure -+++ b/bfd/configure -@@ -671,6 +671,9 @@ OTOOL - LIPO - NMEDIT - DSYMUTIL -+MANIFEST_TOOL -+ac_ct_AR -+DLLTOOL - OBJDUMP - LN_S - NM -@@ -784,6 +787,7 @@ enable_static - with_pic - enable_fast_install - with_gnu_ld -+with_libtool_sysroot - enable_libtool_lock - enable_plugins - enable_largefile -@@ -1460,6 +1464,8 @@ Optional Packages: - --with-pic try to use only PIC/non-PIC objects [default=use - both] - --with-gnu-ld assume the C compiler uses GNU ld [default=no] -+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified). 
- --with-mmap try using mmap for BFD input files if available - --with-separate-debug-dir=DIR - Look for global separate debug info in DIR -@@ -5392,8 +5398,8 @@ esac - - - --macro_version='2.2.7a' --macro_revision='1.3134' -+macro_version='2.4' -+macro_revision='1.3293' - - - -@@ -5433,7 +5439,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 - $as_echo_n "checking how to print strings... " >&6; } - # Test print first, because it will be a builtin if present. --if test "X`print -r -- -n 2>/dev/null`" = X-n && \ -+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ - test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='print -r --' - elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then -@@ -6119,8 +6125,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6; - # Try some XSI features - xsi_shell=no - ( _lt_dummy="a/b/c" -- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ -- = c,a/b,, \ -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ - && eval 'test $(( 1 + 1 )) -eq 2 \ - && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ - && xsi_shell=yes -@@ -6169,6 +6175,80 @@ esac - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 -+$as_echo_n "checking how to convert $build file names to $host format... " >&6; } -+if test "${lt_cv_to_host_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 -+ ;; -+ esac -+ ;; -+ *-*-cygwin* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin -+ ;; -+ esac -+ ;; -+ * ) # unhandled hosts (and "normal" native builds) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+esac -+ -+fi -+ -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 -+$as_echo "$lt_cv_to_host_file_cmd" >&6; } -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 -+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } -+if test "${lt_cv_to_tool_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ #assume ordinary cross tools, or native build. -+lt_cv_to_tool_file_cmd=func_convert_file_noop -+case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ esac -+ ;; -+esac -+ -+fi -+ -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 -+$as_echo "$lt_cv_to_tool_file_cmd" >&6; } -+ -+ -+ -+ -+ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 - $as_echo_n "checking for $LD option to reload object files... 
" >&6; } - if test "${lt_cv_ld_reload_flag+set}" = set; then : -@@ -6185,6 +6265,11 @@ case $reload_flag in - esac - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in -+ cygwin* | mingw* | pw32* | cegcc*) -+ if test "$GCC" != yes; then -+ reload_cmds=false -+ fi -+ ;; - darwin*) - if test "$GCC" = yes; then - reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' -@@ -6353,7 +6438,8 @@ mingw* | pw32*) - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' -+ # Keep this pattern in sync with the one in func_win32_libid. -+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' - lt_cv_file_magic_cmd='$OBJDUMP -f' - fi - ;; -@@ -6507,6 +6593,21 @@ esac - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 - $as_echo "$lt_cv_deplibs_check_method" >&6; } -+ -+file_magic_glob= -+want_nocaseglob=no -+if test "$build" = "$host"; then -+ case $host_os in -+ mingw* | pw32*) -+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then -+ want_nocaseglob=yes -+ else -+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` -+ fi -+ ;; -+ esac -+fi -+ - file_magic_cmd=$lt_cv_file_magic_cmd - deplibs_check_method=$lt_cv_deplibs_check_method - test -z "$deplibs_check_method" && deplibs_check_method=unknown -@@ -6522,9 +6623,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown - - - -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ - if test -n "$ac_tool_prefix"; then -- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. --set dummy ${ac_tool_prefix}ar; ac_word=$2 -+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. -+set dummy ${ac_tool_prefix}dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$DLLTOOL"; then -+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+DLLTOOL=$ac_cv_prog_DLLTOOL -+if test -n "$DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 -+$as_echo "$DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_DLLTOOL"; then -+ ac_ct_DLLTOOL=$DLLTOOL -+ # Extract the first word of "dlltool", so it can be a program name with args. -+set dummy dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_DLLTOOL"; then -+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. 
-+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_DLLTOOL="dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL -+if test -n "$ac_ct_DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 -+$as_echo "$ac_ct_DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_DLLTOOL" = x; then -+ DLLTOOL="false" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ DLLTOOL=$ac_ct_DLLTOOL -+ fi -+else -+ DLLTOOL="$ac_cv_prog_DLLTOOL" -+fi -+ -+test -z "$DLLTOOL" && DLLTOOL=dlltool -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 -+$as_echo_n "checking how to associate runtime and link libraries... " >&6; } -+if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_sharedlib_from_linklib_cmd='unknown' -+ -+case $host_os in -+cygwin* | mingw* | pw32* | cegcc*) -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL -+ case `$DLLTOOL --help 2>&1` in -+ *--identify-strict*) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -+ ;; -+ *) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback -+ ;; -+ esac -+ ;; -+*) -+ # fallback: assume linklib IS sharedlib -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" -+ ;; -+esac -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 -+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } -+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd -+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO -+ -+ -+ -+ -+ -+ -+ -+if test -n "$ac_tool_prefix"; then -+ for ac_prog in ar -+ do -+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -+set dummy $ac_tool_prefix$ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_AR+set}" = set; then : -@@ -6540,7 +6794,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_AR="${ac_tool_prefix}ar" -+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6560,11 +6814,15 @@ $as_echo "no" >&6; } - fi - - -+ test -n "$AR" && break -+ done - fi --if test -z "$ac_cv_prog_AR"; then -+if test -z "$AR"; then - ac_ct_AR=$AR -- # Extract the first word of "ar", so it can be a program name with args. --set dummy ar; ac_word=$2 -+ for ac_prog in ar -+do -+ # Extract the first word of "$ac_prog", so it can be a program name with args. 
-+set dummy $ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : -@@ -6580,7 +6838,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_ac_ct_AR="ar" -+ ac_cv_prog_ac_ct_AR="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6599,6 +6857,10 @@ else - $as_echo "no" >&6; } - fi - -+ -+ test -n "$ac_ct_AR" && break -+done -+ - if test "x$ac_ct_AR" = x; then - AR="false" - else -@@ -6610,16 +6872,72 @@ ac_tool_warned=yes ;; - esac - AR=$ac_ct_AR - fi --else -- AR="$ac_cv_prog_AR" - fi - --test -z "$AR" && AR=ar --test -z "$AR_FLAGS" && AR_FLAGS=cru -+: ${AR=ar} -+: ${AR_FLAGS=cru} -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 -+$as_echo_n "checking for archiver @FILE support... " >&6; } -+if test "${lt_cv_ar_at_file+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_ar_at_file=no -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+/* end confdefs.h. */ -+ -+int -+main () -+{ - -+ ; -+ return 0; -+} -+_ACEOF -+if ac_fn_c_try_compile "$LINENO"; then : -+ echo conftest.$ac_objext > conftest.lst -+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -eq 0; then -+ # Ensure the archiver fails upon bogus file names. -+ rm -f conftest.$ac_objext libconftest.a -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -ne 0; then -+ lt_cv_ar_at_file=@ -+ fi -+ fi -+ rm -f conftest.* libconftest.a - -+fi -+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 -+$as_echo "$lt_cv_ar_at_file" >&6; } - -+if test "x$lt_cv_ar_at_file" = xno; then -+ archiver_list_spec= -+else -+ archiver_list_spec=$lt_cv_ar_at_file -+fi - - - -@@ -6961,8 +7279,8 @@ esac - lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -6998,6 +7316,7 @@ for ac_symprfx in "" "_"; do - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - fi -+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" - - # Check to see that the pipe works correctly. - pipe_works=no -@@ -7039,6 +7358,18 @@ _LT_EOF - if $GREP ' nm_test_var$' "$nlist" >/dev/null; then - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext -+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime -+ relocations are performed -- see ld's documentation on pseudo-relocs. */ -+# define LT_DLSYM_CONST -+#elif defined(__osf__) -+/* This system does not cope well with relocations in const data. */ -+# define LT_DLSYM_CONST -+#else -+# define LT_DLSYM_CONST const -+#endif -+ - #ifdef __cplusplus - extern "C" { - #endif -@@ -7050,7 +7381,7 @@ _LT_EOF - cat <<_LT_EOF >> conftest.$ac_ext - - /* The mapping between symbol names and symbols. */ --const struct { -+LT_DLSYM_CONST struct { - const char *name; - void *address; - } -@@ -7076,8 +7407,8 @@ static const void *lt_preloaded_setup() { - _LT_EOF - # Now try linking the two files. 
- mv conftest.$ac_objext conftstm.$ac_objext -- lt_save_LIBS="$LIBS" -- lt_save_CFLAGS="$CFLAGS" -+ lt_globsym_save_LIBS=$LIBS -+ lt_globsym_save_CFLAGS=$CFLAGS - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 -@@ -7087,8 +7418,8 @@ _LT_EOF - test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi -- LIBS="$lt_save_LIBS" -- CFLAGS="$lt_save_CFLAGS" -+ LIBS=$lt_globsym_save_LIBS -+ CFLAGS=$lt_globsym_save_CFLAGS - else - echo "cannot find nm_test_func in $nlist" >&5 - fi -@@ -7125,6 +7456,16 @@ else - $as_echo "ok" >&6; } - fi - -+# Response file support. -+if test "$lt_cv_nm_interface" = "MS dumpbin"; then -+ nm_file_list_spec='@' -+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then -+ nm_file_list_spec='@' -+fi -+ -+ -+ -+ - - - -@@ -7146,6 +7487,45 @@ fi - - - -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 -+$as_echo_n "checking for sysroot... " >&6; } -+ -+# Check whether --with-libtool-sysroot was given. -+if test "${with_libtool_sysroot+set}" = set; then : -+ withval=$with_libtool_sysroot; -+else -+ with_libtool_sysroot=no -+fi -+ -+ -+lt_sysroot= -+case ${with_libtool_sysroot} in #( -+ yes) -+ if test "$GCC" = yes; then -+ lt_sysroot=`$CC --print-sysroot 2>/dev/null` -+ fi -+ ;; #( -+ /*) -+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` -+ ;; #( -+ no|'') -+ ;; #( -+ *) -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 -+$as_echo "${with_libtool_sysroot}" >&6; } -+ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 -+ ;; -+esac -+ -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 -+$as_echo "${lt_sysroot:-no}" >&6; } -+ -+ -+ -+ -+ - # Check whether --enable-libtool-lock was given. - if test "${enable_libtool_lock+set}" = set; then : - enableval=$enable_libtool_lock; -@@ -7352,6 +7732,123 @@ esac - - need_locks="$enable_libtool_lock" - -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. -+set dummy ${ac_tool_prefix}mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$MANIFEST_TOOL"; then -+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL -+if test -n "$MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 -+$as_echo "$MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then -+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL -+ # Extract the first word of "mt", so it can be a program name with args. 
-+set dummy mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_MANIFEST_TOOL"; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL -+if test -n "$ac_ct_MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 -+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_MANIFEST_TOOL" = x; then -+ MANIFEST_TOOL=":" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL -+ fi -+else -+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" -+fi -+ -+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 -+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } -+if test "${lt_cv_path_mainfest_tool+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_path_mainfest_tool=no -+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 -+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out -+ cat conftest.err >&5 -+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then -+ lt_cv_path_mainfest_tool=yes -+ fi -+ rm -f conftest* -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 -+$as_echo "$lt_cv_path_mainfest_tool" >&6; } -+if test "x$lt_cv_path_mainfest_tool" != xyes; then -+ MANIFEST_TOOL=: -+fi -+ -+ -+ -+ -+ - - case $host_os in - rhapsody* | darwin*) -@@ -7915,6 +8412,8 @@ _LT_EOF - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 - echo "$AR cru libconftest.a conftest.o" >&5 - $AR cru libconftest.a conftest.o 2>&5 -+ echo "$RANLIB libconftest.a" >&5 -+ $RANLIB libconftest.a 2>&5 - cat > conftest.c << _LT_EOF - int main() { return 0;} - _LT_EOF -@@ -8079,7 +8578,8 @@ fi - LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. --LIBTOOL='$(SHELL) $(top_builddir)/libtool' -+LIBTOOL='$(SHELL) $(top_builddir)' -+LIBTOOL="$LIBTOOL/${host_alias}-libtool" - - - -@@ -8168,7 +8668,7 @@ aix3*) - esac - - # Global variables: --ofile=libtool -+ofile=${host_alias}-libtool - can_build_shared=yes - - # All known linkers require a `.a' archive for static linking (except MSVC, -@@ -8466,8 +8966,6 @@ fi - lt_prog_compiler_pic= - lt_prog_compiler_static= - --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 --$as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } - - if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' -@@ -8633,6 +9131,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - lt_prog_compiler_pic='--shared' - lt_prog_compiler_static='--static' - ;; -+ nagfor*) -+ # NAG Fortran compiler -+ lt_prog_compiler_wl='-Wl,-Wl,,' -+ lt_prog_compiler_pic='-PIC' -+ lt_prog_compiler_static='-Bstatic' -+ ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -8695,7 +9199,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - case $cc_basename in -- f77* | f90* | f95*) -+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl='-Qoption ld ';; - *) - lt_prog_compiler_wl='-Wl,';; -@@ -8752,13 +9256,17 @@ case $host_os in - lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" - ;; - esac --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 --$as_echo "$lt_prog_compiler_pic" >&6; } -- -- -- -- - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -+$as_echo_n "checking for $compiler option to produce PIC... " >&6; } -+if test "${lt_cv_prog_compiler_pic+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 -+$as_echo "$lt_cv_prog_compiler_pic" >&6; } -+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic - - # - # Check to make sure the PIC flag actually works. -@@ -8819,6 +9327,11 @@ fi - - - -+ -+ -+ -+ -+ - # - # Check to make sure the static flag actually works. - # -@@ -9169,7 +9682,8 @@ _LT_EOF - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes -- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' -+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -@@ -9268,12 +9782,12 @@ _LT_EOF - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec= - hardcode_libdir_flag_spec_ld='-rpath $libdir' -- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' -+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -9287,8 
+9801,8 @@ _LT_EOF - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -9306,8 +9820,8 @@ _LT_EOF - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9353,8 +9867,8 @@ _LT_EOF - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9484,7 +9998,13 @@ _LT_EOF - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9497,22 +10017,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. 
-+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" -@@ -9524,7 +10051,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - else - # Determine the default libpath from the value encoded in an - # empty executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9537,22 +10070,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, -@@ -9597,20 +10137,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. -- hardcode_libdir_flag_spec=' ' -- allow_undefined_flag=unsupported -- # Tell ltmain to make .lib files, not .a files. -- libext=lib -- # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=".dll" -- # FIXME: Setting linknames here is a bad hack. -- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -- # The linker will automatically build a .lib file if we build a DLL. -- old_archive_from_new_cmds='true' -- # FIXME: Should let the user specify the lib program. 
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -- fix_srcfile_path='`cygpath -w "$srcfile"`' -- enable_shared_with_static_runtimes=yes -+ case $cc_basename in -+ cl*) -+ # Native MSVC -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ always_export_symbols=yes -+ file_list_spec='@' -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' -+ # The linker will not automatically build a static lib if we build a DLL. -+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' -+ enable_shared_with_static_runtimes=yes -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ # Don't use ranlib -+ old_postinstall_cmds='chmod 644 $oldlib' -+ postlink_cmds='lt_outputfile="@OUTPUT@"~ -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' -+ ;; -+ *) -+ # Assume MSVC wrapper -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -+ # The linker will automatically build a .lib file if we build a DLL. -+ old_archive_from_new_cmds='true' -+ # FIXME: Should let the user specify the lib program. -+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -+ enable_shared_with_static_runtimes=yes -+ ;; -+ esac - ;; - - darwin* | rhapsody*) -@@ -9671,7 +10254,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
- freebsd* | dragonfly*) -- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no -@@ -9679,7 +10262,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux9*) - if test "$GCC" = yes; then -- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' -+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -@@ -9695,7 +10278,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -9719,10 +10302,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else -@@ -9801,23 +10384,36 @@ fi - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -- save_LDFLAGS="$LDFLAGS" -- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ # This should be the same for all languages, so no per-tag cache variable. -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } -+if test "${lt_cv_irix_exported_symbol+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ --int foo(void) {} -+int foo (void) { return 0; } - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -- -+ lt_cv_irix_exported_symbol=yes -+else -+ lt_cv_irix_exported_symbol=no - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS="$save_LDFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -+$as_echo "$lt_cv_irix_exported_symbol" >&6; } -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -+ fi - else - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' -@@ -9902,7 +10498,7 @@ rm -f core conftest.err conftest.$ac_objext \ - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' -@@ -9921,9 +10517,9 @@ rm -f core conftest.err conftest.$ac_objext \ - no_undefined_flag=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' -- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) -@@ -10499,8 
+11095,9 @@ cygwin* | mingw* | pw32* | cegcc*) - need_version=no - need_lib_prefix=no - -- case $GCC,$host_os in -- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) -+ case $GCC,$cc_basename in -+ yes,*) -+ # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ -@@ -10533,13 +11130,71 @@ cygwin* | mingw* | pw32* | cegcc*) - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac -+ dynamic_linker='Win32 ld.exe' -+ ;; -+ -+ *,cl*) -+ # Native MSVC -+ libname_spec='$name' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' -+ -+ case $build_os in -+ mingw*) -+ sys_lib_search_path_spec= -+ lt_save_ifs=$IFS -+ IFS=';' -+ for lt_path in $LIB -+ do -+ IFS=$lt_save_ifs -+ # Let DOS variable expansion print the short 8.3 style file name. -+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` -+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" -+ done -+ IFS=$lt_save_ifs -+ # Convert to MSYS style. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` -+ ;; -+ cygwin*) -+ # Convert to unix form, then to dos form, then back to unix form -+ # but this time dos style (no spaces!) so that the unix form looks -+ # like /cygdrive/c/PROGRA~1:/cygdr... -+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` -+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` -+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ ;; -+ *) -+ sys_lib_search_path_spec="$LIB" -+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then -+ # It is most probably a Windows format PATH. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -+ else -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ fi -+ # FIXME: find the short name or the path components, as spaces are -+ # common. (e.g. "Program Files" -> "PROGRA~1") -+ ;; -+ esac -+ -+ # DLL is installed to $(libdir)/../bin by postinstall_cmds -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ -+ dldir=$destdir/`dirname \$dlpath`~ -+ test -d \$dldir || mkdir -p \$dldir~ -+ $install_prog $dir/$dlname \$dldir/$dlname' -+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ -+ dlpath=$dir/\$dldll~ -+ $RM \$dlpath' -+ shlibpath_overrides_runpath=yes -+ dynamic_linker='Win32 link.exe' - ;; - - *) -+ # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' -+ dynamic_linker='Win32 ld.exe' - ;; - esac -- dynamic_linker='Win32 ld.exe' - # FIXME: first we should search . 
and the directory the executable is in - shlibpath_var=PATH - ;; -@@ -10631,7 +11286,7 @@ haiku*) - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes -- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' -+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -@@ -11471,10 +12126,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -11577,10 +12232,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -14078,7 +14733,7 @@ SHARED_LDFLAGS= - if test "$enable_shared" = "yes"; then - x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'` - if test -n "$x"; then -- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty" -+ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a" - fi - - # More hacks to build DLLs on Windows. -@@ -16789,13 +17444,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' - lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' - lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' - lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' - reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' - reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' - OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' - deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' - file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' -+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' -+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' -+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' -+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' - AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' - AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' -+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' - STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' - RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' - old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' -@@ -16810,14 +17472,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de - lt_cv_sys_global_symbol_to_cdecl='`$ECHO 
"$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' -+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' -+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' - objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' - MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' --lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' -+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' - lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' - need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' -+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' - DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' - NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' - LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' -@@ -16850,12 +17515,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q - hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' - inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' - link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' --fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' - always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' - export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' - exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' - include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' - prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' -+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' - file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' - variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' - need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' -@@ -16910,8 +17575,13 @@ reload_flag \ - OBJDUMP \ - deplibs_check_method \ - file_magic_cmd \ -+file_magic_glob \ -+want_nocaseglob \ -+DLLTOOL \ -+sharedlib_from_linklib_cmd \ - AR \ - AR_FLAGS \ -+archiver_list_spec \ - STRIP \ - RANLIB \ - CC \ -@@ -16921,12 +17591,14 @@ lt_cv_sys_global_symbol_pipe \ - lt_cv_sys_global_symbol_to_cdecl \ - lt_cv_sys_global_symbol_to_c_name_address \ - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ -+nm_file_list_spec \ - lt_prog_compiler_no_builtin_flag \ --lt_prog_compiler_wl \ - lt_prog_compiler_pic \ -+lt_prog_compiler_wl \ - lt_prog_compiler_static \ - lt_cv_prog_compiler_c_o \ - need_locks \ -+MANIFEST_TOOL \ - DSYMUTIL \ - NMEDIT \ - LIPO \ -@@ -16942,7 +17614,6 @@ no_undefined_flag \ - 
hardcode_libdir_flag_spec \ - hardcode_libdir_flag_spec_ld \ - hardcode_libdir_separator \ --fix_srcfile_path \ - exclude_expsyms \ - include_expsyms \ - file_list_spec \ -@@ -16978,6 +17649,7 @@ module_cmds \ - module_expsym_cmds \ - export_symbols_cmds \ - prelink_cmds \ -+postlink_cmds \ - postinstall_cmds \ - postuninstall_cmds \ - finish_cmds \ -@@ -17757,7 +18429,8 @@ $as_echo X"$file" | - # NOTE: Changes made to this file will be lost: look at ltmain.sh. - # - # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, --# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. -+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, -+# Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is part of GNU Libtool. -@@ -17860,19 +18533,42 @@ SP2NL=$lt_lt_SP2NL - # turn newlines into spaces. - NL2SP=$lt_lt_NL2SP - -+# convert \$build file names to \$host format. -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+ -+# convert \$build files to toolchain format. -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+ - # An object symbol dumper. - OBJDUMP=$lt_OBJDUMP - - # Method to check whether dependent libraries are shared objects. - deplibs_check_method=$lt_deplibs_check_method - --# Command to use when deplibs_check_method == "file_magic". -+# Command to use when deplibs_check_method = "file_magic". - file_magic_cmd=$lt_file_magic_cmd - -+# How to find potential files when deplibs_check_method = "file_magic". -+file_magic_glob=$lt_file_magic_glob -+ -+# Find potential files using nocaseglob when deplibs_check_method = "file_magic". -+want_nocaseglob=$lt_want_nocaseglob -+ -+# DLL creation program. -+DLLTOOL=$lt_DLLTOOL -+ -+# Command to associate shared and link libraries. -+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd -+ - # The archiver. - AR=$lt_AR -+ -+# Flags to create an archive. - AR_FLAGS=$lt_AR_FLAGS - -+# How to feed a file listing to the archiver. -+archiver_list_spec=$lt_archiver_list_spec -+ - # A symbol stripping program. - STRIP=$lt_STRIP - -@@ -17902,6 +18598,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - # Transform the output of nm in a C name address pair when lib prefix is needed. - global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - -+# Specify filename containing input files for \$NM. -+nm_file_list_spec=$lt_nm_file_list_spec -+ -+# The root where to search for dependent libraries,and in which our libraries should be installed. -+lt_sysroot=$lt_sysroot -+ - # The name of the directory that contains temporary libtool files. - objdir=$objdir - -@@ -17911,6 +18613,9 @@ MAGIC_CMD=$MAGIC_CMD - # Must we lock files when doing compilation? - need_locks=$lt_need_locks - -+# Manifest tool. -+MANIFEST_TOOL=$lt_MANIFEST_TOOL -+ - # Tool to manipulate archived DWARF debug symbol files on Mac OS X. - DSYMUTIL=$lt_DSYMUTIL - -@@ -18025,12 +18730,12 @@ with_gcc=$GCC - # Compiler flag to turn off builtin functions. - no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag - --# How to pass a linker flag through the compiler. --wl=$lt_lt_prog_compiler_wl -- - # Additional compiler flags for building library objects. - pic_flag=$lt_lt_prog_compiler_pic - -+# How to pass a linker flag through the compiler. -+wl=$lt_lt_prog_compiler_wl -+ - # Compiler flag to prevent dynamic linking. - link_static_flag=$lt_lt_prog_compiler_static - -@@ -18117,9 +18822,6 @@ inherit_rpath=$inherit_rpath - # Whether libtool must link a program against all its dependency libraries. 
- link_all_deplibs=$link_all_deplibs - --# Fix the shell variable \$srcfile for the compiler. --fix_srcfile_path=$lt_fix_srcfile_path -- - # Set to "yes" if exported symbols are required. - always_export_symbols=$always_export_symbols - -@@ -18135,6 +18837,9 @@ include_expsyms=$lt_include_expsyms - # Commands necessary for linking programs (against libraries) with templates. - prelink_cmds=$lt_prelink_cmds - -+# Commands necessary for finishing linking programs. -+postlink_cmds=$lt_postlink_cmds -+ - # Specify filename containing input files. - file_list_spec=$lt_file_list_spec - -@@ -18167,210 +18872,169 @@ ltmain="$ac_aux_dir/ltmain.sh" - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? -- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- case $xsi_shell in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac --} -- --# func_basename file --func_basename () --{ -- func_basename_result="${1##*/}" --} -- --# func_dirname_and_basename file append nondir_replacement --# perform func_basename and func_dirname in a single function --# call: --# dirname: Compute the dirname of FILE. If nonempty, --# add APPEND to the result, otherwise set result --# to NONDIR_REPLACEMENT. --# value returned in "$func_dirname_result" --# basename: Compute filename of FILE. --# value retuned in "$func_basename_result" --# Implementation must be kept synchronized with func_dirname --# and func_basename. For efficiency, we do not delegate to --# those functions but instead duplicate the functionality here. --func_dirname_and_basename () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac -- func_basename_result="${1##*/}" --} -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --func_stripname () --{ -- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -- # positional parameters, so assign one to ordinary parameter first. -- func_stripname_result=${3} -- func_stripname_result=${func_stripname_result#"${1}"} -- func_stripname_result=${func_stripname_result%"${2}"} --} -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=${1%%=*} -- func_opt_split_arg=${1#*=} --} -- --# func_lo2o object --func_lo2o () --{ -- case ${1} in -- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -- *) func_lo2o_result=${1} ;; -- esac --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=${1%.*}.lo --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=$(( $* )) --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=${#1} --} -- --_LT_EOF -- ;; -- *) # Bourne compatible functions. -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. 
If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- # Extract subdirectory from the argument. -- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -- if test "X$func_dirname_result" = "X${1}"; then -- func_dirname_result="${3}" -- else -- func_dirname_result="$func_dirname_result${2}" -- fi --} -- --# func_basename file --func_basename () --{ -- func_basename_result=`$ECHO "${1}" | $SED "$basename"` --} -- -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --# func_strip_suffix prefix name --func_stripname () --{ -- case ${2} in -- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -- esac --} -- --# sed scripts: --my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' --my_sed_long_arg='1s/^-[^=]*=//' -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` -- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` --} -- --# func_lo2o object --func_lo2o () --{ -- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=`expr "$@"` --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` --} -- --_LT_EOF --esac -- --case $lt_shell_append in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1+=\$2" --} --_LT_EOF -- ;; -- *) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1=\$$1\$2" --} -- --_LT_EOF -- ;; -- esac -- -- -- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- mv -f "$cfgfile" "$ofile" || -+ sed '$q' "$ltmain" >> "$cfgfile" \ -+ || (rm -f "$cfgfile"; exit 1) -+ -+ if test x"$xsi_shell" = xyes; then -+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -+func_dirname ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_basename ()$/,/^} # func_basename /c\ -+func_basename ()\ -+{\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -+func_dirname_and_basename ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -+func_stripname ()\ -+{\ -+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -+\ # positional parameters, so assign one to ordinary parameter first.\ -+\ func_stripname_result=${3}\ -+\ func_stripname_result=${func_stripname_result#"${1}"}\ -+\ func_stripname_result=${func_stripname_result%"${2}"}\ -+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -+func_split_long_opt ()\ -+{\ -+\ func_split_long_opt_name=${1%%=*}\ -+\ func_split_long_opt_arg=${1#*=}\ -+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -+func_split_short_opt ()\ -+{\ -+\ func_split_short_opt_arg=${1#??}\ -+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -+func_lo2o ()\ -+{\ -+\ case ${1} in\ -+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -+\ *) func_lo2o_result=${1} ;;\ -+\ esac\ -+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_xform ()$/,/^} # func_xform /c\ -+func_xform ()\ -+{\ -+ func_xform_result=${1%.*}.lo\ -+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_arith ()$/,/^} # func_arith /c\ -+func_arith ()\ -+{\ -+ func_arith_result=$(( $* ))\ -+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_len ()$/,/^} # func_len /c\ -+func_len ()\ -+{\ -+ func_len_result=${#1}\ -+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ sed -e '/^func_append ()$/,/^} # func_append /c\ -+func_append ()\ -+{\ -+ eval "${1}+=\\${2}"\ -+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -+func_append_quoted ()\ -+{\ -+\ func_quote_for_eval "${2}"\ -+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -+fi -+ -+ -+ mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" - -diff --git a/bfd/configure.ac b/bfd/configure.ac -index 669cff7..5f57877 100644 ---- a/bfd/configure.ac -+++ b/bfd/configure.ac -@@ -253,7 +253,7 @@ changequote(,)dnl - x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'` - changequote([,])dnl - if test -n "$x"; then -- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty" -+ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a" - fi - - # More hacks to build DLLs on Windows. -diff --git a/binutils/configure b/binutils/configure -index 6cbf17c..6d20600 100755 ---- a/binutils/configure -+++ b/binutils/configure -@@ -658,8 +658,11 @@ OTOOL - LIPO - NMEDIT - DSYMUTIL -+MANIFEST_TOOL - RANLIB -+ac_ct_AR - AR -+DLLTOOL - OBJDUMP - LN_S - NM -@@ -771,6 +774,7 @@ enable_static - with_pic - enable_fast_install - with_gnu_ld -+with_libtool_sysroot - enable_libtool_lock - enable_plugins - enable_largefile -@@ -1443,6 +1447,8 @@ Optional Packages: - --with-pic try to use only PIC/non-PIC objects [default=use - both] - --with-gnu-ld assume the C compiler uses GNU ld [default=no] -+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified). 
- --with-system-zlib use installed libz - --with-gnu-ld assume the C compiler uses GNU ld default=no - --with-libiconv-prefix[=DIR] search for libiconv in DIR/include and DIR/lib -@@ -5159,8 +5165,8 @@ esac - - - --macro_version='2.2.7a' --macro_revision='1.3134' -+macro_version='2.4' -+macro_revision='1.3293' - - - -@@ -5200,7 +5206,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 - $as_echo_n "checking how to print strings... " >&6; } - # Test print first, because it will be a builtin if present. --if test "X`print -r -- -n 2>/dev/null`" = X-n && \ -+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ - test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='print -r --' - elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then -@@ -5886,8 +5892,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6; - # Try some XSI features - xsi_shell=no - ( _lt_dummy="a/b/c" -- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ -- = c,a/b,, \ -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ - && eval 'test $(( 1 + 1 )) -eq 2 \ - && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ - && xsi_shell=yes -@@ -5936,6 +5942,80 @@ esac - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 -+$as_echo_n "checking how to convert $build file names to $host format... " >&6; } -+if test "${lt_cv_to_host_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 -+ ;; -+ esac -+ ;; -+ *-*-cygwin* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin -+ ;; -+ esac -+ ;; -+ * ) # unhandled hosts (and "normal" native builds) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+esac -+ -+fi -+ -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 -+$as_echo "$lt_cv_to_host_file_cmd" >&6; } -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 -+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } -+if test "${lt_cv_to_tool_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ #assume ordinary cross tools, or native build. -+lt_cv_to_tool_file_cmd=func_convert_file_noop -+case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ esac -+ ;; -+esac -+ -+fi -+ -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 -+$as_echo "$lt_cv_to_tool_file_cmd" >&6; } -+ -+ -+ -+ -+ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 - $as_echo_n "checking for $LD option to reload object files... 
" >&6; } - if test "${lt_cv_ld_reload_flag+set}" = set; then : -@@ -5952,6 +6032,11 @@ case $reload_flag in - esac - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in -+ cygwin* | mingw* | pw32* | cegcc*) -+ if test "$GCC" != yes; then -+ reload_cmds=false -+ fi -+ ;; - darwin*) - if test "$GCC" = yes; then - reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' -@@ -6120,7 +6205,8 @@ mingw* | pw32*) - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' -+ # Keep this pattern in sync with the one in func_win32_libid. -+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' - lt_cv_file_magic_cmd='$OBJDUMP -f' - fi - ;; -@@ -6274,6 +6360,21 @@ esac - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 - $as_echo "$lt_cv_deplibs_check_method" >&6; } -+ -+file_magic_glob= -+want_nocaseglob=no -+if test "$build" = "$host"; then -+ case $host_os in -+ mingw* | pw32*) -+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then -+ want_nocaseglob=yes -+ else -+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` -+ fi -+ ;; -+ esac -+fi -+ - file_magic_cmd=$lt_cv_file_magic_cmd - deplibs_check_method=$lt_cv_deplibs_check_method - test -z "$deplibs_check_method" && deplibs_check_method=unknown -@@ -6289,9 +6390,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown - - - -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. -+set dummy ${ac_tool_prefix}dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$DLLTOOL"; then -+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+DLLTOOL=$ac_cv_prog_DLLTOOL -+if test -n "$DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 -+$as_echo "$DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_DLLTOOL"; then -+ ac_ct_DLLTOOL=$DLLTOOL -+ # Extract the first word of "dlltool", so it can be a program name with args. -+set dummy dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_DLLTOOL"; then -+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_DLLTOOL="dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL -+if test -n "$ac_ct_DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 -+$as_echo "$ac_ct_DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_DLLTOOL" = x; then -+ DLLTOOL="false" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ DLLTOOL=$ac_ct_DLLTOOL -+ fi -+else -+ DLLTOOL="$ac_cv_prog_DLLTOOL" -+fi -+ -+test -z "$DLLTOOL" && DLLTOOL=dlltool -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 -+$as_echo_n "checking how to associate runtime and link libraries... " >&6; } -+if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_sharedlib_from_linklib_cmd='unknown' -+ -+case $host_os in -+cygwin* | mingw* | pw32* | cegcc*) -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL -+ case `$DLLTOOL --help 2>&1` in -+ *--identify-strict*) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -+ ;; -+ *) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback -+ ;; -+ esac -+ ;; -+*) -+ # fallback: assume linklib IS sharedlib -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" -+ ;; -+esac -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 -+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } -+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd -+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO -+ -+ -+ -+ -+ -+ -+ - if test -n "$ac_tool_prefix"; then -- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. --set dummy ${ac_tool_prefix}ar; ac_word=$2 -+ for ac_prog in ar -+ do -+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -+set dummy $ac_tool_prefix$ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_AR+set}" = set; then : -@@ -6307,7 +6561,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_AR="${ac_tool_prefix}ar" -+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6327,11 +6581,15 @@ $as_echo "no" >&6; } - fi - - -+ test -n "$AR" && break -+ done - fi --if test -z "$ac_cv_prog_AR"; then -+if test -z "$AR"; then - ac_ct_AR=$AR -- # Extract the first word of "ar", so it can be a program name with args. --set dummy ar; ac_word=$2 -+ for ac_prog in ar -+do -+ # Extract the first word of "$ac_prog", so it can be a program name with args. 
-+set dummy $ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : -@@ -6347,7 +6605,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_ac_ct_AR="ar" -+ ac_cv_prog_ac_ct_AR="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6366,6 +6624,10 @@ else - $as_echo "no" >&6; } - fi - -+ -+ test -n "$ac_ct_AR" && break -+done -+ - if test "x$ac_ct_AR" = x; then - AR="false" - else -@@ -6377,12 +6639,10 @@ ac_tool_warned=yes ;; - esac - AR=$ac_ct_AR - fi --else -- AR="$ac_cv_prog_AR" - fi - --test -z "$AR" && AR=ar --test -z "$AR_FLAGS" && AR_FLAGS=cru -+: ${AR=ar} -+: ${AR_FLAGS=cru} - - - -@@ -6394,6 +6654,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 -+$as_echo_n "checking for archiver @FILE support... " >&6; } -+if test "${lt_cv_ar_at_file+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_ar_at_file=no -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+/* end confdefs.h. */ -+ -+int -+main () -+{ -+ -+ ; -+ return 0; -+} -+_ACEOF -+if ac_fn_c_try_compile "$LINENO"; then : -+ echo conftest.$ac_objext > conftest.lst -+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -eq 0; then -+ # Ensure the archiver fails upon bogus file names. -+ rm -f conftest.$ac_objext libconftest.a -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -ne 0; then -+ lt_cv_ar_at_file=@ -+ fi -+ fi -+ rm -f conftest.* libconftest.a -+ -+fi -+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 -+$as_echo "$lt_cv_ar_at_file" >&6; } -+ -+if test "x$lt_cv_ar_at_file" = xno; then -+ archiver_list_spec= -+else -+ archiver_list_spec=$lt_cv_ar_at_file -+fi -+ -+ -+ -+ -+ -+ -+ - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. 
- set dummy ${ac_tool_prefix}strip; ac_word=$2 -@@ -6728,8 +7046,8 @@ esac - lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -6765,6 +7083,7 @@ for ac_symprfx in "" "_"; do - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - fi -+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" - - # Check to see that the pipe works correctly. - pipe_works=no -@@ -6806,6 +7125,18 @@ _LT_EOF - if $GREP ' nm_test_var$' "$nlist" >/dev/null; then - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext -+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime -+ relocations are performed -- see ld's documentation on pseudo-relocs. */ -+# define LT_DLSYM_CONST -+#elif defined(__osf__) -+/* This system does not cope well with relocations in const data. */ -+# define LT_DLSYM_CONST -+#else -+# define LT_DLSYM_CONST const -+#endif -+ - #ifdef __cplusplus - extern "C" { - #endif -@@ -6817,7 +7148,7 @@ _LT_EOF - cat <<_LT_EOF >> conftest.$ac_ext - - /* The mapping between symbol names and symbols. */ --const struct { -+LT_DLSYM_CONST struct { - const char *name; - void *address; - } -@@ -6843,8 +7174,8 @@ static const void *lt_preloaded_setup() { - _LT_EOF - # Now try linking the two files. - mv conftest.$ac_objext conftstm.$ac_objext -- lt_save_LIBS="$LIBS" -- lt_save_CFLAGS="$CFLAGS" -+ lt_globsym_save_LIBS=$LIBS -+ lt_globsym_save_CFLAGS=$CFLAGS - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 -@@ -6854,8 +7185,8 @@ _LT_EOF - test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi -- LIBS="$lt_save_LIBS" -- CFLAGS="$lt_save_CFLAGS" -+ LIBS=$lt_globsym_save_LIBS -+ CFLAGS=$lt_globsym_save_CFLAGS - else - echo "cannot find nm_test_func in $nlist" >&5 - fi -@@ -6892,6 +7223,21 @@ else - $as_echo "ok" >&6; } - fi - -+# Response file support. 
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then -+ nm_file_list_spec='@' -+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then -+ nm_file_list_spec='@' -+fi -+ -+ -+ -+ -+ -+ -+ -+ -+ - - - -@@ -6910,6 +7256,40 @@ fi - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 -+$as_echo_n "checking for sysroot... " >&6; } -+ -+# Check whether --with-libtool-sysroot was given. -+if test "${with_libtool_sysroot+set}" = set; then : -+ withval=$with_libtool_sysroot; -+else -+ with_libtool_sysroot=no -+fi -+ -+ -+lt_sysroot= -+case ${with_libtool_sysroot} in #( -+ yes) -+ if test "$GCC" = yes; then -+ lt_sysroot=`$CC --print-sysroot 2>/dev/null` -+ fi -+ ;; #( -+ /*) -+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` -+ ;; #( -+ no|'') -+ ;; #( -+ *) -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 -+$as_echo "${with_libtool_sysroot}" >&6; } -+ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 -+ ;; -+esac -+ -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 -+$as_echo "${lt_sysroot:-no}" >&6; } -+ -+ - - - -@@ -7119,6 +7499,123 @@ esac - - need_locks="$enable_libtool_lock" - -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. -+set dummy ${ac_tool_prefix}mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$MANIFEST_TOOL"; then -+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL -+if test -n "$MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 -+$as_echo "$MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then -+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL -+ # Extract the first word of "mt", so it can be a program name with args. -+set dummy mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_MANIFEST_TOOL"; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL -+if test -n "$ac_ct_MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 -+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_MANIFEST_TOOL" = x; then -+ MANIFEST_TOOL=":" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL -+ fi -+else -+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" -+fi -+ -+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 -+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } -+if test "${lt_cv_path_mainfest_tool+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_path_mainfest_tool=no -+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 -+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out -+ cat conftest.err >&5 -+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then -+ lt_cv_path_mainfest_tool=yes -+ fi -+ rm -f conftest* -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 -+$as_echo "$lt_cv_path_mainfest_tool" >&6; } -+if test "x$lt_cv_path_mainfest_tool" != xyes; then -+ MANIFEST_TOOL=: -+fi -+ -+ -+ -+ -+ - - case $host_os in - rhapsody* | darwin*) -@@ -7682,6 +8179,8 @@ _LT_EOF - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 - echo "$AR cru libconftest.a conftest.o" >&5 - $AR cru libconftest.a conftest.o 2>&5 -+ echo "$RANLIB libconftest.a" >&5 -+ $RANLIB libconftest.a 2>&5 - cat > conftest.c << _LT_EOF - int main() { return 0;} - _LT_EOF -@@ -7877,7 +8376,8 @@ fi - LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. --LIBTOOL='$(SHELL) $(top_builddir)/libtool' -+LIBTOOL='$(SHELL) $(top_builddir)' -+LIBTOOL="$LIBTOOL/${host_alias}-libtool" - - - -@@ -7966,7 +8466,7 @@ aix3*) - esac - - # Global variables: --ofile=libtool -+ofile=${host_alias}-libtool - can_build_shared=yes - - # All known linkers require a `.a' archive for static linking (except MSVC, -@@ -8264,8 +8764,6 @@ fi - lt_prog_compiler_pic= - lt_prog_compiler_static= - --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 --$as_echo_n "checking for $compiler option to produce PIC... " >&6; } - - if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' -@@ -8431,6 +8929,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - lt_prog_compiler_pic='--shared' - lt_prog_compiler_static='--static' - ;; -+ nagfor*) -+ # NAG Fortran compiler -+ lt_prog_compiler_wl='-Wl,-Wl,,' -+ lt_prog_compiler_pic='-PIC' -+ lt_prog_compiler_static='-Bstatic' -+ ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -8493,7 +8997,7 @@ $as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - case $cc_basename in -- f77* | f90* | f95*) -+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl='-Qoption ld ';; - *) - lt_prog_compiler_wl='-Wl,';; -@@ -8550,13 +9054,17 @@ case $host_os in - lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" - ;; - esac --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 --$as_echo "$lt_prog_compiler_pic" >&6; } -- -- -- -- - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -+$as_echo_n "checking for $compiler option to produce PIC... " >&6; } -+if test "${lt_cv_prog_compiler_pic+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 -+$as_echo "$lt_cv_prog_compiler_pic" >&6; } -+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic - - # - # Check to make sure the PIC flag actually works. -@@ -8617,6 +9125,11 @@ fi - - - -+ -+ -+ -+ -+ - # - # Check to make sure the static flag actually works. - # -@@ -8967,7 +9480,8 @@ _LT_EOF - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes -- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' -+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -@@ -9066,12 +9580,12 @@ _LT_EOF - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec= - hardcode_libdir_flag_spec_ld='-rpath $libdir' -- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' -+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -9085,8 +9599,8 @@ _LT_EOF - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - 
;; - -@@ -9104,8 +9618,8 @@ _LT_EOF - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9151,8 +9665,8 @@ _LT_EOF - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9282,7 +9796,13 @@ _LT_EOF - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9295,22 +9815,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" -@@ -9322,7 +9849,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - else - # Determine the default libpath from the value encoded in an - # empty executable. 
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9335,22 +9868,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, -@@ -9395,20 +9935,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. -- hardcode_libdir_flag_spec=' ' -- allow_undefined_flag=unsupported -- # Tell ltmain to make .lib files, not .a files. -- libext=lib -- # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=".dll" -- # FIXME: Setting linknames here is a bad hack. -- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -- # The linker will automatically build a .lib file if we build a DLL. -- old_archive_from_new_cmds='true' -- # FIXME: Should let the user specify the lib program. -- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -- fix_srcfile_path='`cygpath -w "$srcfile"`' -- enable_shared_with_static_runtimes=yes -+ case $cc_basename in -+ cl*) -+ # Native MSVC -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ always_export_symbols=yes -+ file_list_spec='@' -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. 
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' -+ # The linker will not automatically build a static lib if we build a DLL. -+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' -+ enable_shared_with_static_runtimes=yes -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ # Don't use ranlib -+ old_postinstall_cmds='chmod 644 $oldlib' -+ postlink_cmds='lt_outputfile="@OUTPUT@"~ -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' -+ ;; -+ *) -+ # Assume MSVC wrapper -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -+ # The linker will automatically build a .lib file if we build a DLL. -+ old_archive_from_new_cmds='true' -+ # FIXME: Should let the user specify the lib program. -+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -+ enable_shared_with_static_runtimes=yes -+ ;; -+ esac - ;; - - darwin* | rhapsody*) -@@ -9469,7 +10052,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
- freebsd* | dragonfly*) -- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no -@@ -9477,7 +10060,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux9*) - if test "$GCC" = yes; then -- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' -+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -@@ -9493,7 +10076,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -9517,10 +10100,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else -@@ -9599,23 +10182,36 @@ fi - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -- save_LDFLAGS="$LDFLAGS" -- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ # This should be the same for all languages, so no per-tag cache variable. -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } -+if test "${lt_cv_irix_exported_symbol+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ --int foo(void) {} -+int foo (void) { return 0; } - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -- -+ lt_cv_irix_exported_symbol=yes -+else -+ lt_cv_irix_exported_symbol=no - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS="$save_LDFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -+$as_echo "$lt_cv_irix_exported_symbol" >&6; } -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -+ fi - else - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' -@@ -9700,7 +10296,7 @@ rm -f core conftest.err conftest.$ac_objext \ - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' -@@ -9719,9 +10315,9 @@ rm -f core conftest.err conftest.$ac_objext \ - no_undefined_flag=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' -- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) -@@ -10297,8 
+10893,9 @@ cygwin* | mingw* | pw32* | cegcc*) - need_version=no - need_lib_prefix=no - -- case $GCC,$host_os in -- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) -+ case $GCC,$cc_basename in -+ yes,*) -+ # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ -@@ -10331,13 +10928,71 @@ cygwin* | mingw* | pw32* | cegcc*) - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac -+ dynamic_linker='Win32 ld.exe' -+ ;; -+ -+ *,cl*) -+ # Native MSVC -+ libname_spec='$name' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' -+ -+ case $build_os in -+ mingw*) -+ sys_lib_search_path_spec= -+ lt_save_ifs=$IFS -+ IFS=';' -+ for lt_path in $LIB -+ do -+ IFS=$lt_save_ifs -+ # Let DOS variable expansion print the short 8.3 style file name. -+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` -+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" -+ done -+ IFS=$lt_save_ifs -+ # Convert to MSYS style. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` -+ ;; -+ cygwin*) -+ # Convert to unix form, then to dos form, then back to unix form -+ # but this time dos style (no spaces!) so that the unix form looks -+ # like /cygdrive/c/PROGRA~1:/cygdr... -+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` -+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` -+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ ;; -+ *) -+ sys_lib_search_path_spec="$LIB" -+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then -+ # It is most probably a Windows format PATH. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -+ else -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ fi -+ # FIXME: find the short name or the path components, as spaces are -+ # common. (e.g. "Program Files" -> "PROGRA~1") -+ ;; -+ esac -+ -+ # DLL is installed to $(libdir)/../bin by postinstall_cmds -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ -+ dldir=$destdir/`dirname \$dlpath`~ -+ test -d \$dldir || mkdir -p \$dldir~ -+ $install_prog $dir/$dlname \$dldir/$dlname' -+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ -+ dlpath=$dir/\$dldll~ -+ $RM \$dlpath' -+ shlibpath_overrides_runpath=yes -+ dynamic_linker='Win32 link.exe' - ;; - - *) -+ # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' -+ dynamic_linker='Win32 ld.exe' - ;; - esac -- dynamic_linker='Win32 ld.exe' - # FIXME: first we should search . 
and the directory the executable is in - shlibpath_var=PATH - ;; -@@ -10429,7 +11084,7 @@ haiku*) - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes -- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' -+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -@@ -11269,10 +11924,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -11375,10 +12030,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -15392,13 +16047,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' - lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' - lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' - lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' - reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' - reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' - OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' - deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' - file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' -+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' -+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' -+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' -+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' - AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' - AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' -+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' - STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' - RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' - old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' -@@ -15413,14 +16075,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de - lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED 
"$delay_single_quote_subst"`' -+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' -+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' - objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' - MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' --lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' -+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' - lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' - need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' -+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' - DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' - NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' - LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' -@@ -15453,12 +16118,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q - hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' - inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' - link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' --fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' - always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' - export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' - exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' - include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' - prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' -+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' - file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' - variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' - need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' -@@ -15513,8 +16178,13 @@ reload_flag \ - OBJDUMP \ - deplibs_check_method \ - file_magic_cmd \ -+file_magic_glob \ -+want_nocaseglob \ -+DLLTOOL \ -+sharedlib_from_linklib_cmd \ - AR \ - AR_FLAGS \ -+archiver_list_spec \ - STRIP \ - RANLIB \ - CC \ -@@ -15524,12 +16194,14 @@ lt_cv_sys_global_symbol_pipe \ - lt_cv_sys_global_symbol_to_cdecl \ - lt_cv_sys_global_symbol_to_c_name_address \ - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ -+nm_file_list_spec \ - lt_prog_compiler_no_builtin_flag \ --lt_prog_compiler_wl \ - lt_prog_compiler_pic \ -+lt_prog_compiler_wl \ - lt_prog_compiler_static \ - lt_cv_prog_compiler_c_o \ - need_locks \ -+MANIFEST_TOOL \ - DSYMUTIL \ - NMEDIT \ - LIPO \ -@@ -15545,7 +16217,6 @@ no_undefined_flag \ - hardcode_libdir_flag_spec \ - hardcode_libdir_flag_spec_ld \ - hardcode_libdir_separator \ --fix_srcfile_path \ - exclude_expsyms \ - include_expsyms \ - file_list_spec \ -@@ -15581,6 +16252,7 @@ module_cmds \ - module_expsym_cmds \ - export_symbols_cmds \ - prelink_cmds \ -+postlink_cmds \ - postinstall_cmds \ - postuninstall_cmds \ - 
finish_cmds \ -@@ -16338,7 +17010,8 @@ $as_echo X"$file" | - # NOTE: Changes made to this file will be lost: look at ltmain.sh. - # - # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, --# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. -+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, -+# Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is part of GNU Libtool. -@@ -16441,19 +17114,42 @@ SP2NL=$lt_lt_SP2NL - # turn newlines into spaces. - NL2SP=$lt_lt_NL2SP - -+# convert \$build file names to \$host format. -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+ -+# convert \$build files to toolchain format. -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+ - # An object symbol dumper. - OBJDUMP=$lt_OBJDUMP - - # Method to check whether dependent libraries are shared objects. - deplibs_check_method=$lt_deplibs_check_method - --# Command to use when deplibs_check_method == "file_magic". -+# Command to use when deplibs_check_method = "file_magic". - file_magic_cmd=$lt_file_magic_cmd - -+# How to find potential files when deplibs_check_method = "file_magic". -+file_magic_glob=$lt_file_magic_glob -+ -+# Find potential files using nocaseglob when deplibs_check_method = "file_magic". -+want_nocaseglob=$lt_want_nocaseglob -+ -+# DLL creation program. -+DLLTOOL=$lt_DLLTOOL -+ -+# Command to associate shared and link libraries. -+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd -+ - # The archiver. - AR=$lt_AR -+ -+# Flags to create an archive. - AR_FLAGS=$lt_AR_FLAGS - -+# How to feed a file listing to the archiver. -+archiver_list_spec=$lt_archiver_list_spec -+ - # A symbol stripping program. - STRIP=$lt_STRIP - -@@ -16483,6 +17179,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - # Transform the output of nm in a C name address pair when lib prefix is needed. - global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - -+# Specify filename containing input files for \$NM. -+nm_file_list_spec=$lt_nm_file_list_spec -+ -+# The root where to search for dependent libraries,and in which our libraries should be installed. -+lt_sysroot=$lt_sysroot -+ - # The name of the directory that contains temporary libtool files. - objdir=$objdir - -@@ -16492,6 +17194,9 @@ MAGIC_CMD=$MAGIC_CMD - # Must we lock files when doing compilation? - need_locks=$lt_need_locks - -+# Manifest tool. -+MANIFEST_TOOL=$lt_MANIFEST_TOOL -+ - # Tool to manipulate archived DWARF debug symbol files on Mac OS X. - DSYMUTIL=$lt_DSYMUTIL - -@@ -16606,12 +17311,12 @@ with_gcc=$GCC - # Compiler flag to turn off builtin functions. - no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag - --# How to pass a linker flag through the compiler. --wl=$lt_lt_prog_compiler_wl -- - # Additional compiler flags for building library objects. - pic_flag=$lt_lt_prog_compiler_pic - -+# How to pass a linker flag through the compiler. -+wl=$lt_lt_prog_compiler_wl -+ - # Compiler flag to prevent dynamic linking. - link_static_flag=$lt_lt_prog_compiler_static - -@@ -16698,9 +17403,6 @@ inherit_rpath=$inherit_rpath - # Whether libtool must link a program against all its dependency libraries. - link_all_deplibs=$link_all_deplibs - --# Fix the shell variable \$srcfile for the compiler. --fix_srcfile_path=$lt_fix_srcfile_path -- - # Set to "yes" if exported symbols are required. 
- always_export_symbols=$always_export_symbols - -@@ -16716,6 +17418,9 @@ include_expsyms=$lt_include_expsyms - # Commands necessary for linking programs (against libraries) with templates. - prelink_cmds=$lt_prelink_cmds - -+# Commands necessary for finishing linking programs. -+postlink_cmds=$lt_postlink_cmds -+ - # Specify filename containing input files. - file_list_spec=$lt_file_list_spec - -@@ -16748,210 +17453,169 @@ ltmain="$ac_aux_dir/ltmain.sh" - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? -- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- case $xsi_shell in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac --} -- --# func_basename file --func_basename () --{ -- func_basename_result="${1##*/}" --} -- --# func_dirname_and_basename file append nondir_replacement --# perform func_basename and func_dirname in a single function --# call: --# dirname: Compute the dirname of FILE. If nonempty, --# add APPEND to the result, otherwise set result --# to NONDIR_REPLACEMENT. --# value returned in "$func_dirname_result" --# basename: Compute filename of FILE. --# value retuned in "$func_basename_result" --# Implementation must be kept synchronized with func_dirname --# and func_basename. For efficiency, we do not delegate to --# those functions but instead duplicate the functionality here. --func_dirname_and_basename () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac -- func_basename_result="${1##*/}" --} -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --func_stripname () --{ -- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -- # positional parameters, so assign one to ordinary parameter first. -- func_stripname_result=${3} -- func_stripname_result=${func_stripname_result#"${1}"} -- func_stripname_result=${func_stripname_result%"${2}"} --} -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=${1%%=*} -- func_opt_split_arg=${1#*=} --} -- --# func_lo2o object --func_lo2o () --{ -- case ${1} in -- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -- *) func_lo2o_result=${1} ;; -- esac --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=${1%.*}.lo --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=$(( $* )) --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=${#1} --} -- --_LT_EOF -- ;; -- *) # Bourne compatible functions. -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- # Extract subdirectory from the argument. 
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -- if test "X$func_dirname_result" = "X${1}"; then -- func_dirname_result="${3}" -- else -- func_dirname_result="$func_dirname_result${2}" -- fi --} -- --# func_basename file --func_basename () --{ -- func_basename_result=`$ECHO "${1}" | $SED "$basename"` --} -- -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --# func_strip_suffix prefix name --func_stripname () --{ -- case ${2} in -- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -- esac --} -- --# sed scripts: --my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' --my_sed_long_arg='1s/^-[^=]*=//' -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` -- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` --} -- --# func_lo2o object --func_lo2o () --{ -- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=`expr "$@"` --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` --} -- --_LT_EOF --esac -- --case $lt_shell_append in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1+=\$2" --} --_LT_EOF -- ;; -- *) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1=\$$1\$2" --} -- --_LT_EOF -- ;; -- esac -- -- -- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- mv -f "$cfgfile" "$ofile" || -+ sed '$q' "$ltmain" >> "$cfgfile" \ -+ || (rm -f "$cfgfile"; exit 1) -+ -+ if test x"$xsi_shell" = xyes; then -+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -+func_dirname ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_basename ()$/,/^} # func_basename /c\ -+func_basename ()\ -+{\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -+func_dirname_and_basename ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -+func_stripname ()\ -+{\ -+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -+\ # positional parameters, so assign one to ordinary parameter first.\ -+\ func_stripname_result=${3}\ -+\ func_stripname_result=${func_stripname_result#"${1}"}\ -+\ func_stripname_result=${func_stripname_result%"${2}"}\ -+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -+func_split_long_opt ()\ -+{\ -+\ func_split_long_opt_name=${1%%=*}\ -+\ func_split_long_opt_arg=${1#*=}\ -+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -+func_split_short_opt ()\ -+{\ -+\ func_split_short_opt_arg=${1#??}\ -+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -+func_lo2o ()\ -+{\ -+\ case ${1} in\ -+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -+\ *) func_lo2o_result=${1} ;;\ -+\ esac\ -+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_xform ()$/,/^} # func_xform /c\ -+func_xform ()\ -+{\ -+ func_xform_result=${1%.*}.lo\ -+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_arith ()$/,/^} # func_arith /c\ -+func_arith ()\ -+{\ -+ func_arith_result=$(( $* ))\ -+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_len ()$/,/^} # func_len /c\ -+func_len ()\ -+{\ -+ func_len_result=${#1}\ -+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ sed -e '/^func_append ()$/,/^} # func_append /c\ -+func_append ()\ -+{\ -+ eval "${1}+=\\${2}"\ -+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -+func_append_quoted ()\ -+{\ -+\ func_quote_for_eval "${2}"\ -+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -+fi -+ -+ -+ mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" - -diff --git a/configure b/configure -index 2a74a77..ad3dfb7 100755 ---- a/configure -+++ b/configure -@@ -3444,7 +3444,7 @@ case "${target}" in - ;; - s390-*-* | s390x-*-*) - ;; -- sh*-*-* | sh[34]*-*-*) -+ sh-*-* | sh[34]*-*-*) - ;; - sh64-*-* | sh5*-*-*) - ;; -@@ -3946,7 +3946,7 @@ case "${target}" in - or1k*-*-*) - noconfigdirs="$noconfigdirs gdb" - ;; -- sh*-*-* | sh64-*-*) -+ sh-*-* | sh64-*-*) - case "${target}" in - sh*-*-elf) - ;; -diff --git a/gas/configure b/gas/configure -index f7753b8..45da030 100755 ---- a/gas/configure -+++ b/gas/configure -@@ -648,8 +648,11 @@ OTOOL - LIPO - NMEDIT - DSYMUTIL -+MANIFEST_TOOL - RANLIB -+ac_ct_AR - AR -+DLLTOOL - OBJDUMP - LN_S - NM -@@ -761,6 +764,7 @@ enable_static - with_pic - enable_fast_install - with_gnu_ld -+with_libtool_sysroot - enable_libtool_lock - enable_plugins - enable_largefile -@@ -4919,8 +4923,8 @@ esac - - - --macro_version='2.2.7a' --macro_revision='1.3134' -+macro_version='2.4' -+macro_revision='1.3293' - - - -@@ -4960,7 +4964,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 - $as_echo_n "checking how to print strings... " >&6; } - # Test print first, because it will be a builtin if present. --if test "X`print -r -- -n 2>/dev/null`" = X-n && \ -+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ - test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='print -r --' - elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then -@@ -5646,8 +5650,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... 
" >&6; - # Try some XSI features - xsi_shell=no - ( _lt_dummy="a/b/c" -- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ -- = c,a/b,, \ -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ - && eval 'test $(( 1 + 1 )) -eq 2 \ - && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ - && xsi_shell=yes -@@ -5696,6 +5700,80 @@ esac - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 -+$as_echo_n "checking how to convert $build file names to $host format... " >&6; } -+if test "${lt_cv_to_host_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 -+ ;; -+ esac -+ ;; -+ *-*-cygwin* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin -+ ;; -+ esac -+ ;; -+ * ) # unhandled hosts (and "normal" native builds) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+esac -+ -+fi -+ -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 -+$as_echo "$lt_cv_to_host_file_cmd" >&6; } -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 -+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } -+if test "${lt_cv_to_tool_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ #assume ordinary cross tools, or native build. -+lt_cv_to_tool_file_cmd=func_convert_file_noop -+case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ esac -+ ;; -+esac -+ -+fi -+ -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 -+$as_echo "$lt_cv_to_tool_file_cmd" >&6; } -+ -+ -+ -+ -+ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 - $as_echo_n "checking for $LD option to reload object files... " >&6; } - if test "${lt_cv_ld_reload_flag+set}" = set; then : -@@ -5712,6 +5790,11 @@ case $reload_flag in - esac - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in -+ cygwin* | mingw* | pw32* | cegcc*) -+ if test "$GCC" != yes; then -+ reload_cmds=false -+ fi -+ ;; - darwin*) - if test "$GCC" = yes; then - reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' -@@ -5880,7 +5963,8 @@ mingw* | pw32*) - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' -+ # Keep this pattern in sync with the one in func_win32_libid. 
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' - lt_cv_file_magic_cmd='$OBJDUMP -f' - fi - ;; -@@ -6034,6 +6118,21 @@ esac - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 - $as_echo "$lt_cv_deplibs_check_method" >&6; } -+ -+file_magic_glob= -+want_nocaseglob=no -+if test "$build" = "$host"; then -+ case $host_os in -+ mingw* | pw32*) -+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then -+ want_nocaseglob=yes -+ else -+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` -+ fi -+ ;; -+ esac -+fi -+ - file_magic_cmd=$lt_cv_file_magic_cmd - deplibs_check_method=$lt_cv_deplibs_check_method - test -z "$deplibs_check_method" && deplibs_check_method=unknown -@@ -6049,9 +6148,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown - - - -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. -+set dummy ${ac_tool_prefix}dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$DLLTOOL"; then -+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+DLLTOOL=$ac_cv_prog_DLLTOOL -+if test -n "$DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 -+$as_echo "$DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_DLLTOOL"; then -+ ac_ct_DLLTOOL=$DLLTOOL -+ # Extract the first word of "dlltool", so it can be a program name with args. -+set dummy dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_DLLTOOL"; then -+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_DLLTOOL="dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL -+if test -n "$ac_ct_DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 -+$as_echo "$ac_ct_DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_DLLTOOL" = x; then -+ DLLTOOL="false" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ DLLTOOL=$ac_ct_DLLTOOL -+ fi -+else -+ DLLTOOL="$ac_cv_prog_DLLTOOL" -+fi -+ -+test -z "$DLLTOOL" && DLLTOOL=dlltool -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 -+$as_echo_n "checking how to associate runtime and link libraries... " >&6; } -+if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_sharedlib_from_linklib_cmd='unknown' -+ -+case $host_os in -+cygwin* | mingw* | pw32* | cegcc*) -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL -+ case `$DLLTOOL --help 2>&1` in -+ *--identify-strict*) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -+ ;; -+ *) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback -+ ;; -+ esac -+ ;; -+*) -+ # fallback: assume linklib IS sharedlib -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" -+ ;; -+esac -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 -+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } -+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd -+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO -+ -+ -+ -+ -+ -+ -+ - if test -n "$ac_tool_prefix"; then -- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. --set dummy ${ac_tool_prefix}ar; ac_word=$2 -+ for ac_prog in ar -+ do -+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -+set dummy $ac_tool_prefix$ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_AR+set}" = set; then : -@@ -6067,7 +6319,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_AR="${ac_tool_prefix}ar" -+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6087,11 +6339,15 @@ $as_echo "no" >&6; } - fi - - -+ test -n "$AR" && break -+ done - fi --if test -z "$ac_cv_prog_AR"; then -+if test -z "$AR"; then - ac_ct_AR=$AR -- # Extract the first word of "ar", so it can be a program name with args. --set dummy ar; ac_word=$2 -+ for ac_prog in ar -+do -+ # Extract the first word of "$ac_prog", so it can be a program name with args. 
-+set dummy $ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : -@@ -6107,7 +6363,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_ac_ct_AR="ar" -+ ac_cv_prog_ac_ct_AR="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6126,6 +6382,10 @@ else - $as_echo "no" >&6; } - fi - -+ -+ test -n "$ac_ct_AR" && break -+done -+ - if test "x$ac_ct_AR" = x; then - AR="false" - else -@@ -6137,12 +6397,10 @@ ac_tool_warned=yes ;; - esac - AR=$ac_ct_AR - fi --else -- AR="$ac_cv_prog_AR" - fi - --test -z "$AR" && AR=ar --test -z "$AR_FLAGS" && AR_FLAGS=cru -+: ${AR=ar} -+: ${AR_FLAGS=cru} - - - -@@ -6154,6 +6412,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 -+$as_echo_n "checking for archiver @FILE support... " >&6; } -+if test "${lt_cv_ar_at_file+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_ar_at_file=no -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+/* end confdefs.h. */ -+ -+int -+main () -+{ -+ -+ ; -+ return 0; -+} -+_ACEOF -+if ac_fn_c_try_compile "$LINENO"; then : -+ echo conftest.$ac_objext > conftest.lst -+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -eq 0; then -+ # Ensure the archiver fails upon bogus file names. -+ rm -f conftest.$ac_objext libconftest.a -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -ne 0; then -+ lt_cv_ar_at_file=@ -+ fi -+ fi -+ rm -f conftest.* libconftest.a -+ -+fi -+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 -+$as_echo "$lt_cv_ar_at_file" >&6; } -+ -+if test "x$lt_cv_ar_at_file" = xno; then -+ archiver_list_spec= -+else -+ archiver_list_spec=$lt_cv_ar_at_file -+fi -+ -+ -+ -+ -+ -+ -+ - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. 
- set dummy ${ac_tool_prefix}strip; ac_word=$2 -@@ -6488,8 +6804,8 @@ esac - lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -6525,6 +6841,7 @@ for ac_symprfx in "" "_"; do - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - fi -+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" - - # Check to see that the pipe works correctly. - pipe_works=no -@@ -6566,6 +6883,18 @@ _LT_EOF - if $GREP ' nm_test_var$' "$nlist" >/dev/null; then - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext -+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime -+ relocations are performed -- see ld's documentation on pseudo-relocs. */ -+# define LT_DLSYM_CONST -+#elif defined(__osf__) -+/* This system does not cope well with relocations in const data. */ -+# define LT_DLSYM_CONST -+#else -+# define LT_DLSYM_CONST const -+#endif -+ - #ifdef __cplusplus - extern "C" { - #endif -@@ -6577,7 +6906,7 @@ _LT_EOF - cat <<_LT_EOF >> conftest.$ac_ext - - /* The mapping between symbol names and symbols. */ --const struct { -+LT_DLSYM_CONST struct { - const char *name; - void *address; - } -@@ -6603,8 +6932,8 @@ static const void *lt_preloaded_setup() { - _LT_EOF - # Now try linking the two files. - mv conftest.$ac_objext conftstm.$ac_objext -- lt_save_LIBS="$LIBS" -- lt_save_CFLAGS="$CFLAGS" -+ lt_globsym_save_LIBS=$LIBS -+ lt_globsym_save_CFLAGS=$CFLAGS - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 -@@ -6614,8 +6943,8 @@ _LT_EOF - test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi -- LIBS="$lt_save_LIBS" -- CFLAGS="$lt_save_CFLAGS" -+ LIBS=$lt_globsym_save_LIBS -+ CFLAGS=$lt_globsym_save_CFLAGS - else - echo "cannot find nm_test_func in $nlist" >&5 - fi -@@ -6652,6 +6981,21 @@ else - $as_echo "ok" >&6; } - fi - -+# Response file support. 
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then -+ nm_file_list_spec='@' -+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then -+ nm_file_list_spec='@' -+fi -+ -+ -+ -+ -+ -+ -+ -+ -+ - - - -@@ -6670,6 +7014,40 @@ fi - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 -+$as_echo_n "checking for sysroot... " >&6; } -+ -+# Check whether --with-libtool-sysroot was given. -+if test "${with_libtool_sysroot+set}" = set; then : -+ withval=$with_libtool_sysroot; -+else -+ with_libtool_sysroot=no -+fi -+ -+ -+lt_sysroot= -+case ${with_libtool_sysroot} in #( -+ yes) -+ if test "$GCC" = yes; then -+ lt_sysroot=`$CC --print-sysroot 2>/dev/null` -+ fi -+ ;; #( -+ /*) -+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` -+ ;; #( -+ no|'') -+ ;; #( -+ *) -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 -+$as_echo "${with_libtool_sysroot}" >&6; } -+ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 -+ ;; -+esac -+ -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 -+$as_echo "${lt_sysroot:-no}" >&6; } -+ -+ - - - -@@ -6879,6 +7257,123 @@ esac - - need_locks="$enable_libtool_lock" - -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. -+set dummy ${ac_tool_prefix}mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$MANIFEST_TOOL"; then -+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL -+if test -n "$MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 -+$as_echo "$MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then -+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL -+ # Extract the first word of "mt", so it can be a program name with args. -+set dummy mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_MANIFEST_TOOL"; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL -+if test -n "$ac_ct_MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 -+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_MANIFEST_TOOL" = x; then -+ MANIFEST_TOOL=":" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL -+ fi -+else -+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" -+fi -+ -+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 -+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } -+if test "${lt_cv_path_mainfest_tool+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_path_mainfest_tool=no -+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 -+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out -+ cat conftest.err >&5 -+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then -+ lt_cv_path_mainfest_tool=yes -+ fi -+ rm -f conftest* -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 -+$as_echo "$lt_cv_path_mainfest_tool" >&6; } -+if test "x$lt_cv_path_mainfest_tool" != xyes; then -+ MANIFEST_TOOL=: -+fi -+ -+ -+ -+ -+ - - case $host_os in - rhapsody* | darwin*) -@@ -7442,6 +7937,8 @@ _LT_EOF - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 - echo "$AR cru libconftest.a conftest.o" >&5 - $AR cru libconftest.a conftest.o 2>&5 -+ echo "$RANLIB libconftest.a" >&5 -+ $RANLIB libconftest.a 2>&5 - cat > conftest.c << _LT_EOF - int main() { return 0;} - _LT_EOF -@@ -7637,7 +8134,8 @@ fi - LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. --LIBTOOL='$(SHELL) $(top_builddir)/libtool' -+LIBTOOL='$(SHELL) $(top_builddir)' -+LIBTOOL="$LIBTOOL/${host_alias}-libtool" - - - -@@ -7726,7 +8224,7 @@ aix3*) - esac - - # Global variables: --ofile=libtool -+ofile=${host_alias}-libtool - can_build_shared=yes - - # All known linkers require a `.a' archive for static linking (except MSVC, -@@ -8024,8 +8522,6 @@ fi - lt_prog_compiler_pic= - lt_prog_compiler_static= - --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 --$as_echo_n "checking for $compiler option to produce PIC... " >&6; } - - if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' -@@ -8191,6 +8687,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - lt_prog_compiler_pic='--shared' - lt_prog_compiler_static='--static' - ;; -+ nagfor*) -+ # NAG Fortran compiler -+ lt_prog_compiler_wl='-Wl,-Wl,,' -+ lt_prog_compiler_pic='-PIC' -+ lt_prog_compiler_static='-Bstatic' -+ ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -8253,7 +8755,7 @@ $as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - case $cc_basename in -- f77* | f90* | f95*) -+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl='-Qoption ld ';; - *) - lt_prog_compiler_wl='-Wl,';; -@@ -8310,13 +8812,17 @@ case $host_os in - lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" - ;; - esac --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 --$as_echo "$lt_prog_compiler_pic" >&6; } -- -- -- -- - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -+$as_echo_n "checking for $compiler option to produce PIC... " >&6; } -+if test "${lt_cv_prog_compiler_pic+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 -+$as_echo "$lt_cv_prog_compiler_pic" >&6; } -+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic - - # - # Check to make sure the PIC flag actually works. -@@ -8377,6 +8883,11 @@ fi - - - -+ -+ -+ -+ -+ - # - # Check to make sure the static flag actually works. - # -@@ -8727,7 +9238,8 @@ _LT_EOF - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes -- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' -+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -@@ -8826,12 +9338,12 @@ _LT_EOF - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec= - hardcode_libdir_flag_spec_ld='-rpath $libdir' -- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' -+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -8845,8 +9357,8 @@ _LT_EOF - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - 
;; - -@@ -8864,8 +9376,8 @@ _LT_EOF - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -8911,8 +9423,8 @@ _LT_EOF - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9042,7 +9554,13 @@ _LT_EOF - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9055,22 +9573,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" -@@ -9082,7 +9607,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - else - # Determine the default libpath from the value encoded in an - # empty executable. 
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9095,22 +9626,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, -@@ -9155,20 +9693,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. -- hardcode_libdir_flag_spec=' ' -- allow_undefined_flag=unsupported -- # Tell ltmain to make .lib files, not .a files. -- libext=lib -- # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=".dll" -- # FIXME: Setting linknames here is a bad hack. -- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -- # The linker will automatically build a .lib file if we build a DLL. -- old_archive_from_new_cmds='true' -- # FIXME: Should let the user specify the lib program. -- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -- fix_srcfile_path='`cygpath -w "$srcfile"`' -- enable_shared_with_static_runtimes=yes -+ case $cc_basename in -+ cl*) -+ # Native MSVC -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ always_export_symbols=yes -+ file_list_spec='@' -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. 
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' -+ # The linker will not automatically build a static lib if we build a DLL. -+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' -+ enable_shared_with_static_runtimes=yes -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ # Don't use ranlib -+ old_postinstall_cmds='chmod 644 $oldlib' -+ postlink_cmds='lt_outputfile="@OUTPUT@"~ -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' -+ ;; -+ *) -+ # Assume MSVC wrapper -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -+ # The linker will automatically build a .lib file if we build a DLL. -+ old_archive_from_new_cmds='true' -+ # FIXME: Should let the user specify the lib program. -+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -+ enable_shared_with_static_runtimes=yes -+ ;; -+ esac - ;; - - darwin* | rhapsody*) -@@ -9229,7 +9810,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
- freebsd* | dragonfly*) -- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no -@@ -9237,7 +9818,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux9*) - if test "$GCC" = yes; then -- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' -+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -@@ -9253,7 +9834,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -9277,10 +9858,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else -@@ -9359,23 +9940,36 @@ fi - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -- save_LDFLAGS="$LDFLAGS" -- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ # This should be the same for all languages, so no per-tag cache variable. -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } -+if test "${lt_cv_irix_exported_symbol+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ --int foo(void) {} -+int foo (void) { return 0; } - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -- -+ lt_cv_irix_exported_symbol=yes -+else -+ lt_cv_irix_exported_symbol=no - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS="$save_LDFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -+$as_echo "$lt_cv_irix_exported_symbol" >&6; } -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -+ fi - else - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' -@@ -9460,7 +10054,7 @@ rm -f core conftest.err conftest.$ac_objext \ - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' -@@ -9479,9 +10073,9 @@ rm -f core conftest.err conftest.$ac_objext \ - no_undefined_flag=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' -- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) -@@ -10057,8 
+10651,9 @@ cygwin* | mingw* | pw32* | cegcc*) - need_version=no - need_lib_prefix=no - -- case $GCC,$host_os in -- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) -+ case $GCC,$cc_basename in -+ yes,*) -+ # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ -@@ -10091,13 +10686,71 @@ cygwin* | mingw* | pw32* | cegcc*) - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac -+ dynamic_linker='Win32 ld.exe' -+ ;; -+ -+ *,cl*) -+ # Native MSVC -+ libname_spec='$name' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' -+ -+ case $build_os in -+ mingw*) -+ sys_lib_search_path_spec= -+ lt_save_ifs=$IFS -+ IFS=';' -+ for lt_path in $LIB -+ do -+ IFS=$lt_save_ifs -+ # Let DOS variable expansion print the short 8.3 style file name. -+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` -+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" -+ done -+ IFS=$lt_save_ifs -+ # Convert to MSYS style. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` -+ ;; -+ cygwin*) -+ # Convert to unix form, then to dos form, then back to unix form -+ # but this time dos style (no spaces!) so that the unix form looks -+ # like /cygdrive/c/PROGRA~1:/cygdr... -+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` -+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` -+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ ;; -+ *) -+ sys_lib_search_path_spec="$LIB" -+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then -+ # It is most probably a Windows format PATH. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -+ else -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ fi -+ # FIXME: find the short name or the path components, as spaces are -+ # common. (e.g. "Program Files" -> "PROGRA~1") -+ ;; -+ esac -+ -+ # DLL is installed to $(libdir)/../bin by postinstall_cmds -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ -+ dldir=$destdir/`dirname \$dlpath`~ -+ test -d \$dldir || mkdir -p \$dldir~ -+ $install_prog $dir/$dlname \$dldir/$dlname' -+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ -+ dlpath=$dir/\$dldll~ -+ $RM \$dlpath' -+ shlibpath_overrides_runpath=yes -+ dynamic_linker='Win32 link.exe' - ;; - - *) -+ # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' -+ dynamic_linker='Win32 ld.exe' - ;; - esac -- dynamic_linker='Win32 ld.exe' - # FIXME: first we should search . 
and the directory the executable is in - shlibpath_var=PATH - ;; -@@ -10189,7 +10842,7 @@ haiku*) - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes -- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' -+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -@@ -11029,10 +11682,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -11135,10 +11788,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -15350,13 +16003,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' - lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' - lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' - lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' - reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' - reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' - OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' - deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' - file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' -+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' -+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' -+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' -+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' - AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' - AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' -+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' - STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' - RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' - old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' -@@ -15371,14 +16031,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de - lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED 
"$delay_single_quote_subst"`' -+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' -+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' - objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' - MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' --lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' -+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' - lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' - need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' -+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' - DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' - NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' - LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' -@@ -15411,12 +16074,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q - hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' - inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' - link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' --fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' - always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' - export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' - exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' - include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' - prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' -+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' - file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' - variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' - need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' -@@ -15471,8 +16134,13 @@ reload_flag \ - OBJDUMP \ - deplibs_check_method \ - file_magic_cmd \ -+file_magic_glob \ -+want_nocaseglob \ -+DLLTOOL \ -+sharedlib_from_linklib_cmd \ - AR \ - AR_FLAGS \ -+archiver_list_spec \ - STRIP \ - RANLIB \ - CC \ -@@ -15482,12 +16150,14 @@ lt_cv_sys_global_symbol_pipe \ - lt_cv_sys_global_symbol_to_cdecl \ - lt_cv_sys_global_symbol_to_c_name_address \ - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ -+nm_file_list_spec \ - lt_prog_compiler_no_builtin_flag \ --lt_prog_compiler_wl \ - lt_prog_compiler_pic \ -+lt_prog_compiler_wl \ - lt_prog_compiler_static \ - lt_cv_prog_compiler_c_o \ - need_locks \ -+MANIFEST_TOOL \ - DSYMUTIL \ - NMEDIT \ - LIPO \ -@@ -15503,7 +16173,6 @@ no_undefined_flag \ - hardcode_libdir_flag_spec \ - hardcode_libdir_flag_spec_ld \ - hardcode_libdir_separator \ --fix_srcfile_path \ - exclude_expsyms \ - include_expsyms \ - file_list_spec \ -@@ -15539,6 +16208,7 @@ module_cmds \ - module_expsym_cmds \ - export_symbols_cmds \ - prelink_cmds \ -+postlink_cmds \ - postinstall_cmds \ - postuninstall_cmds \ - 
finish_cmds \ -@@ -16303,7 +16973,8 @@ $as_echo X"$file" | - # NOTE: Changes made to this file will be lost: look at ltmain.sh. - # - # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, --# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. -+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, -+# Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is part of GNU Libtool. -@@ -16406,19 +17077,42 @@ SP2NL=$lt_lt_SP2NL - # turn newlines into spaces. - NL2SP=$lt_lt_NL2SP - -+# convert \$build file names to \$host format. -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+ -+# convert \$build files to toolchain format. -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+ - # An object symbol dumper. - OBJDUMP=$lt_OBJDUMP - - # Method to check whether dependent libraries are shared objects. - deplibs_check_method=$lt_deplibs_check_method - --# Command to use when deplibs_check_method == "file_magic". -+# Command to use when deplibs_check_method = "file_magic". - file_magic_cmd=$lt_file_magic_cmd - -+# How to find potential files when deplibs_check_method = "file_magic". -+file_magic_glob=$lt_file_magic_glob -+ -+# Find potential files using nocaseglob when deplibs_check_method = "file_magic". -+want_nocaseglob=$lt_want_nocaseglob -+ -+# DLL creation program. -+DLLTOOL=$lt_DLLTOOL -+ -+# Command to associate shared and link libraries. -+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd -+ - # The archiver. - AR=$lt_AR -+ -+# Flags to create an archive. - AR_FLAGS=$lt_AR_FLAGS - -+# How to feed a file listing to the archiver. -+archiver_list_spec=$lt_archiver_list_spec -+ - # A symbol stripping program. - STRIP=$lt_STRIP - -@@ -16448,6 +17142,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - # Transform the output of nm in a C name address pair when lib prefix is needed. - global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - -+# Specify filename containing input files for \$NM. -+nm_file_list_spec=$lt_nm_file_list_spec -+ -+# The root where to search for dependent libraries,and in which our libraries should be installed. -+lt_sysroot=$lt_sysroot -+ - # The name of the directory that contains temporary libtool files. - objdir=$objdir - -@@ -16457,6 +17157,9 @@ MAGIC_CMD=$MAGIC_CMD - # Must we lock files when doing compilation? - need_locks=$lt_need_locks - -+# Manifest tool. -+MANIFEST_TOOL=$lt_MANIFEST_TOOL -+ - # Tool to manipulate archived DWARF debug symbol files on Mac OS X. - DSYMUTIL=$lt_DSYMUTIL - -@@ -16571,12 +17274,12 @@ with_gcc=$GCC - # Compiler flag to turn off builtin functions. - no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag - --# How to pass a linker flag through the compiler. --wl=$lt_lt_prog_compiler_wl -- - # Additional compiler flags for building library objects. - pic_flag=$lt_lt_prog_compiler_pic - -+# How to pass a linker flag through the compiler. -+wl=$lt_lt_prog_compiler_wl -+ - # Compiler flag to prevent dynamic linking. - link_static_flag=$lt_lt_prog_compiler_static - -@@ -16663,9 +17366,6 @@ inherit_rpath=$inherit_rpath - # Whether libtool must link a program against all its dependency libraries. - link_all_deplibs=$link_all_deplibs - --# Fix the shell variable \$srcfile for the compiler. --fix_srcfile_path=$lt_fix_srcfile_path -- - # Set to "yes" if exported symbols are required. 
- always_export_symbols=$always_export_symbols - -@@ -16681,6 +17381,9 @@ include_expsyms=$lt_include_expsyms - # Commands necessary for linking programs (against libraries) with templates. - prelink_cmds=$lt_prelink_cmds - -+# Commands necessary for finishing linking programs. -+postlink_cmds=$lt_postlink_cmds -+ - # Specify filename containing input files. - file_list_spec=$lt_file_list_spec - -@@ -16713,210 +17416,169 @@ ltmain="$ac_aux_dir/ltmain.sh" - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? -- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- case $xsi_shell in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac --} -- --# func_basename file --func_basename () --{ -- func_basename_result="${1##*/}" --} -- --# func_dirname_and_basename file append nondir_replacement --# perform func_basename and func_dirname in a single function --# call: --# dirname: Compute the dirname of FILE. If nonempty, --# add APPEND to the result, otherwise set result --# to NONDIR_REPLACEMENT. --# value returned in "$func_dirname_result" --# basename: Compute filename of FILE. --# value retuned in "$func_basename_result" --# Implementation must be kept synchronized with func_dirname --# and func_basename. For efficiency, we do not delegate to --# those functions but instead duplicate the functionality here. --func_dirname_and_basename () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac -- func_basename_result="${1##*/}" --} -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --func_stripname () --{ -- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -- # positional parameters, so assign one to ordinary parameter first. -- func_stripname_result=${3} -- func_stripname_result=${func_stripname_result#"${1}"} -- func_stripname_result=${func_stripname_result%"${2}"} --} -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=${1%%=*} -- func_opt_split_arg=${1#*=} --} -- --# func_lo2o object --func_lo2o () --{ -- case ${1} in -- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -- *) func_lo2o_result=${1} ;; -- esac --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=${1%.*}.lo --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=$(( $* )) --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=${#1} --} -- --_LT_EOF -- ;; -- *) # Bourne compatible functions. -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- # Extract subdirectory from the argument. 
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -- if test "X$func_dirname_result" = "X${1}"; then -- func_dirname_result="${3}" -- else -- func_dirname_result="$func_dirname_result${2}" -- fi --} -- --# func_basename file --func_basename () --{ -- func_basename_result=`$ECHO "${1}" | $SED "$basename"` --} -- -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --# func_strip_suffix prefix name --func_stripname () --{ -- case ${2} in -- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -- esac --} -- --# sed scripts: --my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' --my_sed_long_arg='1s/^-[^=]*=//' -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` -- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` --} -- --# func_lo2o object --func_lo2o () --{ -- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=`expr "$@"` --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` --} -- --_LT_EOF --esac -- --case $lt_shell_append in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1+=\$2" --} --_LT_EOF -- ;; -- *) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1=\$$1\$2" --} -- --_LT_EOF -- ;; -- esac -- -- -- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- mv -f "$cfgfile" "$ofile" || -+ sed '$q' "$ltmain" >> "$cfgfile" \ -+ || (rm -f "$cfgfile"; exit 1) -+ -+ if test x"$xsi_shell" = xyes; then -+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -+func_dirname ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_basename ()$/,/^} # func_basename /c\ -+func_basename ()\ -+{\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -+func_dirname_and_basename ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -+func_stripname ()\ -+{\ -+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -+\ # positional parameters, so assign one to ordinary parameter first.\ -+\ func_stripname_result=${3}\ -+\ func_stripname_result=${func_stripname_result#"${1}"}\ -+\ func_stripname_result=${func_stripname_result%"${2}"}\ -+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -+func_split_long_opt ()\ -+{\ -+\ func_split_long_opt_name=${1%%=*}\ -+\ func_split_long_opt_arg=${1#*=}\ -+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -+func_split_short_opt ()\ -+{\ -+\ func_split_short_opt_arg=${1#??}\ -+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -+func_lo2o ()\ -+{\ -+\ case ${1} in\ -+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -+\ *) func_lo2o_result=${1} ;;\ -+\ esac\ -+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_xform ()$/,/^} # func_xform /c\ -+func_xform ()\ -+{\ -+ func_xform_result=${1%.*}.lo\ -+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_arith ()$/,/^} # func_arith /c\ -+func_arith ()\ -+{\ -+ func_arith_result=$(( $* ))\ -+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_len ()$/,/^} # func_len /c\ -+func_len ()\ -+{\ -+ func_len_result=${#1}\ -+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ sed -e '/^func_append ()$/,/^} # func_append /c\ -+func_append ()\ -+{\ -+ eval "${1}+=\\${2}"\ -+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -+func_append_quoted ()\ -+{\ -+\ func_quote_for_eval "${2}"\ -+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -+fi -+ -+ -+ mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" - -diff --git a/gprof/configure b/gprof/configure -index 626e1c7..a88719c 100755 ---- a/gprof/configure -+++ b/gprof/configure -@@ -630,8 +630,11 @@ OTOOL - LIPO - NMEDIT - DSYMUTIL -+MANIFEST_TOOL - RANLIB -+ac_ct_AR - AR -+DLLTOOL - OBJDUMP - LN_S - NM -@@ -743,6 +746,7 @@ enable_static - with_pic - enable_fast_install - with_gnu_ld -+with_libtool_sysroot - enable_libtool_lock - enable_plugins - enable_largefile -@@ -1401,6 +1405,8 @@ Optional Packages: - --with-pic try to use only PIC/non-PIC objects [default=use - both] - --with-gnu-ld assume the C compiler uses GNU ld [default=no] -+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified). - - Some influential environment variables: - CC C compiler command -@@ -4835,8 +4841,8 @@ esac - - - --macro_version='2.2.7a' --macro_revision='1.3134' -+macro_version='2.4' -+macro_revision='1.3293' - - - -@@ -4876,7 +4882,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 - $as_echo_n "checking how to print strings... " >&6; } - # Test print first, because it will be a builtin if present. --if test "X`print -r -- -n 2>/dev/null`" = X-n && \ -+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ - test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='print -r --' - elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then -@@ -5562,8 +5568,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... 
" >&6; - # Try some XSI features - xsi_shell=no - ( _lt_dummy="a/b/c" -- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ -- = c,a/b,, \ -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ - && eval 'test $(( 1 + 1 )) -eq 2 \ - && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ - && xsi_shell=yes -@@ -5612,6 +5618,80 @@ esac - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 -+$as_echo_n "checking how to convert $build file names to $host format... " >&6; } -+if test "${lt_cv_to_host_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 -+ ;; -+ esac -+ ;; -+ *-*-cygwin* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin -+ ;; -+ esac -+ ;; -+ * ) # unhandled hosts (and "normal" native builds) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+esac -+ -+fi -+ -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 -+$as_echo "$lt_cv_to_host_file_cmd" >&6; } -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 -+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } -+if test "${lt_cv_to_tool_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ #assume ordinary cross tools, or native build. -+lt_cv_to_tool_file_cmd=func_convert_file_noop -+case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ esac -+ ;; -+esac -+ -+fi -+ -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 -+$as_echo "$lt_cv_to_tool_file_cmd" >&6; } -+ -+ -+ -+ -+ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 - $as_echo_n "checking for $LD option to reload object files... " >&6; } - if test "${lt_cv_ld_reload_flag+set}" = set; then : -@@ -5628,6 +5708,11 @@ case $reload_flag in - esac - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in -+ cygwin* | mingw* | pw32* | cegcc*) -+ if test "$GCC" != yes; then -+ reload_cmds=false -+ fi -+ ;; - darwin*) - if test "$GCC" = yes; then - reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' -@@ -5796,7 +5881,8 @@ mingw* | pw32*) - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' -+ # Keep this pattern in sync with the one in func_win32_libid. 
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' - lt_cv_file_magic_cmd='$OBJDUMP -f' - fi - ;; -@@ -5873,11 +5959,6 @@ linux* | k*bsd*-gnu | kopensolaris*-gnu) - lt_cv_deplibs_check_method=pass_all - ;; - --linux-uclibc*) -- lt_cv_deplibs_check_method=pass_all -- lt_cv_file_magic_test_file=`echo /lib/libuClibc-*.so` -- ;; -- - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' -@@ -5955,6 +6036,21 @@ esac - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 - $as_echo "$lt_cv_deplibs_check_method" >&6; } -+ -+file_magic_glob= -+want_nocaseglob=no -+if test "$build" = "$host"; then -+ case $host_os in -+ mingw* | pw32*) -+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then -+ want_nocaseglob=yes -+ else -+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` -+ fi -+ ;; -+ esac -+fi -+ - file_magic_cmd=$lt_cv_file_magic_cmd - deplibs_check_method=$lt_cv_deplibs_check_method - test -z "$deplibs_check_method" && deplibs_check_method=unknown -@@ -5970,9 +6066,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown - - - -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. -+set dummy ${ac_tool_prefix}dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$DLLTOOL"; then -+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+DLLTOOL=$ac_cv_prog_DLLTOOL -+if test -n "$DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 -+$as_echo "$DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_DLLTOOL"; then -+ ac_ct_DLLTOOL=$DLLTOOL -+ # Extract the first word of "dlltool", so it can be a program name with args. -+set dummy dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_DLLTOOL"; then -+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_DLLTOOL="dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL -+if test -n "$ac_ct_DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 -+$as_echo "$ac_ct_DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_DLLTOOL" = x; then -+ DLLTOOL="false" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ DLLTOOL=$ac_ct_DLLTOOL -+ fi -+else -+ DLLTOOL="$ac_cv_prog_DLLTOOL" -+fi -+ -+test -z "$DLLTOOL" && DLLTOOL=dlltool -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 -+$as_echo_n "checking how to associate runtime and link libraries... " >&6; } -+if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_sharedlib_from_linklib_cmd='unknown' -+ -+case $host_os in -+cygwin* | mingw* | pw32* | cegcc*) -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL -+ case `$DLLTOOL --help 2>&1` in -+ *--identify-strict*) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -+ ;; -+ *) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback -+ ;; -+ esac -+ ;; -+*) -+ # fallback: assume linklib IS sharedlib -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" -+ ;; -+esac -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 -+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } -+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd -+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO -+ -+ -+ -+ -+ -+ -+ - if test -n "$ac_tool_prefix"; then -- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. --set dummy ${ac_tool_prefix}ar; ac_word=$2 -+ for ac_prog in ar -+ do -+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -+set dummy $ac_tool_prefix$ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_AR+set}" = set; then : -@@ -5988,7 +6237,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_AR="${ac_tool_prefix}ar" -+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6008,11 +6257,15 @@ $as_echo "no" >&6; } - fi - - -+ test -n "$AR" && break -+ done - fi --if test -z "$ac_cv_prog_AR"; then -+if test -z "$AR"; then - ac_ct_AR=$AR -- # Extract the first word of "ar", so it can be a program name with args. --set dummy ar; ac_word=$2 -+ for ac_prog in ar -+do -+ # Extract the first word of "$ac_prog", so it can be a program name with args. 
-+set dummy $ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : -@@ -6028,7 +6281,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_ac_ct_AR="ar" -+ ac_cv_prog_ac_ct_AR="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6047,6 +6300,10 @@ else - $as_echo "no" >&6; } - fi - -+ -+ test -n "$ac_ct_AR" && break -+done -+ - if test "x$ac_ct_AR" = x; then - AR="false" - else -@@ -6058,12 +6315,10 @@ ac_tool_warned=yes ;; - esac - AR=$ac_ct_AR - fi --else -- AR="$ac_cv_prog_AR" - fi - --test -z "$AR" && AR=ar --test -z "$AR_FLAGS" && AR_FLAGS=cru -+: ${AR=ar} -+: ${AR_FLAGS=cru} - - - -@@ -6075,6 +6330,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 -+$as_echo_n "checking for archiver @FILE support... " >&6; } -+if test "${lt_cv_ar_at_file+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_ar_at_file=no -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+/* end confdefs.h. */ -+ -+int -+main () -+{ -+ -+ ; -+ return 0; -+} -+_ACEOF -+if ac_fn_c_try_compile "$LINENO"; then : -+ echo conftest.$ac_objext > conftest.lst -+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -eq 0; then -+ # Ensure the archiver fails upon bogus file names. -+ rm -f conftest.$ac_objext libconftest.a -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -ne 0; then -+ lt_cv_ar_at_file=@ -+ fi -+ fi -+ rm -f conftest.* libconftest.a -+ -+fi -+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 -+$as_echo "$lt_cv_ar_at_file" >&6; } -+ -+if test "x$lt_cv_ar_at_file" = xno; then -+ archiver_list_spec= -+else -+ archiver_list_spec=$lt_cv_ar_at_file -+fi -+ -+ -+ -+ -+ -+ -+ - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. 
- set dummy ${ac_tool_prefix}strip; ac_word=$2 -@@ -6409,8 +6722,8 @@ esac - lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -6446,6 +6759,7 @@ for ac_symprfx in "" "_"; do - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - fi -+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" - - # Check to see that the pipe works correctly. - pipe_works=no -@@ -6487,6 +6801,18 @@ _LT_EOF - if $GREP ' nm_test_var$' "$nlist" >/dev/null; then - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext -+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime -+ relocations are performed -- see ld's documentation on pseudo-relocs. */ -+# define LT_DLSYM_CONST -+#elif defined(__osf__) -+/* This system does not cope well with relocations in const data. */ -+# define LT_DLSYM_CONST -+#else -+# define LT_DLSYM_CONST const -+#endif -+ - #ifdef __cplusplus - extern "C" { - #endif -@@ -6498,7 +6824,7 @@ _LT_EOF - cat <<_LT_EOF >> conftest.$ac_ext - - /* The mapping between symbol names and symbols. */ --const struct { -+LT_DLSYM_CONST struct { - const char *name; - void *address; - } -@@ -6524,8 +6850,8 @@ static const void *lt_preloaded_setup() { - _LT_EOF - # Now try linking the two files. - mv conftest.$ac_objext conftstm.$ac_objext -- lt_save_LIBS="$LIBS" -- lt_save_CFLAGS="$CFLAGS" -+ lt_globsym_save_LIBS=$LIBS -+ lt_globsym_save_CFLAGS=$CFLAGS - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 -@@ -6535,8 +6861,8 @@ _LT_EOF - test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi -- LIBS="$lt_save_LIBS" -- CFLAGS="$lt_save_CFLAGS" -+ LIBS=$lt_globsym_save_LIBS -+ CFLAGS=$lt_globsym_save_CFLAGS - else - echo "cannot find nm_test_func in $nlist" >&5 - fi -@@ -6573,6 +6899,18 @@ else - $as_echo "ok" >&6; } - fi - -+# Response file support. 
-+if test "$lt_cv_nm_interface" = "MS dumpbin"; then -+ nm_file_list_spec='@' -+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then -+ nm_file_list_spec='@' -+fi -+ -+ -+ -+ -+ -+ - - - -@@ -6594,6 +6932,43 @@ fi - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 -+$as_echo_n "checking for sysroot... " >&6; } -+ -+# Check whether --with-libtool-sysroot was given. -+if test "${with_libtool_sysroot+set}" = set; then : -+ withval=$with_libtool_sysroot; -+else -+ with_libtool_sysroot=no -+fi -+ -+ -+lt_sysroot= -+case ${with_libtool_sysroot} in #( -+ yes) -+ if test "$GCC" = yes; then -+ lt_sysroot=`$CC --print-sysroot 2>/dev/null` -+ fi -+ ;; #( -+ /*) -+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` -+ ;; #( -+ no|'') -+ ;; #( -+ *) -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 -+$as_echo "${with_libtool_sysroot}" >&6; } -+ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 -+ ;; -+esac -+ -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 -+$as_echo "${lt_sysroot:-no}" >&6; } -+ -+ -+ -+ -+ - # Check whether --enable-libtool-lock was given. - if test "${enable_libtool_lock+set}" = set; then : - enableval=$enable_libtool_lock; -@@ -6800,6 +7175,123 @@ esac - - need_locks="$enable_libtool_lock" - -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. -+set dummy ${ac_tool_prefix}mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$MANIFEST_TOOL"; then -+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL -+if test -n "$MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 -+$as_echo "$MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then -+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL -+ # Extract the first word of "mt", so it can be a program name with args. -+set dummy mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_MANIFEST_TOOL"; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL -+if test -n "$ac_ct_MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 -+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_MANIFEST_TOOL" = x; then -+ MANIFEST_TOOL=":" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL -+ fi -+else -+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" -+fi -+ -+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 -+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } -+if test "${lt_cv_path_mainfest_tool+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_path_mainfest_tool=no -+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 -+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out -+ cat conftest.err >&5 -+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then -+ lt_cv_path_mainfest_tool=yes -+ fi -+ rm -f conftest* -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 -+$as_echo "$lt_cv_path_mainfest_tool" >&6; } -+if test "x$lt_cv_path_mainfest_tool" != xyes; then -+ MANIFEST_TOOL=: -+fi -+ -+ -+ -+ -+ - - case $host_os in - rhapsody* | darwin*) -@@ -7363,6 +7855,8 @@ _LT_EOF - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 - echo "$AR cru libconftest.a conftest.o" >&5 - $AR cru libconftest.a conftest.o 2>&5 -+ echo "$RANLIB libconftest.a" >&5 -+ $RANLIB libconftest.a 2>&5 - cat > conftest.c << _LT_EOF - int main() { return 0;} - _LT_EOF -@@ -7558,7 +8052,8 @@ fi - LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. --LIBTOOL='$(SHELL) $(top_builddir)/libtool' -+LIBTOOL='$(SHELL) $(top_builddir)' -+LIBTOOL="$LIBTOOL/${host_alias}-libtool" - - - -@@ -7647,7 +8142,7 @@ aix3*) - esac - - # Global variables: --ofile=libtool -+ofile=${host_alias}-libtool - can_build_shared=yes - - # All known linkers require a `.a' archive for static linking (except MSVC, -@@ -7945,8 +8440,6 @@ fi - lt_prog_compiler_pic= - lt_prog_compiler_static= - --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 --$as_echo_n "checking for $compiler option to produce PIC... " >&6; } - - if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' -@@ -8112,6 +8605,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - lt_prog_compiler_pic='--shared' - lt_prog_compiler_static='--static' - ;; -+ nagfor*) -+ # NAG Fortran compiler -+ lt_prog_compiler_wl='-Wl,-Wl,,' -+ lt_prog_compiler_pic='-PIC' -+ lt_prog_compiler_static='-Bstatic' -+ ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -8174,7 +8673,7 @@ $as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - case $cc_basename in -- f77* | f90* | f95*) -+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl='-Qoption ld ';; - *) - lt_prog_compiler_wl='-Wl,';; -@@ -8231,13 +8730,17 @@ case $host_os in - lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" - ;; - esac --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 --$as_echo "$lt_prog_compiler_pic" >&6; } -- -- -- -- - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -+$as_echo_n "checking for $compiler option to produce PIC... " >&6; } -+if test "${lt_cv_prog_compiler_pic+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 -+$as_echo "$lt_cv_prog_compiler_pic" >&6; } -+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic - - # - # Check to make sure the PIC flag actually works. -@@ -8298,6 +8801,11 @@ fi - - - -+ -+ -+ -+ -+ - # - # Check to make sure the static flag actually works. - # -@@ -8648,7 +9156,8 @@ _LT_EOF - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes -- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' -+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -@@ -8747,12 +9256,12 @@ _LT_EOF - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec= - hardcode_libdir_flag_spec_ld='-rpath $libdir' -- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' -+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -8766,8 +9275,8 @@ _LT_EOF - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - 
;; - -@@ -8785,8 +9294,8 @@ _LT_EOF - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -8832,8 +9341,8 @@ _LT_EOF - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -8963,7 +9472,13 @@ _LT_EOF - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -8976,22 +9491,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" -@@ -9003,7 +9525,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - else - # Determine the default libpath from the value encoded in an - # empty executable. 
-- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9016,22 +9544,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, -@@ -9076,20 +9611,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. -- hardcode_libdir_flag_spec=' ' -- allow_undefined_flag=unsupported -- # Tell ltmain to make .lib files, not .a files. -- libext=lib -- # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=".dll" -- # FIXME: Setting linknames here is a bad hack. -- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -- # The linker will automatically build a .lib file if we build a DLL. -- old_archive_from_new_cmds='true' -- # FIXME: Should let the user specify the lib program. -- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -- fix_srcfile_path='`cygpath -w "$srcfile"`' -- enable_shared_with_static_runtimes=yes -+ case $cc_basename in -+ cl*) -+ # Native MSVC -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ always_export_symbols=yes -+ file_list_spec='@' -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. 
-+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' -+ # The linker will not automatically build a static lib if we build a DLL. -+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' -+ enable_shared_with_static_runtimes=yes -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ # Don't use ranlib -+ old_postinstall_cmds='chmod 644 $oldlib' -+ postlink_cmds='lt_outputfile="@OUTPUT@"~ -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' -+ ;; -+ *) -+ # Assume MSVC wrapper -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -+ # The linker will automatically build a .lib file if we build a DLL. -+ old_archive_from_new_cmds='true' -+ # FIXME: Should let the user specify the lib program. -+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -+ enable_shared_with_static_runtimes=yes -+ ;; -+ esac - ;; - - darwin* | rhapsody*) -@@ -9150,7 +9728,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
- freebsd* | dragonfly*) -- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no -@@ -9158,7 +9736,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux9*) - if test "$GCC" = yes; then -- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' -+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -@@ -9174,7 +9752,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -9198,10 +9776,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else -@@ -9280,23 +9858,36 @@ fi - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -- save_LDFLAGS="$LDFLAGS" -- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ # This should be the same for all languages, so no per-tag cache variable. -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } -+if test "${lt_cv_irix_exported_symbol+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ --int foo(void) {} -+int foo (void) { return 0; } - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -- -+ lt_cv_irix_exported_symbol=yes -+else -+ lt_cv_irix_exported_symbol=no - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS="$save_LDFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -+$as_echo "$lt_cv_irix_exported_symbol" >&6; } -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -+ fi - else - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' -@@ -9381,7 +9972,7 @@ rm -f core conftest.err conftest.$ac_objext \ - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' -@@ -9400,9 +9991,9 @@ rm -f core conftest.err conftest.$ac_objext \ - no_undefined_flag=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' -- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) -@@ -9978,8 
+10569,9 @@ cygwin* | mingw* | pw32* | cegcc*) - need_version=no - need_lib_prefix=no - -- case $GCC,$host_os in -- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) -+ case $GCC,$cc_basename in -+ yes,*) -+ # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ -@@ -10012,13 +10604,71 @@ cygwin* | mingw* | pw32* | cegcc*) - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac -+ dynamic_linker='Win32 ld.exe' -+ ;; -+ -+ *,cl*) -+ # Native MSVC -+ libname_spec='$name' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' -+ -+ case $build_os in -+ mingw*) -+ sys_lib_search_path_spec= -+ lt_save_ifs=$IFS -+ IFS=';' -+ for lt_path in $LIB -+ do -+ IFS=$lt_save_ifs -+ # Let DOS variable expansion print the short 8.3 style file name. -+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` -+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" -+ done -+ IFS=$lt_save_ifs -+ # Convert to MSYS style. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` -+ ;; -+ cygwin*) -+ # Convert to unix form, then to dos form, then back to unix form -+ # but this time dos style (no spaces!) so that the unix form looks -+ # like /cygdrive/c/PROGRA~1:/cygdr... -+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` -+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` -+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ ;; -+ *) -+ sys_lib_search_path_spec="$LIB" -+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then -+ # It is most probably a Windows format PATH. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -+ else -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ fi -+ # FIXME: find the short name or the path components, as spaces are -+ # common. (e.g. "Program Files" -> "PROGRA~1") -+ ;; -+ esac -+ -+ # DLL is installed to $(libdir)/../bin by postinstall_cmds -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ -+ dldir=$destdir/`dirname \$dlpath`~ -+ test -d \$dldir || mkdir -p \$dldir~ -+ $install_prog $dir/$dlname \$dldir/$dlname' -+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ -+ dlpath=$dir/\$dldll~ -+ $RM \$dlpath' -+ shlibpath_overrides_runpath=yes -+ dynamic_linker='Win32 link.exe' - ;; - - *) -+ # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' -+ dynamic_linker='Win32 ld.exe' - ;; - esac -- dynamic_linker='Win32 ld.exe' - # FIXME: first we should search . 
and the directory the executable is in - shlibpath_var=PATH - ;; -@@ -10110,7 +10760,7 @@ haiku*) - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes -- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' -+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -@@ -10950,10 +11600,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -11056,10 +11706,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -12961,13 +13611,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' - lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' - lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' - lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' - reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' - reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' - OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' - deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' - file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' -+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' -+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' -+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' -+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' - AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' - AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' -+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' - STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' - RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' - old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' -@@ -12982,14 +13639,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de - lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED 
"$delay_single_quote_subst"`' -+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' -+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' - objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' - MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' --lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' -+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' - lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' - need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' -+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' - DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' - NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' - LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' -@@ -13022,12 +13682,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q - hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' - inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' - link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' --fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' - always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' - export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' - exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' - include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' - prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' -+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' - file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' - variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' - need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' -@@ -13082,8 +13742,13 @@ reload_flag \ - OBJDUMP \ - deplibs_check_method \ - file_magic_cmd \ -+file_magic_glob \ -+want_nocaseglob \ -+DLLTOOL \ -+sharedlib_from_linklib_cmd \ - AR \ - AR_FLAGS \ -+archiver_list_spec \ - STRIP \ - RANLIB \ - CC \ -@@ -13093,12 +13758,14 @@ lt_cv_sys_global_symbol_pipe \ - lt_cv_sys_global_symbol_to_cdecl \ - lt_cv_sys_global_symbol_to_c_name_address \ - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ -+nm_file_list_spec \ - lt_prog_compiler_no_builtin_flag \ --lt_prog_compiler_wl \ - lt_prog_compiler_pic \ -+lt_prog_compiler_wl \ - lt_prog_compiler_static \ - lt_cv_prog_compiler_c_o \ - need_locks \ -+MANIFEST_TOOL \ - DSYMUTIL \ - NMEDIT \ - LIPO \ -@@ -13114,7 +13781,6 @@ no_undefined_flag \ - hardcode_libdir_flag_spec \ - hardcode_libdir_flag_spec_ld \ - hardcode_libdir_separator \ --fix_srcfile_path \ - exclude_expsyms \ - include_expsyms \ - file_list_spec \ -@@ -13150,6 +13816,7 @@ module_cmds \ - module_expsym_cmds \ - export_symbols_cmds \ - prelink_cmds \ -+postlink_cmds \ - postinstall_cmds \ - postuninstall_cmds \ - 
finish_cmds \ -@@ -13906,7 +14573,8 @@ $as_echo X"$file" | - # NOTE: Changes made to this file will be lost: look at ltmain.sh. - # - # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, --# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. -+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, -+# Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is part of GNU Libtool. -@@ -14009,19 +14677,42 @@ SP2NL=$lt_lt_SP2NL - # turn newlines into spaces. - NL2SP=$lt_lt_NL2SP - -+# convert \$build file names to \$host format. -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+ -+# convert \$build files to toolchain format. -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+ - # An object symbol dumper. - OBJDUMP=$lt_OBJDUMP - - # Method to check whether dependent libraries are shared objects. - deplibs_check_method=$lt_deplibs_check_method - --# Command to use when deplibs_check_method == "file_magic". -+# Command to use when deplibs_check_method = "file_magic". - file_magic_cmd=$lt_file_magic_cmd - -+# How to find potential files when deplibs_check_method = "file_magic". -+file_magic_glob=$lt_file_magic_glob -+ -+# Find potential files using nocaseglob when deplibs_check_method = "file_magic". -+want_nocaseglob=$lt_want_nocaseglob -+ -+# DLL creation program. -+DLLTOOL=$lt_DLLTOOL -+ -+# Command to associate shared and link libraries. -+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd -+ - # The archiver. - AR=$lt_AR -+ -+# Flags to create an archive. - AR_FLAGS=$lt_AR_FLAGS - -+# How to feed a file listing to the archiver. -+archiver_list_spec=$lt_archiver_list_spec -+ - # A symbol stripping program. - STRIP=$lt_STRIP - -@@ -14051,6 +14742,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - # Transform the output of nm in a C name address pair when lib prefix is needed. - global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - -+# Specify filename containing input files for \$NM. -+nm_file_list_spec=$lt_nm_file_list_spec -+ -+# The root where to search for dependent libraries,and in which our libraries should be installed. -+lt_sysroot=$lt_sysroot -+ - # The name of the directory that contains temporary libtool files. - objdir=$objdir - -@@ -14060,6 +14757,9 @@ MAGIC_CMD=$MAGIC_CMD - # Must we lock files when doing compilation? - need_locks=$lt_need_locks - -+# Manifest tool. -+MANIFEST_TOOL=$lt_MANIFEST_TOOL -+ - # Tool to manipulate archived DWARF debug symbol files on Mac OS X. - DSYMUTIL=$lt_DSYMUTIL - -@@ -14174,12 +14874,12 @@ with_gcc=$GCC - # Compiler flag to turn off builtin functions. - no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag - --# How to pass a linker flag through the compiler. --wl=$lt_lt_prog_compiler_wl -- - # Additional compiler flags for building library objects. - pic_flag=$lt_lt_prog_compiler_pic - -+# How to pass a linker flag through the compiler. -+wl=$lt_lt_prog_compiler_wl -+ - # Compiler flag to prevent dynamic linking. - link_static_flag=$lt_lt_prog_compiler_static - -@@ -14266,9 +14966,6 @@ inherit_rpath=$inherit_rpath - # Whether libtool must link a program against all its dependency libraries. - link_all_deplibs=$link_all_deplibs - --# Fix the shell variable \$srcfile for the compiler. --fix_srcfile_path=$lt_fix_srcfile_path -- - # Set to "yes" if exported symbols are required. 
- always_export_symbols=$always_export_symbols - -@@ -14284,6 +14981,9 @@ include_expsyms=$lt_include_expsyms - # Commands necessary for linking programs (against libraries) with templates. - prelink_cmds=$lt_prelink_cmds - -+# Commands necessary for finishing linking programs. -+postlink_cmds=$lt_postlink_cmds -+ - # Specify filename containing input files. - file_list_spec=$lt_file_list_spec - -@@ -14316,210 +15016,169 @@ ltmain="$ac_aux_dir/ltmain.sh" - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? -- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- case $xsi_shell in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac --} -- --# func_basename file --func_basename () --{ -- func_basename_result="${1##*/}" --} -- --# func_dirname_and_basename file append nondir_replacement --# perform func_basename and func_dirname in a single function --# call: --# dirname: Compute the dirname of FILE. If nonempty, --# add APPEND to the result, otherwise set result --# to NONDIR_REPLACEMENT. --# value returned in "$func_dirname_result" --# basename: Compute filename of FILE. --# value retuned in "$func_basename_result" --# Implementation must be kept synchronized with func_dirname --# and func_basename. For efficiency, we do not delegate to --# those functions but instead duplicate the functionality here. --func_dirname_and_basename () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac -- func_basename_result="${1##*/}" --} -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --func_stripname () --{ -- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -- # positional parameters, so assign one to ordinary parameter first. -- func_stripname_result=${3} -- func_stripname_result=${func_stripname_result#"${1}"} -- func_stripname_result=${func_stripname_result%"${2}"} --} -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=${1%%=*} -- func_opt_split_arg=${1#*=} --} -- --# func_lo2o object --func_lo2o () --{ -- case ${1} in -- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -- *) func_lo2o_result=${1} ;; -- esac --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=${1%.*}.lo --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=$(( $* )) --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=${#1} --} -- --_LT_EOF -- ;; -- *) # Bourne compatible functions. -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- # Extract subdirectory from the argument. 
-- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -- if test "X$func_dirname_result" = "X${1}"; then -- func_dirname_result="${3}" -- else -- func_dirname_result="$func_dirname_result${2}" -- fi --} -- --# func_basename file --func_basename () --{ -- func_basename_result=`$ECHO "${1}" | $SED "$basename"` --} -- -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --# func_strip_suffix prefix name --func_stripname () --{ -- case ${2} in -- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -- esac --} -- --# sed scripts: --my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' --my_sed_long_arg='1s/^-[^=]*=//' -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` -- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` --} -- --# func_lo2o object --func_lo2o () --{ -- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=`expr "$@"` --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` --} -- --_LT_EOF --esac -- --case $lt_shell_append in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1+=\$2" --} --_LT_EOF -- ;; -- *) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1=\$$1\$2" --} -- --_LT_EOF -- ;; -- esac -- -- -- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- mv -f "$cfgfile" "$ofile" || -+ sed '$q' "$ltmain" >> "$cfgfile" \ -+ || (rm -f "$cfgfile"; exit 1) -+ -+ if test x"$xsi_shell" = xyes; then -+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -+func_dirname ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_basename ()$/,/^} # func_basename /c\ -+func_basename ()\ -+{\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -+func_dirname_and_basename ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -+func_stripname ()\ -+{\ -+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -+\ # positional parameters, so assign one to ordinary parameter first.\ -+\ func_stripname_result=${3}\ -+\ func_stripname_result=${func_stripname_result#"${1}"}\ -+\ func_stripname_result=${func_stripname_result%"${2}"}\ -+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -+func_split_long_opt ()\ -+{\ -+\ func_split_long_opt_name=${1%%=*}\ -+\ func_split_long_opt_arg=${1#*=}\ -+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -+func_split_short_opt ()\ -+{\ -+\ func_split_short_opt_arg=${1#??}\ -+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -+func_lo2o ()\ -+{\ -+\ case ${1} in\ -+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -+\ *) func_lo2o_result=${1} ;;\ -+\ esac\ -+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_xform ()$/,/^} # func_xform /c\ -+func_xform ()\ -+{\ -+ func_xform_result=${1%.*}.lo\ -+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_arith ()$/,/^} # func_arith /c\ -+func_arith ()\ -+{\ -+ func_arith_result=$(( $* ))\ -+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_len ()$/,/^} # func_len /c\ -+func_len ()\ -+{\ -+ func_len_result=${#1}\ -+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ sed -e '/^func_append ()$/,/^} # func_append /c\ -+func_append ()\ -+{\ -+ eval "${1}+=\\${2}"\ -+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -+func_append_quoted ()\ -+{\ -+\ func_quote_for_eval "${2}"\ -+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -+fi -+ -+ -+ mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" - -diff --git a/ld/configure b/ld/configure -index f9be51a..3df1149 100755 ---- a/ld/configure -+++ b/ld/configure -@@ -658,8 +658,11 @@ OTOOL - LIPO - NMEDIT - DSYMUTIL -+MANIFEST_TOOL - RANLIB -+ac_ct_AR - AR -+DLLTOOL - OBJDUMP - LN_S - NM -@@ -781,6 +784,7 @@ enable_static - with_pic - enable_fast_install - with_gnu_ld -+with_libtool_sysroot - enable_libtool_lock - enable_plugins - enable_largefile -@@ -1462,6 +1466,8 @@ Optional Packages: - --with-pic try to use only PIC/non-PIC objects [default=use - both] - --with-gnu-ld assume the C compiler uses GNU ld [default=no] -+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified). - --with-lib-path=dir1:dir2... set default LIB_PATH - --with-sysroot=DIR Search for usr/lib et al within DIR. - -@@ -5656,8 +5662,8 @@ esac - - - --macro_version='2.2.7a' --macro_revision='1.3134' -+macro_version='2.4' -+macro_revision='1.3293' - - - -@@ -5697,7 +5703,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 - $as_echo_n "checking how to print strings... " >&6; } - # Test print first, because it will be a builtin if present. --if test "X`print -r -- -n 2>/dev/null`" = X-n && \ -+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ - test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='print -r --' - elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then -@@ -6383,8 +6389,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... 
" >&6; - # Try some XSI features - xsi_shell=no - ( _lt_dummy="a/b/c" -- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ -- = c,a/b,, \ -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ - && eval 'test $(( 1 + 1 )) -eq 2 \ - && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ - && xsi_shell=yes -@@ -6433,6 +6439,80 @@ esac - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 -+$as_echo_n "checking how to convert $build file names to $host format... " >&6; } -+if test "${lt_cv_to_host_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 -+ ;; -+ esac -+ ;; -+ *-*-cygwin* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin -+ ;; -+ esac -+ ;; -+ * ) # unhandled hosts (and "normal" native builds) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+esac -+ -+fi -+ -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 -+$as_echo "$lt_cv_to_host_file_cmd" >&6; } -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 -+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } -+if test "${lt_cv_to_tool_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ #assume ordinary cross tools, or native build. -+lt_cv_to_tool_file_cmd=func_convert_file_noop -+case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ esac -+ ;; -+esac -+ -+fi -+ -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 -+$as_echo "$lt_cv_to_tool_file_cmd" >&6; } -+ -+ -+ -+ -+ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 - $as_echo_n "checking for $LD option to reload object files... " >&6; } - if test "${lt_cv_ld_reload_flag+set}" = set; then : -@@ -6449,6 +6529,11 @@ case $reload_flag in - esac - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in -+ cygwin* | mingw* | pw32* | cegcc*) -+ if test "$GCC" != yes; then -+ reload_cmds=false -+ fi -+ ;; - darwin*) - if test "$GCC" = yes; then - reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' -@@ -6617,7 +6702,8 @@ mingw* | pw32*) - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' -+ # Keep this pattern in sync with the one in func_win32_libid. 
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' - lt_cv_file_magic_cmd='$OBJDUMP -f' - fi - ;; -@@ -6771,6 +6857,21 @@ esac - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 - $as_echo "$lt_cv_deplibs_check_method" >&6; } -+ -+file_magic_glob= -+want_nocaseglob=no -+if test "$build" = "$host"; then -+ case $host_os in -+ mingw* | pw32*) -+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then -+ want_nocaseglob=yes -+ else -+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` -+ fi -+ ;; -+ esac -+fi -+ - file_magic_cmd=$lt_cv_file_magic_cmd - deplibs_check_method=$lt_cv_deplibs_check_method - test -z "$deplibs_check_method" && deplibs_check_method=unknown -@@ -6786,9 +6887,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown - - - -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. -+set dummy ${ac_tool_prefix}dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$DLLTOOL"; then -+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+DLLTOOL=$ac_cv_prog_DLLTOOL -+if test -n "$DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 -+$as_echo "$DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_DLLTOOL"; then -+ ac_ct_DLLTOOL=$DLLTOOL -+ # Extract the first word of "dlltool", so it can be a program name with args. -+set dummy dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_DLLTOOL"; then -+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_DLLTOOL="dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL -+if test -n "$ac_ct_DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 -+$as_echo "$ac_ct_DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_DLLTOOL" = x; then -+ DLLTOOL="false" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ DLLTOOL=$ac_ct_DLLTOOL -+ fi -+else -+ DLLTOOL="$ac_cv_prog_DLLTOOL" -+fi -+ -+test -z "$DLLTOOL" && DLLTOOL=dlltool -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 -+$as_echo_n "checking how to associate runtime and link libraries... " >&6; } -+if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_sharedlib_from_linklib_cmd='unknown' -+ -+case $host_os in -+cygwin* | mingw* | pw32* | cegcc*) -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL -+ case `$DLLTOOL --help 2>&1` in -+ *--identify-strict*) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -+ ;; -+ *) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback -+ ;; -+ esac -+ ;; -+*) -+ # fallback: assume linklib IS sharedlib -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" -+ ;; -+esac -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 -+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } -+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd -+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO -+ -+ -+ -+ -+ -+ -+ - if test -n "$ac_tool_prefix"; then -- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. --set dummy ${ac_tool_prefix}ar; ac_word=$2 -+ for ac_prog in ar -+ do -+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -+set dummy $ac_tool_prefix$ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_AR+set}" = set; then : -@@ -6804,7 +7058,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_AR="${ac_tool_prefix}ar" -+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6824,11 +7078,15 @@ $as_echo "no" >&6; } - fi - - -+ test -n "$AR" && break -+ done - fi --if test -z "$ac_cv_prog_AR"; then -+if test -z "$AR"; then - ac_ct_AR=$AR -- # Extract the first word of "ar", so it can be a program name with args. --set dummy ar; ac_word=$2 -+ for ac_prog in ar -+do -+ # Extract the first word of "$ac_prog", so it can be a program name with args. 
-+set dummy $ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : -@@ -6844,7 +7102,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_ac_ct_AR="ar" -+ ac_cv_prog_ac_ct_AR="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6863,6 +7121,10 @@ else - $as_echo "no" >&6; } - fi - -+ -+ test -n "$ac_ct_AR" && break -+done -+ - if test "x$ac_ct_AR" = x; then - AR="false" - else -@@ -6874,12 +7136,12 @@ ac_tool_warned=yes ;; - esac - AR=$ac_ct_AR - fi --else -- AR="$ac_cv_prog_AR" - fi - --test -z "$AR" && AR=ar --test -z "$AR_FLAGS" && AR_FLAGS=cru -+: ${AR=ar} -+: ${AR_FLAGS=cru} -+ -+ - - - -@@ -6889,6 +7151,62 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 -+$as_echo_n "checking for archiver @FILE support... " >&6; } -+if test "${lt_cv_ar_at_file+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_ar_at_file=no -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+/* end confdefs.h. */ -+ -+int -+main () -+{ -+ -+ ; -+ return 0; -+} -+_ACEOF -+if ac_fn_c_try_compile "$LINENO"; then : -+ echo conftest.$ac_objext > conftest.lst -+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -eq 0; then -+ # Ensure the archiver fails upon bogus file names. -+ rm -f conftest.$ac_objext libconftest.a -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -ne 0; then -+ lt_cv_ar_at_file=@ -+ fi -+ fi -+ rm -f conftest.* libconftest.a -+ -+fi -+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 -+$as_echo "$lt_cv_ar_at_file" >&6; } -+ -+if test "x$lt_cv_ar_at_file" = xno; then -+ archiver_list_spec= -+else -+ archiver_list_spec=$lt_cv_ar_at_file -+fi -+ -+ -+ -+ -+ - - - if test -n "$ac_tool_prefix"; then -@@ -7225,8 +7543,8 @@ esac - lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -7262,6 +7580,7 @@ for ac_symprfx in "" "_"; do - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - fi -+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" - - # Check to see that the pipe works correctly. - pipe_works=no -@@ -7303,6 +7622,18 @@ _LT_EOF - if $GREP ' nm_test_var$' "$nlist" >/dev/null; then - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext -+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime -+ relocations are performed -- see ld's documentation on pseudo-relocs. */ -+# define LT_DLSYM_CONST -+#elif defined(__osf__) -+/* This system does not cope well with relocations in const data. */ -+# define LT_DLSYM_CONST -+#else -+# define LT_DLSYM_CONST const -+#endif -+ - #ifdef __cplusplus - extern "C" { - #endif -@@ -7314,7 +7645,7 @@ _LT_EOF - cat <<_LT_EOF >> conftest.$ac_ext - - /* The mapping between symbol names and symbols. */ --const struct { -+LT_DLSYM_CONST struct { - const char *name; - void *address; - } -@@ -7340,8 +7671,8 @@ static const void *lt_preloaded_setup() { - _LT_EOF - # Now try linking the two files. 
- mv conftest.$ac_objext conftstm.$ac_objext -- lt_save_LIBS="$LIBS" -- lt_save_CFLAGS="$CFLAGS" -+ lt_globsym_save_LIBS=$LIBS -+ lt_globsym_save_CFLAGS=$CFLAGS - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 -@@ -7351,8 +7682,8 @@ _LT_EOF - test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi -- LIBS="$lt_save_LIBS" -- CFLAGS="$lt_save_CFLAGS" -+ LIBS=$lt_globsym_save_LIBS -+ CFLAGS=$lt_globsym_save_CFLAGS - else - echo "cannot find nm_test_func in $nlist" >&5 - fi -@@ -7389,6 +7720,19 @@ else - $as_echo "ok" >&6; } - fi - -+# Response file support. -+if test "$lt_cv_nm_interface" = "MS dumpbin"; then -+ nm_file_list_spec='@' -+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then -+ nm_file_list_spec='@' -+fi -+ -+ -+ -+ -+ -+ -+ - - - -@@ -7409,6 +7753,42 @@ fi - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 -+$as_echo_n "checking for sysroot... " >&6; } -+ -+# Check whether --with-libtool-sysroot was given. -+if test "${with_libtool_sysroot+set}" = set; then : -+ withval=$with_libtool_sysroot; -+else -+ with_libtool_sysroot=no -+fi -+ -+ -+lt_sysroot= -+case ${with_libtool_sysroot} in #( -+ yes) -+ if test "$GCC" = yes; then -+ lt_sysroot=`$CC --print-sysroot 2>/dev/null` -+ fi -+ ;; #( -+ /*) -+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` -+ ;; #( -+ no|'') -+ ;; #( -+ *) -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 -+$as_echo "${with_libtool_sysroot}" >&6; } -+ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 -+ ;; -+esac -+ -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 -+$as_echo "${lt_sysroot:-no}" >&6; } -+ -+ -+ -+ - - # Check whether --enable-libtool-lock was given. - if test "${enable_libtool_lock+set}" = set; then : -@@ -7616,6 +7996,123 @@ esac - - need_locks="$enable_libtool_lock" - -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. -+set dummy ${ac_tool_prefix}mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$MANIFEST_TOOL"; then -+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL -+if test -n "$MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 -+$as_echo "$MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then -+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL -+ # Extract the first word of "mt", so it can be a program name with args. 
-+set dummy mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_MANIFEST_TOOL"; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL -+if test -n "$ac_ct_MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 -+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_MANIFEST_TOOL" = x; then -+ MANIFEST_TOOL=":" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL -+ fi -+else -+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" -+fi -+ -+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 -+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } -+if test "${lt_cv_path_mainfest_tool+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_path_mainfest_tool=no -+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 -+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out -+ cat conftest.err >&5 -+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then -+ lt_cv_path_mainfest_tool=yes -+ fi -+ rm -f conftest* -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 -+$as_echo "$lt_cv_path_mainfest_tool" >&6; } -+if test "x$lt_cv_path_mainfest_tool" != xyes; then -+ MANIFEST_TOOL=: -+fi -+ -+ -+ -+ -+ - - case $host_os in - rhapsody* | darwin*) -@@ -8179,6 +8676,8 @@ _LT_EOF - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 - echo "$AR cru libconftest.a conftest.o" >&5 - $AR cru libconftest.a conftest.o 2>&5 -+ echo "$RANLIB libconftest.a" >&5 -+ $RANLIB libconftest.a 2>&5 - cat > conftest.c << _LT_EOF - int main() { return 0;} - _LT_EOF -@@ -8247,6 +8746,16 @@ done - - - -+func_stripname_cnf () -+{ -+ case ${2} in -+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -+ esac -+} # func_stripname_cnf -+ -+ -+ - - - # Set options -@@ -8375,7 +8884,8 @@ fi - LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. 
--LIBTOOL='$(SHELL) $(top_builddir)/libtool' -+LIBTOOL='$(SHELL) $(top_builddir)' -+LIBTOOL="$LIBTOOL/${host_alias}-libtool" - - - -@@ -8464,7 +8974,7 @@ aix3*) - esac - - # Global variables: --ofile=libtool -+ofile=${host_alias}-libtool - can_build_shared=yes - - # All known linkers require a `.a' archive for static linking (except MSVC, -@@ -8762,8 +9272,6 @@ fi - lt_prog_compiler_pic= - lt_prog_compiler_static= - --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 --$as_echo_n "checking for $compiler option to produce PIC... " >&6; } - - if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' -@@ -8929,6 +9437,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - lt_prog_compiler_pic='--shared' - lt_prog_compiler_static='--static' - ;; -+ nagfor*) -+ # NAG Fortran compiler -+ lt_prog_compiler_wl='-Wl,-Wl,,' -+ lt_prog_compiler_pic='-PIC' -+ lt_prog_compiler_static='-Bstatic' -+ ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -8991,7 +9505,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - case $cc_basename in -- f77* | f90* | f95*) -+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl='-Qoption ld ';; - *) - lt_prog_compiler_wl='-Wl,';; -@@ -9048,13 +9562,17 @@ case $host_os in - lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" - ;; - esac --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 --$as_echo "$lt_prog_compiler_pic" >&6; } -- -- -- -- - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -+$as_echo_n "checking for $compiler option to produce PIC... " >&6; } -+if test "${lt_cv_prog_compiler_pic+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 -+$as_echo "$lt_cv_prog_compiler_pic" >&6; } -+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic - - # - # Check to make sure the PIC flag actually works. -@@ -9115,6 +9633,11 @@ fi - - - -+ -+ -+ -+ -+ - # - # Check to make sure the static flag actually works. 
- # -@@ -9465,7 +9988,8 @@ _LT_EOF - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes -- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' -+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -@@ -9564,12 +10088,12 @@ _LT_EOF - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec= - hardcode_libdir_flag_spec_ld='-rpath $libdir' -- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' -+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -9583,8 +10107,8 @@ _LT_EOF - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -9602,8 +10126,8 @@ _LT_EOF - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9649,8 +10173,8 @@ _LT_EOF - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ 
archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9780,7 +10304,13 @@ _LT_EOF - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9793,22 +10323,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" -@@ -9820,7 +10357,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - else - # Determine the default libpath from the value encoded in an - # empty executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9833,22 +10376,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. 
-+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, -@@ -9892,21 +10442,64 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is -- # no search path for DLLs. -- hardcode_libdir_flag_spec=' ' -- allow_undefined_flag=unsupported -- # Tell ltmain to make .lib files, not .a files. -- libext=lib -- # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=".dll" -- # FIXME: Setting linknames here is a bad hack. -- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -- # The linker will automatically build a .lib file if we build a DLL. -- old_archive_from_new_cmds='true' -- # FIXME: Should let the user specify the lib program. -- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -- fix_srcfile_path='`cygpath -w "$srcfile"`' -- enable_shared_with_static_runtimes=yes -+ # no search path for DLLs. -+ case $cc_basename in -+ cl*) -+ # Native MSVC -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ always_export_symbols=yes -+ file_list_spec='@' -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' -+ # The linker will not automatically build a static lib if we build a DLL. 
-+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' -+ enable_shared_with_static_runtimes=yes -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ # Don't use ranlib -+ old_postinstall_cmds='chmod 644 $oldlib' -+ postlink_cmds='lt_outputfile="@OUTPUT@"~ -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' -+ ;; -+ *) -+ # Assume MSVC wrapper -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -+ # The linker will automatically build a .lib file if we build a DLL. -+ old_archive_from_new_cmds='true' -+ # FIXME: Should let the user specify the lib program. -+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -+ enable_shared_with_static_runtimes=yes -+ ;; -+ esac - ;; - - darwin* | rhapsody*) -@@ -9967,7 +10560,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. - freebsd* | dragonfly*) -- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no -@@ -9975,7 +10568,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux9*) - if test "$GCC" = yes; then -- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' -+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -@@ -9991,7 +10584,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -10015,10 +10608,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname 
${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else -@@ -10097,23 +10690,36 @@ fi - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -- save_LDFLAGS="$LDFLAGS" -- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ # This should be the same for all languages, so no per-tag cache variable. -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } -+if test "${lt_cv_irix_exported_symbol+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. 
*/ --int foo(void) {} -+int foo (void) { return 0; } - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -- -+ lt_cv_irix_exported_symbol=yes -+else -+ lt_cv_irix_exported_symbol=no - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS="$save_LDFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -+$as_echo "$lt_cv_irix_exported_symbol" >&6; } -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -+ fi - else - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' -@@ -10198,7 +10804,7 @@ rm -f core conftest.err conftest.$ac_objext \ - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' -@@ -10217,9 +10823,9 @@ rm -f core conftest.err conftest.$ac_objext \ - no_undefined_flag=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' -- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) -@@ -10795,8 +11401,9 @@ cygwin* | mingw* | pw32* | cegcc*) - need_version=no - need_lib_prefix=no - -- case $GCC,$host_os in -- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) -+ case $GCC,$cc_basename in -+ yes,*) -+ # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by 
postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ -@@ -10829,13 +11436,71 @@ cygwin* | mingw* | pw32* | cegcc*) - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac -+ dynamic_linker='Win32 ld.exe' -+ ;; -+ -+ *,cl*) -+ # Native MSVC -+ libname_spec='$name' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' -+ -+ case $build_os in -+ mingw*) -+ sys_lib_search_path_spec= -+ lt_save_ifs=$IFS -+ IFS=';' -+ for lt_path in $LIB -+ do -+ IFS=$lt_save_ifs -+ # Let DOS variable expansion print the short 8.3 style file name. -+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` -+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" -+ done -+ IFS=$lt_save_ifs -+ # Convert to MSYS style. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` -+ ;; -+ cygwin*) -+ # Convert to unix form, then to dos form, then back to unix form -+ # but this time dos style (no spaces!) so that the unix form looks -+ # like /cygdrive/c/PROGRA~1:/cygdr... -+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` -+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` -+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ ;; -+ *) -+ sys_lib_search_path_spec="$LIB" -+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then -+ # It is most probably a Windows format PATH. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -+ else -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ fi -+ # FIXME: find the short name or the path components, as spaces are -+ # common. (e.g. "Program Files" -> "PROGRA~1") -+ ;; -+ esac -+ -+ # DLL is installed to $(libdir)/../bin by postinstall_cmds -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ -+ dldir=$destdir/`dirname \$dlpath`~ -+ test -d \$dldir || mkdir -p \$dldir~ -+ $install_prog $dir/$dlname \$dldir/$dlname' -+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ -+ dlpath=$dir/\$dldll~ -+ $RM \$dlpath' -+ shlibpath_overrides_runpath=yes -+ dynamic_linker='Win32 link.exe' - ;; - - *) -+ # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' -+ dynamic_linker='Win32 ld.exe' - ;; - esac -- dynamic_linker='Win32 ld.exe' - # FIXME: first we should search . and the directory the executable is in - shlibpath_var=PATH - ;; -@@ -10927,7 +11592,7 @@ haiku*) - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes -- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' -+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -@@ -11767,10 +12432,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. 
*/ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -11873,10 +12538,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -12268,6 +12933,7 @@ $RM -r conftest* - - # Allow CC to be a program name with arguments. - lt_save_CC=$CC -+ lt_save_CFLAGS=$CFLAGS - lt_save_LD=$LD - lt_save_GCC=$GCC - GCC=$GXX -@@ -12285,6 +12951,7 @@ $RM -r conftest* - fi - test -z "${LDCXX+set}" || LD=$LDCXX - CC=${CXX-"c++"} -+ CFLAGS=$CXXFLAGS - compiler=$CC - compiler_CXX=$CC - for cc_temp in $compiler""; do -@@ -12567,7 +13234,13 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie - allow_undefined_flag_CXX='-berok' - # Determine the default libpath from the value encoded in an empty - # executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath__CXX+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -12580,22 +13253,29 @@ main () - _ACEOF - if ac_fn_cxx_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath__CXX"; then -+ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath__CXX"; then -+ lt_cv_aix_libpath__CXX="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath__CXX -+fi - - hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" - -@@ -12608,7 +13288,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - else - # Determine the default libpath from the value encoded in an - # empty executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath__CXX+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. 
*/ - - int -@@ -12621,22 +13307,29 @@ main () - _ACEOF - if ac_fn_cxx_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath__CXX"; then -+ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath__CXX"; then -+ lt_cv_aix_libpath__CXX="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath__CXX -+fi - - hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, -@@ -12679,29 +13372,75 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - ;; - - cygwin* | mingw* | pw32* | cegcc*) -- # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, -- # as there is no search path for DLLs. -- hardcode_libdir_flag_spec_CXX='-L$libdir' -- export_dynamic_flag_spec_CXX='${wl}--export-all-symbols' -- allow_undefined_flag_CXX=unsupported -- always_export_symbols_CXX=no -- enable_shared_with_static_runtimes_CXX=yes -- -- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- # If the export-symbols file already is a .def file (1st line -- # is EXPORTS), use it as is; otherwise, prepend... -- archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -- cp $export_symbols $output_objdir/$soname.def; -- else -- echo EXPORTS > $output_objdir/$soname.def; -- cat $export_symbols >> $output_objdir/$soname.def; -- fi~ -- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- else -- ld_shlibs_CXX=no -- fi -- ;; -+ case $GXX,$cc_basename in -+ ,cl* | no,cl*) -+ # Native MSVC -+ # hardcode_libdir_flag_spec is actually meaningless, as there is -+ # no search path for DLLs. -+ hardcode_libdir_flag_spec_CXX=' ' -+ allow_undefined_flag_CXX=unsupported -+ always_export_symbols_CXX=yes -+ file_list_spec_CXX='@' -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. 
-+ archive_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' -+ # The linker will not automatically build a static lib if we build a DLL. -+ # _LT_TAGVAR(old_archive_from_new_cmds, CXX)='true' -+ enable_shared_with_static_runtimes_CXX=yes -+ # Don't use ranlib -+ old_postinstall_cmds_CXX='chmod 644 $oldlib' -+ postlink_cmds_CXX='lt_outputfile="@OUTPUT@"~ -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ func_to_tool_file "$lt_outputfile"~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' -+ ;; -+ *) -+ # g++ -+ # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, -+ # as there is no search path for DLLs. -+ hardcode_libdir_flag_spec_CXX='-L$libdir' -+ export_dynamic_flag_spec_CXX='${wl}--export-all-symbols' -+ allow_undefined_flag_CXX=unsupported -+ always_export_symbols_CXX=no -+ enable_shared_with_static_runtimes_CXX=yes -+ -+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -+ archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ # If the export-symbols file already is a .def file (1st line -+ # is EXPORTS), use it as is; otherwise, prepend... 
-+ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ cp $export_symbols $output_objdir/$soname.def; -+ else -+ echo EXPORTS > $output_objdir/$soname.def; -+ cat $export_symbols >> $output_objdir/$soname.def; -+ fi~ -+ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ else -+ ld_shlibs_CXX=no -+ fi -+ ;; -+ esac -+ ;; - darwin* | rhapsody*) - - -@@ -12807,7 +13546,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - ;; - *) - if test "$GXX" = yes; then -- archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' -+ archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - # FIXME: insert proper C++ library support - ld_shlibs_CXX=no -@@ -12878,10 +13617,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - ia64*) -- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - *) -- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - esac - fi -@@ -12922,9 +13661,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - *) - if test "$GXX" = yes; then - if test "$with_gnu_ld" = no; then -- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else -- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' -+ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' - fi - fi - link_all_deplibs_CXX=yes -@@ -12994,20 +13733,20 @@ if test -z "$aix_libpath"; then 
aix_libpath="/usr/lib:/lib"; fi - prelink_cmds_CXX='tpldir=Template.dir~ - rm -rf $tpldir~ - $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ -- compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"' -+ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' - old_archive_cmds_CXX='tpldir=Template.dir~ - rm -rf $tpldir~ - $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ -- $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~ -+ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ - $RANLIB $oldlib' - archive_cmds_CXX='tpldir=Template.dir~ - rm -rf $tpldir~ - $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' -+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' - archive_expsym_cmds_CXX='tpldir=Template.dir~ - rm -rf $tpldir~ - $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' -+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' - ;; - *) # Version 6 and above use weak symbols - archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' -@@ -13202,7 +13941,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - ;; - *) -- archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - ;; - esac - -@@ -13248,7 +13987,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - solaris*) - case $cc_basename in -- CC*) -+ CC* | sunCC*) - # Sun C++ 4.2, 5.x and Centerline C++ - archive_cmds_need_lc_CXX=yes - no_undefined_flag_CXX=' -zdefs' -@@ -13289,9 +14028,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - if test "$GXX" = yes && test "$with_gnu_ld" = no; then - no_undefined_flag_CXX=' ${wl}-z ${wl}defs' - if $CC --version | $GREP -v '^2\.7' > /dev/null; then -- archive_cmds_CXX='$CC -shared 
-nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' -+ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when -@@ -13426,6 +14165,13 @@ private: - }; - _LT_EOF - -+ -+_lt_libdeps_save_CFLAGS=$CFLAGS -+case "$CC $CFLAGS " in #( -+*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; -+*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; -+esac -+ - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? -@@ -13439,7 +14185,7 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - pre_test_object_deps_done=no - - for p in `eval "$output_verbose_link_cmd"`; do -- case $p in -+ case ${prev}${p} in - - -L* | -R* | -l*) - # Some compilers place space between "-{L,R}" and the path. -@@ -13448,13 +14194,22 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - test $p = "-R"; then - prev=$p - continue -- else -- prev= - fi - -+ # Expand the sysroot to ease extracting the directories later. -+ if test -z "$prev"; then -+ case $p in -+ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; -+ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; -+ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; -+ esac -+ fi -+ case $p in -+ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; -+ esac - if test "$pre_test_object_deps_done" = no; then -- case $p in -- -L* | -R*) -+ case ${prev} in -+ -L | -R) - # Internal compiler library paths should come after those - # provided the user. The postdeps already come after the - # user supplied libs so there is no need to process them. -@@ -13474,8 +14229,10 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - postdeps_CXX="${postdeps_CXX} ${prev}${p}" - fi - fi -+ prev= - ;; - -+ *.lto.$objext) ;; # Ignore GCC LTO objects - *.$objext) - # This assumes that the test object file only shows up - # once in the compiler output. -@@ -13511,6 +14268,7 @@ else - fi - - $RM -f confest.$objext -+CFLAGS=$_lt_libdeps_save_CFLAGS - - # PORTME: override above test on systems where it is broken - case $host_os in -@@ -13546,7 +14304,7 @@ linux*) - - solaris*) - case $cc_basename in -- CC*) -+ CC* | sunCC*) - # The more standards-conforming stlport4 library is - # incompatible with the Cstd library. Avoid specifying - # it if it's in CXXFLAGS. Ignore libCrun as -@@ -13611,8 +14369,6 @@ fi - lt_prog_compiler_pic_CXX= - lt_prog_compiler_static_CXX= - --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 --$as_echo_n "checking for $compiler option to produce PIC... " >&6; } - - # C++ specific cases for pic, static, wl, etc. 
- if test "$GXX" = yes; then -@@ -13717,6 +14473,11 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - ;; - esac - ;; -+ mingw* | cygwin* | os2* | pw32* | cegcc*) -+ # This hack is so that the source file can tell whether it is being -+ # built for inclusion in a dll (and should export symbols for example). -+ lt_prog_compiler_pic_CXX='-DDLL_EXPORT' -+ ;; - dgux*) - case $cc_basename in - ec++*) -@@ -13869,7 +14630,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - ;; - solaris*) - case $cc_basename in -- CC*) -+ CC* | sunCC*) - # Sun C++ 4.2, 5.x and Centerline C++ - lt_prog_compiler_pic_CXX='-KPIC' - lt_prog_compiler_static_CXX='-Bstatic' -@@ -13934,10 +14695,17 @@ case $host_os in - lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC" - ;; - esac --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic_CXX" >&5 --$as_echo "$lt_prog_compiler_pic_CXX" >&6; } -- - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -+$as_echo_n "checking for $compiler option to produce PIC... " >&6; } -+if test "${lt_cv_prog_compiler_pic_CXX+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_prog_compiler_pic_CXX=$lt_prog_compiler_pic_CXX -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_CXX" >&5 -+$as_echo "$lt_cv_prog_compiler_pic_CXX" >&6; } -+lt_prog_compiler_pic_CXX=$lt_cv_prog_compiler_pic_CXX - - # - # Check to make sure the PIC flag actually works. -@@ -13995,6 +14763,8 @@ fi - - - -+ -+ - # - # Check to make sure the static flag actually works. - # -@@ -14172,6 +14942,7 @@ fi - $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } - - export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' -+ exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' - case $host_os in - aix[4-9]*) - # If we're using GNU nm, then we don't want the "-C" option. 
-@@ -14186,15 +14957,20 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie - ;; - pw32*) - export_symbols_cmds_CXX="$ltdll_cmds" -- ;; -+ ;; - cygwin* | mingw* | cegcc*) -- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;/^.*[ ]__nm__/s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' -- ;; -+ case $cc_basename in -+ cl*) ;; -+ *) -+ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' -+ exclude_expsyms_CXX='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' -+ ;; -+ esac -+ ;; - *) - export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' -- ;; -+ ;; - esac -- exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' - - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 - $as_echo "$ld_shlibs_CXX" >&6; } -@@ -14457,8 +15233,9 @@ cygwin* | mingw* | pw32* | cegcc*) - need_version=no - need_lib_prefix=no - -- case $GCC,$host_os in -- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) -+ case $GCC,$cc_basename in -+ yes,*) -+ # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ -@@ -14490,13 +15267,71 @@ cygwin* | mingw* | pw32* | cegcc*) - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac -+ dynamic_linker='Win32 ld.exe' -+ ;; -+ -+ *,cl*) -+ # Native MSVC -+ libname_spec='$name' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' -+ -+ case $build_os in -+ mingw*) -+ sys_lib_search_path_spec= -+ lt_save_ifs=$IFS -+ IFS=';' -+ for lt_path in $LIB -+ do -+ IFS=$lt_save_ifs -+ # Let DOS variable expansion print the short 8.3 style file name. -+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` -+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" -+ done -+ IFS=$lt_save_ifs -+ # Convert to MSYS style. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` -+ ;; -+ cygwin*) -+ # Convert to unix form, then to dos form, then back to unix form -+ # but this time dos style (no spaces!) so that the unix form looks -+ # like /cygdrive/c/PROGRA~1:/cygdr... -+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` -+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` -+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ ;; -+ *) -+ sys_lib_search_path_spec="$LIB" -+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then -+ # It is most probably a Windows format PATH. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -+ else -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ fi -+ # FIXME: find the short name or the path components, as spaces are -+ # common. (e.g. 
"Program Files" -> "PROGRA~1") -+ ;; -+ esac -+ -+ # DLL is installed to $(libdir)/../bin by postinstall_cmds -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ -+ dldir=$destdir/`dirname \$dlpath`~ -+ test -d \$dldir || mkdir -p \$dldir~ -+ $install_prog $dir/$dlname \$dldir/$dlname' -+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ -+ dlpath=$dir/\$dldll~ -+ $RM \$dlpath' -+ shlibpath_overrides_runpath=yes -+ dynamic_linker='Win32 link.exe' - ;; - - *) -+ # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' -+ dynamic_linker='Win32 ld.exe' - ;; - esac -- dynamic_linker='Win32 ld.exe' - # FIXME: first we should search . and the directory the executable is in - shlibpath_var=PATH - ;; -@@ -14587,7 +15422,7 @@ haiku*) - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes -- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' -+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -@@ -15046,6 +15881,7 @@ fi - fi # test -n "$compiler" - - CC=$lt_save_CC -+ CFLAGS=$lt_save_CFLAGS - LDCXX=$LD - LD=$lt_save_LD - GCC=$lt_save_GCC -@@ -17982,13 +18818,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' - lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' - lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' - lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' - reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' - reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' - OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' - deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' - file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' -+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' -+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' -+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' -+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' - AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' - AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' -+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' - STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' - RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' - old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' -@@ -18003,14 +18846,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de - lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' 
-+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' -+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' - objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' - MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' --lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' -+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' - lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' - need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' -+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' - DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' - NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' - LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' -@@ -18043,12 +18889,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q - hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' - inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' - link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' --fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' - always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' - export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' - exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' - include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' - prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' -+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' - file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' - variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' - need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' -@@ -18087,8 +18933,8 @@ old_archive_cmds_CXX='`$ECHO "$old_archive_cmds_CXX" | $SED "$delay_single_quote - compiler_CXX='`$ECHO "$compiler_CXX" | $SED "$delay_single_quote_subst"`' - GCC_CXX='`$ECHO "$GCC_CXX" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "$lt_prog_compiler_no_builtin_flag_CXX" | $SED "$delay_single_quote_subst"`' --lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_pic_CXX='`$ECHO "$lt_prog_compiler_pic_CXX" | $SED "$delay_single_quote_subst"`' -+lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_static_CXX='`$ECHO "$lt_prog_compiler_static_CXX" | $SED "$delay_single_quote_subst"`' - lt_cv_prog_compiler_c_o_CXX='`$ECHO "$lt_cv_prog_compiler_c_o_CXX" | $SED "$delay_single_quote_subst"`' - archive_cmds_need_lc_CXX='`$ECHO "$archive_cmds_need_lc_CXX" | $SED "$delay_single_quote_subst"`' -@@ -18115,12 +18961,12 @@ hardcode_shlibpath_var_CXX='`$ECHO "$hardcode_shlibpath_var_CXX" | $SED "$delay_ - hardcode_automatic_CXX='`$ECHO 
"$hardcode_automatic_CXX" | $SED "$delay_single_quote_subst"`' - inherit_rpath_CXX='`$ECHO "$inherit_rpath_CXX" | $SED "$delay_single_quote_subst"`' - link_all_deplibs_CXX='`$ECHO "$link_all_deplibs_CXX" | $SED "$delay_single_quote_subst"`' --fix_srcfile_path_CXX='`$ECHO "$fix_srcfile_path_CXX" | $SED "$delay_single_quote_subst"`' - always_export_symbols_CXX='`$ECHO "$always_export_symbols_CXX" | $SED "$delay_single_quote_subst"`' - export_symbols_cmds_CXX='`$ECHO "$export_symbols_cmds_CXX" | $SED "$delay_single_quote_subst"`' - exclude_expsyms_CXX='`$ECHO "$exclude_expsyms_CXX" | $SED "$delay_single_quote_subst"`' - include_expsyms_CXX='`$ECHO "$include_expsyms_CXX" | $SED "$delay_single_quote_subst"`' - prelink_cmds_CXX='`$ECHO "$prelink_cmds_CXX" | $SED "$delay_single_quote_subst"`' -+postlink_cmds_CXX='`$ECHO "$postlink_cmds_CXX" | $SED "$delay_single_quote_subst"`' - file_list_spec_CXX='`$ECHO "$file_list_spec_CXX" | $SED "$delay_single_quote_subst"`' - hardcode_action_CXX='`$ECHO "$hardcode_action_CXX" | $SED "$delay_single_quote_subst"`' - compiler_lib_search_dirs_CXX='`$ECHO "$compiler_lib_search_dirs_CXX" | $SED "$delay_single_quote_subst"`' -@@ -18158,8 +19004,13 @@ reload_flag \ - OBJDUMP \ - deplibs_check_method \ - file_magic_cmd \ -+file_magic_glob \ -+want_nocaseglob \ -+DLLTOOL \ -+sharedlib_from_linklib_cmd \ - AR \ - AR_FLAGS \ -+archiver_list_spec \ - STRIP \ - RANLIB \ - CC \ -@@ -18169,12 +19020,14 @@ lt_cv_sys_global_symbol_pipe \ - lt_cv_sys_global_symbol_to_cdecl \ - lt_cv_sys_global_symbol_to_c_name_address \ - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ -+nm_file_list_spec \ - lt_prog_compiler_no_builtin_flag \ --lt_prog_compiler_wl \ - lt_prog_compiler_pic \ -+lt_prog_compiler_wl \ - lt_prog_compiler_static \ - lt_cv_prog_compiler_c_o \ - need_locks \ -+MANIFEST_TOOL \ - DSYMUTIL \ - NMEDIT \ - LIPO \ -@@ -18190,7 +19043,6 @@ no_undefined_flag \ - hardcode_libdir_flag_spec \ - hardcode_libdir_flag_spec_ld \ - hardcode_libdir_separator \ --fix_srcfile_path \ - exclude_expsyms \ - include_expsyms \ - file_list_spec \ -@@ -18212,8 +19064,8 @@ LD_CXX \ - reload_flag_CXX \ - compiler_CXX \ - lt_prog_compiler_no_builtin_flag_CXX \ --lt_prog_compiler_wl_CXX \ - lt_prog_compiler_pic_CXX \ -+lt_prog_compiler_wl_CXX \ - lt_prog_compiler_static_CXX \ - lt_cv_prog_compiler_c_o_CXX \ - export_dynamic_flag_spec_CXX \ -@@ -18225,7 +19077,6 @@ no_undefined_flag_CXX \ - hardcode_libdir_flag_spec_CXX \ - hardcode_libdir_flag_spec_ld_CXX \ - hardcode_libdir_separator_CXX \ --fix_srcfile_path_CXX \ - exclude_expsyms_CXX \ - include_expsyms_CXX \ - file_list_spec_CXX \ -@@ -18259,6 +19110,7 @@ module_cmds \ - module_expsym_cmds \ - export_symbols_cmds \ - prelink_cmds \ -+postlink_cmds \ - postinstall_cmds \ - postuninstall_cmds \ - finish_cmds \ -@@ -18273,7 +19125,8 @@ archive_expsym_cmds_CXX \ - module_cmds_CXX \ - module_expsym_cmds_CXX \ - export_symbols_cmds_CXX \ --prelink_cmds_CXX; do -+prelink_cmds_CXX \ -+postlink_cmds_CXX; do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[\\\\\\\`\\"\\\$]*) - eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" -@@ -19066,7 +19919,8 @@ $as_echo X"$file" | - # NOTE: Changes made to this file will be lost: look at ltmain.sh. - # - # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, --# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. 
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, -+# Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is part of GNU Libtool. -@@ -19169,19 +20023,42 @@ SP2NL=$lt_lt_SP2NL - # turn newlines into spaces. - NL2SP=$lt_lt_NL2SP - -+# convert \$build file names to \$host format. -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+ -+# convert \$build files to toolchain format. -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+ - # An object symbol dumper. - OBJDUMP=$lt_OBJDUMP - - # Method to check whether dependent libraries are shared objects. - deplibs_check_method=$lt_deplibs_check_method - --# Command to use when deplibs_check_method == "file_magic". -+# Command to use when deplibs_check_method = "file_magic". - file_magic_cmd=$lt_file_magic_cmd - -+# How to find potential files when deplibs_check_method = "file_magic". -+file_magic_glob=$lt_file_magic_glob -+ -+# Find potential files using nocaseglob when deplibs_check_method = "file_magic". -+want_nocaseglob=$lt_want_nocaseglob -+ -+# DLL creation program. -+DLLTOOL=$lt_DLLTOOL -+ -+# Command to associate shared and link libraries. -+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd -+ - # The archiver. - AR=$lt_AR -+ -+# Flags to create an archive. - AR_FLAGS=$lt_AR_FLAGS - -+# How to feed a file listing to the archiver. -+archiver_list_spec=$lt_archiver_list_spec -+ - # A symbol stripping program. - STRIP=$lt_STRIP - -@@ -19211,6 +20088,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - # Transform the output of nm in a C name address pair when lib prefix is needed. - global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - -+# Specify filename containing input files for \$NM. -+nm_file_list_spec=$lt_nm_file_list_spec -+ -+# The root where to search for dependent libraries,and in which our libraries should be installed. -+lt_sysroot=$lt_sysroot -+ - # The name of the directory that contains temporary libtool files. - objdir=$objdir - -@@ -19220,6 +20103,9 @@ MAGIC_CMD=$MAGIC_CMD - # Must we lock files when doing compilation? - need_locks=$lt_need_locks - -+# Manifest tool. -+MANIFEST_TOOL=$lt_MANIFEST_TOOL -+ - # Tool to manipulate archived DWARF debug symbol files on Mac OS X. - DSYMUTIL=$lt_DSYMUTIL - -@@ -19334,12 +20220,12 @@ with_gcc=$GCC - # Compiler flag to turn off builtin functions. - no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag - --# How to pass a linker flag through the compiler. --wl=$lt_lt_prog_compiler_wl -- - # Additional compiler flags for building library objects. - pic_flag=$lt_lt_prog_compiler_pic - -+# How to pass a linker flag through the compiler. -+wl=$lt_lt_prog_compiler_wl -+ - # Compiler flag to prevent dynamic linking. - link_static_flag=$lt_lt_prog_compiler_static - -@@ -19426,9 +20312,6 @@ inherit_rpath=$inherit_rpath - # Whether libtool must link a program against all its dependency libraries. - link_all_deplibs=$link_all_deplibs - --# Fix the shell variable \$srcfile for the compiler. --fix_srcfile_path=$lt_fix_srcfile_path -- - # Set to "yes" if exported symbols are required. - always_export_symbols=$always_export_symbols - -@@ -19444,6 +20327,9 @@ include_expsyms=$lt_include_expsyms - # Commands necessary for linking programs (against libraries) with templates. - prelink_cmds=$lt_prelink_cmds - -+# Commands necessary for finishing linking programs. -+postlink_cmds=$lt_postlink_cmds -+ - # Specify filename containing input files. 
- file_list_spec=$lt_file_list_spec - -@@ -19490,210 +20376,169 @@ ltmain="$ac_aux_dir/ltmain.sh" - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? -- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- case $xsi_shell in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac --} -- --# func_basename file --func_basename () --{ -- func_basename_result="${1##*/}" --} -- --# func_dirname_and_basename file append nondir_replacement --# perform func_basename and func_dirname in a single function --# call: --# dirname: Compute the dirname of FILE. If nonempty, --# add APPEND to the result, otherwise set result --# to NONDIR_REPLACEMENT. --# value returned in "$func_dirname_result" --# basename: Compute filename of FILE. --# value retuned in "$func_basename_result" --# Implementation must be kept synchronized with func_dirname --# and func_basename. For efficiency, we do not delegate to --# those functions but instead duplicate the functionality here. --func_dirname_and_basename () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac -- func_basename_result="${1##*/}" --} -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --func_stripname () --{ -- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -- # positional parameters, so assign one to ordinary parameter first. -- func_stripname_result=${3} -- func_stripname_result=${func_stripname_result#"${1}"} -- func_stripname_result=${func_stripname_result%"${2}"} --} -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=${1%%=*} -- func_opt_split_arg=${1#*=} --} -- --# func_lo2o object --func_lo2o () --{ -- case ${1} in -- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -- *) func_lo2o_result=${1} ;; -- esac --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=${1%.*}.lo --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=$(( $* )) --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=${#1} --} -- --_LT_EOF -- ;; -- *) # Bourne compatible functions. -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- # Extract subdirectory from the argument. -- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -- if test "X$func_dirname_result" = "X${1}"; then -- func_dirname_result="${3}" -- else -- func_dirname_result="$func_dirname_result${2}" -- fi --} -- --# func_basename file --func_basename () --{ -- func_basename_result=`$ECHO "${1}" | $SED "$basename"` --} -- -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. 
--# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --# func_strip_suffix prefix name --func_stripname () --{ -- case ${2} in -- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -- esac --} -- --# sed scripts: --my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' --my_sed_long_arg='1s/^-[^=]*=//' -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` -- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` --} -- --# func_lo2o object --func_lo2o () --{ -- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=`expr "$@"` --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` --} -- --_LT_EOF --esac -- --case $lt_shell_append in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1+=\$2" --} --_LT_EOF -- ;; -- *) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1=\$$1\$2" --} -- --_LT_EOF -- ;; -- esac -- -- -- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- mv -f "$cfgfile" "$ofile" || -+ sed '$q' "$ltmain" >> "$cfgfile" \ -+ || (rm -f "$cfgfile"; exit 1) -+ -+ if test x"$xsi_shell" = xyes; then -+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -+func_dirname ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_basename ()$/,/^} # func_basename /c\ -+func_basename ()\ -+{\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -+func_dirname_and_basename ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -+func_stripname ()\ -+{\ -+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -+\ # positional parameters, so assign one to ordinary parameter first.\ -+\ func_stripname_result=${3}\ -+\ func_stripname_result=${func_stripname_result#"${1}"}\ -+\ func_stripname_result=${func_stripname_result%"${2}"}\ -+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -+func_split_long_opt ()\ -+{\ -+\ func_split_long_opt_name=${1%%=*}\ -+\ func_split_long_opt_arg=${1#*=}\ -+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -+func_split_short_opt ()\ -+{\ -+\ func_split_short_opt_arg=${1#??}\ -+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -+func_lo2o ()\ -+{\ -+\ case ${1} in\ -+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -+\ *) func_lo2o_result=${1} ;;\ -+\ esac\ -+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_xform ()$/,/^} # func_xform /c\ -+func_xform ()\ -+{\ -+ func_xform_result=${1%.*}.lo\ -+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_arith ()$/,/^} # func_arith /c\ -+func_arith ()\ -+{\ -+ func_arith_result=$(( $* ))\ -+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_len ()$/,/^} # func_len /c\ -+func_len ()\ -+{\ -+ func_len_result=${#1}\ -+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ sed -e '/^func_append ()$/,/^} # func_append /c\ -+func_append ()\ -+{\ -+ eval "${1}+=\\${2}"\ -+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -+func_append_quoted ()\ -+{\ -+\ func_quote_for_eval "${2}"\ -+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -+fi -+ -+ -+ mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" - -@@ -19721,12 +20566,12 @@ with_gcc=$GCC_CXX - # Compiler flag to turn off builtin functions. - no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX - --# How to pass a linker flag through the compiler. --wl=$lt_lt_prog_compiler_wl_CXX -- - # Additional compiler flags for building library objects. - pic_flag=$lt_lt_prog_compiler_pic_CXX - -+# How to pass a linker flag through the compiler. -+wl=$lt_lt_prog_compiler_wl_CXX -+ - # Compiler flag to prevent dynamic linking. - link_static_flag=$lt_lt_prog_compiler_static_CXX - -@@ -19813,9 +20658,6 @@ inherit_rpath=$inherit_rpath_CXX - # Whether libtool must link a program against all its dependency libraries. - link_all_deplibs=$link_all_deplibs_CXX - --# Fix the shell variable \$srcfile for the compiler. --fix_srcfile_path=$lt_fix_srcfile_path_CXX -- - # Set to "yes" if exported symbols are required. - always_export_symbols=$always_export_symbols_CXX - -@@ -19831,6 +20673,9 @@ include_expsyms=$lt_include_expsyms_CXX - # Commands necessary for linking programs (against libraries) with templates. - prelink_cmds=$lt_prelink_cmds_CXX - -+# Commands necessary for finishing linking programs. -+postlink_cmds=$lt_postlink_cmds_CXX -+ - # Specify filename containing input files. - file_list_spec=$lt_file_list_spec_CXX - -diff --git a/libtool.m4 b/libtool.m4 -index 24d13f3..e45fdc6 100644 ---- a/libtool.m4 -+++ b/libtool.m4 -@@ -1,7 +1,8 @@ - # libtool.m4 - Configure libtool for the host system. -*-Autoconf-*- - # - # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, --# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. -+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, -+# Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is free software; the Free Software Foundation gives -@@ -10,7 +11,8 @@ - - m4_define([_LT_COPYING], [dnl - # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, --# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. 
-+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, -+# Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is part of GNU Libtool. -@@ -37,7 +39,7 @@ m4_define([_LT_COPYING], [dnl - # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - ]) - --# serial 56 LT_INIT -+# serial 57 LT_INIT - - - # LT_PREREQ(VERSION) -@@ -92,7 +94,8 @@ _LT_SET_OPTIONS([$0], [$1]) - LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. --LIBTOOL='$(SHELL) $(top_builddir)/libtool' -+LIBTOOL='$(SHELL) $(top_builddir)' -+LIBTOOL="$LIBTOOL/${host_alias}-libtool" - AC_SUBST(LIBTOOL)dnl - - _LT_SETUP -@@ -166,10 +169,13 @@ _LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl - dnl - m4_require([_LT_FILEUTILS_DEFAULTS])dnl - m4_require([_LT_CHECK_SHELL_FEATURES])dnl -+m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl - m4_require([_LT_CMD_RELOAD])dnl - m4_require([_LT_CHECK_MAGIC_METHOD])dnl -+m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl - m4_require([_LT_CMD_OLD_ARCHIVE])dnl - m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl -+m4_require([_LT_WITH_SYSROOT])dnl - - _LT_CONFIG_LIBTOOL_INIT([ - # See if we are running on zsh, and set the options which allow our -@@ -199,7 +205,7 @@ aix3*) - esac - - # Global variables: --ofile=libtool -+ofile=${host_alias}-libtool - can_build_shared=yes - - # All known linkers require a `.a' archive for static linking (except MSVC, -@@ -632,7 +638,7 @@ m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl - m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION]) - configured by $[0], generated by m4_PACKAGE_STRING. - --Copyright (C) 2009 Free Software Foundation, Inc. -+Copyright (C) 2010 Free Software Foundation, Inc. - This config.lt script is free software; the Free Software Foundation - gives unlimited permision to copy, distribute and modify it." - -@@ -746,15 +752,12 @@ _LT_EOF - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? -- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -+ sed '$q' "$ltmain" >> "$cfgfile" \ -+ || (rm -f "$cfgfile"; exit 1) - -- _LT_PROG_XSI_SHELLFNS -+ _LT_PROG_REPLACE_SHELLFNS - -- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- mv -f "$cfgfile" "$ofile" || -+ mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" - ], -@@ -980,6 +983,8 @@ _LT_EOF - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD - echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD - $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD -+ echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD -+ $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD - cat > conftest.c << _LT_EOF - int main() { return 0;} - _LT_EOF -@@ -1069,30 +1074,41 @@ m4_defun([_LT_DARWIN_LINKER_FEATURES], - fi - ]) - --# _LT_SYS_MODULE_PATH_AIX --# ----------------------- -+# _LT_SYS_MODULE_PATH_AIX([TAGNAME]) -+# ---------------------------------- - # Links a minimal program and checks the executable - # for the system default hardcoded library path. In most cases, - # this is /usr/lib:/lib, but when the MPI compilers are used - # the location of the communication and MPI libs are included too. - # If we don't find anything, use the default library path according - # to the aix ld manual. 
-+# Store the results from the different compilers for each TAGNAME. -+# Allow to override them for all tags through lt_cv_aix_libpath. - m4_defun([_LT_SYS_MODULE_PATH_AIX], - [m4_require([_LT_DECL_SED])dnl --AC_LINK_IFELSE(AC_LANG_PROGRAM,[ --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi],[]) --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])], -+ [AC_LINK_IFELSE([AC_LANG_PROGRAM],[ -+ lt_aix_libpath_sed='[ -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }]' -+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then -+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi],[]) -+ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then -+ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib" -+ fi -+ ]) -+ aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1]) -+fi - ])# _LT_SYS_MODULE_PATH_AIX - - -@@ -1117,7 +1133,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - - AC_MSG_CHECKING([how to print strings]) - # Test print first, because it will be a builtin if present. --if test "X`print -r -- -n 2>/dev/null`" = X-n && \ -+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ - test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='print -r --' - elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then -@@ -1161,6 +1177,39 @@ _LT_DECL([], [ECHO], [1], [An echo program that protects backslashes]) - ])# _LT_PROG_ECHO_BACKSLASH - - -+# _LT_WITH_SYSROOT -+# ---------------- -+AC_DEFUN([_LT_WITH_SYSROOT], -+[AC_MSG_CHECKING([for sysroot]) -+AC_ARG_WITH([libtool-sysroot], -+[ --with-libtool-sysroot[=DIR] Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified).], -+[], [with_libtool_sysroot=no]) -+ -+dnl lt_sysroot will always be passed unquoted. We quote it here -+dnl in case the user passed a directory name. 
-+lt_sysroot= -+case ${with_libtool_sysroot} in #( -+ yes) -+ if test "$GCC" = yes; then -+ lt_sysroot=`$CC --print-sysroot 2>/dev/null` -+ fi -+ ;; #( -+ /*) -+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` -+ ;; #( -+ no|'') -+ ;; #( -+ *) -+ AC_MSG_RESULT([${with_libtool_sysroot}]) -+ AC_MSG_ERROR([The sysroot must be an absolute path.]) -+ ;; -+esac -+ -+ AC_MSG_RESULT([${lt_sysroot:-no}]) -+_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl -+[dependent libraries, and in which our libraries should be installed.])]) -+ - # _LT_ENABLE_LOCK - # --------------- - m4_defun([_LT_ENABLE_LOCK], -@@ -1320,14 +1369,47 @@ need_locks="$enable_libtool_lock" - ])# _LT_ENABLE_LOCK - - -+# _LT_PROG_AR -+# ----------- -+m4_defun([_LT_PROG_AR], -+[AC_CHECK_TOOLS(AR, [ar], false) -+: ${AR=ar} -+: ${AR_FLAGS=cru} -+_LT_DECL([], [AR], [1], [The archiver]) -+_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive]) -+ -+AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file], -+ [lt_cv_ar_at_file=no -+ AC_COMPILE_IFELSE([AC_LANG_PROGRAM], -+ [echo conftest.$ac_objext > conftest.lst -+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD' -+ AC_TRY_EVAL([lt_ar_try]) -+ if test "$ac_status" -eq 0; then -+ # Ensure the archiver fails upon bogus file names. -+ rm -f conftest.$ac_objext libconftest.a -+ AC_TRY_EVAL([lt_ar_try]) -+ if test "$ac_status" -ne 0; then -+ lt_cv_ar_at_file=@ -+ fi -+ fi -+ rm -f conftest.* libconftest.a -+ ]) -+ ]) -+ -+if test "x$lt_cv_ar_at_file" = xno; then -+ archiver_list_spec= -+else -+ archiver_list_spec=$lt_cv_ar_at_file -+fi -+_LT_DECL([], [archiver_list_spec], [1], -+ [How to feed a file listing to the archiver]) -+])# _LT_PROG_AR -+ -+ - # _LT_CMD_OLD_ARCHIVE - # ------------------- - m4_defun([_LT_CMD_OLD_ARCHIVE], --[AC_CHECK_TOOL(AR, ar, false) --test -z "$AR" && AR=ar --test -z "$AR_FLAGS" && AR_FLAGS=cru --_LT_DECL([], [AR], [1], [The archiver]) --_LT_DECL([], [AR_FLAGS], [1]) -+[_LT_PROG_AR - - AC_CHECK_TOOL(STRIP, strip, :) - test -z "$STRIP" && STRIP=: -@@ -1623,7 +1705,7 @@ else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 - lt_status=$lt_dlunknown - cat > conftest.$ac_ext <<_LT_EOF --[#line __oline__ "configure" -+[#line $LINENO "configure" - #include "confdefs.h" - - #if HAVE_DLFCN_H -@@ -1667,10 +1749,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. 
*/ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -2210,8 +2292,9 @@ cygwin* | mingw* | pw32* | cegcc*) - need_version=no - need_lib_prefix=no - -- case $GCC,$host_os in -- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) -+ case $GCC,$cc_basename in -+ yes,*) -+ # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ -@@ -2244,13 +2327,71 @@ m4_if([$1], [],[ - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' - ;; - esac -+ dynamic_linker='Win32 ld.exe' -+ ;; -+ -+ *,cl*) -+ # Native MSVC -+ libname_spec='$name' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' -+ -+ case $build_os in -+ mingw*) -+ sys_lib_search_path_spec= -+ lt_save_ifs=$IFS -+ IFS=';' -+ for lt_path in $LIB -+ do -+ IFS=$lt_save_ifs -+ # Let DOS variable expansion print the short 8.3 style file name. -+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` -+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" -+ done -+ IFS=$lt_save_ifs -+ # Convert to MSYS style. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'` -+ ;; -+ cygwin*) -+ # Convert to unix form, then to dos form, then back to unix form -+ # but this time dos style (no spaces!) so that the unix form looks -+ # like /cygdrive/c/PROGRA~1:/cygdr... -+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` -+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` -+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ ;; -+ *) -+ sys_lib_search_path_spec="$LIB" -+ if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then -+ # It is most probably a Windows format PATH. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -+ else -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ fi -+ # FIXME: find the short name or the path components, as spaces are -+ # common. (e.g. "Program Files" -> "PROGRA~1") -+ ;; -+ esac -+ -+ # DLL is installed to $(libdir)/../bin by postinstall_cmds -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ -+ dldir=$destdir/`dirname \$dlpath`~ -+ test -d \$dldir || mkdir -p \$dldir~ -+ $install_prog $dir/$dlname \$dldir/$dlname' -+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ -+ dlpath=$dir/\$dldll~ -+ $RM \$dlpath' -+ shlibpath_overrides_runpath=yes -+ dynamic_linker='Win32 link.exe' - ;; - - *) -+ # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib' -+ dynamic_linker='Win32 ld.exe' - ;; - esac -- dynamic_linker='Win32 ld.exe' - # FIXME: first we should search . 
and the directory the executable is in - shlibpath_var=PATH - ;; -@@ -2342,7 +2483,7 @@ haiku*) - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes -- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' -+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -@@ -2950,6 +3091,11 @@ case $reload_flag in - esac - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in -+ cygwin* | mingw* | pw32* | cegcc*) -+ if test "$GCC" != yes; then -+ reload_cmds=false -+ fi -+ ;; - darwin*) - if test "$GCC" = yes; then - reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' -@@ -3016,7 +3162,8 @@ mingw* | pw32*) - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' -+ # Keep this pattern in sync with the one in func_win32_libid. -+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' - lt_cv_file_magic_cmd='$OBJDUMP -f' - fi - ;; -@@ -3167,6 +3314,21 @@ tpf*) - ;; - esac - ]) -+ -+file_magic_glob= -+want_nocaseglob=no -+if test "$build" = "$host"; then -+ case $host_os in -+ mingw* | pw32*) -+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then -+ want_nocaseglob=yes -+ else -+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"` -+ fi -+ ;; -+ esac -+fi -+ - file_magic_cmd=$lt_cv_file_magic_cmd - deplibs_check_method=$lt_cv_deplibs_check_method - test -z "$deplibs_check_method" && deplibs_check_method=unknown -@@ -3174,7 +3336,11 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown - _LT_DECL([], [deplibs_check_method], [1], - [Method to check whether dependent libraries are shared objects]) - _LT_DECL([], [file_magic_cmd], [1], -- [Command to use when deplibs_check_method == "file_magic"]) -+ [Command to use when deplibs_check_method = "file_magic"]) -+_LT_DECL([], [file_magic_glob], [1], -+ [How to find potential files when deplibs_check_method = "file_magic"]) -+_LT_DECL([], [want_nocaseglob], [1], -+ [Find potential files using nocaseglob when deplibs_check_method = "file_magic"]) - ])# _LT_CHECK_MAGIC_METHOD - - -@@ -3277,6 +3443,67 @@ dnl aclocal-1.4 backwards compatibility: - dnl AC_DEFUN([AM_PROG_NM], []) - dnl AC_DEFUN([AC_PROG_NM], []) - -+# _LT_CHECK_SHAREDLIB_FROM_LINKLIB -+# -------------------------------- -+# how to determine the name of the shared library -+# associated with a specific link library. 
-+# -- PORTME fill in with the dynamic library characteristics -+m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB], -+[m4_require([_LT_DECL_EGREP]) -+m4_require([_LT_DECL_OBJDUMP]) -+m4_require([_LT_DECL_DLLTOOL]) -+AC_CACHE_CHECK([how to associate runtime and link libraries], -+lt_cv_sharedlib_from_linklib_cmd, -+[lt_cv_sharedlib_from_linklib_cmd='unknown' -+ -+case $host_os in -+cygwin* | mingw* | pw32* | cegcc*) -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL -+ case `$DLLTOOL --help 2>&1` in -+ *--identify-strict*) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -+ ;; -+ *) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback -+ ;; -+ esac -+ ;; -+*) -+ # fallback: assume linklib IS sharedlib -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" -+ ;; -+esac -+]) -+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd -+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO -+ -+_LT_DECL([], [sharedlib_from_linklib_cmd], [1], -+ [Command to associate shared and link libraries]) -+])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB -+ -+ -+# _LT_PATH_MANIFEST_TOOL -+# ---------------------- -+# locate the manifest tool -+m4_defun([_LT_PATH_MANIFEST_TOOL], -+[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :) -+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt -+AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool], -+ [lt_cv_path_mainfest_tool=no -+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD -+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out -+ cat conftest.err >&AS_MESSAGE_LOG_FD -+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then -+ lt_cv_path_mainfest_tool=yes -+ fi -+ rm -f conftest*]) -+if test "x$lt_cv_path_mainfest_tool" != xyes; then -+ MANIFEST_TOOL=: -+fi -+_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl -+])# _LT_PATH_MANIFEST_TOOL -+ - - # LT_LIB_M - # -------- -@@ -3403,8 +3630,8 @@ esac - lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -3440,6 +3667,7 @@ for ac_symprfx in "" "_"; do - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - fi -+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" - - # Check to see that the pipe works correctly. 
- pipe_works=no -@@ -3473,6 +3701,18 @@ _LT_EOF - if $GREP ' nm_test_var$' "$nlist" >/dev/null; then - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext -+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime -+ relocations are performed -- see ld's documentation on pseudo-relocs. */ -+# define LT@&t@_DLSYM_CONST -+#elif defined(__osf__) -+/* This system does not cope well with relocations in const data. */ -+# define LT@&t@_DLSYM_CONST -+#else -+# define LT@&t@_DLSYM_CONST const -+#endif -+ - #ifdef __cplusplus - extern "C" { - #endif -@@ -3484,7 +3724,7 @@ _LT_EOF - cat <<_LT_EOF >> conftest.$ac_ext - - /* The mapping between symbol names and symbols. */ --const struct { -+LT@&t@_DLSYM_CONST struct { - const char *name; - void *address; - } -@@ -3510,15 +3750,15 @@ static const void *lt_preloaded_setup() { - _LT_EOF - # Now try linking the two files. - mv conftest.$ac_objext conftstm.$ac_objext -- lt_save_LIBS="$LIBS" -- lt_save_CFLAGS="$CFLAGS" -+ lt_globsym_save_LIBS=$LIBS -+ lt_globsym_save_CFLAGS=$CFLAGS - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" - if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then - pipe_works=yes - fi -- LIBS="$lt_save_LIBS" -- CFLAGS="$lt_save_CFLAGS" -+ LIBS=$lt_globsym_save_LIBS -+ CFLAGS=$lt_globsym_save_CFLAGS - else - echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD - fi -@@ -3551,6 +3791,13 @@ else - AC_MSG_RESULT(ok) - fi - -+# Response file support. -+if test "$lt_cv_nm_interface" = "MS dumpbin"; then -+ nm_file_list_spec='@' -+elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then -+ nm_file_list_spec='@' -+fi -+ - _LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1], - [Take the output of nm and produce a listing of raw symbols and C names]) - _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1], -@@ -3561,6 +3808,8 @@ _LT_DECL([global_symbol_to_c_name_address], - _LT_DECL([global_symbol_to_c_name_address_lib_prefix], - [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1], - [Transform the output of nm in a C name address pair when lib prefix is needed]) -+_LT_DECL([], [nm_file_list_spec], [1], -+ [Specify filename containing input files for $NM]) - ]) # _LT_CMD_GLOBAL_SYMBOLS - - -@@ -3572,7 +3821,6 @@ _LT_TAGVAR(lt_prog_compiler_wl, $1)= - _LT_TAGVAR(lt_prog_compiler_pic, $1)= - _LT_TAGVAR(lt_prog_compiler_static, $1)= - --AC_MSG_CHECKING([for $compiler option to produce PIC]) - m4_if([$1], [CXX], [ - # C++ specific cases for pic, static, wl, etc. - if test "$GXX" = yes; then -@@ -3678,6 +3926,12 @@ m4_if([$1], [CXX], [ - ;; - esac - ;; -+ mingw* | cygwin* | os2* | pw32* | cegcc*) -+ # This hack is so that the source file can tell whether it is being -+ # built for inclusion in a dll (and should export symbols for example). 
-+ m4_if([$1], [GCJ], [], -+ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) -+ ;; - dgux*) - case $cc_basename in - ec++*) -@@ -3830,7 +4084,7 @@ m4_if([$1], [CXX], [ - ;; - solaris*) - case $cc_basename in -- CC*) -+ CC* | sunCC*) - # Sun C++ 4.2, 5.x and Centerline C++ - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' -@@ -4053,6 +4307,12 @@ m4_if([$1], [CXX], [ - _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared' - _LT_TAGVAR(lt_prog_compiler_static, $1)='--static' - ;; -+ nagfor*) -+ # NAG Fortran compiler -+ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,' -+ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' -+ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' -+ ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -4115,7 +4375,7 @@ m4_if([$1], [CXX], [ - _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' - _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' - case $cc_basename in -- f77* | f90* | f95*) -+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';; - *) - _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';; -@@ -4172,9 +4432,11 @@ case $host_os in - _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])" - ;; - esac --AC_MSG_RESULT([$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) --_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], -- [How to pass a linker flag through the compiler]) -+ -+AC_CACHE_CHECK([for $compiler option to produce PIC], -+ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)], -+ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) -+_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1) - - # - # Check to make sure the PIC flag actually works. -@@ -4193,6 +4455,8 @@ fi - _LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1], - [Additional compiler flags for building library objects]) - -+_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], -+ [How to pass a linker flag through the compiler]) - # - # Check to make sure the static flag actually works. - # -@@ -4213,6 +4477,7 @@ _LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1], - m4_defun([_LT_LINKER_SHLIBS], - [AC_REQUIRE([LT_PATH_LD])dnl - AC_REQUIRE([LT_PATH_NM])dnl -+m4_require([_LT_PATH_MANIFEST_TOOL])dnl - m4_require([_LT_FILEUTILS_DEFAULTS])dnl - m4_require([_LT_DECL_EGREP])dnl - m4_require([_LT_DECL_SED])dnl -@@ -4221,6 +4486,7 @@ m4_require([_LT_TAG_COMPILER])dnl - AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) - m4_if([$1], [CXX], [ - _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' -+ _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] - case $host_os in - aix[[4-9]]*) - # If we're using GNU nm, then we don't want the "-C" option. 
-@@ -4235,15 +4501,20 @@ m4_if([$1], [CXX], [ - ;; - pw32*) - _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds" -- ;; -+ ;; - cygwin* | mingw* | cegcc*) -- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;/^.*[[ ]]__nm__/s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' -- ;; -+ case $cc_basename in -+ cl*) ;; -+ *) -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' -+ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] -+ ;; -+ esac -+ ;; - *) - _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' -- ;; -+ ;; - esac -- _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] - ], [ - runpath_var= - _LT_TAGVAR(allow_undefined_flag, $1)= -@@ -4411,7 +4682,8 @@ _LT_EOF - _LT_TAGVAR(allow_undefined_flag, $1)=unsupported - _LT_TAGVAR(always_export_symbols, $1)=no - _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' -+ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -@@ -4510,12 +4782,12 @@ _LT_EOF - _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive' - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= - _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='-rpath $libdir' -- _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -4529,8 +4801,8 @@ _LT_EOF - _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs 
$deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -4548,8 +4820,8 @@ _LT_EOF - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -4595,8 +4867,8 @@ _LT_EOF - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - _LT_TAGVAR(ld_shlibs, $1)=no - fi -@@ -4726,7 +4998,7 @@ _LT_EOF - _LT_TAGVAR(allow_undefined_flag, $1)='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. -- _LT_SYS_MODULE_PATH_AIX -+ _LT_SYS_MODULE_PATH_AIX([$1]) - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else -@@ -4737,7 +5009,7 @@ _LT_EOF - else - # Determine the default libpath from the value encoded in an - # empty executable. -- _LT_SYS_MODULE_PATH_AIX -+ _LT_SYS_MODULE_PATH_AIX([$1]) - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. -@@ -4781,20 +5053,63 @@ _LT_EOF - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. -- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' -- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -- # Tell ltmain to make .lib files, not .a files. -- libext=lib -- # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=".dll" -- # FIXME: Setting linknames here is a bad hack. -- _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -- # The linker will automatically build a .lib file if we build a DLL. 
-- _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' -- # FIXME: Should let the user specify the lib program. -- _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' -- _LT_TAGVAR(fix_srcfile_path, $1)='`cygpath -w "$srcfile"`' -- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -+ case $cc_basename in -+ cl*) -+ # Native MSVC -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' -+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -+ _LT_TAGVAR(always_export_symbols, $1)=yes -+ _LT_TAGVAR(file_list_spec, $1)='@' -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' -+ # The linker will not automatically build a static lib if we build a DLL. -+ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' -+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -+ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' -+ # Don't use ranlib -+ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' -+ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' -+ ;; -+ *) -+ # Assume MSVC wrapper -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' -+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -+ # The linker will automatically build a .lib file if we build a DLL. -+ _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' -+ # FIXME: Should let the user specify the lib program. -+ _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' -+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -+ ;; -+ esac - ;; - - darwin* | rhapsody*) -@@ -4828,7 +5143,7 @@ _LT_EOF - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
- freebsd* | dragonfly*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' - _LT_TAGVAR(hardcode_direct, $1)=yes - _LT_TAGVAR(hardcode_shlibpath_var, $1)=no -@@ -4836,7 +5151,7 @@ _LT_EOF - - hpux9*) - if test "$GCC" = yes; then -- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -@@ -4852,7 +5167,7 @@ _LT_EOF - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -4876,10 +5191,10 @@ _LT_EOF - _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else -@@ -4926,16 +5241,31 @@ _LT_EOF - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. 
-- save_LDFLAGS="$LDFLAGS" -- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -- AC_LINK_IFELSE(int foo(void) {}, -- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -- ) -- LDFLAGS="$save_LDFLAGS" -+ # This should be the same for all languages, so no per-tag cache variable. -+ AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol], -+ [lt_cv_irix_exported_symbol], -+ [save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -+ AC_LINK_IFELSE( -+ [AC_LANG_SOURCE( -+ [AC_LANG_CASE([C], [[int foo (void) { return 0; }]], -+ [C++], [[int foo (void) { return 0; }]], -+ [Fortran 77], [[ -+ subroutine foo -+ end]], -+ [Fortran], [[ -+ subroutine foo -+ end]])])], -+ [lt_cv_irix_exported_symbol=yes], -+ [lt_cv_irix_exported_symbol=no]) -+ LDFLAGS="$save_LDFLAGS"]) -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -+ fi - else - _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' -@@ -5020,7 +5350,7 @@ _LT_EOF - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' - else - _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' -@@ -5039,9 +5369,9 @@ _LT_EOF - _LT_TAGVAR(no_undefined_flag, $1)=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM 
$lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) -@@ -5313,8 +5643,6 @@ _LT_TAGDECL([], [inherit_rpath], [0], - to runtime path list]) - _LT_TAGDECL([], [link_all_deplibs], [0], - [Whether libtool must link a program against all its dependency libraries]) --_LT_TAGDECL([], [fix_srcfile_path], [1], -- [Fix the shell variable $srcfile for the compiler]) - _LT_TAGDECL([], [always_export_symbols], [0], - [Set to "yes" if exported symbols are required]) - _LT_TAGDECL([], [export_symbols_cmds], [2], -@@ -5325,6 +5653,8 @@ _LT_TAGDECL([], [include_expsyms], [1], - [Symbols that must always be exported]) - _LT_TAGDECL([], [prelink_cmds], [2], - [Commands necessary for linking programs (against libraries) with templates]) -+_LT_TAGDECL([], [postlink_cmds], [2], -+ [Commands necessary for finishing linking programs]) - _LT_TAGDECL([], [file_list_spec], [1], - [Specify filename containing input files]) - dnl FIXME: Not yet implemented -@@ -5426,6 +5756,7 @@ CC="$lt_save_CC" - m4_defun([_LT_LANG_CXX_CONFIG], - [m4_require([_LT_FILEUTILS_DEFAULTS])dnl - m4_require([_LT_DECL_EGREP])dnl -+m4_require([_LT_PATH_MANIFEST_TOOL])dnl - if test -n "$CXX" && ( test "X$CXX" != "Xno" && - ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || - (test "X$CXX" != "Xg++"))) ; then -@@ -5487,6 +5818,7 @@ if test "$_lt_caught_CXX_error" != yes; then - - # Allow CC to be a program name with arguments. - lt_save_CC=$CC -+ lt_save_CFLAGS=$CFLAGS - lt_save_LD=$LD - lt_save_GCC=$GCC - GCC=$GXX -@@ -5504,6 +5836,7 @@ if test "$_lt_caught_CXX_error" != yes; then - fi - test -z "${LDCXX+set}" || LD=$LDCXX - CC=${CXX-"c++"} -+ CFLAGS=$CXXFLAGS - compiler=$CC - _LT_TAGVAR(compiler, $1)=$CC - _LT_CC_BASENAME([$compiler]) -@@ -5667,7 +6000,7 @@ if test "$_lt_caught_CXX_error" != yes; then - _LT_TAGVAR(allow_undefined_flag, $1)='-berok' - # Determine the default libpath from the value encoded in an empty - # executable. -- _LT_SYS_MODULE_PATH_AIX -+ _LT_SYS_MODULE_PATH_AIX([$1]) - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - - _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" -@@ -5679,7 +6012,7 @@ if test "$_lt_caught_CXX_error" != yes; then - else - # Determine the default libpath from the value encoded in an - # empty executable. -- _LT_SYS_MODULE_PATH_AIX -+ _LT_SYS_MODULE_PATH_AIX([$1]) - _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. -@@ -5721,29 +6054,75 @@ if test "$_lt_caught_CXX_error" != yes; then - ;; - - cygwin* | mingw* | pw32* | cegcc*) -- # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, -- # as there is no search path for DLLs. 
-- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' -- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -- _LT_TAGVAR(always_export_symbols, $1)=no -- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -- -- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- # If the export-symbols file already is a .def file (1st line -- # is EXPORTS), use it as is; otherwise, prepend... -- _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -- cp $export_symbols $output_objdir/$soname.def; -- else -- echo EXPORTS > $output_objdir/$soname.def; -- cat $export_symbols >> $output_objdir/$soname.def; -- fi~ -- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -- else -- _LT_TAGVAR(ld_shlibs, $1)=no -- fi -- ;; -+ case $GXX,$cc_basename in -+ ,cl* | no,cl*) -+ # Native MSVC -+ # hardcode_libdir_flag_spec is actually meaningless, as there is -+ # no search path for DLLs. -+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' -+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -+ _LT_TAGVAR(always_export_symbols, $1)=yes -+ _LT_TAGVAR(file_list_spec, $1)='@' -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' -+ # The linker will not automatically build a static lib if we build a DLL. -+ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' -+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -+ # Don't use ranlib -+ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' -+ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ func_to_tool_file "$lt_outputfile"~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' -+ ;; -+ *) -+ # g++ -+ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, -+ # as there is no search path for DLLs. 
-+ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' -+ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' -+ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported -+ _LT_TAGVAR(always_export_symbols, $1)=no -+ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes -+ -+ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ # If the export-symbols file already is a .def file (1st line -+ # is EXPORTS), use it as is; otherwise, prepend... -+ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ cp $export_symbols $output_objdir/$soname.def; -+ else -+ echo EXPORTS > $output_objdir/$soname.def; -+ cat $export_symbols >> $output_objdir/$soname.def; -+ fi~ -+ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -+ else -+ _LT_TAGVAR(ld_shlibs, $1)=no -+ fi -+ ;; -+ esac -+ ;; - darwin* | rhapsody*) - _LT_DARWIN_LINKER_FEATURES($1) - ;; -@@ -5818,7 +6197,7 @@ if test "$_lt_caught_CXX_error" != yes; then - ;; - *) - if test "$GXX" = yes; then -- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - # FIXME: insert proper C++ library support - _LT_TAGVAR(ld_shlibs, $1)=no -@@ -5889,10 +6268,10 @@ if test "$_lt_caught_CXX_error" != yes; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - ia64*) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' - ;; - esac - fi -@@ -5933,9 +6312,9 @@ if test "$_lt_caught_CXX_error" != yes; then - *) - if test "$GXX" = yes; then - if test "$with_gnu_ld" = no; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects 
$libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' - fi - fi - _LT_TAGVAR(link_all_deplibs, $1)=yes -@@ -6005,20 +6384,20 @@ if test "$_lt_caught_CXX_error" != yes; then - _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~ - rm -rf $tpldir~ - $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ -- compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"' -+ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' - _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~ - rm -rf $tpldir~ - $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ -- $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~ -+ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ - $RANLIB $oldlib' - _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~ - rm -rf $tpldir~ - $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' -+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~ - rm -rf $tpldir~ - $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ -- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' -+ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' - ;; - *) # Version 6 and above use weak symbols - _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' -@@ -6213,7 +6592,7 @@ if test "$_lt_caught_CXX_error" != yes; then - _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - ;; - *) -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ 
_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - ;; - esac - -@@ -6259,7 +6638,7 @@ if test "$_lt_caught_CXX_error" != yes; then - - solaris*) - case $cc_basename in -- CC*) -+ CC* | sunCC*) - # Sun C++ 4.2, 5.x and Centerline C++ - _LT_TAGVAR(archive_cmds_need_lc,$1)=yes - _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' -@@ -6300,9 +6679,9 @@ if test "$_lt_caught_CXX_error" != yes; then - if test "$GXX" = yes && test "$with_gnu_ld" = no; then - _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs' - if $CC --version | $GREP -v '^2\.7' > /dev/null; then -- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' -+ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' - _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' - - # Commands to make compiler produce verbose output that lists - # what "hidden" libraries, object files and flags are used when -@@ -6431,6 +6810,7 @@ if test "$_lt_caught_CXX_error" != yes; then - fi # test -n "$compiler" - - CC=$lt_save_CC -+ CFLAGS=$lt_save_CFLAGS - LDCXX=$LD - LD=$lt_save_LD - GCC=$lt_save_GCC -@@ -6445,6 +6825,29 @@ AC_LANG_POP - ])# _LT_LANG_CXX_CONFIG - - -+# _LT_FUNC_STRIPNAME_CNF -+# ---------------------- -+# func_stripname_cnf prefix suffix name -+# strip PREFIX and SUFFIX off of NAME. -+# PREFIX and SUFFIX must not contain globbing or regex special -+# characters, hashes, percent signs, but SUFFIX may contain a leading -+# dot (in which case that matches only a dot). -+# -+# This function is identical to the (non-XSI) version of func_stripname, -+# except this one can be used by m4 code that may be executed by configure, -+# rather than the libtool script. -+m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl -+AC_REQUIRE([_LT_DECL_SED]) -+AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH]) -+func_stripname_cnf () -+{ -+ case ${2} in -+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -+ esac -+} # func_stripname_cnf -+])# _LT_FUNC_STRIPNAME_CNF -+ - # _LT_SYS_HIDDEN_LIBDEPS([TAGNAME]) - # --------------------------------- - # Figure out "hidden" library dependencies from verbose -@@ -6453,6 +6856,7 @@ AC_LANG_POP - # objects, libraries and library flags. 
- m4_defun([_LT_SYS_HIDDEN_LIBDEPS], - [m4_require([_LT_FILEUTILS_DEFAULTS])dnl -+AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl - # Dependencies to place before and after the object being linked: - _LT_TAGVAR(predep_objects, $1)= - _LT_TAGVAR(postdep_objects, $1)= -@@ -6503,6 +6907,13 @@ public class foo { - }; - _LT_EOF - ]) -+ -+_lt_libdeps_save_CFLAGS=$CFLAGS -+case "$CC $CFLAGS " in #( -+*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; -+*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; -+esac -+ - dnl Parse the compiler output and extract the necessary - dnl objects, libraries and library flags. - if AC_TRY_EVAL(ac_compile); then -@@ -6514,7 +6925,7 @@ if AC_TRY_EVAL(ac_compile); then - pre_test_object_deps_done=no - - for p in `eval "$output_verbose_link_cmd"`; do -- case $p in -+ case ${prev}${p} in - - -L* | -R* | -l*) - # Some compilers place space between "-{L,R}" and the path. -@@ -6523,13 +6934,22 @@ if AC_TRY_EVAL(ac_compile); then - test $p = "-R"; then - prev=$p - continue -- else -- prev= - fi - -+ # Expand the sysroot to ease extracting the directories later. -+ if test -z "$prev"; then -+ case $p in -+ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; -+ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; -+ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; -+ esac -+ fi -+ case $p in -+ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; -+ esac - if test "$pre_test_object_deps_done" = no; then -- case $p in -- -L* | -R*) -+ case ${prev} in -+ -L | -R) - # Internal compiler library paths should come after those - # provided the user. The postdeps already come after the - # user supplied libs so there is no need to process them. -@@ -6549,8 +6969,10 @@ if AC_TRY_EVAL(ac_compile); then - _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}" - fi - fi -+ prev= - ;; - -+ *.lto.$objext) ;; # Ignore GCC LTO objects - *.$objext) - # This assumes that the test object file only shows up - # once in the compiler output. -@@ -6586,6 +7008,7 @@ else - fi - - $RM -f confest.$objext -+CFLAGS=$_lt_libdeps_save_CFLAGS - - # PORTME: override above test on systems where it is broken - m4_if([$1], [CXX], -@@ -6622,7 +7045,7 @@ linux*) - - solaris*) - case $cc_basename in -- CC*) -+ CC* | sunCC*) - # The more standards-conforming stlport4 library is - # incompatible with the Cstd library. Avoid specifying - # it if it's in CXXFLAGS. Ignore libCrun as -@@ -6735,7 +7158,9 @@ if test "$_lt_disable_F77" != yes; then - # Allow CC to be a program name with arguments. - lt_save_CC="$CC" - lt_save_GCC=$GCC -+ lt_save_CFLAGS=$CFLAGS - CC=${F77-"f77"} -+ CFLAGS=$FFLAGS - compiler=$CC - _LT_TAGVAR(compiler, $1)=$CC - _LT_CC_BASENAME([$compiler]) -@@ -6789,6 +7214,7 @@ if test "$_lt_disable_F77" != yes; then - - GCC=$lt_save_GCC - CC="$lt_save_CC" -+ CFLAGS="$lt_save_CFLAGS" - fi # test "$_lt_disable_F77" != yes - - AC_LANG_POP -@@ -6865,7 +7291,9 @@ if test "$_lt_disable_FC" != yes; then - # Allow CC to be a program name with arguments. 
- lt_save_CC="$CC" - lt_save_GCC=$GCC -+ lt_save_CFLAGS=$CFLAGS - CC=${FC-"f95"} -+ CFLAGS=$FCFLAGS - compiler=$CC - GCC=$ac_cv_fc_compiler_gnu - -@@ -6921,7 +7349,8 @@ if test "$_lt_disable_FC" != yes; then - fi # test -n "$compiler" - - GCC=$lt_save_GCC -- CC="$lt_save_CC" -+ CC=$lt_save_CC -+ CFLAGS=$lt_save_CFLAGS - fi # test "$_lt_disable_FC" != yes - - AC_LANG_POP -@@ -6958,10 +7387,12 @@ _LT_COMPILER_BOILERPLATE - _LT_LINKER_BOILERPLATE - - # Allow CC to be a program name with arguments. --lt_save_CC="$CC" -+lt_save_CC=$CC -+lt_save_CFLAGS=$CFLAGS - lt_save_GCC=$GCC - GCC=yes - CC=${GCJ-"gcj"} -+CFLAGS=$GCJFLAGS - compiler=$CC - _LT_TAGVAR(compiler, $1)=$CC - _LT_TAGVAR(LD, $1)="$LD" -@@ -6992,7 +7423,8 @@ fi - AC_LANG_RESTORE - - GCC=$lt_save_GCC --CC="$lt_save_CC" -+CC=$lt_save_CC -+CFLAGS=$lt_save_CFLAGS - ])# _LT_LANG_GCJ_CONFIG - - -@@ -7027,9 +7459,11 @@ _LT_LINKER_BOILERPLATE - - # Allow CC to be a program name with arguments. - lt_save_CC="$CC" -+lt_save_CFLAGS=$CFLAGS - lt_save_GCC=$GCC - GCC= - CC=${RC-"windres"} -+CFLAGS= - compiler=$CC - _LT_TAGVAR(compiler, $1)=$CC - _LT_CC_BASENAME([$compiler]) -@@ -7042,7 +7476,8 @@ fi - - GCC=$lt_save_GCC - AC_LANG_RESTORE --CC="$lt_save_CC" -+CC=$lt_save_CC -+CFLAGS=$lt_save_CFLAGS - ])# _LT_LANG_RC_CONFIG - - -@@ -7101,6 +7536,15 @@ _LT_DECL([], [OBJDUMP], [1], [An object symbol dumper]) - AC_SUBST([OBJDUMP]) - ]) - -+# _LT_DECL_DLLTOOL -+# ---------------- -+# Ensure DLLTOOL variable is set. -+m4_defun([_LT_DECL_DLLTOOL], -+[AC_CHECK_TOOL(DLLTOOL, dlltool, false) -+test -z "$DLLTOOL" && DLLTOOL=dlltool -+_LT_DECL([], [DLLTOOL], [1], [DLL creation program]) -+AC_SUBST([DLLTOOL]) -+]) - - # _LT_DECL_SED - # ------------ -@@ -7194,8 +7638,8 @@ m4_defun([_LT_CHECK_SHELL_FEATURES], - # Try some XSI features - xsi_shell=no - ( _lt_dummy="a/b/c" -- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ -- = c,a/b,, \ -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ - && eval 'test $(( 1 + 1 )) -eq 2 \ - && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ - && xsi_shell=yes -@@ -7234,206 +7678,162 @@ _LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl - ])# _LT_CHECK_SHELL_FEATURES - - --# _LT_PROG_XSI_SHELLFNS --# --------------------- --# Bourne and XSI compatible variants of some useful shell functions. --m4_defun([_LT_PROG_XSI_SHELLFNS], --[case $xsi_shell in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac --} -- --# func_basename file --func_basename () --{ -- func_basename_result="${1##*/}" --} -- --# func_dirname_and_basename file append nondir_replacement --# perform func_basename and func_dirname in a single function --# call: --# dirname: Compute the dirname of FILE. If nonempty, --# add APPEND to the result, otherwise set result --# to NONDIR_REPLACEMENT. --# value returned in "$func_dirname_result" --# basename: Compute filename of FILE. --# value retuned in "$func_basename_result" --# Implementation must be kept synchronized with func_dirname --# and func_basename. For efficiency, we do not delegate to --# those functions but instead duplicate the functionality here. 
--func_dirname_and_basename () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac -- func_basename_result="${1##*/}" --} -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --func_stripname () --{ -- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -- # positional parameters, so assign one to ordinary parameter first. -- func_stripname_result=${3} -- func_stripname_result=${func_stripname_result#"${1}"} -- func_stripname_result=${func_stripname_result%"${2}"} --} -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=${1%%=*} -- func_opt_split_arg=${1#*=} --} -- --# func_lo2o object --func_lo2o () --{ -- case ${1} in -- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -- *) func_lo2o_result=${1} ;; -- esac --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=${1%.*}.lo --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=$(( $[*] )) --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=${#1} --} -+# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY) -+# ------------------------------------------------------ -+# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and -+# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY. -+m4_defun([_LT_PROG_FUNCTION_REPLACE], -+[dnl { -+sed -e '/^$1 ()$/,/^} # $1 /c\ -+$1 ()\ -+{\ -+m4_bpatsubsts([$2], [$], [\\], [^\([ ]\)], [\\\1]) -+} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+]) - --_LT_EOF -- ;; -- *) # Bourne compatible functions. -- cat << \_LT_EOF >> "$cfgfile" - --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- # Extract subdirectory from the argument. -- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -- if test "X$func_dirname_result" = "X${1}"; then -- func_dirname_result="${3}" -- else -- func_dirname_result="$func_dirname_result${2}" -- fi --} -+# _LT_PROG_REPLACE_SHELLFNS -+# ------------------------- -+# Replace existing portable implementations of several shell functions with -+# equivalent extended shell implementations where those features are available.. -+m4_defun([_LT_PROG_REPLACE_SHELLFNS], -+[if test x"$xsi_shell" = xyes; then -+ _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl -+ case ${1} in -+ */*) func_dirname_result="${1%/*}${2}" ;; -+ * ) func_dirname_result="${3}" ;; -+ esac]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl -+ func_basename_result="${1##*/}"]) -+ -+ _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl -+ case ${1} in -+ */*) func_dirname_result="${1%/*}${2}" ;; -+ * ) func_dirname_result="${3}" ;; -+ esac -+ func_basename_result="${1##*/}"]) - --# func_basename file --func_basename () --{ -- func_basename_result=`$ECHO "${1}" | $SED "$basename"` --} -+ _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl -+ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -+ # positional parameters, so assign one to ordinary parameter first. 
-+ func_stripname_result=${3} -+ func_stripname_result=${func_stripname_result#"${1}"} -+ func_stripname_result=${func_stripname_result%"${2}"}]) - --dnl func_dirname_and_basename --dnl A portable version of this function is already defined in general.m4sh --dnl so there is no need for it here. -+ _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl -+ func_split_long_opt_name=${1%%=*} -+ func_split_long_opt_arg=${1#*=}]) - --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --# func_strip_suffix prefix name --func_stripname () --{ -- case ${2} in -- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -- esac --} -+ _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl -+ func_split_short_opt_arg=${1#??} -+ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}]) - --# sed scripts: --my_sed_long_opt='1s/^\(-[[^=]]*\)=.*/\1/;q' --my_sed_long_arg='1s/^-[[^=]]*=//' -+ _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl -+ case ${1} in -+ *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -+ *) func_lo2o_result=${1} ;; -+ esac]) - --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` -- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` --} -+ _LT_PROG_FUNCTION_REPLACE([func_xform], [ func_xform_result=${1%.*}.lo]) - --# func_lo2o object --func_lo2o () --{ -- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` --} -+ _LT_PROG_FUNCTION_REPLACE([func_arith], [ func_arith_result=$(( $[*] ))]) - --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=`$ECHO "${1}" | $SED 's/\.[[^.]]*$/.lo/'` --} -+ _LT_PROG_FUNCTION_REPLACE([func_len], [ func_len_result=${#1}]) -+fi - --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=`expr "$[@]"` --} -+if test x"$lt_shell_append" = xyes; then -+ _LT_PROG_FUNCTION_REPLACE([func_append], [ eval "${1}+=\\${2}"]) - --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=`expr "$[1]" : ".*" 2>/dev/null || echo $max_cmd_len` --} -+ _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl -+ func_quote_for_eval "${2}" -+dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \ -+ eval "${1}+=\\\\ \\$func_quote_for_eval_result"]) - --_LT_EOF --esac -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi - --case $lt_shell_append in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -+if test x"$_lt_function_replace_fail" = x":"; then -+ AC_MSG_WARN([Unable to substitute extended shell functions in $ofile]) -+fi -+]) - --# func_append var value --# Append VALUE to the end of shell variable VAR. 
--func_append () --{ -- eval "$[1]+=\$[2]" --} --_LT_EOF -+# _LT_PATH_CONVERSION_FUNCTIONS -+# ----------------------------- -+# Determine which file name conversion functions should be used by -+# func_to_host_file (and, implicitly, by func_to_host_path). These are needed -+# for certain cross-compile configurations and native mingw. -+m4_defun([_LT_PATH_CONVERSION_FUNCTIONS], -+[AC_REQUIRE([AC_CANONICAL_HOST])dnl -+AC_REQUIRE([AC_CANONICAL_BUILD])dnl -+AC_MSG_CHECKING([how to convert $build file names to $host format]) -+AC_CACHE_VAL(lt_cv_to_host_file_cmd, -+[case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 -+ ;; -+ esac - ;; -- *) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$[1]=\$$[1]\$[2]" --} -- --_LT_EOF -+ *-*-cygwin* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin -+ ;; -+ esac - ;; -- esac -+ * ) # unhandled hosts (and "normal" native builds) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+esac -+]) -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+AC_MSG_RESULT([$lt_cv_to_host_file_cmd]) -+_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd], -+ [0], [convert $build file names to $host format])dnl -+ -+AC_MSG_CHECKING([how to convert $build file names to toolchain format]) -+AC_CACHE_VAL(lt_cv_to_tool_file_cmd, -+[#assume ordinary cross tools, or native build. -+lt_cv_to_tool_file_cmd=func_convert_file_noop -+case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ esac -+ ;; -+esac - ]) -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+AC_MSG_RESULT([$lt_cv_to_tool_file_cmd]) -+_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd], -+ [0], [convert $build files to toolchain format])dnl -+])# _LT_PATH_CONVERSION_FUNCTIONS -diff --git a/ltmain.sh b/ltmain.sh -index 9503ec8..70e856e 100644 ---- a/ltmain.sh -+++ b/ltmain.sh -@@ -1,10 +1,9 @@ --# Generated from ltmain.m4sh. - --# libtool (GNU libtool 1.3134 2009-11-29) 2.2.7a -+# libtool (GNU libtool) 2.4 - # Written by Gordon Matzigkeit , 1996 - - # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, --# 2007, 2008, 2009 Free Software Foundation, Inc. -+# 2007, 2008, 2009, 2010 Free Software Foundation, Inc. - # This is free software; see the source for copying conditions. There is NO - # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. - -@@ -38,7 +37,6 @@ - # -n, --dry-run display commands without modifying any files - # --features display basic configuration information and exit - # --mode=MODE use operation mode MODE --# --no-finish let install mode avoid finish commands - # --preserve-dup-deps don't remove duplicate dependency libraries - # --quiet, --silent don't print informational messages - # --no-quiet, --no-silent -@@ -71,17 +69,19 @@ - # compiler: $LTCC - # compiler flags: $LTCFLAGS - # linker: $LD (gnu? 
$with_gnu_ld) --# $progname: (GNU libtool 1.3134 2009-11-29) 2.2.7a -+# $progname: (GNU libtool) 2.4 - # automake: $automake_version - # autoconf: $autoconf_version - # - # Report bugs to . -+# GNU libtool home page: . -+# General help using GNU software: . - - PROGRAM=libtool - PACKAGE=libtool --VERSION=2.2.7a --TIMESTAMP=" 1.3134 2009-11-29" --package_revision=1.3134 -+VERSION=2.4 -+TIMESTAMP="" -+package_revision=1.3293 - - # Be Bourne compatible - if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then -@@ -106,9 +106,6 @@ _LTECHO_EOF' - } - - # NLS nuisances: We save the old values to restore during execute mode. --# Only set LANG and LC_ALL to C if already set. --# These must not be set unconditionally because not all systems understand --# e.g. LANG=C (notably SCO). - lt_user_locale= - lt_safe_locale= - for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES -@@ -121,15 +118,13 @@ do - lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\" - fi" - done -+LC_ALL=C -+LANGUAGE=C -+export LANGUAGE LC_ALL - - $lt_unset CDPATH - - -- -- -- -- -- - # Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh - # is ksh but when the shell is invoked as "sh" and the current value of - # the _XPG environment variable is not equal to 1 (one), the special -@@ -140,7 +135,7 @@ progpath="$0" - - - : ${CP="cp -f"} --: ${ECHO=$as_echo} -+test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'} - : ${EGREP="/bin/grep -E"} - : ${FGREP="/bin/grep -F"} - : ${GREP="/bin/grep"} -@@ -149,7 +144,7 @@ progpath="$0" - : ${MKDIR="mkdir"} - : ${MV="mv -f"} - : ${RM="rm -f"} --: ${SED="/mount/endor/wildenhu/local-x86_64/bin/sed"} -+: ${SED="/bin/sed"} - : ${SHELL="${CONFIG_SHELL-/bin/sh}"} - : ${Xsed="$SED -e 1s/^X//"} - -@@ -169,6 +164,27 @@ IFS=" $lt_nl" - dirname="s,/[^/]*$,," - basename="s,^.*/,," - -+# func_dirname file append nondir_replacement -+# Compute the dirname of FILE. If nonempty, add APPEND to the result, -+# otherwise set result to NONDIR_REPLACEMENT. -+func_dirname () -+{ -+ func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -+ if test "X$func_dirname_result" = "X${1}"; then -+ func_dirname_result="${3}" -+ else -+ func_dirname_result="$func_dirname_result${2}" -+ fi -+} # func_dirname may be replaced by extended shell implementation -+ -+ -+# func_basename file -+func_basename () -+{ -+ func_basename_result=`$ECHO "${1}" | $SED "$basename"` -+} # func_basename may be replaced by extended shell implementation -+ -+ - # func_dirname_and_basename file append nondir_replacement - # perform func_basename and func_dirname in a single function - # call: -@@ -183,17 +199,31 @@ basename="s,^.*/,," - # those functions but instead duplicate the functionality here. - func_dirname_and_basename () - { -- # Extract subdirectory from the argument. -- func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"` -- if test "X$func_dirname_result" = "X${1}"; then -- func_dirname_result="${3}" -- else -- func_dirname_result="$func_dirname_result${2}" -- fi -- func_basename_result=`$ECHO "${1}" | $SED -e "$basename"` --} -+ # Extract subdirectory from the argument. 
-+ func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"` -+ if test "X$func_dirname_result" = "X${1}"; then -+ func_dirname_result="${3}" -+ else -+ func_dirname_result="$func_dirname_result${2}" -+ fi -+ func_basename_result=`$ECHO "${1}" | $SED -e "$basename"` -+} # func_dirname_and_basename may be replaced by extended shell implementation -+ -+ -+# func_stripname prefix suffix name -+# strip PREFIX and SUFFIX off of NAME. -+# PREFIX and SUFFIX must not contain globbing or regex special -+# characters, hashes, percent signs, but SUFFIX may contain a leading -+# dot (in which case that matches only a dot). -+# func_strip_suffix prefix name -+func_stripname () -+{ -+ case ${2} in -+ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -+ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -+ esac -+} # func_stripname may be replaced by extended shell implementation - --# Generated shell functions inserted here. - - # These SED scripts presuppose an absolute path with a trailing slash. - pathcar='s,^/\([^/]*\).*$,\1,' -@@ -376,6 +406,15 @@ sed_quote_subst='s/\([`"$\\]\)/\\\1/g' - # Same as above, but do not quote variable references. - double_quote_subst='s/\(["`\\]\)/\\\1/g' - -+# Sed substitution that turns a string into a regex matching for the -+# string literally. -+sed_make_literal_regex='s,[].[^$\\*\/],\\&,g' -+ -+# Sed substitution that converts a w32 file name or path -+# which contains forward slashes, into one that contains -+# (escaped) backslashes. A very naive implementation. -+lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' -+ - # Re-`\' parameter expansions in output of double_quote_subst that were - # `\'-ed in input to the same. If an odd number of `\' preceded a '$' - # in input to double_quote_subst, that '$' was protected from expansion. -@@ -404,7 +443,7 @@ opt_warning=: - # name if it has been set yet. - func_echo () - { -- $ECHO "$progname${mode+: }$mode: $*" -+ $ECHO "$progname: ${opt_mode+$opt_mode: }$*" - } - - # func_verbose arg... -@@ -430,14 +469,14 @@ func_echo_all () - # Echo program name prefixed message to standard error. - func_error () - { -- $ECHO "$progname${mode+: }$mode: "${1+"$@"} 1>&2 -+ $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2 - } - - # func_warning arg... - # Echo program name prefixed warning message to standard error. - func_warning () - { -- $opt_warning && $ECHO "$progname${mode+: }$mode: warning: "${1+"$@"} 1>&2 -+ $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2 - - # bash bug again: - : -@@ -656,19 +695,35 @@ func_show_eval_locale () - fi - } - -- -- -+# func_tr_sh -+# Turn $1 into a string suitable for a shell variable name. -+# Result is stored in $func_tr_sh_result. All characters -+# not in the set a-zA-Z0-9_ are replaced with '_'. Further, -+# if $1 begins with a digit, a '_' is prepended as well. -+func_tr_sh () -+{ -+ case $1 in -+ [0-9]* | *[!a-zA-Z0-9_]*) -+ func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'` -+ ;; -+ * ) -+ func_tr_sh_result=$1 -+ ;; -+ esac -+} - - - # func_version - # Echo version message to standard output and exit. - func_version () - { -+ $opt_debug -+ - $SED -n '/(C)/!b go - :more - /\./!{ - N -- s/\n# // -+ s/\n# / / - b more - } - :go -@@ -685,7 +740,9 @@ func_version () - # Echo short help message to standard output and exit. 
- func_usage () - { -- $SED -n '/^# Usage:/,/^# *-h/ { -+ $opt_debug -+ -+ $SED -n '/^# Usage:/,/^# *.*--help/ { - s/^# // - s/^# *$// - s/\$progname/'$progname'/ -@@ -701,7 +758,10 @@ func_usage () - # unless 'noexit' is passed as argument. - func_help () - { -+ $opt_debug -+ - $SED -n '/^# Usage:/,/# Report bugs to/ { -+ :print - s/^# // - s/^# *$// - s*\$progname*'$progname'* -@@ -714,7 +774,11 @@ func_help () - s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/ - s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/ - p -- }' < "$progpath" -+ d -+ } -+ /^# .* home page:/b print -+ /^# General help using/b print -+ ' < "$progpath" - ret=$? - if test -z "$1"; then - exit $ret -@@ -726,12 +790,39 @@ func_help () - # exit_cmd. - func_missing_arg () - { -- func_error "missing argument for $1" -+ $opt_debug -+ -+ func_error "missing argument for $1." - exit_cmd=exit - } - --exit_cmd=: - -+# func_split_short_opt shortopt -+# Set func_split_short_opt_name and func_split_short_opt_arg shell -+# variables after splitting SHORTOPT after the 2nd character. -+func_split_short_opt () -+{ -+ my_sed_short_opt='1s/^\(..\).*$/\1/;q' -+ my_sed_short_rest='1s/^..\(.*\)$/\1/;q' -+ -+ func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"` -+ func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"` -+} # func_split_short_opt may be replaced by extended shell implementation -+ -+ -+# func_split_long_opt longopt -+# Set func_split_long_opt_name and func_split_long_opt_arg shell -+# variables after splitting LONGOPT at the `=' sign. -+func_split_long_opt () -+{ -+ my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q' -+ my_sed_long_arg='1s/^--[^=]*=//' -+ -+ func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"` -+ func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"` -+} # func_split_long_opt may be replaced by extended shell implementation -+ -+exit_cmd=: - - - -@@ -741,26 +832,64 @@ magic="%%%MAGIC variable%%%" - magic_exe="%%%MAGIC EXE variable%%%" - - # Global variables. --# $mode is unset - nonopt= --execute_dlfiles= - preserve_args= - lo2o="s/\\.lo\$/.${objext}/" - o2lo="s/\\.${objext}\$/.lo/" - extracted_archives= - extracted_serial=0 - --opt_dry_run=false --opt_finish=: --opt_duplicate_deps=false --opt_silent=false --opt_debug=: -- - # If this variable is set in any of the actions, the command in it - # will be execed at the end. This prevents here-documents from being - # left over by shells. - exec_cmd= - -+# func_append var value -+# Append VALUE to the end of shell variable VAR. -+func_append () -+{ -+ eval "${1}=\$${1}\${2}" -+} # func_append may be replaced by extended shell implementation -+ -+# func_append_quoted var value -+# Quote VALUE and append to the end of shell variable VAR, separated -+# by a space. -+func_append_quoted () -+{ -+ func_quote_for_eval "${2}" -+ eval "${1}=\$${1}\\ \$func_quote_for_eval_result" -+} # func_append_quoted may be replaced by extended shell implementation -+ -+ -+# func_arith arithmetic-term... -+func_arith () -+{ -+ func_arith_result=`expr "${@}"` -+} # func_arith may be replaced by extended shell implementation -+ -+ -+# func_len string -+# STRING may not start with a hyphen. 
-+func_len () -+{ -+ func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len` -+} # func_len may be replaced by extended shell implementation -+ -+ -+# func_lo2o object -+func_lo2o () -+{ -+ func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` -+} # func_lo2o may be replaced by extended shell implementation -+ -+ -+# func_xform libobj-or-source -+func_xform () -+{ -+ func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` -+} # func_xform may be replaced by extended shell implementation -+ -+ - # func_fatal_configuration arg... - # Echo program name prefixed message to standard error, followed by - # a configuration failure hint, and exit. -@@ -850,130 +979,204 @@ func_enable_tag () - esac - } - --# Parse options once, thoroughly. This comes as soon as possible in --# the script to make things like `libtool --version' happen quickly. -+# func_check_version_match -+# Ensure that we are using m4 macros, and libtool script from the same -+# release of libtool. -+func_check_version_match () - { -+ if test "$package_revision" != "$macro_revision"; then -+ if test "$VERSION" != "$macro_version"; then -+ if test -z "$macro_version"; then -+ cat >&2 <<_LT_EOF -+$progname: Version mismatch error. This is $PACKAGE $VERSION, but the -+$progname: definition of this LT_INIT comes from an older release. -+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION -+$progname: and run autoconf again. -+_LT_EOF -+ else -+ cat >&2 <<_LT_EOF -+$progname: Version mismatch error. This is $PACKAGE $VERSION, but the -+$progname: definition of this LT_INIT comes from $PACKAGE $macro_version. -+$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION -+$progname: and run autoconf again. -+_LT_EOF -+ fi -+ else -+ cat >&2 <<_LT_EOF -+$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, -+$progname: but the definition of this LT_INIT comes from revision $macro_revision. -+$progname: You should recreate aclocal.m4 with macros from revision $package_revision -+$progname: of $PACKAGE $VERSION and run autoconf again. 
-+_LT_EOF -+ fi - -- # Shorthand for --mode=foo, only valid as the first argument -- case $1 in -- clean|clea|cle|cl) -- shift; set dummy --mode clean ${1+"$@"}; shift -- ;; -- compile|compil|compi|comp|com|co|c) -- shift; set dummy --mode compile ${1+"$@"}; shift -- ;; -- execute|execut|execu|exec|exe|ex|e) -- shift; set dummy --mode execute ${1+"$@"}; shift -- ;; -- finish|finis|fini|fin|fi|f) -- shift; set dummy --mode finish ${1+"$@"}; shift -- ;; -- install|instal|insta|inst|ins|in|i) -- shift; set dummy --mode install ${1+"$@"}; shift -- ;; -- link|lin|li|l) -- shift; set dummy --mode link ${1+"$@"}; shift -- ;; -- uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) -- shift; set dummy --mode uninstall ${1+"$@"}; shift -- ;; -- esac -+ exit $EXIT_MISMATCH -+ fi -+} -+ -+ -+# Shorthand for --mode=foo, only valid as the first argument -+case $1 in -+clean|clea|cle|cl) -+ shift; set dummy --mode clean ${1+"$@"}; shift -+ ;; -+compile|compil|compi|comp|com|co|c) -+ shift; set dummy --mode compile ${1+"$@"}; shift -+ ;; -+execute|execut|execu|exec|exe|ex|e) -+ shift; set dummy --mode execute ${1+"$@"}; shift -+ ;; -+finish|finis|fini|fin|fi|f) -+ shift; set dummy --mode finish ${1+"$@"}; shift -+ ;; -+install|instal|insta|inst|ins|in|i) -+ shift; set dummy --mode install ${1+"$@"}; shift -+ ;; -+link|lin|li|l) -+ shift; set dummy --mode link ${1+"$@"}; shift -+ ;; -+uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) -+ shift; set dummy --mode uninstall ${1+"$@"}; shift -+ ;; -+esac - -- # Parse non-mode specific arguments: -- while test "$#" -gt 0; do -+ -+ -+# Option defaults: -+opt_debug=: -+opt_dry_run=false -+opt_config=false -+opt_preserve_dup_deps=false -+opt_features=false -+opt_finish=false -+opt_help=false -+opt_help_all=false -+opt_silent=: -+opt_verbose=: -+opt_silent=false -+opt_verbose=false -+ -+ -+# Parse options once, thoroughly. This comes as soon as possible in the -+# script to make things like `--version' happen as quickly as we can. 
-+{ -+ # this just eases exit handling -+ while test $# -gt 0; do - opt="$1" - shift -- - case $opt in -- --config) func_config ;; -- -- --debug) preserve_args="$preserve_args $opt" -+ --debug|-x) opt_debug='set -x' - func_echo "enabling shell trace mode" -- opt_debug='set -x' - $opt_debug - ;; -- -- -dlopen) test "$#" -eq 0 && func_missing_arg "$opt" && break -- execute_dlfiles="$execute_dlfiles $1" -- shift -+ --dry-run|--dryrun|-n) -+ opt_dry_run=: - ;; -- -- --dry-run | -n) opt_dry_run=: ;; -- --features) func_features ;; -- --finish) mode="finish" ;; -- --no-finish) opt_finish=false ;; -- -- --mode) test "$#" -eq 0 && func_missing_arg "$opt" && break -- case $1 in -- # Valid mode arguments: -- clean) ;; -- compile) ;; -- execute) ;; -- finish) ;; -- install) ;; -- link) ;; -- relink) ;; -- uninstall) ;; -- -- # Catch anything else as an error -- *) func_error "invalid argument for $opt" -- exit_cmd=exit -- break -- ;; -- esac -- -- mode="$1" -+ --config) -+ opt_config=: -+func_config -+ ;; -+ --dlopen|-dlopen) -+ optarg="$1" -+ opt_dlopen="${opt_dlopen+$opt_dlopen -+}$optarg" - shift - ;; -- - --preserve-dup-deps) -- opt_duplicate_deps=: ;; -- -- --quiet|--silent) preserve_args="$preserve_args $opt" -- opt_silent=: -- opt_verbose=false -+ opt_preserve_dup_deps=: - ;; -- -- --no-quiet|--no-silent) -- preserve_args="$preserve_args $opt" -- opt_silent=false -+ --features) -+ opt_features=: -+func_features - ;; -- -- --verbose| -v) preserve_args="$preserve_args $opt" -+ --finish) -+ opt_finish=: -+set dummy --mode finish ${1+"$@"}; shift -+ ;; -+ --help) -+ opt_help=: -+ ;; -+ --help-all) -+ opt_help_all=: -+opt_help=': help-all' -+ ;; -+ --mode) -+ test $# = 0 && func_missing_arg $opt && break -+ optarg="$1" -+ opt_mode="$optarg" -+case $optarg in -+ # Valid mode arguments: -+ clean|compile|execute|finish|install|link|relink|uninstall) ;; -+ -+ # Catch anything else as an error -+ *) func_error "invalid argument for $opt" -+ exit_cmd=exit -+ break -+ ;; -+esac -+ shift -+ ;; -+ --no-silent|--no-quiet) - opt_silent=false -- opt_verbose=: -+func_append preserve_args " $opt" - ;; -- -- --no-verbose) preserve_args="$preserve_args $opt" -+ --no-verbose) - opt_verbose=false -+func_append preserve_args " $opt" - ;; -- -- --tag) test "$#" -eq 0 && func_missing_arg "$opt" && break -- preserve_args="$preserve_args $opt $1" -- func_enable_tag "$1" # tagname is set here -+ --silent|--quiet) -+ opt_silent=: -+func_append preserve_args " $opt" -+ opt_verbose=false -+ ;; -+ --verbose|-v) -+ opt_verbose=: -+func_append preserve_args " $opt" -+opt_silent=false -+ ;; -+ --tag) -+ test $# = 0 && func_missing_arg $opt && break -+ optarg="$1" -+ opt_tag="$optarg" -+func_append preserve_args " $opt $optarg" -+func_enable_tag "$optarg" - shift - ;; - -+ -\?|-h) func_usage ;; -+ --help) func_help ;; -+ --version) func_version ;; -+ - # Separate optargs to long options: -- -dlopen=*|--mode=*|--tag=*) -- func_opt_split "$opt" -- set dummy "$func_opt_split_opt" "$func_opt_split_arg" ${1+"$@"} -+ --*=*) -+ func_split_long_opt "$opt" -+ set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"} - shift - ;; - -- -\?|-h) func_usage ;; -- --help) opt_help=: ;; -- --help-all) opt_help=': help-all' ;; -- --version) func_version ;; -- -- -*) func_fatal_help "unrecognized option \`$opt'" ;; -- -- *) nonopt="$opt" -- break -+ # Separate non-argument short options: -+ -\?*|-h*|-n*|-v*) -+ func_split_short_opt "$opt" -+ set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"} -+ shift - 
;; -+ -+ --) break ;; -+ -*) func_fatal_help "unrecognized option \`$opt'" ;; -+ *) set dummy "$opt" ${1+"$@"}; shift; break ;; - esac - done - -+ # Validate options: -+ -+ # save first non-option argument -+ if test "$#" -gt 0; then -+ nonopt="$opt" -+ shift -+ fi -+ -+ # preserve --debug -+ test "$opt_debug" = : || func_append preserve_args " --debug" - - case $host in - *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* ) -@@ -981,82 +1184,44 @@ func_enable_tag () - opt_duplicate_compiler_generated_deps=: - ;; - *) -- opt_duplicate_compiler_generated_deps=$opt_duplicate_deps -+ opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps - ;; - esac - -- # Having warned about all mis-specified options, bail out if -- # anything was wrong. -- $exit_cmd $EXIT_FAILURE --} -+ $opt_help || { -+ # Sanity checks first: -+ func_check_version_match - --# func_check_version_match --# Ensure that we are using m4 macros, and libtool script from the same --# release of libtool. --func_check_version_match () --{ -- if test "$package_revision" != "$macro_revision"; then -- if test "$VERSION" != "$macro_version"; then -- if test -z "$macro_version"; then -- cat >&2 <<_LT_EOF --$progname: Version mismatch error. This is $PACKAGE $VERSION, but the --$progname: definition of this LT_INIT comes from an older release. --$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION --$progname: and run autoconf again. --_LT_EOF -- else -- cat >&2 <<_LT_EOF --$progname: Version mismatch error. This is $PACKAGE $VERSION, but the --$progname: definition of this LT_INIT comes from $PACKAGE $macro_version. --$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION --$progname: and run autoconf again. --_LT_EOF -- fi -- else -- cat >&2 <<_LT_EOF --$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, --$progname: but the definition of this LT_INIT comes from revision $macro_revision. --$progname: You should recreate aclocal.m4 with macros from revision $package_revision --$progname: of $PACKAGE $VERSION and run autoconf again. --_LT_EOF -+ if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then -+ func_fatal_configuration "not configured to build any kind of library" - fi - -- exit $EXIT_MISMATCH -- fi --} -- -+ # Darwin sucks -+ eval std_shrext=\"$shrext_cmds\" - --## ----------- ## --## Main. ## --## ----------- ## -- --$opt_help || { -- # Sanity checks first: -- func_check_version_match -- -- if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then -- func_fatal_configuration "not configured to build any kind of library" -- fi -+ # Only execute mode is allowed to have -dlopen flags. -+ if test -n "$opt_dlopen" && test "$opt_mode" != execute; then -+ func_error "unrecognized option \`-dlopen'" -+ $ECHO "$help" 1>&2 -+ exit $EXIT_FAILURE -+ fi - -- test -z "$mode" && func_fatal_error "error: you must specify a MODE." -+ # Change the help message to a mode-specific one. -+ generic_help="$help" -+ help="Try \`$progname --help --mode=$opt_mode' for more information." -+ } - - -- # Darwin sucks -- eval "std_shrext=\"$shrext_cmds\"" -+ # Bail if the options were screwed -+ $exit_cmd $EXIT_FAILURE -+} - - -- # Only execute mode is allowed to have -dlopen flags. -- if test -n "$execute_dlfiles" && test "$mode" != execute; then -- func_error "unrecognized option \`-dlopen'" -- $ECHO "$help" 1>&2 -- exit $EXIT_FAILURE -- fi - -- # Change the help message to a mode-specific one. 
-- generic_help="$help" -- help="Try \`$progname --help --mode=$mode' for more information." --} - -+## ----------- ## -+## Main. ## -+## ----------- ## - - # func_lalib_p file - # True iff FILE is a libtool `.la' library or `.lo' object file. -@@ -1121,12 +1286,9 @@ func_ltwrapper_executable_p () - # temporary ltwrapper_script. - func_ltwrapper_scriptname () - { -- func_ltwrapper_scriptname_result="" -- if func_ltwrapper_executable_p "$1"; then -- func_dirname_and_basename "$1" "" "." -- func_stripname '' '.exe' "$func_basename_result" -- func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" -- fi -+ func_dirname_and_basename "$1" "" "." -+ func_stripname '' '.exe' "$func_basename_result" -+ func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" - } - - # func_ltwrapper_p file -@@ -1149,7 +1311,7 @@ func_execute_cmds () - save_ifs=$IFS; IFS='~' - for cmd in $1; do - IFS=$save_ifs -- eval "cmd=\"$cmd\"" -+ eval cmd=\"$cmd\" - func_show_eval "$cmd" "${2-:}" - done - IFS=$save_ifs -@@ -1172,6 +1334,37 @@ func_source () - } - - -+# func_resolve_sysroot PATH -+# Replace a leading = in PATH with a sysroot. Store the result into -+# func_resolve_sysroot_result -+func_resolve_sysroot () -+{ -+ func_resolve_sysroot_result=$1 -+ case $func_resolve_sysroot_result in -+ =*) -+ func_stripname '=' '' "$func_resolve_sysroot_result" -+ func_resolve_sysroot_result=$lt_sysroot$func_stripname_result -+ ;; -+ esac -+} -+ -+# func_replace_sysroot PATH -+# If PATH begins with the sysroot, replace it with = and -+# store the result into func_replace_sysroot_result. -+func_replace_sysroot () -+{ -+ case "$lt_sysroot:$1" in -+ ?*:"$lt_sysroot"*) -+ func_stripname "$lt_sysroot" '' "$1" -+ func_replace_sysroot_result="=$func_stripname_result" -+ ;; -+ *) -+ # Including no sysroot. -+ func_replace_sysroot_result=$1 -+ ;; -+ esac -+} -+ - # func_infer_tag arg - # Infer tagged configuration to use if any are available and - # if one wasn't chosen via the "--tag" command line option. -@@ -1184,8 +1377,7 @@ func_infer_tag () - if test -n "$available_tags" && test -z "$tagname"; then - CC_quoted= - for arg in $CC; do -- func_quote_for_eval "$arg" -- CC_quoted="$CC_quoted $func_quote_for_eval_result" -+ func_append_quoted CC_quoted "$arg" - done - CC_expanded=`func_echo_all $CC` - CC_quoted_expanded=`func_echo_all $CC_quoted` -@@ -1204,8 +1396,7 @@ func_infer_tag () - CC_quoted= - for arg in $CC; do - # Double-quote args containing other shell metacharacters. -- func_quote_for_eval "$arg" -- CC_quoted="$CC_quoted $func_quote_for_eval_result" -+ func_append_quoted CC_quoted "$arg" - done - CC_expanded=`func_echo_all $CC` - CC_quoted_expanded=`func_echo_all $CC_quoted` -@@ -1274,6 +1465,486 @@ EOF - } - } - -+ -+################################################## -+# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS # -+################################################## -+ -+# func_convert_core_file_wine_to_w32 ARG -+# Helper function used by file name conversion functions when $build is *nix, -+# and $host is mingw, cygwin, or some other w32 environment. Relies on a -+# correctly configured wine environment available, with the winepath program -+# in $build's $PATH. -+# -+# ARG is the $build file name to be converted to w32 format. 
-+# Result is available in $func_convert_core_file_wine_to_w32_result, and will -+# be empty on error (or when ARG is empty) -+func_convert_core_file_wine_to_w32 () -+{ -+ $opt_debug -+ func_convert_core_file_wine_to_w32_result="$1" -+ if test -n "$1"; then -+ # Unfortunately, winepath does not exit with a non-zero error code, so we -+ # are forced to check the contents of stdout. On the other hand, if the -+ # command is not found, the shell will set an exit code of 127 and print -+ # *an error message* to stdout. So we must check for both error code of -+ # zero AND non-empty stdout, which explains the odd construction: -+ func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null` -+ if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then -+ func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" | -+ $SED -e "$lt_sed_naive_backslashify"` -+ else -+ func_convert_core_file_wine_to_w32_result= -+ fi -+ fi -+} -+# end: func_convert_core_file_wine_to_w32 -+ -+ -+# func_convert_core_path_wine_to_w32 ARG -+# Helper function used by path conversion functions when $build is *nix, and -+# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly -+# configured wine environment available, with the winepath program in $build's -+# $PATH. Assumes ARG has no leading or trailing path separator characters. -+# -+# ARG is path to be converted from $build format to win32. -+# Result is available in $func_convert_core_path_wine_to_w32_result. -+# Unconvertible file (directory) names in ARG are skipped; if no directory names -+# are convertible, then the result may be empty. -+func_convert_core_path_wine_to_w32 () -+{ -+ $opt_debug -+ # unfortunately, winepath doesn't convert paths, only file names -+ func_convert_core_path_wine_to_w32_result="" -+ if test -n "$1"; then -+ oldIFS=$IFS -+ IFS=: -+ for func_convert_core_path_wine_to_w32_f in $1; do -+ IFS=$oldIFS -+ func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f" -+ if test -n "$func_convert_core_file_wine_to_w32_result" ; then -+ if test -z "$func_convert_core_path_wine_to_w32_result"; then -+ func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result" -+ else -+ func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result" -+ fi -+ fi -+ done -+ IFS=$oldIFS -+ fi -+} -+# end: func_convert_core_path_wine_to_w32 -+ -+ -+# func_cygpath ARGS... -+# Wrapper around calling the cygpath program via LT_CYGPATH. This is used when -+# when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2) -+# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or -+# (2), returns the Cygwin file name or path in func_cygpath_result (input -+# file name or path is assumed to be in w32 format, as previously converted -+# from $build's *nix or MSYS format). In case (3), returns the w32 file name -+# or path in func_cygpath_result (input file name or path is assumed to be in -+# Cygwin format). Returns an empty string on error. -+# -+# ARGS are passed to cygpath, with the last one being the file name or path to -+# be converted. -+# -+# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH -+# environment variable; do not put it in $PATH. -+func_cygpath () -+{ -+ $opt_debug -+ if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then -+ func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null` -+ if test "$?" 
-ne 0; then -+ # on failure, ensure result is empty -+ func_cygpath_result= -+ fi -+ else -+ func_cygpath_result= -+ func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'" -+ fi -+} -+#end: func_cygpath -+ -+ -+# func_convert_core_msys_to_w32 ARG -+# Convert file name or path ARG from MSYS format to w32 format. Return -+# result in func_convert_core_msys_to_w32_result. -+func_convert_core_msys_to_w32 () -+{ -+ $opt_debug -+ # awkward: cmd appends spaces to result -+ func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null | -+ $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` -+} -+#end: func_convert_core_msys_to_w32 -+ -+ -+# func_convert_file_check ARG1 ARG2 -+# Verify that ARG1 (a file name in $build format) was converted to $host -+# format in ARG2. Otherwise, emit an error message, but continue (resetting -+# func_to_host_file_result to ARG1). -+func_convert_file_check () -+{ -+ $opt_debug -+ if test -z "$2" && test -n "$1" ; then -+ func_error "Could not determine host file name corresponding to" -+ func_error " \`$1'" -+ func_error "Continuing, but uninstalled executables may not work." -+ # Fallback: -+ func_to_host_file_result="$1" -+ fi -+} -+# end func_convert_file_check -+ -+ -+# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH -+# Verify that FROM_PATH (a path in $build format) was converted to $host -+# format in TO_PATH. Otherwise, emit an error message, but continue, resetting -+# func_to_host_file_result to a simplistic fallback value (see below). -+func_convert_path_check () -+{ -+ $opt_debug -+ if test -z "$4" && test -n "$3"; then -+ func_error "Could not determine the host path corresponding to" -+ func_error " \`$3'" -+ func_error "Continuing, but uninstalled executables may not work." -+ # Fallback. This is a deliberately simplistic "conversion" and -+ # should not be "improved". See libtool.info. -+ if test "x$1" != "x$2"; then -+ lt_replace_pathsep_chars="s|$1|$2|g" -+ func_to_host_path_result=`echo "$3" | -+ $SED -e "$lt_replace_pathsep_chars"` -+ else -+ func_to_host_path_result="$3" -+ fi -+ fi -+} -+# end func_convert_path_check -+ -+ -+# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG -+# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT -+# and appending REPL if ORIG matches BACKPAT. -+func_convert_path_front_back_pathsep () -+{ -+ $opt_debug -+ case $4 in -+ $1 ) func_to_host_path_result="$3$func_to_host_path_result" -+ ;; -+ esac -+ case $4 in -+ $2 ) func_append func_to_host_path_result "$3" -+ ;; -+ esac -+} -+# end func_convert_path_front_back_pathsep -+ -+ -+################################################## -+# $build to $host FILE NAME CONVERSION FUNCTIONS # -+################################################## -+# invoked via `$to_host_file_cmd ARG' -+# -+# In each case, ARG is the path to be converted from $build to $host format. -+# Result will be available in $func_to_host_file_result. -+ -+ -+# func_to_host_file ARG -+# Converts the file name ARG from $build format to $host format. Return result -+# in func_to_host_file_result. -+func_to_host_file () -+{ -+ $opt_debug -+ $to_host_file_cmd "$1" -+} -+# end func_to_host_file -+ -+ -+# func_to_tool_file ARG LAZY -+# converts the file name ARG from $build format to toolchain format. Return -+# result in func_to_tool_file_result. If the conversion in use is listed -+# in (the comma separated) LAZY, no conversion takes place. 
-+func_to_tool_file () -+{ -+ $opt_debug -+ case ,$2, in -+ *,"$to_tool_file_cmd",*) -+ func_to_tool_file_result=$1 -+ ;; -+ *) -+ $to_tool_file_cmd "$1" -+ func_to_tool_file_result=$func_to_host_file_result -+ ;; -+ esac -+} -+# end func_to_tool_file -+ -+ -+# func_convert_file_noop ARG -+# Copy ARG to func_to_host_file_result. -+func_convert_file_noop () -+{ -+ func_to_host_file_result="$1" -+} -+# end func_convert_file_noop -+ -+ -+# func_convert_file_msys_to_w32 ARG -+# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic -+# conversion to w32 is not available inside the cwrapper. Returns result in -+# func_to_host_file_result. -+func_convert_file_msys_to_w32 () -+{ -+ $opt_debug -+ func_to_host_file_result="$1" -+ if test -n "$1"; then -+ func_convert_core_msys_to_w32 "$1" -+ func_to_host_file_result="$func_convert_core_msys_to_w32_result" -+ fi -+ func_convert_file_check "$1" "$func_to_host_file_result" -+} -+# end func_convert_file_msys_to_w32 -+ -+ -+# func_convert_file_cygwin_to_w32 ARG -+# Convert file name ARG from Cygwin to w32 format. Returns result in -+# func_to_host_file_result. -+func_convert_file_cygwin_to_w32 () -+{ -+ $opt_debug -+ func_to_host_file_result="$1" -+ if test -n "$1"; then -+ # because $build is cygwin, we call "the" cygpath in $PATH; no need to use -+ # LT_CYGPATH in this case. -+ func_to_host_file_result=`cygpath -m "$1"` -+ fi -+ func_convert_file_check "$1" "$func_to_host_file_result" -+} -+# end func_convert_file_cygwin_to_w32 -+ -+ -+# func_convert_file_nix_to_w32 ARG -+# Convert file name ARG from *nix to w32 format. Requires a wine environment -+# and a working winepath. Returns result in func_to_host_file_result. -+func_convert_file_nix_to_w32 () -+{ -+ $opt_debug -+ func_to_host_file_result="$1" -+ if test -n "$1"; then -+ func_convert_core_file_wine_to_w32 "$1" -+ func_to_host_file_result="$func_convert_core_file_wine_to_w32_result" -+ fi -+ func_convert_file_check "$1" "$func_to_host_file_result" -+} -+# end func_convert_file_nix_to_w32 -+ -+ -+# func_convert_file_msys_to_cygwin ARG -+# Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set. -+# Returns result in func_to_host_file_result. -+func_convert_file_msys_to_cygwin () -+{ -+ $opt_debug -+ func_to_host_file_result="$1" -+ if test -n "$1"; then -+ func_convert_core_msys_to_w32 "$1" -+ func_cygpath -u "$func_convert_core_msys_to_w32_result" -+ func_to_host_file_result="$func_cygpath_result" -+ fi -+ func_convert_file_check "$1" "$func_to_host_file_result" -+} -+# end func_convert_file_msys_to_cygwin -+ -+ -+# func_convert_file_nix_to_cygwin ARG -+# Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed -+# in a wine environment, working winepath, and LT_CYGPATH set. Returns result -+# in func_to_host_file_result. -+func_convert_file_nix_to_cygwin () -+{ -+ $opt_debug -+ func_to_host_file_result="$1" -+ if test -n "$1"; then -+ # convert from *nix to w32, then use cygpath to convert from w32 to cygwin. -+ func_convert_core_file_wine_to_w32 "$1" -+ func_cygpath -u "$func_convert_core_file_wine_to_w32_result" -+ func_to_host_file_result="$func_cygpath_result" -+ fi -+ func_convert_file_check "$1" "$func_to_host_file_result" -+} -+# end func_convert_file_nix_to_cygwin -+ -+ -+############################################# -+# $build to $host PATH CONVERSION FUNCTIONS # -+############################################# -+# invoked via `$to_host_path_cmd ARG' -+# -+# In each case, ARG is the path to be converted from $build to $host format. 
-+# The result will be available in $func_to_host_path_result. -+# -+# Path separators are also converted from $build format to $host format. If -+# ARG begins or ends with a path separator character, it is preserved (but -+# converted to $host format) on output. -+# -+# All path conversion functions are named using the following convention: -+# file name conversion function : func_convert_file_X_to_Y () -+# path conversion function : func_convert_path_X_to_Y () -+# where, for any given $build/$host combination the 'X_to_Y' value is the -+# same. If conversion functions are added for new $build/$host combinations, -+# the two new functions must follow this pattern, or func_init_to_host_path_cmd -+# will break. -+ -+ -+# func_init_to_host_path_cmd -+# Ensures that function "pointer" variable $to_host_path_cmd is set to the -+# appropriate value, based on the value of $to_host_file_cmd. -+to_host_path_cmd= -+func_init_to_host_path_cmd () -+{ -+ $opt_debug -+ if test -z "$to_host_path_cmd"; then -+ func_stripname 'func_convert_file_' '' "$to_host_file_cmd" -+ to_host_path_cmd="func_convert_path_${func_stripname_result}" -+ fi -+} -+ -+ -+# func_to_host_path ARG -+# Converts the path ARG from $build format to $host format. Return result -+# in func_to_host_path_result. -+func_to_host_path () -+{ -+ $opt_debug -+ func_init_to_host_path_cmd -+ $to_host_path_cmd "$1" -+} -+# end func_to_host_path -+ -+ -+# func_convert_path_noop ARG -+# Copy ARG to func_to_host_path_result. -+func_convert_path_noop () -+{ -+ func_to_host_path_result="$1" -+} -+# end func_convert_path_noop -+ -+ -+# func_convert_path_msys_to_w32 ARG -+# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic -+# conversion to w32 is not available inside the cwrapper. Returns result in -+# func_to_host_path_result. -+func_convert_path_msys_to_w32 () -+{ -+ $opt_debug -+ func_to_host_path_result="$1" -+ if test -n "$1"; then -+ # Remove leading and trailing path separator characters from ARG. MSYS -+ # behavior is inconsistent here; cygpath turns them into '.;' and ';.'; -+ # and winepath ignores them completely. -+ func_stripname : : "$1" -+ func_to_host_path_tmp1=$func_stripname_result -+ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" -+ func_to_host_path_result="$func_convert_core_msys_to_w32_result" -+ func_convert_path_check : ";" \ -+ "$func_to_host_path_tmp1" "$func_to_host_path_result" -+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" -+ fi -+} -+# end func_convert_path_msys_to_w32 -+ -+ -+# func_convert_path_cygwin_to_w32 ARG -+# Convert path ARG from Cygwin to w32 format. Returns result in -+# func_to_host_file_result. -+func_convert_path_cygwin_to_w32 () -+{ -+ $opt_debug -+ func_to_host_path_result="$1" -+ if test -n "$1"; then -+ # See func_convert_path_msys_to_w32: -+ func_stripname : : "$1" -+ func_to_host_path_tmp1=$func_stripname_result -+ func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"` -+ func_convert_path_check : ";" \ -+ "$func_to_host_path_tmp1" "$func_to_host_path_result" -+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" -+ fi -+} -+# end func_convert_path_cygwin_to_w32 -+ -+ -+# func_convert_path_nix_to_w32 ARG -+# Convert path ARG from *nix to w32 format. Requires a wine environment and -+# a working winepath. Returns result in func_to_host_file_result. 
-+func_convert_path_nix_to_w32 () -+{ -+ $opt_debug -+ func_to_host_path_result="$1" -+ if test -n "$1"; then -+ # See func_convert_path_msys_to_w32: -+ func_stripname : : "$1" -+ func_to_host_path_tmp1=$func_stripname_result -+ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" -+ func_to_host_path_result="$func_convert_core_path_wine_to_w32_result" -+ func_convert_path_check : ";" \ -+ "$func_to_host_path_tmp1" "$func_to_host_path_result" -+ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" -+ fi -+} -+# end func_convert_path_nix_to_w32 -+ -+ -+# func_convert_path_msys_to_cygwin ARG -+# Convert path ARG from MSYS to Cygwin format. Requires LT_CYGPATH set. -+# Returns result in func_to_host_file_result. -+func_convert_path_msys_to_cygwin () -+{ -+ $opt_debug -+ func_to_host_path_result="$1" -+ if test -n "$1"; then -+ # See func_convert_path_msys_to_w32: -+ func_stripname : : "$1" -+ func_to_host_path_tmp1=$func_stripname_result -+ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" -+ func_cygpath -u -p "$func_convert_core_msys_to_w32_result" -+ func_to_host_path_result="$func_cygpath_result" -+ func_convert_path_check : : \ -+ "$func_to_host_path_tmp1" "$func_to_host_path_result" -+ func_convert_path_front_back_pathsep ":*" "*:" : "$1" -+ fi -+} -+# end func_convert_path_msys_to_cygwin -+ -+ -+# func_convert_path_nix_to_cygwin ARG -+# Convert path ARG from *nix to Cygwin format. Requires Cygwin installed in a -+# a wine environment, working winepath, and LT_CYGPATH set. Returns result in -+# func_to_host_file_result. -+func_convert_path_nix_to_cygwin () -+{ -+ $opt_debug -+ func_to_host_path_result="$1" -+ if test -n "$1"; then -+ # Remove leading and trailing path separator characters from -+ # ARG. msys behavior is inconsistent here, cygpath turns them -+ # into '.;' and ';.', and winepath ignores them completely. -+ func_stripname : : "$1" -+ func_to_host_path_tmp1=$func_stripname_result -+ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" -+ func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result" -+ func_to_host_path_result="$func_cygpath_result" -+ func_convert_path_check : : \ -+ "$func_to_host_path_tmp1" "$func_to_host_path_result" -+ func_convert_path_front_back_pathsep ":*" "*:" : "$1" -+ fi -+} -+# end func_convert_path_nix_to_cygwin -+ -+ - # func_mode_compile arg... - func_mode_compile () - { -@@ -1314,12 +1985,12 @@ func_mode_compile () - ;; - - -pie | -fpie | -fPIE) -- pie_flag="$pie_flag $arg" -+ func_append pie_flag " $arg" - continue - ;; - - -shared | -static | -prefer-pic | -prefer-non-pic) -- later="$later $arg" -+ func_append later " $arg" - continue - ;; - -@@ -1340,15 +2011,14 @@ func_mode_compile () - save_ifs="$IFS"; IFS=',' - for arg in $args; do - IFS="$save_ifs" -- func_quote_for_eval "$arg" -- lastarg="$lastarg $func_quote_for_eval_result" -+ func_append_quoted lastarg "$arg" - done - IFS="$save_ifs" - func_stripname ' ' '' "$lastarg" - lastarg=$func_stripname_result - - # Add the arguments to base_compile. -- base_compile="$base_compile $lastarg" -+ func_append base_compile " $lastarg" - continue - ;; - -@@ -1364,8 +2034,7 @@ func_mode_compile () - esac # case $arg_mode - - # Aesthetically quote the previous argument. -- func_quote_for_eval "$lastarg" -- base_compile="$base_compile $func_quote_for_eval_result" -+ func_append_quoted base_compile "$lastarg" - done # for arg - - case $arg_mode in -@@ -1496,17 +2165,16 @@ compiler." 
- $opt_dry_run || $RM $removelist - exit $EXIT_FAILURE - fi -- removelist="$removelist $output_obj" -+ func_append removelist " $output_obj" - $ECHO "$srcfile" > "$lockfile" - fi - - $opt_dry_run || $RM $removelist -- removelist="$removelist $lockfile" -+ func_append removelist " $lockfile" - trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15 - -- if test -n "$fix_srcfile_path"; then -- eval "srcfile=\"$fix_srcfile_path\"" -- fi -+ func_to_tool_file "$srcfile" func_convert_file_msys_to_w32 -+ srcfile=$func_to_tool_file_result - func_quote_for_eval "$srcfile" - qsrcfile=$func_quote_for_eval_result - -@@ -1526,7 +2194,7 @@ compiler." - - if test -z "$output_obj"; then - # Place PIC objects in $objdir -- command="$command -o $lobj" -+ func_append command " -o $lobj" - fi - - func_show_eval_locale "$command" \ -@@ -1573,11 +2241,11 @@ compiler." - command="$base_compile $qsrcfile $pic_flag" - fi - if test "$compiler_c_o" = yes; then -- command="$command -o $obj" -+ func_append command " -o $obj" - fi - - # Suppress compiler output if we already did a PIC compilation. -- command="$command$suppress_output" -+ func_append command "$suppress_output" - func_show_eval_locale "$command" \ - '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' - -@@ -1622,13 +2290,13 @@ compiler." - } - - $opt_help || { -- test "$mode" = compile && func_mode_compile ${1+"$@"} -+ test "$opt_mode" = compile && func_mode_compile ${1+"$@"} - } - - func_mode_help () - { - # We need to display help for each of the modes. -- case $mode in -+ case $opt_mode in - "") - # Generic help is extracted from the usage comments - # at the start of this file. -@@ -1659,8 +2327,8 @@ This mode accepts the following additional options: - - -o OUTPUT-FILE set the output file name to OUTPUT-FILE - -no-suppress do not suppress compiler output for multiple passes -- -prefer-pic try to building PIC objects only -- -prefer-non-pic try to building non-PIC objects only -+ -prefer-pic try to build PIC objects only -+ -prefer-non-pic try to build non-PIC objects only - -shared do not build a \`.o' file suitable for static linking - -static only build a \`.o' file suitable for static linking - -Wc,FLAG pass FLAG directly to the compiler -@@ -1804,7 +2472,7 @@ Otherwise, only FILE itself is deleted using RM." - ;; - - *) -- func_fatal_help "invalid operation mode \`$mode'" -+ func_fatal_help "invalid operation mode \`$opt_mode'" - ;; - esac - -@@ -1819,13 +2487,13 @@ if $opt_help; then - else - { - func_help noexit -- for mode in compile link execute install finish uninstall clean; do -+ for opt_mode in compile link execute install finish uninstall clean; do - func_mode_help - done - } | sed -n '1p; 2,$s/^Usage:/ or: /p' - { - func_help noexit -- for mode in compile link execute install finish uninstall clean; do -+ for opt_mode in compile link execute install finish uninstall clean; do - echo - func_mode_help - done -@@ -1854,13 +2522,16 @@ func_mode_execute () - func_fatal_help "you must specify a COMMAND" - - # Handle -dlopen flags immediately. -- for file in $execute_dlfiles; do -+ for file in $opt_dlopen; do - test -f "$file" \ - || func_fatal_help "\`$file' is not a file" - - dir= - case $file in - *.la) -+ func_resolve_sysroot "$file" -+ file=$func_resolve_sysroot_result -+ - # Check to see that this really is a libtool archive. 
- func_lalib_unsafe_p "$file" \ - || func_fatal_help "\`$lib' is not a valid libtool archive" -@@ -1882,7 +2553,7 @@ func_mode_execute () - dir="$func_dirname_result" - - if test -f "$dir/$objdir/$dlname"; then -- dir="$dir/$objdir" -+ func_append dir "/$objdir" - else - if test ! -f "$dir/$dlname"; then - func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" -@@ -1907,10 +2578,10 @@ func_mode_execute () - test -n "$absdir" && dir="$absdir" - - # Now add the directory to shlibpath_var. -- if eval test -z \"\$$shlibpath_var\"; then -- eval $shlibpath_var=\$dir -+ if eval "test -z \"\$$shlibpath_var\""; then -+ eval "$shlibpath_var=\"\$dir\"" - else -- eval $shlibpath_var=\$dir:\$$shlibpath_var -+ eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\"" - fi - done - -@@ -1939,8 +2610,7 @@ func_mode_execute () - ;; - esac - # Quote arguments (to preserve shell metacharacters). -- func_quote_for_eval "$file" -- args="$args $func_quote_for_eval_result" -+ func_append_quoted args "$file" - done - - if test "X$opt_dry_run" = Xfalse; then -@@ -1972,22 +2642,59 @@ func_mode_execute () - fi - } - --test "$mode" = execute && func_mode_execute ${1+"$@"} -+test "$opt_mode" = execute && func_mode_execute ${1+"$@"} - - - # func_mode_finish arg... - func_mode_finish () - { - $opt_debug -- libdirs="$nonopt" -+ libs= -+ libdirs= - admincmds= - -- if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then -- for dir -- do -- libdirs="$libdirs $dir" -- done -+ for opt in "$nonopt" ${1+"$@"} -+ do -+ if test -d "$opt"; then -+ func_append libdirs " $opt" - -+ elif test -f "$opt"; then -+ if func_lalib_unsafe_p "$opt"; then -+ func_append libs " $opt" -+ else -+ func_warning "\`$opt' is not a valid libtool archive" -+ fi -+ -+ else -+ func_fatal_error "invalid argument \`$opt'" -+ fi -+ done -+ -+ if test -n "$libs"; then -+ if test -n "$lt_sysroot"; then -+ sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"` -+ sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;" -+ else -+ sysroot_cmd= -+ fi -+ -+ # Remove sysroot references -+ if $opt_dry_run; then -+ for lib in $libs; do -+ echo "removing references to $lt_sysroot and \`=' prefixes from $lib" -+ done -+ else -+ tmpdir=`func_mktempdir` -+ for lib in $libs; do -+ sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \ -+ > $tmpdir/tmp-la -+ mv -f $tmpdir/tmp-la $lib -+ done -+ ${RM}r "$tmpdir" -+ fi -+ fi -+ -+ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then - for libdir in $libdirs; do - if test -n "$finish_cmds"; then - # Do each command in the finish commands. -@@ -1997,7 +2704,7 @@ func_mode_finish () - if test -n "$finish_eval"; then - # Do the single finish_eval. - eval cmds=\"$finish_eval\" -- $opt_dry_run || eval "$cmds" || admincmds="$admincmds -+ $opt_dry_run || eval "$cmds" || func_append admincmds " - $cmds" - fi - done -@@ -2006,53 +2713,55 @@ func_mode_finish () - # Exit here if they wanted silent mode. 
- $opt_silent && exit $EXIT_SUCCESS - -- echo "----------------------------------------------------------------------" -- echo "Libraries have been installed in:" -- for libdir in $libdirs; do -- $ECHO " $libdir" -- done -- echo -- echo "If you ever happen to want to link against installed libraries" -- echo "in a given directory, LIBDIR, you must either use libtool, and" -- echo "specify the full pathname of the library, or use the \`-LLIBDIR'" -- echo "flag during linking and do at least one of the following:" -- if test -n "$shlibpath_var"; then -- echo " - add LIBDIR to the \`$shlibpath_var' environment variable" -- echo " during execution" -- fi -- if test -n "$runpath_var"; then -- echo " - add LIBDIR to the \`$runpath_var' environment variable" -- echo " during linking" -- fi -- if test -n "$hardcode_libdir_flag_spec"; then -- libdir=LIBDIR -- eval "flag=\"$hardcode_libdir_flag_spec\"" -+ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then -+ echo "----------------------------------------------------------------------" -+ echo "Libraries have been installed in:" -+ for libdir in $libdirs; do -+ $ECHO " $libdir" -+ done -+ echo -+ echo "If you ever happen to want to link against installed libraries" -+ echo "in a given directory, LIBDIR, you must either use libtool, and" -+ echo "specify the full pathname of the library, or use the \`-LLIBDIR'" -+ echo "flag during linking and do at least one of the following:" -+ if test -n "$shlibpath_var"; then -+ echo " - add LIBDIR to the \`$shlibpath_var' environment variable" -+ echo " during execution" -+ fi -+ if test -n "$runpath_var"; then -+ echo " - add LIBDIR to the \`$runpath_var' environment variable" -+ echo " during linking" -+ fi -+ if test -n "$hardcode_libdir_flag_spec"; then -+ libdir=LIBDIR -+ eval flag=\"$hardcode_libdir_flag_spec\" - -- $ECHO " - use the \`$flag' linker flag" -- fi -- if test -n "$admincmds"; then -- $ECHO " - have your system administrator run these commands:$admincmds" -- fi -- if test -f /etc/ld.so.conf; then -- echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" -- fi -- echo -+ $ECHO " - use the \`$flag' linker flag" -+ fi -+ if test -n "$admincmds"; then -+ $ECHO " - have your system administrator run these commands:$admincmds" -+ fi -+ if test -f /etc/ld.so.conf; then -+ echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" -+ fi -+ echo - -- echo "See any operating system documentation about shared libraries for" -- case $host in -- solaris2.[6789]|solaris2.1[0-9]) -- echo "more information, such as the ld(1), crle(1) and ld.so(8) manual" -- echo "pages." -- ;; -- *) -- echo "more information, such as the ld(1) and ld.so(8) manual pages." -- ;; -- esac -- echo "----------------------------------------------------------------------" -+ echo "See any operating system documentation about shared libraries for" -+ case $host in -+ solaris2.[6789]|solaris2.1[0-9]) -+ echo "more information, such as the ld(1), crle(1) and ld.so(8) manual" -+ echo "pages." -+ ;; -+ *) -+ echo "more information, such as the ld(1) and ld.so(8) manual pages." -+ ;; -+ esac -+ echo "----------------------------------------------------------------------" -+ fi - exit $EXIT_SUCCESS - } - --test "$mode" = finish && func_mode_finish ${1+"$@"} -+test "$opt_mode" = finish && func_mode_finish ${1+"$@"} - - - # func_mode_install arg... -@@ -2077,7 +2786,7 @@ func_mode_install () - # The real first argument should be the name of the installation program. - # Aesthetically quote it. 
- func_quote_for_eval "$arg" -- install_prog="$install_prog$func_quote_for_eval_result" -+ func_append install_prog "$func_quote_for_eval_result" - install_shared_prog=$install_prog - case " $install_prog " in - *[\\\ /]cp\ *) install_cp=: ;; -@@ -2097,7 +2806,7 @@ func_mode_install () - do - arg2= - if test -n "$dest"; then -- files="$files $dest" -+ func_append files " $dest" - dest=$arg - continue - fi -@@ -2135,11 +2844,11 @@ func_mode_install () - - # Aesthetically quote the argument. - func_quote_for_eval "$arg" -- install_prog="$install_prog $func_quote_for_eval_result" -+ func_append install_prog " $func_quote_for_eval_result" - if test -n "$arg2"; then - func_quote_for_eval "$arg2" - fi -- install_shared_prog="$install_shared_prog $func_quote_for_eval_result" -+ func_append install_shared_prog " $func_quote_for_eval_result" - done - - test -z "$install_prog" && \ -@@ -2151,7 +2860,7 @@ func_mode_install () - if test -n "$install_override_mode" && $no_mode; then - if $install_cp; then :; else - func_quote_for_eval "$install_override_mode" -- install_shared_prog="$install_shared_prog -m $func_quote_for_eval_result" -+ func_append install_shared_prog " -m $func_quote_for_eval_result" - fi - fi - -@@ -2209,10 +2918,13 @@ func_mode_install () - case $file in - *.$libext) - # Do the static libraries later. -- staticlibs="$staticlibs $file" -+ func_append staticlibs " $file" - ;; - - *.la) -+ func_resolve_sysroot "$file" -+ file=$func_resolve_sysroot_result -+ - # Check to see that this really is a libtool archive. - func_lalib_unsafe_p "$file" \ - || func_fatal_help "\`$file' is not a valid libtool archive" -@@ -2226,23 +2938,30 @@ func_mode_install () - if test "X$destdir" = "X$libdir"; then - case "$current_libdirs " in - *" $libdir "*) ;; -- *) current_libdirs="$current_libdirs $libdir" ;; -+ *) func_append current_libdirs " $libdir" ;; - esac - else - # Note the libdir as a future libdir. - case "$future_libdirs " in - *" $libdir "*) ;; -- *) future_libdirs="$future_libdirs $libdir" ;; -+ *) func_append future_libdirs " $libdir" ;; - esac - fi - - func_dirname "$file" "/" "" - dir="$func_dirname_result" -- dir="$dir$objdir" -+ func_append dir "$objdir" - - if test -n "$relink_command"; then -+ # Strip any trailing slash from the destination. -+ func_stripname '' '/' "$libdir" -+ destlibdir=$func_stripname_result -+ -+ func_stripname '' '/' "$destdir" -+ s_destdir=$func_stripname_result -+ - # Determine the prefix the user has applied to our future dir. -- inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"` -+ inst_prefix_dir=`$ECHO "X$s_destdir" | $Xsed -e "s%$destlibdir\$%%"` - - # Don't allow the user to place us outside of our expected - # location b/c this prevents finding dependent libraries that -@@ -2315,7 +3034,7 @@ func_mode_install () - func_show_eval "$install_prog $instname $destdir/$name" 'exit $?' - - # Maybe install the static library, too. -- test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library" -+ test -n "$old_library" && func_append staticlibs " $dir/$old_library" - ;; - - *.lo) -@@ -2503,7 +3222,7 @@ func_mode_install () - test -n "$future_libdirs" && \ - func_warning "remember to run \`$progname --finish$future_libdirs'" - -- if test -n "$current_libdirs" && $opt_finish; then -+ if test -n "$current_libdirs"; then - # Maybe just do a dry run. 
- $opt_dry_run && current_libdirs=" -n$current_libdirs" - exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs' -@@ -2512,7 +3231,7 @@ func_mode_install () - fi - } - --test "$mode" = install && func_mode_install ${1+"$@"} -+test "$opt_mode" = install && func_mode_install ${1+"$@"} - - - # func_generate_dlsyms outputname originator pic_p -@@ -2559,6 +3278,18 @@ extern \"C\" { - #pragma GCC diagnostic ignored \"-Wstrict-prototypes\" - #endif - -+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime -+ relocations are performed -- see ld's documentation on pseudo-relocs. */ -+# define LT_DLSYM_CONST -+#elif defined(__osf__) -+/* This system does not cope well with relocations in const data. */ -+# define LT_DLSYM_CONST -+#else -+# define LT_DLSYM_CONST const -+#endif -+ - /* External symbol declarations for the compiler. */\ - " - -@@ -2570,21 +3301,22 @@ extern \"C\" { - # Add our own program objects to the symbol list. - progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP` - for progfile in $progfiles; do -- func_verbose "extracting global C symbols from \`$progfile'" -- $opt_dry_run || eval "$NM $progfile | $global_symbol_pipe >> '$nlist'" -+ func_to_tool_file "$progfile" func_convert_file_msys_to_w32 -+ func_verbose "extracting global C symbols from \`$func_to_tool_file_result'" -+ $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'" - done - - if test -n "$exclude_expsyms"; then - $opt_dry_run || { -- $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T -- $MV "$nlist"T "$nlist" -+ eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T' -+ eval '$MV "$nlist"T "$nlist"' - } - fi - - if test -n "$export_symbols_regex"; then - $opt_dry_run || { -- $EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T -- $MV "$nlist"T "$nlist" -+ eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T' -+ eval '$MV "$nlist"T "$nlist"' - } - fi - -@@ -2593,23 +3325,23 @@ extern \"C\" { - export_symbols="$output_objdir/$outputname.exp" - $opt_dry_run || { - $RM $export_symbols -- ${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' < "$nlist" > "$export_symbols" -+ eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' - case $host in - *cygwin* | *mingw* | *cegcc* ) -- echo EXPORTS > "$output_objdir/$outputname.def" -- cat "$export_symbols" >> "$output_objdir/$outputname.def" -+ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' -+ eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"' - ;; - esac - } - else - $opt_dry_run || { -- ${SED} -e 's/\([].[*^$]\)/\\\1/g' -e 's/^/ /' -e 's/$/$/' < "$export_symbols" > "$output_objdir/$outputname.exp" -- $GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T -- $MV "$nlist"T "$nlist" -+ eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' -+ eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T' -+ eval '$MV "$nlist"T "$nlist"' - case $host in - *cygwin* | *mingw* | *cegcc* ) -- echo EXPORTS > "$output_objdir/$outputname.def" -- cat "$nlist" >> "$output_objdir/$outputname.def" -+ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' -+ eval 'cat "$nlist" >> "$output_objdir/$outputname.def"' - ;; - esac - } -@@ -2620,10 +3352,52 @@ extern \"C\" { - func_verbose 
"extracting global C symbols from \`$dlprefile'" - func_basename "$dlprefile" - name="$func_basename_result" -- $opt_dry_run || { -- $ECHO ": $name " >> "$nlist" -- eval "$NM $dlprefile 2>/dev/null | $global_symbol_pipe >> '$nlist'" -- } -+ case $host in -+ *cygwin* | *mingw* | *cegcc* ) -+ # if an import library, we need to obtain dlname -+ if func_win32_import_lib_p "$dlprefile"; then -+ func_tr_sh "$dlprefile" -+ eval "curr_lafile=\$libfile_$func_tr_sh_result" -+ dlprefile_dlbasename="" -+ if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then -+ # Use subshell, to avoid clobbering current variable values -+ dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"` -+ if test -n "$dlprefile_dlname" ; then -+ func_basename "$dlprefile_dlname" -+ dlprefile_dlbasename="$func_basename_result" -+ else -+ # no lafile. user explicitly requested -dlpreopen . -+ $sharedlib_from_linklib_cmd "$dlprefile" -+ dlprefile_dlbasename=$sharedlib_from_linklib_result -+ fi -+ fi -+ $opt_dry_run || { -+ if test -n "$dlprefile_dlbasename" ; then -+ eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"' -+ else -+ func_warning "Could not compute DLL name from $name" -+ eval '$ECHO ": $name " >> "$nlist"' -+ fi -+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 -+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe | -+ $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'" -+ } -+ else # not an import lib -+ $opt_dry_run || { -+ eval '$ECHO ": $name " >> "$nlist"' -+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 -+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'" -+ } -+ fi -+ ;; -+ *) -+ $opt_dry_run || { -+ eval '$ECHO ": $name " >> "$nlist"' -+ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 -+ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'" -+ } -+ ;; -+ esac - done - - $opt_dry_run || { -@@ -2661,26 +3435,9 @@ typedef struct { - const char *name; - void *address; - } lt_dlsymlist; --" -- case $host in -- *cygwin* | *mingw* | *cegcc* ) -- echo >> "$output_objdir/$my_dlsyms" "\ --/* DATA imports from DLLs on WIN32 con't be const, because -- runtime relocations are performed -- see ld's documentation -- on pseudo-relocs. */" -- lt_dlsym_const= ;; -- *osf5*) -- echo >> "$output_objdir/$my_dlsyms" "\ --/* This system does not cope well with relocations in const data */" -- lt_dlsym_const= ;; -- *) -- lt_dlsym_const=const ;; -- esac -- -- echo >> "$output_objdir/$my_dlsyms" "\ --extern $lt_dlsym_const lt_dlsymlist -+extern LT_DLSYM_CONST lt_dlsymlist - lt_${my_prefix}_LTX_preloaded_symbols[]; --$lt_dlsym_const lt_dlsymlist -+LT_DLSYM_CONST lt_dlsymlist - lt_${my_prefix}_LTX_preloaded_symbols[] = - {\ - { \"$my_originator\", (void *) 0 }," -@@ -2736,7 +3493,7 @@ static const void *lt_preloaded_setup() { - for arg in $LTCFLAGS; do - case $arg in - -pie | -fpie | -fPIE) ;; -- *) symtab_cflags="$symtab_cflags $arg" ;; -+ *) func_append symtab_cflags " $arg" ;; - esac - done - -@@ -2796,9 +3553,11 @@ func_win32_libid () - win32_libid_type="x86 archive import" - ;; - *ar\ archive*) # could be an import, or static -- if $OBJDUMP -f "$1" | $SED -e '10q' 2>/dev/null | -- $EGREP 'file format (pe-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then -- win32_nmres=`$NM -f posix -A "$1" | -+ # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD. 
-+ if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | -+ $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then -+ func_to_tool_file "$1" func_convert_file_msys_to_w32 -+ win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" | - $SED -n -e ' - 1,100{ - / I /{ -@@ -2827,6 +3586,131 @@ func_win32_libid () - $ECHO "$win32_libid_type" - } - -+# func_cygming_dll_for_implib ARG -+# -+# Platform-specific function to extract the -+# name of the DLL associated with the specified -+# import library ARG. -+# Invoked by eval'ing the libtool variable -+# $sharedlib_from_linklib_cmd -+# Result is available in the variable -+# $sharedlib_from_linklib_result -+func_cygming_dll_for_implib () -+{ -+ $opt_debug -+ sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"` -+} -+ -+# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs -+# -+# The is the core of a fallback implementation of a -+# platform-specific function to extract the name of the -+# DLL associated with the specified import library LIBNAME. -+# -+# SECTION_NAME is either .idata$6 or .idata$7, depending -+# on the platform and compiler that created the implib. -+# -+# Echos the name of the DLL associated with the -+# specified import library. -+func_cygming_dll_for_implib_fallback_core () -+{ -+ $opt_debug -+ match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"` -+ $OBJDUMP -s --section "$1" "$2" 2>/dev/null | -+ $SED '/^Contents of section '"$match_literal"':/{ -+ # Place marker at beginning of archive member dllname section -+ s/.*/====MARK====/ -+ p -+ d -+ } -+ # These lines can sometimes be longer than 43 characters, but -+ # are always uninteresting -+ /:[ ]*file format pe[i]\{,1\}-/d -+ /^In archive [^:]*:/d -+ # Ensure marker is printed -+ /^====MARK====/p -+ # Remove all lines with less than 43 characters -+ /^.\{43\}/!d -+ # From remaining lines, remove first 43 characters -+ s/^.\{43\}//' | -+ $SED -n ' -+ # Join marker and all lines until next marker into a single line -+ /^====MARK====/ b para -+ H -+ $ b para -+ b -+ :para -+ x -+ s/\n//g -+ # Remove the marker -+ s/^====MARK====// -+ # Remove trailing dots and whitespace -+ s/[\. \t]*$// -+ # Print -+ /./p' | -+ # we now have a list, one entry per line, of the stringified -+ # contents of the appropriate section of all members of the -+ # archive which possess that section. Heuristic: eliminate -+ # all those which have a first or second character that is -+ # a '.' (that is, objdump's representation of an unprintable -+ # character.) This should work for all archives with less than -+ # 0x302f exports -- but will fail for DLLs whose name actually -+ # begins with a literal '.' or a single character followed by -+ # a '.'. -+ # -+ # Of those that remain, print the first one. -+ $SED -e '/^\./d;/^.\./d;q' -+} -+ -+# func_cygming_gnu_implib_p ARG -+# This predicate returns with zero status (TRUE) if -+# ARG is a GNU/binutils-style import library. Returns -+# with nonzero status (FALSE) otherwise. -+func_cygming_gnu_implib_p () -+{ -+ $opt_debug -+ func_to_tool_file "$1" func_convert_file_msys_to_w32 -+ func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'` -+ test -n "$func_cygming_gnu_implib_tmp" -+} -+ -+# func_cygming_ms_implib_p ARG -+# This predicate returns with zero status (TRUE) if -+# ARG is an MS-style import library. Returns -+# with nonzero status (FALSE) otherwise. 
-+func_cygming_ms_implib_p () -+{ -+ $opt_debug -+ func_to_tool_file "$1" func_convert_file_msys_to_w32 -+ func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'` -+ test -n "$func_cygming_ms_implib_tmp" -+} -+ -+# func_cygming_dll_for_implib_fallback ARG -+# Platform-specific function to extract the -+# name of the DLL associated with the specified -+# import library ARG. -+# -+# This fallback implementation is for use when $DLLTOOL -+# does not support the --identify-strict option. -+# Invoked by eval'ing the libtool variable -+# $sharedlib_from_linklib_cmd -+# Result is available in the variable -+# $sharedlib_from_linklib_result -+func_cygming_dll_for_implib_fallback () -+{ -+ $opt_debug -+ if func_cygming_gnu_implib_p "$1" ; then -+ # binutils import library -+ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"` -+ elif func_cygming_ms_implib_p "$1" ; then -+ # ms-generated import library -+ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"` -+ else -+ # unknown -+ sharedlib_from_linklib_result="" -+ fi -+} - - - # func_extract_an_archive dir oldlib -@@ -2917,7 +3801,7 @@ func_extract_archives () - darwin_file= - darwin_files= - for darwin_file in $darwin_filelist; do -- darwin_files=`find unfat-$$ -name $darwin_file -print | $NL2SP` -+ darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP` - $LIPO -create -output "$darwin_file" $darwin_files - done # $darwin_filelist - $RM -rf unfat-$$ -@@ -2932,7 +3816,7 @@ func_extract_archives () - func_extract_an_archive "$my_xdir" "$my_xabs" - ;; - esac -- my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP` -+ my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP` - done - - func_extract_archives_result="$my_oldobjs" -@@ -3014,7 +3898,110 @@ func_fallback_echo () - _LTECHO_EOF' - } - ECHO=\"$qECHO\" -- fi\ -+ fi -+ -+# Very basic option parsing. These options are (a) specific to -+# the libtool wrapper, (b) are identical between the wrapper -+# /script/ and the wrapper /executable/ which is used only on -+# windows platforms, and (c) all begin with the string "--lt-" -+# (application programs are unlikely to have options which match -+# this pattern). -+# -+# There are only two supported options: --lt-debug and -+# --lt-dump-script. There is, deliberately, no --lt-help. -+# -+# The first argument to this parsing function should be the -+# script's $0 value, followed by "$@". -+lt_option_debug= -+func_parse_lt_options () -+{ -+ lt_script_arg0=\$0 -+ shift -+ for lt_opt -+ do -+ case \"\$lt_opt\" in -+ --lt-debug) lt_option_debug=1 ;; -+ --lt-dump-script) -+ lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\` -+ test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=. -+ lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\` -+ cat \"\$lt_dump_D/\$lt_dump_F\" -+ exit 0 -+ ;; -+ --lt-*) -+ \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2 -+ exit 1 -+ ;; -+ esac -+ done -+ -+ # Print the debug banner immediately: -+ if test -n \"\$lt_option_debug\"; then -+ echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2 -+ fi -+} -+ -+# Used when --lt-debug. 
Prints its arguments to stdout -+# (redirection is the responsibility of the caller) -+func_lt_dump_args () -+{ -+ lt_dump_args_N=1; -+ for lt_arg -+ do -+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\" -+ lt_dump_args_N=\`expr \$lt_dump_args_N + 1\` -+ done -+} -+ -+# Core function for launching the target application -+func_exec_program_core () -+{ -+" -+ case $host in -+ # Backslashes separate directories on plain windows -+ *-*-mingw | *-*-os2* | *-cegcc*) -+ $ECHO "\ -+ if test -n \"\$lt_option_debug\"; then -+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2 -+ func_lt_dump_args \${1+\"\$@\"} 1>&2 -+ fi -+ exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} -+" -+ ;; -+ -+ *) -+ $ECHO "\ -+ if test -n \"\$lt_option_debug\"; then -+ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2 -+ func_lt_dump_args \${1+\"\$@\"} 1>&2 -+ fi -+ exec \"\$progdir/\$program\" \${1+\"\$@\"} -+" -+ ;; -+ esac -+ $ECHO "\ -+ \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2 -+ exit 1 -+} -+ -+# A function to encapsulate launching the target application -+# Strips options in the --lt-* namespace from \$@ and -+# launches target application with the remaining arguments. -+func_exec_program () -+{ -+ for lt_wr_arg -+ do -+ case \$lt_wr_arg in -+ --lt-*) ;; -+ *) set x \"\$@\" \"\$lt_wr_arg\"; shift;; -+ esac -+ shift -+ done -+ func_exec_program_core \${1+\"\$@\"} -+} -+ -+ # Parse options -+ func_parse_lt_options \"\$0\" \${1+\"\$@\"} - - # Find the directory that this script lives in. - thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\` -@@ -3078,7 +4065,7 @@ _LTECHO_EOF' - - # relink executable if necessary - if test -n \"\$relink_command\"; then -- if relink_command_output=\`eval \"\$relink_command\" 2>&1\`; then : -+ if relink_command_output=\`eval \$relink_command 2>&1\`; then : - else - $ECHO \"\$relink_command_output\" >&2 - $RM \"\$progdir/\$file\" -@@ -3102,6 +4089,18 @@ _LTECHO_EOF' - - if test -f \"\$progdir/\$program\"; then" - -+ # fixup the dll searchpath if we need to. -+ # -+ # Fix the DLL searchpath if we need to. Do this before prepending -+ # to shlibpath, because on Windows, both are PATH and uninstalled -+ # libraries must come first. -+ if test -n "$dllsearchpath"; then -+ $ECHO "\ -+ # Add the dll search path components to the executable PATH -+ PATH=$dllsearchpath:\$PATH -+" -+ fi -+ - # Export our shlibpath_var if we have one. - if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then - $ECHO "\ -@@ -3116,35 +4115,10 @@ _LTECHO_EOF' - " - fi - -- # fixup the dll searchpath if we need to. -- if test -n "$dllsearchpath"; then -- $ECHO "\ -- # Add the dll search path components to the executable PATH -- PATH=$dllsearchpath:\$PATH --" -- fi -- - $ECHO "\ - if test \"\$libtool_execute_magic\" != \"$magic\"; then - # Run the actual program with our arguments. --" -- case $host in -- # Backslashes separate directories on plain windows -- *-*-mingw | *-*-os2* | *-cegcc*) -- $ECHO "\ -- exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} --" -- ;; -- -- *) -- $ECHO "\ -- exec \"\$progdir/\$program\" \${1+\"\$@\"} --" -- ;; -- esac -- $ECHO "\ -- \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2 -- exit 1 -+ func_exec_program \${1+\"\$@\"} - fi - else - # The program doesn't exist. -@@ -3158,166 +4132,6 @@ fi\ - } - - --# func_to_host_path arg --# --# Convert paths to host format when used with build tools. 
--# Intended for use with "native" mingw (where libtool itself --# is running under the msys shell), or in the following cross- --# build environments: --# $build $host --# mingw (msys) mingw [e.g. native] --# cygwin mingw --# *nix + wine mingw --# where wine is equipped with the `winepath' executable. --# In the native mingw case, the (msys) shell automatically --# converts paths for any non-msys applications it launches, --# but that facility isn't available from inside the cwrapper. --# Similar accommodations are necessary for $host mingw and --# $build cygwin. Calling this function does no harm for other --# $host/$build combinations not listed above. --# --# ARG is the path (on $build) that should be converted to --# the proper representation for $host. The result is stored --# in $func_to_host_path_result. --func_to_host_path () --{ -- func_to_host_path_result="$1" -- if test -n "$1"; then -- case $host in -- *mingw* ) -- lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' -- case $build in -- *mingw* ) # actually, msys -- # awkward: cmd appends spaces to result -- func_to_host_path_result=`( cmd //c echo "$1" ) 2>/dev/null | -- $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` -- ;; -- *cygwin* ) -- func_to_host_path_result=`cygpath -w "$1" | -- $SED -e "$lt_sed_naive_backslashify"` -- ;; -- * ) -- # Unfortunately, winepath does not exit with a non-zero -- # error code, so we are forced to check the contents of -- # stdout. On the other hand, if the command is not -- # found, the shell will set an exit code of 127 and print -- # *an error message* to stdout. So we must check for both -- # error code of zero AND non-empty stdout, which explains -- # the odd construction: -- func_to_host_path_tmp1=`winepath -w "$1" 2>/dev/null` -- if test "$?" -eq 0 && test -n "${func_to_host_path_tmp1}"; then -- func_to_host_path_result=`$ECHO "$func_to_host_path_tmp1" | -- $SED -e "$lt_sed_naive_backslashify"` -- else -- # Allow warning below. -- func_to_host_path_result= -- fi -- ;; -- esac -- if test -z "$func_to_host_path_result" ; then -- func_error "Could not determine host path corresponding to" -- func_error " \`$1'" -- func_error "Continuing, but uninstalled executables may not work." -- # Fallback: -- func_to_host_path_result="$1" -- fi -- ;; -- esac -- fi --} --# end: func_to_host_path -- --# func_to_host_pathlist arg --# --# Convert pathlists to host format when used with build tools. --# See func_to_host_path(), above. This function supports the --# following $build/$host combinations (but does no harm for --# combinations not listed here): --# $build $host --# mingw (msys) mingw [e.g. native] --# cygwin mingw --# *nix + wine mingw --# --# Path separators are also converted from $build format to --# $host format. If ARG begins or ends with a path separator --# character, it is preserved (but converted to $host format) --# on output. --# --# ARG is a pathlist (on $build) that should be converted to --# the proper representation on $host. The result is stored --# in $func_to_host_pathlist_result. --func_to_host_pathlist () --{ -- func_to_host_pathlist_result="$1" -- if test -n "$1"; then -- case $host in -- *mingw* ) -- lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' -- # Remove leading and trailing path separator characters from -- # ARG. msys behavior is inconsistent here, cygpath turns them -- # into '.;' and ';.', and winepath ignores them completely. 
-- func_stripname : : "$1" -- func_to_host_pathlist_tmp1=$func_stripname_result -- case $build in -- *mingw* ) # Actually, msys. -- # Awkward: cmd appends spaces to result. -- func_to_host_pathlist_result=` -- ( cmd //c echo "$func_to_host_pathlist_tmp1" ) 2>/dev/null | -- $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` -- ;; -- *cygwin* ) -- func_to_host_pathlist_result=`cygpath -w -p "$func_to_host_pathlist_tmp1" | -- $SED -e "$lt_sed_naive_backslashify"` -- ;; -- * ) -- # unfortunately, winepath doesn't convert pathlists -- func_to_host_pathlist_result="" -- func_to_host_pathlist_oldIFS=$IFS -- IFS=: -- for func_to_host_pathlist_f in $func_to_host_pathlist_tmp1 ; do -- IFS=$func_to_host_pathlist_oldIFS -- if test -n "$func_to_host_pathlist_f" ; then -- func_to_host_path "$func_to_host_pathlist_f" -- if test -n "$func_to_host_path_result" ; then -- if test -z "$func_to_host_pathlist_result" ; then -- func_to_host_pathlist_result="$func_to_host_path_result" -- else -- func_append func_to_host_pathlist_result ";$func_to_host_path_result" -- fi -- fi -- fi -- done -- IFS=$func_to_host_pathlist_oldIFS -- ;; -- esac -- if test -z "$func_to_host_pathlist_result"; then -- func_error "Could not determine the host path(s) corresponding to" -- func_error " \`$1'" -- func_error "Continuing, but uninstalled executables may not work." -- # Fallback. This may break if $1 contains DOS-style drive -- # specifications. The fix is not to complicate the expression -- # below, but for the user to provide a working wine installation -- # with winepath so that path translation in the cross-to-mingw -- # case works properly. -- lt_replace_pathsep_nix_to_dos="s|:|;|g" -- func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp1" |\ -- $SED -e "$lt_replace_pathsep_nix_to_dos"` -- fi -- # Now, add the leading and trailing path separators back -- case "$1" in -- :* ) func_to_host_pathlist_result=";$func_to_host_pathlist_result" -- ;; -- esac -- case "$1" in -- *: ) func_append func_to_host_pathlist_result ";" -- ;; -- esac -- ;; -- esac -- fi --} --# end: func_to_host_pathlist -- - # func_emit_cwrapperexe_src - # emit the source code for a wrapper executable on stdout - # Must ONLY be called from within func_mode_link because -@@ -3334,10 +4148,6 @@ func_emit_cwrapperexe_src () - - This wrapper executable should never be moved out of the build directory. - If it is, it will not operate correctly. -- -- Currently, it simply execs the wrapper *script* "$SHELL $output", -- but could eventually absorb all of the scripts functionality and -- exec $objdir/$outputname directly. - */ - EOF - cat <<"EOF" -@@ -3462,22 +4272,13 @@ int setenv (const char *, const char *, int); - if (stale) { free ((void *) stale); stale = 0; } \ - } while (0) - --#undef LTWRAPPER_DEBUGPRINTF --#if defined LT_DEBUGWRAPPER --# define LTWRAPPER_DEBUGPRINTF(args) ltwrapper_debugprintf args --static void --ltwrapper_debugprintf (const char *fmt, ...) 
--{ -- va_list args; -- va_start (args, fmt); -- (void) vfprintf (stderr, fmt, args); -- va_end (args); --} -+#if defined(LT_DEBUGWRAPPER) -+static int lt_debug = 1; - #else --# define LTWRAPPER_DEBUGPRINTF(args) -+static int lt_debug = 0; - #endif - --const char *program_name = NULL; -+const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */ - - void *xmalloc (size_t num); - char *xstrdup (const char *string); -@@ -3487,7 +4288,10 @@ char *chase_symlinks (const char *pathspec); - int make_executable (const char *path); - int check_executable (const char *path); - char *strendzap (char *str, const char *pat); --void lt_fatal (const char *message, ...); -+void lt_debugprintf (const char *file, int line, const char *fmt, ...); -+void lt_fatal (const char *file, int line, const char *message, ...); -+static const char *nonnull (const char *s); -+static const char *nonempty (const char *s); - void lt_setenv (const char *name, const char *value); - char *lt_extend_str (const char *orig_value, const char *add, int to_end); - void lt_update_exe_path (const char *name, const char *value); -@@ -3497,14 +4301,14 @@ void lt_dump_script (FILE *f); - EOF - - cat <"))); -+ lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n", -+ nonnull (lt_argv_zero)); - for (i = 0; i < newargc; i++) - { -- LTWRAPPER_DEBUGPRINTF (("(main) newargz[%d] : %s\n", i, (newargz[i] ? newargz[i] : ""))); -+ lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n", -+ i, nonnull (newargz[i])); - } - - EOF -@@ -3706,7 +4529,9 @@ EOF - if (rval == -1) - { - /* failed to start process */ -- LTWRAPPER_DEBUGPRINTF (("(main) failed to launch target \"%s\": errno = %d\n", lt_argv_zero, errno)); -+ lt_debugprintf (__FILE__, __LINE__, -+ "(main) failed to launch target \"%s\": %s\n", -+ lt_argv_zero, nonnull (strerror (errno))); - return 127; - } - return rval; -@@ -3728,7 +4553,7 @@ xmalloc (size_t num) - { - void *p = (void *) malloc (num); - if (!p) -- lt_fatal ("Memory exhausted"); -+ lt_fatal (__FILE__, __LINE__, "memory exhausted"); - - return p; - } -@@ -3762,8 +4587,8 @@ check_executable (const char *path) - { - struct stat st; - -- LTWRAPPER_DEBUGPRINTF (("(check_executable) : %s\n", -- path ? (*path ? path : "EMPTY!") : "NULL!")); -+ lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n", -+ nonempty (path)); - if ((!path) || (!*path)) - return 0; - -@@ -3780,8 +4605,8 @@ make_executable (const char *path) - int rval = 0; - struct stat st; - -- LTWRAPPER_DEBUGPRINTF (("(make_executable) : %s\n", -- path ? (*path ? path : "EMPTY!") : "NULL!")); -+ lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n", -+ nonempty (path)); - if ((!path) || (!*path)) - return 0; - -@@ -3807,8 +4632,8 @@ find_executable (const char *wrapper) - int tmp_len; - char *concat_name; - -- LTWRAPPER_DEBUGPRINTF (("(find_executable) : %s\n", -- wrapper ? (*wrapper ? 
wrapper : "EMPTY!") : "NULL!")); -+ lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n", -+ nonempty (wrapper)); - - if ((wrapper == NULL) || (*wrapper == '\0')) - return NULL; -@@ -3861,7 +4686,8 @@ find_executable (const char *wrapper) - { - /* empty path: current directory */ - if (getcwd (tmp, LT_PATHMAX) == NULL) -- lt_fatal ("getcwd failed"); -+ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s", -+ nonnull (strerror (errno))); - tmp_len = strlen (tmp); - concat_name = - XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); -@@ -3886,7 +4712,8 @@ find_executable (const char *wrapper) - } - /* Relative path | not found in path: prepend cwd */ - if (getcwd (tmp, LT_PATHMAX) == NULL) -- lt_fatal ("getcwd failed"); -+ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s", -+ nonnull (strerror (errno))); - tmp_len = strlen (tmp); - concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); - memcpy (concat_name, tmp, tmp_len); -@@ -3912,8 +4739,9 @@ chase_symlinks (const char *pathspec) - int has_symlinks = 0; - while (strlen (tmp_pathspec) && !has_symlinks) - { -- LTWRAPPER_DEBUGPRINTF (("checking path component for symlinks: %s\n", -- tmp_pathspec)); -+ lt_debugprintf (__FILE__, __LINE__, -+ "checking path component for symlinks: %s\n", -+ tmp_pathspec); - if (lstat (tmp_pathspec, &s) == 0) - { - if (S_ISLNK (s.st_mode) != 0) -@@ -3935,8 +4763,9 @@ chase_symlinks (const char *pathspec) - } - else - { -- char *errstr = strerror (errno); -- lt_fatal ("Error accessing file %s (%s)", tmp_pathspec, errstr); -+ lt_fatal (__FILE__, __LINE__, -+ "error accessing file \"%s\": %s", -+ tmp_pathspec, nonnull (strerror (errno))); - } - } - XFREE (tmp_pathspec); -@@ -3949,7 +4778,8 @@ chase_symlinks (const char *pathspec) - tmp_pathspec = realpath (pathspec, buf); - if (tmp_pathspec == 0) - { -- lt_fatal ("Could not follow symlinks for %s", pathspec); -+ lt_fatal (__FILE__, __LINE__, -+ "could not follow symlinks for %s", pathspec); - } - return xstrdup (tmp_pathspec); - #endif -@@ -3975,11 +4805,25 @@ strendzap (char *str, const char *pat) - return str; - } - -+void -+lt_debugprintf (const char *file, int line, const char *fmt, ...) -+{ -+ va_list args; -+ if (lt_debug) -+ { -+ (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line); -+ va_start (args, fmt); -+ (void) vfprintf (stderr, fmt, args); -+ va_end (args); -+ } -+} -+ - static void --lt_error_core (int exit_status, const char *mode, -+lt_error_core (int exit_status, const char *file, -+ int line, const char *mode, - const char *message, va_list ap) - { -- fprintf (stderr, "%s: %s: ", program_name, mode); -+ fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode); - vfprintf (stderr, message, ap); - fprintf (stderr, ".\n"); - -@@ -3988,20 +4832,32 @@ lt_error_core (int exit_status, const char *mode, - } - - void --lt_fatal (const char *message, ...) -+lt_fatal (const char *file, int line, const char *message, ...) - { - va_list ap; - va_start (ap, message); -- lt_error_core (EXIT_FAILURE, "FATAL", message, ap); -+ lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap); - va_end (ap); - } - -+static const char * -+nonnull (const char *s) -+{ -+ return s ? s : "(null)"; -+} -+ -+static const char * -+nonempty (const char *s) -+{ -+ return (s && !*s) ? "(empty)" : nonnull (s); -+} -+ - void - lt_setenv (const char *name, const char *value) - { -- LTWRAPPER_DEBUGPRINTF (("(lt_setenv) setting '%s' to '%s'\n", -- (name ? name : ""), -- (value ? 
value : ""))); -+ lt_debugprintf (__FILE__, __LINE__, -+ "(lt_setenv) setting '%s' to '%s'\n", -+ nonnull (name), nonnull (value)); - { - #ifdef HAVE_SETENV - /* always make a copy, for consistency with !HAVE_SETENV */ -@@ -4049,9 +4905,9 @@ lt_extend_str (const char *orig_value, const char *add, int to_end) - void - lt_update_exe_path (const char *name, const char *value) - { -- LTWRAPPER_DEBUGPRINTF (("(lt_update_exe_path) modifying '%s' by prepending '%s'\n", -- (name ? name : ""), -- (value ? value : ""))); -+ lt_debugprintf (__FILE__, __LINE__, -+ "(lt_update_exe_path) modifying '%s' by prepending '%s'\n", -+ nonnull (name), nonnull (value)); - - if (name && *name && value && *value) - { -@@ -4070,9 +4926,9 @@ lt_update_exe_path (const char *name, const char *value) - void - lt_update_lib_path (const char *name, const char *value) - { -- LTWRAPPER_DEBUGPRINTF (("(lt_update_lib_path) modifying '%s' by prepending '%s'\n", -- (name ? name : ""), -- (value ? value : ""))); -+ lt_debugprintf (__FILE__, __LINE__, -+ "(lt_update_lib_path) modifying '%s' by prepending '%s'\n", -+ nonnull (name), nonnull (value)); - - if (name && *name && value && *value) - { -@@ -4222,7 +5078,7 @@ EOF - func_win32_import_lib_p () - { - $opt_debug -- case `eval "$file_magic_cmd \"\$1\" 2>/dev/null" | $SED -e 10q` in -+ case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in - *import*) : ;; - *) false ;; - esac -@@ -4401,9 +5257,9 @@ func_mode_link () - ;; - *) - if test "$prev" = dlfiles; then -- dlfiles="$dlfiles $arg" -+ func_append dlfiles " $arg" - else -- dlprefiles="$dlprefiles $arg" -+ func_append dlprefiles " $arg" - fi - prev= - continue -@@ -4427,7 +5283,7 @@ func_mode_link () - *-*-darwin*) - case "$deplibs " in - *" $qarg.ltframework "*) ;; -- *) deplibs="$deplibs $qarg.ltframework" # this is fixed later -+ *) func_append deplibs " $qarg.ltframework" # this is fixed later - ;; - esac - ;; -@@ -4446,7 +5302,7 @@ func_mode_link () - moreargs= - for fil in `cat "$save_arg"` - do --# moreargs="$moreargs $fil" -+# func_append moreargs " $fil" - arg=$fil - # A libtool-controlled object. - -@@ -4475,7 +5331,7 @@ func_mode_link () - - if test "$prev" = dlfiles; then - if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then -- dlfiles="$dlfiles $pic_object" -+ func_append dlfiles " $pic_object" - prev= - continue - else -@@ -4487,7 +5343,7 @@ func_mode_link () - # CHECK ME: I think I busted this. -Ossama - if test "$prev" = dlprefiles; then - # Preload the old-style object. 
-- dlprefiles="$dlprefiles $pic_object" -+ func_append dlprefiles " $pic_object" - prev= - fi - -@@ -4557,12 +5413,12 @@ func_mode_link () - if test "$prev" = rpath; then - case "$rpath " in - *" $arg "*) ;; -- *) rpath="$rpath $arg" ;; -+ *) func_append rpath " $arg" ;; - esac - else - case "$xrpath " in - *" $arg "*) ;; -- *) xrpath="$xrpath $arg" ;; -+ *) func_append xrpath " $arg" ;; - esac - fi - prev= -@@ -4574,28 +5430,28 @@ func_mode_link () - continue - ;; - weak) -- weak_libs="$weak_libs $arg" -+ func_append weak_libs " $arg" - prev= - continue - ;; - xcclinker) -- linker_flags="$linker_flags $qarg" -- compiler_flags="$compiler_flags $qarg" -+ func_append linker_flags " $qarg" -+ func_append compiler_flags " $qarg" - prev= - func_append compile_command " $qarg" - func_append finalize_command " $qarg" - continue - ;; - xcompiler) -- compiler_flags="$compiler_flags $qarg" -+ func_append compiler_flags " $qarg" - prev= - func_append compile_command " $qarg" - func_append finalize_command " $qarg" - continue - ;; - xlinker) -- linker_flags="$linker_flags $qarg" -- compiler_flags="$compiler_flags $wl$qarg" -+ func_append linker_flags " $qarg" -+ func_append compiler_flags " $wl$qarg" - prev= - func_append compile_command " $wl$qarg" - func_append finalize_command " $wl$qarg" -@@ -4686,15 +5542,16 @@ func_mode_link () - ;; - - -L*) -- func_stripname '-L' '' "$arg" -- dir=$func_stripname_result -- if test -z "$dir"; then -+ func_stripname "-L" '' "$arg" -+ if test -z "$func_stripname_result"; then - if test "$#" -gt 0; then - func_fatal_error "require no space between \`-L' and \`$1'" - else - func_fatal_error "need path for \`-L' option" - fi - fi -+ func_resolve_sysroot "$func_stripname_result" -+ dir=$func_resolve_sysroot_result - # We need an absolute path. 
- case $dir in - [\\/]* | [A-Za-z]:[\\/]*) ;; -@@ -4706,10 +5563,16 @@ func_mode_link () - ;; - esac - case "$deplibs " in -- *" -L$dir "*) ;; -+ *" -L$dir "* | *" $arg "*) -+ # Will only happen for absolute or sysroot arguments -+ ;; - *) -- deplibs="$deplibs -L$dir" -- lib_search_path="$lib_search_path $dir" -+ # Preserve sysroot, but never include relative directories -+ case $dir in -+ [\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;; -+ *) func_append deplibs " -L$dir" ;; -+ esac -+ func_append lib_search_path " $dir" - ;; - esac - case $host in -@@ -4718,12 +5581,12 @@ func_mode_link () - case :$dllsearchpath: in - *":$dir:"*) ;; - ::) dllsearchpath=$dir;; -- *) dllsearchpath="$dllsearchpath:$dir";; -+ *) func_append dllsearchpath ":$dir";; - esac - case :$dllsearchpath: in - *":$testbindir:"*) ;; - ::) dllsearchpath=$testbindir;; -- *) dllsearchpath="$dllsearchpath:$testbindir";; -+ *) func_append dllsearchpath ":$testbindir";; - esac - ;; - esac -@@ -4747,7 +5610,7 @@ func_mode_link () - ;; - *-*-rhapsody* | *-*-darwin1.[012]) - # Rhapsody C and math libraries are in the System framework -- deplibs="$deplibs System.ltframework" -+ func_append deplibs " System.ltframework" - continue - ;; - *-*-sco3.2v5* | *-*-sco5v6*) -@@ -4758,9 +5621,6 @@ func_mode_link () - # Compiler inserts libc in the correct place for threads to work - test "X$arg" = "X-lc" && continue - ;; -- *-*-linux*) -- test "X$arg" = "X-lc" && continue -- ;; - esac - elif test "X$arg" = "X-lc_r"; then - case $host in -@@ -4770,7 +5630,7 @@ func_mode_link () - ;; - esac - fi -- deplibs="$deplibs $arg" -+ func_append deplibs " $arg" - continue - ;; - -@@ -4782,8 +5642,8 @@ func_mode_link () - # Tru64 UNIX uses -model [arg] to determine the layout of C++ - # classes, name mangling, and exception handling. - # Darwin uses the -arch flag to determine output architecture. -- -model|-arch|-isysroot) -- compiler_flags="$compiler_flags $arg" -+ -model|-arch|-isysroot|--sysroot) -+ func_append compiler_flags " $arg" - func_append compile_command " $arg" - func_append finalize_command " $arg" - prev=xcompiler -@@ -4791,12 +5651,12 @@ func_mode_link () - ;; - - -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads) -- compiler_flags="$compiler_flags $arg" -+ func_append compiler_flags " $arg" - func_append compile_command " $arg" - func_append finalize_command " $arg" - case "$new_inherited_linker_flags " in - *" $arg "*) ;; -- * ) new_inherited_linker_flags="$new_inherited_linker_flags $arg" ;; -+ * ) func_append new_inherited_linker_flags " $arg" ;; - esac - continue - ;; -@@ -4863,13 +5723,17 @@ func_mode_link () - # We need an absolute path. 
- case $dir in - [\\/]* | [A-Za-z]:[\\/]*) ;; -+ =*) -+ func_stripname '=' '' "$dir" -+ dir=$lt_sysroot$func_stripname_result -+ ;; - *) - func_fatal_error "only absolute run-paths are allowed" - ;; - esac - case "$xrpath " in - *" $dir "*) ;; -- *) xrpath="$xrpath $dir" ;; -+ *) func_append xrpath " $dir" ;; - esac - continue - ;; -@@ -4922,8 +5786,8 @@ func_mode_link () - for flag in $args; do - IFS="$save_ifs" - func_quote_for_eval "$flag" -- arg="$arg $func_quote_for_eval_result" -- compiler_flags="$compiler_flags $func_quote_for_eval_result" -+ func_append arg " $func_quote_for_eval_result" -+ func_append compiler_flags " $func_quote_for_eval_result" - done - IFS="$save_ifs" - func_stripname ' ' '' "$arg" -@@ -4938,9 +5802,9 @@ func_mode_link () - for flag in $args; do - IFS="$save_ifs" - func_quote_for_eval "$flag" -- arg="$arg $wl$func_quote_for_eval_result" -- compiler_flags="$compiler_flags $wl$func_quote_for_eval_result" -- linker_flags="$linker_flags $func_quote_for_eval_result" -+ func_append arg " $wl$func_quote_for_eval_result" -+ func_append compiler_flags " $wl$func_quote_for_eval_result" -+ func_append linker_flags " $func_quote_for_eval_result" - done - IFS="$save_ifs" - func_stripname ' ' '' "$arg" -@@ -4968,24 +5832,27 @@ func_mode_link () - arg="$func_quote_for_eval_result" - ;; - -- # -64, -mips[0-9] enable 64-bit mode on the SGI compiler -- # -r[0-9][0-9]* specifies the processor on the SGI compiler -- # -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler -- # +DA*, +DD* enable 64-bit mode on the HP compiler -- # -q* pass through compiler args for the IBM compiler -- # -m*, -t[45]*, -txscale* pass through architecture-specific -- # compiler args for GCC -- # -F/path gives path to uninstalled frameworks, gcc on darwin -- # -p, -pg, --coverage, -fprofile-* pass through profiling flag for GCC -- # @file GCC response files -- # -tp=* Portland pgcc target processor selection -+ # Flags to be passed through unchanged, with rationale: -+ # -64, -mips[0-9] enable 64-bit mode for the SGI compiler -+ # -r[0-9][0-9]* specify processor for the SGI compiler -+ # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler -+ # +DA*, +DD* enable 64-bit mode for the HP compiler -+ # -q* compiler args for the IBM compiler -+ # -m*, -t[45]*, -txscale* architecture-specific flags for GCC -+ # -F/path path to uninstalled frameworks, gcc on darwin -+ # -p, -pg, --coverage, -fprofile-* profiling flags for GCC -+ # @file GCC response files -+ # -tp=* Portland pgcc target processor selection -+ # --sysroot=* for sysroot support -+ # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization - -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ -- -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*) -+ -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \ -+ -O*|-flto*|-fwhopr*|-fuse-linker-plugin) - func_quote_for_eval "$arg" - arg="$func_quote_for_eval_result" - func_append compile_command " $arg" - func_append finalize_command " $arg" -- compiler_flags="$compiler_flags $arg" -+ func_append compiler_flags " $arg" - continue - ;; - -@@ -4997,7 +5864,7 @@ func_mode_link () - - *.$objext) - # A standard object. 
-- objs="$objs $arg" -+ func_append objs " $arg" - ;; - - *.lo) -@@ -5028,7 +5895,7 @@ func_mode_link () - - if test "$prev" = dlfiles; then - if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then -- dlfiles="$dlfiles $pic_object" -+ func_append dlfiles " $pic_object" - prev= - continue - else -@@ -5040,7 +5907,7 @@ func_mode_link () - # CHECK ME: I think I busted this. -Ossama - if test "$prev" = dlprefiles; then - # Preload the old-style object. -- dlprefiles="$dlprefiles $pic_object" -+ func_append dlprefiles " $pic_object" - prev= - fi - -@@ -5085,24 +5952,25 @@ func_mode_link () - - *.$libext) - # An archive. -- deplibs="$deplibs $arg" -- old_deplibs="$old_deplibs $arg" -+ func_append deplibs " $arg" -+ func_append old_deplibs " $arg" - continue - ;; - - *.la) - # A libtool-controlled library. - -+ func_resolve_sysroot "$arg" - if test "$prev" = dlfiles; then - # This library was specified with -dlopen. -- dlfiles="$dlfiles $arg" -+ func_append dlfiles " $func_resolve_sysroot_result" - prev= - elif test "$prev" = dlprefiles; then - # The library was specified with -dlpreopen. -- dlprefiles="$dlprefiles $arg" -+ func_append dlprefiles " $func_resolve_sysroot_result" - prev= - else -- deplibs="$deplibs $arg" -+ func_append deplibs " $func_resolve_sysroot_result" - fi - continue - ;; -@@ -5127,7 +5995,7 @@ func_mode_link () - func_fatal_help "the \`$prevarg' option requires an argument" - - if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then -- eval "arg=\"$export_dynamic_flag_spec\"" -+ eval arg=\"$export_dynamic_flag_spec\" - func_append compile_command " $arg" - func_append finalize_command " $arg" - fi -@@ -5144,11 +6012,13 @@ func_mode_link () - else - shlib_search_path= - fi -- eval "sys_lib_search_path=\"$sys_lib_search_path_spec\"" -- eval "sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"" -+ eval sys_lib_search_path=\"$sys_lib_search_path_spec\" -+ eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" - - func_dirname "$output" "/" "" - output_objdir="$func_dirname_result$objdir" -+ func_to_tool_file "$output_objdir/" -+ tool_output_objdir=$func_to_tool_file_result - # Create the object directory. - func_mkdir_p "$output_objdir" - -@@ -5169,12 +6039,12 @@ func_mode_link () - # Find all interdependent deplibs by searching for libraries - # that are linked more than once (e.g. 
-la -lb -la) - for deplib in $deplibs; do -- if $opt_duplicate_deps ; then -+ if $opt_preserve_dup_deps ; then - case "$libs " in -- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; -+ *" $deplib "*) func_append specialdeplibs " $deplib" ;; - esac - fi -- libs="$libs $deplib" -+ func_append libs " $deplib" - done - - if test "$linkmode" = lib; then -@@ -5187,9 +6057,9 @@ func_mode_link () - if $opt_duplicate_compiler_generated_deps; then - for pre_post_dep in $predeps $postdeps; do - case "$pre_post_deps " in -- *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;; -+ *" $pre_post_dep "*) func_append specialdeplibs " $pre_post_deps" ;; - esac -- pre_post_deps="$pre_post_deps $pre_post_dep" -+ func_append pre_post_deps " $pre_post_dep" - done - fi - pre_post_deps= -@@ -5256,8 +6126,9 @@ func_mode_link () - for lib in $dlprefiles; do - # Ignore non-libtool-libs - dependency_libs= -+ func_resolve_sysroot "$lib" - case $lib in -- *.la) func_source "$lib" ;; -+ *.la) func_source "$func_resolve_sysroot_result" ;; - esac - - # Collect preopened libtool deplibs, except any this library -@@ -5267,7 +6138,7 @@ func_mode_link () - deplib_base=$func_basename_result - case " $weak_libs " in - *" $deplib_base "*) ;; -- *) deplibs="$deplibs $deplib" ;; -+ *) func_append deplibs " $deplib" ;; - esac - done - done -@@ -5288,11 +6159,11 @@ func_mode_link () - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else -- compiler_flags="$compiler_flags $deplib" -+ func_append compiler_flags " $deplib" - if test "$linkmode" = lib ; then - case "$new_inherited_linker_flags " in - *" $deplib "*) ;; -- * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;; -+ * ) func_append new_inherited_linker_flags " $deplib" ;; - esac - fi - fi -@@ -5377,7 +6248,7 @@ func_mode_link () - if test "$linkmode" = lib ; then - case "$new_inherited_linker_flags " in - *" $deplib "*) ;; -- * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;; -+ * ) func_append new_inherited_linker_flags " $deplib" ;; - esac - fi - fi -@@ -5390,7 +6261,8 @@ func_mode_link () - test "$pass" = conv && continue - newdependency_libs="$deplib $newdependency_libs" - func_stripname '-L' '' "$deplib" -- newlib_search_path="$newlib_search_path $func_stripname_result" -+ func_resolve_sysroot "$func_stripname_result" -+ func_append newlib_search_path " $func_resolve_sysroot_result" - ;; - prog) - if test "$pass" = conv; then -@@ -5404,7 +6276,8 @@ func_mode_link () - finalize_deplibs="$deplib $finalize_deplibs" - fi - func_stripname '-L' '' "$deplib" -- newlib_search_path="$newlib_search_path $func_stripname_result" -+ func_resolve_sysroot "$func_stripname_result" -+ func_append newlib_search_path " $func_resolve_sysroot_result" - ;; - *) - func_warning "\`-L' is ignored for archives/objects" -@@ -5415,17 +6288,21 @@ func_mode_link () - -R*) - if test "$pass" = link; then - func_stripname '-R' '' "$deplib" -- dir=$func_stripname_result -+ func_resolve_sysroot "$func_stripname_result" -+ dir=$func_resolve_sysroot_result - # Make sure the xrpath contains only unique directories. 
- case "$xrpath " in - *" $dir "*) ;; -- *) xrpath="$xrpath $dir" ;; -+ *) func_append xrpath " $dir" ;; - esac - fi - deplibs="$deplib $deplibs" - continue - ;; -- *.la) lib="$deplib" ;; -+ *.la) -+ func_resolve_sysroot "$deplib" -+ lib=$func_resolve_sysroot_result -+ ;; - *.$libext) - if test "$pass" = conv; then - deplibs="$deplib $deplibs" -@@ -5488,11 +6365,11 @@ func_mode_link () - if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then - # If there is no dlopen support or we're linking statically, - # we need to preload. -- newdlprefiles="$newdlprefiles $deplib" -+ func_append newdlprefiles " $deplib" - compile_deplibs="$deplib $compile_deplibs" - finalize_deplibs="$deplib $finalize_deplibs" - else -- newdlfiles="$newdlfiles $deplib" -+ func_append newdlfiles " $deplib" - fi - fi - continue -@@ -5538,7 +6415,7 @@ func_mode_link () - for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do - case " $new_inherited_linker_flags " in - *" $tmp_inherited_linker_flag "*) ;; -- *) new_inherited_linker_flags="$new_inherited_linker_flags $tmp_inherited_linker_flag";; -+ *) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";; - esac - done - fi -@@ -5546,8 +6423,8 @@ func_mode_link () - if test "$linkmode,$pass" = "lib,link" || - test "$linkmode,$pass" = "prog,scan" || - { test "$linkmode" != prog && test "$linkmode" != lib; }; then -- test -n "$dlopen" && dlfiles="$dlfiles $dlopen" -- test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen" -+ test -n "$dlopen" && func_append dlfiles " $dlopen" -+ test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen" - fi - - if test "$pass" = conv; then -@@ -5558,20 +6435,20 @@ func_mode_link () - func_fatal_error "cannot find name of link library for \`$lib'" - fi - # It is a libtool convenience library, so add in its objects. -- convenience="$convenience $ladir/$objdir/$old_library" -- old_convenience="$old_convenience $ladir/$objdir/$old_library" -+ func_append convenience " $ladir/$objdir/$old_library" -+ func_append old_convenience " $ladir/$objdir/$old_library" - elif test "$linkmode" != prog && test "$linkmode" != lib; then - func_fatal_error "\`$lib' is not a convenience library" - fi - tmp_libs= - for deplib in $dependency_libs; do - deplibs="$deplib $deplibs" -- if $opt_duplicate_deps ; then -+ if $opt_preserve_dup_deps ; then - case "$tmp_libs " in -- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; -+ *" $deplib "*) func_append specialdeplibs " $deplib" ;; - esac - fi -- tmp_libs="$tmp_libs $deplib" -+ func_append tmp_libs " $deplib" - done - continue - fi # $pass = conv -@@ -5579,9 +6456,15 @@ func_mode_link () - - # Get the name of the library we link against. - linklib= -- for l in $old_library $library_names; do -- linklib="$l" -- done -+ if test -n "$old_library" && -+ { test "$prefer_static_libs" = yes || -+ test "$prefer_static_libs,$installed" = "built,no"; }; then -+ linklib=$old_library -+ else -+ for l in $old_library $library_names; do -+ linklib="$l" -+ done -+ fi - if test -z "$linklib"; then - func_fatal_error "cannot find name of link library for \`$lib'" - fi -@@ -5598,9 +6481,9 @@ func_mode_link () - # statically, we need to preload. We also need to preload any - # dependent libraries so libltdl's deplib preloader doesn't - # bomb out in the load deplibs phase. 
-- dlprefiles="$dlprefiles $lib $dependency_libs" -+ func_append dlprefiles " $lib $dependency_libs" - else -- newdlfiles="$newdlfiles $lib" -+ func_append newdlfiles " $lib" - fi - continue - fi # $pass = dlopen -@@ -5622,14 +6505,14 @@ func_mode_link () - - # Find the relevant object directory and library name. - if test "X$installed" = Xyes; then -- if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then -+ if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then - func_warning "library \`$lib' was moved." - dir="$ladir" - absdir="$abs_ladir" - libdir="$abs_ladir" - else -- dir="$libdir" -- absdir="$libdir" -+ dir="$lt_sysroot$libdir" -+ absdir="$lt_sysroot$libdir" - fi - test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes - else -@@ -5637,12 +6520,12 @@ func_mode_link () - dir="$ladir" - absdir="$abs_ladir" - # Remove this search path later -- notinst_path="$notinst_path $abs_ladir" -+ func_append notinst_path " $abs_ladir" - else - dir="$ladir/$objdir" - absdir="$abs_ladir/$objdir" - # Remove this search path later -- notinst_path="$notinst_path $abs_ladir" -+ func_append notinst_path " $abs_ladir" - fi - fi # $installed = yes - func_stripname 'lib' '.la' "$laname" -@@ -5653,20 +6536,46 @@ func_mode_link () - if test -z "$libdir" && test "$linkmode" = prog; then - func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'" - fi -- # Prefer using a static library (so that no silly _DYNAMIC symbols -- # are required to link). -- if test -n "$old_library"; then -- newdlprefiles="$newdlprefiles $dir/$old_library" -- # Keep a list of preopened convenience libraries to check -- # that they are being used correctly in the link pass. -- test -z "$libdir" && \ -- dlpreconveniencelibs="$dlpreconveniencelibs $dir/$old_library" -- # Otherwise, use the dlname, so that lt_dlopen finds it. -- elif test -n "$dlname"; then -- newdlprefiles="$newdlprefiles $dir/$dlname" -- else -- newdlprefiles="$newdlprefiles $dir/$linklib" -- fi -+ case "$host" in -+ # special handling for platforms with PE-DLLs. -+ *cygwin* | *mingw* | *cegcc* ) -+ # Linker will automatically link against shared library if both -+ # static and shared are present. Therefore, ensure we extract -+ # symbols from the import library if a shared library is present -+ # (otherwise, the dlopen module name will be incorrect). We do -+ # this by putting the import library name into $newdlprefiles. -+ # We recover the dlopen module name by 'saving' the la file -+ # name in a special purpose variable, and (later) extracting the -+ # dlname from the la file. -+ if test -n "$dlname"; then -+ func_tr_sh "$dir/$linklib" -+ eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname" -+ func_append newdlprefiles " $dir/$linklib" -+ else -+ func_append newdlprefiles " $dir/$old_library" -+ # Keep a list of preopened convenience libraries to check -+ # that they are being used correctly in the link pass. -+ test -z "$libdir" && \ -+ func_append dlpreconveniencelibs " $dir/$old_library" -+ fi -+ ;; -+ * ) -+ # Prefer using a static library (so that no silly _DYNAMIC symbols -+ # are required to link). -+ if test -n "$old_library"; then -+ func_append newdlprefiles " $dir/$old_library" -+ # Keep a list of preopened convenience libraries to check -+ # that they are being used correctly in the link pass. -+ test -z "$libdir" && \ -+ func_append dlpreconveniencelibs " $dir/$old_library" -+ # Otherwise, use the dlname, so that lt_dlopen finds it. 
-+ elif test -n "$dlname"; then -+ func_append newdlprefiles " $dir/$dlname" -+ else -+ func_append newdlprefiles " $dir/$linklib" -+ fi -+ ;; -+ esac - fi # $pass = dlpreopen - - if test -z "$libdir"; then -@@ -5684,7 +6593,7 @@ func_mode_link () - - - if test "$linkmode" = prog && test "$pass" != link; then -- newlib_search_path="$newlib_search_path $ladir" -+ func_append newlib_search_path " $ladir" - deplibs="$lib $deplibs" - - linkalldeplibs=no -@@ -5697,7 +6606,8 @@ func_mode_link () - for deplib in $dependency_libs; do - case $deplib in - -L*) func_stripname '-L' '' "$deplib" -- newlib_search_path="$newlib_search_path $func_stripname_result" -+ func_resolve_sysroot "$func_stripname_result" -+ func_append newlib_search_path " $func_resolve_sysroot_result" - ;; - esac - # Need to link against all dependency_libs? -@@ -5708,12 +6618,12 @@ func_mode_link () - # or/and link against static libraries - newdependency_libs="$deplib $newdependency_libs" - fi -- if $opt_duplicate_deps ; then -+ if $opt_preserve_dup_deps ; then - case "$tmp_libs " in -- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; -+ *" $deplib "*) func_append specialdeplibs " $deplib" ;; - esac - fi -- tmp_libs="$tmp_libs $deplib" -+ func_append tmp_libs " $deplib" - done # for deplib - continue - fi # $linkmode = prog... -@@ -5728,7 +6638,7 @@ func_mode_link () - # Make sure the rpath contains only unique directories. - case "$temp_rpath:" in - *"$absdir:"*) ;; -- *) temp_rpath="$temp_rpath$absdir:" ;; -+ *) func_append temp_rpath "$absdir:" ;; - esac - fi - -@@ -5740,7 +6650,7 @@ func_mode_link () - *) - case "$compile_rpath " in - *" $absdir "*) ;; -- *) compile_rpath="$compile_rpath $absdir" -+ *) func_append compile_rpath " $absdir" ;; - esac - ;; - esac -@@ -5749,7 +6659,7 @@ func_mode_link () - *) - case "$finalize_rpath " in - *" $libdir "*) ;; -- *) finalize_rpath="$finalize_rpath $libdir" -+ *) func_append finalize_rpath " $libdir" ;; - esac - ;; - esac -@@ -5774,12 +6684,12 @@ func_mode_link () - case $host in - *cygwin* | *mingw* | *cegcc*) - # No point in relinking DLLs because paths are not encoded -- notinst_deplibs="$notinst_deplibs $lib" -+ func_append notinst_deplibs " $lib" - need_relink=no - ;; - *) - if test "$installed" = no; then -- notinst_deplibs="$notinst_deplibs $lib" -+ func_append notinst_deplibs " $lib" - need_relink=yes - fi - ;; -@@ -5814,7 +6724,7 @@ func_mode_link () - *) - case "$compile_rpath " in - *" $absdir "*) ;; -- *) compile_rpath="$compile_rpath $absdir" -+ *) func_append compile_rpath " $absdir" ;; - esac - ;; - esac -@@ -5823,7 +6733,7 @@ func_mode_link () - *) - case "$finalize_rpath " in - *" $libdir "*) ;; -- *) finalize_rpath="$finalize_rpath $libdir" -+ *) func_append finalize_rpath " $libdir" ;; - esac - ;; - esac -@@ -5835,7 +6745,7 @@ func_mode_link () - shift - realname="$1" - shift -- eval "libname=\"$libname_spec\"" -+ libname=`eval "\\$ECHO \"$libname_spec\""` - # use dlname if we got it. it's perfectly good, no? 
- if test -n "$dlname"; then - soname="$dlname" -@@ -5848,7 +6758,7 @@ func_mode_link () - versuffix="-$major" - ;; - esac -- eval "soname=\"$soname_spec\"" -+ eval soname=\"$soname_spec\" - else - soname="$realname" - fi -@@ -5877,7 +6787,7 @@ func_mode_link () - linklib=$newlib - fi # test -n "$old_archive_from_expsyms_cmds" - -- if test "$linkmode" = prog || test "$mode" != relink; then -+ if test "$linkmode" = prog || test "$opt_mode" != relink; then - add_shlibpath= - add_dir= - add= -@@ -5933,7 +6843,7 @@ func_mode_link () - if test -n "$inst_prefix_dir"; then - case $libdir in - [\\/]*) -- add_dir="$add_dir -L$inst_prefix_dir$libdir" -+ func_append add_dir " -L$inst_prefix_dir$libdir" - ;; - esac - fi -@@ -5955,7 +6865,7 @@ func_mode_link () - if test -n "$add_shlibpath"; then - case :$compile_shlibpath: in - *":$add_shlibpath:"*) ;; -- *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;; -+ *) func_append compile_shlibpath "$add_shlibpath:" ;; - esac - fi - if test "$linkmode" = prog; then -@@ -5969,13 +6879,13 @@ func_mode_link () - test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; -- *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; -+ *) func_append finalize_shlibpath "$libdir:" ;; - esac - fi - fi - fi - -- if test "$linkmode" = prog || test "$mode" = relink; then -+ if test "$linkmode" = prog || test "$opt_mode" = relink; then - add_shlibpath= - add_dir= - add= -@@ -5989,7 +6899,7 @@ func_mode_link () - elif test "$hardcode_shlibpath_var" = yes; then - case :$finalize_shlibpath: in - *":$libdir:"*) ;; -- *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; -+ *) func_append finalize_shlibpath "$libdir:" ;; - esac - add="-l$name" - elif test "$hardcode_automatic" = yes; then -@@ -6001,12 +6911,12 @@ func_mode_link () - fi - else - # We cannot seem to hardcode it, guess we'll fake it. -- add_dir="-L$libdir" -+ add_dir="-L$lt_sysroot$libdir" - # Try looking first in the location we're being installed to. - if test -n "$inst_prefix_dir"; then - case $libdir in - [\\/]*) -- add_dir="$add_dir -L$inst_prefix_dir$libdir" -+ func_append add_dir " -L$inst_prefix_dir$libdir" - ;; - esac - fi -@@ -6083,27 +6993,33 @@ func_mode_link () - temp_xrpath=$func_stripname_result - case " $xrpath " in - *" $temp_xrpath "*) ;; -- *) xrpath="$xrpath $temp_xrpath";; -+ *) func_append xrpath " $temp_xrpath";; - esac;; -- *) temp_deplibs="$temp_deplibs $libdir";; -+ *) func_append temp_deplibs " $libdir";; - esac - done - dependency_libs="$temp_deplibs" - fi - -- newlib_search_path="$newlib_search_path $absdir" -+ func_append newlib_search_path " $absdir" - # Link against this library - test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs" - # ... 
and its dependency_libs - tmp_libs= - for deplib in $dependency_libs; do - newdependency_libs="$deplib $newdependency_libs" -- if $opt_duplicate_deps ; then -+ case $deplib in -+ -L*) func_stripname '-L' '' "$deplib" -+ func_resolve_sysroot "$func_stripname_result";; -+ *) func_resolve_sysroot "$deplib" ;; -+ esac -+ if $opt_preserve_dup_deps ; then - case "$tmp_libs " in -- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; -+ *" $func_resolve_sysroot_result "*) -+ func_append specialdeplibs " $func_resolve_sysroot_result" ;; - esac - fi -- tmp_libs="$tmp_libs $deplib" -+ func_append tmp_libs " $func_resolve_sysroot_result" - done - - if test "$link_all_deplibs" != no; then -@@ -6113,8 +7029,10 @@ func_mode_link () - case $deplib in - -L*) path="$deplib" ;; - *.la) -+ func_resolve_sysroot "$deplib" -+ deplib=$func_resolve_sysroot_result - func_dirname "$deplib" "" "." -- dir="$func_dirname_result" -+ dir=$func_dirname_result - # We need an absolute path. - case $dir in - [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;; -@@ -6130,7 +7048,7 @@ func_mode_link () - case $host in - *-*-darwin*) - depdepl= -- deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` -+ eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` - if test -n "$deplibrary_names" ; then - for tmp in $deplibrary_names ; do - depdepl=$tmp -@@ -6141,8 +7059,8 @@ func_mode_link () - if test -z "$darwin_install_name"; then - darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` - fi -- compiler_flags="$compiler_flags ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}" -- linker_flags="$linker_flags -dylib_file ${darwin_install_name}:${depdepl}" -+ func_append compiler_flags " ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}" -+ func_append linker_flags " -dylib_file ${darwin_install_name}:${depdepl}" - path= - fi - fi -@@ -6152,7 +7070,7 @@ func_mode_link () - ;; - esac - else -- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` - test -z "$libdir" && \ - func_fatal_error "\`$deplib' is not a valid libtool archive" - test "$absdir" != "$libdir" && \ -@@ -6192,7 +7110,7 @@ func_mode_link () - for dir in $newlib_search_path; do - case "$lib_search_path " in - *" $dir "*) ;; -- *) lib_search_path="$lib_search_path $dir" ;; -+ *) func_append lib_search_path " $dir" ;; - esac - done - newlib_search_path= -@@ -6205,7 +7123,7 @@ func_mode_link () - fi - for var in $vars dependency_libs; do - # Add libraries to $var in reverse order -- eval tmp_libs=\$$var -+ eval tmp_libs=\"\$$var\" - new_libs= - for deplib in $tmp_libs; do - # FIXME: Pedantically, this is the right thing to do, so -@@ -6250,13 +7168,13 @@ func_mode_link () - -L*) - case " $tmp_libs " in - *" $deplib "*) ;; -- *) tmp_libs="$tmp_libs $deplib" ;; -+ *) func_append tmp_libs " $deplib" ;; - esac - ;; -- *) tmp_libs="$tmp_libs $deplib" ;; -+ *) func_append tmp_libs " $deplib" ;; - esac - done -- eval $var=\$tmp_libs -+ eval $var=\"$tmp_libs\" - done # for var - fi - # Last step: remove runtime libs from dependency_libs -@@ -6269,7 +7187,7 @@ func_mode_link () - ;; - esac - if test -n "$i" ; then -- tmp_libs="$tmp_libs $i" -+ func_append tmp_libs " $i" - fi - done - dependency_libs=$tmp_libs -@@ -6310,7 +7228,7 @@ func_mode_link () - # Now set the variables for building old libraries. 
- build_libtool_libs=no - oldlibs="$output" -- objs="$objs$old_deplibs" -+ func_append objs "$old_deplibs" - ;; - - lib) -@@ -6319,8 +7237,8 @@ func_mode_link () - lib*) - func_stripname 'lib' '.la' "$outputname" - name=$func_stripname_result -- eval "shared_ext=\"$shrext_cmds\"" -- eval "libname=\"$libname_spec\"" -+ eval shared_ext=\"$shrext_cmds\" -+ eval libname=\"$libname_spec\" - ;; - *) - test "$module" = no && \ -@@ -6330,8 +7248,8 @@ func_mode_link () - # Add the "lib" prefix for modules if required - func_stripname '' '.la' "$outputname" - name=$func_stripname_result -- eval "shared_ext=\"$shrext_cmds\"" -- eval "libname=\"$libname_spec\"" -+ eval shared_ext=\"$shrext_cmds\" -+ eval libname=\"$libname_spec\" - else - func_stripname '' '.la' "$outputname" - libname=$func_stripname_result -@@ -6346,7 +7264,7 @@ func_mode_link () - echo - $ECHO "*** Warning: Linking the shared library $output against the non-libtool" - $ECHO "*** objects $objs is not portable!" -- libobjs="$libobjs $objs" -+ func_append libobjs " $objs" - fi - fi - -@@ -6544,7 +7462,7 @@ func_mode_link () - done - - # Make executables depend on our current version. -- verstring="$verstring:${current}.0" -+ func_append verstring ":${current}.0" - ;; - - qnx) -@@ -6612,10 +7530,10 @@ func_mode_link () - fi - - func_generate_dlsyms "$libname" "$libname" "yes" -- libobjs="$libobjs $symfileobj" -+ func_append libobjs " $symfileobj" - test "X$libobjs" = "X " && libobjs= - -- if test "$mode" != relink; then -+ if test "$opt_mode" != relink; then - # Remove our outputs, but don't remove object files since they - # may have been created when compiling PIC objects. - removelist= -@@ -6631,7 +7549,7 @@ func_mode_link () - continue - fi - fi -- removelist="$removelist $p" -+ func_append removelist " $p" - ;; - *) ;; - esac -@@ -6642,7 +7560,7 @@ func_mode_link () - - # Now set the variables for building old libraries. - if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then -- oldlibs="$oldlibs $output_objdir/$libname.$libext" -+ func_append oldlibs " $output_objdir/$libname.$libext" - - # Transform .lo files to .o files. - oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP` -@@ -6659,10 +7577,11 @@ func_mode_link () - # If the user specified any rpath flags, then add them. - temp_xrpath= - for libdir in $xrpath; do -- temp_xrpath="$temp_xrpath -R$libdir" -+ func_replace_sysroot "$libdir" -+ func_append temp_xrpath " -R$func_replace_sysroot_result" - case "$finalize_rpath " in - *" $libdir "*) ;; -- *) finalize_rpath="$finalize_rpath $libdir" ;; -+ *) func_append finalize_rpath " $libdir" ;; - esac - done - if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then -@@ -6676,7 +7595,7 @@ func_mode_link () - for lib in $old_dlfiles; do - case " $dlprefiles $dlfiles " in - *" $lib "*) ;; -- *) dlfiles="$dlfiles $lib" ;; -+ *) func_append dlfiles " $lib" ;; - esac - done - -@@ -6686,7 +7605,7 @@ func_mode_link () - for lib in $old_dlprefiles; do - case "$dlprefiles " in - *" $lib "*) ;; -- *) dlprefiles="$dlprefiles $lib" ;; -+ *) func_append dlprefiles " $lib" ;; - esac - done - -@@ -6698,7 +7617,7 @@ func_mode_link () - ;; - *-*-rhapsody* | *-*-darwin1.[012]) - # Rhapsody C library is in the System framework -- deplibs="$deplibs System.ltframework" -+ func_append deplibs " System.ltframework" - ;; - *-*-netbsd*) - # Don't link with libc until the a.out ld.so is fixed. 
-@@ -6715,7 +7634,7 @@ func_mode_link () - *) - # Add libc to deplibs on all other systems if necessary. - if test "$build_libtool_need_lc" = "yes"; then -- deplibs="$deplibs -lc" -+ func_append deplibs " -lc" - fi - ;; - esac -@@ -6764,18 +7683,18 @@ EOF - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $i "*) -- newdeplibs="$newdeplibs $i" -+ func_append newdeplibs " $i" - i="" - ;; - esac - fi - if test -n "$i" ; then -- eval "libname=\"$libname_spec\"" -- eval "deplib_matches=\"$library_names_spec\"" -+ libname=`eval "\\$ECHO \"$libname_spec\""` -+ deplib_matches=`eval "\\$ECHO \"$library_names_spec\""` - set dummy $deplib_matches; shift - deplib_match=$1 - if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then -- newdeplibs="$newdeplibs $i" -+ func_append newdeplibs " $i" - else - droppeddeps=yes - echo -@@ -6789,7 +7708,7 @@ EOF - fi - ;; - *) -- newdeplibs="$newdeplibs $i" -+ func_append newdeplibs " $i" - ;; - esac - done -@@ -6807,18 +7726,18 @@ EOF - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $i "*) -- newdeplibs="$newdeplibs $i" -+ func_append newdeplibs " $i" - i="" - ;; - esac - fi - if test -n "$i" ; then -- eval "libname=\"$libname_spec\"" -- eval "deplib_matches=\"$library_names_spec\"" -+ libname=`eval "\\$ECHO \"$libname_spec\""` -+ deplib_matches=`eval "\\$ECHO \"$library_names_spec\""` - set dummy $deplib_matches; shift - deplib_match=$1 - if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then -- newdeplibs="$newdeplibs $i" -+ func_append newdeplibs " $i" - else - droppeddeps=yes - echo -@@ -6840,7 +7759,7 @@ EOF - fi - ;; - *) -- newdeplibs="$newdeplibs $i" -+ func_append newdeplibs " $i" - ;; - esac - done -@@ -6857,15 +7776,27 @@ EOF - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $a_deplib "*) -- newdeplibs="$newdeplibs $a_deplib" -+ func_append newdeplibs " $a_deplib" - a_deplib="" - ;; - esac - fi - if test -n "$a_deplib" ; then -- eval "libname=\"$libname_spec\"" -+ libname=`eval "\\$ECHO \"$libname_spec\""` -+ if test -n "$file_magic_glob"; then -+ libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob` -+ else -+ libnameglob=$libname -+ fi -+ test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob` - for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do -- potential_libs=`ls $i/$libname[.-]* 2>/dev/null` -+ if test "$want_nocaseglob" = yes; then -+ shopt -s nocaseglob -+ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null` -+ $nocaseglob -+ else -+ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null` -+ fi - for potent_lib in $potential_libs; do - # Follow soft links. - if ls -lLd "$potent_lib" 2>/dev/null | -@@ -6885,10 +7816,10 @@ EOF - *) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";; - esac - done -- if eval "$file_magic_cmd \"\$potlib\"" 2>/dev/null | -+ if eval $file_magic_cmd \"\$potlib\" 2>/dev/null | - $SED -e 10q | - $EGREP "$file_magic_regex" > /dev/null; then -- newdeplibs="$newdeplibs $a_deplib" -+ func_append newdeplibs " $a_deplib" - a_deplib="" - break 2 - fi -@@ -6913,7 +7844,7 @@ EOF - ;; - *) - # Add a -L argument. -- newdeplibs="$newdeplibs $a_deplib" -+ func_append newdeplibs " $a_deplib" - ;; - esac - done # Gone through all deplibs. 
-@@ -6929,20 +7860,20 @@ EOF - if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then - case " $predeps $postdeps " in - *" $a_deplib "*) -- newdeplibs="$newdeplibs $a_deplib" -+ func_append newdeplibs " $a_deplib" - a_deplib="" - ;; - esac - fi - if test -n "$a_deplib" ; then -- eval "libname=\"$libname_spec\"" -+ libname=`eval "\\$ECHO \"$libname_spec\""` - for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do - potential_libs=`ls $i/$libname[.-]* 2>/dev/null` - for potent_lib in $potential_libs; do - potlib="$potent_lib" # see symlink-check above in file_magic test - if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \ - $EGREP "$match_pattern_regex" > /dev/null; then -- newdeplibs="$newdeplibs $a_deplib" -+ func_append newdeplibs " $a_deplib" - a_deplib="" - break 2 - fi -@@ -6967,7 +7898,7 @@ EOF - ;; - *) - # Add a -L argument. -- newdeplibs="$newdeplibs $a_deplib" -+ func_append newdeplibs " $a_deplib" - ;; - esac - done # Gone through all deplibs. -@@ -7071,7 +8002,7 @@ EOF - *) - case " $deplibs " in - *" -L$path/$objdir "*) -- new_libs="$new_libs -L$path/$objdir" ;; -+ func_append new_libs " -L$path/$objdir" ;; - esac - ;; - esac -@@ -7081,10 +8012,10 @@ EOF - -L*) - case " $new_libs " in - *" $deplib "*) ;; -- *) new_libs="$new_libs $deplib" ;; -+ *) func_append new_libs " $deplib" ;; - esac - ;; -- *) new_libs="$new_libs $deplib" ;; -+ *) func_append new_libs " $deplib" ;; - esac - done - deplibs="$new_libs" -@@ -7101,10 +8032,12 @@ EOF - hardcode_libdirs= - dep_rpath= - rpath="$finalize_rpath" -- test "$mode" != relink && rpath="$compile_rpath$rpath" -+ test "$opt_mode" != relink && rpath="$compile_rpath$rpath" - for libdir in $rpath; do - if test -n "$hardcode_libdir_flag_spec"; then - if test -n "$hardcode_libdir_separator"; then -+ func_replace_sysroot "$libdir" -+ libdir=$func_replace_sysroot_result - if test -z "$hardcode_libdirs"; then - hardcode_libdirs="$libdir" - else -@@ -7113,18 +8046,18 @@ EOF - *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) - ;; - *) -- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" -+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" - ;; - esac - fi - else -- eval "flag=\"$hardcode_libdir_flag_spec\"" -- dep_rpath="$dep_rpath $flag" -+ eval flag=\"$hardcode_libdir_flag_spec\" -+ func_append dep_rpath " $flag" - fi - elif test -n "$runpath_var"; then - case "$perm_rpath " in - *" $libdir "*) ;; -- *) perm_rpath="$perm_rpath $libdir" ;; -+ *) func_append perm_rpath " $libdir" ;; - esac - fi - done -@@ -7133,40 +8066,38 @@ EOF - test -n "$hardcode_libdirs"; then - libdir="$hardcode_libdirs" - if test -n "$hardcode_libdir_flag_spec_ld"; then -- eval "dep_rpath=\"$hardcode_libdir_flag_spec_ld\"" -+ eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\" - else -- eval "dep_rpath=\"$hardcode_libdir_flag_spec\"" -+ eval dep_rpath=\"$hardcode_libdir_flag_spec\" - fi - fi - if test -n "$runpath_var" && test -n "$perm_rpath"; then - # We should set the runpath_var. 
- rpath= - for dir in $perm_rpath; do -- rpath="$rpath$dir:" -+ func_append rpath "$dir:" - done -- eval $runpath_var=\$rpath\$$runpath_var -- export $runpath_var -+ eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var" - fi - test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" - fi - - shlibpath="$finalize_shlibpath" -- test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath" -+ test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath" - if test -n "$shlibpath"; then -- eval $shlibpath_var=\$shlibpath\$$shlibpath_var -- export $shlibpath_var -+ eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" - fi - - # Get the real and link names of the library. -- eval "shared_ext=\"$shrext_cmds\"" -- eval "library_names=\"$library_names_spec\"" -+ eval shared_ext=\"$shrext_cmds\" -+ eval library_names=\"$library_names_spec\" - set dummy $library_names - shift - realname="$1" - shift - - if test -n "$soname_spec"; then -- eval "soname=\"$soname_spec\"" -+ eval soname=\"$soname_spec\" - else - soname="$realname" - fi -@@ -7178,7 +8109,7 @@ EOF - linknames= - for link - do -- linknames="$linknames $link" -+ func_append linknames " $link" - done - - # Use standard objects if they are pic -@@ -7189,7 +8120,7 @@ EOF - if test -n "$export_symbols" && test -n "$include_expsyms"; then - $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp" - export_symbols="$output_objdir/$libname.uexp" -- delfiles="$delfiles $export_symbols" -+ func_append delfiles " $export_symbols" - fi - - orig_export_symbols= -@@ -7220,13 +8151,45 @@ EOF - $opt_dry_run || $RM $export_symbols - cmds=$export_symbols_cmds - save_ifs="$IFS"; IFS='~' -- for cmd in $cmds; do -+ for cmd1 in $cmds; do - IFS="$save_ifs" -- eval "cmd=\"$cmd\"" -- func_len " $cmd" -- len=$func_len_result -- if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then -+ # Take the normal branch if the nm_file_list_spec branch -+ # doesn't work or if tool conversion is not needed. -+ case $nm_file_list_spec~$to_tool_file_cmd in -+ *~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*) -+ try_normal_branch=yes -+ eval cmd=\"$cmd1\" -+ func_len " $cmd" -+ len=$func_len_result -+ ;; -+ *) -+ try_normal_branch=no -+ ;; -+ esac -+ if test "$try_normal_branch" = yes \ -+ && { test "$len" -lt "$max_cmd_len" \ -+ || test "$max_cmd_len" -le -1; } -+ then -+ func_show_eval "$cmd" 'exit $?' -+ skipped_export=false -+ elif test -n "$nm_file_list_spec"; then -+ func_basename "$output" -+ output_la=$func_basename_result -+ save_libobjs=$libobjs -+ save_output=$output -+ output=${output_objdir}/${output_la}.nm -+ func_to_tool_file "$output" -+ libobjs=$nm_file_list_spec$func_to_tool_file_result -+ func_append delfiles " $output" -+ func_verbose "creating $NM input file list: $output" -+ for obj in $save_libobjs; do -+ func_to_tool_file "$obj" -+ $ECHO "$func_to_tool_file_result" -+ done > "$output" -+ eval cmd=\"$cmd1\" - func_show_eval "$cmd" 'exit $?' -+ output=$save_output -+ libobjs=$save_libobjs - skipped_export=false - else - # The command line is too long to execute in one step. 
-@@ -7248,7 +8211,7 @@ EOF - if test -n "$export_symbols" && test -n "$include_expsyms"; then - tmp_export_symbols="$export_symbols" - test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" -- $opt_dry_run || $ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols" -+ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' - fi - - if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then -@@ -7260,7 +8223,7 @@ EOF - # global variables. join(1) would be nice here, but unfortunately - # isn't a blessed tool. - $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter -- delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" -+ func_append delfiles " $export_symbols $output_objdir/$libname.filter" - export_symbols=$output_objdir/$libname.def - $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols - fi -@@ -7270,7 +8233,7 @@ EOF - case " $convenience " in - *" $test_deplib "*) ;; - *) -- tmp_deplibs="$tmp_deplibs $test_deplib" -+ func_append tmp_deplibs " $test_deplib" - ;; - esac - done -@@ -7286,43 +8249,43 @@ EOF - fi - if test -n "$whole_archive_flag_spec"; then - save_libobjs=$libobjs -- eval "libobjs=\"\$libobjs $whole_archive_flag_spec\"" -+ eval libobjs=\"\$libobjs $whole_archive_flag_spec\" - test "X$libobjs" = "X " && libobjs= - else - gentop="$output_objdir/${outputname}x" -- generated="$generated $gentop" -+ func_append generated " $gentop" - - func_extract_archives $gentop $convenience -- libobjs="$libobjs $func_extract_archives_result" -+ func_append libobjs " $func_extract_archives_result" - test "X$libobjs" = "X " && libobjs= - fi - fi - - if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then -- eval "flag=\"$thread_safe_flag_spec\"" -- linker_flags="$linker_flags $flag" -+ eval flag=\"$thread_safe_flag_spec\" -+ func_append linker_flags " $flag" - fi - - # Make a backup of the uninstalled library when relinking -- if test "$mode" = relink; then -- $opt_dry_run || (cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U) || exit $? -+ if test "$opt_mode" = relink; then -+ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $? - fi - - # Do each of the archive commands. 
- if test "$module" = yes && test -n "$module_cmds" ; then - if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then -- eval "test_cmds=\"$module_expsym_cmds\"" -+ eval test_cmds=\"$module_expsym_cmds\" - cmds=$module_expsym_cmds - else -- eval "test_cmds=\"$module_cmds\"" -+ eval test_cmds=\"$module_cmds\" - cmds=$module_cmds - fi - else - if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then -- eval "test_cmds=\"$archive_expsym_cmds\"" -+ eval test_cmds=\"$archive_expsym_cmds\" - cmds=$archive_expsym_cmds - else -- eval "test_cmds=\"$archive_cmds\"" -+ eval test_cmds=\"$archive_cmds\" - cmds=$archive_cmds - fi - fi -@@ -7366,10 +8329,13 @@ EOF - echo 'INPUT (' > $output - for obj in $save_libobjs - do -- $ECHO "$obj" >> $output -+ func_to_tool_file "$obj" -+ $ECHO "$func_to_tool_file_result" >> $output - done - echo ')' >> $output -- delfiles="$delfiles $output" -+ func_append delfiles " $output" -+ func_to_tool_file "$output" -+ output=$func_to_tool_file_result - elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then - output=${output_objdir}/${output_la}.lnk - func_verbose "creating linker input file list: $output" -@@ -7383,15 +8349,17 @@ EOF - fi - for obj - do -- $ECHO "$obj" >> $output -+ func_to_tool_file "$obj" -+ $ECHO "$func_to_tool_file_result" >> $output - done -- delfiles="$delfiles $output" -- output=$firstobj\"$file_list_spec$output\" -+ func_append delfiles " $output" -+ func_to_tool_file "$output" -+ output=$firstobj\"$file_list_spec$func_to_tool_file_result\" - else - if test -n "$save_libobjs"; then - func_verbose "creating reloadable object files..." - output=$output_objdir/$output_la-${k}.$objext -- eval "test_cmds=\"$reload_cmds\"" -+ eval test_cmds=\"$reload_cmds\" - func_len " $test_cmds" - len0=$func_len_result - len=$len0 -@@ -7411,12 +8379,12 @@ EOF - if test "$k" -eq 1 ; then - # The first file doesn't have a previous command to add. - reload_objs=$objlist -- eval "concat_cmds=\"$reload_cmds\"" -+ eval concat_cmds=\"$reload_cmds\" - else - # All subsequent reloadable object files will link in - # the last one created. - reload_objs="$objlist $last_robj" -- eval "concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"" -+ eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\" - fi - last_robj=$output_objdir/$output_la-${k}.$objext - func_arith $k + 1 -@@ -7433,11 +8401,11 @@ EOF - # files will link in the last one created. - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ - reload_objs="$objlist $last_robj" -- eval "concat_cmds=\"\${concat_cmds}$reload_cmds\"" -+ eval concat_cmds=\"\${concat_cmds}$reload_cmds\" - if test -n "$last_robj"; then -- eval "concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"" -+ eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\" - fi -- delfiles="$delfiles $output" -+ func_append delfiles " $output" - - else - output= -@@ -7450,9 +8418,9 @@ EOF - libobjs=$output - # Append the command to create the export file. - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ -- eval "concat_cmds=\"\$concat_cmds$export_symbols_cmds\"" -+ eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\" - if test -n "$last_robj"; then -- eval "concat_cmds=\"\$concat_cmds~\$RM $last_robj\"" -+ eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" - fi - fi - -@@ -7471,7 +8439,7 @@ EOF - lt_exit=$? 
- - # Restore the uninstalled library and exit -- if test "$mode" = relink; then -+ if test "$opt_mode" = relink; then - ( cd "$output_objdir" && \ - $RM "${realname}T" && \ - $MV "${realname}U" "$realname" ) -@@ -7492,7 +8460,7 @@ EOF - if test -n "$export_symbols" && test -n "$include_expsyms"; then - tmp_export_symbols="$export_symbols" - test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" -- $opt_dry_run || $ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols" -+ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' - fi - - if test -n "$orig_export_symbols"; then -@@ -7504,7 +8472,7 @@ EOF - # global variables. join(1) would be nice here, but unfortunately - # isn't a blessed tool. - $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter -- delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" -+ func_append delfiles " $export_symbols $output_objdir/$libname.filter" - export_symbols=$output_objdir/$libname.def - $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols - fi -@@ -7515,7 +8483,7 @@ EOF - output=$save_output - - if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then -- eval "libobjs=\"\$libobjs $whole_archive_flag_spec\"" -+ eval libobjs=\"\$libobjs $whole_archive_flag_spec\" - test "X$libobjs" = "X " && libobjs= - fi - # Expand the library linking commands again to reset the -@@ -7539,23 +8507,23 @@ EOF - - if test -n "$delfiles"; then - # Append the command to remove temporary files to $cmds. -- eval "cmds=\"\$cmds~\$RM $delfiles\"" -+ eval cmds=\"\$cmds~\$RM $delfiles\" - fi - - # Add any objects from preloaded convenience libraries - if test -n "$dlprefiles"; then - gentop="$output_objdir/${outputname}x" -- generated="$generated $gentop" -+ func_append generated " $gentop" - - func_extract_archives $gentop $dlprefiles -- libobjs="$libobjs $func_extract_archives_result" -+ func_append libobjs " $func_extract_archives_result" - test "X$libobjs" = "X " && libobjs= - fi - - save_ifs="$IFS"; IFS='~' - for cmd in $cmds; do - IFS="$save_ifs" -- eval "cmd=\"$cmd\"" -+ eval cmd=\"$cmd\" - $opt_silent || { - func_quote_for_expand "$cmd" - eval "func_echo $func_quote_for_expand_result" -@@ -7564,7 +8532,7 @@ EOF - lt_exit=$? - - # Restore the uninstalled library and exit -- if test "$mode" = relink; then -+ if test "$opt_mode" = relink; then - ( cd "$output_objdir" && \ - $RM "${realname}T" && \ - $MV "${realname}U" "$realname" ) -@@ -7576,8 +8544,8 @@ EOF - IFS="$save_ifs" - - # Restore the uninstalled library and exit -- if test "$mode" = relink; then -- $opt_dry_run || (cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname) || exit $? -+ if test "$opt_mode" = relink; then -+ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $? 
- - if test -n "$convenience"; then - if test -z "$whole_archive_flag_spec"; then -@@ -7656,17 +8624,20 @@ EOF - - if test -n "$convenience"; then - if test -n "$whole_archive_flag_spec"; then -- eval "tmp_whole_archive_flags=\"$whole_archive_flag_spec\"" -+ eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\" - reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'` - else - gentop="$output_objdir/${obj}x" -- generated="$generated $gentop" -+ func_append generated " $gentop" - - func_extract_archives $gentop $convenience - reload_conv_objs="$reload_objs $func_extract_archives_result" - fi - fi - -+ # If we're not building shared, we need to use non_pic_objs -+ test "$build_libtool_libs" != yes && libobjs="$non_pic_objects" -+ - # Create the old-style object. - reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test - -@@ -7690,7 +8661,7 @@ EOF - # Create an invalid libtool object if no PIC, so that we don't - # accidentally link it into a program. - # $show "echo timestamp > $libobj" -- # $opt_dry_run || echo timestamp > $libobj || exit $? -+ # $opt_dry_run || eval "echo timestamp > $libobj" || exit $? - exit $EXIT_SUCCESS - fi - -@@ -7740,8 +8711,8 @@ EOF - if test "$tagname" = CXX ; then - case ${MACOSX_DEPLOYMENT_TARGET-10.0} in - 10.[0123]) -- compile_command="$compile_command ${wl}-bind_at_load" -- finalize_command="$finalize_command ${wl}-bind_at_load" -+ func_append compile_command " ${wl}-bind_at_load" -+ func_append finalize_command " ${wl}-bind_at_load" - ;; - esac - fi -@@ -7761,7 +8732,7 @@ EOF - *) - case " $compile_deplibs " in - *" -L$path/$objdir "*) -- new_libs="$new_libs -L$path/$objdir" ;; -+ func_append new_libs " -L$path/$objdir" ;; - esac - ;; - esac -@@ -7771,17 +8742,17 @@ EOF - -L*) - case " $new_libs " in - *" $deplib "*) ;; -- *) new_libs="$new_libs $deplib" ;; -+ *) func_append new_libs " $deplib" ;; - esac - ;; -- *) new_libs="$new_libs $deplib" ;; -+ *) func_append new_libs " $deplib" ;; - esac - done - compile_deplibs="$new_libs" - - -- compile_command="$compile_command $compile_deplibs" -- finalize_command="$finalize_command $finalize_deplibs" -+ func_append compile_command " $compile_deplibs" -+ func_append finalize_command " $finalize_deplibs" - - if test -n "$rpath$xrpath"; then - # If the user specified any rpath flags, then add them. -@@ -7789,7 +8760,7 @@ EOF - # This is the magic to use -rpath. 
- case "$finalize_rpath " in - *" $libdir "*) ;; -- *) finalize_rpath="$finalize_rpath $libdir" ;; -+ *) func_append finalize_rpath " $libdir" ;; - esac - done - fi -@@ -7808,18 +8779,18 @@ EOF - *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) - ;; - *) -- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" -+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" - ;; - esac - fi - else -- eval "flag=\"$hardcode_libdir_flag_spec\"" -- rpath="$rpath $flag" -+ eval flag=\"$hardcode_libdir_flag_spec\" -+ func_append rpath " $flag" - fi - elif test -n "$runpath_var"; then - case "$perm_rpath " in - *" $libdir "*) ;; -- *) perm_rpath="$perm_rpath $libdir" ;; -+ *) func_append perm_rpath " $libdir" ;; - esac - fi - case $host in -@@ -7828,12 +8799,12 @@ EOF - case :$dllsearchpath: in - *":$libdir:"*) ;; - ::) dllsearchpath=$libdir;; -- *) dllsearchpath="$dllsearchpath:$libdir";; -+ *) func_append dllsearchpath ":$libdir";; - esac - case :$dllsearchpath: in - *":$testbindir:"*) ;; - ::) dllsearchpath=$testbindir;; -- *) dllsearchpath="$dllsearchpath:$testbindir";; -+ *) func_append dllsearchpath ":$testbindir";; - esac - ;; - esac -@@ -7842,7 +8813,7 @@ EOF - if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then - libdir="$hardcode_libdirs" -- eval "rpath=\" $hardcode_libdir_flag_spec\"" -+ eval rpath=\" $hardcode_libdir_flag_spec\" - fi - compile_rpath="$rpath" - -@@ -7859,18 +8830,18 @@ EOF - *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) - ;; - *) -- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" -+ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" - ;; - esac - fi - else -- eval "flag=\"$hardcode_libdir_flag_spec\"" -- rpath="$rpath $flag" -+ eval flag=\"$hardcode_libdir_flag_spec\" -+ func_append rpath " $flag" - fi - elif test -n "$runpath_var"; then - case "$finalize_perm_rpath " in - *" $libdir "*) ;; -- *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;; -+ *) func_append finalize_perm_rpath " $libdir" ;; - esac - fi - done -@@ -7878,7 +8849,7 @@ EOF - if test -n "$hardcode_libdir_separator" && - test -n "$hardcode_libdirs"; then - libdir="$hardcode_libdirs" -- eval "rpath=\" $hardcode_libdir_flag_spec\"" -+ eval rpath=\" $hardcode_libdir_flag_spec\" - fi - finalize_rpath="$rpath" - -@@ -7921,6 +8892,12 @@ EOF - exit_status=0 - func_show_eval "$link_command" 'exit_status=$?' - -+ if test -n "$postlink_cmds"; then -+ func_to_tool_file "$output" -+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` -+ func_execute_cmds "$postlink_cmds" 'exit $?' -+ fi -+ - # Delete the generated files. - if test -f "$output_objdir/${outputname}S.${objext}"; then - func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"' -@@ -7943,7 +8920,7 @@ EOF - # We should set the runpath_var. - rpath= - for dir in $perm_rpath; do -- rpath="$rpath$dir:" -+ func_append rpath "$dir:" - done - compile_var="$runpath_var=\"$rpath\$$runpath_var\" " - fi -@@ -7951,7 +8928,7 @@ EOF - # We should set the runpath_var. - rpath= - for dir in $finalize_perm_rpath; do -- rpath="$rpath$dir:" -+ func_append rpath "$dir:" - done - finalize_var="$runpath_var=\"$rpath\$$runpath_var\" " - fi -@@ -7966,6 +8943,13 @@ EOF - $opt_dry_run || $RM $output - # Link the executable and exit - func_show_eval "$link_command" 'exit $?' 
-+ -+ if test -n "$postlink_cmds"; then -+ func_to_tool_file "$output" -+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` -+ func_execute_cmds "$postlink_cmds" 'exit $?' -+ fi -+ - exit $EXIT_SUCCESS - fi - -@@ -7999,6 +8983,12 @@ EOF - - func_show_eval "$link_command" 'exit $?' - -+ if test -n "$postlink_cmds"; then -+ func_to_tool_file "$output_objdir/$outputname" -+ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` -+ func_execute_cmds "$postlink_cmds" 'exit $?' -+ fi -+ - # Now create the wrapper script. - func_verbose "creating $output" - -@@ -8096,7 +9086,7 @@ EOF - else - oldobjs="$old_deplibs $non_pic_objects" - if test "$preload" = yes && test -f "$symfileobj"; then -- oldobjs="$oldobjs $symfileobj" -+ func_append oldobjs " $symfileobj" - fi - fi - addlibs="$old_convenience" -@@ -8104,10 +9094,10 @@ EOF - - if test -n "$addlibs"; then - gentop="$output_objdir/${outputname}x" -- generated="$generated $gentop" -+ func_append generated " $gentop" - - func_extract_archives $gentop $addlibs -- oldobjs="$oldobjs $func_extract_archives_result" -+ func_append oldobjs " $func_extract_archives_result" - fi - - # Do each command in the archive commands. -@@ -8118,10 +9108,10 @@ EOF - # Add any objects from preloaded convenience libraries - if test -n "$dlprefiles"; then - gentop="$output_objdir/${outputname}x" -- generated="$generated $gentop" -+ func_append generated " $gentop" - - func_extract_archives $gentop $dlprefiles -- oldobjs="$oldobjs $func_extract_archives_result" -+ func_append oldobjs " $func_extract_archives_result" - fi - - # POSIX demands no paths to be encoded in archives. We have -@@ -8139,7 +9129,7 @@ EOF - else - echo "copying selected object files to avoid basename conflicts..." - gentop="$output_objdir/${outputname}x" -- generated="$generated $gentop" -+ func_append generated " $gentop" - func_mkdir_p "$gentop" - save_oldobjs=$oldobjs - oldobjs= -@@ -8163,18 +9153,28 @@ EOF - esac - done - func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj" -- oldobjs="$oldobjs $gentop/$newobj" -+ func_append oldobjs " $gentop/$newobj" - ;; -- *) oldobjs="$oldobjs $obj" ;; -+ *) func_append oldobjs " $obj" ;; - esac - done - fi -- eval "cmds=\"$old_archive_cmds\"" -+ eval cmds=\"$old_archive_cmds\" - - func_len " $cmds" - len=$func_len_result - if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then - cmds=$old_archive_cmds -+ elif test -n "$archiver_list_spec"; then -+ func_verbose "using command file archive linking..." -+ for obj in $oldobjs -+ do -+ func_to_tool_file "$obj" -+ $ECHO "$func_to_tool_file_result" -+ done > $output_objdir/$libname.libcmd -+ func_to_tool_file "$output_objdir/$libname.libcmd" -+ oldobjs=" $archiver_list_spec$func_to_tool_file_result" -+ cmds=$old_archive_cmds - else - # the command line is too long to link in one step, link in parts - func_verbose "using piecewise archive linking..." 
-@@ -8189,7 +9189,7 @@ EOF - do - last_oldobj=$obj - done -- eval "test_cmds=\"$old_archive_cmds\"" -+ eval test_cmds=\"$old_archive_cmds\" - func_len " $test_cmds" - len0=$func_len_result - len=$len0 -@@ -8208,7 +9208,7 @@ EOF - RANLIB=$save_RANLIB - fi - test -z "$concat_cmds" || concat_cmds=$concat_cmds~ -- eval "concat_cmds=\"\${concat_cmds}$old_archive_cmds\"" -+ eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\" - objlist= - len=$len0 - fi -@@ -8216,9 +9216,9 @@ EOF - RANLIB=$save_RANLIB - oldobjs=$objlist - if test "X$oldobjs" = "X" ; then -- eval "cmds=\"\$concat_cmds\"" -+ eval cmds=\"\$concat_cmds\" - else -- eval "cmds=\"\$concat_cmds~\$old_archive_cmds\"" -+ eval cmds=\"\$concat_cmds~\$old_archive_cmds\" - fi - fi - fi -@@ -8268,12 +9268,23 @@ EOF - *.la) - func_basename "$deplib" - name="$func_basename_result" -- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` -+ func_resolve_sysroot "$deplib" -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` - test -z "$libdir" && \ - func_fatal_error "\`$deplib' is not a valid libtool archive" -- newdependency_libs="$newdependency_libs $libdir/$name" -+ func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name" -+ ;; -+ -L*) -+ func_stripname -L '' "$deplib" -+ func_replace_sysroot "$func_stripname_result" -+ func_append newdependency_libs " -L$func_replace_sysroot_result" - ;; -- *) newdependency_libs="$newdependency_libs $deplib" ;; -+ -R*) -+ func_stripname -R '' "$deplib" -+ func_replace_sysroot "$func_stripname_result" -+ func_append newdependency_libs " -R$func_replace_sysroot_result" -+ ;; -+ *) func_append newdependency_libs " $deplib" ;; - esac - done - dependency_libs="$newdependency_libs" -@@ -8284,12 +9295,14 @@ EOF - *.la) - func_basename "$lib" - name="$func_basename_result" -- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` -+ func_resolve_sysroot "$lib" -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` -+ - test -z "$libdir" && \ - func_fatal_error "\`$lib' is not a valid libtool archive" -- newdlfiles="$newdlfiles $libdir/$name" -+ func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name" - ;; -- *) newdlfiles="$newdlfiles $lib" ;; -+ *) func_append newdlfiles " $lib" ;; - esac - done - dlfiles="$newdlfiles" -@@ -8303,10 +9316,11 @@ EOF - # the library: - func_basename "$lib" - name="$func_basename_result" -- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` -+ func_resolve_sysroot "$lib" -+ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` - test -z "$libdir" && \ - func_fatal_error "\`$lib' is not a valid libtool archive" -- newdlprefiles="$newdlprefiles $libdir/$name" -+ func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name" - ;; - esac - done -@@ -8318,7 +9332,7 @@ EOF - [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; - *) abs=`pwd`"/$lib" ;; - esac -- newdlfiles="$newdlfiles $abs" -+ func_append newdlfiles " $abs" - done - dlfiles="$newdlfiles" - newdlprefiles= -@@ -8327,7 +9341,7 @@ EOF - [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; - *) abs=`pwd`"/$lib" ;; - esac -- newdlprefiles="$newdlprefiles $abs" -+ func_append newdlprefiles " $abs" - done - dlprefiles="$newdlprefiles" - fi -@@ -8412,7 +9426,7 @@ relink_command=\"$relink_command\"" - exit $EXIT_SUCCESS - } - --{ test "$mode" = link || test "$mode" = relink; } && -+{ test "$opt_mode" = link || test "$opt_mode" = relink; } && - func_mode_link ${1+"$@"} - - -@@ -8432,9 +9446,9 @@ func_mode_uninstall () - for arg - do - case $arg in -- -f) RM="$RM $arg"; 
rmforce=yes ;; -- -*) RM="$RM $arg" ;; -- *) files="$files $arg" ;; -+ -f) func_append RM " $arg"; rmforce=yes ;; -+ -*) func_append RM " $arg" ;; -+ *) func_append files " $arg" ;; - esac - done - -@@ -8443,24 +9457,23 @@ func_mode_uninstall () - - rmdirs= - -- origobjdir="$objdir" - for file in $files; do - func_dirname "$file" "" "." - dir="$func_dirname_result" - if test "X$dir" = X.; then -- objdir="$origobjdir" -+ odir="$objdir" - else -- objdir="$dir/$origobjdir" -+ odir="$dir/$objdir" - fi - func_basename "$file" - name="$func_basename_result" -- test "$mode" = uninstall && objdir="$dir" -+ test "$opt_mode" = uninstall && odir="$dir" - -- # Remember objdir for removal later, being careful to avoid duplicates -- if test "$mode" = clean; then -+ # Remember odir for removal later, being careful to avoid duplicates -+ if test "$opt_mode" = clean; then - case " $rmdirs " in -- *" $objdir "*) ;; -- *) rmdirs="$rmdirs $objdir" ;; -+ *" $odir "*) ;; -+ *) func_append rmdirs " $odir" ;; - esac - fi - -@@ -8486,18 +9499,17 @@ func_mode_uninstall () - - # Delete the libtool libraries and symlinks. - for n in $library_names; do -- rmfiles="$rmfiles $objdir/$n" -+ func_append rmfiles " $odir/$n" - done -- test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library" -+ test -n "$old_library" && func_append rmfiles " $odir/$old_library" - -- case "$mode" in -+ case "$opt_mode" in - clean) -- case " $library_names " in -- # " " in the beginning catches empty $dlname -+ case " $library_names " in - *" $dlname "*) ;; -- *) rmfiles="$rmfiles $objdir/$dlname" ;; -+ *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;; - esac -- test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i" -+ test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i" - ;; - uninstall) - if test -n "$library_names"; then -@@ -8525,19 +9537,19 @@ func_mode_uninstall () - # Add PIC object to the list of files to remove. - if test -n "$pic_object" && - test "$pic_object" != none; then -- rmfiles="$rmfiles $dir/$pic_object" -+ func_append rmfiles " $dir/$pic_object" - fi - - # Add non-PIC object to the list of files to remove. - if test -n "$non_pic_object" && - test "$non_pic_object" != none; then -- rmfiles="$rmfiles $dir/$non_pic_object" -+ func_append rmfiles " $dir/$non_pic_object" - fi - fi - ;; - - *) -- if test "$mode" = clean ; then -+ if test "$opt_mode" = clean ; then - noexename=$name - case $file in - *.exe) -@@ -8547,7 +9559,7 @@ func_mode_uninstall () - noexename=$func_stripname_result - # $file with .exe has already been added to rmfiles, - # add $file without .exe -- rmfiles="$rmfiles $file" -+ func_append rmfiles " $file" - ;; - esac - # Do a test to see if this is a libtool program. 
-@@ -8556,7 +9568,7 @@ func_mode_uninstall () - func_ltwrapper_scriptname "$file" - relink_command= - func_source $func_ltwrapper_scriptname_result -- rmfiles="$rmfiles $func_ltwrapper_scriptname_result" -+ func_append rmfiles " $func_ltwrapper_scriptname_result" - else - relink_command= - func_source $dir/$noexename -@@ -8564,12 +9576,12 @@ func_mode_uninstall () - - # note $name still contains .exe if it was in $file originally - # as does the version of $file that was added into $rmfiles -- rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}" -+ func_append rmfiles " $odir/$name $odir/${name}S.${objext}" - if test "$fast_install" = yes && test -n "$relink_command"; then -- rmfiles="$rmfiles $objdir/lt-$name" -+ func_append rmfiles " $odir/lt-$name" - fi - if test "X$noexename" != "X$name" ; then -- rmfiles="$rmfiles $objdir/lt-${noexename}.c" -+ func_append rmfiles " $odir/lt-${noexename}.c" - fi - fi - fi -@@ -8577,7 +9589,6 @@ func_mode_uninstall () - esac - func_show_eval "$RM $rmfiles" 'exit_status=1' - done -- objdir="$origobjdir" - - # Try to remove the ${objdir}s in the directories where we deleted files - for dir in $rmdirs; do -@@ -8589,16 +9600,16 @@ func_mode_uninstall () - exit $exit_status - } - --{ test "$mode" = uninstall || test "$mode" = clean; } && -+{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } && - func_mode_uninstall ${1+"$@"} - --test -z "$mode" && { -+test -z "$opt_mode" && { - help="$generic_help" - func_fatal_help "you must specify a MODE" - } - - test -z "$exec_cmd" && \ -- func_fatal_help "invalid operation mode \`$mode'" -+ func_fatal_help "invalid operation mode \`$opt_mode'" - - if test -n "$exec_cmd"; then - eval exec "$exec_cmd" -diff --git a/ltoptions.m4 b/ltoptions.m4 -index 5ef12ce..17cfd51 100644 ---- a/ltoptions.m4 -+++ b/ltoptions.m4 -@@ -8,7 +8,7 @@ - # unlimited permission to copy and/or distribute it, with or without - # modifications, as long as this notice is preserved. - --# serial 6 ltoptions.m4 -+# serial 7 ltoptions.m4 - - # This is to help aclocal find these macros, as it can't see m4_define. - AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])]) -diff --git a/ltversion.m4 b/ltversion.m4 -index bf87f77..9c7b5d4 100644 ---- a/ltversion.m4 -+++ b/ltversion.m4 -@@ -7,17 +7,17 @@ - # unlimited permission to copy and/or distribute it, with or without - # modifications, as long as this notice is preserved. - --# Generated from ltversion.in. -+# @configure_input@ - --# serial 3134 ltversion.m4 -+# serial 3293 ltversion.m4 - # This file is part of GNU Libtool - --m4_define([LT_PACKAGE_VERSION], [2.2.7a]) --m4_define([LT_PACKAGE_REVISION], [1.3134]) -+m4_define([LT_PACKAGE_VERSION], [2.4]) -+m4_define([LT_PACKAGE_REVISION], [1.3293]) - - AC_DEFUN([LTVERSION_VERSION], --[macro_version='2.2.7a' --macro_revision='1.3134' -+[macro_version='2.4' -+macro_revision='1.3293' - _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?]) - _LT_DECL(, macro_revision, 0) - ]) -diff --git a/lt~obsolete.m4 b/lt~obsolete.m4 -index bf92b5e..c573da9 100644 ---- a/lt~obsolete.m4 -+++ b/lt~obsolete.m4 -@@ -7,7 +7,7 @@ - # unlimited permission to copy and/or distribute it, with or without - # modifications, as long as this notice is preserved. - --# serial 4 lt~obsolete.m4 -+# serial 5 lt~obsolete.m4 - - # These exist entirely to fool aclocal when bootstrapping libtool. 
- # -diff --git a/opcodes/configure b/opcodes/configure -index f615634..a340983 100755 ---- a/opcodes/configure -+++ b/opcodes/configure -@@ -649,6 +649,9 @@ OTOOL - LIPO - NMEDIT - DSYMUTIL -+MANIFEST_TOOL -+ac_ct_AR -+DLLTOOL - OBJDUMP - LN_S - NM -@@ -762,6 +765,7 @@ enable_static - with_pic - enable_fast_install - with_gnu_ld -+with_libtool_sysroot - enable_libtool_lock - enable_targets - enable_werror -@@ -1422,6 +1426,8 @@ Optional Packages: - --with-pic try to use only PIC/non-PIC objects [default=use - both] - --with-gnu-ld assume the C compiler uses GNU ld [default=no] -+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified). - - Some influential environment variables: - CC C compiler command -@@ -5114,8 +5120,8 @@ esac - - - --macro_version='2.2.7a' --macro_revision='1.3134' -+macro_version='2.4' -+macro_revision='1.3293' - - - -@@ -5155,7 +5161,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 - $as_echo_n "checking how to print strings... " >&6; } - # Test print first, because it will be a builtin if present. --if test "X`print -r -- -n 2>/dev/null`" = X-n && \ -+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ - test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='print -r --' - elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then -@@ -5841,8 +5847,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6; - # Try some XSI features - xsi_shell=no - ( _lt_dummy="a/b/c" -- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ -- = c,a/b,, \ -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ - && eval 'test $(( 1 + 1 )) -eq 2 \ - && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ - && xsi_shell=yes -@@ -5891,6 +5897,80 @@ esac - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 -+$as_echo_n "checking how to convert $build file names to $host format... " >&6; } -+if test "${lt_cv_to_host_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 -+ ;; -+ esac -+ ;; -+ *-*-cygwin* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin -+ ;; -+ esac -+ ;; -+ * ) # unhandled hosts (and "normal" native builds) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+esac -+ -+fi -+ -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 -+$as_echo "$lt_cv_to_host_file_cmd" >&6; } -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 -+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } -+if test "${lt_cv_to_tool_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ #assume ordinary cross tools, or native build. 
-+lt_cv_to_tool_file_cmd=func_convert_file_noop -+case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ esac -+ ;; -+esac -+ -+fi -+ -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 -+$as_echo "$lt_cv_to_tool_file_cmd" >&6; } -+ -+ -+ -+ -+ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 - $as_echo_n "checking for $LD option to reload object files... " >&6; } - if test "${lt_cv_ld_reload_flag+set}" = set; then : -@@ -5907,6 +5987,11 @@ case $reload_flag in - esac - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in -+ cygwin* | mingw* | pw32* | cegcc*) -+ if test "$GCC" != yes; then -+ reload_cmds=false -+ fi -+ ;; - darwin*) - if test "$GCC" = yes; then - reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' -@@ -6075,7 +6160,8 @@ mingw* | pw32*) - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' -+ # Keep this pattern in sync with the one in func_win32_libid. -+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' - lt_cv_file_magic_cmd='$OBJDUMP -f' - fi - ;; -@@ -6229,6 +6315,21 @@ esac - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 - $as_echo "$lt_cv_deplibs_check_method" >&6; } -+ -+file_magic_glob= -+want_nocaseglob=no -+if test "$build" = "$host"; then -+ case $host_os in -+ mingw* | pw32*) -+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then -+ want_nocaseglob=yes -+ else -+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` -+ fi -+ ;; -+ esac -+fi -+ - file_magic_cmd=$lt_cv_file_magic_cmd - deplibs_check_method=$lt_cv_deplibs_check_method - test -z "$deplibs_check_method" && deplibs_check_method=unknown -@@ -6244,9 +6345,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown - - - -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ - if test -n "$ac_tool_prefix"; then -- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. --set dummy ${ac_tool_prefix}ar; ac_word=$2 -+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. -+set dummy ${ac_tool_prefix}dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$DLLTOOL"; then -+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+DLLTOOL=$ac_cv_prog_DLLTOOL -+if test -n "$DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 -+$as_echo "$DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_DLLTOOL"; then -+ ac_ct_DLLTOOL=$DLLTOOL -+ # Extract the first word of "dlltool", so it can be a program name with args. -+set dummy dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_DLLTOOL"; then -+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_DLLTOOL="dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL -+if test -n "$ac_ct_DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 -+$as_echo "$ac_ct_DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_DLLTOOL" = x; then -+ DLLTOOL="false" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ DLLTOOL=$ac_ct_DLLTOOL -+ fi -+else -+ DLLTOOL="$ac_cv_prog_DLLTOOL" -+fi -+ -+test -z "$DLLTOOL" && DLLTOOL=dlltool -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 -+$as_echo_n "checking how to associate runtime and link libraries... 
" >&6; } -+if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_sharedlib_from_linklib_cmd='unknown' -+ -+case $host_os in -+cygwin* | mingw* | pw32* | cegcc*) -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL -+ case `$DLLTOOL --help 2>&1` in -+ *--identify-strict*) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -+ ;; -+ *) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback -+ ;; -+ esac -+ ;; -+*) -+ # fallback: assume linklib IS sharedlib -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" -+ ;; -+esac -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 -+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } -+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd -+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO -+ -+ -+ -+ -+ -+ -+ -+if test -n "$ac_tool_prefix"; then -+ for ac_prog in ar -+ do -+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -+set dummy $ac_tool_prefix$ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_AR+set}" = set; then : -@@ -6262,7 +6516,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_AR="${ac_tool_prefix}ar" -+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6282,11 +6536,15 @@ $as_echo "no" >&6; } - fi - - -+ test -n "$AR" && break -+ done - fi --if test -z "$ac_cv_prog_AR"; then -+if test -z "$AR"; then - ac_ct_AR=$AR -- # Extract the first word of "ar", so it can be a program name with args. --set dummy ar; ac_word=$2 -+ for ac_prog in ar -+do -+ # Extract the first word of "$ac_prog", so it can be a program name with args. -+set dummy $ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : -@@ -6302,7 +6560,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_ac_ct_AR="ar" -+ ac_cv_prog_ac_ct_AR="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -6321,6 +6579,10 @@ else - $as_echo "no" >&6; } - fi - -+ -+ test -n "$ac_ct_AR" && break -+done -+ - if test "x$ac_ct_AR" = x; then - AR="false" - else -@@ -6332,16 +6594,72 @@ ac_tool_warned=yes ;; - esac - AR=$ac_ct_AR - fi --else -- AR="$ac_cv_prog_AR" - fi - --test -z "$AR" && AR=ar --test -z "$AR_FLAGS" && AR_FLAGS=cru -+: ${AR=ar} -+: ${AR_FLAGS=cru} -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 -+$as_echo_n "checking for archiver @FILE support... " >&6; } -+if test "${lt_cv_ar_at_file+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_ar_at_file=no -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+/* end confdefs.h. 
*/ -+ -+int -+main () -+{ - -+ ; -+ return 0; -+} -+_ACEOF -+if ac_fn_c_try_compile "$LINENO"; then : -+ echo conftest.$ac_objext > conftest.lst -+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -eq 0; then -+ # Ensure the archiver fails upon bogus file names. -+ rm -f conftest.$ac_objext libconftest.a -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -ne 0; then -+ lt_cv_ar_at_file=@ -+ fi -+ fi -+ rm -f conftest.* libconftest.a - -+fi -+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 -+$as_echo "$lt_cv_ar_at_file" >&6; } - -+if test "x$lt_cv_ar_at_file" = xno; then -+ archiver_list_spec= -+else -+ archiver_list_spec=$lt_cv_ar_at_file -+fi - - - -@@ -6683,8 +7001,8 @@ esac - lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -6720,6 +7038,7 @@ for ac_symprfx in "" "_"; do - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - fi -+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" - - # Check to see that the pipe works correctly. - pipe_works=no -@@ -6761,6 +7080,18 @@ _LT_EOF - if $GREP ' nm_test_var$' "$nlist" >/dev/null; then - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext -+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime -+ relocations are performed -- see ld's documentation on pseudo-relocs. */ -+# define LT_DLSYM_CONST -+#elif defined(__osf__) -+/* This system does not cope well with relocations in const data. */ -+# define LT_DLSYM_CONST -+#else -+# define LT_DLSYM_CONST const -+#endif -+ - #ifdef __cplusplus - extern "C" { - #endif -@@ -6772,7 +7103,7 @@ _LT_EOF - cat <<_LT_EOF >> conftest.$ac_ext - - /* The mapping between symbol names and symbols. 
*/ --const struct { -+LT_DLSYM_CONST struct { - const char *name; - void *address; - } -@@ -6798,8 +7129,8 @@ static const void *lt_preloaded_setup() { - _LT_EOF - # Now try linking the two files. - mv conftest.$ac_objext conftstm.$ac_objext -- lt_save_LIBS="$LIBS" -- lt_save_CFLAGS="$CFLAGS" -+ lt_globsym_save_LIBS=$LIBS -+ lt_globsym_save_CFLAGS=$CFLAGS - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 -@@ -6809,8 +7140,8 @@ _LT_EOF - test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi -- LIBS="$lt_save_LIBS" -- CFLAGS="$lt_save_CFLAGS" -+ LIBS=$lt_globsym_save_LIBS -+ CFLAGS=$lt_globsym_save_CFLAGS - else - echo "cannot find nm_test_func in $nlist" >&5 - fi -@@ -6847,6 +7178,16 @@ else - $as_echo "ok" >&6; } - fi - -+# Response file support. -+if test "$lt_cv_nm_interface" = "MS dumpbin"; then -+ nm_file_list_spec='@' -+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then -+ nm_file_list_spec='@' -+fi -+ -+ -+ -+ - - - -@@ -6868,6 +7209,45 @@ fi - - - -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 -+$as_echo_n "checking for sysroot... " >&6; } -+ -+# Check whether --with-libtool-sysroot was given. -+if test "${with_libtool_sysroot+set}" = set; then : -+ withval=$with_libtool_sysroot; -+else -+ with_libtool_sysroot=no -+fi -+ -+ -+lt_sysroot= -+case ${with_libtool_sysroot} in #( -+ yes) -+ if test "$GCC" = yes; then -+ lt_sysroot=`$CC --print-sysroot 2>/dev/null` -+ fi -+ ;; #( -+ /*) -+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` -+ ;; #( -+ no|'') -+ ;; #( -+ *) -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 -+$as_echo "${with_libtool_sysroot}" >&6; } -+ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 -+ ;; -+esac -+ -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 -+$as_echo "${lt_sysroot:-no}" >&6; } -+ -+ -+ -+ -+ - # Check whether --enable-libtool-lock was given. - if test "${enable_libtool_lock+set}" = set; then : - enableval=$enable_libtool_lock; -@@ -7074,6 +7454,123 @@ esac - - need_locks="$enable_libtool_lock" - -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. -+set dummy ${ac_tool_prefix}mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$MANIFEST_TOOL"; then -+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL -+if test -n "$MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 -+$as_echo "$MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then -+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL -+ # Extract the first word of "mt", so it can be a program name with args. -+set dummy mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_MANIFEST_TOOL"; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL -+if test -n "$ac_ct_MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 -+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_MANIFEST_TOOL" = x; then -+ MANIFEST_TOOL=":" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL -+ fi -+else -+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" -+fi -+ -+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 -+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } -+if test "${lt_cv_path_mainfest_tool+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_path_mainfest_tool=no -+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 -+ $MANIFEST_TOOL '-?' 
2>conftest.err > conftest.out -+ cat conftest.err >&5 -+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then -+ lt_cv_path_mainfest_tool=yes -+ fi -+ rm -f conftest* -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 -+$as_echo "$lt_cv_path_mainfest_tool" >&6; } -+if test "x$lt_cv_path_mainfest_tool" != xyes; then -+ MANIFEST_TOOL=: -+fi -+ -+ -+ -+ -+ - - case $host_os in - rhapsody* | darwin*) -@@ -7637,6 +8134,8 @@ _LT_EOF - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 - echo "$AR cru libconftest.a conftest.o" >&5 - $AR cru libconftest.a conftest.o 2>&5 -+ echo "$RANLIB libconftest.a" >&5 -+ $RANLIB libconftest.a 2>&5 - cat > conftest.c << _LT_EOF - int main() { return 0;} - _LT_EOF -@@ -7802,7 +8301,8 @@ fi - LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. --LIBTOOL='$(SHELL) $(top_builddir)/libtool' -+LIBTOOL='$(SHELL) $(top_builddir)' -+LIBTOOL="$LIBTOOL/${host_alias}-libtool" - - - -@@ -7891,7 +8391,7 @@ aix3*) - esac - - # Global variables: --ofile=libtool -+ofile=${host_alias}-libtool - can_build_shared=yes - - # All known linkers require a `.a' archive for static linking (except MSVC, -@@ -8189,8 +8689,6 @@ fi - lt_prog_compiler_pic= - lt_prog_compiler_static= - --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 --$as_echo_n "checking for $compiler option to produce PIC... " >&6; } - - if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' -@@ -8356,6 +8854,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - lt_prog_compiler_pic='--shared' - lt_prog_compiler_static='--static' - ;; -+ nagfor*) -+ # NAG Fortran compiler -+ lt_prog_compiler_wl='-Wl,-Wl,,' -+ lt_prog_compiler_pic='-PIC' -+ lt_prog_compiler_static='-Bstatic' -+ ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -8418,7 +8922,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - case $cc_basename in -- f77* | f90* | f95*) -+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl='-Qoption ld ';; - *) - lt_prog_compiler_wl='-Wl,';; -@@ -8475,13 +8979,17 @@ case $host_os in - lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" - ;; - esac --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 --$as_echo "$lt_prog_compiler_pic" >&6; } -- -- -- -- - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -+$as_echo_n "checking for $compiler option to produce PIC... " >&6; } -+if test "${lt_cv_prog_compiler_pic+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 -+$as_echo "$lt_cv_prog_compiler_pic" >&6; } -+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic - - # - # Check to make sure the PIC flag actually works. -@@ -8542,6 +9050,11 @@ fi - - - -+ -+ -+ -+ -+ - # - # Check to make sure the static flag actually works. 
- # -@@ -8892,7 +9405,8 @@ _LT_EOF - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes -- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' -+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -@@ -8991,12 +9505,12 @@ _LT_EOF - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec= - hardcode_libdir_flag_spec_ld='-rpath $libdir' -- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' -+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -9010,8 +9524,8 @@ _LT_EOF - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -9029,8 +9543,8 @@ _LT_EOF - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9076,8 +9590,8 @@ _LT_EOF - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ 
archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -9207,7 +9721,13 @@ _LT_EOF - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9220,22 +9740,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" -@@ -9247,7 +9774,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - else - # Determine the default libpath from the value encoded in an - # empty executable. -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ - - int -@@ -9260,22 +9793,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. 
-+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, -@@ -9320,20 +9860,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. -- hardcode_libdir_flag_spec=' ' -- allow_undefined_flag=unsupported -- # Tell ltmain to make .lib files, not .a files. -- libext=lib -- # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=".dll" -- # FIXME: Setting linknames here is a bad hack. -- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -- # The linker will automatically build a .lib file if we build a DLL. -- old_archive_from_new_cmds='true' -- # FIXME: Should let the user specify the lib program. -- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -- fix_srcfile_path='`cygpath -w "$srcfile"`' -- enable_shared_with_static_runtimes=yes -+ case $cc_basename in -+ cl*) -+ # Native MSVC -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ always_export_symbols=yes -+ file_list_spec='@' -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' -+ # The linker will not automatically build a static lib if we build a DLL. -+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' -+ enable_shared_with_static_runtimes=yes -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ # Don't use ranlib -+ old_postinstall_cmds='chmod 644 $oldlib' -+ postlink_cmds='lt_outputfile="@OUTPUT@"~ -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' -+ ;; -+ *) -+ # Assume MSVC wrapper -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ # Tell ltmain to make .lib files, not .a files. 
-+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -+ # The linker will automatically build a .lib file if we build a DLL. -+ old_archive_from_new_cmds='true' -+ # FIXME: Should let the user specify the lib program. -+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -+ enable_shared_with_static_runtimes=yes -+ ;; -+ esac - ;; - - darwin* | rhapsody*) -@@ -9394,7 +9977,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. - freebsd* | dragonfly*) -- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no -@@ -9402,7 +9985,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux9*) - if test "$GCC" = yes; then -- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' -+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -@@ -9418,7 +10001,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -9442,10 +10025,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else -@@ -9524,23 +10107,36 @@ fi - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` 
${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -- save_LDFLAGS="$LDFLAGS" -- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -- cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+ # This should be the same for all languages, so no per-tag cache variable. -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } -+if test "${lt_cv_irix_exported_symbol+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ --int foo(void) {} -+int foo (void) { return 0; } - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -- -+ lt_cv_irix_exported_symbol=yes -+else -+ lt_cv_irix_exported_symbol=no - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS="$save_LDFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -+$as_echo "$lt_cv_irix_exported_symbol" >&6; } -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -+ fi - else - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' -@@ -9625,7 +10221,7 @@ rm -f core conftest.err conftest.$ac_objext \ - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' -@@ -9644,9 +10240,9 @@ rm -f core conftest.err conftest.$ac_objext \ - no_undefined_flag=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' -- archive_cmds='$CC -shared ${wl}-z ${wl}text 
${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) -@@ -10222,8 +10818,9 @@ cygwin* | mingw* | pw32* | cegcc*) - need_version=no - need_lib_prefix=no - -- case $GCC,$host_os in -- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) -+ case $GCC,$cc_basename in -+ yes,*) -+ # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ -@@ -10256,13 +10853,71 @@ cygwin* | mingw* | pw32* | cegcc*) - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac -+ dynamic_linker='Win32 ld.exe' -+ ;; -+ -+ *,cl*) -+ # Native MSVC -+ libname_spec='$name' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' -+ -+ case $build_os in -+ mingw*) -+ sys_lib_search_path_spec= -+ lt_save_ifs=$IFS -+ IFS=';' -+ for lt_path in $LIB -+ do -+ IFS=$lt_save_ifs -+ # Let DOS variable expansion print the short 8.3 style file name. -+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` -+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" -+ done -+ IFS=$lt_save_ifs -+ # Convert to MSYS style. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` -+ ;; -+ cygwin*) -+ # Convert to unix form, then to dos form, then back to unix form -+ # but this time dos style (no spaces!) so that the unix form looks -+ # like /cygdrive/c/PROGRA~1:/cygdr... -+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` -+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` -+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ ;; -+ *) -+ sys_lib_search_path_spec="$LIB" -+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then -+ # It is most probably a Windows format PATH. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -+ else -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ fi -+ # FIXME: find the short name or the path components, as spaces are -+ # common. (e.g. "Program Files" -> "PROGRA~1") -+ ;; -+ esac -+ -+ # DLL is installed to $(libdir)/../bin by postinstall_cmds -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ -+ dldir=$destdir/`dirname \$dlpath`~ -+ test -d \$dldir || mkdir -p \$dldir~ -+ $install_prog $dir/$dlname \$dldir/$dlname' -+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ -+ dlpath=$dir/\$dldll~ -+ $RM \$dlpath' -+ shlibpath_overrides_runpath=yes -+ dynamic_linker='Win32 link.exe' - ;; - - *) -+ # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' -+ dynamic_linker='Win32 ld.exe' - ;; - esac -- dynamic_linker='Win32 ld.exe' - # FIXME: first we should search . and the directory the executable is in - shlibpath_var=PATH - ;; -@@ -10354,7 +11009,7 @@ haiku*) - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes -- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' -+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -@@ -11194,10 +11849,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -11300,10 +11955,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -12499,7 +13154,7 @@ if test "$enable_shared" = "yes"; then - # since libbfd may not pull in the entirety of libiberty. 
- x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'` - if test -n "$x"; then -- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty" -+ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a" - fi - - case "${host}" in -@@ -13473,13 +14128,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' - lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' - lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' - lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' - reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' - reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' - OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' - deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' - file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' -+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' -+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' -+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' -+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' - AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' - AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' -+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' - STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' - RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' - old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' -@@ -13494,14 +14156,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de - lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' -+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' -+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' - objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' - MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' --lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' -+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' - lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' - need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' -+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' - DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' - NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' - 
LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' -@@ -13534,12 +14199,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q - hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' - inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' - link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' --fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' - always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' - export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' - exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' - include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' - prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' -+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' - file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' - variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' - need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' -@@ -13594,8 +14259,13 @@ reload_flag \ - OBJDUMP \ - deplibs_check_method \ - file_magic_cmd \ -+file_magic_glob \ -+want_nocaseglob \ -+DLLTOOL \ -+sharedlib_from_linklib_cmd \ - AR \ - AR_FLAGS \ -+archiver_list_spec \ - STRIP \ - RANLIB \ - CC \ -@@ -13605,12 +14275,14 @@ lt_cv_sys_global_symbol_pipe \ - lt_cv_sys_global_symbol_to_cdecl \ - lt_cv_sys_global_symbol_to_c_name_address \ - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ -+nm_file_list_spec \ - lt_prog_compiler_no_builtin_flag \ --lt_prog_compiler_wl \ - lt_prog_compiler_pic \ -+lt_prog_compiler_wl \ - lt_prog_compiler_static \ - lt_cv_prog_compiler_c_o \ - need_locks \ -+MANIFEST_TOOL \ - DSYMUTIL \ - NMEDIT \ - LIPO \ -@@ -13626,7 +14298,6 @@ no_undefined_flag \ - hardcode_libdir_flag_spec \ - hardcode_libdir_flag_spec_ld \ - hardcode_libdir_separator \ --fix_srcfile_path \ - exclude_expsyms \ - include_expsyms \ - file_list_spec \ -@@ -13662,6 +14333,7 @@ module_cmds \ - module_expsym_cmds \ - export_symbols_cmds \ - prelink_cmds \ -+postlink_cmds \ - postinstall_cmds \ - postuninstall_cmds \ - finish_cmds \ -@@ -14418,7 +15090,8 @@ $as_echo X"$file" | - # NOTE: Changes made to this file will be lost: look at ltmain.sh. - # - # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, --# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. -+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, -+# Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is part of GNU Libtool. -@@ -14521,19 +15194,42 @@ SP2NL=$lt_lt_SP2NL - # turn newlines into spaces. - NL2SP=$lt_lt_NL2SP - -+# convert \$build file names to \$host format. -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+ -+# convert \$build files to toolchain format. -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+ - # An object symbol dumper. - OBJDUMP=$lt_OBJDUMP - - # Method to check whether dependent libraries are shared objects. - deplibs_check_method=$lt_deplibs_check_method - --# Command to use when deplibs_check_method == "file_magic". -+# Command to use when deplibs_check_method = "file_magic". - file_magic_cmd=$lt_file_magic_cmd - -+# How to find potential files when deplibs_check_method = "file_magic". 
-+file_magic_glob=$lt_file_magic_glob -+ -+# Find potential files using nocaseglob when deplibs_check_method = "file_magic". -+want_nocaseglob=$lt_want_nocaseglob -+ -+# DLL creation program. -+DLLTOOL=$lt_DLLTOOL -+ -+# Command to associate shared and link libraries. -+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd -+ - # The archiver. - AR=$lt_AR -+ -+# Flags to create an archive. - AR_FLAGS=$lt_AR_FLAGS - -+# How to feed a file listing to the archiver. -+archiver_list_spec=$lt_archiver_list_spec -+ - # A symbol stripping program. - STRIP=$lt_STRIP - -@@ -14563,6 +15259,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - # Transform the output of nm in a C name address pair when lib prefix is needed. - global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - -+# Specify filename containing input files for \$NM. -+nm_file_list_spec=$lt_nm_file_list_spec -+ -+# The root where to search for dependent libraries,and in which our libraries should be installed. -+lt_sysroot=$lt_sysroot -+ - # The name of the directory that contains temporary libtool files. - objdir=$objdir - -@@ -14572,6 +15274,9 @@ MAGIC_CMD=$MAGIC_CMD - # Must we lock files when doing compilation? - need_locks=$lt_need_locks - -+# Manifest tool. -+MANIFEST_TOOL=$lt_MANIFEST_TOOL -+ - # Tool to manipulate archived DWARF debug symbol files on Mac OS X. - DSYMUTIL=$lt_DSYMUTIL - -@@ -14686,12 +15391,12 @@ with_gcc=$GCC - # Compiler flag to turn off builtin functions. - no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag - --# How to pass a linker flag through the compiler. --wl=$lt_lt_prog_compiler_wl -- - # Additional compiler flags for building library objects. - pic_flag=$lt_lt_prog_compiler_pic - -+# How to pass a linker flag through the compiler. -+wl=$lt_lt_prog_compiler_wl -+ - # Compiler flag to prevent dynamic linking. - link_static_flag=$lt_lt_prog_compiler_static - -@@ -14778,9 +15483,6 @@ inherit_rpath=$inherit_rpath - # Whether libtool must link a program against all its dependency libraries. - link_all_deplibs=$link_all_deplibs - --# Fix the shell variable \$srcfile for the compiler. --fix_srcfile_path=$lt_fix_srcfile_path -- - # Set to "yes" if exported symbols are required. - always_export_symbols=$always_export_symbols - -@@ -14796,6 +15498,9 @@ include_expsyms=$lt_include_expsyms - # Commands necessary for linking programs (against libraries) with templates. - prelink_cmds=$lt_prelink_cmds - -+# Commands necessary for finishing linking programs. -+postlink_cmds=$lt_postlink_cmds -+ - # Specify filename containing input files. - file_list_spec=$lt_file_list_spec - -@@ -14828,210 +15533,169 @@ ltmain="$ac_aux_dir/ltmain.sh" - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? -- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- case $xsi_shell in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. 
--func_dirname () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac --} -- --# func_basename file --func_basename () --{ -- func_basename_result="${1##*/}" --} -- --# func_dirname_and_basename file append nondir_replacement --# perform func_basename and func_dirname in a single function --# call: --# dirname: Compute the dirname of FILE. If nonempty, --# add APPEND to the result, otherwise set result --# to NONDIR_REPLACEMENT. --# value returned in "$func_dirname_result" --# basename: Compute filename of FILE. --# value retuned in "$func_basename_result" --# Implementation must be kept synchronized with func_dirname --# and func_basename. For efficiency, we do not delegate to --# those functions but instead duplicate the functionality here. --func_dirname_and_basename () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac -- func_basename_result="${1##*/}" --} -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --func_stripname () --{ -- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -- # positional parameters, so assign one to ordinary parameter first. -- func_stripname_result=${3} -- func_stripname_result=${func_stripname_result#"${1}"} -- func_stripname_result=${func_stripname_result%"${2}"} --} -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=${1%%=*} -- func_opt_split_arg=${1#*=} --} -- --# func_lo2o object --func_lo2o () --{ -- case ${1} in -- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -- *) func_lo2o_result=${1} ;; -- esac --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=${1%.*}.lo --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=$(( $* )) --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=${#1} --} -- --_LT_EOF -- ;; -- *) # Bourne compatible functions. -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- # Extract subdirectory from the argument. -- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -- if test "X$func_dirname_result" = "X${1}"; then -- func_dirname_result="${3}" -- else -- func_dirname_result="$func_dirname_result${2}" -- fi --} -- --# func_basename file --func_basename () --{ -- func_basename_result=`$ECHO "${1}" | $SED "$basename"` --} -- -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). 
--# func_strip_suffix prefix name --func_stripname () --{ -- case ${2} in -- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -- esac --} -- --# sed scripts: --my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' --my_sed_long_arg='1s/^-[^=]*=//' -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` -- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` --} -- --# func_lo2o object --func_lo2o () --{ -- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=`expr "$@"` --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` --} -- --_LT_EOF --esac -- --case $lt_shell_append in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1+=\$2" --} --_LT_EOF -- ;; -- *) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1=\$$1\$2" --} -- --_LT_EOF -- ;; -- esac -- -- -- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- mv -f "$cfgfile" "$ofile" || -+ sed '$q' "$ltmain" >> "$cfgfile" \ -+ || (rm -f "$cfgfile"; exit 1) -+ -+ if test x"$xsi_shell" = xyes; then -+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -+func_dirname ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_basename ()$/,/^} # func_basename /c\ -+func_basename ()\ -+{\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -+func_dirname_and_basename ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -+func_stripname ()\ -+{\ -+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -+\ # positional parameters, so assign one to ordinary parameter first.\ -+\ func_stripname_result=${3}\ -+\ func_stripname_result=${func_stripname_result#"${1}"}\ -+\ func_stripname_result=${func_stripname_result%"${2}"}\ -+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -+func_split_long_opt ()\ -+{\ -+\ func_split_long_opt_name=${1%%=*}\ -+\ func_split_long_opt_arg=${1#*=}\ -+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -+func_split_short_opt ()\ -+{\ -+\ func_split_short_opt_arg=${1#??}\ -+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -+func_lo2o ()\ -+{\ -+\ case ${1} in\ -+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -+\ *) func_lo2o_result=${1} ;;\ -+\ esac\ -+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_xform ()$/,/^} # func_xform /c\ -+func_xform ()\ -+{\ -+ func_xform_result=${1%.*}.lo\ -+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_arith ()$/,/^} # func_arith /c\ -+func_arith ()\ -+{\ -+ func_arith_result=$(( $* ))\ -+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_len ()$/,/^} # func_len /c\ -+func_len ()\ -+{\ -+ func_len_result=${#1}\ -+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ sed -e '/^func_append ()$/,/^} # func_append /c\ -+func_append ()\ -+{\ -+ eval "${1}+=\\${2}"\ -+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -+func_append_quoted ()\ -+{\ -+\ func_quote_for_eval "${2}"\ -+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -+fi -+ -+ -+ mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" - -diff --git a/opcodes/configure.ac b/opcodes/configure.ac -index c7f4783..f2e002d 100644 ---- a/opcodes/configure.ac -+++ b/opcodes/configure.ac -@@ -167,7 +167,7 @@ changequote(,)dnl - x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'` - changequote([,])dnl - if test -n "$x"; then -- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty" -+ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a" - fi - - case "${host}" in -diff --git a/zlib/configure b/zlib/configure -index bba9fd2..25a2dae 100755 ---- a/zlib/configure -+++ b/zlib/configure -@@ -614,8 +614,11 @@ OTOOL - LIPO - NMEDIT - DSYMUTIL -+MANIFEST_TOOL - RANLIB -+ac_ct_AR - AR -+DLLTOOL - OBJDUMP - LN_S - NM -@@ -737,6 +740,7 @@ enable_static - with_pic - enable_fast_install - with_gnu_ld -+with_libtool_sysroot - enable_libtool_lock - enable_host_shared - ' -@@ -1385,6 +1389,8 @@ Optional Packages: - --with-pic try to use only PIC/non-PIC objects [default=use - both] - --with-gnu-ld assume the C compiler uses GNU ld [default=no] -+ --with-libtool-sysroot=DIR Search for dependent libraries within DIR -+ (or the compiler's sysroot if not specified). - - Some influential environment variables: - CC C compiler command -@@ -3910,8 +3916,8 @@ esac - - - --macro_version='2.2.7a' --macro_revision='1.3134' -+macro_version='2.4' -+macro_revision='1.3293' - - - -@@ -3951,7 +3957,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 - $as_echo_n "checking how to print strings... " >&6; } - # Test print first, because it will be a builtin if present. --if test "X`print -r -- -n 2>/dev/null`" = X-n && \ -+if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ - test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='print -r --' - elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then -@@ -4767,8 +4773,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... 
" >&6; - # Try some XSI features - xsi_shell=no - ( _lt_dummy="a/b/c" -- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ -- = c,a/b,, \ -+ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ -+ = c,a/b,b/c, \ - && eval 'test $(( 1 + 1 )) -eq 2 \ - && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ - && xsi_shell=yes -@@ -4817,6 +4823,80 @@ esac - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 -+$as_echo_n "checking how to convert $build file names to $host format... " >&6; } -+if test "${lt_cv_to_host_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 -+ ;; -+ esac -+ ;; -+ *-*-cygwin* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin -+ ;; -+ *-*-cygwin* ) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+ * ) # otherwise, assume *nix -+ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin -+ ;; -+ esac -+ ;; -+ * ) # unhandled hosts (and "normal" native builds) -+ lt_cv_to_host_file_cmd=func_convert_file_noop -+ ;; -+esac -+ -+fi -+ -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 -+$as_echo "$lt_cv_to_host_file_cmd" >&6; } -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 -+$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } -+if test "${lt_cv_to_tool_file_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ #assume ordinary cross tools, or native build. -+lt_cv_to_tool_file_cmd=func_convert_file_noop -+case $host in -+ *-*-mingw* ) -+ case $build in -+ *-*-mingw* ) # actually msys -+ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 -+ ;; -+ esac -+ ;; -+esac -+ -+fi -+ -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 -+$as_echo "$lt_cv_to_tool_file_cmd" >&6; } -+ -+ -+ -+ -+ - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 - $as_echo_n "checking for $LD option to reload object files... " >&6; } - if test "${lt_cv_ld_reload_flag+set}" = set; then : -@@ -4833,6 +4913,11 @@ case $reload_flag in - esac - reload_cmds='$LD$reload_flag -o $output$reload_objs' - case $host_os in -+ cygwin* | mingw* | pw32* | cegcc*) -+ if test "$GCC" != yes; then -+ reload_cmds=false -+ fi -+ ;; - darwin*) - if test "$GCC" = yes; then - reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' -@@ -5001,7 +5086,8 @@ mingw* | pw32*) - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else -- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' -+ # Keep this pattern in sync with the one in func_win32_libid. 
-+ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' - lt_cv_file_magic_cmd='$OBJDUMP -f' - fi - ;; -@@ -5155,6 +5241,21 @@ esac - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 - $as_echo "$lt_cv_deplibs_check_method" >&6; } -+ -+file_magic_glob= -+want_nocaseglob=no -+if test "$build" = "$host"; then -+ case $host_os in -+ mingw* | pw32*) -+ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then -+ want_nocaseglob=yes -+ else -+ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` -+ fi -+ ;; -+ esac -+fi -+ - file_magic_cmd=$lt_cv_file_magic_cmd - deplibs_check_method=$lt_cv_deplibs_check_method - test -z "$deplibs_check_method" && deplibs_check_method=unknown -@@ -5170,9 +5271,163 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown - - - -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ - if test -n "$ac_tool_prefix"; then -- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. --set dummy ${ac_tool_prefix}ar; ac_word=$2 -+ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. -+set dummy ${ac_tool_prefix}dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$DLLTOOL"; then -+ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+DLLTOOL=$ac_cv_prog_DLLTOOL -+if test -n "$DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 -+$as_echo "$DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_DLLTOOL"; then -+ ac_ct_DLLTOOL=$DLLTOOL -+ # Extract the first word of "dlltool", so it can be a program name with args. -+set dummy dlltool; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_DLLTOOL"; then -+ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_DLLTOOL="dlltool" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL -+if test -n "$ac_ct_DLLTOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 -+$as_echo "$ac_ct_DLLTOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_DLLTOOL" = x; then -+ DLLTOOL="false" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ DLLTOOL=$ac_ct_DLLTOOL -+ fi -+else -+ DLLTOOL="$ac_cv_prog_DLLTOOL" -+fi -+ -+test -z "$DLLTOOL" && DLLTOOL=dlltool -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 -+$as_echo_n "checking how to associate runtime and link libraries... " >&6; } -+if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_sharedlib_from_linklib_cmd='unknown' -+ -+case $host_os in -+cygwin* | mingw* | pw32* | cegcc*) -+ # two different shell functions defined in ltmain.sh -+ # decide which to use based on capabilities of $DLLTOOL -+ case `$DLLTOOL --help 2>&1` in -+ *--identify-strict*) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib -+ ;; -+ *) -+ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback -+ ;; -+ esac -+ ;; -+*) -+ # fallback: assume linklib IS sharedlib -+ lt_cv_sharedlib_from_linklib_cmd="$ECHO" -+ ;; -+esac -+ -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 -+$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } -+sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd -+test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO -+ -+ -+ -+ -+ -+ -+ -+ -+if test -n "$ac_tool_prefix"; then -+ for ac_prog in ar -+ do -+ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -+set dummy $ac_tool_prefix$ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... " >&6; } - if test "${ac_cv_prog_AR+set}" = set; then : -@@ -5188,7 +5443,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_AR="${ac_tool_prefix}ar" -+ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -5208,11 +5463,15 @@ $as_echo "no" >&6; } - fi - - -+ test -n "$AR" && break -+ done - fi --if test -z "$ac_cv_prog_AR"; then -+if test -z "$AR"; then - ac_ct_AR=$AR -- # Extract the first word of "ar", so it can be a program name with args. --set dummy ar; ac_word=$2 -+ for ac_prog in ar -+do -+ # Extract the first word of "$ac_prog", so it can be a program name with args. -+set dummy $ac_prog; ac_word=$2 - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 - $as_echo_n "checking for $ac_word... 
" >&6; } - if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : -@@ -5228,7 +5487,7 @@ do - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -- ac_cv_prog_ac_ct_AR="ar" -+ ac_cv_prog_ac_ct_AR="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -@@ -5247,6 +5506,10 @@ else - $as_echo "no" >&6; } - fi - -+ -+ test -n "$ac_ct_AR" && break -+done -+ - if test "x$ac_ct_AR" = x; then - AR="false" - else -@@ -5258,16 +5521,72 @@ ac_tool_warned=yes ;; - esac - AR=$ac_ct_AR - fi --else -- AR="$ac_cv_prog_AR" - fi - --test -z "$AR" && AR=ar --test -z "$AR_FLAGS" && AR_FLAGS=cru -+: ${AR=ar} -+: ${AR_FLAGS=cru} -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 -+$as_echo_n "checking for archiver @FILE support... " >&6; } -+if test "${lt_cv_ar_at_file+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_ar_at_file=no -+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext -+/* end confdefs.h. */ -+ -+int -+main () -+{ - -+ ; -+ return 0; -+} -+_ACEOF -+if ac_fn_c_try_compile "$LINENO"; then : -+ echo conftest.$ac_objext > conftest.lst -+ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -eq 0; then -+ # Ensure the archiver fails upon bogus file names. -+ rm -f conftest.$ac_objext libconftest.a -+ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 -+ (eval $lt_ar_try) 2>&5 -+ ac_status=$? -+ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 -+ test $ac_status = 0; } -+ if test "$ac_status" -ne 0; then -+ lt_cv_ar_at_file=@ -+ fi -+ fi -+ rm -f conftest.* libconftest.a - -+fi -+rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 -+$as_echo "$lt_cv_ar_at_file" >&6; } - -+if test "x$lt_cv_ar_at_file" = xno; then -+ archiver_list_spec= -+else -+ archiver_list_spec=$lt_cv_ar_at_file -+fi - - - -@@ -5609,8 +5928,8 @@ esac - lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - - # Transform an extracted symbol line into symbol name and symbol address --lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" --lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -+lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - - # Handle CRLF in mingw tool chain - opt_cr= -@@ -5646,6 +5965,7 @@ for ac_symprfx in "" "_"; do - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - fi -+ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" - - # Check to see that the pipe works correctly. - pipe_works=no -@@ -5687,6 +6007,18 @@ _LT_EOF - if $GREP ' nm_test_var$' "$nlist" >/dev/null; then - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext -+/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ -+#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -+/* DATA imports from DLLs on WIN32 con't be const, because runtime -+ relocations are performed -- see ld's documentation on pseudo-relocs. */ -+# define LT_DLSYM_CONST -+#elif defined(__osf__) -+/* This system does not cope well with relocations in const data. */ -+# define LT_DLSYM_CONST -+#else -+# define LT_DLSYM_CONST const -+#endif -+ - #ifdef __cplusplus - extern "C" { - #endif -@@ -5698,7 +6030,7 @@ _LT_EOF - cat <<_LT_EOF >> conftest.$ac_ext - - /* The mapping between symbol names and symbols. */ --const struct { -+LT_DLSYM_CONST struct { - const char *name; - void *address; - } -@@ -5724,8 +6056,8 @@ static const void *lt_preloaded_setup() { - _LT_EOF - # Now try linking the two files. 
- mv conftest.$ac_objext conftstm.$ac_objext -- lt_save_LIBS="$LIBS" -- lt_save_CFLAGS="$CFLAGS" -+ lt_globsym_save_LIBS=$LIBS -+ lt_globsym_save_CFLAGS=$CFLAGS - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 -@@ -5735,8 +6067,8 @@ _LT_EOF - test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi -- LIBS="$lt_save_LIBS" -- CFLAGS="$lt_save_CFLAGS" -+ LIBS=$lt_globsym_save_LIBS -+ CFLAGS=$lt_globsym_save_CFLAGS - else - echo "cannot find nm_test_func in $nlist" >&5 - fi -@@ -5773,6 +6105,19 @@ else - $as_echo "ok" >&6; } - fi - -+# Response file support. -+if test "$lt_cv_nm_interface" = "MS dumpbin"; then -+ nm_file_list_spec='@' -+elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then -+ nm_file_list_spec='@' -+fi -+ -+ -+ -+ -+ -+ -+ - - - -@@ -5793,6 +6138,41 @@ fi - - - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 -+$as_echo_n "checking for sysroot... " >&6; } -+ -+# Check whether --with-libtool-sysroot was given. -+if test "${with_libtool_sysroot+set}" = set; then : -+ withval=$with_libtool_sysroot; -+else -+ with_libtool_sysroot=no -+fi -+ -+ -+lt_sysroot= -+case ${with_libtool_sysroot} in #( -+ yes) -+ if test "$GCC" = yes; then -+ lt_sysroot=`$CC --print-sysroot 2>/dev/null` -+ fi -+ ;; #( -+ /*) -+ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` -+ ;; #( -+ no|'') -+ ;; #( -+ *) -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 -+$as_echo "${with_libtool_sysroot}" >&6; } -+ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 -+ ;; -+esac -+ -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 -+$as_echo "${lt_sysroot:-no}" >&6; } -+ -+ -+ - - - # Check whether --enable-libtool-lock was given. -@@ -6004,6 +6384,123 @@ esac - - need_locks="$enable_libtool_lock" - -+if test -n "$ac_tool_prefix"; then -+ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. -+set dummy ${ac_tool_prefix}mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$MANIFEST_TOOL"; then -+ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL -+if test -n "$MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 -+$as_echo "$MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+fi -+if test -z "$ac_cv_prog_MANIFEST_TOOL"; then -+ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL -+ # Extract the first word of "mt", so it can be a program name with args. -+set dummy mt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... 
" >&6; } -+if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$ac_ct_MANIFEST_TOOL"; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. -+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+fi -+fi -+ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL -+if test -n "$ac_ct_MANIFEST_TOOL"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 -+$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ if test "x$ac_ct_MANIFEST_TOOL" = x; then -+ MANIFEST_TOOL=":" -+ else -+ case $cross_compiling:$ac_tool_warned in -+yes:) -+{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -+$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -+ac_tool_warned=yes ;; -+esac -+ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL -+ fi -+else -+ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" -+fi -+ -+test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 -+$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } -+if test "${lt_cv_path_mainfest_tool+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_path_mainfest_tool=no -+ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 -+ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out -+ cat conftest.err >&5 -+ if $GREP 'Manifest Tool' conftest.out > /dev/null; then -+ lt_cv_path_mainfest_tool=yes -+ fi -+ rm -f conftest* -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 -+$as_echo "$lt_cv_path_mainfest_tool" >&6; } -+if test "x$lt_cv_path_mainfest_tool" != xyes; then -+ MANIFEST_TOOL=: -+fi -+ -+ -+ -+ -+ - - case $host_os in - rhapsody* | darwin*) -@@ -6570,6 +7067,8 @@ _LT_EOF - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 - echo "$AR cru libconftest.a conftest.o" >&5 - $AR cru libconftest.a conftest.o 2>&5 -+ echo "$RANLIB libconftest.a" >&5 -+ $RANLIB libconftest.a 2>&5 - cat > conftest.c << _LT_EOF - int main() { return 0;} - _LT_EOF -@@ -7033,7 +7532,8 @@ fi - LIBTOOL_DEPS="$ltmain" - - # Always use our own libtool. --LIBTOOL='$(SHELL) $(top_builddir)/libtool' -+LIBTOOL='$(SHELL) $(top_builddir)' -+LIBTOOL="$LIBTOOL/${host_alias}-libtool" - - - -@@ -7122,7 +7622,7 @@ aix3*) - esac - - # Global variables: --ofile=libtool -+ofile=${host_alias}-libtool - can_build_shared=yes - - # All known linkers require a `.a' archive for static linking (except MSVC, -@@ -7420,8 +7920,6 @@ fi - lt_prog_compiler_pic= - lt_prog_compiler_static= - --{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 --$as_echo_n "checking for $compiler option to produce PIC... " >&6; } - - if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' -@@ -7587,6 +8085,12 @@ $as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } - lt_prog_compiler_pic='--shared' - lt_prog_compiler_static='--static' - ;; -+ nagfor*) -+ # NAG Fortran compiler -+ lt_prog_compiler_wl='-Wl,-Wl,,' -+ lt_prog_compiler_pic='-PIC' -+ lt_prog_compiler_static='-Bstatic' -+ ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) -@@ -7649,7 +8153,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - case $cc_basename in -- f77* | f90* | f95*) -+ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl='-Qoption ld ';; - *) - lt_prog_compiler_wl='-Wl,';; -@@ -7706,13 +8210,17 @@ case $host_os in - lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" - ;; - esac --{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 --$as_echo "$lt_prog_compiler_pic" >&6; } -- -- -- -- - -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -+$as_echo_n "checking for $compiler option to produce PIC... " >&6; } -+if test "${lt_cv_prog_compiler_pic+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 -+$as_echo "$lt_cv_prog_compiler_pic" >&6; } -+lt_prog_compiler_pic=$lt_cv_prog_compiler_pic - - # - # Check to make sure the PIC flag actually works. -@@ -7773,6 +8281,11 @@ fi - - - -+ -+ -+ -+ -+ - # - # Check to make sure the static flag actually works. - # -@@ -8123,7 +8636,8 @@ _LT_EOF - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes -- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' -+ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' -@@ -8222,12 +8736,12 @@ _LT_EOF - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec= - hardcode_libdir_flag_spec_ld='-rpath $libdir' -- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' -+ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ -- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' -+ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac -@@ -8241,8 +8755,8 @@ _LT_EOF - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else -- archive_cmds='$CC -shared $libobjs $deplibs 
$compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - -@@ -8260,8 +8774,8 @@ _LT_EOF - - _LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -8307,8 +8821,8 @@ _LT_EOF - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi -@@ -8438,7 +8952,13 @@ _LT_EOF - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. -- if test x$gcc_no_link = xyes; then -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test x$gcc_no_link = xyes; then - as_fn_error "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5 - fi - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -@@ -8454,22 +8974,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. 
-+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" -@@ -8481,7 +9008,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - else - # Determine the default libpath from the value encoded in an - # empty executable. -- if test x$gcc_no_link = xyes; then -+ if test "${lt_cv_aix_libpath+set}" = set; then -+ aix_libpath=$lt_cv_aix_libpath -+else -+ if test "${lt_cv_aix_libpath_+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test x$gcc_no_link = xyes; then - as_fn_error "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5 - fi - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -@@ -8497,22 +9030,29 @@ main () - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : - --lt_aix_libpath_sed=' -- /Import File Strings/,/^$/ { -- /^0/ { -- s/^0 *\(.*\)$/\1/ -- p -- } -- }' --aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --# Check for a 64-bit object if we didn't find anything. --if test -z "$aix_libpath"; then -- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` --fi -+ lt_aix_libpath_sed=' -+ /Import File Strings/,/^$/ { -+ /^0/ { -+ s/^0 *\([^ ]*\) *$/\1/ -+ p -+ } -+ }' -+ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ # Check for a 64-bit object if we didn't find anything. -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` -+ fi - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext --if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi -+ if test -z "$lt_cv_aix_libpath_"; then -+ lt_cv_aix_libpath_="/usr/lib:/lib" -+ fi -+ -+fi -+ -+ aix_libpath=$lt_cv_aix_libpath_ -+fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, -@@ -8557,20 +9097,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. -- hardcode_libdir_flag_spec=' ' -- allow_undefined_flag=unsupported -- # Tell ltmain to make .lib files, not .a files. -- libext=lib -- # Tell ltmain to make .dll files, not .so files. -- shrext_cmds=".dll" -- # FIXME: Setting linknames here is a bad hack. -- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -- # The linker will automatically build a .lib file if we build a DLL. -- old_archive_from_new_cmds='true' -- # FIXME: Should let the user specify the lib program. 
-- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -- fix_srcfile_path='`cygpath -w "$srcfile"`' -- enable_shared_with_static_runtimes=yes -+ case $cc_basename in -+ cl*) -+ # Native MSVC -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ always_export_symbols=yes -+ file_list_spec='@' -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' -+ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then -+ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; -+ else -+ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; -+ fi~ -+ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ -+ linknames=' -+ # The linker will not automatically build a static lib if we build a DLL. -+ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' -+ enable_shared_with_static_runtimes=yes -+ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' -+ # Don't use ranlib -+ old_postinstall_cmds='chmod 644 $oldlib' -+ postlink_cmds='lt_outputfile="@OUTPUT@"~ -+ lt_tool_outputfile="@TOOL_OUTPUT@"~ -+ case $lt_outputfile in -+ *.exe|*.EXE) ;; -+ *) -+ lt_outputfile="$lt_outputfile.exe" -+ lt_tool_outputfile="$lt_tool_outputfile.exe" -+ ;; -+ esac~ -+ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then -+ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; -+ $RM "$lt_outputfile.manifest"; -+ fi' -+ ;; -+ *) -+ # Assume MSVC wrapper -+ hardcode_libdir_flag_spec=' ' -+ allow_undefined_flag=unsupported -+ # Tell ltmain to make .lib files, not .a files. -+ libext=lib -+ # Tell ltmain to make .dll files, not .so files. -+ shrext_cmds=".dll" -+ # FIXME: Setting linknames here is a bad hack. -+ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' -+ # The linker will automatically build a .lib file if we build a DLL. -+ old_archive_from_new_cmds='true' -+ # FIXME: Should let the user specify the lib program. -+ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' -+ enable_shared_with_static_runtimes=yes -+ ;; -+ esac - ;; - - darwin* | rhapsody*) -@@ -8631,7 +9214,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
- freebsd* | dragonfly*) -- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no -@@ -8639,7 +9222,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux9*) - if test "$GCC" = yes; then -- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' -+ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi -@@ -8655,7 +9238,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi -@@ -8679,10 +9262,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi - archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) -- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else -@@ -8761,26 +9344,39 @@ fi - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then -- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. -- save_LDFLAGS="$LDFLAGS" -- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -- if test x$gcc_no_link = xyes; then -+ # This should be the same for all languages, so no per-tag cache variable. -+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -+$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } -+if test "${lt_cv_irix_exported_symbol+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ save_LDFLAGS="$LDFLAGS" -+ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" -+ if test x$gcc_no_link = xyes; then - as_fn_error "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5 - fi - cat confdefs.h - <<_ACEOF >conftest.$ac_ext - /* end confdefs.h. */ --int foo(void) {} -+int foo (void) { return 0; } - _ACEOF - if ac_fn_c_try_link "$LINENO"; then : -- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -- -+ lt_cv_irix_exported_symbol=yes -+else -+ lt_cv_irix_exported_symbol=no - fi - rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -- LDFLAGS="$save_LDFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+fi -+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -+$as_echo "$lt_cv_irix_exported_symbol" >&6; } -+ if test "$lt_cv_irix_exported_symbol" = yes; then -+ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' -+ fi - else - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' -@@ -8865,7 +9461,7 @@ rm -f core conftest.err conftest.$ac_objext \ - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' -- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' -+ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' -@@ -8884,9 +9480,9 @@ rm -f core conftest.err conftest.$ac_objext \ - no_undefined_flag=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' -- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' -+ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ -- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' -+ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname 
-o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) -@@ -9462,8 +10058,9 @@ cygwin* | mingw* | pw32* | cegcc*) - need_version=no - need_lib_prefix=no - -- case $GCC,$host_os in -- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) -+ case $GCC,$cc_basename in -+ yes,*) -+ # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ -@@ -9496,13 +10093,71 @@ cygwin* | mingw* | pw32* | cegcc*) - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac -+ dynamic_linker='Win32 ld.exe' -+ ;; -+ -+ *,cl*) -+ # Native MSVC -+ libname_spec='$name' -+ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' -+ library_names_spec='${libname}.dll.lib' -+ -+ case $build_os in -+ mingw*) -+ sys_lib_search_path_spec= -+ lt_save_ifs=$IFS -+ IFS=';' -+ for lt_path in $LIB -+ do -+ IFS=$lt_save_ifs -+ # Let DOS variable expansion print the short 8.3 style file name. -+ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` -+ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" -+ done -+ IFS=$lt_save_ifs -+ # Convert to MSYS style. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` -+ ;; -+ cygwin*) -+ # Convert to unix form, then to dos form, then back to unix form -+ # but this time dos style (no spaces!) so that the unix form looks -+ # like /cygdrive/c/PROGRA~1:/cygdr... -+ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` -+ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` -+ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ ;; -+ *) -+ sys_lib_search_path_spec="$LIB" -+ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then -+ # It is most probably a Windows format PATH. -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` -+ else -+ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` -+ fi -+ # FIXME: find the short name or the path components, as spaces are -+ # common. (e.g. "Program Files" -> "PROGRA~1") -+ ;; -+ esac -+ -+ # DLL is installed to $(libdir)/../bin by postinstall_cmds -+ postinstall_cmds='base_file=`basename \${file}`~ -+ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ -+ dldir=$destdir/`dirname \$dlpath`~ -+ test -d \$dldir || mkdir -p \$dldir~ -+ $install_prog $dir/$dlname \$dldir/$dlname' -+ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ -+ dlpath=$dir/\$dldll~ -+ $RM \$dlpath' -+ shlibpath_overrides_runpath=yes -+ dynamic_linker='Win32 link.exe' - ;; - - *) -+ # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' -+ dynamic_linker='Win32 ld.exe' - ;; - esac -- dynamic_linker='Win32 ld.exe' - # FIXME: first we should search . 
and the directory the executable is in - shlibpath_var=PATH - ;; -@@ -9594,7 +10249,7 @@ haiku*) - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes -- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' -+ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -@@ -10408,7 +11063,7 @@ else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 - lt_status=$lt_dlunknown - cat > conftest.$ac_ext <<_LT_EOF --#line 10411 "configure" -+#line $LINENO "configure" - #include "confdefs.h" - - #if HAVE_DLFCN_H -@@ -10452,10 +11107,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -10514,7 +11169,7 @@ else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 - lt_status=$lt_dlunknown - cat > conftest.$ac_ext <<_LT_EOF --#line 10517 "configure" -+#line $LINENO "configure" - #include "confdefs.h" - - #if HAVE_DLFCN_H -@@ -10558,10 +11213,10 @@ else - /* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ - #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) --void fnord () __attribute__((visibility("default"))); -+int fnord () __attribute__((visibility("default"))); - #endif - --void fnord () { int i=42; } -+int fnord () { return 42; } - int main () - { - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); -@@ -11993,13 +12648,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' - lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' - lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' - lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' -+lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' - reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' - reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' - OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' - deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' - file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' -+file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' -+want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' -+DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' -+sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' - AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' - AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' -+archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' - STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' - RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' - old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' -@@ 
-12014,14 +12676,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de - lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' -+nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' -+lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' - objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' - MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' --lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' -+lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' - lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' - lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' - need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' -+MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' - DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' - NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' - LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' -@@ -12054,12 +12719,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q - hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' - inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' - link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' --fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' - always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' - export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' - exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' - include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' - prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' -+postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' - file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' - variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' - need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' -@@ -12114,8 +12779,13 @@ reload_flag \ - OBJDUMP \ - deplibs_check_method \ - file_magic_cmd \ -+file_magic_glob \ -+want_nocaseglob \ -+DLLTOOL \ -+sharedlib_from_linklib_cmd \ - AR \ - AR_FLAGS \ -+archiver_list_spec \ - STRIP \ - RANLIB \ - CC \ -@@ -12125,12 +12795,14 @@ lt_cv_sys_global_symbol_pipe \ - lt_cv_sys_global_symbol_to_cdecl \ - lt_cv_sys_global_symbol_to_c_name_address \ - lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ -+nm_file_list_spec \ - lt_prog_compiler_no_builtin_flag \ --lt_prog_compiler_wl \ - lt_prog_compiler_pic \ -+lt_prog_compiler_wl \ - lt_prog_compiler_static \ - 
lt_cv_prog_compiler_c_o \ - need_locks \ -+MANIFEST_TOOL \ - DSYMUTIL \ - NMEDIT \ - LIPO \ -@@ -12146,7 +12818,6 @@ no_undefined_flag \ - hardcode_libdir_flag_spec \ - hardcode_libdir_flag_spec_ld \ - hardcode_libdir_separator \ --fix_srcfile_path \ - exclude_expsyms \ - include_expsyms \ - file_list_spec \ -@@ -12182,6 +12853,7 @@ module_cmds \ - module_expsym_cmds \ - export_symbols_cmds \ - prelink_cmds \ -+postlink_cmds \ - postinstall_cmds \ - postuninstall_cmds \ - finish_cmds \ -@@ -12771,7 +13443,8 @@ $as_echo X"$file" | - # NOTE: Changes made to this file will be lost: look at ltmain.sh. - # - # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, --# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. -+# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, -+# Inc. - # Written by Gordon Matzigkeit, 1996 - # - # This file is part of GNU Libtool. -@@ -12874,19 +13547,42 @@ SP2NL=$lt_lt_SP2NL - # turn newlines into spaces. - NL2SP=$lt_lt_NL2SP - -+# convert \$build file names to \$host format. -+to_host_file_cmd=$lt_cv_to_host_file_cmd -+ -+# convert \$build files to toolchain format. -+to_tool_file_cmd=$lt_cv_to_tool_file_cmd -+ - # An object symbol dumper. - OBJDUMP=$lt_OBJDUMP - - # Method to check whether dependent libraries are shared objects. - deplibs_check_method=$lt_deplibs_check_method - --# Command to use when deplibs_check_method == "file_magic". -+# Command to use when deplibs_check_method = "file_magic". - file_magic_cmd=$lt_file_magic_cmd - -+# How to find potential files when deplibs_check_method = "file_magic". -+file_magic_glob=$lt_file_magic_glob -+ -+# Find potential files using nocaseglob when deplibs_check_method = "file_magic". -+want_nocaseglob=$lt_want_nocaseglob -+ -+# DLL creation program. -+DLLTOOL=$lt_DLLTOOL -+ -+# Command to associate shared and link libraries. -+sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd -+ - # The archiver. - AR=$lt_AR -+ -+# Flags to create an archive. - AR_FLAGS=$lt_AR_FLAGS - -+# How to feed a file listing to the archiver. -+archiver_list_spec=$lt_archiver_list_spec -+ - # A symbol stripping program. - STRIP=$lt_STRIP - -@@ -12916,6 +13612,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - # Transform the output of nm in a C name address pair when lib prefix is needed. - global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - -+# Specify filename containing input files for \$NM. -+nm_file_list_spec=$lt_nm_file_list_spec -+ -+# The root where to search for dependent libraries,and in which our libraries should be installed. -+lt_sysroot=$lt_sysroot -+ - # The name of the directory that contains temporary libtool files. - objdir=$objdir - -@@ -12925,6 +13627,9 @@ MAGIC_CMD=$MAGIC_CMD - # Must we lock files when doing compilation? - need_locks=$lt_need_locks - -+# Manifest tool. -+MANIFEST_TOOL=$lt_MANIFEST_TOOL -+ - # Tool to manipulate archived DWARF debug symbol files on Mac OS X. - DSYMUTIL=$lt_DSYMUTIL - -@@ -13039,12 +13744,12 @@ with_gcc=$GCC - # Compiler flag to turn off builtin functions. - no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag - --# How to pass a linker flag through the compiler. --wl=$lt_lt_prog_compiler_wl -- - # Additional compiler flags for building library objects. - pic_flag=$lt_lt_prog_compiler_pic - -+# How to pass a linker flag through the compiler. -+wl=$lt_lt_prog_compiler_wl -+ - # Compiler flag to prevent dynamic linking. 
- link_static_flag=$lt_lt_prog_compiler_static - -@@ -13131,9 +13836,6 @@ inherit_rpath=$inherit_rpath - # Whether libtool must link a program against all its dependency libraries. - link_all_deplibs=$link_all_deplibs - --# Fix the shell variable \$srcfile for the compiler. --fix_srcfile_path=$lt_fix_srcfile_path -- - # Set to "yes" if exported symbols are required. - always_export_symbols=$always_export_symbols - -@@ -13149,6 +13851,9 @@ include_expsyms=$lt_include_expsyms - # Commands necessary for linking programs (against libraries) with templates. - prelink_cmds=$lt_prelink_cmds - -+# Commands necessary for finishing linking programs. -+postlink_cmds=$lt_postlink_cmds -+ - # Specify filename containing input files. - file_list_spec=$lt_file_list_spec - -@@ -13181,210 +13886,169 @@ ltmain="$ac_aux_dir/ltmain.sh" - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? -- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- case $xsi_shell in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac --} -- --# func_basename file --func_basename () --{ -- func_basename_result="${1##*/}" --} -- --# func_dirname_and_basename file append nondir_replacement --# perform func_basename and func_dirname in a single function --# call: --# dirname: Compute the dirname of FILE. If nonempty, --# add APPEND to the result, otherwise set result --# to NONDIR_REPLACEMENT. --# value returned in "$func_dirname_result" --# basename: Compute filename of FILE. --# value retuned in "$func_basename_result" --# Implementation must be kept synchronized with func_dirname --# and func_basename. For efficiency, we do not delegate to --# those functions but instead duplicate the functionality here. --func_dirname_and_basename () --{ -- case ${1} in -- */*) func_dirname_result="${1%/*}${2}" ;; -- * ) func_dirname_result="${3}" ;; -- esac -- func_basename_result="${1##*/}" --} -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --func_stripname () --{ -- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are -- # positional parameters, so assign one to ordinary parameter first. -- func_stripname_result=${3} -- func_stripname_result=${func_stripname_result#"${1}"} -- func_stripname_result=${func_stripname_result%"${2}"} --} -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=${1%%=*} -- func_opt_split_arg=${1#*=} --} -- --# func_lo2o object --func_lo2o () --{ -- case ${1} in -- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; -- *) func_lo2o_result=${1} ;; -- esac --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=${1%.*}.lo --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=$(( $* )) --} -- --# func_len string --# STRING may not start with a hyphen. 
--func_len () --{ -- func_len_result=${#1} --} -- --_LT_EOF -- ;; -- *) # Bourne compatible functions. -- cat << \_LT_EOF >> "$cfgfile" -- --# func_dirname file append nondir_replacement --# Compute the dirname of FILE. If nonempty, add APPEND to the result, --# otherwise set result to NONDIR_REPLACEMENT. --func_dirname () --{ -- # Extract subdirectory from the argument. -- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` -- if test "X$func_dirname_result" = "X${1}"; then -- func_dirname_result="${3}" -- else -- func_dirname_result="$func_dirname_result${2}" -- fi --} -- --# func_basename file --func_basename () --{ -- func_basename_result=`$ECHO "${1}" | $SED "$basename"` --} -- -- --# func_stripname prefix suffix name --# strip PREFIX and SUFFIX off of NAME. --# PREFIX and SUFFIX must not contain globbing or regex special --# characters, hashes, percent signs, but SUFFIX may contain a leading --# dot (in which case that matches only a dot). --# func_strip_suffix prefix name --func_stripname () --{ -- case ${2} in -- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; -- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; -- esac --} -- --# sed scripts: --my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' --my_sed_long_arg='1s/^-[^=]*=//' -- --# func_opt_split --func_opt_split () --{ -- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` -- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` --} -- --# func_lo2o object --func_lo2o () --{ -- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` --} -- --# func_xform libobj-or-source --func_xform () --{ -- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` --} -- --# func_arith arithmetic-term... --func_arith () --{ -- func_arith_result=`expr "$@"` --} -- --# func_len string --# STRING may not start with a hyphen. --func_len () --{ -- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` --} -- --_LT_EOF --esac -- --case $lt_shell_append in -- yes) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1+=\$2" --} --_LT_EOF -- ;; -- *) -- cat << \_LT_EOF >> "$cfgfile" -- --# func_append var value --# Append VALUE to the end of shell variable VAR. --func_append () --{ -- eval "$1=\$$1\$2" --} -- --_LT_EOF -- ;; -- esac -- -- -- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ -- || (rm -f "$cfgfile"; exit 1) -- -- mv -f "$cfgfile" "$ofile" || -+ sed '$q' "$ltmain" >> "$cfgfile" \ -+ || (rm -f "$cfgfile"; exit 1) -+ -+ if test x"$xsi_shell" = xyes; then -+ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -+func_dirname ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_basename ()$/,/^} # func_basename /c\ -+func_basename ()\ -+{\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -+func_dirname_and_basename ()\ -+{\ -+\ case ${1} in\ -+\ */*) func_dirname_result="${1%/*}${2}" ;;\ -+\ * ) func_dirname_result="${3}" ;;\ -+\ esac\ -+\ func_basename_result="${1##*/}"\ -+} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -+func_stripname ()\ -+{\ -+\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -+\ # positional parameters, so assign one to ordinary parameter first.\ -+\ func_stripname_result=${3}\ -+\ func_stripname_result=${func_stripname_result#"${1}"}\ -+\ func_stripname_result=${func_stripname_result%"${2}"}\ -+} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -+func_split_long_opt ()\ -+{\ -+\ func_split_long_opt_name=${1%%=*}\ -+\ func_split_long_opt_arg=${1#*=}\ -+} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -+func_split_short_opt ()\ -+{\ -+\ func_split_short_opt_arg=${1#??}\ -+\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -+} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -+func_lo2o ()\ -+{\ -+\ case ${1} in\ -+\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -+\ *) func_lo2o_result=${1} ;;\ -+\ esac\ -+} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_xform ()$/,/^} # func_xform /c\ -+func_xform ()\ -+{\ -+ func_xform_result=${1%.*}.lo\ -+} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_arith ()$/,/^} # func_arith /c\ -+func_arith ()\ -+{\ -+ func_arith_result=$(( $* ))\ -+} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_len ()$/,/^} # func_len /c\ -+func_len ()\ -+{\ -+ func_len_result=${#1}\ -+} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? 
|| _lt_function_replace_fail=: -+ -+fi -+ -+if test x"$lt_shell_append" = xyes; then -+ sed -e '/^func_append ()$/,/^} # func_append /c\ -+func_append ()\ -+{\ -+ eval "${1}+=\\${2}"\ -+} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -+func_append_quoted ()\ -+{\ -+\ func_quote_for_eval "${2}"\ -+\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -+} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+test 0 -eq $? || _lt_function_replace_fail=: -+ -+ -+ # Save a `func_append' function call where possible by direct use of '+=' -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+else -+ # Save a `func_append' function call even when '+=' is not available -+ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ -+ && mv -f "$cfgfile.tmp" "$cfgfile" \ -+ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -+ test 0 -eq $? || _lt_function_replace_fail=: -+fi -+ -+if test x"$_lt_function_replace_fail" = x":"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -+$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -+fi -+ -+ -+ mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" - --- -2.9.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0007-Add-the-armv5e-architecture-to-binutils.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0007-Add-the-armv5e-architecture-to-binutils.patch deleted file mode 100644 index 795abf365..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0007-Add-the-armv5e-architecture-to-binutils.patch +++ /dev/null @@ -1,35 +0,0 @@ -From 9475aee8a702aa531737ec12dfd928d52951bc84 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 2 Mar 2015 01:37:10 +0000 -Subject: [PATCH 07/13] Add the armv5e architecture to binutils - -Binutils has a comment that indicates it is supposed to match gcc for -all of the support "-march=" settings, but it was lacking the armv5e setting. -This was a simple way to add it, as thumb instructions shouldn't be generated -by the compiler anyway. - -Upstream-Status: Denied -Upstream maintainer indicated that we should not be using armv5e, even -though it is a legal archicture defined by our gcc. 
- -Signed-off-by: Mark Hatle -Signed-off-by: Khem Raj ---- - gas/config/tc-arm.c | 1 + - 1 file changed, 1 insertion(+) - -diff --git a/gas/config/tc-arm.c b/gas/config/tc-arm.c -index 73d0531..4b9e32f 100644 ---- a/gas/config/tc-arm.c -+++ b/gas/config/tc-arm.c -@@ -25421,6 +25421,7 @@ static const struct arm_arch_option_table arm_archs[] = - ARM_ARCH_OPT ("armv4t", ARM_ARCH_V4T, FPU_ARCH_FPA), - ARM_ARCH_OPT ("armv4txm", ARM_ARCH_V4TxM, FPU_ARCH_FPA), - ARM_ARCH_OPT ("armv5", ARM_ARCH_V5, FPU_ARCH_VFP), -+ ARM_ARCH_OPT ("armv5e", ARM_ARCH_V5TE, FPU_ARCH_VFP), - ARM_ARCH_OPT ("armv5t", ARM_ARCH_V5T, FPU_ARCH_VFP), - ARM_ARCH_OPT ("armv5txm", ARM_ARCH_V5TxM, FPU_ARCH_VFP), - ARM_ARCH_OPT ("armv5te", ARM_ARCH_V5TE, FPU_ARCH_VFP), --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0007-Use-libtool-2.4.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0007-Use-libtool-2.4.patch new file mode 100644 index 000000000..6b7f75389 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0007-Use-libtool-2.4.patch @@ -0,0 +1,21137 @@ +From 9a3651e120261c72090689ad770ad048b0baf506 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 6 Mar 2017 23:28:33 -0800 +Subject: [PATCH 07/15] Use libtool 2.4 + +get libtool sysroot support + +Signed-off-by: Khem Raj +--- +Upstream-Status: Inappropriate [ OE configuration Specific] + + bfd/configure | 1318 +++++++++++++++++------ + bfd/configure.ac | 2 +- + binutils/configure | 1316 +++++++++++++++++------ + gas/configure | 1314 +++++++++++++++++------ + gprof/configure | 1321 +++++++++++++++++------ + ld/configure | 1691 +++++++++++++++++++++-------- + libtool.m4 | 1080 +++++++++++++------ + ltmain.sh | 2925 +++++++++++++++++++++++++++++++++----------------- + ltoptions.m4 | 2 +- + ltversion.m4 | 12 +- + lt~obsolete.m4 | 2 +- + opcodes/configure | 1318 +++++++++++++++++------ + opcodes/configure.ac | 2 +- + zlib/configure | 1316 +++++++++++++++++------ + 14 files changed, 9926 insertions(+), 3693 deletions(-) + +diff --git a/bfd/configure b/bfd/configure +index f30bfabef3..fa1a545e9d 100755 +--- a/bfd/configure ++++ b/bfd/configure +@@ -672,6 +672,9 @@ OTOOL + LIPO + NMEDIT + DSYMUTIL ++MANIFEST_TOOL ++ac_ct_AR ++DLLTOOL + OBJDUMP + LN_S + NM +@@ -785,6 +788,7 @@ enable_static + with_pic + enable_fast_install + with_gnu_ld ++with_libtool_sysroot + enable_libtool_lock + enable_plugins + enable_largefile +@@ -1461,6 +1465,8 @@ Optional Packages: + --with-pic try to use only PIC/non-PIC objects [default=use + both] + --with-gnu-ld assume the C compiler uses GNU ld [default=no] ++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR ++ (or the compiler's sysroot if not specified). + --with-mmap try using mmap for BFD input files if available + --with-separate-debug-dir=DIR + Look for global separate debug info in DIR +@@ -5393,8 +5399,8 @@ esac + + + +-macro_version='2.2.7a' +-macro_revision='1.3134' ++macro_version='2.4' ++macro_revision='1.3293' + + + +@@ -5434,7 +5440,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 + $as_echo_n "checking how to print strings... " >&6; } + # Test print first, because it will be a builtin if present. 
+-if test "X`print -r -- -n 2>/dev/null`" = X-n && \ ++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ + test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then + ECHO='print -r --' + elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then +@@ -6120,8 +6126,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6; + # Try some XSI features + xsi_shell=no + ( _lt_dummy="a/b/c" +- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ +- = c,a/b,, \ ++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ ++ = c,a/b,b/c, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +@@ -6170,6 +6176,80 @@ esac + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 ++$as_echo_n "checking how to convert $build file names to $host format... " >&6; } ++if test "${lt_cv_to_host_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ++ ;; ++ esac ++ ;; ++ *-*-cygwin* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ++ ;; ++ esac ++ ;; ++ * ) # unhandled hosts (and "normal" native builds) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++esac ++ ++fi ++ ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 ++$as_echo "$lt_cv_to_host_file_cmd" >&6; } ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 ++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } ++if test "${lt_cv_to_tool_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ #assume ordinary cross tools, or native build. ++lt_cv_to_tool_file_cmd=func_convert_file_noop ++case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ esac ++ ;; ++esac ++ ++fi ++ ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 ++$as_echo "$lt_cv_to_tool_file_cmd" >&6; } ++ ++ ++ ++ ++ + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 + $as_echo_n "checking for $LD option to reload object files... " >&6; } + if test "${lt_cv_ld_reload_flag+set}" = set; then : +@@ -6186,6 +6266,11 @@ case $reload_flag in + esac + reload_cmds='$LD$reload_flag -o $output$reload_objs' + case $host_os in ++ cygwin* | mingw* | pw32* | cegcc*) ++ if test "$GCC" != yes; then ++ reload_cmds=false ++ fi ++ ;; + darwin*) + if test "$GCC" = yes; then + reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' +@@ -6354,7 +6439,8 @@ mingw* | pw32*) + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else +- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' 
++ # Keep this pattern in sync with the one in func_win32_libid. ++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; +@@ -6508,6 +6594,21 @@ esac + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 + $as_echo "$lt_cv_deplibs_check_method" >&6; } ++ ++file_magic_glob= ++want_nocaseglob=no ++if test "$build" = "$host"; then ++ case $host_os in ++ mingw* | pw32*) ++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then ++ want_nocaseglob=yes ++ else ++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` ++ fi ++ ;; ++ esac ++fi ++ + file_magic_cmd=$lt_cv_file_magic_cmd + deplibs_check_method=$lt_cv_deplibs_check_method + test -z "$deplibs_check_method" && deplibs_check_method=unknown +@@ -6523,9 +6624,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ + if test -n "$ac_tool_prefix"; then +- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. +-set dummy ${ac_tool_prefix}ar; ac_word=$2 ++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. ++set dummy ${ac_tool_prefix}dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$DLLTOOL"; then ++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++DLLTOOL=$ac_cv_prog_DLLTOOL ++if test -n "$DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 ++$as_echo "$DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_DLLTOOL"; then ++ ac_ct_DLLTOOL=$DLLTOOL ++ # Extract the first word of "dlltool", so it can be a program name with args. ++set dummy dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_DLLTOOL"; then ++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_DLLTOOL="dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL ++if test -n "$ac_ct_DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 ++$as_echo "$ac_ct_DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_DLLTOOL" = x; then ++ DLLTOOL="false" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ DLLTOOL=$ac_ct_DLLTOOL ++ fi ++else ++ DLLTOOL="$ac_cv_prog_DLLTOOL" ++fi ++ ++test -z "$DLLTOOL" && DLLTOOL=dlltool ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 ++$as_echo_n "checking how to associate runtime and link libraries... " >&6; } ++if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_sharedlib_from_linklib_cmd='unknown' ++ ++case $host_os in ++cygwin* | mingw* | pw32* | cegcc*) ++ # two different shell functions defined in ltmain.sh ++ # decide which to use based on capabilities of $DLLTOOL ++ case `$DLLTOOL --help 2>&1` in ++ *--identify-strict*) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ++ ;; ++ *) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ++ ;; ++ esac ++ ;; ++*) ++ # fallback: assume linklib IS sharedlib ++ lt_cv_sharedlib_from_linklib_cmd="$ECHO" ++ ;; ++esac ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 ++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } ++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd ++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO ++ ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ for ac_prog in ar ++ do ++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. ++set dummy $ac_tool_prefix$ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_AR+set}" = set; then : +@@ -6541,7 +6795,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_AR="${ac_tool_prefix}ar" ++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6561,11 +6815,15 @@ $as_echo "no" >&6; } + fi + + ++ test -n "$AR" && break ++ done + fi +-if test -z "$ac_cv_prog_AR"; then ++if test -z "$AR"; then + ac_ct_AR=$AR +- # Extract the first word of "ar", so it can be a program name with args. +-set dummy ar; ac_word=$2 ++ for ac_prog in ar ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. ++set dummy $ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... 
" >&6; } + if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : +@@ -6581,7 +6839,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_ac_ct_AR="ar" ++ ac_cv_prog_ac_ct_AR="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6600,6 +6858,10 @@ else + $as_echo "no" >&6; } + fi + ++ ++ test -n "$ac_ct_AR" && break ++done ++ + if test "x$ac_ct_AR" = x; then + AR="false" + else +@@ -6611,16 +6873,72 @@ ac_tool_warned=yes ;; + esac + AR=$ac_ct_AR + fi +-else +- AR="$ac_cv_prog_AR" + fi + +-test -z "$AR" && AR=ar +-test -z "$AR_FLAGS" && AR_FLAGS=cru ++: ${AR=ar} ++: ${AR_FLAGS=cru} ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 ++$as_echo_n "checking for archiver @FILE support... " >&6; } ++if test "${lt_cv_ar_at_file+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_ar_at_file=no ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ + ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ echo conftest.$ac_objext > conftest.lst ++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -eq 0; then ++ # Ensure the archiver fails upon bogus file names. ++ rm -f conftest.$ac_objext libconftest.a ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -ne 0; then ++ lt_cv_ar_at_file=@ ++ fi ++ fi ++ rm -f conftest.* libconftest.a + ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 ++$as_echo "$lt_cv_ar_at_file" >&6; } + ++if test "x$lt_cv_ar_at_file" = xno; then ++ archiver_list_spec= ++else ++ archiver_list_spec=$lt_cv_ar_at_file ++fi + + + +@@ -6962,8 +7280,8 @@ esac + lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + + # Transform an extracted symbol line into symbol name and symbol address +-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" +-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + + # Handle CRLF in mingw tool chain + opt_cr= +@@ -6999,6 +7317,7 @@ for ac_symprfx in "" "_"; do + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi ++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" + + # Check to see that the pipe works correctly. + pipe_works=no +@@ -7040,6 +7359,18 @@ _LT_EOF + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext ++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ ++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) ++/* DATA imports from DLLs on WIN32 con't be const, because runtime ++ relocations are performed -- see ld's documentation on pseudo-relocs. */ ++# define LT_DLSYM_CONST ++#elif defined(__osf__) ++/* This system does not cope well with relocations in const data. */ ++# define LT_DLSYM_CONST ++#else ++# define LT_DLSYM_CONST const ++#endif ++ + #ifdef __cplusplus + extern "C" { + #endif +@@ -7051,7 +7382,7 @@ _LT_EOF + cat <<_LT_EOF >> conftest.$ac_ext + + /* The mapping between symbol names and symbols. */ +-const struct { ++LT_DLSYM_CONST struct { + const char *name; + void *address; + } +@@ -7077,8 +7408,8 @@ static const void *lt_preloaded_setup() { + _LT_EOF + # Now try linking the two files. 
+ mv conftest.$ac_objext conftstm.$ac_objext +- lt_save_LIBS="$LIBS" +- lt_save_CFLAGS="$CFLAGS" ++ lt_globsym_save_LIBS=$LIBS ++ lt_globsym_save_CFLAGS=$CFLAGS + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 +@@ -7088,8 +7419,8 @@ _LT_EOF + test $ac_status = 0; } && test -s conftest${ac_exeext}; then + pipe_works=yes + fi +- LIBS="$lt_save_LIBS" +- CFLAGS="$lt_save_CFLAGS" ++ LIBS=$lt_globsym_save_LIBS ++ CFLAGS=$lt_globsym_save_CFLAGS + else + echo "cannot find nm_test_func in $nlist" >&5 + fi +@@ -7126,6 +7457,16 @@ else + $as_echo "ok" >&6; } + fi + ++# Response file support. ++if test "$lt_cv_nm_interface" = "MS dumpbin"; then ++ nm_file_list_spec='@' ++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then ++ nm_file_list_spec='@' ++fi ++ ++ ++ ++ + + + +@@ -7147,6 +7488,45 @@ fi + + + ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 ++$as_echo_n "checking for sysroot... " >&6; } ++ ++# Check whether --with-libtool-sysroot was given. ++if test "${with_libtool_sysroot+set}" = set; then : ++ withval=$with_libtool_sysroot; ++else ++ with_libtool_sysroot=no ++fi ++ ++ ++lt_sysroot= ++case ${with_libtool_sysroot} in #( ++ yes) ++ if test "$GCC" = yes; then ++ lt_sysroot=`$CC --print-sysroot 2>/dev/null` ++ fi ++ ;; #( ++ /*) ++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` ++ ;; #( ++ no|'') ++ ;; #( ++ *) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 ++$as_echo "${with_libtool_sysroot}" >&6; } ++ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 ++ ;; ++esac ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 ++$as_echo "${lt_sysroot:-no}" >&6; } ++ ++ ++ ++ ++ + # Check whether --enable-libtool-lock was given. + if test "${enable_libtool_lock+set}" = set; then : + enableval=$enable_libtool_lock; +@@ -7353,6 +7733,123 @@ esac + + need_locks="$enable_libtool_lock" + ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. ++set dummy ${ac_tool_prefix}mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$MANIFEST_TOOL"; then ++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL ++if test -n "$MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 ++$as_echo "$MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then ++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL ++ # Extract the first word of "mt", so it can be a program name with args. 
++set dummy mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_MANIFEST_TOOL"; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL ++if test -n "$ac_ct_MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 ++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_MANIFEST_TOOL" = x; then ++ MANIFEST_TOOL=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL ++ fi ++else ++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" ++fi ++ ++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 ++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } ++if test "${lt_cv_path_mainfest_tool+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_path_mainfest_tool=no ++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 ++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out ++ cat conftest.err >&5 ++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then ++ lt_cv_path_mainfest_tool=yes ++ fi ++ rm -f conftest* ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 ++$as_echo "$lt_cv_path_mainfest_tool" >&6; } ++if test "x$lt_cv_path_mainfest_tool" != xyes; then ++ MANIFEST_TOOL=: ++fi ++ ++ ++ ++ ++ + + case $host_os in + rhapsody* | darwin*) +@@ -7916,6 +8413,8 @@ _LT_EOF + $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 + echo "$AR cru libconftest.a conftest.o" >&5 + $AR cru libconftest.a conftest.o 2>&5 ++ echo "$RANLIB libconftest.a" >&5 ++ $RANLIB libconftest.a 2>&5 + cat > conftest.c << _LT_EOF + int main() { return 0;} + _LT_EOF +@@ -8080,7 +8579,8 @@ fi + LIBTOOL_DEPS="$ltmain" + + # Always use our own libtool. +-LIBTOOL='$(SHELL) $(top_builddir)/libtool' ++LIBTOOL='$(SHELL) $(top_builddir)' ++LIBTOOL="$LIBTOOL/${host_alias}-libtool" + + + +@@ -8169,7 +8669,7 @@ aix3*) + esac + + # Global variables: +-ofile=libtool ++ofile=${host_alias}-libtool + can_build_shared=yes + + # All known linkers require a `.a' archive for static linking (except MSVC, +@@ -8467,8 +8967,6 @@ fi + lt_prog_compiler_pic= + lt_prog_compiler_static= + +-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +-$as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } + + if test "$GCC" = yes; then + lt_prog_compiler_wl='-Wl,' +@@ -8634,6 +9132,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + lt_prog_compiler_pic='--shared' + lt_prog_compiler_static='--static' + ;; ++ nagfor*) ++ # NAG Fortran compiler ++ lt_prog_compiler_wl='-Wl,-Wl,,' ++ lt_prog_compiler_pic='-PIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; + pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) +@@ -8696,7 +9200,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + case $cc_basename in +- f77* | f90* | f95*) ++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) + lt_prog_compiler_wl='-Qoption ld ';; + *) + lt_prog_compiler_wl='-Wl,';; +@@ -8753,13 +9257,17 @@ case $host_os in + lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" + ;; + esac +-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 +-$as_echo "$lt_prog_compiler_pic" >&6; } +- +- +- +- + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 ++$as_echo_n "checking for $compiler option to produce PIC... " >&6; } ++if test "${lt_cv_prog_compiler_pic+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 ++$as_echo "$lt_cv_prog_compiler_pic" >&6; } ++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic + + # + # Check to make sure the PIC flag actually works. +@@ -8820,6 +9328,11 @@ fi + + + ++ ++ ++ ++ ++ + # + # Check to make sure the static flag actually works. + # +@@ -9170,7 +9683,8 @@ _LT_EOF + allow_undefined_flag=unsupported + always_export_symbols=no + enable_shared_with_static_runtimes=yes +- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' ++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +@@ -9269,12 +9783,12 @@ _LT_EOF + whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' + hardcode_libdir_flag_spec= + hardcode_libdir_flag_spec_ld='-rpath $libdir' +- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' ++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ +- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' ++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + ;; + esac +@@ -9288,8 
+9802,8 @@ _LT_EOF + archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + ;; + +@@ -9307,8 +9821,8 @@ _LT_EOF + + _LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -9354,8 +9868,8 @@ _LT_EOF + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -9485,7 +9999,13 @@ _LT_EOF + allow_undefined_flag='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9498,22 +10018,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. 
++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" +@@ -9525,7 +10052,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + else + # Determine the default libpath from the value encoded in an + # empty executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9538,22 +10071,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, +@@ -9598,20 +10138,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. +- hardcode_libdir_flag_spec=' ' +- allow_undefined_flag=unsupported +- # Tell ltmain to make .lib files, not .a files. +- libext=lib +- # Tell ltmain to make .dll files, not .so files. +- shrext_cmds=".dll" +- # FIXME: Setting linknames here is a bad hack. +- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' +- # The linker will automatically build a .lib file if we build a DLL. +- old_archive_from_new_cmds='true' +- # FIXME: Should let the user specify the lib program. 
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' +- fix_srcfile_path='`cygpath -w "$srcfile"`' +- enable_shared_with_static_runtimes=yes ++ case $cc_basename in ++ cl*) ++ # Native MSVC ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ always_export_symbols=yes ++ file_list_spec='@' ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' ++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; ++ else ++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; ++ fi~ ++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ ++ linknames=' ++ # The linker will not automatically build a static lib if we build a DLL. ++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' ++ enable_shared_with_static_runtimes=yes ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ # Don't use ranlib ++ old_postinstall_cmds='chmod 644 $oldlib' ++ postlink_cmds='lt_outputfile="@OUTPUT@"~ ++ lt_tool_outputfile="@TOOL_OUTPUT@"~ ++ case $lt_outputfile in ++ *.exe|*.EXE) ;; ++ *) ++ lt_outputfile="$lt_outputfile.exe" ++ lt_tool_outputfile="$lt_tool_outputfile.exe" ++ ;; ++ esac~ ++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then ++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; ++ $RM "$lt_outputfile.manifest"; ++ fi' ++ ;; ++ *) ++ # Assume MSVC wrapper ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' ++ # The linker will automatically build a .lib file if we build a DLL. ++ old_archive_from_new_cmds='true' ++ # FIXME: Should let the user specify the lib program. ++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' ++ enable_shared_with_static_runtimes=yes ++ ;; ++ esac + ;; + + darwin* | rhapsody*) +@@ -9672,7 +10255,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
+ freebsd* | dragonfly*) +- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no +@@ -9680,7 +10263,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux9*) + if test "$GCC" = yes; then +- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi +@@ -9696,7 +10279,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux10*) + if test "$GCC" = yes && test "$with_gnu_ld" = no; then +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi +@@ -9720,10 +10303,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else +@@ -9802,23 +10385,36 @@ fi + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. +- save_LDFLAGS="$LDFLAGS" +- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ # This should be the same for all languages, so no per-tag cache variable. ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 ++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } ++if test "${lt_cv_irix_exported_symbol+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ +-int foo(void) {} ++int foo (void) { return 0; } + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' +- ++ lt_cv_irix_exported_symbol=yes ++else ++ lt_cv_irix_exported_symbol=no + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +- LDFLAGS="$save_LDFLAGS" ++ LDFLAGS="$save_LDFLAGS" ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 ++$as_echo "$lt_cv_irix_exported_symbol" >&6; } ++ if test "$lt_cv_irix_exported_symbol" = yes; then ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' ++ fi + else + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' +@@ -9903,7 +10499,7 @@ rm -f core conftest.err conftest.$ac_objext \ + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' +- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + else + allow_undefined_flag=' -expect_unresolved \*' +@@ -9922,9 +10518,9 @@ rm -f core conftest.err conftest.$ac_objext \ + no_undefined_flag=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' +- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ +- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) +@@ -10500,8 
+11096,9 @@ cygwin* | mingw* | pw32* | cegcc*) + need_version=no + need_lib_prefix=no + +- case $GCC,$host_os in +- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) ++ case $GCC,$cc_basename in ++ yes,*) ++ # gcc + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ +@@ -10534,13 +11131,71 @@ cygwin* | mingw* | pw32* | cegcc*) + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + esac ++ dynamic_linker='Win32 ld.exe' ++ ;; ++ ++ *,cl*) ++ # Native MSVC ++ libname_spec='$name' ++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ library_names_spec='${libname}.dll.lib' ++ ++ case $build_os in ++ mingw*) ++ sys_lib_search_path_spec= ++ lt_save_ifs=$IFS ++ IFS=';' ++ for lt_path in $LIB ++ do ++ IFS=$lt_save_ifs ++ # Let DOS variable expansion print the short 8.3 style file name. ++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` ++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" ++ done ++ IFS=$lt_save_ifs ++ # Convert to MSYS style. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ++ ;; ++ cygwin*) ++ # Convert to unix form, then to dos form, then back to unix form ++ # but this time dos style (no spaces!) so that the unix form looks ++ # like /cygdrive/c/PROGRA~1:/cygdr... ++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` ++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` ++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ ;; ++ *) ++ sys_lib_search_path_spec="$LIB" ++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then ++ # It is most probably a Windows format PATH. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` ++ else ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ fi ++ # FIXME: find the short name or the path components, as spaces are ++ # common. (e.g. "Program Files" -> "PROGRA~1") ++ ;; ++ esac ++ ++ # DLL is installed to $(libdir)/../bin by postinstall_cmds ++ postinstall_cmds='base_file=`basename \${file}`~ ++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ ++ dldir=$destdir/`dirname \$dlpath`~ ++ test -d \$dldir || mkdir -p \$dldir~ ++ $install_prog $dir/$dlname \$dldir/$dlname' ++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ ++ dlpath=$dir/\$dldll~ ++ $RM \$dlpath' ++ shlibpath_overrides_runpath=yes ++ dynamic_linker='Win32 link.exe' + ;; + + *) ++ # Assume MSVC wrapper + library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ++ dynamic_linker='Win32 ld.exe' + ;; + esac +- dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . 
and the directory the executable is in + shlibpath_var=PATH + ;; +@@ -10632,7 +11287,7 @@ haiku*) + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LIBRARY_PATH + shlibpath_overrides_runpath=yes +- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' ++ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' + hardcode_into_libs=yes + ;; + +@@ -11472,10 +12127,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -11578,10 +12233,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -14122,7 +14777,7 @@ SHARED_LDFLAGS= + if test "$enable_shared" = "yes"; then + x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'` + if test -n "$x"; then +- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty" ++ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a" + fi + + # More hacks to build DLLs on Windows. +@@ -16826,13 +17481,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' + lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' + lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' + lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' + reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' + reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' + OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' + deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' + file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' ++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' ++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' ++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' ++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' + AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' + AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' ++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' + STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' + RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' + old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' +@@ -16847,14 +17509,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de + lt_cv_sys_global_symbol_to_cdecl='`$ECHO 
"$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' ++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' ++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' + objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' + MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' +-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' + lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' + need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' ++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' + DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' + NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' + LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' +@@ -16887,12 +17552,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q + hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' + inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' + link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' +-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' + always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' + export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' + exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' + include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' + prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' ++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' + file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' + variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' + need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' +@@ -16947,8 +17612,13 @@ reload_flag \ + OBJDUMP \ + deplibs_check_method \ + file_magic_cmd \ ++file_magic_glob \ ++want_nocaseglob \ ++DLLTOOL \ ++sharedlib_from_linklib_cmd \ + AR \ + AR_FLAGS \ ++archiver_list_spec \ + STRIP \ + RANLIB \ + CC \ +@@ -16958,12 +17628,14 @@ lt_cv_sys_global_symbol_pipe \ + lt_cv_sys_global_symbol_to_cdecl \ + lt_cv_sys_global_symbol_to_c_name_address \ + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ ++nm_file_list_spec \ + lt_prog_compiler_no_builtin_flag \ +-lt_prog_compiler_wl \ + lt_prog_compiler_pic \ ++lt_prog_compiler_wl \ + lt_prog_compiler_static \ + lt_cv_prog_compiler_c_o \ + need_locks \ ++MANIFEST_TOOL \ + DSYMUTIL \ + NMEDIT \ + LIPO \ +@@ -16979,7 +17651,6 @@ no_undefined_flag \ + 
hardcode_libdir_flag_spec \ + hardcode_libdir_flag_spec_ld \ + hardcode_libdir_separator \ +-fix_srcfile_path \ + exclude_expsyms \ + include_expsyms \ + file_list_spec \ +@@ -17015,6 +17686,7 @@ module_cmds \ + module_expsym_cmds \ + export_symbols_cmds \ + prelink_cmds \ ++postlink_cmds \ + postinstall_cmds \ + postuninstall_cmds \ + finish_cmds \ +@@ -17794,7 +18466,8 @@ $as_echo X"$file" | + # NOTE: Changes made to this file will be lost: look at ltmain.sh. + # + # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. ++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, ++# Inc. + # Written by Gordon Matzigkeit, 1996 + # + # This file is part of GNU Libtool. +@@ -17897,19 +18570,42 @@ SP2NL=$lt_lt_SP2NL + # turn newlines into spaces. + NL2SP=$lt_lt_NL2SP + ++# convert \$build file names to \$host format. ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++ ++# convert \$build files to toolchain format. ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++ + # An object symbol dumper. + OBJDUMP=$lt_OBJDUMP + + # Method to check whether dependent libraries are shared objects. + deplibs_check_method=$lt_deplibs_check_method + +-# Command to use when deplibs_check_method == "file_magic". ++# Command to use when deplibs_check_method = "file_magic". + file_magic_cmd=$lt_file_magic_cmd + ++# How to find potential files when deplibs_check_method = "file_magic". ++file_magic_glob=$lt_file_magic_glob ++ ++# Find potential files using nocaseglob when deplibs_check_method = "file_magic". ++want_nocaseglob=$lt_want_nocaseglob ++ ++# DLL creation program. ++DLLTOOL=$lt_DLLTOOL ++ ++# Command to associate shared and link libraries. ++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd ++ + # The archiver. + AR=$lt_AR ++ ++# Flags to create an archive. + AR_FLAGS=$lt_AR_FLAGS + ++# How to feed a file listing to the archiver. ++archiver_list_spec=$lt_archiver_list_spec ++ + # A symbol stripping program. + STRIP=$lt_STRIP + +@@ -17939,6 +18635,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address + # Transform the output of nm in a C name address pair when lib prefix is needed. + global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix + ++# Specify filename containing input files for \$NM. ++nm_file_list_spec=$lt_nm_file_list_spec ++ ++# The root where to search for dependent libraries,and in which our libraries should be installed. ++lt_sysroot=$lt_sysroot ++ + # The name of the directory that contains temporary libtool files. + objdir=$objdir + +@@ -17948,6 +18650,9 @@ MAGIC_CMD=$MAGIC_CMD + # Must we lock files when doing compilation? + need_locks=$lt_need_locks + ++# Manifest tool. ++MANIFEST_TOOL=$lt_MANIFEST_TOOL ++ + # Tool to manipulate archived DWARF debug symbol files on Mac OS X. + DSYMUTIL=$lt_DSYMUTIL + +@@ -18062,12 +18767,12 @@ with_gcc=$GCC + # Compiler flag to turn off builtin functions. + no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag + +-# How to pass a linker flag through the compiler. +-wl=$lt_lt_prog_compiler_wl +- + # Additional compiler flags for building library objects. + pic_flag=$lt_lt_prog_compiler_pic + ++# How to pass a linker flag through the compiler. ++wl=$lt_lt_prog_compiler_wl ++ + # Compiler flag to prevent dynamic linking. + link_static_flag=$lt_lt_prog_compiler_static + +@@ -18154,9 +18859,6 @@ inherit_rpath=$inherit_rpath + # Whether libtool must link a program against all its dependency libraries. 
+ link_all_deplibs=$link_all_deplibs + +-# Fix the shell variable \$srcfile for the compiler. +-fix_srcfile_path=$lt_fix_srcfile_path +- + # Set to "yes" if exported symbols are required. + always_export_symbols=$always_export_symbols + +@@ -18172,6 +18874,9 @@ include_expsyms=$lt_include_expsyms + # Commands necessary for linking programs (against libraries) with templates. + prelink_cmds=$lt_prelink_cmds + ++# Commands necessary for finishing linking programs. ++postlink_cmds=$lt_postlink_cmds ++ + # Specify filename containing input files. + file_list_spec=$lt_file_list_spec + +@@ -18204,210 +18909,169 @@ ltmain="$ac_aux_dir/ltmain.sh" + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? +- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- case $xsi_shell in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result="${1##*/}" +-} +- +-# func_dirname_and_basename file append nondir_replacement +-# perform func_basename and func_dirname in a single function +-# call: +-# dirname: Compute the dirname of FILE. If nonempty, +-# add APPEND to the result, otherwise set result +-# to NONDIR_REPLACEMENT. +-# value returned in "$func_dirname_result" +-# basename: Compute filename of FILE. +-# value retuned in "$func_basename_result" +-# Implementation must be kept synchronized with func_dirname +-# and func_basename. For efficiency, we do not delegate to +-# those functions but instead duplicate the functionality here. +-func_dirname_and_basename () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +- func_basename_result="${1##*/}" +-} +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-func_stripname () +-{ +- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are +- # positional parameters, so assign one to ordinary parameter first. +- func_stripname_result=${3} +- func_stripname_result=${func_stripname_result#"${1}"} +- func_stripname_result=${func_stripname_result%"${2}"} +-} +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=${1%%=*} +- func_opt_split_arg=${1#*=} +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- case ${1} in +- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; +- *) func_lo2o_result=${1} ;; +- esac +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=${1%.*}.lo +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=$(( $* )) +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=${#1} +-} +- +-_LT_EOF +- ;; +- *) # Bourne compatible functions. +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. 
If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- # Extract subdirectory from the argument. +- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` +- if test "X$func_dirname_result" = "X${1}"; then +- func_dirname_result="${3}" +- else +- func_dirname_result="$func_dirname_result${2}" +- fi +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result=`$ECHO "${1}" | $SED "$basename"` +-} +- +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-# func_strip_suffix prefix name +-func_stripname () +-{ +- case ${2} in +- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; +- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; +- esac +-} +- +-# sed scripts: +-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' +-my_sed_long_arg='1s/^-[^=]*=//' +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` +- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=`expr "$@"` +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` +-} +- +-_LT_EOF +-esac +- +-case $lt_shell_append in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1+=\$2" +-} +-_LT_EOF +- ;; +- *) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1=\$$1\$2" +-} +- +-_LT_EOF +- ;; +- esac +- +- +- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- mv -f "$cfgfile" "$ofile" || ++ sed '$q' "$ltmain" >> "$cfgfile" \ ++ || (rm -f "$cfgfile"; exit 1) ++ ++ if test x"$xsi_shell" = xyes; then ++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ ++func_dirname ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_basename ()$/,/^} # func_basename /c\ ++func_basename ()\ ++{\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ ++func_dirname_and_basename ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ ++func_stripname ()\ ++{\ ++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ ++\ # positional parameters, so assign one to ordinary parameter first.\ ++\ func_stripname_result=${3}\ ++\ func_stripname_result=${func_stripname_result#"${1}"}\ ++\ func_stripname_result=${func_stripname_result%"${2}"}\ ++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ ++func_split_long_opt ()\ ++{\ ++\ func_split_long_opt_name=${1%%=*}\ ++\ func_split_long_opt_arg=${1#*=}\ ++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ ++func_split_short_opt ()\ ++{\ ++\ func_split_short_opt_arg=${1#??}\ ++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ ++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ ++func_lo2o ()\ ++{\ ++\ case ${1} in\ ++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ ++\ *) func_lo2o_result=${1} ;;\ ++\ esac\ ++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_xform ()$/,/^} # func_xform /c\ ++func_xform ()\ ++{\ ++ func_xform_result=${1%.*}.lo\ ++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_arith ()$/,/^} # func_arith /c\ ++func_arith ()\ ++{\ ++ func_arith_result=$(( $* ))\ ++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_len ()$/,/^} # func_len /c\ ++func_len ()\ ++{\ ++ func_len_result=${#1}\ ++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++fi ++ ++if test x"$lt_shell_append" = xyes; then ++ sed -e '/^func_append ()$/,/^} # func_append /c\ ++func_append ()\ ++{\ ++ eval "${1}+=\\${2}"\ ++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ ++func_append_quoted ()\ ++{\ ++\ func_quote_for_eval "${2}"\ ++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ ++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ # Save a `func_append' function call where possible by direct use of '+=' ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++else ++ # Save a `func_append' function call even when '+=' is not available ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++fi ++ ++if test x"$_lt_function_replace_fail" = x":"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 ++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} ++fi ++ ++ ++ mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" + +diff --git a/bfd/configure.ac b/bfd/configure.ac +index 9a183c1628..3d8ea07836 100644 +--- a/bfd/configure.ac ++++ b/bfd/configure.ac +@@ -253,7 +253,7 @@ changequote(,)dnl + x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'` + changequote([,])dnl + if test -n "$x"; then +- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty" ++ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a" + fi + + # More hacks to build DLLs on Windows. +diff --git a/binutils/configure b/binutils/configure +index 82119efe72..4a98918ce1 100755 +--- a/binutils/configure ++++ b/binutils/configure +@@ -659,8 +659,11 @@ OTOOL + LIPO + NMEDIT + DSYMUTIL ++MANIFEST_TOOL + RANLIB ++ac_ct_AR + AR ++DLLTOOL + OBJDUMP + LN_S + NM +@@ -772,6 +775,7 @@ enable_static + with_pic + enable_fast_install + with_gnu_ld ++with_libtool_sysroot + enable_libtool_lock + enable_plugins + enable_largefile +@@ -1444,6 +1448,8 @@ Optional Packages: + --with-pic try to use only PIC/non-PIC objects [default=use + both] + --with-gnu-ld assume the C compiler uses GNU ld [default=no] ++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR ++ (or the compiler's sysroot if not specified). 
+ --with-system-zlib use installed libz + --with-gnu-ld assume the C compiler uses GNU ld default=no + --with-libiconv-prefix[=DIR] search for libiconv in DIR/include and DIR/lib +@@ -5160,8 +5166,8 @@ esac + + + +-macro_version='2.2.7a' +-macro_revision='1.3134' ++macro_version='2.4' ++macro_revision='1.3293' + + + +@@ -5201,7 +5207,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 + $as_echo_n "checking how to print strings... " >&6; } + # Test print first, because it will be a builtin if present. +-if test "X`print -r -- -n 2>/dev/null`" = X-n && \ ++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ + test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then + ECHO='print -r --' + elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then +@@ -5887,8 +5893,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6; + # Try some XSI features + xsi_shell=no + ( _lt_dummy="a/b/c" +- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ +- = c,a/b,, \ ++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ ++ = c,a/b,b/c, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +@@ -5937,6 +5943,80 @@ esac + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 ++$as_echo_n "checking how to convert $build file names to $host format... " >&6; } ++if test "${lt_cv_to_host_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ++ ;; ++ esac ++ ;; ++ *-*-cygwin* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ++ ;; ++ esac ++ ;; ++ * ) # unhandled hosts (and "normal" native builds) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++esac ++ ++fi ++ ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 ++$as_echo "$lt_cv_to_host_file_cmd" >&6; } ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 ++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } ++if test "${lt_cv_to_tool_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ #assume ordinary cross tools, or native build. ++lt_cv_to_tool_file_cmd=func_convert_file_noop ++case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ esac ++ ;; ++esac ++ ++fi ++ ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 ++$as_echo "$lt_cv_to_tool_file_cmd" >&6; } ++ ++ ++ ++ ++ + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 + $as_echo_n "checking for $LD option to reload object files... 
" >&6; } + if test "${lt_cv_ld_reload_flag+set}" = set; then : +@@ -5953,6 +6033,11 @@ case $reload_flag in + esac + reload_cmds='$LD$reload_flag -o $output$reload_objs' + case $host_os in ++ cygwin* | mingw* | pw32* | cegcc*) ++ if test "$GCC" != yes; then ++ reload_cmds=false ++ fi ++ ;; + darwin*) + if test "$GCC" = yes; then + reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' +@@ -6121,7 +6206,8 @@ mingw* | pw32*) + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else +- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' ++ # Keep this pattern in sync with the one in func_win32_libid. ++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; +@@ -6275,6 +6361,21 @@ esac + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 + $as_echo "$lt_cv_deplibs_check_method" >&6; } ++ ++file_magic_glob= ++want_nocaseglob=no ++if test "$build" = "$host"; then ++ case $host_os in ++ mingw* | pw32*) ++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then ++ want_nocaseglob=yes ++ else ++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` ++ fi ++ ;; ++ esac ++fi ++ + file_magic_cmd=$lt_cv_file_magic_cmd + deplibs_check_method=$lt_cv_deplibs_check_method + test -z "$deplibs_check_method" && deplibs_check_method=unknown +@@ -6290,9 +6391,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. ++set dummy ${ac_tool_prefix}dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$DLLTOOL"; then ++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++DLLTOOL=$ac_cv_prog_DLLTOOL ++if test -n "$DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 ++$as_echo "$DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_DLLTOOL"; then ++ ac_ct_DLLTOOL=$DLLTOOL ++ # Extract the first word of "dlltool", so it can be a program name with args. ++set dummy dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_DLLTOOL"; then ++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_DLLTOOL="dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL ++if test -n "$ac_ct_DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 ++$as_echo "$ac_ct_DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_DLLTOOL" = x; then ++ DLLTOOL="false" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ DLLTOOL=$ac_ct_DLLTOOL ++ fi ++else ++ DLLTOOL="$ac_cv_prog_DLLTOOL" ++fi ++ ++test -z "$DLLTOOL" && DLLTOOL=dlltool ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 ++$as_echo_n "checking how to associate runtime and link libraries... " >&6; } ++if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_sharedlib_from_linklib_cmd='unknown' ++ ++case $host_os in ++cygwin* | mingw* | pw32* | cegcc*) ++ # two different shell functions defined in ltmain.sh ++ # decide which to use based on capabilities of $DLLTOOL ++ case `$DLLTOOL --help 2>&1` in ++ *--identify-strict*) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ++ ;; ++ *) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ++ ;; ++ esac ++ ;; ++*) ++ # fallback: assume linklib IS sharedlib ++ lt_cv_sharedlib_from_linklib_cmd="$ECHO" ++ ;; ++esac ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 ++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } ++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd ++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO ++ ++ ++ ++ ++ ++ ++ + if test -n "$ac_tool_prefix"; then +- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. +-set dummy ${ac_tool_prefix}ar; ac_word=$2 ++ for ac_prog in ar ++ do ++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. ++set dummy $ac_tool_prefix$ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_AR+set}" = set; then : +@@ -6308,7 +6562,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_AR="${ac_tool_prefix}ar" ++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6328,11 +6582,15 @@ $as_echo "no" >&6; } + fi + + ++ test -n "$AR" && break ++ done + fi +-if test -z "$ac_cv_prog_AR"; then ++if test -z "$AR"; then + ac_ct_AR=$AR +- # Extract the first word of "ar", so it can be a program name with args. +-set dummy ar; ac_word=$2 ++ for ac_prog in ar ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. 
++set dummy $ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : +@@ -6348,7 +6606,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_ac_ct_AR="ar" ++ ac_cv_prog_ac_ct_AR="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6367,6 +6625,10 @@ else + $as_echo "no" >&6; } + fi + ++ ++ test -n "$ac_ct_AR" && break ++done ++ + if test "x$ac_ct_AR" = x; then + AR="false" + else +@@ -6378,12 +6640,10 @@ ac_tool_warned=yes ;; + esac + AR=$ac_ct_AR + fi +-else +- AR="$ac_cv_prog_AR" + fi + +-test -z "$AR" && AR=ar +-test -z "$AR_FLAGS" && AR_FLAGS=cru ++: ${AR=ar} ++: ${AR_FLAGS=cru} + + + +@@ -6395,6 +6655,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 ++$as_echo_n "checking for archiver @FILE support... " >&6; } ++if test "${lt_cv_ar_at_file+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_ar_at_file=no ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ echo conftest.$ac_objext > conftest.lst ++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -eq 0; then ++ # Ensure the archiver fails upon bogus file names. ++ rm -f conftest.$ac_objext libconftest.a ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -ne 0; then ++ lt_cv_ar_at_file=@ ++ fi ++ fi ++ rm -f conftest.* libconftest.a ++ ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 ++$as_echo "$lt_cv_ar_at_file" >&6; } ++ ++if test "x$lt_cv_ar_at_file" = xno; then ++ archiver_list_spec= ++else ++ archiver_list_spec=$lt_cv_ar_at_file ++fi ++ ++ ++ ++ ++ ++ ++ + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. 
+ set dummy ${ac_tool_prefix}strip; ac_word=$2 +@@ -6729,8 +7047,8 @@ esac + lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + + # Transform an extracted symbol line into symbol name and symbol address +-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" +-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + + # Handle CRLF in mingw tool chain + opt_cr= +@@ -6766,6 +7084,7 @@ for ac_symprfx in "" "_"; do + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi ++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" + + # Check to see that the pipe works correctly. + pipe_works=no +@@ -6807,6 +7126,18 @@ _LT_EOF + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext ++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ ++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) ++/* DATA imports from DLLs on WIN32 con't be const, because runtime ++ relocations are performed -- see ld's documentation on pseudo-relocs. */ ++# define LT_DLSYM_CONST ++#elif defined(__osf__) ++/* This system does not cope well with relocations in const data. */ ++# define LT_DLSYM_CONST ++#else ++# define LT_DLSYM_CONST const ++#endif ++ + #ifdef __cplusplus + extern "C" { + #endif +@@ -6818,7 +7149,7 @@ _LT_EOF + cat <<_LT_EOF >> conftest.$ac_ext + + /* The mapping between symbol names and symbols. */ +-const struct { ++LT_DLSYM_CONST struct { + const char *name; + void *address; + } +@@ -6844,8 +7175,8 @@ static const void *lt_preloaded_setup() { + _LT_EOF + # Now try linking the two files. + mv conftest.$ac_objext conftstm.$ac_objext +- lt_save_LIBS="$LIBS" +- lt_save_CFLAGS="$CFLAGS" ++ lt_globsym_save_LIBS=$LIBS ++ lt_globsym_save_CFLAGS=$CFLAGS + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 +@@ -6855,8 +7186,8 @@ _LT_EOF + test $ac_status = 0; } && test -s conftest${ac_exeext}; then + pipe_works=yes + fi +- LIBS="$lt_save_LIBS" +- CFLAGS="$lt_save_CFLAGS" ++ LIBS=$lt_globsym_save_LIBS ++ CFLAGS=$lt_globsym_save_CFLAGS + else + echo "cannot find nm_test_func in $nlist" >&5 + fi +@@ -6893,6 +7224,21 @@ else + $as_echo "ok" >&6; } + fi + ++# Response file support. 
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then ++ nm_file_list_spec='@' ++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then ++ nm_file_list_spec='@' ++fi ++ ++ ++ ++ ++ ++ ++ ++ ++ + + + +@@ -6910,6 +7256,40 @@ fi + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 ++$as_echo_n "checking for sysroot... " >&6; } ++ ++# Check whether --with-libtool-sysroot was given. ++if test "${with_libtool_sysroot+set}" = set; then : ++ withval=$with_libtool_sysroot; ++else ++ with_libtool_sysroot=no ++fi ++ ++ ++lt_sysroot= ++case ${with_libtool_sysroot} in #( ++ yes) ++ if test "$GCC" = yes; then ++ lt_sysroot=`$CC --print-sysroot 2>/dev/null` ++ fi ++ ;; #( ++ /*) ++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` ++ ;; #( ++ no|'') ++ ;; #( ++ *) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 ++$as_echo "${with_libtool_sysroot}" >&6; } ++ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 ++ ;; ++esac ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 ++$as_echo "${lt_sysroot:-no}" >&6; } ++ ++ + + + +@@ -7120,6 +7500,123 @@ esac + + need_locks="$enable_libtool_lock" + ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. ++set dummy ${ac_tool_prefix}mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$MANIFEST_TOOL"; then ++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL ++if test -n "$MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 ++$as_echo "$MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then ++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL ++ # Extract the first word of "mt", so it can be a program name with args. ++set dummy mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_MANIFEST_TOOL"; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL ++if test -n "$ac_ct_MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 ++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_MANIFEST_TOOL" = x; then ++ MANIFEST_TOOL=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL ++ fi ++else ++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" ++fi ++ ++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 ++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } ++if test "${lt_cv_path_mainfest_tool+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_path_mainfest_tool=no ++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 ++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out ++ cat conftest.err >&5 ++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then ++ lt_cv_path_mainfest_tool=yes ++ fi ++ rm -f conftest* ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 ++$as_echo "$lt_cv_path_mainfest_tool" >&6; } ++if test "x$lt_cv_path_mainfest_tool" != xyes; then ++ MANIFEST_TOOL=: ++fi ++ ++ ++ ++ ++ + + case $host_os in + rhapsody* | darwin*) +@@ -7683,6 +8180,8 @@ _LT_EOF + $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 + echo "$AR cru libconftest.a conftest.o" >&5 + $AR cru libconftest.a conftest.o 2>&5 ++ echo "$RANLIB libconftest.a" >&5 ++ $RANLIB libconftest.a 2>&5 + cat > conftest.c << _LT_EOF + int main() { return 0;} + _LT_EOF +@@ -7878,7 +8377,8 @@ fi + LIBTOOL_DEPS="$ltmain" + + # Always use our own libtool. +-LIBTOOL='$(SHELL) $(top_builddir)/libtool' ++LIBTOOL='$(SHELL) $(top_builddir)' ++LIBTOOL="$LIBTOOL/${host_alias}-libtool" + + + +@@ -7967,7 +8467,7 @@ aix3*) + esac + + # Global variables: +-ofile=libtool ++ofile=${host_alias}-libtool + can_build_shared=yes + + # All known linkers require a `.a' archive for static linking (except MSVC, +@@ -8265,8 +8765,6 @@ fi + lt_prog_compiler_pic= + lt_prog_compiler_static= + +-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +-$as_echo_n "checking for $compiler option to produce PIC... " >&6; } + + if test "$GCC" = yes; then + lt_prog_compiler_wl='-Wl,' +@@ -8432,6 +8930,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + lt_prog_compiler_pic='--shared' + lt_prog_compiler_static='--static' + ;; ++ nagfor*) ++ # NAG Fortran compiler ++ lt_prog_compiler_wl='-Wl,-Wl,,' ++ lt_prog_compiler_pic='-PIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; + pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) +@@ -8494,7 +8998,7 @@ $as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + case $cc_basename in +- f77* | f90* | f95*) ++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) + lt_prog_compiler_wl='-Qoption ld ';; + *) + lt_prog_compiler_wl='-Wl,';; +@@ -8551,13 +9055,17 @@ case $host_os in + lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" + ;; + esac +-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 +-$as_echo "$lt_prog_compiler_pic" >&6; } +- +- +- +- + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 ++$as_echo_n "checking for $compiler option to produce PIC... " >&6; } ++if test "${lt_cv_prog_compiler_pic+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 ++$as_echo "$lt_cv_prog_compiler_pic" >&6; } ++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic + + # + # Check to make sure the PIC flag actually works. +@@ -8618,6 +9126,11 @@ fi + + + ++ ++ ++ ++ ++ + # + # Check to make sure the static flag actually works. + # +@@ -8968,7 +9481,8 @@ _LT_EOF + allow_undefined_flag=unsupported + always_export_symbols=no + enable_shared_with_static_runtimes=yes +- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' ++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +@@ -9067,12 +9581,12 @@ _LT_EOF + whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' + hardcode_libdir_flag_spec= + hardcode_libdir_flag_spec_ld='-rpath $libdir' +- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' ++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ +- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' ++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + ;; + esac +@@ -9086,8 +9600,8 @@ _LT_EOF + archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + 
;; + +@@ -9105,8 +9619,8 @@ _LT_EOF + + _LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -9152,8 +9666,8 @@ _LT_EOF + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -9283,7 +9797,13 @@ _LT_EOF + allow_undefined_flag='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9296,22 +9816,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" +@@ -9323,7 +9850,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + else + # Determine the default libpath from the value encoded in an + # empty executable. 
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9336,22 +9869,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, +@@ -9396,20 +9936,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. +- hardcode_libdir_flag_spec=' ' +- allow_undefined_flag=unsupported +- # Tell ltmain to make .lib files, not .a files. +- libext=lib +- # Tell ltmain to make .dll files, not .so files. +- shrext_cmds=".dll" +- # FIXME: Setting linknames here is a bad hack. +- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' +- # The linker will automatically build a .lib file if we build a DLL. +- old_archive_from_new_cmds='true' +- # FIXME: Should let the user specify the lib program. +- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' +- fix_srcfile_path='`cygpath -w "$srcfile"`' +- enable_shared_with_static_runtimes=yes ++ case $cc_basename in ++ cl*) ++ # Native MSVC ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ always_export_symbols=yes ++ file_list_spec='@' ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. 
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' ++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; ++ else ++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; ++ fi~ ++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ ++ linknames=' ++ # The linker will not automatically build a static lib if we build a DLL. ++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' ++ enable_shared_with_static_runtimes=yes ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ # Don't use ranlib ++ old_postinstall_cmds='chmod 644 $oldlib' ++ postlink_cmds='lt_outputfile="@OUTPUT@"~ ++ lt_tool_outputfile="@TOOL_OUTPUT@"~ ++ case $lt_outputfile in ++ *.exe|*.EXE) ;; ++ *) ++ lt_outputfile="$lt_outputfile.exe" ++ lt_tool_outputfile="$lt_tool_outputfile.exe" ++ ;; ++ esac~ ++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then ++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; ++ $RM "$lt_outputfile.manifest"; ++ fi' ++ ;; ++ *) ++ # Assume MSVC wrapper ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' ++ # The linker will automatically build a .lib file if we build a DLL. ++ old_archive_from_new_cmds='true' ++ # FIXME: Should let the user specify the lib program. ++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' ++ enable_shared_with_static_runtimes=yes ++ ;; ++ esac + ;; + + darwin* | rhapsody*) +@@ -9470,7 +10053,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
+ freebsd* | dragonfly*) +- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no +@@ -9478,7 +10061,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux9*) + if test "$GCC" = yes; then +- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi +@@ -9494,7 +10077,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux10*) + if test "$GCC" = yes && test "$with_gnu_ld" = no; then +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi +@@ -9518,10 +10101,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else +@@ -9600,23 +10183,36 @@ fi + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. +- save_LDFLAGS="$LDFLAGS" +- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ # This should be the same for all languages, so no per-tag cache variable. ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 ++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } ++if test "${lt_cv_irix_exported_symbol+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ +-int foo(void) {} ++int foo (void) { return 0; } + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' +- ++ lt_cv_irix_exported_symbol=yes ++else ++ lt_cv_irix_exported_symbol=no + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +- LDFLAGS="$save_LDFLAGS" ++ LDFLAGS="$save_LDFLAGS" ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 ++$as_echo "$lt_cv_irix_exported_symbol" >&6; } ++ if test "$lt_cv_irix_exported_symbol" = yes; then ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' ++ fi + else + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' +@@ -9701,7 +10297,7 @@ rm -f core conftest.err conftest.$ac_objext \ + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' +- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + else + allow_undefined_flag=' -expect_unresolved \*' +@@ -9720,9 +10316,9 @@ rm -f core conftest.err conftest.$ac_objext \ + no_undefined_flag=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' +- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ +- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) +@@ -10298,8 
+10894,9 @@ cygwin* | mingw* | pw32* | cegcc*) + need_version=no + need_lib_prefix=no + +- case $GCC,$host_os in +- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) ++ case $GCC,$cc_basename in ++ yes,*) ++ # gcc + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ +@@ -10332,13 +10929,71 @@ cygwin* | mingw* | pw32* | cegcc*) + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + esac ++ dynamic_linker='Win32 ld.exe' ++ ;; ++ ++ *,cl*) ++ # Native MSVC ++ libname_spec='$name' ++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ library_names_spec='${libname}.dll.lib' ++ ++ case $build_os in ++ mingw*) ++ sys_lib_search_path_spec= ++ lt_save_ifs=$IFS ++ IFS=';' ++ for lt_path in $LIB ++ do ++ IFS=$lt_save_ifs ++ # Let DOS variable expansion print the short 8.3 style file name. ++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` ++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" ++ done ++ IFS=$lt_save_ifs ++ # Convert to MSYS style. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ++ ;; ++ cygwin*) ++ # Convert to unix form, then to dos form, then back to unix form ++ # but this time dos style (no spaces!) so that the unix form looks ++ # like /cygdrive/c/PROGRA~1:/cygdr... ++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` ++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` ++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ ;; ++ *) ++ sys_lib_search_path_spec="$LIB" ++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then ++ # It is most probably a Windows format PATH. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` ++ else ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ fi ++ # FIXME: find the short name or the path components, as spaces are ++ # common. (e.g. "Program Files" -> "PROGRA~1") ++ ;; ++ esac ++ ++ # DLL is installed to $(libdir)/../bin by postinstall_cmds ++ postinstall_cmds='base_file=`basename \${file}`~ ++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ ++ dldir=$destdir/`dirname \$dlpath`~ ++ test -d \$dldir || mkdir -p \$dldir~ ++ $install_prog $dir/$dlname \$dldir/$dlname' ++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ ++ dlpath=$dir/\$dldll~ ++ $RM \$dlpath' ++ shlibpath_overrides_runpath=yes ++ dynamic_linker='Win32 link.exe' + ;; + + *) ++ # Assume MSVC wrapper + library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ++ dynamic_linker='Win32 ld.exe' + ;; + esac +- dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . 
and the directory the executable is in + shlibpath_var=PATH + ;; +@@ -10430,7 +11085,7 @@ haiku*) + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LIBRARY_PATH + shlibpath_overrides_runpath=yes +- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' ++ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' + hardcode_into_libs=yes + ;; + +@@ -11270,10 +11925,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -11376,10 +12031,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -15436,13 +16091,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' + lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' + lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' + lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' + reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' + reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' + OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' + deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' + file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' ++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' ++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' ++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' ++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' + AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' + AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' ++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' + STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' + RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' + old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' +@@ -15457,14 +16119,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de + lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED 
"$delay_single_quote_subst"`' ++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' ++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' + objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' + MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' +-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' + lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' + need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' ++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' + DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' + NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' + LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' +@@ -15497,12 +16162,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q + hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' + inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' + link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' +-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' + always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' + export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' + exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' + include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' + prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' ++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' + file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' + variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' + need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' +@@ -15557,8 +16222,13 @@ reload_flag \ + OBJDUMP \ + deplibs_check_method \ + file_magic_cmd \ ++file_magic_glob \ ++want_nocaseglob \ ++DLLTOOL \ ++sharedlib_from_linklib_cmd \ + AR \ + AR_FLAGS \ ++archiver_list_spec \ + STRIP \ + RANLIB \ + CC \ +@@ -15568,12 +16238,14 @@ lt_cv_sys_global_symbol_pipe \ + lt_cv_sys_global_symbol_to_cdecl \ + lt_cv_sys_global_symbol_to_c_name_address \ + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ ++nm_file_list_spec \ + lt_prog_compiler_no_builtin_flag \ +-lt_prog_compiler_wl \ + lt_prog_compiler_pic \ ++lt_prog_compiler_wl \ + lt_prog_compiler_static \ + lt_cv_prog_compiler_c_o \ + need_locks \ ++MANIFEST_TOOL \ + DSYMUTIL \ + NMEDIT \ + LIPO \ +@@ -15589,7 +16261,6 @@ no_undefined_flag \ + hardcode_libdir_flag_spec \ + hardcode_libdir_flag_spec_ld \ + hardcode_libdir_separator \ +-fix_srcfile_path \ + exclude_expsyms \ + include_expsyms \ + file_list_spec \ +@@ -15625,6 +16296,7 @@ module_cmds \ + module_expsym_cmds \ + export_symbols_cmds \ + prelink_cmds \ ++postlink_cmds \ + postinstall_cmds \ + postuninstall_cmds \ + 
finish_cmds \ +@@ -16382,7 +17054,8 @@ $as_echo X"$file" | + # NOTE: Changes made to this file will be lost: look at ltmain.sh. + # + # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. ++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, ++# Inc. + # Written by Gordon Matzigkeit, 1996 + # + # This file is part of GNU Libtool. +@@ -16485,19 +17158,42 @@ SP2NL=$lt_lt_SP2NL + # turn newlines into spaces. + NL2SP=$lt_lt_NL2SP + ++# convert \$build file names to \$host format. ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++ ++# convert \$build files to toolchain format. ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++ + # An object symbol dumper. + OBJDUMP=$lt_OBJDUMP + + # Method to check whether dependent libraries are shared objects. + deplibs_check_method=$lt_deplibs_check_method + +-# Command to use when deplibs_check_method == "file_magic". ++# Command to use when deplibs_check_method = "file_magic". + file_magic_cmd=$lt_file_magic_cmd + ++# How to find potential files when deplibs_check_method = "file_magic". ++file_magic_glob=$lt_file_magic_glob ++ ++# Find potential files using nocaseglob when deplibs_check_method = "file_magic". ++want_nocaseglob=$lt_want_nocaseglob ++ ++# DLL creation program. ++DLLTOOL=$lt_DLLTOOL ++ ++# Command to associate shared and link libraries. ++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd ++ + # The archiver. + AR=$lt_AR ++ ++# Flags to create an archive. + AR_FLAGS=$lt_AR_FLAGS + ++# How to feed a file listing to the archiver. ++archiver_list_spec=$lt_archiver_list_spec ++ + # A symbol stripping program. + STRIP=$lt_STRIP + +@@ -16527,6 +17223,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address + # Transform the output of nm in a C name address pair when lib prefix is needed. + global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix + ++# Specify filename containing input files for \$NM. ++nm_file_list_spec=$lt_nm_file_list_spec ++ ++# The root where to search for dependent libraries,and in which our libraries should be installed. ++lt_sysroot=$lt_sysroot ++ + # The name of the directory that contains temporary libtool files. + objdir=$objdir + +@@ -16536,6 +17238,9 @@ MAGIC_CMD=$MAGIC_CMD + # Must we lock files when doing compilation? + need_locks=$lt_need_locks + ++# Manifest tool. ++MANIFEST_TOOL=$lt_MANIFEST_TOOL ++ + # Tool to manipulate archived DWARF debug symbol files on Mac OS X. + DSYMUTIL=$lt_DSYMUTIL + +@@ -16650,12 +17355,12 @@ with_gcc=$GCC + # Compiler flag to turn off builtin functions. + no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag + +-# How to pass a linker flag through the compiler. +-wl=$lt_lt_prog_compiler_wl +- + # Additional compiler flags for building library objects. + pic_flag=$lt_lt_prog_compiler_pic + ++# How to pass a linker flag through the compiler. ++wl=$lt_lt_prog_compiler_wl ++ + # Compiler flag to prevent dynamic linking. + link_static_flag=$lt_lt_prog_compiler_static + +@@ -16742,9 +17447,6 @@ inherit_rpath=$inherit_rpath + # Whether libtool must link a program against all its dependency libraries. + link_all_deplibs=$link_all_deplibs + +-# Fix the shell variable \$srcfile for the compiler. +-fix_srcfile_path=$lt_fix_srcfile_path +- + # Set to "yes" if exported symbols are required. 
+ always_export_symbols=$always_export_symbols + +@@ -16760,6 +17462,9 @@ include_expsyms=$lt_include_expsyms + # Commands necessary for linking programs (against libraries) with templates. + prelink_cmds=$lt_prelink_cmds + ++# Commands necessary for finishing linking programs. ++postlink_cmds=$lt_postlink_cmds ++ + # Specify filename containing input files. + file_list_spec=$lt_file_list_spec + +@@ -16792,210 +17497,169 @@ ltmain="$ac_aux_dir/ltmain.sh" + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? +- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- case $xsi_shell in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result="${1##*/}" +-} +- +-# func_dirname_and_basename file append nondir_replacement +-# perform func_basename and func_dirname in a single function +-# call: +-# dirname: Compute the dirname of FILE. If nonempty, +-# add APPEND to the result, otherwise set result +-# to NONDIR_REPLACEMENT. +-# value returned in "$func_dirname_result" +-# basename: Compute filename of FILE. +-# value retuned in "$func_basename_result" +-# Implementation must be kept synchronized with func_dirname +-# and func_basename. For efficiency, we do not delegate to +-# those functions but instead duplicate the functionality here. +-func_dirname_and_basename () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +- func_basename_result="${1##*/}" +-} +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-func_stripname () +-{ +- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are +- # positional parameters, so assign one to ordinary parameter first. +- func_stripname_result=${3} +- func_stripname_result=${func_stripname_result#"${1}"} +- func_stripname_result=${func_stripname_result%"${2}"} +-} +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=${1%%=*} +- func_opt_split_arg=${1#*=} +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- case ${1} in +- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; +- *) func_lo2o_result=${1} ;; +- esac +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=${1%.*}.lo +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=$(( $* )) +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=${#1} +-} +- +-_LT_EOF +- ;; +- *) # Bourne compatible functions. +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- # Extract subdirectory from the argument. 
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` +- if test "X$func_dirname_result" = "X${1}"; then +- func_dirname_result="${3}" +- else +- func_dirname_result="$func_dirname_result${2}" +- fi +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result=`$ECHO "${1}" | $SED "$basename"` +-} +- +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-# func_strip_suffix prefix name +-func_stripname () +-{ +- case ${2} in +- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; +- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; +- esac +-} +- +-# sed scripts: +-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' +-my_sed_long_arg='1s/^-[^=]*=//' +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` +- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=`expr "$@"` +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` +-} +- +-_LT_EOF +-esac +- +-case $lt_shell_append in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1+=\$2" +-} +-_LT_EOF +- ;; +- *) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1=\$$1\$2" +-} +- +-_LT_EOF +- ;; +- esac +- +- +- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- mv -f "$cfgfile" "$ofile" || ++ sed '$q' "$ltmain" >> "$cfgfile" \ ++ || (rm -f "$cfgfile"; exit 1) ++ ++ if test x"$xsi_shell" = xyes; then ++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ ++func_dirname ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_basename ()$/,/^} # func_basename /c\ ++func_basename ()\ ++{\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ ++func_dirname_and_basename ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ ++func_stripname ()\ ++{\ ++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ ++\ # positional parameters, so assign one to ordinary parameter first.\ ++\ func_stripname_result=${3}\ ++\ func_stripname_result=${func_stripname_result#"${1}"}\ ++\ func_stripname_result=${func_stripname_result%"${2}"}\ ++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ ++func_split_long_opt ()\ ++{\ ++\ func_split_long_opt_name=${1%%=*}\ ++\ func_split_long_opt_arg=${1#*=}\ ++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ ++func_split_short_opt ()\ ++{\ ++\ func_split_short_opt_arg=${1#??}\ ++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ ++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ ++func_lo2o ()\ ++{\ ++\ case ${1} in\ ++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ ++\ *) func_lo2o_result=${1} ;;\ ++\ esac\ ++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_xform ()$/,/^} # func_xform /c\ ++func_xform ()\ ++{\ ++ func_xform_result=${1%.*}.lo\ ++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_arith ()$/,/^} # func_arith /c\ ++func_arith ()\ ++{\ ++ func_arith_result=$(( $* ))\ ++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_len ()$/,/^} # func_len /c\ ++func_len ()\ ++{\ ++ func_len_result=${#1}\ ++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++fi ++ ++if test x"$lt_shell_append" = xyes; then ++ sed -e '/^func_append ()$/,/^} # func_append /c\ ++func_append ()\ ++{\ ++ eval "${1}+=\\${2}"\ ++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ ++func_append_quoted ()\ ++{\ ++\ func_quote_for_eval "${2}"\ ++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ ++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ # Save a `func_append' function call where possible by direct use of '+=' ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++else ++ # Save a `func_append' function call even when '+=' is not available ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++fi ++ ++if test x"$_lt_function_replace_fail" = x":"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 ++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} ++fi ++ ++ ++ mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" + +diff --git a/gas/configure b/gas/configure +index e574cb8514..a36f1ae161 100755 +--- a/gas/configure ++++ b/gas/configure +@@ -650,8 +650,11 @@ OTOOL + LIPO + NMEDIT + DSYMUTIL ++MANIFEST_TOOL + RANLIB ++ac_ct_AR + AR ++DLLTOOL + OBJDUMP + LN_S + NM +@@ -763,6 +766,7 @@ enable_static + with_pic + enable_fast_install + with_gnu_ld ++with_libtool_sysroot + enable_libtool_lock + enable_plugins + enable_largefile +@@ -4921,8 +4925,8 @@ esac + + + +-macro_version='2.2.7a' +-macro_revision='1.3134' ++macro_version='2.4' ++macro_revision='1.3293' + + + +@@ -4962,7 +4966,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 + $as_echo_n "checking how to print strings... " >&6; } + # Test print first, because it will be a builtin if present. +-if test "X`print -r -- -n 2>/dev/null`" = X-n && \ ++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ + test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then + ECHO='print -r --' + elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then +@@ -5648,8 +5652,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... 
" >&6; + # Try some XSI features + xsi_shell=no + ( _lt_dummy="a/b/c" +- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ +- = c,a/b,, \ ++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ ++ = c,a/b,b/c, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +@@ -5698,6 +5702,80 @@ esac + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 ++$as_echo_n "checking how to convert $build file names to $host format... " >&6; } ++if test "${lt_cv_to_host_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ++ ;; ++ esac ++ ;; ++ *-*-cygwin* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ++ ;; ++ esac ++ ;; ++ * ) # unhandled hosts (and "normal" native builds) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++esac ++ ++fi ++ ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 ++$as_echo "$lt_cv_to_host_file_cmd" >&6; } ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 ++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } ++if test "${lt_cv_to_tool_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ #assume ordinary cross tools, or native build. ++lt_cv_to_tool_file_cmd=func_convert_file_noop ++case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ esac ++ ;; ++esac ++ ++fi ++ ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 ++$as_echo "$lt_cv_to_tool_file_cmd" >&6; } ++ ++ ++ ++ ++ + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 + $as_echo_n "checking for $LD option to reload object files... " >&6; } + if test "${lt_cv_ld_reload_flag+set}" = set; then : +@@ -5714,6 +5792,11 @@ case $reload_flag in + esac + reload_cmds='$LD$reload_flag -o $output$reload_objs' + case $host_os in ++ cygwin* | mingw* | pw32* | cegcc*) ++ if test "$GCC" != yes; then ++ reload_cmds=false ++ fi ++ ;; + darwin*) + if test "$GCC" = yes; then + reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' +@@ -5882,7 +5965,8 @@ mingw* | pw32*) + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else +- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' ++ # Keep this pattern in sync with the one in func_win32_libid. 
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; +@@ -6036,6 +6120,21 @@ esac + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 + $as_echo "$lt_cv_deplibs_check_method" >&6; } ++ ++file_magic_glob= ++want_nocaseglob=no ++if test "$build" = "$host"; then ++ case $host_os in ++ mingw* | pw32*) ++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then ++ want_nocaseglob=yes ++ else ++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` ++ fi ++ ;; ++ esac ++fi ++ + file_magic_cmd=$lt_cv_file_magic_cmd + deplibs_check_method=$lt_cv_deplibs_check_method + test -z "$deplibs_check_method" && deplibs_check_method=unknown +@@ -6051,9 +6150,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. ++set dummy ${ac_tool_prefix}dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$DLLTOOL"; then ++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++DLLTOOL=$ac_cv_prog_DLLTOOL ++if test -n "$DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 ++$as_echo "$DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_DLLTOOL"; then ++ ac_ct_DLLTOOL=$DLLTOOL ++ # Extract the first word of "dlltool", so it can be a program name with args. ++set dummy dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_DLLTOOL"; then ++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_DLLTOOL="dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL ++if test -n "$ac_ct_DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 ++$as_echo "$ac_ct_DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_DLLTOOL" = x; then ++ DLLTOOL="false" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ DLLTOOL=$ac_ct_DLLTOOL ++ fi ++else ++ DLLTOOL="$ac_cv_prog_DLLTOOL" ++fi ++ ++test -z "$DLLTOOL" && DLLTOOL=dlltool ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 ++$as_echo_n "checking how to associate runtime and link libraries... " >&6; } ++if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_sharedlib_from_linklib_cmd='unknown' ++ ++case $host_os in ++cygwin* | mingw* | pw32* | cegcc*) ++ # two different shell functions defined in ltmain.sh ++ # decide which to use based on capabilities of $DLLTOOL ++ case `$DLLTOOL --help 2>&1` in ++ *--identify-strict*) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ++ ;; ++ *) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ++ ;; ++ esac ++ ;; ++*) ++ # fallback: assume linklib IS sharedlib ++ lt_cv_sharedlib_from_linklib_cmd="$ECHO" ++ ;; ++esac ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 ++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } ++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd ++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO ++ ++ ++ ++ ++ ++ ++ + if test -n "$ac_tool_prefix"; then +- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. +-set dummy ${ac_tool_prefix}ar; ac_word=$2 ++ for ac_prog in ar ++ do ++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. ++set dummy $ac_tool_prefix$ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_AR+set}" = set; then : +@@ -6069,7 +6321,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_AR="${ac_tool_prefix}ar" ++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6089,11 +6341,15 @@ $as_echo "no" >&6; } + fi + + ++ test -n "$AR" && break ++ done + fi +-if test -z "$ac_cv_prog_AR"; then ++if test -z "$AR"; then + ac_ct_AR=$AR +- # Extract the first word of "ar", so it can be a program name with args. +-set dummy ar; ac_word=$2 ++ for ac_prog in ar ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. 
++set dummy $ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : +@@ -6109,7 +6365,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_ac_ct_AR="ar" ++ ac_cv_prog_ac_ct_AR="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6128,6 +6384,10 @@ else + $as_echo "no" >&6; } + fi + ++ ++ test -n "$ac_ct_AR" && break ++done ++ + if test "x$ac_ct_AR" = x; then + AR="false" + else +@@ -6139,12 +6399,10 @@ ac_tool_warned=yes ;; + esac + AR=$ac_ct_AR + fi +-else +- AR="$ac_cv_prog_AR" + fi + +-test -z "$AR" && AR=ar +-test -z "$AR_FLAGS" && AR_FLAGS=cru ++: ${AR=ar} ++: ${AR_FLAGS=cru} + + + +@@ -6156,6 +6414,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 ++$as_echo_n "checking for archiver @FILE support... " >&6; } ++if test "${lt_cv_ar_at_file+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_ar_at_file=no ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ echo conftest.$ac_objext > conftest.lst ++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -eq 0; then ++ # Ensure the archiver fails upon bogus file names. ++ rm -f conftest.$ac_objext libconftest.a ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -ne 0; then ++ lt_cv_ar_at_file=@ ++ fi ++ fi ++ rm -f conftest.* libconftest.a ++ ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 ++$as_echo "$lt_cv_ar_at_file" >&6; } ++ ++if test "x$lt_cv_ar_at_file" = xno; then ++ archiver_list_spec= ++else ++ archiver_list_spec=$lt_cv_ar_at_file ++fi ++ ++ ++ ++ ++ ++ ++ + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. 
+ set dummy ${ac_tool_prefix}strip; ac_word=$2 +@@ -6490,8 +6806,8 @@ esac + lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + + # Transform an extracted symbol line into symbol name and symbol address +-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" +-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + + # Handle CRLF in mingw tool chain + opt_cr= +@@ -6527,6 +6843,7 @@ for ac_symprfx in "" "_"; do + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi ++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" + + # Check to see that the pipe works correctly. + pipe_works=no +@@ -6568,6 +6885,18 @@ _LT_EOF + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext ++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ ++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) ++/* DATA imports from DLLs on WIN32 con't be const, because runtime ++ relocations are performed -- see ld's documentation on pseudo-relocs. */ ++# define LT_DLSYM_CONST ++#elif defined(__osf__) ++/* This system does not cope well with relocations in const data. */ ++# define LT_DLSYM_CONST ++#else ++# define LT_DLSYM_CONST const ++#endif ++ + #ifdef __cplusplus + extern "C" { + #endif +@@ -6579,7 +6908,7 @@ _LT_EOF + cat <<_LT_EOF >> conftest.$ac_ext + + /* The mapping between symbol names and symbols. */ +-const struct { ++LT_DLSYM_CONST struct { + const char *name; + void *address; + } +@@ -6605,8 +6934,8 @@ static const void *lt_preloaded_setup() { + _LT_EOF + # Now try linking the two files. + mv conftest.$ac_objext conftstm.$ac_objext +- lt_save_LIBS="$LIBS" +- lt_save_CFLAGS="$CFLAGS" ++ lt_globsym_save_LIBS=$LIBS ++ lt_globsym_save_CFLAGS=$CFLAGS + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 +@@ -6616,8 +6945,8 @@ _LT_EOF + test $ac_status = 0; } && test -s conftest${ac_exeext}; then + pipe_works=yes + fi +- LIBS="$lt_save_LIBS" +- CFLAGS="$lt_save_CFLAGS" ++ LIBS=$lt_globsym_save_LIBS ++ CFLAGS=$lt_globsym_save_CFLAGS + else + echo "cannot find nm_test_func in $nlist" >&5 + fi +@@ -6654,6 +6983,21 @@ else + $as_echo "ok" >&6; } + fi + ++# Response file support. 
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then ++ nm_file_list_spec='@' ++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then ++ nm_file_list_spec='@' ++fi ++ ++ ++ ++ ++ ++ ++ ++ ++ + + + +@@ -6671,6 +7015,40 @@ fi + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 ++$as_echo_n "checking for sysroot... " >&6; } ++ ++# Check whether --with-libtool-sysroot was given. ++if test "${with_libtool_sysroot+set}" = set; then : ++ withval=$with_libtool_sysroot; ++else ++ with_libtool_sysroot=no ++fi ++ ++ ++lt_sysroot= ++case ${with_libtool_sysroot} in #( ++ yes) ++ if test "$GCC" = yes; then ++ lt_sysroot=`$CC --print-sysroot 2>/dev/null` ++ fi ++ ;; #( ++ /*) ++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` ++ ;; #( ++ no|'') ++ ;; #( ++ *) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 ++$as_echo "${with_libtool_sysroot}" >&6; } ++ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 ++ ;; ++esac ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 ++$as_echo "${lt_sysroot:-no}" >&6; } ++ ++ + + + +@@ -6881,6 +7259,123 @@ esac + + need_locks="$enable_libtool_lock" + ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. ++set dummy ${ac_tool_prefix}mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$MANIFEST_TOOL"; then ++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL ++if test -n "$MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 ++$as_echo "$MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then ++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL ++ # Extract the first word of "mt", so it can be a program name with args. ++set dummy mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_MANIFEST_TOOL"; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL ++if test -n "$ac_ct_MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 ++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_MANIFEST_TOOL" = x; then ++ MANIFEST_TOOL=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL ++ fi ++else ++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" ++fi ++ ++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 ++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } ++if test "${lt_cv_path_mainfest_tool+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_path_mainfest_tool=no ++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 ++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out ++ cat conftest.err >&5 ++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then ++ lt_cv_path_mainfest_tool=yes ++ fi ++ rm -f conftest* ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 ++$as_echo "$lt_cv_path_mainfest_tool" >&6; } ++if test "x$lt_cv_path_mainfest_tool" != xyes; then ++ MANIFEST_TOOL=: ++fi ++ ++ ++ ++ ++ + + case $host_os in + rhapsody* | darwin*) +@@ -7444,6 +7939,8 @@ _LT_EOF + $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 + echo "$AR cru libconftest.a conftest.o" >&5 + $AR cru libconftest.a conftest.o 2>&5 ++ echo "$RANLIB libconftest.a" >&5 ++ $RANLIB libconftest.a 2>&5 + cat > conftest.c << _LT_EOF + int main() { return 0;} + _LT_EOF +@@ -7639,7 +8136,8 @@ fi + LIBTOOL_DEPS="$ltmain" + + # Always use our own libtool. +-LIBTOOL='$(SHELL) $(top_builddir)/libtool' ++LIBTOOL='$(SHELL) $(top_builddir)' ++LIBTOOL="$LIBTOOL/${host_alias}-libtool" + + + +@@ -7728,7 +8226,7 @@ aix3*) + esac + + # Global variables: +-ofile=libtool ++ofile=${host_alias}-libtool + can_build_shared=yes + + # All known linkers require a `.a' archive for static linking (except MSVC, +@@ -8026,8 +8524,6 @@ fi + lt_prog_compiler_pic= + lt_prog_compiler_static= + +-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +-$as_echo_n "checking for $compiler option to produce PIC... " >&6; } + + if test "$GCC" = yes; then + lt_prog_compiler_wl='-Wl,' +@@ -8193,6 +8689,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + lt_prog_compiler_pic='--shared' + lt_prog_compiler_static='--static' + ;; ++ nagfor*) ++ # NAG Fortran compiler ++ lt_prog_compiler_wl='-Wl,-Wl,,' ++ lt_prog_compiler_pic='-PIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; + pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) +@@ -8255,7 +8757,7 @@ $as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + case $cc_basename in +- f77* | f90* | f95*) ++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) + lt_prog_compiler_wl='-Qoption ld ';; + *) + lt_prog_compiler_wl='-Wl,';; +@@ -8312,13 +8814,17 @@ case $host_os in + lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" + ;; + esac +-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 +-$as_echo "$lt_prog_compiler_pic" >&6; } +- +- +- +- + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 ++$as_echo_n "checking for $compiler option to produce PIC... " >&6; } ++if test "${lt_cv_prog_compiler_pic+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 ++$as_echo "$lt_cv_prog_compiler_pic" >&6; } ++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic + + # + # Check to make sure the PIC flag actually works. +@@ -8379,6 +8885,11 @@ fi + + + ++ ++ ++ ++ ++ + # + # Check to make sure the static flag actually works. + # +@@ -8729,7 +9240,8 @@ _LT_EOF + allow_undefined_flag=unsupported + always_export_symbols=no + enable_shared_with_static_runtimes=yes +- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' ++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +@@ -8828,12 +9340,12 @@ _LT_EOF + whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' + hardcode_libdir_flag_spec= + hardcode_libdir_flag_spec_ld='-rpath $libdir' +- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' ++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ +- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' ++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + ;; + esac +@@ -8847,8 +9359,8 @@ _LT_EOF + archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + 
;; + +@@ -8866,8 +9378,8 @@ _LT_EOF + + _LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -8913,8 +9425,8 @@ _LT_EOF + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -9044,7 +9556,13 @@ _LT_EOF + allow_undefined_flag='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9057,22 +9575,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" +@@ -9084,7 +9609,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + else + # Determine the default libpath from the value encoded in an + # empty executable. 
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9097,22 +9628,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, +@@ -9157,20 +9695,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. +- hardcode_libdir_flag_spec=' ' +- allow_undefined_flag=unsupported +- # Tell ltmain to make .lib files, not .a files. +- libext=lib +- # Tell ltmain to make .dll files, not .so files. +- shrext_cmds=".dll" +- # FIXME: Setting linknames here is a bad hack. +- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' +- # The linker will automatically build a .lib file if we build a DLL. +- old_archive_from_new_cmds='true' +- # FIXME: Should let the user specify the lib program. +- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' +- fix_srcfile_path='`cygpath -w "$srcfile"`' +- enable_shared_with_static_runtimes=yes ++ case $cc_basename in ++ cl*) ++ # Native MSVC ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ always_export_symbols=yes ++ file_list_spec='@' ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. 
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' ++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; ++ else ++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; ++ fi~ ++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ ++ linknames=' ++ # The linker will not automatically build a static lib if we build a DLL. ++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' ++ enable_shared_with_static_runtimes=yes ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ # Don't use ranlib ++ old_postinstall_cmds='chmod 644 $oldlib' ++ postlink_cmds='lt_outputfile="@OUTPUT@"~ ++ lt_tool_outputfile="@TOOL_OUTPUT@"~ ++ case $lt_outputfile in ++ *.exe|*.EXE) ;; ++ *) ++ lt_outputfile="$lt_outputfile.exe" ++ lt_tool_outputfile="$lt_tool_outputfile.exe" ++ ;; ++ esac~ ++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then ++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; ++ $RM "$lt_outputfile.manifest"; ++ fi' ++ ;; ++ *) ++ # Assume MSVC wrapper ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' ++ # The linker will automatically build a .lib file if we build a DLL. ++ old_archive_from_new_cmds='true' ++ # FIXME: Should let the user specify the lib program. ++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' ++ enable_shared_with_static_runtimes=yes ++ ;; ++ esac + ;; + + darwin* | rhapsody*) +@@ -9231,7 +9812,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
+ freebsd* | dragonfly*) +- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no +@@ -9239,7 +9820,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux9*) + if test "$GCC" = yes; then +- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi +@@ -9255,7 +9836,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux10*) + if test "$GCC" = yes && test "$with_gnu_ld" = no; then +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi +@@ -9279,10 +9860,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else +@@ -9361,23 +9942,36 @@ fi + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. +- save_LDFLAGS="$LDFLAGS" +- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ # This should be the same for all languages, so no per-tag cache variable. ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 ++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } ++if test "${lt_cv_irix_exported_symbol+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ +-int foo(void) {} ++int foo (void) { return 0; } + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' +- ++ lt_cv_irix_exported_symbol=yes ++else ++ lt_cv_irix_exported_symbol=no + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +- LDFLAGS="$save_LDFLAGS" ++ LDFLAGS="$save_LDFLAGS" ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 ++$as_echo "$lt_cv_irix_exported_symbol" >&6; } ++ if test "$lt_cv_irix_exported_symbol" = yes; then ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' ++ fi + else + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' +@@ -9462,7 +10056,7 @@ rm -f core conftest.err conftest.$ac_objext \ + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' +- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + else + allow_undefined_flag=' -expect_unresolved \*' +@@ -9481,9 +10075,9 @@ rm -f core conftest.err conftest.$ac_objext \ + no_undefined_flag=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' +- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ +- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) +@@ -10059,8 
+10653,9 @@ cygwin* | mingw* | pw32* | cegcc*) + need_version=no + need_lib_prefix=no + +- case $GCC,$host_os in +- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) ++ case $GCC,$cc_basename in ++ yes,*) ++ # gcc + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ +@@ -10093,13 +10688,71 @@ cygwin* | mingw* | pw32* | cegcc*) + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + esac ++ dynamic_linker='Win32 ld.exe' ++ ;; ++ ++ *,cl*) ++ # Native MSVC ++ libname_spec='$name' ++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ library_names_spec='${libname}.dll.lib' ++ ++ case $build_os in ++ mingw*) ++ sys_lib_search_path_spec= ++ lt_save_ifs=$IFS ++ IFS=';' ++ for lt_path in $LIB ++ do ++ IFS=$lt_save_ifs ++ # Let DOS variable expansion print the short 8.3 style file name. ++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` ++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" ++ done ++ IFS=$lt_save_ifs ++ # Convert to MSYS style. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ++ ;; ++ cygwin*) ++ # Convert to unix form, then to dos form, then back to unix form ++ # but this time dos style (no spaces!) so that the unix form looks ++ # like /cygdrive/c/PROGRA~1:/cygdr... ++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` ++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` ++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ ;; ++ *) ++ sys_lib_search_path_spec="$LIB" ++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then ++ # It is most probably a Windows format PATH. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` ++ else ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ fi ++ # FIXME: find the short name or the path components, as spaces are ++ # common. (e.g. "Program Files" -> "PROGRA~1") ++ ;; ++ esac ++ ++ # DLL is installed to $(libdir)/../bin by postinstall_cmds ++ postinstall_cmds='base_file=`basename \${file}`~ ++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ ++ dldir=$destdir/`dirname \$dlpath`~ ++ test -d \$dldir || mkdir -p \$dldir~ ++ $install_prog $dir/$dlname \$dldir/$dlname' ++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ ++ dlpath=$dir/\$dldll~ ++ $RM \$dlpath' ++ shlibpath_overrides_runpath=yes ++ dynamic_linker='Win32 link.exe' + ;; + + *) ++ # Assume MSVC wrapper + library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ++ dynamic_linker='Win32 ld.exe' + ;; + esac +- dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . 
and the directory the executable is in + shlibpath_var=PATH + ;; +@@ -10191,7 +10844,7 @@ haiku*) + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LIBRARY_PATH + shlibpath_overrides_runpath=yes +- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' ++ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' + hardcode_into_libs=yes + ;; + +@@ -11031,10 +11684,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -11137,10 +11790,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -15425,13 +16078,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' + lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' + lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' + lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' + reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' + reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' + OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' + deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' + file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' ++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' ++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' ++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' ++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' + AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' + AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' ++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' + STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' + RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' + old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' +@@ -15446,14 +16106,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de + lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED 
"$delay_single_quote_subst"`' ++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' ++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' + objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' + MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' +-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' + lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' + need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' ++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' + DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' + NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' + LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' +@@ -15486,12 +16149,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q + hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' + inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' + link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' +-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' + always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' + export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' + exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' + include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' + prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' ++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' + file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' + variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' + need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' +@@ -15546,8 +16209,13 @@ reload_flag \ + OBJDUMP \ + deplibs_check_method \ + file_magic_cmd \ ++file_magic_glob \ ++want_nocaseglob \ ++DLLTOOL \ ++sharedlib_from_linklib_cmd \ + AR \ + AR_FLAGS \ ++archiver_list_spec \ + STRIP \ + RANLIB \ + CC \ +@@ -15557,12 +16225,14 @@ lt_cv_sys_global_symbol_pipe \ + lt_cv_sys_global_symbol_to_cdecl \ + lt_cv_sys_global_symbol_to_c_name_address \ + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ ++nm_file_list_spec \ + lt_prog_compiler_no_builtin_flag \ +-lt_prog_compiler_wl \ + lt_prog_compiler_pic \ ++lt_prog_compiler_wl \ + lt_prog_compiler_static \ + lt_cv_prog_compiler_c_o \ + need_locks \ ++MANIFEST_TOOL \ + DSYMUTIL \ + NMEDIT \ + LIPO \ +@@ -15578,7 +16248,6 @@ no_undefined_flag \ + hardcode_libdir_flag_spec \ + hardcode_libdir_flag_spec_ld \ + hardcode_libdir_separator \ +-fix_srcfile_path \ + exclude_expsyms \ + include_expsyms \ + file_list_spec \ +@@ -15614,6 +16283,7 @@ module_cmds \ + module_expsym_cmds \ + export_symbols_cmds \ + prelink_cmds \ ++postlink_cmds \ + postinstall_cmds \ + postuninstall_cmds \ + 
finish_cmds \ +@@ -16378,7 +17048,8 @@ $as_echo X"$file" | + # NOTE: Changes made to this file will be lost: look at ltmain.sh. + # + # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. ++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, ++# Inc. + # Written by Gordon Matzigkeit, 1996 + # + # This file is part of GNU Libtool. +@@ -16481,19 +17152,42 @@ SP2NL=$lt_lt_SP2NL + # turn newlines into spaces. + NL2SP=$lt_lt_NL2SP + ++# convert \$build file names to \$host format. ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++ ++# convert \$build files to toolchain format. ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++ + # An object symbol dumper. + OBJDUMP=$lt_OBJDUMP + + # Method to check whether dependent libraries are shared objects. + deplibs_check_method=$lt_deplibs_check_method + +-# Command to use when deplibs_check_method == "file_magic". ++# Command to use when deplibs_check_method = "file_magic". + file_magic_cmd=$lt_file_magic_cmd + ++# How to find potential files when deplibs_check_method = "file_magic". ++file_magic_glob=$lt_file_magic_glob ++ ++# Find potential files using nocaseglob when deplibs_check_method = "file_magic". ++want_nocaseglob=$lt_want_nocaseglob ++ ++# DLL creation program. ++DLLTOOL=$lt_DLLTOOL ++ ++# Command to associate shared and link libraries. ++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd ++ + # The archiver. + AR=$lt_AR ++ ++# Flags to create an archive. + AR_FLAGS=$lt_AR_FLAGS + ++# How to feed a file listing to the archiver. ++archiver_list_spec=$lt_archiver_list_spec ++ + # A symbol stripping program. + STRIP=$lt_STRIP + +@@ -16523,6 +17217,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address + # Transform the output of nm in a C name address pair when lib prefix is needed. + global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix + ++# Specify filename containing input files for \$NM. ++nm_file_list_spec=$lt_nm_file_list_spec ++ ++# The root where to search for dependent libraries,and in which our libraries should be installed. ++lt_sysroot=$lt_sysroot ++ + # The name of the directory that contains temporary libtool files. + objdir=$objdir + +@@ -16532,6 +17232,9 @@ MAGIC_CMD=$MAGIC_CMD + # Must we lock files when doing compilation? + need_locks=$lt_need_locks + ++# Manifest tool. ++MANIFEST_TOOL=$lt_MANIFEST_TOOL ++ + # Tool to manipulate archived DWARF debug symbol files on Mac OS X. + DSYMUTIL=$lt_DSYMUTIL + +@@ -16646,12 +17349,12 @@ with_gcc=$GCC + # Compiler flag to turn off builtin functions. + no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag + +-# How to pass a linker flag through the compiler. +-wl=$lt_lt_prog_compiler_wl +- + # Additional compiler flags for building library objects. + pic_flag=$lt_lt_prog_compiler_pic + ++# How to pass a linker flag through the compiler. ++wl=$lt_lt_prog_compiler_wl ++ + # Compiler flag to prevent dynamic linking. + link_static_flag=$lt_lt_prog_compiler_static + +@@ -16738,9 +17441,6 @@ inherit_rpath=$inherit_rpath + # Whether libtool must link a program against all its dependency libraries. + link_all_deplibs=$link_all_deplibs + +-# Fix the shell variable \$srcfile for the compiler. +-fix_srcfile_path=$lt_fix_srcfile_path +- + # Set to "yes" if exported symbols are required. 
+ always_export_symbols=$always_export_symbols + +@@ -16756,6 +17456,9 @@ include_expsyms=$lt_include_expsyms + # Commands necessary for linking programs (against libraries) with templates. + prelink_cmds=$lt_prelink_cmds + ++# Commands necessary for finishing linking programs. ++postlink_cmds=$lt_postlink_cmds ++ + # Specify filename containing input files. + file_list_spec=$lt_file_list_spec + +@@ -16788,210 +17491,169 @@ ltmain="$ac_aux_dir/ltmain.sh" + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? +- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- case $xsi_shell in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result="${1##*/}" +-} +- +-# func_dirname_and_basename file append nondir_replacement +-# perform func_basename and func_dirname in a single function +-# call: +-# dirname: Compute the dirname of FILE. If nonempty, +-# add APPEND to the result, otherwise set result +-# to NONDIR_REPLACEMENT. +-# value returned in "$func_dirname_result" +-# basename: Compute filename of FILE. +-# value retuned in "$func_basename_result" +-# Implementation must be kept synchronized with func_dirname +-# and func_basename. For efficiency, we do not delegate to +-# those functions but instead duplicate the functionality here. +-func_dirname_and_basename () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +- func_basename_result="${1##*/}" +-} +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-func_stripname () +-{ +- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are +- # positional parameters, so assign one to ordinary parameter first. +- func_stripname_result=${3} +- func_stripname_result=${func_stripname_result#"${1}"} +- func_stripname_result=${func_stripname_result%"${2}"} +-} +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=${1%%=*} +- func_opt_split_arg=${1#*=} +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- case ${1} in +- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; +- *) func_lo2o_result=${1} ;; +- esac +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=${1%.*}.lo +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=$(( $* )) +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=${#1} +-} +- +-_LT_EOF +- ;; +- *) # Bourne compatible functions. +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- # Extract subdirectory from the argument. 
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` +- if test "X$func_dirname_result" = "X${1}"; then +- func_dirname_result="${3}" +- else +- func_dirname_result="$func_dirname_result${2}" +- fi +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result=`$ECHO "${1}" | $SED "$basename"` +-} +- +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-# func_strip_suffix prefix name +-func_stripname () +-{ +- case ${2} in +- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; +- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; +- esac +-} +- +-# sed scripts: +-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' +-my_sed_long_arg='1s/^-[^=]*=//' +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` +- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=`expr "$@"` +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` +-} +- +-_LT_EOF +-esac +- +-case $lt_shell_append in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1+=\$2" +-} +-_LT_EOF +- ;; +- *) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1=\$$1\$2" +-} +- +-_LT_EOF +- ;; +- esac +- +- +- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- mv -f "$cfgfile" "$ofile" || ++ sed '$q' "$ltmain" >> "$cfgfile" \ ++ || (rm -f "$cfgfile"; exit 1) ++ ++ if test x"$xsi_shell" = xyes; then ++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ ++func_dirname ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_basename ()$/,/^} # func_basename /c\ ++func_basename ()\ ++{\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ ++func_dirname_and_basename ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ ++func_stripname ()\ ++{\ ++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ ++\ # positional parameters, so assign one to ordinary parameter first.\ ++\ func_stripname_result=${3}\ ++\ func_stripname_result=${func_stripname_result#"${1}"}\ ++\ func_stripname_result=${func_stripname_result%"${2}"}\ ++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ ++func_split_long_opt ()\ ++{\ ++\ func_split_long_opt_name=${1%%=*}\ ++\ func_split_long_opt_arg=${1#*=}\ ++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ ++func_split_short_opt ()\ ++{\ ++\ func_split_short_opt_arg=${1#??}\ ++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ ++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ ++func_lo2o ()\ ++{\ ++\ case ${1} in\ ++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ ++\ *) func_lo2o_result=${1} ;;\ ++\ esac\ ++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_xform ()$/,/^} # func_xform /c\ ++func_xform ()\ ++{\ ++ func_xform_result=${1%.*}.lo\ ++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_arith ()$/,/^} # func_arith /c\ ++func_arith ()\ ++{\ ++ func_arith_result=$(( $* ))\ ++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_len ()$/,/^} # func_len /c\ ++func_len ()\ ++{\ ++ func_len_result=${#1}\ ++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++fi ++ ++if test x"$lt_shell_append" = xyes; then ++ sed -e '/^func_append ()$/,/^} # func_append /c\ ++func_append ()\ ++{\ ++ eval "${1}+=\\${2}"\ ++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ ++func_append_quoted ()\ ++{\ ++\ func_quote_for_eval "${2}"\ ++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ ++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ # Save a `func_append' function call where possible by direct use of '+=' ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++else ++ # Save a `func_append' function call even when '+=' is not available ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++fi ++ ++if test x"$_lt_function_replace_fail" = x":"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 ++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} ++fi ++ ++ ++ mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" + +diff --git a/gprof/configure b/gprof/configure +index 38a4c0b0e5..38d1f699c7 100755 +--- a/gprof/configure ++++ b/gprof/configure +@@ -631,8 +631,11 @@ OTOOL + LIPO + NMEDIT + DSYMUTIL ++MANIFEST_TOOL + RANLIB ++ac_ct_AR + AR ++DLLTOOL + OBJDUMP + LN_S + NM +@@ -744,6 +747,7 @@ enable_static + with_pic + enable_fast_install + with_gnu_ld ++with_libtool_sysroot + enable_libtool_lock + enable_plugins + enable_largefile +@@ -1402,6 +1406,8 @@ Optional Packages: + --with-pic try to use only PIC/non-PIC objects [default=use + both] + --with-gnu-ld assume the C compiler uses GNU ld [default=no] ++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR ++ (or the compiler's sysroot if not specified). + + Some influential environment variables: + CC C compiler command +@@ -4836,8 +4842,8 @@ esac + + + +-macro_version='2.2.7a' +-macro_revision='1.3134' ++macro_version='2.4' ++macro_revision='1.3293' + + + +@@ -4877,7 +4883,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 + $as_echo_n "checking how to print strings... " >&6; } + # Test print first, because it will be a builtin if present. +-if test "X`print -r -- -n 2>/dev/null`" = X-n && \ ++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ + test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then + ECHO='print -r --' + elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then +@@ -5563,8 +5569,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... 
" >&6; + # Try some XSI features + xsi_shell=no + ( _lt_dummy="a/b/c" +- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ +- = c,a/b,, \ ++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ ++ = c,a/b,b/c, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +@@ -5613,6 +5619,80 @@ esac + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 ++$as_echo_n "checking how to convert $build file names to $host format... " >&6; } ++if test "${lt_cv_to_host_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ++ ;; ++ esac ++ ;; ++ *-*-cygwin* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ++ ;; ++ esac ++ ;; ++ * ) # unhandled hosts (and "normal" native builds) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++esac ++ ++fi ++ ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 ++$as_echo "$lt_cv_to_host_file_cmd" >&6; } ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 ++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } ++if test "${lt_cv_to_tool_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ #assume ordinary cross tools, or native build. ++lt_cv_to_tool_file_cmd=func_convert_file_noop ++case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ esac ++ ;; ++esac ++ ++fi ++ ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 ++$as_echo "$lt_cv_to_tool_file_cmd" >&6; } ++ ++ ++ ++ ++ + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 + $as_echo_n "checking for $LD option to reload object files... " >&6; } + if test "${lt_cv_ld_reload_flag+set}" = set; then : +@@ -5629,6 +5709,11 @@ case $reload_flag in + esac + reload_cmds='$LD$reload_flag -o $output$reload_objs' + case $host_os in ++ cygwin* | mingw* | pw32* | cegcc*) ++ if test "$GCC" != yes; then ++ reload_cmds=false ++ fi ++ ;; + darwin*) + if test "$GCC" = yes; then + reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' +@@ -5797,7 +5882,8 @@ mingw* | pw32*) + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else +- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' ++ # Keep this pattern in sync with the one in func_win32_libid. 
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; +@@ -5874,11 +5960,6 @@ linux* | k*bsd*-gnu | kopensolaris*-gnu) + lt_cv_deplibs_check_method=pass_all + ;; + +-linux-uclibc*) +- lt_cv_deplibs_check_method=pass_all +- lt_cv_file_magic_test_file=`echo /lib/libuClibc-*.so` +- ;; +- + netbsd*) + if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then + lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' +@@ -5956,6 +6037,21 @@ esac + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 + $as_echo "$lt_cv_deplibs_check_method" >&6; } ++ ++file_magic_glob= ++want_nocaseglob=no ++if test "$build" = "$host"; then ++ case $host_os in ++ mingw* | pw32*) ++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then ++ want_nocaseglob=yes ++ else ++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` ++ fi ++ ;; ++ esac ++fi ++ + file_magic_cmd=$lt_cv_file_magic_cmd + deplibs_check_method=$lt_cv_deplibs_check_method + test -z "$deplibs_check_method" && deplibs_check_method=unknown +@@ -5971,9 +6067,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. ++set dummy ${ac_tool_prefix}dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$DLLTOOL"; then ++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++DLLTOOL=$ac_cv_prog_DLLTOOL ++if test -n "$DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 ++$as_echo "$DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_DLLTOOL"; then ++ ac_ct_DLLTOOL=$DLLTOOL ++ # Extract the first word of "dlltool", so it can be a program name with args. ++set dummy dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_DLLTOOL"; then ++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_DLLTOOL="dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL ++if test -n "$ac_ct_DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 ++$as_echo "$ac_ct_DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_DLLTOOL" = x; then ++ DLLTOOL="false" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ DLLTOOL=$ac_ct_DLLTOOL ++ fi ++else ++ DLLTOOL="$ac_cv_prog_DLLTOOL" ++fi ++ ++test -z "$DLLTOOL" && DLLTOOL=dlltool ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 ++$as_echo_n "checking how to associate runtime and link libraries... " >&6; } ++if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_sharedlib_from_linklib_cmd='unknown' ++ ++case $host_os in ++cygwin* | mingw* | pw32* | cegcc*) ++ # two different shell functions defined in ltmain.sh ++ # decide which to use based on capabilities of $DLLTOOL ++ case `$DLLTOOL --help 2>&1` in ++ *--identify-strict*) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ++ ;; ++ *) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ++ ;; ++ esac ++ ;; ++*) ++ # fallback: assume linklib IS sharedlib ++ lt_cv_sharedlib_from_linklib_cmd="$ECHO" ++ ;; ++esac ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 ++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } ++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd ++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO ++ ++ ++ ++ ++ ++ ++ + if test -n "$ac_tool_prefix"; then +- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. +-set dummy ${ac_tool_prefix}ar; ac_word=$2 ++ for ac_prog in ar ++ do ++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. ++set dummy $ac_tool_prefix$ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_AR+set}" = set; then : +@@ -5989,7 +6238,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_AR="${ac_tool_prefix}ar" ++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6009,11 +6258,15 @@ $as_echo "no" >&6; } + fi + + ++ test -n "$AR" && break ++ done + fi +-if test -z "$ac_cv_prog_AR"; then ++if test -z "$AR"; then + ac_ct_AR=$AR +- # Extract the first word of "ar", so it can be a program name with args. +-set dummy ar; ac_word=$2 ++ for ac_prog in ar ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. 
++set dummy $ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : +@@ -6029,7 +6282,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_ac_ct_AR="ar" ++ ac_cv_prog_ac_ct_AR="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6048,6 +6301,10 @@ else + $as_echo "no" >&6; } + fi + ++ ++ test -n "$ac_ct_AR" && break ++done ++ + if test "x$ac_ct_AR" = x; then + AR="false" + else +@@ -6059,12 +6316,10 @@ ac_tool_warned=yes ;; + esac + AR=$ac_ct_AR + fi +-else +- AR="$ac_cv_prog_AR" + fi + +-test -z "$AR" && AR=ar +-test -z "$AR_FLAGS" && AR_FLAGS=cru ++: ${AR=ar} ++: ${AR_FLAGS=cru} + + + +@@ -6076,6 +6331,64 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 ++$as_echo_n "checking for archiver @FILE support... " >&6; } ++if test "${lt_cv_ar_at_file+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_ar_at_file=no ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ echo conftest.$ac_objext > conftest.lst ++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -eq 0; then ++ # Ensure the archiver fails upon bogus file names. ++ rm -f conftest.$ac_objext libconftest.a ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -ne 0; then ++ lt_cv_ar_at_file=@ ++ fi ++ fi ++ rm -f conftest.* libconftest.a ++ ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 ++$as_echo "$lt_cv_ar_at_file" >&6; } ++ ++if test "x$lt_cv_ar_at_file" = xno; then ++ archiver_list_spec= ++else ++ archiver_list_spec=$lt_cv_ar_at_file ++fi ++ ++ ++ ++ ++ ++ ++ + if test -n "$ac_tool_prefix"; then + # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. 
+ set dummy ${ac_tool_prefix}strip; ac_word=$2 +@@ -6410,8 +6723,8 @@ esac + lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + + # Transform an extracted symbol line into symbol name and symbol address +-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" +-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + + # Handle CRLF in mingw tool chain + opt_cr= +@@ -6447,6 +6760,7 @@ for ac_symprfx in "" "_"; do + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi ++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" + + # Check to see that the pipe works correctly. + pipe_works=no +@@ -6488,6 +6802,18 @@ _LT_EOF + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext ++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ ++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) ++/* DATA imports from DLLs on WIN32 con't be const, because runtime ++ relocations are performed -- see ld's documentation on pseudo-relocs. */ ++# define LT_DLSYM_CONST ++#elif defined(__osf__) ++/* This system does not cope well with relocations in const data. */ ++# define LT_DLSYM_CONST ++#else ++# define LT_DLSYM_CONST const ++#endif ++ + #ifdef __cplusplus + extern "C" { + #endif +@@ -6499,7 +6825,7 @@ _LT_EOF + cat <<_LT_EOF >> conftest.$ac_ext + + /* The mapping between symbol names and symbols. */ +-const struct { ++LT_DLSYM_CONST struct { + const char *name; + void *address; + } +@@ -6525,8 +6851,8 @@ static const void *lt_preloaded_setup() { + _LT_EOF + # Now try linking the two files. + mv conftest.$ac_objext conftstm.$ac_objext +- lt_save_LIBS="$LIBS" +- lt_save_CFLAGS="$CFLAGS" ++ lt_globsym_save_LIBS=$LIBS ++ lt_globsym_save_CFLAGS=$CFLAGS + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 +@@ -6536,8 +6862,8 @@ _LT_EOF + test $ac_status = 0; } && test -s conftest${ac_exeext}; then + pipe_works=yes + fi +- LIBS="$lt_save_LIBS" +- CFLAGS="$lt_save_CFLAGS" ++ LIBS=$lt_globsym_save_LIBS ++ CFLAGS=$lt_globsym_save_CFLAGS + else + echo "cannot find nm_test_func in $nlist" >&5 + fi +@@ -6574,6 +6900,18 @@ else + $as_echo "ok" >&6; } + fi + ++# Response file support. 
++if test "$lt_cv_nm_interface" = "MS dumpbin"; then ++ nm_file_list_spec='@' ++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then ++ nm_file_list_spec='@' ++fi ++ ++ ++ ++ ++ ++ + + + +@@ -6595,6 +6933,43 @@ fi + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 ++$as_echo_n "checking for sysroot... " >&6; } ++ ++# Check whether --with-libtool-sysroot was given. ++if test "${with_libtool_sysroot+set}" = set; then : ++ withval=$with_libtool_sysroot; ++else ++ with_libtool_sysroot=no ++fi ++ ++ ++lt_sysroot= ++case ${with_libtool_sysroot} in #( ++ yes) ++ if test "$GCC" = yes; then ++ lt_sysroot=`$CC --print-sysroot 2>/dev/null` ++ fi ++ ;; #( ++ /*) ++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` ++ ;; #( ++ no|'') ++ ;; #( ++ *) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 ++$as_echo "${with_libtool_sysroot}" >&6; } ++ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 ++ ;; ++esac ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 ++$as_echo "${lt_sysroot:-no}" >&6; } ++ ++ ++ ++ ++ + # Check whether --enable-libtool-lock was given. + if test "${enable_libtool_lock+set}" = set; then : + enableval=$enable_libtool_lock; +@@ -6801,6 +7176,123 @@ esac + + need_locks="$enable_libtool_lock" + ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. ++set dummy ${ac_tool_prefix}mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$MANIFEST_TOOL"; then ++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL ++if test -n "$MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 ++$as_echo "$MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then ++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL ++ # Extract the first word of "mt", so it can be a program name with args. ++set dummy mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_MANIFEST_TOOL"; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL ++if test -n "$ac_ct_MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 ++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_MANIFEST_TOOL" = x; then ++ MANIFEST_TOOL=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL ++ fi ++else ++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" ++fi ++ ++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 ++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } ++if test "${lt_cv_path_mainfest_tool+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_path_mainfest_tool=no ++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 ++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out ++ cat conftest.err >&5 ++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then ++ lt_cv_path_mainfest_tool=yes ++ fi ++ rm -f conftest* ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 ++$as_echo "$lt_cv_path_mainfest_tool" >&6; } ++if test "x$lt_cv_path_mainfest_tool" != xyes; then ++ MANIFEST_TOOL=: ++fi ++ ++ ++ ++ ++ + + case $host_os in + rhapsody* | darwin*) +@@ -7364,6 +7856,8 @@ _LT_EOF + $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 + echo "$AR cru libconftest.a conftest.o" >&5 + $AR cru libconftest.a conftest.o 2>&5 ++ echo "$RANLIB libconftest.a" >&5 ++ $RANLIB libconftest.a 2>&5 + cat > conftest.c << _LT_EOF + int main() { return 0;} + _LT_EOF +@@ -7559,7 +8053,8 @@ fi + LIBTOOL_DEPS="$ltmain" + + # Always use our own libtool. +-LIBTOOL='$(SHELL) $(top_builddir)/libtool' ++LIBTOOL='$(SHELL) $(top_builddir)' ++LIBTOOL="$LIBTOOL/${host_alias}-libtool" + + + +@@ -7648,7 +8143,7 @@ aix3*) + esac + + # Global variables: +-ofile=libtool ++ofile=${host_alias}-libtool + can_build_shared=yes + + # All known linkers require a `.a' archive for static linking (except MSVC, +@@ -7946,8 +8441,6 @@ fi + lt_prog_compiler_pic= + lt_prog_compiler_static= + +-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +-$as_echo_n "checking for $compiler option to produce PIC... " >&6; } + + if test "$GCC" = yes; then + lt_prog_compiler_wl='-Wl,' +@@ -8113,6 +8606,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + lt_prog_compiler_pic='--shared' + lt_prog_compiler_static='--static' + ;; ++ nagfor*) ++ # NAG Fortran compiler ++ lt_prog_compiler_wl='-Wl,-Wl,,' ++ lt_prog_compiler_pic='-PIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; + pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) +@@ -8175,7 +8674,7 @@ $as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + case $cc_basename in +- f77* | f90* | f95*) ++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) + lt_prog_compiler_wl='-Qoption ld ';; + *) + lt_prog_compiler_wl='-Wl,';; +@@ -8232,13 +8731,17 @@ case $host_os in + lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" + ;; + esac +-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 +-$as_echo "$lt_prog_compiler_pic" >&6; } +- +- +- +- + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 ++$as_echo_n "checking for $compiler option to produce PIC... " >&6; } ++if test "${lt_cv_prog_compiler_pic+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 ++$as_echo "$lt_cv_prog_compiler_pic" >&6; } ++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic + + # + # Check to make sure the PIC flag actually works. +@@ -8299,6 +8802,11 @@ fi + + + ++ ++ ++ ++ ++ + # + # Check to make sure the static flag actually works. + # +@@ -8649,7 +9157,8 @@ _LT_EOF + allow_undefined_flag=unsupported + always_export_symbols=no + enable_shared_with_static_runtimes=yes +- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' ++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +@@ -8748,12 +9257,12 @@ _LT_EOF + whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' + hardcode_libdir_flag_spec= + hardcode_libdir_flag_spec_ld='-rpath $libdir' +- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' ++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ +- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' ++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + ;; + esac +@@ -8767,8 +9276,8 @@ _LT_EOF + archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + 
;; + +@@ -8786,8 +9295,8 @@ _LT_EOF + + _LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -8833,8 +9342,8 @@ _LT_EOF + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -8964,7 +9473,13 @@ _LT_EOF + allow_undefined_flag='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -8977,22 +9492,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" +@@ -9004,7 +9526,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + else + # Determine the default libpath from the value encoded in an + # empty executable. 
+- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9017,22 +9545,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, +@@ -9077,20 +9612,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. +- hardcode_libdir_flag_spec=' ' +- allow_undefined_flag=unsupported +- # Tell ltmain to make .lib files, not .a files. +- libext=lib +- # Tell ltmain to make .dll files, not .so files. +- shrext_cmds=".dll" +- # FIXME: Setting linknames here is a bad hack. +- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' +- # The linker will automatically build a .lib file if we build a DLL. +- old_archive_from_new_cmds='true' +- # FIXME: Should let the user specify the lib program. +- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' +- fix_srcfile_path='`cygpath -w "$srcfile"`' +- enable_shared_with_static_runtimes=yes ++ case $cc_basename in ++ cl*) ++ # Native MSVC ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ always_export_symbols=yes ++ file_list_spec='@' ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. 
++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' ++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; ++ else ++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; ++ fi~ ++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ ++ linknames=' ++ # The linker will not automatically build a static lib if we build a DLL. ++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' ++ enable_shared_with_static_runtimes=yes ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ # Don't use ranlib ++ old_postinstall_cmds='chmod 644 $oldlib' ++ postlink_cmds='lt_outputfile="@OUTPUT@"~ ++ lt_tool_outputfile="@TOOL_OUTPUT@"~ ++ case $lt_outputfile in ++ *.exe|*.EXE) ;; ++ *) ++ lt_outputfile="$lt_outputfile.exe" ++ lt_tool_outputfile="$lt_tool_outputfile.exe" ++ ;; ++ esac~ ++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then ++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; ++ $RM "$lt_outputfile.manifest"; ++ fi' ++ ;; ++ *) ++ # Assume MSVC wrapper ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' ++ # The linker will automatically build a .lib file if we build a DLL. ++ old_archive_from_new_cmds='true' ++ # FIXME: Should let the user specify the lib program. ++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' ++ enable_shared_with_static_runtimes=yes ++ ;; ++ esac + ;; + + darwin* | rhapsody*) +@@ -9151,7 +9729,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
+ freebsd* | dragonfly*) +- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no +@@ -9159,7 +9737,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux9*) + if test "$GCC" = yes; then +- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi +@@ -9175,7 +9753,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux10*) + if test "$GCC" = yes && test "$with_gnu_ld" = no; then +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi +@@ -9199,10 +9777,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else +@@ -9281,23 +9859,36 @@ fi + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. +- save_LDFLAGS="$LDFLAGS" +- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ # This should be the same for all languages, so no per-tag cache variable. ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 ++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } ++if test "${lt_cv_irix_exported_symbol+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ +-int foo(void) {} ++int foo (void) { return 0; } + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' +- ++ lt_cv_irix_exported_symbol=yes ++else ++ lt_cv_irix_exported_symbol=no + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +- LDFLAGS="$save_LDFLAGS" ++ LDFLAGS="$save_LDFLAGS" ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 ++$as_echo "$lt_cv_irix_exported_symbol" >&6; } ++ if test "$lt_cv_irix_exported_symbol" = yes; then ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' ++ fi + else + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' +@@ -9382,7 +9973,7 @@ rm -f core conftest.err conftest.$ac_objext \ + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' +- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + else + allow_undefined_flag=' -expect_unresolved \*' +@@ -9401,9 +9992,9 @@ rm -f core conftest.err conftest.$ac_objext \ + no_undefined_flag=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' +- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ +- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) +@@ -9979,8 
+10570,9 @@ cygwin* | mingw* | pw32* | cegcc*) + need_version=no + need_lib_prefix=no + +- case $GCC,$host_os in +- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) ++ case $GCC,$cc_basename in ++ yes,*) ++ # gcc + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ +@@ -10013,13 +10605,71 @@ cygwin* | mingw* | pw32* | cegcc*) + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + esac ++ dynamic_linker='Win32 ld.exe' ++ ;; ++ ++ *,cl*) ++ # Native MSVC ++ libname_spec='$name' ++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ library_names_spec='${libname}.dll.lib' ++ ++ case $build_os in ++ mingw*) ++ sys_lib_search_path_spec= ++ lt_save_ifs=$IFS ++ IFS=';' ++ for lt_path in $LIB ++ do ++ IFS=$lt_save_ifs ++ # Let DOS variable expansion print the short 8.3 style file name. ++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` ++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" ++ done ++ IFS=$lt_save_ifs ++ # Convert to MSYS style. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ++ ;; ++ cygwin*) ++ # Convert to unix form, then to dos form, then back to unix form ++ # but this time dos style (no spaces!) so that the unix form looks ++ # like /cygdrive/c/PROGRA~1:/cygdr... ++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` ++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` ++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ ;; ++ *) ++ sys_lib_search_path_spec="$LIB" ++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then ++ # It is most probably a Windows format PATH. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` ++ else ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ fi ++ # FIXME: find the short name or the path components, as spaces are ++ # common. (e.g. "Program Files" -> "PROGRA~1") ++ ;; ++ esac ++ ++ # DLL is installed to $(libdir)/../bin by postinstall_cmds ++ postinstall_cmds='base_file=`basename \${file}`~ ++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ ++ dldir=$destdir/`dirname \$dlpath`~ ++ test -d \$dldir || mkdir -p \$dldir~ ++ $install_prog $dir/$dlname \$dldir/$dlname' ++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ ++ dlpath=$dir/\$dldll~ ++ $RM \$dlpath' ++ shlibpath_overrides_runpath=yes ++ dynamic_linker='Win32 link.exe' + ;; + + *) ++ # Assume MSVC wrapper + library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ++ dynamic_linker='Win32 ld.exe' + ;; + esac +- dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . 
and the directory the executable is in + shlibpath_var=PATH + ;; +@@ -10111,7 +10761,7 @@ haiku*) + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LIBRARY_PATH + shlibpath_overrides_runpath=yes +- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' ++ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' + hardcode_into_libs=yes + ;; + +@@ -10951,10 +11601,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -11057,10 +11707,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -13005,13 +13655,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' + lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' + lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' + lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' + reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' + reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' + OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' + deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' + file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' ++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' ++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' ++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' ++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' + AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' + AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' ++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' + STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' + RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' + old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' +@@ -13026,14 +13683,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de + lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED 
"$delay_single_quote_subst"`' ++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' ++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' + objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' + MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' +-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' + lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' + need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' ++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' + DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' + NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' + LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' +@@ -13066,12 +13726,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q + hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' + inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' + link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' +-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' + always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' + export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' + exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' + include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' + prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' ++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' + file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' + variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' + need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' +@@ -13126,8 +13786,13 @@ reload_flag \ + OBJDUMP \ + deplibs_check_method \ + file_magic_cmd \ ++file_magic_glob \ ++want_nocaseglob \ ++DLLTOOL \ ++sharedlib_from_linklib_cmd \ + AR \ + AR_FLAGS \ ++archiver_list_spec \ + STRIP \ + RANLIB \ + CC \ +@@ -13137,12 +13802,14 @@ lt_cv_sys_global_symbol_pipe \ + lt_cv_sys_global_symbol_to_cdecl \ + lt_cv_sys_global_symbol_to_c_name_address \ + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ ++nm_file_list_spec \ + lt_prog_compiler_no_builtin_flag \ +-lt_prog_compiler_wl \ + lt_prog_compiler_pic \ ++lt_prog_compiler_wl \ + lt_prog_compiler_static \ + lt_cv_prog_compiler_c_o \ + need_locks \ ++MANIFEST_TOOL \ + DSYMUTIL \ + NMEDIT \ + LIPO \ +@@ -13158,7 +13825,6 @@ no_undefined_flag \ + hardcode_libdir_flag_spec \ + hardcode_libdir_flag_spec_ld \ + hardcode_libdir_separator \ +-fix_srcfile_path \ + exclude_expsyms \ + include_expsyms \ + file_list_spec \ +@@ -13194,6 +13860,7 @@ module_cmds \ + module_expsym_cmds \ + export_symbols_cmds \ + prelink_cmds \ ++postlink_cmds \ + postinstall_cmds \ + postuninstall_cmds \ + 
finish_cmds \ +@@ -13950,7 +14617,8 @@ $as_echo X"$file" | + # NOTE: Changes made to this file will be lost: look at ltmain.sh. + # + # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. ++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, ++# Inc. + # Written by Gordon Matzigkeit, 1996 + # + # This file is part of GNU Libtool. +@@ -14053,19 +14721,42 @@ SP2NL=$lt_lt_SP2NL + # turn newlines into spaces. + NL2SP=$lt_lt_NL2SP + ++# convert \$build file names to \$host format. ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++ ++# convert \$build files to toolchain format. ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++ + # An object symbol dumper. + OBJDUMP=$lt_OBJDUMP + + # Method to check whether dependent libraries are shared objects. + deplibs_check_method=$lt_deplibs_check_method + +-# Command to use when deplibs_check_method == "file_magic". ++# Command to use when deplibs_check_method = "file_magic". + file_magic_cmd=$lt_file_magic_cmd + ++# How to find potential files when deplibs_check_method = "file_magic". ++file_magic_glob=$lt_file_magic_glob ++ ++# Find potential files using nocaseglob when deplibs_check_method = "file_magic". ++want_nocaseglob=$lt_want_nocaseglob ++ ++# DLL creation program. ++DLLTOOL=$lt_DLLTOOL ++ ++# Command to associate shared and link libraries. ++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd ++ + # The archiver. + AR=$lt_AR ++ ++# Flags to create an archive. + AR_FLAGS=$lt_AR_FLAGS + ++# How to feed a file listing to the archiver. ++archiver_list_spec=$lt_archiver_list_spec ++ + # A symbol stripping program. + STRIP=$lt_STRIP + +@@ -14095,6 +14786,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address + # Transform the output of nm in a C name address pair when lib prefix is needed. + global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix + ++# Specify filename containing input files for \$NM. ++nm_file_list_spec=$lt_nm_file_list_spec ++ ++# The root where to search for dependent libraries,and in which our libraries should be installed. ++lt_sysroot=$lt_sysroot ++ + # The name of the directory that contains temporary libtool files. + objdir=$objdir + +@@ -14104,6 +14801,9 @@ MAGIC_CMD=$MAGIC_CMD + # Must we lock files when doing compilation? + need_locks=$lt_need_locks + ++# Manifest tool. ++MANIFEST_TOOL=$lt_MANIFEST_TOOL ++ + # Tool to manipulate archived DWARF debug symbol files on Mac OS X. + DSYMUTIL=$lt_DSYMUTIL + +@@ -14218,12 +14918,12 @@ with_gcc=$GCC + # Compiler flag to turn off builtin functions. + no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag + +-# How to pass a linker flag through the compiler. +-wl=$lt_lt_prog_compiler_wl +- + # Additional compiler flags for building library objects. + pic_flag=$lt_lt_prog_compiler_pic + ++# How to pass a linker flag through the compiler. ++wl=$lt_lt_prog_compiler_wl ++ + # Compiler flag to prevent dynamic linking. + link_static_flag=$lt_lt_prog_compiler_static + +@@ -14310,9 +15010,6 @@ inherit_rpath=$inherit_rpath + # Whether libtool must link a program against all its dependency libraries. + link_all_deplibs=$link_all_deplibs + +-# Fix the shell variable \$srcfile for the compiler. +-fix_srcfile_path=$lt_fix_srcfile_path +- + # Set to "yes" if exported symbols are required. 
+ always_export_symbols=$always_export_symbols + +@@ -14328,6 +15025,9 @@ include_expsyms=$lt_include_expsyms + # Commands necessary for linking programs (against libraries) with templates. + prelink_cmds=$lt_prelink_cmds + ++# Commands necessary for finishing linking programs. ++postlink_cmds=$lt_postlink_cmds ++ + # Specify filename containing input files. + file_list_spec=$lt_file_list_spec + +@@ -14360,210 +15060,169 @@ ltmain="$ac_aux_dir/ltmain.sh" + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? +- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- case $xsi_shell in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result="${1##*/}" +-} +- +-# func_dirname_and_basename file append nondir_replacement +-# perform func_basename and func_dirname in a single function +-# call: +-# dirname: Compute the dirname of FILE. If nonempty, +-# add APPEND to the result, otherwise set result +-# to NONDIR_REPLACEMENT. +-# value returned in "$func_dirname_result" +-# basename: Compute filename of FILE. +-# value retuned in "$func_basename_result" +-# Implementation must be kept synchronized with func_dirname +-# and func_basename. For efficiency, we do not delegate to +-# those functions but instead duplicate the functionality here. +-func_dirname_and_basename () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +- func_basename_result="${1##*/}" +-} +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-func_stripname () +-{ +- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are +- # positional parameters, so assign one to ordinary parameter first. +- func_stripname_result=${3} +- func_stripname_result=${func_stripname_result#"${1}"} +- func_stripname_result=${func_stripname_result%"${2}"} +-} +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=${1%%=*} +- func_opt_split_arg=${1#*=} +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- case ${1} in +- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; +- *) func_lo2o_result=${1} ;; +- esac +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=${1%.*}.lo +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=$(( $* )) +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=${#1} +-} +- +-_LT_EOF +- ;; +- *) # Bourne compatible functions. +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- # Extract subdirectory from the argument. 
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` +- if test "X$func_dirname_result" = "X${1}"; then +- func_dirname_result="${3}" +- else +- func_dirname_result="$func_dirname_result${2}" +- fi +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result=`$ECHO "${1}" | $SED "$basename"` +-} +- +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-# func_strip_suffix prefix name +-func_stripname () +-{ +- case ${2} in +- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; +- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; +- esac +-} +- +-# sed scripts: +-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' +-my_sed_long_arg='1s/^-[^=]*=//' +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` +- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=`expr "$@"` +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` +-} +- +-_LT_EOF +-esac +- +-case $lt_shell_append in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1+=\$2" +-} +-_LT_EOF +- ;; +- *) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1=\$$1\$2" +-} +- +-_LT_EOF +- ;; +- esac +- +- +- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- mv -f "$cfgfile" "$ofile" || ++ sed '$q' "$ltmain" >> "$cfgfile" \ ++ || (rm -f "$cfgfile"; exit 1) ++ ++ if test x"$xsi_shell" = xyes; then ++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ ++func_dirname ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_basename ()$/,/^} # func_basename /c\ ++func_basename ()\ ++{\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ ++func_dirname_and_basename ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ ++func_stripname ()\ ++{\ ++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ ++\ # positional parameters, so assign one to ordinary parameter first.\ ++\ func_stripname_result=${3}\ ++\ func_stripname_result=${func_stripname_result#"${1}"}\ ++\ func_stripname_result=${func_stripname_result%"${2}"}\ ++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ ++func_split_long_opt ()\ ++{\ ++\ func_split_long_opt_name=${1%%=*}\ ++\ func_split_long_opt_arg=${1#*=}\ ++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ ++func_split_short_opt ()\ ++{\ ++\ func_split_short_opt_arg=${1#??}\ ++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ ++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ ++func_lo2o ()\ ++{\ ++\ case ${1} in\ ++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ ++\ *) func_lo2o_result=${1} ;;\ ++\ esac\ ++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_xform ()$/,/^} # func_xform /c\ ++func_xform ()\ ++{\ ++ func_xform_result=${1%.*}.lo\ ++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_arith ()$/,/^} # func_arith /c\ ++func_arith ()\ ++{\ ++ func_arith_result=$(( $* ))\ ++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_len ()$/,/^} # func_len /c\ ++func_len ()\ ++{\ ++ func_len_result=${#1}\ ++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++fi ++ ++if test x"$lt_shell_append" = xyes; then ++ sed -e '/^func_append ()$/,/^} # func_append /c\ ++func_append ()\ ++{\ ++ eval "${1}+=\\${2}"\ ++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ ++func_append_quoted ()\ ++{\ ++\ func_quote_for_eval "${2}"\ ++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ ++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ # Save a `func_append' function call where possible by direct use of '+=' ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++else ++ # Save a `func_append' function call even when '+=' is not available ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++fi ++ ++if test x"$_lt_function_replace_fail" = x":"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 ++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} ++fi ++ ++ ++ mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" + +diff --git a/ld/configure b/ld/configure +index a16c6db059..4277b74bad 100755 +--- a/ld/configure ++++ b/ld/configure +@@ -659,8 +659,11 @@ OTOOL + LIPO + NMEDIT + DSYMUTIL ++MANIFEST_TOOL + RANLIB ++ac_ct_AR + AR ++DLLTOOL + OBJDUMP + LN_S + NM +@@ -782,6 +785,7 @@ enable_static + with_pic + enable_fast_install + with_gnu_ld ++with_libtool_sysroot + enable_libtool_lock + enable_plugins + enable_largefile +@@ -1463,6 +1467,8 @@ Optional Packages: + --with-pic try to use only PIC/non-PIC objects [default=use + both] + --with-gnu-ld assume the C compiler uses GNU ld [default=no] ++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR ++ (or the compiler's sysroot if not specified). + --with-lib-path=dir1:dir2... set default LIB_PATH + --with-sysroot=DIR Search for usr/lib et al within DIR. + +@@ -5657,8 +5663,8 @@ esac + + + +-macro_version='2.2.7a' +-macro_revision='1.3134' ++macro_version='2.4' ++macro_revision='1.3293' + + + +@@ -5698,7 +5704,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 + $as_echo_n "checking how to print strings... " >&6; } + # Test print first, because it will be a builtin if present. +-if test "X`print -r -- -n 2>/dev/null`" = X-n && \ ++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ + test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then + ECHO='print -r --' + elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then +@@ -6384,8 +6390,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... 
" >&6; + # Try some XSI features + xsi_shell=no + ( _lt_dummy="a/b/c" +- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ +- = c,a/b,, \ ++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ ++ = c,a/b,b/c, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +@@ -6434,6 +6440,80 @@ esac + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 ++$as_echo_n "checking how to convert $build file names to $host format... " >&6; } ++if test "${lt_cv_to_host_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ++ ;; ++ esac ++ ;; ++ *-*-cygwin* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ++ ;; ++ esac ++ ;; ++ * ) # unhandled hosts (and "normal" native builds) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++esac ++ ++fi ++ ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 ++$as_echo "$lt_cv_to_host_file_cmd" >&6; } ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 ++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } ++if test "${lt_cv_to_tool_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ #assume ordinary cross tools, or native build. ++lt_cv_to_tool_file_cmd=func_convert_file_noop ++case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ esac ++ ;; ++esac ++ ++fi ++ ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 ++$as_echo "$lt_cv_to_tool_file_cmd" >&6; } ++ ++ ++ ++ ++ + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 + $as_echo_n "checking for $LD option to reload object files... " >&6; } + if test "${lt_cv_ld_reload_flag+set}" = set; then : +@@ -6450,6 +6530,11 @@ case $reload_flag in + esac + reload_cmds='$LD$reload_flag -o $output$reload_objs' + case $host_os in ++ cygwin* | mingw* | pw32* | cegcc*) ++ if test "$GCC" != yes; then ++ reload_cmds=false ++ fi ++ ;; + darwin*) + if test "$GCC" = yes; then + reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' +@@ -6618,7 +6703,8 @@ mingw* | pw32*) + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else +- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' ++ # Keep this pattern in sync with the one in func_win32_libid. 
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; +@@ -6772,6 +6858,21 @@ esac + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 + $as_echo "$lt_cv_deplibs_check_method" >&6; } ++ ++file_magic_glob= ++want_nocaseglob=no ++if test "$build" = "$host"; then ++ case $host_os in ++ mingw* | pw32*) ++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then ++ want_nocaseglob=yes ++ else ++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` ++ fi ++ ;; ++ esac ++fi ++ + file_magic_cmd=$lt_cv_file_magic_cmd + deplibs_check_method=$lt_cv_deplibs_check_method + test -z "$deplibs_check_method" && deplibs_check_method=unknown +@@ -6787,9 +6888,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. ++set dummy ${ac_tool_prefix}dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$DLLTOOL"; then ++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++DLLTOOL=$ac_cv_prog_DLLTOOL ++if test -n "$DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 ++$as_echo "$DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_DLLTOOL"; then ++ ac_ct_DLLTOOL=$DLLTOOL ++ # Extract the first word of "dlltool", so it can be a program name with args. ++set dummy dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_DLLTOOL"; then ++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_DLLTOOL="dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL ++if test -n "$ac_ct_DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 ++$as_echo "$ac_ct_DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_DLLTOOL" = x; then ++ DLLTOOL="false" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ DLLTOOL=$ac_ct_DLLTOOL ++ fi ++else ++ DLLTOOL="$ac_cv_prog_DLLTOOL" ++fi ++ ++test -z "$DLLTOOL" && DLLTOOL=dlltool ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 ++$as_echo_n "checking how to associate runtime and link libraries... " >&6; } ++if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_sharedlib_from_linklib_cmd='unknown' ++ ++case $host_os in ++cygwin* | mingw* | pw32* | cegcc*) ++ # two different shell functions defined in ltmain.sh ++ # decide which to use based on capabilities of $DLLTOOL ++ case `$DLLTOOL --help 2>&1` in ++ *--identify-strict*) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ++ ;; ++ *) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ++ ;; ++ esac ++ ;; ++*) ++ # fallback: assume linklib IS sharedlib ++ lt_cv_sharedlib_from_linklib_cmd="$ECHO" ++ ;; ++esac ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 ++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } ++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd ++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO ++ ++ ++ ++ ++ ++ ++ + if test -n "$ac_tool_prefix"; then +- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. +-set dummy ${ac_tool_prefix}ar; ac_word=$2 ++ for ac_prog in ar ++ do ++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. ++set dummy $ac_tool_prefix$ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_AR+set}" = set; then : +@@ -6805,7 +7059,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_AR="${ac_tool_prefix}ar" ++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6825,11 +7079,15 @@ $as_echo "no" >&6; } + fi + + ++ test -n "$AR" && break ++ done + fi +-if test -z "$ac_cv_prog_AR"; then ++if test -z "$AR"; then + ac_ct_AR=$AR +- # Extract the first word of "ar", so it can be a program name with args. +-set dummy ar; ac_word=$2 ++ for ac_prog in ar ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. 
++set dummy $ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : +@@ -6845,7 +7103,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_ac_ct_AR="ar" ++ ac_cv_prog_ac_ct_AR="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6864,6 +7122,10 @@ else + $as_echo "no" >&6; } + fi + ++ ++ test -n "$ac_ct_AR" && break ++done ++ + if test "x$ac_ct_AR" = x; then + AR="false" + else +@@ -6875,12 +7137,12 @@ ac_tool_warned=yes ;; + esac + AR=$ac_ct_AR + fi +-else +- AR="$ac_cv_prog_AR" + fi + +-test -z "$AR" && AR=ar +-test -z "$AR_FLAGS" && AR_FLAGS=cru ++: ${AR=ar} ++: ${AR_FLAGS=cru} ++ ++ + + + +@@ -6890,6 +7152,62 @@ test -z "$AR_FLAGS" && AR_FLAGS=cru + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 ++$as_echo_n "checking for archiver @FILE support... " >&6; } ++if test "${lt_cv_ar_at_file+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_ar_at_file=no ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ ++ ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ echo conftest.$ac_objext > conftest.lst ++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -eq 0; then ++ # Ensure the archiver fails upon bogus file names. ++ rm -f conftest.$ac_objext libconftest.a ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -ne 0; then ++ lt_cv_ar_at_file=@ ++ fi ++ fi ++ rm -f conftest.* libconftest.a ++ ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 ++$as_echo "$lt_cv_ar_at_file" >&6; } ++ ++if test "x$lt_cv_ar_at_file" = xno; then ++ archiver_list_spec= ++else ++ archiver_list_spec=$lt_cv_ar_at_file ++fi ++ ++ ++ ++ ++ + + + if test -n "$ac_tool_prefix"; then +@@ -7226,8 +7544,8 @@ esac + lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + + # Transform an extracted symbol line into symbol name and symbol address +-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" +-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + + # Handle CRLF in mingw tool chain + opt_cr= +@@ -7263,6 +7581,7 @@ for ac_symprfx in "" "_"; do + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi ++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" + + # Check to see that the pipe works correctly. + pipe_works=no +@@ -7304,6 +7623,18 @@ _LT_EOF + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext ++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ ++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) ++/* DATA imports from DLLs on WIN32 con't be const, because runtime ++ relocations are performed -- see ld's documentation on pseudo-relocs. */ ++# define LT_DLSYM_CONST ++#elif defined(__osf__) ++/* This system does not cope well with relocations in const data. */ ++# define LT_DLSYM_CONST ++#else ++# define LT_DLSYM_CONST const ++#endif ++ + #ifdef __cplusplus + extern "C" { + #endif +@@ -7315,7 +7646,7 @@ _LT_EOF + cat <<_LT_EOF >> conftest.$ac_ext + + /* The mapping between symbol names and symbols. */ +-const struct { ++LT_DLSYM_CONST struct { + const char *name; + void *address; + } +@@ -7341,8 +7672,8 @@ static const void *lt_preloaded_setup() { + _LT_EOF + # Now try linking the two files. 
+ mv conftest.$ac_objext conftstm.$ac_objext +- lt_save_LIBS="$LIBS" +- lt_save_CFLAGS="$CFLAGS" ++ lt_globsym_save_LIBS=$LIBS ++ lt_globsym_save_CFLAGS=$CFLAGS + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 +@@ -7352,8 +7683,8 @@ _LT_EOF + test $ac_status = 0; } && test -s conftest${ac_exeext}; then + pipe_works=yes + fi +- LIBS="$lt_save_LIBS" +- CFLAGS="$lt_save_CFLAGS" ++ LIBS=$lt_globsym_save_LIBS ++ CFLAGS=$lt_globsym_save_CFLAGS + else + echo "cannot find nm_test_func in $nlist" >&5 + fi +@@ -7390,6 +7721,19 @@ else + $as_echo "ok" >&6; } + fi + ++# Response file support. ++if test "$lt_cv_nm_interface" = "MS dumpbin"; then ++ nm_file_list_spec='@' ++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then ++ nm_file_list_spec='@' ++fi ++ ++ ++ ++ ++ ++ ++ + + + +@@ -7410,6 +7754,42 @@ fi + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 ++$as_echo_n "checking for sysroot... " >&6; } ++ ++# Check whether --with-libtool-sysroot was given. ++if test "${with_libtool_sysroot+set}" = set; then : ++ withval=$with_libtool_sysroot; ++else ++ with_libtool_sysroot=no ++fi ++ ++ ++lt_sysroot= ++case ${with_libtool_sysroot} in #( ++ yes) ++ if test "$GCC" = yes; then ++ lt_sysroot=`$CC --print-sysroot 2>/dev/null` ++ fi ++ ;; #( ++ /*) ++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` ++ ;; #( ++ no|'') ++ ;; #( ++ *) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 ++$as_echo "${with_libtool_sysroot}" >&6; } ++ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 ++ ;; ++esac ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 ++$as_echo "${lt_sysroot:-no}" >&6; } ++ ++ ++ ++ + + # Check whether --enable-libtool-lock was given. + if test "${enable_libtool_lock+set}" = set; then : +@@ -7617,6 +7997,123 @@ esac + + need_locks="$enable_libtool_lock" + ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. ++set dummy ${ac_tool_prefix}mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$MANIFEST_TOOL"; then ++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL ++if test -n "$MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 ++$as_echo "$MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then ++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL ++ # Extract the first word of "mt", so it can be a program name with args. 
++set dummy mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_MANIFEST_TOOL"; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL ++if test -n "$ac_ct_MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 ++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_MANIFEST_TOOL" = x; then ++ MANIFEST_TOOL=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL ++ fi ++else ++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" ++fi ++ ++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 ++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } ++if test "${lt_cv_path_mainfest_tool+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_path_mainfest_tool=no ++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 ++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out ++ cat conftest.err >&5 ++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then ++ lt_cv_path_mainfest_tool=yes ++ fi ++ rm -f conftest* ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 ++$as_echo "$lt_cv_path_mainfest_tool" >&6; } ++if test "x$lt_cv_path_mainfest_tool" != xyes; then ++ MANIFEST_TOOL=: ++fi ++ ++ ++ ++ ++ + + case $host_os in + rhapsody* | darwin*) +@@ -8180,6 +8677,8 @@ _LT_EOF + $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 + echo "$AR cru libconftest.a conftest.o" >&5 + $AR cru libconftest.a conftest.o 2>&5 ++ echo "$RANLIB libconftest.a" >&5 ++ $RANLIB libconftest.a 2>&5 + cat > conftest.c << _LT_EOF + int main() { return 0;} + _LT_EOF +@@ -8248,6 +8747,16 @@ done + + + ++func_stripname_cnf () ++{ ++ case ${2} in ++ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; ++ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; ++ esac ++} # func_stripname_cnf ++ ++ ++ + + + # Set options +@@ -8376,7 +8885,8 @@ fi + LIBTOOL_DEPS="$ltmain" + + # Always use our own libtool. 
+-LIBTOOL='$(SHELL) $(top_builddir)/libtool' ++LIBTOOL='$(SHELL) $(top_builddir)' ++LIBTOOL="$LIBTOOL/${host_alias}-libtool" + + + +@@ -8465,7 +8975,7 @@ aix3*) + esac + + # Global variables: +-ofile=libtool ++ofile=${host_alias}-libtool + can_build_shared=yes + + # All known linkers require a `.a' archive for static linking (except MSVC, +@@ -8763,8 +9273,6 @@ fi + lt_prog_compiler_pic= + lt_prog_compiler_static= + +-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +-$as_echo_n "checking for $compiler option to produce PIC... " >&6; } + + if test "$GCC" = yes; then + lt_prog_compiler_wl='-Wl,' +@@ -8930,6 +9438,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + lt_prog_compiler_pic='--shared' + lt_prog_compiler_static='--static' + ;; ++ nagfor*) ++ # NAG Fortran compiler ++ lt_prog_compiler_wl='-Wl,-Wl,,' ++ lt_prog_compiler_pic='-PIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; + pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) +@@ -8992,7 +9506,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + case $cc_basename in +- f77* | f90* | f95*) ++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) + lt_prog_compiler_wl='-Qoption ld ';; + *) + lt_prog_compiler_wl='-Wl,';; +@@ -9049,13 +9563,17 @@ case $host_os in + lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" + ;; + esac +-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 +-$as_echo "$lt_prog_compiler_pic" >&6; } +- +- +- +- + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 ++$as_echo_n "checking for $compiler option to produce PIC... " >&6; } ++if test "${lt_cv_prog_compiler_pic+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 ++$as_echo "$lt_cv_prog_compiler_pic" >&6; } ++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic + + # + # Check to make sure the PIC flag actually works. +@@ -9116,6 +9634,11 @@ fi + + + ++ ++ ++ ++ ++ + # + # Check to make sure the static flag actually works. 
+ # +@@ -9466,7 +9989,8 @@ _LT_EOF + allow_undefined_flag=unsupported + always_export_symbols=no + enable_shared_with_static_runtimes=yes +- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' ++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +@@ -9565,12 +10089,12 @@ _LT_EOF + whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' + hardcode_libdir_flag_spec= + hardcode_libdir_flag_spec_ld='-rpath $libdir' +- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' ++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ +- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' ++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + ;; + esac +@@ -9584,8 +10108,8 @@ _LT_EOF + archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + ;; + +@@ -9603,8 +10127,8 @@ _LT_EOF + + _LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -9650,8 +10174,8 @@ _LT_EOF + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ 
archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -9781,7 +10305,13 @@ _LT_EOF + allow_undefined_flag='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9794,22 +10324,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" +@@ -9821,7 +10358,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + else + # Determine the default libpath from the value encoded in an + # empty executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9834,22 +10377,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. 
++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, +@@ -9893,21 +10443,64 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + # When not using gcc, we currently assume that we are using + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is +- # no search path for DLLs. +- hardcode_libdir_flag_spec=' ' +- allow_undefined_flag=unsupported +- # Tell ltmain to make .lib files, not .a files. +- libext=lib +- # Tell ltmain to make .dll files, not .so files. +- shrext_cmds=".dll" +- # FIXME: Setting linknames here is a bad hack. +- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' +- # The linker will automatically build a .lib file if we build a DLL. +- old_archive_from_new_cmds='true' +- # FIXME: Should let the user specify the lib program. +- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' +- fix_srcfile_path='`cygpath -w "$srcfile"`' +- enable_shared_with_static_runtimes=yes ++ # no search path for DLLs. ++ case $cc_basename in ++ cl*) ++ # Native MSVC ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ always_export_symbols=yes ++ file_list_spec='@' ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' ++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; ++ else ++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; ++ fi~ ++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ ++ linknames=' ++ # The linker will not automatically build a static lib if we build a DLL. 
++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' ++ enable_shared_with_static_runtimes=yes ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ # Don't use ranlib ++ old_postinstall_cmds='chmod 644 $oldlib' ++ postlink_cmds='lt_outputfile="@OUTPUT@"~ ++ lt_tool_outputfile="@TOOL_OUTPUT@"~ ++ case $lt_outputfile in ++ *.exe|*.EXE) ;; ++ *) ++ lt_outputfile="$lt_outputfile.exe" ++ lt_tool_outputfile="$lt_tool_outputfile.exe" ++ ;; ++ esac~ ++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then ++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; ++ $RM "$lt_outputfile.manifest"; ++ fi' ++ ;; ++ *) ++ # Assume MSVC wrapper ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' ++ # The linker will automatically build a .lib file if we build a DLL. ++ old_archive_from_new_cmds='true' ++ # FIXME: Should let the user specify the lib program. ++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' ++ enable_shared_with_static_runtimes=yes ++ ;; ++ esac + ;; + + darwin* | rhapsody*) +@@ -9968,7 +10561,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. + freebsd* | dragonfly*) +- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no +@@ -9976,7 +10569,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux9*) + if test "$GCC" = yes; then +- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi +@@ -9992,7 +10585,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux10*) + if test "$GCC" = yes && test "$with_gnu_ld" = no; then +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi +@@ -10016,10 +10609,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname 
${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else +@@ -10098,23 +10691,36 @@ fi + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. +- save_LDFLAGS="$LDFLAGS" +- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ # This should be the same for all languages, so no per-tag cache variable. ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 ++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } ++if test "${lt_cv_irix_exported_symbol+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. 
*/ +-int foo(void) {} ++int foo (void) { return 0; } + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' +- ++ lt_cv_irix_exported_symbol=yes ++else ++ lt_cv_irix_exported_symbol=no + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +- LDFLAGS="$save_LDFLAGS" ++ LDFLAGS="$save_LDFLAGS" ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 ++$as_echo "$lt_cv_irix_exported_symbol" >&6; } ++ if test "$lt_cv_irix_exported_symbol" = yes; then ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' ++ fi + else + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' +@@ -10199,7 +10805,7 @@ rm -f core conftest.err conftest.$ac_objext \ + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' +- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + else + allow_undefined_flag=' -expect_unresolved \*' +@@ -10218,9 +10824,9 @@ rm -f core conftest.err conftest.$ac_objext \ + no_undefined_flag=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' +- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ +- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) +@@ -10796,8 +11402,9 @@ cygwin* | mingw* | pw32* | cegcc*) + need_version=no + need_lib_prefix=no + +- case $GCC,$host_os in +- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) ++ case $GCC,$cc_basename in ++ yes,*) ++ # gcc + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by 
postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ +@@ -10830,13 +11437,71 @@ cygwin* | mingw* | pw32* | cegcc*) + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + esac ++ dynamic_linker='Win32 ld.exe' ++ ;; ++ ++ *,cl*) ++ # Native MSVC ++ libname_spec='$name' ++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ library_names_spec='${libname}.dll.lib' ++ ++ case $build_os in ++ mingw*) ++ sys_lib_search_path_spec= ++ lt_save_ifs=$IFS ++ IFS=';' ++ for lt_path in $LIB ++ do ++ IFS=$lt_save_ifs ++ # Let DOS variable expansion print the short 8.3 style file name. ++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` ++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" ++ done ++ IFS=$lt_save_ifs ++ # Convert to MSYS style. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ++ ;; ++ cygwin*) ++ # Convert to unix form, then to dos form, then back to unix form ++ # but this time dos style (no spaces!) so that the unix form looks ++ # like /cygdrive/c/PROGRA~1:/cygdr... ++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` ++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` ++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ ;; ++ *) ++ sys_lib_search_path_spec="$LIB" ++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then ++ # It is most probably a Windows format PATH. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` ++ else ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ fi ++ # FIXME: find the short name or the path components, as spaces are ++ # common. (e.g. "Program Files" -> "PROGRA~1") ++ ;; ++ esac ++ ++ # DLL is installed to $(libdir)/../bin by postinstall_cmds ++ postinstall_cmds='base_file=`basename \${file}`~ ++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ ++ dldir=$destdir/`dirname \$dlpath`~ ++ test -d \$dldir || mkdir -p \$dldir~ ++ $install_prog $dir/$dlname \$dldir/$dlname' ++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ ++ dlpath=$dir/\$dldll~ ++ $RM \$dlpath' ++ shlibpath_overrides_runpath=yes ++ dynamic_linker='Win32 link.exe' + ;; + + *) ++ # Assume MSVC wrapper + library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ++ dynamic_linker='Win32 ld.exe' + ;; + esac +- dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . and the directory the executable is in + shlibpath_var=PATH + ;; +@@ -10928,7 +11593,7 @@ haiku*) + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LIBRARY_PATH + shlibpath_overrides_runpath=yes +- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' ++ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' + hardcode_into_libs=yes + ;; + +@@ -11768,10 +12433,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. 
*/ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -11874,10 +12539,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -12269,6 +12934,7 @@ $RM -r conftest* + + # Allow CC to be a program name with arguments. + lt_save_CC=$CC ++ lt_save_CFLAGS=$CFLAGS + lt_save_LD=$LD + lt_save_GCC=$GCC + GCC=$GXX +@@ -12286,6 +12952,7 @@ $RM -r conftest* + fi + test -z "${LDCXX+set}" || LD=$LDCXX + CC=${CXX-"c++"} ++ CFLAGS=$CXXFLAGS + compiler=$CC + compiler_CXX=$CC + for cc_temp in $compiler""; do +@@ -12568,7 +13235,13 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie + allow_undefined_flag_CXX='-berok' + # Determine the default libpath from the value encoded in an empty + # executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath__CXX+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -12581,22 +13254,29 @@ main () + _ACEOF + if ac_fn_cxx_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath__CXX"; then ++ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath__CXX"; then ++ lt_cv_aix_libpath__CXX="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath__CXX ++fi + + hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" + +@@ -12609,7 +13289,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + else + # Determine the default libpath from the value encoded in an + # empty executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath__CXX+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. 
*/ + + int +@@ -12622,22 +13308,29 @@ main () + _ACEOF + if ac_fn_cxx_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath__CXX"; then ++ lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath__CXX"; then ++ lt_cv_aix_libpath__CXX="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath__CXX ++fi + + hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, +@@ -12680,29 +13373,75 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + ;; + + cygwin* | mingw* | pw32* | cegcc*) +- # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, +- # as there is no search path for DLLs. +- hardcode_libdir_flag_spec_CXX='-L$libdir' +- export_dynamic_flag_spec_CXX='${wl}--export-all-symbols' +- allow_undefined_flag_CXX=unsupported +- always_export_symbols_CXX=no +- enable_shared_with_static_runtimes_CXX=yes +- +- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then +- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +- # If the export-symbols file already is a .def file (1st line +- # is EXPORTS), use it as is; otherwise, prepend... +- archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then +- cp $export_symbols $output_objdir/$soname.def; +- else +- echo EXPORTS > $output_objdir/$soname.def; +- cat $export_symbols >> $output_objdir/$soname.def; +- fi~ +- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +- else +- ld_shlibs_CXX=no +- fi +- ;; ++ case $GXX,$cc_basename in ++ ,cl* | no,cl*) ++ # Native MSVC ++ # hardcode_libdir_flag_spec is actually meaningless, as there is ++ # no search path for DLLs. ++ hardcode_libdir_flag_spec_CXX=' ' ++ allow_undefined_flag_CXX=unsupported ++ always_export_symbols_CXX=yes ++ file_list_spec_CXX='@' ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. 
++ archive_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' ++ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; ++ else ++ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; ++ fi~ ++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ ++ linknames=' ++ # The linker will not automatically build a static lib if we build a DLL. ++ # _LT_TAGVAR(old_archive_from_new_cmds, CXX)='true' ++ enable_shared_with_static_runtimes_CXX=yes ++ # Don't use ranlib ++ old_postinstall_cmds_CXX='chmod 644 $oldlib' ++ postlink_cmds_CXX='lt_outputfile="@OUTPUT@"~ ++ lt_tool_outputfile="@TOOL_OUTPUT@"~ ++ case $lt_outputfile in ++ *.exe|*.EXE) ;; ++ *) ++ lt_outputfile="$lt_outputfile.exe" ++ lt_tool_outputfile="$lt_tool_outputfile.exe" ++ ;; ++ esac~ ++ func_to_tool_file "$lt_outputfile"~ ++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then ++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; ++ $RM "$lt_outputfile.manifest"; ++ fi' ++ ;; ++ *) ++ # g++ ++ # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, ++ # as there is no search path for DLLs. ++ hardcode_libdir_flag_spec_CXX='-L$libdir' ++ export_dynamic_flag_spec_CXX='${wl}--export-all-symbols' ++ allow_undefined_flag_CXX=unsupported ++ always_export_symbols_CXX=no ++ enable_shared_with_static_runtimes_CXX=yes ++ ++ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then ++ archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' ++ # If the export-symbols file already is a .def file (1st line ++ # is EXPORTS), use it as is; otherwise, prepend... 
++ archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ cp $export_symbols $output_objdir/$soname.def; ++ else ++ echo EXPORTS > $output_objdir/$soname.def; ++ cat $export_symbols >> $output_objdir/$soname.def; ++ fi~ ++ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' ++ else ++ ld_shlibs_CXX=no ++ fi ++ ;; ++ esac ++ ;; + darwin* | rhapsody*) + + +@@ -12808,7 +13547,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + ;; + *) + if test "$GXX" = yes; then +- archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + # FIXME: insert proper C++ library support + ld_shlibs_CXX=no +@@ -12879,10 +13618,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + ia64*) +- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ++ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + *) +- archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ++ archive_cmds_CXX='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + esac + fi +@@ -12923,9 +13662,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + *) + if test "$GXX" = yes; then + if test "$with_gnu_ld" = no; then +- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + else +- archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' ++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' + fi + fi + link_all_deplibs_CXX=yes +@@ -12995,20 +13734,20 @@ if test -z "$aix_libpath"; then 
aix_libpath="/usr/lib:/lib"; fi + prelink_cmds_CXX='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ +- compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"' ++ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' + old_archive_cmds_CXX='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ +- $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~ ++ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ + $RANLIB $oldlib' + archive_cmds_CXX='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ +- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' ++ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' + archive_expsym_cmds_CXX='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ +- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' ++ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' + ;; + *) # Version 6 and above use weak symbols + archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' +@@ -13203,7 +13942,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + ;; + *) +- archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + ;; + esac + +@@ -13249,7 +13988,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + solaris*) + case $cc_basename in +- CC*) ++ CC* | sunCC*) + # Sun C++ 4.2, 5.x and Centerline C++ + archive_cmds_need_lc_CXX=yes + no_undefined_flag_CXX=' -zdefs' +@@ -13290,9 +14029,9 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + if test "$GXX" = yes && test "$with_gnu_ld" = no; then + no_undefined_flag_CXX=' ${wl}-z ${wl}defs' + if $CC --version | $GREP -v '^2\.7' > /dev/null; then +- archive_cmds_CXX='$CC -shared 
-nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' ++ archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' + archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ +- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' ++ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' + + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when +@@ -13427,6 +14166,13 @@ private: + }; + _LT_EOF + ++ ++_lt_libdeps_save_CFLAGS=$CFLAGS ++case "$CC $CFLAGS " in #( ++*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; ++*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; ++esac ++ + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + (eval $ac_compile) 2>&5 + ac_status=$? +@@ -13440,7 +14186,7 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + pre_test_object_deps_done=no + + for p in `eval "$output_verbose_link_cmd"`; do +- case $p in ++ case ${prev}${p} in + + -L* | -R* | -l*) + # Some compilers place space between "-{L,R}" and the path. +@@ -13449,13 +14195,22 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + test $p = "-R"; then + prev=$p + continue +- else +- prev= + fi + ++ # Expand the sysroot to ease extracting the directories later. ++ if test -z "$prev"; then ++ case $p in ++ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; ++ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; ++ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; ++ esac ++ fi ++ case $p in ++ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; ++ esac + if test "$pre_test_object_deps_done" = no; then +- case $p in +- -L* | -R*) ++ case ${prev} in ++ -L | -R) + # Internal compiler library paths should come after those + # provided the user. The postdeps already come after the + # user supplied libs so there is no need to process them. +@@ -13475,8 +14230,10 @@ if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 + postdeps_CXX="${postdeps_CXX} ${prev}${p}" + fi + fi ++ prev= + ;; + ++ *.lto.$objext) ;; # Ignore GCC LTO objects + *.$objext) + # This assumes that the test object file only shows up + # once in the compiler output. +@@ -13512,6 +14269,7 @@ else + fi + + $RM -f confest.$objext ++CFLAGS=$_lt_libdeps_save_CFLAGS + + # PORTME: override above test on systems where it is broken + case $host_os in +@@ -13547,7 +14305,7 @@ linux*) + + solaris*) + case $cc_basename in +- CC*) ++ CC* | sunCC*) + # The more standards-conforming stlport4 library is + # incompatible with the Cstd library. Avoid specifying + # it if it's in CXXFLAGS. Ignore libCrun as +@@ -13612,8 +14370,6 @@ fi + lt_prog_compiler_pic_CXX= + lt_prog_compiler_static_CXX= + +-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +-$as_echo_n "checking for $compiler option to produce PIC... " >&6; } + + # C++ specific cases for pic, static, wl, etc. 
+ if test "$GXX" = yes; then +@@ -13718,6 +14474,11 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + ;; + esac + ;; ++ mingw* | cygwin* | os2* | pw32* | cegcc*) ++ # This hack is so that the source file can tell whether it is being ++ # built for inclusion in a dll (and should export symbols for example). ++ lt_prog_compiler_pic_CXX='-DDLL_EXPORT' ++ ;; + dgux*) + case $cc_basename in + ec++*) +@@ -13870,7 +14631,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + ;; + solaris*) + case $cc_basename in +- CC*) ++ CC* | sunCC*) + # Sun C++ 4.2, 5.x and Centerline C++ + lt_prog_compiler_pic_CXX='-KPIC' + lt_prog_compiler_static_CXX='-Bstatic' +@@ -13935,10 +14696,17 @@ case $host_os in + lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC" + ;; + esac +-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic_CXX" >&5 +-$as_echo "$lt_prog_compiler_pic_CXX" >&6; } +- + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 ++$as_echo_n "checking for $compiler option to produce PIC... " >&6; } ++if test "${lt_cv_prog_compiler_pic_CXX+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_pic_CXX=$lt_prog_compiler_pic_CXX ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_CXX" >&5 ++$as_echo "$lt_cv_prog_compiler_pic_CXX" >&6; } ++lt_prog_compiler_pic_CXX=$lt_cv_prog_compiler_pic_CXX + + # + # Check to make sure the PIC flag actually works. +@@ -13996,6 +14764,8 @@ fi + + + ++ ++ + # + # Check to make sure the static flag actually works. + # +@@ -14173,6 +14943,7 @@ fi + $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } + + export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ++ exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' + case $host_os in + aix[4-9]*) + # If we're using GNU nm, then we don't want the "-C" option. 
+@@ -14187,15 +14958,20 @@ $as_echo_n "checking whether the $compiler linker ($LD) supports shared librarie + ;; + pw32*) + export_symbols_cmds_CXX="$ltdll_cmds" +- ;; ++ ;; + cygwin* | mingw* | cegcc*) +- export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;/^.*[ ]__nm__/s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' +- ;; ++ case $cc_basename in ++ cl*) ;; ++ *) ++ export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' ++ exclude_expsyms_CXX='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' ++ ;; ++ esac ++ ;; + *) + export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' +- ;; ++ ;; + esac +- exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' + + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 + $as_echo "$ld_shlibs_CXX" >&6; } +@@ -14458,8 +15234,9 @@ cygwin* | mingw* | pw32* | cegcc*) + need_version=no + need_lib_prefix=no + +- case $GCC,$host_os in +- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) ++ case $GCC,$cc_basename in ++ yes,*) ++ # gcc + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ +@@ -14491,13 +15268,71 @@ cygwin* | mingw* | pw32* | cegcc*) + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + esac ++ dynamic_linker='Win32 ld.exe' ++ ;; ++ ++ *,cl*) ++ # Native MSVC ++ libname_spec='$name' ++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ library_names_spec='${libname}.dll.lib' ++ ++ case $build_os in ++ mingw*) ++ sys_lib_search_path_spec= ++ lt_save_ifs=$IFS ++ IFS=';' ++ for lt_path in $LIB ++ do ++ IFS=$lt_save_ifs ++ # Let DOS variable expansion print the short 8.3 style file name. ++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` ++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" ++ done ++ IFS=$lt_save_ifs ++ # Convert to MSYS style. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ++ ;; ++ cygwin*) ++ # Convert to unix form, then to dos form, then back to unix form ++ # but this time dos style (no spaces!) so that the unix form looks ++ # like /cygdrive/c/PROGRA~1:/cygdr... ++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` ++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` ++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ ;; ++ *) ++ sys_lib_search_path_spec="$LIB" ++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then ++ # It is most probably a Windows format PATH. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` ++ else ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ fi ++ # FIXME: find the short name or the path components, as spaces are ++ # common. (e.g. 
"Program Files" -> "PROGRA~1") ++ ;; ++ esac ++ ++ # DLL is installed to $(libdir)/../bin by postinstall_cmds ++ postinstall_cmds='base_file=`basename \${file}`~ ++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ ++ dldir=$destdir/`dirname \$dlpath`~ ++ test -d \$dldir || mkdir -p \$dldir~ ++ $install_prog $dir/$dlname \$dldir/$dlname' ++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ ++ dlpath=$dir/\$dldll~ ++ $RM \$dlpath' ++ shlibpath_overrides_runpath=yes ++ dynamic_linker='Win32 link.exe' + ;; + + *) ++ # Assume MSVC wrapper + library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ++ dynamic_linker='Win32 ld.exe' + ;; + esac +- dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . and the directory the executable is in + shlibpath_var=PATH + ;; +@@ -14588,7 +15423,7 @@ haiku*) + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LIBRARY_PATH + shlibpath_overrides_runpath=yes +- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' ++ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' + hardcode_into_libs=yes + ;; + +@@ -15047,6 +15882,7 @@ fi + fi # test -n "$compiler" + + CC=$lt_save_CC ++ CFLAGS=$lt_save_CFLAGS + LDCXX=$LD + LD=$lt_save_LD + GCC=$lt_save_GCC +@@ -18026,13 +18862,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' + lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' + lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' + lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' + reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' + reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' + OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' + deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' + file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' ++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' ++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' ++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' ++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' + AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' + AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' ++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' + STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' + RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' + old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' +@@ -18047,14 +18890,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de + lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' 
++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' ++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' + objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' + MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' +-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' + lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' + need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' ++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' + DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' + NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' + LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' +@@ -18087,12 +18933,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q + hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' + inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' + link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' +-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' + always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' + export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' + exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' + include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' + prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' ++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' + file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' + variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' + need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' +@@ -18131,8 +18977,8 @@ old_archive_cmds_CXX='`$ECHO "$old_archive_cmds_CXX" | $SED "$delay_single_quote + compiler_CXX='`$ECHO "$compiler_CXX" | $SED "$delay_single_quote_subst"`' + GCC_CXX='`$ECHO "$GCC_CXX" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "$lt_prog_compiler_no_builtin_flag_CXX" | $SED "$delay_single_quote_subst"`' +-lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_pic_CXX='`$ECHO "$lt_prog_compiler_pic_CXX" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_static_CXX='`$ECHO "$lt_prog_compiler_static_CXX" | $SED "$delay_single_quote_subst"`' + lt_cv_prog_compiler_c_o_CXX='`$ECHO "$lt_cv_prog_compiler_c_o_CXX" | $SED "$delay_single_quote_subst"`' + archive_cmds_need_lc_CXX='`$ECHO "$archive_cmds_need_lc_CXX" | $SED "$delay_single_quote_subst"`' +@@ -18159,12 +19005,12 @@ hardcode_shlibpath_var_CXX='`$ECHO "$hardcode_shlibpath_var_CXX" | $SED "$delay_ + hardcode_automatic_CXX='`$ECHO 
"$hardcode_automatic_CXX" | $SED "$delay_single_quote_subst"`' + inherit_rpath_CXX='`$ECHO "$inherit_rpath_CXX" | $SED "$delay_single_quote_subst"`' + link_all_deplibs_CXX='`$ECHO "$link_all_deplibs_CXX" | $SED "$delay_single_quote_subst"`' +-fix_srcfile_path_CXX='`$ECHO "$fix_srcfile_path_CXX" | $SED "$delay_single_quote_subst"`' + always_export_symbols_CXX='`$ECHO "$always_export_symbols_CXX" | $SED "$delay_single_quote_subst"`' + export_symbols_cmds_CXX='`$ECHO "$export_symbols_cmds_CXX" | $SED "$delay_single_quote_subst"`' + exclude_expsyms_CXX='`$ECHO "$exclude_expsyms_CXX" | $SED "$delay_single_quote_subst"`' + include_expsyms_CXX='`$ECHO "$include_expsyms_CXX" | $SED "$delay_single_quote_subst"`' + prelink_cmds_CXX='`$ECHO "$prelink_cmds_CXX" | $SED "$delay_single_quote_subst"`' ++postlink_cmds_CXX='`$ECHO "$postlink_cmds_CXX" | $SED "$delay_single_quote_subst"`' + file_list_spec_CXX='`$ECHO "$file_list_spec_CXX" | $SED "$delay_single_quote_subst"`' + hardcode_action_CXX='`$ECHO "$hardcode_action_CXX" | $SED "$delay_single_quote_subst"`' + compiler_lib_search_dirs_CXX='`$ECHO "$compiler_lib_search_dirs_CXX" | $SED "$delay_single_quote_subst"`' +@@ -18202,8 +19048,13 @@ reload_flag \ + OBJDUMP \ + deplibs_check_method \ + file_magic_cmd \ ++file_magic_glob \ ++want_nocaseglob \ ++DLLTOOL \ ++sharedlib_from_linklib_cmd \ + AR \ + AR_FLAGS \ ++archiver_list_spec \ + STRIP \ + RANLIB \ + CC \ +@@ -18213,12 +19064,14 @@ lt_cv_sys_global_symbol_pipe \ + lt_cv_sys_global_symbol_to_cdecl \ + lt_cv_sys_global_symbol_to_c_name_address \ + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ ++nm_file_list_spec \ + lt_prog_compiler_no_builtin_flag \ +-lt_prog_compiler_wl \ + lt_prog_compiler_pic \ ++lt_prog_compiler_wl \ + lt_prog_compiler_static \ + lt_cv_prog_compiler_c_o \ + need_locks \ ++MANIFEST_TOOL \ + DSYMUTIL \ + NMEDIT \ + LIPO \ +@@ -18234,7 +19087,6 @@ no_undefined_flag \ + hardcode_libdir_flag_spec \ + hardcode_libdir_flag_spec_ld \ + hardcode_libdir_separator \ +-fix_srcfile_path \ + exclude_expsyms \ + include_expsyms \ + file_list_spec \ +@@ -18256,8 +19108,8 @@ LD_CXX \ + reload_flag_CXX \ + compiler_CXX \ + lt_prog_compiler_no_builtin_flag_CXX \ +-lt_prog_compiler_wl_CXX \ + lt_prog_compiler_pic_CXX \ ++lt_prog_compiler_wl_CXX \ + lt_prog_compiler_static_CXX \ + lt_cv_prog_compiler_c_o_CXX \ + export_dynamic_flag_spec_CXX \ +@@ -18269,7 +19121,6 @@ no_undefined_flag_CXX \ + hardcode_libdir_flag_spec_CXX \ + hardcode_libdir_flag_spec_ld_CXX \ + hardcode_libdir_separator_CXX \ +-fix_srcfile_path_CXX \ + exclude_expsyms_CXX \ + include_expsyms_CXX \ + file_list_spec_CXX \ +@@ -18303,6 +19154,7 @@ module_cmds \ + module_expsym_cmds \ + export_symbols_cmds \ + prelink_cmds \ ++postlink_cmds \ + postinstall_cmds \ + postuninstall_cmds \ + finish_cmds \ +@@ -18317,7 +19169,8 @@ archive_expsym_cmds_CXX \ + module_cmds_CXX \ + module_expsym_cmds_CXX \ + export_symbols_cmds_CXX \ +-prelink_cmds_CXX; do ++prelink_cmds_CXX \ ++postlink_cmds_CXX; do + case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in + *[\\\\\\\`\\"\\\$]*) + eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" +@@ -19110,7 +19963,8 @@ $as_echo X"$file" | + # NOTE: Changes made to this file will be lost: look at ltmain.sh. + # + # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. 
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, ++# Inc. + # Written by Gordon Matzigkeit, 1996 + # + # This file is part of GNU Libtool. +@@ -19213,19 +20067,42 @@ SP2NL=$lt_lt_SP2NL + # turn newlines into spaces. + NL2SP=$lt_lt_NL2SP + ++# convert \$build file names to \$host format. ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++ ++# convert \$build files to toolchain format. ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++ + # An object symbol dumper. + OBJDUMP=$lt_OBJDUMP + + # Method to check whether dependent libraries are shared objects. + deplibs_check_method=$lt_deplibs_check_method + +-# Command to use when deplibs_check_method == "file_magic". ++# Command to use when deplibs_check_method = "file_magic". + file_magic_cmd=$lt_file_magic_cmd + ++# How to find potential files when deplibs_check_method = "file_magic". ++file_magic_glob=$lt_file_magic_glob ++ ++# Find potential files using nocaseglob when deplibs_check_method = "file_magic". ++want_nocaseglob=$lt_want_nocaseglob ++ ++# DLL creation program. ++DLLTOOL=$lt_DLLTOOL ++ ++# Command to associate shared and link libraries. ++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd ++ + # The archiver. + AR=$lt_AR ++ ++# Flags to create an archive. + AR_FLAGS=$lt_AR_FLAGS + ++# How to feed a file listing to the archiver. ++archiver_list_spec=$lt_archiver_list_spec ++ + # A symbol stripping program. + STRIP=$lt_STRIP + +@@ -19255,6 +20132,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address + # Transform the output of nm in a C name address pair when lib prefix is needed. + global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix + ++# Specify filename containing input files for \$NM. ++nm_file_list_spec=$lt_nm_file_list_spec ++ ++# The root where to search for dependent libraries,and in which our libraries should be installed. ++lt_sysroot=$lt_sysroot ++ + # The name of the directory that contains temporary libtool files. + objdir=$objdir + +@@ -19264,6 +20147,9 @@ MAGIC_CMD=$MAGIC_CMD + # Must we lock files when doing compilation? + need_locks=$lt_need_locks + ++# Manifest tool. ++MANIFEST_TOOL=$lt_MANIFEST_TOOL ++ + # Tool to manipulate archived DWARF debug symbol files on Mac OS X. + DSYMUTIL=$lt_DSYMUTIL + +@@ -19378,12 +20264,12 @@ with_gcc=$GCC + # Compiler flag to turn off builtin functions. + no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag + +-# How to pass a linker flag through the compiler. +-wl=$lt_lt_prog_compiler_wl +- + # Additional compiler flags for building library objects. + pic_flag=$lt_lt_prog_compiler_pic + ++# How to pass a linker flag through the compiler. ++wl=$lt_lt_prog_compiler_wl ++ + # Compiler flag to prevent dynamic linking. + link_static_flag=$lt_lt_prog_compiler_static + +@@ -19470,9 +20356,6 @@ inherit_rpath=$inherit_rpath + # Whether libtool must link a program against all its dependency libraries. + link_all_deplibs=$link_all_deplibs + +-# Fix the shell variable \$srcfile for the compiler. +-fix_srcfile_path=$lt_fix_srcfile_path +- + # Set to "yes" if exported symbols are required. + always_export_symbols=$always_export_symbols + +@@ -19488,6 +20371,9 @@ include_expsyms=$lt_include_expsyms + # Commands necessary for linking programs (against libraries) with templates. + prelink_cmds=$lt_prelink_cmds + ++# Commands necessary for finishing linking programs. ++postlink_cmds=$lt_postlink_cmds ++ + # Specify filename containing input files. 
+ file_list_spec=$lt_file_list_spec + +@@ -19534,210 +20420,169 @@ ltmain="$ac_aux_dir/ltmain.sh" + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? +- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- case $xsi_shell in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result="${1##*/}" +-} +- +-# func_dirname_and_basename file append nondir_replacement +-# perform func_basename and func_dirname in a single function +-# call: +-# dirname: Compute the dirname of FILE. If nonempty, +-# add APPEND to the result, otherwise set result +-# to NONDIR_REPLACEMENT. +-# value returned in "$func_dirname_result" +-# basename: Compute filename of FILE. +-# value retuned in "$func_basename_result" +-# Implementation must be kept synchronized with func_dirname +-# and func_basename. For efficiency, we do not delegate to +-# those functions but instead duplicate the functionality here. +-func_dirname_and_basename () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +- func_basename_result="${1##*/}" +-} +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-func_stripname () +-{ +- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are +- # positional parameters, so assign one to ordinary parameter first. +- func_stripname_result=${3} +- func_stripname_result=${func_stripname_result#"${1}"} +- func_stripname_result=${func_stripname_result%"${2}"} +-} +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=${1%%=*} +- func_opt_split_arg=${1#*=} +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- case ${1} in +- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; +- *) func_lo2o_result=${1} ;; +- esac +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=${1%.*}.lo +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=$(( $* )) +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=${#1} +-} +- +-_LT_EOF +- ;; +- *) # Bourne compatible functions. +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- # Extract subdirectory from the argument. +- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` +- if test "X$func_dirname_result" = "X${1}"; then +- func_dirname_result="${3}" +- else +- func_dirname_result="$func_dirname_result${2}" +- fi +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result=`$ECHO "${1}" | $SED "$basename"` +-} +- +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. 
+-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-# func_strip_suffix prefix name +-func_stripname () +-{ +- case ${2} in +- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; +- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; +- esac +-} +- +-# sed scripts: +-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' +-my_sed_long_arg='1s/^-[^=]*=//' +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` +- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=`expr "$@"` +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` +-} +- +-_LT_EOF +-esac +- +-case $lt_shell_append in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1+=\$2" +-} +-_LT_EOF +- ;; +- *) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1=\$$1\$2" +-} +- +-_LT_EOF +- ;; +- esac +- +- +- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- mv -f "$cfgfile" "$ofile" || ++ sed '$q' "$ltmain" >> "$cfgfile" \ ++ || (rm -f "$cfgfile"; exit 1) ++ ++ if test x"$xsi_shell" = xyes; then ++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ ++func_dirname ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_basename ()$/,/^} # func_basename /c\ ++func_basename ()\ ++{\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ ++func_dirname_and_basename ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ ++func_stripname ()\ ++{\ ++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ ++\ # positional parameters, so assign one to ordinary parameter first.\ ++\ func_stripname_result=${3}\ ++\ func_stripname_result=${func_stripname_result#"${1}"}\ ++\ func_stripname_result=${func_stripname_result%"${2}"}\ ++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ ++func_split_long_opt ()\ ++{\ ++\ func_split_long_opt_name=${1%%=*}\ ++\ func_split_long_opt_arg=${1#*=}\ ++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ ++func_split_short_opt ()\ ++{\ ++\ func_split_short_opt_arg=${1#??}\ ++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ ++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ ++func_lo2o ()\ ++{\ ++\ case ${1} in\ ++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ ++\ *) func_lo2o_result=${1} ;;\ ++\ esac\ ++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_xform ()$/,/^} # func_xform /c\ ++func_xform ()\ ++{\ ++ func_xform_result=${1%.*}.lo\ ++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_arith ()$/,/^} # func_arith /c\ ++func_arith ()\ ++{\ ++ func_arith_result=$(( $* ))\ ++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_len ()$/,/^} # func_len /c\ ++func_len ()\ ++{\ ++ func_len_result=${#1}\ ++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++fi ++ ++if test x"$lt_shell_append" = xyes; then ++ sed -e '/^func_append ()$/,/^} # func_append /c\ ++func_append ()\ ++{\ ++ eval "${1}+=\\${2}"\ ++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ ++func_append_quoted ()\ ++{\ ++\ func_quote_for_eval "${2}"\ ++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ ++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ # Save a `func_append' function call where possible by direct use of '+=' ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++else ++ # Save a `func_append' function call even when '+=' is not available ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++fi ++ ++if test x"$_lt_function_replace_fail" = x":"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 ++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} ++fi ++ ++ ++ mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" + +@@ -19765,12 +20610,12 @@ with_gcc=$GCC_CXX + # Compiler flag to turn off builtin functions. + no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX + +-# How to pass a linker flag through the compiler. +-wl=$lt_lt_prog_compiler_wl_CXX +- + # Additional compiler flags for building library objects. + pic_flag=$lt_lt_prog_compiler_pic_CXX + ++# How to pass a linker flag through the compiler. ++wl=$lt_lt_prog_compiler_wl_CXX ++ + # Compiler flag to prevent dynamic linking. + link_static_flag=$lt_lt_prog_compiler_static_CXX + +@@ -19857,9 +20702,6 @@ inherit_rpath=$inherit_rpath_CXX + # Whether libtool must link a program against all its dependency libraries. + link_all_deplibs=$link_all_deplibs_CXX + +-# Fix the shell variable \$srcfile for the compiler. +-fix_srcfile_path=$lt_fix_srcfile_path_CXX +- + # Set to "yes" if exported symbols are required. + always_export_symbols=$always_export_symbols_CXX + +@@ -19875,6 +20717,9 @@ include_expsyms=$lt_include_expsyms_CXX + # Commands necessary for linking programs (against libraries) with templates. + prelink_cmds=$lt_prelink_cmds_CXX + ++# Commands necessary for finishing linking programs. ++postlink_cmds=$lt_postlink_cmds_CXX ++ + # Specify filename containing input files. + file_list_spec=$lt_file_list_spec_CXX + +diff --git a/libtool.m4 b/libtool.m4 +index 24d13f3440..e45fdc6998 100644 +--- a/libtool.m4 ++++ b/libtool.m4 +@@ -1,7 +1,8 @@ + # libtool.m4 - Configure libtool for the host system. -*-Autoconf-*- + # + # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. ++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, ++# Inc. + # Written by Gordon Matzigkeit, 1996 + # + # This file is free software; the Free Software Foundation gives +@@ -10,7 +11,8 @@ + + m4_define([_LT_COPYING], [dnl + # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. 
++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, ++# Inc. + # Written by Gordon Matzigkeit, 1996 + # + # This file is part of GNU Libtool. +@@ -37,7 +39,7 @@ m4_define([_LT_COPYING], [dnl + # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. + ]) + +-# serial 56 LT_INIT ++# serial 57 LT_INIT + + + # LT_PREREQ(VERSION) +@@ -92,7 +94,8 @@ _LT_SET_OPTIONS([$0], [$1]) + LIBTOOL_DEPS="$ltmain" + + # Always use our own libtool. +-LIBTOOL='$(SHELL) $(top_builddir)/libtool' ++LIBTOOL='$(SHELL) $(top_builddir)' ++LIBTOOL="$LIBTOOL/${host_alias}-libtool" + AC_SUBST(LIBTOOL)dnl + + _LT_SETUP +@@ -166,10 +169,13 @@ _LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl + dnl + m4_require([_LT_FILEUTILS_DEFAULTS])dnl + m4_require([_LT_CHECK_SHELL_FEATURES])dnl ++m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl + m4_require([_LT_CMD_RELOAD])dnl + m4_require([_LT_CHECK_MAGIC_METHOD])dnl ++m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl + m4_require([_LT_CMD_OLD_ARCHIVE])dnl + m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl ++m4_require([_LT_WITH_SYSROOT])dnl + + _LT_CONFIG_LIBTOOL_INIT([ + # See if we are running on zsh, and set the options which allow our +@@ -199,7 +205,7 @@ aix3*) + esac + + # Global variables: +-ofile=libtool ++ofile=${host_alias}-libtool + can_build_shared=yes + + # All known linkers require a `.a' archive for static linking (except MSVC, +@@ -632,7 +638,7 @@ m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl + m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION]) + configured by $[0], generated by m4_PACKAGE_STRING. + +-Copyright (C) 2009 Free Software Foundation, Inc. ++Copyright (C) 2010 Free Software Foundation, Inc. + This config.lt script is free software; the Free Software Foundation + gives unlimited permision to copy, distribute and modify it." + +@@ -746,15 +752,12 @@ _LT_EOF + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? +- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) ++ sed '$q' "$ltmain" >> "$cfgfile" \ ++ || (rm -f "$cfgfile"; exit 1) + +- _LT_PROG_XSI_SHELLFNS ++ _LT_PROG_REPLACE_SHELLFNS + +- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- mv -f "$cfgfile" "$ofile" || ++ mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" + ], +@@ -980,6 +983,8 @@ _LT_EOF + $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD + echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD + $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD ++ echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD ++ $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD + cat > conftest.c << _LT_EOF + int main() { return 0;} + _LT_EOF +@@ -1069,30 +1074,41 @@ m4_defun([_LT_DARWIN_LINKER_FEATURES], + fi + ]) + +-# _LT_SYS_MODULE_PATH_AIX +-# ----------------------- ++# _LT_SYS_MODULE_PATH_AIX([TAGNAME]) ++# ---------------------------------- + # Links a minimal program and checks the executable + # for the system default hardcoded library path. In most cases, + # this is /usr/lib:/lib, but when the MPI compilers are used + # the location of the communication and MPI libs are included too. + # If we don't find anything, use the default library path according + # to the aix ld manual. 
++# Store the results from the different compilers for each TAGNAME. ++# Allow to override them for all tags through lt_cv_aix_libpath. + m4_defun([_LT_SYS_MODULE_PATH_AIX], + [m4_require([_LT_DECL_SED])dnl +-AC_LINK_IFELSE(AC_LANG_PROGRAM,[ +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi],[]) +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])], ++ [AC_LINK_IFELSE([AC_LANG_PROGRAM],[ ++ lt_aix_libpath_sed='[ ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }]' ++ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then ++ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi],[]) ++ if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then ++ _LT_TAGVAR([lt_cv_aix_libpath_], [$1])="/usr/lib:/lib" ++ fi ++ ]) ++ aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1]) ++fi + ])# _LT_SYS_MODULE_PATH_AIX + + +@@ -1117,7 +1133,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO + + AC_MSG_CHECKING([how to print strings]) + # Test print first, because it will be a builtin if present. +-if test "X`print -r -- -n 2>/dev/null`" = X-n && \ ++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ + test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then + ECHO='print -r --' + elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then +@@ -1161,6 +1177,39 @@ _LT_DECL([], [ECHO], [1], [An echo program that protects backslashes]) + ])# _LT_PROG_ECHO_BACKSLASH + + ++# _LT_WITH_SYSROOT ++# ---------------- ++AC_DEFUN([_LT_WITH_SYSROOT], ++[AC_MSG_CHECKING([for sysroot]) ++AC_ARG_WITH([libtool-sysroot], ++[ --with-libtool-sysroot[=DIR] Search for dependent libraries within DIR ++ (or the compiler's sysroot if not specified).], ++[], [with_libtool_sysroot=no]) ++ ++dnl lt_sysroot will always be passed unquoted. We quote it here ++dnl in case the user passed a directory name. 
++lt_sysroot= ++case ${with_libtool_sysroot} in #( ++ yes) ++ if test "$GCC" = yes; then ++ lt_sysroot=`$CC --print-sysroot 2>/dev/null` ++ fi ++ ;; #( ++ /*) ++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` ++ ;; #( ++ no|'') ++ ;; #( ++ *) ++ AC_MSG_RESULT([${with_libtool_sysroot}]) ++ AC_MSG_ERROR([The sysroot must be an absolute path.]) ++ ;; ++esac ++ ++ AC_MSG_RESULT([${lt_sysroot:-no}]) ++_LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl ++[dependent libraries, and in which our libraries should be installed.])]) ++ + # _LT_ENABLE_LOCK + # --------------- + m4_defun([_LT_ENABLE_LOCK], +@@ -1320,14 +1369,47 @@ need_locks="$enable_libtool_lock" + ])# _LT_ENABLE_LOCK + + ++# _LT_PROG_AR ++# ----------- ++m4_defun([_LT_PROG_AR], ++[AC_CHECK_TOOLS(AR, [ar], false) ++: ${AR=ar} ++: ${AR_FLAGS=cru} ++_LT_DECL([], [AR], [1], [The archiver]) ++_LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive]) ++ ++AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file], ++ [lt_cv_ar_at_file=no ++ AC_COMPILE_IFELSE([AC_LANG_PROGRAM], ++ [echo conftest.$ac_objext > conftest.lst ++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD' ++ AC_TRY_EVAL([lt_ar_try]) ++ if test "$ac_status" -eq 0; then ++ # Ensure the archiver fails upon bogus file names. ++ rm -f conftest.$ac_objext libconftest.a ++ AC_TRY_EVAL([lt_ar_try]) ++ if test "$ac_status" -ne 0; then ++ lt_cv_ar_at_file=@ ++ fi ++ fi ++ rm -f conftest.* libconftest.a ++ ]) ++ ]) ++ ++if test "x$lt_cv_ar_at_file" = xno; then ++ archiver_list_spec= ++else ++ archiver_list_spec=$lt_cv_ar_at_file ++fi ++_LT_DECL([], [archiver_list_spec], [1], ++ [How to feed a file listing to the archiver]) ++])# _LT_PROG_AR ++ ++ + # _LT_CMD_OLD_ARCHIVE + # ------------------- + m4_defun([_LT_CMD_OLD_ARCHIVE], +-[AC_CHECK_TOOL(AR, ar, false) +-test -z "$AR" && AR=ar +-test -z "$AR_FLAGS" && AR_FLAGS=cru +-_LT_DECL([], [AR], [1], [The archiver]) +-_LT_DECL([], [AR_FLAGS], [1]) ++[_LT_PROG_AR + + AC_CHECK_TOOL(STRIP, strip, :) + test -z "$STRIP" && STRIP=: +@@ -1623,7 +1705,7 @@ else + lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 + lt_status=$lt_dlunknown + cat > conftest.$ac_ext <<_LT_EOF +-[#line __oline__ "configure" ++[#line $LINENO "configure" + #include "confdefs.h" + + #if HAVE_DLFCN_H +@@ -1667,10 +1749,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. 
*/ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -2210,8 +2292,9 @@ cygwin* | mingw* | pw32* | cegcc*) + need_version=no + need_lib_prefix=no + +- case $GCC,$host_os in +- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) ++ case $GCC,$cc_basename in ++ yes,*) ++ # gcc + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ +@@ -2244,13 +2327,71 @@ m4_if([$1], [],[ + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' + ;; + esac ++ dynamic_linker='Win32 ld.exe' ++ ;; ++ ++ *,cl*) ++ # Native MSVC ++ libname_spec='$name' ++ soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' ++ library_names_spec='${libname}.dll.lib' ++ ++ case $build_os in ++ mingw*) ++ sys_lib_search_path_spec= ++ lt_save_ifs=$IFS ++ IFS=';' ++ for lt_path in $LIB ++ do ++ IFS=$lt_save_ifs ++ # Let DOS variable expansion print the short 8.3 style file name. ++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` ++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" ++ done ++ IFS=$lt_save_ifs ++ # Convert to MSYS style. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'` ++ ;; ++ cygwin*) ++ # Convert to unix form, then to dos form, then back to unix form ++ # but this time dos style (no spaces!) so that the unix form looks ++ # like /cygdrive/c/PROGRA~1:/cygdr... ++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` ++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` ++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ ;; ++ *) ++ sys_lib_search_path_spec="$LIB" ++ if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then ++ # It is most probably a Windows format PATH. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` ++ else ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ fi ++ # FIXME: find the short name or the path components, as spaces are ++ # common. (e.g. "Program Files" -> "PROGRA~1") ++ ;; ++ esac ++ ++ # DLL is installed to $(libdir)/../bin by postinstall_cmds ++ postinstall_cmds='base_file=`basename \${file}`~ ++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ ++ dldir=$destdir/`dirname \$dlpath`~ ++ test -d \$dldir || mkdir -p \$dldir~ ++ $install_prog $dir/$dlname \$dldir/$dlname' ++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ ++ dlpath=$dir/\$dldll~ ++ $RM \$dlpath' ++ shlibpath_overrides_runpath=yes ++ dynamic_linker='Win32 link.exe' + ;; + + *) ++ # Assume MSVC wrapper + library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib' ++ dynamic_linker='Win32 ld.exe' + ;; + esac +- dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . 
and the directory the executable is in + shlibpath_var=PATH + ;; +@@ -2342,7 +2483,7 @@ haiku*) + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LIBRARY_PATH + shlibpath_overrides_runpath=yes +- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' ++ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' + hardcode_into_libs=yes + ;; + +@@ -2950,6 +3091,11 @@ case $reload_flag in + esac + reload_cmds='$LD$reload_flag -o $output$reload_objs' + case $host_os in ++ cygwin* | mingw* | pw32* | cegcc*) ++ if test "$GCC" != yes; then ++ reload_cmds=false ++ fi ++ ;; + darwin*) + if test "$GCC" = yes; then + reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' +@@ -3016,7 +3162,8 @@ mingw* | pw32*) + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else +- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' ++ # Keep this pattern in sync with the one in func_win32_libid. ++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; +@@ -3167,6 +3314,21 @@ tpf*) + ;; + esac + ]) ++ ++file_magic_glob= ++want_nocaseglob=no ++if test "$build" = "$host"; then ++ case $host_os in ++ mingw* | pw32*) ++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then ++ want_nocaseglob=yes ++ else ++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"` ++ fi ++ ;; ++ esac ++fi ++ + file_magic_cmd=$lt_cv_file_magic_cmd + deplibs_check_method=$lt_cv_deplibs_check_method + test -z "$deplibs_check_method" && deplibs_check_method=unknown +@@ -3174,7 +3336,11 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown + _LT_DECL([], [deplibs_check_method], [1], + [Method to check whether dependent libraries are shared objects]) + _LT_DECL([], [file_magic_cmd], [1], +- [Command to use when deplibs_check_method == "file_magic"]) ++ [Command to use when deplibs_check_method = "file_magic"]) ++_LT_DECL([], [file_magic_glob], [1], ++ [How to find potential files when deplibs_check_method = "file_magic"]) ++_LT_DECL([], [want_nocaseglob], [1], ++ [Find potential files using nocaseglob when deplibs_check_method = "file_magic"]) + ])# _LT_CHECK_MAGIC_METHOD + + +@@ -3277,6 +3443,67 @@ dnl aclocal-1.4 backwards compatibility: + dnl AC_DEFUN([AM_PROG_NM], []) + dnl AC_DEFUN([AC_PROG_NM], []) + ++# _LT_CHECK_SHAREDLIB_FROM_LINKLIB ++# -------------------------------- ++# how to determine the name of the shared library ++# associated with a specific link library. 
++# -- PORTME fill in with the dynamic library characteristics ++m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB], ++[m4_require([_LT_DECL_EGREP]) ++m4_require([_LT_DECL_OBJDUMP]) ++m4_require([_LT_DECL_DLLTOOL]) ++AC_CACHE_CHECK([how to associate runtime and link libraries], ++lt_cv_sharedlib_from_linklib_cmd, ++[lt_cv_sharedlib_from_linklib_cmd='unknown' ++ ++case $host_os in ++cygwin* | mingw* | pw32* | cegcc*) ++ # two different shell functions defined in ltmain.sh ++ # decide which to use based on capabilities of $DLLTOOL ++ case `$DLLTOOL --help 2>&1` in ++ *--identify-strict*) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ++ ;; ++ *) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ++ ;; ++ esac ++ ;; ++*) ++ # fallback: assume linklib IS sharedlib ++ lt_cv_sharedlib_from_linklib_cmd="$ECHO" ++ ;; ++esac ++]) ++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd ++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO ++ ++_LT_DECL([], [sharedlib_from_linklib_cmd], [1], ++ [Command to associate shared and link libraries]) ++])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB ++ ++ ++# _LT_PATH_MANIFEST_TOOL ++# ---------------------- ++# locate the manifest tool ++m4_defun([_LT_PATH_MANIFEST_TOOL], ++[AC_CHECK_TOOL(MANIFEST_TOOL, mt, :) ++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt ++AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool], ++ [lt_cv_path_mainfest_tool=no ++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD ++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out ++ cat conftest.err >&AS_MESSAGE_LOG_FD ++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then ++ lt_cv_path_mainfest_tool=yes ++ fi ++ rm -f conftest*]) ++if test "x$lt_cv_path_mainfest_tool" != xyes; then ++ MANIFEST_TOOL=: ++fi ++_LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl ++])# _LT_PATH_MANIFEST_TOOL ++ + + # LT_LIB_M + # -------- +@@ -3403,8 +3630,8 @@ esac + lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + + # Transform an extracted symbol line into symbol name and symbol address +-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" +-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\)[[ ]]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + + # Handle CRLF in mingw tool chain + opt_cr= +@@ -3440,6 +3667,7 @@ for ac_symprfx in "" "_"; do + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi ++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" + + # Check to see that the pipe works correctly. 
+ pipe_works=no +@@ -3473,6 +3701,18 @@ _LT_EOF + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext ++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ ++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) ++/* DATA imports from DLLs on WIN32 con't be const, because runtime ++ relocations are performed -- see ld's documentation on pseudo-relocs. */ ++# define LT@&t@_DLSYM_CONST ++#elif defined(__osf__) ++/* This system does not cope well with relocations in const data. */ ++# define LT@&t@_DLSYM_CONST ++#else ++# define LT@&t@_DLSYM_CONST const ++#endif ++ + #ifdef __cplusplus + extern "C" { + #endif +@@ -3484,7 +3724,7 @@ _LT_EOF + cat <<_LT_EOF >> conftest.$ac_ext + + /* The mapping between symbol names and symbols. */ +-const struct { ++LT@&t@_DLSYM_CONST struct { + const char *name; + void *address; + } +@@ -3510,15 +3750,15 @@ static const void *lt_preloaded_setup() { + _LT_EOF + # Now try linking the two files. + mv conftest.$ac_objext conftstm.$ac_objext +- lt_save_LIBS="$LIBS" +- lt_save_CFLAGS="$CFLAGS" ++ lt_globsym_save_LIBS=$LIBS ++ lt_globsym_save_CFLAGS=$CFLAGS + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" + if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then + pipe_works=yes + fi +- LIBS="$lt_save_LIBS" +- CFLAGS="$lt_save_CFLAGS" ++ LIBS=$lt_globsym_save_LIBS ++ CFLAGS=$lt_globsym_save_CFLAGS + else + echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD + fi +@@ -3551,6 +3791,13 @@ else + AC_MSG_RESULT(ok) + fi + ++# Response file support. ++if test "$lt_cv_nm_interface" = "MS dumpbin"; then ++ nm_file_list_spec='@' ++elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then ++ nm_file_list_spec='@' ++fi ++ + _LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1], + [Take the output of nm and produce a listing of raw symbols and C names]) + _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1], +@@ -3561,6 +3808,8 @@ _LT_DECL([global_symbol_to_c_name_address], + _LT_DECL([global_symbol_to_c_name_address_lib_prefix], + [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1], + [Transform the output of nm in a C name address pair when lib prefix is needed]) ++_LT_DECL([], [nm_file_list_spec], [1], ++ [Specify filename containing input files for $NM]) + ]) # _LT_CMD_GLOBAL_SYMBOLS + + +@@ -3572,7 +3821,6 @@ _LT_TAGVAR(lt_prog_compiler_wl, $1)= + _LT_TAGVAR(lt_prog_compiler_pic, $1)= + _LT_TAGVAR(lt_prog_compiler_static, $1)= + +-AC_MSG_CHECKING([for $compiler option to produce PIC]) + m4_if([$1], [CXX], [ + # C++ specific cases for pic, static, wl, etc. + if test "$GXX" = yes; then +@@ -3678,6 +3926,12 @@ m4_if([$1], [CXX], [ + ;; + esac + ;; ++ mingw* | cygwin* | os2* | pw32* | cegcc*) ++ # This hack is so that the source file can tell whether it is being ++ # built for inclusion in a dll (and should export symbols for example). 
++ m4_if([$1], [GCJ], [], ++ [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) ++ ;; + dgux*) + case $cc_basename in + ec++*) +@@ -3830,7 +4084,7 @@ m4_if([$1], [CXX], [ + ;; + solaris*) + case $cc_basename in +- CC*) ++ CC* | sunCC*) + # Sun C++ 4.2, 5.x and Centerline C++ + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' +@@ -4053,6 +4307,12 @@ m4_if([$1], [CXX], [ + _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared' + _LT_TAGVAR(lt_prog_compiler_static, $1)='--static' + ;; ++ nagfor*) ++ # NAG Fortran compiler ++ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,' ++ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' ++ _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ++ ;; + pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) +@@ -4115,7 +4375,7 @@ m4_if([$1], [CXX], [ + _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' + _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' + case $cc_basename in +- f77* | f90* | f95*) ++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';; + *) + _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';; +@@ -4172,9 +4432,11 @@ case $host_os in + _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])" + ;; + esac +-AC_MSG_RESULT([$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) +-_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], +- [How to pass a linker flag through the compiler]) ++ ++AC_CACHE_CHECK([for $compiler option to produce PIC], ++ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)], ++ [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) ++_LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1) + + # + # Check to make sure the PIC flag actually works. +@@ -4193,6 +4455,8 @@ fi + _LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1], + [Additional compiler flags for building library objects]) + ++_LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], ++ [How to pass a linker flag through the compiler]) + # + # Check to make sure the static flag actually works. + # +@@ -4213,6 +4477,7 @@ _LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1], + m4_defun([_LT_LINKER_SHLIBS], + [AC_REQUIRE([LT_PATH_LD])dnl + AC_REQUIRE([LT_PATH_NM])dnl ++m4_require([_LT_PATH_MANIFEST_TOOL])dnl + m4_require([_LT_FILEUTILS_DEFAULTS])dnl + m4_require([_LT_DECL_EGREP])dnl + m4_require([_LT_DECL_SED])dnl +@@ -4221,6 +4486,7 @@ m4_require([_LT_TAG_COMPILER])dnl + AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) + m4_if([$1], [CXX], [ + _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ++ _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] + case $host_os in + aix[[4-9]]*) + # If we're using GNU nm, then we don't want the "-C" option. 
+@@ -4235,15 +4501,20 @@ m4_if([$1], [CXX], [ + ;; + pw32*) + _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds" +- ;; ++ ;; + cygwin* | mingw* | cegcc*) +- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;/^.*[[ ]]__nm__/s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' +- ;; ++ case $cc_basename in ++ cl*) ;; ++ *) ++ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' ++ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] ++ ;; ++ esac ++ ;; + *) + _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' +- ;; ++ ;; + esac +- _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] + ], [ + runpath_var= + _LT_TAGVAR(allow_undefined_flag, $1)= +@@ -4411,7 +4682,8 @@ _LT_EOF + _LT_TAGVAR(allow_undefined_flag, $1)=unsupported + _LT_TAGVAR(always_export_symbols, $1)=no + _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes +- _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' ++ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' ++ _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +@@ -4510,12 +4782,12 @@ _LT_EOF + _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= + _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='-rpath $libdir' +- _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ +- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' ++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + ;; + esac +@@ -4529,8 +4801,8 @@ _LT_EOF + _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs 
$deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + ;; + +@@ -4548,8 +4820,8 @@ _LT_EOF + + _LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi +@@ -4595,8 +4867,8 @@ _LT_EOF + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + _LT_TAGVAR(ld_shlibs, $1)=no + fi +@@ -4726,7 +4998,7 @@ _LT_EOF + _LT_TAGVAR(allow_undefined_flag, $1)='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. +- _LT_SYS_MODULE_PATH_AIX ++ _LT_SYS_MODULE_PATH_AIX([$1]) + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" + else +@@ -4737,7 +5009,7 @@ _LT_EOF + else + # Determine the default libpath from the value encoded in an + # empty executable. +- _LT_SYS_MODULE_PATH_AIX ++ _LT_SYS_MODULE_PATH_AIX([$1]) + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, + # -berok will link without error, but may produce a broken library. +@@ -4781,20 +5053,63 @@ _LT_EOF + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. +- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' +- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported +- # Tell ltmain to make .lib files, not .a files. +- libext=lib +- # Tell ltmain to make .dll files, not .so files. +- shrext_cmds=".dll" +- # FIXME: Setting linknames here is a bad hack. +- _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' +- # The linker will automatically build a .lib file if we build a DLL. 
+- _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' +- # FIXME: Should let the user specify the lib program. +- _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' +- _LT_TAGVAR(fix_srcfile_path, $1)='`cygpath -w "$srcfile"`' +- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ++ case $cc_basename in ++ cl*) ++ # Native MSVC ++ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' ++ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported ++ _LT_TAGVAR(always_export_symbols, $1)=yes ++ _LT_TAGVAR(file_list_spec, $1)='@' ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' ++ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; ++ else ++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; ++ fi~ ++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ ++ linknames=' ++ # The linker will not automatically build a static lib if we build a DLL. ++ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' ++ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ++ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' ++ # Don't use ranlib ++ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' ++ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ ++ lt_tool_outputfile="@TOOL_OUTPUT@"~ ++ case $lt_outputfile in ++ *.exe|*.EXE) ;; ++ *) ++ lt_outputfile="$lt_outputfile.exe" ++ lt_tool_outputfile="$lt_tool_outputfile.exe" ++ ;; ++ esac~ ++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then ++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; ++ $RM "$lt_outputfile.manifest"; ++ fi' ++ ;; ++ *) ++ # Assume MSVC wrapper ++ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' ++ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' ++ # The linker will automatically build a .lib file if we build a DLL. ++ _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' ++ # FIXME: Should let the user specify the lib program. ++ _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' ++ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ++ ;; ++ esac + ;; + + darwin* | rhapsody*) +@@ -4828,7 +5143,7 @@ _LT_EOF + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
+ freebsd* | dragonfly*) +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' + _LT_TAGVAR(hardcode_direct, $1)=yes + _LT_TAGVAR(hardcode_shlibpath_var, $1)=no +@@ -4836,7 +5151,7 @@ _LT_EOF + + hpux9*) + if test "$GCC" = yes; then +- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi +@@ -4852,7 +5167,7 @@ _LT_EOF + + hpux10*) + if test "$GCC" = yes && test "$with_gnu_ld" = no; then +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi +@@ -4876,10 +5191,10 @@ _LT_EOF + _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else +@@ -4926,16 +5241,31 @@ _LT_EOF + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. 
+- save_LDFLAGS="$LDFLAGS" +- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" +- AC_LINK_IFELSE(int foo(void) {}, +- _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' +- ) +- LDFLAGS="$save_LDFLAGS" ++ # This should be the same for all languages, so no per-tag cache variable. ++ AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol], ++ [lt_cv_irix_exported_symbol], ++ [save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" ++ AC_LINK_IFELSE( ++ [AC_LANG_SOURCE( ++ [AC_LANG_CASE([C], [[int foo (void) { return 0; }]], ++ [C++], [[int foo (void) { return 0; }]], ++ [Fortran 77], [[ ++ subroutine foo ++ end]], ++ [Fortran], [[ ++ subroutine foo ++ end]])])], ++ [lt_cv_irix_exported_symbol=yes], ++ [lt_cv_irix_exported_symbol=no]) ++ LDFLAGS="$save_LDFLAGS"]) ++ if test "$lt_cv_irix_exported_symbol" = yes; then ++ _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' ++ fi + else + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' +@@ -5020,7 +5350,7 @@ _LT_EOF + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' + else + _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' +@@ -5039,9 +5369,9 @@ _LT_EOF + _LT_TAGVAR(no_undefined_flag, $1)=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ +- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM 
$lib.exp' ++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) +@@ -5313,8 +5643,6 @@ _LT_TAGDECL([], [inherit_rpath], [0], + to runtime path list]) + _LT_TAGDECL([], [link_all_deplibs], [0], + [Whether libtool must link a program against all its dependency libraries]) +-_LT_TAGDECL([], [fix_srcfile_path], [1], +- [Fix the shell variable $srcfile for the compiler]) + _LT_TAGDECL([], [always_export_symbols], [0], + [Set to "yes" if exported symbols are required]) + _LT_TAGDECL([], [export_symbols_cmds], [2], +@@ -5325,6 +5653,8 @@ _LT_TAGDECL([], [include_expsyms], [1], + [Symbols that must always be exported]) + _LT_TAGDECL([], [prelink_cmds], [2], + [Commands necessary for linking programs (against libraries) with templates]) ++_LT_TAGDECL([], [postlink_cmds], [2], ++ [Commands necessary for finishing linking programs]) + _LT_TAGDECL([], [file_list_spec], [1], + [Specify filename containing input files]) + dnl FIXME: Not yet implemented +@@ -5426,6 +5756,7 @@ CC="$lt_save_CC" + m4_defun([_LT_LANG_CXX_CONFIG], + [m4_require([_LT_FILEUTILS_DEFAULTS])dnl + m4_require([_LT_DECL_EGREP])dnl ++m4_require([_LT_PATH_MANIFEST_TOOL])dnl + if test -n "$CXX" && ( test "X$CXX" != "Xno" && + ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || + (test "X$CXX" != "Xg++"))) ; then +@@ -5487,6 +5818,7 @@ if test "$_lt_caught_CXX_error" != yes; then + + # Allow CC to be a program name with arguments. + lt_save_CC=$CC ++ lt_save_CFLAGS=$CFLAGS + lt_save_LD=$LD + lt_save_GCC=$GCC + GCC=$GXX +@@ -5504,6 +5836,7 @@ if test "$_lt_caught_CXX_error" != yes; then + fi + test -z "${LDCXX+set}" || LD=$LDCXX + CC=${CXX-"c++"} ++ CFLAGS=$CXXFLAGS + compiler=$CC + _LT_TAGVAR(compiler, $1)=$CC + _LT_CC_BASENAME([$compiler]) +@@ -5667,7 +6000,7 @@ if test "$_lt_caught_CXX_error" != yes; then + _LT_TAGVAR(allow_undefined_flag, $1)='-berok' + # Determine the default libpath from the value encoded in an empty + # executable. +- _LT_SYS_MODULE_PATH_AIX ++ _LT_SYS_MODULE_PATH_AIX([$1]) + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" + + _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" +@@ -5679,7 +6012,7 @@ if test "$_lt_caught_CXX_error" != yes; then + else + # Determine the default libpath from the value encoded in an + # empty executable. +- _LT_SYS_MODULE_PATH_AIX ++ _LT_SYS_MODULE_PATH_AIX([$1]) + _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, + # -berok will link without error, but may produce a broken library. +@@ -5721,29 +6054,75 @@ if test "$_lt_caught_CXX_error" != yes; then + ;; + + cygwin* | mingw* | pw32* | cegcc*) +- # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, +- # as there is no search path for DLLs. 
+- _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' +- _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' +- _LT_TAGVAR(allow_undefined_flag, $1)=unsupported +- _LT_TAGVAR(always_export_symbols, $1)=no +- _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes +- +- if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +- # If the export-symbols file already is a .def file (1st line +- # is EXPORTS), use it as is; otherwise, prepend... +- _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then +- cp $export_symbols $output_objdir/$soname.def; +- else +- echo EXPORTS > $output_objdir/$soname.def; +- cat $export_symbols >> $output_objdir/$soname.def; +- fi~ +- $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +- else +- _LT_TAGVAR(ld_shlibs, $1)=no +- fi +- ;; ++ case $GXX,$cc_basename in ++ ,cl* | no,cl*) ++ # Native MSVC ++ # hardcode_libdir_flag_spec is actually meaningless, as there is ++ # no search path for DLLs. ++ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' ++ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported ++ _LT_TAGVAR(always_export_symbols, $1)=yes ++ _LT_TAGVAR(file_list_spec, $1)='@' ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' ++ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ $SED -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; ++ else ++ $SED -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; ++ fi~ ++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ ++ linknames=' ++ # The linker will not automatically build a static lib if we build a DLL. ++ # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' ++ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ++ # Don't use ranlib ++ _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' ++ _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ ++ lt_tool_outputfile="@TOOL_OUTPUT@"~ ++ case $lt_outputfile in ++ *.exe|*.EXE) ;; ++ *) ++ lt_outputfile="$lt_outputfile.exe" ++ lt_tool_outputfile="$lt_tool_outputfile.exe" ++ ;; ++ esac~ ++ func_to_tool_file "$lt_outputfile"~ ++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then ++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; ++ $RM "$lt_outputfile.manifest"; ++ fi' ++ ;; ++ *) ++ # g++ ++ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, ++ # as there is no search path for DLLs. 
++ _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' ++ _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-all-symbols' ++ _LT_TAGVAR(allow_undefined_flag, $1)=unsupported ++ _LT_TAGVAR(always_export_symbols, $1)=no ++ _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ++ ++ if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' ++ # If the export-symbols file already is a .def file (1st line ++ # is EXPORTS), use it as is; otherwise, prepend... ++ _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ cp $export_symbols $output_objdir/$soname.def; ++ else ++ echo EXPORTS > $output_objdir/$soname.def; ++ cat $export_symbols >> $output_objdir/$soname.def; ++ fi~ ++ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' ++ else ++ _LT_TAGVAR(ld_shlibs, $1)=no ++ fi ++ ;; ++ esac ++ ;; + darwin* | rhapsody*) + _LT_DARWIN_LINKER_FEATURES($1) + ;; +@@ -5818,7 +6197,7 @@ if test "$_lt_caught_CXX_error" != yes; then + ;; + *) + if test "$GXX" = yes; then +- _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + # FIXME: insert proper C++ library support + _LT_TAGVAR(ld_shlibs, $1)=no +@@ -5889,10 +6268,10 @@ if test "$_lt_caught_CXX_error" != yes; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + ia64*) +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + *) +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' + ;; + esac + fi +@@ -5933,9 +6312,9 @@ if test "$_lt_caught_CXX_error" != yes; then + *) + if test "$GXX" = yes; then + if test "$with_gnu_ld" = no; then +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects 
$libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + else +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` -o $lib' + fi + fi + _LT_TAGVAR(link_all_deplibs, $1)=yes +@@ -6005,20 +6384,20 @@ if test "$_lt_caught_CXX_error" != yes; then + _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ +- compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"' ++ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' + _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ +- $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~ ++ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ + $RANLIB $oldlib' + _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ +- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' ++ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~ + rm -rf $tpldir~ + $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ +- $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' ++ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' + ;; + *) # Version 6 and above use weak symbols + _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' +@@ -6213,7 +6592,7 @@ if test "$_lt_caught_CXX_error" != yes; then + _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + ;; + *) +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ 
_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + ;; + esac + +@@ -6259,7 +6638,7 @@ if test "$_lt_caught_CXX_error" != yes; then + + solaris*) + case $cc_basename in +- CC*) ++ CC* | sunCC*) + # Sun C++ 4.2, 5.x and Centerline C++ + _LT_TAGVAR(archive_cmds_need_lc,$1)=yes + _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' +@@ -6300,9 +6679,9 @@ if test "$_lt_caught_CXX_error" != yes; then + if test "$GXX" = yes && test "$with_gnu_ld" = no; then + _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs' + if $CC --version | $GREP -v '^2\.7' > /dev/null; then +- _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' ++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' + _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ +- $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' ++ $CC -shared $pic_flag -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' + + # Commands to make compiler produce verbose output that lists + # what "hidden" libraries, object files and flags are used when +@@ -6431,6 +6810,7 @@ if test "$_lt_caught_CXX_error" != yes; then + fi # test -n "$compiler" + + CC=$lt_save_CC ++ CFLAGS=$lt_save_CFLAGS + LDCXX=$LD + LD=$lt_save_LD + GCC=$lt_save_GCC +@@ -6445,6 +6825,29 @@ AC_LANG_POP + ])# _LT_LANG_CXX_CONFIG + + ++# _LT_FUNC_STRIPNAME_CNF ++# ---------------------- ++# func_stripname_cnf prefix suffix name ++# strip PREFIX and SUFFIX off of NAME. ++# PREFIX and SUFFIX must not contain globbing or regex special ++# characters, hashes, percent signs, but SUFFIX may contain a leading ++# dot (in which case that matches only a dot). ++# ++# This function is identical to the (non-XSI) version of func_stripname, ++# except this one can be used by m4 code that may be executed by configure, ++# rather than the libtool script. ++m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl ++AC_REQUIRE([_LT_DECL_SED]) ++AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH]) ++func_stripname_cnf () ++{ ++ case ${2} in ++ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; ++ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; ++ esac ++} # func_stripname_cnf ++])# _LT_FUNC_STRIPNAME_CNF ++ + # _LT_SYS_HIDDEN_LIBDEPS([TAGNAME]) + # --------------------------------- + # Figure out "hidden" library dependencies from verbose +@@ -6453,6 +6856,7 @@ AC_LANG_POP + # objects, libraries and library flags. 
+ m4_defun([_LT_SYS_HIDDEN_LIBDEPS], + [m4_require([_LT_FILEUTILS_DEFAULTS])dnl ++AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl + # Dependencies to place before and after the object being linked: + _LT_TAGVAR(predep_objects, $1)= + _LT_TAGVAR(postdep_objects, $1)= +@@ -6503,6 +6907,13 @@ public class foo { + }; + _LT_EOF + ]) ++ ++_lt_libdeps_save_CFLAGS=$CFLAGS ++case "$CC $CFLAGS " in #( ++*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; ++*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; ++esac ++ + dnl Parse the compiler output and extract the necessary + dnl objects, libraries and library flags. + if AC_TRY_EVAL(ac_compile); then +@@ -6514,7 +6925,7 @@ if AC_TRY_EVAL(ac_compile); then + pre_test_object_deps_done=no + + for p in `eval "$output_verbose_link_cmd"`; do +- case $p in ++ case ${prev}${p} in + + -L* | -R* | -l*) + # Some compilers place space between "-{L,R}" and the path. +@@ -6523,13 +6934,22 @@ if AC_TRY_EVAL(ac_compile); then + test $p = "-R"; then + prev=$p + continue +- else +- prev= + fi + ++ # Expand the sysroot to ease extracting the directories later. ++ if test -z "$prev"; then ++ case $p in ++ -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; ++ -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; ++ -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; ++ esac ++ fi ++ case $p in ++ =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; ++ esac + if test "$pre_test_object_deps_done" = no; then +- case $p in +- -L* | -R*) ++ case ${prev} in ++ -L | -R) + # Internal compiler library paths should come after those + # provided the user. The postdeps already come after the + # user supplied libs so there is no need to process them. +@@ -6549,8 +6969,10 @@ if AC_TRY_EVAL(ac_compile); then + _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}" + fi + fi ++ prev= + ;; + ++ *.lto.$objext) ;; # Ignore GCC LTO objects + *.$objext) + # This assumes that the test object file only shows up + # once in the compiler output. +@@ -6586,6 +7008,7 @@ else + fi + + $RM -f confest.$objext ++CFLAGS=$_lt_libdeps_save_CFLAGS + + # PORTME: override above test on systems where it is broken + m4_if([$1], [CXX], +@@ -6622,7 +7045,7 @@ linux*) + + solaris*) + case $cc_basename in +- CC*) ++ CC* | sunCC*) + # The more standards-conforming stlport4 library is + # incompatible with the Cstd library. Avoid specifying + # it if it's in CXXFLAGS. Ignore libCrun as +@@ -6735,7 +7158,9 @@ if test "$_lt_disable_F77" != yes; then + # Allow CC to be a program name with arguments. + lt_save_CC="$CC" + lt_save_GCC=$GCC ++ lt_save_CFLAGS=$CFLAGS + CC=${F77-"f77"} ++ CFLAGS=$FFLAGS + compiler=$CC + _LT_TAGVAR(compiler, $1)=$CC + _LT_CC_BASENAME([$compiler]) +@@ -6789,6 +7214,7 @@ if test "$_lt_disable_F77" != yes; then + + GCC=$lt_save_GCC + CC="$lt_save_CC" ++ CFLAGS="$lt_save_CFLAGS" + fi # test "$_lt_disable_F77" != yes + + AC_LANG_POP +@@ -6865,7 +7291,9 @@ if test "$_lt_disable_FC" != yes; then + # Allow CC to be a program name with arguments. 
+ lt_save_CC="$CC" + lt_save_GCC=$GCC ++ lt_save_CFLAGS=$CFLAGS + CC=${FC-"f95"} ++ CFLAGS=$FCFLAGS + compiler=$CC + GCC=$ac_cv_fc_compiler_gnu + +@@ -6921,7 +7349,8 @@ if test "$_lt_disable_FC" != yes; then + fi # test -n "$compiler" + + GCC=$lt_save_GCC +- CC="$lt_save_CC" ++ CC=$lt_save_CC ++ CFLAGS=$lt_save_CFLAGS + fi # test "$_lt_disable_FC" != yes + + AC_LANG_POP +@@ -6958,10 +7387,12 @@ _LT_COMPILER_BOILERPLATE + _LT_LINKER_BOILERPLATE + + # Allow CC to be a program name with arguments. +-lt_save_CC="$CC" ++lt_save_CC=$CC ++lt_save_CFLAGS=$CFLAGS + lt_save_GCC=$GCC + GCC=yes + CC=${GCJ-"gcj"} ++CFLAGS=$GCJFLAGS + compiler=$CC + _LT_TAGVAR(compiler, $1)=$CC + _LT_TAGVAR(LD, $1)="$LD" +@@ -6992,7 +7423,8 @@ fi + AC_LANG_RESTORE + + GCC=$lt_save_GCC +-CC="$lt_save_CC" ++CC=$lt_save_CC ++CFLAGS=$lt_save_CFLAGS + ])# _LT_LANG_GCJ_CONFIG + + +@@ -7027,9 +7459,11 @@ _LT_LINKER_BOILERPLATE + + # Allow CC to be a program name with arguments. + lt_save_CC="$CC" ++lt_save_CFLAGS=$CFLAGS + lt_save_GCC=$GCC + GCC= + CC=${RC-"windres"} ++CFLAGS= + compiler=$CC + _LT_TAGVAR(compiler, $1)=$CC + _LT_CC_BASENAME([$compiler]) +@@ -7042,7 +7476,8 @@ fi + + GCC=$lt_save_GCC + AC_LANG_RESTORE +-CC="$lt_save_CC" ++CC=$lt_save_CC ++CFLAGS=$lt_save_CFLAGS + ])# _LT_LANG_RC_CONFIG + + +@@ -7101,6 +7536,15 @@ _LT_DECL([], [OBJDUMP], [1], [An object symbol dumper]) + AC_SUBST([OBJDUMP]) + ]) + ++# _LT_DECL_DLLTOOL ++# ---------------- ++# Ensure DLLTOOL variable is set. ++m4_defun([_LT_DECL_DLLTOOL], ++[AC_CHECK_TOOL(DLLTOOL, dlltool, false) ++test -z "$DLLTOOL" && DLLTOOL=dlltool ++_LT_DECL([], [DLLTOOL], [1], [DLL creation program]) ++AC_SUBST([DLLTOOL]) ++]) + + # _LT_DECL_SED + # ------------ +@@ -7194,8 +7638,8 @@ m4_defun([_LT_CHECK_SHELL_FEATURES], + # Try some XSI features + xsi_shell=no + ( _lt_dummy="a/b/c" +- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ +- = c,a/b,, \ ++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ ++ = c,a/b,b/c, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +@@ -7234,206 +7678,162 @@ _LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl + ])# _LT_CHECK_SHELL_FEATURES + + +-# _LT_PROG_XSI_SHELLFNS +-# --------------------- +-# Bourne and XSI compatible variants of some useful shell functions. +-m4_defun([_LT_PROG_XSI_SHELLFNS], +-[case $xsi_shell in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result="${1##*/}" +-} +- +-# func_dirname_and_basename file append nondir_replacement +-# perform func_basename and func_dirname in a single function +-# call: +-# dirname: Compute the dirname of FILE. If nonempty, +-# add APPEND to the result, otherwise set result +-# to NONDIR_REPLACEMENT. +-# value returned in "$func_dirname_result" +-# basename: Compute filename of FILE. +-# value retuned in "$func_basename_result" +-# Implementation must be kept synchronized with func_dirname +-# and func_basename. For efficiency, we do not delegate to +-# those functions but instead duplicate the functionality here. 
+-func_dirname_and_basename () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +- func_basename_result="${1##*/}" +-} +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-func_stripname () +-{ +- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are +- # positional parameters, so assign one to ordinary parameter first. +- func_stripname_result=${3} +- func_stripname_result=${func_stripname_result#"${1}"} +- func_stripname_result=${func_stripname_result%"${2}"} +-} +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=${1%%=*} +- func_opt_split_arg=${1#*=} +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- case ${1} in +- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; +- *) func_lo2o_result=${1} ;; +- esac +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=${1%.*}.lo +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=$(( $[*] )) +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=${#1} +-} ++# _LT_PROG_FUNCTION_REPLACE (FUNCNAME, REPLACEMENT-BODY) ++# ------------------------------------------------------ ++# In `$cfgfile', look for function FUNCNAME delimited by `^FUNCNAME ()$' and ++# '^} FUNCNAME ', and replace its body with REPLACEMENT-BODY. ++m4_defun([_LT_PROG_FUNCTION_REPLACE], ++[dnl { ++sed -e '/^$1 ()$/,/^} # $1 /c\ ++$1 ()\ ++{\ ++m4_bpatsubsts([$2], [$], [\\], [^\([ ]\)], [\\\1]) ++} # Extended-shell $1 implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++]) + +-_LT_EOF +- ;; +- *) # Bourne compatible functions. +- cat << \_LT_EOF >> "$cfgfile" + +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- # Extract subdirectory from the argument. +- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` +- if test "X$func_dirname_result" = "X${1}"; then +- func_dirname_result="${3}" +- else +- func_dirname_result="$func_dirname_result${2}" +- fi +-} ++# _LT_PROG_REPLACE_SHELLFNS ++# ------------------------- ++# Replace existing portable implementations of several shell functions with ++# equivalent extended shell implementations where those features are available.. ++m4_defun([_LT_PROG_REPLACE_SHELLFNS], ++[if test x"$xsi_shell" = xyes; then ++ _LT_PROG_FUNCTION_REPLACE([func_dirname], [dnl ++ case ${1} in ++ */*) func_dirname_result="${1%/*}${2}" ;; ++ * ) func_dirname_result="${3}" ;; ++ esac]) ++ ++ _LT_PROG_FUNCTION_REPLACE([func_basename], [dnl ++ func_basename_result="${1##*/}"]) ++ ++ _LT_PROG_FUNCTION_REPLACE([func_dirname_and_basename], [dnl ++ case ${1} in ++ */*) func_dirname_result="${1%/*}${2}" ;; ++ * ) func_dirname_result="${3}" ;; ++ esac ++ func_basename_result="${1##*/}"]) + +-# func_basename file +-func_basename () +-{ +- func_basename_result=`$ECHO "${1}" | $SED "$basename"` +-} ++ _LT_PROG_FUNCTION_REPLACE([func_stripname], [dnl ++ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are ++ # positional parameters, so assign one to ordinary parameter first. 
++ func_stripname_result=${3} ++ func_stripname_result=${func_stripname_result#"${1}"} ++ func_stripname_result=${func_stripname_result%"${2}"}]) + +-dnl func_dirname_and_basename +-dnl A portable version of this function is already defined in general.m4sh +-dnl so there is no need for it here. ++ _LT_PROG_FUNCTION_REPLACE([func_split_long_opt], [dnl ++ func_split_long_opt_name=${1%%=*} ++ func_split_long_opt_arg=${1#*=}]) + +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-# func_strip_suffix prefix name +-func_stripname () +-{ +- case ${2} in +- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; +- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; +- esac +-} ++ _LT_PROG_FUNCTION_REPLACE([func_split_short_opt], [dnl ++ func_split_short_opt_arg=${1#??} ++ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}]) + +-# sed scripts: +-my_sed_long_opt='1s/^\(-[[^=]]*\)=.*/\1/;q' +-my_sed_long_arg='1s/^-[[^=]]*=//' ++ _LT_PROG_FUNCTION_REPLACE([func_lo2o], [dnl ++ case ${1} in ++ *.lo) func_lo2o_result=${1%.lo}.${objext} ;; ++ *) func_lo2o_result=${1} ;; ++ esac]) + +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` +- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` +-} ++ _LT_PROG_FUNCTION_REPLACE([func_xform], [ func_xform_result=${1%.*}.lo]) + +-# func_lo2o object +-func_lo2o () +-{ +- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` +-} ++ _LT_PROG_FUNCTION_REPLACE([func_arith], [ func_arith_result=$(( $[*] ))]) + +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=`$ECHO "${1}" | $SED 's/\.[[^.]]*$/.lo/'` +-} ++ _LT_PROG_FUNCTION_REPLACE([func_len], [ func_len_result=${#1}]) ++fi + +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=`expr "$[@]"` +-} ++if test x"$lt_shell_append" = xyes; then ++ _LT_PROG_FUNCTION_REPLACE([func_append], [ eval "${1}+=\\${2}"]) + +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=`expr "$[1]" : ".*" 2>/dev/null || echo $max_cmd_len` +-} ++ _LT_PROG_FUNCTION_REPLACE([func_append_quoted], [dnl ++ func_quote_for_eval "${2}" ++dnl m4 expansion turns \\\\ into \\, and then the shell eval turns that into \ ++ eval "${1}+=\\\\ \\$func_quote_for_eval_result"]) + +-_LT_EOF +-esac ++ # Save a `func_append' function call where possible by direct use of '+=' ++ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++else ++ # Save a `func_append' function call even when '+=' is not available ++ sed -e 's%func_append \([[a-zA-Z_]]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++fi + +-case $lt_shell_append in +- yes) +- cat << \_LT_EOF >> "$cfgfile" ++if test x"$_lt_function_replace_fail" = x":"; then ++ AC_MSG_WARN([Unable to substitute extended shell functions in $ofile]) ++fi ++]) + +-# func_append var value +-# Append VALUE to the end of shell variable VAR. 
+-func_append () +-{ +- eval "$[1]+=\$[2]" +-} +-_LT_EOF ++# _LT_PATH_CONVERSION_FUNCTIONS ++# ----------------------------- ++# Determine which file name conversion functions should be used by ++# func_to_host_file (and, implicitly, by func_to_host_path). These are needed ++# for certain cross-compile configurations and native mingw. ++m4_defun([_LT_PATH_CONVERSION_FUNCTIONS], ++[AC_REQUIRE([AC_CANONICAL_HOST])dnl ++AC_REQUIRE([AC_CANONICAL_BUILD])dnl ++AC_MSG_CHECKING([how to convert $build file names to $host format]) ++AC_CACHE_VAL(lt_cv_to_host_file_cmd, ++[case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ++ ;; ++ esac + ;; +- *) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$[1]=\$$[1]\$[2]" +-} +- +-_LT_EOF ++ *-*-cygwin* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ++ ;; ++ esac + ;; +- esac ++ * ) # unhandled hosts (and "normal" native builds) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++esac ++]) ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++AC_MSG_RESULT([$lt_cv_to_host_file_cmd]) ++_LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd], ++ [0], [convert $build file names to $host format])dnl ++ ++AC_MSG_CHECKING([how to convert $build file names to toolchain format]) ++AC_CACHE_VAL(lt_cv_to_tool_file_cmd, ++[#assume ordinary cross tools, or native build. ++lt_cv_to_tool_file_cmd=func_convert_file_noop ++case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ esac ++ ;; ++esac + ]) ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++AC_MSG_RESULT([$lt_cv_to_tool_file_cmd]) ++_LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd], ++ [0], [convert $build files to toolchain format])dnl ++])# _LT_PATH_CONVERSION_FUNCTIONS +diff --git a/ltmain.sh b/ltmain.sh +index 9503ec85d7..70e856e065 100644 +--- a/ltmain.sh ++++ b/ltmain.sh +@@ -1,10 +1,9 @@ +-# Generated from ltmain.m4sh. + +-# libtool (GNU libtool 1.3134 2009-11-29) 2.2.7a ++# libtool (GNU libtool) 2.4 + # Written by Gordon Matzigkeit , 1996 + + # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, 2006, +-# 2007, 2008, 2009 Free Software Foundation, Inc. ++# 2007, 2008, 2009, 2010 Free Software Foundation, Inc. + # This is free software; see the source for copying conditions. There is NO + # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + +@@ -38,7 +37,6 @@ + # -n, --dry-run display commands without modifying any files + # --features display basic configuration information and exit + # --mode=MODE use operation mode MODE +-# --no-finish let install mode avoid finish commands + # --preserve-dup-deps don't remove duplicate dependency libraries + # --quiet, --silent don't print informational messages + # --no-quiet, --no-silent +@@ -71,17 +69,19 @@ + # compiler: $LTCC + # compiler flags: $LTCFLAGS + # linker: $LD (gnu? 
$with_gnu_ld) +-# $progname: (GNU libtool 1.3134 2009-11-29) 2.2.7a ++# $progname: (GNU libtool) 2.4 + # automake: $automake_version + # autoconf: $autoconf_version + # + # Report bugs to . ++# GNU libtool home page: . ++# General help using GNU software: . + + PROGRAM=libtool + PACKAGE=libtool +-VERSION=2.2.7a +-TIMESTAMP=" 1.3134 2009-11-29" +-package_revision=1.3134 ++VERSION=2.4 ++TIMESTAMP="" ++package_revision=1.3293 + + # Be Bourne compatible + if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then +@@ -106,9 +106,6 @@ _LTECHO_EOF' + } + + # NLS nuisances: We save the old values to restore during execute mode. +-# Only set LANG and LC_ALL to C if already set. +-# These must not be set unconditionally because not all systems understand +-# e.g. LANG=C (notably SCO). + lt_user_locale= + lt_safe_locale= + for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES +@@ -121,15 +118,13 @@ do + lt_safe_locale=\"$lt_var=C; \$lt_safe_locale\" + fi" + done ++LC_ALL=C ++LANGUAGE=C ++export LANGUAGE LC_ALL + + $lt_unset CDPATH + + +- +- +- +- +- + # Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh + # is ksh but when the shell is invoked as "sh" and the current value of + # the _XPG environment variable is not equal to 1 (one), the special +@@ -140,7 +135,7 @@ progpath="$0" + + + : ${CP="cp -f"} +-: ${ECHO=$as_echo} ++test "${ECHO+set}" = set || ECHO=${as_echo-'printf %s\n'} + : ${EGREP="/bin/grep -E"} + : ${FGREP="/bin/grep -F"} + : ${GREP="/bin/grep"} +@@ -149,7 +144,7 @@ progpath="$0" + : ${MKDIR="mkdir"} + : ${MV="mv -f"} + : ${RM="rm -f"} +-: ${SED="/mount/endor/wildenhu/local-x86_64/bin/sed"} ++: ${SED="/bin/sed"} + : ${SHELL="${CONFIG_SHELL-/bin/sh}"} + : ${Xsed="$SED -e 1s/^X//"} + +@@ -169,6 +164,27 @@ IFS=" $lt_nl" + dirname="s,/[^/]*$,," + basename="s,^.*/,," + ++# func_dirname file append nondir_replacement ++# Compute the dirname of FILE. If nonempty, add APPEND to the result, ++# otherwise set result to NONDIR_REPLACEMENT. ++func_dirname () ++{ ++ func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` ++ if test "X$func_dirname_result" = "X${1}"; then ++ func_dirname_result="${3}" ++ else ++ func_dirname_result="$func_dirname_result${2}" ++ fi ++} # func_dirname may be replaced by extended shell implementation ++ ++ ++# func_basename file ++func_basename () ++{ ++ func_basename_result=`$ECHO "${1}" | $SED "$basename"` ++} # func_basename may be replaced by extended shell implementation ++ ++ + # func_dirname_and_basename file append nondir_replacement + # perform func_basename and func_dirname in a single function + # call: +@@ -183,17 +199,31 @@ basename="s,^.*/,," + # those functions but instead duplicate the functionality here. + func_dirname_and_basename () + { +- # Extract subdirectory from the argument. +- func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"` +- if test "X$func_dirname_result" = "X${1}"; then +- func_dirname_result="${3}" +- else +- func_dirname_result="$func_dirname_result${2}" +- fi +- func_basename_result=`$ECHO "${1}" | $SED -e "$basename"` +-} ++ # Extract subdirectory from the argument. 
++ func_dirname_result=`$ECHO "${1}" | $SED -e "$dirname"` ++ if test "X$func_dirname_result" = "X${1}"; then ++ func_dirname_result="${3}" ++ else ++ func_dirname_result="$func_dirname_result${2}" ++ fi ++ func_basename_result=`$ECHO "${1}" | $SED -e "$basename"` ++} # func_dirname_and_basename may be replaced by extended shell implementation ++ ++ ++# func_stripname prefix suffix name ++# strip PREFIX and SUFFIX off of NAME. ++# PREFIX and SUFFIX must not contain globbing or regex special ++# characters, hashes, percent signs, but SUFFIX may contain a leading ++# dot (in which case that matches only a dot). ++# func_strip_suffix prefix name ++func_stripname () ++{ ++ case ${2} in ++ .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; ++ *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; ++ esac ++} # func_stripname may be replaced by extended shell implementation + +-# Generated shell functions inserted here. + + # These SED scripts presuppose an absolute path with a trailing slash. + pathcar='s,^/\([^/]*\).*$,\1,' +@@ -376,6 +406,15 @@ sed_quote_subst='s/\([`"$\\]\)/\\\1/g' + # Same as above, but do not quote variable references. + double_quote_subst='s/\(["`\\]\)/\\\1/g' + ++# Sed substitution that turns a string into a regex matching for the ++# string literally. ++sed_make_literal_regex='s,[].[^$\\*\/],\\&,g' ++ ++# Sed substitution that converts a w32 file name or path ++# which contains forward slashes, into one that contains ++# (escaped) backslashes. A very naive implementation. ++lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' ++ + # Re-`\' parameter expansions in output of double_quote_subst that were + # `\'-ed in input to the same. If an odd number of `\' preceded a '$' + # in input to double_quote_subst, that '$' was protected from expansion. +@@ -404,7 +443,7 @@ opt_warning=: + # name if it has been set yet. + func_echo () + { +- $ECHO "$progname${mode+: }$mode: $*" ++ $ECHO "$progname: ${opt_mode+$opt_mode: }$*" + } + + # func_verbose arg... +@@ -430,14 +469,14 @@ func_echo_all () + # Echo program name prefixed message to standard error. + func_error () + { +- $ECHO "$progname${mode+: }$mode: "${1+"$@"} 1>&2 ++ $ECHO "$progname: ${opt_mode+$opt_mode: }"${1+"$@"} 1>&2 + } + + # func_warning arg... + # Echo program name prefixed warning message to standard error. + func_warning () + { +- $opt_warning && $ECHO "$progname${mode+: }$mode: warning: "${1+"$@"} 1>&2 ++ $opt_warning && $ECHO "$progname: ${opt_mode+$opt_mode: }warning: "${1+"$@"} 1>&2 + + # bash bug again: + : +@@ -656,19 +695,35 @@ func_show_eval_locale () + fi + } + +- +- ++# func_tr_sh ++# Turn $1 into a string suitable for a shell variable name. ++# Result is stored in $func_tr_sh_result. All characters ++# not in the set a-zA-Z0-9_ are replaced with '_'. Further, ++# if $1 begins with a digit, a '_' is prepended as well. ++func_tr_sh () ++{ ++ case $1 in ++ [0-9]* | *[!a-zA-Z0-9_]*) ++ func_tr_sh_result=`$ECHO "$1" | $SED 's/^\([0-9]\)/_\1/; s/[^a-zA-Z0-9_]/_/g'` ++ ;; ++ * ) ++ func_tr_sh_result=$1 ++ ;; ++ esac ++} + + + # func_version + # Echo version message to standard output and exit. + func_version () + { ++ $opt_debug ++ + $SED -n '/(C)/!b go + :more + /\./!{ + N +- s/\n# // ++ s/\n# / / + b more + } + :go +@@ -685,7 +740,9 @@ func_version () + # Echo short help message to standard output and exit. 
+ func_usage () + { +- $SED -n '/^# Usage:/,/^# *-h/ { ++ $opt_debug ++ ++ $SED -n '/^# Usage:/,/^# *.*--help/ { + s/^# // + s/^# *$// + s/\$progname/'$progname'/ +@@ -701,7 +758,10 @@ func_usage () + # unless 'noexit' is passed as argument. + func_help () + { ++ $opt_debug ++ + $SED -n '/^# Usage:/,/# Report bugs to/ { ++ :print + s/^# // + s/^# *$// + s*\$progname*'$progname'* +@@ -714,7 +774,11 @@ func_help () + s/\$automake_version/'"`(automake --version) 2>/dev/null |$SED 1q`"'/ + s/\$autoconf_version/'"`(autoconf --version) 2>/dev/null |$SED 1q`"'/ + p +- }' < "$progpath" ++ d ++ } ++ /^# .* home page:/b print ++ /^# General help using/b print ++ ' < "$progpath" + ret=$? + if test -z "$1"; then + exit $ret +@@ -726,12 +790,39 @@ func_help () + # exit_cmd. + func_missing_arg () + { +- func_error "missing argument for $1" ++ $opt_debug ++ ++ func_error "missing argument for $1." + exit_cmd=exit + } + +-exit_cmd=: + ++# func_split_short_opt shortopt ++# Set func_split_short_opt_name and func_split_short_opt_arg shell ++# variables after splitting SHORTOPT after the 2nd character. ++func_split_short_opt () ++{ ++ my_sed_short_opt='1s/^\(..\).*$/\1/;q' ++ my_sed_short_rest='1s/^..\(.*\)$/\1/;q' ++ ++ func_split_short_opt_name=`$ECHO "$1" | $SED "$my_sed_short_opt"` ++ func_split_short_opt_arg=`$ECHO "$1" | $SED "$my_sed_short_rest"` ++} # func_split_short_opt may be replaced by extended shell implementation ++ ++ ++# func_split_long_opt longopt ++# Set func_split_long_opt_name and func_split_long_opt_arg shell ++# variables after splitting LONGOPT at the `=' sign. ++func_split_long_opt () ++{ ++ my_sed_long_opt='1s/^\(--[^=]*\)=.*/\1/;q' ++ my_sed_long_arg='1s/^--[^=]*=//' ++ ++ func_split_long_opt_name=`$ECHO "$1" | $SED "$my_sed_long_opt"` ++ func_split_long_opt_arg=`$ECHO "$1" | $SED "$my_sed_long_arg"` ++} # func_split_long_opt may be replaced by extended shell implementation ++ ++exit_cmd=: + + + +@@ -741,26 +832,64 @@ magic="%%%MAGIC variable%%%" + magic_exe="%%%MAGIC EXE variable%%%" + + # Global variables. +-# $mode is unset + nonopt= +-execute_dlfiles= + preserve_args= + lo2o="s/\\.lo\$/.${objext}/" + o2lo="s/\\.${objext}\$/.lo/" + extracted_archives= + extracted_serial=0 + +-opt_dry_run=false +-opt_finish=: +-opt_duplicate_deps=false +-opt_silent=false +-opt_debug=: +- + # If this variable is set in any of the actions, the command in it + # will be execed at the end. This prevents here-documents from being + # left over by shells. + exec_cmd= + ++# func_append var value ++# Append VALUE to the end of shell variable VAR. ++func_append () ++{ ++ eval "${1}=\$${1}\${2}" ++} # func_append may be replaced by extended shell implementation ++ ++# func_append_quoted var value ++# Quote VALUE and append to the end of shell variable VAR, separated ++# by a space. ++func_append_quoted () ++{ ++ func_quote_for_eval "${2}" ++ eval "${1}=\$${1}\\ \$func_quote_for_eval_result" ++} # func_append_quoted may be replaced by extended shell implementation ++ ++ ++# func_arith arithmetic-term... ++func_arith () ++{ ++ func_arith_result=`expr "${@}"` ++} # func_arith may be replaced by extended shell implementation ++ ++ ++# func_len string ++# STRING may not start with a hyphen. 
++func_len () ++{ ++ func_len_result=`expr "${1}" : ".*" 2>/dev/null || echo $max_cmd_len` ++} # func_len may be replaced by extended shell implementation ++ ++ ++# func_lo2o object ++func_lo2o () ++{ ++ func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` ++} # func_lo2o may be replaced by extended shell implementation ++ ++ ++# func_xform libobj-or-source ++func_xform () ++{ ++ func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` ++} # func_xform may be replaced by extended shell implementation ++ ++ + # func_fatal_configuration arg... + # Echo program name prefixed message to standard error, followed by + # a configuration failure hint, and exit. +@@ -850,130 +979,204 @@ func_enable_tag () + esac + } + +-# Parse options once, thoroughly. This comes as soon as possible in +-# the script to make things like `libtool --version' happen quickly. ++# func_check_version_match ++# Ensure that we are using m4 macros, and libtool script from the same ++# release of libtool. ++func_check_version_match () + { ++ if test "$package_revision" != "$macro_revision"; then ++ if test "$VERSION" != "$macro_version"; then ++ if test -z "$macro_version"; then ++ cat >&2 <<_LT_EOF ++$progname: Version mismatch error. This is $PACKAGE $VERSION, but the ++$progname: definition of this LT_INIT comes from an older release. ++$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION ++$progname: and run autoconf again. ++_LT_EOF ++ else ++ cat >&2 <<_LT_EOF ++$progname: Version mismatch error. This is $PACKAGE $VERSION, but the ++$progname: definition of this LT_INIT comes from $PACKAGE $macro_version. ++$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION ++$progname: and run autoconf again. ++_LT_EOF ++ fi ++ else ++ cat >&2 <<_LT_EOF ++$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, ++$progname: but the definition of this LT_INIT comes from revision $macro_revision. ++$progname: You should recreate aclocal.m4 with macros from revision $package_revision ++$progname: of $PACKAGE $VERSION and run autoconf again. 
++_LT_EOF ++ fi + +- # Shorthand for --mode=foo, only valid as the first argument +- case $1 in +- clean|clea|cle|cl) +- shift; set dummy --mode clean ${1+"$@"}; shift +- ;; +- compile|compil|compi|comp|com|co|c) +- shift; set dummy --mode compile ${1+"$@"}; shift +- ;; +- execute|execut|execu|exec|exe|ex|e) +- shift; set dummy --mode execute ${1+"$@"}; shift +- ;; +- finish|finis|fini|fin|fi|f) +- shift; set dummy --mode finish ${1+"$@"}; shift +- ;; +- install|instal|insta|inst|ins|in|i) +- shift; set dummy --mode install ${1+"$@"}; shift +- ;; +- link|lin|li|l) +- shift; set dummy --mode link ${1+"$@"}; shift +- ;; +- uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) +- shift; set dummy --mode uninstall ${1+"$@"}; shift +- ;; +- esac ++ exit $EXIT_MISMATCH ++ fi ++} ++ ++ ++# Shorthand for --mode=foo, only valid as the first argument ++case $1 in ++clean|clea|cle|cl) ++ shift; set dummy --mode clean ${1+"$@"}; shift ++ ;; ++compile|compil|compi|comp|com|co|c) ++ shift; set dummy --mode compile ${1+"$@"}; shift ++ ;; ++execute|execut|execu|exec|exe|ex|e) ++ shift; set dummy --mode execute ${1+"$@"}; shift ++ ;; ++finish|finis|fini|fin|fi|f) ++ shift; set dummy --mode finish ${1+"$@"}; shift ++ ;; ++install|instal|insta|inst|ins|in|i) ++ shift; set dummy --mode install ${1+"$@"}; shift ++ ;; ++link|lin|li|l) ++ shift; set dummy --mode link ${1+"$@"}; shift ++ ;; ++uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) ++ shift; set dummy --mode uninstall ${1+"$@"}; shift ++ ;; ++esac + +- # Parse non-mode specific arguments: +- while test "$#" -gt 0; do ++ ++ ++# Option defaults: ++opt_debug=: ++opt_dry_run=false ++opt_config=false ++opt_preserve_dup_deps=false ++opt_features=false ++opt_finish=false ++opt_help=false ++opt_help_all=false ++opt_silent=: ++opt_verbose=: ++opt_silent=false ++opt_verbose=false ++ ++ ++# Parse options once, thoroughly. This comes as soon as possible in the ++# script to make things like `--version' happen as quickly as we can. 
++{ ++ # this just eases exit handling ++ while test $# -gt 0; do + opt="$1" + shift +- + case $opt in +- --config) func_config ;; +- +- --debug) preserve_args="$preserve_args $opt" ++ --debug|-x) opt_debug='set -x' + func_echo "enabling shell trace mode" +- opt_debug='set -x' + $opt_debug + ;; +- +- -dlopen) test "$#" -eq 0 && func_missing_arg "$opt" && break +- execute_dlfiles="$execute_dlfiles $1" +- shift ++ --dry-run|--dryrun|-n) ++ opt_dry_run=: + ;; +- +- --dry-run | -n) opt_dry_run=: ;; +- --features) func_features ;; +- --finish) mode="finish" ;; +- --no-finish) opt_finish=false ;; +- +- --mode) test "$#" -eq 0 && func_missing_arg "$opt" && break +- case $1 in +- # Valid mode arguments: +- clean) ;; +- compile) ;; +- execute) ;; +- finish) ;; +- install) ;; +- link) ;; +- relink) ;; +- uninstall) ;; +- +- # Catch anything else as an error +- *) func_error "invalid argument for $opt" +- exit_cmd=exit +- break +- ;; +- esac +- +- mode="$1" ++ --config) ++ opt_config=: ++func_config ++ ;; ++ --dlopen|-dlopen) ++ optarg="$1" ++ opt_dlopen="${opt_dlopen+$opt_dlopen ++}$optarg" + shift + ;; +- + --preserve-dup-deps) +- opt_duplicate_deps=: ;; +- +- --quiet|--silent) preserve_args="$preserve_args $opt" +- opt_silent=: +- opt_verbose=false ++ opt_preserve_dup_deps=: + ;; +- +- --no-quiet|--no-silent) +- preserve_args="$preserve_args $opt" +- opt_silent=false ++ --features) ++ opt_features=: ++func_features + ;; +- +- --verbose| -v) preserve_args="$preserve_args $opt" ++ --finish) ++ opt_finish=: ++set dummy --mode finish ${1+"$@"}; shift ++ ;; ++ --help) ++ opt_help=: ++ ;; ++ --help-all) ++ opt_help_all=: ++opt_help=': help-all' ++ ;; ++ --mode) ++ test $# = 0 && func_missing_arg $opt && break ++ optarg="$1" ++ opt_mode="$optarg" ++case $optarg in ++ # Valid mode arguments: ++ clean|compile|execute|finish|install|link|relink|uninstall) ;; ++ ++ # Catch anything else as an error ++ *) func_error "invalid argument for $opt" ++ exit_cmd=exit ++ break ++ ;; ++esac ++ shift ++ ;; ++ --no-silent|--no-quiet) + opt_silent=false +- opt_verbose=: ++func_append preserve_args " $opt" + ;; +- +- --no-verbose) preserve_args="$preserve_args $opt" ++ --no-verbose) + opt_verbose=false ++func_append preserve_args " $opt" + ;; +- +- --tag) test "$#" -eq 0 && func_missing_arg "$opt" && break +- preserve_args="$preserve_args $opt $1" +- func_enable_tag "$1" # tagname is set here ++ --silent|--quiet) ++ opt_silent=: ++func_append preserve_args " $opt" ++ opt_verbose=false ++ ;; ++ --verbose|-v) ++ opt_verbose=: ++func_append preserve_args " $opt" ++opt_silent=false ++ ;; ++ --tag) ++ test $# = 0 && func_missing_arg $opt && break ++ optarg="$1" ++ opt_tag="$optarg" ++func_append preserve_args " $opt $optarg" ++func_enable_tag "$optarg" + shift + ;; + ++ -\?|-h) func_usage ;; ++ --help) func_help ;; ++ --version) func_version ;; ++ + # Separate optargs to long options: +- -dlopen=*|--mode=*|--tag=*) +- func_opt_split "$opt" +- set dummy "$func_opt_split_opt" "$func_opt_split_arg" ${1+"$@"} ++ --*=*) ++ func_split_long_opt "$opt" ++ set dummy "$func_split_long_opt_name" "$func_split_long_opt_arg" ${1+"$@"} + shift + ;; + +- -\?|-h) func_usage ;; +- --help) opt_help=: ;; +- --help-all) opt_help=': help-all' ;; +- --version) func_version ;; +- +- -*) func_fatal_help "unrecognized option \`$opt'" ;; +- +- *) nonopt="$opt" +- break ++ # Separate non-argument short options: ++ -\?*|-h*|-n*|-v*) ++ func_split_short_opt "$opt" ++ set dummy "$func_split_short_opt_name" "-$func_split_short_opt_arg" ${1+"$@"} ++ shift + 
;; ++ ++ --) break ;; ++ -*) func_fatal_help "unrecognized option \`$opt'" ;; ++ *) set dummy "$opt" ${1+"$@"}; shift; break ;; + esac + done + ++ # Validate options: ++ ++ # save first non-option argument ++ if test "$#" -gt 0; then ++ nonopt="$opt" ++ shift ++ fi ++ ++ # preserve --debug ++ test "$opt_debug" = : || func_append preserve_args " --debug" + + case $host in + *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* ) +@@ -981,82 +1184,44 @@ func_enable_tag () + opt_duplicate_compiler_generated_deps=: + ;; + *) +- opt_duplicate_compiler_generated_deps=$opt_duplicate_deps ++ opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps + ;; + esac + +- # Having warned about all mis-specified options, bail out if +- # anything was wrong. +- $exit_cmd $EXIT_FAILURE +-} ++ $opt_help || { ++ # Sanity checks first: ++ func_check_version_match + +-# func_check_version_match +-# Ensure that we are using m4 macros, and libtool script from the same +-# release of libtool. +-func_check_version_match () +-{ +- if test "$package_revision" != "$macro_revision"; then +- if test "$VERSION" != "$macro_version"; then +- if test -z "$macro_version"; then +- cat >&2 <<_LT_EOF +-$progname: Version mismatch error. This is $PACKAGE $VERSION, but the +-$progname: definition of this LT_INIT comes from an older release. +-$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION +-$progname: and run autoconf again. +-_LT_EOF +- else +- cat >&2 <<_LT_EOF +-$progname: Version mismatch error. This is $PACKAGE $VERSION, but the +-$progname: definition of this LT_INIT comes from $PACKAGE $macro_version. +-$progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION +-$progname: and run autoconf again. +-_LT_EOF +- fi +- else +- cat >&2 <<_LT_EOF +-$progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, +-$progname: but the definition of this LT_INIT comes from revision $macro_revision. +-$progname: You should recreate aclocal.m4 with macros from revision $package_revision +-$progname: of $PACKAGE $VERSION and run autoconf again. +-_LT_EOF ++ if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then ++ func_fatal_configuration "not configured to build any kind of library" + fi + +- exit $EXIT_MISMATCH +- fi +-} +- ++ # Darwin sucks ++ eval std_shrext=\"$shrext_cmds\" + +-## ----------- ## +-## Main. ## +-## ----------- ## +- +-$opt_help || { +- # Sanity checks first: +- func_check_version_match +- +- if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then +- func_fatal_configuration "not configured to build any kind of library" +- fi ++ # Only execute mode is allowed to have -dlopen flags. ++ if test -n "$opt_dlopen" && test "$opt_mode" != execute; then ++ func_error "unrecognized option \`-dlopen'" ++ $ECHO "$help" 1>&2 ++ exit $EXIT_FAILURE ++ fi + +- test -z "$mode" && func_fatal_error "error: you must specify a MODE." ++ # Change the help message to a mode-specific one. ++ generic_help="$help" ++ help="Try \`$progname --help --mode=$opt_mode' for more information." ++ } + + +- # Darwin sucks +- eval "std_shrext=\"$shrext_cmds\"" ++ # Bail if the options were screwed ++ $exit_cmd $EXIT_FAILURE ++} + + +- # Only execute mode is allowed to have -dlopen flags. +- if test -n "$execute_dlfiles" && test "$mode" != execute; then +- func_error "unrecognized option \`-dlopen'" +- $ECHO "$help" 1>&2 +- exit $EXIT_FAILURE +- fi + +- # Change the help message to a mode-specific one. 
+- generic_help="$help" +- help="Try \`$progname --help --mode=$mode' for more information." +-} + ++## ----------- ## ++## Main. ## ++## ----------- ## + + # func_lalib_p file + # True iff FILE is a libtool `.la' library or `.lo' object file. +@@ -1121,12 +1286,9 @@ func_ltwrapper_executable_p () + # temporary ltwrapper_script. + func_ltwrapper_scriptname () + { +- func_ltwrapper_scriptname_result="" +- if func_ltwrapper_executable_p "$1"; then +- func_dirname_and_basename "$1" "" "." +- func_stripname '' '.exe' "$func_basename_result" +- func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" +- fi ++ func_dirname_and_basename "$1" "" "." ++ func_stripname '' '.exe' "$func_basename_result" ++ func_ltwrapper_scriptname_result="$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper" + } + + # func_ltwrapper_p file +@@ -1149,7 +1311,7 @@ func_execute_cmds () + save_ifs=$IFS; IFS='~' + for cmd in $1; do + IFS=$save_ifs +- eval "cmd=\"$cmd\"" ++ eval cmd=\"$cmd\" + func_show_eval "$cmd" "${2-:}" + done + IFS=$save_ifs +@@ -1172,6 +1334,37 @@ func_source () + } + + ++# func_resolve_sysroot PATH ++# Replace a leading = in PATH with a sysroot. Store the result into ++# func_resolve_sysroot_result ++func_resolve_sysroot () ++{ ++ func_resolve_sysroot_result=$1 ++ case $func_resolve_sysroot_result in ++ =*) ++ func_stripname '=' '' "$func_resolve_sysroot_result" ++ func_resolve_sysroot_result=$lt_sysroot$func_stripname_result ++ ;; ++ esac ++} ++ ++# func_replace_sysroot PATH ++# If PATH begins with the sysroot, replace it with = and ++# store the result into func_replace_sysroot_result. ++func_replace_sysroot () ++{ ++ case "$lt_sysroot:$1" in ++ ?*:"$lt_sysroot"*) ++ func_stripname "$lt_sysroot" '' "$1" ++ func_replace_sysroot_result="=$func_stripname_result" ++ ;; ++ *) ++ # Including no sysroot. ++ func_replace_sysroot_result=$1 ++ ;; ++ esac ++} ++ + # func_infer_tag arg + # Infer tagged configuration to use if any are available and + # if one wasn't chosen via the "--tag" command line option. +@@ -1184,8 +1377,7 @@ func_infer_tag () + if test -n "$available_tags" && test -z "$tagname"; then + CC_quoted= + for arg in $CC; do +- func_quote_for_eval "$arg" +- CC_quoted="$CC_quoted $func_quote_for_eval_result" ++ func_append_quoted CC_quoted "$arg" + done + CC_expanded=`func_echo_all $CC` + CC_quoted_expanded=`func_echo_all $CC_quoted` +@@ -1204,8 +1396,7 @@ func_infer_tag () + CC_quoted= + for arg in $CC; do + # Double-quote args containing other shell metacharacters. +- func_quote_for_eval "$arg" +- CC_quoted="$CC_quoted $func_quote_for_eval_result" ++ func_append_quoted CC_quoted "$arg" + done + CC_expanded=`func_echo_all $CC` + CC_quoted_expanded=`func_echo_all $CC_quoted` +@@ -1274,6 +1465,486 @@ EOF + } + } + ++ ++################################################## ++# FILE NAME AND PATH CONVERSION HELPER FUNCTIONS # ++################################################## ++ ++# func_convert_core_file_wine_to_w32 ARG ++# Helper function used by file name conversion functions when $build is *nix, ++# and $host is mingw, cygwin, or some other w32 environment. Relies on a ++# correctly configured wine environment available, with the winepath program ++# in $build's $PATH. ++# ++# ARG is the $build file name to be converted to w32 format. 
++# Result is available in $func_convert_core_file_wine_to_w32_result, and will ++# be empty on error (or when ARG is empty) ++func_convert_core_file_wine_to_w32 () ++{ ++ $opt_debug ++ func_convert_core_file_wine_to_w32_result="$1" ++ if test -n "$1"; then ++ # Unfortunately, winepath does not exit with a non-zero error code, so we ++ # are forced to check the contents of stdout. On the other hand, if the ++ # command is not found, the shell will set an exit code of 127 and print ++ # *an error message* to stdout. So we must check for both error code of ++ # zero AND non-empty stdout, which explains the odd construction: ++ func_convert_core_file_wine_to_w32_tmp=`winepath -w "$1" 2>/dev/null` ++ if test "$?" -eq 0 && test -n "${func_convert_core_file_wine_to_w32_tmp}"; then ++ func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" | ++ $SED -e "$lt_sed_naive_backslashify"` ++ else ++ func_convert_core_file_wine_to_w32_result= ++ fi ++ fi ++} ++# end: func_convert_core_file_wine_to_w32 ++ ++ ++# func_convert_core_path_wine_to_w32 ARG ++# Helper function used by path conversion functions when $build is *nix, and ++# $host is mingw, cygwin, or some other w32 environment. Relies on a correctly ++# configured wine environment available, with the winepath program in $build's ++# $PATH. Assumes ARG has no leading or trailing path separator characters. ++# ++# ARG is path to be converted from $build format to win32. ++# Result is available in $func_convert_core_path_wine_to_w32_result. ++# Unconvertible file (directory) names in ARG are skipped; if no directory names ++# are convertible, then the result may be empty. ++func_convert_core_path_wine_to_w32 () ++{ ++ $opt_debug ++ # unfortunately, winepath doesn't convert paths, only file names ++ func_convert_core_path_wine_to_w32_result="" ++ if test -n "$1"; then ++ oldIFS=$IFS ++ IFS=: ++ for func_convert_core_path_wine_to_w32_f in $1; do ++ IFS=$oldIFS ++ func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f" ++ if test -n "$func_convert_core_file_wine_to_w32_result" ; then ++ if test -z "$func_convert_core_path_wine_to_w32_result"; then ++ func_convert_core_path_wine_to_w32_result="$func_convert_core_file_wine_to_w32_result" ++ else ++ func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result" ++ fi ++ fi ++ done ++ IFS=$oldIFS ++ fi ++} ++# end: func_convert_core_path_wine_to_w32 ++ ++ ++# func_cygpath ARGS... ++# Wrapper around calling the cygpath program via LT_CYGPATH. This is used when ++# when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2) ++# $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or ++# (2), returns the Cygwin file name or path in func_cygpath_result (input ++# file name or path is assumed to be in w32 format, as previously converted ++# from $build's *nix or MSYS format). In case (3), returns the w32 file name ++# or path in func_cygpath_result (input file name or path is assumed to be in ++# Cygwin format). Returns an empty string on error. ++# ++# ARGS are passed to cygpath, with the last one being the file name or path to ++# be converted. ++# ++# Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH ++# environment variable; do not put it in $PATH. ++func_cygpath () ++{ ++ $opt_debug ++ if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then ++ func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null` ++ if test "$?" 
-ne 0; then ++ # on failure, ensure result is empty ++ func_cygpath_result= ++ fi ++ else ++ func_cygpath_result= ++ func_error "LT_CYGPATH is empty or specifies non-existent file: \`$LT_CYGPATH'" ++ fi ++} ++#end: func_cygpath ++ ++ ++# func_convert_core_msys_to_w32 ARG ++# Convert file name or path ARG from MSYS format to w32 format. Return ++# result in func_convert_core_msys_to_w32_result. ++func_convert_core_msys_to_w32 () ++{ ++ $opt_debug ++ # awkward: cmd appends spaces to result ++ func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null | ++ $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` ++} ++#end: func_convert_core_msys_to_w32 ++ ++ ++# func_convert_file_check ARG1 ARG2 ++# Verify that ARG1 (a file name in $build format) was converted to $host ++# format in ARG2. Otherwise, emit an error message, but continue (resetting ++# func_to_host_file_result to ARG1). ++func_convert_file_check () ++{ ++ $opt_debug ++ if test -z "$2" && test -n "$1" ; then ++ func_error "Could not determine host file name corresponding to" ++ func_error " \`$1'" ++ func_error "Continuing, but uninstalled executables may not work." ++ # Fallback: ++ func_to_host_file_result="$1" ++ fi ++} ++# end func_convert_file_check ++ ++ ++# func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH ++# Verify that FROM_PATH (a path in $build format) was converted to $host ++# format in TO_PATH. Otherwise, emit an error message, but continue, resetting ++# func_to_host_file_result to a simplistic fallback value (see below). ++func_convert_path_check () ++{ ++ $opt_debug ++ if test -z "$4" && test -n "$3"; then ++ func_error "Could not determine the host path corresponding to" ++ func_error " \`$3'" ++ func_error "Continuing, but uninstalled executables may not work." ++ # Fallback. This is a deliberately simplistic "conversion" and ++ # should not be "improved". See libtool.info. ++ if test "x$1" != "x$2"; then ++ lt_replace_pathsep_chars="s|$1|$2|g" ++ func_to_host_path_result=`echo "$3" | ++ $SED -e "$lt_replace_pathsep_chars"` ++ else ++ func_to_host_path_result="$3" ++ fi ++ fi ++} ++# end func_convert_path_check ++ ++ ++# func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG ++# Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT ++# and appending REPL if ORIG matches BACKPAT. ++func_convert_path_front_back_pathsep () ++{ ++ $opt_debug ++ case $4 in ++ $1 ) func_to_host_path_result="$3$func_to_host_path_result" ++ ;; ++ esac ++ case $4 in ++ $2 ) func_append func_to_host_path_result "$3" ++ ;; ++ esac ++} ++# end func_convert_path_front_back_pathsep ++ ++ ++################################################## ++# $build to $host FILE NAME CONVERSION FUNCTIONS # ++################################################## ++# invoked via `$to_host_file_cmd ARG' ++# ++# In each case, ARG is the path to be converted from $build to $host format. ++# Result will be available in $func_to_host_file_result. ++ ++ ++# func_to_host_file ARG ++# Converts the file name ARG from $build format to $host format. Return result ++# in func_to_host_file_result. ++func_to_host_file () ++{ ++ $opt_debug ++ $to_host_file_cmd "$1" ++} ++# end func_to_host_file ++ ++ ++# func_to_tool_file ARG LAZY ++# converts the file name ARG from $build format to toolchain format. Return ++# result in func_to_tool_file_result. If the conversion in use is listed ++# in (the comma separated) LAZY, no conversion takes place. 
++func_to_tool_file () ++{ ++ $opt_debug ++ case ,$2, in ++ *,"$to_tool_file_cmd",*) ++ func_to_tool_file_result=$1 ++ ;; ++ *) ++ $to_tool_file_cmd "$1" ++ func_to_tool_file_result=$func_to_host_file_result ++ ;; ++ esac ++} ++# end func_to_tool_file ++ ++ ++# func_convert_file_noop ARG ++# Copy ARG to func_to_host_file_result. ++func_convert_file_noop () ++{ ++ func_to_host_file_result="$1" ++} ++# end func_convert_file_noop ++ ++ ++# func_convert_file_msys_to_w32 ARG ++# Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic ++# conversion to w32 is not available inside the cwrapper. Returns result in ++# func_to_host_file_result. ++func_convert_file_msys_to_w32 () ++{ ++ $opt_debug ++ func_to_host_file_result="$1" ++ if test -n "$1"; then ++ func_convert_core_msys_to_w32 "$1" ++ func_to_host_file_result="$func_convert_core_msys_to_w32_result" ++ fi ++ func_convert_file_check "$1" "$func_to_host_file_result" ++} ++# end func_convert_file_msys_to_w32 ++ ++ ++# func_convert_file_cygwin_to_w32 ARG ++# Convert file name ARG from Cygwin to w32 format. Returns result in ++# func_to_host_file_result. ++func_convert_file_cygwin_to_w32 () ++{ ++ $opt_debug ++ func_to_host_file_result="$1" ++ if test -n "$1"; then ++ # because $build is cygwin, we call "the" cygpath in $PATH; no need to use ++ # LT_CYGPATH in this case. ++ func_to_host_file_result=`cygpath -m "$1"` ++ fi ++ func_convert_file_check "$1" "$func_to_host_file_result" ++} ++# end func_convert_file_cygwin_to_w32 ++ ++ ++# func_convert_file_nix_to_w32 ARG ++# Convert file name ARG from *nix to w32 format. Requires a wine environment ++# and a working winepath. Returns result in func_to_host_file_result. ++func_convert_file_nix_to_w32 () ++{ ++ $opt_debug ++ func_to_host_file_result="$1" ++ if test -n "$1"; then ++ func_convert_core_file_wine_to_w32 "$1" ++ func_to_host_file_result="$func_convert_core_file_wine_to_w32_result" ++ fi ++ func_convert_file_check "$1" "$func_to_host_file_result" ++} ++# end func_convert_file_nix_to_w32 ++ ++ ++# func_convert_file_msys_to_cygwin ARG ++# Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set. ++# Returns result in func_to_host_file_result. ++func_convert_file_msys_to_cygwin () ++{ ++ $opt_debug ++ func_to_host_file_result="$1" ++ if test -n "$1"; then ++ func_convert_core_msys_to_w32 "$1" ++ func_cygpath -u "$func_convert_core_msys_to_w32_result" ++ func_to_host_file_result="$func_cygpath_result" ++ fi ++ func_convert_file_check "$1" "$func_to_host_file_result" ++} ++# end func_convert_file_msys_to_cygwin ++ ++ ++# func_convert_file_nix_to_cygwin ARG ++# Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed ++# in a wine environment, working winepath, and LT_CYGPATH set. Returns result ++# in func_to_host_file_result. ++func_convert_file_nix_to_cygwin () ++{ ++ $opt_debug ++ func_to_host_file_result="$1" ++ if test -n "$1"; then ++ # convert from *nix to w32, then use cygpath to convert from w32 to cygwin. ++ func_convert_core_file_wine_to_w32 "$1" ++ func_cygpath -u "$func_convert_core_file_wine_to_w32_result" ++ func_to_host_file_result="$func_cygpath_result" ++ fi ++ func_convert_file_check "$1" "$func_to_host_file_result" ++} ++# end func_convert_file_nix_to_cygwin ++ ++ ++############################################# ++# $build to $host PATH CONVERSION FUNCTIONS # ++############################################# ++# invoked via `$to_host_path_cmd ARG' ++# ++# In each case, ARG is the path to be converted from $build to $host format. 
++# The result will be available in $func_to_host_path_result. ++# ++# Path separators are also converted from $build format to $host format. If ++# ARG begins or ends with a path separator character, it is preserved (but ++# converted to $host format) on output. ++# ++# All path conversion functions are named using the following convention: ++# file name conversion function : func_convert_file_X_to_Y () ++# path conversion function : func_convert_path_X_to_Y () ++# where, for any given $build/$host combination the 'X_to_Y' value is the ++# same. If conversion functions are added for new $build/$host combinations, ++# the two new functions must follow this pattern, or func_init_to_host_path_cmd ++# will break. ++ ++ ++# func_init_to_host_path_cmd ++# Ensures that function "pointer" variable $to_host_path_cmd is set to the ++# appropriate value, based on the value of $to_host_file_cmd. ++to_host_path_cmd= ++func_init_to_host_path_cmd () ++{ ++ $opt_debug ++ if test -z "$to_host_path_cmd"; then ++ func_stripname 'func_convert_file_' '' "$to_host_file_cmd" ++ to_host_path_cmd="func_convert_path_${func_stripname_result}" ++ fi ++} ++ ++ ++# func_to_host_path ARG ++# Converts the path ARG from $build format to $host format. Return result ++# in func_to_host_path_result. ++func_to_host_path () ++{ ++ $opt_debug ++ func_init_to_host_path_cmd ++ $to_host_path_cmd "$1" ++} ++# end func_to_host_path ++ ++ ++# func_convert_path_noop ARG ++# Copy ARG to func_to_host_path_result. ++func_convert_path_noop () ++{ ++ func_to_host_path_result="$1" ++} ++# end func_convert_path_noop ++ ++ ++# func_convert_path_msys_to_w32 ARG ++# Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic ++# conversion to w32 is not available inside the cwrapper. Returns result in ++# func_to_host_path_result. ++func_convert_path_msys_to_w32 () ++{ ++ $opt_debug ++ func_to_host_path_result="$1" ++ if test -n "$1"; then ++ # Remove leading and trailing path separator characters from ARG. MSYS ++ # behavior is inconsistent here; cygpath turns them into '.;' and ';.'; ++ # and winepath ignores them completely. ++ func_stripname : : "$1" ++ func_to_host_path_tmp1=$func_stripname_result ++ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" ++ func_to_host_path_result="$func_convert_core_msys_to_w32_result" ++ func_convert_path_check : ";" \ ++ "$func_to_host_path_tmp1" "$func_to_host_path_result" ++ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" ++ fi ++} ++# end func_convert_path_msys_to_w32 ++ ++ ++# func_convert_path_cygwin_to_w32 ARG ++# Convert path ARG from Cygwin to w32 format. Returns result in ++# func_to_host_file_result. ++func_convert_path_cygwin_to_w32 () ++{ ++ $opt_debug ++ func_to_host_path_result="$1" ++ if test -n "$1"; then ++ # See func_convert_path_msys_to_w32: ++ func_stripname : : "$1" ++ func_to_host_path_tmp1=$func_stripname_result ++ func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"` ++ func_convert_path_check : ";" \ ++ "$func_to_host_path_tmp1" "$func_to_host_path_result" ++ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" ++ fi ++} ++# end func_convert_path_cygwin_to_w32 ++ ++ ++# func_convert_path_nix_to_w32 ARG ++# Convert path ARG from *nix to w32 format. Requires a wine environment and ++# a working winepath. Returns result in func_to_host_file_result. 
++func_convert_path_nix_to_w32 () ++{ ++ $opt_debug ++ func_to_host_path_result="$1" ++ if test -n "$1"; then ++ # See func_convert_path_msys_to_w32: ++ func_stripname : : "$1" ++ func_to_host_path_tmp1=$func_stripname_result ++ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" ++ func_to_host_path_result="$func_convert_core_path_wine_to_w32_result" ++ func_convert_path_check : ";" \ ++ "$func_to_host_path_tmp1" "$func_to_host_path_result" ++ func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" ++ fi ++} ++# end func_convert_path_nix_to_w32 ++ ++ ++# func_convert_path_msys_to_cygwin ARG ++# Convert path ARG from MSYS to Cygwin format. Requires LT_CYGPATH set. ++# Returns result in func_to_host_file_result. ++func_convert_path_msys_to_cygwin () ++{ ++ $opt_debug ++ func_to_host_path_result="$1" ++ if test -n "$1"; then ++ # See func_convert_path_msys_to_w32: ++ func_stripname : : "$1" ++ func_to_host_path_tmp1=$func_stripname_result ++ func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" ++ func_cygpath -u -p "$func_convert_core_msys_to_w32_result" ++ func_to_host_path_result="$func_cygpath_result" ++ func_convert_path_check : : \ ++ "$func_to_host_path_tmp1" "$func_to_host_path_result" ++ func_convert_path_front_back_pathsep ":*" "*:" : "$1" ++ fi ++} ++# end func_convert_path_msys_to_cygwin ++ ++ ++# func_convert_path_nix_to_cygwin ARG ++# Convert path ARG from *nix to Cygwin format. Requires Cygwin installed in a ++# a wine environment, working winepath, and LT_CYGPATH set. Returns result in ++# func_to_host_file_result. ++func_convert_path_nix_to_cygwin () ++{ ++ $opt_debug ++ func_to_host_path_result="$1" ++ if test -n "$1"; then ++ # Remove leading and trailing path separator characters from ++ # ARG. msys behavior is inconsistent here, cygpath turns them ++ # into '.;' and ';.', and winepath ignores them completely. ++ func_stripname : : "$1" ++ func_to_host_path_tmp1=$func_stripname_result ++ func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" ++ func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result" ++ func_to_host_path_result="$func_cygpath_result" ++ func_convert_path_check : : \ ++ "$func_to_host_path_tmp1" "$func_to_host_path_result" ++ func_convert_path_front_back_pathsep ":*" "*:" : "$1" ++ fi ++} ++# end func_convert_path_nix_to_cygwin ++ ++ + # func_mode_compile arg... + func_mode_compile () + { +@@ -1314,12 +1985,12 @@ func_mode_compile () + ;; + + -pie | -fpie | -fPIE) +- pie_flag="$pie_flag $arg" ++ func_append pie_flag " $arg" + continue + ;; + + -shared | -static | -prefer-pic | -prefer-non-pic) +- later="$later $arg" ++ func_append later " $arg" + continue + ;; + +@@ -1340,15 +2011,14 @@ func_mode_compile () + save_ifs="$IFS"; IFS=',' + for arg in $args; do + IFS="$save_ifs" +- func_quote_for_eval "$arg" +- lastarg="$lastarg $func_quote_for_eval_result" ++ func_append_quoted lastarg "$arg" + done + IFS="$save_ifs" + func_stripname ' ' '' "$lastarg" + lastarg=$func_stripname_result + + # Add the arguments to base_compile. +- base_compile="$base_compile $lastarg" ++ func_append base_compile " $lastarg" + continue + ;; + +@@ -1364,8 +2034,7 @@ func_mode_compile () + esac # case $arg_mode + + # Aesthetically quote the previous argument. +- func_quote_for_eval "$lastarg" +- base_compile="$base_compile $func_quote_for_eval_result" ++ func_append_quoted base_compile "$lastarg" + done # for arg + + case $arg_mode in +@@ -1496,17 +2165,16 @@ compiler." 
+ $opt_dry_run || $RM $removelist + exit $EXIT_FAILURE + fi +- removelist="$removelist $output_obj" ++ func_append removelist " $output_obj" + $ECHO "$srcfile" > "$lockfile" + fi + + $opt_dry_run || $RM $removelist +- removelist="$removelist $lockfile" ++ func_append removelist " $lockfile" + trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15 + +- if test -n "$fix_srcfile_path"; then +- eval "srcfile=\"$fix_srcfile_path\"" +- fi ++ func_to_tool_file "$srcfile" func_convert_file_msys_to_w32 ++ srcfile=$func_to_tool_file_result + func_quote_for_eval "$srcfile" + qsrcfile=$func_quote_for_eval_result + +@@ -1526,7 +2194,7 @@ compiler." + + if test -z "$output_obj"; then + # Place PIC objects in $objdir +- command="$command -o $lobj" ++ func_append command " -o $lobj" + fi + + func_show_eval_locale "$command" \ +@@ -1573,11 +2241,11 @@ compiler." + command="$base_compile $qsrcfile $pic_flag" + fi + if test "$compiler_c_o" = yes; then +- command="$command -o $obj" ++ func_append command " -o $obj" + fi + + # Suppress compiler output if we already did a PIC compilation. +- command="$command$suppress_output" ++ func_append command "$suppress_output" + func_show_eval_locale "$command" \ + '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' + +@@ -1622,13 +2290,13 @@ compiler." + } + + $opt_help || { +- test "$mode" = compile && func_mode_compile ${1+"$@"} ++ test "$opt_mode" = compile && func_mode_compile ${1+"$@"} + } + + func_mode_help () + { + # We need to display help for each of the modes. +- case $mode in ++ case $opt_mode in + "") + # Generic help is extracted from the usage comments + # at the start of this file. +@@ -1659,8 +2327,8 @@ This mode accepts the following additional options: + + -o OUTPUT-FILE set the output file name to OUTPUT-FILE + -no-suppress do not suppress compiler output for multiple passes +- -prefer-pic try to building PIC objects only +- -prefer-non-pic try to building non-PIC objects only ++ -prefer-pic try to build PIC objects only ++ -prefer-non-pic try to build non-PIC objects only + -shared do not build a \`.o' file suitable for static linking + -static only build a \`.o' file suitable for static linking + -Wc,FLAG pass FLAG directly to the compiler +@@ -1804,7 +2472,7 @@ Otherwise, only FILE itself is deleted using RM." + ;; + + *) +- func_fatal_help "invalid operation mode \`$mode'" ++ func_fatal_help "invalid operation mode \`$opt_mode'" + ;; + esac + +@@ -1819,13 +2487,13 @@ if $opt_help; then + else + { + func_help noexit +- for mode in compile link execute install finish uninstall clean; do ++ for opt_mode in compile link execute install finish uninstall clean; do + func_mode_help + done + } | sed -n '1p; 2,$s/^Usage:/ or: /p' + { + func_help noexit +- for mode in compile link execute install finish uninstall clean; do ++ for opt_mode in compile link execute install finish uninstall clean; do + echo + func_mode_help + done +@@ -1854,13 +2522,16 @@ func_mode_execute () + func_fatal_help "you must specify a COMMAND" + + # Handle -dlopen flags immediately. +- for file in $execute_dlfiles; do ++ for file in $opt_dlopen; do + test -f "$file" \ + || func_fatal_help "\`$file' is not a file" + + dir= + case $file in + *.la) ++ func_resolve_sysroot "$file" ++ file=$func_resolve_sysroot_result ++ + # Check to see that this really is a libtool archive. 
+ func_lalib_unsafe_p "$file" \ + || func_fatal_help "\`$lib' is not a valid libtool archive" +@@ -1882,7 +2553,7 @@ func_mode_execute () + dir="$func_dirname_result" + + if test -f "$dir/$objdir/$dlname"; then +- dir="$dir/$objdir" ++ func_append dir "/$objdir" + else + if test ! -f "$dir/$dlname"; then + func_fatal_error "cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" +@@ -1907,10 +2578,10 @@ func_mode_execute () + test -n "$absdir" && dir="$absdir" + + # Now add the directory to shlibpath_var. +- if eval test -z \"\$$shlibpath_var\"; then +- eval $shlibpath_var=\$dir ++ if eval "test -z \"\$$shlibpath_var\""; then ++ eval "$shlibpath_var=\"\$dir\"" + else +- eval $shlibpath_var=\$dir:\$$shlibpath_var ++ eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\"" + fi + done + +@@ -1939,8 +2610,7 @@ func_mode_execute () + ;; + esac + # Quote arguments (to preserve shell metacharacters). +- func_quote_for_eval "$file" +- args="$args $func_quote_for_eval_result" ++ func_append_quoted args "$file" + done + + if test "X$opt_dry_run" = Xfalse; then +@@ -1972,22 +2642,59 @@ func_mode_execute () + fi + } + +-test "$mode" = execute && func_mode_execute ${1+"$@"} ++test "$opt_mode" = execute && func_mode_execute ${1+"$@"} + + + # func_mode_finish arg... + func_mode_finish () + { + $opt_debug +- libdirs="$nonopt" ++ libs= ++ libdirs= + admincmds= + +- if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then +- for dir +- do +- libdirs="$libdirs $dir" +- done ++ for opt in "$nonopt" ${1+"$@"} ++ do ++ if test -d "$opt"; then ++ func_append libdirs " $opt" + ++ elif test -f "$opt"; then ++ if func_lalib_unsafe_p "$opt"; then ++ func_append libs " $opt" ++ else ++ func_warning "\`$opt' is not a valid libtool archive" ++ fi ++ ++ else ++ func_fatal_error "invalid argument \`$opt'" ++ fi ++ done ++ ++ if test -n "$libs"; then ++ if test -n "$lt_sysroot"; then ++ sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"` ++ sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;" ++ else ++ sysroot_cmd= ++ fi ++ ++ # Remove sysroot references ++ if $opt_dry_run; then ++ for lib in $libs; do ++ echo "removing references to $lt_sysroot and \`=' prefixes from $lib" ++ done ++ else ++ tmpdir=`func_mktempdir` ++ for lib in $libs; do ++ sed -e "${sysroot_cmd} s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \ ++ > $tmpdir/tmp-la ++ mv -f $tmpdir/tmp-la $lib ++ done ++ ${RM}r "$tmpdir" ++ fi ++ fi ++ ++ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then + for libdir in $libdirs; do + if test -n "$finish_cmds"; then + # Do each command in the finish commands. +@@ -1997,7 +2704,7 @@ func_mode_finish () + if test -n "$finish_eval"; then + # Do the single finish_eval. + eval cmds=\"$finish_eval\" +- $opt_dry_run || eval "$cmds" || admincmds="$admincmds ++ $opt_dry_run || eval "$cmds" || func_append admincmds " + $cmds" + fi + done +@@ -2006,53 +2713,55 @@ func_mode_finish () + # Exit here if they wanted silent mode. 
+ $opt_silent && exit $EXIT_SUCCESS + +- echo "----------------------------------------------------------------------" +- echo "Libraries have been installed in:" +- for libdir in $libdirs; do +- $ECHO " $libdir" +- done +- echo +- echo "If you ever happen to want to link against installed libraries" +- echo "in a given directory, LIBDIR, you must either use libtool, and" +- echo "specify the full pathname of the library, or use the \`-LLIBDIR'" +- echo "flag during linking and do at least one of the following:" +- if test -n "$shlibpath_var"; then +- echo " - add LIBDIR to the \`$shlibpath_var' environment variable" +- echo " during execution" +- fi +- if test -n "$runpath_var"; then +- echo " - add LIBDIR to the \`$runpath_var' environment variable" +- echo " during linking" +- fi +- if test -n "$hardcode_libdir_flag_spec"; then +- libdir=LIBDIR +- eval "flag=\"$hardcode_libdir_flag_spec\"" ++ if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then ++ echo "----------------------------------------------------------------------" ++ echo "Libraries have been installed in:" ++ for libdir in $libdirs; do ++ $ECHO " $libdir" ++ done ++ echo ++ echo "If you ever happen to want to link against installed libraries" ++ echo "in a given directory, LIBDIR, you must either use libtool, and" ++ echo "specify the full pathname of the library, or use the \`-LLIBDIR'" ++ echo "flag during linking and do at least one of the following:" ++ if test -n "$shlibpath_var"; then ++ echo " - add LIBDIR to the \`$shlibpath_var' environment variable" ++ echo " during execution" ++ fi ++ if test -n "$runpath_var"; then ++ echo " - add LIBDIR to the \`$runpath_var' environment variable" ++ echo " during linking" ++ fi ++ if test -n "$hardcode_libdir_flag_spec"; then ++ libdir=LIBDIR ++ eval flag=\"$hardcode_libdir_flag_spec\" + +- $ECHO " - use the \`$flag' linker flag" +- fi +- if test -n "$admincmds"; then +- $ECHO " - have your system administrator run these commands:$admincmds" +- fi +- if test -f /etc/ld.so.conf; then +- echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" +- fi +- echo ++ $ECHO " - use the \`$flag' linker flag" ++ fi ++ if test -n "$admincmds"; then ++ $ECHO " - have your system administrator run these commands:$admincmds" ++ fi ++ if test -f /etc/ld.so.conf; then ++ echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'" ++ fi ++ echo + +- echo "See any operating system documentation about shared libraries for" +- case $host in +- solaris2.[6789]|solaris2.1[0-9]) +- echo "more information, such as the ld(1), crle(1) and ld.so(8) manual" +- echo "pages." +- ;; +- *) +- echo "more information, such as the ld(1) and ld.so(8) manual pages." +- ;; +- esac +- echo "----------------------------------------------------------------------" ++ echo "See any operating system documentation about shared libraries for" ++ case $host in ++ solaris2.[6789]|solaris2.1[0-9]) ++ echo "more information, such as the ld(1), crle(1) and ld.so(8) manual" ++ echo "pages." ++ ;; ++ *) ++ echo "more information, such as the ld(1) and ld.so(8) manual pages." ++ ;; ++ esac ++ echo "----------------------------------------------------------------------" ++ fi + exit $EXIT_SUCCESS + } + +-test "$mode" = finish && func_mode_finish ${1+"$@"} ++test "$opt_mode" = finish && func_mode_finish ${1+"$@"} + + + # func_mode_install arg... +@@ -2077,7 +2786,7 @@ func_mode_install () + # The real first argument should be the name of the installation program. + # Aesthetically quote it. 
+ func_quote_for_eval "$arg" +- install_prog="$install_prog$func_quote_for_eval_result" ++ func_append install_prog "$func_quote_for_eval_result" + install_shared_prog=$install_prog + case " $install_prog " in + *[\\\ /]cp\ *) install_cp=: ;; +@@ -2097,7 +2806,7 @@ func_mode_install () + do + arg2= + if test -n "$dest"; then +- files="$files $dest" ++ func_append files " $dest" + dest=$arg + continue + fi +@@ -2135,11 +2844,11 @@ func_mode_install () + + # Aesthetically quote the argument. + func_quote_for_eval "$arg" +- install_prog="$install_prog $func_quote_for_eval_result" ++ func_append install_prog " $func_quote_for_eval_result" + if test -n "$arg2"; then + func_quote_for_eval "$arg2" + fi +- install_shared_prog="$install_shared_prog $func_quote_for_eval_result" ++ func_append install_shared_prog " $func_quote_for_eval_result" + done + + test -z "$install_prog" && \ +@@ -2151,7 +2860,7 @@ func_mode_install () + if test -n "$install_override_mode" && $no_mode; then + if $install_cp; then :; else + func_quote_for_eval "$install_override_mode" +- install_shared_prog="$install_shared_prog -m $func_quote_for_eval_result" ++ func_append install_shared_prog " -m $func_quote_for_eval_result" + fi + fi + +@@ -2209,10 +2918,13 @@ func_mode_install () + case $file in + *.$libext) + # Do the static libraries later. +- staticlibs="$staticlibs $file" ++ func_append staticlibs " $file" + ;; + + *.la) ++ func_resolve_sysroot "$file" ++ file=$func_resolve_sysroot_result ++ + # Check to see that this really is a libtool archive. + func_lalib_unsafe_p "$file" \ + || func_fatal_help "\`$file' is not a valid libtool archive" +@@ -2226,23 +2938,30 @@ func_mode_install () + if test "X$destdir" = "X$libdir"; then + case "$current_libdirs " in + *" $libdir "*) ;; +- *) current_libdirs="$current_libdirs $libdir" ;; ++ *) func_append current_libdirs " $libdir" ;; + esac + else + # Note the libdir as a future libdir. + case "$future_libdirs " in + *" $libdir "*) ;; +- *) future_libdirs="$future_libdirs $libdir" ;; ++ *) func_append future_libdirs " $libdir" ;; + esac + fi + + func_dirname "$file" "/" "" + dir="$func_dirname_result" +- dir="$dir$objdir" ++ func_append dir "$objdir" + + if test -n "$relink_command"; then ++ # Strip any trailing slash from the destination. ++ func_stripname '' '/' "$libdir" ++ destlibdir=$func_stripname_result ++ ++ func_stripname '' '/' "$destdir" ++ s_destdir=$func_stripname_result ++ + # Determine the prefix the user has applied to our future dir. +- inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"` ++ inst_prefix_dir=`$ECHO "X$s_destdir" | $Xsed -e "s%$destlibdir\$%%"` + + # Don't allow the user to place us outside of our expected + # location b/c this prevents finding dependent libraries that +@@ -2315,7 +3034,7 @@ func_mode_install () + func_show_eval "$install_prog $instname $destdir/$name" 'exit $?' + + # Maybe install the static library, too. +- test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library" ++ test -n "$old_library" && func_append staticlibs " $dir/$old_library" + ;; + + *.lo) +@@ -2503,7 +3222,7 @@ func_mode_install () + test -n "$future_libdirs" && \ + func_warning "remember to run \`$progname --finish$future_libdirs'" + +- if test -n "$current_libdirs" && $opt_finish; then ++ if test -n "$current_libdirs"; then + # Maybe just do a dry run. 
+ $opt_dry_run && current_libdirs=" -n$current_libdirs" + exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs' +@@ -2512,7 +3231,7 @@ func_mode_install () + fi + } + +-test "$mode" = install && func_mode_install ${1+"$@"} ++test "$opt_mode" = install && func_mode_install ${1+"$@"} + + + # func_generate_dlsyms outputname originator pic_p +@@ -2559,6 +3278,18 @@ extern \"C\" { + #pragma GCC diagnostic ignored \"-Wstrict-prototypes\" + #endif + ++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ ++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) ++/* DATA imports from DLLs on WIN32 con't be const, because runtime ++ relocations are performed -- see ld's documentation on pseudo-relocs. */ ++# define LT_DLSYM_CONST ++#elif defined(__osf__) ++/* This system does not cope well with relocations in const data. */ ++# define LT_DLSYM_CONST ++#else ++# define LT_DLSYM_CONST const ++#endif ++ + /* External symbol declarations for the compiler. */\ + " + +@@ -2570,21 +3301,22 @@ extern \"C\" { + # Add our own program objects to the symbol list. + progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP` + for progfile in $progfiles; do +- func_verbose "extracting global C symbols from \`$progfile'" +- $opt_dry_run || eval "$NM $progfile | $global_symbol_pipe >> '$nlist'" ++ func_to_tool_file "$progfile" func_convert_file_msys_to_w32 ++ func_verbose "extracting global C symbols from \`$func_to_tool_file_result'" ++ $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'" + done + + if test -n "$exclude_expsyms"; then + $opt_dry_run || { +- $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T +- $MV "$nlist"T "$nlist" ++ eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T' ++ eval '$MV "$nlist"T "$nlist"' + } + fi + + if test -n "$export_symbols_regex"; then + $opt_dry_run || { +- $EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T +- $MV "$nlist"T "$nlist" ++ eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T' ++ eval '$MV "$nlist"T "$nlist"' + } + fi + +@@ -2593,23 +3325,23 @@ extern \"C\" { + export_symbols="$output_objdir/$outputname.exp" + $opt_dry_run || { + $RM $export_symbols +- ${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' < "$nlist" > "$export_symbols" ++ eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' + case $host in + *cygwin* | *mingw* | *cegcc* ) +- echo EXPORTS > "$output_objdir/$outputname.def" +- cat "$export_symbols" >> "$output_objdir/$outputname.def" ++ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' ++ eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"' + ;; + esac + } + else + $opt_dry_run || { +- ${SED} -e 's/\([].[*^$]\)/\\\1/g' -e 's/^/ /' -e 's/$/$/' < "$export_symbols" > "$output_objdir/$outputname.exp" +- $GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T +- $MV "$nlist"T "$nlist" ++ eval "${SED} -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' ++ eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T' ++ eval '$MV "$nlist"T "$nlist"' + case $host in + *cygwin* | *mingw* | *cegcc* ) +- echo EXPORTS > "$output_objdir/$outputname.def" +- cat "$nlist" >> "$output_objdir/$outputname.def" ++ eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' ++ eval 'cat "$nlist" >> "$output_objdir/$outputname.def"' + ;; + esac + } +@@ -2620,10 +3352,52 @@ extern \"C\" { + func_verbose 
"extracting global C symbols from \`$dlprefile'" + func_basename "$dlprefile" + name="$func_basename_result" +- $opt_dry_run || { +- $ECHO ": $name " >> "$nlist" +- eval "$NM $dlprefile 2>/dev/null | $global_symbol_pipe >> '$nlist'" +- } ++ case $host in ++ *cygwin* | *mingw* | *cegcc* ) ++ # if an import library, we need to obtain dlname ++ if func_win32_import_lib_p "$dlprefile"; then ++ func_tr_sh "$dlprefile" ++ eval "curr_lafile=\$libfile_$func_tr_sh_result" ++ dlprefile_dlbasename="" ++ if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then ++ # Use subshell, to avoid clobbering current variable values ++ dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"` ++ if test -n "$dlprefile_dlname" ; then ++ func_basename "$dlprefile_dlname" ++ dlprefile_dlbasename="$func_basename_result" ++ else ++ # no lafile. user explicitly requested -dlpreopen . ++ $sharedlib_from_linklib_cmd "$dlprefile" ++ dlprefile_dlbasename=$sharedlib_from_linklib_result ++ fi ++ fi ++ $opt_dry_run || { ++ if test -n "$dlprefile_dlbasename" ; then ++ eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"' ++ else ++ func_warning "Could not compute DLL name from $name" ++ eval '$ECHO ": $name " >> "$nlist"' ++ fi ++ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 ++ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe | ++ $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'" ++ } ++ else # not an import lib ++ $opt_dry_run || { ++ eval '$ECHO ": $name " >> "$nlist"' ++ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 ++ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'" ++ } ++ fi ++ ;; ++ *) ++ $opt_dry_run || { ++ eval '$ECHO ": $name " >> "$nlist"' ++ func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 ++ eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'" ++ } ++ ;; ++ esac + done + + $opt_dry_run || { +@@ -2661,26 +3435,9 @@ typedef struct { + const char *name; + void *address; + } lt_dlsymlist; +-" +- case $host in +- *cygwin* | *mingw* | *cegcc* ) +- echo >> "$output_objdir/$my_dlsyms" "\ +-/* DATA imports from DLLs on WIN32 con't be const, because +- runtime relocations are performed -- see ld's documentation +- on pseudo-relocs. */" +- lt_dlsym_const= ;; +- *osf5*) +- echo >> "$output_objdir/$my_dlsyms" "\ +-/* This system does not cope well with relocations in const data */" +- lt_dlsym_const= ;; +- *) +- lt_dlsym_const=const ;; +- esac +- +- echo >> "$output_objdir/$my_dlsyms" "\ +-extern $lt_dlsym_const lt_dlsymlist ++extern LT_DLSYM_CONST lt_dlsymlist + lt_${my_prefix}_LTX_preloaded_symbols[]; +-$lt_dlsym_const lt_dlsymlist ++LT_DLSYM_CONST lt_dlsymlist + lt_${my_prefix}_LTX_preloaded_symbols[] = + {\ + { \"$my_originator\", (void *) 0 }," +@@ -2736,7 +3493,7 @@ static const void *lt_preloaded_setup() { + for arg in $LTCFLAGS; do + case $arg in + -pie | -fpie | -fPIE) ;; +- *) symtab_cflags="$symtab_cflags $arg" ;; ++ *) func_append symtab_cflags " $arg" ;; + esac + done + +@@ -2796,9 +3553,11 @@ func_win32_libid () + win32_libid_type="x86 archive import" + ;; + *ar\ archive*) # could be an import, or static +- if $OBJDUMP -f "$1" | $SED -e '10q' 2>/dev/null | +- $EGREP 'file format (pe-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then +- win32_nmres=`$NM -f posix -A "$1" | ++ # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD. 
++ if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | ++ $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then ++ func_to_tool_file "$1" func_convert_file_msys_to_w32 ++ win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" | + $SED -n -e ' + 1,100{ + / I /{ +@@ -2827,6 +3586,131 @@ func_win32_libid () + $ECHO "$win32_libid_type" + } + ++# func_cygming_dll_for_implib ARG ++# ++# Platform-specific function to extract the ++# name of the DLL associated with the specified ++# import library ARG. ++# Invoked by eval'ing the libtool variable ++# $sharedlib_from_linklib_cmd ++# Result is available in the variable ++# $sharedlib_from_linklib_result ++func_cygming_dll_for_implib () ++{ ++ $opt_debug ++ sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"` ++} ++ ++# func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs ++# ++# The is the core of a fallback implementation of a ++# platform-specific function to extract the name of the ++# DLL associated with the specified import library LIBNAME. ++# ++# SECTION_NAME is either .idata$6 or .idata$7, depending ++# on the platform and compiler that created the implib. ++# ++# Echos the name of the DLL associated with the ++# specified import library. ++func_cygming_dll_for_implib_fallback_core () ++{ ++ $opt_debug ++ match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"` ++ $OBJDUMP -s --section "$1" "$2" 2>/dev/null | ++ $SED '/^Contents of section '"$match_literal"':/{ ++ # Place marker at beginning of archive member dllname section ++ s/.*/====MARK====/ ++ p ++ d ++ } ++ # These lines can sometimes be longer than 43 characters, but ++ # are always uninteresting ++ /:[ ]*file format pe[i]\{,1\}-/d ++ /^In archive [^:]*:/d ++ # Ensure marker is printed ++ /^====MARK====/p ++ # Remove all lines with less than 43 characters ++ /^.\{43\}/!d ++ # From remaining lines, remove first 43 characters ++ s/^.\{43\}//' | ++ $SED -n ' ++ # Join marker and all lines until next marker into a single line ++ /^====MARK====/ b para ++ H ++ $ b para ++ b ++ :para ++ x ++ s/\n//g ++ # Remove the marker ++ s/^====MARK====// ++ # Remove trailing dots and whitespace ++ s/[\. \t]*$// ++ # Print ++ /./p' | ++ # we now have a list, one entry per line, of the stringified ++ # contents of the appropriate section of all members of the ++ # archive which possess that section. Heuristic: eliminate ++ # all those which have a first or second character that is ++ # a '.' (that is, objdump's representation of an unprintable ++ # character.) This should work for all archives with less than ++ # 0x302f exports -- but will fail for DLLs whose name actually ++ # begins with a literal '.' or a single character followed by ++ # a '.'. ++ # ++ # Of those that remain, print the first one. ++ $SED -e '/^\./d;/^.\./d;q' ++} ++ ++# func_cygming_gnu_implib_p ARG ++# This predicate returns with zero status (TRUE) if ++# ARG is a GNU/binutils-style import library. Returns ++# with nonzero status (FALSE) otherwise. ++func_cygming_gnu_implib_p () ++{ ++ $opt_debug ++ func_to_tool_file "$1" func_convert_file_msys_to_w32 ++ func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'` ++ test -n "$func_cygming_gnu_implib_tmp" ++} ++ ++# func_cygming_ms_implib_p ARG ++# This predicate returns with zero status (TRUE) if ++# ARG is an MS-style import library. Returns ++# with nonzero status (FALSE) otherwise. 
++func_cygming_ms_implib_p () ++{ ++ $opt_debug ++ func_to_tool_file "$1" func_convert_file_msys_to_w32 ++ func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'` ++ test -n "$func_cygming_ms_implib_tmp" ++} ++ ++# func_cygming_dll_for_implib_fallback ARG ++# Platform-specific function to extract the ++# name of the DLL associated with the specified ++# import library ARG. ++# ++# This fallback implementation is for use when $DLLTOOL ++# does not support the --identify-strict option. ++# Invoked by eval'ing the libtool variable ++# $sharedlib_from_linklib_cmd ++# Result is available in the variable ++# $sharedlib_from_linklib_result ++func_cygming_dll_for_implib_fallback () ++{ ++ $opt_debug ++ if func_cygming_gnu_implib_p "$1" ; then ++ # binutils import library ++ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"` ++ elif func_cygming_ms_implib_p "$1" ; then ++ # ms-generated import library ++ sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"` ++ else ++ # unknown ++ sharedlib_from_linklib_result="" ++ fi ++} + + + # func_extract_an_archive dir oldlib +@@ -2917,7 +3801,7 @@ func_extract_archives () + darwin_file= + darwin_files= + for darwin_file in $darwin_filelist; do +- darwin_files=`find unfat-$$ -name $darwin_file -print | $NL2SP` ++ darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP` + $LIPO -create -output "$darwin_file" $darwin_files + done # $darwin_filelist + $RM -rf unfat-$$ +@@ -2932,7 +3816,7 @@ func_extract_archives () + func_extract_an_archive "$my_xdir" "$my_xabs" + ;; + esac +- my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP` ++ my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP` + done + + func_extract_archives_result="$my_oldobjs" +@@ -3014,7 +3898,110 @@ func_fallback_echo () + _LTECHO_EOF' + } + ECHO=\"$qECHO\" +- fi\ ++ fi ++ ++# Very basic option parsing. These options are (a) specific to ++# the libtool wrapper, (b) are identical between the wrapper ++# /script/ and the wrapper /executable/ which is used only on ++# windows platforms, and (c) all begin with the string "--lt-" ++# (application programs are unlikely to have options which match ++# this pattern). ++# ++# There are only two supported options: --lt-debug and ++# --lt-dump-script. There is, deliberately, no --lt-help. ++# ++# The first argument to this parsing function should be the ++# script's $0 value, followed by "$@". ++lt_option_debug= ++func_parse_lt_options () ++{ ++ lt_script_arg0=\$0 ++ shift ++ for lt_opt ++ do ++ case \"\$lt_opt\" in ++ --lt-debug) lt_option_debug=1 ;; ++ --lt-dump-script) ++ lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\` ++ test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=. ++ lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\` ++ cat \"\$lt_dump_D/\$lt_dump_F\" ++ exit 0 ++ ;; ++ --lt-*) ++ \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2 ++ exit 1 ++ ;; ++ esac ++ done ++ ++ # Print the debug banner immediately: ++ if test -n \"\$lt_option_debug\"; then ++ echo \"${outputname}:${output}:\${LINENO}: libtool wrapper (GNU $PACKAGE$TIMESTAMP) $VERSION\" 1>&2 ++ fi ++} ++ ++# Used when --lt-debug. 
Prints its arguments to stdout ++# (redirection is the responsibility of the caller) ++func_lt_dump_args () ++{ ++ lt_dump_args_N=1; ++ for lt_arg ++ do ++ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[\$lt_dump_args_N]: \$lt_arg\" ++ lt_dump_args_N=\`expr \$lt_dump_args_N + 1\` ++ done ++} ++ ++# Core function for launching the target application ++func_exec_program_core () ++{ ++" ++ case $host in ++ # Backslashes separate directories on plain windows ++ *-*-mingw | *-*-os2* | *-cegcc*) ++ $ECHO "\ ++ if test -n \"\$lt_option_debug\"; then ++ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir\\\\\$program\" 1>&2 ++ func_lt_dump_args \${1+\"\$@\"} 1>&2 ++ fi ++ exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} ++" ++ ;; ++ ++ *) ++ $ECHO "\ ++ if test -n \"\$lt_option_debug\"; then ++ \$ECHO \"${outputname}:${output}:\${LINENO}: newargv[0]: \$progdir/\$program\" 1>&2 ++ func_lt_dump_args \${1+\"\$@\"} 1>&2 ++ fi ++ exec \"\$progdir/\$program\" \${1+\"\$@\"} ++" ++ ;; ++ esac ++ $ECHO "\ ++ \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2 ++ exit 1 ++} ++ ++# A function to encapsulate launching the target application ++# Strips options in the --lt-* namespace from \$@ and ++# launches target application with the remaining arguments. ++func_exec_program () ++{ ++ for lt_wr_arg ++ do ++ case \$lt_wr_arg in ++ --lt-*) ;; ++ *) set x \"\$@\" \"\$lt_wr_arg\"; shift;; ++ esac ++ shift ++ done ++ func_exec_program_core \${1+\"\$@\"} ++} ++ ++ # Parse options ++ func_parse_lt_options \"\$0\" \${1+\"\$@\"} + + # Find the directory that this script lives in. + thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\` +@@ -3078,7 +4065,7 @@ _LTECHO_EOF' + + # relink executable if necessary + if test -n \"\$relink_command\"; then +- if relink_command_output=\`eval \"\$relink_command\" 2>&1\`; then : ++ if relink_command_output=\`eval \$relink_command 2>&1\`; then : + else + $ECHO \"\$relink_command_output\" >&2 + $RM \"\$progdir/\$file\" +@@ -3102,6 +4089,18 @@ _LTECHO_EOF' + + if test -f \"\$progdir/\$program\"; then" + ++ # fixup the dll searchpath if we need to. ++ # ++ # Fix the DLL searchpath if we need to. Do this before prepending ++ # to shlibpath, because on Windows, both are PATH and uninstalled ++ # libraries must come first. ++ if test -n "$dllsearchpath"; then ++ $ECHO "\ ++ # Add the dll search path components to the executable PATH ++ PATH=$dllsearchpath:\$PATH ++" ++ fi ++ + # Export our shlibpath_var if we have one. + if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then + $ECHO "\ +@@ -3116,35 +4115,10 @@ _LTECHO_EOF' + " + fi + +- # fixup the dll searchpath if we need to. +- if test -n "$dllsearchpath"; then +- $ECHO "\ +- # Add the dll search path components to the executable PATH +- PATH=$dllsearchpath:\$PATH +-" +- fi +- + $ECHO "\ + if test \"\$libtool_execute_magic\" != \"$magic\"; then + # Run the actual program with our arguments. +-" +- case $host in +- # Backslashes separate directories on plain windows +- *-*-mingw | *-*-os2* | *-cegcc*) +- $ECHO "\ +- exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} +-" +- ;; +- +- *) +- $ECHO "\ +- exec \"\$progdir/\$program\" \${1+\"\$@\"} +-" +- ;; +- esac +- $ECHO "\ +- \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2 +- exit 1 ++ func_exec_program \${1+\"\$@\"} + fi + else + # The program doesn't exist. +@@ -3158,166 +4132,6 @@ fi\ + } + + +-# func_to_host_path arg +-# +-# Convert paths to host format when used with build tools. 
+-# Intended for use with "native" mingw (where libtool itself +-# is running under the msys shell), or in the following cross- +-# build environments: +-# $build $host +-# mingw (msys) mingw [e.g. native] +-# cygwin mingw +-# *nix + wine mingw +-# where wine is equipped with the `winepath' executable. +-# In the native mingw case, the (msys) shell automatically +-# converts paths for any non-msys applications it launches, +-# but that facility isn't available from inside the cwrapper. +-# Similar accommodations are necessary for $host mingw and +-# $build cygwin. Calling this function does no harm for other +-# $host/$build combinations not listed above. +-# +-# ARG is the path (on $build) that should be converted to +-# the proper representation for $host. The result is stored +-# in $func_to_host_path_result. +-func_to_host_path () +-{ +- func_to_host_path_result="$1" +- if test -n "$1"; then +- case $host in +- *mingw* ) +- lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' +- case $build in +- *mingw* ) # actually, msys +- # awkward: cmd appends spaces to result +- func_to_host_path_result=`( cmd //c echo "$1" ) 2>/dev/null | +- $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` +- ;; +- *cygwin* ) +- func_to_host_path_result=`cygpath -w "$1" | +- $SED -e "$lt_sed_naive_backslashify"` +- ;; +- * ) +- # Unfortunately, winepath does not exit with a non-zero +- # error code, so we are forced to check the contents of +- # stdout. On the other hand, if the command is not +- # found, the shell will set an exit code of 127 and print +- # *an error message* to stdout. So we must check for both +- # error code of zero AND non-empty stdout, which explains +- # the odd construction: +- func_to_host_path_tmp1=`winepath -w "$1" 2>/dev/null` +- if test "$?" -eq 0 && test -n "${func_to_host_path_tmp1}"; then +- func_to_host_path_result=`$ECHO "$func_to_host_path_tmp1" | +- $SED -e "$lt_sed_naive_backslashify"` +- else +- # Allow warning below. +- func_to_host_path_result= +- fi +- ;; +- esac +- if test -z "$func_to_host_path_result" ; then +- func_error "Could not determine host path corresponding to" +- func_error " \`$1'" +- func_error "Continuing, but uninstalled executables may not work." +- # Fallback: +- func_to_host_path_result="$1" +- fi +- ;; +- esac +- fi +-} +-# end: func_to_host_path +- +-# func_to_host_pathlist arg +-# +-# Convert pathlists to host format when used with build tools. +-# See func_to_host_path(), above. This function supports the +-# following $build/$host combinations (but does no harm for +-# combinations not listed here): +-# $build $host +-# mingw (msys) mingw [e.g. native] +-# cygwin mingw +-# *nix + wine mingw +-# +-# Path separators are also converted from $build format to +-# $host format. If ARG begins or ends with a path separator +-# character, it is preserved (but converted to $host format) +-# on output. +-# +-# ARG is a pathlist (on $build) that should be converted to +-# the proper representation on $host. The result is stored +-# in $func_to_host_pathlist_result. +-func_to_host_pathlist () +-{ +- func_to_host_pathlist_result="$1" +- if test -n "$1"; then +- case $host in +- *mingw* ) +- lt_sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' +- # Remove leading and trailing path separator characters from +- # ARG. msys behavior is inconsistent here, cygpath turns them +- # into '.;' and ';.', and winepath ignores them completely. 
+- func_stripname : : "$1" +- func_to_host_pathlist_tmp1=$func_stripname_result +- case $build in +- *mingw* ) # Actually, msys. +- # Awkward: cmd appends spaces to result. +- func_to_host_pathlist_result=` +- ( cmd //c echo "$func_to_host_pathlist_tmp1" ) 2>/dev/null | +- $SED -e 's/[ ]*$//' -e "$lt_sed_naive_backslashify"` +- ;; +- *cygwin* ) +- func_to_host_pathlist_result=`cygpath -w -p "$func_to_host_pathlist_tmp1" | +- $SED -e "$lt_sed_naive_backslashify"` +- ;; +- * ) +- # unfortunately, winepath doesn't convert pathlists +- func_to_host_pathlist_result="" +- func_to_host_pathlist_oldIFS=$IFS +- IFS=: +- for func_to_host_pathlist_f in $func_to_host_pathlist_tmp1 ; do +- IFS=$func_to_host_pathlist_oldIFS +- if test -n "$func_to_host_pathlist_f" ; then +- func_to_host_path "$func_to_host_pathlist_f" +- if test -n "$func_to_host_path_result" ; then +- if test -z "$func_to_host_pathlist_result" ; then +- func_to_host_pathlist_result="$func_to_host_path_result" +- else +- func_append func_to_host_pathlist_result ";$func_to_host_path_result" +- fi +- fi +- fi +- done +- IFS=$func_to_host_pathlist_oldIFS +- ;; +- esac +- if test -z "$func_to_host_pathlist_result"; then +- func_error "Could not determine the host path(s) corresponding to" +- func_error " \`$1'" +- func_error "Continuing, but uninstalled executables may not work." +- # Fallback. This may break if $1 contains DOS-style drive +- # specifications. The fix is not to complicate the expression +- # below, but for the user to provide a working wine installation +- # with winepath so that path translation in the cross-to-mingw +- # case works properly. +- lt_replace_pathsep_nix_to_dos="s|:|;|g" +- func_to_host_pathlist_result=`echo "$func_to_host_pathlist_tmp1" |\ +- $SED -e "$lt_replace_pathsep_nix_to_dos"` +- fi +- # Now, add the leading and trailing path separators back +- case "$1" in +- :* ) func_to_host_pathlist_result=";$func_to_host_pathlist_result" +- ;; +- esac +- case "$1" in +- *: ) func_append func_to_host_pathlist_result ";" +- ;; +- esac +- ;; +- esac +- fi +-} +-# end: func_to_host_pathlist +- + # func_emit_cwrapperexe_src + # emit the source code for a wrapper executable on stdout + # Must ONLY be called from within func_mode_link because +@@ -3334,10 +4148,6 @@ func_emit_cwrapperexe_src () + + This wrapper executable should never be moved out of the build directory. + If it is, it will not operate correctly. +- +- Currently, it simply execs the wrapper *script* "$SHELL $output", +- but could eventually absorb all of the scripts functionality and +- exec $objdir/$outputname directly. + */ + EOF + cat <<"EOF" +@@ -3462,22 +4272,13 @@ int setenv (const char *, const char *, int); + if (stale) { free ((void *) stale); stale = 0; } \ + } while (0) + +-#undef LTWRAPPER_DEBUGPRINTF +-#if defined LT_DEBUGWRAPPER +-# define LTWRAPPER_DEBUGPRINTF(args) ltwrapper_debugprintf args +-static void +-ltwrapper_debugprintf (const char *fmt, ...) 
+-{ +- va_list args; +- va_start (args, fmt); +- (void) vfprintf (stderr, fmt, args); +- va_end (args); +-} ++#if defined(LT_DEBUGWRAPPER) ++static int lt_debug = 1; + #else +-# define LTWRAPPER_DEBUGPRINTF(args) ++static int lt_debug = 0; + #endif + +-const char *program_name = NULL; ++const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */ + + void *xmalloc (size_t num); + char *xstrdup (const char *string); +@@ -3487,7 +4288,10 @@ char *chase_symlinks (const char *pathspec); + int make_executable (const char *path); + int check_executable (const char *path); + char *strendzap (char *str, const char *pat); +-void lt_fatal (const char *message, ...); ++void lt_debugprintf (const char *file, int line, const char *fmt, ...); ++void lt_fatal (const char *file, int line, const char *message, ...); ++static const char *nonnull (const char *s); ++static const char *nonempty (const char *s); + void lt_setenv (const char *name, const char *value); + char *lt_extend_str (const char *orig_value, const char *add, int to_end); + void lt_update_exe_path (const char *name, const char *value); +@@ -3497,14 +4301,14 @@ void lt_dump_script (FILE *f); + EOF + + cat <"))); ++ lt_debugprintf (__FILE__, __LINE__, "(main) lt_argv_zero: %s\n", ++ nonnull (lt_argv_zero)); + for (i = 0; i < newargc; i++) + { +- LTWRAPPER_DEBUGPRINTF (("(main) newargz[%d] : %s\n", i, (newargz[i] ? newargz[i] : ""))); ++ lt_debugprintf (__FILE__, __LINE__, "(main) newargz[%d]: %s\n", ++ i, nonnull (newargz[i])); + } + + EOF +@@ -3706,7 +4529,9 @@ EOF + if (rval == -1) + { + /* failed to start process */ +- LTWRAPPER_DEBUGPRINTF (("(main) failed to launch target \"%s\": errno = %d\n", lt_argv_zero, errno)); ++ lt_debugprintf (__FILE__, __LINE__, ++ "(main) failed to launch target \"%s\": %s\n", ++ lt_argv_zero, nonnull (strerror (errno))); + return 127; + } + return rval; +@@ -3728,7 +4553,7 @@ xmalloc (size_t num) + { + void *p = (void *) malloc (num); + if (!p) +- lt_fatal ("Memory exhausted"); ++ lt_fatal (__FILE__, __LINE__, "memory exhausted"); + + return p; + } +@@ -3762,8 +4587,8 @@ check_executable (const char *path) + { + struct stat st; + +- LTWRAPPER_DEBUGPRINTF (("(check_executable) : %s\n", +- path ? (*path ? path : "EMPTY!") : "NULL!")); ++ lt_debugprintf (__FILE__, __LINE__, "(check_executable): %s\n", ++ nonempty (path)); + if ((!path) || (!*path)) + return 0; + +@@ -3780,8 +4605,8 @@ make_executable (const char *path) + int rval = 0; + struct stat st; + +- LTWRAPPER_DEBUGPRINTF (("(make_executable) : %s\n", +- path ? (*path ? path : "EMPTY!") : "NULL!")); ++ lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n", ++ nonempty (path)); + if ((!path) || (!*path)) + return 0; + +@@ -3807,8 +4632,8 @@ find_executable (const char *wrapper) + int tmp_len; + char *concat_name; + +- LTWRAPPER_DEBUGPRINTF (("(find_executable) : %s\n", +- wrapper ? (*wrapper ? 
wrapper : "EMPTY!") : "NULL!")); ++ lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n", ++ nonempty (wrapper)); + + if ((wrapper == NULL) || (*wrapper == '\0')) + return NULL; +@@ -3861,7 +4686,8 @@ find_executable (const char *wrapper) + { + /* empty path: current directory */ + if (getcwd (tmp, LT_PATHMAX) == NULL) +- lt_fatal ("getcwd failed"); ++ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s", ++ nonnull (strerror (errno))); + tmp_len = strlen (tmp); + concat_name = + XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); +@@ -3886,7 +4712,8 @@ find_executable (const char *wrapper) + } + /* Relative path | not found in path: prepend cwd */ + if (getcwd (tmp, LT_PATHMAX) == NULL) +- lt_fatal ("getcwd failed"); ++ lt_fatal (__FILE__, __LINE__, "getcwd failed: %s", ++ nonnull (strerror (errno))); + tmp_len = strlen (tmp); + concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); + memcpy (concat_name, tmp, tmp_len); +@@ -3912,8 +4739,9 @@ chase_symlinks (const char *pathspec) + int has_symlinks = 0; + while (strlen (tmp_pathspec) && !has_symlinks) + { +- LTWRAPPER_DEBUGPRINTF (("checking path component for symlinks: %s\n", +- tmp_pathspec)); ++ lt_debugprintf (__FILE__, __LINE__, ++ "checking path component for symlinks: %s\n", ++ tmp_pathspec); + if (lstat (tmp_pathspec, &s) == 0) + { + if (S_ISLNK (s.st_mode) != 0) +@@ -3935,8 +4763,9 @@ chase_symlinks (const char *pathspec) + } + else + { +- char *errstr = strerror (errno); +- lt_fatal ("Error accessing file %s (%s)", tmp_pathspec, errstr); ++ lt_fatal (__FILE__, __LINE__, ++ "error accessing file \"%s\": %s", ++ tmp_pathspec, nonnull (strerror (errno))); + } + } + XFREE (tmp_pathspec); +@@ -3949,7 +4778,8 @@ chase_symlinks (const char *pathspec) + tmp_pathspec = realpath (pathspec, buf); + if (tmp_pathspec == 0) + { +- lt_fatal ("Could not follow symlinks for %s", pathspec); ++ lt_fatal (__FILE__, __LINE__, ++ "could not follow symlinks for %s", pathspec); + } + return xstrdup (tmp_pathspec); + #endif +@@ -3975,11 +4805,25 @@ strendzap (char *str, const char *pat) + return str; + } + ++void ++lt_debugprintf (const char *file, int line, const char *fmt, ...) ++{ ++ va_list args; ++ if (lt_debug) ++ { ++ (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line); ++ va_start (args, fmt); ++ (void) vfprintf (stderr, fmt, args); ++ va_end (args); ++ } ++} ++ + static void +-lt_error_core (int exit_status, const char *mode, ++lt_error_core (int exit_status, const char *file, ++ int line, const char *mode, + const char *message, va_list ap) + { +- fprintf (stderr, "%s: %s: ", program_name, mode); ++ fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode); + vfprintf (stderr, message, ap); + fprintf (stderr, ".\n"); + +@@ -3988,20 +4832,32 @@ lt_error_core (int exit_status, const char *mode, + } + + void +-lt_fatal (const char *message, ...) ++lt_fatal (const char *file, int line, const char *message, ...) + { + va_list ap; + va_start (ap, message); +- lt_error_core (EXIT_FAILURE, "FATAL", message, ap); ++ lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap); + va_end (ap); + } + ++static const char * ++nonnull (const char *s) ++{ ++ return s ? s : "(null)"; ++} ++ ++static const char * ++nonempty (const char *s) ++{ ++ return (s && !*s) ? "(empty)" : nonnull (s); ++} ++ + void + lt_setenv (const char *name, const char *value) + { +- LTWRAPPER_DEBUGPRINTF (("(lt_setenv) setting '%s' to '%s'\n", +- (name ? name : ""), +- (value ? 
value : ""))); ++ lt_debugprintf (__FILE__, __LINE__, ++ "(lt_setenv) setting '%s' to '%s'\n", ++ nonnull (name), nonnull (value)); + { + #ifdef HAVE_SETENV + /* always make a copy, for consistency with !HAVE_SETENV */ +@@ -4049,9 +4905,9 @@ lt_extend_str (const char *orig_value, const char *add, int to_end) + void + lt_update_exe_path (const char *name, const char *value) + { +- LTWRAPPER_DEBUGPRINTF (("(lt_update_exe_path) modifying '%s' by prepending '%s'\n", +- (name ? name : ""), +- (value ? value : ""))); ++ lt_debugprintf (__FILE__, __LINE__, ++ "(lt_update_exe_path) modifying '%s' by prepending '%s'\n", ++ nonnull (name), nonnull (value)); + + if (name && *name && value && *value) + { +@@ -4070,9 +4926,9 @@ lt_update_exe_path (const char *name, const char *value) + void + lt_update_lib_path (const char *name, const char *value) + { +- LTWRAPPER_DEBUGPRINTF (("(lt_update_lib_path) modifying '%s' by prepending '%s'\n", +- (name ? name : ""), +- (value ? value : ""))); ++ lt_debugprintf (__FILE__, __LINE__, ++ "(lt_update_lib_path) modifying '%s' by prepending '%s'\n", ++ nonnull (name), nonnull (value)); + + if (name && *name && value && *value) + { +@@ -4222,7 +5078,7 @@ EOF + func_win32_import_lib_p () + { + $opt_debug +- case `eval "$file_magic_cmd \"\$1\" 2>/dev/null" | $SED -e 10q` in ++ case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in + *import*) : ;; + *) false ;; + esac +@@ -4401,9 +5257,9 @@ func_mode_link () + ;; + *) + if test "$prev" = dlfiles; then +- dlfiles="$dlfiles $arg" ++ func_append dlfiles " $arg" + else +- dlprefiles="$dlprefiles $arg" ++ func_append dlprefiles " $arg" + fi + prev= + continue +@@ -4427,7 +5283,7 @@ func_mode_link () + *-*-darwin*) + case "$deplibs " in + *" $qarg.ltframework "*) ;; +- *) deplibs="$deplibs $qarg.ltframework" # this is fixed later ++ *) func_append deplibs " $qarg.ltframework" # this is fixed later + ;; + esac + ;; +@@ -4446,7 +5302,7 @@ func_mode_link () + moreargs= + for fil in `cat "$save_arg"` + do +-# moreargs="$moreargs $fil" ++# func_append moreargs " $fil" + arg=$fil + # A libtool-controlled object. + +@@ -4475,7 +5331,7 @@ func_mode_link () + + if test "$prev" = dlfiles; then + if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then +- dlfiles="$dlfiles $pic_object" ++ func_append dlfiles " $pic_object" + prev= + continue + else +@@ -4487,7 +5343,7 @@ func_mode_link () + # CHECK ME: I think I busted this. -Ossama + if test "$prev" = dlprefiles; then + # Preload the old-style object. 
+- dlprefiles="$dlprefiles $pic_object" ++ func_append dlprefiles " $pic_object" + prev= + fi + +@@ -4557,12 +5413,12 @@ func_mode_link () + if test "$prev" = rpath; then + case "$rpath " in + *" $arg "*) ;; +- *) rpath="$rpath $arg" ;; ++ *) func_append rpath " $arg" ;; + esac + else + case "$xrpath " in + *" $arg "*) ;; +- *) xrpath="$xrpath $arg" ;; ++ *) func_append xrpath " $arg" ;; + esac + fi + prev= +@@ -4574,28 +5430,28 @@ func_mode_link () + continue + ;; + weak) +- weak_libs="$weak_libs $arg" ++ func_append weak_libs " $arg" + prev= + continue + ;; + xcclinker) +- linker_flags="$linker_flags $qarg" +- compiler_flags="$compiler_flags $qarg" ++ func_append linker_flags " $qarg" ++ func_append compiler_flags " $qarg" + prev= + func_append compile_command " $qarg" + func_append finalize_command " $qarg" + continue + ;; + xcompiler) +- compiler_flags="$compiler_flags $qarg" ++ func_append compiler_flags " $qarg" + prev= + func_append compile_command " $qarg" + func_append finalize_command " $qarg" + continue + ;; + xlinker) +- linker_flags="$linker_flags $qarg" +- compiler_flags="$compiler_flags $wl$qarg" ++ func_append linker_flags " $qarg" ++ func_append compiler_flags " $wl$qarg" + prev= + func_append compile_command " $wl$qarg" + func_append finalize_command " $wl$qarg" +@@ -4686,15 +5542,16 @@ func_mode_link () + ;; + + -L*) +- func_stripname '-L' '' "$arg" +- dir=$func_stripname_result +- if test -z "$dir"; then ++ func_stripname "-L" '' "$arg" ++ if test -z "$func_stripname_result"; then + if test "$#" -gt 0; then + func_fatal_error "require no space between \`-L' and \`$1'" + else + func_fatal_error "need path for \`-L' option" + fi + fi ++ func_resolve_sysroot "$func_stripname_result" ++ dir=$func_resolve_sysroot_result + # We need an absolute path. 
+ case $dir in + [\\/]* | [A-Za-z]:[\\/]*) ;; +@@ -4706,10 +5563,16 @@ func_mode_link () + ;; + esac + case "$deplibs " in +- *" -L$dir "*) ;; ++ *" -L$dir "* | *" $arg "*) ++ # Will only happen for absolute or sysroot arguments ++ ;; + *) +- deplibs="$deplibs -L$dir" +- lib_search_path="$lib_search_path $dir" ++ # Preserve sysroot, but never include relative directories ++ case $dir in ++ [\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;; ++ *) func_append deplibs " -L$dir" ;; ++ esac ++ func_append lib_search_path " $dir" + ;; + esac + case $host in +@@ -4718,12 +5581,12 @@ func_mode_link () + case :$dllsearchpath: in + *":$dir:"*) ;; + ::) dllsearchpath=$dir;; +- *) dllsearchpath="$dllsearchpath:$dir";; ++ *) func_append dllsearchpath ":$dir";; + esac + case :$dllsearchpath: in + *":$testbindir:"*) ;; + ::) dllsearchpath=$testbindir;; +- *) dllsearchpath="$dllsearchpath:$testbindir";; ++ *) func_append dllsearchpath ":$testbindir";; + esac + ;; + esac +@@ -4747,7 +5610,7 @@ func_mode_link () + ;; + *-*-rhapsody* | *-*-darwin1.[012]) + # Rhapsody C and math libraries are in the System framework +- deplibs="$deplibs System.ltframework" ++ func_append deplibs " System.ltframework" + continue + ;; + *-*-sco3.2v5* | *-*-sco5v6*) +@@ -4758,9 +5621,6 @@ func_mode_link () + # Compiler inserts libc in the correct place for threads to work + test "X$arg" = "X-lc" && continue + ;; +- *-*-linux*) +- test "X$arg" = "X-lc" && continue +- ;; + esac + elif test "X$arg" = "X-lc_r"; then + case $host in +@@ -4770,7 +5630,7 @@ func_mode_link () + ;; + esac + fi +- deplibs="$deplibs $arg" ++ func_append deplibs " $arg" + continue + ;; + +@@ -4782,8 +5642,8 @@ func_mode_link () + # Tru64 UNIX uses -model [arg] to determine the layout of C++ + # classes, name mangling, and exception handling. + # Darwin uses the -arch flag to determine output architecture. +- -model|-arch|-isysroot) +- compiler_flags="$compiler_flags $arg" ++ -model|-arch|-isysroot|--sysroot) ++ func_append compiler_flags " $arg" + func_append compile_command " $arg" + func_append finalize_command " $arg" + prev=xcompiler +@@ -4791,12 +5651,12 @@ func_mode_link () + ;; + + -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe|-threads) +- compiler_flags="$compiler_flags $arg" ++ func_append compiler_flags " $arg" + func_append compile_command " $arg" + func_append finalize_command " $arg" + case "$new_inherited_linker_flags " in + *" $arg "*) ;; +- * ) new_inherited_linker_flags="$new_inherited_linker_flags $arg" ;; ++ * ) func_append new_inherited_linker_flags " $arg" ;; + esac + continue + ;; +@@ -4863,13 +5723,17 @@ func_mode_link () + # We need an absolute path. 
+ case $dir in + [\\/]* | [A-Za-z]:[\\/]*) ;; ++ =*) ++ func_stripname '=' '' "$dir" ++ dir=$lt_sysroot$func_stripname_result ++ ;; + *) + func_fatal_error "only absolute run-paths are allowed" + ;; + esac + case "$xrpath " in + *" $dir "*) ;; +- *) xrpath="$xrpath $dir" ;; ++ *) func_append xrpath " $dir" ;; + esac + continue + ;; +@@ -4922,8 +5786,8 @@ func_mode_link () + for flag in $args; do + IFS="$save_ifs" + func_quote_for_eval "$flag" +- arg="$arg $func_quote_for_eval_result" +- compiler_flags="$compiler_flags $func_quote_for_eval_result" ++ func_append arg " $func_quote_for_eval_result" ++ func_append compiler_flags " $func_quote_for_eval_result" + done + IFS="$save_ifs" + func_stripname ' ' '' "$arg" +@@ -4938,9 +5802,9 @@ func_mode_link () + for flag in $args; do + IFS="$save_ifs" + func_quote_for_eval "$flag" +- arg="$arg $wl$func_quote_for_eval_result" +- compiler_flags="$compiler_flags $wl$func_quote_for_eval_result" +- linker_flags="$linker_flags $func_quote_for_eval_result" ++ func_append arg " $wl$func_quote_for_eval_result" ++ func_append compiler_flags " $wl$func_quote_for_eval_result" ++ func_append linker_flags " $func_quote_for_eval_result" + done + IFS="$save_ifs" + func_stripname ' ' '' "$arg" +@@ -4968,24 +5832,27 @@ func_mode_link () + arg="$func_quote_for_eval_result" + ;; + +- # -64, -mips[0-9] enable 64-bit mode on the SGI compiler +- # -r[0-9][0-9]* specifies the processor on the SGI compiler +- # -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler +- # +DA*, +DD* enable 64-bit mode on the HP compiler +- # -q* pass through compiler args for the IBM compiler +- # -m*, -t[45]*, -txscale* pass through architecture-specific +- # compiler args for GCC +- # -F/path gives path to uninstalled frameworks, gcc on darwin +- # -p, -pg, --coverage, -fprofile-* pass through profiling flag for GCC +- # @file GCC response files +- # -tp=* Portland pgcc target processor selection ++ # Flags to be passed through unchanged, with rationale: ++ # -64, -mips[0-9] enable 64-bit mode for the SGI compiler ++ # -r[0-9][0-9]* specify processor for the SGI compiler ++ # -xarch=*, -xtarget=* enable 64-bit mode for the Sun compiler ++ # +DA*, +DD* enable 64-bit mode for the HP compiler ++ # -q* compiler args for the IBM compiler ++ # -m*, -t[45]*, -txscale* architecture-specific flags for GCC ++ # -F/path path to uninstalled frameworks, gcc on darwin ++ # -p, -pg, --coverage, -fprofile-* profiling flags for GCC ++ # @file GCC response files ++ # -tp=* Portland pgcc target processor selection ++ # --sysroot=* for sysroot support ++ # -O*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization + -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ +- -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*) ++ -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \ ++ -O*|-flto*|-fwhopr*|-fuse-linker-plugin) + func_quote_for_eval "$arg" + arg="$func_quote_for_eval_result" + func_append compile_command " $arg" + func_append finalize_command " $arg" +- compiler_flags="$compiler_flags $arg" ++ func_append compiler_flags " $arg" + continue + ;; + +@@ -4997,7 +5864,7 @@ func_mode_link () + + *.$objext) + # A standard object. 
+- objs="$objs $arg" ++ func_append objs " $arg" + ;; + + *.lo) +@@ -5028,7 +5895,7 @@ func_mode_link () + + if test "$prev" = dlfiles; then + if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then +- dlfiles="$dlfiles $pic_object" ++ func_append dlfiles " $pic_object" + prev= + continue + else +@@ -5040,7 +5907,7 @@ func_mode_link () + # CHECK ME: I think I busted this. -Ossama + if test "$prev" = dlprefiles; then + # Preload the old-style object. +- dlprefiles="$dlprefiles $pic_object" ++ func_append dlprefiles " $pic_object" + prev= + fi + +@@ -5085,24 +5952,25 @@ func_mode_link () + + *.$libext) + # An archive. +- deplibs="$deplibs $arg" +- old_deplibs="$old_deplibs $arg" ++ func_append deplibs " $arg" ++ func_append old_deplibs " $arg" + continue + ;; + + *.la) + # A libtool-controlled library. + ++ func_resolve_sysroot "$arg" + if test "$prev" = dlfiles; then + # This library was specified with -dlopen. +- dlfiles="$dlfiles $arg" ++ func_append dlfiles " $func_resolve_sysroot_result" + prev= + elif test "$prev" = dlprefiles; then + # The library was specified with -dlpreopen. +- dlprefiles="$dlprefiles $arg" ++ func_append dlprefiles " $func_resolve_sysroot_result" + prev= + else +- deplibs="$deplibs $arg" ++ func_append deplibs " $func_resolve_sysroot_result" + fi + continue + ;; +@@ -5127,7 +5995,7 @@ func_mode_link () + func_fatal_help "the \`$prevarg' option requires an argument" + + if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then +- eval "arg=\"$export_dynamic_flag_spec\"" ++ eval arg=\"$export_dynamic_flag_spec\" + func_append compile_command " $arg" + func_append finalize_command " $arg" + fi +@@ -5144,11 +6012,13 @@ func_mode_link () + else + shlib_search_path= + fi +- eval "sys_lib_search_path=\"$sys_lib_search_path_spec\"" +- eval "sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"" ++ eval sys_lib_search_path=\"$sys_lib_search_path_spec\" ++ eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" + + func_dirname "$output" "/" "" + output_objdir="$func_dirname_result$objdir" ++ func_to_tool_file "$output_objdir/" ++ tool_output_objdir=$func_to_tool_file_result + # Create the object directory. + func_mkdir_p "$output_objdir" + +@@ -5169,12 +6039,12 @@ func_mode_link () + # Find all interdependent deplibs by searching for libraries + # that are linked more than once (e.g. 
-la -lb -la) + for deplib in $deplibs; do +- if $opt_duplicate_deps ; then ++ if $opt_preserve_dup_deps ; then + case "$libs " in +- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; ++ *" $deplib "*) func_append specialdeplibs " $deplib" ;; + esac + fi +- libs="$libs $deplib" ++ func_append libs " $deplib" + done + + if test "$linkmode" = lib; then +@@ -5187,9 +6057,9 @@ func_mode_link () + if $opt_duplicate_compiler_generated_deps; then + for pre_post_dep in $predeps $postdeps; do + case "$pre_post_deps " in +- *" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;; ++ *" $pre_post_dep "*) func_append specialdeplibs " $pre_post_deps" ;; + esac +- pre_post_deps="$pre_post_deps $pre_post_dep" ++ func_append pre_post_deps " $pre_post_dep" + done + fi + pre_post_deps= +@@ -5256,8 +6126,9 @@ func_mode_link () + for lib in $dlprefiles; do + # Ignore non-libtool-libs + dependency_libs= ++ func_resolve_sysroot "$lib" + case $lib in +- *.la) func_source "$lib" ;; ++ *.la) func_source "$func_resolve_sysroot_result" ;; + esac + + # Collect preopened libtool deplibs, except any this library +@@ -5267,7 +6138,7 @@ func_mode_link () + deplib_base=$func_basename_result + case " $weak_libs " in + *" $deplib_base "*) ;; +- *) deplibs="$deplibs $deplib" ;; ++ *) func_append deplibs " $deplib" ;; + esac + done + done +@@ -5288,11 +6159,11 @@ func_mode_link () + compile_deplibs="$deplib $compile_deplibs" + finalize_deplibs="$deplib $finalize_deplibs" + else +- compiler_flags="$compiler_flags $deplib" ++ func_append compiler_flags " $deplib" + if test "$linkmode" = lib ; then + case "$new_inherited_linker_flags " in + *" $deplib "*) ;; +- * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;; ++ * ) func_append new_inherited_linker_flags " $deplib" ;; + esac + fi + fi +@@ -5377,7 +6248,7 @@ func_mode_link () + if test "$linkmode" = lib ; then + case "$new_inherited_linker_flags " in + *" $deplib "*) ;; +- * ) new_inherited_linker_flags="$new_inherited_linker_flags $deplib" ;; ++ * ) func_append new_inherited_linker_flags " $deplib" ;; + esac + fi + fi +@@ -5390,7 +6261,8 @@ func_mode_link () + test "$pass" = conv && continue + newdependency_libs="$deplib $newdependency_libs" + func_stripname '-L' '' "$deplib" +- newlib_search_path="$newlib_search_path $func_stripname_result" ++ func_resolve_sysroot "$func_stripname_result" ++ func_append newlib_search_path " $func_resolve_sysroot_result" + ;; + prog) + if test "$pass" = conv; then +@@ -5404,7 +6276,8 @@ func_mode_link () + finalize_deplibs="$deplib $finalize_deplibs" + fi + func_stripname '-L' '' "$deplib" +- newlib_search_path="$newlib_search_path $func_stripname_result" ++ func_resolve_sysroot "$func_stripname_result" ++ func_append newlib_search_path " $func_resolve_sysroot_result" + ;; + *) + func_warning "\`-L' is ignored for archives/objects" +@@ -5415,17 +6288,21 @@ func_mode_link () + -R*) + if test "$pass" = link; then + func_stripname '-R' '' "$deplib" +- dir=$func_stripname_result ++ func_resolve_sysroot "$func_stripname_result" ++ dir=$func_resolve_sysroot_result + # Make sure the xrpath contains only unique directories. 
+ case "$xrpath " in + *" $dir "*) ;; +- *) xrpath="$xrpath $dir" ;; ++ *) func_append xrpath " $dir" ;; + esac + fi + deplibs="$deplib $deplibs" + continue + ;; +- *.la) lib="$deplib" ;; ++ *.la) ++ func_resolve_sysroot "$deplib" ++ lib=$func_resolve_sysroot_result ++ ;; + *.$libext) + if test "$pass" = conv; then + deplibs="$deplib $deplibs" +@@ -5488,11 +6365,11 @@ func_mode_link () + if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then + # If there is no dlopen support or we're linking statically, + # we need to preload. +- newdlprefiles="$newdlprefiles $deplib" ++ func_append newdlprefiles " $deplib" + compile_deplibs="$deplib $compile_deplibs" + finalize_deplibs="$deplib $finalize_deplibs" + else +- newdlfiles="$newdlfiles $deplib" ++ func_append newdlfiles " $deplib" + fi + fi + continue +@@ -5538,7 +6415,7 @@ func_mode_link () + for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do + case " $new_inherited_linker_flags " in + *" $tmp_inherited_linker_flag "*) ;; +- *) new_inherited_linker_flags="$new_inherited_linker_flags $tmp_inherited_linker_flag";; ++ *) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";; + esac + done + fi +@@ -5546,8 +6423,8 @@ func_mode_link () + if test "$linkmode,$pass" = "lib,link" || + test "$linkmode,$pass" = "prog,scan" || + { test "$linkmode" != prog && test "$linkmode" != lib; }; then +- test -n "$dlopen" && dlfiles="$dlfiles $dlopen" +- test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen" ++ test -n "$dlopen" && func_append dlfiles " $dlopen" ++ test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen" + fi + + if test "$pass" = conv; then +@@ -5558,20 +6435,20 @@ func_mode_link () + func_fatal_error "cannot find name of link library for \`$lib'" + fi + # It is a libtool convenience library, so add in its objects. +- convenience="$convenience $ladir/$objdir/$old_library" +- old_convenience="$old_convenience $ladir/$objdir/$old_library" ++ func_append convenience " $ladir/$objdir/$old_library" ++ func_append old_convenience " $ladir/$objdir/$old_library" + elif test "$linkmode" != prog && test "$linkmode" != lib; then + func_fatal_error "\`$lib' is not a convenience library" + fi + tmp_libs= + for deplib in $dependency_libs; do + deplibs="$deplib $deplibs" +- if $opt_duplicate_deps ; then ++ if $opt_preserve_dup_deps ; then + case "$tmp_libs " in +- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; ++ *" $deplib "*) func_append specialdeplibs " $deplib" ;; + esac + fi +- tmp_libs="$tmp_libs $deplib" ++ func_append tmp_libs " $deplib" + done + continue + fi # $pass = conv +@@ -5579,9 +6456,15 @@ func_mode_link () + + # Get the name of the library we link against. + linklib= +- for l in $old_library $library_names; do +- linklib="$l" +- done ++ if test -n "$old_library" && ++ { test "$prefer_static_libs" = yes || ++ test "$prefer_static_libs,$installed" = "built,no"; }; then ++ linklib=$old_library ++ else ++ for l in $old_library $library_names; do ++ linklib="$l" ++ done ++ fi + if test -z "$linklib"; then + func_fatal_error "cannot find name of link library for \`$lib'" + fi +@@ -5598,9 +6481,9 @@ func_mode_link () + # statically, we need to preload. We also need to preload any + # dependent libraries so libltdl's deplib preloader doesn't + # bomb out in the load deplibs phase. 
+- dlprefiles="$dlprefiles $lib $dependency_libs" ++ func_append dlprefiles " $lib $dependency_libs" + else +- newdlfiles="$newdlfiles $lib" ++ func_append newdlfiles " $lib" + fi + continue + fi # $pass = dlopen +@@ -5622,14 +6505,14 @@ func_mode_link () + + # Find the relevant object directory and library name. + if test "X$installed" = Xyes; then +- if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then ++ if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then + func_warning "library \`$lib' was moved." + dir="$ladir" + absdir="$abs_ladir" + libdir="$abs_ladir" + else +- dir="$libdir" +- absdir="$libdir" ++ dir="$lt_sysroot$libdir" ++ absdir="$lt_sysroot$libdir" + fi + test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes + else +@@ -5637,12 +6520,12 @@ func_mode_link () + dir="$ladir" + absdir="$abs_ladir" + # Remove this search path later +- notinst_path="$notinst_path $abs_ladir" ++ func_append notinst_path " $abs_ladir" + else + dir="$ladir/$objdir" + absdir="$abs_ladir/$objdir" + # Remove this search path later +- notinst_path="$notinst_path $abs_ladir" ++ func_append notinst_path " $abs_ladir" + fi + fi # $installed = yes + func_stripname 'lib' '.la' "$laname" +@@ -5653,20 +6536,46 @@ func_mode_link () + if test -z "$libdir" && test "$linkmode" = prog; then + func_fatal_error "only libraries may -dlpreopen a convenience library: \`$lib'" + fi +- # Prefer using a static library (so that no silly _DYNAMIC symbols +- # are required to link). +- if test -n "$old_library"; then +- newdlprefiles="$newdlprefiles $dir/$old_library" +- # Keep a list of preopened convenience libraries to check +- # that they are being used correctly in the link pass. +- test -z "$libdir" && \ +- dlpreconveniencelibs="$dlpreconveniencelibs $dir/$old_library" +- # Otherwise, use the dlname, so that lt_dlopen finds it. +- elif test -n "$dlname"; then +- newdlprefiles="$newdlprefiles $dir/$dlname" +- else +- newdlprefiles="$newdlprefiles $dir/$linklib" +- fi ++ case "$host" in ++ # special handling for platforms with PE-DLLs. ++ *cygwin* | *mingw* | *cegcc* ) ++ # Linker will automatically link against shared library if both ++ # static and shared are present. Therefore, ensure we extract ++ # symbols from the import library if a shared library is present ++ # (otherwise, the dlopen module name will be incorrect). We do ++ # this by putting the import library name into $newdlprefiles. ++ # We recover the dlopen module name by 'saving' the la file ++ # name in a special purpose variable, and (later) extracting the ++ # dlname from the la file. ++ if test -n "$dlname"; then ++ func_tr_sh "$dir/$linklib" ++ eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname" ++ func_append newdlprefiles " $dir/$linklib" ++ else ++ func_append newdlprefiles " $dir/$old_library" ++ # Keep a list of preopened convenience libraries to check ++ # that they are being used correctly in the link pass. ++ test -z "$libdir" && \ ++ func_append dlpreconveniencelibs " $dir/$old_library" ++ fi ++ ;; ++ * ) ++ # Prefer using a static library (so that no silly _DYNAMIC symbols ++ # are required to link). ++ if test -n "$old_library"; then ++ func_append newdlprefiles " $dir/$old_library" ++ # Keep a list of preopened convenience libraries to check ++ # that they are being used correctly in the link pass. ++ test -z "$libdir" && \ ++ func_append dlpreconveniencelibs " $dir/$old_library" ++ # Otherwise, use the dlname, so that lt_dlopen finds it. 
++ elif test -n "$dlname"; then ++ func_append newdlprefiles " $dir/$dlname" ++ else ++ func_append newdlprefiles " $dir/$linklib" ++ fi ++ ;; ++ esac + fi # $pass = dlpreopen + + if test -z "$libdir"; then +@@ -5684,7 +6593,7 @@ func_mode_link () + + + if test "$linkmode" = prog && test "$pass" != link; then +- newlib_search_path="$newlib_search_path $ladir" ++ func_append newlib_search_path " $ladir" + deplibs="$lib $deplibs" + + linkalldeplibs=no +@@ -5697,7 +6606,8 @@ func_mode_link () + for deplib in $dependency_libs; do + case $deplib in + -L*) func_stripname '-L' '' "$deplib" +- newlib_search_path="$newlib_search_path $func_stripname_result" ++ func_resolve_sysroot "$func_stripname_result" ++ func_append newlib_search_path " $func_resolve_sysroot_result" + ;; + esac + # Need to link against all dependency_libs? +@@ -5708,12 +6618,12 @@ func_mode_link () + # or/and link against static libraries + newdependency_libs="$deplib $newdependency_libs" + fi +- if $opt_duplicate_deps ; then ++ if $opt_preserve_dup_deps ; then + case "$tmp_libs " in +- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; ++ *" $deplib "*) func_append specialdeplibs " $deplib" ;; + esac + fi +- tmp_libs="$tmp_libs $deplib" ++ func_append tmp_libs " $deplib" + done # for deplib + continue + fi # $linkmode = prog... +@@ -5728,7 +6638,7 @@ func_mode_link () + # Make sure the rpath contains only unique directories. + case "$temp_rpath:" in + *"$absdir:"*) ;; +- *) temp_rpath="$temp_rpath$absdir:" ;; ++ *) func_append temp_rpath "$absdir:" ;; + esac + fi + +@@ -5740,7 +6650,7 @@ func_mode_link () + *) + case "$compile_rpath " in + *" $absdir "*) ;; +- *) compile_rpath="$compile_rpath $absdir" ++ *) func_append compile_rpath " $absdir" ;; + esac + ;; + esac +@@ -5749,7 +6659,7 @@ func_mode_link () + *) + case "$finalize_rpath " in + *" $libdir "*) ;; +- *) finalize_rpath="$finalize_rpath $libdir" ++ *) func_append finalize_rpath " $libdir" ;; + esac + ;; + esac +@@ -5774,12 +6684,12 @@ func_mode_link () + case $host in + *cygwin* | *mingw* | *cegcc*) + # No point in relinking DLLs because paths are not encoded +- notinst_deplibs="$notinst_deplibs $lib" ++ func_append notinst_deplibs " $lib" + need_relink=no + ;; + *) + if test "$installed" = no; then +- notinst_deplibs="$notinst_deplibs $lib" ++ func_append notinst_deplibs " $lib" + need_relink=yes + fi + ;; +@@ -5814,7 +6724,7 @@ func_mode_link () + *) + case "$compile_rpath " in + *" $absdir "*) ;; +- *) compile_rpath="$compile_rpath $absdir" ++ *) func_append compile_rpath " $absdir" ;; + esac + ;; + esac +@@ -5823,7 +6733,7 @@ func_mode_link () + *) + case "$finalize_rpath " in + *" $libdir "*) ;; +- *) finalize_rpath="$finalize_rpath $libdir" ++ *) func_append finalize_rpath " $libdir" ;; + esac + ;; + esac +@@ -5835,7 +6745,7 @@ func_mode_link () + shift + realname="$1" + shift +- eval "libname=\"$libname_spec\"" ++ libname=`eval "\\$ECHO \"$libname_spec\""` + # use dlname if we got it. it's perfectly good, no? 
+ if test -n "$dlname"; then + soname="$dlname" +@@ -5848,7 +6758,7 @@ func_mode_link () + versuffix="-$major" + ;; + esac +- eval "soname=\"$soname_spec\"" ++ eval soname=\"$soname_spec\" + else + soname="$realname" + fi +@@ -5877,7 +6787,7 @@ func_mode_link () + linklib=$newlib + fi # test -n "$old_archive_from_expsyms_cmds" + +- if test "$linkmode" = prog || test "$mode" != relink; then ++ if test "$linkmode" = prog || test "$opt_mode" != relink; then + add_shlibpath= + add_dir= + add= +@@ -5933,7 +6843,7 @@ func_mode_link () + if test -n "$inst_prefix_dir"; then + case $libdir in + [\\/]*) +- add_dir="$add_dir -L$inst_prefix_dir$libdir" ++ func_append add_dir " -L$inst_prefix_dir$libdir" + ;; + esac + fi +@@ -5955,7 +6865,7 @@ func_mode_link () + if test -n "$add_shlibpath"; then + case :$compile_shlibpath: in + *":$add_shlibpath:"*) ;; +- *) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;; ++ *) func_append compile_shlibpath "$add_shlibpath:" ;; + esac + fi + if test "$linkmode" = prog; then +@@ -5969,13 +6879,13 @@ func_mode_link () + test "$hardcode_shlibpath_var" = yes; then + case :$finalize_shlibpath: in + *":$libdir:"*) ;; +- *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; ++ *) func_append finalize_shlibpath "$libdir:" ;; + esac + fi + fi + fi + +- if test "$linkmode" = prog || test "$mode" = relink; then ++ if test "$linkmode" = prog || test "$opt_mode" = relink; then + add_shlibpath= + add_dir= + add= +@@ -5989,7 +6899,7 @@ func_mode_link () + elif test "$hardcode_shlibpath_var" = yes; then + case :$finalize_shlibpath: in + *":$libdir:"*) ;; +- *) finalize_shlibpath="$finalize_shlibpath$libdir:" ;; ++ *) func_append finalize_shlibpath "$libdir:" ;; + esac + add="-l$name" + elif test "$hardcode_automatic" = yes; then +@@ -6001,12 +6911,12 @@ func_mode_link () + fi + else + # We cannot seem to hardcode it, guess we'll fake it. +- add_dir="-L$libdir" ++ add_dir="-L$lt_sysroot$libdir" + # Try looking first in the location we're being installed to. + if test -n "$inst_prefix_dir"; then + case $libdir in + [\\/]*) +- add_dir="$add_dir -L$inst_prefix_dir$libdir" ++ func_append add_dir " -L$inst_prefix_dir$libdir" + ;; + esac + fi +@@ -6083,27 +6993,33 @@ func_mode_link () + temp_xrpath=$func_stripname_result + case " $xrpath " in + *" $temp_xrpath "*) ;; +- *) xrpath="$xrpath $temp_xrpath";; ++ *) func_append xrpath " $temp_xrpath";; + esac;; +- *) temp_deplibs="$temp_deplibs $libdir";; ++ *) func_append temp_deplibs " $libdir";; + esac + done + dependency_libs="$temp_deplibs" + fi + +- newlib_search_path="$newlib_search_path $absdir" ++ func_append newlib_search_path " $absdir" + # Link against this library + test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs" + # ... 
and its dependency_libs + tmp_libs= + for deplib in $dependency_libs; do + newdependency_libs="$deplib $newdependency_libs" +- if $opt_duplicate_deps ; then ++ case $deplib in ++ -L*) func_stripname '-L' '' "$deplib" ++ func_resolve_sysroot "$func_stripname_result";; ++ *) func_resolve_sysroot "$deplib" ;; ++ esac ++ if $opt_preserve_dup_deps ; then + case "$tmp_libs " in +- *" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;; ++ *" $func_resolve_sysroot_result "*) ++ func_append specialdeplibs " $func_resolve_sysroot_result" ;; + esac + fi +- tmp_libs="$tmp_libs $deplib" ++ func_append tmp_libs " $func_resolve_sysroot_result" + done + + if test "$link_all_deplibs" != no; then +@@ -6113,8 +7029,10 @@ func_mode_link () + case $deplib in + -L*) path="$deplib" ;; + *.la) ++ func_resolve_sysroot "$deplib" ++ deplib=$func_resolve_sysroot_result + func_dirname "$deplib" "" "." +- dir="$func_dirname_result" ++ dir=$func_dirname_result + # We need an absolute path. + case $dir in + [\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;; +@@ -6130,7 +7048,7 @@ func_mode_link () + case $host in + *-*-darwin*) + depdepl= +- deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` ++ eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` + if test -n "$deplibrary_names" ; then + for tmp in $deplibrary_names ; do + depdepl=$tmp +@@ -6141,8 +7059,8 @@ func_mode_link () + if test -z "$darwin_install_name"; then + darwin_install_name=`${OTOOL64} -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` + fi +- compiler_flags="$compiler_flags ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}" +- linker_flags="$linker_flags -dylib_file ${darwin_install_name}:${depdepl}" ++ func_append compiler_flags " ${wl}-dylib_file ${wl}${darwin_install_name}:${depdepl}" ++ func_append linker_flags " -dylib_file ${darwin_install_name}:${depdepl}" + path= + fi + fi +@@ -6152,7 +7070,7 @@ func_mode_link () + ;; + esac + else +- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` ++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` + test -z "$libdir" && \ + func_fatal_error "\`$deplib' is not a valid libtool archive" + test "$absdir" != "$libdir" && \ +@@ -6192,7 +7110,7 @@ func_mode_link () + for dir in $newlib_search_path; do + case "$lib_search_path " in + *" $dir "*) ;; +- *) lib_search_path="$lib_search_path $dir" ;; ++ *) func_append lib_search_path " $dir" ;; + esac + done + newlib_search_path= +@@ -6205,7 +7123,7 @@ func_mode_link () + fi + for var in $vars dependency_libs; do + # Add libraries to $var in reverse order +- eval tmp_libs=\$$var ++ eval tmp_libs=\"\$$var\" + new_libs= + for deplib in $tmp_libs; do + # FIXME: Pedantically, this is the right thing to do, so +@@ -6250,13 +7168,13 @@ func_mode_link () + -L*) + case " $tmp_libs " in + *" $deplib "*) ;; +- *) tmp_libs="$tmp_libs $deplib" ;; ++ *) func_append tmp_libs " $deplib" ;; + esac + ;; +- *) tmp_libs="$tmp_libs $deplib" ;; ++ *) func_append tmp_libs " $deplib" ;; + esac + done +- eval $var=\$tmp_libs ++ eval $var=\"$tmp_libs\" + done # for var + fi + # Last step: remove runtime libs from dependency_libs +@@ -6269,7 +7187,7 @@ func_mode_link () + ;; + esac + if test -n "$i" ; then +- tmp_libs="$tmp_libs $i" ++ func_append tmp_libs " $i" + fi + done + dependency_libs=$tmp_libs +@@ -6310,7 +7228,7 @@ func_mode_link () + # Now set the variables for building old libraries. 
+ build_libtool_libs=no + oldlibs="$output" +- objs="$objs$old_deplibs" ++ func_append objs "$old_deplibs" + ;; + + lib) +@@ -6319,8 +7237,8 @@ func_mode_link () + lib*) + func_stripname 'lib' '.la' "$outputname" + name=$func_stripname_result +- eval "shared_ext=\"$shrext_cmds\"" +- eval "libname=\"$libname_spec\"" ++ eval shared_ext=\"$shrext_cmds\" ++ eval libname=\"$libname_spec\" + ;; + *) + test "$module" = no && \ +@@ -6330,8 +7248,8 @@ func_mode_link () + # Add the "lib" prefix for modules if required + func_stripname '' '.la' "$outputname" + name=$func_stripname_result +- eval "shared_ext=\"$shrext_cmds\"" +- eval "libname=\"$libname_spec\"" ++ eval shared_ext=\"$shrext_cmds\" ++ eval libname=\"$libname_spec\" + else + func_stripname '' '.la' "$outputname" + libname=$func_stripname_result +@@ -6346,7 +7264,7 @@ func_mode_link () + echo + $ECHO "*** Warning: Linking the shared library $output against the non-libtool" + $ECHO "*** objects $objs is not portable!" +- libobjs="$libobjs $objs" ++ func_append libobjs " $objs" + fi + fi + +@@ -6544,7 +7462,7 @@ func_mode_link () + done + + # Make executables depend on our current version. +- verstring="$verstring:${current}.0" ++ func_append verstring ":${current}.0" + ;; + + qnx) +@@ -6612,10 +7530,10 @@ func_mode_link () + fi + + func_generate_dlsyms "$libname" "$libname" "yes" +- libobjs="$libobjs $symfileobj" ++ func_append libobjs " $symfileobj" + test "X$libobjs" = "X " && libobjs= + +- if test "$mode" != relink; then ++ if test "$opt_mode" != relink; then + # Remove our outputs, but don't remove object files since they + # may have been created when compiling PIC objects. + removelist= +@@ -6631,7 +7549,7 @@ func_mode_link () + continue + fi + fi +- removelist="$removelist $p" ++ func_append removelist " $p" + ;; + *) ;; + esac +@@ -6642,7 +7560,7 @@ func_mode_link () + + # Now set the variables for building old libraries. + if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then +- oldlibs="$oldlibs $output_objdir/$libname.$libext" ++ func_append oldlibs " $output_objdir/$libname.$libext" + + # Transform .lo files to .o files. + oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; $lo2o" | $NL2SP` +@@ -6659,10 +7577,11 @@ func_mode_link () + # If the user specified any rpath flags, then add them. + temp_xrpath= + for libdir in $xrpath; do +- temp_xrpath="$temp_xrpath -R$libdir" ++ func_replace_sysroot "$libdir" ++ func_append temp_xrpath " -R$func_replace_sysroot_result" + case "$finalize_rpath " in + *" $libdir "*) ;; +- *) finalize_rpath="$finalize_rpath $libdir" ;; ++ *) func_append finalize_rpath " $libdir" ;; + esac + done + if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then +@@ -6676,7 +7595,7 @@ func_mode_link () + for lib in $old_dlfiles; do + case " $dlprefiles $dlfiles " in + *" $lib "*) ;; +- *) dlfiles="$dlfiles $lib" ;; ++ *) func_append dlfiles " $lib" ;; + esac + done + +@@ -6686,7 +7605,7 @@ func_mode_link () + for lib in $old_dlprefiles; do + case "$dlprefiles " in + *" $lib "*) ;; +- *) dlprefiles="$dlprefiles $lib" ;; ++ *) func_append dlprefiles " $lib" ;; + esac + done + +@@ -6698,7 +7617,7 @@ func_mode_link () + ;; + *-*-rhapsody* | *-*-darwin1.[012]) + # Rhapsody C library is in the System framework +- deplibs="$deplibs System.ltframework" ++ func_append deplibs " System.ltframework" + ;; + *-*-netbsd*) + # Don't link with libc until the a.out ld.so is fixed. 
+@@ -6715,7 +7634,7 @@ func_mode_link () + *) + # Add libc to deplibs on all other systems if necessary. + if test "$build_libtool_need_lc" = "yes"; then +- deplibs="$deplibs -lc" ++ func_append deplibs " -lc" + fi + ;; + esac +@@ -6764,18 +7683,18 @@ EOF + if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then + case " $predeps $postdeps " in + *" $i "*) +- newdeplibs="$newdeplibs $i" ++ func_append newdeplibs " $i" + i="" + ;; + esac + fi + if test -n "$i" ; then +- eval "libname=\"$libname_spec\"" +- eval "deplib_matches=\"$library_names_spec\"" ++ libname=`eval "\\$ECHO \"$libname_spec\""` ++ deplib_matches=`eval "\\$ECHO \"$library_names_spec\""` + set dummy $deplib_matches; shift + deplib_match=$1 + if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then +- newdeplibs="$newdeplibs $i" ++ func_append newdeplibs " $i" + else + droppeddeps=yes + echo +@@ -6789,7 +7708,7 @@ EOF + fi + ;; + *) +- newdeplibs="$newdeplibs $i" ++ func_append newdeplibs " $i" + ;; + esac + done +@@ -6807,18 +7726,18 @@ EOF + if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then + case " $predeps $postdeps " in + *" $i "*) +- newdeplibs="$newdeplibs $i" ++ func_append newdeplibs " $i" + i="" + ;; + esac + fi + if test -n "$i" ; then +- eval "libname=\"$libname_spec\"" +- eval "deplib_matches=\"$library_names_spec\"" ++ libname=`eval "\\$ECHO \"$libname_spec\""` ++ deplib_matches=`eval "\\$ECHO \"$library_names_spec\""` + set dummy $deplib_matches; shift + deplib_match=$1 + if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then +- newdeplibs="$newdeplibs $i" ++ func_append newdeplibs " $i" + else + droppeddeps=yes + echo +@@ -6840,7 +7759,7 @@ EOF + fi + ;; + *) +- newdeplibs="$newdeplibs $i" ++ func_append newdeplibs " $i" + ;; + esac + done +@@ -6857,15 +7776,27 @@ EOF + if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then + case " $predeps $postdeps " in + *" $a_deplib "*) +- newdeplibs="$newdeplibs $a_deplib" ++ func_append newdeplibs " $a_deplib" + a_deplib="" + ;; + esac + fi + if test -n "$a_deplib" ; then +- eval "libname=\"$libname_spec\"" ++ libname=`eval "\\$ECHO \"$libname_spec\""` ++ if test -n "$file_magic_glob"; then ++ libnameglob=`func_echo_all "$libname" | $SED -e $file_magic_glob` ++ else ++ libnameglob=$libname ++ fi ++ test "$want_nocaseglob" = yes && nocaseglob=`shopt -p nocaseglob` + for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do +- potential_libs=`ls $i/$libname[.-]* 2>/dev/null` ++ if test "$want_nocaseglob" = yes; then ++ shopt -s nocaseglob ++ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null` ++ $nocaseglob ++ else ++ potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null` ++ fi + for potent_lib in $potential_libs; do + # Follow soft links. + if ls -lLd "$potent_lib" 2>/dev/null | +@@ -6885,10 +7816,10 @@ EOF + *) potlib=`$ECHO "$potlib" | $SED 's,[^/]*$,,'`"$potliblink";; + esac + done +- if eval "$file_magic_cmd \"\$potlib\"" 2>/dev/null | ++ if eval $file_magic_cmd \"\$potlib\" 2>/dev/null | + $SED -e 10q | + $EGREP "$file_magic_regex" > /dev/null; then +- newdeplibs="$newdeplibs $a_deplib" ++ func_append newdeplibs " $a_deplib" + a_deplib="" + break 2 + fi +@@ -6913,7 +7844,7 @@ EOF + ;; + *) + # Add a -L argument. +- newdeplibs="$newdeplibs $a_deplib" ++ func_append newdeplibs " $a_deplib" + ;; + esac + done # Gone through all deplibs. 
+@@ -6929,20 +7860,20 @@ EOF + if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then + case " $predeps $postdeps " in + *" $a_deplib "*) +- newdeplibs="$newdeplibs $a_deplib" ++ func_append newdeplibs " $a_deplib" + a_deplib="" + ;; + esac + fi + if test -n "$a_deplib" ; then +- eval "libname=\"$libname_spec\"" ++ libname=`eval "\\$ECHO \"$libname_spec\""` + for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do + potential_libs=`ls $i/$libname[.-]* 2>/dev/null` + for potent_lib in $potential_libs; do + potlib="$potent_lib" # see symlink-check above in file_magic test + if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \ + $EGREP "$match_pattern_regex" > /dev/null; then +- newdeplibs="$newdeplibs $a_deplib" ++ func_append newdeplibs " $a_deplib" + a_deplib="" + break 2 + fi +@@ -6967,7 +7898,7 @@ EOF + ;; + *) + # Add a -L argument. +- newdeplibs="$newdeplibs $a_deplib" ++ func_append newdeplibs " $a_deplib" + ;; + esac + done # Gone through all deplibs. +@@ -7071,7 +8002,7 @@ EOF + *) + case " $deplibs " in + *" -L$path/$objdir "*) +- new_libs="$new_libs -L$path/$objdir" ;; ++ func_append new_libs " -L$path/$objdir" ;; + esac + ;; + esac +@@ -7081,10 +8012,10 @@ EOF + -L*) + case " $new_libs " in + *" $deplib "*) ;; +- *) new_libs="$new_libs $deplib" ;; ++ *) func_append new_libs " $deplib" ;; + esac + ;; +- *) new_libs="$new_libs $deplib" ;; ++ *) func_append new_libs " $deplib" ;; + esac + done + deplibs="$new_libs" +@@ -7101,10 +8032,12 @@ EOF + hardcode_libdirs= + dep_rpath= + rpath="$finalize_rpath" +- test "$mode" != relink && rpath="$compile_rpath$rpath" ++ test "$opt_mode" != relink && rpath="$compile_rpath$rpath" + for libdir in $rpath; do + if test -n "$hardcode_libdir_flag_spec"; then + if test -n "$hardcode_libdir_separator"; then ++ func_replace_sysroot "$libdir" ++ libdir=$func_replace_sysroot_result + if test -z "$hardcode_libdirs"; then + hardcode_libdirs="$libdir" + else +@@ -7113,18 +8046,18 @@ EOF + *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) + ;; + *) +- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" ++ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" + ;; + esac + fi + else +- eval "flag=\"$hardcode_libdir_flag_spec\"" +- dep_rpath="$dep_rpath $flag" ++ eval flag=\"$hardcode_libdir_flag_spec\" ++ func_append dep_rpath " $flag" + fi + elif test -n "$runpath_var"; then + case "$perm_rpath " in + *" $libdir "*) ;; +- *) perm_rpath="$perm_rpath $libdir" ;; ++ *) func_apped perm_rpath " $libdir" ;; + esac + fi + done +@@ -7133,40 +8066,38 @@ EOF + test -n "$hardcode_libdirs"; then + libdir="$hardcode_libdirs" + if test -n "$hardcode_libdir_flag_spec_ld"; then +- eval "dep_rpath=\"$hardcode_libdir_flag_spec_ld\"" ++ eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\" + else +- eval "dep_rpath=\"$hardcode_libdir_flag_spec\"" ++ eval dep_rpath=\"$hardcode_libdir_flag_spec\" + fi + fi + if test -n "$runpath_var" && test -n "$perm_rpath"; then + # We should set the runpath_var. 
+ rpath= + for dir in $perm_rpath; do +- rpath="$rpath$dir:" ++ func_append rpath "$dir:" + done +- eval $runpath_var=\$rpath\$$runpath_var +- export $runpath_var ++ eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var" + fi + test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" + fi + + shlibpath="$finalize_shlibpath" +- test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath" ++ test "$opt_mode" != relink && shlibpath="$compile_shlibpath$shlibpath" + if test -n "$shlibpath"; then +- eval $shlibpath_var=\$shlibpath\$$shlibpath_var +- export $shlibpath_var ++ eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" + fi + + # Get the real and link names of the library. +- eval "shared_ext=\"$shrext_cmds\"" +- eval "library_names=\"$library_names_spec\"" ++ eval shared_ext=\"$shrext_cmds\" ++ eval library_names=\"$library_names_spec\" + set dummy $library_names + shift + realname="$1" + shift + + if test -n "$soname_spec"; then +- eval "soname=\"$soname_spec\"" ++ eval soname=\"$soname_spec\" + else + soname="$realname" + fi +@@ -7178,7 +8109,7 @@ EOF + linknames= + for link + do +- linknames="$linknames $link" ++ func_append linknames " $link" + done + + # Use standard objects if they are pic +@@ -7189,7 +8120,7 @@ EOF + if test -n "$export_symbols" && test -n "$include_expsyms"; then + $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp" + export_symbols="$output_objdir/$libname.uexp" +- delfiles="$delfiles $export_symbols" ++ func_append delfiles " $export_symbols" + fi + + orig_export_symbols= +@@ -7220,13 +8151,45 @@ EOF + $opt_dry_run || $RM $export_symbols + cmds=$export_symbols_cmds + save_ifs="$IFS"; IFS='~' +- for cmd in $cmds; do ++ for cmd1 in $cmds; do + IFS="$save_ifs" +- eval "cmd=\"$cmd\"" +- func_len " $cmd" +- len=$func_len_result +- if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then ++ # Take the normal branch if the nm_file_list_spec branch ++ # doesn't work or if tool conversion is not needed. ++ case $nm_file_list_spec~$to_tool_file_cmd in ++ *~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*) ++ try_normal_branch=yes ++ eval cmd=\"$cmd1\" ++ func_len " $cmd" ++ len=$func_len_result ++ ;; ++ *) ++ try_normal_branch=no ++ ;; ++ esac ++ if test "$try_normal_branch" = yes \ ++ && { test "$len" -lt "$max_cmd_len" \ ++ || test "$max_cmd_len" -le -1; } ++ then ++ func_show_eval "$cmd" 'exit $?' ++ skipped_export=false ++ elif test -n "$nm_file_list_spec"; then ++ func_basename "$output" ++ output_la=$func_basename_result ++ save_libobjs=$libobjs ++ save_output=$output ++ output=${output_objdir}/${output_la}.nm ++ func_to_tool_file "$output" ++ libobjs=$nm_file_list_spec$func_to_tool_file_result ++ func_append delfiles " $output" ++ func_verbose "creating $NM input file list: $output" ++ for obj in $save_libobjs; do ++ func_to_tool_file "$obj" ++ $ECHO "$func_to_tool_file_result" ++ done > "$output" ++ eval cmd=\"$cmd1\" + func_show_eval "$cmd" 'exit $?' ++ output=$save_output ++ libobjs=$save_libobjs + skipped_export=false + else + # The command line is too long to execute in one step. 
+@@ -7248,7 +8211,7 @@ EOF + if test -n "$export_symbols" && test -n "$include_expsyms"; then + tmp_export_symbols="$export_symbols" + test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" +- $opt_dry_run || $ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols" ++ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' + fi + + if test "X$skipped_export" != "X:" && test -n "$orig_export_symbols"; then +@@ -7260,7 +8223,7 @@ EOF + # global variables. join(1) would be nice here, but unfortunately + # isn't a blessed tool. + $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter +- delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" ++ func_append delfiles " $export_symbols $output_objdir/$libname.filter" + export_symbols=$output_objdir/$libname.def + $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols + fi +@@ -7270,7 +8233,7 @@ EOF + case " $convenience " in + *" $test_deplib "*) ;; + *) +- tmp_deplibs="$tmp_deplibs $test_deplib" ++ func_append tmp_deplibs " $test_deplib" + ;; + esac + done +@@ -7286,43 +8249,43 @@ EOF + fi + if test -n "$whole_archive_flag_spec"; then + save_libobjs=$libobjs +- eval "libobjs=\"\$libobjs $whole_archive_flag_spec\"" ++ eval libobjs=\"\$libobjs $whole_archive_flag_spec\" + test "X$libobjs" = "X " && libobjs= + else + gentop="$output_objdir/${outputname}x" +- generated="$generated $gentop" ++ func_append generated " $gentop" + + func_extract_archives $gentop $convenience +- libobjs="$libobjs $func_extract_archives_result" ++ func_append libobjs " $func_extract_archives_result" + test "X$libobjs" = "X " && libobjs= + fi + fi + + if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then +- eval "flag=\"$thread_safe_flag_spec\"" +- linker_flags="$linker_flags $flag" ++ eval flag=\"$thread_safe_flag_spec\" ++ func_append linker_flags " $flag" + fi + + # Make a backup of the uninstalled library when relinking +- if test "$mode" = relink; then +- $opt_dry_run || (cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U) || exit $? ++ if test "$opt_mode" = relink; then ++ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $? + fi + + # Do each of the archive commands. 
+ if test "$module" = yes && test -n "$module_cmds" ; then + if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then +- eval "test_cmds=\"$module_expsym_cmds\"" ++ eval test_cmds=\"$module_expsym_cmds\" + cmds=$module_expsym_cmds + else +- eval "test_cmds=\"$module_cmds\"" ++ eval test_cmds=\"$module_cmds\" + cmds=$module_cmds + fi + else + if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then +- eval "test_cmds=\"$archive_expsym_cmds\"" ++ eval test_cmds=\"$archive_expsym_cmds\" + cmds=$archive_expsym_cmds + else +- eval "test_cmds=\"$archive_cmds\"" ++ eval test_cmds=\"$archive_cmds\" + cmds=$archive_cmds + fi + fi +@@ -7366,10 +8329,13 @@ EOF + echo 'INPUT (' > $output + for obj in $save_libobjs + do +- $ECHO "$obj" >> $output ++ func_to_tool_file "$obj" ++ $ECHO "$func_to_tool_file_result" >> $output + done + echo ')' >> $output +- delfiles="$delfiles $output" ++ func_append delfiles " $output" ++ func_to_tool_file "$output" ++ output=$func_to_tool_file_result + elif test -n "$save_libobjs" && test "X$skipped_export" != "X:" && test "X$file_list_spec" != X; then + output=${output_objdir}/${output_la}.lnk + func_verbose "creating linker input file list: $output" +@@ -7383,15 +8349,17 @@ EOF + fi + for obj + do +- $ECHO "$obj" >> $output ++ func_to_tool_file "$obj" ++ $ECHO "$func_to_tool_file_result" >> $output + done +- delfiles="$delfiles $output" +- output=$firstobj\"$file_list_spec$output\" ++ func_append delfiles " $output" ++ func_to_tool_file "$output" ++ output=$firstobj\"$file_list_spec$func_to_tool_file_result\" + else + if test -n "$save_libobjs"; then + func_verbose "creating reloadable object files..." + output=$output_objdir/$output_la-${k}.$objext +- eval "test_cmds=\"$reload_cmds\"" ++ eval test_cmds=\"$reload_cmds\" + func_len " $test_cmds" + len0=$func_len_result + len=$len0 +@@ -7411,12 +8379,12 @@ EOF + if test "$k" -eq 1 ; then + # The first file doesn't have a previous command to add. + reload_objs=$objlist +- eval "concat_cmds=\"$reload_cmds\"" ++ eval concat_cmds=\"$reload_cmds\" + else + # All subsequent reloadable object files will link in + # the last one created. + reload_objs="$objlist $last_robj" +- eval "concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\"" ++ eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\" + fi + last_robj=$output_objdir/$output_la-${k}.$objext + func_arith $k + 1 +@@ -7433,11 +8401,11 @@ EOF + # files will link in the last one created. + test -z "$concat_cmds" || concat_cmds=$concat_cmds~ + reload_objs="$objlist $last_robj" +- eval "concat_cmds=\"\${concat_cmds}$reload_cmds\"" ++ eval concat_cmds=\"\${concat_cmds}$reload_cmds\" + if test -n "$last_robj"; then +- eval "concat_cmds=\"\${concat_cmds}~\$RM $last_robj\"" ++ eval concat_cmds=\"\${concat_cmds}~\$RM $last_robj\" + fi +- delfiles="$delfiles $output" ++ func_append delfiles " $output" + + else + output= +@@ -7450,9 +8418,9 @@ EOF + libobjs=$output + # Append the command to create the export file. + test -z "$concat_cmds" || concat_cmds=$concat_cmds~ +- eval "concat_cmds=\"\$concat_cmds$export_symbols_cmds\"" ++ eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\" + if test -n "$last_robj"; then +- eval "concat_cmds=\"\$concat_cmds~\$RM $last_robj\"" ++ eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" + fi + fi + +@@ -7471,7 +8439,7 @@ EOF + lt_exit=$? 
+ + # Restore the uninstalled library and exit +- if test "$mode" = relink; then ++ if test "$opt_mode" = relink; then + ( cd "$output_objdir" && \ + $RM "${realname}T" && \ + $MV "${realname}U" "$realname" ) +@@ -7492,7 +8460,7 @@ EOF + if test -n "$export_symbols" && test -n "$include_expsyms"; then + tmp_export_symbols="$export_symbols" + test -n "$orig_export_symbols" && tmp_export_symbols="$orig_export_symbols" +- $opt_dry_run || $ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols" ++ $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' + fi + + if test -n "$orig_export_symbols"; then +@@ -7504,7 +8472,7 @@ EOF + # global variables. join(1) would be nice here, but unfortunately + # isn't a blessed tool. + $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter +- delfiles="$delfiles $export_symbols $output_objdir/$libname.filter" ++ func_append delfiles " $export_symbols $output_objdir/$libname.filter" + export_symbols=$output_objdir/$libname.def + $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols + fi +@@ -7515,7 +8483,7 @@ EOF + output=$save_output + + if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then +- eval "libobjs=\"\$libobjs $whole_archive_flag_spec\"" ++ eval libobjs=\"\$libobjs $whole_archive_flag_spec\" + test "X$libobjs" = "X " && libobjs= + fi + # Expand the library linking commands again to reset the +@@ -7539,23 +8507,23 @@ EOF + + if test -n "$delfiles"; then + # Append the command to remove temporary files to $cmds. +- eval "cmds=\"\$cmds~\$RM $delfiles\"" ++ eval cmds=\"\$cmds~\$RM $delfiles\" + fi + + # Add any objects from preloaded convenience libraries + if test -n "$dlprefiles"; then + gentop="$output_objdir/${outputname}x" +- generated="$generated $gentop" ++ func_append generated " $gentop" + + func_extract_archives $gentop $dlprefiles +- libobjs="$libobjs $func_extract_archives_result" ++ func_append libobjs " $func_extract_archives_result" + test "X$libobjs" = "X " && libobjs= + fi + + save_ifs="$IFS"; IFS='~' + for cmd in $cmds; do + IFS="$save_ifs" +- eval "cmd=\"$cmd\"" ++ eval cmd=\"$cmd\" + $opt_silent || { + func_quote_for_expand "$cmd" + eval "func_echo $func_quote_for_expand_result" +@@ -7564,7 +8532,7 @@ EOF + lt_exit=$? + + # Restore the uninstalled library and exit +- if test "$mode" = relink; then ++ if test "$opt_mode" = relink; then + ( cd "$output_objdir" && \ + $RM "${realname}T" && \ + $MV "${realname}U" "$realname" ) +@@ -7576,8 +8544,8 @@ EOF + IFS="$save_ifs" + + # Restore the uninstalled library and exit +- if test "$mode" = relink; then +- $opt_dry_run || (cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname) || exit $? ++ if test "$opt_mode" = relink; then ++ $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $? 
+ + if test -n "$convenience"; then + if test -z "$whole_archive_flag_spec"; then +@@ -7656,17 +8624,20 @@ EOF + + if test -n "$convenience"; then + if test -n "$whole_archive_flag_spec"; then +- eval "tmp_whole_archive_flags=\"$whole_archive_flag_spec\"" ++ eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\" + reload_conv_objs=$reload_objs\ `$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'` + else + gentop="$output_objdir/${obj}x" +- generated="$generated $gentop" ++ func_append generated " $gentop" + + func_extract_archives $gentop $convenience + reload_conv_objs="$reload_objs $func_extract_archives_result" + fi + fi + ++ # If we're not building shared, we need to use non_pic_objs ++ test "$build_libtool_libs" != yes && libobjs="$non_pic_objects" ++ + # Create the old-style object. + reload_objs="$objs$old_deplibs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.${libext}$/d; /\.lib$/d; $lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test + +@@ -7690,7 +8661,7 @@ EOF + # Create an invalid libtool object if no PIC, so that we don't + # accidentally link it into a program. + # $show "echo timestamp > $libobj" +- # $opt_dry_run || echo timestamp > $libobj || exit $? ++ # $opt_dry_run || eval "echo timestamp > $libobj" || exit $? + exit $EXIT_SUCCESS + fi + +@@ -7740,8 +8711,8 @@ EOF + if test "$tagname" = CXX ; then + case ${MACOSX_DEPLOYMENT_TARGET-10.0} in + 10.[0123]) +- compile_command="$compile_command ${wl}-bind_at_load" +- finalize_command="$finalize_command ${wl}-bind_at_load" ++ func_append compile_command " ${wl}-bind_at_load" ++ func_append finalize_command " ${wl}-bind_at_load" + ;; + esac + fi +@@ -7761,7 +8732,7 @@ EOF + *) + case " $compile_deplibs " in + *" -L$path/$objdir "*) +- new_libs="$new_libs -L$path/$objdir" ;; ++ func_append new_libs " -L$path/$objdir" ;; + esac + ;; + esac +@@ -7771,17 +8742,17 @@ EOF + -L*) + case " $new_libs " in + *" $deplib "*) ;; +- *) new_libs="$new_libs $deplib" ;; ++ *) func_append new_libs " $deplib" ;; + esac + ;; +- *) new_libs="$new_libs $deplib" ;; ++ *) func_append new_libs " $deplib" ;; + esac + done + compile_deplibs="$new_libs" + + +- compile_command="$compile_command $compile_deplibs" +- finalize_command="$finalize_command $finalize_deplibs" ++ func_append compile_command " $compile_deplibs" ++ func_append finalize_command " $finalize_deplibs" + + if test -n "$rpath$xrpath"; then + # If the user specified any rpath flags, then add them. +@@ -7789,7 +8760,7 @@ EOF + # This is the magic to use -rpath. 
+ case "$finalize_rpath " in + *" $libdir "*) ;; +- *) finalize_rpath="$finalize_rpath $libdir" ;; ++ *) func_append finalize_rpath " $libdir" ;; + esac + done + fi +@@ -7808,18 +8779,18 @@ EOF + *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) + ;; + *) +- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" ++ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" + ;; + esac + fi + else +- eval "flag=\"$hardcode_libdir_flag_spec\"" +- rpath="$rpath $flag" ++ eval flag=\"$hardcode_libdir_flag_spec\" ++ func_append rpath " $flag" + fi + elif test -n "$runpath_var"; then + case "$perm_rpath " in + *" $libdir "*) ;; +- *) perm_rpath="$perm_rpath $libdir" ;; ++ *) func_append perm_rpath " $libdir" ;; + esac + fi + case $host in +@@ -7828,12 +8799,12 @@ EOF + case :$dllsearchpath: in + *":$libdir:"*) ;; + ::) dllsearchpath=$libdir;; +- *) dllsearchpath="$dllsearchpath:$libdir";; ++ *) func_append dllsearchpath ":$libdir";; + esac + case :$dllsearchpath: in + *":$testbindir:"*) ;; + ::) dllsearchpath=$testbindir;; +- *) dllsearchpath="$dllsearchpath:$testbindir";; ++ *) func_append dllsearchpath ":$testbindir";; + esac + ;; + esac +@@ -7842,7 +8813,7 @@ EOF + if test -n "$hardcode_libdir_separator" && + test -n "$hardcode_libdirs"; then + libdir="$hardcode_libdirs" +- eval "rpath=\" $hardcode_libdir_flag_spec\"" ++ eval rpath=\" $hardcode_libdir_flag_spec\" + fi + compile_rpath="$rpath" + +@@ -7859,18 +8830,18 @@ EOF + *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) + ;; + *) +- hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir" ++ func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" + ;; + esac + fi + else +- eval "flag=\"$hardcode_libdir_flag_spec\"" +- rpath="$rpath $flag" ++ eval flag=\"$hardcode_libdir_flag_spec\" ++ func_append rpath " $flag" + fi + elif test -n "$runpath_var"; then + case "$finalize_perm_rpath " in + *" $libdir "*) ;; +- *) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;; ++ *) func_append finalize_perm_rpath " $libdir" ;; + esac + fi + done +@@ -7878,7 +8849,7 @@ EOF + if test -n "$hardcode_libdir_separator" && + test -n "$hardcode_libdirs"; then + libdir="$hardcode_libdirs" +- eval "rpath=\" $hardcode_libdir_flag_spec\"" ++ eval rpath=\" $hardcode_libdir_flag_spec\" + fi + finalize_rpath="$rpath" + +@@ -7921,6 +8892,12 @@ EOF + exit_status=0 + func_show_eval "$link_command" 'exit_status=$?' + ++ if test -n "$postlink_cmds"; then ++ func_to_tool_file "$output" ++ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` ++ func_execute_cmds "$postlink_cmds" 'exit $?' ++ fi ++ + # Delete the generated files. + if test -f "$output_objdir/${outputname}S.${objext}"; then + func_show_eval '$RM "$output_objdir/${outputname}S.${objext}"' +@@ -7943,7 +8920,7 @@ EOF + # We should set the runpath_var. + rpath= + for dir in $perm_rpath; do +- rpath="$rpath$dir:" ++ func_append rpath "$dir:" + done + compile_var="$runpath_var=\"$rpath\$$runpath_var\" " + fi +@@ -7951,7 +8928,7 @@ EOF + # We should set the runpath_var. + rpath= + for dir in $finalize_perm_rpath; do +- rpath="$rpath$dir:" ++ func_append rpath "$dir:" + done + finalize_var="$runpath_var=\"$rpath\$$runpath_var\" " + fi +@@ -7966,6 +8943,13 @@ EOF + $opt_dry_run || $RM $output + # Link the executable and exit + func_show_eval "$link_command" 'exit $?' 
++ ++ if test -n "$postlink_cmds"; then ++ func_to_tool_file "$output" ++ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` ++ func_execute_cmds "$postlink_cmds" 'exit $?' ++ fi ++ + exit $EXIT_SUCCESS + fi + +@@ -7999,6 +8983,12 @@ EOF + + func_show_eval "$link_command" 'exit $?' + ++ if test -n "$postlink_cmds"; then ++ func_to_tool_file "$output_objdir/$outputname" ++ postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` ++ func_execute_cmds "$postlink_cmds" 'exit $?' ++ fi ++ + # Now create the wrapper script. + func_verbose "creating $output" + +@@ -8096,7 +9086,7 @@ EOF + else + oldobjs="$old_deplibs $non_pic_objects" + if test "$preload" = yes && test -f "$symfileobj"; then +- oldobjs="$oldobjs $symfileobj" ++ func_append oldobjs " $symfileobj" + fi + fi + addlibs="$old_convenience" +@@ -8104,10 +9094,10 @@ EOF + + if test -n "$addlibs"; then + gentop="$output_objdir/${outputname}x" +- generated="$generated $gentop" ++ func_append generated " $gentop" + + func_extract_archives $gentop $addlibs +- oldobjs="$oldobjs $func_extract_archives_result" ++ func_append oldobjs " $func_extract_archives_result" + fi + + # Do each command in the archive commands. +@@ -8118,10 +9108,10 @@ EOF + # Add any objects from preloaded convenience libraries + if test -n "$dlprefiles"; then + gentop="$output_objdir/${outputname}x" +- generated="$generated $gentop" ++ func_append generated " $gentop" + + func_extract_archives $gentop $dlprefiles +- oldobjs="$oldobjs $func_extract_archives_result" ++ func_append oldobjs " $func_extract_archives_result" + fi + + # POSIX demands no paths to be encoded in archives. We have +@@ -8139,7 +9129,7 @@ EOF + else + echo "copying selected object files to avoid basename conflicts..." + gentop="$output_objdir/${outputname}x" +- generated="$generated $gentop" ++ func_append generated " $gentop" + func_mkdir_p "$gentop" + save_oldobjs=$oldobjs + oldobjs= +@@ -8163,18 +9153,28 @@ EOF + esac + done + func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj" +- oldobjs="$oldobjs $gentop/$newobj" ++ func_append oldobjs " $gentop/$newobj" + ;; +- *) oldobjs="$oldobjs $obj" ;; ++ *) func_append oldobjs " $obj" ;; + esac + done + fi +- eval "cmds=\"$old_archive_cmds\"" ++ eval cmds=\"$old_archive_cmds\" + + func_len " $cmds" + len=$func_len_result + if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then + cmds=$old_archive_cmds ++ elif test -n "$archiver_list_spec"; then ++ func_verbose "using command file archive linking..." ++ for obj in $oldobjs ++ do ++ func_to_tool_file "$obj" ++ $ECHO "$func_to_tool_file_result" ++ done > $output_objdir/$libname.libcmd ++ func_to_tool_file "$output_objdir/$libname.libcmd" ++ oldobjs=" $archiver_list_spec$func_to_tool_file_result" ++ cmds=$old_archive_cmds + else + # the command line is too long to link in one step, link in parts + func_verbose "using piecewise archive linking..." 
+@@ -8189,7 +9189,7 @@ EOF + do + last_oldobj=$obj + done +- eval "test_cmds=\"$old_archive_cmds\"" ++ eval test_cmds=\"$old_archive_cmds\" + func_len " $test_cmds" + len0=$func_len_result + len=$len0 +@@ -8208,7 +9208,7 @@ EOF + RANLIB=$save_RANLIB + fi + test -z "$concat_cmds" || concat_cmds=$concat_cmds~ +- eval "concat_cmds=\"\${concat_cmds}$old_archive_cmds\"" ++ eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\" + objlist= + len=$len0 + fi +@@ -8216,9 +9216,9 @@ EOF + RANLIB=$save_RANLIB + oldobjs=$objlist + if test "X$oldobjs" = "X" ; then +- eval "cmds=\"\$concat_cmds\"" ++ eval cmds=\"\$concat_cmds\" + else +- eval "cmds=\"\$concat_cmds~\$old_archive_cmds\"" ++ eval cmds=\"\$concat_cmds~\$old_archive_cmds\" + fi + fi + fi +@@ -8268,12 +9268,23 @@ EOF + *.la) + func_basename "$deplib" + name="$func_basename_result" +- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` ++ func_resolve_sysroot "$deplib" ++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` + test -z "$libdir" && \ + func_fatal_error "\`$deplib' is not a valid libtool archive" +- newdependency_libs="$newdependency_libs $libdir/$name" ++ func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name" ++ ;; ++ -L*) ++ func_stripname -L '' "$deplib" ++ func_replace_sysroot "$func_stripname_result" ++ func_append newdependency_libs " -L$func_replace_sysroot_result" + ;; +- *) newdependency_libs="$newdependency_libs $deplib" ;; ++ -R*) ++ func_stripname -R '' "$deplib" ++ func_replace_sysroot "$func_stripname_result" ++ func_append newdependency_libs " -R$func_replace_sysroot_result" ++ ;; ++ *) func_append newdependency_libs " $deplib" ;; + esac + done + dependency_libs="$newdependency_libs" +@@ -8284,12 +9295,14 @@ EOF + *.la) + func_basename "$lib" + name="$func_basename_result" +- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` ++ func_resolve_sysroot "$lib" ++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` ++ + test -z "$libdir" && \ + func_fatal_error "\`$lib' is not a valid libtool archive" +- newdlfiles="$newdlfiles $libdir/$name" ++ func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name" + ;; +- *) newdlfiles="$newdlfiles $lib" ;; ++ *) func_append newdlfiles " $lib" ;; + esac + done + dlfiles="$newdlfiles" +@@ -8303,10 +9316,11 @@ EOF + # the library: + func_basename "$lib" + name="$func_basename_result" +- libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib` ++ func_resolve_sysroot "$lib" ++ eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` + test -z "$libdir" && \ + func_fatal_error "\`$lib' is not a valid libtool archive" +- newdlprefiles="$newdlprefiles $libdir/$name" ++ func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name" + ;; + esac + done +@@ -8318,7 +9332,7 @@ EOF + [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; + *) abs=`pwd`"/$lib" ;; + esac +- newdlfiles="$newdlfiles $abs" ++ func_append newdlfiles " $abs" + done + dlfiles="$newdlfiles" + newdlprefiles= +@@ -8327,7 +9341,7 @@ EOF + [\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;; + *) abs=`pwd`"/$lib" ;; + esac +- newdlprefiles="$newdlprefiles $abs" ++ func_append newdlprefiles " $abs" + done + dlprefiles="$newdlprefiles" + fi +@@ -8412,7 +9426,7 @@ relink_command=\"$relink_command\"" + exit $EXIT_SUCCESS + } + +-{ test "$mode" = link || test "$mode" = relink; } && ++{ test "$opt_mode" = link || test "$opt_mode" = relink; } && + func_mode_link ${1+"$@"} + + +@@ -8432,9 +9446,9 @@ func_mode_uninstall () + for arg + do + case $arg in +- -f) RM="$RM $arg"; 
rmforce=yes ;; +- -*) RM="$RM $arg" ;; +- *) files="$files $arg" ;; ++ -f) func_append RM " $arg"; rmforce=yes ;; ++ -*) func_append RM " $arg" ;; ++ *) func_append files " $arg" ;; + esac + done + +@@ -8443,24 +9457,23 @@ func_mode_uninstall () + + rmdirs= + +- origobjdir="$objdir" + for file in $files; do + func_dirname "$file" "" "." + dir="$func_dirname_result" + if test "X$dir" = X.; then +- objdir="$origobjdir" ++ odir="$objdir" + else +- objdir="$dir/$origobjdir" ++ odir="$dir/$objdir" + fi + func_basename "$file" + name="$func_basename_result" +- test "$mode" = uninstall && objdir="$dir" ++ test "$opt_mode" = uninstall && odir="$dir" + +- # Remember objdir for removal later, being careful to avoid duplicates +- if test "$mode" = clean; then ++ # Remember odir for removal later, being careful to avoid duplicates ++ if test "$opt_mode" = clean; then + case " $rmdirs " in +- *" $objdir "*) ;; +- *) rmdirs="$rmdirs $objdir" ;; ++ *" $odir "*) ;; ++ *) func_append rmdirs " $odir" ;; + esac + fi + +@@ -8486,18 +9499,17 @@ func_mode_uninstall () + + # Delete the libtool libraries and symlinks. + for n in $library_names; do +- rmfiles="$rmfiles $objdir/$n" ++ func_append rmfiles " $odir/$n" + done +- test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library" ++ test -n "$old_library" && func_append rmfiles " $odir/$old_library" + +- case "$mode" in ++ case "$opt_mode" in + clean) +- case " $library_names " in +- # " " in the beginning catches empty $dlname ++ case " $library_names " in + *" $dlname "*) ;; +- *) rmfiles="$rmfiles $objdir/$dlname" ;; ++ *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;; + esac +- test -n "$libdir" && rmfiles="$rmfiles $objdir/$name $objdir/${name}i" ++ test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i" + ;; + uninstall) + if test -n "$library_names"; then +@@ -8525,19 +9537,19 @@ func_mode_uninstall () + # Add PIC object to the list of files to remove. + if test -n "$pic_object" && + test "$pic_object" != none; then +- rmfiles="$rmfiles $dir/$pic_object" ++ func_append rmfiles " $dir/$pic_object" + fi + + # Add non-PIC object to the list of files to remove. + if test -n "$non_pic_object" && + test "$non_pic_object" != none; then +- rmfiles="$rmfiles $dir/$non_pic_object" ++ func_append rmfiles " $dir/$non_pic_object" + fi + fi + ;; + + *) +- if test "$mode" = clean ; then ++ if test "$opt_mode" = clean ; then + noexename=$name + case $file in + *.exe) +@@ -8547,7 +9559,7 @@ func_mode_uninstall () + noexename=$func_stripname_result + # $file with .exe has already been added to rmfiles, + # add $file without .exe +- rmfiles="$rmfiles $file" ++ func_append rmfiles " $file" + ;; + esac + # Do a test to see if this is a libtool program. 
+@@ -8556,7 +9568,7 @@ func_mode_uninstall () + func_ltwrapper_scriptname "$file" + relink_command= + func_source $func_ltwrapper_scriptname_result +- rmfiles="$rmfiles $func_ltwrapper_scriptname_result" ++ func_append rmfiles " $func_ltwrapper_scriptname_result" + else + relink_command= + func_source $dir/$noexename +@@ -8564,12 +9576,12 @@ func_mode_uninstall () + + # note $name still contains .exe if it was in $file originally + # as does the version of $file that was added into $rmfiles +- rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}" ++ func_append rmfiles " $odir/$name $odir/${name}S.${objext}" + if test "$fast_install" = yes && test -n "$relink_command"; then +- rmfiles="$rmfiles $objdir/lt-$name" ++ func_append rmfiles " $odir/lt-$name" + fi + if test "X$noexename" != "X$name" ; then +- rmfiles="$rmfiles $objdir/lt-${noexename}.c" ++ func_append rmfiles " $odir/lt-${noexename}.c" + fi + fi + fi +@@ -8577,7 +9589,6 @@ func_mode_uninstall () + esac + func_show_eval "$RM $rmfiles" 'exit_status=1' + done +- objdir="$origobjdir" + + # Try to remove the ${objdir}s in the directories where we deleted files + for dir in $rmdirs; do +@@ -8589,16 +9600,16 @@ func_mode_uninstall () + exit $exit_status + } + +-{ test "$mode" = uninstall || test "$mode" = clean; } && ++{ test "$opt_mode" = uninstall || test "$opt_mode" = clean; } && + func_mode_uninstall ${1+"$@"} + +-test -z "$mode" && { ++test -z "$opt_mode" && { + help="$generic_help" + func_fatal_help "you must specify a MODE" + } + + test -z "$exec_cmd" && \ +- func_fatal_help "invalid operation mode \`$mode'" ++ func_fatal_help "invalid operation mode \`$opt_mode'" + + if test -n "$exec_cmd"; then + eval exec "$exec_cmd" +diff --git a/ltoptions.m4 b/ltoptions.m4 +index 5ef12ced2a..17cfd51c0b 100644 +--- a/ltoptions.m4 ++++ b/ltoptions.m4 +@@ -8,7 +8,7 @@ + # unlimited permission to copy and/or distribute it, with or without + # modifications, as long as this notice is preserved. + +-# serial 6 ltoptions.m4 ++# serial 7 ltoptions.m4 + + # This is to help aclocal find these macros, as it can't see m4_define. + AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])]) +diff --git a/ltversion.m4 b/ltversion.m4 +index bf87f77132..9c7b5d4118 100644 +--- a/ltversion.m4 ++++ b/ltversion.m4 +@@ -7,17 +7,17 @@ + # unlimited permission to copy and/or distribute it, with or without + # modifications, as long as this notice is preserved. + +-# Generated from ltversion.in. ++# @configure_input@ + +-# serial 3134 ltversion.m4 ++# serial 3293 ltversion.m4 + # This file is part of GNU Libtool + +-m4_define([LT_PACKAGE_VERSION], [2.2.7a]) +-m4_define([LT_PACKAGE_REVISION], [1.3134]) ++m4_define([LT_PACKAGE_VERSION], [2.4]) ++m4_define([LT_PACKAGE_REVISION], [1.3293]) + + AC_DEFUN([LTVERSION_VERSION], +-[macro_version='2.2.7a' +-macro_revision='1.3134' ++[macro_version='2.4' ++macro_revision='1.3293' + _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?]) + _LT_DECL(, macro_revision, 0) + ]) +diff --git a/lt~obsolete.m4 b/lt~obsolete.m4 +index bf92b5e079..c573da90c5 100644 +--- a/lt~obsolete.m4 ++++ b/lt~obsolete.m4 +@@ -7,7 +7,7 @@ + # unlimited permission to copy and/or distribute it, with or without + # modifications, as long as this notice is preserved. + +-# serial 4 lt~obsolete.m4 ++# serial 5 lt~obsolete.m4 + + # These exist entirely to fool aclocal when bootstrapping libtool. 
+ # +diff --git a/opcodes/configure b/opcodes/configure +index 0b352a454d..7eaea7db73 100755 +--- a/opcodes/configure ++++ b/opcodes/configure +@@ -650,6 +650,9 @@ OTOOL + LIPO + NMEDIT + DSYMUTIL ++MANIFEST_TOOL ++ac_ct_AR ++DLLTOOL + OBJDUMP + LN_S + NM +@@ -763,6 +766,7 @@ enable_static + with_pic + enable_fast_install + with_gnu_ld ++with_libtool_sysroot + enable_libtool_lock + enable_targets + enable_werror +@@ -1423,6 +1427,8 @@ Optional Packages: + --with-pic try to use only PIC/non-PIC objects [default=use + both] + --with-gnu-ld assume the C compiler uses GNU ld [default=no] ++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR ++ (or the compiler's sysroot if not specified). + + Some influential environment variables: + CC C compiler command +@@ -5115,8 +5121,8 @@ esac + + + +-macro_version='2.2.7a' +-macro_revision='1.3134' ++macro_version='2.4' ++macro_revision='1.3293' + + + +@@ -5156,7 +5162,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 + $as_echo_n "checking how to print strings... " >&6; } + # Test print first, because it will be a builtin if present. +-if test "X`print -r -- -n 2>/dev/null`" = X-n && \ ++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ + test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then + ECHO='print -r --' + elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then +@@ -5842,8 +5848,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... " >&6; + # Try some XSI features + xsi_shell=no + ( _lt_dummy="a/b/c" +- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ +- = c,a/b,, \ ++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ ++ = c,a/b,b/c, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +@@ -5892,6 +5898,80 @@ esac + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 ++$as_echo_n "checking how to convert $build file names to $host format... " >&6; } ++if test "${lt_cv_to_host_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ++ ;; ++ esac ++ ;; ++ *-*-cygwin* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ++ ;; ++ esac ++ ;; ++ * ) # unhandled hosts (and "normal" native builds) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++esac ++ ++fi ++ ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 ++$as_echo "$lt_cv_to_host_file_cmd" >&6; } ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 ++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } ++if test "${lt_cv_to_tool_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ #assume ordinary cross tools, or native build. 
++lt_cv_to_tool_file_cmd=func_convert_file_noop ++case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ esac ++ ;; ++esac ++ ++fi ++ ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 ++$as_echo "$lt_cv_to_tool_file_cmd" >&6; } ++ ++ ++ ++ ++ + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 + $as_echo_n "checking for $LD option to reload object files... " >&6; } + if test "${lt_cv_ld_reload_flag+set}" = set; then : +@@ -5908,6 +5988,11 @@ case $reload_flag in + esac + reload_cmds='$LD$reload_flag -o $output$reload_objs' + case $host_os in ++ cygwin* | mingw* | pw32* | cegcc*) ++ if test "$GCC" != yes; then ++ reload_cmds=false ++ fi ++ ;; + darwin*) + if test "$GCC" = yes; then + reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' +@@ -6076,7 +6161,8 @@ mingw* | pw32*) + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else +- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' ++ # Keep this pattern in sync with the one in func_win32_libid. ++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; +@@ -6230,6 +6316,21 @@ esac + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 + $as_echo "$lt_cv_deplibs_check_method" >&6; } ++ ++file_magic_glob= ++want_nocaseglob=no ++if test "$build" = "$host"; then ++ case $host_os in ++ mingw* | pw32*) ++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then ++ want_nocaseglob=yes ++ else ++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` ++ fi ++ ;; ++ esac ++fi ++ + file_magic_cmd=$lt_cv_file_magic_cmd + deplibs_check_method=$lt_cv_deplibs_check_method + test -z "$deplibs_check_method" && deplibs_check_method=unknown +@@ -6245,9 +6346,162 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ + if test -n "$ac_tool_prefix"; then +- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. +-set dummy ${ac_tool_prefix}ar; ac_word=$2 ++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. ++set dummy ${ac_tool_prefix}dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$DLLTOOL"; then ++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++DLLTOOL=$ac_cv_prog_DLLTOOL ++if test -n "$DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 ++$as_echo "$DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_DLLTOOL"; then ++ ac_ct_DLLTOOL=$DLLTOOL ++ # Extract the first word of "dlltool", so it can be a program name with args. ++set dummy dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_DLLTOOL"; then ++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_DLLTOOL="dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL ++if test -n "$ac_ct_DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 ++$as_echo "$ac_ct_DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_DLLTOOL" = x; then ++ DLLTOOL="false" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ DLLTOOL=$ac_ct_DLLTOOL ++ fi ++else ++ DLLTOOL="$ac_cv_prog_DLLTOOL" ++fi ++ ++test -z "$DLLTOOL" && DLLTOOL=dlltool ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 ++$as_echo_n "checking how to associate runtime and link libraries... 
" >&6; } ++if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_sharedlib_from_linklib_cmd='unknown' ++ ++case $host_os in ++cygwin* | mingw* | pw32* | cegcc*) ++ # two different shell functions defined in ltmain.sh ++ # decide which to use based on capabilities of $DLLTOOL ++ case `$DLLTOOL --help 2>&1` in ++ *--identify-strict*) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ++ ;; ++ *) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ++ ;; ++ esac ++ ;; ++*) ++ # fallback: assume linklib IS sharedlib ++ lt_cv_sharedlib_from_linklib_cmd="$ECHO" ++ ;; ++esac ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 ++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } ++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd ++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO ++ ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ for ac_prog in ar ++ do ++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. ++set dummy $ac_tool_prefix$ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_AR+set}" = set; then : +@@ -6263,7 +6517,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_AR="${ac_tool_prefix}ar" ++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6283,11 +6537,15 @@ $as_echo "no" >&6; } + fi + + ++ test -n "$AR" && break ++ done + fi +-if test -z "$ac_cv_prog_AR"; then ++if test -z "$AR"; then + ac_ct_AR=$AR +- # Extract the first word of "ar", so it can be a program name with args. +-set dummy ar; ac_word=$2 ++ for ac_prog in ar ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. ++set dummy $ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : +@@ -6303,7 +6561,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_ac_ct_AR="ar" ++ ac_cv_prog_ac_ct_AR="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -6322,6 +6580,10 @@ else + $as_echo "no" >&6; } + fi + ++ ++ test -n "$ac_ct_AR" && break ++done ++ + if test "x$ac_ct_AR" = x; then + AR="false" + else +@@ -6333,16 +6595,72 @@ ac_tool_warned=yes ;; + esac + AR=$ac_ct_AR + fi +-else +- AR="$ac_cv_prog_AR" + fi + +-test -z "$AR" && AR=ar +-test -z "$AR_FLAGS" && AR_FLAGS=cru ++: ${AR=ar} ++: ${AR_FLAGS=cru} ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 ++$as_echo_n "checking for archiver @FILE support... " >&6; } ++if test "${lt_cv_ar_at_file+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_ar_at_file=no ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. 
*/ ++ ++int ++main () ++{ + ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ echo conftest.$ac_objext > conftest.lst ++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -eq 0; then ++ # Ensure the archiver fails upon bogus file names. ++ rm -f conftest.$ac_objext libconftest.a ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -ne 0; then ++ lt_cv_ar_at_file=@ ++ fi ++ fi ++ rm -f conftest.* libconftest.a + ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 ++$as_echo "$lt_cv_ar_at_file" >&6; } + ++if test "x$lt_cv_ar_at_file" = xno; then ++ archiver_list_spec= ++else ++ archiver_list_spec=$lt_cv_ar_at_file ++fi + + + +@@ -6684,8 +7002,8 @@ esac + lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + + # Transform an extracted symbol line into symbol name and symbol address +-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" +-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + + # Handle CRLF in mingw tool chain + opt_cr= +@@ -6721,6 +7039,7 @@ for ac_symprfx in "" "_"; do + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi ++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" + + # Check to see that the pipe works correctly. + pipe_works=no +@@ -6762,6 +7081,18 @@ _LT_EOF + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext ++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ ++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) ++/* DATA imports from DLLs on WIN32 con't be const, because runtime ++ relocations are performed -- see ld's documentation on pseudo-relocs. */ ++# define LT_DLSYM_CONST ++#elif defined(__osf__) ++/* This system does not cope well with relocations in const data. */ ++# define LT_DLSYM_CONST ++#else ++# define LT_DLSYM_CONST const ++#endif ++ + #ifdef __cplusplus + extern "C" { + #endif +@@ -6773,7 +7104,7 @@ _LT_EOF + cat <<_LT_EOF >> conftest.$ac_ext + + /* The mapping between symbol names and symbols. 
*/ +-const struct { ++LT_DLSYM_CONST struct { + const char *name; + void *address; + } +@@ -6799,8 +7130,8 @@ static const void *lt_preloaded_setup() { + _LT_EOF + # Now try linking the two files. + mv conftest.$ac_objext conftstm.$ac_objext +- lt_save_LIBS="$LIBS" +- lt_save_CFLAGS="$CFLAGS" ++ lt_globsym_save_LIBS=$LIBS ++ lt_globsym_save_CFLAGS=$CFLAGS + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 +@@ -6810,8 +7141,8 @@ _LT_EOF + test $ac_status = 0; } && test -s conftest${ac_exeext}; then + pipe_works=yes + fi +- LIBS="$lt_save_LIBS" +- CFLAGS="$lt_save_CFLAGS" ++ LIBS=$lt_globsym_save_LIBS ++ CFLAGS=$lt_globsym_save_CFLAGS + else + echo "cannot find nm_test_func in $nlist" >&5 + fi +@@ -6848,6 +7179,16 @@ else + $as_echo "ok" >&6; } + fi + ++# Response file support. ++if test "$lt_cv_nm_interface" = "MS dumpbin"; then ++ nm_file_list_spec='@' ++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then ++ nm_file_list_spec='@' ++fi ++ ++ ++ ++ + + + +@@ -6869,6 +7210,45 @@ fi + + + ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 ++$as_echo_n "checking for sysroot... " >&6; } ++ ++# Check whether --with-libtool-sysroot was given. ++if test "${with_libtool_sysroot+set}" = set; then : ++ withval=$with_libtool_sysroot; ++else ++ with_libtool_sysroot=no ++fi ++ ++ ++lt_sysroot= ++case ${with_libtool_sysroot} in #( ++ yes) ++ if test "$GCC" = yes; then ++ lt_sysroot=`$CC --print-sysroot 2>/dev/null` ++ fi ++ ;; #( ++ /*) ++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` ++ ;; #( ++ no|'') ++ ;; #( ++ *) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 ++$as_echo "${with_libtool_sysroot}" >&6; } ++ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 ++ ;; ++esac ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 ++$as_echo "${lt_sysroot:-no}" >&6; } ++ ++ ++ ++ ++ + # Check whether --enable-libtool-lock was given. + if test "${enable_libtool_lock+set}" = set; then : + enableval=$enable_libtool_lock; +@@ -7075,6 +7455,123 @@ esac + + need_locks="$enable_libtool_lock" + ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. ++set dummy ${ac_tool_prefix}mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$MANIFEST_TOOL"; then ++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL ++if test -n "$MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 ++$as_echo "$MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then ++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL ++ # Extract the first word of "mt", so it can be a program name with args. ++set dummy mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_MANIFEST_TOOL"; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL ++if test -n "$ac_ct_MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 ++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_MANIFEST_TOOL" = x; then ++ MANIFEST_TOOL=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL ++ fi ++else ++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" ++fi ++ ++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 ++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } ++if test "${lt_cv_path_mainfest_tool+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_path_mainfest_tool=no ++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 ++ $MANIFEST_TOOL '-?' 
2>conftest.err > conftest.out ++ cat conftest.err >&5 ++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then ++ lt_cv_path_mainfest_tool=yes ++ fi ++ rm -f conftest* ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 ++$as_echo "$lt_cv_path_mainfest_tool" >&6; } ++if test "x$lt_cv_path_mainfest_tool" != xyes; then ++ MANIFEST_TOOL=: ++fi ++ ++ ++ ++ ++ + + case $host_os in + rhapsody* | darwin*) +@@ -7638,6 +8135,8 @@ _LT_EOF + $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 + echo "$AR cru libconftest.a conftest.o" >&5 + $AR cru libconftest.a conftest.o 2>&5 ++ echo "$RANLIB libconftest.a" >&5 ++ $RANLIB libconftest.a 2>&5 + cat > conftest.c << _LT_EOF + int main() { return 0;} + _LT_EOF +@@ -7803,7 +8302,8 @@ fi + LIBTOOL_DEPS="$ltmain" + + # Always use our own libtool. +-LIBTOOL='$(SHELL) $(top_builddir)/libtool' ++LIBTOOL='$(SHELL) $(top_builddir)' ++LIBTOOL="$LIBTOOL/${host_alias}-libtool" + + + +@@ -7892,7 +8392,7 @@ aix3*) + esac + + # Global variables: +-ofile=libtool ++ofile=${host_alias}-libtool + can_build_shared=yes + + # All known linkers require a `.a' archive for static linking (except MSVC, +@@ -8190,8 +8690,6 @@ fi + lt_prog_compiler_pic= + lt_prog_compiler_static= + +-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +-$as_echo_n "checking for $compiler option to produce PIC... " >&6; } + + if test "$GCC" = yes; then + lt_prog_compiler_wl='-Wl,' +@@ -8357,6 +8855,12 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + lt_prog_compiler_pic='--shared' + lt_prog_compiler_static='--static' + ;; ++ nagfor*) ++ # NAG Fortran compiler ++ lt_prog_compiler_wl='-Wl,-Wl,,' ++ lt_prog_compiler_pic='-PIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; + pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) +@@ -8419,7 +8923,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + case $cc_basename in +- f77* | f90* | f95*) ++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) + lt_prog_compiler_wl='-Qoption ld ';; + *) + lt_prog_compiler_wl='-Wl,';; +@@ -8476,13 +8980,17 @@ case $host_os in + lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" + ;; + esac +-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 +-$as_echo "$lt_prog_compiler_pic" >&6; } +- +- +- +- + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 ++$as_echo_n "checking for $compiler option to produce PIC... " >&6; } ++if test "${lt_cv_prog_compiler_pic+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 ++$as_echo "$lt_cv_prog_compiler_pic" >&6; } ++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic + + # + # Check to make sure the PIC flag actually works. +@@ -8543,6 +9051,11 @@ fi + + + ++ ++ ++ ++ ++ + # + # Check to make sure the static flag actually works. 
+ # +@@ -8893,7 +9406,8 @@ _LT_EOF + allow_undefined_flag=unsupported + always_export_symbols=no + enable_shared_with_static_runtimes=yes +- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' ++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +@@ -8992,12 +9506,12 @@ _LT_EOF + whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' + hardcode_libdir_flag_spec= + hardcode_libdir_flag_spec_ld='-rpath $libdir' +- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' ++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ +- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' ++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + ;; + esac +@@ -9011,8 +9525,8 @@ _LT_EOF + archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + ;; + +@@ -9030,8 +9544,8 @@ _LT_EOF + + _LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -9077,8 +9591,8 @@ _LT_EOF + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ 
archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -9208,7 +9722,13 @@ _LT_EOF + allow_undefined_flag='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9221,22 +9741,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" +@@ -9248,7 +9775,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + else + # Determine the default libpath from the value encoded in an + # empty executable. +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ + + int +@@ -9261,22 +9794,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. 
++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, +@@ -9321,20 +9861,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. +- hardcode_libdir_flag_spec=' ' +- allow_undefined_flag=unsupported +- # Tell ltmain to make .lib files, not .a files. +- libext=lib +- # Tell ltmain to make .dll files, not .so files. +- shrext_cmds=".dll" +- # FIXME: Setting linknames here is a bad hack. +- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' +- # The linker will automatically build a .lib file if we build a DLL. +- old_archive_from_new_cmds='true' +- # FIXME: Should let the user specify the lib program. +- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' +- fix_srcfile_path='`cygpath -w "$srcfile"`' +- enable_shared_with_static_runtimes=yes ++ case $cc_basename in ++ cl*) ++ # Native MSVC ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ always_export_symbols=yes ++ file_list_spec='@' ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' ++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; ++ else ++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; ++ fi~ ++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ ++ linknames=' ++ # The linker will not automatically build a static lib if we build a DLL. ++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' ++ enable_shared_with_static_runtimes=yes ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ # Don't use ranlib ++ old_postinstall_cmds='chmod 644 $oldlib' ++ postlink_cmds='lt_outputfile="@OUTPUT@"~ ++ lt_tool_outputfile="@TOOL_OUTPUT@"~ ++ case $lt_outputfile in ++ *.exe|*.EXE) ;; ++ *) ++ lt_outputfile="$lt_outputfile.exe" ++ lt_tool_outputfile="$lt_tool_outputfile.exe" ++ ;; ++ esac~ ++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then ++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; ++ $RM "$lt_outputfile.manifest"; ++ fi' ++ ;; ++ *) ++ # Assume MSVC wrapper ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ # Tell ltmain to make .lib files, not .a files. 
++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' ++ # The linker will automatically build a .lib file if we build a DLL. ++ old_archive_from_new_cmds='true' ++ # FIXME: Should let the user specify the lib program. ++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' ++ enable_shared_with_static_runtimes=yes ++ ;; ++ esac + ;; + + darwin* | rhapsody*) +@@ -9395,7 +9978,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. + freebsd* | dragonfly*) +- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no +@@ -9403,7 +9986,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux9*) + if test "$GCC" = yes; then +- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi +@@ -9419,7 +10002,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux10*) + if test "$GCC" = yes && test "$with_gnu_ld" = no; then +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi +@@ -9443,10 +10026,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else +@@ -9525,23 +10108,36 @@ fi + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` 
${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. +- save_LDFLAGS="$LDFLAGS" +- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" +- cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++ # This should be the same for all languages, so no per-tag cache variable. ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 ++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } ++if test "${lt_cv_irix_exported_symbol+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ +-int foo(void) {} ++int foo (void) { return 0; } + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' +- ++ lt_cv_irix_exported_symbol=yes ++else ++ lt_cv_irix_exported_symbol=no + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +- LDFLAGS="$save_LDFLAGS" ++ LDFLAGS="$save_LDFLAGS" ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 ++$as_echo "$lt_cv_irix_exported_symbol" >&6; } ++ if test "$lt_cv_irix_exported_symbol" = yes; then ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' ++ fi + else + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' +@@ -9626,7 +10222,7 @@ rm -f core conftest.err conftest.$ac_objext \ + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' +- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + else + allow_undefined_flag=' -expect_unresolved \*' +@@ -9645,9 +10241,9 @@ rm -f core conftest.err conftest.$ac_objext \ + no_undefined_flag=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' +- archive_cmds='$CC -shared ${wl}-z ${wl}text 
${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ +- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) +@@ -10223,8 +10819,9 @@ cygwin* | mingw* | pw32* | cegcc*) + need_version=no + need_lib_prefix=no + +- case $GCC,$host_os in +- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) ++ case $GCC,$cc_basename in ++ yes,*) ++ # gcc + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ +@@ -10257,13 +10854,71 @@ cygwin* | mingw* | pw32* | cegcc*) + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + esac ++ dynamic_linker='Win32 ld.exe' ++ ;; ++ ++ *,cl*) ++ # Native MSVC ++ libname_spec='$name' ++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ library_names_spec='${libname}.dll.lib' ++ ++ case $build_os in ++ mingw*) ++ sys_lib_search_path_spec= ++ lt_save_ifs=$IFS ++ IFS=';' ++ for lt_path in $LIB ++ do ++ IFS=$lt_save_ifs ++ # Let DOS variable expansion print the short 8.3 style file name. ++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` ++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" ++ done ++ IFS=$lt_save_ifs ++ # Convert to MSYS style. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ++ ;; ++ cygwin*) ++ # Convert to unix form, then to dos form, then back to unix form ++ # but this time dos style (no spaces!) so that the unix form looks ++ # like /cygdrive/c/PROGRA~1:/cygdr... ++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` ++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` ++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ ;; ++ *) ++ sys_lib_search_path_spec="$LIB" ++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then ++ # It is most probably a Windows format PATH. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` ++ else ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ fi ++ # FIXME: find the short name or the path components, as spaces are ++ # common. (e.g. "Program Files" -> "PROGRA~1") ++ ;; ++ esac ++ ++ # DLL is installed to $(libdir)/../bin by postinstall_cmds ++ postinstall_cmds='base_file=`basename \${file}`~ ++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ ++ dldir=$destdir/`dirname \$dlpath`~ ++ test -d \$dldir || mkdir -p \$dldir~ ++ $install_prog $dir/$dlname \$dldir/$dlname' ++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ ++ dlpath=$dir/\$dldll~ ++ $RM \$dlpath' ++ shlibpath_overrides_runpath=yes ++ dynamic_linker='Win32 link.exe' + ;; + + *) ++ # Assume MSVC wrapper + library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ++ dynamic_linker='Win32 ld.exe' + ;; + esac +- dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . and the directory the executable is in + shlibpath_var=PATH + ;; +@@ -10355,7 +11010,7 @@ haiku*) + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LIBRARY_PATH + shlibpath_overrides_runpath=yes +- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' ++ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' + hardcode_into_libs=yes + ;; + +@@ -11195,10 +11850,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -11301,10 +11956,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -12543,7 +13198,7 @@ if test "$enable_shared" = "yes"; then + # since libbfd may not pull in the entirety of libiberty. 
+ x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'` + if test -n "$x"; then +- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty" ++ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a" + fi + + case "${host}" in +@@ -13518,13 +14173,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' + lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' + lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' + lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' + reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' + reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' + OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' + deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' + file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' ++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' ++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' ++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' ++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' + AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' + AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' ++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' + STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' + RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' + old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' +@@ -13539,14 +14201,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de + lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' ++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' ++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' + objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' + MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' +-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' + lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' + need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' ++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' + DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' + NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' + 
LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' +@@ -13579,12 +14244,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q + hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' + inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' + link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' +-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' + always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' + export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' + exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' + include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' + prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' ++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' + file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' + variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' + need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' +@@ -13639,8 +14304,13 @@ reload_flag \ + OBJDUMP \ + deplibs_check_method \ + file_magic_cmd \ ++file_magic_glob \ ++want_nocaseglob \ ++DLLTOOL \ ++sharedlib_from_linklib_cmd \ + AR \ + AR_FLAGS \ ++archiver_list_spec \ + STRIP \ + RANLIB \ + CC \ +@@ -13650,12 +14320,14 @@ lt_cv_sys_global_symbol_pipe \ + lt_cv_sys_global_symbol_to_cdecl \ + lt_cv_sys_global_symbol_to_c_name_address \ + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ ++nm_file_list_spec \ + lt_prog_compiler_no_builtin_flag \ +-lt_prog_compiler_wl \ + lt_prog_compiler_pic \ ++lt_prog_compiler_wl \ + lt_prog_compiler_static \ + lt_cv_prog_compiler_c_o \ + need_locks \ ++MANIFEST_TOOL \ + DSYMUTIL \ + NMEDIT \ + LIPO \ +@@ -13671,7 +14343,6 @@ no_undefined_flag \ + hardcode_libdir_flag_spec \ + hardcode_libdir_flag_spec_ld \ + hardcode_libdir_separator \ +-fix_srcfile_path \ + exclude_expsyms \ + include_expsyms \ + file_list_spec \ +@@ -13707,6 +14378,7 @@ module_cmds \ + module_expsym_cmds \ + export_symbols_cmds \ + prelink_cmds \ ++postlink_cmds \ + postinstall_cmds \ + postuninstall_cmds \ + finish_cmds \ +@@ -14463,7 +15135,8 @@ $as_echo X"$file" | + # NOTE: Changes made to this file will be lost: look at ltmain.sh. + # + # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. ++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, ++# Inc. + # Written by Gordon Matzigkeit, 1996 + # + # This file is part of GNU Libtool. +@@ -14566,19 +15239,42 @@ SP2NL=$lt_lt_SP2NL + # turn newlines into spaces. + NL2SP=$lt_lt_NL2SP + ++# convert \$build file names to \$host format. ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++ ++# convert \$build files to toolchain format. ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++ + # An object symbol dumper. + OBJDUMP=$lt_OBJDUMP + + # Method to check whether dependent libraries are shared objects. + deplibs_check_method=$lt_deplibs_check_method + +-# Command to use when deplibs_check_method == "file_magic". ++# Command to use when deplibs_check_method = "file_magic". + file_magic_cmd=$lt_file_magic_cmd + ++# How to find potential files when deplibs_check_method = "file_magic". 
++file_magic_glob=$lt_file_magic_glob ++ ++# Find potential files using nocaseglob when deplibs_check_method = "file_magic". ++want_nocaseglob=$lt_want_nocaseglob ++ ++# DLL creation program. ++DLLTOOL=$lt_DLLTOOL ++ ++# Command to associate shared and link libraries. ++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd ++ + # The archiver. + AR=$lt_AR ++ ++# Flags to create an archive. + AR_FLAGS=$lt_AR_FLAGS + ++# How to feed a file listing to the archiver. ++archiver_list_spec=$lt_archiver_list_spec ++ + # A symbol stripping program. + STRIP=$lt_STRIP + +@@ -14608,6 +15304,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address + # Transform the output of nm in a C name address pair when lib prefix is needed. + global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix + ++# Specify filename containing input files for \$NM. ++nm_file_list_spec=$lt_nm_file_list_spec ++ ++# The root where to search for dependent libraries,and in which our libraries should be installed. ++lt_sysroot=$lt_sysroot ++ + # The name of the directory that contains temporary libtool files. + objdir=$objdir + +@@ -14617,6 +15319,9 @@ MAGIC_CMD=$MAGIC_CMD + # Must we lock files when doing compilation? + need_locks=$lt_need_locks + ++# Manifest tool. ++MANIFEST_TOOL=$lt_MANIFEST_TOOL ++ + # Tool to manipulate archived DWARF debug symbol files on Mac OS X. + DSYMUTIL=$lt_DSYMUTIL + +@@ -14731,12 +15436,12 @@ with_gcc=$GCC + # Compiler flag to turn off builtin functions. + no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag + +-# How to pass a linker flag through the compiler. +-wl=$lt_lt_prog_compiler_wl +- + # Additional compiler flags for building library objects. + pic_flag=$lt_lt_prog_compiler_pic + ++# How to pass a linker flag through the compiler. ++wl=$lt_lt_prog_compiler_wl ++ + # Compiler flag to prevent dynamic linking. + link_static_flag=$lt_lt_prog_compiler_static + +@@ -14823,9 +15528,6 @@ inherit_rpath=$inherit_rpath + # Whether libtool must link a program against all its dependency libraries. + link_all_deplibs=$link_all_deplibs + +-# Fix the shell variable \$srcfile for the compiler. +-fix_srcfile_path=$lt_fix_srcfile_path +- + # Set to "yes" if exported symbols are required. + always_export_symbols=$always_export_symbols + +@@ -14841,6 +15543,9 @@ include_expsyms=$lt_include_expsyms + # Commands necessary for linking programs (against libraries) with templates. + prelink_cmds=$lt_prelink_cmds + ++# Commands necessary for finishing linking programs. ++postlink_cmds=$lt_postlink_cmds ++ + # Specify filename containing input files. + file_list_spec=$lt_file_list_spec + +@@ -14873,210 +15578,169 @@ ltmain="$ac_aux_dir/ltmain.sh" + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? +- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- case $xsi_shell in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. 
+-func_dirname () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result="${1##*/}" +-} +- +-# func_dirname_and_basename file append nondir_replacement +-# perform func_basename and func_dirname in a single function +-# call: +-# dirname: Compute the dirname of FILE. If nonempty, +-# add APPEND to the result, otherwise set result +-# to NONDIR_REPLACEMENT. +-# value returned in "$func_dirname_result" +-# basename: Compute filename of FILE. +-# value retuned in "$func_basename_result" +-# Implementation must be kept synchronized with func_dirname +-# and func_basename. For efficiency, we do not delegate to +-# those functions but instead duplicate the functionality here. +-func_dirname_and_basename () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +- func_basename_result="${1##*/}" +-} +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-func_stripname () +-{ +- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are +- # positional parameters, so assign one to ordinary parameter first. +- func_stripname_result=${3} +- func_stripname_result=${func_stripname_result#"${1}"} +- func_stripname_result=${func_stripname_result%"${2}"} +-} +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=${1%%=*} +- func_opt_split_arg=${1#*=} +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- case ${1} in +- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; +- *) func_lo2o_result=${1} ;; +- esac +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=${1%.*}.lo +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=$(( $* )) +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=${#1} +-} +- +-_LT_EOF +- ;; +- *) # Bourne compatible functions. +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- # Extract subdirectory from the argument. +- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` +- if test "X$func_dirname_result" = "X${1}"; then +- func_dirname_result="${3}" +- else +- func_dirname_result="$func_dirname_result${2}" +- fi +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result=`$ECHO "${1}" | $SED "$basename"` +-} +- +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). 
+-# func_strip_suffix prefix name +-func_stripname () +-{ +- case ${2} in +- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; +- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; +- esac +-} +- +-# sed scripts: +-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' +-my_sed_long_arg='1s/^-[^=]*=//' +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` +- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=`expr "$@"` +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` +-} +- +-_LT_EOF +-esac +- +-case $lt_shell_append in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1+=\$2" +-} +-_LT_EOF +- ;; +- *) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1=\$$1\$2" +-} +- +-_LT_EOF +- ;; +- esac +- +- +- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- mv -f "$cfgfile" "$ofile" || ++ sed '$q' "$ltmain" >> "$cfgfile" \ ++ || (rm -f "$cfgfile"; exit 1) ++ ++ if test x"$xsi_shell" = xyes; then ++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ ++func_dirname ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_basename ()$/,/^} # func_basename /c\ ++func_basename ()\ ++{\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ ++func_dirname_and_basename ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ ++func_stripname ()\ ++{\ ++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ ++\ # positional parameters, so assign one to ordinary parameter first.\ ++\ func_stripname_result=${3}\ ++\ func_stripname_result=${func_stripname_result#"${1}"}\ ++\ func_stripname_result=${func_stripname_result%"${2}"}\ ++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ ++func_split_long_opt ()\ ++{\ ++\ func_split_long_opt_name=${1%%=*}\ ++\ func_split_long_opt_arg=${1#*=}\ ++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ ++func_split_short_opt ()\ ++{\ ++\ func_split_short_opt_arg=${1#??}\ ++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ ++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ ++func_lo2o ()\ ++{\ ++\ case ${1} in\ ++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ ++\ *) func_lo2o_result=${1} ;;\ ++\ esac\ ++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_xform ()$/,/^} # func_xform /c\ ++func_xform ()\ ++{\ ++ func_xform_result=${1%.*}.lo\ ++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_arith ()$/,/^} # func_arith /c\ ++func_arith ()\ ++{\ ++ func_arith_result=$(( $* ))\ ++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_len ()$/,/^} # func_len /c\ ++func_len ()\ ++{\ ++ func_len_result=${#1}\ ++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++fi ++ ++if test x"$lt_shell_append" = xyes; then ++ sed -e '/^func_append ()$/,/^} # func_append /c\ ++func_append ()\ ++{\ ++ eval "${1}+=\\${2}"\ ++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ ++func_append_quoted ()\ ++{\ ++\ func_quote_for_eval "${2}"\ ++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ ++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ # Save a `func_append' function call where possible by direct use of '+=' ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++else ++ # Save a `func_append' function call even when '+=' is not available ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++fi ++ ++if test x"$_lt_function_replace_fail" = x":"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 ++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} ++fi ++ ++ ++ mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" + +diff --git a/opcodes/configure.ac b/opcodes/configure.ac +index b9f5eb8a4f..a31b66a2f0 100644 +--- a/opcodes/configure.ac ++++ b/opcodes/configure.ac +@@ -167,7 +167,7 @@ changequote(,)dnl + x=`sed -n -e 's/^[ ]*PICFLAG[ ]*=[ ]*//p' < ../libiberty/Makefile | sed -n '$p'` + changequote([,])dnl + if test -n "$x"; then +- SHARED_LIBADD="-L`pwd`/../libiberty/pic -liberty" ++ SHARED_LIBADD="`pwd`/../libiberty/pic/libiberty.a" + fi + + case "${host}" in +diff --git a/zlib/configure b/zlib/configure +index bed9e3ea2b..caef0b674e 100755 +--- a/zlib/configure ++++ b/zlib/configure +@@ -614,8 +614,11 @@ OTOOL + LIPO + NMEDIT + DSYMUTIL ++MANIFEST_TOOL + RANLIB ++ac_ct_AR + AR ++DLLTOOL + OBJDUMP + LN_S + NM +@@ -737,6 +740,7 @@ enable_static + with_pic + enable_fast_install + with_gnu_ld ++with_libtool_sysroot + enable_libtool_lock + enable_host_shared + ' +@@ -1385,6 +1389,8 @@ Optional Packages: + --with-pic try to use only PIC/non-PIC objects [default=use + both] + --with-gnu-ld assume the C compiler uses GNU ld [default=no] ++ --with-libtool-sysroot=DIR Search for dependent libraries within DIR ++ (or the compiler's sysroot if not specified). + + Some influential environment variables: + CC C compiler command +@@ -3910,8 +3916,8 @@ esac + + + +-macro_version='2.2.7a' +-macro_revision='1.3134' ++macro_version='2.4' ++macro_revision='1.3293' + + + +@@ -3951,7 +3957,7 @@ ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO + { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 + $as_echo_n "checking how to print strings... " >&6; } + # Test print first, because it will be a builtin if present. +-if test "X`print -r -- -n 2>/dev/null`" = X-n && \ ++if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ + test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then + ECHO='print -r --' + elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then +@@ -4767,8 +4773,8 @@ $as_echo_n "checking whether the shell understands some XSI constructs... 
" >&6; + # Try some XSI features + xsi_shell=no + ( _lt_dummy="a/b/c" +- test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ +- = c,a/b,, \ ++ test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ ++ = c,a/b,b/c, \ + && eval 'test $(( 1 + 1 )) -eq 2 \ + && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ + && xsi_shell=yes +@@ -4817,6 +4823,80 @@ esac + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 ++$as_echo_n "checking how to convert $build file names to $host format... " >&6; } ++if test "${lt_cv_to_host_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ++ ;; ++ esac ++ ;; ++ *-*-cygwin* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ++ ;; ++ *-*-cygwin* ) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++ * ) # otherwise, assume *nix ++ lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ++ ;; ++ esac ++ ;; ++ * ) # unhandled hosts (and "normal" native builds) ++ lt_cv_to_host_file_cmd=func_convert_file_noop ++ ;; ++esac ++ ++fi ++ ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 ++$as_echo "$lt_cv_to_host_file_cmd" >&6; } ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 ++$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; } ++if test "${lt_cv_to_tool_file_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ #assume ordinary cross tools, or native build. ++lt_cv_to_tool_file_cmd=func_convert_file_noop ++case $host in ++ *-*-mingw* ) ++ case $build in ++ *-*-mingw* ) # actually msys ++ lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ++ ;; ++ esac ++ ;; ++esac ++ ++fi ++ ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 ++$as_echo "$lt_cv_to_tool_file_cmd" >&6; } ++ ++ ++ ++ ++ + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 + $as_echo_n "checking for $LD option to reload object files... " >&6; } + if test "${lt_cv_ld_reload_flag+set}" = set; then : +@@ -4833,6 +4913,11 @@ case $reload_flag in + esac + reload_cmds='$LD$reload_flag -o $output$reload_objs' + case $host_os in ++ cygwin* | mingw* | pw32* | cegcc*) ++ if test "$GCC" != yes; then ++ reload_cmds=false ++ fi ++ ;; + darwin*) + if test "$GCC" = yes; then + reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' +@@ -5001,7 +5086,8 @@ mingw* | pw32*) + lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' + lt_cv_file_magic_cmd='func_win32_libid' + else +- lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' ++ # Keep this pattern in sync with the one in func_win32_libid. 
++ lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' + lt_cv_file_magic_cmd='$OBJDUMP -f' + fi + ;; +@@ -5155,6 +5241,21 @@ esac + fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 + $as_echo "$lt_cv_deplibs_check_method" >&6; } ++ ++file_magic_glob= ++want_nocaseglob=no ++if test "$build" = "$host"; then ++ case $host_os in ++ mingw* | pw32*) ++ if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then ++ want_nocaseglob=yes ++ else ++ file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` ++ fi ++ ;; ++ esac ++fi ++ + file_magic_cmd=$lt_cv_file_magic_cmd + deplibs_check_method=$lt_cv_deplibs_check_method + test -z "$deplibs_check_method" && deplibs_check_method=unknown +@@ -5170,9 +5271,163 @@ test -z "$deplibs_check_method" && deplibs_check_method=unknown + + + ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ + if test -n "$ac_tool_prefix"; then +- # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. +-set dummy ${ac_tool_prefix}ar; ac_word=$2 ++ # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. ++set dummy ${ac_tool_prefix}dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$DLLTOOL"; then ++ ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++DLLTOOL=$ac_cv_prog_DLLTOOL ++if test -n "$DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 ++$as_echo "$DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_DLLTOOL"; then ++ ac_ct_DLLTOOL=$DLLTOOL ++ # Extract the first word of "dlltool", so it can be a program name with args. ++set dummy dlltool; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_ac_ct_DLLTOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_DLLTOOL"; then ++ ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_DLLTOOL="dlltool" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL ++if test -n "$ac_ct_DLLTOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 ++$as_echo "$ac_ct_DLLTOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_DLLTOOL" = x; then ++ DLLTOOL="false" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ DLLTOOL=$ac_ct_DLLTOOL ++ fi ++else ++ DLLTOOL="$ac_cv_prog_DLLTOOL" ++fi ++ ++test -z "$DLLTOOL" && DLLTOOL=dlltool ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 ++$as_echo_n "checking how to associate runtime and link libraries... " >&6; } ++if test "${lt_cv_sharedlib_from_linklib_cmd+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_sharedlib_from_linklib_cmd='unknown' ++ ++case $host_os in ++cygwin* | mingw* | pw32* | cegcc*) ++ # two different shell functions defined in ltmain.sh ++ # decide which to use based on capabilities of $DLLTOOL ++ case `$DLLTOOL --help 2>&1` in ++ *--identify-strict*) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ++ ;; ++ *) ++ lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ++ ;; ++ esac ++ ;; ++*) ++ # fallback: assume linklib IS sharedlib ++ lt_cv_sharedlib_from_linklib_cmd="$ECHO" ++ ;; ++esac ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 ++$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } ++sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd ++test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO ++ ++ ++ ++ ++ ++ ++ ++ ++if test -n "$ac_tool_prefix"; then ++ for ac_prog in ar ++ do ++ # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. ++set dummy $ac_tool_prefix$ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... " >&6; } + if test "${ac_cv_prog_AR+set}" = set; then : +@@ -5188,7 +5443,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_AR="${ac_tool_prefix}ar" ++ ac_cv_prog_AR="$ac_tool_prefix$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -5208,11 +5463,15 @@ $as_echo "no" >&6; } + fi + + ++ test -n "$AR" && break ++ done + fi +-if test -z "$ac_cv_prog_AR"; then ++if test -z "$AR"; then + ac_ct_AR=$AR +- # Extract the first word of "ar", so it can be a program name with args. +-set dummy ar; ac_word=$2 ++ for ac_prog in ar ++do ++ # Extract the first word of "$ac_prog", so it can be a program name with args. ++set dummy $ac_prog; ac_word=$2 + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 + $as_echo_n "checking for $ac_word... 
" >&6; } + if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : +@@ -5228,7 +5487,7 @@ do + test -z "$as_dir" && as_dir=. + for ac_exec_ext in '' $ac_executable_extensions; do + if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then +- ac_cv_prog_ac_ct_AR="ar" ++ ac_cv_prog_ac_ct_AR="$ac_prog" + $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 + break 2 + fi +@@ -5247,6 +5506,10 @@ else + $as_echo "no" >&6; } + fi + ++ ++ test -n "$ac_ct_AR" && break ++done ++ + if test "x$ac_ct_AR" = x; then + AR="false" + else +@@ -5258,16 +5521,72 @@ ac_tool_warned=yes ;; + esac + AR=$ac_ct_AR + fi +-else +- AR="$ac_cv_prog_AR" + fi + +-test -z "$AR" && AR=ar +-test -z "$AR_FLAGS" && AR_FLAGS=cru ++: ${AR=ar} ++: ${AR_FLAGS=cru} ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 ++$as_echo_n "checking for archiver @FILE support... " >&6; } ++if test "${lt_cv_ar_at_file+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_ar_at_file=no ++ cat confdefs.h - <<_ACEOF >conftest.$ac_ext ++/* end confdefs.h. */ ++ ++int ++main () ++{ + ++ ; ++ return 0; ++} ++_ACEOF ++if ac_fn_c_try_compile "$LINENO"; then : ++ echo conftest.$ac_objext > conftest.lst ++ lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -eq 0; then ++ # Ensure the archiver fails upon bogus file names. ++ rm -f conftest.$ac_objext libconftest.a ++ { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 ++ (eval $lt_ar_try) 2>&5 ++ ac_status=$? ++ $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 ++ test $ac_status = 0; } ++ if test "$ac_status" -ne 0; then ++ lt_cv_ar_at_file=@ ++ fi ++ fi ++ rm -f conftest.* libconftest.a + ++fi ++rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext + ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 ++$as_echo "$lt_cv_ar_at_file" >&6; } + ++if test "x$lt_cv_ar_at_file" = xno; then ++ archiver_list_spec= ++else ++ archiver_list_spec=$lt_cv_ar_at_file ++fi + + + +@@ -5609,8 +5928,8 @@ esac + lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" + + # Transform an extracted symbol line into symbol name and symbol address +-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" +-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" ++lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" + + # Handle CRLF in mingw tool chain + opt_cr= +@@ -5646,6 +5965,7 @@ for ac_symprfx in "" "_"; do + else + lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" + fi ++ lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" + + # Check to see that the pipe works correctly. + pipe_works=no +@@ -5687,6 +6007,18 @@ _LT_EOF + if $GREP ' nm_test_var$' "$nlist" >/dev/null; then + if $GREP ' nm_test_func$' "$nlist" >/dev/null; then + cat <<_LT_EOF > conftest.$ac_ext ++/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ ++#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) ++/* DATA imports from DLLs on WIN32 con't be const, because runtime ++ relocations are performed -- see ld's documentation on pseudo-relocs. */ ++# define LT_DLSYM_CONST ++#elif defined(__osf__) ++/* This system does not cope well with relocations in const data. */ ++# define LT_DLSYM_CONST ++#else ++# define LT_DLSYM_CONST const ++#endif ++ + #ifdef __cplusplus + extern "C" { + #endif +@@ -5698,7 +6030,7 @@ _LT_EOF + cat <<_LT_EOF >> conftest.$ac_ext + + /* The mapping between symbol names and symbols. */ +-const struct { ++LT_DLSYM_CONST struct { + const char *name; + void *address; + } +@@ -5724,8 +6056,8 @@ static const void *lt_preloaded_setup() { + _LT_EOF + # Now try linking the two files. 
+ mv conftest.$ac_objext conftstm.$ac_objext +- lt_save_LIBS="$LIBS" +- lt_save_CFLAGS="$CFLAGS" ++ lt_globsym_save_LIBS=$LIBS ++ lt_globsym_save_CFLAGS=$CFLAGS + LIBS="conftstm.$ac_objext" + CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" + if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 +@@ -5735,8 +6067,8 @@ _LT_EOF + test $ac_status = 0; } && test -s conftest${ac_exeext}; then + pipe_works=yes + fi +- LIBS="$lt_save_LIBS" +- CFLAGS="$lt_save_CFLAGS" ++ LIBS=$lt_globsym_save_LIBS ++ CFLAGS=$lt_globsym_save_CFLAGS + else + echo "cannot find nm_test_func in $nlist" >&5 + fi +@@ -5773,6 +6105,19 @@ else + $as_echo "ok" >&6; } + fi + ++# Response file support. ++if test "$lt_cv_nm_interface" = "MS dumpbin"; then ++ nm_file_list_spec='@' ++elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then ++ nm_file_list_spec='@' ++fi ++ ++ ++ ++ ++ ++ ++ + + + +@@ -5793,6 +6138,41 @@ fi + + + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 ++$as_echo_n "checking for sysroot... " >&6; } ++ ++# Check whether --with-libtool-sysroot was given. ++if test "${with_libtool_sysroot+set}" = set; then : ++ withval=$with_libtool_sysroot; ++else ++ with_libtool_sysroot=no ++fi ++ ++ ++lt_sysroot= ++case ${with_libtool_sysroot} in #( ++ yes) ++ if test "$GCC" = yes; then ++ lt_sysroot=`$CC --print-sysroot 2>/dev/null` ++ fi ++ ;; #( ++ /*) ++ lt_sysroot=`echo "$with_libtool_sysroot" | sed -e "$sed_quote_subst"` ++ ;; #( ++ no|'') ++ ;; #( ++ *) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_libtool_sysroot}" >&5 ++$as_echo "${with_libtool_sysroot}" >&6; } ++ as_fn_error "The sysroot must be an absolute path." "$LINENO" 5 ++ ;; ++esac ++ ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 ++$as_echo "${lt_sysroot:-no}" >&6; } ++ ++ ++ + + + # Check whether --enable-libtool-lock was given. +@@ -6004,6 +6384,123 @@ esac + + need_locks="$enable_libtool_lock" + ++if test -n "$ac_tool_prefix"; then ++ # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. ++set dummy ${ac_tool_prefix}mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$MANIFEST_TOOL"; then ++ ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL ++if test -n "$MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 ++$as_echo "$MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++fi ++if test -z "$ac_cv_prog_MANIFEST_TOOL"; then ++ ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL ++ # Extract the first word of "mt", so it can be a program name with args. ++set dummy mt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... 
" >&6; } ++if test "${ac_cv_prog_ac_ct_MANIFEST_TOOL+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$ac_ct_MANIFEST_TOOL"; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. ++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++fi ++fi ++ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL ++if test -n "$ac_ct_MANIFEST_TOOL"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 ++$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ if test "x$ac_ct_MANIFEST_TOOL" = x; then ++ MANIFEST_TOOL=":" ++ else ++ case $cross_compiling:$ac_tool_warned in ++yes:) ++{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 ++$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ++ac_tool_warned=yes ;; ++esac ++ MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL ++ fi ++else ++ MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" ++fi ++ ++test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 ++$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } ++if test "${lt_cv_path_mainfest_tool+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_path_mainfest_tool=no ++ echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 ++ $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out ++ cat conftest.err >&5 ++ if $GREP 'Manifest Tool' conftest.out > /dev/null; then ++ lt_cv_path_mainfest_tool=yes ++ fi ++ rm -f conftest* ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 ++$as_echo "$lt_cv_path_mainfest_tool" >&6; } ++if test "x$lt_cv_path_mainfest_tool" != xyes; then ++ MANIFEST_TOOL=: ++fi ++ ++ ++ ++ ++ + + case $host_os in + rhapsody* | darwin*) +@@ -6570,6 +7067,8 @@ _LT_EOF + $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 + echo "$AR cru libconftest.a conftest.o" >&5 + $AR cru libconftest.a conftest.o 2>&5 ++ echo "$RANLIB libconftest.a" >&5 ++ $RANLIB libconftest.a 2>&5 + cat > conftest.c << _LT_EOF + int main() { return 0;} + _LT_EOF +@@ -7033,7 +7532,8 @@ fi + LIBTOOL_DEPS="$ltmain" + + # Always use our own libtool. +-LIBTOOL='$(SHELL) $(top_builddir)/libtool' ++LIBTOOL='$(SHELL) $(top_builddir)' ++LIBTOOL="$LIBTOOL/${host_alias}-libtool" + + + +@@ -7122,7 +7622,7 @@ aix3*) + esac + + # Global variables: +-ofile=libtool ++ofile=${host_alias}-libtool + can_build_shared=yes + + # All known linkers require a `.a' archive for static linking (except MSVC, +@@ -7420,8 +7920,6 @@ fi + lt_prog_compiler_pic= + lt_prog_compiler_static= + +-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 +-$as_echo_n "checking for $compiler option to produce PIC... " >&6; } + + if test "$GCC" = yes; then + lt_prog_compiler_wl='-Wl,' +@@ -7587,6 +8085,12 @@ $as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } + lt_prog_compiler_pic='--shared' + lt_prog_compiler_static='--static' + ;; ++ nagfor*) ++ # NAG Fortran compiler ++ lt_prog_compiler_wl='-Wl,-Wl,,' ++ lt_prog_compiler_pic='-PIC' ++ lt_prog_compiler_static='-Bstatic' ++ ;; + pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) + # Portland Group compilers (*not* the Pentium gcc compiler, + # which looks to be a dead project) +@@ -7649,7 +8153,7 @@ $as_echo_n "checking for $compiler option to produce PIC... " >&6; } + lt_prog_compiler_pic='-KPIC' + lt_prog_compiler_static='-Bstatic' + case $cc_basename in +- f77* | f90* | f95*) ++ f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) + lt_prog_compiler_wl='-Qoption ld ';; + *) + lt_prog_compiler_wl='-Wl,';; +@@ -7706,13 +8210,17 @@ case $host_os in + lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" + ;; + esac +-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 +-$as_echo "$lt_prog_compiler_pic" >&6; } +- +- +- +- + ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 ++$as_echo_n "checking for $compiler option to produce PIC... " >&6; } ++if test "${lt_cv_prog_compiler_pic+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ lt_cv_prog_compiler_pic=$lt_prog_compiler_pic ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 ++$as_echo "$lt_cv_prog_compiler_pic" >&6; } ++lt_prog_compiler_pic=$lt_cv_prog_compiler_pic + + # + # Check to make sure the PIC flag actually works. +@@ -7773,6 +8281,11 @@ fi + + + ++ ++ ++ ++ ++ + # + # Check to make sure the static flag actually works. + # +@@ -8123,7 +8636,8 @@ _LT_EOF + allow_undefined_flag=unsupported + always_export_symbols=no + enable_shared_with_static_runtimes=yes +- export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' ++ exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' + + if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' +@@ -8222,12 +8736,12 @@ _LT_EOF + whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' + hardcode_libdir_flag_spec= + hardcode_libdir_flag_spec_ld='-rpath $libdir' +- archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' ++ archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' + if test "x$supports_anon_versioning" = xyes; then + archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ + cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ + echo "local: *; };" >> $output_objdir/$libname.ver~ +- $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' ++ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' + fi + ;; + esac +@@ -8241,8 +8755,8 @@ _LT_EOF + archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' + wlarc= + else +- archive_cmds='$CC -shared $libobjs $deplibs 
$compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + fi + ;; + +@@ -8260,8 +8774,8 @@ _LT_EOF + + _LT_EOF + elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -8307,8 +8821,8 @@ _LT_EOF + + *) + if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' + else + ld_shlibs=no + fi +@@ -8438,7 +8952,13 @@ _LT_EOF + allow_undefined_flag='-berok' + # Determine the default libpath from the value encoded in an + # empty executable. +- if test x$gcc_no_link = xyes; then ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test x$gcc_no_link = xyes; then + as_fn_error "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5 + fi + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +@@ -8454,22 +8974,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. 
++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" +@@ -8481,7 +9008,13 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + else + # Determine the default libpath from the value encoded in an + # empty executable. +- if test x$gcc_no_link = xyes; then ++ if test "${lt_cv_aix_libpath+set}" = set; then ++ aix_libpath=$lt_cv_aix_libpath ++else ++ if test "${lt_cv_aix_libpath_+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test x$gcc_no_link = xyes; then + as_fn_error "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5 + fi + cat confdefs.h - <<_ACEOF >conftest.$ac_ext +@@ -8497,22 +9030,29 @@ main () + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : + +-lt_aix_libpath_sed=' +- /Import File Strings/,/^$/ { +- /^0/ { +- s/^0 *\(.*\)$/\1/ +- p +- } +- }' +-aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-# Check for a 64-bit object if we didn't find anything. +-if test -z "$aix_libpath"; then +- aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` +-fi ++ lt_aix_libpath_sed=' ++ /Import File Strings/,/^$/ { ++ /^0/ { ++ s/^0 *\([^ ]*\) *$/\1/ ++ p ++ } ++ }' ++ lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ # Check for a 64-bit object if we didn't find anything. ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` ++ fi + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +-if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ++ if test -z "$lt_cv_aix_libpath_"; then ++ lt_cv_aix_libpath_="/usr/lib:/lib" ++ fi ++ ++fi ++ ++ aix_libpath=$lt_cv_aix_libpath_ ++fi + + hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" + # Warning - without using the other run time loading flags, +@@ -8557,20 +9097,63 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + # Microsoft Visual C++. + # hardcode_libdir_flag_spec is actually meaningless, as there is + # no search path for DLLs. +- hardcode_libdir_flag_spec=' ' +- allow_undefined_flag=unsupported +- # Tell ltmain to make .lib files, not .a files. +- libext=lib +- # Tell ltmain to make .dll files, not .so files. +- shrext_cmds=".dll" +- # FIXME: Setting linknames here is a bad hack. +- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' +- # The linker will automatically build a .lib file if we build a DLL. +- old_archive_from_new_cmds='true' +- # FIXME: Should let the user specify the lib program. 
+- old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' +- fix_srcfile_path='`cygpath -w "$srcfile"`' +- enable_shared_with_static_runtimes=yes ++ case $cc_basename in ++ cl*) ++ # Native MSVC ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ always_export_symbols=yes ++ file_list_spec='@' ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' ++ archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then ++ sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; ++ else ++ sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; ++ fi~ ++ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ ++ linknames=' ++ # The linker will not automatically build a static lib if we build a DLL. ++ # _LT_TAGVAR(old_archive_from_new_cmds, )='true' ++ enable_shared_with_static_runtimes=yes ++ export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' ++ # Don't use ranlib ++ old_postinstall_cmds='chmod 644 $oldlib' ++ postlink_cmds='lt_outputfile="@OUTPUT@"~ ++ lt_tool_outputfile="@TOOL_OUTPUT@"~ ++ case $lt_outputfile in ++ *.exe|*.EXE) ;; ++ *) ++ lt_outputfile="$lt_outputfile.exe" ++ lt_tool_outputfile="$lt_tool_outputfile.exe" ++ ;; ++ esac~ ++ if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then ++ $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; ++ $RM "$lt_outputfile.manifest"; ++ fi' ++ ;; ++ *) ++ # Assume MSVC wrapper ++ hardcode_libdir_flag_spec=' ' ++ allow_undefined_flag=unsupported ++ # Tell ltmain to make .lib files, not .a files. ++ libext=lib ++ # Tell ltmain to make .dll files, not .so files. ++ shrext_cmds=".dll" ++ # FIXME: Setting linknames here is a bad hack. ++ archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' ++ # The linker will automatically build a .lib file if we build a DLL. ++ old_archive_from_new_cmds='true' ++ # FIXME: Should let the user specify the lib program. ++ old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' ++ enable_shared_with_static_runtimes=yes ++ ;; ++ esac + ;; + + darwin* | rhapsody*) +@@ -8631,7 +9214,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
+ freebsd* | dragonfly*) +- archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' + hardcode_libdir_flag_spec='-R$libdir' + hardcode_direct=yes + hardcode_shlibpath_var=no +@@ -8639,7 +9222,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux9*) + if test "$GCC" = yes; then +- archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' ++ archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + else + archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' + fi +@@ -8655,7 +9238,7 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + + hpux10*) + if test "$GCC" = yes && test "$with_gnu_ld" = no; then +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + else + archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' + fi +@@ -8679,10 +9262,10 @@ if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi + archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + ;; + ia64*) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' + ;; + *) +- archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' + ;; + esac + else +@@ -8761,26 +9344,39 @@ fi + + irix5* | irix6* | nonstopux*) + if test "$GCC" = yes; then +- archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + # Try to use the -exported_symbol ld option, if it does not + # work, assume that -exports_file does not work either and + # implicitly export all symbols. +- save_LDFLAGS="$LDFLAGS" +- LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" +- if test x$gcc_no_link = xyes; then ++ # This should be the same for all languages, so no per-tag cache variable. ++ { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 ++$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } ++if test "${lt_cv_irix_exported_symbol+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ save_LDFLAGS="$LDFLAGS" ++ LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" ++ if test x$gcc_no_link = xyes; then + as_fn_error "Link tests are not allowed after GCC_NO_EXECUTABLES." "$LINENO" 5 + fi + cat confdefs.h - <<_ACEOF >conftest.$ac_ext + /* end confdefs.h. */ +-int foo(void) {} ++int foo (void) { return 0; } + _ACEOF + if ac_fn_c_try_link "$LINENO"; then : +- archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' +- ++ lt_cv_irix_exported_symbol=yes ++else ++ lt_cv_irix_exported_symbol=no + fi + rm -f core conftest.err conftest.$ac_objext \ + conftest$ac_exeext conftest.$ac_ext +- LDFLAGS="$save_LDFLAGS" ++ LDFLAGS="$save_LDFLAGS" ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 ++$as_echo "$lt_cv_irix_exported_symbol" >&6; } ++ if test "$lt_cv_irix_exported_symbol" = yes; then ++ archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' ++ fi + else + archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' + archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' +@@ -8865,7 +9461,7 @@ rm -f core conftest.err conftest.$ac_objext \ + osf4* | osf5*) # as osf3* with the addition of -msym flag + if test "$GCC" = yes; then + allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' +- archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ++ archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' + hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' + else + allow_undefined_flag=' -expect_unresolved \*' +@@ -8884,9 +9480,9 @@ rm -f core conftest.err conftest.$ac_objext \ + no_undefined_flag=' -z defs' + if test "$GCC" = yes; then + wlarc='${wl}' +- archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ++ archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' + archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ +- $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ++ $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname 
-o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' + else + case `$CC -V 2>&1` in + *"Compilers 5.0"*) +@@ -9462,8 +10058,9 @@ cygwin* | mingw* | pw32* | cegcc*) + need_version=no + need_lib_prefix=no + +- case $GCC,$host_os in +- yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) ++ case $GCC,$cc_basename in ++ yes,*) ++ # gcc + library_names_spec='$libname.dll.a' + # DLL is installed to $(libdir)/../bin by postinstall_cmds + postinstall_cmds='base_file=`basename \${file}`~ +@@ -9496,13 +10093,71 @@ cygwin* | mingw* | pw32* | cegcc*) + library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' + ;; + esac ++ dynamic_linker='Win32 ld.exe' ++ ;; ++ ++ *,cl*) ++ # Native MSVC ++ libname_spec='$name' ++ soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ++ library_names_spec='${libname}.dll.lib' ++ ++ case $build_os in ++ mingw*) ++ sys_lib_search_path_spec= ++ lt_save_ifs=$IFS ++ IFS=';' ++ for lt_path in $LIB ++ do ++ IFS=$lt_save_ifs ++ # Let DOS variable expansion print the short 8.3 style file name. ++ lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` ++ sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" ++ done ++ IFS=$lt_save_ifs ++ # Convert to MSYS style. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ++ ;; ++ cygwin*) ++ # Convert to unix form, then to dos form, then back to unix form ++ # but this time dos style (no spaces!) so that the unix form looks ++ # like /cygdrive/c/PROGRA~1:/cygdr... ++ sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` ++ sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` ++ sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ ;; ++ *) ++ sys_lib_search_path_spec="$LIB" ++ if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then ++ # It is most probably a Windows format PATH. ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` ++ else ++ sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ++ fi ++ # FIXME: find the short name or the path components, as spaces are ++ # common. (e.g. "Program Files" -> "PROGRA~1") ++ ;; ++ esac ++ ++ # DLL is installed to $(libdir)/../bin by postinstall_cmds ++ postinstall_cmds='base_file=`basename \${file}`~ ++ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ ++ dldir=$destdir/`dirname \$dlpath`~ ++ test -d \$dldir || mkdir -p \$dldir~ ++ $install_prog $dir/$dlname \$dldir/$dlname' ++ postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ ++ dlpath=$dir/\$dldll~ ++ $RM \$dlpath' ++ shlibpath_overrides_runpath=yes ++ dynamic_linker='Win32 link.exe' + ;; + + *) ++ # Assume MSVC wrapper + library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ++ dynamic_linker='Win32 ld.exe' + ;; + esac +- dynamic_linker='Win32 ld.exe' + # FIXME: first we should search . 
and the directory the executable is in + shlibpath_var=PATH + ;; +@@ -9594,7 +10249,7 @@ haiku*) + soname_spec='${libname}${release}${shared_ext}$major' + shlibpath_var=LIBRARY_PATH + shlibpath_overrides_runpath=yes +- sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/beos/system/lib' ++ sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' + hardcode_into_libs=yes + ;; + +@@ -10452,10 +11107,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -10558,10 +11213,10 @@ else + /* When -fvisbility=hidden is used, assume the code has been annotated + correspondingly for the symbols needed. */ + #if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) +-void fnord () __attribute__((visibility("default"))); ++int fnord () __attribute__((visibility("default"))); + #endif + +-void fnord () { int i=42; } ++int fnord () { return 42; } + int main () + { + void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); +@@ -11992,13 +12647,20 @@ exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' + lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' + lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' + lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' ++lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' + reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' + reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' + OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' + deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' + file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' ++file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' ++want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' ++DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' ++sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' + AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' + AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' ++archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' + STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' + RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' + old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' +@@ -12013,14 +12675,17 @@ lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$de + lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED 
"$delay_single_quote_subst"`' ++nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' ++lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' + objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' + MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' +-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' ++lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' + lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' + lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' + need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' ++MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' + DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' + NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' + LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' +@@ -12053,12 +12718,12 @@ hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_q + hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' + inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' + link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' +-fix_srcfile_path='`$ECHO "$fix_srcfile_path" | $SED "$delay_single_quote_subst"`' + always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' + export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' + exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' + include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' + prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' ++postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' + file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' + variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' + need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' +@@ -12113,8 +12778,13 @@ reload_flag \ + OBJDUMP \ + deplibs_check_method \ + file_magic_cmd \ ++file_magic_glob \ ++want_nocaseglob \ ++DLLTOOL \ ++sharedlib_from_linklib_cmd \ + AR \ + AR_FLAGS \ ++archiver_list_spec \ + STRIP \ + RANLIB \ + CC \ +@@ -12124,12 +12794,14 @@ lt_cv_sys_global_symbol_pipe \ + lt_cv_sys_global_symbol_to_cdecl \ + lt_cv_sys_global_symbol_to_c_name_address \ + lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ ++nm_file_list_spec \ + lt_prog_compiler_no_builtin_flag \ +-lt_prog_compiler_wl \ + lt_prog_compiler_pic \ ++lt_prog_compiler_wl \ + lt_prog_compiler_static \ + lt_cv_prog_compiler_c_o \ + need_locks \ ++MANIFEST_TOOL \ + DSYMUTIL \ + NMEDIT \ + LIPO \ +@@ -12145,7 +12817,6 @@ no_undefined_flag \ + hardcode_libdir_flag_spec \ + hardcode_libdir_flag_spec_ld \ + hardcode_libdir_separator \ +-fix_srcfile_path \ + exclude_expsyms \ + include_expsyms \ + file_list_spec \ +@@ -12181,6 +12852,7 @@ module_cmds \ + module_expsym_cmds \ + export_symbols_cmds \ + prelink_cmds \ ++postlink_cmds \ + postinstall_cmds \ + postuninstall_cmds \ + 
finish_cmds \ +@@ -12770,7 +13442,8 @@ $as_echo X"$file" | + # NOTE: Changes made to this file will be lost: look at ltmain.sh. + # + # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, +-# 2006, 2007, 2008, 2009 Free Software Foundation, Inc. ++# 2006, 2007, 2008, 2009, 2010 Free Software Foundation, ++# Inc. + # Written by Gordon Matzigkeit, 1996 + # + # This file is part of GNU Libtool. +@@ -12873,19 +13546,42 @@ SP2NL=$lt_lt_SP2NL + # turn newlines into spaces. + NL2SP=$lt_lt_NL2SP + ++# convert \$build file names to \$host format. ++to_host_file_cmd=$lt_cv_to_host_file_cmd ++ ++# convert \$build files to toolchain format. ++to_tool_file_cmd=$lt_cv_to_tool_file_cmd ++ + # An object symbol dumper. + OBJDUMP=$lt_OBJDUMP + + # Method to check whether dependent libraries are shared objects. + deplibs_check_method=$lt_deplibs_check_method + +-# Command to use when deplibs_check_method == "file_magic". ++# Command to use when deplibs_check_method = "file_magic". + file_magic_cmd=$lt_file_magic_cmd + ++# How to find potential files when deplibs_check_method = "file_magic". ++file_magic_glob=$lt_file_magic_glob ++ ++# Find potential files using nocaseglob when deplibs_check_method = "file_magic". ++want_nocaseglob=$lt_want_nocaseglob ++ ++# DLL creation program. ++DLLTOOL=$lt_DLLTOOL ++ ++# Command to associate shared and link libraries. ++sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd ++ + # The archiver. + AR=$lt_AR ++ ++# Flags to create an archive. + AR_FLAGS=$lt_AR_FLAGS + ++# How to feed a file listing to the archiver. ++archiver_list_spec=$lt_archiver_list_spec ++ + # A symbol stripping program. + STRIP=$lt_STRIP + +@@ -12915,6 +13611,12 @@ global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address + # Transform the output of nm in a C name address pair when lib prefix is needed. + global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix + ++# Specify filename containing input files for \$NM. ++nm_file_list_spec=$lt_nm_file_list_spec ++ ++# The root where to search for dependent libraries,and in which our libraries should be installed. ++lt_sysroot=$lt_sysroot ++ + # The name of the directory that contains temporary libtool files. + objdir=$objdir + +@@ -12924,6 +13626,9 @@ MAGIC_CMD=$MAGIC_CMD + # Must we lock files when doing compilation? + need_locks=$lt_need_locks + ++# Manifest tool. ++MANIFEST_TOOL=$lt_MANIFEST_TOOL ++ + # Tool to manipulate archived DWARF debug symbol files on Mac OS X. + DSYMUTIL=$lt_DSYMUTIL + +@@ -13038,12 +13743,12 @@ with_gcc=$GCC + # Compiler flag to turn off builtin functions. + no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag + +-# How to pass a linker flag through the compiler. +-wl=$lt_lt_prog_compiler_wl +- + # Additional compiler flags for building library objects. + pic_flag=$lt_lt_prog_compiler_pic + ++# How to pass a linker flag through the compiler. ++wl=$lt_lt_prog_compiler_wl ++ + # Compiler flag to prevent dynamic linking. + link_static_flag=$lt_lt_prog_compiler_static + +@@ -13130,9 +13835,6 @@ inherit_rpath=$inherit_rpath + # Whether libtool must link a program against all its dependency libraries. + link_all_deplibs=$link_all_deplibs + +-# Fix the shell variable \$srcfile for the compiler. +-fix_srcfile_path=$lt_fix_srcfile_path +- + # Set to "yes" if exported symbols are required. 
+ always_export_symbols=$always_export_symbols + +@@ -13148,6 +13850,9 @@ include_expsyms=$lt_include_expsyms + # Commands necessary for linking programs (against libraries) with templates. + prelink_cmds=$lt_prelink_cmds + ++# Commands necessary for finishing linking programs. ++postlink_cmds=$lt_postlink_cmds ++ + # Specify filename containing input files. + file_list_spec=$lt_file_list_spec + +@@ -13180,210 +13885,169 @@ ltmain="$ac_aux_dir/ltmain.sh" + # if finds mixed CR/LF and LF-only lines. Since sed operates in + # text mode, it properly converts lines to CR/LF. This bash problem + # is reportedly fixed, but why not run on old versions too? +- sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- case $xsi_shell in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result="${1##*/}" +-} +- +-# func_dirname_and_basename file append nondir_replacement +-# perform func_basename and func_dirname in a single function +-# call: +-# dirname: Compute the dirname of FILE. If nonempty, +-# add APPEND to the result, otherwise set result +-# to NONDIR_REPLACEMENT. +-# value returned in "$func_dirname_result" +-# basename: Compute filename of FILE. +-# value retuned in "$func_basename_result" +-# Implementation must be kept synchronized with func_dirname +-# and func_basename. For efficiency, we do not delegate to +-# those functions but instead duplicate the functionality here. +-func_dirname_and_basename () +-{ +- case ${1} in +- */*) func_dirname_result="${1%/*}${2}" ;; +- * ) func_dirname_result="${3}" ;; +- esac +- func_basename_result="${1##*/}" +-} +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-func_stripname () +-{ +- # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are +- # positional parameters, so assign one to ordinary parameter first. +- func_stripname_result=${3} +- func_stripname_result=${func_stripname_result#"${1}"} +- func_stripname_result=${func_stripname_result%"${2}"} +-} +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=${1%%=*} +- func_opt_split_arg=${1#*=} +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- case ${1} in +- *.lo) func_lo2o_result=${1%.lo}.${objext} ;; +- *) func_lo2o_result=${1} ;; +- esac +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=${1%.*}.lo +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=$(( $* )) +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=${#1} +-} +- +-_LT_EOF +- ;; +- *) # Bourne compatible functions. +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_dirname file append nondir_replacement +-# Compute the dirname of FILE. If nonempty, add APPEND to the result, +-# otherwise set result to NONDIR_REPLACEMENT. +-func_dirname () +-{ +- # Extract subdirectory from the argument. 
+- func_dirname_result=`$ECHO "${1}" | $SED "$dirname"` +- if test "X$func_dirname_result" = "X${1}"; then +- func_dirname_result="${3}" +- else +- func_dirname_result="$func_dirname_result${2}" +- fi +-} +- +-# func_basename file +-func_basename () +-{ +- func_basename_result=`$ECHO "${1}" | $SED "$basename"` +-} +- +- +-# func_stripname prefix suffix name +-# strip PREFIX and SUFFIX off of NAME. +-# PREFIX and SUFFIX must not contain globbing or regex special +-# characters, hashes, percent signs, but SUFFIX may contain a leading +-# dot (in which case that matches only a dot). +-# func_strip_suffix prefix name +-func_stripname () +-{ +- case ${2} in +- .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; +- *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; +- esac +-} +- +-# sed scripts: +-my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' +-my_sed_long_arg='1s/^-[^=]*=//' +- +-# func_opt_split +-func_opt_split () +-{ +- func_opt_split_opt=`$ECHO "${1}" | $SED "$my_sed_long_opt"` +- func_opt_split_arg=`$ECHO "${1}" | $SED "$my_sed_long_arg"` +-} +- +-# func_lo2o object +-func_lo2o () +-{ +- func_lo2o_result=`$ECHO "${1}" | $SED "$lo2o"` +-} +- +-# func_xform libobj-or-source +-func_xform () +-{ +- func_xform_result=`$ECHO "${1}" | $SED 's/\.[^.]*$/.lo/'` +-} +- +-# func_arith arithmetic-term... +-func_arith () +-{ +- func_arith_result=`expr "$@"` +-} +- +-# func_len string +-# STRING may not start with a hyphen. +-func_len () +-{ +- func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` +-} +- +-_LT_EOF +-esac +- +-case $lt_shell_append in +- yes) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1+=\$2" +-} +-_LT_EOF +- ;; +- *) +- cat << \_LT_EOF >> "$cfgfile" +- +-# func_append var value +-# Append VALUE to the end of shell variable VAR. +-func_append () +-{ +- eval "$1=\$$1\$2" +-} +- +-_LT_EOF +- ;; +- esac +- +- +- sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ +- || (rm -f "$cfgfile"; exit 1) +- +- mv -f "$cfgfile" "$ofile" || ++ sed '$q' "$ltmain" >> "$cfgfile" \ ++ || (rm -f "$cfgfile"; exit 1) ++ ++ if test x"$xsi_shell" = xyes; then ++ sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ ++func_dirname ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_basename ()$/,/^} # func_basename /c\ ++func_basename ()\ ++{\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ ++func_dirname_and_basename ()\ ++{\ ++\ case ${1} in\ ++\ */*) func_dirname_result="${1%/*}${2}" ;;\ ++\ * ) func_dirname_result="${3}" ;;\ ++\ esac\ ++\ func_basename_result="${1##*/}"\ ++} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ ++func_stripname ()\ ++{\ ++\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ ++\ # positional parameters, so assign one to ordinary parameter first.\ ++\ func_stripname_result=${3}\ ++\ func_stripname_result=${func_stripname_result#"${1}"}\ ++\ func_stripname_result=${func_stripname_result%"${2}"}\ ++} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ ++func_split_long_opt ()\ ++{\ ++\ func_split_long_opt_name=${1%%=*}\ ++\ func_split_long_opt_arg=${1#*=}\ ++} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ ++func_split_short_opt ()\ ++{\ ++\ func_split_short_opt_arg=${1#??}\ ++\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ ++} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ ++func_lo2o ()\ ++{\ ++\ case ${1} in\ ++\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ ++\ *) func_lo2o_result=${1} ;;\ ++\ esac\ ++} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_xform ()$/,/^} # func_xform /c\ ++func_xform ()\ ++{\ ++ func_xform_result=${1%.*}.lo\ ++} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_arith ()$/,/^} # func_arith /c\ ++func_arith ()\ ++{\ ++ func_arith_result=$(( $* ))\ ++} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_len ()$/,/^} # func_len /c\ ++func_len ()\ ++{\ ++ func_len_result=${#1}\ ++} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? 
|| _lt_function_replace_fail=: ++ ++fi ++ ++if test x"$lt_shell_append" = xyes; then ++ sed -e '/^func_append ()$/,/^} # func_append /c\ ++func_append ()\ ++{\ ++ eval "${1}+=\\${2}"\ ++} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ ++func_append_quoted ()\ ++{\ ++\ func_quote_for_eval "${2}"\ ++\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ ++} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++test 0 -eq $? || _lt_function_replace_fail=: ++ ++ ++ # Save a `func_append' function call where possible by direct use of '+=' ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++else ++ # Save a `func_append' function call even when '+=' is not available ++ sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ ++ && mv -f "$cfgfile.tmp" "$cfgfile" \ ++ || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") ++ test 0 -eq $? || _lt_function_replace_fail=: ++fi ++ ++if test x"$_lt_function_replace_fail" = x":"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 ++$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} ++fi ++ ++ ++ mv -f "$cfgfile" "$ofile" || + (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") + chmod +x "$ofile" + +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0008-Add-the-armv5e-architecture-to-binutils.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0008-Add-the-armv5e-architecture-to-binutils.patch new file mode 100644 index 000000000..449225a36 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0008-Add-the-armv5e-architecture-to-binutils.patch @@ -0,0 +1,35 @@ +From 9c313e8a15a7e7c5c0f2906e3218ed211563ac2a Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 2 Mar 2015 01:37:10 +0000 +Subject: [PATCH 08/15] Add the armv5e architecture to binutils + +Binutils has a comment that indicates it is supposed to match gcc for +all of the support "-march=" settings, but it was lacking the armv5e setting. +This was a simple way to add it, as thumb instructions shouldn't be generated +by the compiler anyway. + +Upstream-Status: Denied +Upstream maintainer indicated that we should not be using armv5e, even +though it is a legal archicture defined by our gcc. 
+ +Signed-off-by: Mark Hatle +Signed-off-by: Khem Raj +--- + gas/config/tc-arm.c | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/gas/config/tc-arm.c b/gas/config/tc-arm.c +index 60bda51070..eb6d0afd6e 100644 +--- a/gas/config/tc-arm.c ++++ b/gas/config/tc-arm.c +@@ -25633,6 +25633,7 @@ static const struct arm_arch_option_table arm_archs[] = + ARM_ARCH_OPT ("armv4t", ARM_ARCH_V4T, FPU_ARCH_FPA), + ARM_ARCH_OPT ("armv4txm", ARM_ARCH_V4TxM, FPU_ARCH_FPA), + ARM_ARCH_OPT ("armv5", ARM_ARCH_V5, FPU_ARCH_VFP), ++ ARM_ARCH_OPT ("armv5e", ARM_ARCH_V5TE, FPU_ARCH_VFP), + ARM_ARCH_OPT ("armv5t", ARM_ARCH_V5T, FPU_ARCH_VFP), + ARM_ARCH_OPT ("armv5txm", ARM_ARCH_V5TxM, FPU_ARCH_VFP), + ARM_ARCH_OPT ("armv5te", ARM_ARCH_V5TE, FPU_ARCH_VFP), +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0008-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0008-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch deleted file mode 100644 index af524ced2..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0008-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch +++ /dev/null @@ -1,35 +0,0 @@ -From b2d277937d11f1ce1d581f48ff74a25822a052da Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 2 Mar 2015 01:39:01 +0000 -Subject: [PATCH 08/13] don't let the distro compiler point to the wrong - installation location - -Thanks to RP for helping find the source code causing the issue. - -2010/08/13 -Nitin A Kamble - -Upstream-Status: Inappropriate [embedded specific] - -Signed-off-by: Khem Raj ---- - libiberty/Makefile.in | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) - -diff --git a/libiberty/Makefile.in b/libiberty/Makefile.in -index c7a4568..f766b72 100644 ---- a/libiberty/Makefile.in -+++ b/libiberty/Makefile.in -@@ -364,7 +364,8 @@ install-strip: install - # multilib-specific flags, it's overridden by FLAGS_TO_PASS from the - # default multilib, so we have to take CFLAGS into account as well, - # since it will be passed the multilib flags. --MULTIOSDIR = `$(CC) $(CFLAGS) -print-multi-os-directory` -+#MULTIOSDIR = `$(CC) $(CFLAGS) -print-multi-os-directory` -+MULTIOSDIR = "" - install_to_libdir: all - if test -n "${target_header_dir}"; then \ - ${mkinstalldirs} $(DESTDIR)$(libdir)/$(MULTIOSDIR); \ --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0009-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0009-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch new file mode 100644 index 000000000..1c4059392 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0009-don-t-let-the-distro-compiler-point-to-the-wrong-ins.patch @@ -0,0 +1,35 @@ +From 2be9b44a4a308e3ea42a027c4c3211170f10c9c0 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 2 Mar 2015 01:39:01 +0000 +Subject: [PATCH 09/15] don't let the distro compiler point to the wrong + installation location + +Thanks to RP for helping find the source code causing the issue. 
+ +2010/08/13 +Nitin A Kamble + +Upstream-Status: Inappropriate [embedded specific] + +Signed-off-by: Khem Raj +--- + libiberty/Makefile.in | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) + +diff --git a/libiberty/Makefile.in b/libiberty/Makefile.in +index 0ff9e45e45..42c32642b2 100644 +--- a/libiberty/Makefile.in ++++ b/libiberty/Makefile.in +@@ -366,7 +366,8 @@ install-strip: install + # multilib-specific flags, it's overridden by FLAGS_TO_PASS from the + # default multilib, so we have to take CFLAGS into account as well, + # since it will be passed the multilib flags. +-MULTIOSDIR = `$(CC) $(CFLAGS) -print-multi-os-directory` ++#MULTIOSDIR = `$(CC) $(CFLAGS) -print-multi-os-directory` ++MULTIOSDIR = "" + install_to_libdir: all + if test -n "${target_header_dir}"; then \ + ${mkinstalldirs} $(DESTDIR)$(libdir)/$(MULTIOSDIR); \ +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0009-warn-for-uses-of-system-directories-when-cross-linki.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0009-warn-for-uses-of-system-directories-when-cross-linki.patch deleted file mode 100644 index 3c83e69e4..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0009-warn-for-uses-of-system-directories-when-cross-linki.patch +++ /dev/null @@ -1,273 +0,0 @@ -From 7ab8e318659eb5d9adc758c78d084a95560b93fd Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 15 Jan 2016 06:31:09 +0000 -Subject: [PATCH 09/13] warn for uses of system directories when cross linking - -2008-07-02 Joseph Myers - - ld/ - * ld.h (args_type): Add error_poison_system_directories. - * ld.texinfo (--error-poison-system-directories): Document. - * ldfile.c (ldfile_add_library_path): Check - command_line.error_poison_system_directories. - * ldmain.c (main): Initialize - command_line.error_poison_system_directories. - * lexsup.c (enum option_values): Add - OPTION_ERROR_POISON_SYSTEM_DIRECTORIES. - (ld_options): Add --error-poison-system-directories. - (parse_args): Handle new option. - -2007-06-13 Joseph Myers - - ld/ - * config.in: Regenerate. - * ld.h (args_type): Add poison_system_directories. - * ld.texinfo (--no-poison-system-directories): Document. - * ldfile.c (ldfile_add_library_path): Check - command_line.poison_system_directories. - * ldmain.c (main): Initialize - command_line.poison_system_directories. - * lexsup.c (enum option_values): Add - OPTION_NO_POISON_SYSTEM_DIRECTORIES. - (ld_options): Add --no-poison-system-directories. - (parse_args): Handle new option. - -2007-04-20 Joseph Myers - - Merge from Sourcery G++ binutils 2.17: - - 2007-03-20 Joseph Myers - Based on patch by Mark Hatle . - ld/ - * configure.in (--enable-poison-system-directories): New option. - * configure, config.in: Regenerate. - * ldfile.c (ldfile_add_library_path): If - ENABLE_POISON_SYSTEM_DIRECTORIES defined, warn for use of /lib, - /usr/lib, /usr/local/lib or /usr/X11R6/lib. - -Signed-off-by: Mark Hatle -Signed-off-by: Scott Garman -Signed-off-by: Khem Raj ---- -Upstream-Status: Pending - - ld/config.in | 3 +++ - ld/configure | 16 ++++++++++++++++ - ld/configure.ac | 10 ++++++++++ - ld/ld.h | 8 ++++++++ - ld/ld.texinfo | 12 ++++++++++++ - ld/ldfile.c | 17 +++++++++++++++++ - ld/ldlex.h | 2 ++ - ld/ldmain.c | 2 ++ - ld/lexsup.c | 16 ++++++++++++++++ - 9 files changed, 86 insertions(+) - -diff --git a/ld/config.in b/ld/config.in -index 2c6d698..d3cb7e8 100644 ---- a/ld/config.in -+++ b/ld/config.in -@@ -17,6 +17,9 @@ - language is requested. 
*/ - #undef ENABLE_NLS - -+/* Define to warn for use of native system library directories */ -+#undef ENABLE_POISON_SYSTEM_DIRECTORIES -+ - /* Additional extension a shared object might have. */ - #undef EXTRA_SHLIB_EXTENSION - -diff --git a/ld/configure b/ld/configure -index 3df1149..0601c98 100755 ---- a/ld/configure -+++ b/ld/configure -@@ -792,6 +792,7 @@ with_lib_path - enable_targets - enable_64_bit_bfd - with_sysroot -+enable_poison_system_directories - enable_gold - enable_got - enable_compressed_debug_sections -@@ -1449,6 +1450,8 @@ Optional Features: - --disable-largefile omit support for large files - --enable-targets alternative target configurations - --enable-64-bit-bfd 64-bit support (on hosts with narrower word sizes) -+ --enable-poison-system-directories -+ warn for use of native system library directories - --enable-gold[=ARG] build gold [ARG={default,yes,no}] - --enable-got= GOT handling scheme (target, single, negative, - multigot) -@@ -16313,6 +16316,19 @@ fi - - - -+# Check whether --enable-poison-system-directories was given. -+if test "${enable_poison_system_directories+set}" = set; then : -+ enableval=$enable_poison_system_directories; -+else -+ enable_poison_system_directories=no -+fi -+ -+if test "x${enable_poison_system_directories}" = "xyes"; then -+ -+$as_echo "#define ENABLE_POISON_SYSTEM_DIRECTORIES 1" >>confdefs.h -+ -+fi -+ - # Check whether --enable-gold was given. - if test "${enable_gold+set}" = set; then : - enableval=$enable_gold; case "${enableval}" in -diff --git a/ld/configure.ac b/ld/configure.ac -index d17281f..9fb3db9 100644 ---- a/ld/configure.ac -+++ b/ld/configure.ac -@@ -95,6 +95,16 @@ AC_SUBST(use_sysroot) - AC_SUBST(TARGET_SYSTEM_ROOT) - AC_SUBST(TARGET_SYSTEM_ROOT_DEFINE) - -+AC_ARG_ENABLE([poison-system-directories], -+ AS_HELP_STRING([--enable-poison-system-directories], -+ [warn for use of native system library directories]),, -+ [enable_poison_system_directories=no]) -+if test "x${enable_poison_system_directories}" = "xyes"; then -+ AC_DEFINE([ENABLE_POISON_SYSTEM_DIRECTORIES], -+ [1], -+ [Define to warn for use of native system library directories]) -+fi -+ - dnl Use --enable-gold to decide if this linker should be the default. - dnl "install_as_default" is set to false if gold is the default linker. - dnl "installed_linker" is the installed BFD linker name. -diff --git a/ld/ld.h b/ld/ld.h -index 85a48ad..a8fdddf 100644 ---- a/ld/ld.h -+++ b/ld/ld.h -@@ -169,6 +169,14 @@ typedef struct - /* If set, display the target memory usage (per memory region). */ - bfd_boolean print_memory_usage; - -+ /* If TRUE (the default) warn for uses of system directories when -+ cross linking. */ -+ bfd_boolean poison_system_directories; -+ -+ /* If TRUE (default FALSE) give an error for uses of system -+ directories when cross linking instead of a warning. */ -+ bfd_boolean error_poison_system_directories; -+ - /* Big or little endian as set on command line. */ - enum endian_enum endian; - -diff --git a/ld/ld.texinfo b/ld/ld.texinfo -index bc16764..838c75d 100644 ---- a/ld/ld.texinfo -+++ b/ld/ld.texinfo -@@ -2357,6 +2357,18 @@ string identifying the original linked file does not change. - - Passing @code{none} for @var{style} disables the setting from any - @code{--build-id} options earlier on the command line. -+ -+@kindex --no-poison-system-directories -+@item --no-poison-system-directories -+Do not warn for @option{-L} options using system directories such as -+@file{/usr/lib} when cross linking. 
This option is intended for use -+in chroot environments when such directories contain the correct -+libraries for the target system rather than the host. -+ -+@kindex --error-poison-system-directories -+@item --error-poison-system-directories -+Give an error instead of a warning for @option{-L} options using -+system directories when cross linking. - @end table - - @c man end -diff --git a/ld/ldfile.c b/ld/ldfile.c -index e397737..013727b 100644 ---- a/ld/ldfile.c -+++ b/ld/ldfile.c -@@ -114,6 +114,23 @@ ldfile_add_library_path (const char *name, bfd_boolean cmdline) - new_dirs->name = concat (ld_sysroot, name + 1, (const char *) NULL); - else - new_dirs->name = xstrdup (name); -+ -+#ifdef ENABLE_POISON_SYSTEM_DIRECTORIES -+ if (command_line.poison_system_directories -+ && ((!strncmp (name, "/lib", 4)) -+ || (!strncmp (name, "/usr/lib", 8)) -+ || (!strncmp (name, "/usr/local/lib", 14)) -+ || (!strncmp (name, "/usr/X11R6/lib", 14)))) -+ { -+ if (command_line.error_poison_system_directories) -+ einfo (_("%X%P: error: library search path \"%s\" is unsafe for " -+ "cross-compilation\n"), name); -+ else -+ einfo (_("%P: warning: library search path \"%s\" is unsafe for " -+ "cross-compilation\n"), name); -+ } -+#endif -+ - } - - /* Try to open a BFD for a lang_input_statement. */ -diff --git a/ld/ldlex.h b/ld/ldlex.h -index cf943e4..12cfcaa 100644 ---- a/ld/ldlex.h -+++ b/ld/ldlex.h -@@ -144,6 +144,8 @@ enum option_values - OPTION_PRINT_MEMORY_USAGE, - OPTION_REQUIRE_DEFINED_SYMBOL, - OPTION_ORPHAN_HANDLING, -+ OPTION_NO_POISON_SYSTEM_DIRECTORIES, -+ OPTION_ERROR_POISON_SYSTEM_DIRECTORIES, - }; - - /* The initial parser states. */ -diff --git a/ld/ldmain.c b/ld/ldmain.c -index e7a8dff..3ccf2af 100644 ---- a/ld/ldmain.c -+++ b/ld/ldmain.c -@@ -257,6 +257,8 @@ main (int argc, char **argv) - command_line.warn_mismatch = TRUE; - command_line.warn_search_mismatch = TRUE; - command_line.check_section_addresses = -1; -+ command_line.poison_system_directories = TRUE; -+ command_line.error_poison_system_directories = FALSE; - - /* We initialize DEMANGLING based on the environment variable - COLLECT_NO_DEMANGLE. 
The gcc collect2 program will demangle the -diff --git a/ld/lexsup.c b/ld/lexsup.c -index 6d28e91..5c9da57 100644 ---- a/ld/lexsup.c -+++ b/ld/lexsup.c -@@ -530,6 +530,14 @@ static const struct ld_option ld_options[] = - { {"orphan-handling", required_argument, NULL, OPTION_ORPHAN_HANDLING}, - '\0', N_("=MODE"), N_("Control how orphan sections are handled."), - TWO_DASHES }, -+ { {"no-poison-system-directories", no_argument, NULL, -+ OPTION_NO_POISON_SYSTEM_DIRECTORIES}, -+ '\0', NULL, N_("Do not warn for -L options using system directories"), -+ TWO_DASHES }, -+ { {"error-poison-system-directories", no_argument, NULL, -+ + OPTION_ERROR_POISON_SYSTEM_DIRECTORIES}, -+ '\0', NULL, N_("Give an error for -L options using system directories"), -+ TWO_DASHES }, - }; - - #define OPTION_COUNT ARRAY_SIZE (ld_options) -@@ -1550,6 +1558,14 @@ parse_args (unsigned argc, char **argv) - einfo (_("%P%F: invalid argument to option" - " \"--orphan-handling\"\n")); - break; -+ -+ case OPTION_NO_POISON_SYSTEM_DIRECTORIES: -+ command_line.poison_system_directories = FALSE; -+ break; -+ -+ case OPTION_ERROR_POISON_SYSTEM_DIRECTORIES: -+ command_line.error_poison_system_directories = TRUE; -+ break; - } - } - --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0010-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0010-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch deleted file mode 100644 index e2f6a51a3..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0010-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch +++ /dev/null @@ -1,52 +0,0 @@ -From 27fc7a4927ab92925a5be84217959d9f1c808ccc Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 2 Mar 2015 01:42:38 +0000 -Subject: [PATCH 10/13] Fix rpath in libtool when sysroot is enabled - -Enabling sysroot support in libtool exposed a bug where the final -library had an RPATH encoded into it which still pointed to the -sysroot. This works around the issue until it gets sorted out -upstream. 
- -Fix suggested by Richard Purdie - -Upstream-Status: Inappropriate [embedded specific] - -Signed-off-by: Scott Garman -Signed-off-by: Khem Raj ---- - ltmain.sh | 10 ++++++++-- - 1 file changed, 8 insertions(+), 2 deletions(-) - -diff --git a/ltmain.sh b/ltmain.sh -index 70e856e..11ee684 100644 ---- a/ltmain.sh -+++ b/ltmain.sh -@@ -8035,9 +8035,11 @@ EOF - test "$opt_mode" != relink && rpath="$compile_rpath$rpath" - for libdir in $rpath; do - if test -n "$hardcode_libdir_flag_spec"; then -+ func_replace_sysroot "$libdir" -+ libdir=$func_replace_sysroot_result -+ func_stripname '=' '' "$libdir" -+ libdir=$func_stripname_result - if test -n "$hardcode_libdir_separator"; then -- func_replace_sysroot "$libdir" -- libdir=$func_replace_sysroot_result - if test -z "$hardcode_libdirs"; then - hardcode_libdirs="$libdir" - else -@@ -8770,6 +8772,10 @@ EOF - hardcode_libdirs= - for libdir in $compile_rpath $finalize_rpath; do - if test -n "$hardcode_libdir_flag_spec"; then -+ func_replace_sysroot "$libdir" -+ libdir=$func_replace_sysroot_result -+ func_stripname '=' '' "$libdir" -+ libdir=$func_stripname_result - if test -n "$hardcode_libdir_separator"; then - if test -z "$hardcode_libdirs"; then - hardcode_libdirs="$libdir" --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0010-warn-for-uses-of-system-directories-when-cross-linki.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0010-warn-for-uses-of-system-directories-when-cross-linki.patch new file mode 100644 index 000000000..0774ad6f0 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0010-warn-for-uses-of-system-directories-when-cross-linki.patch @@ -0,0 +1,273 @@ +From b1ab17abe4128684f19775448545176fb2a5e27e Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 15 Jan 2016 06:31:09 +0000 +Subject: [PATCH 10/15] warn for uses of system directories when cross linking + +2008-07-02 Joseph Myers + + ld/ + * ld.h (args_type): Add error_poison_system_directories. + * ld.texinfo (--error-poison-system-directories): Document. + * ldfile.c (ldfile_add_library_path): Check + command_line.error_poison_system_directories. + * ldmain.c (main): Initialize + command_line.error_poison_system_directories. + * lexsup.c (enum option_values): Add + OPTION_ERROR_POISON_SYSTEM_DIRECTORIES. + (ld_options): Add --error-poison-system-directories. + (parse_args): Handle new option. + +2007-06-13 Joseph Myers + + ld/ + * config.in: Regenerate. + * ld.h (args_type): Add poison_system_directories. + * ld.texinfo (--no-poison-system-directories): Document. + * ldfile.c (ldfile_add_library_path): Check + command_line.poison_system_directories. + * ldmain.c (main): Initialize + command_line.poison_system_directories. + * lexsup.c (enum option_values): Add + OPTION_NO_POISON_SYSTEM_DIRECTORIES. + (ld_options): Add --no-poison-system-directories. + (parse_args): Handle new option. + +2007-04-20 Joseph Myers + + Merge from Sourcery G++ binutils 2.17: + + 2007-03-20 Joseph Myers + Based on patch by Mark Hatle . + ld/ + * configure.in (--enable-poison-system-directories): New option. + * configure, config.in: Regenerate. + * ldfile.c (ldfile_add_library_path): If + ENABLE_POISON_SYSTEM_DIRECTORIES defined, warn for use of /lib, + /usr/lib, /usr/local/lib or /usr/X11R6/lib. 
+ +Signed-off-by: Mark Hatle +Signed-off-by: Scott Garman +Signed-off-by: Khem Raj +--- +Upstream-Status: Pending + + ld/config.in | 3 +++ + ld/configure | 16 ++++++++++++++++ + ld/configure.ac | 10 ++++++++++ + ld/ld.h | 8 ++++++++ + ld/ld.texinfo | 12 ++++++++++++ + ld/ldfile.c | 17 +++++++++++++++++ + ld/ldlex.h | 2 ++ + ld/ldmain.c | 2 ++ + ld/lexsup.c | 16 ++++++++++++++++ + 9 files changed, 86 insertions(+) + +diff --git a/ld/config.in b/ld/config.in +index 2c6d698b6c..d3cb7e882d 100644 +--- a/ld/config.in ++++ b/ld/config.in +@@ -17,6 +17,9 @@ + language is requested. */ + #undef ENABLE_NLS + ++/* Define to warn for use of native system library directories */ ++#undef ENABLE_POISON_SYSTEM_DIRECTORIES ++ + /* Additional extension a shared object might have. */ + #undef EXTRA_SHLIB_EXTENSION + +diff --git a/ld/configure b/ld/configure +index 4277b74bad..63109644b6 100755 +--- a/ld/configure ++++ b/ld/configure +@@ -793,6 +793,7 @@ with_lib_path + enable_targets + enable_64_bit_bfd + with_sysroot ++enable_poison_system_directories + enable_gold + enable_got + enable_compressed_debug_sections +@@ -1450,6 +1451,8 @@ Optional Features: + --disable-largefile omit support for large files + --enable-targets alternative target configurations + --enable-64-bit-bfd 64-bit support (on hosts with narrower word sizes) ++ --enable-poison-system-directories ++ warn for use of native system library directories + --enable-gold[=ARG] build gold [ARG={default,yes,no}] + --enable-got= GOT handling scheme (target, single, negative, + multigot) +@@ -16314,6 +16317,19 @@ fi + + + ++# Check whether --enable-poison-system-directories was given. ++if test "${enable_poison_system_directories+set}" = set; then : ++ enableval=$enable_poison_system_directories; ++else ++ enable_poison_system_directories=no ++fi ++ ++if test "x${enable_poison_system_directories}" = "xyes"; then ++ ++$as_echo "#define ENABLE_POISON_SYSTEM_DIRECTORIES 1" >>confdefs.h ++ ++fi ++ + # Check whether --enable-gold was given. + if test "${enable_gold+set}" = set; then : + enableval=$enable_gold; case "${enableval}" in +diff --git a/ld/configure.ac b/ld/configure.ac +index 36a9f5083a..47f1d33fa5 100644 +--- a/ld/configure.ac ++++ b/ld/configure.ac +@@ -95,6 +95,16 @@ AC_SUBST(use_sysroot) + AC_SUBST(TARGET_SYSTEM_ROOT) + AC_SUBST(TARGET_SYSTEM_ROOT_DEFINE) + ++AC_ARG_ENABLE([poison-system-directories], ++ AS_HELP_STRING([--enable-poison-system-directories], ++ [warn for use of native system library directories]),, ++ [enable_poison_system_directories=no]) ++if test "x${enable_poison_system_directories}" = "xyes"; then ++ AC_DEFINE([ENABLE_POISON_SYSTEM_DIRECTORIES], ++ [1], ++ [Define to warn for use of native system library directories]) ++fi ++ + dnl Use --enable-gold to decide if this linker should be the default. + dnl "install_as_default" is set to false if gold is the default linker. + dnl "installed_linker" is the installed BFD linker name. +diff --git a/ld/ld.h b/ld/ld.h +index 104bb8e237..74c914bdd5 100644 +--- a/ld/ld.h ++++ b/ld/ld.h +@@ -172,6 +172,14 @@ typedef struct + /* If set, display the target memory usage (per memory region). */ + bfd_boolean print_memory_usage; + ++ /* If TRUE (the default) warn for uses of system directories when ++ cross linking. */ ++ bfd_boolean poison_system_directories; ++ ++ /* If TRUE (default FALSE) give an error for uses of system ++ directories when cross linking instead of a warning. */ ++ bfd_boolean error_poison_system_directories; ++ + /* Big or little endian as set on command line. 
*/ + enum endian_enum endian; + +diff --git a/ld/ld.texinfo b/ld/ld.texinfo +index d393acdd94..ba995b1e3a 100644 +--- a/ld/ld.texinfo ++++ b/ld/ld.texinfo +@@ -2403,6 +2403,18 @@ string identifying the original linked file does not change. + + Passing @code{none} for @var{style} disables the setting from any + @code{--build-id} options earlier on the command line. ++ ++@kindex --no-poison-system-directories ++@item --no-poison-system-directories ++Do not warn for @option{-L} options using system directories such as ++@file{/usr/lib} when cross linking. This option is intended for use ++in chroot environments when such directories contain the correct ++libraries for the target system rather than the host. ++ ++@kindex --error-poison-system-directories ++@item --error-poison-system-directories ++Give an error instead of a warning for @option{-L} options using ++system directories when cross linking. + @end table + + @c man end +diff --git a/ld/ldfile.c b/ld/ldfile.c +index 0943bb2dfa..95874c75de 100644 +--- a/ld/ldfile.c ++++ b/ld/ldfile.c +@@ -114,6 +114,23 @@ ldfile_add_library_path (const char *name, bfd_boolean cmdline) + new_dirs->name = concat (ld_sysroot, name + 1, (const char *) NULL); + else + new_dirs->name = xstrdup (name); ++ ++#ifdef ENABLE_POISON_SYSTEM_DIRECTORIES ++ if (command_line.poison_system_directories ++ && ((!strncmp (name, "/lib", 4)) ++ || (!strncmp (name, "/usr/lib", 8)) ++ || (!strncmp (name, "/usr/local/lib", 14)) ++ || (!strncmp (name, "/usr/X11R6/lib", 14)))) ++ { ++ if (command_line.error_poison_system_directories) ++ einfo (_("%X%P: error: library search path \"%s\" is unsafe for " ++ "cross-compilation\n"), name); ++ else ++ einfo (_("%P: warning: library search path \"%s\" is unsafe for " ++ "cross-compilation\n"), name); ++ } ++#endif ++ + } + + /* Try to open a BFD for a lang_input_statement. */ +diff --git a/ld/ldlex.h b/ld/ldlex.h +index 3ecac2bc86..34117f43a5 100644 +--- a/ld/ldlex.h ++++ b/ld/ldlex.h +@@ -146,6 +146,8 @@ enum option_values + OPTION_PRINT_MEMORY_USAGE, + OPTION_REQUIRE_DEFINED_SYMBOL, + OPTION_ORPHAN_HANDLING, ++ OPTION_NO_POISON_SYSTEM_DIRECTORIES, ++ OPTION_ERROR_POISON_SYSTEM_DIRECTORIES, + }; + + /* The initial parser states. */ +diff --git a/ld/ldmain.c b/ld/ldmain.c +index 1e48b1a2db..21f27bacf1 100644 +--- a/ld/ldmain.c ++++ b/ld/ldmain.c +@@ -270,6 +270,8 @@ main (int argc, char **argv) + command_line.warn_mismatch = TRUE; + command_line.warn_search_mismatch = TRUE; + command_line.check_section_addresses = -1; ++ command_line.poison_system_directories = TRUE; ++ command_line.error_poison_system_directories = FALSE; + + /* We initialize DEMANGLING based on the environment variable + COLLECT_NO_DEMANGLE. 
The gcc collect2 program will demangle the +diff --git a/ld/lexsup.c b/ld/lexsup.c +index 0b7d4976ac..dedc07a143 100644 +--- a/ld/lexsup.c ++++ b/ld/lexsup.c +@@ -535,6 +535,14 @@ static const struct ld_option ld_options[] = + { {"orphan-handling", required_argument, NULL, OPTION_ORPHAN_HANDLING}, + '\0', N_("=MODE"), N_("Control how orphan sections are handled."), + TWO_DASHES }, ++ { {"no-poison-system-directories", no_argument, NULL, ++ OPTION_NO_POISON_SYSTEM_DIRECTORIES}, ++ '\0', NULL, N_("Do not warn for -L options using system directories"), ++ TWO_DASHES }, ++ { {"error-poison-system-directories", no_argument, NULL, ++ + OPTION_ERROR_POISON_SYSTEM_DIRECTORIES}, ++ '\0', NULL, N_("Give an error for -L options using system directories"), ++ TWO_DASHES }, + }; + + #define OPTION_COUNT ARRAY_SIZE (ld_options) +@@ -1562,6 +1570,14 @@ parse_args (unsigned argc, char **argv) + einfo (_("%P%F: invalid argument to option" + " \"--orphan-handling\"\n")); + break; ++ ++ case OPTION_NO_POISON_SYSTEM_DIRECTORIES: ++ command_line.poison_system_directories = FALSE; ++ break; ++ ++ case OPTION_ERROR_POISON_SYSTEM_DIRECTORIES: ++ command_line.error_poison_system_directories = TRUE; ++ break; + } + } + +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0011-Change-default-emulation-for-mips64-linux.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0011-Change-default-emulation-for-mips64-linux.patch deleted file mode 100644 index 3fdb5eb92..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0011-Change-default-emulation-for-mips64-linux.patch +++ /dev/null @@ -1,59 +0,0 @@ -From 6b018e8129298dbd5b714e459ef0b3aef5bbce13 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 2 Mar 2015 01:44:14 +0000 -Subject: [PATCH 11/13] Change default emulation for mips64*-*-linux - -we change the default emulations to be N64 instead of N32 - -Upstream-Status: Inappropriate [ OE configuration Specific] - -Signed-off-by: Khem Raj ---- - bfd/config.bfd | 8 ++++---- - ld/configure.tgt | 8 ++++---- - 2 files changed, 8 insertions(+), 8 deletions(-) - -diff --git a/bfd/config.bfd b/bfd/config.bfd -index b998830..ab17e72 100644 ---- a/bfd/config.bfd -+++ b/bfd/config.bfd -@@ -1101,12 +1101,12 @@ case "${targ}" in - targ_selvecs="mips_elf32_le_vec mips_elf64_be_vec mips_elf64_le_vec mips_ecoff_be_vec mips_ecoff_le_vec" - ;; - mips64*el-*-linux*) -- targ_defvec=mips_elf32_ntrad_le_vec -- targ_selvecs="mips_elf32_ntrad_be_vec mips_elf32_trad_le_vec mips_elf32_trad_be_vec mips_elf64_trad_le_vec mips_elf64_trad_be_vec" -+ targ_defvec=mips_elf64_trad_le_vec -+ targ_selvecs="mips_elf32_ntrad_be_vec mips_elf32_ntrad_le_vec mips_elf32_trad_le_vec mips_elf32_trad_be_vec mips_elf64_trad_be_vec" - ;; - mips64*-*-linux*) -- targ_defvec=mips_elf32_ntrad_be_vec -- targ_selvecs="mips_elf32_ntrad_le_vec mips_elf32_trad_be_vec mips_elf32_trad_le_vec mips_elf64_trad_be_vec mips_elf64_trad_le_vec" -+ targ_defvec=mips_elf64_trad_be_vec -+ targ_selvecs="mips_elf32_ntrad_be_vec mips_elf32_ntrad_be_vec mips_elf32_trad_be_vec mips_elf32_trad_le_vec mips_elf64_trad_le_vec" - ;; - mips*el-*-linux*) - targ_defvec=mips_elf32_trad_le_vec -diff --git a/ld/configure.tgt b/ld/configure.tgt -index a3db909..212327c 100644 ---- a/ld/configure.tgt -+++ b/ld/configure.tgt -@@ -513,11 +513,11 @@ mips*el-*-vxworks*) targ_emul=elf32elmipvxworks - mips*-*-vxworks*) targ_emul=elf32ebmipvxworks - targ_extra_emuls="elf32elmipvxworks" ;; - mips*-*-windiss) 
targ_emul=elf32mipswindiss ;; --mips64*el-*-linux-*) targ_emul=elf32ltsmipn32 -- targ_extra_emuls="elf32btsmipn32 elf32ltsmip elf32btsmip elf64ltsmip elf64btsmip" -+mips64*el-*-linux-*) targ_emul=elf64ltsmip -+ targ_extra_emuls="elf32btsmipn32 elf32ltsmipn32 elf32ltsmip elf32btsmip elf64btsmip" - targ_extra_libpath=$targ_extra_emuls ;; --mips64*-*-linux-*) targ_emul=elf32btsmipn32 -- targ_extra_emuls="elf32ltsmipn32 elf32btsmip elf32ltsmip elf64btsmip elf64ltsmip" -+mips64*-*-linux-*) targ_emul=elf64btsmip -+ targ_extra_emuls="elf32btsmipn32 elf32ltsmipn32 elf32btsmip elf32ltsmip elf64ltsmip" - targ_extra_libpath=$targ_extra_emuls ;; - mips*el-*-linux-*) targ_emul=elf32ltsmip - targ_extra_emuls="elf32btsmip elf32ltsmipn32 elf64ltsmip elf32btsmipn32 elf64btsmip" --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0011-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0011-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch new file mode 100644 index 000000000..949ef51bd --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0011-Fix-rpath-in-libtool-when-sysroot-is-enabled.patch @@ -0,0 +1,52 @@ +From 4fe13a36997253a5c91bcb086aeb392ab2095f67 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 2 Mar 2015 01:42:38 +0000 +Subject: [PATCH 11/15] Fix rpath in libtool when sysroot is enabled + +Enabling sysroot support in libtool exposed a bug where the final +library had an RPATH encoded into it which still pointed to the +sysroot. This works around the issue until it gets sorted out +upstream. + +Fix suggested by Richard Purdie + +Upstream-Status: Inappropriate [embedded specific] + +Signed-off-by: Scott Garman +Signed-off-by: Khem Raj +--- + ltmain.sh | 10 ++++++++-- + 1 file changed, 8 insertions(+), 2 deletions(-) + +diff --git a/ltmain.sh b/ltmain.sh +index 70e856e065..11ee684ccc 100644 +--- a/ltmain.sh ++++ b/ltmain.sh +@@ -8035,9 +8035,11 @@ EOF + test "$opt_mode" != relink && rpath="$compile_rpath$rpath" + for libdir in $rpath; do + if test -n "$hardcode_libdir_flag_spec"; then ++ func_replace_sysroot "$libdir" ++ libdir=$func_replace_sysroot_result ++ func_stripname '=' '' "$libdir" ++ libdir=$func_stripname_result + if test -n "$hardcode_libdir_separator"; then +- func_replace_sysroot "$libdir" +- libdir=$func_replace_sysroot_result + if test -z "$hardcode_libdirs"; then + hardcode_libdirs="$libdir" + else +@@ -8770,6 +8772,10 @@ EOF + hardcode_libdirs= + for libdir in $compile_rpath $finalize_rpath; do + if test -n "$hardcode_libdir_flag_spec"; then ++ func_replace_sysroot "$libdir" ++ libdir=$func_replace_sysroot_result ++ func_stripname '=' '' "$libdir" ++ libdir=$func_stripname_result + if test -n "$hardcode_libdir_separator"; then + if test -z "$hardcode_libdirs"; then + hardcode_libdirs="$libdir" +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0012-Add-support-for-Netlogic-XLP.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0012-Add-support-for-Netlogic-XLP.patch deleted file mode 100644 index 8103347ba..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0012-Add-support-for-Netlogic-XLP.patch +++ /dev/null @@ -1,413 +0,0 @@ -From c2e138f4ccdf8af81c18c8511c901d3deee696b5 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Sun, 14 Feb 2016 17:06:19 +0000 -Subject: [PATCH 12/13] Add support for Netlogic XLP - -Patch From: Nebu Philips - -Using the 
mipsisa64r2nlm target, add support for XLP from -Netlogic. Also, update vendor name to NLM wherever applicable. - -Use 0x00000080 for INSN_XLP, the value 0x00000040 has already been -assigned to INSN_OCTEON3 - -Signed-off-by: Khem Raj -Signed-off-by: Baoshan Pang -Signed-off-by: Mark Hatle ---- -Upstream-Status: Pending - - bfd/aoutx.h | 1 + - bfd/archures.c | 1 + - bfd/bfd-in2.h | 1 + - bfd/config.bfd | 5 +++++ - bfd/cpu-mips.c | 6 ++++-- - bfd/elfxx-mips.c | 8 ++++++++ - binutils/readelf.c | 1 + - gas/config/tc-mips.c | 4 +++- - gas/configure | 3 +++ - gas/configure.tgt | 2 +- - include/elf/mips.h | 1 + - include/opcode/mips.h | 10 ++++++++-- - ld/configure.tgt | 2 ++ - opcodes/mips-dis.c | 12 +++++------- - opcodes/mips-opc.c | 33 +++++++++++++++++++++------------ - 15 files changed, 65 insertions(+), 25 deletions(-) - -diff --git a/bfd/aoutx.h b/bfd/aoutx.h -index be0126a..4ca7e24 100644 ---- a/bfd/aoutx.h -+++ b/bfd/aoutx.h -@@ -812,6 +812,7 @@ NAME (aout, machine_type) (enum bfd_architecture arch, - case bfd_mach_mipsisa64r6: - case bfd_mach_mips_sb1: - case bfd_mach_mips_xlr: -+ case bfd_mach_mips_xlp: - /* FIXME: These should be MIPS3, MIPS4, MIPS16, MIPS32, etc. */ - arch_flags = M_MIPS2; - break; -diff --git a/bfd/archures.c b/bfd/archures.c -index 96c9109..5a30d02 100644 ---- a/bfd/archures.c -+++ b/bfd/archures.c -@@ -197,6 +197,7 @@ DESCRIPTION - .#define bfd_mach_mips_octeon2 6502 - .#define bfd_mach_mips_octeon3 6503 - .#define bfd_mach_mips_xlr 887682 {* decimal 'XLR' *} -+.#define bfd_mach_mips_xlp 887680 {* decimal 'XLP' *} - .#define bfd_mach_mipsisa32 32 - .#define bfd_mach_mipsisa32r2 33 - .#define bfd_mach_mipsisa32r3 34 -diff --git a/bfd/bfd-in2.h b/bfd/bfd-in2.h -index 30513c4..5e8ed4c 100644 ---- a/bfd/bfd-in2.h -+++ b/bfd/bfd-in2.h -@@ -2008,6 +2008,7 @@ enum bfd_architecture - #define bfd_mach_mips_octeon2 6502 - #define bfd_mach_mips_octeon3 6503 - #define bfd_mach_mips_xlr 887682 /* decimal 'XLR' */ -+#define bfd_mach_mips_xlp 887680 /* decimal 'XLP' */ - #define bfd_mach_mipsisa32 32 - #define bfd_mach_mipsisa32r2 33 - #define bfd_mach_mipsisa32r3 34 -diff --git a/bfd/config.bfd b/bfd/config.bfd -index ab17e72..863be89 100644 ---- a/bfd/config.bfd -+++ b/bfd/config.bfd -@@ -1084,6 +1084,11 @@ case "${targ}" in - targ_defvec=mips_elf32_le_vec - targ_selvecs="mips_elf32_be_vec mips_elf64_be_vec mips_elf64_le_vec" - ;; -+ mipsisa64*-*-elf*) -+ targ_defvec=mips_elf32_trad_be_vec -+ targ_selvecs="mips_elf32_trad_le_vec mips_elf64_trad_be_vec mips_elf64_trad_le_vec" -+ want64=true -+ ;; - mips*-*-elf* | mips*-*-rtems* | mips*-*-windiss | mips*-*-none) - targ_defvec=mips_elf32_be_vec - targ_selvecs="mips_elf32_le_vec mips_elf64_be_vec mips_elf64_le_vec" -diff --git a/bfd/cpu-mips.c b/bfd/cpu-mips.c -index d209fb6..b6a86ae 100644 ---- a/bfd/cpu-mips.c -+++ b/bfd/cpu-mips.c -@@ -104,7 +104,8 @@ enum - I_mipsocteon2, - I_mipsocteon3, - I_xlr, -- I_micromips -+ I_micromips, -+ I_xlp - }; - - #define NN(index) (&arch_info_struct[(index) + 1]) -@@ -155,7 +156,8 @@ static const bfd_arch_info_type arch_info_struct[] = - N (64, 64, bfd_mach_mips_octeon2,"mips:octeon2", FALSE, NN(I_mipsocteon2)), - N (64, 64, bfd_mach_mips_octeon3, "mips:octeon3", FALSE, NN(I_mipsocteon3)), - N (64, 64, bfd_mach_mips_xlr, "mips:xlr", FALSE, NN(I_xlr)), -- N (64, 64, bfd_mach_mips_micromips,"mips:micromips",FALSE,0) -+ N (64, 64, bfd_mach_mips_micromips,"mips:micromips",FALSE,NN(I_micromips)), -+ N (64, 64, bfd_mach_mips_xlp, "mips:xlp", FALSE, 0) - }; - - /* The default architecture is 
mips:3000, but with a machine number of -diff --git a/bfd/elfxx-mips.c b/bfd/elfxx-mips.c -index e47276b..71c30a0 100644 ---- a/bfd/elfxx-mips.c -+++ b/bfd/elfxx-mips.c -@@ -6646,6 +6646,9 @@ _bfd_elf_mips_mach (flagword flags) - case E_MIPS_MACH_XLR: - return bfd_mach_mips_xlr; - -+ case E_MIPS_MACH_XLP: -+ return bfd_mach_mips_xlp; -+ - default: - switch (flags & EF_MIPS_ARCH) - { -@@ -11949,6 +11952,10 @@ mips_set_isa_flags (bfd *abfd) - val = E_MIPS_ARCH_64R2 | E_MIPS_MACH_OCTEON2; - break; - -+ case bfd_mach_mips_xlp: -+ val = E_MIPS_ARCH_64R2 | E_MIPS_MACH_XLP; -+ break; -+ - case bfd_mach_mipsisa32: - val = E_MIPS_ARCH_32; - break; -@@ -13975,6 +13982,7 @@ static const struct mips_mach_extension mips_mach_extensions[] = - { bfd_mach_mips_octeonp, bfd_mach_mips_octeon }, - { bfd_mach_mips_octeon, bfd_mach_mipsisa64r2 }, - { bfd_mach_mips_loongson_3a, bfd_mach_mipsisa64r2 }, -+ { bfd_mach_mips_xlp, bfd_mach_mipsisa64r2 }, - - /* MIPS64 extensions. */ - { bfd_mach_mipsisa64r2, bfd_mach_mipsisa64 }, -diff --git a/binutils/readelf.c b/binutils/readelf.c -index 274ddd1..d31558c 100644 ---- a/binutils/readelf.c -+++ b/binutils/readelf.c -@@ -3230,6 +3230,7 @@ get_machine_flags (unsigned e_flags, unsigned e_machine) - case E_MIPS_MACH_OCTEON2: strcat (buf, ", octeon2"); break; - case E_MIPS_MACH_OCTEON3: strcat (buf, ", octeon3"); break; - case E_MIPS_MACH_XLR: strcat (buf, ", xlr"); break; -+ case E_MIPS_MACH_XLP: strcat (buf, ", xlp"); break; - case 0: - /* We simply ignore the field in this case to avoid confusion: - MIPS ELF does not specify EF_MIPS_MACH, it is a GNU -diff --git a/gas/config/tc-mips.c b/gas/config/tc-mips.c -index eb8b26b..e59dce6 100644 ---- a/gas/config/tc-mips.c -+++ b/gas/config/tc-mips.c -@@ -552,6 +552,7 @@ static int mips_32bitmode = 0; - || mips_opts.arch == CPU_RM7000 \ - || mips_opts.arch == CPU_VR5500 \ - || mips_opts.micromips \ -+ || mips_opts.arch == CPU_XLP \ - ) - - /* Whether the processor uses hardware interlocks to protect reads -@@ -581,6 +582,7 @@ static int mips_32bitmode = 0; - && mips_opts.isa != ISA_MIPS3) \ - || mips_opts.arch == CPU_R4300 \ - || mips_opts.micromips \ -+ || mips_opts.arch == CPU_XLP \ - ) - - /* Whether the processor uses hardware interlocks to protect reads -@@ -18858,7 +18860,7 @@ static const struct mips_cpu_info mips_cpu_info_table[] = - /* Broadcom XLP. - XLP is mostly like XLR, with the prominent exception that it is - MIPS64R2 rather than MIPS64. 
*/ -- { "xlp", 0, 0, ISA_MIPS64R2, CPU_XLR }, -+ { "xlp", 0, 0, ISA_MIPS64R2, CPU_XLP }, - - /* MIPS 64 Release 6 */ - { "i6400", 0, ASE_MSA, ISA_MIPS64R6, CPU_MIPS64R6}, -diff --git a/gas/configure b/gas/configure -index 45da030..aba89f7 100755 ---- a/gas/configure -+++ b/gas/configure -@@ -12914,6 +12914,9 @@ _ACEOF - mipsisa64r6 | mipsisa64r6el) - mips_cpu=mips64r6 - ;; -+ mipsisa64r2nlm | mipsisa64r2nlmel) -+ mips_cpu=xlp -+ ;; - mipstx39 | mipstx39el) - mips_cpu=r3900 - ;; -diff --git a/gas/configure.tgt b/gas/configure.tgt -index 1b9fd99..a9f1977 100644 ---- a/gas/configure.tgt -+++ b/gas/configure.tgt -@@ -339,7 +339,7 @@ case ${generic_target} in - mips-*-sysv4*MP* | mips-*-gnu*) fmt=elf em=tmips ;; - mips*-sde-elf* | mips*-mti-elf* | mips*-img-elf*) - fmt=elf em=tmips ;; -- mips-*-elf* | mips-*-rtems*) fmt=elf ;; -+ mips-*-elf* | mips-*-rtems*) fmt=elf em=tmips ;; - mips-*-netbsd*) fmt=elf em=tmips ;; - mips-*-openbsd*) fmt=elf em=tmips ;; - -diff --git a/include/elf/mips.h b/include/elf/mips.h -index 7e813de..d7d72c1 100644 ---- a/include/elf/mips.h -+++ b/include/elf/mips.h -@@ -290,6 +290,7 @@ END_RELOC_NUMBERS (R_MIPS_maxext) - #define E_MIPS_MACH_SB1 0x008a0000 - #define E_MIPS_MACH_OCTEON 0x008b0000 - #define E_MIPS_MACH_XLR 0x008c0000 -+#define E_MIPS_MACH_XLP 0x008f0000 - #define E_MIPS_MACH_OCTEON2 0x008d0000 - #define E_MIPS_MACH_OCTEON3 0x008e0000 - #define E_MIPS_MACH_5400 0x00910000 -diff --git a/include/opcode/mips.h b/include/opcode/mips.h -index b1d4ef6..f2c8e88 100644 ---- a/include/opcode/mips.h -+++ b/include/opcode/mips.h -@@ -1232,8 +1232,10 @@ static const unsigned int mips_isa_table[] = { - #define INSN_LOONGSON_2F 0x80000000 - /* Loongson 3A. */ - #define INSN_LOONGSON_3A 0x00000400 --/* RMI Xlr instruction */ --#define INSN_XLR 0x00000020 -+/* Netlogic Xlr instruction */ -+#define INSN_XLR 0x00000020 -+/* Netlogic XlP instruction */ -+#define INSN_XLP 0x00000080 - - /* DSP ASE */ - #define ASE_DSP 0x00000001 -@@ -1331,6 +1333,7 @@ static const unsigned int mips_isa_table[] = { - #define CPU_OCTEON2 6502 - #define CPU_OCTEON3 6503 - #define CPU_XLR 887682 /* decimal 'XLR' */ -+#define CPU_XLP 887680 /* decimal 'XLP' */ - - /* Return true if the given CPU is included in INSN_* mask MASK. */ - -@@ -1408,6 +1411,9 @@ cpu_is_member (int cpu, unsigned int mask) - return ((mask & INSN_ISA_MASK) == INSN_ISA32R6) - || ((mask & INSN_ISA_MASK) == INSN_ISA64R6); - -+ case CPU_XLP: -+ return (mask & INSN_XLP) != 0; -+ - default: - return FALSE; - } -diff --git a/ld/configure.tgt b/ld/configure.tgt -index 212327c..212e09c 100644 ---- a/ld/configure.tgt -+++ b/ld/configure.tgt -@@ -499,6 +499,8 @@ mips*el-sde-elf* | mips*el-mti-elf* | mips*el-img-elf*) - mips*-sde-elf* | mips*-mti-elf* | mips*-img-elf*) - targ_emul=elf32btsmip - targ_extra_emuls="elf32ltsmip elf32btsmipn32 elf64btsmip elf32ltsmipn32 elf64ltsmip" ;; -+mipsisa64*-*-elf*) targ_emul=elf32btsmip -+ targ_extra_emuls="elf32ltsmip elf64btsmip elf64ltsmip" ;; - mips64*el-ps2-elf*) targ_emul=elf32lr5900n32 - targ_extra_emuls="elf32lr5900" - targ_extra_libpath=$targ_extra_emuls ;; -diff --git a/opcodes/mips-dis.c b/opcodes/mips-dis.c -index 3f874e7..9813d0e 100644 ---- a/opcodes/mips-dis.c -+++ b/opcodes/mips-dis.c -@@ -648,13 +648,11 @@ const struct mips_arch_choice mips_arch_choices[] = - mips_cp0sel_names_xlr, ARRAY_SIZE (mips_cp0sel_names_xlr), - mips_cp1_names_mips3264, mips_hwr_names_numeric }, - -- /* XLP is mostly like XLR, with the prominent exception it is being -- MIPS64R2. 
*/ -- { "xlp", 1, bfd_mach_mips_xlr, CPU_XLR, -- ISA_MIPS64R2 | INSN_XLR, 0, -- mips_cp0_names_xlr, -- mips_cp0sel_names_xlr, ARRAY_SIZE (mips_cp0sel_names_xlr), -- mips_cp1_names_mips3264, mips_hwr_names_numeric }, -+ { "xlp", 1, bfd_mach_mips_xlp, CPU_XLP, -+ ISA_MIPS64R2 | INSN_XLP, 0, -+ mips_cp0_names_mips3264r2, -+ mips_cp0sel_names_mips3264r2, ARRAY_SIZE (mips_cp0sel_names_mips3264r2), -+ mips_cp1_names_mips3264, mips_hwr_names_mips3264r2 }, - - /* This entry, mips16, is here only for ISA/processor selection; do - not print its name. */ -diff --git a/opcodes/mips-opc.c b/opcodes/mips-opc.c -index a95eff1..99fb7bb 100644 ---- a/opcodes/mips-opc.c -+++ b/opcodes/mips-opc.c -@@ -320,7 +320,8 @@ decode_mips_operand (const char *p) - #define IOCTP (INSN_OCTEONP | INSN_OCTEON2 | INSN_OCTEON3) - #define IOCT2 (INSN_OCTEON2 | INSN_OCTEON3) - #define IOCT3 INSN_OCTEON3 --#define XLR INSN_XLR -+#define XLR INSN_XLR -+#define XLP INSN_XLP - #define IVIRT ASE_VIRT - #define IVIRT64 ASE_VIRT64 - -@@ -958,6 +959,7 @@ const struct mips_opcode mips_builtin_opcodes[] = - {"clo", "U,s", 0x70000021, 0xfc0007ff, WR_1|RD_2, 0, I32|N55, 0, I37 }, - {"clz", "d,s", 0x00000050, 0xfc1f07ff, WR_1|RD_2, 0, I37, 0, 0 }, - {"clz", "U,s", 0x70000020, 0xfc0007ff, WR_1|RD_2, 0, I32|N55, 0, I37 }, -+{"crc", "d,s,t", 0x7000001c, 0xfc0007ff, WR_1|RD_2|RD_3, 0, XLP, 0, 0 }, - /* ctc0 is at the bottom of the table. */ - {"ctc1", "t,G", 0x44c00000, 0xffe007ff, RD_1|WR_CC|CM, 0, I1, 0, 0 }, - {"ctc1", "t,S", 0x44c00000, 0xffe007ff, RD_1|WR_CC|CM, 0, I1, 0, 0 }, -@@ -990,12 +992,13 @@ const struct mips_opcode mips_builtin_opcodes[] = - {"daddiu", "t,r,j", 0x64000000, 0xfc000000, WR_1|RD_2, 0, I3, 0, 0 }, - {"daddu", "d,v,t", 0x0000002d, 0xfc0007ff, WR_1|RD_2|RD_3, 0, I3, 0, 0 }, - {"daddu", "t,r,I", 0, (int) M_DADDU_I, INSN_MACRO, 0, I3, 0, 0 }, --{"daddwc", "d,s,t", 0x70000038, 0xfc0007ff, WR_1|RD_2|RD_3|WR_C0|RD_C0, 0, XLR, 0, 0 }, -+{"daddwc", "d,s,t", 0x70000038, 0xfc0007ff, WR_1|RD_2|RD_3|WR_C0|RD_C0, 0, XLR|XLP, 0, 0 }, - {"dbreak", "", 0x7000003f, 0xffffffff, 0, 0, N5, 0, 0 }, - {"dclo", "d,s", 0x00000053, 0xfc1f07ff, WR_1|RD_2, 0, I69, 0, 0 }, - {"dclo", "U,s", 0x70000025, 0xfc0007ff, WR_1|RD_2, 0, I64|N55, 0, I69 }, - {"dclz", "d,s", 0x00000052, 0xfc1f07ff, WR_1|RD_2, 0, I69, 0, 0 }, - {"dclz", "U,s", 0x70000024, 0xfc0007ff, WR_1|RD_2, 0, I64|N55, 0, I69 }, -+{"dcrc", "d,s,t", 0x7000001d, 0xfc0007ff, WR_1|RD_2|RD_3, 0, XLP, 0, 0 }, - /* dctr and dctw are used on the r5000. */ - {"dctr", "o(b)", 0xbc050000, 0xfc1f0000, RD_2, 0, I3, 0, 0 }, - {"dctw", "o(b)", 0xbc090000, 0xfc1f0000, RD_2, 0, I3, 0, 0 }, -@@ -1067,6 +1070,7 @@ const struct mips_opcode mips_builtin_opcodes[] = - {"dmfc0", "t,G,H", 0x40200000, 0xffe007f8, WR_1|RD_C0|LC, 0, I64, 0, 0 }, - {"dmfgc0", "t,G", 0x40600100, 0xffe007ff, WR_1|RD_C0|LC, 0, 0, IVIRT64, 0 }, - {"dmfgc0", "t,G,H", 0x40600100, 0xffe007f8, WR_1|RD_C0|LC, 0, 0, IVIRT64, 0 }, -+{"dmfur", "t,d", 0x7000001e, 0xffe007ff, WR_1, 0, XLP, 0, 0 }, - {"dmt", "", 0x41600bc1, 0xffffffff, TRAP, 0, 0, MT32, 0 }, - {"dmt", "t", 0x41600bc1, 0xffe0ffff, WR_1|TRAP, 0, 0, MT32, 0 }, - {"dmtc0", "t,G", 0x40a00000, 0xffe007ff, RD_1|WR_C0|WR_CC|CM, 0, I3, 0, EE }, -@@ -1082,6 +1086,8 @@ const struct mips_opcode mips_builtin_opcodes[] = - /* dmfc3 is at the bottom of the table. */ - /* dmtc3 is at the bottom of the table. 
*/ - {"dmuh", "d,s,t", 0x000000dc, 0xfc0007ff, WR_1|RD_2|RD_3, 0, I69, 0, 0 }, -+{"dmtur", "t,d", 0x7000001f, 0xffe007ff, RD_1, 0, XLP, 0, 0 }, -+{"dmul", "d,s,t", 0x70000006, 0xfc0007ff, WR_1|RD_2|RD_3, 0, XLP, 0, 0 }, - {"dmul", "d,s,t", 0x0000009c, 0xfc0007ff, WR_1|RD_2|RD_3, 0, I69, 0, 0 }, - {"dmul", "d,v,t", 0x70000003, 0xfc0007ff, WR_1|RD_2|RD_3|WR_HILO, 0, IOCT, 0, 0 }, - {"dmul", "d,v,t", 0, (int) M_DMUL, INSN_MACRO, 0, I3, 0, M32|I69 }, -@@ -1235,9 +1241,9 @@ const struct mips_opcode mips_builtin_opcodes[] = - {"ld", "s,-b(+R)", 0xec180000, 0xfc1c0000, WR_1, RD_pc, I69, 0, 0 }, - {"ld", "t,A(b)", 0, (int) M_LD_AB, INSN_MACRO, 0, I1, 0, 0 }, - {"ld", "t,o(b)", 0xdc000000, 0xfc000000, WR_1|RD_3|LM, 0, I3, 0, 0 }, --{"ldaddw", "t,b", 0x70000010, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, --{"ldaddwu", "t,b", 0x70000011, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, --{"ldaddd", "t,b", 0x70000012, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, -+{"ldaddw", "t,b", 0x70000010, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, -+{"ldaddwu", "t,b", 0x70000011, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, -+{"ldaddd", "t,b", 0x70000012, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, - {"ldc1", "T,o(b)", 0xd4000000, 0xfc000000, WR_1|RD_3|CLD|FP_D, 0, I2, 0, SF }, - {"ldc1", "E,o(b)", 0xd4000000, 0xfc000000, WR_1|RD_3|CLD|FP_D, 0, I2, 0, SF }, - {"ldc1", "T,A(b)", 0, (int) M_LDC1_AB, INSN_MACRO, INSN2_M_FP_D, I2, 0, SF }, -@@ -1402,7 +1408,7 @@ const struct mips_opcode mips_builtin_opcodes[] = - {"mflo", "d,9", 0x00000012, 0xff9f07ff, WR_1|RD_LO, 0, 0, D32, 0 }, - {"mflo1", "d", 0x70000012, 0xffff07ff, WR_1|RD_LO, 0, EE, 0, 0 }, - {"mflhxu", "d", 0x00000052, 0xffff07ff, WR_1|MOD_HILO, 0, 0, SMT, 0 }, --{"mfcr", "t,s", 0x70000018, 0xfc00ffff, WR_1|RD_2, 0, XLR, 0, 0 }, -+{"mfcr", "t,s", 0x70000018, 0xfc00ffff, WR_1, 0, XLR|XLP, 0, 0 }, - {"mfsa", "d", 0x00000028, 0xffff07ff, WR_1, 0, EE, 0, 0 }, - {"min.ob", "X,Y,Q", 0x78000006, 0xfc20003f, WR_1|RD_2|RD_3|FP_D, 0, SB1, MX, 0 }, - {"min.ob", "D,S,Q", 0x48000006, 0xfc20003f, WR_1|RD_2|RD_3|FP_D, 0, N54, 0, 0 }, -@@ -1447,10 +1453,13 @@ const struct mips_opcode mips_builtin_opcodes[] = - /* move is at the top of the table. 
*/ - {"msgn.qh", "X,Y,Q", 0x78200000, 0xfc20003f, WR_1|RD_2|RD_3|FP_D, 0, 0, MX, 0 }, - {"msgsnd", "t", 0, (int) M_MSGSND, INSN_MACRO, 0, XLR, 0, 0 }, -+{"msgsnds", "d,t", 0x4a000001, 0xffe007ff, WR_1|RD_2|RD_C0|WR_C0, 0, XLP, 0, 0 }, - {"msgld", "", 0, (int) M_MSGLD, INSN_MACRO, 0, XLR, 0, 0 }, - {"msgld", "t", 0, (int) M_MSGLD_T, INSN_MACRO, 0, XLR, 0, 0 }, --{"msgwait", "", 0, (int) M_MSGWAIT, INSN_MACRO, 0, XLR, 0, 0 }, --{"msgwait", "t", 0, (int) M_MSGWAIT_T,INSN_MACRO, 0, XLR, 0, 0 }, -+{"msglds", "d,t", 0x4a000002, 0xffe007ff, WR_1|RD_2|RD_C0|WR_C0, 0, XLP, 0, 0 }, -+{"msgwait", "", 0, (int) M_MSGWAIT, INSN_MACRO, 0, XLR|XLP, 0, 0 }, -+{"msgwait", "t", 0, (int) M_MSGWAIT_T,INSN_MACRO, 0, XLR|XLP, 0, 0 }, -+{"msgsync", "", 0x4a000004, 0xffffffff,0, 0, XLP, 0, 0 }, - {"msub.d", "D,R,S,T", 0x4c000029, 0xfc00003f, WR_1|RD_2|RD_3|RD_4|FP_D, 0, I4_33, 0, I37 }, - {"msub.d", "D,S,T", 0x46200019, 0xffe0003f, WR_1|RD_2|RD_3|FP_D, 0, IL2E, 0, 0 }, - {"msub.d", "D,S,T", 0x72200019, 0xffe0003f, WR_1|RD_2|RD_3|FP_D, 0, IL2F, 0, 0 }, -@@ -1500,7 +1509,7 @@ const struct mips_opcode mips_builtin_opcodes[] = - {"mtlo", "s,7", 0x00000013, 0xfc1fe7ff, RD_1|WR_LO, 0, 0, D32, 0 }, - {"mtlo1", "s", 0x70000013, 0xfc1fffff, RD_1|WR_LO, 0, EE, 0, 0 }, - {"mtlhx", "s", 0x00000053, 0xfc1fffff, RD_1|MOD_HILO, 0, 0, SMT, 0 }, --{"mtcr", "t,s", 0x70000019, 0xfc00ffff, RD_1|RD_2, 0, XLR, 0, 0 }, -+{"mtcr", "t,s", 0x70000019, 0xfc00ffff, RD_1, 0, XLR|XLP, 0, 0 }, - {"mtm0", "s", 0x70000008, 0xfc1fffff, RD_1, 0, IOCT, 0, 0 }, - {"mtm0", "s,t", 0x70000008, 0xfc00ffff, RD_1|RD_2, 0, IOCT3, 0, 0 }, - {"mtm1", "s", 0x7000000c, 0xfc1fffff, RD_1, 0, IOCT, 0, 0 }, -@@ -1937,9 +1946,9 @@ const struct mips_opcode mips_builtin_opcodes[] = - {"suxc1", "S,t(b)", 0x4c00000d, 0xfc0007ff, RD_1|RD_2|RD_3|SM|FP_D, 0, I5_33|N55, 0, I37}, - {"sw", "t,o(b)", 0xac000000, 0xfc000000, RD_1|RD_3|SM, 0, I1, 0, 0 }, - {"sw", "t,A(b)", 0, (int) M_SW_AB, INSN_MACRO, 0, I1, 0, 0 }, --{"swapw", "t,b", 0x70000014, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, --{"swapwu", "t,b", 0x70000015, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, --{"swapd", "t,b", 0x70000016, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, -+{"swapw", "t,b", 0x70000014, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, -+{"swapwu", "t,b", 0x70000015, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, -+{"swapd", "t,b", 0x70000016, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, - {"swc0", "E,o(b)", 0xe0000000, 0xfc000000, RD_3|RD_C0|SM, 0, I1, 0, IOCT|IOCTP|IOCT2|I37 }, - {"swc0", "E,A(b)", 0, (int) M_SWC0_AB, INSN_MACRO, 0, I1, 0, IOCT|IOCTP|IOCT2|I37 }, - {"swc1", "T,o(b)", 0xe4000000, 0xfc000000, RD_1|RD_3|SM|FP_S, 0, I1, 0, 0 }, --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0012-Change-default-emulation-for-mips64-linux.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0012-Change-default-emulation-for-mips64-linux.patch new file mode 100644 index 000000000..2ac101c95 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0012-Change-default-emulation-for-mips64-linux.patch @@ -0,0 +1,59 @@ +From f43f832e0009caea6a3d5bcaa8f0a64d943072ea Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 2 Mar 2015 01:44:14 +0000 +Subject: [PATCH 12/15] Change default emulation for mips64*-*-linux + +we change the default emulations to be N64 instead of N32 + +Upstream-Status: Inappropriate [ OE configuration Specific] + +Signed-off-by: Khem Raj +--- + bfd/config.bfd | 8 ++++---- + 
ld/configure.tgt | 8 ++++---- + 2 files changed, 8 insertions(+), 8 deletions(-) + +diff --git a/bfd/config.bfd b/bfd/config.bfd +index 1b28016b91..63596c2ebc 100644 +--- a/bfd/config.bfd ++++ b/bfd/config.bfd +@@ -1183,12 +1183,12 @@ case "${targ}" in + targ_selvecs="mips_elf32_le_vec mips_elf64_be_vec mips_elf64_le_vec mips_ecoff_be_vec mips_ecoff_le_vec" + ;; + mips64*el-*-linux*) +- targ_defvec=mips_elf32_ntrad_le_vec +- targ_selvecs="mips_elf32_ntrad_be_vec mips_elf32_trad_le_vec mips_elf32_trad_be_vec mips_elf64_trad_le_vec mips_elf64_trad_be_vec" ++ targ_defvec=mips_elf64_trad_le_vec ++ targ_selvecs="mips_elf32_ntrad_be_vec mips_elf32_ntrad_le_vec mips_elf32_trad_le_vec mips_elf32_trad_be_vec mips_elf64_trad_be_vec" + ;; + mips64*-*-linux*) +- targ_defvec=mips_elf32_ntrad_be_vec +- targ_selvecs="mips_elf32_ntrad_le_vec mips_elf32_trad_be_vec mips_elf32_trad_le_vec mips_elf64_trad_be_vec mips_elf64_trad_le_vec" ++ targ_defvec=mips_elf64_trad_be_vec ++ targ_selvecs="mips_elf32_ntrad_be_vec mips_elf32_ntrad_be_vec mips_elf32_trad_be_vec mips_elf32_trad_le_vec mips_elf64_trad_le_vec" + ;; + mips*el-*-linux*) + targ_defvec=mips_elf32_trad_le_vec +diff --git a/ld/configure.tgt b/ld/configure.tgt +index b85c6bb35a..4e77383a19 100644 +--- a/ld/configure.tgt ++++ b/ld/configure.tgt +@@ -518,11 +518,11 @@ mips*el-*-vxworks*) targ_emul=elf32elmipvxworks + mips*-*-vxworks*) targ_emul=elf32ebmipvxworks + targ_extra_emuls="elf32elmipvxworks" ;; + mips*-*-windiss) targ_emul=elf32mipswindiss ;; +-mips64*el-*-linux-*) targ_emul=elf32ltsmipn32 +- targ_extra_emuls="elf32btsmipn32 elf32ltsmip elf32btsmip elf64ltsmip elf64btsmip" ++mips64*el-*-linux-*) targ_emul=elf64ltsmip ++ targ_extra_emuls="elf32btsmipn32 elf32ltsmipn32 elf32ltsmip elf32btsmip elf64btsmip" + targ_extra_libpath=$targ_extra_emuls ;; +-mips64*-*-linux-*) targ_emul=elf32btsmipn32 +- targ_extra_emuls="elf32ltsmipn32 elf32btsmip elf32ltsmip elf64btsmip elf64ltsmip" ++mips64*-*-linux-*) targ_emul=elf64btsmip ++ targ_extra_emuls="elf32btsmipn32 elf32ltsmipn32 elf32btsmip elf32ltsmip elf64ltsmip" + targ_extra_libpath=$targ_extra_emuls ;; + mips*el-*-linux-*) targ_emul=elf32ltsmip + targ_extra_emuls="elf32btsmip elf32ltsmipn32 elf64ltsmip elf32btsmipn32 elf64btsmip" +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0013-Add-support-for-Netlogic-XLP.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0013-Add-support-for-Netlogic-XLP.patch new file mode 100644 index 000000000..b03e04608 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0013-Add-support-for-Netlogic-XLP.patch @@ -0,0 +1,399 @@ +From fc6fa6a6e6e9e6e5ad7080785af31b4ea68f60c4 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Sun, 14 Feb 2016 17:06:19 +0000 +Subject: [PATCH 13/15] Add support for Netlogic XLP + +Patch From: Nebu Philips + +Using the mipsisa64r2nlm target, add support for XLP from +Netlogic. Also, update vendor name to NLM wherever applicable. 
+ +Use 0x00000080 for INSN_XLP, the value 0x00000040 has already been +assigned to INSN_OCTEON3 + +Signed-off-by: Khem Raj +Signed-off-by: Baoshan Pang +Signed-off-by: Mark Hatle +--- +Upstream-Status: Pending + + bfd/aoutx.h | 1 + + bfd/archures.c | 1 + + bfd/bfd-in2.h | 1 + + bfd/config.bfd | 5 +++++ + bfd/cpu-mips.c | 6 ++++-- + bfd/elfxx-mips.c | 8 ++++++++ + binutils/readelf.c | 1 + + gas/config/tc-mips.c | 4 +++- + gas/configure | 3 +++ + include/elf/mips.h | 1 + + include/opcode/mips.h | 10 ++++++++-- + ld/configure.tgt | 2 ++ + opcodes/mips-dis.c | 12 +++++------- + opcodes/mips-opc.c | 33 +++++++++++++++++++++------------ + 14 files changed, 64 insertions(+), 24 deletions(-) + +diff --git a/bfd/aoutx.h b/bfd/aoutx.h +index d30e8b8fbc..913b499744 100644 +--- a/bfd/aoutx.h ++++ b/bfd/aoutx.h +@@ -812,6 +812,7 @@ NAME (aout, machine_type) (enum bfd_architecture arch, + case bfd_mach_mipsisa64r6: + case bfd_mach_mips_sb1: + case bfd_mach_mips_xlr: ++ case bfd_mach_mips_xlp: + /* FIXME: These should be MIPS3, MIPS4, MIPS16, MIPS32, etc. */ + arch_flags = M_MIPS2; + break; +diff --git a/bfd/archures.c b/bfd/archures.c +index 6f35a5b2a7..d12cdf609a 100644 +--- a/bfd/archures.c ++++ b/bfd/archures.c +@@ -197,6 +197,7 @@ DESCRIPTION + .#define bfd_mach_mips_octeon2 6502 + .#define bfd_mach_mips_octeon3 6503 + .#define bfd_mach_mips_xlr 887682 {* decimal 'XLR' *} ++.#define bfd_mach_mips_xlp 887680 {* decimal 'XLP' *} + .#define bfd_mach_mipsisa32 32 + .#define bfd_mach_mipsisa32r2 33 + .#define bfd_mach_mipsisa32r3 34 +diff --git a/bfd/bfd-in2.h b/bfd/bfd-in2.h +index 6288c3bb4a..e9f9859a7b 100644 +--- a/bfd/bfd-in2.h ++++ b/bfd/bfd-in2.h +@@ -2041,6 +2041,7 @@ enum bfd_architecture + #define bfd_mach_mips_octeon2 6502 + #define bfd_mach_mips_octeon3 6503 + #define bfd_mach_mips_xlr 887682 /* decimal 'XLR' */ ++#define bfd_mach_mips_xlp 887680 /* decimal 'XLP' */ + #define bfd_mach_mipsisa32 32 + #define bfd_mach_mipsisa32r2 33 + #define bfd_mach_mipsisa32r3 34 +diff --git a/bfd/config.bfd b/bfd/config.bfd +index 63596c2ebc..6e923fb0ed 100644 +--- a/bfd/config.bfd ++++ b/bfd/config.bfd +@@ -1166,6 +1166,11 @@ case "${targ}" in + targ_defvec=mips_elf32_le_vec + targ_selvecs="mips_elf32_be_vec mips_elf64_be_vec mips_elf64_le_vec" + ;; ++ mipsisa64*-*-elf*) ++ targ_defvec=mips_elf32_trad_be_vec ++ targ_selvecs="mips_elf32_trad_le_vec mips_elf64_trad_be_vec mips_elf64_trad_le_vec" ++ want64=true ++ ;; + mips*-*-elf* | mips*-*-rtems* | mips*-*-windiss | mips*-*-none) + targ_defvec=mips_elf32_be_vec + targ_selvecs="mips_elf32_le_vec mips_elf64_be_vec mips_elf64_le_vec" +diff --git a/bfd/cpu-mips.c b/bfd/cpu-mips.c +index b9ecdd6e55..df1bffc25b 100644 +--- a/bfd/cpu-mips.c ++++ b/bfd/cpu-mips.c +@@ -104,7 +104,8 @@ enum + I_mipsocteon2, + I_mipsocteon3, + I_xlr, +- I_micromips ++ I_micromips, ++ I_xlp + }; + + #define NN(index) (&arch_info_struct[(index) + 1]) +@@ -155,7 +156,8 @@ static const bfd_arch_info_type arch_info_struct[] = + N (64, 64, bfd_mach_mips_octeon2,"mips:octeon2", FALSE, NN(I_mipsocteon2)), + N (64, 64, bfd_mach_mips_octeon3, "mips:octeon3", FALSE, NN(I_mipsocteon3)), + N (64, 64, bfd_mach_mips_xlr, "mips:xlr", FALSE, NN(I_xlr)), +- N (64, 64, bfd_mach_mips_micromips,"mips:micromips",FALSE,0) ++ N (64, 64, bfd_mach_mips_micromips,"mips:micromips",FALSE,NN(I_micromips)), ++ N (64, 64, bfd_mach_mips_xlp, "mips:xlp", FALSE, 0) + }; + + /* The default architecture is mips:3000, but with a machine number of +diff --git a/bfd/elfxx-mips.c b/bfd/elfxx-mips.c +index 
723853f821..7b464211c3 100644 +--- a/bfd/elfxx-mips.c ++++ b/bfd/elfxx-mips.c +@@ -6787,6 +6787,9 @@ _bfd_elf_mips_mach (flagword flags) + case E_MIPS_MACH_XLR: + return bfd_mach_mips_xlr; + ++ case E_MIPS_MACH_XLP: ++ return bfd_mach_mips_xlp; ++ + default: + switch (flags & EF_MIPS_ARCH) + { +@@ -12106,6 +12109,10 @@ mips_set_isa_flags (bfd *abfd) + val = E_MIPS_ARCH_64R2 | E_MIPS_MACH_OCTEON2; + break; + ++ case bfd_mach_mips_xlp: ++ val = E_MIPS_ARCH_64R2 | E_MIPS_MACH_XLP; ++ break; ++ + case bfd_mach_mipsisa32: + val = E_MIPS_ARCH_32; + break; +@@ -14135,6 +14142,7 @@ static const struct mips_mach_extension mips_mach_extensions[] = + { bfd_mach_mips_octeonp, bfd_mach_mips_octeon }, + { bfd_mach_mips_octeon, bfd_mach_mipsisa64r2 }, + { bfd_mach_mips_loongson_3a, bfd_mach_mipsisa64r2 }, ++ { bfd_mach_mips_xlp, bfd_mach_mipsisa64r2 }, + + /* MIPS64 extensions. */ + { bfd_mach_mipsisa64r2, bfd_mach_mipsisa64 }, +diff --git a/binutils/readelf.c b/binutils/readelf.c +index 8dca490226..b5f577f5a1 100644 +--- a/binutils/readelf.c ++++ b/binutils/readelf.c +@@ -3261,6 +3261,7 @@ get_machine_flags (unsigned e_flags, unsigned e_machine) + case E_MIPS_MACH_OCTEON2: strcat (buf, ", octeon2"); break; + case E_MIPS_MACH_OCTEON3: strcat (buf, ", octeon3"); break; + case E_MIPS_MACH_XLR: strcat (buf, ", xlr"); break; ++ case E_MIPS_MACH_XLP: strcat (buf, ", xlp"); break; + case 0: + /* We simply ignore the field in this case to avoid confusion: + MIPS ELF does not specify EF_MIPS_MACH, it is a GNU +diff --git a/gas/config/tc-mips.c b/gas/config/tc-mips.c +index e24e84df54..baf84e419d 100644 +--- a/gas/config/tc-mips.c ++++ b/gas/config/tc-mips.c +@@ -552,6 +552,7 @@ static int mips_32bitmode = 0; + || mips_opts.arch == CPU_RM7000 \ + || mips_opts.arch == CPU_VR5500 \ + || mips_opts.micromips \ ++ || mips_opts.arch == CPU_XLP \ + ) + + /* Whether the processor uses hardware interlocks to protect reads +@@ -581,6 +582,7 @@ static int mips_32bitmode = 0; + && mips_opts.isa != ISA_MIPS3) \ + || mips_opts.arch == CPU_R4300 \ + || mips_opts.micromips \ ++ || mips_opts.arch == CPU_XLP \ + ) + + /* Whether the processor uses hardware interlocks to protect reads +@@ -19409,7 +19411,7 @@ static const struct mips_cpu_info mips_cpu_info_table[] = + /* Broadcom XLP. + XLP is mostly like XLR, with the prominent exception that it is + MIPS64R2 rather than MIPS64. 
*/ +- { "xlp", 0, 0, ISA_MIPS64R2, CPU_XLR }, ++ { "xlp", 0, 0, ISA_MIPS64R2, CPU_XLP }, + + /* MIPS 64 Release 6 */ + { "i6400", 0, ASE_MSA, ISA_MIPS64R6, CPU_MIPS64R6}, +diff --git a/gas/configure b/gas/configure +index a36f1ae161..99f0a94e20 100755 +--- a/gas/configure ++++ b/gas/configure +@@ -12989,6 +12989,9 @@ _ACEOF + mipsisa64r6 | mipsisa64r6el) + mips_cpu=mips64r6 + ;; ++ mipsisa64r2nlm | mipsisa64r2nlmel) ++ mips_cpu=xlp ++ ;; + mipstx39 | mipstx39el) + mips_cpu=r3900 + ;; +diff --git a/include/elf/mips.h b/include/elf/mips.h +index 3e27b05122..81ea78a817 100644 +--- a/include/elf/mips.h ++++ b/include/elf/mips.h +@@ -290,6 +290,7 @@ END_RELOC_NUMBERS (R_MIPS_maxext) + #define E_MIPS_MACH_SB1 0x008a0000 + #define E_MIPS_MACH_OCTEON 0x008b0000 + #define E_MIPS_MACH_XLR 0x008c0000 ++#define E_MIPS_MACH_XLP 0x008f0000 + #define E_MIPS_MACH_OCTEON2 0x008d0000 + #define E_MIPS_MACH_OCTEON3 0x008e0000 + #define E_MIPS_MACH_5400 0x00910000 +diff --git a/include/opcode/mips.h b/include/opcode/mips.h +index 0d043d9520..450e9c2d67 100644 +--- a/include/opcode/mips.h ++++ b/include/opcode/mips.h +@@ -1244,8 +1244,10 @@ static const unsigned int mips_isa_table[] = { + #define INSN_LOONGSON_2F 0x80000000 + /* Loongson 3A. */ + #define INSN_LOONGSON_3A 0x00000400 +-/* RMI Xlr instruction */ +-#define INSN_XLR 0x00000020 ++/* Netlogic Xlr instruction */ ++#define INSN_XLR 0x00000020 ++/* Netlogic XlP instruction */ ++#define INSN_XLP 0x00000080 + + /* DSP ASE */ + #define ASE_DSP 0x00000001 +@@ -1344,6 +1346,7 @@ static const unsigned int mips_isa_table[] = { + #define CPU_OCTEON2 6502 + #define CPU_OCTEON3 6503 + #define CPU_XLR 887682 /* decimal 'XLR' */ ++#define CPU_XLP 887680 /* decimal 'XLP' */ + + /* Return true if the given CPU is included in INSN_* mask MASK. */ + +@@ -1421,6 +1424,9 @@ cpu_is_member (int cpu, unsigned int mask) + return ((mask & INSN_ISA_MASK) == INSN_ISA32R6) + || ((mask & INSN_ISA_MASK) == INSN_ISA64R6); + ++ case CPU_XLP: ++ return (mask & INSN_XLP) != 0; ++ + default: + return FALSE; + } +diff --git a/ld/configure.tgt b/ld/configure.tgt +index 4e77383a19..8a81f7ac39 100644 +--- a/ld/configure.tgt ++++ b/ld/configure.tgt +@@ -504,6 +504,8 @@ mips*el-sde-elf* | mips*el-mti-elf* | mips*el-img-elf*) + mips*-sde-elf* | mips*-mti-elf* | mips*-img-elf*) + targ_emul=elf32btsmip + targ_extra_emuls="elf32ltsmip elf32btsmipn32 elf64btsmip elf32ltsmipn32 elf64ltsmip" ;; ++mipsisa64*-*-elf*) targ_emul=elf32btsmip ++ targ_extra_emuls="elf32ltsmip elf64btsmip elf64ltsmip" ;; + mips64*el-ps2-elf*) targ_emul=elf32lr5900n32 + targ_extra_emuls="elf32lr5900" + targ_extra_libpath=$targ_extra_emuls ;; +diff --git a/opcodes/mips-dis.c b/opcodes/mips-dis.c +index bb9912e462..70ecc51717 100644 +--- a/opcodes/mips-dis.c ++++ b/opcodes/mips-dis.c +@@ -648,13 +648,11 @@ const struct mips_arch_choice mips_arch_choices[] = + mips_cp0sel_names_xlr, ARRAY_SIZE (mips_cp0sel_names_xlr), + mips_cp1_names_mips3264, mips_hwr_names_numeric }, + +- /* XLP is mostly like XLR, with the prominent exception it is being +- MIPS64R2. 
*/ +- { "xlp", 1, bfd_mach_mips_xlr, CPU_XLR, +- ISA_MIPS64R2 | INSN_XLR, 0, +- mips_cp0_names_xlr, +- mips_cp0sel_names_xlr, ARRAY_SIZE (mips_cp0sel_names_xlr), +- mips_cp1_names_mips3264, mips_hwr_names_numeric }, ++ { "xlp", 1, bfd_mach_mips_xlp, CPU_XLP, ++ ISA_MIPS64R2 | INSN_XLP, 0, ++ mips_cp0_names_mips3264r2, ++ mips_cp0sel_names_mips3264r2, ARRAY_SIZE (mips_cp0sel_names_mips3264r2), ++ mips_cp1_names_mips3264, mips_hwr_names_mips3264r2 }, + + /* This entry, mips16, is here only for ISA/processor selection; do + not print its name. */ +diff --git a/opcodes/mips-opc.c b/opcodes/mips-opc.c +index 5cb8e7365f..f2074856a2 100644 +--- a/opcodes/mips-opc.c ++++ b/opcodes/mips-opc.c +@@ -320,7 +320,8 @@ decode_mips_operand (const char *p) + #define IOCTP (INSN_OCTEONP | INSN_OCTEON2 | INSN_OCTEON3) + #define IOCT2 (INSN_OCTEON2 | INSN_OCTEON3) + #define IOCT3 INSN_OCTEON3 +-#define XLR INSN_XLR ++#define XLR INSN_XLR ++#define XLP INSN_XLP + #define IVIRT ASE_VIRT + #define IVIRT64 ASE_VIRT64 + +@@ -958,6 +959,7 @@ const struct mips_opcode mips_builtin_opcodes[] = + {"clo", "U,s", 0x70000021, 0xfc0007ff, WR_1|RD_2, 0, I32|N55, 0, I37 }, + {"clz", "d,s", 0x00000050, 0xfc1f07ff, WR_1|RD_2, 0, I37, 0, 0 }, + {"clz", "U,s", 0x70000020, 0xfc0007ff, WR_1|RD_2, 0, I32|N55, 0, I37 }, ++{"crc", "d,s,t", 0x7000001c, 0xfc0007ff, WR_1|RD_2|RD_3, 0, XLP, 0, 0 }, + /* ctc0 is at the bottom of the table. */ + {"ctc1", "t,G", 0x44c00000, 0xffe007ff, RD_1|WR_CC|CM, 0, I1, 0, 0 }, + {"ctc1", "t,S", 0x44c00000, 0xffe007ff, RD_1|WR_CC|CM, 0, I1, 0, 0 }, +@@ -990,12 +992,13 @@ const struct mips_opcode mips_builtin_opcodes[] = + {"daddiu", "t,r,j", 0x64000000, 0xfc000000, WR_1|RD_2, 0, I3, 0, 0 }, + {"daddu", "d,v,t", 0x0000002d, 0xfc0007ff, WR_1|RD_2|RD_3, 0, I3, 0, 0 }, + {"daddu", "t,r,I", 0, (int) M_DADDU_I, INSN_MACRO, 0, I3, 0, 0 }, +-{"daddwc", "d,s,t", 0x70000038, 0xfc0007ff, WR_1|RD_2|RD_3|WR_C0|RD_C0, 0, XLR, 0, 0 }, ++{"daddwc", "d,s,t", 0x70000038, 0xfc0007ff, WR_1|RD_2|RD_3|WR_C0|RD_C0, 0, XLR|XLP, 0, 0 }, + {"dbreak", "", 0x7000003f, 0xffffffff, 0, 0, N5, 0, 0 }, + {"dclo", "d,s", 0x00000053, 0xfc1f07ff, WR_1|RD_2, 0, I69, 0, 0 }, + {"dclo", "U,s", 0x70000025, 0xfc0007ff, WR_1|RD_2, 0, I64|N55, 0, I69 }, + {"dclz", "d,s", 0x00000052, 0xfc1f07ff, WR_1|RD_2, 0, I69, 0, 0 }, + {"dclz", "U,s", 0x70000024, 0xfc0007ff, WR_1|RD_2, 0, I64|N55, 0, I69 }, ++{"dcrc", "d,s,t", 0x7000001d, 0xfc0007ff, WR_1|RD_2|RD_3, 0, XLP, 0, 0 }, + /* dctr and dctw are used on the r5000. */ + {"dctr", "o(b)", 0xbc050000, 0xfc1f0000, RD_2, 0, I3, 0, 0 }, + {"dctw", "o(b)", 0xbc090000, 0xfc1f0000, RD_2, 0, I3, 0, 0 }, +@@ -1067,6 +1070,7 @@ const struct mips_opcode mips_builtin_opcodes[] = + {"dmfc0", "t,G,H", 0x40200000, 0xffe007f8, WR_1|RD_C0|LC, 0, I64, 0, 0 }, + {"dmfgc0", "t,G", 0x40600100, 0xffe007ff, WR_1|RD_C0|LC, 0, 0, IVIRT64, 0 }, + {"dmfgc0", "t,G,H", 0x40600100, 0xffe007f8, WR_1|RD_C0|LC, 0, 0, IVIRT64, 0 }, ++{"dmfur", "t,d", 0x7000001e, 0xffe007ff, WR_1, 0, XLP, 0, 0 }, + {"dmt", "", 0x41600bc1, 0xffffffff, TRAP, 0, 0, MT32, 0 }, + {"dmt", "t", 0x41600bc1, 0xffe0ffff, WR_1|TRAP, 0, 0, MT32, 0 }, + {"dmtc0", "t,G", 0x40a00000, 0xffe007ff, RD_1|WR_C0|WR_CC|CM, 0, I3, 0, EE }, +@@ -1082,6 +1086,8 @@ const struct mips_opcode mips_builtin_opcodes[] = + /* dmfc3 is at the bottom of the table. */ + /* dmtc3 is at the bottom of the table. 
*/ + {"dmuh", "d,s,t", 0x000000dc, 0xfc0007ff, WR_1|RD_2|RD_3, 0, I69, 0, 0 }, ++{"dmtur", "t,d", 0x7000001f, 0xffe007ff, RD_1, 0, XLP, 0, 0 }, ++{"dmul", "d,s,t", 0x70000006, 0xfc0007ff, WR_1|RD_2|RD_3, 0, XLP, 0, 0 }, + {"dmul", "d,s,t", 0x0000009c, 0xfc0007ff, WR_1|RD_2|RD_3, 0, I69, 0, 0 }, + {"dmul", "d,v,t", 0x70000003, 0xfc0007ff, WR_1|RD_2|RD_3|WR_HILO, 0, IOCT, 0, 0 }, + {"dmul", "d,v,t", 0, (int) M_DMUL, INSN_MACRO, 0, I3, 0, M32|I69 }, +@@ -1235,9 +1241,9 @@ const struct mips_opcode mips_builtin_opcodes[] = + {"ld", "s,-b(+R)", 0xec180000, 0xfc1c0000, WR_1, RD_pc, I69, 0, 0 }, + {"ld", "t,A(b)", 0, (int) M_LD_AB, INSN_MACRO, 0, I1, 0, 0 }, + {"ld", "t,o(b)", 0xdc000000, 0xfc000000, WR_1|RD_3|LM, 0, I3, 0, 0 }, +-{"ldaddw", "t,b", 0x70000010, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, +-{"ldaddwu", "t,b", 0x70000011, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, +-{"ldaddd", "t,b", 0x70000012, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, ++{"ldaddw", "t,b", 0x70000010, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, ++{"ldaddwu", "t,b", 0x70000011, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, ++{"ldaddd", "t,b", 0x70000012, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, + {"ldc1", "T,o(b)", 0xd4000000, 0xfc000000, WR_1|RD_3|CLD|FP_D, 0, I2, 0, SF }, + {"ldc1", "E,o(b)", 0xd4000000, 0xfc000000, WR_1|RD_3|CLD|FP_D, 0, I2, 0, SF }, + {"ldc1", "T,A(b)", 0, (int) M_LDC1_AB, INSN_MACRO, INSN2_M_FP_D, I2, 0, SF }, +@@ -1402,7 +1408,7 @@ const struct mips_opcode mips_builtin_opcodes[] = + {"mflo", "d,9", 0x00000012, 0xff9f07ff, WR_1|RD_LO, 0, 0, D32, 0 }, + {"mflo1", "d", 0x70000012, 0xffff07ff, WR_1|RD_LO, 0, EE, 0, 0 }, + {"mflhxu", "d", 0x00000052, 0xffff07ff, WR_1|MOD_HILO, 0, 0, SMT, 0 }, +-{"mfcr", "t,s", 0x70000018, 0xfc00ffff, WR_1|RD_2, 0, XLR, 0, 0 }, ++{"mfcr", "t,s", 0x70000018, 0xfc00ffff, WR_1, 0, XLR|XLP, 0, 0 }, + {"mfsa", "d", 0x00000028, 0xffff07ff, WR_1, 0, EE, 0, 0 }, + {"min.ob", "X,Y,Q", 0x78000006, 0xfc20003f, WR_1|RD_2|RD_3|FP_D, 0, SB1, MX, 0 }, + {"min.ob", "D,S,Q", 0x48000006, 0xfc20003f, WR_1|RD_2|RD_3|FP_D, 0, N54, 0, 0 }, +@@ -1447,10 +1453,13 @@ const struct mips_opcode mips_builtin_opcodes[] = + /* move is at the top of the table. 
*/ + {"msgn.qh", "X,Y,Q", 0x78200000, 0xfc20003f, WR_1|RD_2|RD_3|FP_D, 0, 0, MX, 0 }, + {"msgsnd", "t", 0, (int) M_MSGSND, INSN_MACRO, 0, XLR, 0, 0 }, ++{"msgsnds", "d,t", 0x4a000001, 0xffe007ff, WR_1|RD_2|RD_C0|WR_C0, 0, XLP, 0, 0 }, + {"msgld", "", 0, (int) M_MSGLD, INSN_MACRO, 0, XLR, 0, 0 }, + {"msgld", "t", 0, (int) M_MSGLD_T, INSN_MACRO, 0, XLR, 0, 0 }, +-{"msgwait", "", 0, (int) M_MSGWAIT, INSN_MACRO, 0, XLR, 0, 0 }, +-{"msgwait", "t", 0, (int) M_MSGWAIT_T,INSN_MACRO, 0, XLR, 0, 0 }, ++{"msglds", "d,t", 0x4a000002, 0xffe007ff, WR_1|RD_2|RD_C0|WR_C0, 0, XLP, 0, 0 }, ++{"msgwait", "", 0, (int) M_MSGWAIT, INSN_MACRO, 0, XLR|XLP, 0, 0 }, ++{"msgwait", "t", 0, (int) M_MSGWAIT_T,INSN_MACRO, 0, XLR|XLP, 0, 0 }, ++{"msgsync", "", 0x4a000004, 0xffffffff,0, 0, XLP, 0, 0 }, + {"msub.d", "D,R,S,T", 0x4c000029, 0xfc00003f, WR_1|RD_2|RD_3|RD_4|FP_D, 0, I4_33, 0, I37 }, + {"msub.d", "D,S,T", 0x46200019, 0xffe0003f, WR_1|RD_2|RD_3|FP_D, 0, IL2E, 0, 0 }, + {"msub.d", "D,S,T", 0x72200019, 0xffe0003f, WR_1|RD_2|RD_3|FP_D, 0, IL2F, 0, 0 }, +@@ -1500,7 +1509,7 @@ const struct mips_opcode mips_builtin_opcodes[] = + {"mtlo", "s,7", 0x00000013, 0xfc1fe7ff, RD_1|WR_LO, 0, 0, D32, 0 }, + {"mtlo1", "s", 0x70000013, 0xfc1fffff, RD_1|WR_LO, 0, EE, 0, 0 }, + {"mtlhx", "s", 0x00000053, 0xfc1fffff, RD_1|MOD_HILO, 0, 0, SMT, 0 }, +-{"mtcr", "t,s", 0x70000019, 0xfc00ffff, RD_1|RD_2, 0, XLR, 0, 0 }, ++{"mtcr", "t,s", 0x70000019, 0xfc00ffff, RD_1, 0, XLR|XLP, 0, 0 }, + {"mtm0", "s", 0x70000008, 0xfc1fffff, RD_1, 0, IOCT, 0, 0 }, + {"mtm0", "s,t", 0x70000008, 0xfc00ffff, RD_1|RD_2, 0, IOCT3, 0, 0 }, + {"mtm1", "s", 0x7000000c, 0xfc1fffff, RD_1, 0, IOCT, 0, 0 }, +@@ -1937,9 +1946,9 @@ const struct mips_opcode mips_builtin_opcodes[] = + {"suxc1", "S,t(b)", 0x4c00000d, 0xfc0007ff, RD_1|RD_2|RD_3|SM|FP_D, 0, I5_33|N55, 0, I37}, + {"sw", "t,o(b)", 0xac000000, 0xfc000000, RD_1|RD_3|SM, 0, I1, 0, 0 }, + {"sw", "t,A(b)", 0, (int) M_SW_AB, INSN_MACRO, 0, I1, 0, 0 }, +-{"swapw", "t,b", 0x70000014, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, +-{"swapwu", "t,b", 0x70000015, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, +-{"swapd", "t,b", 0x70000016, 0xfc00ffff, MOD_1|RD_2|LM|SM, 0, XLR, 0, 0 }, ++{"swapw", "t,b", 0x70000014, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, ++{"swapwu", "t,b", 0x70000015, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, ++{"swapd", "t,b", 0x70000016, 0xfc00ffff, MOD_1|RD_2|SM, 0, XLR|XLP, 0, 0 }, + {"swc0", "E,o(b)", 0xe0000000, 0xfc000000, RD_3|RD_C0|SM, 0, I1, 0, IOCT|IOCTP|IOCT2|I37 }, + {"swc0", "E,A(b)", 0, (int) M_SWC0_AB, INSN_MACRO, 0, I1, 0, IOCT|IOCTP|IOCT2|I37 }, + {"swc1", "T,o(b)", 0xe4000000, 0xfc000000, RD_1|RD_3|SM|FP_S, 0, I1, 0, 0 }, +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0013-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0013-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch deleted file mode 100644 index f314d6420..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0013-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch +++ /dev/null @@ -1,36 +0,0 @@ -From 0c04c3b626998c8e7b595381b35456428581f52a Mon Sep 17 00:00:00 2001 -From: Zhenhua Luo -Date: Sat, 11 Jun 2016 22:08:29 -0500 -Subject: [PATCH 13/13] fix the incorrect assembling for ppc wait mnemonic - -Signed-off-by: Zhenhua Luo - -Upstream-Status: Pending ---- - opcodes/ppc-opc.c | 3 +-- - 1 file changed, 1 insertion(+), 2 deletions(-) - -diff --git 
a/opcodes/ppc-opc.c b/opcodes/ppc-opc.c -index 8106ab7..ca4b0f3 100644 ---- a/opcodes/ppc-opc.c -+++ b/opcodes/ppc-opc.c -@@ -4811,7 +4811,6 @@ const struct powerpc_opcode powerpc_opcodes[] = { - {"ldepx", X(31,29), X_MASK, E500MC|PPCA2, 0, {RT, RA0, RB}}, - - {"waitasec", X(31,30), XRTRARB_MASK, POWER8, POWER9, {0}}, --{"wait", X(31,30), XWC_MASK, POWER9, 0, {WC}}, - - {"lwepx", X(31,31), X_MASK, E500MC|PPCA2, 0, {RT, RA0, RB}}, - -@@ -4865,7 +4864,7 @@ const struct powerpc_opcode powerpc_opcodes[] = { - - {"waitrsv", X(31,62)|(1<<21), 0xffffffff, E500MC|PPCA2, 0, {0}}, - {"waitimpl", X(31,62)|(2<<21), 0xffffffff, E500MC|PPCA2, 0, {0}}, --{"wait", X(31,62), XWC_MASK, E500MC|PPCA2, 0, {WC}}, -+{"wait", X(31,62), XWC_MASK, E500MC|PPCA2|POWER9, 0, {WC}}, - - {"dcbstep", XRT(31,63,0), XRT_MASK, E500MC|PPCA2, 0, {RA0, RB}}, - --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0014-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0014-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch new file mode 100644 index 000000000..bb95a0cb0 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0014-fix-the-incorrect-assembling-for-ppc-wait-mnemonic.patch @@ -0,0 +1,36 @@ +From a9177150f808d7e6285e1011c85d0ce89037b553 Mon Sep 17 00:00:00 2001 +From: Zhenhua Luo +Date: Sat, 11 Jun 2016 22:08:29 -0500 +Subject: [PATCH 14/15] fix the incorrect assembling for ppc wait mnemonic + +Signed-off-by: Zhenhua Luo + +Upstream-Status: Pending +--- + opcodes/ppc-opc.c | 3 +-- + 1 file changed, 1 insertion(+), 2 deletions(-) + +diff --git a/opcodes/ppc-opc.c b/opcodes/ppc-opc.c +index 30fd789182..f2708e2276 100644 +--- a/opcodes/ppc-opc.c ++++ b/opcodes/ppc-opc.c +@@ -4876,7 +4876,6 @@ const struct powerpc_opcode powerpc_opcodes[] = { + {"ldepx", X(31,29), X_MASK, E500MC|PPCA2, 0, {RT, RA0, RB}}, + + {"waitasec", X(31,30), XRTRARB_MASK, POWER8, POWER9, {0}}, +-{"wait", X(31,30), XWC_MASK, POWER9, 0, {WC}}, + + {"lwepx", X(31,31), X_MASK, E500MC|PPCA2, 0, {RT, RA0, RB}}, + +@@ -4930,7 +4929,7 @@ const struct powerpc_opcode powerpc_opcodes[] = { + + {"waitrsv", X(31,62)|(1<<21), 0xffffffff, E500MC|PPCA2, 0, {0}}, + {"waitimpl", X(31,62)|(2<<21), 0xffffffff, E500MC|PPCA2, 0, {0}}, +-{"wait", X(31,62), XWC_MASK, E500MC|PPCA2, 0, {WC}}, ++{"wait", X(31,62), XWC_MASK, E500MC|PPCA2|POWER9, 0, {WC}}, + + {"dcbstep", XRT(31,63,0), XRT_MASK, E500MC|PPCA2, 0, {RA0, RB}}, + +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0014-libtool-remove-rpath.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0014-libtool-remove-rpath.patch deleted file mode 100644 index d56948f15..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0014-libtool-remove-rpath.patch +++ /dev/null @@ -1,100 +0,0 @@ -Apply these patches from our libtool patches as not only are redundant RPATHs a -waste of space but they can cause incorrect linking when native packages are -restored from sstate. - -fix-rpath.patch: -We don't want to add RPATHS which match default linker -search paths, they're a waste of space. This patch -filters libtools list and removes the ones we don't need. - -norm-rpath.patch: -Libtool may be passed link paths of the form "/usr/lib/../lib", which -fool its detection code into thinking it should be included as an -RPATH in the generated binary. Normalize before comparision. 
- -Upstream-Status: Inappropriate -Signed-off-by: Ross Burton - -diff --git a/ltmain.sh b/ltmain.sh -index 683317c..860a16a 100644 ---- a/ltmain.sh -+++ b/ltmain.sh -@@ -8053,8 +8053,14 @@ EOF - esac - fi - else -- eval flag=\"$hardcode_libdir_flag_spec\" -- func_append dep_rpath " $flag" -+ # We only want to hardcode in an rpath if it isn't in the -+ # default dlsearch path. -+ case " $sys_lib_dlsearch_path " in -+ *" $libdir "*) ;; -+ *) eval flag=\"$hardcode_libdir_flag_spec\" -+ func_append dep_rpath " $flag" -+ ;; -+ esac - fi - elif test -n "$runpath_var"; then - case "$perm_rpath " in -@@ -8790,8 +8796,14 @@ EOF - esac - fi - else -- eval flag=\"$hardcode_libdir_flag_spec\" -- func_append rpath " $flag" -+ # We only want to hardcode in an rpath if it isn't in the -+ # default dlsearch path. -+ case " $sys_lib_dlsearch_path " in -+ *" $libdir "*) ;; -+ *) eval flag=\"$hardcode_libdir_flag_spec\" -+ rpath+=" $flag" -+ ;; -+ esac - fi - elif test -n "$runpath_var"; then - case "$perm_rpath " in -@@ -8841,8 +8853,14 @@ EOF - esac - fi - else -- eval flag=\"$hardcode_libdir_flag_spec\" -- func_append rpath " $flag" -+ # We only want to hardcode in an rpath if it isn't in the -+ # default dlsearch path. -+ case " $sys_lib_dlsearch_path " in -+ *" $libdir "*) ;; -+ *) eval flag=\"$hardcode_libdir_flag_spec\" -+ func_append rpath " $flag" -+ ;; -+ esac - fi - elif test -n "$runpath_var"; then - case "$finalize_perm_rpath " in -diff --git a/ltmain.sh b/ltmain.sh -index 683317c..860a16a 100644 ---- a/ltmain.sh -+++ b/ltmain.sh -@@ -8055,8 +8055,10 @@ EOF - else - # We only want to hardcode in an rpath if it isn't in the - # default dlsearch path. -+ func_normal_abspath "$libdir" -+ libdir_norm=$func_normal_abspath_result - case " $sys_lib_dlsearch_path " in -- *" $libdir "*) ;; -+ *" $libdir_norm "*) ;; - *) eval flag=\"$hardcode_libdir_flag_spec\" - func_append dep_rpath " $flag" - ;; -@@ -8798,8 +8800,10 @@ EOF - else - # We only want to hardcode in an rpath if it isn't in the - # default dlsearch path. -+ func_normal_abspath "$libdir" -+ libdir_norm=$func_normal_abspath_result - case " $sys_lib_dlsearch_path " in -- *" $libdir "*) ;; -+ *" $libdir_norm "*) ;; - *) eval flag=\"$hardcode_libdir_flag_spec\" - rpath+=" $flag" - ;; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0015-Refine-.cfi_sections-check-to-only-consider-compact-.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0015-Refine-.cfi_sections-check-to-only-consider-compact-.patch deleted file mode 100644 index f3e3a11b1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0015-Refine-.cfi_sections-check-to-only-consider-compact-.patch +++ /dev/null @@ -1,97 +0,0 @@ -From 3d3424e9a8d6ad56160b98bf6e223c0346164468 Mon Sep 17 00:00:00 2001 -From: Matthew Fortune -Date: Thu, 29 Sep 2016 11:13:46 +0100 -Subject: [PATCH] Refine .cfi_sections check to only consider compact eh_frame - -The .cfi_sections directive can be safely used multiple times -with different sections named at any time unless the compact form -of exception handling is requested after CFI information has -been emitted. Only the compact form of CFI information changes -the way in which CFI is generated and therefore cannot be -retrospectively requested after generating CFI information. - -gas/ - - PR gas/20648 - * dw2gencfi.c (dot_cfi_sections): Refine the check for - inconsistent .cfi_sections to only consider compact vs non - compact forms. 
- * testsuite/gas/cfi/cfi-common-9.d: New file. - * testsuite/gas/cfi/cfi-common-9.s: New file. - * testsuite/gas/cfi/cfi.exp: Run new test. ---- -Upstream-Status: Backport -Signed-off-by: Khem Raj - -Index: binutils/gas/config/tc-mips.c -=================================================================== ---- binutils.orig/gas/config/tc-mips.c 2016-09-29 05:12:31.000000000 +0100 -+++ binutils/gas/config/tc-mips.c 2016-09-29 20:05:13.257411084 +0100 -@@ -1353,7 +1353,7 @@ static void s_mips_stab (int); - static void s_mips_weakext (int); - static void s_mips_file (int); - static void s_mips_loc (int); --static bfd_boolean pic_need_relax (symbolS *, asection *); -+static bfd_boolean pic_need_relax (symbolS *); - static int relaxed_branch_length (fragS *, asection *, int); - static int relaxed_micromips_16bit_branch_length (fragS *, asection *, int); - static int relaxed_micromips_32bit_branch_length (fragS *, asection *, int); -@@ -4258,6 +4258,8 @@ mips_move_text_labels (void) - mips_move_labels (seg_info (now_seg)->label_list, TRUE); - } - -+/* Duplicate the test for LINK_ONCE sections as in `adjust_reloc_syms'. */ -+ - static bfd_boolean - s_is_linkonce (symbolS *sym, segT from_seg) - { -@@ -14823,7 +14825,7 @@ mips_frob_file (void) - constants; we'll report an error for those later. */ - if (got16_reloc_p (l->fixp->fx_r_type) - && !(l->fixp->fx_addsy -- && pic_need_relax (l->fixp->fx_addsy, l->seg))) -+ && pic_need_relax (l->fixp->fx_addsy))) - continue; - - /* Check quickly whether the next fixup happens to be a matching %lo. */ -@@ -17043,7 +17045,7 @@ nopic_need_relax (symbolS *sym, int befo - /* Return true if the given symbol should be considered local for SVR4 PIC. */ - - static bfd_boolean --pic_need_relax (symbolS *sym, asection *segtype) -+pic_need_relax (symbolS *sym) - { - asection *symsec; - -@@ -17068,7 +17070,6 @@ pic_need_relax (symbolS *sym, asection * - return (!bfd_is_und_section (symsec) - && !bfd_is_abs_section (symsec) - && !bfd_is_com_section (symsec) -- && !s_is_linkonce (sym, segtype) - /* A global or weak symbol is treated as external. */ - && (!S_IS_WEAK (sym) && !S_IS_EXTERNAL (sym))); - } -@@ -17507,7 +17508,7 @@ md_estimate_size_before_relax (fragS *fr - if (mips_pic == NO_PIC) - change = nopic_need_relax (fragp->fr_symbol, 0); - else if (mips_pic == SVR4_PIC) -- change = pic_need_relax (fragp->fr_symbol, segtype); -+ change = pic_need_relax (fragp->fr_symbol); - else if (mips_pic == VXWORKS_PIC) - /* For vxworks, GOT16 relocations never have a corresponding LO16. */ - change = 0; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0015-sync-with-OE-libtool-changes.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0015-sync-with-OE-libtool-changes.patch new file mode 100644 index 000000000..1559038e5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0015-sync-with-OE-libtool-changes.patch @@ -0,0 +1,89 @@ +From 58cdb28ed71cb57b4a0ea1b412a708fdb0f84c27 Mon Sep 17 00:00:00 2001 +From: Ross Burton +Date: Mon, 6 Mar 2017 23:33:27 -0800 +Subject: [PATCH 15/15] sync with OE libtool changes + +Apply these patches from our libtool patches as not only are redundant RPATHs a +waste of space but they can cause incorrect linking when native packages are +restored from sstate. + +fix-rpath.patch: +We don't want to add RPATHS which match default linker +search paths, they're a waste of space. This patch +filters libtools list and removes the ones we don't need. 
+ +norm-rpath.patch: +Libtool may be passed link paths of the form "/usr/lib/../lib", which +fool its detection code into thinking it should be included as an +RPATH in the generated binary. Normalize before comparision. + +Upstream-Status: Inappropriate + +Signed-off-by: Ross Burton +Signed-off-by: Khem Raj +--- + ltmain.sh | 34 ++++++++++++++++++++++++++++------ + 1 file changed, 28 insertions(+), 6 deletions(-) + +diff --git a/ltmain.sh b/ltmain.sh +index 11ee684ccc..3b19ac1532 100644 +--- a/ltmain.sh ++++ b/ltmain.sh +@@ -8053,8 +8053,16 @@ EOF + esac + fi + else +- eval flag=\"$hardcode_libdir_flag_spec\" +- func_append dep_rpath " $flag" ++ # We only want to hardcode in an rpath if it isn't in the ++ # default dlsearch path. ++ func_normal_abspath "$libdir" ++ libdir_norm=$func_normal_abspath_result ++ case " $sys_lib_dlsearch_path " in ++ *" $libdir_norm "*) ;; ++ *) eval flag=\"$hardcode_libdir_flag_spec\" ++ func_append dep_rpath " $flag" ++ ;; ++ esac + fi + elif test -n "$runpath_var"; then + case "$perm_rpath " in +@@ -8790,8 +8798,16 @@ EOF + esac + fi + else +- eval flag=\"$hardcode_libdir_flag_spec\" +- func_append rpath " $flag" ++ # We only want to hardcode in an rpath if it isn't in the ++ # default dlsearch path. ++ func_normal_abspath "$libdir" ++ libdir_norm=$func_normal_abspath_result ++ case " $sys_lib_dlsearch_path " in ++ *" $libdir_norm "*) ;; ++ *) eval flag=\"$hardcode_libdir_flag_spec\" ++ rpath+=" $flag" ++ ;; ++ esac + fi + elif test -n "$runpath_var"; then + case "$perm_rpath " in +@@ -8841,8 +8857,14 @@ EOF + esac + fi + else +- eval flag=\"$hardcode_libdir_flag_spec\" +- func_append rpath " $flag" ++ # We only want to hardcode in an rpath if it isn't in the ++ # default dlsearch path. ++ case " $sys_lib_dlsearch_path " in ++ *" $libdir "*) ;; ++ *) eval flag=\"$hardcode_libdir_flag_spec\" ++ func_append rpath " $flag" ++ ;; ++ esac + fi + elif test -n "$runpath_var"; then + case "$finalize_perm_rpath " in +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0016-Detect-64-bit-MIPS-targets.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0016-Detect-64-bit-MIPS-targets.patch new file mode 100644 index 000000000..1b2eb8485 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0016-Detect-64-bit-MIPS-targets.patch @@ -0,0 +1,50 @@ +From c3ebde5d8cc3b0092966b4d725cad7cfd074fd8d Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 31 Mar 2017 11:42:03 -0700 +Subject: [PATCH 16/16] Detect 64-bit MIPS targets + +Add mips64 target triplets and default to N64 + +Upstream-Status: Submitted +https://sourceware.org/ml/binutils/2016-08/msg00048.html + +Signed-off-by: Khem Raj +--- + gold/configure.tgt | 14 ++++++++++++++ + 1 file changed, 14 insertions(+) + +diff --git a/gold/configure.tgt b/gold/configure.tgt +index 3d63027297..c1f92a1360 100644 +--- a/gold/configure.tgt ++++ b/gold/configure.tgt +@@ -153,6 +153,13 @@ aarch64*-*) + targ_big_endian=false + targ_extra_big_endian=true + ;; ++mips*64*el*-*-*|mips*64*le*-*-*) ++ targ_obj=mips ++ targ_machine=EM_MIPS_RS3_LE ++ targ_size=64 ++ targ_big_endian=false ++ targ_extra_big_endian=true ++ ;; + mips*el*-*-*|mips*le*-*-*) + targ_obj=mips + targ_machine=EM_MIPS_RS3_LE +@@ -160,6 +167,13 @@ mips*el*-*-*|mips*le*-*-*) + targ_big_endian=false + targ_extra_big_endian=true + ;; ++mips*64*-*-*) ++ targ_obj=mips ++ targ_machine=EM_MIPS ++ targ_size=64 ++ targ_big_endian=true ++ targ_extra_big_endian=false ++ ;; + mips*-*-*) + 
targ_obj=mips + targ_machine=EM_MIPS +-- +2.12.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0016-Fix-seg-fault-in-ARM-linker-when-trying-to-parse-a-b.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0016-Fix-seg-fault-in-ARM-linker-when-trying-to-parse-a-b.patch deleted file mode 100644 index 33bf1e8f6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0016-Fix-seg-fault-in-ARM-linker-when-trying-to-parse-a-b.patch +++ /dev/null @@ -1,31 +0,0 @@ -From 72b09de92cc597c53b1d762882b67a17fe56846c Mon Sep 17 00:00:00 2001 -From: Nick Clifton -Date: Tue, 23 Aug 2016 09:45:11 +0100 -Subject: [PATCH 16/16] Fix seg-fault in ARM linker when trying to parse a - binary file. - - * elf32-arm.c (elf32_arm_count_additional_relocs): Return zero if - there is no arm data associated with the section. ---- -Upstream-Status: Backport -Signed-off-by: Khem Raj - - bfd/elf32-arm.c | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/bfd/elf32-arm.c b/bfd/elf32-arm.c -index 700bec3..3fab609 100644 ---- a/bfd/elf32-arm.c -+++ b/bfd/elf32-arm.c -@@ -18207,7 +18207,7 @@ elf32_arm_count_additional_relocs (asection *sec) - { - struct _arm_elf_section_data *arm_data; - arm_data = get_arm_elf_section_data (sec); -- return arm_data->additional_reloc_count; -+ return arm_data == NULL ? 0 : arm_data->additional_reloc_count; - } - - /* Called to set the sh_flags, sh_link and sh_info fields of OSECTION which --- -2.10.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0017-Fix-the-generation-of-alignment-frags-in-code-sectio.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0017-Fix-the-generation-of-alignment-frags-in-code-sectio.patch deleted file mode 100644 index f8b46be69..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0017-Fix-the-generation-of-alignment-frags-in-code-sectio.patch +++ /dev/null @@ -1,139 +0,0 @@ -From 4a4286465b5d6c28968bc2b29ae08daca7f219a3 Mon Sep 17 00:00:00 2001 -From: Nick Clifton -Date: Fri, 18 Nov 2016 11:42:48 -0800 -Subject: [PATCH] Fix the generation of alignment frags in code sections for AArch64. - -PR gas/20364 -* config/tc-aarch64.c (s_ltorg): Change the mapping state after -aligning the frag. -(aarch64_init): Treat rs_align frags in code sections as -containing code, not data. -* testsuite/gas/aarch64/pr20364.s: New test. -* testsuite/gas/aarch64/pr20364.d: New test driver. - -Backporting the patch from binutils mainline -https://sourceware.org/git/gitweb.cgi?p=binutils-gdb.git;h=7ea12e5c3ad54da440c08f32da09534e63e515ca - -Upstream-Status: Backport - -Signed-off-by: Manjukumar Matha ---- - gas/ChangeLog | 10 ++++++++++ - gas/config/tc-aarch64.c | 10 +++++++--- - gas/testsuite/gas/aarch64/pr20364.d | 13 +++++++++++++ - gas/testsuite/gas/aarch64/pr20364.s | 28 ++++++++++++++++++++++++++++ - 4 files changed, 58 insertions(+), 3 deletions(-) - create mode 100644 gas/testsuite/gas/aarch64/pr20364.d - create mode 100644 gas/testsuite/gas/aarch64/pr20364.s - -diff --git a/gas/ChangeLog b/gas/ChangeLog -index a39895a..fad06dc 100644 ---- a/gas/ChangeLog -+++ b/gas/ChangeLog -@@ -1,3 +1,13 @@ -+2016-08-05 Nick Clifton -+ -+ PR gas/20364 -+ * config/tc-aarch64.c (s_ltorg): Change the mapping state after -+ aligning the frag. -+ (aarch64_init): Treat rs_align frags in code sections as -+ containing code, not data. -+ * testsuite/gas/aarch64/pr20364.s: New test. 
-+ * testsuite/gas/aarch64/pr20364.d: New test driver. -+ - 2016-08-03 Tristan Gingold - - * configure: Regenerate. -diff --git a/gas/config/tc-aarch64.c b/gas/config/tc-aarch64.c -index ddc40f2..74933cb 100644 ---- a/gas/config/tc-aarch64.c -+++ b/gas/config/tc-aarch64.c -@@ -1736,13 +1736,13 @@ s_ltorg (int ignored ATTRIBUTE_UNUSED) - if (pool == NULL || pool->symbol == NULL || pool->next_free_entry == 0) - continue; - -- mapping_state (MAP_DATA); -- - /* Align pool as you have word accesses. - Only make a frag if we have to. */ - if (!need_pass_2) - frag_align (align, 0, 0); - -+ mapping_state (MAP_DATA); -+ - record_alignment (now_seg, align); - - sprintf (sym_name, "$$lit_\002%x", pool->id); -@@ -6373,11 +6373,15 @@ aarch64_init_frag (fragS * fragP, int max_chars) - - switch (fragP->fr_type) - { -- case rs_align: - case rs_align_test: - case rs_fill: - mapping_state_2 (MAP_DATA, max_chars); - break; -+ case rs_align: -+ /* PR 20364: We can get alignment frags in code sections, -+ so do not just assume that we should use the MAP_DATA state. */ -+ mapping_state_2 (subseg_text_p (now_seg) ? MAP_INSN : MAP_DATA, max_chars); -+ break; - case rs_align_code: - mapping_state_2 (MAP_INSN, max_chars); - break; -diff --git a/gas/testsuite/gas/aarch64/pr20364.d b/gas/testsuite/gas/aarch64/pr20364.d -new file mode 100644 -index 0000000..babcff1 ---- /dev/null -+++ b/gas/testsuite/gas/aarch64/pr20364.d -@@ -0,0 +1,13 @@ -+# Check that ".align , " does not set the mapping state to DATA, causing unnecessary frag generation. -+#name: PR20364 -+#objdump: -d -+ -+.*: file format .* -+ -+Disassembly of section \.vectors: -+ -+0+000 <.*>: -+ 0: d2800000 mov x0, #0x0 // #0 -+ 4: 94000000 bl 0 -+ 8: 17fffffe b 0 -+ -diff --git a/gas/testsuite/gas/aarch64/pr20364.s b/gas/testsuite/gas/aarch64/pr20364.s -new file mode 100644 -index 0000000..594ad7c ---- /dev/null -+++ b/gas/testsuite/gas/aarch64/pr20364.s -@@ -0,0 +1,28 @@ -+ .macro vector_base label -+ .section .vectors, "ax" -+ .align 11, 0 -+ \label: -+ .endm -+ -+ .macro vector_entry label -+ .section .vectors, "ax" -+ .align 7, 0 -+ \label: -+ .endm -+ -+ .macro check_vector_size since -+ .if (. - \since) > (32 * 4) -+ .error "Vector exceeds 32 instructions" -+ .endif -+ .endm -+ -+ .globl bl1_exceptions -+ -+vector_base bl1_exceptions -+ -+vector_entry SynchronousExceptionSP0 -+ mov x0, #0x0 -+ bl plat_report_exception -+ b SynchronousExceptionSP0 -+ check_vector_size SynchronousExceptionSP0 -+ --- -2.7.4 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0017-bfd-Improve-lookup-of-file-line-information-for-erro.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0017-bfd-Improve-lookup-of-file-line-information-for-erro.patch new file mode 100644 index 000000000..23ad10ab4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0017-bfd-Improve-lookup-of-file-line-information-for-erro.patch @@ -0,0 +1,75 @@ +From 3239a4231ff79bf8b67b8faaf414b1667486167c Mon Sep 17 00:00:00 2001 +From: Andrew Burgess +Date: Mon, 19 Dec 2016 15:27:59 +0000 +Subject: [PATCH] bfd: Improve lookup of file / line information for errors + +When looking up file and line information (used from the linker to +report error messages) if no symbol is passed in, then use the symbol +list to look for a matching symbol. + +If a matching symbol is found then use this to look up the file / line +information. + +This should improve errors when looking up file / line information for +data sections. 
Hopefully we should find a matching data symbol, which +should, in turn (we hope) match a DW_TAG_variable in the DWARF, this +should allow us to give accurate file / line errors for data symbols. + +As the hope is to find a matching DW_TAG_variable in the DWARF then we +ignore section symbols, and prefer global symbols to locals. + +CVE: CVE-2017-8392 +Upstream-Status: Accepted + +Signed-off-by: Fan Xin +--- + bfd/dwarf2.c | 32 ++++++++++++++++++++++++++++++++ + 1 files changed, 32 insertions(+) + + +diff --git a/bfd/dwarf2.c b/bfd/dwarf2.c +index 03447a9..9bb8126 100644 +--- a/bfd/dwarf2.c ++++ b/bfd/dwarf2.c +@@ -4155,6 +4155,38 @@ _bfd_dwarf2_find_nearest_line (bfd *abfd, + { + BFD_ASSERT (section != NULL && functionname_ptr != NULL); + addr = offset; ++ ++ /* If we have no SYMBOL but the section we're looking at is not a ++ code section, then take a look through the list of symbols to see ++ if we have a symbol at the address we're looking for. If we do ++ then use this to look up line information. This will allow us to ++ give file and line results for data symbols. We exclude code ++ symbols here, if we look up a function symbol and then look up the ++ line information we'll actually return the line number for the ++ opening '{' rather than the function definition line. This is ++ because looking up by symbol uses the line table, in which the ++ first line for a function is usually the opening '{', while ++ looking up the function by section + offset uses the ++ DW_AT_decl_line from the function DW_TAG_subprogram for the line, ++ which will be the line of the function name. */ ++ if ((section->flags & SEC_CODE) == 0) ++ { ++ asymbol **tmp; ++ ++ for (tmp = symbols; (*tmp) != NULL; ++tmp) ++ if ((*tmp)->the_bfd == abfd ++ && (*tmp)->section == section ++ && (*tmp)->value == offset ++ && ((*tmp)->flags & BSF_SECTION_SYM) == 0) ++ { ++ symbol = *tmp; ++ do_line = TRUE; ++ /* For local symbols, keep going in the hope we find a ++ global. */ ++ if ((symbol->flags & BSF_GLOBAL) != 0) ++ break; ++ } ++ } + } + + if (section->output_section) +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0018-PR-21409-segfault-in-_bfd_dwarf2_find_nearest_line.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0018-PR-21409-segfault-in-_bfd_dwarf2_find_nearest_line.patch new file mode 100644 index 000000000..acb37df16 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/0018-PR-21409-segfault-in-_bfd_dwarf2_find_nearest_line.patch @@ -0,0 +1,33 @@ +From 97e83a100aa8250be783304bfe0429761c6e6b6b Mon Sep 17 00:00:00 2001 +From: Alan Modra +Date: Sun, 23 Apr 2017 13:55:49 +0930 +Subject: [PATCH] PR 21409, segfault in _bfd_dwarf2_find_nearest_line + + PR 21409 + * dwarf2.c (_bfd_dwarf2_find_nearest_line): Don't segfault when + no symbols. + +CVE: CVE-2017-8392 +Upstream-Status: Accepted + +Signed-off-by: Fan Xin +--- + bfd/dwarf2.c | 2 +- + 1 files changed, 1 insertions(+), 1 deletion(-) + +diff --git a/bfd/dwarf2.c b/bfd/dwarf2.c +index 132a674..0ef3e1f 100644 +--- a/bfd/dwarf2.c ++++ b/bfd/dwarf2.c +@@ -4205,7 +4205,7 @@ _bfd_dwarf2_find_nearest_line (bfd *abfd, + looking up the function by section + offset uses the + DW_AT_decl_line from the function DW_TAG_subprogram for the line, + which will be the line of the function name. 
*/ +- if ((section->flags & SEC_CODE) == 0) ++ if (symbols != NULL && (section->flags & SEC_CODE) == 0) + { + asymbol **tmp; + +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6965.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6965.patch index 85f7f98fe..1334c9444 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6965.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6965.patch @@ -1,4 +1,4 @@ -From 6f898c17b1d6f6a29a05ca6de31f0fc8f52cfbfe Mon Sep 17 00:00:00 2001 +From bdc5166c274b842f83f8328e7cfaaf80fd29934e Mon Sep 17 00:00:00 2001 From: Nick Clifton Date: Mon, 13 Feb 2017 13:08:32 +0000 Subject: [PATCH 1/2] Fix readelf writing to illegal addresses whilst @@ -20,13 +20,10 @@ Signed-off-by: Yuanjie Huang 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/binutils/ChangeLog b/binutils/ChangeLog -index 995de87dc3..154b797a29 100644 +index f21867f98c..e789a3b99b 100644 --- a/binutils/ChangeLog +++ b/binutils/ChangeLog -@@ -5,6 +5,13 @@ - Check for buffer overflow before writing relocated values. - (apply_relocations): Pass end to target_specific_reloc_handling. - +@@ -1,3 +1,10 @@ +2017-02-13 Nick Clifton + + PR binutils/21137 @@ -34,14 +31,14 @@ index 995de87dc3..154b797a29 100644 + Check for buffer overflow before writing relocated values. + (apply_relocations): Pass end to target_specific_reloc_handling. + - 2016-08-03 Tristan Gingold + 2017-03-02 Tristan Gingold * configure: Regenerate. diff --git a/binutils/readelf.c b/binutils/readelf.c -index d31558c3b4..220671f76f 100644 +index b5f577f5a1..8cdaae3b8c 100644 --- a/binutils/readelf.c +++ b/binutils/readelf.c -@@ -11345,6 +11345,7 @@ process_syminfo (FILE * file ATTRIBUTE_UNUSED) +@@ -11585,6 +11585,7 @@ process_syminfo (FILE * file ATTRIBUTE_UNUSED) static bfd_boolean target_specific_reloc_handling (Elf_Internal_Rela * reloc, unsigned char * start, @@ -49,7 +46,7 @@ index d31558c3b4..220671f76f 100644 Elf_Internal_Sym * symtab) { unsigned int reloc_type = get_reloc_type (reloc->r_info); -@@ -11384,13 +11385,19 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, +@@ -11625,13 +11626,19 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, handle_sym_diff: if (saved_sym != NULL) { @@ -70,7 +67,7 @@ index d31558c3b4..220671f76f 100644 saved_sym = NULL; return TRUE; -@@ -11421,13 +11428,18 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, +@@ -11662,13 +11669,18 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, case 2: /* R_MN10300_16 */ if (saved_sym != NULL) { @@ -90,7 +87,7 @@ index d31558c3b4..220671f76f 100644 saved_sym = NULL; return TRUE; -@@ -11462,12 +11474,20 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, +@@ -11703,12 +11715,20 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, break; case 0x41: /* R_RL78_ABS32. 
*/ @@ -113,7 +110,7 @@ index d31558c3b4..220671f76f 100644 value = 0; return TRUE; -@@ -12074,7 +12094,7 @@ apply_relocations (void * file, +@@ -12325,7 +12345,7 @@ apply_relocations (void * file, reloc_type = get_reloc_type (rp->r_info); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6966.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6966.patch index 5e364ef69..dd58df5fb 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6966.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6966.patch @@ -1,7 +1,7 @@ -From 310e2cdc0a46ef62602097f5c21c393571e76df4 Mon Sep 17 00:00:00 2001 +From 383ec757d27652448d1511169e1133f486abf54f Mon Sep 17 00:00:00 2001 From: Nick Clifton Date: Mon, 13 Feb 2017 14:03:22 +0000 -Subject: [PATCH 2/2] Fix read-after-free error in readelf when processing +Subject: [PATCH] Fix read-after-free error in readelf when processing multiple, relocated sections in an MSP430 binary. PR binutils/21139 @@ -23,7 +23,7 @@ Signed-off-by: Yuanjie Huang 2 files changed, 94 insertions(+), 25 deletions(-) diff --git a/binutils/ChangeLog b/binutils/ChangeLog -index 154b797a29..aef0a51f19 100644 +index e789a3b99b..bd63c8a0d8 100644 --- a/binutils/ChangeLog +++ b/binutils/ChangeLog @@ -1,5 +1,15 @@ @@ -43,10 +43,10 @@ index 154b797a29..aef0a51f19 100644 * readelf.c (target_specific_reloc_handling): Add end parameter. Check for buffer overflow before writing relocated values. diff --git a/binutils/readelf.c b/binutils/readelf.c -index 220671f76f..2b6cef1638 100644 +index 8cdaae3b8c..7c158c6342 100644 --- a/binutils/readelf.c +++ b/binutils/readelf.c -@@ -11340,15 +11340,27 @@ process_syminfo (FILE * file ATTRIBUTE_UNUSED) +@@ -11580,15 +11580,27 @@ process_syminfo (FILE * file ATTRIBUTE_UNUSED) /* Check to see if the given reloc needs to be handled in a target specific manner. If so then process the reloc and return TRUE otherwise return @@ -77,7 +77,7 @@ index 220671f76f..2b6cef1638 100644 switch (elf_header.e_machine) { -@@ -11357,13 +11369,24 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, +@@ -11597,6 +11609,12 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, { static Elf_Internal_Sym * saved_sym = NULL; @@ -90,8 +90,9 @@ index 220671f76f..2b6cef1638 100644 switch (reloc_type) { case 10: /* R_MSP430_SYM_DIFF */ - if (uses_msp430x_relocs ()) +@@ -11604,7 +11622,12 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, break; + /* Fall through. */ case 21: /* R_MSP430X_SYM_DIFF */ - saved_sym = symtab + get_reloc_symindex (reloc->r_info); + /* PR 21139. */ @@ -103,7 +104,7 @@ index 220671f76f..2b6cef1638 100644 return TRUE; case 1: /* R_MSP430_32 or R_MSP430_ABS32 */ -@@ -11388,16 +11411,21 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, +@@ -11629,16 +11652,21 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, int reloc_size = reloc_type == 1 ? 
4 : 2; bfd_vma value; @@ -134,7 +135,7 @@ index 220671f76f..2b6cef1638 100644 saved_sym = NULL; return TRUE; -@@ -11417,13 +11445,24 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, +@@ -11658,13 +11686,24 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, { static Elf_Internal_Sym * saved_sym = NULL; @@ -160,7 +161,7 @@ index 220671f76f..2b6cef1638 100644 case 1: /* R_MN10300_32 */ case 2: /* R_MN10300_16 */ if (saved_sym != NULL) -@@ -11431,15 +11470,20 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, +@@ -11672,15 +11711,20 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, int reloc_size = reloc_type == 1 ? 4 : 2; bfd_vma value; @@ -189,7 +190,7 @@ index 220671f76f..2b6cef1638 100644 saved_sym = NULL; return TRUE; -@@ -11459,12 +11503,24 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, +@@ -11700,12 +11744,24 @@ target_specific_reloc_handling (Elf_Internal_Rela * reloc, static bfd_vma saved_sym2 = 0; static bfd_vma value; @@ -216,7 +217,7 @@ index 220671f76f..2b6cef1638 100644 return TRUE; case 0x83: /* R_RL78_OPsub. */ -@@ -12094,7 +12150,7 @@ apply_relocations (void * file, +@@ -12345,7 +12401,7 @@ apply_relocations (void * file, reloc_type = get_reloc_type (rp->r_info); @@ -225,7 +226,7 @@ index 220671f76f..2b6cef1638 100644 continue; else if (is_none_reloc (reloc_type)) continue; -@@ -12190,6 +12246,9 @@ apply_relocations (void * file, +@@ -12441,6 +12497,9 @@ apply_relocations (void * file, } free (symtab); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6969.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6969.patch index 3d036c4cf..ed5403430 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6969.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6969.patch @@ -1,7 +1,8 @@ -From 489246368e2c49a795ad5ecbc8895cbc854292fa Mon Sep 17 00:00:00 2001 +From 1d9a2696903fc59d6a936f4ab4e4407ef329d066 Mon Sep 17 00:00:00 2001 From: Nick Clifton Date: Fri, 17 Feb 2017 15:59:45 +0000 -Subject: Fix illegal memory accesses in readelf when parsing a corrupt binary. +Subject: Fix illegal memory accesses in readelf when parsing + a corrupt binary. PR binutils/21156 * readelf.c (find_section_in_set): Test for invalid section @@ -17,7 +18,7 @@ Signed-off-by: Yuanjie Huang 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/binutils/ChangeLog b/binutils/ChangeLog -index a70bdb7a7b..dbf8eb079e 100644 +index bd63c8a0d8..1d840b42f9 100644 --- a/binutils/ChangeLog +++ b/binutils/ChangeLog @@ -1,3 +1,9 @@ @@ -27,14 +28,14 @@ index a70bdb7a7b..dbf8eb079e 100644 + * readelf.c (find_section_in_set): Test for invalid section + indicies. + - 2016-08-03 Tristan Gingold + 2017-02-13 Nick Clifton - * configure: Regenerate. 
+ PR binutils/21139 diff --git a/binutils/readelf.c b/binutils/readelf.c -index d31558c3b4..7f7365dbc5 100644 +index 7c158c6342..4960491c5c 100644 --- a/binutils/readelf.c +++ b/binutils/readelf.c -@@ -674,8 +674,14 @@ find_section_in_set (const char * name, unsigned int * set) +@@ -675,8 +675,14 @@ find_section_in_set (const char * name, unsigned int * set) if (set != NULL) { while ((i = *set++) > 0) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6969_2.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6969_2.patch index 491c7086e..59a5dec67 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6969_2.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-6969_2.patch @@ -1,8 +1,8 @@ -From 59fcd64fe65a89fb0acaf5463840310701189375 Mon Sep 17 00:00:00 2001 +From ef81126314f67472a46db9581530fbf5ccb6b3f2 Mon Sep 17 00:00:00 2001 From: Nick Clifton Date: Mon, 20 Feb 2017 14:40:39 +0000 -Subject: Fix another memory access error in readelf when parsing a corrupt - binary. +Subject: Fix another memory access error in readelf when + parsing a corrupt binary. PR binutils/21156 * dwarf.c (cu_tu_indexes_read): Move into... @@ -22,7 +22,7 @@ Signed-off-by: Yuanjie Huang 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/binutils/ChangeLog b/binutils/ChangeLog -index dbf8eb079e..55d2f8ba40 100644 +index 1d840b42f9..53352c1801 100644 --- a/binutils/ChangeLog +++ b/binutils/ChangeLog @@ -1,3 +1,13 @@ @@ -40,7 +40,7 @@ index dbf8eb079e..55d2f8ba40 100644 PR binutils/21156 diff --git a/binutils/dwarf.c b/binutils/dwarf.c -index 282e069958..a23267feb6 100644 +index 0184a7ab2e..6d879c9b61 100644 --- a/binutils/dwarf.c +++ b/binutils/dwarf.c @@ -76,7 +76,6 @@ int dwarf_check = 0; @@ -60,7 +60,7 @@ index 282e069958..a23267feb6 100644 /* Values for do_debug_lines. */ #define FLAG_DEBUG_LINES_RAW 1 -@@ -2713,7 +2712,7 @@ load_debug_info (void * file) +@@ -2715,7 +2714,7 @@ load_debug_info (void * file) return num_debug_info_entries; /* If this is a DWARF package file, load the CU and TU indexes. */ @@ -69,7 +69,7 @@ index 282e069958..a23267feb6 100644 if (load_debug_section (info, file) && process_debug_info (&debug_displays [info].section, file, abbrev, 1, 0)) -@@ -7302,21 +7301,27 @@ process_cu_tu_index (struct dwarf_section *section, int do_display) +@@ -7378,21 +7377,27 @@ process_cu_tu_index (struct dwarf_section *section, int do_display) section sets that we can use to associate a .debug_info.dwo section with its associated .debug_abbrev.dwo section in a .dwp file. */ @@ -107,7 +107,7 @@ index 282e069958..a23267feb6 100644 } /* Find the set of sections that includes section SHNDX. 
*/ -@@ -7326,7 +7331,8 @@ find_cu_tu_set (void *file, unsigned int shndx) +@@ -7402,7 +7407,8 @@ find_cu_tu_set (void *file, unsigned int shndx) { unsigned int i; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7209.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7209.patch index 336d72cfe..2357a1283 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7209.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7209.patch @@ -1,8 +1,7 @@ -From 6e5e9d96b5bd7dc3147db9917d6a7a20682915cc Mon Sep 17 00:00:00 2001 +From b2706ceadac7239e7b02d43f05100fc6538b0d65 Mon Sep 17 00:00:00 2001 From: Nick Clifton Date: Mon, 13 Feb 2017 15:04:37 +0000 -Subject: Fix invalid read of section contents whilst processing a corrupt - binary. +Subject: Fix invalid read of section contents whilst processing a corrupt binary. PR binutils/21135 * readelf.c (dump_section_as_bytes): Handle the case where @@ -18,7 +17,7 @@ Signed-off-by: Yuanjie Huang 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/binutils/ChangeLog b/binutils/ChangeLog -index 55d2f8ba40..c4d8e60eca 100644 +index 53352c1801..cf92744c12 100644 --- a/binutils/ChangeLog +++ b/binutils/ChangeLog @@ -1,3 +1,9 @@ @@ -32,10 +31,10 @@ index 55d2f8ba40..c4d8e60eca 100644 PR binutils/21156 diff --git a/binutils/readelf.c b/binutils/readelf.c -index 7f7365dbc5..bc4e92fa81 100644 +index 4960491c5c..f0e7b080e8 100644 --- a/binutils/readelf.c +++ b/binutils/readelf.c -@@ -12473,10 +12473,18 @@ dump_section_as_bytes (Elf_Internal_Shdr * section, +@@ -12803,10 +12803,18 @@ dump_section_as_bytes (Elf_Internal_Shdr * section, new_size -= 12; } diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7210.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7210.patch index 211d2bfd8..8791792c7 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7210.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7210.patch @@ -1,4 +1,4 @@ -From 80958b04c91edcd41c42807225a7ad1b2a4ce0e6 Mon Sep 17 00:00:00 2001 +From 4da598a472e1d298825035e452e3bc68f714311c Mon Sep 17 00:00:00 2001 From: Nick Clifton Date: Tue, 14 Feb 2017 14:07:29 +0000 Subject: Fix handling of corrupt STABS enum type strings. 
@@ -18,7 +18,7 @@ Signed-off-by: Yuanjie Huang 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/binutils/ChangeLog b/binutils/ChangeLog -index c4d8e60eca..2bae9ec587 100644 +index cf92744c12..0045fbaaa6 100644 --- a/binutils/ChangeLog +++ b/binutils/ChangeLog @@ -1,3 +1,10 @@ @@ -33,7 +33,7 @@ index c4d8e60eca..2bae9ec587 100644 PR binutils/21135 diff --git a/binutils/stabs.c b/binutils/stabs.c -index aebde7afe9..c425afe98e 100644 +index f5c5d2d8e0..5d013cc361 100644 --- a/binutils/stabs.c +++ b/binutils/stabs.c @@ -232,6 +232,10 @@ parse_number (const char **pp, bfd_boolean *poverflow) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7223.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7223.patch new file mode 100644 index 000000000..c78c8bf00 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7223.patch @@ -0,0 +1,52 @@ +From 69ace2200106348a1b00d509a6a234337c104c17 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Thu, 1 Dec 2016 15:20:19 +0000 +Subject: [PATCH] Fix seg fault attempting to unget an EOF character. + + PR gas/20898 + * app.c (do_scrub_chars): Do not attempt to unget EOF. + +Affects: <= 2.28 +Upstream-Status: Backport +CVE: CVE-2017-7223 +Signed-off-by: Armin Kuster + +--- + gas/ChangeLog | 3 +++ + gas/app.c | 2 +- + 2 files changed, 4 insertions(+), 1 deletion(-) + +Index: git/gas/ChangeLog +=================================================================== +--- git.orig/gas/ChangeLog ++++ git/gas/ChangeLog +@@ -1,3 +1,8 @@ ++2016-12-01 Nick Clifton ++ ++ PR gas/20898 ++ * app.c (do_scrub_chars): Do not attempt to unget EOF. ++ + 2017-03-02 Tristan Gingold + + * configure: Regenerate. +@@ -198,7 +203,6 @@ + * config/tc-pru.c (md_number_to_chars): Fix parameter to be + valueT, as declared in tc.h. + (md_apply_fix): Fix to work on 32-bit hosts. +->>>>>>> 0115611... RISC-V/GAS: Correct branch relaxation for weak symbols. + + 2017-01-02 Alan Modra + +Index: git/gas/app.c +=================================================================== +--- git.orig/gas/app.c ++++ git/gas/app.c +@@ -1350,7 +1350,7 @@ do_scrub_chars (size_t (*get) (char *, s + PUT (ch); + break; + } +- else ++ else if (ch2 != EOF) + { + state = 9; + if (ch == EOF || !IS_SYMBOL_COMPONENT (ch)) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7614.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7614.patch new file mode 100644 index 000000000..be8631ab7 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-7614.patch @@ -0,0 +1,103 @@ +From ad32986fdf9da1c8748e47b8b45100398223dba8 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Tue, 4 Apr 2017 11:23:36 +0100 +Subject: [PATCH] Fix null pointer dereferences when using a link built with + clang. + + PR binutils/21342 + * elflink.c (_bfd_elf_define_linkage_sym): Prevent null pointer + dereference. + (bfd_elf_final_link): Only initialize the extended symbol index + section if there are extended symbol tables to list. 
+ +Upstream-Status: Backport +CVE: CVE-2017-7614 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 8 ++++++++ + bfd/elflink.c | 35 +++++++++++++++++++++-------------- + 2 files changed, 29 insertions(+), 14 deletions(-) + +Index: git/bfd/elflink.c +=================================================================== +--- git.orig/bfd/elflink.c ++++ git/bfd/elflink.c +@@ -119,15 +119,18 @@ _bfd_elf_define_linkage_sym (bfd *abfd, + defined in shared libraries can't be overridden, because we + lose the link to the bfd which is via the symbol section. */ + h->root.type = bfd_link_hash_new; ++ bh = &h->root; + } ++ else ++ bh = NULL; + +- bh = &h->root; + bed = get_elf_backend_data (abfd); + if (!_bfd_generic_link_add_one_symbol (info, abfd, name, BSF_GLOBAL, + sec, 0, NULL, FALSE, bed->collect, + &bh)) + return NULL; + h = (struct elf_link_hash_entry *) bh; ++ BFD_ASSERT (h != NULL); + h->def_regular = 1; + h->non_elf = 0; + h->root.linker_def = 1; +@@ -11973,24 +11976,28 @@ bfd_elf_final_link (bfd *abfd, struct bf + { + /* Finish up and write out the symbol string table (.strtab) + section. */ +- Elf_Internal_Shdr *symstrtab_hdr; ++ Elf_Internal_Shdr *symstrtab_hdr = NULL; + file_ptr off = symtab_hdr->sh_offset + symtab_hdr->sh_size; + +- symtab_shndx_hdr = & elf_symtab_shndx_list (abfd)->hdr; +- if (symtab_shndx_hdr != NULL && symtab_shndx_hdr->sh_name != 0) ++ if (elf_symtab_shndx_list (abfd)) + { +- symtab_shndx_hdr->sh_type = SHT_SYMTAB_SHNDX; +- symtab_shndx_hdr->sh_entsize = sizeof (Elf_External_Sym_Shndx); +- symtab_shndx_hdr->sh_addralign = sizeof (Elf_External_Sym_Shndx); +- amt = bfd_get_symcount (abfd) * sizeof (Elf_External_Sym_Shndx); +- symtab_shndx_hdr->sh_size = amt; ++ symtab_shndx_hdr = & elf_symtab_shndx_list (abfd)->hdr; + +- off = _bfd_elf_assign_file_position_for_section (symtab_shndx_hdr, +- off, TRUE); ++ if (symtab_shndx_hdr != NULL && symtab_shndx_hdr->sh_name != 0) ++ { ++ symtab_shndx_hdr->sh_type = SHT_SYMTAB_SHNDX; ++ symtab_shndx_hdr->sh_entsize = sizeof (Elf_External_Sym_Shndx); ++ symtab_shndx_hdr->sh_addralign = sizeof (Elf_External_Sym_Shndx); ++ amt = bfd_get_symcount (abfd) * sizeof (Elf_External_Sym_Shndx); ++ symtab_shndx_hdr->sh_size = amt; + +- if (bfd_seek (abfd, symtab_shndx_hdr->sh_offset, SEEK_SET) != 0 +- || (bfd_bwrite (flinfo.symshndxbuf, amt, abfd) != amt)) +- return FALSE; ++ off = _bfd_elf_assign_file_position_for_section (symtab_shndx_hdr, ++ off, TRUE); ++ ++ if (bfd_seek (abfd, symtab_shndx_hdr->sh_offset, SEEK_SET) != 0 ++ || (bfd_bwrite (flinfo.symshndxbuf, amt, abfd) != amt)) ++ return FALSE; ++ } + } + + symstrtab_hdr = &elf_tdata (abfd)->strtab_hdr; +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,11 @@ ++2017-04-04 Nick Clifton ++ ++ PR binutils/21342 ++ * elflink.c (_bfd_elf_define_linkage_sym): Prevent null pointer ++ dereference. ++ (bfd_elf_final_link): Only initialize the extended symbol index ++ section if there are extended symbol tables to list. 
++ + 2017-03-07 Alan Modra + + PR 21224 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8393.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8393.patch new file mode 100644 index 000000000..8500a03b1 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8393.patch @@ -0,0 +1,205 @@ +From bce964aa6c777d236fbd641f2bc7bb931cfe4bf3 Mon Sep 17 00:00:00 2001 +From: Alan Modra +Date: Sun, 23 Apr 2017 11:03:34 +0930 +Subject: [PATCH] PR 21412, get_reloc_section assumes .rel/.rela name for + SHT_REL/RELA. + +This patch fixes an assumption made by code that runs for objcopy and +strip, that SHT_REL/SHR_RELA sections are always named starting with a +.rel/.rela prefix. I'm also modifying the interface for +elf_backend_get_reloc_section, so any backend function just needs to +handle name mapping. + + PR 21412 + * elf-bfd.h (struct elf_backend_data ): Change + parameters and comment. + (_bfd_elf_get_reloc_section): Delete. + (_bfd_elf_plt_get_reloc_section): Declare. + * elf.c (_bfd_elf_plt_get_reloc_section, elf_get_reloc_section): + New functions. Don't blindly skip over assumed .rel/.rela prefix. + Extracted from.. + (_bfd_elf_get_reloc_section): ..here. Delete. + (assign_section_numbers): Call elf_get_reloc_section. + * elf64-ppc.c (elf_backend_get_reloc_section): Define. + * elfxx-target.h (elf_backend_get_reloc_section): Update. + +Upstream-Status: Backport +CVE: CVE-2017-8393 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 15 ++++++++++++++ + bfd/elf-bfd.h | 8 ++++--- + bfd/elf.c | 61 +++++++++++++++++++++++++++++++----------------------- + bfd/elf64-ppc.c | 1 + + bfd/elfxx-target.h | 2 +- + 5 files changed, 57 insertions(+), 30 deletions(-) + +Index: git/bfd/elf-bfd.h +=================================================================== +--- git.orig/bfd/elf-bfd.h ++++ git/bfd/elf-bfd.h +@@ -1322,8 +1322,10 @@ struct elf_backend_data + bfd_size_type (*maybe_function_sym) (const asymbol *sym, asection *sec, + bfd_vma *code_off); + +- /* Return the section which RELOC_SEC applies to. */ +- asection *(*get_reloc_section) (asection *reloc_sec); ++ /* Given NAME, the name of a relocation section stripped of its ++ .rel/.rela prefix, return the section in ABFD to which the ++ relocations apply. */ ++ asection *(*get_reloc_section) (bfd *abfd, const char *name); + + /* Called to set the sh_flags, sh_link and sh_info fields of OSECTION which + has a type >= SHT_LOOS. Returns TRUE if the fields were initialised, +@@ -2392,7 +2394,7 @@ extern bfd_boolean _bfd_elf_is_function_ + extern bfd_size_type _bfd_elf_maybe_function_sym (const asymbol *, asection *, + bfd_vma *); + +-extern asection *_bfd_elf_get_reloc_section (asection *); ++extern asection *_bfd_elf_plt_get_reloc_section (bfd *, const char *); + + extern int bfd_elf_get_default_section_type (flagword); + +Index: git/bfd/elf.c +=================================================================== +--- git.orig/bfd/elf.c ++++ git/bfd/elf.c +@@ -3532,17 +3532,39 @@ bfd_elf_set_group_contents (bfd *abfd, a + H_PUT_32 (abfd, sec->flags & SEC_LINK_ONCE ? GRP_COMDAT : 0, loc); + } + +-/* Return the section which RELOC_SEC applies to. */ ++/* Given NAME, the name of a relocation section stripped of its ++ .rel/.rela prefix, return the section in ABFD to which the ++ relocations apply. 
*/ + + asection * +-_bfd_elf_get_reloc_section (asection *reloc_sec) ++_bfd_elf_plt_get_reloc_section (bfd *abfd, const char *name) ++{ ++ /* If a target needs .got.plt section, relocations in rela.plt/rel.plt ++ section likely apply to .got.plt or .got section. */ ++ if (get_elf_backend_data (abfd)->want_got_plt ++ && strcmp (name, ".plt") == 0) ++ { ++ asection *sec; ++ ++ name = ".got.plt"; ++ sec = bfd_get_section_by_name (abfd, name); ++ if (sec != NULL) ++ return sec; ++ name = ".got"; ++ } ++ ++ return bfd_get_section_by_name (abfd, name); ++} ++ ++/* Return the section to which RELOC_SEC applies. */ ++ ++static asection * ++elf_get_reloc_section (asection *reloc_sec) + { + const char *name; + unsigned int type; + bfd *abfd; +- +- if (reloc_sec == NULL) +- return NULL; ++ const struct elf_backend_data *bed; + + type = elf_section_data (reloc_sec)->this_hdr.sh_type; + if (type != SHT_REL && type != SHT_RELA) +@@ -3550,28 +3572,15 @@ _bfd_elf_get_reloc_section (asection *re + + /* We look up the section the relocs apply to by name. */ + name = reloc_sec->name; +- if (type == SHT_REL) +- name += 4; +- else +- name += 5; ++ if (strncmp (name, ".rel", 4) != 0) ++ return NULL; ++ name += 4; ++ if (type == SHT_RELA && *name++ != 'a') ++ return NULL; + +- /* If a target needs .got.plt section, relocations in rela.plt/rel.plt +- section apply to .got.plt section. */ + abfd = reloc_sec->owner; +- if (get_elf_backend_data (abfd)->want_got_plt +- && strcmp (name, ".plt") == 0) +- { +- /* .got.plt is a linker created input section. It may be mapped +- to some other output section. Try two likely sections. */ +- name = ".got.plt"; +- reloc_sec = bfd_get_section_by_name (abfd, name); +- if (reloc_sec != NULL) +- return reloc_sec; +- name = ".got"; +- } +- +- reloc_sec = bfd_get_section_by_name (abfd, name); +- return reloc_sec; ++ bed = get_elf_backend_data (abfd); ++ return bed->get_reloc_section (abfd, name); + } + + /* Assign all ELF section numbers. The dummy first section is handled here +@@ -3833,7 +3842,7 @@ assign_section_numbers (bfd *abfd, struc + if (s != NULL) + d->this_hdr.sh_link = elf_section_data (s)->this_idx; + +- s = get_elf_backend_data (abfd)->get_reloc_section (sec); ++ s = elf_get_reloc_section (sec); + if (s != NULL) + { + d->this_hdr.sh_info = elf_section_data (s)->this_idx; +Index: git/bfd/elf64-ppc.c +=================================================================== +--- git.orig/bfd/elf64-ppc.c ++++ git/bfd/elf64-ppc.c +@@ -121,6 +121,7 @@ static bfd_vma opd_entry_value + #define elf_backend_special_sections ppc64_elf_special_sections + #define elf_backend_merge_symbol_attribute ppc64_elf_merge_symbol_attribute + #define elf_backend_merge_symbol ppc64_elf_merge_symbol ++#define elf_backend_get_reloc_section bfd_get_section_by_name + + /* The name of the dynamic interpreter. This is put in the .interp + section. 
*/ +Index: git/bfd/elfxx-target.h +=================================================================== +--- git.orig/bfd/elfxx-target.h ++++ git/bfd/elfxx-target.h +@@ -706,7 +706,7 @@ + #endif + + #ifndef elf_backend_get_reloc_section +-#define elf_backend_get_reloc_section _bfd_elf_get_reloc_section ++#define elf_backend_get_reloc_section _bfd_elf_plt_get_reloc_section + #endif + + #ifndef elf_backend_copy_special_section_fields +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,18 @@ ++2017-04-23 Alan Modra ++ ++ PR 21412 ++ * elf-bfd.h (struct elf_backend_data ): Change ++ parameters and comment. ++ (_bfd_elf_get_reloc_section): Delete. ++ (_bfd_elf_plt_get_reloc_section): Declare. ++ * elf.c (_bfd_elf_plt_get_reloc_section, elf_get_reloc_section): ++ New functions. Don't blindly skip over assumed .rel/.rela prefix. ++ Extracted from.. ++ (_bfd_elf_get_reloc_section): ..here. Delete. ++ (assign_section_numbers): Call elf_get_reloc_section. ++ * elf64-ppc.c (elf_backend_get_reloc_section): Define. ++ * elfxx-target.h (elf_backend_get_reloc_section): Update. ++ + 2017-04-04 Nick Clifton + + PR binutils/21342 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8394.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8394.patch new file mode 100644 index 000000000..e6c6b17da --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8394.patch @@ -0,0 +1,118 @@ +From 7eacd66b086cabb1daab20890d5481894d4f56b2 Mon Sep 17 00:00:00 2001 +From: Alan Modra +Date: Sun, 23 Apr 2017 15:21:11 +0930 +Subject: [PATCH] PR 21414, null pointer deref of _bfd_elf_large_com_section + sym + + PR 21414 + * section.c (GLOBAL_SYM_INIT): Make available in bfd.h. + * elf.c (lcomm_sym): New. + (_bfd_elf_large_com_section): Use lcomm_sym section symbol. + * bfd-in2.h: Regenerate. + +Upstream-Status: Backport +CVE: CVE-2017-8394 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 8 ++++++++ + bfd/bfd-in2.h | 12 ++++++++++++ + bfd/elf.c | 6 ++++-- + bfd/section.c | 24 ++++++++++++------------ + 4 files changed, 36 insertions(+), 14 deletions(-) + +Index: git/bfd/bfd-in2.h +=================================================================== +--- git.orig/bfd/bfd-in2.h ++++ git/bfd/bfd-in2.h +@@ -1838,6 +1838,18 @@ extern asection _bfd_std_section[4]; + { NULL }, { NULL } \ + } + ++/* We use a macro to initialize the static asymbol structures because ++ traditional C does not permit us to initialize a union member while ++ gcc warns if we don't initialize it. ++ the_bfd, name, value, attr, section [, udata] */ ++#ifdef __STDC__ ++#define GLOBAL_SYM_INIT(NAME, SECTION) \ ++ { 0, NAME, 0, BSF_SECTION_SYM, SECTION, { 0 }} ++#else ++#define GLOBAL_SYM_INIT(NAME, SECTION) \ ++ { 0, NAME, 0, BSF_SECTION_SYM, SECTION } ++#endif ++ + void bfd_section_list_clear (bfd *); + + asection *bfd_get_section_by_name (bfd *abfd, const char *name); +Index: git/bfd/elf.c +=================================================================== +--- git.orig/bfd/elf.c ++++ git/bfd/elf.c +@@ -11164,9 +11164,11 @@ _bfd_elf_get_synthetic_symtab (bfd *abfd + + /* It is only used by x86-64 so far. + ??? This repeats *COM* id of zero. sec->id is supposed to be unique, +- but current usage would allow all of _bfd_std_section to be zero. t*/ ++ but current usage would allow all of _bfd_std_section to be zero. 
*/ ++static const asymbol lcomm_sym ++ = GLOBAL_SYM_INIT ("LARGE_COMMON", &_bfd_elf_large_com_section); + asection _bfd_elf_large_com_section +- = BFD_FAKE_SECTION (_bfd_elf_large_com_section, NULL, ++ = BFD_FAKE_SECTION (_bfd_elf_large_com_section, &lcomm_sym, + "LARGE_COMMON", 0, SEC_IS_COMMON); + + void +Index: git/bfd/section.c +=================================================================== +--- git.orig/bfd/section.c ++++ git/bfd/section.c +@@ -738,20 +738,20 @@ CODE_FRAGMENT + . { NULL }, { NULL } \ + . } + . ++.{* We use a macro to initialize the static asymbol structures because ++. traditional C does not permit us to initialize a union member while ++. gcc warns if we don't initialize it. ++. the_bfd, name, value, attr, section [, udata] *} ++.#ifdef __STDC__ ++.#define GLOBAL_SYM_INIT(NAME, SECTION) \ ++. { 0, NAME, 0, BSF_SECTION_SYM, SECTION, { 0 }} ++.#else ++.#define GLOBAL_SYM_INIT(NAME, SECTION) \ ++. { 0, NAME, 0, BSF_SECTION_SYM, SECTION } ++.#endif ++. + */ + +-/* We use a macro to initialize the static asymbol structures because +- traditional C does not permit us to initialize a union member while +- gcc warns if we don't initialize it. */ +- /* the_bfd, name, value, attr, section [, udata] */ +-#ifdef __STDC__ +-#define GLOBAL_SYM_INIT(NAME, SECTION) \ +- { 0, NAME, 0, BSF_SECTION_SYM, SECTION, { 0 }} +-#else +-#define GLOBAL_SYM_INIT(NAME, SECTION) \ +- { 0, NAME, 0, BSF_SECTION_SYM, SECTION } +-#endif +- + /* These symbols are global, not specific to any BFD. Therefore, anything + that tries to change them is broken, and should be repaired. */ + +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,4 +1,12 @@ ++ + 2017-04-23 Alan Modra ++ PR 21414 ++ * section.c (GLOBAL_SYM_INIT): Make available in bfd.h. ++ * elf.c (lcomm_sym): New. ++ (_bfd_elf_large_com_section): Use lcomm_sym section symbol. ++ * bfd-in2.h: Regenerate. ++ +++2017-04-23 Alan Modra + + PR 21412 + * elf-bfd.h (struct elf_backend_data ): Change diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8395.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8395.patch new file mode 100644 index 000000000..0a9bce337 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8395.patch @@ -0,0 +1,72 @@ +From e63d123268f23a4cbc45ee55fb6dbc7d84729da3 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Wed, 26 Apr 2017 13:07:49 +0100 +Subject: [PATCH] Fix seg-fault attempting to compress a debug section in a + corrupt binary. + + PR binutils/21431 + * compress.c (bfd_init_section_compress_status): Check the return + value from bfd_malloc. + +Upstream-Status: Backport +CVE: CVE-2017-8395 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 6 ++++++ + bfd/compress.c | 19 +++++++++---------- + 2 files changed, 15 insertions(+), 10 deletions(-) + +Index: git/bfd/compress.c +=================================================================== +--- git.orig/bfd/compress.c ++++ git/bfd/compress.c +@@ -542,7 +542,6 @@ bfd_init_section_compress_status (bfd *a + { + bfd_size_type uncompressed_size; + bfd_byte *uncompressed_buffer; +- bfd_boolean ret; + + /* Error if not opened for read. */ + if (abfd->direction != read_direction +@@ -558,18 +557,18 @@ bfd_init_section_compress_status (bfd *a + /* Read in the full section contents and compress it. 
*/ + uncompressed_size = sec->size; + uncompressed_buffer = (bfd_byte *) bfd_malloc (uncompressed_size); ++ /* PR 21431 */ ++ if (uncompressed_buffer == NULL) ++ return FALSE; ++ + if (!bfd_get_section_contents (abfd, sec, uncompressed_buffer, + 0, uncompressed_size)) +- ret = FALSE; +- else +- { +- uncompressed_size = bfd_compress_section_contents (abfd, sec, +- uncompressed_buffer, +- uncompressed_size); +- ret = uncompressed_size != 0; +- } ++ return FALSE; + +- return ret; ++ uncompressed_size = bfd_compress_section_contents (abfd, sec, ++ uncompressed_buffer, ++ uncompressed_size); ++ return uncompressed_size != 0; + } + + /* +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,8 @@ ++2017-04-26 Nick Clifton ++ ++ PR binutils/21431 ++ * compress.c (bfd_init_section_compress_status): Check the return ++ value from bfd_malloc. + + 2017-04-23 Alan Modra + PR 21414 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8396_8397.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8396_8397.patch new file mode 100644 index 000000000..14f42824a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8396_8397.patch @@ -0,0 +1,102 @@ +From a941291cab71b9ac356e1c03968c177c03e602ab Mon Sep 17 00:00:00 2001 +From: Alan Modra +Date: Sat, 29 Apr 2017 14:48:16 +0930 +Subject: [PATCH] PR21432, buffer overflow in perform_relocation + +The existing reloc offset range tests didn't catch small negative +offsets less than the size of the reloc field. + + PR 21432 + * reloc.c (reloc_offset_in_range): New function. + (bfd_perform_relocation, bfd_install_relocation): Use it. + (_bfd_final_link_relocate): Likewise. + +Upstream-Status: Backport +CVE: CVE-2017-8396 +CVE: CVE-2017-8397 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 7 +++++++ + bfd/reloc.c | 32 ++++++++++++++++++++------------ + 2 files changed, 27 insertions(+), 12 deletions(-) + +Index: git/bfd/reloc.c +=================================================================== +--- git.orig/bfd/reloc.c ++++ git/bfd/reloc.c +@@ -538,6 +538,22 @@ bfd_check_overflow (enum complain_overfl + return flag; + } + ++/* HOWTO describes a relocation, at offset OCTET. Return whether the ++ relocation field is within SECTION of ABFD. */ ++ ++static bfd_boolean ++reloc_offset_in_range (reloc_howto_type *howto, bfd *abfd, ++ asection *section, bfd_size_type octet) ++{ ++ bfd_size_type octet_end = bfd_get_section_limit_octets (abfd, section); ++ bfd_size_type reloc_size = bfd_get_reloc_size (howto); ++ ++ /* The reloc field must be contained entirely within the section. ++ Allow zero length fields (marker relocs or NONE relocs where no ++ relocation will be performed) at the end of the section. */ ++ return octet <= octet_end && octet + reloc_size <= octet_end; ++} ++ + /* + FUNCTION + bfd_perform_relocation +@@ -618,13 +634,10 @@ bfd_perform_relocation (bfd *abfd, + /* PR 17512: file: 0f67f69d. */ + if (howto == NULL) + return bfd_reloc_undefined; +- +- /* Is the address of the relocation really within the section? +- Include the size of the reloc in the test for out of range addresses. +- PR 17512: file: c146ab8b, 46dff27f, 38e53ebf. */ ++ ++ /* Is the address of the relocation really within the section? 
*/ + octets = reloc_entry->address * bfd_octets_per_byte (abfd); +- if (octets + bfd_get_reloc_size (howto) +- > bfd_get_section_limit_octets (abfd, input_section)) ++ if (!reloc_offset_in_range (howto, abfd, input_section, octets)) + return bfd_reloc_outofrange; + + /* Work out which section the relocation is targeted at and the +@@ -1012,8 +1025,7 @@ bfd_install_relocation (bfd *abfd, + + /* Is the address of the relocation really within the section? */ + octets = reloc_entry->address * bfd_octets_per_byte (abfd); +- if (octets + bfd_get_reloc_size (howto) +- > bfd_get_section_limit_octets (abfd, input_section)) ++ if (!reloc_offset_in_range (howto, abfd, input_section, octets)) + return bfd_reloc_outofrange; + + /* Work out which section the relocation is targeted at and the +@@ -1351,8 +1363,7 @@ _bfd_final_link_relocate (reloc_howto_ty + bfd_size_type octets = address * bfd_octets_per_byte (input_bfd); + + /* Sanity check the address. */ +- if (octets + bfd_get_reloc_size (howto) +- > bfd_get_section_limit_octets (input_bfd, input_section)) ++ if (!reloc_offset_in_range (howto, input_bfd, input_section, octets)) + return bfd_reloc_outofrange; + + /* This function assumes that we are dealing with a basic relocation +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,10 @@ ++2017-04-29 Alan Modra ++ ++ PR 21432 ++ * reloc.c (reloc_offset_in_range): New function. ++ (bfd_perform_relocation, bfd_install_relocation): Use it. ++ (_bfd_final_link_relocate): Likewise. ++ + 2017-04-26 Nick Clifton + + PR binutils/21431 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8398.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8398.patch new file mode 100644 index 000000000..5b9acc8cf --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8398.patch @@ -0,0 +1,147 @@ +From d949ff5607b9f595e0eed2ff15fbe5eb84eb3a34 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Fri, 28 Apr 2017 10:28:04 +0100 +Subject: [PATCH] Fix heap-buffer overflow bugs caused when dumping debug + information from a corrupt binary. + + PR binutils/21438 + * dwarf.c (process_extended_line_op): Do not assume that the + string extracted from the section is NUL terminated. + (fetch_indirect_string): If the string retrieved from the section + is not NUL terminated, return an error message. + (fetch_indirect_line_string): Likewise. + (fetch_indexed_string): Likewise. 
+ +Upstream-Status: Backport +CVE: CVE-2017-8398 +Signed-off-by: Armin Kuster + +--- + binutils/ChangeLog | 10 +++++++++ + binutils/dwarf.c | 66 +++++++++++++++++++++++++++++++++++++++++------------- + 2 files changed, 60 insertions(+), 16 deletions(-) + +Index: git/binutils/dwarf.c +=================================================================== +--- git.orig/binutils/dwarf.c ++++ git/binutils/dwarf.c +@@ -472,15 +472,20 @@ process_extended_line_op (unsigned char + printf (_(" Entry\tDir\tTime\tSize\tName\n")); + printf (" %d\t", ++state_machine_regs.last_file_entry); + +- name = data; +- data += strnlen ((char *) data, end - data) + 1; +- printf ("%s\t", dwarf_vmatoa ("u", read_uleb128 (data, & bytes_read, end))); +- data += bytes_read; +- printf ("%s\t", dwarf_vmatoa ("u", read_uleb128 (data, & bytes_read, end))); +- data += bytes_read; +- printf ("%s\t", dwarf_vmatoa ("u", read_uleb128 (data, & bytes_read, end))); +- data += bytes_read; +- printf ("%s\n\n", name); ++ { ++ size_t l; ++ ++ name = data; ++ l = strnlen ((char *) data, end - data); ++ data += len + 1; ++ printf ("%s\t", dwarf_vmatoa ("u", read_uleb128 (data, & bytes_read, end))); ++ data += bytes_read; ++ printf ("%s\t", dwarf_vmatoa ("u", read_uleb128 (data, & bytes_read, end))); ++ data += bytes_read; ++ printf ("%s\t", dwarf_vmatoa ("u", read_uleb128 (data, & bytes_read, end))); ++ data += bytes_read; ++ printf ("%.*s\n\n", (int) l, name); ++ } + + if (((unsigned int) (data - orig_data) != len) || data == end) + warn (_("DW_LNE_define_file: Bad opcode length\n")); +@@ -597,18 +602,27 @@ static const unsigned char * + fetch_indirect_string (dwarf_vma offset) + { + struct dwarf_section *section = &debug_displays [str].section; ++ const unsigned char * ret; + + if (section->start == NULL) + return (const unsigned char *) _(""); + +- if (offset > section->size) ++ if (offset >= section->size) + { + warn (_("DW_FORM_strp offset too big: %s\n"), + dwarf_vmatoa ("x", offset)); + return (const unsigned char *) _(""); + } ++ ret = section->start + offset; ++ /* Unfortunately we cannot rely upon the .debug_str section ending with a ++ NUL byte. Since our caller is expecting to receive a well formed C ++ string we test for the lack of a terminating byte here. */ ++ if (strnlen ((const char *) ret, section->size - offset) ++ == section->size - offset) ++ ret = (const unsigned char *) ++ _(""); + +- return (const unsigned char *) section->start + offset; ++ return ret; + } + + static const char * +@@ -621,6 +635,7 @@ fetch_indexed_string (dwarf_vma idx, str + struct dwarf_section *str_section = &debug_displays [str_sec_idx].section; + dwarf_vma index_offset = idx * offset_size; + dwarf_vma str_offset; ++ const char * ret; + + if (index_section->start == NULL) + return (dwo ? 
_("") +@@ -628,7 +643,7 @@ fetch_indexed_string (dwarf_vma idx, str + + if (this_set != NULL) + index_offset += this_set->section_offsets [DW_SECT_STR_OFFSETS]; +- if (index_offset > index_section->size) ++ if (index_offset >= index_section->size) + { + warn (_("DW_FORM_GNU_str_index offset too big: %s\n"), + dwarf_vmatoa ("x", index_offset)); +@@ -641,14 +656,22 @@ fetch_indexed_string (dwarf_vma idx, str + + str_offset = byte_get (index_section->start + index_offset, offset_size); + str_offset -= str_section->address; +- if (str_offset > str_section->size) ++ if (str_offset >= str_section->size) + { + warn (_("DW_FORM_GNU_str_index indirect offset too big: %s\n"), + dwarf_vmatoa ("x", str_offset)); + return _(""); + } + +- return (const char *) str_section->start + str_offset; ++ ret = (const char *) str_section->start + str_offset; ++ /* Unfortunately we cannot rely upon str_section ending with a NUL byte. ++ Since our caller is expecting to receive a well formed C string we test ++ for the lack of a terminating byte here. */ ++ if (strnlen (ret, str_section->size - str_offset) ++ == str_section->size - str_offset) ++ ret = (const char *) _(""); ++ ++ return ret; + } + + static const char * +Index: git/binutils/ChangeLog +=================================================================== +--- git.orig/binutils/ChangeLog ++++ git/binutils/ChangeLog +@@ -1,3 +1,13 @@ ++2017-04-28 Nick Clifton ++ ++ PR binutils/21438 ++ * dwarf.c (process_extended_line_op): Do not assume that the ++ string extracted from the section is NUL terminated. ++ (fetch_indirect_string): If the string retrieved from the section ++ is not NUL terminated, return an error message. ++ (fetch_indirect_line_string): Likewise. ++ (fetch_indexed_string): Likewise. ++ + 2017-02-14 Nick Clifton + + PR binutils/21157 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8421.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8421.patch new file mode 100644 index 000000000..7969c66f3 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-8421.patch @@ -0,0 +1,52 @@ +From 39ff1b79f687b65f4144ddb379f22587003443fb Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Tue, 2 May 2017 11:54:53 +0100 +Subject: [PATCH] Prevent memory exhaustion from a corrupt PE binary with an + overlarge number of relocs. + + PR 21440 + * objdump.c (dump_relocs_in_section): Check for an excessive + number of relocs before attempting to dump them. 
+ +Upstream-Status: Backport +CVE: CVE-2017-8421 +Signed-off-by: Armin Kuster + +--- + binutils/ChangeLog | 6 ++++++ + binutils/objdump.c | 8 ++++++++ + 2 files changed, 14 insertions(+) + +Index: git/binutils/objdump.c +=================================================================== +--- git.orig/binutils/objdump.c ++++ git/binutils/objdump.c +@@ -3311,6 +3311,14 @@ dump_relocs_in_section (bfd *abfd, + return; + } + ++ if ((bfd_get_file_flags (abfd) & (BFD_IN_MEMORY | BFD_LINKER_CREATED)) == 0 ++ && relsize > get_file_size (bfd_get_filename (abfd))) ++ { ++ printf (" (too many: 0x%x)\n", section->reloc_count); ++ bfd_set_error (bfd_error_file_truncated); ++ bfd_fatal (bfd_get_filename (abfd)); ++ } ++ + relpp = (arelent **) xmalloc (relsize); + relcount = bfd_canonicalize_reloc (abfd, section, relpp, syms); + +Index: git/binutils/ChangeLog +=================================================================== +--- git.orig/binutils/ChangeLog ++++ git/binutils/ChangeLog +@@ -1,3 +1,9 @@ ++2017-05-02 Nick Clifton ++ ++ PR 21440 ++ * objdump.c (dump_relocs_in_section): Check for an excessive ++ number of relocs before attempting to dump them. ++ + 2017-04-28 Nick Clifton + + PR binutils/21438 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9038_9044.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9038_9044.patch new file mode 100644 index 000000000..535efc314 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9038_9044.patch @@ -0,0 +1,51 @@ +From f32ba72991d2406b21ab17edc234a2f3fa7fb23d Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Mon, 3 Apr 2017 11:01:45 +0100 +Subject: [PATCH] readelf: Update check for invalid word offsets in ARM unwind + information. + + PR binutils/21343 + * readelf.c (get_unwind_section_word): Fix snafu checking for + invalid word offsets in ARM unwind information. + +Upstream-Status: Backport +CVE: CVE-2017-9038 +CVE: CVE-2017-9044 +Signed-off-by: Armin Kuster + +--- + binutils/ChangeLog | 6 ++++++ + binutils/readelf.c | 6 +++--- + 2 files changed, 9 insertions(+), 3 deletions(-) + +Index: git/binutils/readelf.c +=================================================================== +--- git.orig/binutils/readelf.c ++++ git/binutils/readelf.c +@@ -7972,9 +7972,9 @@ get_unwind_section_word (struct arm_unw_ + return FALSE; + + /* If the offset is invalid then fail. */ +- if (word_offset > (sec->sh_size - 4) +- /* PR 18879 */ +- || (sec->sh_size < 5 && word_offset >= sec->sh_size) ++ if (/* PR 21343 *//* PR 18879 */ ++ sec->sh_size < 4 ++ || word_offset > (sec->sh_size - 4) + || ((bfd_signed_vma) word_offset) < 0) + return FALSE; + +Index: git/binutils/ChangeLog +=================================================================== +--- git.orig/binutils/ChangeLog ++++ git/binutils/ChangeLog +@@ -1,3 +1,9 @@ ++2017-04-03 Nick Clifton ++ ++ PR binutils/21343 ++ * readelf.c (get_unwind_section_word): Fix snafu checking for ++ invalid word offsets in ARM unwind information. 
++ + 2017-05-02 Nick Clifton + + PR 21440 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9039.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9039.patch new file mode 100644 index 000000000..aed8f7f40 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9039.patch @@ -0,0 +1,61 @@ +From 82156ab704b08b124d319c0decdbd48b3ca2dac5 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Mon, 3 Apr 2017 12:14:06 +0100 +Subject: [PATCH] readelf: Fix overlarge memory allocation when reading a + binary with an excessive number of program headers. + + PR binutils/21345 + * readelf.c (get_program_headers): Check for there being too many + program headers before attempting to allocate space for them. + +Upstream-Status: Backport +CVE: CVE-2017-9039 +Signed-off-by: Armin Kuster + +--- + binutils/ChangeLog | 6 ++++++ + binutils/readelf.c | 17 ++++++++++++++--- + 2 files changed, 20 insertions(+), 3 deletions(-) + +Index: git/binutils/readelf.c +=================================================================== +--- git.orig/binutils/readelf.c ++++ git/binutils/readelf.c +@@ -4765,9 +4765,19 @@ get_program_headers (FILE * file) + if (program_headers != NULL) + return 1; + +- phdrs = (Elf_Internal_Phdr *) cmalloc (elf_header.e_phnum, +- sizeof (Elf_Internal_Phdr)); ++ /* Be kind to memory checkers by looking for ++ e_phnum values which we know must be invalid. */ ++ if (elf_header.e_phnum ++ * (is_32bit_elf ? sizeof (Elf32_External_Phdr) : sizeof (Elf64_External_Phdr)) ++ >= current_file_size) ++ { ++ error (_("Too many program headers - %#x - the file is not that big\n"), ++ elf_header.e_phnum); ++ return FALSE; ++ } + ++ phdrs = (Elf_Internal_Phdr *) cmalloc (elf_header.e_phnum, ++ sizeof (Elf_Internal_Phdr)); + if (phdrs == NULL) + { + error (_("Out of memory reading %u program headers\n"), +Index: git/binutils/ChangeLog +=================================================================== +--- git.orig/binutils/ChangeLog ++++ git/binutils/ChangeLog +@@ -1,5 +1,11 @@ + 2017-04-03 Nick Clifton + ++ PR binutils/21345 ++ * readelf.c (get_program_headers): Check for there being too many ++ program headers before attempting to allocate space for them. ++ ++2017-04-03 Nick Clifton ++ + PR binutils/21343 + * readelf.c (get_unwind_section_word): Fix snafu checking for + invalid word offsets in ARM unwind information. diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9040_9042.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9040_9042.patch new file mode 100644 index 000000000..79c6a7d8a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9040_9042.patch @@ -0,0 +1,57 @@ +From 7296a62a2a237f6b1ad8db8c38b090e9f592c8cf Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Thu, 13 Apr 2017 16:06:30 +0100 +Subject: [PATCH] readelf: fix out of range subtraction, seg fault from a NULL + pointer and memory exhaustion, all from parsing corrupt binaries. + + PR binutils/21379 + * readelf.c (process_dynamic_section): Detect over large section + offsets in the DT_SYMTAB entry. + + PR binutils/21345 + * readelf.c (process_mips_specific): Catch an unfeasible memory + allocation before it happens and print a suitable error message. 
+ +Upstream-Status: Backport +CVE: CVE-2017-9040 +CVE: CVE-2017-9042 +Signed-off-by: Armin Kuster + +--- + binutils/ChangeLog | 12 ++++++++++++ + binutils/readelf.c | 26 +++++++++++++++++++++----- + 2 files changed, 33 insertions(+), 5 deletions(-) + +Index: git/binutils/readelf.c +=================================================================== +--- git.orig/binutils/readelf.c ++++ git/binutils/readelf.c +@@ -9306,6 +9306,12 @@ process_dynamic_section (FILE * file) + processing that. This is overkill, I know, but it + should work. */ + section.sh_offset = offset_from_vma (file, entry->d_un.d_val, 0); ++ if ((bfd_size_type) section.sh_offset > current_file_size) ++ { ++ /* See PR 21379 for a reproducer. */ ++ error (_("Invalid DT_SYMTAB entry: %lx"), (long) section.sh_offset); ++ return FALSE; ++ } + + if (archive_file_offset != 0) + section.sh_size = archive_file_size - section.sh_offset; +@@ -15175,6 +15181,15 @@ process_mips_specific (FILE * file) + return 0; + } + ++ /* PR 21345 - print a slightly more helpful error message ++ if we are sure that the cmalloc will fail. */ ++ if (conflictsno * sizeof (* iconf) > current_file_size) ++ { ++ error (_("Overlarge number of conflicts detected: %lx\n"), ++ (long) conflictsno); ++ return FALSE; ++ } ++ + iconf = (Elf32_Conflict *) cmalloc (conflictsno, sizeof (* iconf)); + if (iconf == NULL) + { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9742.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9742.patch new file mode 100644 index 000000000..0c9ed0d2a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9742.patch @@ -0,0 +1,45 @@ +From e64519d1ed7fd8f990f05a5562d5b5c0c44b7d7e Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Wed, 14 Jun 2017 17:10:28 +0100 +Subject: [PATCH] Fix seg-fault when trying to disassemble a corrupt score + binary. + + PR binutils/21576 + * score7-dis.c (score_opcodes): Add sentinel. + +Upstream-Status: Backport +CVE: CVE-2017-9742 +Signed-off-by: Armin Kuster + +--- + opcodes/ChangeLog | 5 +++++ + opcodes/score7-dis.c | 3 ++- + 2 files changed, 7 insertions(+), 1 deletion(-) + +Index: git/opcodes/score7-dis.c +=================================================================== +--- git.orig/opcodes/score7-dis.c ++++ git/opcodes/score7-dis.c +@@ -513,7 +513,8 @@ static struct score_opcode score_opcodes + {0x00000d05, 0x00007f0f, "tvc!"}, + {0x00000026, 0x3e0003ff, "xor\t\t%20-24r, %15-19r, %10-14r"}, + {0x00000027, 0x3e0003ff, "xor.c\t\t%20-24r, %15-19r, %10-14r"}, +- {0x00002007, 0x0000700f, "xor!\t\t%8-11r, %4-7r"} ++ {0x00002007, 0x0000700f, "xor!\t\t%8-11r, %4-7r"}, ++ { 0, 0, NULL } + }; + + typedef struct +Index: git/opcodes/ChangeLog +=================================================================== +--- git.orig/opcodes/ChangeLog ++++ git/opcodes/ChangeLog +@@ -1,3 +1,8 @@ ++2017-06-14 Nick Clifton ++ ++ PR binutils/21576 ++ * score7-dis.c (score_opcodes): Add sentinel. 
++ + 2017-03-07 Alan Modra + + Apply from master diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9744.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9744.patch new file mode 100644 index 000000000..c34a5a6ec --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9744.patch @@ -0,0 +1,46 @@ +From f461bbd847f15657f3dd2f317c30c75a7520da1f Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Wed, 14 Jun 2017 17:01:54 +0100 +Subject: [PATCH] Fix address violation bug when disassembling a corrupt SH + binary. + + PR binutils/21578 + * elf32-sh.c (sh_elf_set_mach_from_flags): Fix check for invalid + flag value. + +Upstream-Status: Backport +CVE: CVE-2017-9744 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 6 ++++++ + bfd/elf32-sh.c | 2 +- + 2 files changed, 7 insertions(+), 1 deletion(-) + +Index: git/bfd/elf32-sh.c +=================================================================== +--- git.orig/bfd/elf32-sh.c ++++ git/bfd/elf32-sh.c +@@ -6344,7 +6344,7 @@ sh_elf_set_mach_from_flags (bfd *abfd) + { + flagword flags = elf_elfheader (abfd)->e_flags & EF_SH_MACH_MASK; + +- if (flags >= sizeof(sh_ef_bfd_table)) ++ if (flags >= ARRAY_SIZE (sh_ef_bfd_table)) + return FALSE; + + if (sh_ef_bfd_table[flags] == 0) +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,9 @@ ++2017-06-14 Nick Clifton ++ ++ PR binutils/21578 ++ * elf32-sh.c (sh_elf_set_mach_from_flags): Fix check for invalid ++ flag value. ++ + 2017-04-29 Alan Modra + + PR 21432 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9745.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9745.patch new file mode 100644 index 000000000..0b3885b94 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9745.patch @@ -0,0 +1,35 @@ +From 76800cba595efc3fe95a446c2d664e42ae4ee869 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Thu, 15 Jun 2017 12:08:57 +0100 +Subject: [PATCH] Handle EITR records in VMS Alpha binaries with overlarge + command length parameters. + + PR binutils/21579 + * vms-alpha.c (_bfd_vms_slurp_etir): Extend check of cmd_length. 
+ +Upstream-Status: Backport +CVE: CVE-2017-9745 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 5 +++++ + bfd/vms-alpha.c | 16 ++++++++-------- + 2 files changed, 13 insertions(+), 8 deletions(-) + +Index: git/bfd/vms-alpha.c +=================================================================== +--- git.orig/bfd/vms-alpha.c ++++ git/bfd/vms-alpha.c +@@ -1741,6 +1741,12 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + _bfd_hexdump (8, ptr, cmd_length - 4, 0); + #endif + ++#if VMS_DEBUG ++ _bfd_vms_debug (4, "etir: %s(%d)\n", ++ _bfd_vms_etir_name (cmd), cmd); ++ _bfd_hexdump (8, ptr, cmd_length - 4, 0); ++#endif ++ + switch (cmd) + { + /* Stack global diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9746.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9746.patch new file mode 100644 index 000000000..bd4a40c35 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9746.patch @@ -0,0 +1,91 @@ +From ae87f7e73eba29bd38b3a9684a10b948ed715612 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Wed, 14 Jun 2017 16:50:03 +0100 +Subject: [PATCH] Fix address violation when disassembling a corrupt binary. + + PR binutils/21580 +binutils * objdump.c (disassemble_bytes): Check for buffer overrun when + printing out rae insns. + +ld * testsuite/ld-nds32/diff.d: Adjust expected output. + +Upstream-Status: Backport +CVE: CVE-2017-9746 +Signed-off-by: Armin Kuster + +--- + binutils/objdump.c | 27 +++++++++++++++------------ + ld/ChangeLog | 5 +++++ + ld/testsuite/ld-nds32/diff.d | 6 +++--- + 3 files changed, 23 insertions(+), 15 deletions(-) + +Index: git/binutils/objdump.c +=================================================================== +--- git.orig/binutils/objdump.c ++++ git/binutils/objdump.c +@@ -1855,20 +1855,23 @@ disassemble_bytes (struct disassemble_in + + for (j = addr_offset * opb; j < addr_offset * opb + pb; j += bpc) + { +- int k; +- +- if (bpc > 1 && inf->display_endian == BFD_ENDIAN_LITTLE) +- { +- for (k = bpc - 1; k >= 0; k--) +- printf ("%02x", (unsigned) data[j + k]); +- putchar (' '); +- } +- else ++ /* PR 21580: Check for a buffer ending early. */ ++ if (j + bpc <= stop_offset * opb) + { +- for (k = 0; k < bpc; k++) +- printf ("%02x", (unsigned) data[j + k]); +- putchar (' '); ++ int k; ++ ++ if (inf->display_endian == BFD_ENDIAN_LITTLE) ++ { ++ for (k = bpc - 1; k >= 0; k--) ++ printf ("%02x", (unsigned) data[j + k]); ++ } ++ else ++ { ++ for (k = 0; k < bpc; k++) ++ printf ("%02x", (unsigned) data[j + k]); ++ } + } ++ putchar (' '); + } + + for (; pb < octets_per_line; pb += bpc) +Index: git/ld/testsuite/ld-nds32/diff.d +=================================================================== +--- git.orig/ld/testsuite/ld-nds32/diff.d ++++ git/ld/testsuite/ld-nds32/diff.d +@@ -7,9 +7,9 @@ + + Disassembly of section .data: + 00008000 (7e 00 00 00|00 00 00 7e).* +-00008004 (7e 00 7e fe|00 7e 7e fe).* +-00008006 7e fe 00 fe.* +-00008007 fe 00.* ++00008004 (7e 00|00 7e).* ++00008006 7e.* ++00008007 fe.* + ... + 00008009 fe 00.* + .* +Index: git/ld/ChangeLog +=================================================================== +--- git.orig/ld/ChangeLog ++++ git/ld/ChangeLog +@@ -1,3 +1,8 @@ ++2017-06-14 Nick Clifton ++ ++ PR binutils/21580 ++ * testsuite/ld-nds32/diff.d: Adjust expected output. 
++ + 2017-03-07 Alan Modra + + * ldlang.c (open_input_bfds): Check that lang_assignment_statement diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9747.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9747.patch new file mode 100644 index 000000000..41ead54a9 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9747.patch @@ -0,0 +1,43 @@ +From 62b76e4b6e0b4cb5b3e0053d1de4097b32577049 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Thu, 15 Jun 2017 13:08:47 +0100 +Subject: [PATCH] Fix address violation parsing a corrupt ieee binary. + + PR binutils/21581 + (ieee_archive_p): Use a static buffer to avoid compiler bugs. + +Upstream-Status: Backport +CVE: CVE-2017-9747 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 2 ++ + bfd/ieee.c | 2 +- + 2 files changed, 3 insertions(+), 1 deletion(-) + +Index: git/bfd/ieee.c +=================================================================== +--- git.orig/bfd/ieee.c ++++ git/bfd/ieee.c +@@ -1357,7 +1357,7 @@ ieee_archive_p (bfd *abfd) + { + char *library; + unsigned int i; +- unsigned char buffer[512]; ++ static unsigned char buffer[512]; + file_ptr buffer_offset = 0; + ieee_ar_data_type *save = abfd->tdata.ieee_ar_data; + ieee_ar_data_type *ieee; +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,8 @@ ++2017-06-15 Nick Clifton ++ ++ PR binutils/21581 ++ (ieee_archive_p): Likewise. ++ + 2017-06-14 Nick Clifton + + PR binutils/21578 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9748.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9748.patch new file mode 100644 index 000000000..02070235a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9748.patch @@ -0,0 +1,46 @@ +From 63634bb4a107877dd08b6282e28e11cfd1a1649e Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Thu, 15 Jun 2017 12:44:23 +0100 +Subject: [PATCH] Avoid a possible compiler bug by using a static buffer + instead of a stack local buffer. + + PR binutils/21582 + * ieee.c (ieee_object_p): Use a static buffer to avoid compiler + bugs. + +Upstream-Status: Backport +CVE: CVE-2017-9748 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 6 ++++++ + bfd/ieee.c | 2 +- + 2 files changed, 7 insertions(+), 1 deletion(-) + +Index: git/bfd/ieee.c +=================================================================== +--- git.orig/bfd/ieee.c ++++ git/bfd/ieee.c +@@ -1875,7 +1875,7 @@ ieee_object_p (bfd *abfd) + char *processor; + unsigned int part; + ieee_data_type *ieee; +- unsigned char buffer[300]; ++ static unsigned char buffer[300]; + ieee_data_type *save = IEEE_DATA (abfd); + bfd_size_type amt; + +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,5 +1,9 @@ + 2017-06-15 Nick Clifton + ++ PR binutils/21582 ++ * ieee.c (ieee_object_p): Use a static buffer to avoid compiler ++ bugs. ++ + PR binutils/21581 + (ieee_archive_p): Likewise. 
+ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9749.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9749.patch new file mode 100644 index 000000000..3cc2afc91 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9749.patch @@ -0,0 +1,77 @@ +From 08c7881b814c546efc3996fd1decdf0877f7a779 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Thu, 15 Jun 2017 11:52:02 +0100 +Subject: [PATCH] Prevent invalid array accesses when disassembling a corrupt + bfin binary. + + PR binutils/21586 + * bfin-dis.c (gregs): Clip index to prevent overflow. + (regs): Likewise. + (regs_lo): Likewise. + (regs_hi): Likewise. + +Upstream-Status: Backport +CVE: CVE-2017-9749 +Signed-off-by: Armin Kuster + +--- + opcodes/ChangeLog | 8 ++++++++ + opcodes/bfin-dis.c | 8 ++++---- + 2 files changed, 12 insertions(+), 4 deletions(-) + +Index: git/opcodes/ChangeLog +=================================================================== +--- git.orig/opcodes/ChangeLog ++++ git/opcodes/ChangeLog +@@ -1,3 +1,11 @@ ++2017-06-15 Nick Clifton ++ ++ PR binutils/21586 ++ * bfin-dis.c (gregs): Clip index to prevent overflow. ++ (regs): Likewise. ++ (regs_lo): Likewise. ++ (regs_hi): Likewise. ++ + 2017-06-14 Nick Clifton + + PR binutils/21576 +Index: git/opcodes/bfin-dis.c +=================================================================== +--- git.orig/opcodes/bfin-dis.c ++++ git/opcodes/bfin-dis.c +@@ -350,7 +350,7 @@ static const enum machine_registers deco + REG_P0, REG_P1, REG_P2, REG_P3, REG_P4, REG_P5, REG_SP, REG_FP, + }; + +-#define gregs(x, i) REGNAME (decode_gregs[((i) << 3) | (x)]) ++#define gregs(x, i) REGNAME (decode_gregs[(((i) << 3) | (x)) & 15]) + + /* [dregs pregs (iregs mregs) (bregs lregs)]. */ + static const enum machine_registers decode_regs[] = +@@ -361,7 +361,7 @@ static const enum machine_registers deco + REG_B0, REG_B1, REG_B2, REG_B3, REG_L0, REG_L1, REG_L2, REG_L3, + }; + +-#define regs(x, i) REGNAME (decode_regs[((i) << 3) | (x)]) ++#define regs(x, i) REGNAME (decode_regs[(((i) << 3) | (x)) & 31]) + + /* [dregs pregs (iregs mregs) (bregs lregs) Low Half]. */ + static const enum machine_registers decode_regs_lo[] = +@@ -372,7 +372,7 @@ static const enum machine_registers deco + REG_BL0, REG_BL1, REG_BL2, REG_BL3, REG_LL0, REG_LL1, REG_LL2, REG_LL3, + }; + +-#define regs_lo(x, i) REGNAME (decode_regs_lo[((i) << 3) | (x)]) ++#define regs_lo(x, i) REGNAME (decode_regs_lo[(((i) << 3) | (x)) & 31]) + + /* [dregs pregs (iregs mregs) (bregs lregs) High Half]. 
*/ + static const enum machine_registers decode_regs_hi[] = +@@ -383,7 +383,7 @@ static const enum machine_registers deco + REG_BH0, REG_BH1, REG_BH2, REG_BH3, REG_LH0, REG_LH1, REG_LH2, REG_LH3, + }; + +-#define regs_hi(x, i) REGNAME (decode_regs_hi[((i) << 3) | (x)]) ++#define regs_hi(x, i) REGNAME (decode_regs_hi[(((i) << 3) | (x)) & 31]) + + static const enum machine_registers decode_statbits[] = + { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9750.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9750.patch new file mode 100644 index 000000000..fe8fa6934 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9750.patch @@ -0,0 +1,247 @@ +From db5fa770268baf8cc82cf9b141d69799fd485fe2 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Wed, 14 Jun 2017 13:35:06 +0100 +Subject: [PATCH] Fix address violation problems when disassembling a corrupt + RX binary. + + PR binutils/21587 + * rx-decode.opc: Include libiberty.h + (GET_SCALE): New macro - validates access to SCALE array. + (GET_PSCALE): New macro - validates access to PSCALE array. + (DIs, SIs, S2Is, rx_disp): Use new macros. + * rx-decode.c: Regenerate. + +Upstream-Status: Backport +CVE: CVE-2017-9750 +Signed-off-by: Armin Kuster + +--- + opcodes/ChangeLog | 9 +++++++++ + opcodes/rx-decode.c | 24 ++++++++++++++---------- + opcodes/rx-decode.opc | 24 ++++++++++++++---------- + 3 files changed, 37 insertions(+), 20 deletions(-) + +Index: git/opcodes/rx-decode.c +=================================================================== +--- git.orig/opcodes/rx-decode.c ++++ git/opcodes/rx-decode.c +@@ -27,6 +27,7 @@ + #include + #include "ansidecl.h" + #include "opcode/rx.h" ++#include "libiberty.h" + + #define RX_OPCODE_BIG_ENDIAN 0 + +@@ -45,7 +46,7 @@ static int trace = 0; + #define LSIZE 2 + + /* These are for when the upper bits are "don't care" or "undefined". */ +-static int bwl[] = ++static int bwl[4] = + { + RX_Byte, + RX_Word, +@@ -53,7 +54,7 @@ static int bwl[] = + RX_Bad_Size /* Bogus instructions can have a size field set to 3. */ + }; + +-static int sbwl[] = ++static int sbwl[4] = + { + RX_SByte, + RX_SWord, +@@ -61,7 +62,7 @@ static int sbwl[] = + RX_Bad_Size /* Bogus instructions can have a size field set to 3. */ + }; + +-static int ubw[] = ++static int ubw[4] = + { + RX_UByte, + RX_UWord, +@@ -69,7 +70,7 @@ static int ubw[] = + RX_Bad_Size /* Bogus instructions can have a size field set to 3. */ + }; + +-static int memex[] = ++static int memex[4] = + { + RX_SByte, + RX_SWord, +@@ -89,6 +90,9 @@ static int SCALE[] = { 1, 2, 4, 0 }; + /* This is for the prefix size enum. */ + static int PSCALE[] = { 4, 1, 1, 1, 2, 2, 2, 3, 4 }; + ++#define GET_SCALE(_indx) ((unsigned)(_indx) < ARRAY_SIZE (SCALE) ? SCALE[(_indx)] : 0) ++#define GET_PSCALE(_indx) ((unsigned)(_indx) < ARRAY_SIZE (PSCALE) ? 
PSCALE[(_indx)] : 0) ++ + static int flagmap[] = {0, 1, 2, 3, 0, 0, 0, 0, + 16, 17, 0, 0, 0, 0, 0, 0 }; + +@@ -107,7 +111,7 @@ static int dsp3map[] = { 8, 9, 10, 3, 4, + #define DC(c) OP (0, RX_Operand_Immediate, 0, c) + #define DR(r) OP (0, RX_Operand_Register, r, 0) + #define DI(r,a) OP (0, RX_Operand_Indirect, r, a) +-#define DIs(r,a,s) OP (0, RX_Operand_Indirect, r, (a) * SCALE[s]) ++#define DIs(r,a,s) OP (0, RX_Operand_Indirect, r, (a) * GET_SCALE (s)) + #define DD(t,r,s) rx_disp (0, t, r, bwl[s], ld); + #define DF(r) OP (0, RX_Operand_Flag, flagmap[r], 0) + +@@ -115,7 +119,7 @@ static int dsp3map[] = { 8, 9, 10, 3, 4, + #define SR(r) OP (1, RX_Operand_Register, r, 0) + #define SRR(r) OP (1, RX_Operand_TwoReg, r, 0) + #define SI(r,a) OP (1, RX_Operand_Indirect, r, a) +-#define SIs(r,a,s) OP (1, RX_Operand_Indirect, r, (a) * SCALE[s]) ++#define SIs(r,a,s) OP (1, RX_Operand_Indirect, r, (a) * GET_SCALE (s)) + #define SD(t,r,s) rx_disp (1, t, r, bwl[s], ld); + #define SP(t,r) rx_disp (1, t, r, (t!=3) ? RX_UByte : RX_Long, ld); P(t, 1); + #define SPm(t,r,m) rx_disp (1, t, r, memex[m], ld); rx->op[1].size = memex[m]; +@@ -124,7 +128,7 @@ static int dsp3map[] = { 8, 9, 10, 3, 4, + #define S2C(i) OP (2, RX_Operand_Immediate, 0, i) + #define S2R(r) OP (2, RX_Operand_Register, r, 0) + #define S2I(r,a) OP (2, RX_Operand_Indirect, r, a) +-#define S2Is(r,a,s) OP (2, RX_Operand_Indirect, r, (a) * SCALE[s]) ++#define S2Is(r,a,s) OP (2, RX_Operand_Indirect, r, (a) * GET_SCALE (s)) + #define S2D(t,r,s) rx_disp (2, t, r, bwl[s], ld); + #define S2P(t,r) rx_disp (2, t, r, (t!=3) ? RX_UByte : RX_Long, ld); P(t, 2); + #define S2Pm(t,r,m) rx_disp (2, t, r, memex[m], ld); rx->op[2].size = memex[m]; +@@ -211,7 +215,7 @@ immediate (int sfield, int ex, LocalData + } + + static void +-rx_disp (int n, int type, int reg, int size, LocalData * ld) ++rx_disp (int n, int type, int reg, unsigned int size, LocalData * ld) + { + int disp; + +@@ -228,7 +232,7 @@ rx_disp (int n, int type, int reg, int s + case 1: + ld->rx->op[n].type = RX_Operand_Indirect; + disp = GETBYTE (); +- ld->rx->op[n].addend = disp * PSCALE[size]; ++ ld->rx->op[n].addend = disp * GET_PSCALE (size); + break; + case 2: + ld->rx->op[n].type = RX_Operand_Indirect; +@@ -238,7 +242,7 @@ rx_disp (int n, int type, int reg, int s + #else + disp = disp + GETBYTE () * 256; + #endif +- ld->rx->op[n].addend = disp * PSCALE[size]; ++ ld->rx->op[n].addend = disp * GET_PSCALE (size); + break; + default: + abort (); +Index: git/opcodes/rx-decode.opc +=================================================================== +--- git.orig/opcodes/rx-decode.opc ++++ git/opcodes/rx-decode.opc +@@ -26,6 +26,7 @@ + #include + #include "ansidecl.h" + #include "opcode/rx.h" ++#include "libiberty.h" + + #define RX_OPCODE_BIG_ENDIAN 0 + +@@ -44,7 +45,7 @@ static int trace = 0; + #define LSIZE 2 + + /* These are for when the upper bits are "don't care" or "undefined". */ +-static int bwl[] = ++static int bwl[4] = + { + RX_Byte, + RX_Word, +@@ -52,7 +53,7 @@ static int bwl[] = + RX_Bad_Size /* Bogus instructions can have a size field set to 3. */ + }; + +-static int sbwl[] = ++static int sbwl[4] = + { + RX_SByte, + RX_SWord, +@@ -60,7 +61,7 @@ static int sbwl[] = + RX_Bad_Size /* Bogus instructions can have a size field set to 3. */ + }; + +-static int ubw[] = ++static int ubw[4] = + { + RX_UByte, + RX_UWord, +@@ -68,7 +69,7 @@ static int ubw[] = + RX_Bad_Size /* Bogus instructions can have a size field set to 3. 
*/ + }; + +-static int memex[] = ++static int memex[4] = + { + RX_SByte, + RX_SWord, +@@ -88,6 +89,9 @@ static int SCALE[] = { 1, 2, 4, 0 }; + /* This is for the prefix size enum. */ + static int PSCALE[] = { 4, 1, 1, 1, 2, 2, 2, 3, 4 }; + ++#define GET_SCALE(_indx) ((unsigned)(_indx) < ARRAY_SIZE (SCALE) ? SCALE[(_indx)] : 0) ++#define GET_PSCALE(_indx) ((unsigned)(_indx) < ARRAY_SIZE (PSCALE) ? PSCALE[(_indx)] : 0) ++ + static int flagmap[] = {0, 1, 2, 3, 0, 0, 0, 0, + 16, 17, 0, 0, 0, 0, 0, 0 }; + +@@ -106,7 +110,7 @@ static int dsp3map[] = { 8, 9, 10, 3, 4, + #define DC(c) OP (0, RX_Operand_Immediate, 0, c) + #define DR(r) OP (0, RX_Operand_Register, r, 0) + #define DI(r,a) OP (0, RX_Operand_Indirect, r, a) +-#define DIs(r,a,s) OP (0, RX_Operand_Indirect, r, (a) * SCALE[s]) ++#define DIs(r,a,s) OP (0, RX_Operand_Indirect, r, (a) * GET_SCALE (s)) + #define DD(t,r,s) rx_disp (0, t, r, bwl[s], ld); + #define DF(r) OP (0, RX_Operand_Flag, flagmap[r], 0) + +@@ -114,7 +118,7 @@ static int dsp3map[] = { 8, 9, 10, 3, 4, + #define SR(r) OP (1, RX_Operand_Register, r, 0) + #define SRR(r) OP (1, RX_Operand_TwoReg, r, 0) + #define SI(r,a) OP (1, RX_Operand_Indirect, r, a) +-#define SIs(r,a,s) OP (1, RX_Operand_Indirect, r, (a) * SCALE[s]) ++#define SIs(r,a,s) OP (1, RX_Operand_Indirect, r, (a) * GET_SCALE (s)) + #define SD(t,r,s) rx_disp (1, t, r, bwl[s], ld); + #define SP(t,r) rx_disp (1, t, r, (t!=3) ? RX_UByte : RX_Long, ld); P(t, 1); + #define SPm(t,r,m) rx_disp (1, t, r, memex[m], ld); rx->op[1].size = memex[m]; +@@ -123,7 +127,7 @@ static int dsp3map[] = { 8, 9, 10, 3, 4, + #define S2C(i) OP (2, RX_Operand_Immediate, 0, i) + #define S2R(r) OP (2, RX_Operand_Register, r, 0) + #define S2I(r,a) OP (2, RX_Operand_Indirect, r, a) +-#define S2Is(r,a,s) OP (2, RX_Operand_Indirect, r, (a) * SCALE[s]) ++#define S2Is(r,a,s) OP (2, RX_Operand_Indirect, r, (a) * GET_SCALE (s)) + #define S2D(t,r,s) rx_disp (2, t, r, bwl[s], ld); + #define S2P(t,r) rx_disp (2, t, r, (t!=3) ? RX_UByte : RX_Long, ld); P(t, 2); + #define S2Pm(t,r,m) rx_disp (2, t, r, memex[m], ld); rx->op[2].size = memex[m]; +@@ -210,7 +214,7 @@ immediate (int sfield, int ex, LocalData + } + + static void +-rx_disp (int n, int type, int reg, int size, LocalData * ld) ++rx_disp (int n, int type, int reg, unsigned int size, LocalData * ld) + { + int disp; + +@@ -227,7 +231,7 @@ rx_disp (int n, int type, int reg, int s + case 1: + ld->rx->op[n].type = RX_Operand_Indirect; + disp = GETBYTE (); +- ld->rx->op[n].addend = disp * PSCALE[size]; ++ ld->rx->op[n].addend = disp * GET_PSCALE (size); + break; + case 2: + ld->rx->op[n].type = RX_Operand_Indirect; +@@ -237,7 +241,7 @@ rx_disp (int n, int type, int reg, int s + #else + disp = disp + GETBYTE () * 256; + #endif +- ld->rx->op[n].addend = disp * PSCALE[size]; ++ ld->rx->op[n].addend = disp * GET_PSCALE (size); + break; + default: + abort (); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9751.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9751.patch new file mode 100644 index 000000000..d7c18cf85 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9751.patch @@ -0,0 +1,3748 @@ +From 63323b5b23bd83fa7b04ea00dff593c933e9b0e3 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Thu, 15 Jun 2017 12:37:01 +0100 +Subject: [PATCH] Fix address violation when disassembling a corrupt RL78 + binary. + + PR binutils/21588 + * rl78-decode.opc (OP_BUF_LEN): Define. 
+ (GETBYTE): Check for the index exceeding OP_BUF_LEN. + (rl78_decode_opcode): Use OP_BUF_LEN as the length of the op_buf + array. + * rl78-decode.c: Regenerate. + +Upstream-Status: Backport +CVE: CVE-2017-9751 +Signed-off-by: Armin Kuster + +--- + opcodes/ChangeLog | 9 + + opcodes/rl78-decode.c | 820 ++++++++++++++++++++++++------------------------ + opcodes/rl78-decode.opc | 6 +- + 3 files changed, 424 insertions(+), 411 deletions(-) + +diff --git a/opcodes/ChangeLog b/opcodes/ChangeLog +index 34b1844..c77f00a 100644 +--- a/opcodes/ChangeLog ++++ b/opcodes/ChangeLog +@@ -1,5 +1,14 @@ + 2017-06-15 Nick Clifton + ++ PR binutils/21588 ++ * rl78-decode.opc (OP_BUF_LEN): Define. ++ (GETBYTE): Check for the index exceeding OP_BUF_LEN. ++ (rl78_decode_opcode): Use OP_BUF_LEN as the length of the op_buf ++ array. ++ * rl78-decode.c: Regenerate. ++ ++2017-06-15 Nick Clifton ++ + PR binutils/21586 + * bfin-dis.c (gregs): Clip index to prevent overflow. + (regs): Likewise. +diff --git a/opcodes/rl78-decode.c b/opcodes/rl78-decode.c +index d0566ea..b2d4bd6 100644 +--- a/opcodes/rl78-decode.c ++++ b/opcodes/rl78-decode.c +@@ -51,7 +51,9 @@ typedef struct + #define W() rl78->size = RL78_Word + + #define AU ATTRIBUTE_UNUSED +-#define GETBYTE() (ld->op [ld->rl78->n_bytes++] = ld->getbyte (ld->ptr)) ++ ++#define OP_BUF_LEN 20 ++#define GETBYTE() (ld->rl78->n_bytes < (OP_BUF_LEN - 1) ? ld->op [ld->rl78->n_bytes++] = ld->getbyte (ld->ptr): 0) + #define B ((unsigned long) GETBYTE()) + + #define SYNTAX(x) rl78->syntax = x +@@ -169,7 +171,7 @@ rl78_decode_opcode (unsigned long pc AU, + RL78_Dis_Isa isa) + { + LocalData lds, * ld = &lds; +- unsigned char op_buf[20] = {0}; ++ unsigned char op_buf[OP_BUF_LEN] = {0}; + unsigned char *op = op_buf; + int op0, op1; + +@@ -201,7 +203,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("nop"); +-#line 911 "rl78-decode.opc" ++#line 913 "rl78-decode.opc" + ID(nop); + + /*----------------------------------------------------------------------*/ +@@ -214,7 +216,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x07: + { + /** 0000 0rw1 addw %0, %1 */ +-#line 274 "rl78-decode.opc" ++#line 276 "rl78-decode.opc" + int rw AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -224,7 +226,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rw = 0x%x\n", rw); + } + SYNTAX("addw %0, %1"); +-#line 274 "rl78-decode.opc" ++#line 276 "rl78-decode.opc" + ID(add); W(); DR(AX); SRW(rw); Fzac; + + } +@@ -239,7 +241,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("addw %0, %e!1"); +-#line 265 "rl78-decode.opc" ++#line 267 "rl78-decode.opc" + ID(add); W(); DR(AX); SM(None, IMMU(2)); Fzac; + + } +@@ -254,7 +256,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("addw %0, #%1"); +-#line 271 "rl78-decode.opc" ++#line 273 "rl78-decode.opc" + ID(add); W(); DR(AX); SC(IMMU(2)); Fzac; + + } +@@ -269,7 +271,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("addw %0, %1"); +-#line 277 "rl78-decode.opc" ++#line 279 "rl78-decode.opc" + ID(add); W(); DR(AX); SM(None, SADDR); Fzac; + + } +@@ -284,7 +286,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("xch a, x"); +-#line 1234 "rl78-decode.opc" ++#line 1236 "rl78-decode.opc" + ID(xch); DR(A); SR(X); + + /*----------------------------------------------------------------------*/ +@@ -301,7 +303,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %e1"); +-#line 678 "rl78-decode.opc" ++#line 680 "rl78-decode.opc" + ID(mov); DR(A); SM(B, 
IMMU(2)); + + } +@@ -316,7 +318,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("add %0, #%1"); +-#line 228 "rl78-decode.opc" ++#line 230 "rl78-decode.opc" + ID(add); DM(None, SADDR); SC(IMMU(1)); Fzac; + + /*----------------------------------------------------------------------*/ +@@ -333,7 +335,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("add %0, %1"); +-#line 222 "rl78-decode.opc" ++#line 224 "rl78-decode.opc" + ID(add); DR(A); SM(None, SADDR); Fzac; + + } +@@ -348,7 +350,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("add %0, #%1"); +-#line 216 "rl78-decode.opc" ++#line 218 "rl78-decode.opc" + ID(add); DR(A); SC(IMMU(1)); Fzac; + + } +@@ -363,7 +365,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("add %0, %e1"); +-#line 204 "rl78-decode.opc" ++#line 206 "rl78-decode.opc" + ID(add); DR(A); SM(HL, 0); Fzac; + + } +@@ -378,7 +380,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("add %0, %ea1"); +-#line 210 "rl78-decode.opc" ++#line 212 "rl78-decode.opc" + ID(add); DR(A); SM(HL, IMMU(1)); Fzac; + + } +@@ -393,7 +395,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("add %0, %e!1"); +-#line 201 "rl78-decode.opc" ++#line 203 "rl78-decode.opc" + ID(add); DR(A); SM(None, IMMU(2)); Fzac; + + } +@@ -408,7 +410,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("addw %0, #%1"); +-#line 280 "rl78-decode.opc" ++#line 282 "rl78-decode.opc" + ID(add); W(); DR(SP); SC(IMMU(1)); Fzac; + + /*----------------------------------------------------------------------*/ +@@ -425,7 +427,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("es:"); +-#line 193 "rl78-decode.opc" ++#line 195 "rl78-decode.opc" + DE(); SE(); + op ++; + pc ++; +@@ -440,7 +442,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x16: + { + /** 0001 0ra0 movw %0, %1 */ +-#line 859 "rl78-decode.opc" ++#line 861 "rl78-decode.opc" + int ra AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -450,7 +452,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" ra = 0x%x\n", ra); + } + SYNTAX("movw %0, %1"); +-#line 859 "rl78-decode.opc" ++#line 861 "rl78-decode.opc" + ID(mov); W(); DRW(ra); SR(AX); + + } +@@ -460,7 +462,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x17: + { + /** 0001 0ra1 movw %0, %1 */ +-#line 856 "rl78-decode.opc" ++#line 858 "rl78-decode.opc" + int ra AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -470,7 +472,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" ra = 0x%x\n", ra); + } + SYNTAX("movw %0, %1"); +-#line 856 "rl78-decode.opc" ++#line 858 "rl78-decode.opc" + ID(mov); W(); DR(AX); SRW(ra); + + } +@@ -485,7 +487,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %e0, %1"); +-#line 729 "rl78-decode.opc" ++#line 731 "rl78-decode.opc" + ID(mov); DM(B, IMMU(2)); SR(A); + + } +@@ -500,7 +502,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %e0, #%1"); +-#line 726 "rl78-decode.opc" ++#line 728 "rl78-decode.opc" + ID(mov); DM(B, IMMU(2)); SC(IMMU(1)); + + } +@@ -515,7 +517,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("addc %0, #%1"); +-#line 260 "rl78-decode.opc" ++#line 262 "rl78-decode.opc" + ID(addc); DM(None, SADDR); SC(IMMU(1)); Fzac; + + /*----------------------------------------------------------------------*/ +@@ -532,7 +534,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("addc %0, %1"); +-#line 257 "rl78-decode.opc" ++#line 259 "rl78-decode.opc" + 
ID(addc); DR(A); SM(None, SADDR); Fzac; + + } +@@ -547,7 +549,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("addc %0, #%1"); +-#line 248 "rl78-decode.opc" ++#line 250 "rl78-decode.opc" + ID(addc); DR(A); SC(IMMU(1)); Fzac; + + } +@@ -562,7 +564,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("addc %0, %e1"); +-#line 236 "rl78-decode.opc" ++#line 238 "rl78-decode.opc" + ID(addc); DR(A); SM(HL, 0); Fzac; + + } +@@ -577,7 +579,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("addc %0, %ea1"); +-#line 245 "rl78-decode.opc" ++#line 247 "rl78-decode.opc" + ID(addc); DR(A); SM(HL, IMMU(1)); Fzac; + + } +@@ -592,7 +594,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("addc %0, %e!1"); +-#line 233 "rl78-decode.opc" ++#line 235 "rl78-decode.opc" + ID(addc); DR(A); SM(None, IMMU(2)); Fzac; + + } +@@ -607,7 +609,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("subw %0, #%1"); +-#line 1198 "rl78-decode.opc" ++#line 1200 "rl78-decode.opc" + ID(sub); W(); DR(SP); SC(IMMU(1)); Fzac; + + /*----------------------------------------------------------------------*/ +@@ -620,7 +622,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x27: + { + /** 0010 0rw1 subw %0, %1 */ +-#line 1192 "rl78-decode.opc" ++#line 1194 "rl78-decode.opc" + int rw AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -630,7 +632,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rw = 0x%x\n", rw); + } + SYNTAX("subw %0, %1"); +-#line 1192 "rl78-decode.opc" ++#line 1194 "rl78-decode.opc" + ID(sub); W(); DR(AX); SRW(rw); Fzac; + + } +@@ -645,7 +647,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("subw %0, %e!1"); +-#line 1183 "rl78-decode.opc" ++#line 1185 "rl78-decode.opc" + ID(sub); W(); DR(AX); SM(None, IMMU(2)); Fzac; + + } +@@ -660,7 +662,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("subw %0, #%1"); +-#line 1189 "rl78-decode.opc" ++#line 1191 "rl78-decode.opc" + ID(sub); W(); DR(AX); SC(IMMU(2)); Fzac; + + } +@@ -675,7 +677,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("subw %0, %1"); +-#line 1195 "rl78-decode.opc" ++#line 1197 "rl78-decode.opc" + ID(sub); W(); DR(AX); SM(None, SADDR); Fzac; + + } +@@ -690,7 +692,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %e0, %1"); +-#line 741 "rl78-decode.opc" ++#line 743 "rl78-decode.opc" + ID(mov); DM(C, IMMU(2)); SR(A); + + } +@@ -705,7 +707,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %e1"); +-#line 684 "rl78-decode.opc" ++#line 686 "rl78-decode.opc" + ID(mov); DR(A); SM(C, IMMU(2)); + + } +@@ -720,7 +722,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("sub %0, #%1"); +-#line 1146 "rl78-decode.opc" ++#line 1148 "rl78-decode.opc" + ID(sub); DM(None, SADDR); SC(IMMU(1)); Fzac; + + /*----------------------------------------------------------------------*/ +@@ -737,7 +739,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("sub %0, %1"); +-#line 1140 "rl78-decode.opc" ++#line 1142 "rl78-decode.opc" + ID(sub); DR(A); SM(None, SADDR); Fzac; + + } +@@ -752,7 +754,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("sub %0, #%1"); +-#line 1134 "rl78-decode.opc" ++#line 1136 "rl78-decode.opc" + ID(sub); DR(A); SC(IMMU(1)); Fzac; + + } +@@ -767,7 +769,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("sub %0, %e1"); +-#line 1122 "rl78-decode.opc" ++#line 1124 "rl78-decode.opc" + ID(sub); DR(A); 
SM(HL, 0); Fzac; + + } +@@ -782,7 +784,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("sub %0, %ea1"); +-#line 1128 "rl78-decode.opc" ++#line 1130 "rl78-decode.opc" + ID(sub); DR(A); SM(HL, IMMU(1)); Fzac; + + } +@@ -797,7 +799,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("sub %0, %e!1"); +-#line 1119 "rl78-decode.opc" ++#line 1121 "rl78-decode.opc" + ID(sub); DR(A); SM(None, IMMU(2)); Fzac; + + } +@@ -808,7 +810,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x36: + { + /** 0011 0rg0 movw %0, #%1 */ +-#line 853 "rl78-decode.opc" ++#line 855 "rl78-decode.opc" + int rg AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -818,7 +820,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rg = 0x%x\n", rg); + } + SYNTAX("movw %0, #%1"); +-#line 853 "rl78-decode.opc" ++#line 855 "rl78-decode.opc" + ID(mov); W(); DRW(rg); SC(IMMU(2)); + + } +@@ -830,7 +832,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x00: + { + /** 0011 0001 0bit 0000 btclr %s1, $%a0 */ +-#line 416 "rl78-decode.opc" ++#line 418 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -840,7 +842,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("btclr %s1, $%a0"); +-#line 416 "rl78-decode.opc" ++#line 418 "rl78-decode.opc" + ID(branch_cond_clear); SM(None, SADDR); SB(bit); DC(pc+IMMS(1)+4); COND(T); + + /*----------------------------------------------------------------------*/ +@@ -850,7 +852,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x01: + { + /** 0011 0001 0bit 0001 btclr %1, $%a0 */ +-#line 410 "rl78-decode.opc" ++#line 412 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -860,7 +862,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("btclr %1, $%a0"); +-#line 410 "rl78-decode.opc" ++#line 412 "rl78-decode.opc" + ID(branch_cond_clear); DC(pc+IMMS(1)+3); SR(A); SB(bit); COND(T); + + } +@@ -868,7 +870,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x02: + { + /** 0011 0001 0bit 0010 bt %s1, $%a0 */ +-#line 402 "rl78-decode.opc" ++#line 404 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -878,7 +880,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("bt %s1, $%a0"); +-#line 402 "rl78-decode.opc" ++#line 404 "rl78-decode.opc" + ID(branch_cond); SM(None, SADDR); SB(bit); DC(pc+IMMS(1)+4); COND(T); + + /*----------------------------------------------------------------------*/ +@@ -888,7 +890,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x03: + { + /** 0011 0001 0bit 0011 bt %1, $%a0 */ +-#line 396 "rl78-decode.opc" ++#line 398 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -898,7 +900,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("bt %1, $%a0"); +-#line 396 "rl78-decode.opc" ++#line 398 "rl78-decode.opc" + ID(branch_cond); DC(pc+IMMS(1)+3); SR(A); SB(bit); COND(T); + + } +@@ -906,7 +908,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x04: + { + /** 0011 0001 0bit 0100 bf %s1, $%a0 */ +-#line 363 "rl78-decode.opc" ++#line 365 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -916,7 +918,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("bf %s1, $%a0"); +-#line 363 "rl78-decode.opc" ++#line 365 "rl78-decode.opc" + ID(branch_cond); SM(None, SADDR); SB(bit); DC(pc+IMMS(1)+4); COND(F); + + 
/*----------------------------------------------------------------------*/ +@@ -926,7 +928,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x05: + { + /** 0011 0001 0bit 0101 bf %1, $%a0 */ +-#line 357 "rl78-decode.opc" ++#line 359 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -936,7 +938,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("bf %1, $%a0"); +-#line 357 "rl78-decode.opc" ++#line 359 "rl78-decode.opc" + ID(branch_cond); DC(pc+IMMS(1)+3); SR(A); SB(bit); COND(F); + + } +@@ -944,7 +946,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x07: + { + /** 0011 0001 0cnt 0111 shl %0, %1 */ +-#line 1075 "rl78-decode.opc" ++#line 1077 "rl78-decode.opc" + int cnt AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -954,7 +956,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" cnt = 0x%x\n", cnt); + } + SYNTAX("shl %0, %1"); +-#line 1075 "rl78-decode.opc" ++#line 1077 "rl78-decode.opc" + ID(shl); DR(C); SC(cnt); + + } +@@ -962,7 +964,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x08: + { + /** 0011 0001 0cnt 1000 shl %0, %1 */ +-#line 1072 "rl78-decode.opc" ++#line 1074 "rl78-decode.opc" + int cnt AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -972,7 +974,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" cnt = 0x%x\n", cnt); + } + SYNTAX("shl %0, %1"); +-#line 1072 "rl78-decode.opc" ++#line 1074 "rl78-decode.opc" + ID(shl); DR(B); SC(cnt); + + } +@@ -980,7 +982,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x09: + { + /** 0011 0001 0cnt 1001 shl %0, %1 */ +-#line 1069 "rl78-decode.opc" ++#line 1071 "rl78-decode.opc" + int cnt AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -990,7 +992,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" cnt = 0x%x\n", cnt); + } + SYNTAX("shl %0, %1"); +-#line 1069 "rl78-decode.opc" ++#line 1071 "rl78-decode.opc" + ID(shl); DR(A); SC(cnt); + + } +@@ -998,7 +1000,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x0a: + { + /** 0011 0001 0cnt 1010 shr %0, %1 */ +-#line 1086 "rl78-decode.opc" ++#line 1088 "rl78-decode.opc" + int cnt AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -1008,7 +1010,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" cnt = 0x%x\n", cnt); + } + SYNTAX("shr %0, %1"); +-#line 1086 "rl78-decode.opc" ++#line 1088 "rl78-decode.opc" + ID(shr); DR(A); SC(cnt); + + } +@@ -1016,7 +1018,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x0b: + { + /** 0011 0001 0cnt 1011 sar %0, %1 */ +-#line 1033 "rl78-decode.opc" ++#line 1035 "rl78-decode.opc" + int cnt AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -1026,7 +1028,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" cnt = 0x%x\n", cnt); + } + SYNTAX("sar %0, %1"); +-#line 1033 "rl78-decode.opc" ++#line 1035 "rl78-decode.opc" + ID(sar); DR(A); SC(cnt); + + } +@@ -1035,7 +1037,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x8c: + { + /** 0011 0001 wcnt 1100 shlw %0, %1 */ +-#line 1081 "rl78-decode.opc" ++#line 1083 "rl78-decode.opc" + int wcnt AU = (op[1] >> 4) & 0x0f; + if (trace) + { +@@ -1045,7 +1047,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" wcnt = 0x%x\n", wcnt); + } + SYNTAX("shlw %0, %1"); +-#line 1081 "rl78-decode.opc" ++#line 1083 "rl78-decode.opc" + ID(shl); W(); DR(BC); SC(wcnt); + + /*----------------------------------------------------------------------*/ +@@ -1056,7 +1058,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x8d: + { + /** 0011 0001 wcnt 1101 shlw %0, %1 */ +-#line 1078 "rl78-decode.opc" ++#line 1080 "rl78-decode.opc" + 
int wcnt AU = (op[1] >> 4) & 0x0f; + if (trace) + { +@@ -1066,7 +1068,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" wcnt = 0x%x\n", wcnt); + } + SYNTAX("shlw %0, %1"); +-#line 1078 "rl78-decode.opc" ++#line 1080 "rl78-decode.opc" + ID(shl); W(); DR(AX); SC(wcnt); + + } +@@ -1075,7 +1077,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x8e: + { + /** 0011 0001 wcnt 1110 shrw %0, %1 */ +-#line 1089 "rl78-decode.opc" ++#line 1091 "rl78-decode.opc" + int wcnt AU = (op[1] >> 4) & 0x0f; + if (trace) + { +@@ -1085,7 +1087,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" wcnt = 0x%x\n", wcnt); + } + SYNTAX("shrw %0, %1"); +-#line 1089 "rl78-decode.opc" ++#line 1091 "rl78-decode.opc" + ID(shr); W(); DR(AX); SC(wcnt); + + /*----------------------------------------------------------------------*/ +@@ -1096,7 +1098,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x8f: + { + /** 0011 0001 wcnt 1111 sarw %0, %1 */ +-#line 1036 "rl78-decode.opc" ++#line 1038 "rl78-decode.opc" + int wcnt AU = (op[1] >> 4) & 0x0f; + if (trace) + { +@@ -1106,7 +1108,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" wcnt = 0x%x\n", wcnt); + } + SYNTAX("sarw %0, %1"); +-#line 1036 "rl78-decode.opc" ++#line 1038 "rl78-decode.opc" + ID(sar); W(); DR(AX); SC(wcnt); + + /*----------------------------------------------------------------------*/ +@@ -1116,7 +1118,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x80: + { + /** 0011 0001 1bit 0000 btclr %s1, $%a0 */ +-#line 413 "rl78-decode.opc" ++#line 415 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -1126,7 +1128,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("btclr %s1, $%a0"); +-#line 413 "rl78-decode.opc" ++#line 415 "rl78-decode.opc" + ID(branch_cond_clear); SM(None, SFR); SB(bit); DC(pc+IMMS(1)+4); COND(T); + + } +@@ -1134,7 +1136,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x81: + { + /** 0011 0001 1bit 0001 btclr %e1, $%a0 */ +-#line 407 "rl78-decode.opc" ++#line 409 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -1144,7 +1146,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("btclr %e1, $%a0"); +-#line 407 "rl78-decode.opc" ++#line 409 "rl78-decode.opc" + ID(branch_cond_clear); DC(pc+IMMS(1)+3); SM(HL,0); SB(bit); COND(T); + + } +@@ -1152,7 +1154,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x82: + { + /** 0011 0001 1bit 0010 bt %s1, $%a0 */ +-#line 399 "rl78-decode.opc" ++#line 401 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -1162,7 +1164,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("bt %s1, $%a0"); +-#line 399 "rl78-decode.opc" ++#line 401 "rl78-decode.opc" + ID(branch_cond); SM(None, SFR); SB(bit); DC(pc+IMMS(1)+4); COND(T); + + } +@@ -1170,7 +1172,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x83: + { + /** 0011 0001 1bit 0011 bt %e1, $%a0 */ +-#line 393 "rl78-decode.opc" ++#line 395 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -1180,7 +1182,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("bt %e1, $%a0"); +-#line 393 "rl78-decode.opc" ++#line 395 "rl78-decode.opc" + ID(branch_cond); DC(pc+IMMS(1)+3); SM(HL,0); SB(bit); COND(T); + + } +@@ -1188,7 +1190,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x84: + { + /** 0011 0001 1bit 0100 bf %s1, $%a0 */ +-#line 360 "rl78-decode.opc" ++#line 362 
"rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -1198,7 +1200,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("bf %s1, $%a0"); +-#line 360 "rl78-decode.opc" ++#line 362 "rl78-decode.opc" + ID(branch_cond); SM(None, SFR); SB(bit); DC(pc+IMMS(1)+4); COND(F); + + } +@@ -1206,7 +1208,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x85: + { + /** 0011 0001 1bit 0101 bf %e1, $%a0 */ +-#line 354 "rl78-decode.opc" ++#line 356 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -1216,7 +1218,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("bf %e1, $%a0"); +-#line 354 "rl78-decode.opc" ++#line 356 "rl78-decode.opc" + ID(branch_cond); DC(pc+IMMS(1)+3); SM(HL,0); SB(bit); COND(F); + + } +@@ -1229,7 +1231,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x37: + { + /** 0011 0ra1 xchw %0, %1 */ +-#line 1239 "rl78-decode.opc" ++#line 1241 "rl78-decode.opc" + int ra AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -1239,7 +1241,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" ra = 0x%x\n", ra); + } + SYNTAX("xchw %0, %1"); +-#line 1239 "rl78-decode.opc" ++#line 1241 "rl78-decode.opc" + ID(xch); W(); DR(AX); SRW(ra); + + /*----------------------------------------------------------------------*/ +@@ -1256,7 +1258,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %e0, #%1"); +-#line 738 "rl78-decode.opc" ++#line 740 "rl78-decode.opc" + ID(mov); DM(C, IMMU(2)); SC(IMMU(1)); + + } +@@ -1271,7 +1273,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %e0, #%1"); +-#line 732 "rl78-decode.opc" ++#line 734 "rl78-decode.opc" + ID(mov); DM(BC, IMMU(2)); SC(IMMU(1)); + + } +@@ -1286,7 +1288,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("subc %0, #%1"); +-#line 1178 "rl78-decode.opc" ++#line 1180 "rl78-decode.opc" + ID(subc); DM(None, SADDR); SC(IMMU(1)); Fzac; + + /*----------------------------------------------------------------------*/ +@@ -1303,7 +1305,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("subc %0, %1"); +-#line 1175 "rl78-decode.opc" ++#line 1177 "rl78-decode.opc" + ID(subc); DR(A); SM(None, SADDR); Fzac; + + } +@@ -1318,7 +1320,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("subc %0, #%1"); +-#line 1166 "rl78-decode.opc" ++#line 1168 "rl78-decode.opc" + ID(subc); DR(A); SC(IMMU(1)); Fzac; + + } +@@ -1333,7 +1335,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("subc %0, %e1"); +-#line 1154 "rl78-decode.opc" ++#line 1156 "rl78-decode.opc" + ID(subc); DR(A); SM(HL, 0); Fzac; + + } +@@ -1348,7 +1350,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("subc %0, %ea1"); +-#line 1163 "rl78-decode.opc" ++#line 1165 "rl78-decode.opc" + ID(subc); DR(A); SM(HL, IMMU(1)); Fzac; + + } +@@ -1363,7 +1365,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("subc %0, %e!1"); +-#line 1151 "rl78-decode.opc" ++#line 1153 "rl78-decode.opc" + ID(subc); DR(A); SM(None, IMMU(2)); Fzac; + + } +@@ -1378,7 +1380,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmp %e!0, #%1"); +-#line 480 "rl78-decode.opc" ++#line 482 "rl78-decode.opc" + ID(cmp); DM(None, IMMU(2)); SC(IMMU(1)); Fzac; + + } +@@ -1393,7 +1395,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, #%1"); +-#line 717 "rl78-decode.opc" ++#line 719 "rl78-decode.opc" + ID(mov); DR(ES); SC(IMMU(1)); + + } +@@ 
-1408,7 +1410,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmpw %0, %e!1"); +-#line 531 "rl78-decode.opc" ++#line 533 "rl78-decode.opc" + ID(cmp); W(); DR(AX); SM(None, IMMU(2)); Fzac; + + } +@@ -1418,7 +1420,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x47: + { + /** 0100 0ra1 cmpw %0, %1 */ +-#line 540 "rl78-decode.opc" ++#line 542 "rl78-decode.opc" + int ra AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -1428,7 +1430,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" ra = 0x%x\n", ra); + } + SYNTAX("cmpw %0, %1"); +-#line 540 "rl78-decode.opc" ++#line 542 "rl78-decode.opc" + ID(cmp); W(); DR(AX); SRW(ra); Fzac; + + } +@@ -1443,7 +1445,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmpw %0, #%1"); +-#line 537 "rl78-decode.opc" ++#line 539 "rl78-decode.opc" + ID(cmp); W(); DR(AX); SC(IMMU(2)); Fzac; + + } +@@ -1458,7 +1460,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmpw %0, %1"); +-#line 543 "rl78-decode.opc" ++#line 545 "rl78-decode.opc" + ID(cmp); W(); DR(AX); SM(None, SADDR); Fzac; + + /*----------------------------------------------------------------------*/ +@@ -1475,7 +1477,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %e0, %1"); +-#line 735 "rl78-decode.opc" ++#line 737 "rl78-decode.opc" + ID(mov); DM(BC, IMMU(2)); SR(A); + + } +@@ -1490,7 +1492,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %e1"); +-#line 681 "rl78-decode.opc" ++#line 683 "rl78-decode.opc" + ID(mov); DR(A); SM(BC, IMMU(2)); + + } +@@ -1505,7 +1507,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmp %0, #%1"); +-#line 483 "rl78-decode.opc" ++#line 485 "rl78-decode.opc" + ID(cmp); DM(None, SADDR); SC(IMMU(1)); Fzac; + + } +@@ -1520,7 +1522,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmp %0, %1"); +-#line 510 "rl78-decode.opc" ++#line 512 "rl78-decode.opc" + ID(cmp); DR(A); SM(None, SADDR); Fzac; + + /*----------------------------------------------------------------------*/ +@@ -1537,7 +1539,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmp %0, #%1"); +-#line 501 "rl78-decode.opc" ++#line 503 "rl78-decode.opc" + ID(cmp); DR(A); SC(IMMU(1)); Fzac; + + } +@@ -1552,7 +1554,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmp %0, %e1"); +-#line 489 "rl78-decode.opc" ++#line 491 "rl78-decode.opc" + ID(cmp); DR(A); SM(HL, 0); Fzac; + + } +@@ -1567,7 +1569,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmp %0, %ea1"); +-#line 498 "rl78-decode.opc" ++#line 500 "rl78-decode.opc" + ID(cmp); DR(A); SM(HL, IMMU(1)); Fzac; + + } +@@ -1582,7 +1584,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmp %0, %e!1"); +-#line 486 "rl78-decode.opc" ++#line 488 "rl78-decode.opc" + ID(cmp); DR(A); SM(None, IMMU(2)); Fzac; + + } +@@ -1597,7 +1599,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x57: + { + /** 0101 0reg mov %0, #%1 */ +-#line 669 "rl78-decode.opc" ++#line 671 "rl78-decode.opc" + int reg AU = op[0] & 0x07; + if (trace) + { +@@ -1607,7 +1609,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("mov %0, #%1"); +-#line 669 "rl78-decode.opc" ++#line 671 "rl78-decode.opc" + ID(mov); DRB(reg); SC(IMMU(1)); + + } +@@ -1622,7 +1624,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %e0, %1"); +-#line 871 "rl78-decode.opc" ++#line 873 "rl78-decode.opc" + ID(mov); W(); DM(B, IMMU(2)); 
SR(AX); + + } +@@ -1637,7 +1639,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, %e1"); +-#line 862 "rl78-decode.opc" ++#line 864 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(B, IMMU(2)); + + } +@@ -1652,7 +1654,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("and %0, #%1"); +-#line 312 "rl78-decode.opc" ++#line 314 "rl78-decode.opc" + ID(and); DM(None, SADDR); SC(IMMU(1)); Fz; + + /*----------------------------------------------------------------------*/ +@@ -1669,7 +1671,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("and %0, %1"); +-#line 309 "rl78-decode.opc" ++#line 311 "rl78-decode.opc" + ID(and); DR(A); SM(None, SADDR); Fz; + + } +@@ -1684,7 +1686,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("and %0, #%1"); +-#line 300 "rl78-decode.opc" ++#line 302 "rl78-decode.opc" + ID(and); DR(A); SC(IMMU(1)); Fz; + + } +@@ -1699,7 +1701,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("and %0, %e1"); +-#line 288 "rl78-decode.opc" ++#line 290 "rl78-decode.opc" + ID(and); DR(A); SM(HL, 0); Fz; + + } +@@ -1714,7 +1716,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("and %0, %ea1"); +-#line 294 "rl78-decode.opc" ++#line 296 "rl78-decode.opc" + ID(and); DR(A); SM(HL, IMMU(1)); Fz; + + } +@@ -1729,7 +1731,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("and %0, %e!1"); +-#line 285 "rl78-decode.opc" ++#line 287 "rl78-decode.opc" + ID(and); DR(A); SM(None, IMMU(2)); Fz; + + } +@@ -1743,7 +1745,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x67: + { + /** 0110 0rba mov %0, %1 */ +-#line 672 "rl78-decode.opc" ++#line 674 "rl78-decode.opc" + int rba AU = op[0] & 0x07; + if (trace) + { +@@ -1753,7 +1755,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rba = 0x%x\n", rba); + } + SYNTAX("mov %0, %1"); +-#line 672 "rl78-decode.opc" ++#line 674 "rl78-decode.opc" + ID(mov); DR(A); SRB(rba); + + } +@@ -1772,7 +1774,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x07: + { + /** 0110 0001 0000 0reg add %0, %1 */ +-#line 225 "rl78-decode.opc" ++#line 227 "rl78-decode.opc" + int reg AU = op[1] & 0x07; + if (trace) + { +@@ -1782,7 +1784,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("add %0, %1"); +-#line 225 "rl78-decode.opc" ++#line 227 "rl78-decode.opc" + ID(add); DRB(reg); SR(A); Fzac; + + } +@@ -1796,7 +1798,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x0f: + { + /** 0110 0001 0000 1rba add %0, %1 */ +-#line 219 "rl78-decode.opc" ++#line 221 "rl78-decode.opc" + int rba AU = op[1] & 0x07; + if (trace) + { +@@ -1806,7 +1808,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rba = 0x%x\n", rba); + } + SYNTAX("add %0, %1"); +-#line 219 "rl78-decode.opc" ++#line 221 "rl78-decode.opc" + ID(add); DR(A); SRB(rba); Fzac; + + } +@@ -1821,7 +1823,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("addw %0, %ea1"); +-#line 268 "rl78-decode.opc" ++#line 270 "rl78-decode.opc" + ID(add); W(); DR(AX); SM(HL, IMMU(1)); Fzac; + + } +@@ -1836,7 +1838,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x17: + { + /** 0110 0001 0001 0reg addc %0, %1 */ +-#line 254 "rl78-decode.opc" ++#line 256 "rl78-decode.opc" + int reg AU = op[1] & 0x07; + if (trace) + { +@@ -1846,7 +1848,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("addc %0, %1"); +-#line 254 "rl78-decode.opc" ++#line 256 "rl78-decode.opc" + ID(addc); DRB(reg); SR(A); 
Fzac; + + } +@@ -1860,7 +1862,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x1f: + { + /** 0110 0001 0001 1rba addc %0, %1 */ +-#line 251 "rl78-decode.opc" ++#line 253 "rl78-decode.opc" + int rba AU = op[1] & 0x07; + if (trace) + { +@@ -1870,7 +1872,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rba = 0x%x\n", rba); + } + SYNTAX("addc %0, %1"); +-#line 251 "rl78-decode.opc" ++#line 253 "rl78-decode.opc" + ID(addc); DR(A); SRB(rba); Fzac; + + } +@@ -1885,7 +1887,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x27: + { + /** 0110 0001 0010 0reg sub %0, %1 */ +-#line 1143 "rl78-decode.opc" ++#line 1145 "rl78-decode.opc" + int reg AU = op[1] & 0x07; + if (trace) + { +@@ -1895,7 +1897,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("sub %0, %1"); +-#line 1143 "rl78-decode.opc" ++#line 1145 "rl78-decode.opc" + ID(sub); DRB(reg); SR(A); Fzac; + + } +@@ -1909,7 +1911,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x2f: + { + /** 0110 0001 0010 1rba sub %0, %1 */ +-#line 1137 "rl78-decode.opc" ++#line 1139 "rl78-decode.opc" + int rba AU = op[1] & 0x07; + if (trace) + { +@@ -1919,7 +1921,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rba = 0x%x\n", rba); + } + SYNTAX("sub %0, %1"); +-#line 1137 "rl78-decode.opc" ++#line 1139 "rl78-decode.opc" + ID(sub); DR(A); SRB(rba); Fzac; + + } +@@ -1934,7 +1936,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("subw %0, %ea1"); +-#line 1186 "rl78-decode.opc" ++#line 1188 "rl78-decode.opc" + ID(sub); W(); DR(AX); SM(HL, IMMU(1)); Fzac; + + } +@@ -1949,7 +1951,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x37: + { + /** 0110 0001 0011 0reg subc %0, %1 */ +-#line 1172 "rl78-decode.opc" ++#line 1174 "rl78-decode.opc" + int reg AU = op[1] & 0x07; + if (trace) + { +@@ -1959,7 +1961,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("subc %0, %1"); +-#line 1172 "rl78-decode.opc" ++#line 1174 "rl78-decode.opc" + ID(subc); DRB(reg); SR(A); Fzac; + + } +@@ -1973,7 +1975,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x3f: + { + /** 0110 0001 0011 1rba subc %0, %1 */ +-#line 1169 "rl78-decode.opc" ++#line 1171 "rl78-decode.opc" + int rba AU = op[1] & 0x07; + if (trace) + { +@@ -1983,7 +1985,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rba = 0x%x\n", rba); + } + SYNTAX("subc %0, %1"); +-#line 1169 "rl78-decode.opc" ++#line 1171 "rl78-decode.opc" + ID(subc); DR(A); SRB(rba); Fzac; + + } +@@ -1998,7 +2000,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x47: + { + /** 0110 0001 0100 0reg cmp %0, %1 */ +-#line 507 "rl78-decode.opc" ++#line 509 "rl78-decode.opc" + int reg AU = op[1] & 0x07; + if (trace) + { +@@ -2008,7 +2010,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("cmp %0, %1"); +-#line 507 "rl78-decode.opc" ++#line 509 "rl78-decode.opc" + ID(cmp); DRB(reg); SR(A); Fzac; + + } +@@ -2022,7 +2024,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x4f: + { + /** 0110 0001 0100 1rba cmp %0, %1 */ +-#line 504 "rl78-decode.opc" ++#line 506 "rl78-decode.opc" + int rba AU = op[1] & 0x07; + if (trace) + { +@@ -2032,7 +2034,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rba = 0x%x\n", rba); + } + SYNTAX("cmp %0, %1"); +-#line 504 "rl78-decode.opc" ++#line 506 "rl78-decode.opc" + ID(cmp); DR(A); SRB(rba); Fzac; + + } +@@ -2047,7 +2049,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("cmpw %0, %ea1"); +-#line 534 
"rl78-decode.opc" ++#line 536 "rl78-decode.opc" + ID(cmp); W(); DR(AX); SM(HL, IMMU(1)); Fzac; + + } +@@ -2062,7 +2064,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x57: + { + /** 0110 0001 0101 0reg and %0, %1 */ +-#line 306 "rl78-decode.opc" ++#line 308 "rl78-decode.opc" + int reg AU = op[1] & 0x07; + if (trace) + { +@@ -2072,7 +2074,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("and %0, %1"); +-#line 306 "rl78-decode.opc" ++#line 308 "rl78-decode.opc" + ID(and); DRB(reg); SR(A); Fz; + + } +@@ -2086,7 +2088,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x5f: + { + /** 0110 0001 0101 1rba and %0, %1 */ +-#line 303 "rl78-decode.opc" ++#line 305 "rl78-decode.opc" + int rba AU = op[1] & 0x07; + if (trace) + { +@@ -2096,7 +2098,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rba = 0x%x\n", rba); + } + SYNTAX("and %0, %1"); +-#line 303 "rl78-decode.opc" ++#line 305 "rl78-decode.opc" + ID(and); DR(A); SRB(rba); Fz; + + } +@@ -2111,7 +2113,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("inc %ea0"); +-#line 584 "rl78-decode.opc" ++#line 586 "rl78-decode.opc" + ID(add); DM(HL, IMMU(1)); SC(1); Fza; + + } +@@ -2126,7 +2128,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x67: + { + /** 0110 0001 0110 0reg or %0, %1 */ +-#line 961 "rl78-decode.opc" ++#line 963 "rl78-decode.opc" + int reg AU = op[1] & 0x07; + if (trace) + { +@@ -2136,7 +2138,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("or %0, %1"); +-#line 961 "rl78-decode.opc" ++#line 963 "rl78-decode.opc" + ID(or); DRB(reg); SR(A); Fz; + + } +@@ -2150,7 +2152,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x6f: + { + /** 0110 0001 0110 1rba or %0, %1 */ +-#line 958 "rl78-decode.opc" ++#line 960 "rl78-decode.opc" + int rba AU = op[1] & 0x07; + if (trace) + { +@@ -2160,7 +2162,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rba = 0x%x\n", rba); + } + SYNTAX("or %0, %1"); +-#line 958 "rl78-decode.opc" ++#line 960 "rl78-decode.opc" + ID(or); DR(A); SRB(rba); Fz; + + } +@@ -2175,7 +2177,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("dec %ea0"); +-#line 551 "rl78-decode.opc" ++#line 553 "rl78-decode.opc" + ID(sub); DM(HL, IMMU(1)); SC(1); Fza; + + } +@@ -2190,7 +2192,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x77: + { + /** 0110 0001 0111 0reg xor %0, %1 */ +-#line 1265 "rl78-decode.opc" ++#line 1267 "rl78-decode.opc" + int reg AU = op[1] & 0x07; + if (trace) + { +@@ -2200,7 +2202,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("xor %0, %1"); +-#line 1265 "rl78-decode.opc" ++#line 1267 "rl78-decode.opc" + ID(xor); DRB(reg); SR(A); Fz; + + } +@@ -2214,7 +2216,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x7f: + { + /** 0110 0001 0111 1rba xor %0, %1 */ +-#line 1262 "rl78-decode.opc" ++#line 1264 "rl78-decode.opc" + int rba AU = op[1] & 0x07; + if (trace) + { +@@ -2224,7 +2226,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rba = 0x%x\n", rba); + } + SYNTAX("xor %0, %1"); +-#line 1262 "rl78-decode.opc" ++#line 1264 "rl78-decode.opc" + ID(xor); DR(A); SRB(rba); Fz; + + } +@@ -2239,7 +2241,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("incw %ea0"); +-#line 598 "rl78-decode.opc" ++#line 600 "rl78-decode.opc" + ID(add); W(); DM(HL, IMMU(1)); SC(1); + + } +@@ -2255,7 +2257,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("add %0, %e1"); 
+-#line 207 "rl78-decode.opc" ++#line 209 "rl78-decode.opc" + ID(add); DR(A); SM2(HL, B, 0); Fzac; + + } +@@ -2270,7 +2272,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("add %0, %e1"); +-#line 213 "rl78-decode.opc" ++#line 215 "rl78-decode.opc" + ID(add); DR(A); SM2(HL, C, 0); Fzac; + + } +@@ -2309,9 +2311,9 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xf7: + { + /** 0110 0001 1nnn 01mm callt [%x0] */ +-#line 433 "rl78-decode.opc" ++#line 435 "rl78-decode.opc" + int nnn AU = (op[1] >> 4) & 0x07; +-#line 433 "rl78-decode.opc" ++#line 435 "rl78-decode.opc" + int mm AU = op[1] & 0x03; + if (trace) + { +@@ -2322,7 +2324,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" mm = 0x%x\n", mm); + } + SYNTAX("callt [%x0]"); +-#line 433 "rl78-decode.opc" ++#line 435 "rl78-decode.opc" + ID(call); DM(None, 0x80 + mm*16 + nnn*2); + + /*----------------------------------------------------------------------*/ +@@ -2338,7 +2340,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x8f: + { + /** 0110 0001 1000 1reg xch %0, %1 */ +-#line 1224 "rl78-decode.opc" ++#line 1226 "rl78-decode.opc" + int reg AU = op[1] & 0x07; + if (trace) + { +@@ -2348,7 +2350,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("xch %0, %1"); +-#line 1224 "rl78-decode.opc" ++#line 1226 "rl78-decode.opc" + /* Note: DECW uses reg == X, so this must follow DECW */ + ID(xch); DR(A); SRB(reg); + +@@ -2364,7 +2366,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("decw %ea0"); +-#line 565 "rl78-decode.opc" ++#line 567 "rl78-decode.opc" + ID(sub); W(); DM(HL, IMMU(1)); SC(1); + + } +@@ -2379,7 +2381,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("addc %0, %e1"); +-#line 239 "rl78-decode.opc" ++#line 241 "rl78-decode.opc" + ID(addc); DR(A); SM2(HL, B, 0); Fzac; + + } +@@ -2394,7 +2396,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("addc %0, %e1"); +-#line 242 "rl78-decode.opc" ++#line 244 "rl78-decode.opc" + ID(addc); DR(A); SM2(HL, C, 0); Fzac; + + } +@@ -2410,7 +2412,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("sub %0, %e1"); +-#line 1125 "rl78-decode.opc" ++#line 1127 "rl78-decode.opc" + ID(sub); DR(A); SM2(HL, B, 0); Fzac; + + } +@@ -2425,7 +2427,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("sub %0, %e1"); +-#line 1131 "rl78-decode.opc" ++#line 1133 "rl78-decode.opc" + ID(sub); DR(A); SM2(HL, C, 0); Fzac; + + } +@@ -2440,7 +2442,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xch %0, %1"); +-#line 1228 "rl78-decode.opc" ++#line 1230 "rl78-decode.opc" + ID(xch); DR(A); SM(None, SADDR); + + } +@@ -2455,7 +2457,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xch %0, %e1"); +-#line 1221 "rl78-decode.opc" ++#line 1223 "rl78-decode.opc" + ID(xch); DR(A); SM2(HL, C, 0); + + } +@@ -2470,7 +2472,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xch %0, %e!1"); +-#line 1203 "rl78-decode.opc" ++#line 1205 "rl78-decode.opc" + ID(xch); DR(A); SM(None, IMMU(2)); + + } +@@ -2485,7 +2487,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xch %0, %s1"); +-#line 1231 "rl78-decode.opc" ++#line 1233 "rl78-decode.opc" + ID(xch); DR(A); SM(None, SFR); + + } +@@ -2500,7 +2502,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xch %0, %e1"); +-#line 1212 "rl78-decode.opc" ++#line 1214 
"rl78-decode.opc" + ID(xch); DR(A); SM(HL, 0); + + } +@@ -2515,7 +2517,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xch %0, %ea1"); +-#line 1218 "rl78-decode.opc" ++#line 1220 "rl78-decode.opc" + ID(xch); DR(A); SM(HL, IMMU(1)); + + } +@@ -2530,7 +2532,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xch %0, %e1"); +-#line 1206 "rl78-decode.opc" ++#line 1208 "rl78-decode.opc" + ID(xch); DR(A); SM(DE, 0); + + } +@@ -2545,7 +2547,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xch %0, %ea1"); +-#line 1209 "rl78-decode.opc" ++#line 1211 "rl78-decode.opc" + ID(xch); DR(A); SM(DE, IMMU(1)); + + } +@@ -2560,7 +2562,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("subc %0, %e1"); +-#line 1157 "rl78-decode.opc" ++#line 1159 "rl78-decode.opc" + ID(subc); DR(A); SM2(HL, B, 0); Fzac; + + } +@@ -2575,7 +2577,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("subc %0, %e1"); +-#line 1160 "rl78-decode.opc" ++#line 1162 "rl78-decode.opc" + ID(subc); DR(A); SM2(HL, C, 0); Fzac; + + } +@@ -2590,7 +2592,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("mov %0, %1"); +-#line 723 "rl78-decode.opc" ++#line 725 "rl78-decode.opc" + ID(mov); DR(ES); SM(None, SADDR); + + } +@@ -2605,7 +2607,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xch %0, %e1"); +-#line 1215 "rl78-decode.opc" ++#line 1217 "rl78-decode.opc" + ID(xch); DR(A); SM2(HL, B, 0); + + } +@@ -2620,7 +2622,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("cmp %0, %e1"); +-#line 492 "rl78-decode.opc" ++#line 494 "rl78-decode.opc" + ID(cmp); DR(A); SM2(HL, B, 0); Fzac; + + } +@@ -2635,7 +2637,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("cmp %0, %e1"); +-#line 495 "rl78-decode.opc" ++#line 497 "rl78-decode.opc" + ID(cmp); DR(A); SM2(HL, C, 0); Fzac; + + } +@@ -2650,7 +2652,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("bh $%a0"); +-#line 340 "rl78-decode.opc" ++#line 342 "rl78-decode.opc" + ID(branch_cond); DC(pc+IMMS(1)+3); SR(None); COND(H); + + } +@@ -2665,7 +2667,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("sk%c1"); +-#line 1094 "rl78-decode.opc" ++#line 1096 "rl78-decode.opc" + ID(skip); COND(C); + + } +@@ -2680,7 +2682,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("mov %0, %e1"); +-#line 660 "rl78-decode.opc" ++#line 662 "rl78-decode.opc" + ID(mov); DR(A); SM2(HL, B, 0); + + } +@@ -2691,7 +2693,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xfa: + { + /** 0110 0001 11rg 1010 call %0 */ +-#line 430 "rl78-decode.opc" ++#line 432 "rl78-decode.opc" + int rg AU = (op[1] >> 4) & 0x03; + if (trace) + { +@@ -2701,7 +2703,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rg = 0x%x\n", rg); + } + SYNTAX("call %0"); +-#line 430 "rl78-decode.opc" ++#line 432 "rl78-decode.opc" + ID(call); DRW(rg); + + } +@@ -2716,7 +2718,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("br ax"); +-#line 380 "rl78-decode.opc" ++#line 382 "rl78-decode.opc" + ID(branch); DR(AX); + + /*----------------------------------------------------------------------*/ +@@ -2733,7 +2735,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("brk"); +-#line 388 "rl78-decode.opc" ++#line 390 "rl78-decode.opc" + ID(break); + + 
/*----------------------------------------------------------------------*/ +@@ -2750,7 +2752,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("pop %s0"); +-#line 989 "rl78-decode.opc" ++#line 991 "rl78-decode.opc" + ID(mov); W(); DR(PSW); SPOP(); + + /*----------------------------------------------------------------------*/ +@@ -2767,7 +2769,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("movs %ea0, %1"); +-#line 811 "rl78-decode.opc" ++#line 813 "rl78-decode.opc" + ID(mov); DM(HL, IMMU(1)); SR(X); Fzc; + + /*----------------------------------------------------------------------*/ +@@ -2780,7 +2782,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xff: + { + /** 0110 0001 11rb 1111 sel rb%1 */ +-#line 1041 "rl78-decode.opc" ++#line 1043 "rl78-decode.opc" + int rb AU = (op[1] >> 4) & 0x03; + if (trace) + { +@@ -2790,7 +2792,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rb = 0x%x\n", rb); + } + SYNTAX("sel rb%1"); +-#line 1041 "rl78-decode.opc" ++#line 1043 "rl78-decode.opc" + ID(sel); SC(rb); + + /*----------------------------------------------------------------------*/ +@@ -2807,7 +2809,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("and %0, %e1"); +-#line 291 "rl78-decode.opc" ++#line 293 "rl78-decode.opc" + ID(and); DR(A); SM2(HL, B, 0); Fz; + + } +@@ -2822,7 +2824,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("and %0, %e1"); +-#line 297 "rl78-decode.opc" ++#line 299 "rl78-decode.opc" + ID(and); DR(A); SM2(HL, C, 0); Fz; + + } +@@ -2837,7 +2839,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("bnh $%a0"); +-#line 343 "rl78-decode.opc" ++#line 345 "rl78-decode.opc" + ID(branch_cond); DC(pc+IMMS(1)+3); SR(None); COND(NH); + + } +@@ -2852,7 +2854,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("sk%c1"); +-#line 1100 "rl78-decode.opc" ++#line 1102 "rl78-decode.opc" + ID(skip); COND(NC); + + } +@@ -2867,7 +2869,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("mov %e0, %1"); +-#line 627 "rl78-decode.opc" ++#line 629 "rl78-decode.opc" + ID(mov); DM2(HL, B, 0); SR(A); + + } +@@ -2882,7 +2884,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("ror %0, %1"); +-#line 1022 "rl78-decode.opc" ++#line 1024 "rl78-decode.opc" + ID(ror); DR(A); SC(1); + + } +@@ -2897,7 +2899,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("rolc %0, %1"); +-#line 1016 "rl78-decode.opc" ++#line 1018 "rl78-decode.opc" + ID(rolc); DR(A); SC(1); + + } +@@ -2912,7 +2914,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("push %s1"); +-#line 997 "rl78-decode.opc" ++#line 999 "rl78-decode.opc" + ID(mov); W(); DPUSH(); SR(PSW); + + /*----------------------------------------------------------------------*/ +@@ -2929,7 +2931,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("cmps %0, %ea1"); +-#line 526 "rl78-decode.opc" ++#line 528 "rl78-decode.opc" + ID(cmp); DR(X); SM(HL, IMMU(1)); Fzac; + + /*----------------------------------------------------------------------*/ +@@ -2946,7 +2948,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("or %0, %e1"); +-#line 946 "rl78-decode.opc" ++#line 948 "rl78-decode.opc" + ID(or); DR(A); SM2(HL, B, 0); Fz; + + } +@@ -2961,7 +2963,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("or %0, %e1"); +-#line 952 "rl78-decode.opc" ++#line 
954 "rl78-decode.opc" + ID(or); DR(A); SM2(HL, C, 0); Fz; + + } +@@ -2976,7 +2978,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("sk%c1"); +-#line 1097 "rl78-decode.opc" ++#line 1099 "rl78-decode.opc" + ID(skip); COND(H); + + } +@@ -2991,7 +2993,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("sk%c1"); +-#line 1109 "rl78-decode.opc" ++#line 1111 "rl78-decode.opc" + ID(skip); COND(Z); + + /*----------------------------------------------------------------------*/ +@@ -3008,7 +3010,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("mov %0, %e1"); +-#line 663 "rl78-decode.opc" ++#line 665 "rl78-decode.opc" + ID(mov); DR(A); SM2(HL, C, 0); + + } +@@ -3023,7 +3025,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("rol %0, %1"); +-#line 1013 "rl78-decode.opc" ++#line 1015 "rl78-decode.opc" + ID(rol); DR(A); SC(1); + + } +@@ -3038,7 +3040,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("retb"); +-#line 1008 "rl78-decode.opc" ++#line 1010 "rl78-decode.opc" + ID(reti); + + /*----------------------------------------------------------------------*/ +@@ -3055,7 +3057,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("halt"); +-#line 576 "rl78-decode.opc" ++#line 578 "rl78-decode.opc" + ID(halt); + + /*----------------------------------------------------------------------*/ +@@ -3066,7 +3068,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xfe: + { + /** 0110 0001 111r 1110 rolwc %0, %1 */ +-#line 1019 "rl78-decode.opc" ++#line 1021 "rl78-decode.opc" + int r AU = (op[1] >> 4) & 0x01; + if (trace) + { +@@ -3076,7 +3078,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" r = 0x%x\n", r); + } + SYNTAX("rolwc %0, %1"); +-#line 1019 "rl78-decode.opc" ++#line 1021 "rl78-decode.opc" + ID(rolc); W(); DRW(r); SC(1); + + } +@@ -3091,7 +3093,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xor %0, %e1"); +-#line 1250 "rl78-decode.opc" ++#line 1252 "rl78-decode.opc" + ID(xor); DR(A); SM2(HL, B, 0); Fz; + + } +@@ -3106,7 +3108,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("xor %0, %e1"); +-#line 1256 "rl78-decode.opc" ++#line 1258 "rl78-decode.opc" + ID(xor); DR(A); SM2(HL, C, 0); Fz; + + } +@@ -3121,7 +3123,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("sk%c1"); +-#line 1103 "rl78-decode.opc" ++#line 1105 "rl78-decode.opc" + ID(skip); COND(NH); + + } +@@ -3136,7 +3138,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("sk%c1"); +-#line 1106 "rl78-decode.opc" ++#line 1108 "rl78-decode.opc" + ID(skip); COND(NZ); + + } +@@ -3151,7 +3153,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("mov %e0, %1"); +-#line 636 "rl78-decode.opc" ++#line 638 "rl78-decode.opc" + ID(mov); DM2(HL, C, 0); SR(A); + + } +@@ -3166,7 +3168,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("rorc %0, %1"); +-#line 1025 "rl78-decode.opc" ++#line 1027 "rl78-decode.opc" + ID(rorc); DR(A); SC(1); + + /*----------------------------------------------------------------------*/ +@@ -3186,7 +3188,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("reti"); +-#line 1005 "rl78-decode.opc" ++#line 1007 "rl78-decode.opc" + ID(reti); + + } +@@ -3201,7 +3203,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("stop"); +-#line 1114 "rl78-decode.opc" ++#line 1116 "rl78-decode.opc" 
+ ID(stop); + + /*----------------------------------------------------------------------*/ +@@ -3221,7 +3223,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %e0, %1"); +-#line 874 "rl78-decode.opc" ++#line 876 "rl78-decode.opc" + ID(mov); W(); DM(C, IMMU(2)); SR(AX); + + } +@@ -3236,7 +3238,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, %e1"); +-#line 865 "rl78-decode.opc" ++#line 867 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(C, IMMU(2)); + + } +@@ -3251,7 +3253,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("or %0, #%1"); +-#line 967 "rl78-decode.opc" ++#line 969 "rl78-decode.opc" + ID(or); DM(None, SADDR); SC(IMMU(1)); Fz; + + /*----------------------------------------------------------------------*/ +@@ -3268,7 +3270,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("or %0, %1"); +-#line 964 "rl78-decode.opc" ++#line 966 "rl78-decode.opc" + ID(or); DR(A); SM(None, SADDR); Fz; + + } +@@ -3283,7 +3285,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("or %0, #%1"); +-#line 955 "rl78-decode.opc" ++#line 957 "rl78-decode.opc" + ID(or); DR(A); SC(IMMU(1)); Fz; + + } +@@ -3298,7 +3300,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("or %0, %e1"); +-#line 943 "rl78-decode.opc" ++#line 945 "rl78-decode.opc" + ID(or); DR(A); SM(HL, 0); Fz; + + } +@@ -3313,7 +3315,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("or %0, %ea1"); +-#line 949 "rl78-decode.opc" ++#line 951 "rl78-decode.opc" + ID(or); DR(A); SM(HL, IMMU(1)); Fz; + + } +@@ -3328,7 +3330,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("or %0, %e!1"); +-#line 940 "rl78-decode.opc" ++#line 942 "rl78-decode.opc" + ID(or); DR(A); SM(None, IMMU(2)); Fz; + + } +@@ -3342,7 +3344,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x77: + { + /** 0111 0rba mov %0, %1 */ +-#line 696 "rl78-decode.opc" ++#line 698 "rl78-decode.opc" + int rba AU = op[0] & 0x07; + if (trace) + { +@@ -3352,7 +3354,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rba = 0x%x\n", rba); + } + SYNTAX("mov %0, %1"); +-#line 696 "rl78-decode.opc" ++#line 698 "rl78-decode.opc" + ID(mov); DRB(rba); SR(A); + + } +@@ -3371,7 +3373,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x70: + { + /** 0111 0001 0bit 0000 set1 %e!0 */ +-#line 1046 "rl78-decode.opc" ++#line 1048 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3381,7 +3383,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("set1 %e!0"); +-#line 1046 "rl78-decode.opc" ++#line 1048 "rl78-decode.opc" + ID(mov); DM(None, IMMU(2)); DB(bit); SC(1); + + } +@@ -3396,7 +3398,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x71: + { + /** 0111 0001 0bit 0001 mov1 %0, cy */ +-#line 803 "rl78-decode.opc" ++#line 805 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3406,7 +3408,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("mov1 %0, cy"); +-#line 803 "rl78-decode.opc" ++#line 805 "rl78-decode.opc" + ID(mov); DM(None, SADDR); DB(bit); SCY(); + + } +@@ -3421,7 +3423,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x72: + { + /** 0111 0001 0bit 0010 set1 %0 */ +-#line 1064 "rl78-decode.opc" ++#line 1066 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3431,7 +3433,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("set1 %0"); 
+-#line 1064 "rl78-decode.opc" ++#line 1066 "rl78-decode.opc" + ID(mov); DM(None, SADDR); DB(bit); SC(1); + + /*----------------------------------------------------------------------*/ +@@ -3448,7 +3450,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x73: + { + /** 0111 0001 0bit 0011 clr1 %0 */ +-#line 456 "rl78-decode.opc" ++#line 458 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3458,7 +3460,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("clr1 %0"); +-#line 456 "rl78-decode.opc" ++#line 458 "rl78-decode.opc" + ID(mov); DM(None, SADDR); DB(bit); SC(0); + + /*----------------------------------------------------------------------*/ +@@ -3475,7 +3477,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x74: + { + /** 0111 0001 0bit 0100 mov1 cy, %1 */ +-#line 797 "rl78-decode.opc" ++#line 799 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3485,7 +3487,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("mov1 cy, %1"); +-#line 797 "rl78-decode.opc" ++#line 799 "rl78-decode.opc" + ID(mov); DCY(); SM(None, SADDR); SB(bit); + + } +@@ -3500,7 +3502,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x75: + { + /** 0111 0001 0bit 0101 and1 cy, %s1 */ +-#line 326 "rl78-decode.opc" ++#line 328 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3510,7 +3512,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("and1 cy, %s1"); +-#line 326 "rl78-decode.opc" ++#line 328 "rl78-decode.opc" + ID(and); DCY(); SM(None, SADDR); SB(bit); + + /*----------------------------------------------------------------------*/ +@@ -3530,7 +3532,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x76: + { + /** 0111 0001 0bit 0110 or1 cy, %s1 */ +-#line 981 "rl78-decode.opc" ++#line 983 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3540,7 +3542,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("or1 cy, %s1"); +-#line 981 "rl78-decode.opc" ++#line 983 "rl78-decode.opc" + ID(or); DCY(); SM(None, SADDR); SB(bit); + + /*----------------------------------------------------------------------*/ +@@ -3557,7 +3559,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x77: + { + /** 0111 0001 0bit 0111 xor1 cy, %s1 */ +-#line 1285 "rl78-decode.opc" ++#line 1287 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3567,7 +3569,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("xor1 cy, %s1"); +-#line 1285 "rl78-decode.opc" ++#line 1287 "rl78-decode.opc" + ID(xor); DCY(); SM(None, SADDR); SB(bit); + + /*----------------------------------------------------------------------*/ +@@ -3584,7 +3586,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x78: + { + /** 0111 0001 0bit 1000 clr1 %e!0 */ +-#line 438 "rl78-decode.opc" ++#line 440 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3594,7 +3596,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("clr1 %e!0"); +-#line 438 "rl78-decode.opc" ++#line 440 "rl78-decode.opc" + ID(mov); DM(None, IMMU(2)); DB(bit); SC(0); + + } +@@ -3609,7 +3611,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x79: + { + /** 0111 0001 0bit 1001 mov1 %s0, cy */ +-#line 806 "rl78-decode.opc" ++#line 808 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3619,7 
+3621,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("mov1 %s0, cy"); +-#line 806 "rl78-decode.opc" ++#line 808 "rl78-decode.opc" + ID(mov); DM(None, SFR); DB(bit); SCY(); + + /*----------------------------------------------------------------------*/ +@@ -3636,7 +3638,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x7a: + { + /** 0111 0001 0bit 1010 set1 %s0 */ +-#line 1058 "rl78-decode.opc" ++#line 1060 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3646,7 +3648,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("set1 %s0"); +-#line 1058 "rl78-decode.opc" ++#line 1060 "rl78-decode.opc" + op0 = SFR; + ID(mov); DM(None, op0); DB(bit); SC(1); + if (op0 == RL78_SFR_PSW && bit == 7) +@@ -3664,7 +3666,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x7b: + { + /** 0111 0001 0bit 1011 clr1 %s0 */ +-#line 450 "rl78-decode.opc" ++#line 452 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3674,7 +3676,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("clr1 %s0"); +-#line 450 "rl78-decode.opc" ++#line 452 "rl78-decode.opc" + op0 = SFR; + ID(mov); DM(None, op0); DB(bit); SC(0); + if (op0 == RL78_SFR_PSW && bit == 7) +@@ -3692,7 +3694,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x7c: + { + /** 0111 0001 0bit 1100 mov1 cy, %s1 */ +-#line 800 "rl78-decode.opc" ++#line 802 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3702,7 +3704,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("mov1 cy, %s1"); +-#line 800 "rl78-decode.opc" ++#line 802 "rl78-decode.opc" + ID(mov); DCY(); SM(None, SFR); SB(bit); + + } +@@ -3717,7 +3719,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x7d: + { + /** 0111 0001 0bit 1101 and1 cy, %s1 */ +-#line 323 "rl78-decode.opc" ++#line 325 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3727,7 +3729,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("and1 cy, %s1"); +-#line 323 "rl78-decode.opc" ++#line 325 "rl78-decode.opc" + ID(and); DCY(); SM(None, SFR); SB(bit); + + } +@@ -3742,7 +3744,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x7e: + { + /** 0111 0001 0bit 1110 or1 cy, %s1 */ +-#line 978 "rl78-decode.opc" ++#line 980 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3752,7 +3754,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("or1 cy, %s1"); +-#line 978 "rl78-decode.opc" ++#line 980 "rl78-decode.opc" + ID(or); DCY(); SM(None, SFR); SB(bit); + + } +@@ -3767,7 +3769,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x7f: + { + /** 0111 0001 0bit 1111 xor1 cy, %s1 */ +-#line 1282 "rl78-decode.opc" ++#line 1284 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3777,7 +3779,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("xor1 cy, %s1"); +-#line 1282 "rl78-decode.opc" ++#line 1284 "rl78-decode.opc" + ID(xor); DCY(); SM(None, SFR); SB(bit); + + } +@@ -3792,7 +3794,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("set1 cy"); +-#line 1055 "rl78-decode.opc" ++#line 1057 "rl78-decode.opc" + ID(mov); DCY(); SC(1); + + } +@@ -3807,7 +3809,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xf1: + { + /** 0111 0001 1bit 0001 mov1 %e0, cy */ +-#line 785 "rl78-decode.opc" 
++#line 787 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3817,7 +3819,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("mov1 %e0, cy"); +-#line 785 "rl78-decode.opc" ++#line 787 "rl78-decode.opc" + ID(mov); DM(HL, 0); DB(bit); SCY(); + + } +@@ -3832,7 +3834,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xf2: + { + /** 0111 0001 1bit 0010 set1 %e0 */ +-#line 1049 "rl78-decode.opc" ++#line 1051 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3842,7 +3844,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("set1 %e0"); +-#line 1049 "rl78-decode.opc" ++#line 1051 "rl78-decode.opc" + ID(mov); DM(HL, 0); DB(bit); SC(1); + + } +@@ -3857,7 +3859,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xf3: + { + /** 0111 0001 1bit 0011 clr1 %e0 */ +-#line 441 "rl78-decode.opc" ++#line 443 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3867,7 +3869,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("clr1 %e0"); +-#line 441 "rl78-decode.opc" ++#line 443 "rl78-decode.opc" + ID(mov); DM(HL, 0); DB(bit); SC(0); + + } +@@ -3882,7 +3884,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xf4: + { + /** 0111 0001 1bit 0100 mov1 cy, %e1 */ +-#line 791 "rl78-decode.opc" ++#line 793 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3892,7 +3894,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("mov1 cy, %e1"); +-#line 791 "rl78-decode.opc" ++#line 793 "rl78-decode.opc" + ID(mov); DCY(); SM(HL, 0); SB(bit); + + } +@@ -3907,7 +3909,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xf5: + { + /** 0111 0001 1bit 0101 and1 cy, %e1 */ +-#line 317 "rl78-decode.opc" ++#line 319 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3917,7 +3919,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("and1 cy, %e1"); +-#line 317 "rl78-decode.opc" ++#line 319 "rl78-decode.opc" + ID(and); DCY(); SM(HL, 0); SB(bit); + + } +@@ -3932,7 +3934,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xf6: + { + /** 0111 0001 1bit 0110 or1 cy, %e1 */ +-#line 972 "rl78-decode.opc" ++#line 974 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3942,7 +3944,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("or1 cy, %e1"); +-#line 972 "rl78-decode.opc" ++#line 974 "rl78-decode.opc" + ID(or); DCY(); SM(HL, 0); SB(bit); + + } +@@ -3957,7 +3959,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xf7: + { + /** 0111 0001 1bit 0111 xor1 cy, %e1 */ +-#line 1276 "rl78-decode.opc" ++#line 1278 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -3967,7 +3969,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("xor1 cy, %e1"); +-#line 1276 "rl78-decode.opc" ++#line 1278 "rl78-decode.opc" + ID(xor); DCY(); SM(HL, 0); SB(bit); + + } +@@ -3982,7 +3984,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("clr1 cy"); +-#line 447 "rl78-decode.opc" ++#line 449 "rl78-decode.opc" + ID(mov); DCY(); SC(0); + + } +@@ -3997,7 +3999,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xf9: + { + /** 0111 0001 1bit 1001 mov1 %e0, cy */ +-#line 788 "rl78-decode.opc" ++#line 790 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -4007,7 
+4009,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("mov1 %e0, cy"); +-#line 788 "rl78-decode.opc" ++#line 790 "rl78-decode.opc" + ID(mov); DR(A); DB(bit); SCY(); + + } +@@ -4022,7 +4024,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xfa: + { + /** 0111 0001 1bit 1010 set1 %0 */ +-#line 1052 "rl78-decode.opc" ++#line 1054 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -4032,7 +4034,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("set1 %0"); +-#line 1052 "rl78-decode.opc" ++#line 1054 "rl78-decode.opc" + ID(mov); DR(A); DB(bit); SC(1); + + } +@@ -4047,7 +4049,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xfb: + { + /** 0111 0001 1bit 1011 clr1 %0 */ +-#line 444 "rl78-decode.opc" ++#line 446 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -4057,7 +4059,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("clr1 %0"); +-#line 444 "rl78-decode.opc" ++#line 446 "rl78-decode.opc" + ID(mov); DR(A); DB(bit); SC(0); + + } +@@ -4072,7 +4074,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xfc: + { + /** 0111 0001 1bit 1100 mov1 cy, %e1 */ +-#line 794 "rl78-decode.opc" ++#line 796 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -4082,7 +4084,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("mov1 cy, %e1"); +-#line 794 "rl78-decode.opc" ++#line 796 "rl78-decode.opc" + ID(mov); DCY(); SR(A); SB(bit); + + } +@@ -4097,7 +4099,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xfd: + { + /** 0111 0001 1bit 1101 and1 cy, %1 */ +-#line 320 "rl78-decode.opc" ++#line 322 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -4107,7 +4109,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("and1 cy, %1"); +-#line 320 "rl78-decode.opc" ++#line 322 "rl78-decode.opc" + ID(and); DCY(); SR(A); SB(bit); + + } +@@ -4122,7 +4124,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xfe: + { + /** 0111 0001 1bit 1110 or1 cy, %1 */ +-#line 975 "rl78-decode.opc" ++#line 977 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -4132,7 +4134,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("or1 cy, %1"); +-#line 975 "rl78-decode.opc" ++#line 977 "rl78-decode.opc" + ID(or); DCY(); SR(A); SB(bit); + + } +@@ -4147,7 +4149,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xff: + { + /** 0111 0001 1bit 1111 xor1 cy, %1 */ +-#line 1279 "rl78-decode.opc" ++#line 1281 "rl78-decode.opc" + int bit AU = (op[1] >> 4) & 0x07; + if (trace) + { +@@ -4157,7 +4159,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" bit = 0x%x\n", bit); + } + SYNTAX("xor1 cy, %1"); +-#line 1279 "rl78-decode.opc" ++#line 1281 "rl78-decode.opc" + ID(xor); DCY(); SR(A); SB(bit); + + } +@@ -4172,7 +4174,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0], op[1]); + } + SYNTAX("not1 cy"); +-#line 916 "rl78-decode.opc" ++#line 918 "rl78-decode.opc" + ID(xor); DCY(); SC(1); + + /*----------------------------------------------------------------------*/ +@@ -4192,7 +4194,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %e0, %1"); +-#line 877 "rl78-decode.opc" ++#line 879 "rl78-decode.opc" + ID(mov); W(); DM(BC, IMMU(2)); SR(AX); + + } +@@ -4207,7 +4209,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, 
%e1"); +-#line 868 "rl78-decode.opc" ++#line 870 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(BC, IMMU(2)); + + } +@@ -4222,7 +4224,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("xor %0, #%1"); +-#line 1271 "rl78-decode.opc" ++#line 1273 "rl78-decode.opc" + ID(xor); DM(None, SADDR); SC(IMMU(1)); Fz; + + /*----------------------------------------------------------------------*/ +@@ -4239,7 +4241,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("xor %0, %1"); +-#line 1268 "rl78-decode.opc" ++#line 1270 "rl78-decode.opc" + ID(xor); DR(A); SM(None, SADDR); Fz; + + } +@@ -4254,7 +4256,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("xor %0, #%1"); +-#line 1259 "rl78-decode.opc" ++#line 1261 "rl78-decode.opc" + ID(xor); DR(A); SC(IMMU(1)); Fz; + + } +@@ -4269,7 +4271,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("xor %0, %e1"); +-#line 1247 "rl78-decode.opc" ++#line 1249 "rl78-decode.opc" + ID(xor); DR(A); SM(HL, 0); Fz; + + } +@@ -4284,7 +4286,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("xor %0, %ea1"); +-#line 1253 "rl78-decode.opc" ++#line 1255 "rl78-decode.opc" + ID(xor); DR(A); SM(HL, IMMU(1)); Fz; + + } +@@ -4299,7 +4301,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("xor %0, %e!1"); +-#line 1244 "rl78-decode.opc" ++#line 1246 "rl78-decode.opc" + ID(xor); DR(A); SM(None, IMMU(2)); Fz; + + } +@@ -4314,7 +4316,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0x87: + { + /** 1000 0reg inc %0 */ +-#line 587 "rl78-decode.opc" ++#line 589 "rl78-decode.opc" + int reg AU = op[0] & 0x07; + if (trace) + { +@@ -4324,7 +4326,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("inc %0"); +-#line 587 "rl78-decode.opc" ++#line 589 "rl78-decode.opc" + ID(add); DRB(reg); SC(1); Fza; + + } +@@ -4339,7 +4341,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %ea1"); +-#line 666 "rl78-decode.opc" ++#line 668 "rl78-decode.opc" + ID(mov); DR(A); SM(SP, IMMU(1)); + + } +@@ -4354,7 +4356,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %e1"); +-#line 648 "rl78-decode.opc" ++#line 650 "rl78-decode.opc" + ID(mov); DR(A); SM(DE, 0); + + } +@@ -4369,7 +4371,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %ea1"); +-#line 651 "rl78-decode.opc" ++#line 653 "rl78-decode.opc" + ID(mov); DR(A); SM(DE, IMMU(1)); + + } +@@ -4384,7 +4386,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %e1"); +-#line 654 "rl78-decode.opc" ++#line 656 "rl78-decode.opc" + ID(mov); DR(A); SM(HL, 0); + + } +@@ -4399,7 +4401,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %ea1"); +-#line 657 "rl78-decode.opc" ++#line 659 "rl78-decode.opc" + ID(mov); DR(A); SM(HL, IMMU(1)); + + } +@@ -4414,7 +4416,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %1"); +-#line 690 "rl78-decode.opc" ++#line 692 "rl78-decode.opc" + ID(mov); DR(A); SM(None, SADDR); + + } +@@ -4429,7 +4431,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %s1"); +-#line 687 "rl78-decode.opc" ++#line 689 "rl78-decode.opc" + ID(mov); DR(A); SM(None, SFR); + + } +@@ -4444,7 +4446,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %e!1"); +-#line 645 "rl78-decode.opc" ++#line 647 "rl78-decode.opc" + ID(mov); DR(A); SM(None, IMMU(2)); + + } +@@ -4459,7 +4461,7 @@ rl78_decode_opcode (unsigned 
long pc AU, + case 0x97: + { + /** 1001 0reg dec %0 */ +-#line 554 "rl78-decode.opc" ++#line 556 "rl78-decode.opc" + int reg AU = op[0] & 0x07; + if (trace) + { +@@ -4469,7 +4471,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" reg = 0x%x\n", reg); + } + SYNTAX("dec %0"); +-#line 554 "rl78-decode.opc" ++#line 556 "rl78-decode.opc" + ID(sub); DRB(reg); SC(1); Fza; + + } +@@ -4484,7 +4486,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %a0, %1"); +-#line 642 "rl78-decode.opc" ++#line 644 "rl78-decode.opc" + ID(mov); DM(SP, IMMU(1)); SR(A); + + } +@@ -4499,7 +4501,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %e0, %1"); +-#line 615 "rl78-decode.opc" ++#line 617 "rl78-decode.opc" + ID(mov); DM(DE, 0); SR(A); + + } +@@ -4514,7 +4516,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %ea0, %1"); +-#line 621 "rl78-decode.opc" ++#line 623 "rl78-decode.opc" + ID(mov); DM(DE, IMMU(1)); SR(A); + + } +@@ -4529,7 +4531,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %e0, %1"); +-#line 624 "rl78-decode.opc" ++#line 626 "rl78-decode.opc" + ID(mov); DM(HL, 0); SR(A); + + } +@@ -4544,7 +4546,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %ea0, %1"); +-#line 633 "rl78-decode.opc" ++#line 635 "rl78-decode.opc" + ID(mov); DM(HL, IMMU(1)); SR(A); + + } +@@ -4559,7 +4561,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %1"); +-#line 747 "rl78-decode.opc" ++#line 749 "rl78-decode.opc" + ID(mov); DM(None, SADDR); SR(A); + + } +@@ -4574,7 +4576,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %s0, %1"); +-#line 780 "rl78-decode.opc" ++#line 782 "rl78-decode.opc" + ID(mov); DM(None, SFR); SR(A); + + /*----------------------------------------------------------------------*/ +@@ -4591,7 +4593,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %e!0, %1"); +-#line 612 "rl78-decode.opc" ++#line 614 "rl78-decode.opc" + ID(mov); DM(None, IMMU(2)); SR(A); + + } +@@ -4606,7 +4608,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("inc %e!0"); +-#line 581 "rl78-decode.opc" ++#line 583 "rl78-decode.opc" + ID(add); DM(None, IMMU(2)); SC(1); Fza; + + } +@@ -4617,7 +4619,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xa7: + { + /** 1010 0rg1 incw %0 */ +-#line 601 "rl78-decode.opc" ++#line 603 "rl78-decode.opc" + int rg AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -4627,7 +4629,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rg = 0x%x\n", rg); + } + SYNTAX("incw %0"); +-#line 601 "rl78-decode.opc" ++#line 603 "rl78-decode.opc" + ID(add); W(); DRW(rg); SC(1); + + } +@@ -4642,7 +4644,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("incw %e!0"); +-#line 595 "rl78-decode.opc" ++#line 597 "rl78-decode.opc" + ID(add); W(); DM(None, IMMU(2)); SC(1); + + } +@@ -4657,7 +4659,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("inc %0"); +-#line 590 "rl78-decode.opc" ++#line 592 "rl78-decode.opc" + ID(add); DM(None, SADDR); SC(1); Fza; + + /*----------------------------------------------------------------------*/ +@@ -4674,7 +4676,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("incw %0"); +-#line 604 "rl78-decode.opc" ++#line 606 "rl78-decode.opc" + ID(add); W(); DM(None, SADDR); SC(1); + + /*----------------------------------------------------------------------*/ +@@ -4691,7 +4693,7 @@ rl78_decode_opcode (unsigned long pc AU, + 
op[0]); + } + SYNTAX("movw %0, %a1"); +-#line 850 "rl78-decode.opc" ++#line 852 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(SP, IMMU(1)); + + } +@@ -4706,7 +4708,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, %e1"); +-#line 838 "rl78-decode.opc" ++#line 840 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(DE, 0); + + } +@@ -4721,7 +4723,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, %ea1"); +-#line 841 "rl78-decode.opc" ++#line 843 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(DE, IMMU(1)); + + } +@@ -4736,7 +4738,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, %e1"); +-#line 844 "rl78-decode.opc" ++#line 846 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(HL, 0); + + } +@@ -4751,7 +4753,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, %ea1"); +-#line 847 "rl78-decode.opc" ++#line 849 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(HL, IMMU(1)); + + } +@@ -4766,7 +4768,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, %1"); +-#line 880 "rl78-decode.opc" ++#line 882 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(None, SADDR); + + } +@@ -4781,7 +4783,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, %s1"); +-#line 883 "rl78-decode.opc" ++#line 885 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(None, SFR); + + } +@@ -4796,7 +4798,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, %e!1"); +-#line 834 "rl78-decode.opc" ++#line 836 "rl78-decode.opc" + ID(mov); W(); DR(AX); SM(None, IMMU(2)); + + +@@ -4812,7 +4814,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("dec %e!0"); +-#line 548 "rl78-decode.opc" ++#line 550 "rl78-decode.opc" + ID(sub); DM(None, IMMU(2)); SC(1); Fza; + + } +@@ -4823,7 +4825,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xb7: + { + /** 1011 0rg1 decw %0 */ +-#line 568 "rl78-decode.opc" ++#line 570 "rl78-decode.opc" + int rg AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -4833,7 +4835,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rg = 0x%x\n", rg); + } + SYNTAX("decw %0"); +-#line 568 "rl78-decode.opc" ++#line 570 "rl78-decode.opc" + ID(sub); W(); DRW(rg); SC(1); + + } +@@ -4848,7 +4850,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("decw %e!0"); +-#line 562 "rl78-decode.opc" ++#line 564 "rl78-decode.opc" + ID(sub); W(); DM(None, IMMU(2)); SC(1); + + } +@@ -4863,7 +4865,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("dec %0"); +-#line 557 "rl78-decode.opc" ++#line 559 "rl78-decode.opc" + ID(sub); DM(None, SADDR); SC(1); Fza; + + /*----------------------------------------------------------------------*/ +@@ -4880,7 +4882,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("decw %0"); +-#line 571 "rl78-decode.opc" ++#line 573 "rl78-decode.opc" + ID(sub); W(); DM(None, SADDR); SC(1); + + /*----------------------------------------------------------------------*/ +@@ -4897,7 +4899,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %a0, %1"); +-#line 831 "rl78-decode.opc" ++#line 833 "rl78-decode.opc" + ID(mov); W(); DM(SP, IMMU(1)); SR(AX); + + } +@@ -4912,7 +4914,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %e0, %1"); +-#line 819 "rl78-decode.opc" ++#line 821 "rl78-decode.opc" + ID(mov); W(); DM(DE, 0); SR(AX); + + } +@@ -4927,7 +4929,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %ea0, %1"); +-#line 822 
"rl78-decode.opc" ++#line 824 "rl78-decode.opc" + ID(mov); W(); DM(DE, IMMU(1)); SR(AX); + + } +@@ -4942,7 +4944,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %e0, %1"); +-#line 825 "rl78-decode.opc" ++#line 827 "rl78-decode.opc" + ID(mov); W(); DM(HL, 0); SR(AX); + + } +@@ -4957,7 +4959,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %ea0, %1"); +-#line 828 "rl78-decode.opc" ++#line 830 "rl78-decode.opc" + ID(mov); W(); DM(HL, IMMU(1)); SR(AX); + + } +@@ -4972,7 +4974,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, %1"); +-#line 895 "rl78-decode.opc" ++#line 897 "rl78-decode.opc" + ID(mov); W(); DM(None, SADDR); SR(AX); + + } +@@ -4987,7 +4989,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %s0, %1"); +-#line 901 "rl78-decode.opc" ++#line 903 "rl78-decode.opc" + ID(mov); W(); DM(None, SFR); SR(AX); + + /*----------------------------------------------------------------------*/ +@@ -5004,7 +5006,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %e!0, %1"); +-#line 816 "rl78-decode.opc" ++#line 818 "rl78-decode.opc" + ID(mov); W(); DM(None, IMMU(2)); SR(AX); + + } +@@ -5015,7 +5017,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xc6: + { + /** 1100 0rg0 pop %0 */ +-#line 986 "rl78-decode.opc" ++#line 988 "rl78-decode.opc" + int rg AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -5025,7 +5027,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rg = 0x%x\n", rg); + } + SYNTAX("pop %0"); +-#line 986 "rl78-decode.opc" ++#line 988 "rl78-decode.opc" + ID(mov); W(); DRW(rg); SPOP(); + + } +@@ -5036,7 +5038,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xc7: + { + /** 1100 0rg1 push %1 */ +-#line 994 "rl78-decode.opc" ++#line 996 "rl78-decode.opc" + int rg AU = (op[0] >> 1) & 0x03; + if (trace) + { +@@ -5046,7 +5048,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rg = 0x%x\n", rg); + } + SYNTAX("push %1"); +-#line 994 "rl78-decode.opc" ++#line 996 "rl78-decode.opc" + ID(mov); W(); DPUSH(); SRW(rg); + + } +@@ -5061,7 +5063,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %a0, #%1"); +-#line 639 "rl78-decode.opc" ++#line 641 "rl78-decode.opc" + ID(mov); DM(SP, IMMU(1)); SC(IMMU(1)); + + } +@@ -5076,7 +5078,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %0, #%1"); +-#line 892 "rl78-decode.opc" ++#line 894 "rl78-decode.opc" + ID(mov); W(); DM(None, SADDR); SC(IMMU(2)); + + } +@@ -5091,7 +5093,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %ea0, #%1"); +-#line 618 "rl78-decode.opc" ++#line 620 "rl78-decode.opc" + ID(mov); DM(DE, IMMU(1)); SC(IMMU(1)); + + } +@@ -5106,7 +5108,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("movw %s0, #%1"); +-#line 898 "rl78-decode.opc" ++#line 900 "rl78-decode.opc" + ID(mov); W(); DM(None, SFR); SC(IMMU(2)); + + } +@@ -5121,7 +5123,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %ea0, #%1"); +-#line 630 "rl78-decode.opc" ++#line 632 "rl78-decode.opc" + ID(mov); DM(HL, IMMU(1)); SC(IMMU(1)); + + } +@@ -5136,7 +5138,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, #%1"); +-#line 744 "rl78-decode.opc" ++#line 746 "rl78-decode.opc" + ID(mov); DM(None, SADDR); SC(IMMU(1)); + + } +@@ -5151,7 +5153,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %s0, #%1"); +-#line 750 "rl78-decode.opc" ++#line 752 "rl78-decode.opc" + op0 = SFR; + op1 
= IMMU(1); + ID(mov); DM(None, op0); SC(op1); +@@ -5193,7 +5195,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %e!0, #%1"); +-#line 609 "rl78-decode.opc" ++#line 611 "rl78-decode.opc" + ID(mov); DM(None, IMMU(2)); SC(IMMU(1)); + + } +@@ -5204,7 +5206,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xd3: + { + /** 1101 00rg cmp0 %0 */ +-#line 518 "rl78-decode.opc" ++#line 520 "rl78-decode.opc" + int rg AU = op[0] & 0x03; + if (trace) + { +@@ -5214,7 +5216,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rg = 0x%x\n", rg); + } + SYNTAX("cmp0 %0"); +-#line 518 "rl78-decode.opc" ++#line 520 "rl78-decode.opc" + ID(cmp); DRB(rg); SC(0); Fzac; + + } +@@ -5229,7 +5231,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmp0 %0"); +-#line 521 "rl78-decode.opc" ++#line 523 "rl78-decode.opc" + ID(cmp); DM(None, SADDR); SC(0); Fzac; + + /*----------------------------------------------------------------------*/ +@@ -5246,7 +5248,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("cmp0 %e!0"); +-#line 515 "rl78-decode.opc" ++#line 517 "rl78-decode.opc" + ID(cmp); DM(None, IMMU(2)); SC(0); Fzac; + + } +@@ -5261,7 +5263,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mulu x"); +-#line 906 "rl78-decode.opc" ++#line 908 "rl78-decode.opc" + ID(mulu); + + /*----------------------------------------------------------------------*/ +@@ -5278,7 +5280,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("ret"); +-#line 1002 "rl78-decode.opc" ++#line 1004 "rl78-decode.opc" + ID(ret); + + } +@@ -5293,7 +5295,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %1"); +-#line 711 "rl78-decode.opc" ++#line 713 "rl78-decode.opc" + ID(mov); DR(X); SM(None, SADDR); + + } +@@ -5308,7 +5310,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %e!1"); +-#line 708 "rl78-decode.opc" ++#line 710 "rl78-decode.opc" + ID(mov); DR(X); SM(None, IMMU(2)); + + } +@@ -5318,7 +5320,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xfa: + { + /** 11ra 1010 movw %0, %1 */ +-#line 889 "rl78-decode.opc" ++#line 891 "rl78-decode.opc" + int ra AU = (op[0] >> 4) & 0x03; + if (trace) + { +@@ -5328,7 +5330,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" ra = 0x%x\n", ra); + } + SYNTAX("movw %0, %1"); +-#line 889 "rl78-decode.opc" ++#line 891 "rl78-decode.opc" + ID(mov); W(); DRW(ra); SM(None, SADDR); + + } +@@ -5338,7 +5340,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xfb: + { + /** 11ra 1011 movw %0, %es!1 */ +-#line 886 "rl78-decode.opc" ++#line 888 "rl78-decode.opc" + int ra AU = (op[0] >> 4) & 0x03; + if (trace) + { +@@ -5348,7 +5350,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" ra = 0x%x\n", ra); + } + SYNTAX("movw %0, %es!1"); +-#line 886 "rl78-decode.opc" ++#line 888 "rl78-decode.opc" + ID(mov); W(); DRW(ra); SM(None, IMMU(2)); + + } +@@ -5363,7 +5365,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("bc $%a0"); +-#line 334 "rl78-decode.opc" ++#line 336 "rl78-decode.opc" + ID(branch_cond); DC(pc+IMMS(1)+2); SR(None); COND(C); + + } +@@ -5378,7 +5380,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("bz $%a0"); +-#line 346 "rl78-decode.opc" ++#line 348 "rl78-decode.opc" + ID(branch_cond); DC(pc+IMMS(1)+2); SR(None); COND(Z); + + } +@@ -5393,7 +5395,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("bnc $%a0"); +-#line 337 "rl78-decode.opc" ++#line 339 "rl78-decode.opc" + ID(branch_cond); 
DC(pc+IMMS(1)+2); SR(None); COND(NC); + + } +@@ -5408,7 +5410,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("bnz $%a0"); +-#line 349 "rl78-decode.opc" ++#line 351 "rl78-decode.opc" + ID(branch_cond); DC(pc+IMMS(1)+2); SR(None); COND(NZ); + + /*----------------------------------------------------------------------*/ +@@ -5421,7 +5423,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xe3: + { + /** 1110 00rg oneb %0 */ +-#line 924 "rl78-decode.opc" ++#line 926 "rl78-decode.opc" + int rg AU = op[0] & 0x03; + if (trace) + { +@@ -5431,7 +5433,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rg = 0x%x\n", rg); + } + SYNTAX("oneb %0"); +-#line 924 "rl78-decode.opc" ++#line 926 "rl78-decode.opc" + ID(mov); DRB(rg); SC(1); + + } +@@ -5446,7 +5448,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("oneb %0"); +-#line 927 "rl78-decode.opc" ++#line 929 "rl78-decode.opc" + ID(mov); DM(None, SADDR); SC(1); + + /*----------------------------------------------------------------------*/ +@@ -5463,7 +5465,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("oneb %e!0"); +-#line 921 "rl78-decode.opc" ++#line 923 "rl78-decode.opc" + ID(mov); DM(None, IMMU(2)); SC(1); + + } +@@ -5478,7 +5480,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("onew %0"); +-#line 932 "rl78-decode.opc" ++#line 934 "rl78-decode.opc" + ID(mov); DR(AX); SC(1); + + } +@@ -5493,7 +5495,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("onew %0"); +-#line 935 "rl78-decode.opc" ++#line 937 "rl78-decode.opc" + ID(mov); DR(BC); SC(1); + + /*----------------------------------------------------------------------*/ +@@ -5510,7 +5512,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %1"); +-#line 699 "rl78-decode.opc" ++#line 701 "rl78-decode.opc" + ID(mov); DR(B); SM(None, SADDR); + + } +@@ -5525,7 +5527,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %e!1"); +-#line 693 "rl78-decode.opc" ++#line 695 "rl78-decode.opc" + ID(mov); DR(B); SM(None, IMMU(2)); + + } +@@ -5540,7 +5542,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("br !%!a0"); +-#line 368 "rl78-decode.opc" ++#line 370 "rl78-decode.opc" + ID(branch); DC(IMMU(3)); + + } +@@ -5555,7 +5557,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("br %!a0"); +-#line 371 "rl78-decode.opc" ++#line 373 "rl78-decode.opc" + ID(branch); DC(IMMU(2)); + + } +@@ -5570,7 +5572,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("br $%!a0"); +-#line 374 "rl78-decode.opc" ++#line 376 "rl78-decode.opc" + ID(branch); DC(pc+IMMS(2)+3); + + } +@@ -5585,7 +5587,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("br $%a0"); +-#line 377 "rl78-decode.opc" ++#line 379 "rl78-decode.opc" + ID(branch); DC(pc+IMMS(1)+2); + + } +@@ -5596,7 +5598,7 @@ rl78_decode_opcode (unsigned long pc AU, + case 0xf3: + { + /** 1111 00rg clrb %0 */ +-#line 464 "rl78-decode.opc" ++#line 466 "rl78-decode.opc" + int rg AU = op[0] & 0x03; + if (trace) + { +@@ -5606,7 +5608,7 @@ rl78_decode_opcode (unsigned long pc AU, + printf (" rg = 0x%x\n", rg); + } + SYNTAX("clrb %0"); +-#line 464 "rl78-decode.opc" ++#line 466 "rl78-decode.opc" + ID(mov); DRB(rg); SC(0); + + } +@@ -5621,7 +5623,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("clrb %0"); +-#line 467 "rl78-decode.opc" ++#line 469 "rl78-decode.opc" + ID(mov); DM(None, SADDR); SC(0); + + 
/*----------------------------------------------------------------------*/ +@@ -5638,7 +5640,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("clrb %e!0"); +-#line 461 "rl78-decode.opc" ++#line 463 "rl78-decode.opc" + ID(mov); DM(None, IMMU(2)); SC(0); + + } +@@ -5653,7 +5655,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("clrw %0"); +-#line 472 "rl78-decode.opc" ++#line 474 "rl78-decode.opc" + ID(mov); DR(AX); SC(0); + + } +@@ -5668,7 +5670,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("clrw %0"); +-#line 475 "rl78-decode.opc" ++#line 477 "rl78-decode.opc" + ID(mov); DR(BC); SC(0); + + /*----------------------------------------------------------------------*/ +@@ -5685,7 +5687,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %1"); +-#line 705 "rl78-decode.opc" ++#line 707 "rl78-decode.opc" + ID(mov); DR(C); SM(None, SADDR); + + } +@@ -5700,7 +5702,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("mov %0, %e!1"); +-#line 702 "rl78-decode.opc" ++#line 704 "rl78-decode.opc" + ID(mov); DR(C); SM(None, IMMU(2)); + + } +@@ -5715,7 +5717,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("call !%!a0"); +-#line 421 "rl78-decode.opc" ++#line 423 "rl78-decode.opc" + ID(call); DC(IMMU(3)); + + } +@@ -5730,7 +5732,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("call %!a0"); +-#line 424 "rl78-decode.opc" ++#line 426 "rl78-decode.opc" + ID(call); DC(IMMU(2)); + + } +@@ -5745,7 +5747,7 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("call $%!a0"); +-#line 427 "rl78-decode.opc" ++#line 429 "rl78-decode.opc" + ID(call); DC(pc+IMMS(2)+3); + + } +@@ -5760,13 +5762,13 @@ rl78_decode_opcode (unsigned long pc AU, + op[0]); + } + SYNTAX("brk1"); +-#line 385 "rl78-decode.opc" ++#line 387 "rl78-decode.opc" + ID(break); + + } + break; + } +-#line 1290 "rl78-decode.opc" ++#line 1292 "rl78-decode.opc" + + return rl78->n_bytes; + } +diff --git a/opcodes/rl78-decode.opc b/opcodes/rl78-decode.opc +index 6212f08..b25e441 100644 +--- a/opcodes/rl78-decode.opc ++++ b/opcodes/rl78-decode.opc +@@ -50,7 +50,9 @@ typedef struct + #define W() rl78->size = RL78_Word + + #define AU ATTRIBUTE_UNUSED +-#define GETBYTE() (ld->op [ld->rl78->n_bytes++] = ld->getbyte (ld->ptr)) ++ ++#define OP_BUF_LEN 20 ++#define GETBYTE() (ld->rl78->n_bytes < (OP_BUF_LEN - 1) ? ld->op [ld->rl78->n_bytes++] = ld->getbyte (ld->ptr): 0) + #define B ((unsigned long) GETBYTE()) + + #define SYNTAX(x) rl78->syntax = x +@@ -168,7 +170,7 @@ rl78_decode_opcode (unsigned long pc AU, + RL78_Dis_Isa isa) + { + LocalData lds, * ld = &lds; +- unsigned char op_buf[20] = {0}; ++ unsigned char op_buf[OP_BUF_LEN] = {0}; + unsigned char *op = op_buf; + int op0, op1; + +-- +2.7.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9752.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9752.patch new file mode 100644 index 000000000..f63a993b2 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9752.patch @@ -0,0 +1,208 @@ +From c53d2e6d744da000aaafe0237bced090aab62818 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Wed, 14 Jun 2017 11:27:15 +0100 +Subject: [PATCH] Fix potential address violations when processing a corrupt + Alpha VMA binary. + + PR binutils/21589 + * vms-alpha.c (_bfd_vms_get_value): Add an extra parameter - the + maximum value for the ascic pointer. 
Check that name processing + does not read beyond this value. + (_bfd_vms_slurp_etir): Add checks for attempts to read beyond the + end of etir record. + +Upstream-Status: Backport +CVE: CVE-2017-9752 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 9 +++++++++ + bfd/vms-alpha.c | 51 +++++++++++++++++++++++++++++++++++++++++---------- + 2 files changed, 50 insertions(+), 10 deletions(-) + +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -9,6 +9,15 @@ + + 2017-06-14 Nick Clifton + ++ PR binutils/21589 ++ * vms-alpha.c (_bfd_vms_get_value): Add an extra parameter - the ++ maximum value for the ascic pointer. Check that name processing ++ does not read beyond this value. ++ (_bfd_vms_slurp_etir): Add checks for attempts to read beyond the ++ end of etir record. ++ ++2017-06-14 Nick Clifton ++ + PR binutils/21578 + * elf32-sh.c (sh_elf_set_mach_from_flags): Fix check for invalid + flag value. +Index: git/bfd/vms-alpha.c +=================================================================== +--- git.orig/bfd/vms-alpha.c ++++ git/bfd/vms-alpha.c +@@ -1456,7 +1456,7 @@ dst_retrieve_location (bfd *abfd, unsign + /* Write multiple bytes to section image. */ + + static bfd_boolean +-image_write (bfd *abfd, unsigned char *ptr, int size) ++image_write (bfd *abfd, unsigned char *ptr, unsigned int size) + { + #if VMS_DEBUG + _bfd_vms_debug (8, "image_write from (%p, %d) to (%ld)\n", ptr, size, +@@ -1603,14 +1603,16 @@ _bfd_vms_etir_name (int cmd) + #define HIGHBIT(op) ((op & 0x80000000L) == 0x80000000L) + + static void +-_bfd_vms_get_value (bfd *abfd, const unsigned char *ascic, ++_bfd_vms_get_value (bfd *abfd, ++ const unsigned char *ascic, ++ const unsigned char *max_ascic, + struct bfd_link_info *info, + bfd_vma *vma, + struct alpha_vms_link_hash_entry **hp) + { + char name[257]; +- int len; +- int i; ++ unsigned int len; ++ unsigned int i; + struct alpha_vms_link_hash_entry *h; + + /* Not linking. Do not try to resolve the symbol. */ +@@ -1622,6 +1624,14 @@ _bfd_vms_get_value (bfd *abfd, const uns + } + + len = *ascic; ++ if (ascic + len >= max_ascic) ++ { ++ _bfd_error_handler (_("Corrupt vms value")); ++ *vma = 0; ++ *hp = NULL; ++ return; ++ } ++ + for (i = 0; i < len; i++) + name[i] = ascic[i + 1]; + name[i] = 0; +@@ -1741,6 +1751,15 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + _bfd_hexdump (8, ptr, cmd_length - 4, 0); + #endif + ++ /* PR 21589: Check for a corrupt ETIR record. */ ++ if (cmd_length < 4) ++ { ++ corrupt_etir: ++ _bfd_error_handler (_("Corrupt ETIR record encountered")); ++ bfd_set_error (bfd_error_bad_value); ++ return FALSE; ++ } ++ + switch (cmd) + { + /* Stack global +@@ -1748,7 +1767,7 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + + stack 32 bit value of symbol (high bits set to 0). */ + case ETIR__C_STA_GBL: +- _bfd_vms_get_value (abfd, ptr, info, &op1, &h); ++ _bfd_vms_get_value (abfd, ptr, maxptr, info, &op1, &h); + _bfd_vms_push (abfd, op1, alpha_vms_sym_to_ctxt (h)); + break; + +@@ -1757,6 +1776,8 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + + stack 32 bit value, sign extend to 64 bit. */ + case ETIR__C_STA_LW: ++ if (ptr + 4 >= maxptr) ++ goto corrupt_etir; + _bfd_vms_push (abfd, bfd_getl32 (ptr), RELC_NONE); + break; + +@@ -1765,6 +1786,8 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + + stack 64 bit value of symbol. 
*/ + case ETIR__C_STA_QW: ++ if (ptr + 8 >= maxptr) ++ goto corrupt_etir; + _bfd_vms_push (abfd, bfd_getl64 (ptr), RELC_NONE); + break; + +@@ -1778,6 +1801,8 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + { + int psect; + ++ if (ptr + 12 >= maxptr) ++ goto corrupt_etir; + psect = bfd_getl32 (ptr); + if ((unsigned int) psect >= PRIV (section_count)) + { +@@ -1867,6 +1892,8 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + { + int size; + ++ if (ptr + 4 >= maxptr) ++ goto corrupt_etir; + size = bfd_getl32 (ptr); + _bfd_vms_pop (abfd, &op1, &rel1); + if (rel1 != RELC_NONE) +@@ -1879,7 +1906,7 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + /* Store global: write symbol value + arg: cs global symbol name. */ + case ETIR__C_STO_GBL: +- _bfd_vms_get_value (abfd, ptr, info, &op1, &h); ++ _bfd_vms_get_value (abfd, ptr, maxptr, info, &op1, &h); + if (h && h->sym) + { + if (h->sym->typ == EGSD__C_SYMG) +@@ -1901,7 +1928,7 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + /* Store code address: write address of entry point + arg: cs global symbol name (procedure). */ + case ETIR__C_STO_CA: +- _bfd_vms_get_value (abfd, ptr, info, &op1, &h); ++ _bfd_vms_get_value (abfd, ptr, maxptr, info, &op1, &h); + if (h && h->sym) + { + if (h->sym->flags & EGSY__V_NORM) +@@ -1946,8 +1973,10 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + da data. */ + case ETIR__C_STO_IMM: + { +- int size; ++ unsigned int size; + ++ if (ptr + 4 >= maxptr) ++ goto corrupt_etir; + size = bfd_getl32 (ptr); + image_write (abfd, ptr + 4, size); + } +@@ -1960,7 +1989,7 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + store global longword: store 32bit value of symbol + arg: cs symbol name. */ + case ETIR__C_STO_GBL_LW: +- _bfd_vms_get_value (abfd, ptr, info, &op1, &h); ++ _bfd_vms_get_value (abfd, ptr, maxptr, info, &op1, &h); + #if 0 + abort (); + #endif +@@ -2013,7 +2042,7 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + da signature. */ + + case ETIR__C_STC_LP_PSB: +- _bfd_vms_get_value (abfd, ptr + 4, info, &op1, &h); ++ _bfd_vms_get_value (abfd, ptr + 4, maxptr, info, &op1, &h); + if (h && h->sym) + { + if (h->sym->typ == EGSD__C_SYMG) +@@ -2109,6 +2138,8 @@ _bfd_vms_slurp_etir (bfd *abfd, struct b + /* Augment relocation base: increment image location counter by offset + arg: lw offset value. */ + case ETIR__C_CTL_AUGRB: ++ if (ptr + 4 >= maxptr) ++ goto corrupt_etir; + op1 = bfd_getl32 (ptr); + image_inc_ptr (abfd, op1); + break; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9753.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9753.patch new file mode 100644 index 000000000..241142b57 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9753.patch @@ -0,0 +1,79 @@ +From 04f963fd489cae724a60140e13984415c205f4ac Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Wed, 14 Jun 2017 10:35:16 +0100 +Subject: [PATCH] Fix seg-faults in objdump when disassembling a corrupt + versados binary. + + PR binutils/21591 + * versados.c (versados_mkobject): Zero the allocated tdata structure. + (process_otr): Check for an invalid offset in the otr structure. 
+ +Upstream-Status: Backport +CVE: CVE-2017-9753 +CVE: CVE-2017-9754 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 6 ++++++ + bfd/versados.c | 12 ++++++++---- + 2 files changed, 14 insertions(+), 4 deletions(-) + +Index: git/bfd/versados.c +=================================================================== +--- git.orig/bfd/versados.c ++++ git/bfd/versados.c +@@ -149,7 +149,7 @@ versados_mkobject (bfd *abfd) + if (abfd->tdata.versados_data == NULL) + { + bfd_size_type amt = sizeof (tdata_type); +- tdata_type *tdata = bfd_alloc (abfd, amt); ++ tdata_type *tdata = bfd_zalloc (abfd, amt); + + if (tdata == NULL) + return FALSE; +@@ -345,13 +345,13 @@ reloc_howto_type versados_howto_table[] + }; + + static int +-get_offset (int len, unsigned char *ptr) ++get_offset (unsigned int len, unsigned char *ptr) + { + int val = 0; + + if (len) + { +- int i; ++ unsigned int i; + + val = *ptr++; + if (val & 0x80) +@@ -394,9 +394,13 @@ process_otr (bfd *abfd, struct ext_otr * + int flag = *srcp++; + int esdids = (flag >> 5) & 0x7; + int sizeinwords = ((flag >> 3) & 1) ? 2 : 1; +- int offsetlen = flag & 0x7; ++ unsigned int offsetlen = flag & 0x7; + int j; + ++ /* PR 21591: Check for invalid lengths. */ ++ if (srcp + esdids + offsetlen >= endp) ++ return; ++ + if (esdids == 0) + { + /* A zero esdid means the new pc is the offset given. */ +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -8,6 +8,10 @@ + (ieee_archive_p): Likewise. + + 2017-06-14 Nick Clifton ++ ++ PR binutils/21591 ++ * versados.c (versados_mkobject): Zero the allocated tdata structure. ++ (process_otr): Check for an invalid offset in the otr structure. + + PR binutils/21589 + * vms-alpha.c (_bfd_vms_get_value): Add an extra parameter - the diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9755.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9755.patch new file mode 100644 index 000000000..15dc9090d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9755.patch @@ -0,0 +1,63 @@ +From 0d96e4df4812c3bad77c229dfef47a9bc115ac12 Mon Sep 17 00:00:00 2001 +From: "H.J. Lu" +Date: Thu, 15 Jun 2017 06:40:17 -0700 +Subject: [PATCH] i386-dis: Check valid bnd register + +Since there are only 4 bnd registers, return "(bad)" for register +number > 3. + + PR binutils/21594 + * i386-dis.c (OP_E_register): Check valid bnd register. + (OP_G): Likewise. + +Upstream-Status: Backport +CVE: CVE-2017-9755 +Signed-off-by: Armin Kuster + +--- + opcodes/ChangeLog | 6 ++++++ + opcodes/i386-dis.c | 10 ++++++++++ + 2 files changed, 16 insertions(+) + +Index: git/opcodes/ChangeLog +=================================================================== +--- git.orig/opcodes/ChangeLog ++++ git/opcodes/ChangeLog +@@ -1,3 +1,9 @@ ++2017-06-15 H.J. Lu ++ ++ PR binutils/21594 ++ * i386-dis.c (OP_E_register): Check valid bnd register. ++ (OP_G): Likewise. ++ + 2017-06-15 Nick Clifton + + PR binutils/21588 +Index: git/opcodes/i386-dis.c +=================================================================== +--- git.orig/opcodes/i386-dis.c ++++ git/opcodes/i386-dis.c +@@ -14939,6 +14939,11 @@ OP_E_register (int bytemode, int sizefla + names = address_mode == mode_64bit ? 
names64 : names32; + break; + case bnd_mode: ++ if (reg > 0x3) ++ { ++ oappend ("(bad)"); ++ return; ++ } + names = names_bnd; + break; + case indir_v_mode: +@@ -15483,6 +15488,11 @@ OP_G (int bytemode, int sizeflag) + oappend (names64[modrm.reg + add]); + break; + case bnd_mode: ++ if (modrm.reg > 0x3) ++ { ++ oappend ("(bad)"); ++ return; ++ } + oappend (names_bnd[modrm.reg]); + break; + case v_mode: diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9756.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9756.patch new file mode 100644 index 000000000..191d0be19 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9756.patch @@ -0,0 +1,50 @@ +From cd3ea7c69acc5045eb28f9bf80d923116e15e4f5 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Thu, 15 Jun 2017 13:26:54 +0100 +Subject: [PATCH] Prevent address violation problem when disassembling corrupt + aarch64 binary. + + PR binutils/21595 + * aarch64-dis.c (aarch64_ext_ldst_reglist): Check for an out of + range value. + +Upstream-Status: Backport +CVE: CVE-2017-9756 +Signed-off-by: Armin Kuster + +--- + opcodes/ChangeLog | 6 ++++++ + opcodes/aarch64-dis.c | 3 +++ + 2 files changed, 9 insertions(+) + +Index: git/opcodes/ChangeLog +=================================================================== +--- git.orig/opcodes/ChangeLog ++++ git/opcodes/ChangeLog +@@ -6,6 +6,12 @@ + + 2017-06-15 Nick Clifton + ++ PR binutils/21595 ++ * aarch64-dis.c (aarch64_ext_ldst_reglist): Check for an out of ++ range value. ++ ++2017-06-15 Nick Clifton ++ + PR binutils/21588 + * rl78-decode.opc (OP_BUF_LEN): Define. + (GETBYTE): Check for the index exceeding OP_BUF_LEN. +Index: git/opcodes/aarch64-dis.c +=================================================================== +--- git.orig/opcodes/aarch64-dis.c ++++ git/opcodes/aarch64-dis.c +@@ -409,6 +409,9 @@ aarch64_ext_ldst_reglist (const aarch64_ + info->reglist.first_regno = extract_field (FLD_Rt, code, 0); + /* opcode */ + value = extract_field (FLD_opcode, code, 0); ++ /* PR 21595: Check for a bogus value. */ ++ if (value >= ARRAY_SIZE (data)) ++ return 0; + if (expected_num != data[value].num_elements || data[value].is_reserved) + return 0; + info->reglist.num_regs = data[value].num_regs; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9954.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9954.patch new file mode 100644 index 000000000..8a9d7ebd9 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9954.patch @@ -0,0 +1,58 @@ +From 04e15b4a9462cb1ae819e878a6009829aab8020b Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Mon, 26 Jun 2017 15:46:34 +0100 +Subject: [PATCH] Fix address violation parsing a corrupt texhex format file. + + PR binutils/21670 + * tekhex.c (getvalue): Check for the source pointer exceeding the + end pointer before the first byte is read. 
+ +Upstream-Status: Backport +CVE: CVE_2017-9954 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 6 ++++++ + bfd/tekhex.c | 6 +++++- + 2 files changed, 11 insertions(+), 1 deletion(-) + +Index: git/bfd/tekhex.c +=================================================================== +--- git.orig/bfd/tekhex.c ++++ git/bfd/tekhex.c +@@ -273,6 +273,9 @@ getvalue (char **srcp, bfd_vma *valuep, + bfd_vma value = 0; + unsigned int len; + ++ if (src >= endp) ++ return FALSE; ++ + if (!ISHEX (*src)) + return FALSE; + +@@ -514,9 +517,10 @@ pass_over (bfd *abfd, bfd_boolean (*func + /* To the front of the file. */ + if (bfd_seek (abfd, (file_ptr) 0, SEEK_SET) != 0) + return FALSE; ++ + while (! is_eof) + { +- char src[MAXCHUNK]; ++ static char src[MAXCHUNK]; + char type; + + /* Find first '%'. */ +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,9 @@ ++2017-06-26 Nick Clifton ++ ++ PR binutils/21670 ++ * tekhex.c (getvalue): Check for the source pointer exceeding the ++ end pointer before the first byte is read. ++ + 2017-06-15 Nick Clifton + + PR binutils/21582 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_1.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_1.patch new file mode 100644 index 000000000..774670fb0 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_1.patch @@ -0,0 +1,168 @@ +From cfd14a500e0485374596234de4db10e88ebc7618 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Mon, 26 Jun 2017 15:25:08 +0100 +Subject: [PATCH] Fix address violations when atempting to parse fuzzed + binaries. + + PR binutils/21665 +bfd * opncls.c (get_build_id): Check that the section is beig enough + to contain the whole note. + * compress.c (bfd_get_full_section_contents): Check for and reject + a section whoes size is greater than the size of the entire file. + * elf32-v850.c (v850_elf_copy_notes): Allow for the ouput to not + contain a notes section. + +binutils* objdump.c (disassemble_section): Skip any section that is bigger + than the entire file. + +Upstream-Status: Backport +CVE: CVE-2017-9955 #1 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 10 ++++++++++ + bfd/compress.c | 6 ++++++ + bfd/elf32-v850.c | 4 +++- + bfd/opncls.c | 18 ++++++++++++++++-- + binutils/ChangeLog | 6 ++++++ + binutils/objdump.c | 4 ++-- + 6 files changed, 43 insertions(+), 5 deletions(-) + +Index: git/bfd/compress.c +=================================================================== +--- git.orig/bfd/compress.c ++++ git/bfd/compress.c +@@ -239,6 +239,12 @@ bfd_get_full_section_contents (bfd *abfd + *ptr = NULL; + return TRUE; + } ++ else if (bfd_get_file_size (abfd) > 0 ++ && sz > (bfd_size_type) bfd_get_file_size (abfd)) ++ { ++ *ptr = NULL; ++ return FALSE; ++ } + + switch (sec->compress_status) + { +Index: git/bfd/elf32-v850.c +=================================================================== +--- git.orig/bfd/elf32-v850.c ++++ git/bfd/elf32-v850.c +@@ -2450,7 +2450,9 @@ v850_elf_copy_notes (bfd *ibfd, bfd *obf + BFD_ASSERT (bfd_malloc_and_get_section (ibfd, inotes, & icont)); + + if ((ocont = elf_section_data (onotes)->this_hdr.contents) == NULL) +- BFD_ASSERT (bfd_malloc_and_get_section (obfd, onotes, & ocont)); ++ /* If the output is being stripped then it is possible for ++ the notes section to disappear. In this case do nothing. 
*/ ++ return; + + /* Copy/overwrite notes from the input to the output. */ + memcpy (ocont, icont, bfd_section_size (obfd, onotes)); +Index: git/bfd/opncls.c +=================================================================== +--- git.orig/bfd/opncls.c ++++ git/bfd/opncls.c +@@ -1776,6 +1776,7 @@ get_build_id (bfd *abfd) + Elf_External_Note *enote; + bfd_byte *contents; + asection *sect; ++ bfd_size_type size; + + BFD_ASSERT (abfd); + +@@ -1790,8 +1791,9 @@ get_build_id (bfd *abfd) + return NULL; + } + ++ size = bfd_get_section_size (sect); + /* FIXME: Should we support smaller build-id notes ? */ +- if (bfd_get_section_size (sect) < 0x24) ++ if (size < 0x24) + { + bfd_set_error (bfd_error_invalid_operation); + return NULL; +@@ -1804,6 +1806,17 @@ get_build_id (bfd *abfd) + return NULL; + } + ++ /* FIXME: Paranoia - allow for compressed build-id sections. ++ Maybe we should complain if this size is different from ++ the one obtained above... */ ++ size = bfd_get_section_size (sect); ++ if (size < sizeof (Elf_External_Note)) ++ { ++ bfd_set_error (bfd_error_invalid_operation); ++ free (contents); ++ return NULL; ++ } ++ + enote = (Elf_External_Note *) contents; + inote.type = H_GET_32 (abfd, enote->type); + inote.namesz = H_GET_32 (abfd, enote->namesz); +@@ -1815,7 +1828,8 @@ get_build_id (bfd *abfd) + if (inote.descsz == 0 + || inote.type != NT_GNU_BUILD_ID + || inote.namesz != 4 /* sizeof "GNU" */ +- || strcmp (inote.namedata, "GNU") != 0) ++ || strncmp (inote.namedata, "GNU", 4) != 0 ++ || size < (12 + BFD_ALIGN (inote.namesz, 4) + inote.descsz)) + { + free (contents); + bfd_set_error (bfd_error_invalid_operation); +Index: git/binutils/objdump.c +=================================================================== +--- git.orig/binutils/objdump.c ++++ git/binutils/objdump.c +@@ -2048,7 +2048,7 @@ disassemble_section (bfd *abfd, asection + return; + + datasize = bfd_get_section_size (section); +- if (datasize == 0) ++ if (datasize == 0 || datasize >= (bfd_size_type) bfd_get_file_size (abfd)) + return; + + if (start_address == (bfd_vma) -1 +@@ -2912,7 +2912,7 @@ dump_target_specific (bfd *abfd) + static void + dump_section (bfd *abfd, asection *section, void *dummy ATTRIBUTE_UNUSED) + { +- bfd_byte *data = 0; ++ bfd_byte *data = NULL; + bfd_size_type datasize; + bfd_vma addr_offset; + bfd_vma start_offset; +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,4 +1,14 @@ + 2017-06-26 Nick Clifton ++ ++ PR binutils/21665 ++ * opncls.c (get_build_id): Check that the section is beig enough ++ to contain the whole note. ++ * compress.c (bfd_get_full_section_contents): Check for and reject ++ a section whoes size is greater than the size of the entire file. ++ * elf32-v850.c (v850_elf_copy_notes): Allow for the ouput to not ++ contain a notes section. ++ ++2017-06-26 Nick Clifton + + PR binutils/21670 + * tekhex.c (getvalue): Check for the source pointer exceeding the +Index: git/binutils/ChangeLog +=================================================================== +--- git.orig/binutils/ChangeLog ++++ git/binutils/ChangeLog +@@ -1,3 +1,9 @@ ++2017-06-26 Nick Clifton ++ ++ PR binutils/21665 ++ * objdump.c (disassemble_section): Skip any section that is bigger ++ than the entire file. 
++ + 2017-04-03 Nick Clifton + + PR binutils/21345 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_2.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_2.patch new file mode 100644 index 000000000..f95295f18 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_2.patch @@ -0,0 +1,122 @@ +From 0630b49c470ca2e3c3f74da4c7e4ff63440dd71f Mon Sep 17 00:00:00 2001 +From: "H.J. Lu" +Date: Mon, 26 Jun 2017 09:24:49 -0700 +Subject: [PATCH] Check file size before getting section contents + +Don't check the section size in bfd_get_full_section_contents since +the size of a decompressed section may be larger than the file size. +Instead, check file size in _bfd_generic_get_section_contents. + + PR binutils/21665 + * compress.c (bfd_get_full_section_contents): Don't check the + file size here. + * libbfd.c (_bfd_generic_get_section_contents): Check for and + reject a section whoes size + offset is greater than the size + of the entire file. + (_bfd_generic_get_section_contents_in_window): Likewise. + +Upstream-Status: Backport +CVE: CVE-2017-9955 #2 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 10 +++++++++- + bfd/compress.c | 8 +------- + bfd/libbfd.c | 17 ++++++++++++++++- + 3 files changed, 26 insertions(+), 9 deletions(-) + +Index: git/bfd/compress.c +=================================================================== +--- git.orig/bfd/compress.c ++++ git/bfd/compress.c +@@ -239,12 +239,6 @@ bfd_get_full_section_contents (bfd *abfd + *ptr = NULL; + return TRUE; + } +- else if (bfd_get_file_size (abfd) > 0 +- && sz > (bfd_size_type) bfd_get_file_size (abfd)) +- { +- *ptr = NULL; +- return FALSE; +- } + + switch (sec->compress_status) + { +@@ -260,7 +254,7 @@ bfd_get_full_section_contents (bfd *abfd + /* xgettext:c-format */ + (_("error: %B(%A) is too large (%#lx bytes)"), + abfd, sec, (long) sz); +- return FALSE; ++ return FALSE; + } + } + +Index: git/bfd/libbfd.c +=================================================================== +--- git.orig/bfd/libbfd.c ++++ git/bfd/libbfd.c +@@ -780,6 +780,7 @@ _bfd_generic_get_section_contents (bfd * + bfd_size_type count) + { + bfd_size_type sz; ++ file_ptr filesz; + if (count == 0) + return TRUE; + +@@ -802,8 +803,15 @@ _bfd_generic_get_section_contents (bfd * + sz = section->rawsize; + else + sz = section->size; ++ filesz = bfd_get_file_size (abfd); ++ if (filesz < 0) ++ { ++ /* This should never happen. */ ++ abort (); ++ } + if (offset + count < count +- || offset + count > sz) ++ || offset + count > sz ++ || (section->filepos + offset + sz) > (bfd_size_type) filesz) + { + bfd_set_error (bfd_error_invalid_operation); + return FALSE; +@@ -826,6 +834,7 @@ _bfd_generic_get_section_contents_in_win + { + #ifdef USE_MMAP + bfd_size_type sz; ++ file_ptr filesz; + + if (count == 0) + return TRUE; +@@ -858,7 +867,13 @@ _bfd_generic_get_section_contents_in_win + sz = section->rawsize; + else + sz = section->size; ++ filesz = bfd_get_file_size (abfd); ++ { ++ /* This should never happen. */ ++ abort (); ++ } + if (offset + count > sz ++ || (section->filepos + offset + sz) > (bfd_size_type) filesz + || ! bfd_get_file_window (abfd, section->filepos + offset, count, w, + TRUE)) + return FALSE; +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,13 @@ ++2017-06-26 H.J. 
Lu ++ ++ PR binutils/21665 ++ * compress.c (bfd_get_full_section_contents): Don't check the ++ file size here. ++ * libbfd.c (_bfd_generic_get_section_contents): Check for and ++ reject a section whoes size + offset is greater than the size ++ of the entire file. ++ (_bfd_generic_get_section_contents_in_window): Likewise. ++ + 2017-06-26 Nick Clifton + + PR binutils/21665 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_3.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_3.patch new file mode 100644 index 000000000..1b67c4e95 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_3.patch @@ -0,0 +1,48 @@ +From 1f473e3d0ad285195934e6a077c7ed32afe66437 Mon Sep 17 00:00:00 2001 +From: "H.J. Lu" +Date: Mon, 26 Jun 2017 15:47:16 -0700 +Subject: [PATCH] Add a missing line to + _bfd_generic_get_section_contents_in_window + + PR binutils/21665 + * libbfd.c (_bfd_generic_get_section_contents_in_window): Add + a missing line. + +Upstream-Status: Backport +CVE: CVE-2017-9955 #3 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 6 ++++++ + bfd/libbfd.c | 1 + + 2 files changed, 7 insertions(+) + +Index: git/bfd/libbfd.c +=================================================================== +--- git.orig/bfd/libbfd.c ++++ git/bfd/libbfd.c +@@ -868,6 +868,7 @@ _bfd_generic_get_section_contents_in_win + else + sz = section->size; + filesz = bfd_get_file_size (abfd); ++ if (filesz < 0) + { + /* This should never happen. */ + abort (); +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,6 +1,12 @@ + 2017-06-26 H.J. Lu + + PR binutils/21665 ++ * libbfd.c (_bfd_generic_get_section_contents_in_window): Add ++ a missing line. ++ ++2017-06-26 H.J. Lu ++ ++ PR binutils/21665 + * compress.c (bfd_get_full_section_contents): Don't check the + file size here. + * libbfd.c (_bfd_generic_get_section_contents): Check for and diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_4.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_4.patch new file mode 100644 index 000000000..97d529a78 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_4.patch @@ -0,0 +1,51 @@ +From ab27f80c5dceaa23c4ba7f62c0d5d22a5d5dd7a1 Mon Sep 17 00:00:00 2001 +From: Pedro Alves +Date: Tue, 27 Jun 2017 00:21:25 +0100 +Subject: [PATCH] Fix GDB regressions caused by previous + bfd_get_section_contents changes + +Ref: https://sourceware.org/ml/binutils/2017-06/msg00343.html + +bfd/ChangeLog: +2017-06-26 Pedro Alves + + PR binutils/21665 + * libbfd.c (_bfd_generic_get_section_contents): Add "count", not + "sz". + +Upstream-Status: Backport +CVE: CVE-2017-9955 #4 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 6 ++++++ + bfd/libbfd.c | 2 +- + 2 files changed, 7 insertions(+), 1 deletion(-) + +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,9 @@ ++2017-06-26 Pedro Alves ++ ++ PR binutils/21665 ++ * libbfd.c (_bfd_generic_get_section_contents): Add "count", not ++ "sz". ++ + 2017-06-26 H.J. 
Lu + + PR binutils/21665 +Index: git/bfd/libbfd.c +=================================================================== +--- git.orig/bfd/libbfd.c ++++ git/bfd/libbfd.c +@@ -811,7 +811,7 @@ _bfd_generic_get_section_contents (bfd * + } + if (offset + count < count + || offset + count > sz +- || (section->filepos + offset + sz) > (bfd_size_type) filesz) ++ || (section->filepos + offset + count) > (bfd_size_type) filesz) + { + bfd_set_error (bfd_error_invalid_operation); + return FALSE; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_5.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_5.patch new file mode 100644 index 000000000..da3bd37e8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_5.patch @@ -0,0 +1,89 @@ +From 7211ae501eb0de1044983f2dfb00091a58fbd66c Mon Sep 17 00:00:00 2001 +From: Alan Modra +Date: Tue, 27 Jun 2017 09:45:04 +0930 +Subject: [PATCH] More fixes for bfd_get_section_contents change + + PR binutils/21665 + * libbfd.c (_bfd_generic_get_section_contents): Delete abort. + Use unsigned file pointer type, and remove cast. + * libbfd.c (_bfd_generic_get_section_contents_in_window): Likewise. + Add "count", not "sz". + +Upstream-Status: Backport +CVE: CVE-2017-9955 #5 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 8 ++++++++ + bfd/libbfd.c | 18 ++++-------------- + 2 files changed, 12 insertions(+), 14 deletions(-) + +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,11 @@ ++2017-06-27 Alan Modra ++ ++ PR binutils/21665 ++ * libbfd.c (_bfd_generic_get_section_contents): Delete abort. ++ Use unsigned file pointer type, and remove cast. ++ * libbfd.c (_bfd_generic_get_section_contents_in_window): Likewise. ++ Add "count", not "sz". ++ + 2017-06-26 Pedro Alves + + PR binutils/21665 +Index: git/bfd/libbfd.c +=================================================================== +--- git.orig/bfd/libbfd.c ++++ git/bfd/libbfd.c +@@ -780,7 +780,7 @@ _bfd_generic_get_section_contents (bfd * + bfd_size_type count) + { + bfd_size_type sz; +- file_ptr filesz; ++ ufile_ptr filesz; + if (count == 0) + return TRUE; + +@@ -804,14 +804,9 @@ _bfd_generic_get_section_contents (bfd * + else + sz = section->size; + filesz = bfd_get_file_size (abfd); +- if (filesz < 0) +- { +- /* This should never happen. */ +- abort (); +- } + if (offset + count < count + || offset + count > sz +- || (section->filepos + offset + count) > (bfd_size_type) filesz) ++ || section->filepos + offset + count > filesz) + { + bfd_set_error (bfd_error_invalid_operation); + return FALSE; +@@ -834,7 +829,7 @@ _bfd_generic_get_section_contents_in_win + { + #ifdef USE_MMAP + bfd_size_type sz; +- file_ptr filesz; ++ ufile_ptr filesz; + + if (count == 0) + return TRUE; +@@ -868,13 +863,8 @@ _bfd_generic_get_section_contents_in_win + else + sz = section->size; + filesz = bfd_get_file_size (abfd); +- if (filesz < 0) +- { +- /* This should never happen. */ +- abort (); +- } + if (offset + count > sz +- || (section->filepos + offset + sz) > (bfd_size_type) filesz ++ || section->filepos + offset + count > filesz + || ! 
bfd_get_file_window (abfd, section->filepos + offset, count, w, + TRUE)) + return FALSE; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_6.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_6.patch new file mode 100644 index 000000000..e36429ad5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_6.patch @@ -0,0 +1,56 @@ +From ea9aafc41a764e4e2dbb88a7b031e886b481b99a Mon Sep 17 00:00:00 2001 +From: Alan Modra +Date: Tue, 27 Jun 2017 14:43:49 +0930 +Subject: [PATCH] Warning fix + + PR binutils/21665 + * libbfd.c (_bfd_generic_get_section_contents): Warning fix. + (_bfd_generic_get_section_contents_in_window): Likewise. + +Upstream-Status: Backport +CVE: CVE-2017-9955 #6 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 12 +++++++++--- + bfd/libbfd.c | 4 ++-- + 2 files changed, 11 insertions(+), 5 deletions(-) + +Index: git/bfd/libbfd.c +=================================================================== +--- git.orig/bfd/libbfd.c ++++ git/bfd/libbfd.c +@@ -806,7 +806,7 @@ _bfd_generic_get_section_contents (bfd * + filesz = bfd_get_file_size (abfd); + if (offset + count < count + || offset + count > sz +- || section->filepos + offset + count > filesz) ++ || (ufile_ptr) section->filepos + offset + count > filesz) + { + bfd_set_error (bfd_error_invalid_operation); + return FALSE; +@@ -864,7 +864,7 @@ _bfd_generic_get_section_contents_in_win + sz = section->size; + filesz = bfd_get_file_size (abfd); + if (offset + count > sz +- || section->filepos + offset + count > filesz ++ || (ufile_ptr) section->filepos + offset + count > filesz + || ! bfd_get_file_window (abfd, section->filepos + offset, count, w, + TRUE)) + return FALSE; +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,5 +1,11 @@ + 2017-06-27 Alan Modra + ++ PR binutils/21665 ++ * libbfd.c (_bfd_generic_get_section_contents): Warning fix. ++ (_bfd_generic_get_section_contents_in_window): Likewise. ++ ++2017-06-27 Alan Modra ++ + PR binutils/21665 + * libbfd.c (_bfd_generic_get_section_contents): Delete abort. + Use unsigned file pointer type, and remove cast. diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_7.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_7.patch new file mode 100644 index 000000000..2cae63b4f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_7.patch @@ -0,0 +1,80 @@ +From 60a02042bacf8d25814430080adda61ed086bca6 Mon Sep 17 00:00:00 2001 +From: Nick Clifton +Date: Fri, 30 Jun 2017 11:03:37 +0100 +Subject: [PATCH] Fix failures in MMIX linker tests introduced by fix for PR + 21665. + + PR binutils/21665 + * objdump.c (disassemble_section): Move check for an overlarge + section to just before the allocation of memory. Do not check + section size against file size, but instead use an arbitrary 2Gb + limit. Issue a warning message if the section is too big. 
+ +Upstream-Status: Backport +CVE: CVE-2017-9955 #7 +Signed-off-by: Armin Kuster + +--- + binutils/ChangeLog | 8 ++++++++ + binutils/objdump.c | 25 ++++++++++++++++++++++++- + 2 files changed, 32 insertions(+), 1 deletion(-) + +Index: git/binutils/objdump.c +=================================================================== +--- git.orig/binutils/objdump.c ++++ git/binutils/objdump.c +@@ -2048,7 +2048,7 @@ disassemble_section (bfd *abfd, asection + return; + + datasize = bfd_get_section_size (section); +- if (datasize == 0 || datasize >= (bfd_size_type) bfd_get_file_size (abfd)) ++ if (datasize == 0) + return; + + if (start_address == (bfd_vma) -1 +@@ -2112,6 +2112,29 @@ disassemble_section (bfd *abfd, asection + } + rel_ppend = rel_pp + rel_count; + ++ /* PR 21665: Check for overlarge datasizes. ++ Note - we used to check for "datasize > bfd_get_file_size (abfd)" but ++ this fails when using compressed sections or compressed file formats ++ (eg MMO, tekhex). ++ ++ The call to xmalloc below will fail if too much memory is requested, ++ which will catch the problem in the normal use case. But if a memory ++ checker is in use, eg valgrind or sanitize, then an exception will ++ be still generated, so we try to catch the problem first. ++ ++ Unfortunately there is no simple way to determine how much memory can ++ be allocated by calling xmalloc. So instead we use a simple, arbitrary ++ limit of 2Gb. Hopefully this should be enough for most users. If ++ someone does start trying to disassemble sections larger then 2Gb in ++ size they will doubtless complain and we can increase the limit. */ ++#define MAX_XMALLOC (1024 * 1024 * 1024 * 2UL) /* 2Gb */ ++ if (datasize > MAX_XMALLOC) ++ { ++ non_fatal (_("Reading section %s failed because it is too big (%#lx)"), ++ section->name, (unsigned long) datasize); ++ return; ++ } ++ + data = (bfd_byte *) xmalloc (datasize); + + bfd_get_section_contents (abfd, section, data, 0, datasize); +Index: git/binutils/ChangeLog +=================================================================== +--- git.orig/binutils/ChangeLog ++++ git/binutils/ChangeLog +@@ -1,3 +1,11 @@ ++2017-06-30 Nick Clifton ++ ++ PR binutils/21665 ++ * objdump.c (disassemble_section): Move check for an overlarge ++ section to just before the allocation of memory. Do not check ++ section size against file size, but instead use an arbitrary 2Gb ++ limit. Issue a warning message if the section is too big. ++ + 2017-06-26 Nick Clifton + + PR binutils/21665 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_8.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_8.patch new file mode 100644 index 000000000..45dd97467 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_8.patch @@ -0,0 +1,187 @@ +From bae7501e87ab614115d9d3213b4dd18d96e604db Mon Sep 17 00:00:00 2001 +From: Alan Modra +Date: Sat, 1 Jul 2017 21:58:10 +0930 +Subject: [PATCH] Use bfd_malloc_and_get_section + +It's nicer than xmalloc followed by bfd_get_section_contents, since +xmalloc exits on failure and needs a check that its size_t arg doesn't +lose high bits when converted from bfd_size_type. + + PR binutils/21665 + * objdump.c (strtab): Make var a bfd_byte*. + (disassemble_section): Don't limit malloc size. Instead, use + bfd_malloc_and_get_section. + (read_section_stabs): Use bfd_malloc_and_get_section. Return + bfd_byte*. + (find_stabs_section): Remove now unnecessary cast. 
+ * objcopy.c (copy_object): Use bfd_malloc_and_get_section. Free + contents on error return. + * nlmconv.c (copy_sections): Use bfd_malloc_and_get_section. + +Upstream-Status: Backport +CVE: CVE-2017-9955 #8 +Signed-off-by: Armin Kuster + +--- + binutils/ChangeLog | 13 +++++++++++++ + binutils/nlmconv.c | 6 ++---- + binutils/objcopy.c | 5 +++-- + binutils/objdump.c | 44 +++++++------------------------------------- + 4 files changed, 25 insertions(+), 43 deletions(-) + +Index: git/binutils/ChangeLog +=================================================================== +--- git.orig/binutils/ChangeLog ++++ git/binutils/ChangeLog +@@ -1,3 +1,16 @@ ++2017-07-01 Alan Modra ++ ++ PR binutils/21665 ++ * objdump.c (strtab): Make var a bfd_byte*. ++ (disassemble_section): Don't limit malloc size. Instead, use ++ bfd_malloc_and_get_section. ++ (read_section_stabs): Use bfd_malloc_and_get_section. Return ++ bfd_byte*. ++ (find_stabs_section): Remove now unnecessary cast. ++ * objcopy.c (copy_object): Use bfd_malloc_and_get_section. Free ++ contents on error return. ++ * nlmconv.c (copy_sections): Use bfd_malloc_and_get_section. ++ + 2017-06-30 Nick Clifton + + PR binutils/21665 +Index: git/binutils/nlmconv.c +=================================================================== +--- git.orig/binutils/nlmconv.c ++++ git/binutils/nlmconv.c +@@ -1224,7 +1224,7 @@ copy_sections (bfd *inbfd, asection *ins + const char *inname; + asection *outsec; + bfd_size_type size; +- void *contents; ++ bfd_byte *contents; + long reloc_size; + bfd_byte buf[4]; + bfd_size_type add; +@@ -1240,9 +1240,7 @@ copy_sections (bfd *inbfd, asection *ins + contents = NULL; + else + { +- contents = xmalloc (size); +- if (! bfd_get_section_contents (inbfd, insec, contents, +- (file_ptr) 0, size)) ++ if (!bfd_malloc_and_get_section (inbfd, insec, &contents)) + bfd_fatal (bfd_get_filename (inbfd)); + } + +Index: git/binutils/objdump.c +=================================================================== +--- git.orig/binutils/objdump.c ++++ git/binutils/objdump.c +@@ -180,7 +180,7 @@ static long dynsymcount = 0; + static bfd_byte *stabs; + static bfd_size_type stab_size; + +-static char *strtab; ++static bfd_byte *strtab; + static bfd_size_type stabstr_size; + + static bfd_boolean is_relocatable = FALSE; +@@ -2112,29 +2112,6 @@ disassemble_section (bfd *abfd, asection + } + rel_ppend = rel_pp + rel_count; + +- /* PR 21665: Check for overlarge datasizes. +- Note - we used to check for "datasize > bfd_get_file_size (abfd)" but +- this fails when using compressed sections or compressed file formats +- (eg MMO, tekhex). +- +- The call to xmalloc below will fail if too much memory is requested, +- which will catch the problem in the normal use case. But if a memory +- checker is in use, eg valgrind or sanitize, then an exception will +- be still generated, so we try to catch the problem first. +- +- Unfortunately there is no simple way to determine how much memory can +- be allocated by calling xmalloc. So instead we use a simple, arbitrary +- limit of 2Gb. Hopefully this should be enough for most users. If +- someone does start trying to disassemble sections larger then 2Gb in +- size they will doubtless complain and we can increase the limit. 
*/ +-#define MAX_XMALLOC (1024 * 1024 * 1024 * 2UL) /* 2Gb */ +- if (datasize > MAX_XMALLOC) +- { +- non_fatal (_("Reading section %s failed because it is too big (%#lx)"), +- section->name, (unsigned long) datasize); +- return; +- } +- + data = (bfd_byte *) xmalloc (datasize); + + bfd_get_section_contents (abfd, section, data, 0, datasize); +@@ -2652,12 +2629,11 @@ dump_dwarf (bfd *abfd) + /* Read ABFD's stabs section STABSECT_NAME, and return a pointer to + it. Return NULL on failure. */ + +-static char * ++static bfd_byte * + read_section_stabs (bfd *abfd, const char *sect_name, bfd_size_type *size_ptr) + { + asection *stabsect; +- bfd_size_type size; +- char *contents; ++ bfd_byte *contents; + + stabsect = bfd_get_section_by_name (abfd, sect_name); + if (stabsect == NULL) +@@ -2666,10 +2642,7 @@ read_section_stabs (bfd *abfd, const cha + return FALSE; + } + +- size = bfd_section_size (abfd, stabsect); +- contents = (char *) xmalloc (size); +- +- if (! bfd_get_section_contents (abfd, stabsect, contents, 0, size)) ++ if (!bfd_malloc_and_get_section (abfd, stabsect, &contents)) + { + non_fatal (_("reading %s section of %s failed: %s"), + sect_name, bfd_get_filename (abfd), +@@ -2679,7 +2652,7 @@ read_section_stabs (bfd *abfd, const cha + return NULL; + } + +- *size_ptr = size; ++ *size_ptr = bfd_section_size (abfd, stabsect); + + return contents; + } +@@ -2806,8 +2779,7 @@ find_stabs_section (bfd *abfd, asection + + if (strtab) + { +- stabs = (bfd_byte *) read_section_stabs (abfd, section->name, +- &stab_size); ++ stabs = read_section_stabs (abfd, section->name, &stab_size); + if (stabs) + print_section_stabs (abfd, section->name, &sought->string_offset); + } +Index: git/binutils/objcopy.c +=================================================================== +--- git.orig/binutils/objcopy.c ++++ git/binutils/objcopy.c +@@ -2186,14 +2186,15 @@ copy_object (bfd *ibfd, bfd *obfd, const + continue; + } + +- bfd_byte * contents = xmalloc (size); +- if (bfd_get_section_contents (ibfd, sec, contents, 0, size)) ++ bfd_byte *contents; ++ if (bfd_malloc_and_get_section (ibfd, sec, &contents)) + { + if (fwrite (contents, 1, size, f) != size) + { + non_fatal (_("error writing section contents to %s (error: %s)"), + pdump->filename, + strerror (errno)); ++ free (contents); + return FALSE; + } + } diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_9.patch b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_9.patch new file mode 100644 index 000000000..c6353d8ce --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils/CVE-2017-9955_9.patch @@ -0,0 +1,361 @@ +From 8e2f54bcee7e3e8315d4a39a302eaf8e4389e07d Mon Sep 17 00:00:00 2001 +From: "H.J. Lu" +Date: Tue, 30 May 2017 06:34:05 -0700 +Subject: [PATCH] Add bfd_get_file_size to get archive element size + +We can't use stat() to get archive element size. Add bfd_get_file_size +to get size for both normal files and archive elements. + +bfd/ + + PR binutils/21519 + * bfdio.c (bfd_get_file_size): New function. + * bfd-in2.h: Regenerated. + +binutils/ + + PR binutils/21519 + * objdump.c (dump_relocs_in_section): Replace get_file_size + with bfd_get_file_size to get archive element size. + * testsuite/binutils-all/objdump.exp (test_objdump_f): New + proc. + (test_objdump_h): Likewise. + (test_objdump_t): Likewise. + (test_objdump_r): Likewise. + (test_objdump_s): Likewise. + Add objdump tests on archive. 
+ +Upstream-Status: Backport +CVE: CVE-2017-9955 +Signed-off-by: Armin Kuster + +--- + bfd/ChangeLog | 6 + + bfd/bfd-in2.h | 2 + + bfd/bfdio.c | 23 ++++ + binutils/ChangeLog | 13 ++ + binutils/objdump.c | 2 +- + binutils/testsuite/binutils-all/objdump.exp | 178 +++++++++++++++++++--------- + 6 files changed, 170 insertions(+), 54 deletions(-) + +Index: git/bfd/bfd-in2.h +=================================================================== +--- git.orig/bfd/bfd-in2.h ++++ git/bfd/bfd-in2.h +@@ -1241,6 +1241,8 @@ long bfd_get_mtime (bfd *abfd); + + file_ptr bfd_get_size (bfd *abfd); + ++file_ptr bfd_get_file_size (bfd *abfd); ++ + void *bfd_mmap (bfd *abfd, void *addr, bfd_size_type len, + int prot, int flags, file_ptr offset, + void **map_addr, bfd_size_type *map_len); +Index: git/bfd/bfdio.c +=================================================================== +--- git.orig/bfd/bfdio.c ++++ git/bfd/bfdio.c +@@ -434,6 +434,29 @@ bfd_get_size (bfd *abfd) + return buf.st_size; + } + ++/* ++FUNCTION ++ bfd_get_file_size ++ ++SYNOPSIS ++ file_ptr bfd_get_file_size (bfd *abfd); ++ ++DESCRIPTION ++ Return the file size (as read from file system) for the file ++ associated with BFD @var{abfd}. It supports both normal files ++ and archive elements. ++ ++*/ ++ ++file_ptr ++bfd_get_file_size (bfd *abfd) ++{ ++ if (abfd->my_archive != NULL ++ && !bfd_is_thin_archive (abfd->my_archive)) ++ return arelt_size (abfd); ++ ++ return bfd_get_size (abfd); ++} + + /* + FUNCTION +Index: git/binutils/objdump.c +=================================================================== +--- git.orig/binutils/objdump.c ++++ git/binutils/objdump.c +@@ -3310,7 +3310,7 @@ dump_relocs_in_section (bfd *abfd, + } + + if ((bfd_get_file_flags (abfd) & (BFD_IN_MEMORY | BFD_LINKER_CREATED)) == 0 +- && relsize > get_file_size (bfd_get_filename (abfd))) ++ && relsize > bfd_get_file_size (abfd)) + { + printf (" (too many: 0x%x)\n", section->reloc_count); + bfd_set_error (bfd_error_file_truncated); +Index: git/binutils/testsuite/binutils-all/objdump.exp +=================================================================== +--- git.orig/binutils/testsuite/binutils-all/objdump.exp ++++ git/binutils/testsuite/binutils-all/objdump.exp +@@ -64,96 +64,168 @@ if [regexp $want $got] then { + if {![binutils_assemble $srcdir/$subdir/bintest.s tmpdir/bintest.o]} then { + return + } ++if {![binutils_assemble $srcdir/$subdir/bintest.s tmpdir/bintest2.o]} then { ++ return ++} + if [is_remote host] { + set testfile [remote_download host tmpdir/bintest.o] ++ set testfile2 [remote_download host tmpdir/bintest2.o] + } else { + set testfile tmpdir/bintest.o ++ set testfile2 tmpdir/bintest2.o ++} ++ ++if { ![istarget "alpha-*-*"] || [is_elf_format] } then { ++ remote_file host file delete tmpdir/bintest.a ++ set got [binutils_run $AR "rc tmpdir/bintest.a $testfile2"] ++ if ![string match "" $got] then { ++ fail "bintest.a" ++ remote_file host delete tmpdir/bintest.a ++ } else { ++ if [is_remote host] { ++ set testarchive [remote_download host tmpdir/bintest.a] ++ } else { ++ set testarchive tmpdir/bintest.a ++ } ++ } ++ remote_file host delete tmpdir/bintest2.o + } + + # Test objdump -f + +-set got [binutils_run $OBJDUMP "$OBJDUMPFLAGS -f $testfile"] ++proc test_objdump_f { testfile dumpfile } { ++ global OBJDUMP ++ global OBJDUMPFLAGS ++ global cpus_regex + +-set want "$testfile:\[ \]*file format.*architecture:\[ \]*${cpus_regex}.*HAS_RELOC.*HAS_SYMS" ++ set got [binutils_run $OBJDUMP "$OBJDUMPFLAGS -f $testfile"] + +-if ![regexp $want $got] then { +- fail 
"objdump -f" +-} else { +- pass "objdump -f" ++ set want "$dumpfile:\[ \]*file format.*architecture:\[ \]*${cpus_regex}.*HAS_RELOC.*HAS_SYMS" ++ ++ if ![regexp $want $got] then { ++ fail "objdump -f ($testfile, $dumpfile)" ++ } else { ++ pass "objdump -f ($testfile, $dumpfile)" ++ } ++} ++ ++test_objdump_f $testfile $testfile ++if { [ remote_file host exists $testarchive ] } then { ++ test_objdump_f $testarchive bintest2.o + } + + # Test objdump -h + +-set got [binutils_run $OBJDUMP "$OBJDUMPFLAGS -h $testfile"] ++proc test_objdump_h { testfile dumpfile } { ++ global OBJDUMP ++ global OBJDUMPFLAGS + +-set want "$testfile:\[ \]*file format.*Sections.*\[0-9\]+\[ \]+\[^ \]*(text|TEXT|P|\\\$CODE\\\$)\[^ \]*\[ \]*(\[0-9a-fA-F\]+).*\[0-9\]+\[ \]+\[^ \]*(\\.data|DATA|D_1)\[^ \]*\[ \]*(\[0-9a-fA-F\]+)" ++ set got [binutils_run $OBJDUMP "$OBJDUMPFLAGS -h $testfile"] + +-if ![regexp $want $got all text_name text_size data_name data_size] then { +- fail "objdump -h" +-} else { +- verbose "text name is $text_name size is $text_size" +- verbose "data name is $data_name size is $data_size" +- set ets 8 +- set eds 4 +- # The [ti]c4x target has the property sizeof(char)=sizeof(long)=1 +- if [istarget *c4x*-*-*] then { +- set ets 2 +- set eds 1 +- } +- # c54x section sizes are in bytes, not octets; adjust accordingly +- if [istarget *c54x*-*-*] then { +- set ets 4 +- set eds 2 +- } +- if {[expr "0x$text_size"] < $ets || [expr "0x$data_size"] < $eds} then { +- send_log "sizes too small\n" +- fail "objdump -h" ++ set want "$dumpfile:\[ \]*file format.*Sections.*\[0-9\]+\[ \]+\[^ \]*(text|TEXT|P|\\\$CODE\\\$)\[^ \]*\[ \]*(\[0-9a-fA-F\]+).*\[0-9\]+\[ \]+\[^ \]*(\\.data|DATA|D_1)\[^ \]*\[ \]*(\[0-9a-fA-F\]+)" ++ ++ if ![regexp $want $got all text_name text_size data_name data_size] then { ++ fail "objdump -h ($testfile, $dumpfile)" + } else { +- pass "objdump -h" ++ verbose "text name is $text_name size is $text_size" ++ verbose "data name is $data_name size is $data_size" ++ set ets 8 ++ set eds 4 ++ # The [ti]c4x target has the property sizeof(char)=sizeof(long)=1 ++ if [istarget *c4x*-*-*] then { ++ set ets 2 ++ set eds 1 ++ } ++ # c54x section sizes are in bytes, not octets; adjust accordingly ++ if [istarget *c54x*-*-*] then { ++ set ets 4 ++ set eds 2 ++ } ++ if {[expr "0x$text_size"] < $ets || [expr "0x$data_size"] < $eds} then { ++ send_log "sizes too small\n" ++ fail "objdump -h ($testfile, $dumpfile)" ++ } else { ++ pass "objdump -h ($testfile, $dumpfile)" ++ } + } + } + ++test_objdump_h $testfile $testfile ++if { [ remote_file host exists $testarchive ] } then { ++ test_objdump_h $testarchive bintest2.o ++} ++ + # Test objdump -t + +-set got [binutils_run $OBJDUMP "$OBJDUMPFLAGS -t $testfile"] ++proc test_objdump_t { testfile} { ++ global OBJDUMP ++ global OBJDUMPFLAGS ++ ++ set got [binutils_run $OBJDUMP "$OBJDUMPFLAGS -t $testfile"] ++ ++ if [info exists vars] then { unset vars } ++ while {[regexp "(\[a-z\]*_symbol)(.*)" $got all symbol rest]} { ++ set vars($symbol) 1 ++ set got $rest ++ } + +-if [info exists vars] then { unset vars } +-while {[regexp "(\[a-z\]*_symbol)(.*)" $got all symbol rest]} { +- set vars($symbol) 1 +- set got $rest ++ if {![info exists vars(text_symbol)] \ ++ || ![info exists vars(data_symbol)] \ ++ || ![info exists vars(common_symbol)] \ ++ || ![info exists vars(external_symbol)]} then { ++ fail "objdump -t ($testfile)" ++ } else { ++ pass "objdump -t ($testfile)" ++ } + } + +-if {![info exists vars(text_symbol)] \ +- || ![info exists vars(data_symbol)] \ +- || ![info 
exists vars(common_symbol)] \ +- || ![info exists vars(external_symbol)]} then { +- fail "objdump -t" +-} else { +- pass "objdump -t" ++test_objdump_t $testfile ++if { [ remote_file host exists $testarchive ] } then { ++ test_objdump_t $testarchive + } + + # Test objdump -r + +-set got [binutils_run $OBJDUMP "$OBJDUMPFLAGS -r $testfile"] ++proc test_objdump_r { testfile dumpfile } { ++ global OBJDUMP ++ global OBJDUMPFLAGS + +-set want "$testfile:\[ \]*file format.*RELOCATION RECORDS FOR \\\[\[^\]\]*(text|TEXT|P|\\\$CODE\\\$)\[^\]\]*\\\].*external_symbol" ++ set got [binutils_run $OBJDUMP "$OBJDUMPFLAGS -r $testfile"] + +-if [regexp $want $got] then { +- pass "objdump -r" +-} else { +- fail "objdump -r" ++ set want "$dumpfile:\[ \]*file format.*RELOCATION RECORDS FOR \\\[\[^\]\]*(text|TEXT|P|\\\$CODE\\\$)\[^\]\]*\\\].*external_symbol" ++ ++ if [regexp $want $got] then { ++ pass "objdump -r ($testfile, $dumpfile)" ++ } else { ++ fail "objdump -r ($testfile, $dumpfile)" ++ } ++} ++ ++test_objdump_r $testfile $testfile ++if { [ remote_file host exists $testarchive ] } then { ++ test_objdump_r $testarchive bintest2.o + } + + # Test objdump -s + +-set got [binutils_run $OBJDUMP "$OBJDUMPFLAGS -s $testfile"] ++proc test_objdump_s { testfile dumpfile } { ++ global OBJDUMP ++ global OBJDUMPFLAGS + +-set want "$testfile:\[ \]*file format.*Contents.*(text|TEXT|P|\\\$CODE\\\$)\[^0-9\]*\[ \]*\[0-9a-fA-F\]*\[ \]*(00000001|01000000|00000100).*Contents.*(data|DATA|D_1)\[^0-9\]*\[ \]*\[0-9a-fA-F\]*\[ \]*(00000002|02000000|00000200)" ++ set got [binutils_run $OBJDUMP "$OBJDUMPFLAGS -s $testfile"] + +-if [regexp $want $got] then { +- pass "objdump -s" +-} else { +- fail "objdump -s" ++ set want "$dumpfile:\[ \]*file format.*Contents.*(text|TEXT|P|\\\$CODE\\\$)\[^0-9\]*\[ \]*\[0-9a-fA-F\]*\[ \]*(00000001|01000000|00000100).*Contents.*(data|DATA|D_1)\[^0-9\]*\[ \]*\[0-9a-fA-F\]*\[ \]*(00000002|02000000|00000200)" ++ ++ if [regexp $want $got] then { ++ pass "objdump -s ($testfile, $dumpfile)" ++ } else { ++ fail "objdump -s ($testfile, $dumpfile)" ++ } ++} ++ ++test_objdump_s $testfile $testfile ++if { [ remote_file host exists $testarchive ] } then { ++ test_objdump_s $testarchive bintest2.o + } + + # Test objdump -s on a file that contains a compressed .debug section +Index: git/bfd/ChangeLog +=================================================================== +--- git.orig/bfd/ChangeLog ++++ git/bfd/ChangeLog +@@ -1,3 +1,9 @@ ++2017-05-30 H.J. Lu ++ ++ PR binutils/21519 ++ * bfdio.c (bfd_get_file_size): New function. ++ * bfd-in2.h: Regenerated. ++ + 2017-06-27 Alan Modra + + PR binutils/21665 +Index: git/binutils/ChangeLog +=================================================================== +--- git.orig/binutils/ChangeLog ++++ git/binutils/ChangeLog +@@ -1,3 +1,16 @@ ++2017-05-30 H.J. Lu ++ ++ PR binutils/21519 ++ * objdump.c (dump_relocs_in_section): Replace get_file_size ++ with bfd_get_file_size to get archive element size. ++ * testsuite/binutils-all/objdump.exp (test_objdump_f): New ++ proc. ++ (test_objdump_h): Likewise. ++ (test_objdump_t): Likewise. ++ (test_objdump_r): Likewise. ++ (test_objdump_s): Likewise. ++ Add objdump tests on archive. 
++ + 2017-07-01 Alan Modra + + PR binutils/21665 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils_2.27.bb b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils_2.27.bb deleted file mode 100644 index b51437bbc..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils_2.27.bb +++ /dev/null @@ -1,45 +0,0 @@ -require binutils.inc -require binutils-${PV}.inc - -DEPENDS += "flex bison zlib" - -EXTRA_OECONF += "--with-sysroot=/ \ - --enable-install-libbfd \ - --enable-install-libiberty \ - --enable-shared \ - --with-system-zlib \ - " - -EXTRA_OEMAKE_append_libc-musl = "\ - gt_cv_func_gnugettext1_libc=yes \ - gt_cv_func_gnugettext2_libc=yes \ - " -EXTRA_OECONF_class-native = "--enable-targets=all \ - --enable-64-bit-bfd \ - --enable-install-libiberty \ - --enable-install-libbfd \ - --disable-werror" - -do_install_class-native () { - autotools_do_install - - # Install the libiberty header - install -d ${D}${includedir} - install -m 644 ${S}/include/ansidecl.h ${D}${includedir} - install -m 644 ${S}/include/libiberty.h ${D}${includedir} - - # We only want libiberty, libbfd and libopcodes - rm -rf ${D}${bindir} - rm -rf ${D}${prefix}/${TARGET_SYS} - rm -rf ${D}${prefix}/lib/ldscripts - rm -rf ${D}${prefix}/share/info - rm -rf ${D}${prefix}/share/locale - rm -rf ${D}${prefix}/share/man - rmdir ${D}${prefix}/share || : - rmdir ${D}/${libdir}/gcc-lib || : - rmdir ${D}/${libdir}64/gcc-lib || : - rmdir ${D}/${libdir} || : - rmdir ${D}/${libdir}64 || : -} - -BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils_2.28.bb b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils_2.28.bb new file mode 100644 index 000000000..b51437bbc --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/binutils/binutils_2.28.bb @@ -0,0 +1,45 @@ +require binutils.inc +require binutils-${PV}.inc + +DEPENDS += "flex bison zlib" + +EXTRA_OECONF += "--with-sysroot=/ \ + --enable-install-libbfd \ + --enable-install-libiberty \ + --enable-shared \ + --with-system-zlib \ + " + +EXTRA_OEMAKE_append_libc-musl = "\ + gt_cv_func_gnugettext1_libc=yes \ + gt_cv_func_gnugettext2_libc=yes \ + " +EXTRA_OECONF_class-native = "--enable-targets=all \ + --enable-64-bit-bfd \ + --enable-install-libiberty \ + --enable-install-libbfd \ + --disable-werror" + +do_install_class-native () { + autotools_do_install + + # Install the libiberty header + install -d ${D}${includedir} + install -m 644 ${S}/include/ansidecl.h ${D}${includedir} + install -m 644 ${S}/include/libiberty.h ${D}${includedir} + + # We only want libiberty, libbfd and libopcodes + rm -rf ${D}${bindir} + rm -rf ${D}${prefix}/${TARGET_SYS} + rm -rf ${D}${prefix}/lib/ldscripts + rm -rf ${D}${prefix}/share/info + rm -rf ${D}${prefix}/share/locale + rm -rf ${D}${prefix}/share/man + rmdir ${D}${prefix}/share || : + rmdir ${D}/${libdir}/gcc-lib || : + rmdir ${D}/${libdir}64/gcc-lib || : + rmdir ${D}/${libdir} || : + rmdir ${D}/${libdir}64 || : +} + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/bison/bison/bison-2.3_m4.patch b/import-layers/yocto-poky/meta/recipes-devtools/bison/bison/bison-2.3_m4.patch deleted file mode 100644 index 348ce1d2b..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/bison/bison/bison-2.3_m4.patch +++ /dev/null @@ -1,591 +0,0 @@ -Upstream-Status: Pending - -# -# Patch managed by http://www.mn-logistik.de/unsupported/pxa250/patcher -# - ---- /dev/null -+++ 
bison-1.875/m4/inttypes-pri.m4 -@@ -0,0 +1,32 @@ -+# inttypes-pri.m4 serial 1 (gettext-0.11.4) -+dnl Copyright (C) 1997-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Bruno Haible. -+ -+# Define PRI_MACROS_BROKEN if exists and defines the PRI* -+# macros to non-string values. This is the case on AIX 4.3.3. -+ -+AC_DEFUN([gt_INTTYPES_PRI], -+[ -+ AC_REQUIRE([gt_HEADER_INTTYPES_H]) -+ if test $gt_cv_header_inttypes_h = yes; then -+ AC_CACHE_CHECK([whether the inttypes.h PRIxNN macros are broken], -+ gt_cv_inttypes_pri_broken, -+ [ -+ AC_TRY_COMPILE([#include -+#ifdef PRId32 -+char *p = PRId32; -+#endif -+], [], gt_cv_inttypes_pri_broken=no, gt_cv_inttypes_pri_broken=yes) -+ ]) -+ fi -+ if test "$gt_cv_inttypes_pri_broken" = yes; then -+ AC_DEFINE_UNQUOTED(PRI_MACROS_BROKEN, 1, -+ [Define if exists and defines unusable PRI* macros.]) -+ fi -+]) ---- /dev/null -+++ bison-1.875/m4/lcmessage.m4 -@@ -0,0 +1,32 @@ -+# lcmessage.m4 serial 3 (gettext-0.11.3) -+dnl Copyright (C) 1995-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+dnl -+dnl This file can can be used in projects which are not available under -+dnl the GNU General Public License or the GNU Library General Public -+dnl License but which still want to provide support for the GNU gettext -+dnl functionality. -+dnl Please note that the actual code of the GNU gettext library is covered -+dnl by the GNU Library General Public License, and the rest of the GNU -+dnl gettext package package is covered by the GNU General Public License. -+dnl They are *not* in the public domain. -+ -+dnl Authors: -+dnl Ulrich Drepper , 1995. -+ -+# Check whether LC_MESSAGES is available in . -+ -+AC_DEFUN([AM_LC_MESSAGES], -+[ -+ AC_CACHE_CHECK([for LC_MESSAGES], am_cv_val_LC_MESSAGES, -+ [AC_TRY_LINK([#include ], [return LC_MESSAGES], -+ am_cv_val_LC_MESSAGES=yes, am_cv_val_LC_MESSAGES=no)]) -+ if test $am_cv_val_LC_MESSAGES = yes; then -+ AC_DEFINE(HAVE_LC_MESSAGES, 1, -+ [Define if your file defines LC_MESSAGES.]) -+ fi -+]) ---- /dev/null -+++ bison-1.875/m4/uintmax_t.m4 -@@ -0,0 +1,29 @@ -+# uintmax_t.m4 serial 6 (gettext-0.11) -+dnl Copyright (C) 1997-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Paul Eggert. -+ -+AC_PREREQ(2.13) -+ -+# Define uintmax_t to `unsigned long' or `unsigned long long' -+# if does not exist. 
-+ -+AC_DEFUN([jm_AC_TYPE_UINTMAX_T], -+[ -+ AC_REQUIRE([jm_AC_HEADER_INTTYPES_H]) -+ AC_REQUIRE([jm_AC_HEADER_STDINT_H]) -+ if test $jm_ac_cv_header_inttypes_h = no && test $jm_ac_cv_header_stdint_h = no; then -+ AC_REQUIRE([jm_AC_TYPE_UNSIGNED_LONG_LONG]) -+ test $ac_cv_type_unsigned_long_long = yes \ -+ && ac_type='unsigned long long' \ -+ || ac_type='unsigned long' -+ AC_DEFINE_UNQUOTED(uintmax_t, $ac_type, -+ [Define to unsigned long or unsigned long long -+ if and don't define.]) -+ fi -+]) ---- /dev/null -+++ bison-1.875/m4/glibc21.m4 -@@ -0,0 +1,32 @@ -+# glibc21.m4 serial 2 (fileutils-4.1.3, gettext-0.10.40) -+dnl Copyright (C) 2000-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+# Test for the GNU C Library, version 2.1 or newer. -+# From Bruno Haible. -+ -+AC_DEFUN([jm_GLIBC21], -+ [ -+ AC_CACHE_CHECK(whether we are using the GNU C Library 2.1 or newer, -+ ac_cv_gnu_library_2_1, -+ [AC_EGREP_CPP([Lucky GNU user], -+ [ -+#include -+#ifdef __GNU_LIBRARY__ -+ #if (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 1) || (__GLIBC__ > 2) -+ Lucky GNU user -+ #endif -+#endif -+ ], -+ ac_cv_gnu_library_2_1=yes, -+ ac_cv_gnu_library_2_1=no) -+ ] -+ ) -+ AC_SUBST(GLIBC21) -+ GLIBC21="$ac_cv_gnu_library_2_1" -+ ] -+) ---- /dev/null -+++ bison-1.875/m4/stdint_h.m4 -@@ -0,0 +1,28 @@ -+# stdint_h.m4 serial 2 (gettext-0.11.4) -+dnl Copyright (C) 1997-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Paul Eggert. -+ -+# Define HAVE_STDINT_H_WITH_UINTMAX if exists, -+# doesn't clash with , and declares uintmax_t. -+ -+AC_DEFUN([jm_AC_HEADER_STDINT_H], -+[ -+ AC_CACHE_CHECK([for stdint.h], jm_ac_cv_header_stdint_h, -+ [AC_TRY_COMPILE( -+ [#include -+#include ], -+ [uintmax_t i = (uintmax_t) -1;], -+ jm_ac_cv_header_stdint_h=yes, -+ jm_ac_cv_header_stdint_h=no)]) -+ if test $jm_ac_cv_header_stdint_h = yes; then -+ AC_DEFINE_UNQUOTED(HAVE_STDINT_H_WITH_UINTMAX, 1, -+[Define if exists, doesn't clash with , -+ and declares uintmax_t. ]) -+ fi -+]) ---- /dev/null -+++ bison-1.875/m4/inttypes_h.m4 -@@ -0,0 +1,28 @@ -+# inttypes_h.m4 serial 4 (gettext-0.11.4) -+dnl Copyright (C) 1997-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Paul Eggert. -+ -+# Define HAVE_INTTYPES_H_WITH_UINTMAX if exists, -+# doesn't clash with , and declares uintmax_t. 
-+ -+AC_DEFUN([jm_AC_HEADER_INTTYPES_H], -+[ -+ AC_CACHE_CHECK([for inttypes.h], jm_ac_cv_header_inttypes_h, -+ [AC_TRY_COMPILE( -+ [#include -+#include ], -+ [uintmax_t i = (uintmax_t) -1;], -+ jm_ac_cv_header_inttypes_h=yes, -+ jm_ac_cv_header_inttypes_h=no)]) -+ if test $jm_ac_cv_header_inttypes_h = yes; then -+ AC_DEFINE_UNQUOTED(HAVE_INTTYPES_H_WITH_UINTMAX, 1, -+[Define if exists, doesn't clash with , -+ and declares uintmax_t. ]) -+ fi -+]) ---- /dev/null -+++ bison-1.875/m4/ulonglong.m4 -@@ -0,0 +1,23 @@ -+# ulonglong.m4 serial 2 (fileutils-4.0.32, gettext-0.10.40) -+dnl Copyright (C) 1999-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Paul Eggert. -+ -+AC_DEFUN([jm_AC_TYPE_UNSIGNED_LONG_LONG], -+[ -+ AC_CACHE_CHECK([for unsigned long long], ac_cv_type_unsigned_long_long, -+ [AC_TRY_LINK([unsigned long long ull = 1; int i = 63;], -+ [unsigned long long ullmax = (unsigned long long) -1; -+ return ull << i | ull >> i | ullmax / ull | ullmax % ull;], -+ ac_cv_type_unsigned_long_long=yes, -+ ac_cv_type_unsigned_long_long=no)]) -+ if test $ac_cv_type_unsigned_long_long = yes; then -+ AC_DEFINE(HAVE_UNSIGNED_LONG_LONG, 1, -+ [Define if you have the unsigned long long type.]) -+ fi -+]) ---- /dev/null -+++ bison-1.875/m4/codeset.m4 -@@ -0,0 +1,23 @@ -+# codeset.m4 serial AM1 (gettext-0.10.40) -+dnl Copyright (C) 2000-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Bruno Haible. -+ -+AC_DEFUN([AM_LANGINFO_CODESET], -+[ -+ AC_CACHE_CHECK([for nl_langinfo and CODESET], am_cv_langinfo_codeset, -+ [AC_TRY_LINK([#include ], -+ [char* cs = nl_langinfo(CODESET);], -+ am_cv_langinfo_codeset=yes, -+ am_cv_langinfo_codeset=no) -+ ]) -+ if test $am_cv_langinfo_codeset = yes; then -+ AC_DEFINE(HAVE_LANGINFO_CODESET, 1, -+ [Define if you have and nl_langinfo(CODESET).]) -+ fi -+]) ---- /dev/null -+++ bison-1.875/m4/intdiv0.m4 -@@ -0,0 +1,72 @@ -+# intdiv0.m4 serial 1 (gettext-0.11.3) -+dnl Copyright (C) 2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Bruno Haible. -+ -+AC_DEFUN([gt_INTDIV0], -+[ -+ AC_REQUIRE([AC_PROG_CC])dnl -+ AC_REQUIRE([AC_CANONICAL_HOST])dnl -+ -+ AC_CACHE_CHECK([whether integer division by zero raises SIGFPE], -+ gt_cv_int_divbyzero_sigfpe, -+ [ -+ AC_TRY_RUN([ -+#include -+#include -+ -+static void -+#ifdef __cplusplus -+sigfpe_handler (int sig) -+#else -+sigfpe_handler (sig) int sig; -+#endif -+{ -+ /* Exit with code 0 if SIGFPE, with code 1 if any other signal. 
*/ -+ exit (sig != SIGFPE); -+} -+ -+int x = 1; -+int y = 0; -+int z; -+int nan; -+ -+int main () -+{ -+ signal (SIGFPE, sigfpe_handler); -+/* IRIX and AIX (when "xlc -qcheck" is used) yield signal SIGTRAP. */ -+#if (defined (__sgi) || defined (_AIX)) && defined (SIGTRAP) -+ signal (SIGTRAP, sigfpe_handler); -+#endif -+/* Linux/SPARC yields signal SIGILL. */ -+#if defined (__sparc__) && defined (__linux__) -+ signal (SIGILL, sigfpe_handler); -+#endif -+ -+ z = x / y; -+ nan = y / y; -+ exit (1); -+} -+], gt_cv_int_divbyzero_sigfpe=yes, gt_cv_int_divbyzero_sigfpe=no, -+ [ -+ # Guess based on the CPU. -+ case "$host_cpu" in -+ alpha* | i[34567]86 | m68k | s390*) -+ gt_cv_int_divbyzero_sigfpe="guessing yes";; -+ *) -+ gt_cv_int_divbyzero_sigfpe="guessing no";; -+ esac -+ ]) -+ ]) -+ case "$gt_cv_int_divbyzero_sigfpe" in -+ *yes) value=1;; -+ *) value=0;; -+ esac -+ AC_DEFINE_UNQUOTED(INTDIV0_RAISES_SIGFPE, $value, -+ [Define if integer division by zero raises signal SIGFPE.]) -+]) ---- /dev/null -+++ bison-1.875/m4/glib.m4 -@@ -0,0 +1,196 @@ -+# Configure paths for GLIB -+# Owen Taylor 97-11-3 -+ -+dnl AM_PATH_GLIB([MINIMUM-VERSION, [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND [, MODULES]]]]) -+dnl Test for GLIB, and define GLIB_CFLAGS and GLIB_LIBS, if "gmodule" or -+dnl gthread is specified in MODULES, pass to glib-config -+dnl -+AC_DEFUN(AM_PATH_GLIB, -+[dnl -+dnl Get the cflags and libraries from the glib-config script -+dnl -+AC_ARG_WITH(glib-prefix,[ --with-glib-prefix=PFX Prefix where GLIB is installed (optional)], -+ glib_config_prefix="$withval", glib_config_prefix="") -+AC_ARG_WITH(glib-exec-prefix,[ --with-glib-exec-prefix=PFX Exec prefix where GLIB is installed (optional)], -+ glib_config_exec_prefix="$withval", glib_config_exec_prefix="") -+AC_ARG_ENABLE(glibtest, [ --disable-glibtest Do not try to compile and run a test GLIB program], -+ , enable_glibtest=yes) -+ -+ if test x$glib_config_exec_prefix != x ; then -+ glib_config_args="$glib_config_args --exec-prefix=$glib_config_exec_prefix" -+ if test x${GLIB_CONFIG+set} != xset ; then -+ GLIB_CONFIG=$glib_config_exec_prefix/bin/glib-config -+ fi -+ fi -+ if test x$glib_config_prefix != x ; then -+ glib_config_args="$glib_config_args --prefix=$glib_config_prefix" -+ if test x${GLIB_CONFIG+set} != xset ; then -+ GLIB_CONFIG=$glib_config_prefix/bin/glib-config -+ fi -+ fi -+ -+ for module in . $4 -+ do -+ case "$module" in -+ gmodule) -+ glib_config_args="$glib_config_args gmodule" -+ ;; -+ gthread) -+ glib_config_args="$glib_config_args gthread" -+ ;; -+ esac -+ done -+ -+ AC_PATH_PROG(GLIB_CONFIG, glib-config, no) -+ min_glib_version=ifelse([$1], ,0.99.7,$1) -+ AC_MSG_CHECKING(for GLIB - version >= $min_glib_version) -+ no_glib="" -+ if test "$GLIB_CONFIG" = "no" ; then -+ no_glib=yes -+ else -+ GLIB_CFLAGS=`$GLIB_CONFIG $glib_config_args --cflags` -+ GLIB_LIBS=`$GLIB_CONFIG $glib_config_args --libs` -+ glib_config_major_version=`$GLIB_CONFIG $glib_config_args --version | \ -+ sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\1/'` -+ glib_config_minor_version=`$GLIB_CONFIG $glib_config_args --version | \ -+ sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\2/'` -+ glib_config_micro_version=`$GLIB_CONFIG $glib_config_args --version | \ -+ sed 's/\([[0-9]]*\).\([[0-9]]*\).\([[0-9]]*\)/\3/'` -+ if test "x$enable_glibtest" = "xyes" ; then -+ ac_save_CFLAGS="$CFLAGS" -+ ac_save_LIBS="$LIBS" -+ CFLAGS="$CFLAGS $GLIB_CFLAGS" -+ LIBS="$GLIB_LIBS $LIBS" -+dnl -+dnl Now check if the installed GLIB is sufficiently new. 
(Also sanity -+dnl checks the results of glib-config to some extent -+dnl -+ rm -f conf.glibtest -+ AC_TRY_RUN([ -+#include -+#include -+#include -+ -+int -+main () -+{ -+ int major, minor, micro; -+ char *tmp_version; -+ -+ system ("touch conf.glibtest"); -+ -+ /* HP/UX 9 (%@#!) writes to sscanf strings */ -+ tmp_version = g_strdup("$min_glib_version"); -+ if (sscanf(tmp_version, "%d.%d.%d", &major, &minor, µ) != 3) { -+ printf("%s, bad version string\n", "$min_glib_version"); -+ exit(1); -+ } -+ -+ if ((glib_major_version != $glib_config_major_version) || -+ (glib_minor_version != $glib_config_minor_version) || -+ (glib_micro_version != $glib_config_micro_version)) -+ { -+ printf("\n*** 'glib-config --version' returned %d.%d.%d, but GLIB (%d.%d.%d)\n", -+ $glib_config_major_version, $glib_config_minor_version, $glib_config_micro_version, -+ glib_major_version, glib_minor_version, glib_micro_version); -+ printf ("*** was found! If glib-config was correct, then it is best\n"); -+ printf ("*** to remove the old version of GLIB. You may also be able to fix the error\n"); -+ printf("*** by modifying your LD_LIBRARY_PATH enviroment variable, or by editing\n"); -+ printf("*** /etc/ld.so.conf. Make sure you have run ldconfig if that is\n"); -+ printf("*** required on your system.\n"); -+ printf("*** If glib-config was wrong, set the environment variable GLIB_CONFIG\n"); -+ printf("*** to point to the correct copy of glib-config, and remove the file config.cache\n"); -+ printf("*** before re-running configure\n"); -+ } -+ else if ((glib_major_version != GLIB_MAJOR_VERSION) || -+ (glib_minor_version != GLIB_MINOR_VERSION) || -+ (glib_micro_version != GLIB_MICRO_VERSION)) -+ { -+ printf("*** GLIB header files (version %d.%d.%d) do not match\n", -+ GLIB_MAJOR_VERSION, GLIB_MINOR_VERSION, GLIB_MICRO_VERSION); -+ printf("*** library (version %d.%d.%d)\n", -+ glib_major_version, glib_minor_version, glib_micro_version); -+ } -+ else -+ { -+ if ((glib_major_version > major) || -+ ((glib_major_version == major) && (glib_minor_version > minor)) || -+ ((glib_major_version == major) && (glib_minor_version == minor) && (glib_micro_version >= micro))) -+ { -+ return 0; -+ } -+ else -+ { -+ printf("\n*** An old version of GLIB (%d.%d.%d) was found.\n", -+ glib_major_version, glib_minor_version, glib_micro_version); -+ printf("*** You need a version of GLIB newer than %d.%d.%d. The latest version of\n", -+ major, minor, micro); -+ printf("*** GLIB is always available from ftp://ftp.gtk.org.\n"); -+ printf("***\n"); -+ printf("*** If you have already installed a sufficiently new version, this error\n"); -+ printf("*** probably means that the wrong copy of the glib-config shell script is\n"); -+ printf("*** being found. The easiest way to fix this is to remove the old version\n"); -+ printf("*** of GLIB, but you can also set the GLIB_CONFIG environment to point to the\n"); -+ printf("*** correct copy of glib-config. (In this case, you will have to\n"); -+ printf("*** modify your LD_LIBRARY_PATH enviroment variable, or edit /etc/ld.so.conf\n"); -+ printf("*** so that the correct libraries are found at run-time))\n"); -+ } -+ } -+ return 1; -+} -+],, no_glib=yes,[echo $ac_n "cross compiling; assumed OK... 
$ac_c"]) -+ CFLAGS="$ac_save_CFLAGS" -+ LIBS="$ac_save_LIBS" -+ fi -+ fi -+ if test "x$no_glib" = x ; then -+ AC_MSG_RESULT(yes) -+ ifelse([$2], , :, [$2]) -+ else -+ AC_MSG_RESULT(no) -+ if test "$GLIB_CONFIG" = "no" ; then -+ echo "*** The glib-config script installed by GLIB could not be found" -+ echo "*** If GLIB was installed in PREFIX, make sure PREFIX/bin is in" -+ echo "*** your path, or set the GLIB_CONFIG environment variable to the" -+ echo "*** full path to glib-config." -+ else -+ if test -f conf.glibtest ; then -+ : -+ else -+ echo "*** Could not run GLIB test program, checking why..." -+ CFLAGS="$CFLAGS $GLIB_CFLAGS" -+ LIBS="$LIBS $GLIB_LIBS" -+ AC_TRY_LINK([ -+#include -+#include -+], [ return ((glib_major_version) || (glib_minor_version) || (glib_micro_version)); ], -+ [ echo "*** The test program compiled, but did not run. This usually means" -+ echo "*** that the run-time linker is not finding GLIB or finding the wrong" -+ echo "*** version of GLIB. If it is not finding GLIB, you'll need to set your" -+ echo "*** LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf to point" -+ echo "*** to the installed location Also, make sure you have run ldconfig if that" -+ echo "*** is required on your system" -+ echo "***" -+ echo "*** If you have an old version installed, it is best to remove it, although" -+ echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH" -+ echo "***" -+ echo "*** If you have a RedHat 5.0 system, you should remove the GTK package that" -+ echo "*** came with the system with the command" -+ echo "***" -+ echo "*** rpm --erase --nodeps gtk gtk-devel" ], -+ [ echo "*** The test program failed to compile or link. See the file config.log for the" -+ echo "*** exact error that occured. This usually means GLIB was incorrectly installed" -+ echo "*** or that you have moved GLIB since it was installed. In the latter case, you" -+ echo "*** may want to edit the glib-config script: $GLIB_CONFIG" ]) -+ CFLAGS="$ac_save_CFLAGS" -+ LIBS="$ac_save_LIBS" -+ fi -+ fi -+ GLIB_CFLAGS="" -+ GLIB_LIBS="" -+ ifelse([$3], , :, [$3]) -+ fi -+ AC_SUBST(GLIB_CFLAGS) -+ AC_SUBST(GLIB_LIBS) -+ rm -f conf.glibtest -+]) ---- /dev/null -+++ bison-1.875/m4/inttypes.m4 -@@ -0,0 +1,27 @@ -+# inttypes.m4 serial 1 (gettext-0.11.4) -+dnl Copyright (C) 1997-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Paul Eggert. -+ -+# Define HAVE_INTTYPES_H if exists and doesn't clash with -+# . -+ -+AC_DEFUN([gt_HEADER_INTTYPES_H], -+[ -+ AC_CACHE_CHECK([for inttypes.h], gt_cv_header_inttypes_h, -+ [ -+ AC_TRY_COMPILE( -+ [#include -+#include ], -+ [], gt_cv_header_inttypes_h=yes, gt_cv_header_inttypes_h=no) -+ ]) -+ if test $gt_cv_header_inttypes_h = yes; then -+ AC_DEFINE_UNQUOTED(HAVE_INTTYPES_H, 1, -+ [Define if exists and doesn't clash with .]) -+ fi -+]) ---- /dev/null -+++ bison-1.875/m4/isc-posix.m4 -@@ -0,0 +1,26 @@ -+# isc-posix.m4 serial 2 (gettext-0.11.2) -+dnl Copyright (C) 1995-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. 
As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+# This file is not needed with autoconf-2.53 and newer. Remove it in 2005. -+ -+# This test replaces the one in autoconf. -+# Currently this macro should have the same name as the autoconf macro -+# because gettext's gettext.m4 (distributed in the automake package) -+# still uses it. Otherwise, the use in gettext.m4 makes autoheader -+# give these diagnostics: -+# configure.in:556: AC_TRY_COMPILE was called before AC_ISC_POSIX -+# configure.in:556: AC_TRY_RUN was called before AC_ISC_POSIX -+ -+undefine([AC_ISC_POSIX]) -+ -+AC_DEFUN([AC_ISC_POSIX], -+ [ -+ dnl This test replaces the obsolescent AC_ISC_POSIX kludge. -+ AC_CHECK_LIB(cposix, strerror, [LIBS="$LIBS -lcposix"]) -+ ] -+) - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/bison/bison_2.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/bison/bison_2.3.bb deleted file mode 100644 index 182e8802b..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/bison/bison_2.3.bb +++ /dev/null @@ -1,26 +0,0 @@ -SUMMARY = "GNU Project parser generator (yacc replacement)" -DESCRIPTION = "Bison is a general-purpose parser generator that converts an annotated context-free grammar into \ -an LALR(1) or GLR parser for that grammar. Bison is upward compatible with Yacc: all properly-written Yacc \ -grammars ought to work with Bison with no change. Anyone familiar with Yacc should be able to use Bison with \ -little trouble." -HOMEPAGE = "http://www.gnu.org/software/bison/" -LICENSE = "GPLv2" -LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a" -SECTION = "devel" -DEPENDS = "bison-native flex-native" - -PR = "r1" - -SRC_URI = "${GNU_MIRROR}/bison/bison-${PV}.tar.gz \ - file://bison-2.3_m4.patch" - -SRC_URI[md5sum] = "22327efdd5080e2b1acb6e560a04b43a" -SRC_URI[sha256sum] = "52f78aa4761a74ceb7fdf770f3554dd84308c3b93c4255e3a5c17558ecda293e" - -inherit autotools gettext texinfo -acpaths = "-I ${S}/m4" - -do_configure_prepend () { - rm -f ${S}/m4/*gl.m4 - cp ${STAGING_DATADIR_NATIVE}/gettext/po/Makefile.in.in ${S}/runtime-po/ -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools/fix-parallel.patch b/import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools/fix-parallel.patch deleted file mode 100644 index bbc53c6fe..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools/fix-parallel.patch +++ /dev/null @@ -1,32 +0,0 @@ -From 373eb51328b5e10529763cad441210e6b0efb24e Mon Sep 17 00:00:00 2001 -From: Robert Yang -Date: Wed, 11 Feb 2015 22:08:41 -0800 -Subject: [PATCH] Makefile: fix for parallel build - -Fixed: -mkfs.c:300:46: error: 'BTRFS_BUILD_VERSION' undeclared (first use in this function) - fprintf(stderr, "mkfs.btrfs, part of %s\n", BTRFS_BUILD_VERSION); - -Upstream-Status: Pending - -Signed-off-by: Robert Yang ---- - Makefile.in | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/Makefile.in b/Makefile.in -index 860a390..8a6fbd7 100644 ---- a/Makefile.in -+++ b/Makefile.in -@@ -142,7 +142,7 @@ else - check_echo = true - endif - --%.o.d: %.c -+%.o.d: %.c version.h - $(Q)$(CC) -MM -MG -MF $@ -MT $(@:.o.d=.o) -MT $(@:.o.d=.static.o) -MT $@ $(CFLAGS) $< - - .c.o: --- -1.7.9.5 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools_4.7.1.bb 
b/import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools_4.7.1.bb deleted file mode 100644 index becf09396..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools_4.7.1.bb +++ /dev/null @@ -1,33 +0,0 @@ -SUMMARY = "Checksumming Copy on Write Filesystem utilities" -DESCRIPTION = "Btrfs is a new copy on write filesystem for Linux aimed at \ -implementing advanced features while focusing on fault tolerance, repair and \ -easy administration. \ -This package contains utilities (mkfs, fsck, btrfsctl) used to work with \ -btrfs and an utility (btrfs-convert) to make a btrfs filesystem from an ext3." - -HOMEPAGE = "https://btrfs.wiki.kernel.org" - -LICENSE = "GPLv2" -LIC_FILES_CHKSUM = "file://COPYING;md5=fcb02dc552a041dee27e4b85c7396067" -SECTION = "base" -DEPENDS = "util-linux attr e2fsprogs lzo acl" -RDEPENDS_${PN} = "libgcc" - -SRCREV = "6819fbcdcab63c2d95da1a2997e37be9a71ca533" -SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/kdave/btrfs-progs.git \ - file://fix-parallel.patch \ -" - -inherit autotools-brokensep pkgconfig - -EXTRA_OECONF += "--disable-documentation" -EXTRA_OECONF_append_libc-musl = " --disable-backtrace " - - -do_configure_prepend() { - sh autogen.sh -} - -S = "${WORKDIR}/git" - -BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools_4.9.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools_4.9.1.bb new file mode 100644 index 000000000..a5e9e2278 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/btrfs-tools/btrfs-tools_4.9.1.bb @@ -0,0 +1,33 @@ +SUMMARY = "Checksumming Copy on Write Filesystem utilities" +DESCRIPTION = "Btrfs is a new copy on write filesystem for Linux aimed at \ +implementing advanced features while focusing on fault tolerance, repair and \ +easy administration. \ +This package contains utilities (mkfs, fsck, btrfsctl) used to work with \ +btrfs and an utility (btrfs-convert) to make a btrfs filesystem from an ext3." + +HOMEPAGE = "https://btrfs.wiki.kernel.org" + +LICENSE = "GPLv2" +LIC_FILES_CHKSUM = "file://COPYING;md5=fcb02dc552a041dee27e4b85c7396067" +SECTION = "base" +DEPENDS = "util-linux attr e2fsprogs lzo acl" +DEPENDS_append_class-target = " udev" +RDEPENDS_${PN} = "libgcc" + +SRCREV = "96485c34ac0329fb0073476f16d2083c64701f29" +SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/kdave/btrfs-progs.git" + +inherit autotools-brokensep pkgconfig manpages + +PACKAGECONFIG[manpages] = "--enable-documentation, --disable-documentation, asciidoc-native xmlto-native" +EXTRA_OECONF_append_libc-musl = " --disable-backtrace " + +do_configure_prepend() { + # Upstream doesn't ship this and autoreconf won't install it as automake isn't used. + mkdir -p ${S}/config + cp -f $(automake --print-libdir)/install-sh ${S}/config/ +} + +S = "${WORKDIR}/git" + +BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/build-compare/build-compare_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/build-compare/build-compare_git.bb index fa9c6b45d..c60c16410 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/build-compare/build-compare_git.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/build-compare/build-compare_git.bb @@ -17,11 +17,15 @@ SRC_URI = "git://github.com/openSUSE/build-compare.git \ file://functions.sh-improve-deb-and-ipk-checking.patch \ " +# Date matches entry in build-compare.changes and date of SRCREV. 
+# SRCREV = "c5352c054c6ef15735da31b76d6d88620f4aff0a" +PE = "1" +PV = "2015.02.10+git${SRCPV}" S = "${WORKDIR}/git" -BBCLASSEXTEND += "native nativesdk" +BBCLASSEXTEND = "native nativesdk" do_install() { install -d ${D}/${bindir} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache.inc b/import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache.inc index 69aa64e5e..656632893 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache.inc @@ -9,9 +9,7 @@ LICENSE = "GPLv3+" DEPENDS = "zlib" -SRC_URI = "git://git.samba.org/ccache.git" - -S = "${WORKDIR}/git" +SRC_URI = "https://download.samba.org/pub/${BPN}/${BP}.tar.xz" inherit autotools diff --git a/import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache_3.2.5.bb b/import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache_3.2.5.bb deleted file mode 100644 index afd1b0ea1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache_3.2.5.bb +++ /dev/null @@ -1,11 +0,0 @@ -require ccache.inc - -LICENSE = "GPLv3+" -LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=b8a4fa173ed91c1a5204ea4f9c9eadc3" - -SRCREV = "424d3ae1fb73444c6c38bf189f8fc048f66d6499" - -SRC_URI += " \ - file://0002-dev.mk.in-fix-file-name-too-long.patch \ - file://Revert-Create-man-page-in-the-make-install-from-git-.patch \ -" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache_3.3.4.bb b/import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache_3.3.4.bb new file mode 100644 index 000000000..1e535b192 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/ccache/ccache_3.3.4.bb @@ -0,0 +1,12 @@ +require ccache.inc + +LICENSE = "GPLv3+" +LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7fe21f9470f2305e95e7d8a632255079" + +SRC_URI[md5sum] = "95ab3c56284129cc2a32460c23069516" +SRC_URI[sha256sum] = "24f15bf389e38c41548c9c259532187774ec0cb9686c3497bbb75504c8dc404f" + +SRC_URI += " \ + file://0002-dev.mk.in-fix-file-name-too-long.patch \ + file://Revert-Create-man-page-in-the-make-install-from-git-.patch \ +" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb b/import-layers/yocto-poky/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb index ff4d5ac44..0e0be6271 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/cdrtools/cdrtools-native_3.01.bb @@ -14,6 +14,9 @@ SRC_URI[sha256sum] = "ed282eb6276c4154ce6a0b5dee0bdb81940d0cbbfc7d03f769c4735ef5 EXTRA_OEMAKE = "-e MAKEFLAGS=" +# Stop failures when 'cc' can't be found +export ac_cv_prog_CC = "${CC}" + inherit native PV = "3.01a31+really3.01" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/chrpath/chrpath_0.16.bb b/import-layers/yocto-poky/meta/recipes-devtools/chrpath/chrpath_0.16.bb index e115eb5c9..b61eef9c8 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/chrpath/chrpath_0.16.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/chrpath/chrpath_0.16.bb @@ -7,7 +7,7 @@ BUGTRACKER = "http://alioth.debian.org/tracker/?atid=412807&group_id=31052" LICENSE = "GPLv2" LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552" -SRC_URI = "http://alioth.debian.org/frs/download.php/file/3979/chrpath-0.16.tar.gz \ +SRC_URI = "https://alioth.debian.org/frs/download.php/file/3979/chrpath-0.16.tar.gz \ file://standarddoc.patch" SRC_URI[md5sum] = "2bf8d1d1ee345fc8a7915576f5649982" diff --git 
a/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake-native_3.6.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake-native_3.6.1.bb deleted file mode 100644 index aec0d64de..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake-native_3.6.1.bb +++ /dev/null @@ -1,36 +0,0 @@ -require cmake.inc -inherit native - -DEPENDS += "bzip2-replacement-native expat-native xz-native zlib-native curl-native" - -SRC_URI += "\ - file://cmlibarchive-disable-ext2fs.patch \ -" - -B = "${WORKDIR}/build" -do_configure[cleandirs] = "${B}" - -# Disable ccmake since we don't depend on ncurses -CMAKE_EXTRACONF = "\ - -DCMAKE_LIBRARY_PATH=${STAGING_LIBDIR_NATIVE} \ - -DBUILD_CursesDialog=0 \ - -DCMAKE_USE_SYSTEM_LIBRARIES=1 \ - -DCMAKE_USE_SYSTEM_LIBRARY_JSONCPP=0 \ - -DCMAKE_USE_SYSTEM_LIBRARY_LIBARCHIVE=0 \ - -DENABLE_ACL=0 -DHAVE_ACL_LIBACL_H=0 \ - -DHAVE_SYS_ACL_H=0 \ -" - -do_configure () { - ${S}/configure --verbose --prefix=${prefix} -- ${CMAKE_EXTRACONF} -} - -do_compile() { - oe_runmake -} - -do_install() { - oe_runmake 'DESTDIR=${D}' install -} - -do_compile[progress] = "percent" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake-native_3.7.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake-native_3.7.2.bb new file mode 100644 index 000000000..7ad4345ef --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake-native_3.7.2.bb @@ -0,0 +1,37 @@ +require cmake.inc +inherit native + +DEPENDS += "bzip2-replacement-native expat-native xz-native zlib-native curl-native" + +SRC_URI += "\ + file://cmlibarchive-disable-ext2fs.patch \ +" + +B = "${WORKDIR}/build" +do_configure[cleandirs] = "${B}" + +# Disable ccmake since we don't depend on ncurses +CMAKE_EXTRACONF = "\ + -DCMAKE_LIBRARY_PATH=${STAGING_LIBDIR_NATIVE} \ + -DBUILD_CursesDialog=0 \ + -DCMAKE_USE_SYSTEM_LIBRARIES=1 \ + -DCMAKE_USE_SYSTEM_LIBRARY_JSONCPP=0 \ + -DCMAKE_USE_SYSTEM_LIBRARY_LIBARCHIVE=0 \ + -DCMAKE_USE_SYSTEM_LIBRARY_LIBUV=0 \ + -DENABLE_ACL=0 -DHAVE_ACL_LIBACL_H=0 \ + -DHAVE_SYS_ACL_H=0 \ +" + +do_configure () { + ${S}/configure --verbose --prefix=${prefix} -- ${CMAKE_EXTRACONF} +} + +do_compile() { + oe_runmake +} + +do_install() { + oe_runmake 'DESTDIR=${D}' install +} + +do_compile[progress] = "percent" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake.inc b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake.inc index 4fcb0b1ed..6c8b36d18 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake.inc @@ -6,18 +6,20 @@ HOMEPAGE = "http://www.cmake.org/" BUGTRACKER = "http://public.kitware.com/Bug/my_view_page.php" SECTION = "console/utils" LICENSE = "BSD" -LIC_FILES_CHKSUM = "file://Copyright.txt;md5=052f86c15bbde68af55c7f7b340ab639 \ - file://Source/cmake.h;beginline=1;endline=10;md5=341736dae83c9e344b53eeb1bc7d7bc2" +LIC_FILES_CHKSUM = "file://Copyright.txt;md5=7a64bc564202bf7401d9a8ef33c9564d \ + file://Source/cmake.h;beginline=1;endline=3;md5=4494dee184212fc89c469c3acd555a14" -CMAKE_MAJOR_VERSION = "${@'.'.join(d.getVar('PV', True).split('.')[0:2])}" +CMAKE_MAJOR_VERSION = "${@'.'.join(d.getVar('PV').split('.')[0:2])}" SRC_URI = "https://cmake.org/files/v${CMAKE_MAJOR_VERSION}/cmake-${PV}.tar.gz \ file://support-oe-qt4-tools-names.patch \ file://qt4-fail-silent.patch \ + file://avoid-gcc-warnings-with-Wstrict-prototypes.patch \ + file://0001-KWIML-tests-Remove-format-security-from-flags.patch \ " -SRC_URI[md5sum] = 
"d6dd661380adacdb12f41b926ec99545" -SRC_URI[sha256sum] = "28ee98ec40427d41a45673847db7a905b59ce9243bb866eaf59dce0f58aaef11" +SRC_URI[md5sum] = "79bd7e65cd81ea3aa2619484ad6ff25a" +SRC_URI[sha256sum] = "dc1246c4e6d168ea4d6e042cfba577c1acd65feea27e56f5ff37df920c30cae0" UPSTREAM_CHECK_REGEX = "cmake-(?P\d+(\.\d+)+)\.tar" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake/0001-KWIML-tests-Remove-format-security-from-flags.patch b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake/0001-KWIML-tests-Remove-format-security-from-flags.patch new file mode 100644 index 000000000..190133ba5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake/0001-KWIML-tests-Remove-format-security-from-flags.patch @@ -0,0 +1,33 @@ +From 0941395b146804abcd87004589ff6e7a2953412d Mon Sep 17 00:00:00 2001 +From: Jussi Kukkonen +Date: Thu, 16 Mar 2017 14:39:04 +0200 +Subject: [PATCH] KWIML tests: Remove format-security from flags + +For the tests where "format" is removed from flags, "format-security" +should be removed as well. Otherwise building cmake with +"-Wformat -Wformat-security" fails: + +| cc1: error: -Wformat-security ignored without -Wformat [-Werror=format-security] + +Upstream-Status: Backport [part of commit f77420cfc9] +Signed-off-by: Jussi Kukkonen +--- + Utilities/KWIML/test/CMakeLists.txt | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/Utilities/KWIML/test/CMakeLists.txt b/Utilities/KWIML/test/CMakeLists.txt +index 4f6f37b..1bf93bb 100644 +--- a/Utilities/KWIML/test/CMakeLists.txt ++++ b/Utilities/KWIML/test/CMakeLists.txt +@@ -10,7 +10,7 @@ endif() + # Suppress printf/scanf format warnings; we test if the sizes match. + foreach(lang C CXX) + if(KWIML_LANGUAGE_${lang} AND CMAKE_${lang}_COMPILER_ID STREQUAL "GNU") +- set(CMAKE_${lang}_FLAGS "${CMAKE_${lang}_FLAGS} -Wno-format") ++ set(CMAKE_${lang}_FLAGS "${CMAKE_${lang}_FLAGS} -Wno-format -Wno-format-security") + endif() + endforeach() + +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake/avoid-gcc-warnings-with-Wstrict-prototypes.patch b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake/avoid-gcc-warnings-with-Wstrict-prototypes.patch new file mode 100644 index 000000000..8b8d4802e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake/avoid-gcc-warnings-with-Wstrict-prototypes.patch @@ -0,0 +1,42 @@ +From 4bc17345c01ea467099e28c7df30c23ace9e7811 Mon Sep 17 00:00:00 2001 +From: Andre McCurdy +Date: Fri, 14 Oct 2016 16:26:58 -0700 +Subject: [PATCH] CheckFunctionExists.c: avoid gcc warnings with + -Wstrict-prototypes + +Avoid warnings (and therefore build failures etc) if a user happens +to add -Wstrict-prototypes to CFLAGS. 
+ + | $ gcc --version + | gcc (Ubuntu 4.8.4-2ubuntu1~14.04.3) 4.8.4 + | + | $ gcc -Wstrict-prototypes -Werror -DCHECK_FUNCTION_EXISTS=pthread_create -o foo.o -c Modules/CheckFunctionExists.c + | Modules/CheckFunctionExists.c:7:3: error: function declaration isn't a prototype [-Werror=strict-prototypes] + | CHECK_FUNCTION_EXISTS(); + | ^ + | cc1: all warnings being treated as errors + | + +Upstream-Status: Pending + +Signed-off-by: Andre McCurdy +--- + Modules/CheckFunctionExists.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/Modules/CheckFunctionExists.c b/Modules/CheckFunctionExists.c +index 2304000..224e340 100644 +--- a/Modules/CheckFunctionExists.c ++++ b/Modules/CheckFunctionExists.c +@@ -4,7 +4,7 @@ + extern "C" + #endif + char +- CHECK_FUNCTION_EXISTS(); ++ CHECK_FUNCTION_EXISTS(void); + #ifdef __CLASSIC_C__ + int main() + { +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake_3.6.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake_3.6.1.bb deleted file mode 100644 index 850d6de23..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake_3.6.1.bb +++ /dev/null @@ -1,48 +0,0 @@ -require cmake.inc - -inherit cmake - -DEPENDS += "curl expat zlib libarchive xz ncurses bzip2" - -SRC_URI_append_class-nativesdk = " \ - file://OEToolchainConfig.cmake \ - file://environment.d-cmake.sh" - -# Strip ${prefix} from ${docdir}, set result into docdir_stripped -python () { - prefix=d.getVar("prefix", True) - docdir=d.getVar("docdir", True) - - if not docdir.startswith(prefix): - bb.fatal('docdir must contain prefix as its prefix') - - docdir_stripped = docdir[len(prefix):] - if len(docdir_stripped) > 0 and docdir_stripped[0] == '/': - docdir_stripped = docdir_stripped[1:] - - d.setVar("docdir_stripped", docdir_stripped) -} - -EXTRA_OECMAKE=" \ - -DCMAKE_DOC_DIR=${docdir_stripped}/cmake-${CMAKE_MAJOR_VERSION} \ - -DCMAKE_USE_SYSTEM_LIBRARIES=1 \ - -DCMAKE_USE_SYSTEM_LIBRARY_JSONCPP=0 \ - -DKWSYS_CHAR_IS_SIGNED=1 \ - -DBUILD_CursesDialog=0 \ - ${@bb.utils.contains('DISTRO_FEATURES', 'largefile', '-DKWSYS_LFS_WORKS=1', '-DKWSYS_LFS_DISABLE=1', d)} \ -" - -do_install_append_class-nativesdk() { - mkdir -p ${D}${datadir}/cmake - install -m 644 ${WORKDIR}/OEToolchainConfig.cmake ${D}${datadir}/cmake/ - - mkdir -p ${D}${SDKPATHNATIVE}/environment-setup.d - install -m 644 ${WORKDIR}/environment.d-cmake.sh ${D}${SDKPATHNATIVE}/environment-setup.d/cmake.sh -} - -FILES_${PN}_append_class-nativesdk = " ${SDKPATHNATIVE}" - -FILES_${PN} += "${datadir}/cmake-${CMAKE_MAJOR_VERSION}" -FILES_${PN}-doc += "${docdir}/cmake-${CMAKE_MAJOR_VERSION}" - -BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake_3.7.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake_3.7.2.bb new file mode 100644 index 000000000..f566a48cf --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/cmake/cmake_3.7.2.bb @@ -0,0 +1,49 @@ +require cmake.inc + +inherit cmake + +DEPENDS += "curl expat zlib libarchive xz ncurses bzip2" + +SRC_URI_append_class-nativesdk = " \ + file://OEToolchainConfig.cmake \ + file://environment.d-cmake.sh" + +# Strip ${prefix} from ${docdir}, set result into docdir_stripped +python () { + prefix=d.getVar("prefix") + docdir=d.getVar("docdir") + + if not docdir.startswith(prefix): + bb.fatal('docdir must contain prefix as its prefix') + + docdir_stripped = docdir[len(prefix):] + if len(docdir_stripped) > 0 and docdir_stripped[0] == '/': + docdir_stripped = 
docdir_stripped[1:] + + d.setVar("docdir_stripped", docdir_stripped) +} + +EXTRA_OECMAKE=" \ + -DCMAKE_DOC_DIR=${docdir_stripped}/cmake-${CMAKE_MAJOR_VERSION} \ + -DCMAKE_USE_SYSTEM_LIBRARIES=1 \ + -DCMAKE_USE_SYSTEM_LIBRARY_JSONCPP=0 \ + -DCMAKE_USE_SYSTEM_LIBRARY_LIBUV=0 \ + -DKWSYS_CHAR_IS_SIGNED=1 \ + -DBUILD_CursesDialog=0 \ + -DKWSYS_LFS_WORKS=1 \ +" + +do_install_append_class-nativesdk() { + mkdir -p ${D}${datadir}/cmake + install -m 644 ${WORKDIR}/OEToolchainConfig.cmake ${D}${datadir}/cmake/ + + mkdir -p ${D}${SDKPATHNATIVE}/environment-setup.d + install -m 644 ${WORKDIR}/environment.d-cmake.sh ${D}${SDKPATHNATIVE}/environment-setup.d/cmake.sh +} + +FILES_${PN}_append_class-nativesdk = " ${SDKPATHNATIVE}" + +FILES_${PN} += "${datadir}/cmake-${CMAKE_MAJOR_VERSION}" +FILES_${PN}-doc += "${docdir}/cmake-${CMAKE_MAJOR_VERSION}" + +BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c/0001-Correctly-install-the-shared-library.patch b/import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c/0001-Correctly-install-the-shared-library.patch new file mode 100644 index 000000000..01271246a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c/0001-Correctly-install-the-shared-library.patch @@ -0,0 +1,28 @@ +From fef835e1fdedc72c97d9c3e5704302e56e1bdef0 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Mon, 2 Jan 2017 17:23:59 +0200 +Subject: [PATCH] Correctly install the shared library + +Upstream-Status: Pending +Signed-off-by: Alexander Kanavin +--- + src/CMakeLists.txt | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) + +diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt +index f3635e8..46dc037 100644 +--- a/src/CMakeLists.txt ++++ b/src/CMakeLists.txt +@@ -110,7 +110,8 @@ CONFIGURE_FILE("deltarpms.h.in" "${CMAKE_CURRENT_SOURCE_DIR}/deltarpms.h" @ONLY) + IF (CMAKE_SIZEOF_VOID_P MATCHES "8") + SET (LIB_SUFFIX "64") + ENDIF (CMAKE_SIZEOF_VOID_P MATCHES "8") +-SET (LIB_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}") ++#SET (LIB_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}") ++SET (LIB_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}") + + INSTALL(FILES ${headers} DESTINATION "include/createrepo_c") + INSTALL(FILES "createrepo_c.pc" DESTINATION "${LIB_INSTALL_DIR}/pkgconfig") +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c/0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch b/import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c/0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch new file mode 100644 index 000000000..953107093 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c/0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch @@ -0,0 +1,27 @@ +From bef487b5c2515062c5dd73c21082ce42f69aa717 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:31:02 +0200 +Subject: [PATCH] Do not set PYTHON_INSTALL_DIR by running python + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + src/python/CMakeLists.txt | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/python/CMakeLists.txt b/src/python/CMakeLists.txt +index 9f1ac64..eae0cbb 100644 +--- a/src/python/CMakeLists.txt ++++ b/src/python/CMakeLists.txt +@@ -19,7 +19,7 @@ else() + FIND_PACKAGE(PythonInterp 3.0 REQUIRED) + endif() + +-EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} -c "from sys import 
stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) ++#EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) + INCLUDE_DIRECTORIES (${PYTHON_INCLUDE_PATH}) + + MESSAGE(STATUS "Python install dir is ${PYTHON_INSTALL_DIR}") +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c_git.bb new file mode 100644 index 000000000..31761368b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/createrepo-c/createrepo-c_git.bb @@ -0,0 +1,31 @@ +DESCRIPTION = "C implementation of createrepo." +HOMEPAGE = "https://github.com/rpm-software-management/createrepo_c/wiki" + +LICENSE = "GPLv2" +LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263" + +SRC_URI = "git://github.com/rpm-software-management/createrepo_c \ + file://0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch \ + file://0001-Correctly-install-the-shared-library.patch \ + " + +PV = "0.10.0+git${SRCPV}" +SRCREV = "748891ff8ee524c2d37926c608cd2794f88013f3" + +S = "${WORKDIR}/git" + +DEPENDS = "expat curl glib-2.0 libxml2 openssl bzip2 zlib file sqlite3 xz rpm" +DEPENDS_append_class-native = " file-replacement-native" + +inherit cmake pkgconfig bash-completion distutils3-base + +EXTRA_OECMAKE = " -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DPYTHON_DESIRED=3" + +BBCLASSEXTEND = "native" + +# Direct createrepo to read rpm configuration from our sysroot, not the one it was compiled in +do_install_append_class-native() { + create_wrapper ${D}/${bindir}/createrepo_c \ + RPM_CONFIGDIR=${STAGING_LIBDIR_NATIVE}/rpm +} + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/cve-check-tool_5.6.4.bb b/import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/cve-check-tool_5.6.4.bb index 1baadea8e..1f906ee0a 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/cve-check-tool_5.6.4.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/cve-check-tool_5.6.4.bb @@ -9,18 +9,22 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=e8c1458438ead3c34974bc0be3a03ed6" SRC_URI = "https://github.com/ikeydoherty/${BPN}/releases/download/v${PV}/${BP}.tar.xz \ file://check-for-malloc_trim-before-using-it.patch \ file://0001-print-progress-in-percent-when-downloading-CVE-db.patch \ + file://0001-curl-allow-overriding-default-CA-certificate-file.patch \ + file://0001-update-Compare-computed-vs-expected-sha256-digit-str.patch \ " SRC_URI[md5sum] = "c5f4247140fc9be3bf41491d31a34155" SRC_URI[sha256sum] = "b8f283be718af8d31232ac1bfc10a0378fb958aaaa49af39168f8acf501e6a5b" +UPSTREAM_CHECK_URI = "https://github.com/ikeydoherty/cve-check-tool/releases" + DEPENDS = "libcheck glib-2.0 json-glib curl libxml2 sqlite3 openssl ca-certificates" RDEPENDS_${PN} = "ca-certificates" inherit pkgconfig autotools -EXTRA_OECONF = "--disable-coverage" +EXTRA_OECONF = "--disable-coverage --enable-relative-plugins" CFLAGS_append = " -Wno-error=pedantic" do_populate_cve_db() { @@ -37,7 +41,8 @@ do_populate_cve_db() { [ -z "${cve_file}" ] && cve_file="${TMPDIR}/cve_check" bbdebug 2 "Updating cve-check-tool database located in $cve_dir" - if cve-check-update -d "$cve_dir" ; then + # --cacert works around curl-native not finding the CA bundle + if cve-check-update --cacert 
${sysconfdir}/ssl/certs/ca-certificates.crt -d "$cve_dir" ; then printf "CVE database was updated on %s UTC\n\n" "$(LANG=C date --utc +'%F %T')" > "$cve_file" else bbwarn "Error in executing cve-check-update" @@ -48,6 +53,7 @@ do_populate_cve_db() { } addtask populate_cve_db after do_populate_sysroot +do_populate_cve_db[depends] = "cve-check-tool-native:do_populate_sysroot" do_populate_cve_db[nostamp] = "1" do_populate_cve_db[progress] = "percent" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/files/0001-curl-allow-overriding-default-CA-certificate-file.patch b/import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/files/0001-curl-allow-overriding-default-CA-certificate-file.patch new file mode 100644 index 000000000..3d8ebd1bd --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/files/0001-curl-allow-overriding-default-CA-certificate-file.patch @@ -0,0 +1,215 @@ +From 825a9969dea052b02ba868bdf39e676349f10dce Mon Sep 17 00:00:00 2001 +From: Jussi Kukkonen +Date: Thu, 9 Feb 2017 14:51:28 +0200 +Subject: [PATCH] curl: allow overriding default CA certificate file + +Similar to curl, --cacert can now be used in cve-check-tool and +cve-check-update to override the default CA certificate file. Useful +in cases where the system default is unsuitable (for example, +out-dated) or broken (as in OE's current native libcurl, which embeds +a path string from one build host and then uses it on another although +the right path may have become something different). + +Upstream-Status: Submitted [https://github.com/ikeydoherty/cve-check-tool/pull/45] + +Signed-off-by: Patrick Ohly + + +Took Patrick Ohlys original patch from meta-security-isafw, rebased +on top of other patches. + +Signed-off-by: Jussi Kukkonen +--- + src/library/cve-check-tool.h | 1 + + src/library/fetch.c | 10 +++++++++- + src/library/fetch.h | 3 ++- + src/main.c | 5 ++++- + src/update-main.c | 4 +++- + src/update.c | 12 +++++++----- + src/update.h | 2 +- + 7 files changed, 27 insertions(+), 10 deletions(-) + +diff --git a/src/library/cve-check-tool.h b/src/library/cve-check-tool.h +index e4bb5b1..f89eade 100644 +--- a/src/library/cve-check-tool.h ++++ b/src/library/cve-check-tool.h +@@ -43,6 +43,7 @@ typedef struct CveCheckTool { + bool bugs; /**output_file = output_file; ++ self->cacert_file = cacert_file; + + if (!csv_mode && self->output_file) { + quiet = false; +@@ -530,7 +533,7 @@ int main(int argc, char **argv) + if (status) { + fprintf(stderr, "Update of db forced\n"); + cve_db_unlock(); +- if (!update_db(quiet, db_path->str)) { ++ if (!update_db(quiet, db_path->str, self->cacert_file)) { + fprintf(stderr, "DB update failure\n"); + goto cleanup; + } +diff --git a/src/update-main.c b/src/update-main.c +index 2379cfa..c52d9d0 100644 +--- a/src/update-main.c ++++ b/src/update-main.c +@@ -43,11 +43,13 @@ the Free Software Foundation; either version 2 of the License, or\n\ + static gchar *nvds = NULL; + static bool _show_version = false; + static bool _quiet = false; ++static const char *_cacert_file = NULL; + + static GOptionEntry _entries[] = { + { "nvd-dir", 'd', 0, G_OPTION_ARG_STRING, &nvds, "NVD directory in filesystem", NULL }, + { "version", 'v', 0, G_OPTION_ARG_NONE, &_show_version, "Show version", NULL }, + { "quiet", 'q', 0, G_OPTION_ARG_NONE, &_quiet, "Run silently", NULL }, ++ { "cacert", 'C', 0, G_OPTION_ARG_STRING, &_cacert_file, "Path to the combined SSL certificates file (system default is used if not set)", NULL}, + { .short_name = 0 } + }; + +@@ -88,7 
+90,7 @@ int main(int argc, char **argv) + goto end; + } + +- if (update_db(_quiet, db_path->str)) { ++ if (update_db(_quiet, db_path->str, _cacert_file)) { + ret = EXIT_SUCCESS; + } else { + fprintf(stderr, "Failed to update database\n"); +diff --git a/src/update.c b/src/update.c +index 070560a..8cb4a39 100644 +--- a/src/update.c ++++ b/src/update.c +@@ -267,7 +267,8 @@ static inline void update_end(int fd, const char *update_fname, bool ok) + + static int do_fetch_update(int year, const char *db_dir, CveDB *cve_db, + bool db_exist, bool verbose, +- unsigned int this_percent, unsigned int next_percent) ++ unsigned int this_percent, unsigned int next_percent, ++ const char *cacert_file) + { + const char nvd_uri[] = URI_PREFIX; + autofree(cve_string) *uri_meta = NULL; +@@ -331,14 +332,14 @@ refetch: + } + + /* Fetch NVD META file */ +- st = fetch_uri(uri_meta->str, nvdcve_meta->str, verbose, this_percent, this_percent); ++ st = fetch_uri(uri_meta->str, nvdcve_meta->str, verbose, this_percent, this_percent, cacert_file); + if (st == FETCH_STATUS_FAIL) { + fprintf(stderr, "Failed to fetch %s\n", uri_meta->str); + return -1; + } + + /* Fetch NVD XML file */ +- st = fetch_uri(uri_data_gz->str, nvdcve_data_gz->str, verbose, this_percent, next_percent); ++ st = fetch_uri(uri_data_gz->str, nvdcve_data_gz->str, verbose, this_percent, next_percent, cacert_file); + switch (st) { + case FETCH_STATUS_FAIL: + fprintf(stderr, "Failed to fetch %s\n", uri_data_gz->str); +@@ -391,7 +392,7 @@ refetch: + return 0; + } + +-bool update_db(bool quiet, const char *db_file) ++bool update_db(bool quiet, const char *db_file, const char *cacert_file) + { + autofree(char) *db_dir = NULL; + autofree(CveDB) *cve_db = NULL; +@@ -466,7 +467,8 @@ bool update_db(bool quiet, const char *db_file) + if (!quiet) + fprintf(stderr, "completed: %u%%\r", start_percent); + rc = do_fetch_update(y, db_dir, cve_db, db_exist, !quiet, +- start_percent, end_percent); ++ start_percent, end_percent, ++ cacert_file); + switch (rc) { + case 0: + if (!quiet) +diff --git a/src/update.h b/src/update.h +index b8e9911..ceea0c3 100644 +--- a/src/update.h ++++ b/src/update.h +@@ -15,7 +15,7 @@ cve_string *get_db_path(const char *path); + + int update_required(const char *db_file); + +-bool update_db(bool quiet, const char *db_file); ++bool update_db(bool quiet, const char *db_file, const char *cacert_file); + + + /* +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/files/0001-update-Compare-computed-vs-expected-sha256-digit-str.patch b/import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/files/0001-update-Compare-computed-vs-expected-sha256-digit-str.patch new file mode 100644 index 000000000..458c0cc84 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/cve-check-tool/files/0001-update-Compare-computed-vs-expected-sha256-digit-str.patch @@ -0,0 +1,52 @@ +From b0426e63c9ac61657e029f689bcb8dd051e752c6 Mon Sep 17 00:00:00 2001 +From: Sergey Popovich +Date: Fri, 21 Apr 2017 07:32:23 -0700 +Subject: [PATCH] update: Compare computed vs expected sha256 digit string + ignoring case + +We produce sha256 digest string using %x snprintf() +qualifier for each byte of digest which uses alphabetic +characters from "a" to "f" in lower case to represent +integer values from 10 to 15. + +Previously all of the NVD META files supply sha256 +digest string for corresponding XML file in lower case. 
+ +However due to some reason this changed recently to +provide digest digits in upper case causing fetched +data consistency checks to fail. This prevents database +from being updated periodically. + +While commit c4f6e94 (update: Do not treat sha256 failure +as fatal if requested) adds useful option to skip +digest validation at all and thus provides workaround for +this situation, it might be unacceptable for some +deployments where we need to ensure that downloaded +data is consistent before start parsing it and update +SQLite database. + +Use strcasecmp() to compare two digest strings case +insensitively and addressing this case. + +Upstream-Status: Backport +Signed-off-by: Sergey Popovich +--- + src/update.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/src/update.c b/src/update.c +index 8588f38..3cc6b67 100644 +--- a/src/update.c ++++ b/src/update.c +@@ -187,7 +187,7 @@ static bool nvdcve_data_ok(const char *meta, const char *data) + snprintf(&csum_data[idx], len, "%02hhx", digest[i]); + } + +- ret = streq(csum_meta, csum_data); ++ ret = !strcasecmp(csum_meta, csum_data); + + err_unmap: + munmap(buffer, length); +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/devel-config/distcc-config.bb b/import-layers/yocto-poky/meta/recipes-devtools/devel-config/distcc-config.bb index cb7830b99..3cd661d54 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/devel-config/distcc-config.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/devel-config/distcc-config.bb @@ -1,8 +1,7 @@ SUMMARY = "Sets up distcc for compilation on the target device" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" +LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" SRC_URI = "file://distcc.sh" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/devel-config/nfs-export-root.bb b/import-layers/yocto-poky/meta/recipes-devtools/devel-config/nfs-export-root.bb index 169d00581..56375a47b 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/devel-config/nfs-export-root.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/devel-config/nfs-export-root.bb @@ -1,8 +1,7 @@ SUMMARY = "Configuration script to export target rootfs filesystem" DESCRIPTION = "Enables NFS access from any host to the entire filesystem (for development purposes)." 
LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/LICENSE;md5=4d92cd373abda3937c2bc47fbc49d690 \ - file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" +LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" PR = "r1" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/diffstat/diffstat_1.61.bb b/import-layers/yocto-poky/meta/recipes-devtools/diffstat/diffstat_1.61.bb index 0ec41c3ab..583b387e9 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/diffstat/diffstat_1.61.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/diffstat/diffstat_1.61.bb @@ -7,7 +7,7 @@ SECTION = "devel" LICENSE = "MIT" LIC_FILES_CHKSUM = "file://install-sh;endline=42;md5=b3549726c1022bee09c174c72a0ca4a5" -SRC_URI = "ftp://invisible-island.net/diffstat/diffstat-${PV}.tgz \ +SRC_URI = "http://invisible-mirror.net/archives/${BPN}/${BP}.tgz \ file://run-ptest \ " diff --git a/import-layers/yocto-poky/meta/recipes-devtools/distcc/distcc_3.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/distcc/distcc_3.2.bb index c084ad2b7..ea3d7c10b 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/distcc/distcc_3.2.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/distcc/distcc_3.2.bb @@ -65,7 +65,3 @@ FILES_${PN} = " ${sysconfdir} \ ${systemd_unitdir}/system/distcc.service" FILES_distcc-distmon-gnome = " ${bindir}/distccmon-gnome \ ${datadir}/distcc" - -pkg_postrm_${PN} () { - deluser distcc || true -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Corretly-install-tmpfiles.d-configuration.patch b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Corretly-install-tmpfiles.d-configuration.patch new file mode 100644 index 000000000..c9df45897 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Corretly-install-tmpfiles.d-configuration.patch @@ -0,0 +1,21 @@ +From 8ce181714640315d2dd37ee794acbb22063cd669 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Thu, 26 Jan 2017 16:36:20 +0200 +Subject: [PATCH] Corretly install tmpfiles.d configuration + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + etc/tmpfiles.d/CMakeLists.txt | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/etc/tmpfiles.d/CMakeLists.txt b/etc/tmpfiles.d/CMakeLists.txt +index f69c773e..3eb6d0e8 100644 +--- a/etc/tmpfiles.d/CMakeLists.txt ++++ b/etc/tmpfiles.d/CMakeLists.txt +@@ -1 +1 @@ +-INSTALL (FILES dnf.conf DESTINATION /usr/lib/tmpfiles.d/) ++INSTALL (FILES dnf.conf DESTINATION ${SYSCONFDIR}/tmpfiles.d/) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch new file mode 100644 index 000000000..0f261e5c5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch @@ -0,0 +1,29 @@ +From 4313ced1320594013795f11f6db00381e3f4cc45 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Thu, 26 Jan 2017 16:25:47 +0200 +Subject: [PATCH] Do not hardcode /etc and systemd unit directories + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + CMakeLists.txt | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 6a319935..db20ccd4 100644 +--- a/CMakeLists.txt ++++ 
b/CMakeLists.txt +@@ -7,8 +7,8 @@ if (NOT PYTHON_DESIRED) + set (PYTHON_DESIRED "2") + endif() + +-SET( SYSCONFDIR /etc) +-SET( SYSTEMD_DIR /usr/lib/systemd/system) ++SET( SYSCONFDIR ${CMAKE_INSTALL_SYSCONFDIR}) ++SET( SYSTEMD_DIR $ENV{systemd_system_unitdir}) + + if (${PYTHON_DESIRED} STREQUAL "2") + FIND_PACKAGE (PythonInterp REQUIRED) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Do-not-prepend-installroot-to-logdir.patch b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Do-not-prepend-installroot-to-logdir.patch new file mode 100644 index 000000000..a90e77cbf --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0001-Do-not-prepend-installroot-to-logdir.patch @@ -0,0 +1,31 @@ +From 31653d324cf8c7b1f2f9e49d22676bd2ac546331 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Wed, 11 Jan 2017 15:10:13 +0200 +Subject: [PATCH] Do not prepend installroot to logdir. + +This would otherwise write the logs into rootfs/var/log +(whereas we want them in $T), +and will break installation of base-files rpm. + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + dnf/cli/cli.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/dnf/cli/cli.py b/dnf/cli/cli.py +index b764801a..893f4bda 100644 +--- a/dnf/cli/cli.py ++++ b/dnf/cli/cli.py +@@ -881,7 +881,7 @@ class Cli(object): + subst = conf.substitutions + subst.update_from_etc(conf.installroot) + +- for opt in ('cachedir', 'logdir', 'persistdir'): ++ for opt in ('cachedir', 'persistdir'): + conf.prepend_installroot(opt) + + self.base._logging._setup_from_dnf_conf(conf) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch new file mode 100644 index 000000000..8c59f9f67 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch @@ -0,0 +1,27 @@ +From 3ddaa930cda57a62a2174faebcc87aebc59591d1 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:29:07 +0200 +Subject: [PATCH 29/30] Do not set PYTHON_INSTALL_DIR by running python + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + CMakeLists.txt | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 6a319935..466ca1e6 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -18,7 +18,7 @@ else() + FIND_PACKAGE(PythonInterp 3.0 REQUIRED) + endif() + +-EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib())" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) ++#EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib())" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) + EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} -c "import sys; sys.stdout.write('%s.%s' % (sys.version_info.major, sys.version_info.minor))" OUTPUT_VARIABLE PYTHON_MAJOR_DOT_MINOR_VERSION) + MESSAGE(STATUS "Python install dir is ${PYTHON_INSTALL_DIR}") + +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0030-Run-python-scripts-using-env.patch b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0030-Run-python-scripts-using-env.patch new 
file mode 100644 index 000000000..61328e6ec --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf/0030-Run-python-scripts-using-env.patch @@ -0,0 +1,48 @@ +From 9c8d545152b35d8943be72b9503414a53e1ebf7c Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:29:37 +0200 +Subject: [PATCH 30/30] Run python scripts using env + +Otherwise the build tools hardcode the python path into them. + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + bin/dnf-automatic.in | 2 +- + bin/dnf.in | 2 +- + bin/yum.in | 2 +- + 3 files changed, 3 insertions(+), 3 deletions(-) + +diff --git a/bin/dnf-automatic.in b/bin/dnf-automatic.in +index 5b06aa26..9f6f703e 100755 +--- a/bin/dnf-automatic.in ++++ b/bin/dnf-automatic.in +@@ -1,4 +1,4 @@ +-#!@PYTHON_EXECUTABLE@ ++#!/usr/bin/env python3 + # dnf-automatic executable. + # + # Copyright (C) 2014-2016 Red Hat, Inc. +diff --git a/bin/dnf.in b/bin/dnf.in +index 645d0f06..ab141abd 100755 +--- a/bin/dnf.in ++++ b/bin/dnf.in +@@ -1,4 +1,4 @@ +-#!@PYTHON_EXECUTABLE@ ++#!/usr/bin/env python3 + # The dnf executable script. + # + # Copyright (C) 2012-2016 Red Hat, Inc. +diff --git a/bin/yum.in b/bin/yum.in +index f1fee071..013dc8c5 100755 +--- a/bin/yum.in ++++ b/bin/yum.in +@@ -1,4 +1,4 @@ +-#!@PYTHON_EXECUTABLE@ ++#!/usr/bin/env python3 + # The dnf executable script. + # + # Copyright (C) 2016 Red Hat, Inc. +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf_git.bb new file mode 100644 index 000000000..9f814fb21 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/dnf/dnf_git.bb @@ -0,0 +1,49 @@ +SUMMARY = "Package manager forked from Yum, using libsolv as a dependency resolver" +LICENSE = "GPLv2" +LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263 \ + file://PACKAGE-LICENSING;md5=bfc29916e11321be06924c4fb096fdcc \ + " + +SRC_URI = "git://github.com/rpm-software-management/dnf.git \ + file://0029-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch \ + file://0030-Run-python-scripts-using-env.patch \ + file://0001-Do-not-prepend-installroot-to-logdir.patch \ + file://0001-Do-not-hardcode-etc-and-systemd-unit-directories.patch \ + file://0001-Corretly-install-tmpfiles.d-configuration.patch \ + " + +PV = "2.0.0+git${SRCPV}" +SRCREV = "f0093d672d3069cfee8447973ae70ef615fd8886" + +S = "${WORKDIR}/git" + +inherit cmake gettext bash-completion distutils3-base systemd + +DEPENDS += "libdnf librepo libcomps python3-pygpgme python3-iniparse" + +# manpages generation requires http://www.sphinx-doc.org/ +EXTRA_OECMAKE = " -DWITH_MAN=0 -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DPYTHON_DESIRED=3" + +BBCLASSEXTEND = "native nativesdk" +RDEPENDS_${PN}_class-target += "python3-core python3-codecs python3-netclient python3-email python3-threading python3-distutils librepo python3-shell python3-subprocess libcomps libdnf python3-sqlite3 python3-compression python3-pygpgme python3-rpm python3-iniparse python3-json python3-importlib python3-curses python3-argparse python3-misc" + +# Create a symlink called 'dnf' as 'make install' does not do it, but +# .spec file in dnf source tree does (and then Fedora and dnf documentation +# says that dnf binary is plain 'dnf'). 
+do_install_append() { + lnr ${D}/${bindir}/dnf-3 ${D}/${bindir}/dnf + lnr ${D}/${bindir}/dnf-automatic-3 ${D}/${bindir}/dnf-automatic +} + +# Direct dnf-native to read rpm configuration from our sysroot, not the one it was compiled in +do_install_append_class-native() { + create_wrapper ${D}/${bindir}/dnf \ + RPM_CONFIGDIR=${STAGING_LIBDIR_NATIVE}/rpm \ + RPM_NO_CHROOT_FOR_SCRIPTS=1 +} + +SYSTEMD_SERVICE_${PN} = "dnf-makecache.service dnf-makecache.timer \ + dnf-automatic-download.service dnf-automatic-download.timer \ + dnf-automatic-install.service dnf-automatic-install.timer \ + dnf-automatic-notifyonly.service dnf-automatic-notifyonly.timer \ +" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/docbook-dsssl-stylesheets/docbook-dsssl-stylesheets-native_1.79.bb b/import-layers/yocto-poky/meta/recipes-devtools/docbook-dsssl-stylesheets/docbook-dsssl-stylesheets-native_1.79.bb deleted file mode 100644 index b7791f360..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/docbook-dsssl-stylesheets/docbook-dsssl-stylesheets-native_1.79.bb +++ /dev/null @@ -1,71 +0,0 @@ -SUMMARY = "DSSSL stylesheets used to transform SGML and XML DocBook files" -HOMEPAGE = "http://docbook.sourceforge.net" -# Simple persmissive -LICENSE = "DSSSL" -LIC_FILES_CHKSUM = "file://README;beginline=41;endline=74;md5=875385159b2ee76ecf56136ae7f542d6" - -DEPENDS = "sgml-common-native" - -PR = "r4" - -SRC_URI = "${SOURCEFORGE_MIRROR}/docbook/docbook-dsssl-${PV}.tar.bz2" - -SRC_URI[md5sum] = "bc192d23266b9a664ca0aba4a7794c7c" -SRC_URI[sha256sum] = "2f329e120bee9ef42fbdd74ddd60e05e49786c5a7953a0ff4c680ae6bdf0e2bc" - -UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/docbook/files/docbook-dsssl/" -UPSTREAM_CHECK_REGEX = "/docbook-dsssl/(?P(\d+[\.\-_]*)+)/" - -S = "${WORKDIR}/docbook-dsssl-${PV}" - -inherit native - -SSTATEPOSTINSTFUNCS += "docbook_dsssl_stylesheets_sstate_postinst" -SYSROOT_PREPROCESS_FUNCS += "docbook_dsssl_sysroot_preprocess" -CLEANFUNCS += "docbook_dsssl_stylesheets_sstate_clean" - - -do_install () { - # Refer to http://www.linuxfromscratch.org/blfs/view/stable/pst/docbook-dsssl.html - # for details. - install -d ${D}${bindir} - install -m 0755 bin/collateindex.pl ${D}${bindir} - - install -d ${D}${datadir}/sgml/docbook/dsssl-stylesheets-${PV} - install -m 0644 catalog ${D}${datadir}/sgml/docbook/dsssl-stylesheets-${PV} - cp -v -R * ${D}${datadir}/sgml/docbook/dsssl-stylesheets-${PV} - - install -d ${D}${sysconfdir}/sgml - echo "CATALOG ${datadir}/sgml/docbook/dsssl-stylesheets-${PV}/catalog" > \ - ${D}${sysconfdir}/sgml/dsssl-docbook-stylesheets.cat - echo "CATALOG ${datadir}/sgml/docbook/dsssl-stylesheets-${PV}/common/catalog" >> \ - ${D}${sysconfdir}/sgml/dsssl-docbook-stylesheets.cat -} - -docbook_dsssl_stylesheets_sstate_postinst () { - if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] - then - # Ensure that the catalog file sgml-docbook.cat is properly - # updated when the package is installed from sstate cache. 
- ${SYSROOT_DESTDIR}${bindir_crossscripts}/install-catalog-docbook-dsssl \ - --add ${sysconfdir}/sgml/sgml-docbook.bak \ - ${sysconfdir}/sgml/dsssl-docbook-stylesheets.cat - ${SYSROOT_DESTDIR}${bindir_crossscripts}/install-catalog-docbook-dsssl \ - --add ${sysconfdir}/sgml/sgml-docbook.cat \ - ${sysconfdir}/sgml/dsssl-docbook-stylesheets.cat - fi -} - -docbook_dsssl_sysroot_preprocess () { - install -d ${SYSROOT_DESTDIR}${bindir_crossscripts}/ - install -m 755 ${STAGING_BINDIR_NATIVE}/install-catalog ${SYSROOT_DESTDIR}${bindir_crossscripts}/install-catalog-docbook-dsssl -} - -docbook_dsssl_stylesheets_sstate_clean () { - # Ensure that the catalog file sgml-docbook.cat is properly - # updated when the package is removed from sstate cache. - files="${sysconfdir}/sgml/sgml-docbook.bak ${sysconfdir}/sgml/sgml-docbook.cat" - for f in $files; do - [ ! -f $f ] || sed -i '/\/sgml\/dsssl-docbook-stylesheets.cat/d' $f - done -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-3.1-native_3.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-3.1-native_3.1.bb deleted file mode 100644 index a3ee5d83a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-3.1-native_3.1.bb +++ /dev/null @@ -1,26 +0,0 @@ -require docbook-sgml-dtd-native.inc - -LICENSE = "OASIS" -LIC_FILES_CHKSUM = "file://LICENSE-OASIS;md5=c608985dd5f7f215e669e7639a0b1d2e" - -DTD_VERSION = "3.1" - -PR = "${INC_PR}.0" - -# Note: the upstream sources are not distributed with a license file. -# LICENSE-OASIS is included as a "patch" to workaround this. When -# upgrading this recipe, please verify whether this is still needed. -SRC_URI = "http://www.docbook.org/sgml/3.1/docbk31.zip \ - file://LICENSE-OASIS" - -SRC_URI[md5sum] = "432749c0c806dbae81c8bcb70da3b5d3" -SRC_URI[sha256sum] = "20261d2771b9a052abfa3d8fab1aa62be05791a010281c566f9073bf0e644538" - -UPSTREAM_CHECK_URI = "http://www.docbook.org/sgml/" -UPSTREAM_CHECK_REGEX = "(?P3\..+)/" - -do_compile() { - # Refer to http://www.linuxfromscratch.org/blfs/view/stable/pst/sgml-dtd-3.html - # for details. - sed -i -e '/ISO 8879/d' -e 's|DTDDECL "-//OASIS//DTD DocBook V3.1//EN"|SGMLDECL|g' docbook.cat -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-4.1-native_4.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-4.1-native_4.1.bb deleted file mode 100644 index be5968077..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-4.1-native_4.1.bb +++ /dev/null @@ -1,26 +0,0 @@ -require docbook-sgml-dtd-native.inc - -LICENSE = "OASIS" -LIC_FILES_CHKSUM = "file://LICENSE-OASIS;md5=c608985dd5f7f215e669e7639a0b1d2e" - -DTD_VERSION = "4.1" - -PR = "${INC_PR}.0" - -# Note: the upstream sources are not distributed with a license file. -# LICENSE-OASIS is included as a "patch" to workaround this. When -# upgrading this recipe, please verify whether this is still needed. -SRC_URI = "http://docbook.org/sgml/4.1/docbk41.zip \ - file://LICENSE-OASIS" - -SRC_URI[md5sum] = "489f6ff2a2173eb1e14216c10533ede2" -SRC_URI[sha256sum] = "deaafcf0a3677692e7ad4412c0e41c1db3e9da6cdcdb3dd32b2cc1f9c97d6311" - -UPSTREAM_CHECK_URI = "http://www.docbook.org/sgml/" -UPSTREAM_CHECK_REGEX = "(?P4\.1(\.\d+)*)/" - -do_compile() { - # Refer to http://www.linuxfromscratch.org/blfs/view/stable/pst/sgml-dtd.html - # for details. 
- sed -i -e '/ISO 8879/d' -e '/gml/d' docbook.cat -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-4.5-native.bb b/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-4.5-native.bb deleted file mode 100644 index ae723c74f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-4.5-native.bb +++ /dev/null @@ -1,18 +0,0 @@ -require docbook-sgml-dtd-native.inc - -LICENSE = "OASIS" -LIC_FILES_CHKSUM = "file://LICENSE-OASIS;md5=c608985dd5f7f215e669e7639a0b1d2e" - -DTD_VERSION = "4.5" - -PR = "${INC_PR}.0" -PV = "4.5" - -SRC_URI[md5sum] = "07c581f4bbcba6d3aac85360a19f95f7" -SRC_URI[sha256sum] = "8043e514e80c6c19cb146b5d37937d1305bf3abf9b0097c36df7f70f611cdf43" - -do_compile() { - # Refer to http://www.linuxfromscratch.org/blfs/view/stable/pst/sgml-dtd.html - # for details. - sed -i -e '/ISO 8879/d' -e'/gml/d' docbook.cat -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-native.inc b/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-native.inc deleted file mode 100644 index af5098796..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/docbook-sgml-dtd-native.inc +++ /dev/null @@ -1,70 +0,0 @@ -# The DTDs of the various versions have to be installed in parallel and -# should not replace each other. The installation step is common for -# all versions and just differs in ${DTD_VERSION} which is set in each -# version recipe. -# -# The DTDs do have some quirks (see LFS documentation). -SUMMARY = "Document type definitions for verification of SGML data files" -DESCRIPTION = "Document type definitions for verification of SGML data \ -files against the DocBook rule set" -HOMEPAGE = "http://www.docbook.org/sgml/" - -DEPENDS = "sgml-common-native" - -# Note: the upstream sources are not distributed with a license file. -# LICENSE-OASIS is included as a "patch" to workaround this. When -# upgrading this recipe, please verify whether this is still needed. -SRC_URI = "http://www.docbook.org/sgml/${DTD_VERSION}/docbook-${DTD_VERSION}.zip \ - file://LICENSE-OASIS" - -# The .zip file extracts to the current dir -S = "${WORKDIR}" - -INC_PR = "r3" - -SSTATEPOSTINSTFUNCS += "docbook_sgml_dtd_sstate_postinst" -SYSROOT_PREPROCESS_FUNCS += "docbook_sgml_dtd_sysroot_preprocess" - -inherit native - -do_install () { - # Refer to http://www.linuxfromscratch.org/blfs/view/stable/pst/sgml-dtd.html - # for details. - install -d -m 755 ${D}${datadir}/sgml/docbook/sgml-dtd-${DTD_VERSION} - install docbook.cat ${D}${datadir}/sgml/docbook/sgml-dtd-${DTD_VERSION}/catalog - cp -PpRr *.dtd *.mod *.dcl ${D}${datadir}/sgml/docbook/sgml-dtd-${DTD_VERSION} - - install -d ${D}${sysconfdir}/sgml - echo "CATALOG ${datadir}/sgml/docbook/sgml-dtd-${DTD_VERSION}/catalog" > \ - ${D}${sysconfdir}/sgml/sgml-docbook-dtd-${DTD_VERSION}.cat -} - -docbook_sgml_dtd_sstate_postinst () { - if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] - then - # Ensure that the catalog file sgml-docbook.cat is properly - # updated when the package is installed from sstate cache. 
- ${SYSROOT_DESTDIR}${bindir_crossscripts}/install-catalog-docbook-sgml-dtd-${DTD_VERSION} \ - --add ${sysconfdir}/sgml/sgml-docbook.bak \ - ${sysconfdir}/sgml/sgml-docbook-dtd-${DTD_VERSION}.cat - ${SYSROOT_DESTDIR}${bindir_crossscripts}/install-catalog-docbook-sgml-dtd-${DTD_VERSION} \ - --add ${sysconfdir}/sgml/sgml-docbook.cat \ - ${sysconfdir}/sgml/sgml-docbook-dtd-${DTD_VERSION}.cat - fi -} - -docbook_sgml_dtd_sysroot_preprocess () { - install -d ${SYSROOT_DESTDIR}${bindir_crossscripts}/ - install -m 755 ${STAGING_BINDIR_NATIVE}/install-catalog ${SYSROOT_DESTDIR}${bindir_crossscripts}/install-catalog-docbook-sgml-dtd-${DTD_VERSION} -} - -CLEANFUNCS += "docbook_sgml_dtd_sstate_clean" - -docbook_sgml_dtd_sstate_clean () { - # Ensure that the catalog file sgml-docbook.cat is properly - # updated when the package is removed from sstate cache. - files="${sysconfdir}/sgml/sgml-docbook.bak ${sysconfdir}/sgml/sgml-docbook.cat" - for f in $files; do - [ ! -f $f ] || sed -i '/\/sgml\/sgml-docbook-dtd-${DTD_VERSION}.cat/d' $f - done -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/files/LICENSE-OASIS b/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/files/LICENSE-OASIS deleted file mode 100644 index 6da7f5b8c..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/docbook-sgml-dtd/files/LICENSE-OASIS +++ /dev/null @@ -1,16 +0,0 @@ -Permission to use, copy, modify and distribute the DocBook XML DTD -and its accompanying documentation for any purpose and without fee -is hereby granted in perpetuity, provided that the above copyright -notice and this paragraph appear in all copies. The copyright -holders make no representation about the suitability of the DTD for -any purpose. It is provided "as is" without expressed or implied -warranty. - -If you modify the DocBook DTD in any way, except for declaring and -referencing additional sets of general entities and declaring -additional notations, label your DTD as a variant of DocBook. See -the maintenance documentation for more information. - -Please direct all questions, bug reports, or suggestions for -changes to the docbook@lists.oasis-open.org mailing list. For more -information, see http://www.oasis-open.org/docbook/. diff --git a/import-layers/yocto-poky/meta/recipes-devtools/docbook-utils/docbook-utils-0.6.14/re.patch b/import-layers/yocto-poky/meta/recipes-devtools/docbook-utils/docbook-utils-0.6.14/re.patch deleted file mode 100644 index db079d68f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/docbook-utils/docbook-utils-0.6.14/re.patch +++ /dev/null @@ -1,29 +0,0 @@ -Upstream-Status: Pending - -Fix runtime error occurred e.g. with docbook-to-man calls: - - grep: character class syntax is [[:space:]], not [:space:] - grep: character class syntax is [[:space:]], not [:space:] - jw: There is no frontend called "/docbook/utils-0.6.14/frontends/docbook". 
- -See also: - - - -Signed-off-by: Steffen Sledz - -diff -Nurd docbook-utils-0.6.14-orig/bin/jw.in docbook-utils-0.6.14/bin/jw.in ---- docbook-utils-0.6.14-orig/bin/jw.in 2012-03-29 07:50:00.789564826 +0200 -+++ docbook-utils-0.6.14/bin/jw.in 2012-03-29 07:52:10.371302967 +0200 -@@ -80,9 +80,9 @@ - SGML_CATALOGS_DIR="/etc/sgml" - if [ -f "$SGML_CONF" ] - then -- RE='^[:space:]*SGML_BASE_DIR[:space:]*=[:space:]*' -+ RE='^[[:space:]]*SGML_BASE_DIR[[:space:]]*=[[:space:]]*' - SGML_BASE_DIR=`grep $RE $SGML_CONF | sed "s/$RE//"` -- RE='^[:space:]*SGML_CATALOGS_DIR[:space:]*=[:space:]*' -+ RE='^[[:space:]]*SGML_CATALOGS_DIR[[:space:]]*=[[:space:]]*' - SGML_CATALOGS_DIR=`grep $RE $SGML_CONF | sed "s/$RE//"` - fi - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/docbook-utils/docbook-utils-native_0.6.14.bb b/import-layers/yocto-poky/meta/recipes-devtools/docbook-utils/docbook-utils-native_0.6.14.bb deleted file mode 100644 index 44b43a810..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/docbook-utils/docbook-utils-native_0.6.14.bb +++ /dev/null @@ -1,63 +0,0 @@ -SUMMARY = "Utilities for formatting and manipulating DocBook documents" -DESCRIPTION = "A collection of all the free software tools you need to \ -work on and format DocBook documents." -HOMEPAGE = "http://sources.redhat.com/docbook-tools/" -SECTION = "console/utils" -LICENSE = "GPLv2" -LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f" -DEPENDS = "openjade-native sgmlspl-native docbook-dsssl-stylesheets-native docbook-sgml-dtd-3.1-native" - -PR = "r3" - -SRC_URI = "\ - http://ftp.osuosl.org/pub/blfs/conglomeration/docbook-utils/docbook-utils-${PV}.tar.gz \ - file://re.patch \ -" - -SRC_URI[md5sum] = "6b41b18c365c01f225bc417cf632d81c" -SRC_URI[sha256sum] = "48faab8ee8a7605c9342fb7b906e0815e3cee84a489182af38e8f7c0df2e92e9" - -inherit autotools native - -do_configure_prepend() { - # Fix hard-coded references to /etc/sgml - if [ ! -e ${S}/.sed_done ]; then - sed -i -e "s|/etc/sgml|${sysconfdir}/sgml|g" ${S}/bin/jw.in - sed -i -e "s|/etc/sgml|${sysconfdir}/sgml|g" ${S}/doc/man/Makefile.am - sed -i -e "s|/etc/sgml|${sysconfdir}/sgml|g" ${S}/doc/HTML/Makefile.am - - # Point jw to the native sysroot catalog - sed -i -e 's|^SGML_EXTRA_CATALOGS=""|SGML_EXTRA_CATALOGS=":${sysconfdir}/sgml/catalog"|g' ${S}/bin/jw.in - touch ${S}/.sed_done - fi -} -do_unpack[cleandirs] += "${S}" - -do_install() { - install -d ${D}${bindir} - # Install the binaries and a bunch of other commonly used names for them. 
- for doctype in html ps dvi man pdf rtf tex texi txt - do - install -m 0755 ${S}/bin/docbook2$doctype ${D}${bindir}/ - ln -sf docbook2x-$doctype ${D}${bindir}/db2$doctype - ln -sf docbook2$doctype ${D}${bindir}/db2$doctype - ln -sf docbook2$doctype ${D}${bindir}/docbook-to-$doctype - done - - install -m 0755 ${B}/bin/jw ${D}${bindir}/ - for i in backends/dvi backends/html \ - backends/pdf backends/ps backends/rtf backends/tex \ - backends/txt \ - helpers/docbook2man-spec.pl helpers/docbook2texi-spec.pl \ - docbook-utils.dsl - do - install -d ${D}${datadir}/sgml/docbook/utils-${PV}/`dirname $i` - install ${S}/$i ${D}${datadir}/sgml/docbook/utils-${PV}/$i - done - for i in backends/man backends/texi frontends/docbook - do - install -d ${D}${datadir}/sgml/docbook/utils-${PV}/`dirname $i` - install ${B}/$i ${D}${datadir}/sgml/docbook/utils-${PV}/$i - done - -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb b/import-layers/yocto-poky/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb index 2f13dba52..da62b77a6 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/docbook-xml/docbook-xml-dtd4_4.5.bb @@ -27,10 +27,7 @@ S="${WORKDIR}/docbook-xml-4.5.c31424" inherit allarch BBCLASSEXTEND = "native" -SSTATEPOSTINSTFUNCS_append_class-native = " docbook_xml_dtd_sstate_postinst" -SYSROOT_PREPROCESS_FUNCS_append_class-native = " docbook_xml_dtd_sysroot_preprocess" - -do_configre (){ +do_configure (){ : } @@ -49,18 +46,10 @@ do_install () { install -m 755 ${WORKDIR}/docbook-xml.xml ${D}${sysconfdir}/xml/docbook-xml.xml } -docbook_xml_dtd_sstate_postinst () { - if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] - then - # Ensure that the catalog file sgml-docbook.cat is properly - # updated when the package is installed from sstate cache. - sed -i -e "s|file://.*/usr/share/xml|file://${datadir}/xml|g" ${SYSROOT_DESTDIR}${sysconfdir}/xml/docbook-xml.xml - fi -} - -docbook_xml_dtd_sysroot_preprocess () { - # Update the hardcode dir in docbook-xml.xml - sed -i -e "s|file:///usr/share/xml|file://${datadir}/xml|g" ${SYSROOT_DESTDIR}${sysconfdir}/xml/docbook-xml.xml +do_install_append_class-native () { + # Ensure that the catalog file sgml-docbook.cat is properly + # updated when the package is installed from sstate cache. 
+ sed -i -e "s|file://.*/usr/share/xml|file://${datadir}/xml|g" ${D}${sysconfdir}/xml/docbook-xml.xml } FILES_${PN} = "${datadir}/* ${sysconfdir}/xml/docbook-xml.xml" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets_1.79.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets_1.79.1.bb index aa03a82a2..07d32ce97 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets_1.79.1.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/docbook-xml/docbook-xsl-stylesheets_1.79.1.bb @@ -20,9 +20,6 @@ S = "${WORKDIR}/docbook-xsl-${PV}" inherit allarch BBCLASSEXTEND = "native" -SSTATEPOSTINSTFUNCS_append_class-native = " docbook_xsl_stylesheets_sstate_postinst" -SYSROOT_PREPROCESS_FUNCS_append_class-native = " docbook_xsl_stylesheets_sysroot_preprocess" - do_configure (){ : } @@ -55,18 +52,10 @@ do_install () { } -docbook_xsl_stylesheets_sstate_postinst () { - if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] - then - # Ensure that the catalog file sgml-docbook.cat is properly - # updated when the package is installed from sstate cache. - sed -i -e "s|file://.*/usr/share/xml|file://${datadir}/xml|g" ${SYSROOT_DESTDIR}${sysconfdir}/xml/docbook-xsl.xml - fi -} - -docbook_xsl_stylesheets_sysroot_preprocess () { - # Update the hardcode dir in docbook-xml.xml - sed -i -e "s|file:///usr/share/xml|file://${datadir}/xml|g" ${SYSROOT_DESTDIR}${sysconfdir}/xml/docbook-xsl.xml +do_install_append_class-native () { + # Ensure that the catalog file sgml-docbook.cat is properly + # updated when the package is installed from sstate cache. + sed -i -e "s|file://.*/usr/share/xml|file://${datadir}/xml|g" ${D}${sysconfdir}/xml/docbook-xsl.xml } RDEPENDS_${PN} += "perl" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/0001-Include-fcntl.h-for-getting-loff_t-definition.patch b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/0001-Include-fcntl.h-for-getting-loff_t-definition.patch deleted file mode 100644 index 06f5b7a96..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/0001-Include-fcntl.h-for-getting-loff_t-definition.patch +++ /dev/null @@ -1,41 +0,0 @@ -From b7c42c6a9829bea911b22201edd7df2a9bec1a14 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 13 Apr 2015 17:52:34 -0700 -Subject: [PATCH] Include fcntl.h for getting loff_t definition - -Upstream-Status: Pending - -Signed-off-by: Khem Raj ---- - dosfsck/dosfsck.h | 2 ++ - dosfsck/lfn.c | 1 + - 2 files changed, 3 insertions(+) - -diff --git a/dosfsck/dosfsck.h b/dosfsck/dosfsck.h -index d9314b1..2076d5f 100644 ---- a/dosfsck/dosfsck.h -+++ b/dosfsck/dosfsck.h -@@ -50,6 +50,8 @@ - #define CT_LE_L(v) (v) - #endif /* __BIG_ENDIAN */ - -+#include -+ - #define VFAT_LN_ATTR (ATTR_RO | ATTR_HIDDEN | ATTR_SYS | ATTR_VOLUME) - - /* ++roman: Use own definition of boot sector structure -- the kernel headers' -diff --git a/dosfsck/lfn.c b/dosfsck/lfn.c -index 9b2cfc3..bb04fda 100644 ---- a/dosfsck/lfn.c -+++ b/dosfsck/lfn.c -@@ -7,6 +7,7 @@ - #include - #include - #include -+#include - - #include "common.h" - #include "io.h" --- -2.1.4 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/alignment_hack.patch b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/alignment_hack.patch deleted file mode 100644 index b46b2db0a..000000000 --- 
a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/alignment_hack.patch +++ /dev/null @@ -1,38 +0,0 @@ -The problem is that unsigned char[2] is -guranteed to be 8Bit aligned on arm -but unsigned short is/needs to be 16bit aligned -the union { unsigned short; unsigned char[2] } trick -didn't work so no we use the alpha hack. - -memcpy into an 16bit aligned - - -zecke - -Upstream-Status: Inappropriate [licensing] -We're tracking an old release of dosfstools due to licensing issues. - -Signed-off-by: Scott Garman - ---- dosfstools/dosfsck/boot.c.orig 2003-05-15 19:32:23.000000000 +0200 -+++ dosfstools/dosfsck/boot.c 2003-06-13 17:44:25.000000000 +0200 -@@ -36,17 +36,15 @@ - { 0xff, "5.25\" 320k floppy 2s/40tr/8sec" }, - }; - --#if defined __alpha || defined __ia64__ || defined __s390x__ || defined __x86_64__ || defined __ppc64__ -+ - /* Unaligned fields must first be copied byte-wise */ - #define GET_UNALIGNED_W(f) \ - ({ \ - unsigned short __v; \ - memcpy( &__v, &f, sizeof(__v) ); \ -- CF_LE_W( *(unsigned short *)&f ); \ -+ CF_LE_W( *(unsigned short *)&__v ); \ - }) --#else --#define GET_UNALIGNED_W(f) CF_LE_W( *(unsigned short *)&f ) --#endif -+ - - - static char *get_media_descr( unsigned char media ) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/dosfstools-msdos_fs-types.patch b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/dosfstools-msdos_fs-types.patch deleted file mode 100644 index 35abd1a2b..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/dosfstools-msdos_fs-types.patch +++ /dev/null @@ -1,37 +0,0 @@ -Ensure the __s8 type is properly defined. - -Upstream-Status: Inappropriate [licensing] -We're tracking an old release of dosfstools due to licensing issues. - -Signed-off-by: Scott Garman - ---- dosfstools-2.10/dosfsck/dosfsck.h.org 2006-02-21 08:36:14.000000000 -0700 -+++ dosfstools-2.10/dosfsck/dosfsck.h 2006-02-21 08:40:12.000000000 -0700 -@@ -22,6 +22,14 @@ - #undef __KERNEL__ - #endif - -+#ifndef __s8 -+#include -+#endif -+ -+#ifndef __ASM_STUB_BYTEORDER_H__ -+#include -+#endif -+ - #include - - /* 2.1 kernels use le16_to_cpu() type functions for CF_LE_W & Co., but don't ---- dosfstools-2.10/dosfsck/file.c.org 2006-02-21 08:37:36.000000000 -0700 -+++ dosfstools-2.10/dosfsck/file.c 2006-02-21 08:37:47.000000000 -0700 -@@ -23,6 +23,10 @@ - #undef __KERNEL__ - #endif - -+#ifndef __s8 -+#include -+#endif -+ - #include - - #include "common.h" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/fix_populated_dosfs_creation.patch b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/fix_populated_dosfs_creation.patch deleted file mode 100644 index 9d7f7321a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/fix_populated_dosfs_creation.patch +++ /dev/null @@ -1,489 +0,0 @@ -Upstream-Status: Inappropriate - -This patch fixes populated dosfs image creation with directory -structures. Earlier it was causing segfault; and only image -population with no subdirectories was working. - -Issues fixed: -1. (dir->count == dir->entries) check was only needed for root - directory entries. And this check is wrong for non-root - directories. -2. For each dir entry 2 dir->table entries were needed, one for - the file/dir and 2nd for long file name support. Earlier long - name support was added for filenames but the 2nd entry - allocation, initialization & counting was missed. -3. 
The memory clearing was missed at the code path after dir->table - memroy allocation. -4. Add entries for . & .. directories in all non-root directories. -5. The . directory points to the correct entry in fat now. -6. All directoriy entries' size was not zero as required for dosfsck, - Now all directory entries' size is zero. - -Enhancements: -1. Added support for long names for directory names. This is same - as the existing long name support for filenames. -2. Added error messages for previously silent memory allocation and - other errors. -3. -d options does not work correctly with fat32, so now throwing - an error for that. -4. Use predefined structures from kernel's msdos_fs.h file, rather - than defining again here. And accordingly change the names & use - of structure variables. - -Outstanding Issues: -1. The .. directory entry do not point to the parent of current - directory. This issue can be fixed by running dosfsck -a after - image creation. -2. For files the filesize is correct, but the clusters size is more - than it needs to be, this also can be fixed by running dosfsck -a - after image creation. - -Signed-off-by: Nitin A Kamble -2011/12/13 - - -Index: dosfstools-2.11/mkdosfs/mkdosfs.c -=================================================================== ---- dosfstools-2.11.orig/mkdosfs/mkdosfs.c -+++ dosfstools-2.11/mkdosfs/mkdosfs.c -@@ -21,7 +21,17 @@ - June 2004 - Jordan Crouse (info.linux@amd.com) - Added -d support to populate the image - Copyright (C) 2004, Advanced Micro Devices, All Rights Reserved -- -+ -+ 2011-12-13: Nitin A Kamble -+ Enhanced the -d support for population of image while -+ creation. Earlier subdirectores support was broken, only files in -+ the rootdir were supported. Now directory hirarchy is supported. -+ Also added long filename support to directory names. -+ The -d option (image population while creation) -+ is broken with fat32. -+ Copyright (C) 2011, Intel Corporation, All Rights Reserved -+ -+ - Fixes/additions May 1998 by Roman Hodek - : - - Atari format support -@@ -86,23 +96,23 @@ - # undef __KERNEL__ - #endif - --#if __BYTE_ORDER == __BIG_ENDIAN -- -+#ifndef __ASM_STUB_BYTEORDER_H__ - #include --#ifdef __le16_to_cpu --/* ++roman: 2.1 kernel headers define these function, they're probably more -- * efficient then coding the swaps machine-independently. 
*/ --#define CF_LE_W __le16_to_cpu --#define CF_LE_L __le32_to_cpu --#define CT_LE_W __cpu_to_le16 --#define CT_LE_L __cpu_to_le32 --#else --#define CF_LE_W(v) ((((v) & 0xff) << 8) | (((v) >> 8) & 0xff)) --#define CF_LE_L(v) (((unsigned)(v)>>24) | (((unsigned)(v)>>8)&0xff00) | \ -- (((unsigned)(v)<<8)&0xff0000) | ((unsigned)(v)<<24)) -+#endif -+ -+#include -+ -+#undef CF_LE_W -+#undef CF_LE_L -+#undef CT_LE_W -+#undef CT_LE_L -+ -+#if __BYTE_ORDER == __BIG_ENDIAN -+#include -+#define CF_LE_W(v) bswap_16(v) -+#define CF_LE_L(v) bswap_32(v) - #define CT_LE_W(v) CF_LE_W(v) - #define CT_LE_L(v) CF_LE_L(v) --#endif /* defined(__le16_to_cpu) */ - - #else - -@@ -253,33 +263,6 @@ struct fat32_fsinfo { - __u32 reserved2[4]; - }; - --/* This stores up to 13 chars of the name */ -- --struct msdos_dir_slot { -- __u8 id; /* sequence number for slot */ -- __u8 name0_4[10]; /* first 5 characters in name */ -- __u8 attr; /* attribute byte */ -- __u8 reserved; /* always 0 */ -- __u8 alias_checksum; /* checksum for 8.3 alias */ -- __u8 name5_10[12]; /* 6 more characters in name */ -- __u16 start; /* starting cluster number, 0 in long slots */ -- __u8 name11_12[4]; /* last 2 characters in name */ --}; -- --struct msdos_dir_entry -- { -- char name[8], ext[3]; /* name and extension */ -- __u8 attr; /* attribute bits */ -- __u8 lcase; /* Case for base and extension */ -- __u8 ctime_ms; /* Creation time, milliseconds */ -- __u16 ctime; /* Creation time */ -- __u16 cdate; /* Creation date */ -- __u16 adate; /* Last access date */ -- __u16 starthi; /* high 16 bits of first cl. (FAT32) */ -- __u16 time, date, start; /* time, date and first cluster */ -- __u32 size; /* file size (in bytes) */ -- } __attribute__ ((packed)); -- - /* The "boot code" we put into the filesystem... 
it writes a message and - tells the user to try again */ - -@@ -356,7 +339,6 @@ static struct msdos_dir_entry *root_dir; - static int size_root_dir; /* Size of the root directory in bytes */ - static int sectors_per_cluster = 0; /* Number of sectors per disk cluster */ - static int root_dir_entries = 0; /* Number of root directory entries */ --static int root_dir_num_entries = 0; - static int last_cluster_written = 0; - - static char *blank_sector; /* Blank sector - all zeros */ -@@ -1315,7 +1297,7 @@ setup_tables (void) - de->date = CT_LE_W((unsigned short)(ctime->tm_mday + - ((ctime->tm_mon+1) << 5) + - ((ctime->tm_year-80) << 9))); -- de->ctime_ms = 0; -+ de->ctime_cs = 0; - de->ctime = de->time; - de->cdate = de->date; - de->adate = de->date; -@@ -1451,16 +1433,23 @@ write_tables (void) - - /* Add a file to the specified directory entry, and also write it into the image */ - --static void copy_filename(char *filename, char *base, char *ext) { -+static void copy_filename(char *filename, char *dos_name) { - - char *ch = filename; - int i, len; - -- memset(base, 0x20, 8); -- memset(ext, 0x20, 3); -+ if (!strcmp(filename, ".")) { -+ strncpy(dos_name, MSDOS_DOT, MSDOS_NAME); -+ return; -+ } -+ if (!strcmp(filename, "..")) { -+ strncpy(dos_name, MSDOS_DOTDOT, MSDOS_NAME); -+ return; -+ } -+ memset(dos_name, 0x20, MSDOS_NAME); - - for(len = 0 ; *ch && *ch != '.'; ch++) { -- base[len++] = toupper(*ch); -+ dos_name[len++] = toupper(*ch); - if (len == 8) break; - } - -@@ -1468,7 +1457,7 @@ static void copy_filename(char *filename - if (*ch) ch++; - - for(len = 0 ; *ch; ch++) { -- ext[len++] = toupper(*ch); -+ dos_name[8 + len++] = toupper(*ch); - if (len == 3) break; - } - } -@@ -1551,7 +1540,7 @@ static int add_file(char *filename, stru - int start; - int usedsec, totalsec; - -- char name83[8], ext83[3]; -+ char dos_name[MSDOS_NAME+1]; - - struct msdos_dir_slot *slot; - int i; -@@ -1562,23 +1551,22 @@ static int add_file(char *filename, stru - if (dir->root) { - if (dir->count == dir->entries) { - printf("Error - too many directory entries\n"); -+ return; - } - } - else { -- if (dir->count == dir->entries) { -- if (!dir->table) -- dir->table = -- (struct msdos_dir_entry *) malloc(sizeof(struct msdos_dir_entry)); -- else { -- dir->table = -- (struct msdos_dir_entry *) realloc(dir->table, (dir->entries + 1) * -- sizeof(struct msdos_dir_entry)); -- -- memset(&dir->table[dir->entries], 0, sizeof(struct msdos_dir_entry)); -- } -- -- dir->entries++; -- } -+ /* 2 entries, one extra for long filename */ -+ if (!dir->table) -+ dir->table = -+ (struct msdos_dir_entry *) malloc(2 * sizeof(struct msdos_dir_entry)); -+ else -+ dir->table = -+ (struct msdos_dir_entry *) realloc(dir->table, 2 * (dir->entries + 1) * -+ sizeof(struct msdos_dir_entry)); -+ if (!dir->table) -+ printf("Error - realloc failed\n"); -+ memset(&dir->table[dir->entries], 0, 2 * sizeof(struct msdos_dir_entry)); -+ dir->entries += 2; - } - - infile = open(filename, O_RDONLY, 0); -@@ -1611,13 +1599,13 @@ static int add_file(char *filename, stru - return -1; - } - -- printf("ADD %s\n", filename); -+ printf("ADD FILE %s\n", filename); - - /* Grab the basename of the file */ - base = basename(filename); - -- /* Extract out the 8.3 name */ -- copy_filename(base, name83, ext83); -+ /* convert for dos fat structure */ -+ copy_filename(base, dos_name); - - /* Make an extended name slot */ - -@@ -1629,12 +1617,9 @@ static int add_file(char *filename, stru - - slot->alias_checksum = 0; - -- for(i = 0; i < 8; i++) -- slot->alias_checksum = 
(((slot->alias_checksum&1)<<7)|((slot->alias_checksum&0xfe)>>1)) + name83[i]; -+ for(i = 0; i < MSDOS_NAME; i++) -+ slot->alias_checksum = (((slot->alias_checksum&1)<<7)|((slot->alias_checksum&0xfe)>>1)) + dos_name[i]; - -- for(i = 0; i < 3; i++) -- slot->alias_checksum = (((slot->alias_checksum&1)<<7)|((slot->alias_checksum&0xfe)>>1)) + ext83[i]; -- - p = base; - - copy_name(slot->name0_4, 10, &p); -@@ -1645,8 +1630,7 @@ static int add_file(char *filename, stru - /* Get the entry from the root filesytem */ - entry = &dir->table[dir->count++]; - -- strncpy(entry->name, name83, 8); -- strncpy(entry->ext, ext83, 3); -+ strncpy(entry->name, dos_name, MSDOS_NAME); - - - /* If the user has it read only, then add read only to the incoming -@@ -1665,7 +1649,7 @@ static int add_file(char *filename, stru - ((ctime->tm_mon+1) << 5) + - ((ctime->tm_year-80) << 9))); - -- entry->ctime_ms = 0; -+ entry->ctime_cs = 0; - entry->ctime = entry->time; - entry->cdate = entry->date; - entry->adate = entry->date; -@@ -1711,6 +1695,7 @@ static int add_file(char *filename, stru - - exit_add: - if (infile) close(infile); -+ return 0; - } - - /* Add a new directory to the specified directory entry, and in turn populate -@@ -1727,10 +1712,18 @@ static void add_directory(char *filename - struct dirent *dentry = 0; - int remain; - char *data; -+ char *base; -+ char dos_name[MSDOS_NAME+1]; -+ struct msdos_dir_slot *slot; -+ int i; -+ char *p; - - /* If the directory doesn't exist */ -- if (!rddir) return; -- -+ if (!rddir) { -+ printf("Error - dir does not exist: %s\n", filename); -+ return; -+ } -+ - if (dir->root) { - if (dir->count == dir->entries) { - printf("Error - too many directory entries\n"); -@@ -1738,28 +1731,58 @@ static void add_directory(char *filename - } - } - else { -- if (dir->count == dir->entries) { -- if (!dir->table) -- dir->table = (struct msdos_dir_entry *) malloc(sizeof(struct msdos_dir_entry)); -- else { -- dir->table = (struct msdos_dir_entry *) realloc(dir->table, (dir->entries + 1) * -- sizeof(struct msdos_dir_entry)); -- -- /* Zero it out to avoid issues */ -- memset(&dir->table[dir->entries], 0, sizeof(struct msdos_dir_entry)); -- } -- dir->entries++; -+ /* 2 entries, one extra for long name of the directory */ -+ if (!dir->table) -+ dir->table = (struct msdos_dir_entry *) malloc(2 * sizeof(struct msdos_dir_entry)); -+ else -+ dir->table = (struct msdos_dir_entry *) realloc(dir->table, 2 * (dir->entries + 1) * -+ sizeof(struct msdos_dir_entry)); -+ if (!dir->table) { -+ printf("Error - memory allocation failed\n"); -+ goto exit_add_dir; - } -+ /* Zero it out to avoid issues */ -+ memset(&dir->table[dir->entries], 0, 2 * sizeof(struct msdos_dir_entry)); -+ dir->entries += 2; - } - -+ printf("ADD DIR %s\n", filename); - /* Now, create a new directory entry for the new directory */ - newdir = (struct dir_entry *) calloc(1, sizeof(struct dir_entry)); -- if (!newdir) goto exit_add_dir; -+ if (!newdir) { -+ printf("Error - calloc failed\n"); -+ goto exit_add_dir; -+ } -+ -+ /* Grab the basename of the file */ -+ base = basename(filename); -+ -+ /* convert for dos structure */ -+ copy_filename(base, dos_name); -+ -+ /* Make an extended name slot */ -+ slot = (struct msdos_dir_slot *) &dir->table[dir->count++]; -+ slot->id = 'A'; -+ slot->attr = 0x0F; -+ slot->reserved = 0; -+ slot->start = 0; -+ -+ slot->alias_checksum = 0; - -+ for (i = 0; i < MSDOS_NAME; i++) -+ slot->alias_checksum = (((slot->alias_checksum&1)<<7)|((slot->alias_checksum&0xfe)>>1)) + dos_name[i]; -+ -+ p = base; -+ -+ 
copy_name(slot->name0_4, 10, &p); -+ copy_name(slot->name5_10, 12, &p); -+ copy_name(slot->name11_12, 4, &p); -+ -+ /* Get the entry from the root filesytem */ - entry = &dir->table[dir->count++]; - -- strncpy(entry->name, basename(filename), sizeof(entry->name)); -- -+ strncpy(entry->name, dos_name, MSDOS_NAME); -+ - entry->attr = ATTR_DIR; - ctime = localtime(&create_time); - -@@ -1770,25 +1793,32 @@ static void add_directory(char *filename - ((ctime->tm_mon+1) << 5) + - ((ctime->tm_year-80) << 9))); - -- entry->ctime_ms = 0; -+ entry->ctime_cs = 0; - entry->ctime = entry->time; - entry->cdate = entry->date; - entry->adate = entry->date; - - /* Now, read the directory */ - -- while((dentry = readdir(rddir))) { -+ -+ while((base[0] != '.') && (dentry = readdir(rddir))) { - struct stat st; - char *buffer; -- -- if (!strcmp(dentry->d_name, ".") || !strcmp(dentry->d_name, "..")) -- continue; - -- /* DOS wouldn't like a typical unix . (dot) file, so we skip those too */ -- if (dentry->d_name[0] == '.') continue; -+ if (dentry->d_name[0] == '.') { -+ /* dos also has . & .. directory entries */ -+ if (! ((!strcmp(dentry->d_name, ".")) || (!strcmp(dentry->d_name, "..")))) { -+ /* ignore other .* files */ -+ printf("Error - File/Dir name is not dos compatible, ignored: %s\n", dentry->d_name); -+ continue; -+ } -+ } - - buffer = malloc(strlen(filename) + strlen(dentry->d_name) + 3); -- if (!buffer) continue; -+ if (!buffer) { -+ printf("Error - malloc failed\n"); -+ goto exit_add_dir; -+ } - - sprintf(buffer, "%s/%s", filename, dentry->d_name); - if (!stat(buffer, &st)) { -@@ -1806,11 +1836,23 @@ static void add_directory(char *filename - /* Now that the entire directory has been written, go ahead and write the directory - entry as well */ - -+ entry->size = 0; /* a directory has zero size */ -+ -+ if (base[0] == '.') { /* . & .. point to parent's cluster */ -+ goto exit_add_dir; -+ } -+ - entry->start = CT_LE_W(last_cluster_written); - entry->starthi = CT_LE_W((last_cluster_written & 0xFFFF0000) >> 16); -- entry->size = newdir->count * sizeof(struct msdos_dir_entry); -+ -+/* . dir start points to parent */ -+ newdir->table[1].start = entry->start; -+/* .. dir points to parent of parent*/ -+/* .. dir start is not set yet, would need more changes to the code, -+ * but dosfsck can fix these .. entry start pointers correctly */ -+ -+ remain = newdir->count * sizeof(struct msdos_dir_entry); - -- remain = entry->size; - data = (char *) newdir->table; - - while(remain) { -@@ -1858,6 +1900,7 @@ static void add_root_directory(char *dir - - if (!newdir) { - closedir(dir); -+ printf("Error - calloc failed!\n"); - return; - } - -@@ -1877,7 +1920,10 @@ static void add_root_directory(char *dir - if (entry->d_name[0] == '.') continue; - - buffer = malloc(strlen(dirname) + strlen(entry->d_name) + 3); -- if (!buffer) continue; -+ if (!buffer) { -+ printf("Error - malloc failed!\n"); -+ continue; -+ } - - sprintf(buffer, "%s/%s", dirname, entry->d_name); - if (!stat(buffer, &st)) { -@@ -2245,6 +2291,9 @@ main (int argc, char **argv) - if (check && listfile) /* Auto and specified bad block handling are mutually */ - die ("-c and -l are incompatible"); /* exclusive of each other! */ - -+ if (dirname && (size_fat == 32)) -+ die ("-d is incompatible with FAT32"); -+ - if (!create) { - check_mount (device_name); /* Is the device already mounted? */ - dev = open (device_name, O_RDWR); /* Is it a suitable device to build the FS on? 
*/ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/include-linux-types.patch b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/include-linux-types.patch deleted file mode 100644 index ab5c8cf8c..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/include-linux-types.patch +++ /dev/null @@ -1,22 +0,0 @@ -mkdsofs is using types of the style __u8, which it gets with some -versions of libc headers via linux/hdreg.h including asm/types.h. -Newer version of fedora (at least) have a hdreg.h whichdoes not -include asm/types.h. To work around this patch mkdosfs.c to explicity -include linux/types.h which will in turn pull in asm/types.h which -defines these variables. - -Upstream-Status: Inappropriate [licensing] -We're tracking an old release of dosfstools due to licensing issues. - -Signed-off-by: Scott Garman - ---- dosfstools-2.10/mkdosfs/mkdosfs.c~ 2006-07-12 18:46:21.000000000 +1000 -+++ dosfstools-2.10/mkdosfs/mkdosfs.c 2006-07-12 18:46:21.000000000 +1000 -@@ -60,6 +60,7 @@ - #include "../version.h" - - #include -+#include - #include - #include - #include diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/mkdosfs-bootcode.patch b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/mkdosfs-bootcode.patch deleted file mode 100644 index ae21bee78..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/mkdosfs-bootcode.patch +++ /dev/null @@ -1,241 +0,0 @@ -Add option to read in bootcode from a file. - -Upstream-Status: Inappropriate [licensing] -We're tracking an old release of dosfstools due to licensing issues. - -Signed-off-by: Scott Garman - -Index: dosfstools-2.11/mkdosfs/ChangeLog -=================================================================== ---- dosfstools-2.11.orig/mkdosfs/ChangeLog 1997-06-18 10:09:38.000000000 +0000 -+++ dosfstools-2.11/mkdosfs/ChangeLog 2011-12-06 12:14:23.634011558 +0000 -@@ -1,3 +1,14 @@ -+19th June 2003 Sam Bingner (sam@bingner.com) -+ -+ Added option to read in bootcode from a file so that if you have -+ for example Windows 2000 boot code, you can have it write that -+ as the bootcode. This is a dump of the behinning of a partition -+ generally 512 bytes, but can be up to reserved sectors*512 bytes. -+ Also writes 0x80 as the BIOS drive number if we are formatting a -+ hard drive, and sets the number of hidden sectors to be the -+ number of sectors in one track. These were required so that DOS -+ could boot using the bootcode. -+ - 28th January 1995 H. Peter Anvin (hpa@yggdrasil.com) - - Better algorithm to select cluster sizes on large filesystems. -Index: dosfstools-2.11/mkdosfs/mkdosfs.8 -=================================================================== ---- dosfstools-2.11.orig/mkdosfs/mkdosfs.8 2004-02-25 19:36:07.000000000 +0000 -+++ dosfstools-2.11/mkdosfs/mkdosfs.8 2011-12-06 12:19:54.777888434 +0000 -@@ -44,6 +44,10 @@ - .I message-file - ] - [ -+.B \-B -+.I bootcode-file -+] -+[ - .B \-n - .I volume-name - ] -@@ -165,6 +169,18 @@ - carriage return-line feed combinations, and tabs have been expanded. - If the filename is a hyphen (-), the text is taken from standard input. - .TP -+.BI \-B " bootcode-file" -+Uses boot machine code from file "file". On any thing other than FAT32, -+this only writes the first 3 bytes, and 480 bytes from offset 3Eh. On -+FAT32, this writes the first 3 bytes, 420 bytes from offset 5Ah to both -+primary and backup boot sectors. 
Also writes all other reserved sectors -+excluding the sectors following boot sectors (usually sector 2 and 7). -+Does not require that the input file be as large as reserved_sectors*512. -+To make a FAT32 partition bootable, you will need at least the first -+13 sectors (6656 bytes). You can also specify a partition as the argument -+to clone the boot code from that partition. -+i.e mkdosfs -B /dev/sda1 /dev/sda1 -+.TP - .BI \-n " volume-name" - Sets the volume name (label) of the filesystem. The volume name can - be up to 11 characters long. The default is no label. -@@ -198,8 +214,9 @@ - simply will not support it ;) - .SH AUTHOR - Dave Hudson - ; modified by Peter Anvin --. Fixes and additions by Roman Hodek -- for Debian/GNU Linux. -+ and Sam Bingner . Fixes and -+additions by Roman Hodek -+for Debian/GNU Linux. - .SH ACKNOWLEDGEMENTS - .B mkdosfs - is based on code from -Index: dosfstools-2.11/mkdosfs/mkdosfs.c -=================================================================== ---- dosfstools-2.11.orig/mkdosfs/mkdosfs.c 2005-03-12 16:12:16.000000000 +0000 -+++ dosfstools-2.11/mkdosfs/mkdosfs.c 2011-12-06 12:27:55.121886076 +0000 -@@ -24,6 +24,12 @@ - - New options -A, -S, -C - - Support for filesystems > 2GB - - FAT32 support -+ -+ Fixes/additions June 2003 by Sam Bingner -+ : -+ - Add -B option to read in bootcode from a file -+ - Write BIOS drive number so that FS can properly boot -+ - Set number of hidden sectors before boot code to be one track - - Copying: Copyright 1993, 1994 David Hudson (dave@humbug.demon.co.uk) - -@@ -153,6 +159,8 @@ - #define FAT_BAD 0x0ffffff7 - - #define MSDOS_EXT_SIGN 0x29 /* extended boot sector signature */ -+#define HD_DRIVE_NUMBER 0x80 /* Boot off first hard drive */ -+#define FD_DRIVE_NUMBER 0x00 /* Boot off first floppy drive */ - #define MSDOS_FAT12_SIGN "FAT12 " /* FAT12 filesystem signature */ - #define MSDOS_FAT16_SIGN "FAT16 " /* FAT16 filesystem signature */ - #define MSDOS_FAT32_SIGN "FAT32 " /* FAT32 filesystem signature */ -@@ -175,6 +183,8 @@ - #define BOOTCODE_SIZE 448 - #define BOOTCODE_FAT32_SIZE 420 - -+#define MAX_RESERVED 0xFFFF -+ - /* __attribute__ ((packed)) is used on all structures to make gcc ignore any - * alignments */ - -@@ -202,7 +212,7 @@ - __u16 fat_length; /* sectors/FAT */ - __u16 secs_track; /* sectors per track */ - __u16 heads; /* number of heads */ -- __u32 hidden; /* hidden sectors (unused) */ -+ __u32 hidden; /* hidden sectors (one track) */ - __u32 total_sect; /* number of sectors (if sectors == 0) */ - union { - struct { -@@ -285,6 +295,8 @@ - - /* Global variables - the root of all evil :-) - see these and weep! */ - -+static char *template_boot_code; /* Variable to store a full template boot sector in */ -+static int use_template = 0; - static char *program_name = "mkdosfs"; /* Name of the program */ - static char *device_name = NULL; /* Name of the device on which to create the filesystem */ - static int atari_format = 0; /* Use Atari variation of MS-DOS FS format */ -@@ -837,6 +849,12 @@ - vi->volume_id[2] = (unsigned char) ((volume_id & 0x00ff0000) >> 16); - vi->volume_id[3] = (unsigned char) (volume_id >> 24); - } -+ if (bs.media == 0xf8) { -+ vi->drive_number = HD_DRIVE_NUMBER; /* Set bios drive number to 80h */ -+ } -+ else { -+ vi->drive_number = FD_DRIVE_NUMBER; /* Set bios drive number to 00h */ -+ } - - if (!atari_format) { - memcpy(vi->volume_label, volume_name, 11); -@@ -1362,6 +1380,32 @@ - * dir area on FAT12/16, and the first cluster on FAT32. 
*/ - writebuf( (char *) root_dir, size_root_dir, "root directory" ); - -+ if (use_template == 1) { -+ /* dupe template into reserved sectors */ -+ seekto( 0, "Start of partition" ); -+ if (size_fat == 32) { -+ writebuf( template_boot_code, 3, "backup jmpBoot" ); -+ seekto( 0x5a, "sector 1 boot area" ); -+ writebuf( template_boot_code+0x5a, 420, "sector 1 boot area" ); -+ seekto( 512*2, "third sector" ); -+ if (backup_boot != 0) { -+ writebuf( template_boot_code+512*2, backup_boot*sector_size - 512*2, "data to backup boot" ); -+ seekto( backup_boot*sector_size, "backup boot sector" ); -+ writebuf( template_boot_code, 3, "backup jmpBoot" ); -+ seekto( backup_boot*sector_size+0x5a, "backup boot sector boot area" ); -+ writebuf( template_boot_code+0x5a, 420, "backup boot sector boot area" ); -+ seekto( (backup_boot+2)*sector_size, "sector following backup code" ); -+ writebuf( template_boot_code+(backup_boot+2)*sector_size, (reserved_sectors-backup_boot-2)*512, "remaining data" ); -+ } else { -+ writebuf( template_boot_code+512*2, (reserved_sectors-2)*512, "remaining data" ); -+ } -+ } else { -+ writebuf( template_boot_code, 3, "jmpBoot" ); -+ seekto( 0x3e, "sector 1 boot area" ); -+ writebuf( template_boot_code+0x3e, 448, "boot code" ); -+ } -+ } -+ - if (blank_sector) free( blank_sector ); - if (info_sector) free( info_sector ); - free (root_dir); /* Free up the root directory space from setup_tables */ -@@ -1376,7 +1420,7 @@ - { - fatal_error("\ - Usage: mkdosfs [-A] [-c] [-C] [-v] [-I] [-l bad-block-file] [-b backup-boot-sector]\n\ -- [-m boot-msg-file] [-n volume-name] [-i volume-id]\n\ -+ [-m boot-msg-file] [-n volume-name] [-i volume-id] [-B bootcode]\n\ - [-s sectors-per-cluster] [-S logical-sector-size] [-f number-of-FATs]\n\ - [-h hidden-sectors] [-F fat-size] [-r root-dir-entries] [-R reserved-sectors]\n\ - /dev/name [blocks]\n"); -@@ -1439,7 +1483,7 @@ - printf ("%s " VERSION " (" VERSION_DATE ")\n", - program_name); - -- while ((c = getopt (argc, argv, "AbcCf:F:Ii:l:m:n:r:R:s:S:h:v")) != EOF) -+ while ((c = getopt (argc, argv, "AbcCf:F:Ii:l:m:n:r:R:s:S:v:B:")) != EOF) - /* Scan the command line for options */ - switch (c) - { -@@ -1509,6 +1553,51 @@ - listfile = optarg; - break; - -+ case 'B': /* B : read in bootcode */ -+ if ( strcmp(optarg, "-") ) -+ { -+ msgfile = fopen(optarg, "r"); -+ if ( !msgfile ) -+ perror(optarg); -+ } -+ else -+ msgfile = stdin; -+ -+ if ( msgfile ) -+ { -+ if (!(template_boot_code = malloc( MAX_RESERVED ))) -+ die( "Out of memory" ); -+ /* The template boot sector including reserved must not be > 65535 */ -+ use_template = 1; -+ i = 0; -+ do -+ { -+ ch = getc(msgfile); -+ switch (ch) -+ { -+ case EOF: -+ break; -+ -+ default: -+ template_boot_code[i++] = ch; /* Store character */ -+ break; -+ } -+ } -+ while ( ch != EOF && i < MAX_RESERVED ); -+ ch = getc(msgfile); /* find out if we're at EOF */ -+ -+ /* Fill up with zeros */ -+ while( i < MAX_RESERVED ) -+ template_boot_code[i++] = '\0'; -+ -+ if ( ch != EOF ) -+ printf ("Warning: template too long; truncated after %d bytes\n", i); -+ -+ if ( msgfile != stdin ) -+ fclose(msgfile); -+ } -+ break; -+ - case 'm': /* m : Set boot message */ - if ( strcmp(optarg, "-") ) - { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/mkdosfs-dir.patch b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/mkdosfs-dir.patch deleted file mode 100644 index 3ba4711d1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/mkdosfs-dir.patch +++ 
/dev/null @@ -1,639 +0,0 @@ -Add -d support to populate the image. - -Upstream-Status: Inappropriate [licensing] -We're tracking an old release of dosfstools due to licensing issues. - -Signed-off-by: Scott Garman - -Index: dosfstools-2.11/mkdosfs/mkdosfs.c -=================================================================== ---- dosfstools-2.11.orig/mkdosfs/mkdosfs.c 2011-12-06 12:27:55.000000000 +0000 -+++ dosfstools-2.11/mkdosfs/mkdosfs.c 2011-12-06 12:37:13.445950703 +0000 -@@ -18,6 +18,10 @@ - as a rule), and not the block. For example the boot block does not - occupy a full cluster. - -+ June 2004 - Jordan Crouse (info.linux@amd.com) -+ Added -d support to populate the image -+ Copyright (C) 2004, Advanced Micro Devices, All Rights Reserved -+ - Fixes/additions May 1998 by Roman Hodek - : - - Atari format support -@@ -71,6 +75,8 @@ - #include - #include - #include -+#include -+#include - - #include - #if LINUX_VERSION_CODE >= KERNEL_VERSION(2, 6, 0) -@@ -110,6 +116,8 @@ - * sufficient (or even better :) for 64 bit offsets in the meantime */ - #define llseek lseek - -+#define ROUND_UP(value, divisor) (value + (divisor - (value % divisor))) / divisor -+ - /* Constant definitions */ - - #define TRUE 1 /* Boolean constants */ -@@ -149,7 +157,6 @@ - #define ATTR_VOLUME 8 /* volume label */ - #define ATTR_DIR 16 /* directory */ - #define ATTR_ARCH 32 /* archived */ -- - #define ATTR_NONE 0 /* no attribute bits */ - #define ATTR_UNUSED (ATTR_VOLUME | ATTR_ARCH | ATTR_SYS | ATTR_HIDDEN) - /* attribute bits that are copied "as is" */ -@@ -245,6 +252,19 @@ - __u32 reserved2[4]; - }; - -+/* This stores up to 13 chars of the name */ -+ -+struct msdos_dir_slot { -+ __u8 id; /* sequence number for slot */ -+ __u8 name0_4[10]; /* first 5 characters in name */ -+ __u8 attr; /* attribute byte */ -+ __u8 reserved; /* always 0 */ -+ __u8 alias_checksum; /* checksum for 8.3 alias */ -+ __u8 name5_10[12]; /* 6 more characters in name */ -+ __u16 start; /* starting cluster number, 0 in long slots */ -+ __u8 name11_12[4]; /* last 2 characters in name */ -+}; -+ - struct msdos_dir_entry - { - char name[8], ext[3]; /* name and extension */ -@@ -293,6 +313,15 @@ - - #define MESSAGE_OFFSET 29 /* Offset of message in above code */ - -+/* Special structure to keep track of directories as we add them for the -d option */ -+ -+struct dir_entry { -+ int root; /* Specifies if this is the root dir or not */ -+ int count; /* Number of items in the table */ -+ int entries; /* Number of entries in the table */ -+ struct msdos_dir_entry *table; /* Pointer to the entry table */ -+}; -+ - /* Global variables - the root of all evil :-) - see these and weep! 
*/ - - static char *template_boot_code; /* Variable to store a full template boot sector in */ -@@ -326,6 +355,9 @@ - static int size_root_dir; /* Size of the root directory in bytes */ - static int sectors_per_cluster = 0; /* Number of sectors per disk cluster */ - static int root_dir_entries = 0; /* Number of root directory entries */ -+static int root_dir_num_entries = 0; -+static int last_cluster_written = 0; -+ - static char *blank_sector; /* Blank sector - all zeros */ - static int hidden_sectors = 0; /* Number of hidden sectors */ - -@@ -399,7 +431,6 @@ - } - } - -- - /* Mark a specified sector as having a particular value in it's FAT entry */ - - static void -@@ -1266,6 +1297,9 @@ - die ("unable to allocate space for root directory in memory"); - } - -+ -+ last_cluster_written = 2; -+ - memset(root_dir, 0, size_root_dir); - if ( memcmp(volume_name, " ", 11) ) - { -@@ -1314,11 +1348,11 @@ - } - - if (!(blank_sector = malloc( sector_size ))) -- die( "Out of memory" ); -+ die( "Out of memory" ); -+ - memset(blank_sector, 0, sector_size); - } -- -- -+ - /* Write the new filesystem's data tables to wherever they're going to end up! */ - - #define error(str) \ -@@ -1340,7 +1374,7 @@ - do { \ - int __size = (size); \ - if (write (dev, buf, __size) != __size) \ -- error ("failed whilst writing " errstr); \ -+ error ("failed whilst writing " errstr); \ - } while(0) - - -@@ -1412,6 +1446,452 @@ - free (fat); /* Free up the fat table space reserved during setup_tables */ - } - -+/* Add a file to the specified directory entry, and also write it into the image */ -+ -+static void copy_filename(char *filename, char *base, char *ext) { -+ -+ char *ch = filename; -+ int i, len; -+ -+ memset(base, 0x20, 8); -+ memset(ext, 0x20, 3); -+ -+ for(len = 0 ; *ch && *ch != '.'; ch++) { -+ base[len++] = toupper(*ch); -+ if (len == 8) break; -+ } -+ -+ for ( ; *ch && *ch != '.'; ch++); -+ if (*ch) ch++; -+ -+ for(len = 0 ; *ch; ch++) { -+ ext[len++] = toupper(*ch); -+ if (len == 3) break; -+ } -+} -+ -+/* Check for an .attrib. file, and read the attributes therein */ -+ -+/* We are going to be pretty pedantic about this. The file needs 3 -+ bytes at the beginning, the attributes are listed in this order: -+ -+ (H)idden|(S)ystem|(A)rchived -+ -+ A capital HSA means to enable it, anything else will disable it -+ (I recommend a '-') The unix user attributes will still be used -+ for write access. 
-+ -+ For example, to enable system file access for ldlinux.sys, write -+ the following to .attrib.ldlinux.sys: -S- -+*/ -+ -+unsigned char check_attrib_file(char *dir, char *filename) { -+ -+ char attrib[4] = { '-', '-', '-' }; -+ unsigned char *buffer = 0; -+ int ret = ATTR_NONE; -+ int fd = -1; -+ -+ buffer = (char *) calloc(1, strlen(dir) + strlen(filename) + 10); -+ if (!buffer) return ATTR_NONE; -+ -+ sprintf(buffer, "%s/.attrib.%s", dir, filename); -+ -+ if (access(buffer, R_OK)) -+ goto exit_attrib; -+ -+ if ((fd = open(buffer, O_RDONLY, 0)) < 0) -+ goto exit_attrib; -+ -+ if (read(fd, attrib, 3) < 0) -+ goto exit_attrib; -+ -+ if (attrib[0] == 'H') ret |= ATTR_HIDDEN; -+ if (attrib[1] == 'S') ret |= ATTR_SYS; -+ if (attrib[2] == 'A') ret |= ATTR_ARCH; -+ -+ printf("%s: Setting atrribute %x\n", filename, ret); -+ -+ exit_attrib: -+ if (fd >= 0) close(fd); -+ if (buffer) free(buffer); -+ -+ return ret; -+} -+ -+static void copy_name(char *buffer, int size, char **pointer) { -+ int i; -+ -+ for(i = 0; i < size; i += 2) { -+ if (*pointer) { -+ buffer[i] = **pointer; -+ buffer[i + 1] = 0x00; -+ *pointer = **pointer ? *pointer + 1 : 0; -+ } -+ else { -+ buffer[i] = 0xFF; -+ buffer[i + 1] = 0xFF; -+ } -+ } -+} -+ -+static int add_file(char *filename, struct dir_entry *dir, unsigned char attr) -+{ -+ struct stat stat; -+ struct msdos_dir_entry *entry; -+ int infile = 0; -+ int sectors, clusters; -+ struct tm *ctime; -+ int c, s; -+ int ptr; -+ char *buffer, *base; -+ int start; -+ int usedsec, totalsec; -+ -+ char name83[8], ext83[3]; -+ -+ struct msdos_dir_slot *slot; -+ int i; -+ char *p; -+ -+ /* The root directory is static, everything else grows as needed */ -+ -+ if (dir->root) { -+ if (dir->count == dir->entries) { -+ printf("Error - too many directory entries\n"); -+ } -+ } -+ else { -+ if (dir->count == dir->entries) { -+ if (!dir->table) -+ dir->table = -+ (struct msdos_dir_entry *) malloc(sizeof(struct msdos_dir_entry)); -+ else { -+ dir->table = -+ (struct msdos_dir_entry *) realloc(dir->table, (dir->entries + 1) * -+ sizeof(struct msdos_dir_entry)); -+ -+ memset(&dir->table[dir->entries], 0, sizeof(struct msdos_dir_entry)); -+ } -+ -+ dir->entries++; -+ } -+ } -+ -+ infile = open(filename, O_RDONLY, 0); -+ if (!infile) return; -+ -+ if (fstat(infile, &stat)) -+ goto exit_add; -+ -+ if (S_ISCHR(stat.st_mode) ||S_ISBLK(stat.st_mode) || -+ S_ISFIFO(stat.st_mode) || S_ISLNK(stat.st_mode)) { -+ printf("Error - cannot create a special file in a FATFS\n"); -+ goto exit_add; -+ } -+ -+ /* FIXME: This isn't very pretty */ -+ -+ usedsec = start_data_sector + (size_root_dir / sector_size) + -+ (last_cluster_written * bs.cluster_size); -+ -+ totalsec = blocks * BLOCK_SIZE / sector_size; -+ -+ /* Figure out how many sectors / clustors the file requires */ -+ -+ sectors = ROUND_UP(stat.st_size, sector_size); -+ clusters = ROUND_UP(sectors, (int) bs.cluster_size); -+ -+ if (usedsec + sectors > totalsec) { -+ printf("Error - %s is too big (%d vs %d)\n", filename, sectors, totalsec - usedsec); -+ close(infile); -+ return -1; -+ } -+ -+ printf("ADD %s\n", filename); -+ -+ /* Grab the basename of the file */ -+ base = basename(filename); -+ -+ /* Extract out the 8.3 name */ -+ copy_filename(base, name83, ext83); -+ -+ /* Make an extended name slot */ -+ -+ slot = (struct msdos_dir_slot *) &dir->table[dir->count++]; -+ slot->id = 'A'; -+ slot->attr = 0x0F; -+ slot->reserved = 0; -+ slot->start = 0; -+ -+ slot->alias_checksum = 0; -+ -+ for(i = 0; i < 8; i++) -+ slot->alias_checksum = 
(((slot->alias_checksum&1)<<7)|((slot->alias_checksum&0xfe)>>1)) + name83[i]; -+ -+ for(i = 0; i < 3; i++) -+ slot->alias_checksum = (((slot->alias_checksum&1)<<7)|((slot->alias_checksum&0xfe)>>1)) + ext83[i]; -+ -+ p = base; -+ -+ copy_name(slot->name0_4, 10, &p); -+ copy_name(slot->name5_10, 12, &p); -+ copy_name(slot->name11_12, 4, &p); -+ -+ -+ /* Get the entry from the root filesytem */ -+ entry = &dir->table[dir->count++]; -+ -+ strncpy(entry->name, name83, 8); -+ strncpy(entry->ext, ext83, 3); -+ -+ -+ /* If the user has it read only, then add read only to the incoming -+ attribute settings */ -+ -+ if (!(stat.st_mode & S_IWUSR)) attr |= ATTR_RO; -+ entry->attr = attr; -+ -+ /* Set the access time on the file */ -+ ctime = localtime(&create_time); -+ -+ entry->time = CT_LE_W((unsigned short)((ctime->tm_sec >> 1) + -+ (ctime->tm_min << 5) + (ctime->tm_hour << 11))); -+ -+ entry->date = CT_LE_W((unsigned short)(ctime->tm_mday + -+ ((ctime->tm_mon+1) << 5) + -+ ((ctime->tm_year-80) << 9))); -+ -+ entry->ctime_ms = 0; -+ entry->ctime = entry->time; -+ entry->cdate = entry->date; -+ entry->adate = entry->date; -+ entry->size = stat.st_size; -+ -+ start = last_cluster_written; -+ -+ entry->start = CT_LE_W(start); /* start sector */ -+ entry->starthi = CT_LE_W((start & 0xFFFF0000) >> 16); /* High start sector (for FAT32) */ -+ -+ /* We mark all of the clusters we use in the FAT */ -+ -+ for(c = 0; c < clusters; c++ ) { -+ int free; -+ int next = c == (clusters - 1) ? FAT_EOF : start + c + 1; -+ mark_FAT_cluster(start + c, next); -+ last_cluster_written++; -+ } -+ -+ /* This confused me too - cluster 2 starts after the -+ root directory data - search me as to why */ -+ -+ ptr = (start_data_sector * sector_size) + size_root_dir; -+ ptr += (start - 2) * bs.cluster_size * sector_size; -+ -+ buffer = (char *) malloc(sector_size); -+ -+ if (!buffer) { -+ printf("Error - couldn't allocate memory\n"); -+ goto exit_add; -+ } -+ -+ /* Write the file into the file block */ -+ -+ seekto(ptr, "datafile"); -+ -+ while(1) { -+ int size = read(infile, buffer, sector_size); -+ if (size <= 0) break; -+ -+ writebuf(buffer, size, "data"); -+ } -+ -+ exit_add: -+ if (infile) close(infile); -+} -+ -+/* Add a new directory to the specified directory entry, and in turn populate -+ it with its own files */ -+ -+/* FIXME: This should check to make sure there is enough size to add itself */ -+ -+static void add_directory(char *filename, struct dir_entry *dir) { -+ -+ struct dir_entry *newdir = 0; -+ struct msdos_dir_entry *entry; -+ struct tm *ctime; -+ DIR *rddir = opendir(filename); -+ struct dirent *dentry = 0; -+ int remain; -+ char *data; -+ -+ /* If the directory doesn't exist */ -+ if (!rddir) return; -+ -+ if (dir->root) { -+ if (dir->count == dir->entries) { -+ printf("Error - too many directory entries\n"); -+ goto exit_add_dir; -+ } -+ } -+ else { -+ if (dir->count == dir->entries) { -+ if (!dir->table) -+ dir->table = (struct msdos_dir_entry *) malloc(sizeof(struct msdos_dir_entry)); -+ else { -+ dir->table = (struct msdos_dir_entry *) realloc(dir->table, (dir->entries + 1) * -+ sizeof(struct msdos_dir_entry)); -+ -+ /* Zero it out to avoid issues */ -+ memset(&dir->table[dir->entries], 0, sizeof(struct msdos_dir_entry)); -+ } -+ dir->entries++; -+ } -+ } -+ -+ /* Now, create a new directory entry for the new directory */ -+ newdir = (struct dir_entry *) calloc(1, sizeof(struct dir_entry)); -+ if (!newdir) goto exit_add_dir; -+ -+ entry = &dir->table[dir->count++]; -+ -+ strncpy(entry->name, 
basename(filename), sizeof(entry->name)); -+ -+ entry->attr = ATTR_DIR; -+ ctime = localtime(&create_time); -+ -+ entry->time = CT_LE_W((unsigned short)((ctime->tm_sec >> 1) + -+ (ctime->tm_min << 5) + (ctime->tm_hour << 11))); -+ -+ entry->date = CT_LE_W((unsigned short)(ctime->tm_mday + -+ ((ctime->tm_mon+1) << 5) + -+ ((ctime->tm_year-80) << 9))); -+ -+ entry->ctime_ms = 0; -+ entry->ctime = entry->time; -+ entry->cdate = entry->date; -+ entry->adate = entry->date; -+ -+ /* Now, read the directory */ -+ -+ while((dentry = readdir(rddir))) { -+ struct stat st; -+ char *buffer; -+ -+ if (!strcmp(dentry->d_name, ".") || !strcmp(dentry->d_name, "..")) -+ continue; -+ -+ /* DOS wouldn't like a typical unix . (dot) file, so we skip those too */ -+ if (dentry->d_name[0] == '.') continue; -+ -+ buffer = malloc(strlen(filename) + strlen(dentry->d_name) + 3); -+ if (!buffer) continue; -+ -+ sprintf(buffer, "%s/%s", filename, dentry->d_name); -+ if (!stat(buffer, &st)) { -+ if (S_ISDIR(st.st_mode)) -+ add_directory(buffer, newdir); -+ else if (S_ISREG(st.st_mode)) { -+ unsigned char attrib = check_attrib_file(filename, dentry->d_name); -+ add_file(buffer, newdir, attrib); -+ } -+ } -+ -+ free(buffer); -+ } -+ -+ /* Now that the entire directory has been written, go ahead and write the directory -+ entry as well */ -+ -+ entry->start = CT_LE_W(last_cluster_written); -+ entry->starthi = CT_LE_W((last_cluster_written & 0xFFFF0000) >> 16); -+ entry->size = newdir->count * sizeof(struct msdos_dir_entry); -+ -+ remain = entry->size; -+ data = (char *) newdir->table; -+ -+ while(remain) { -+ int size = -+ remain > bs.cluster_size * sector_size ? bs.cluster_size * sector_size : remain; -+ -+ int pos = (start_data_sector * sector_size) + size_root_dir; -+ pos += (last_cluster_written - 2) * bs.cluster_size * sector_size; -+ -+ seekto(pos, "add_dir"); -+ writebuf(data, size, "add_dir"); -+ -+ remain -= size; -+ data += size; -+ -+ mark_FAT_cluster(last_cluster_written, remain ? last_cluster_written + 1 : FAT_EOF); -+ last_cluster_written++; -+ } -+ -+ exit_add_dir: -+ if (rddir) closedir(rddir); -+ if (newdir->table) free(newdir->table); -+ if (newdir) free(newdir); -+} -+ -+/* Given a directory, add all the files and directories to the root directory of the -+ image. -+*/ -+ -+static void add_root_directory(char *dirname) -+{ -+ DIR *dir = opendir(dirname); -+ struct dirent *entry = 0; -+ struct dir_entry *newdir = 0; -+ -+ if (!dir) { -+ printf("Error - directory %s does not exist\n", dirname); -+ return; -+ } -+ -+ /* Create the root directory structure - this is a bit different then -+ above, because the table already exists, we just refer to it. */ -+ -+ newdir = (struct dir_entry *) calloc(1,sizeof(struct dir_entry)); -+ -+ if (!newdir) { -+ closedir(dir); -+ return; -+ } -+ -+ newdir->entries = root_dir_entries; -+ newdir->root = 1; -+ newdir->count = 0; -+ newdir->table = root_dir; -+ -+ while((entry = readdir(dir))) { -+ struct stat st; -+ char *buffer; -+ -+ if (!strcmp(entry->d_name, ".") || !strcmp(entry->d_name, "..")) -+ continue; -+ -+ /* DOS wouldn't like a typical unix . 
(dot) file, so we skip those too */ -+ if (entry->d_name[0] == '.') continue; -+ -+ buffer = malloc(strlen(dirname) + strlen(entry->d_name) + 3); -+ if (!buffer) continue; -+ -+ sprintf(buffer, "%s/%s", dirname, entry->d_name); -+ if (!stat(buffer, &st)) { -+ if (S_ISDIR(st.st_mode)) -+ add_directory(buffer, newdir); -+ else if (S_ISREG(st.st_mode)) { -+ unsigned char attrib = check_attrib_file(dirname, entry->d_name); -+ add_file(buffer, newdir, attrib); -+ } -+ } -+ -+ free(buffer); -+ } -+ -+ closedir(dir); -+ if (newdir) free(newdir); -+} - - /* Report the command usage and return a failure error code */ - -@@ -1423,7 +1903,7 @@ - [-m boot-msg-file] [-n volume-name] [-i volume-id] [-B bootcode]\n\ - [-s sectors-per-cluster] [-S logical-sector-size] [-f number-of-FATs]\n\ - [-h hidden-sectors] [-F fat-size] [-r root-dir-entries] [-R reserved-sectors]\n\ -- /dev/name [blocks]\n"); -+ [-d directory] /dev/name [blocks]\n"); - } - - /* -@@ -1463,6 +1943,8 @@ - int c; - char *tmp; - char *listfile = NULL; -+ char *dirname = NULL; -+ - FILE *msgfile; - struct stat statbuf; - int i = 0, pos, ch; -@@ -1483,7 +1965,7 @@ - printf ("%s " VERSION " (" VERSION_DATE ")\n", - program_name); - -- while ((c = getopt (argc, argv, "AbcCf:F:Ii:l:m:n:r:R:s:S:v:B:")) != EOF) -+ while ((c = getopt (argc, argv, "AbcCd:f:F:Ii:l:m:n:r:R:s:S:v:B:")) != EOF) - /* Scan the command line for options */ - switch (c) - { -@@ -1508,6 +1990,10 @@ - create = TRUE; - break; - -+ case 'd': -+ dirname = optarg; -+ break; -+ - case 'f': /* f : Choose number of FATs */ - nr_fats = (int) strtol (optarg, &tmp, 0); - if (*tmp || nr_fats < 1 || nr_fats > 4) -@@ -1811,8 +2297,10 @@ - else if (listfile) - get_list_blocks (listfile); - -- write_tables (); /* Write the file system tables away! */ - -+ if (dirname) add_root_directory(dirname); -+ -+ write_tables (); /* Write the file system tables away! */ - exit (0); /* Terminate with no errors! */ - } - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/msdos_fat12_undefined.patch b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/msdos_fat12_undefined.patch deleted file mode 100644 index 11e8a7594..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/msdos_fat12_undefined.patch +++ /dev/null @@ -1,19 +0,0 @@ -Fix a compilation error due to undefined MSDOS_FAT12. - -Upstream-Status: Inappropriate [licensing] -We're tracking an old release of dosfstools due to licensing issues. - -Signed-off-by: Scott Garman - ---- dosfstools-2.10/dosfsck/boot.c.orig 2004-10-15 08:51:42.394725176 -0600 -+++ dosfstools-2.10/dosfsck/boot.c 2004-10-15 08:49:16.776862456 -0600 -@@ -14,6 +14,9 @@ - #include "io.h" - #include "boot.h" - -+#ifndef MSDOS_FAT12 -+#define MSDOS_FAT12 4084 -+#endif - - #define ROUND_TO_MULTIPLE(n,m) ((n) && (m) ? (n)+(m)-1-((n)-1)%(m) : 0) - /* don't divide by zero */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/nofat32_autoselect.patch b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/nofat32_autoselect.patch deleted file mode 100644 index 848a76b99..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools/nofat32_autoselect.patch +++ /dev/null @@ -1,27 +0,0 @@ -FAT32 appears to be broken when used with the -d option to populate the msdos -image. This disables the FAT32 autoselection code which means we don't get -broken images with the -d option. 
It can still be enabled on the commandline -at the users own risk. This changes us back to the 2.10 version's behaviour -which was known to work well even with large images. - -Upstream-Status: Inappropriate [depends on other patches we apply] - -RP 2011/12/13 - -Index: dosfstools-2.11/mkdosfs/mkdosfs.c -=================================================================== ---- dosfstools-2.11.orig/mkdosfs/mkdosfs.c 2011-12-13 13:54:37.538509391 +0000 -+++ dosfstools-2.11/mkdosfs/mkdosfs.c 2011-12-13 13:55:10.258508631 +0000 -@@ -808,10 +808,12 @@ - bs.media = (char) 0xf8; /* Set up the media descriptor for a hard drive */ - bs.dir_entries[0] = (char) 0; /* Default to 512 entries */ - bs.dir_entries[1] = (char) 2; -+/* - if (!size_fat && blocks*SECTORS_PER_BLOCK > 1064960) { - if (verbose) printf("Auto-selecting FAT32 for large filesystem\n"); - size_fat = 32; - } -+*/ - if (size_fat == 32) { - /* For FAT32, try to do the same as M$'s format command: - * fs size < 256M: 0.5k clusters diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_2.11.bb b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_2.11.bb deleted file mode 100644 index 176504d4f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_2.11.bb +++ /dev/null @@ -1,34 +0,0 @@ -# dosfstools OE build file -# Copyright (C) 2004-2006, Advanced Micro Devices, Inc. All Rights Reserved -# Released under the MIT license (see packages/COPYING) -SUMMARY = "DOS FAT Filesystem Utilities" -HOMEPAGE = "https://github.com/dosfstools/dosfstools" - -SECTION = "base" -LICENSE = "GPLv2" -LIC_FILES_CHKSUM = "file://mkdosfs/COPYING;md5=cbe67f08d6883bff587f615f0cc81aa8" -PR = "r5" - -SRC_URI = "http://pkgs.fedoraproject.org/repo/pkgs/${BPN}/${BP}.src.tar.gz/407d405ade410f7597d364ab5dc8c9f6/${BP}.src.tar.gz \ - file://mkdosfs-bootcode.patch \ - file://mkdosfs-dir.patch \ - file://alignment_hack.patch \ - file://msdos_fat12_undefined.patch \ - file://dosfstools-msdos_fs-types.patch \ - file://include-linux-types.patch \ - file://nofat32_autoselect.patch \ - file://fix_populated_dosfs_creation.patch \ - file://0001-Include-fcntl.h-for-getting-loff_t-definition.patch \ -" - -SRC_URI[md5sum] = "407d405ade410f7597d364ab5dc8c9f6" -SRC_URI[sha256sum] = "0eac6d12388b3d9ed78684529c1b0d9346fa2abbe406c4d4a3eb5a023c98a484" - -CFLAGS += "-D_GNU_SOURCE ${@bb.utils.contains('DISTRO_FEATURES', 'largefile', '-D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64', '', d)}" - -EXTRA_OEMAKE = "CC='${CC}' CFLAGS='${CFLAGS}' LDFLAGS='${LDFLAGS}'" - -do_install () { - oe_runmake "PREFIX=${D}" "SBINDIR=${D}${base_sbindir}" \ - "MANDIR=${D}${mandir}/man8" install -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_4.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_4.0.bb deleted file mode 100644 index e7b1f6109..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_4.0.bb +++ /dev/null @@ -1,25 +0,0 @@ -# dosfstools OE build file -# Copyright (C) 2004-2006, Advanced Micro Devices, Inc. 
All Rights Reserved -# Copyright (C) 2015, Sören Brinkmann All Rights Reserved -# Released under the MIT license (see packages/COPYING) -SUMMARY = "DOS FAT Filesystem Utilities" -HOMEPAGE = "https://github.com/dosfstools/dosfstools" - -SECTION = "base" -LICENSE = "GPLv3" -LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" - -SRC_URI = "https://github.com/dosfstools/dosfstools/releases/download/v${PV}/${BP}.tar.xz \ - " -SRC_URI[md5sum] = "20f8388b99702f276c973d228c7cff45" -SRC_URI[sha256sum] = "9037738953559d1efe04fc5408b6846216cc0138f7f9d32de80b6ec3c35e7daf" - -UPSTREAM_CHECK_URI = "https://github.com/dosfstools/dosfstools/releases" - -inherit autotools pkgconfig - -EXTRA_OECONF = "--without-udev --enable-compat-symlinks" - -CFLAGS += "-D_GNU_SOURCE ${@bb.utils.contains('DISTRO_FEATURES', 'largefile', '-D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64', '', d)}" - -BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_4.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_4.1.bb new file mode 100644 index 000000000..69aa81af4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/dosfstools/dosfstools_4.1.bb @@ -0,0 +1,25 @@ +# dosfstools OE build file +# Copyright (C) 2004-2006, Advanced Micro Devices, Inc. All Rights Reserved +# Copyright (C) 2015, Sören Brinkmann All Rights Reserved +# Released under the MIT license (see packages/COPYING) +SUMMARY = "DOS FAT Filesystem Utilities" +HOMEPAGE = "https://github.com/dosfstools/dosfstools" + +SECTION = "base" +LICENSE = "GPLv3" +LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" + +SRC_URI = "https://github.com/dosfstools/dosfstools/releases/download/v${PV}/${BP}.tar.xz \ + " +SRC_URI[md5sum] = "07a1050db1a898e9a2e03b0c4569c4bd" +SRC_URI[sha256sum] = "e6b2aca70ccc3fe3687365009dd94a2e18e82b688ed4e260e04b7412471cc173" + +UPSTREAM_CHECK_URI = "https://github.com/dosfstools/dosfstools/releases" + +inherit autotools pkgconfig + +EXTRA_OECONF = "--without-udev --enable-compat-symlinks" + +CFLAGS += "-D_GNU_SOURCE -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64" + +BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg.inc b/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg.inc index e8d8a9b4f..fe4732d1d 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg.inc @@ -4,43 +4,39 @@ SECTION = "base" DEPENDS = "zlib bzip2 perl ncurses" DEPENDS_class-native = "bzip2-replacement-native zlib-native virtual/update-alternatives-native gettext-native perl-native" -RDEPENDS_${PN} = "${VIRTUAL-RUNTIME_update-alternatives} xz run-postinsts perl" -RDEPENDS_${PN}_class-native = "xz-native" +RDEPENDS_${PN} = "${VIRTUAL-RUNTIME_update-alternatives} perl" +RDEPENDS_${PN}_class-native = "" UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/d/dpkg/" -inherit autotools gettext perlnative pkgconfig systemd +inherit autotools gettext perlnative pkgconfig systemd perl-version python () { if not bb.utils.contains('DISTRO_FEATURES', 'sysvinit', True, False, d): - pn = d.getVar('PN', True) + pn = d.getVar('PN') d.setVar('SYSTEMD_SERVICE_%s' % (pn), 'dpkg-configure.service') } export PERL = "${bindir}/perl" PERL_class-native = "${STAGING_BINDIR_NATIVE}/perl-native/perl" -export PERL_LIBDIR = "${libdir}/perl" -PERL_LIBDIR_class-native = "${libdir}/perl-native/perl" +export PERL_LIBDIR = "${libdir}/perl/${@get_perl_version(d)}" 
+PERL_LIBDIR_class-native = "${libdir}/perl-native/perl/${@get_perl_version(d)}" EXTRA_OECONF = "\ --disable-dselect \ --enable-start-stop-daemon \ - --with-zlib \ - --with-bz2 \ - --without-liblzma \ - --without-selinux \ + --with-libz \ + --with-libbz2 \ + --without-libselinux \ " +PACKAGECONFIG = "liblzma" +PACKAGECONFIG[liblzma] = "--with-liblzma,--without-liblzma, xz" + EXTRA_OECONF += "TAR=tar" EXTRA_OECONF_append_class-target = " DEB_HOST_ARCH=${DPKG_ARCH}" -do_configure () { - echo >> ${S}/m4/compiler.m4 - sed -i -e 's#PERL_LIBDIR=.*$#PERL_LIBDIR="${libdir}/perl"#' ${S}/configure - autotools_do_configure -} - do_install_append () { if [ "${PN}" = "dpkg-native" ]; then # update-alternatives doesn't have an offline mode @@ -71,7 +67,26 @@ FILES_update-alternatives-dpkg = "${bindir}/update-alternatives ${localstatedir} RPROVIDES_update-alternatives-dpkg += "update-alternatives" PACKAGES += "${PN}-perl" -FILES_${PN}-perl = "${libdir}/perl" +FILES_${PN}-perl = "${libdir}/perl/${@get_perl_version(d)}" + +RDEPENDS_${PN}-perl += "perl-module-carp perl-module-constant \ + perl-module-cwd perl-module-digest \ + perl-module-digest-md5 perl-module-errno \ + perl-module-exporter perl-module-fcntl \ + perl-module-feature perl-module-file-basename \ + perl-module-file-compare perl-module-file-copy \ + perl-module-file-find perl-module-file-path \ + perl-module-file-spec perl-module-file-temp \ + perl-module-list-util perl-module-overload \ + perl-module-parent perl-module-storable \ + perl-module-filehandle perl-module-io \ + perl-module-io-handle perl-module-io-seekable \ + perl-module-posix perl-module-scalar-util \ + perl-module-selectsaver perl-module-symbol \ + perl-module-term-ansicolor perl-module-tie-handle \ + perl-module-tie-hash perl-module-storable \ + perl-module-time-hires perl-module-time-piece \ + perl-module-xsloader" # Split out start-stop-daemon to its own package. Note that it # is installed in a different directory than the one used for diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg/0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch b/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg/0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch index 80504ce8b..93d870443 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg/0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg/0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch @@ -9,27 +9,32 @@ Upstream-Status: Inappropriate [OE Specific] RP 2011/12/07 ALIMON 2016/05/26 - +ALIMON 2017/02/21 --- - src/script.c | 39 +++------------------------------------ - 1 file changed, 3 insertions(+), 36 deletions(-) + src/script.c | 44 +++----------------------------------------- + 1 file changed, 3 insertions(+), 41 deletions(-) diff --git a/src/script.c b/src/script.c -index 3c88be8..ce66a86 100644 +index 2f252ae..768a9d1 100644 --- a/src/script.c +++ b/src/script.c -@@ -97,43 +97,10 @@ setexecute(const char *path, struct stat *stab) +@@ -97,48 +97,10 @@ setexecute(const char *path, struct stat *stab) static const char * maintscript_pre_exec(struct command *cmd) { - const char *admindir = dpkg_db_get_dir(); -- const char *changedir = fc_script_chrootless ? 
instdir : "/"; -- size_t instdirl = strlen(instdir); +- const char *changedir; +- size_t instdirlen = strlen(instdir); +- +- if (instdirlen > 0 && fc_script_chrootless) +- changedir = instdir; +- else +- changedir = "/"; - -- if (*instdir && !fc_script_chrootless) { -- if (strncmp(admindir, instdir, instdirl) != 0) +- if (instdirlen > 0 && !fc_script_chrootless) { +- if (strncmp(admindir, instdir, instdirlen) != 0) - ohshit(_("admindir must be inside instdir for dpkg to work properly")); -- if (setenv("DPKG_ADMINDIR", admindir + instdirl, 1) < 0) +- if (setenv("DPKG_ADMINDIR", admindir + instdirlen, 1) < 0) - ohshite(_("unable to setenv for subprocesses")); - if (setenv("DPKG_ROOT", "", 1) < 0) - ohshite(_("unable to setenv for subprocesses")); @@ -56,11 +61,11 @@ index 3c88be8..ce66a86 100644 - args.buf); - varbuf_destroy(&args); - } -- if (!instdirl || fc_script_chrootless) +- if (instdirlen == 0 || fc_script_chrootless) - return cmd->filename; - -- assert(strlen(cmd->filename) >= instdirl); -- return cmd->filename + instdirl; +- assert(strlen(cmd->filename) >= instdirlen); +- return cmd->filename + instdirlen; + return cmd->filename; } diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg/0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch b/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg/0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch new file mode 100644 index 000000000..8bfaad14d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg/0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch @@ -0,0 +1,40 @@ +From 8659eeeeda74d71e12080121f0b13a88cbdda433 Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?An=C3=ADbal=20Lim=C3=B3n?= +Date: Tue, 21 Feb 2017 11:23:27 -0600 +Subject: [PATCH] dpkg-deb/build.c: Remove usage of --clamp-mtime in tar +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +Recently dpkg added --clamp-mtime to tar to create reproducible +build tarballs [1]. + +But host tools doesn't support this option because is new on tar +so disable in our builds. 
+ +Signed-off-by: Aníbal Limón + +Upstream-Status: Inappropriate [Configuration] + +[1] https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=759999#20 +[2] https://lists.gnu.org/archive/html/help-tar/2016-01/msg00000.html +--- + dpkg-deb/build.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/dpkg-deb/build.c b/dpkg-deb/build.c +index 655aa55..927f56f 100644 +--- a/dpkg-deb/build.c ++++ b/dpkg-deb/build.c +@@ -447,7 +447,7 @@ tarball_pack(const char *dir, filenames_feed_func *tar_filenames_feeder, + snprintf(mtime, sizeof(mtime), "@%ld", timestamp); + + execlp(TAR, "tar", "-cf", "-", "--format=gnu", +- "--mtime", mtime, "--clamp-mtime", ++ "--mtime", mtime, + "--null", "--no-unquote", + "--no-recursion", "-T", "-", NULL); + ohshite(_("unable to execute %s (%s)"), "tar -cf", TAR); +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.10.bb b/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.10.bb new file mode 100644 index 000000000..21385af87 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.10.bb @@ -0,0 +1,20 @@ +require dpkg.inc +LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe" + +SRC_URI = "http://snapshot.debian.org/archive/debian/20160731T221931Z/pool/main/d/${BPN}/${BPN}_${PV}.tar.xz \ + file://noman.patch \ + file://remove-tar-no-timestamp.patch \ + file://arch_pm.patch \ + file://dpkg-configure.service \ + file://add_armeb_triplet_entry.patch \ + file://0002-Adapt-to-linux-wrs-kernel-version-which-has-characte.patch \ + file://0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch \ + file://0004-The-lutimes-function-doesn-t-work-properly-for-all-s.patch \ + file://0005-dpkg-compiler.m4-remove-Wvla.patch \ + file://0006-add-musleabi-to-known-target-tripets.patch \ + file://0007-dpkg-deb-build.c-Remove-usage-of-clamp-mtime-in-tar.patch \ + " +SRC_URI_append_class-native = " file://glibc2.5-sync_file_range.patch " + +SRC_URI[md5sum] = "ccff17730c0964428fc186ded2f2f401" +SRC_URI[sha256sum] = "025524da41ba18b183ff11e388eb8686f7cc58ee835ed7d48bd159c46a8b6dc5" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.7.bb b/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.7.bb deleted file mode 100644 index 28fdc136a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/dpkg/dpkg_1.18.7.bb +++ /dev/null @@ -1,19 +0,0 @@ -require dpkg.inc -LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe" - -SRC_URI = "http://snapshot.debian.org/archive/debian/20160509T100042Z/pool/main/d/${BPN}/${BPN}_${PV}.tar.xz \ - file://noman.patch \ - file://remove-tar-no-timestamp.patch \ - file://arch_pm.patch \ - file://dpkg-configure.service \ - file://add_armeb_triplet_entry.patch \ - file://0002-Adapt-to-linux-wrs-kernel-version-which-has-characte.patch \ - file://0003-Our-pre-postinsts-expect-D-to-be-set-when-running-in.patch \ - file://0004-The-lutimes-function-doesn-t-work-properly-for-all-s.patch \ - file://0005-dpkg-compiler.m4-remove-Wvla.patch \ - file://0006-add-musleabi-to-known-target-tripets.patch \ - " -SRC_URI_append_class-native = " file://glibc2.5-sync_file_range.patch " - -SRC_URI[md5sum] = "073dbf2129a54b0fc627464bf8af4a1b" -SRC_URI[sha256sum] = "ace36d3a6dc750a42baf797f9e75ec580a21f92bb9ff96b482100755d6d9b87b" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs.inc b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs.inc index 74e92f6b1..81d90a134 100644 
--- a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs.inc @@ -17,7 +17,7 @@ LIC_FILES_CHKSUM = "file://NOTICE;md5=b48f21d765b875bd10400975d12c1ca2 \ file://lib/et/et_name.c;beginline=1;endline=11;md5=ead236447dac7b980dbc5b4804d8c836 \ file://lib/ss/ss.h;beginline=1;endline=20;md5=6e89ad47da6e75fecd2b5e0e81e1d4a6" SECTION = "base" -DEPENDS = "util-linux" +DEPENDS = "util-linux attr" SRC_URI = "git://git.kernel.org/pub/scm/fs/ext2/e2fsprogs.git" S = "${WORKDIR}/git" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/0001-e2fsck-exit-with-exit-status-0-if-no-errors-were-fix.patch b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/0001-e2fsck-exit-with-exit-status-0-if-no-errors-were-fix.patch index 44f3888b1..1d1752025 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/0001-e2fsck-exit-with-exit-status-0-if-no-errors-were-fix.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/0001-e2fsck-exit-with-exit-status-0-if-no-errors-were-fix.patch @@ -1,4 +1,4 @@ -From b9bb77a0dd712f06b262a12766972b99cd801269 Mon Sep 17 00:00:00 2001 +From bf9f3b6d5b10d19218b4ed904c12b22e36ec57dd Mon Sep 17 00:00:00 2001 From: Theodore Ts'o Date: Thu, 16 Feb 2017 22:02:35 -0500 Subject: [PATCH] e2fsck: exit with exit status 0 if no errors were fixed @@ -13,15 +13,12 @@ Upstream-Status: Backport Signed-off-by: Theodore Ts'o Signed-off-by: Daniel Schultz - -Conflicts: - e2fsck/e2fsck.conf.5.in --- - e2fsck/e2fsck.conf.5.in | 34 +++++++++++++++++++++++++++++ + e2fsck/e2fsck.conf.5.in | 7 +++++++ e2fsck/journal.c | 1 + - e2fsck/problem.c | 8 ++++--- + e2fsck/problem.c | 8 +++++--- e2fsck/problemP.h | 1 + - e2fsck/unix.c | 20 +++++++++++++---- + e2fsck/unix.c | 20 ++++++++++++++++---- tests/f_collapse_extent_tree/expect.1 | 2 +- tests/f_compress_extent_tree_level/expect.1 | 2 +- tests/f_convert_bmap/expect.1 | 2 +- @@ -33,16 +30,16 @@ Conflicts: tests/f_orphan_extents_inode/expect.1 | 2 +- tests/f_rehash_dir/expect.1 | 2 +- tests/f_unsorted_EAs/expect.1 | 2 +- - 16 files changed, 68 insertions(+), 18 deletions(-) + 16 files changed, 41 insertions(+), 18 deletions(-) diff --git a/e2fsck/e2fsck.conf.5.in b/e2fsck/e2fsck.conf.5.in -index 1f80a04..6a205ce 100644 +index 1848bdb..0bfc76a 100644 --- a/e2fsck/e2fsck.conf.5.in +++ b/e2fsck/e2fsck.conf.5.in -@@ -326,6 +326,40 @@ defaults to true. - This relation controls whether or not the scratch file directory is used - instead of an in-memory data structure when tracking inode counts. It - defaults to true. +@@ -303,6 +303,13 @@ of 'should this problem be fixed?'. The + option even overrides the + .B -y + option given on the command-line (just for the specific problem, of course). +.TP +.I not_a_fix +This boolean option, it set to true, marks the problem as @@ -50,38 +47,11 @@ index 1f80a04..6a205ce 100644 +it does not mean that the file system had a problem which has since +been fixed. This is used for requests to optimize the file system's +data structure, such as pruning an extent tree. -+@TDB_MAN_COMMENT@.SH THE [scratch_files] STANZA -+@TDB_MAN_COMMENT@The following relations are defined in the -+@TDB_MAN_COMMENT@.I [scratch_files] -+@TDB_MAN_COMMENT@stanza. 
-+@TDB_MAN_COMMENT@.TP -+@TDB_MAN_COMMENT@.I directory -+@TDB_MAN_COMMENT@If the directory named by this relation exists and is -+@TDB_MAN_COMMENT@writeable, then e2fsck will attempt to use this -+@TDB_MAN_COMMENT@directory to store scratch files instead of using -+@TDB_MAN_COMMENT@in-memory data structures. -+@TDB_MAN_COMMENT@.TP -+@TDB_MAN_COMMENT@.I numdirs_threshold -+@TDB_MAN_COMMENT@If this relation is set, then in-memory data structures -+@TDB_MAN_COMMENT@be used if the number of directories in the filesystem -+@TDB_MAN_COMMENT@are fewer than amount specified. -+@TDB_MAN_COMMENT@.TP -+@TDB_MAN_COMMENT@.I dirinfo -+@TDB_MAN_COMMENT@This relation controls whether or not the scratch file -+@TDB_MAN_COMMENT@directory is used instead of an in-memory data -+@TDB_MAN_COMMENT@structure for directory information. It defaults to -+@TDB_MAN_COMMENT@true. -+@TDB_MAN_COMMENT@.TP -+@TDB_MAN_COMMENT@.I icount -+@TDB_MAN_COMMENT@This relation controls whether or not the scratch file -+@TDB_MAN_COMMENT@directory is used instead of an in-memory data -+@TDB_MAN_COMMENT@structure when tracking inode counts. It defaults to -+@TDB_MAN_COMMENT@true. - .SH LOGGING - E2fsck has the facility to save the information from an e2fsck run in a - directory so that a system administrator can review its output at their + @TDB_MAN_COMMENT@.SH THE [scratch_files] STANZA + @TDB_MAN_COMMENT@The following relations are defined in the + @TDB_MAN_COMMENT@.I [scratch_files] diff --git a/e2fsck/journal.c b/e2fsck/journal.c -index c8ac57d..b4cf329 100644 +index 46fe7b4..c4f58f1 100644 --- a/e2fsck/journal.c +++ b/e2fsck/journal.c @@ -572,6 +572,7 @@ static void clear_v2_journal_fields(journal_t *journal) @@ -93,10 +63,10 @@ index c8ac57d..b4cf329 100644 ctx->fs->blocksize-V1_SB_SIZE); mark_buffer_dirty(journal->j_sb_buffer); diff --git a/e2fsck/problem.c b/e2fsck/problem.c -index 1e645e4..2b01ffc 100644 +index 34a671e..4b25069 100644 --- a/e2fsck/problem.c +++ b/e2fsck/problem.c -@@ -1261,12 +1261,12 @@ static struct e2fsck_problem problem_table[] = { +@@ -1276,12 +1276,12 @@ static struct e2fsck_problem problem_table[] = { /* Inode extent tree could be shorter */ { PR_1E_CAN_COLLAPSE_EXTENT_TREE, N_("@i %i @x tree (at level %b) could be shorter. 
"), @@ -111,7 +81,7 @@ index 1e645e4..2b01ffc 100644 /* Pass 2 errors */ -@@ -2146,6 +2146,7 @@ int fix_problem(e2fsck_t ctx, problem_t code, struct problem_context *pctx) +@@ -2166,6 +2166,7 @@ int fix_problem(e2fsck_t ctx, problem_t code, struct problem_context *pctx) reconfigure_bool(ctx, ptr, key, PR_NO_NOMSG, "no_nomsg"); reconfigure_bool(ctx, ptr, key, PR_PREEN_NOHDR, "preen_noheader"); reconfigure_bool(ctx, ptr, key, PR_FORCE_NO, "force_no"); @@ -119,7 +89,7 @@ index 1e645e4..2b01ffc 100644 profile_get_integer(ctx->profile, "options", "max_count_problems", 0, 0, &ptr->max_count); -@@ -2263,7 +2264,8 @@ int fix_problem(e2fsck_t ctx, problem_t code, struct problem_context *pctx) +@@ -2283,7 +2284,8 @@ int fix_problem(e2fsck_t ctx, problem_t code, struct problem_context *pctx) if (ptr->flags & PR_AFTER_CODE) answer = fix_problem(ctx, ptr->second_code, pctx); @@ -139,10 +109,10 @@ index 7944cd6..63bb8df 100644 #define PR_FORCE_NO 0x100000 /* Force the answer to be no */ +#define PR_NOT_A_FIX 0x200000 /* Yes doesn't mean a problem was fixed */ diff --git a/e2fsck/unix.c b/e2fsck/unix.c -index 004a6e5..d33d7fd 100644 +index eb9f311..9e4d31a 100644 --- a/e2fsck/unix.c +++ b/e2fsck/unix.c -@@ -1896,11 +1896,23 @@ no_journal: +@@ -1901,11 +1901,23 @@ no_journal: fix_problem(ctx, PR_6_IO_FLUSH, &pctx); if (was_changed) { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/Revert-mke2fs-enable-the-metadata_csum-and-64bit-fea.patch b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/Revert-mke2fs-enable-the-metadata_csum-and-64bit-fea.patch index e66cd4d1a..38fb07e6e 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/Revert-mke2fs-enable-the-metadata_csum-and-64bit-fea.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/Revert-mke2fs-enable-the-metadata_csum-and-64bit-fea.patch @@ -20,20 +20,17 @@ Signed-off-by: Hongxu Jia 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/misc/mke2fs.conf.in b/misc/mke2fs.conf.in -index 78fe50a..6f1940e 100644 +index 812f7c7..0280398 100644 --- a/misc/mke2fs.conf.in +++ b/misc/mke2fs.conf.in @@ -11,8 +11,9 @@ features = has_journal } ext4 = { -- features = has_journal,extent,huge_file,flex_bg,64bit,dir_nlink,extra_isize +- features = has_journal,extent,huge_file,flex_bg,uninit_bg,64bit,dir_nlink,extra_isize + features = has_journal,extent,huge_file,flex_bg,uninit_bg,dir_nlink,extra_isize inode_size = 256 + auto_64-bit_support = 1 } ext4dev = { - features = has_journal,extent,huge_file,flex_bg,inline_data,64bit,dir_nlink,extra_isize --- -2.8.1 - + features = has_journal,extent,huge_file,flex_bg,uninit_bg,inline_data,64bit,dir_nlink,extra_isize diff --git a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/e2fsprogs-1.43-sysmacros.patch b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/e2fsprogs-1.43-sysmacros.patch new file mode 100644 index 000000000..abbf2bad2 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/e2fsprogs-1.43-sysmacros.patch @@ -0,0 +1,130 @@ +From 30ef41f68703b6a16027cc8787118b87f1462dff Mon Sep 17 00:00:00 2001 +From: Mike Frysinger +Date: Mon, 28 Mar 2016 20:31:33 -0400 +Subject: [PATCH e2fsprogs] include sys/sysmacros.h as needed + +The minor/major/makedev macros are not entirely standard. 
glibc has had +the definitions in sys/sysmacros.h since the start, and wants to move away +from always defining them implicitly via sys/types.h (as this pollutes the +namespace in violation of POSIX). Other C libraries have already dropped +them. Since the configure script already checks for this header, use that +to pull in the header in files that use these macros. + +Signed-off-by: Mike Frysinger + +Taken from gentoo portage. + +Upstream-Status: Pending + +--- + debugfs/debugfs.c | 3 +++ + lib/blkid/devname.c | 3 +++ + lib/blkid/devno.c | 3 +++ + lib/ext2fs/finddev.c | 3 +++ + lib/ext2fs/ismounted.c | 3 +++ + misc/create_inode.c | 4 ++++ + misc/mk_hugefiles.c | 3 +++ + 7 files changed, 22 insertions(+) + +diff --git a/debugfs/debugfs.c b/debugfs/debugfs.c +index ba8be40..7d481bc 100644 +--- a/debugfs/debugfs.c ++++ b/debugfs/debugfs.c +@@ -26,6 +26,9 @@ extern char *optarg; + #include + #endif + #include ++#ifdef HAVE_SYS_SYSMACROS_H ++#include ++#endif + + #include "debugfs.h" + #include "uuid/uuid.h" +diff --git a/lib/blkid/devname.c b/lib/blkid/devname.c +index 3e2efa9..671e781 100644 +--- a/lib/blkid/devname.c ++++ b/lib/blkid/devname.c +@@ -36,6 +36,9 @@ + #if HAVE_SYS_MKDEV_H + #include + #endif ++#ifdef HAVE_SYS_SYSMACROS_H ++#include ++#endif + #include + + #include "blkidP.h" +diff --git a/lib/blkid/devno.c b/lib/blkid/devno.c +index 479d977..61e6fc7 100644 +--- a/lib/blkid/devno.c ++++ b/lib/blkid/devno.c +@@ -31,6 +31,9 @@ + #if HAVE_SYS_MKDEV_H + #include + #endif ++#ifdef HAVE_SYS_SYSMACROS_H ++#include ++#endif + + #include "blkidP.h" + +diff --git a/lib/ext2fs/finddev.c b/lib/ext2fs/finddev.c +index 311608d..62fa0db 100644 +--- a/lib/ext2fs/finddev.c ++++ b/lib/ext2fs/finddev.c +@@ -31,6 +31,9 @@ + #if HAVE_SYS_MKDEV_H + #include + #endif ++#ifdef HAVE_SYS_SYSMACROS_H ++#include ++#endif + + #include "ext2_fs.h" + #include "ext2fs.h" +diff --git a/lib/ext2fs/ismounted.c b/lib/ext2fs/ismounted.c +index e0f69dd..7404996 100644 +--- a/lib/ext2fs/ismounted.c ++++ b/lib/ext2fs/ismounted.c +@@ -49,6 +49,9 @@ + #if HAVE_SYS_TYPES_H + #include + #endif ++#ifdef HAVE_SYS_SYSMACROS_H ++#include ++#endif + + #include "ext2_fs.h" + #include "ext2fs.h" +diff --git a/misc/create_inode.c b/misc/create_inode.c +index 4dbd8e5..98aeb41 100644 +--- a/misc/create_inode.c ++++ b/misc/create_inode.c +@@ -22,6 +22,10 @@ + #include + #endif + #include ++#ifdef HAVE_SYS_SYSMACROS_H ++#include ++#endif ++ + #include + #include + #include +diff --git a/misc/mk_hugefiles.c b/misc/mk_hugefiles.c +index 71a15c5..00e95cd 100644 +--- a/misc/mk_hugefiles.c ++++ b/misc/mk_hugefiles.c +@@ -35,6 +35,9 @@ extern int optind; + #include + #include + #include ++#ifdef HAVE_SYS_SYSMACROS_H ++#include ++#endif + #include + #include + #include +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/ptest.patch b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/ptest.patch index ef1ce5872..879d93625 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/ptest.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/ptest.patch @@ -1,5 +1,5 @@ diff --git a/tests/Makefile.in b/tests/Makefile.in -index 60cf655..ce220f1 100644 +index c130f4a..d2ade03 100644 --- a/tests/Makefile.in +++ b/tests/Makefile.in @@ -18,7 +18,7 @@ test_one: $(srcdir)/test_one.in Makefile mke2fs.conf @@ -7,21 +7,21 @@ index 60cf655..ce220f1 100644 @echo "HTREE=y" >> test_one @echo "QUOTA=y" >> test_one - @echo "SRCDIR=@srcdir@" >> test_one -+ 
@echo "SRCDIR=/usr/lib/e2fsprogs/ptest/test" >> test_one ++ @echo "SRCDIR=${prefix}${libdir}/e2fsprogs/ptest/test" >> test_one @echo "DIFF_OPTS=@UNI_DIFF_OPTS@" >> test_one + @echo "SIZEOF_TIME_T=@SIZEOF_TIME_T@" >> test_one @cat $(srcdir)/test_one.in >> test_one - @chmod +x test_one -@@ -26,7 +26,7 @@ test_one: $(srcdir)/test_one.in Makefile mke2fs.conf +@@ -27,7 +27,7 @@ test_one: $(srcdir)/test_one.in Makefile mke2fs.conf test_script: test_one test_script.in Makefile mke2fs.conf @echo "Creating test_script..." @echo "#!/bin/sh" > test_script - @echo "SRCDIR=@srcdir@" >> test_script -+ @echo "SRCDIR=/usr/lib/e2fsprogs/ptest/test" >> test_script ++ @echo "SRCDIR=${prefix}${libdir}/e2fsprogs/ptest/test" >> test_script @cat $(srcdir)/test_script.in >> test_script @chmod +x test_script diff --git a/tests/test_config b/tests/test_config -index 7f39157..c815a44 100644 +index 547ef4c..e11e6f4 100644 --- a/tests/test_config +++ b/tests/test_config @@ -3,24 +3,24 @@ @@ -65,3 +65,16 @@ index 7f39157..c815a44 100644 CLEAN_OUTPUT="sed -f $cmd_dir/filter.sed" LD_LIBRARY_PATH=../lib:../lib/ext2fs:../lib/e2p:../lib/et:../lib/ss:${LD_LIBRARY_PATH} DYLD_LIBRARY_PATH=../lib:../lib/ext2fs:../lib/e2p:../lib/et:../lib/ss:${DYLD_LIBRARY_PATH} +diff --git a/tests/test_script.in b/tests/test_script.in +index 9959e30..442999d 100644 +--- a/tests/test_script.in ++++ b/tests/test_script.in +@@ -39,7 +39,7 @@ for i; do + done + + if test "$TESTS"x = x ; then +- if test -n "DO_FAILED"; then ++ if test -n "$DO_FAILED"; then + exit 0 + fi + TESTS=`ls -d $SRCDIR/[a-zA-Z]_*` diff --git a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/run-ptest b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/run-ptest index 1ac251324..e02fc7f2b 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/run-ptest +++ b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs/run-ptest @@ -5,7 +5,7 @@ cd ./test if [ $? 
-eq 0 ] then echo "PASS: e2fsprogs" - rm test.log + rm ../test.log else echo "FAIL: e2fsprogs" fi diff --git a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.43.4.bb b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.43.4.bb new file mode 100644 index 000000000..5216c7027 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.43.4.bb @@ -0,0 +1,112 @@ +require e2fsprogs.inc + +SRC_URI += "file://acinclude.m4 \ + file://remove.ldconfig.call.patch \ + file://quiet-debugfs.patch \ + file://run-ptest \ + file://ptest.patch \ + file://mkdir.patch \ + file://Revert-mke2fs-enable-the-metadata_csum-and-64bit-fea.patch \ + file://e2fsprogs-1.43-sysmacros.patch \ + file://mkdir_p.patch \ + file://0001-e2fsck-exit-with-exit-status-0-if-no-errors-were-fix.patch \ +" + +SRC_URI_append_class-native = " file://e2fsprogs-fix-missing-check-for-permission-denied.patch" + +SRCREV = "3d66c4b20f09f923078c1e6eb9b549865b549674" +UPSTREAM_CHECK_GITTAGREGEX = "v(?P\d+\.\d+(\.\d+)*)$" + +EXTRA_OECONF += "--libdir=${base_libdir} --sbindir=${base_sbindir} \ + --enable-elf-shlibs --disable-libuuid --disable-uuidd \ + --disable-libblkid --enable-verbose-makecmds" + +EXTRA_OECONF_darwin = "--libdir=${base_libdir} --sbindir=${base_sbindir} --enable-bsd-shlibs" + +PACKAGECONFIG ??= "" +PACKAGECONFIG[fuse] = '--enable-fuse2fs,--disable-fuse2fs,fuse' + +do_configure_prepend () { + cp ${WORKDIR}/acinclude.m4 ${S}/ +} + +do_install () { + oe_runmake 'DESTDIR=${D}' install + oe_runmake 'DESTDIR=${D}' install-libs + # We use blkid from util-linux now so remove from here + rm -f ${D}${base_libdir}/libblkid* + rm -rf ${D}${includedir}/blkid + rm -f ${D}${base_libdir}/pkgconfig/blkid.pc + rm -f ${D}${base_sbindir}/blkid + rm -f ${D}${base_sbindir}/fsck + rm -f ${D}${base_sbindir}/findfs + + # e2initrd_helper and the pkgconfig files belong in libdir + if [ ! 
${D}${libdir} -ef ${D}${base_libdir} ]; then + install -d ${D}${libdir} + mv ${D}${base_libdir}/e2initrd_helper ${D}${libdir} + mv ${D}${base_libdir}/pkgconfig ${D}${libdir} + fi + + oe_multilib_header ext2fs/ext2_types.h + install -d ${D}${base_bindir} + mv ${D}${bindir}/chattr ${D}${base_bindir}/chattr.e2fsprogs + + install -v -m 755 ${S}/contrib/populate-extfs.sh ${D}${base_sbindir}/ + + # Clean host path (build directory) in compile_et, mk_cmds + sed -i -e "s,\(ET_DIR=.*\)${S}/lib/et\(.*\),\1${datadir}/et\2,g" ${D}${bindir}/compile_et + sed -i -e "s,\(SS_DIR=.*\)${S}/lib/ss\(.*\),\1${datadir}/ss\2,g" ${D}${bindir}/mk_cmds +} + +# Need to find the right mke2fs.conf file +e2fsprogs_conf_fixup () { + for i in mke2fs mkfs.ext2 mkfs.ext3 mkfs.ext4; do + create_wrapper ${D}${base_sbindir}/$i MKE2FS_CONFIG=${sysconfdir}/mke2fs.conf + done +} + +do_install_append_class-native() { + e2fsprogs_conf_fixup +} + +do_install_append_class-nativesdk() { + e2fsprogs_conf_fixup +} + +RDEPENDS_e2fsprogs = "e2fsprogs-badblocks" +RRECOMMENDS_e2fsprogs = "e2fsprogs-mke2fs e2fsprogs-e2fsck" + +PACKAGES =+ "e2fsprogs-e2fsck e2fsprogs-mke2fs e2fsprogs-tune2fs e2fsprogs-badblocks e2fsprogs-resize2fs" +PACKAGES =+ "libcomerr libss libe2p libext2fs" + +FILES_e2fsprogs-resize2fs = "${base_sbindir}/resize2fs*" +FILES_e2fsprogs-e2fsck = "${base_sbindir}/e2fsck ${base_sbindir}/fsck.ext*" +FILES_e2fsprogs-mke2fs = "${base_sbindir}/mke2fs ${base_sbindir}/mkfs.ext* ${sysconfdir}/mke2fs.conf" +FILES_e2fsprogs-tune2fs = "${base_sbindir}/tune2fs ${base_sbindir}/e2label" +FILES_e2fsprogs-badblocks = "${base_sbindir}/badblocks" +FILES_libcomerr = "${base_libdir}/libcom_err.so.*" +FILES_libss = "${base_libdir}/libss.so.*" +FILES_libe2p = "${base_libdir}/libe2p.so.*" +FILES_libext2fs = "${libdir}/e2initrd_helper ${base_libdir}/libext2fs.so.*" +FILES_${PN}-dev += "${datadir}/*/*.awk ${datadir}/*/*.sed ${base_libdir}/*.so ${bindir}/compile_et ${bindir}/mk_cmds" + +ALTERNATIVE_${PN} = "chattr" +ALTERNATIVE_PRIORITY = "100" +ALTERNATIVE_LINK_NAME[chattr] = "${base_bindir}/chattr" +ALTERNATIVE_TARGET[chattr] = "${base_bindir}/chattr.e2fsprogs" + +ALTERNATIVE_${PN}-doc = "fsck.8" +ALTERNATIVE_LINK_NAME[fsck.8] = "${mandir}/man8/fsck.8" + +RDEPENDS_${PN}-ptest += "${PN} ${PN}-tune2fs coreutils procps bash" + +do_compile_ptest() { + oe_runmake -C ${B}/tests +} + +do_install_ptest() { + cp -a ${B}/tests ${D}${PTEST_PATH}/test + cp -a ${S}/tests/* ${D}${PTEST_PATH}/test + sed -e 's!../e2fsck/e2fsck!e2fsck!g' -i ${D}${PTEST_PATH}/test/*/expect* +} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.43.bb b/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.43.bb deleted file mode 100644 index dcfb564a4..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/e2fsprogs/e2fsprogs_1.43.bb +++ /dev/null @@ -1,113 +0,0 @@ -require e2fsprogs.inc - -PR = "r1" - -SRC_URI += "file://acinclude.m4 \ - file://remove.ldconfig.call.patch \ - file://quiet-debugfs.patch \ - file://run-ptest \ - file://ptest.patch \ - file://mkdir.patch \ - file://Revert-mke2fs-enable-the-metadata_csum-and-64bit-fea.patch \ - file://mkdir_p.patch \ - file://0001-e2fsck-exit-with-exit-status-0-if-no-errors-were-fix.patch \ -" - -SRC_URI_append_class-native = " file://e2fsprogs-fix-missing-check-for-permission-denied.patch" - -SRCREV = "d6adf070b0e85f209c0d7f310188b134b5cb7180" -UPSTREAM_CHECK_GITTAGREGEX = "v(?P\d+\.\d+(\.\d+)*)$" - -EXTRA_OECONF += "--libdir=${base_libdir} --sbindir=${base_sbindir} \ - 
--enable-elf-shlibs --disable-libuuid --disable-uuidd \ - --disable-libblkid --enable-verbose-makecmds" - -EXTRA_OECONF_darwin = "--libdir=${base_libdir} --sbindir=${base_sbindir} --enable-bsd-shlibs" - -PACKAGECONFIG ??= "" -PACKAGECONFIG[fuse] = '--enable-fuse2fs,--disable-fuse2fs,fuse' - -do_configure_prepend () { - cp ${WORKDIR}/acinclude.m4 ${S}/ -} - -do_install () { - oe_runmake 'DESTDIR=${D}' install - oe_runmake 'DESTDIR=${D}' install-libs - # We use blkid from util-linux now so remove from here - rm -f ${D}${base_libdir}/libblkid* - rm -rf ${D}${includedir}/blkid - rm -f ${D}${base_libdir}/pkgconfig/blkid.pc - rm -f ${D}${base_sbindir}/blkid - rm -f ${D}${base_sbindir}/fsck - rm -f ${D}${base_sbindir}/findfs - - # e2initrd_helper and the pkgconfig files belong in libdir - if [ ! ${D}${libdir} -ef ${D}${base_libdir} ]; then - install -d ${D}${libdir} - mv ${D}${base_libdir}/e2initrd_helper ${D}${libdir} - mv ${D}${base_libdir}/pkgconfig ${D}${libdir} - fi - - oe_multilib_header ext2fs/ext2_types.h - install -d ${D}${base_bindir} - mv ${D}${bindir}/chattr ${D}${base_bindir}/chattr.e2fsprogs - - install -v -m 755 ${S}/contrib/populate-extfs.sh ${D}${base_sbindir}/ - - # Clean host path (build directory) in compile_et, mk_cmds - sed -i -e "s,\(ET_DIR=.*\)${S}/lib/et\(.*\),\1${datadir}/et\2,g" ${D}${bindir}/compile_et - sed -i -e "s,\(SS_DIR=.*\)${S}/lib/ss\(.*\),\1${datadir}/ss\2,g" ${D}${bindir}/mk_cmds -} - -# Need to find the right mke2fs.conf file -e2fsprogs_conf_fixup () { - for i in mke2fs mkfs.ext2 mkfs.ext3 mkfs.ext4 mkfs.ext4dev; do - create_wrapper ${D}${base_sbindir}/$i MKE2FS_CONFIG=${sysconfdir}/mke2fs.conf - done -} - -do_install_append_class-native() { - e2fsprogs_conf_fixup -} - -do_install_append_class-nativesdk() { - e2fsprogs_conf_fixup -} - -RDEPENDS_e2fsprogs = "e2fsprogs-badblocks" -RRECOMMENDS_e2fsprogs = "e2fsprogs-mke2fs e2fsprogs-e2fsck" - -PACKAGES =+ "e2fsprogs-e2fsck e2fsprogs-mke2fs e2fsprogs-tune2fs e2fsprogs-badblocks e2fsprogs-resize2fs" -PACKAGES =+ "libcomerr libss libe2p libext2fs" - -FILES_e2fsprogs-resize2fs = "${base_sbindir}/resize2fs*" -FILES_e2fsprogs-e2fsck = "${base_sbindir}/e2fsck ${base_sbindir}/fsck.ext*" -FILES_e2fsprogs-mke2fs = "${base_sbindir}/mke2fs ${base_sbindir}/mkfs.ext* ${sysconfdir}/mke2fs.conf" -FILES_e2fsprogs-tune2fs = "${base_sbindir}/tune2fs ${base_sbindir}/e2label" -FILES_e2fsprogs-badblocks = "${base_sbindir}/badblocks" -FILES_libcomerr = "${base_libdir}/libcom_err.so.*" -FILES_libss = "${base_libdir}/libss.so.*" -FILES_libe2p = "${base_libdir}/libe2p.so.*" -FILES_libext2fs = "${libdir}/e2initrd_helper ${base_libdir}/libext2fs.so.*" -FILES_${PN}-dev += "${datadir}/*/*.awk ${datadir}/*/*.sed ${base_libdir}/*.so ${bindir}/compile_et ${bindir}/mk_cmds" - -ALTERNATIVE_${PN} = "chattr" -ALTERNATIVE_PRIORITY = "100" -ALTERNATIVE_LINK_NAME[chattr] = "${base_bindir}/chattr" -ALTERNATIVE_TARGET[chattr] = "${base_bindir}/chattr.e2fsprogs" - -ALTERNATIVE_${PN}-doc = "fsck.8" -ALTERNATIVE_LINK_NAME[fsck.8] = "${mandir}/man8/fsck.8" - -RDEPENDS_${PN}-ptest += "${PN} ${PN}-tune2fs coreutils procps bash" - -do_compile_ptest() { - oe_runmake -C ${B}/tests -} - -do_install_ptest() { - cp -a ${B}/tests ${D}${PTEST_PATH}/test - cp -a ${S}/tests/* ${D}${PTEST_PATH}/test - sed -e 's!../e2fsck/e2fsck!e2fsck!g' -i ${D}${PTEST_PATH}/test/*/expect* -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/arm_backend.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/arm_backend.diff 
deleted file mode 100644 index d4e4675ad..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/arm_backend.diff +++ /dev/null @@ -1,449 +0,0 @@ -Upstream-Status: Backport - -Index: elfutils-0.146/backends/arm_init.c -=================================================================== ---- elfutils-0.146.orig/backends/arm_init.c 2009-04-21 14:50:01.000000000 +0000 -+++ elfutils-0.146/backends/arm_init.c 2010-04-24 10:11:13.000000000 +0000 -@@ -32,21 +32,32 @@ - #define RELOC_PREFIX R_ARM_ - #include "libebl_CPU.h" - -+#include "libebl_arm.h" -+ - /* This defines the common reloc hooks based on arm_reloc.def. */ - #include "common-reloc.c" - - - const char * - arm_init (elf, machine, eh, ehlen) -- Elf *elf __attribute__ ((unused)); -+ Elf *elf; - GElf_Half machine __attribute__ ((unused)); - Ebl *eh; - size_t ehlen; - { -+ int soft_float = 0; -+ - /* Check whether the Elf_BH object has a sufficent size. */ - if (ehlen < sizeof (Ebl)) - return NULL; - -+ if (elf) { -+ GElf_Ehdr ehdr_mem; -+ GElf_Ehdr *ehdr = gelf_getehdr (elf, &ehdr_mem); -+ if (ehdr && (ehdr->e_flags & EF_ARM_SOFT_FLOAT)) -+ soft_float = 1; -+ } -+ - /* We handle it. */ - eh->name = "ARM"; - arm_init_reloc (eh); -@@ -58,7 +69,10 @@ - HOOK (eh, core_note); - HOOK (eh, auxv_info); - HOOK (eh, check_object_attribute); -- HOOK (eh, return_value_location); -+ if (soft_float) -+ eh->return_value_location = arm_return_value_location_soft; -+ else -+ eh->return_value_location = arm_return_value_location_hard; - - return MODVERSION; - } -Index: elfutils-0.146/backends/arm_regs.c -=================================================================== ---- elfutils-0.146.orig/backends/arm_regs.c 2009-04-21 14:50:01.000000000 +0000 -+++ elfutils-0.146/backends/arm_regs.c 2010-04-24 10:11:13.000000000 +0000 -@@ -28,6 +28,7 @@ - #endif - - #include -+#include - #include - - #define BACKEND arm_ -@@ -58,7 +59,15 @@ - namelen = 2; - break; - -- case 10 ... 12: -+ case 10 ... 11: -+ name[0] = 'r'; -+ name[1] = '1'; -+ name[2] = regno % 10 + '0'; -+ namelen = 3; -+ break; -+ -+ case 12: -+ *type = DW_ATE_unsigned; - name[0] = 'r'; - name[1] = '1'; - name[2] = regno % 10 + '0'; -@@ -73,6 +82,9 @@ - break; - - case 16 + 0 ... 16 + 7: -+ /* AADWARF says that there are no registers in that range, -+ * but gcc maps FPA registers here -+ */ - regno += 96 - 16; - /* Fall through. */ - case 96 + 0 ... 96 + 7: -@@ -84,11 +96,139 @@ - namelen = 2; - break; - -+ case 64 + 0 ... 64 + 9: -+ *setname = "VFP"; -+ *bits = 32; -+ *type = DW_ATE_float; -+ name[0] = 's'; -+ name[1] = regno - 64 + '0'; -+ namelen = 2; -+ break; -+ -+ case 64 + 10 ... 64 + 31: -+ *setname = "VFP"; -+ *bits = 32; -+ *type = DW_ATE_float; -+ name[0] = 's'; -+ name[1] = (regno - 64) / 10 + '0'; -+ name[2] = (regno - 64) % 10 + '0'; -+ namelen = 3; -+ break; -+ -+ case 104 + 0 ... 104 + 7: -+ /* XXX TODO: -+ * This can be either intel wireless MMX general purpose/control -+ * registers or xscale accumulator, which have different usage. -+ * We only have the intel wireless MMX here now. -+ * The name needs to be changed for the xscale accumulator too. */ -+ *setname = "MMX"; -+ *type = DW_ATE_unsigned; -+ *bits = 32; -+ memcpy(name, "wcgr", 4); -+ name[4] = regno - 104 + '0'; -+ namelen = 5; -+ break; -+ -+ case 112 + 0 ... 112 + 9: -+ *setname = "MMX"; -+ *type = DW_ATE_unsigned; -+ *bits = 64; -+ name[0] = 'w'; -+ name[1] = 'r'; -+ name[2] = regno - 112 + '0'; -+ namelen = 3; -+ break; -+ -+ case 112 + 10 ... 
112 + 15: -+ *setname = "MMX"; -+ *type = DW_ATE_unsigned; -+ *bits = 64; -+ name[0] = 'w'; -+ name[1] = 'r'; -+ name[2] = '1'; -+ name[3] = regno - 112 - 10 + '0'; -+ namelen = 4; -+ break; -+ - case 128: -+ *setname = "special"; - *type = DW_ATE_unsigned; - return stpcpy (name, "spsr") + 1 - name; - -+ case 129: -+ *setname = "special"; -+ *type = DW_ATE_unsigned; -+ return stpcpy(name, "spsr_fiq") + 1 - name; -+ -+ case 130: -+ *setname = "special"; -+ *type = DW_ATE_unsigned; -+ return stpcpy(name, "spsr_irq") + 1 - name; -+ -+ case 131: -+ *setname = "special"; -+ *type = DW_ATE_unsigned; -+ return stpcpy(name, "spsr_abt") + 1 - name; -+ -+ case 132: -+ *setname = "special"; -+ *type = DW_ATE_unsigned; -+ return stpcpy(name, "spsr_und") + 1 - name; -+ -+ case 133: -+ *setname = "special"; -+ *type = DW_ATE_unsigned; -+ return stpcpy(name, "spsr_svc") + 1 - name; -+ -+ case 144 ... 150: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_usr", regno - 144 + 8) + 1; -+ -+ case 151 ... 157: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_fiq", regno - 151 + 8) + 1; -+ -+ case 158 ... 159: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_irq", regno - 158 + 13) + 1; -+ -+ case 160 ... 161: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_abt", regno - 160 + 13) + 1; -+ -+ case 162 ... 163: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_und", regno - 162 + 13) + 1; -+ -+ case 164 ... 165: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_svc", regno - 164 + 13) + 1; -+ -+ case 192 ... 199: -+ *setname = "MMX"; -+ *bits = 32; -+ *type = DW_ATE_unsigned; -+ name[0] = 'w'; -+ name[1] = 'c'; -+ name[2] = regno - 192 + '0'; -+ namelen = 3; -+ break; -+ - case 256 + 0 ... 256 + 9: -+ /* XXX TODO: Neon also uses those registers and can contain -+ * both float and integers */ - *setname = "VFP"; - *type = DW_ATE_float; - *bits = 64; -Index: elfutils-0.146/backends/arm_retval.c -=================================================================== ---- elfutils-0.146.orig/backends/arm_retval.c 2010-01-12 16:57:54.000000000 +0000 -+++ elfutils-0.146/backends/arm_retval.c 2010-04-24 10:11:13.000000000 +0000 -@@ -45,6 +45,13 @@ - #define nloc_intreg 1 - #define nloc_intregs(n) (2 * (n)) - -+/* f1 */ /* XXX TODO: f0 can also have number 96 if program was compiled with -mabi=aapcs */ -+static const Dwarf_Op loc_fpreg[] = -+ { -+ { .atom = DW_OP_reg16 }, -+ }; -+#define nloc_fpreg 1 -+ - /* The return value is a structure and is actually stored in stack space - passed in a hidden argument by the caller. But, the compiler - helpfully returns the address of that space in r0. */ -@@ -55,8 +62,9 @@ - #define nloc_aggregate 1 - - --int --arm_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+static int -+arm_return_value_location_ (Dwarf_Die *functypedie, const Dwarf_Op **locp, -+ int soft_float) - { - /* Start with the function's type, and get the DW_AT_type attribute, - which is the type of the return value. 
*/ -@@ -109,14 +117,31 @@ - else - return -1; - } -+ if (tag == DW_TAG_base_type) -+ { -+ Dwarf_Word encoding; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding, -+ &attr_mem), &encoding) != 0) -+ return -1; -+ -+ if ((encoding == DW_ATE_float) && !soft_float) -+ { -+ *locp = loc_fpreg; -+ if (size <= 8) -+ return nloc_fpreg; -+ goto aggregate; -+ } -+ } - if (size <= 16) - { - intreg: - *locp = loc_intreg; - return size <= 4 ? nloc_intreg : nloc_intregs ((size + 3) / 4); - } -+ /* fall through. */ - - aggregate: -+ /* XXX TODO sometimes aggregates are returned in r0 (-mabi=aapcs) */ - *locp = loc_aggregate; - return nloc_aggregate; - -@@ -135,3 +160,18 @@ - DWARF and might be valid. */ - return -2; - } -+ -+/* return location for -mabi=apcs-gnu -msoft-float */ -+int -+arm_return_value_location_soft (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ return arm_return_value_location_ (functypedie, locp, 1); -+} -+ -+/* return location for -mabi=apcs-gnu -mhard-float (current default) */ -+int -+arm_return_value_location_hard (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ return arm_return_value_location_ (functypedie, locp, 0); -+} -+ -Index: elfutils-0.146/libelf/elf.h -=================================================================== ---- elfutils-0.146.orig/libelf/elf.h 2010-04-24 10:11:11.000000000 +0000 -+++ elfutils-0.146/libelf/elf.h 2010-04-24 10:11:13.000000000 +0000 -@@ -2290,6 +2290,9 @@ - #define EF_ARM_EABI_VER4 0x04000000 - #define EF_ARM_EABI_VER5 0x05000000 - -+/* EI_OSABI values */ -+#define ELFOSABI_ARM_AEABI 64 /* Contains symbol versioning. */ -+ - /* Additional symbol types for Thumb. */ - #define STT_ARM_TFUNC STT_LOPROC /* A Thumb function. */ - #define STT_ARM_16BIT STT_HIPROC /* A Thumb label. */ -@@ -2307,12 +2310,19 @@ - - /* Processor specific values for the Phdr p_type field. */ - #define PT_ARM_EXIDX (PT_LOPROC + 1) /* ARM unwind segment. */ -+#define PT_ARM_UNWIND PT_ARM_EXIDX - - /* Processor specific values for the Shdr sh_type field. */ - #define SHT_ARM_EXIDX (SHT_LOPROC + 1) /* ARM unwind section. */ - #define SHT_ARM_PREEMPTMAP (SHT_LOPROC + 2) /* Preemption details. */ - #define SHT_ARM_ATTRIBUTES (SHT_LOPROC + 3) /* ARM attributes section. */ - -+/* Processor specific values for the Dyn d_tag field. */ -+#define DT_ARM_RESERVED1 (DT_LOPROC + 0) -+#define DT_ARM_SYMTABSZ (DT_LOPROC + 1) -+#define DT_ARM_PREEMTMAB (DT_LOPROC + 2) -+#define DT_ARM_RESERVED2 (DT_LOPROC + 3) -+#define DT_ARM_NUM 4 - - /* ARM relocs. 
*/ - -@@ -2344,12 +2354,75 @@ - #define R_ARM_GOTPC 25 /* 32 bit PC relative offset to GOT */ - #define R_ARM_GOT32 26 /* 32 bit GOT entry */ - #define R_ARM_PLT32 27 /* 32 bit PLT address */ -+#define R_ARM_CALL 28 -+#define R_ARM_JUMP24 29 -+#define R_ARM_THM_JUMP24 30 -+#define R_ARM_BASE_ABS 31 - #define R_ARM_ALU_PCREL_7_0 32 - #define R_ARM_ALU_PCREL_15_8 33 - #define R_ARM_ALU_PCREL_23_15 34 - #define R_ARM_LDR_SBREL_11_0 35 - #define R_ARM_ALU_SBREL_19_12 36 - #define R_ARM_ALU_SBREL_27_20 37 -+#define R_ARM_TARGET1 38 -+#define R_ARM_SBREL31 39 -+#define R_ARM_V4BX 40 -+#define R_ARM_TARGET2 41 -+#define R_ARM_PREL31 42 -+#define R_ARM_MOVW_ABS_NC 43 -+#define R_ARM_MOVT_ABS 44 -+#define R_ARM_MOVW_PREL_NC 45 -+#define R_ARM_MOVT_PREL 46 -+#define R_ARM_THM_MOVW_ABS_NC 47 -+#define R_ARM_THM_MOVT_ABS 48 -+#define R_ARM_THM_MOVW_PREL_NC 49 -+#define R_ARM_THM_MOVT_PREL 50 -+#define R_ARM_THM_JUMP19 51 -+#define R_ARM_THM_JUMP6 52 -+#define R_ARM_THM_ALU_PREL_11_0 53 -+#define R_ARM_THM_PC12 54 -+#define R_ARM_ABS32_NOI 55 -+#define R_ARM_REL32_NOI 56 -+#define R_ARM_ALU_PC_G0_NC 57 -+#define R_ARM_ALU_PC_G0 58 -+#define R_ARM_ALU_PC_G1_NC 59 -+#define R_ARM_ALU_PC_G1 60 -+#define R_ARM_ALU_PC_G2 61 -+#define R_ARM_LDR_PC_G1 62 -+#define R_ARM_LDR_PC_G2 63 -+#define R_ARM_LDRS_PC_G0 64 -+#define R_ARM_LDRS_PC_G1 65 -+#define R_ARM_LDRS_PC_G2 66 -+#define R_ARM_LDC_PC_G0 67 -+#define R_ARM_LDC_PC_G1 68 -+#define R_ARM_LDC_PC_G2 69 -+#define R_ARM_ALU_SB_G0_NC 70 -+#define R_ARM_ALU_SB_G0 71 -+#define R_ARM_ALU_SB_G1_NC 72 -+#define R_ARM_ALU_SB_G1 73 -+#define R_ARM_ALU_SB_G2 74 -+#define R_ARM_LDR_SB_G0 75 -+#define R_ARM_LDR_SB_G1 76 -+#define R_ARM_LDR_SB_G2 77 -+#define R_ARM_LDRS_SB_G0 78 -+#define R_ARM_LDRS_SB_G1 79 -+#define R_ARM_LDRS_SB_G2 80 -+#define R_ARM_LDC_G0 81 -+#define R_ARM_LDC_G1 82 -+#define R_ARM_LDC_G2 83 -+#define R_ARM_MOVW_BREL_NC 84 -+#define R_ARM_MOVT_BREL 85 -+#define R_ARM_MOVW_BREL 86 -+#define R_ARM_THM_MOVW_BREL_NC 87 -+#define R_ARM_THM_MOVT_BREL 88 -+#define R_ARM_THM_MOVW_BREL 89 -+/* 90-93 unallocated */ -+#define R_ARM_PLT32_ABS 94 -+#define R_ARM_GOT_ABS 95 -+#define R_ARM_GOT_PREL 96 -+#define R_ARM_GOT_BREL12 97 -+#define R_ARM_GOTOFF12 98 -+#define R_ARM_GOTRELAX 99 - #define R_ARM_GNU_VTENTRY 100 - #define R_ARM_GNU_VTINHERIT 101 - #define R_ARM_THM_PC11 102 /* thumb unconditional branch */ -@@ -2364,6 +2437,12 @@ - static TLS block offset */ - #define R_ARM_TLS_LE32 108 /* 32 bit offset relative to static - TLS block */ -+#define R_ARM_TLS_LDO12 109 -+#define R_ARM_TLS_LE12 110 -+#define R_ARM_TLS_IE12GP 111 -+/* 112 - 127 private range */ -+#define R_ARM_ME_TOO 128 /* obsolete */ -+ - #define R_ARM_RXPC25 249 - #define R_ARM_RSBREL32 250 - #define R_ARM_THM_RPC22 251 -Index: elfutils-0.146/backends/libebl_arm.h -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/libebl_arm.h 2010-04-24 10:11:13.000000000 +0000 -@@ -0,0 +1,9 @@ -+#ifndef _LIBEBL_ARM_H -+#define _LIBEBL_ARM_H 1 -+ -+#include -+ -+extern int arm_return_value_location_soft(Dwarf_Die *, const Dwarf_Op **locp); -+extern int arm_return_value_location_hard(Dwarf_Die *, const Dwarf_Op **locp); -+ -+#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elf_additions.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elf_additions.diff deleted file mode 100644 index 5baa70900..000000000 --- 
a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elf_additions.diff +++ /dev/null @@ -1,71 +0,0 @@ -Upstream-Status: Backport - -Index: elfutils-0.146/libelf/elf.h -=================================================================== ---- elfutils-0.146.orig/libelf/elf.h 2010-04-24 10:13:50.000000000 +0000 -+++ elfutils-0.146/libelf/elf.h 2010-04-24 10:22:43.000000000 +0000 -@@ -143,6 +143,7 @@ - #define ELFOSABI_HPUX 1 /* HP-UX */ - #define ELFOSABI_NETBSD 2 /* NetBSD. */ - #define ELFOSABI_LINUX 3 /* Linux. */ -+#define ELFOSABI_HURD 4 /* GNU/Hurd */ - #define ELFOSABI_SOLARIS 6 /* Sun Solaris. */ - #define ELFOSABI_AIX 7 /* IBM AIX. */ - #define ELFOSABI_IRIX 8 /* SGI Irix. */ -@@ -150,8 +151,13 @@ - #define ELFOSABI_TRU64 10 /* Compaq TRU64 UNIX. */ - #define ELFOSABI_MODESTO 11 /* Novell Modesto. */ - #define ELFOSABI_OPENBSD 12 /* OpenBSD. */ -+#define ELFOSABI_OPENVMS 13 /* OpenVMS */ -+#define ELFOSABI_NSK 14 /* Hewlett-Packard Non-Stop Kernel */ -+#define ELFOSABI_AROS 15 /* Amiga Research OS */ -+/* 64-255 Architecture-specific value range */ - #define ELFOSABI_ARM_AEABI 64 /* ARM EABI */ - #define ELFOSABI_ARM 97 /* ARM */ -+/* This is deprecated? It's not in the latest version anymore. */ - #define ELFOSABI_STANDALONE 255 /* Standalone (embedded) application */ - - #define EI_ABIVERSION 8 /* ABI version */ -@@ -206,7 +212,7 @@ - #define EM_H8_300H 47 /* Hitachi H8/300H */ - #define EM_H8S 48 /* Hitachi H8S */ - #define EM_H8_500 49 /* Hitachi H8/500 */ --#define EM_IA_64 50 /* Intel Merced */ -+#define EM_IA_64 50 /* Intel IA64 */ - #define EM_MIPS_X 51 /* Stanford MIPS-X */ - #define EM_COLDFIRE 52 /* Motorola Coldfire */ - #define EM_68HC12 53 /* Motorola M68HC12 */ -@@ -220,7 +226,8 @@ - #define EM_TINYJ 61 /* Advanced Logic Corp. Tinyj emb.fam*/ - #define EM_X86_64 62 /* AMD x86-64 architecture */ - #define EM_PDSP 63 /* Sony DSP Processor */ -- -+#define EM_PDP10 64 /* Digital Equipment Corp. PDP-10 */ -+#define EM_PDP11 65 /* Digital Equipment Corp. PDP-11 */ - #define EM_FX66 66 /* Siemens FX66 microcontroller */ - #define EM_ST9PLUS 67 /* STMicroelectronics ST9+ 8/16 mc */ - #define EM_ST7 68 /* STmicroelectronics ST7 8 bit mc */ -@@ -250,7 +257,22 @@ - #define EM_OPENRISC 92 /* OpenRISC 32-bit embedded processor */ - #define EM_ARC_A5 93 /* ARC Cores Tangent-A5 */ - #define EM_XTENSA 94 /* Tensilica Xtensa Architecture */ --#define EM_NUM 95 -+#define EM_VIDEOCORE 95 /* Alphamosaic VideoCore processor */ -+#define EM_TMM_GPP 96 /* Thompson Multimedia General Purpose Processor */ -+#define EM_NS32K 97 /* National Semiconductor 32000 series */ -+#define EM_TPC 98 /* Tenor Network TPC processor */ -+#define EM_SNP1K 99 /* Trebia SNP 1000 processor */ -+#define EM_ST200 100 /* STMicroelectronics (www.st.com) ST200 microcontroller */ -+#define EM_IP2K 101 /* Ubicom IP2XXX microcontroller family */ -+#define EM_MAX 102 /* MAX Processor */ -+#define EM_CR 103 /* National Semiconductor CompactRISC */ -+#define EM_F2MC16 104 /* Fujitsu F2MC16 */ -+#define EM_MSP430 105 /* TI msp430 micro controller */ -+#define EM_BLACKFIN 106 /* Analog Devices Blackfin (DSP) processor */ -+#define EM_SE_C33 107 /* S1C33 Family of Seiko Epson processors */ -+#define EM_SEP 108 /* Sharp embedded microprocessor */ -+#define EM_ARCA 109 /* Arca RISC Microprocessor */ -+#define EM_NUM 110 - - /* If it is necessary to assign new unofficial EM_* values, please - pick large random numbers (0x8523, 0xa7f2, etc.) 
to minimize the diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elf_begin.c-CVE-2014-9447-fix.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elf_begin.c-CVE-2014-9447-fix.patch deleted file mode 100644 index deba45fa8..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elf_begin.c-CVE-2014-9447-fix.patch +++ /dev/null @@ -1,37 +0,0 @@ -From 323ca04a0c9189544075c19b49da67f6443a8950 Mon Sep 17 00:00:00 2001 -From: Li xin -Date: Wed, 21 Jan 2015 09:33:38 +0900 -Subject: [PATCH] elf_begin.c: CVE-2014-9447 fix - -this patch is from: - https://git.fedorahosted.org/cgit/elfutils.git/commit/?id=147018e729e7c22eeabf15b82d26e4bf68a0d18e - -Upstream-Status: Backport -CVE: CVE-2014-9447 - -Signed-off-by: Li Xin ---- - libelf/elf_begin.c | 7 ++----- - 1 file changed, 2 insertions(+), 5 deletions(-) - -diff --git a/libelf/elf_begin.c b/libelf/elf_begin.c -index e46add3..e83ba35 100644 ---- a/libelf/elf_begin.c -+++ b/libelf/elf_begin.c -@@ -736,11 +736,8 @@ read_long_names (Elf *elf) - break; - - /* NUL-terminate the string. */ -- *runp = '\0'; -- -- /* Skip the NUL byte and the \012. */ -- runp += 2; -- -+ *runp++ = '\0'; -+ - /* A sanity check. Somebody might have generated invalid - archive. */ - if (runp >= newp + len) --- -1.8.4.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-ar-c-fix-num-passed-to-memset.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-ar-c-fix-num-passed-to-memset.patch deleted file mode 100644 index b619619ec..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-ar-c-fix-num-passed-to-memset.patch +++ /dev/null @@ -1,23 +0,0 @@ -Upstream-Status: Backport - -ar.c (do_oper_delete): Fix num passed to memset -native build failed as following on Fedora18+: -ar.c: In function 'do_oper_delete': -ar.c:918:31: error: argument to 'sizeof' in 'memset' call is the same expression as the destination; did you mean to dereference it? [-Werror=sizeof-pointer-memaccess] - memset (found, '\0', sizeof (found)); - ^ -The original commit is http://git.fedorahosted.org/cgit/elfutils.git/commit/src/ar.c?id=1a4d0668d18bf1090c5c08cdb5cb3ba2b8eb5410 - -Signed-off-by: Zhenhua Luo - ---- elfutils-0.148/src/ar.c.org 2013-03-12 21:12:17.928281375 -0500 -+++ elfutils-0.148/src/ar.c 2013-03-12 21:15:30.053285271 -0500 -@@ -915,7 +915,7 @@ - long int instance) - { - bool *found = alloca (sizeof (bool) * argc); -- memset (found, '\0', sizeof (found)); -+ memset (found, '\0', sizeof (bool) * argc); - - /* List of the files we keep. */ - struct armem *to_copy = NULL; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-fsize.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-fsize.patch deleted file mode 100644 index 0ff353d0d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/elfutils-fsize.patch +++ /dev/null @@ -1,39 +0,0 @@ -Upstream-Status: Backport - -The ELF_T_LIB and ELF_T_GNUHASH sizes were missing from fsize table. - -This could cause a failure in the elf*_xlatetof function. 
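As an aside (a minimal sketch, not part of the patch being removed here), the missing fsize-table entries would most likely surface through libelf's external-size query before reaching the xlate path; this assumes a valid Elf handle and hedges on the exact failure mode:

/* Hedged sketch: with ELF_T_LIB absent from the fsize table, the
 * external size query would plausibly come back as 0, and
 * elf32_xlatetof()/elf64_xlatetof() conversions of ELF_T_LIB data
 * could then fail or produce empty output. */
#include <gelf.h>
#include <stdio.h>

static void report_lib_fsize(Elf *elf)
{
	size_t sz = gelf_fsize(elf, ELF_T_LIB, 1, EV_CURRENT);
	printf("external size of one ELF_T_LIB record: %zu\n", sz);
}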
- -diff -ur elfutils-0.148.orig/libelf/exttypes.h elfutils-0.148/libelf/exttypes.h ---- elfutils-0.148.orig/libelf/exttypes.h 2009-01-08 12:56:37.000000000 -0800 -+++ elfutils-0.148/libelf/exttypes.h 2010-08-18 14:00:33.000000000 -0700 -@@ -94,6 +94,7 @@ - Vernaux32 (Ext_); - Syminfo32 (Ext_); - Move32 (Ext_); -+Lib32 (Ext_); - auxv_t32 (Ext_); - - Ehdr64 (Ext_); -@@ -110,6 +111,7 @@ - Vernaux64 (Ext_); - Syminfo64 (Ext_); - Move64 (Ext_); -+Lib64 (Ext_); - auxv_t64 (Ext_); - - #undef START -diff -ur elfutils-0.148.orig/libelf/gelf_fsize.c elfutils-0.148/libelf/gelf_fsize.c ---- elfutils-0.148.orig/libelf/gelf_fsize.c 2009-01-08 12:56:37.000000000 -0800 -+++ elfutils-0.148/libelf/gelf_fsize.c 2010-08-18 14:11:57.000000000 -0700 -@@ -87,7 +87,9 @@ - [ELF_T_NHDR] = sizeof (ElfW2(LIBELFBITS, Ext_Nhdr)), \ - [ELF_T_SYMINFO] = sizeof (ElfW2(LIBELFBITS, Ext_Syminfo)), \ - [ELF_T_MOVE] = sizeof (ElfW2(LIBELFBITS, Ext_Move)), \ -- [ELF_T_AUXV] = sizeof (ElfW2(LIBELFBITS, Ext_auxv_t)) -+ [ELF_T_LIB] = sizeof (ElfW2(LIBELFBITS, Ext_Lib)), \ -+ [ELF_T_AUXV] = sizeof (ElfW2(LIBELFBITS, Ext_auxv_t)), \ -+ [ELF_T_GNUHASH] = ELFW2(LIBELFBITS, FSZ_WORD) - TYPE_SIZES (32) - }, - [ELFCLASS64 - 1] = { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/fix-build-gcc-4.8.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/fix-build-gcc-4.8.patch deleted file mode 100644 index 0e2869020..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/fix-build-gcc-4.8.patch +++ /dev/null @@ -1,57 +0,0 @@ -This patch fixes a warning seen with gcc 4.8 (especially on ubuntu 13.10) - -| addr2line.c: In function 'handle_address': -| addr2line.c:450:7: error: format '%a' expects argument of type 'float *', but argument 3 has type 'char **' [-Werror=format=] -| if (sscanf (string, "(%a[^)])%" PRIiMAX "%n", &name, &addr, &i) == 2 -| ^ -| addr2line.c:453:7: error: format '%a' expects argument of type 'float *', but argument 3 has type 'char **' [-Werror=format=] -| switch (sscanf (string, "%a[^-+]%n%" PRIiMAX "%n", &name, &i, &addr, &j)) -| ^ -| cc1: all warnings being treated as errors - - -%a is old GNU style and should be abandoned in favor of %m - -Also see - -http://gcc.gnu.org/bugzilla/show_bug.cgi?id=54361 - -to support this assertion - -This patch is added via redhat-compatibility patch so lets revert this part -here. 
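For reference, a minimal sketch (an assumed example, not taken from the patch) of the %m allocation modifier that the hunks below move addr2line.c and line2addr.c to; glibc/POSIX.1-2008 sscanf() allocates the matched string and the caller must free it:

/* The matched field is returned through a char ** that the library
 * allocates, replacing the old GNU-specific %a form. */
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
	char *file = NULL;
	int line = 0;

	if (sscanf("line2addr.c:42", "%m[^:]:%d", &file, &line) == 2) {
		printf("file=%s line=%d\n", file, line);
		free(file);
	}
	return 0;
}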
- -Signed-off-by: Khem Raj - -Upstream-Status: Inappropriate [Caused by an earlier patch] - -Index: elfutils-0.148/src/addr2line.c -=================================================================== ---- elfutils-0.148.orig/src/addr2line.c 2013-09-23 17:46:45.513586538 -0700 -+++ elfutils-0.148/src/addr2line.c 2013-09-23 17:46:46.329586558 -0700 -@@ -447,10 +447,10 @@ - bool parsed = false; - int i, j; - char *name = NULL; -- if (sscanf (string, "(%a[^)])%" PRIiMAX "%n", &name, &addr, &i) == 2 -+ if (sscanf (string, "(%m[^)])%" PRIiMAX "%n", &name, &addr, &i) == 2 - && string[i] == '\0') - parsed = adjust_to_section (name, &addr, dwfl); -- switch (sscanf (string, "%a[^-+]%n%" PRIiMAX "%n", &name, &i, &addr, &j)) -+ switch (sscanf (string, "%m[^-+]%n%" PRIiMAX "%n", &name, &i, &addr, &j)) - { - default: - break; -Index: elfutils-0.148/tests/line2addr.c -=================================================================== ---- elfutils-0.148.orig/tests/line2addr.c 2013-09-23 17:46:45.521586538 -0700 -+++ elfutils-0.148/tests/line2addr.c 2013-09-23 17:46:46.329586558 -0700 -@@ -132,7 +132,7 @@ - { - struct args a = { .arg = argv[cnt] }; - -- switch (sscanf (a.arg, "%a[^:]:%d", &a.file, &a.line)) -+ switch (sscanf (a.arg, "%m[^:]:%d", &a.file, &a.line)) - { - default: - case 0: diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/fix_for_gcc-4.7.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/fix_for_gcc-4.7.patch deleted file mode 100644 index c78f95d69..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/fix_for_gcc-4.7.patch +++ /dev/null @@ -1,73 +0,0 @@ -Upstream-Status: pending -gcc 4.7 does not like pointer conversion, so have a void * tmp var to work -around following compilation issue. - -Signed-off-by: Nitin A Kamble -2011/07/07 - -| md5.c: In function 'md5_finish_ctx': -| md5.c:108:3: error: dereferencing type-punned pointer will break strict-aliasing rules [-Werror=strict-aliasing] -| md5.c:109:3: error: dereferencing type-punned pointer will break strict-aliasing rules [-Werror=strict-aliasing] -| cc1: all warnings being treated as errors -| -| make[2]: *** [md5.o] Error 1 -| make[2]: *** Waiting for unfinished jobs.... -| sha1.c: In function 'sha1_finish_ctx': -| sha1.c:109:3: error: dereferencing type-punned pointer will break strict-aliasing rules [-Werror=strict-aliasing] -| sha1.c:111:3: error: dereferencing type-punned pointer will break strict-aliasing rules [-Werror=strict-aliasing] -| cc1: all warnings being treated as errors -| -| make[2]: *** [sha1.o] Error 1 - -Index: elfutils-0.148/lib/md5.c -=================================================================== ---- elfutils-0.148.orig/lib/md5.c -+++ elfutils-0.148/lib/md5.c -@@ -95,6 +95,7 @@ md5_finish_ctx (ctx, resbuf) - /* Take yet unprocessed bytes into account. */ - md5_uint32 bytes = ctx->buflen; - size_t pad; -+ void * tmp; - - /* Now count remaining bytes. */ - ctx->total[0] += bytes; -@@ -105,9 +106,10 @@ md5_finish_ctx (ctx, resbuf) - memcpy (&ctx->buffer[bytes], fillbuf, pad); - - /* Put the 64-bit file length in *bits* at the end of the buffer. 
*/ -- *(md5_uint32 *) &ctx->buffer[bytes + pad] = SWAP (ctx->total[0] << 3); -- *(md5_uint32 *) &ctx->buffer[bytes + pad + 4] = SWAP ((ctx->total[1] << 3) | -- (ctx->total[0] >> 29)); -+ tmp = &ctx->buffer[bytes + pad]; -+ *(md5_uint32 *) tmp = SWAP (ctx->total[0] << 3); -+ tmp = &ctx->buffer[bytes + pad + 4]; -+ *(md5_uint32 *) tmp = SWAP ((ctx->total[1] << 3) | (ctx->total[0] >> 29)); - - /* Process last bytes. */ - md5_process_block (ctx->buffer, bytes + pad + 8, ctx); -Index: elfutils-0.148/lib/sha1.c -=================================================================== ---- elfutils-0.148.orig/lib/sha1.c -+++ elfutils-0.148/lib/sha1.c -@@ -96,6 +96,7 @@ sha1_finish_ctx (ctx, resbuf) - /* Take yet unprocessed bytes into account. */ - sha1_uint32 bytes = ctx->buflen; - size_t pad; -+ void * tmp; - - /* Now count remaining bytes. */ - ctx->total[0] += bytes; -@@ -106,9 +107,10 @@ sha1_finish_ctx (ctx, resbuf) - memcpy (&ctx->buffer[bytes], fillbuf, pad); - - /* Put the 64-bit file length in *bits* at the end of the buffer. */ -- *(sha1_uint32 *) &ctx->buffer[bytes + pad] = SWAP ((ctx->total[1] << 3) | -- (ctx->total[0] >> 29)); -- *(sha1_uint32 *) &ctx->buffer[bytes + pad + 4] = SWAP (ctx->total[0] << 3); -+ tmp = &ctx->buffer[bytes + pad]; -+ *(sha1_uint32 *) tmp = SWAP ((ctx->total[1] << 3) | (ctx->total[0] >> 29)); -+ tmp = &ctx->buffer[bytes + pad + 4]; -+ *(sha1_uint32 *) tmp = SWAP (ctx->total[0] << 3); - - /* Process last bytes. */ - sha1_process_block (ctx->buffer, bytes + pad + 8, ctx); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/gcc6.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/gcc6.patch deleted file mode 100644 index b56a754f8..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/gcc6.patch +++ /dev/null @@ -1,23 +0,0 @@ -Fix warnings found with gcc6 - -| ../../elfutils-0.148/libdw/dwarf_siblingof.c: In function 'dwarf_siblingof': -| ../../elfutils-0.148/libdw/dwarf_siblingof.c:69:6: error: nonnull argument 'result' compared to NULL [-Werror=nonnull-compare] -| if (result == NULL) -| ^ - -Signed-off-by: Khem Raj -Upstream-Status: Inappropriate [ unmaintained ] -Index: elfutils-0.148/libdw/dwarf_siblingof.c -=================================================================== ---- elfutils-0.148.orig/libdw/dwarf_siblingof.c -+++ elfutils-0.148/libdw/dwarf_siblingof.c -@@ -66,9 +66,6 @@ dwarf_siblingof (die, result) - if (die == NULL) - return -1; - -- if (result == NULL) -- return -1; -- - if (result != die) - result->addr = NULL; - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/hppa_backend.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/hppa_backend.diff deleted file mode 100644 index a86b97c68..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/hppa_backend.diff +++ /dev/null @@ -1,801 +0,0 @@ -Upstream-Status: Backport - -Index: elfutils-0.146/backends/parisc_init.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/parisc_init.c 2010-04-24 10:10:50.000000000 +0000 -@@ -0,0 +1,74 @@ -+/* Initialization of PA-RISC specific backend library. -+ Copyright (C) 2002, 2005, 2006 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ Written by Ulrich Drepper , 2002. 
-+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#define BACKEND parisc_ -+#define RELOC_PREFIX R_PARISC_ -+#include "libebl_CPU.h" -+#include "libebl_parisc.h" -+ -+/* This defines the common reloc hooks based on parisc_reloc.def. */ -+#include "common-reloc.c" -+ -+ -+const char * -+parisc_init (elf, machine, eh, ehlen) -+ Elf *elf __attribute__ ((unused)); -+ GElf_Half machine __attribute__ ((unused)); -+ Ebl *eh; -+ size_t ehlen; -+{ -+ int pa64 = 0; -+ -+ /* Check whether the Elf_BH object has a sufficent size. */ -+ if (ehlen < sizeof (Ebl)) -+ return NULL; -+ -+ if (elf) { -+ GElf_Ehdr ehdr_mem; -+ GElf_Ehdr *ehdr = gelf_getehdr (elf, &ehdr_mem); -+ if (ehdr && (ehdr->e_flags & EF_PARISC_WIDE)) -+ pa64 = 1; -+ } -+ /* We handle it. */ -+ eh->name = "PA-RISC"; -+ parisc_init_reloc (eh); -+ HOOK (eh, reloc_simple_type); -+ HOOK (eh, machine_flag_check); -+ HOOK (eh, symbol_type_name); -+ HOOK (eh, segment_type_name); -+ HOOK (eh, section_type_name); -+ HOOK (eh, register_info); -+ if (pa64) -+ eh->return_value_location = parisc_return_value_location_64; -+ else -+ eh->return_value_location = parisc_return_value_location_32; -+ -+ return MODVERSION; -+} -Index: elfutils-0.146/backends/parisc_regs.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/parisc_regs.c 2010-04-24 10:10:50.000000000 +0000 -@@ -0,0 +1,159 @@ -+/* Register names and numbers for PA-RISC DWARF. -+ Copyright (C) 2005, 2006 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. 
-+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND parisc_ -+#include "libebl_CPU.h" -+ -+ssize_t -+parisc_register_info (Ebl *ebl, int regno, char *name, size_t namelen, -+ const char **prefix, const char **setname, -+ int *bits, int *type) -+{ -+ int pa64 = 0; -+ -+ if (ebl->elf) { -+ GElf_Ehdr ehdr_mem; -+ GElf_Ehdr *ehdr = gelf_getehdr (ebl->elf, &ehdr_mem); -+ if (ehdr->e_flags & EF_PARISC_WIDE) -+ pa64 = 1; -+ } -+ -+ int nregs = pa64 ? 127 : 128; -+ -+ if (name == NULL) -+ return nregs; -+ -+ if (regno < 0 || regno >= nregs || namelen < 6) -+ return -1; -+ -+ *prefix = "%"; -+ -+ if (regno < 32) -+ { -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ if (pa64) -+ { -+ *bits = 64; -+ } -+ else -+ { -+ *bits = 32; -+ } -+ } -+ else if (regno == 32) -+ { -+ *setname = "special"; -+ if (pa64) -+ { -+ *bits = 6; -+ } -+ else -+ { -+ *bits = 5; -+ } -+ *type = DW_ATE_unsigned; -+ } -+ else -+ { -+ *setname = "FPU"; -+ *type = DW_ATE_float; -+ if (pa64) -+ { -+ *bits = 64; -+ } -+ else -+ { -+ *bits = 32; -+ } -+ } -+ -+ if (regno < 33) { -+ switch (regno) -+ { -+ case 0 ... 9: -+ name[0] = 'r'; -+ name[1] = regno + '0'; -+ namelen = 2; -+ break; -+ case 10 ... 31: -+ name[0] = 'r'; -+ name[1] = regno / 10 + '0'; -+ name[2] = regno % 10 + '0'; -+ namelen = 3; -+ break; -+ case 32: -+ *prefix = NULL; -+ name[0] = 'S'; -+ name[1] = 'A'; -+ name[2] = 'R'; -+ namelen = 3; -+ break; -+ } -+ } -+ else { -+ if (pa64 && ((regno - 72) % 2)) { -+ *setname = NULL; -+ return 0; -+ } -+ -+ switch (regno) -+ { -+ case 72 + 0 ... 72 + 11: -+ name[0] = 'f'; -+ name[1] = 'r'; -+ name[2] = (regno + 8 - 72) / 2 + '0'; -+ namelen = 3; -+ if ((regno + 8 - 72) % 2) { -+ name[3] = 'R'; -+ namelen++; -+ } -+ break; -+ case 72 + 12 ... 72 + 55: -+ name[0] = 'f'; -+ name[1] = 'r'; -+ name[2] = (regno + 8 - 72) / 2 / 10 + '0'; -+ name[3] = (regno + 8 - 72) / 2 % 10 + '0'; -+ namelen = 4; -+ if ((regno + 8 - 72) % 2) { -+ name[4] = 'R'; -+ namelen++; -+ } -+ break; -+ default: -+ *setname = NULL; -+ return 0; -+ } -+ } -+ name[namelen++] = '\0'; -+ return namelen; -+} -Index: elfutils-0.146/backends/parisc_reloc.def -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/parisc_reloc.def 2010-04-24 10:10:50.000000000 +0000 -@@ -0,0 +1,128 @@ -+/* List the relocation types for PA-RISC. -*- C -*- -+ Copyright (C) 2005 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. 
-+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+/* NAME, REL|EXEC|DYN */ -+ -+RELOC_TYPE (NONE, EXEC|DYN) -+RELOC_TYPE (DIR32, REL|EXEC|DYN) -+RELOC_TYPE (DIR21L, REL|EXEC|DYN) -+RELOC_TYPE (DIR17R, REL) -+RELOC_TYPE (DIR17F, REL) -+RELOC_TYPE (DIR14R, REL|DYN) -+RELOC_TYPE (PCREL32, REL) -+RELOC_TYPE (PCREL21L, REL) -+RELOC_TYPE (PCREL17R, REL) -+RELOC_TYPE (PCREL17F, REL) -+RELOC_TYPE (PCREL14R, REL|EXEC) -+RELOC_TYPE (DPREL21L, REL) -+RELOC_TYPE (DPREL14WR, REL) -+RELOC_TYPE (DPREL14DR, REL) -+RELOC_TYPE (DPREL14R, REL) -+RELOC_TYPE (GPREL21L, 0) -+RELOC_TYPE (GPREL14R, 0) -+RELOC_TYPE (LTOFF21L, REL) -+RELOC_TYPE (LTOFF14R, REL) -+RELOC_TYPE (DLTIND14F, 0) -+RELOC_TYPE (SETBASE, 0) -+RELOC_TYPE (SECREL32, REL) -+RELOC_TYPE (BASEREL21L, 0) -+RELOC_TYPE (BASEREL17R, 0) -+RELOC_TYPE (BASEREL14R, 0) -+RELOC_TYPE (SEGBASE, 0) -+RELOC_TYPE (SEGREL32, REL) -+RELOC_TYPE (PLTOFF21L, 0) -+RELOC_TYPE (PLTOFF14R, 0) -+RELOC_TYPE (PLTOFF14F, 0) -+RELOC_TYPE (LTOFF_FPTR32, 0) -+RELOC_TYPE (LTOFF_FPTR21L, 0) -+RELOC_TYPE (LTOFF_FPTR14R, 0) -+RELOC_TYPE (FPTR64, 0) -+RELOC_TYPE (PLABEL32, REL|DYN) -+RELOC_TYPE (PCREL64, 0) -+RELOC_TYPE (PCREL22C, 0) -+RELOC_TYPE (PCREL22F, 0) -+RELOC_TYPE (PCREL14WR, 0) -+RELOC_TYPE (PCREL14DR, 0) -+RELOC_TYPE (PCREL16F, 0) -+RELOC_TYPE (PCREL16WF, 0) -+RELOC_TYPE (PCREL16DF, 0) -+RELOC_TYPE (DIR64, REL|DYN) -+RELOC_TYPE (DIR14WR, REL) -+RELOC_TYPE (DIR14DR, REL) -+RELOC_TYPE (DIR16F, REL) -+RELOC_TYPE (DIR16WF, REL) -+RELOC_TYPE (DIR16DF, REL) -+RELOC_TYPE (GPREL64, 0) -+RELOC_TYPE (GPREL14WR, 0) -+RELOC_TYPE (GPREL14DR, 0) -+RELOC_TYPE (GPREL16F, 0) -+RELOC_TYPE (GPREL16WF, 0) -+RELOC_TYPE (GPREL16DF, 0) -+RELOC_TYPE (LTOFF64, 0) -+RELOC_TYPE (LTOFF14WR, 0) -+RELOC_TYPE (LTOFF14DR, 0) -+RELOC_TYPE (LTOFF16F, 0) -+RELOC_TYPE (LTOFF16WF, 0) -+RELOC_TYPE (LTOFF16DF, 0) -+RELOC_TYPE (SECREL64, 0) -+RELOC_TYPE (BASEREL14WR, 0) -+RELOC_TYPE (BASEREL14DR, 0) -+RELOC_TYPE (SEGREL64, 0) -+RELOC_TYPE (PLTOFF14WR, 0) -+RELOC_TYPE (PLTOFF14DR, 0) -+RELOC_TYPE (PLTOFF16F, 0) -+RELOC_TYPE (PLTOFF16WF, 0) -+RELOC_TYPE (PLTOFF16DF, 0) -+RELOC_TYPE (LTOFF_FPTR64, 0) -+RELOC_TYPE (LTOFF_FPTR14WR, 0) -+RELOC_TYPE (LTOFF_FPTR14DR, 0) -+RELOC_TYPE (LTOFF_FPTR16F, 0) -+RELOC_TYPE (LTOFF_FPTR16WF, 0) -+RELOC_TYPE (LTOFF_FPTR16DF, 0) -+RELOC_TYPE (COPY, EXEC) -+RELOC_TYPE (IPLT, EXEC|DYN) -+RELOC_TYPE (EPLT, 0) -+RELOC_TYPE (TPREL32, DYN) -+RELOC_TYPE (TPREL21L, 0) -+RELOC_TYPE (TPREL14R, 0) -+RELOC_TYPE (LTOFF_TP21L, 0) -+RELOC_TYPE (LTOFF_TP14R, 0) -+RELOC_TYPE (LTOFF_TP14F, 0) -+RELOC_TYPE (TPREL64, 0) -+RELOC_TYPE (TPREL14WR, 0) -+RELOC_TYPE (TPREL14DR, 0) -+RELOC_TYPE (TPREL16F, 0) -+RELOC_TYPE (TPREL16WF, 0) -+RELOC_TYPE (TPREL16DF, 0) -+RELOC_TYPE (LTOFF_TP64, 0) -+RELOC_TYPE (LTOFF_TP14WR, 0) -+RELOC_TYPE (LTOFF_TP14DR, 0) -+RELOC_TYPE (LTOFF_TP16F, 0) -+RELOC_TYPE (LTOFF_TP16WF, 0) -+RELOC_TYPE (LTOFF_TP16DF, 0) -+RELOC_TYPE (TLS_DTPMOD32, DYN) -+RELOC_TYPE 
(TLS_DTPMOD64, DYN) -+ -+#define NO_RELATIVE_RELOC 1 -Index: elfutils-0.146/backends/parisc_retval.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/parisc_retval.c 2010-04-24 10:10:50.000000000 +0000 -@@ -0,0 +1,213 @@ -+/* Function return value location for Linux/PA-RISC ABI. -+ Copyright (C) 2005 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND parisc_ -+#include "libebl_CPU.h" -+#include "libebl_parisc.h" -+ -+/* %r28, or pair %r28, %r29. */ -+static const Dwarf_Op loc_intreg32[] = -+ { -+ { .atom = DW_OP_reg28 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_reg29 }, { .atom = DW_OP_piece, .number = 4 }, -+ }; -+ -+static const Dwarf_Op loc_intreg[] = -+ { -+ { .atom = DW_OP_reg28 }, { .atom = DW_OP_piece, .number = 8 }, -+ { .atom = DW_OP_reg29 }, { .atom = DW_OP_piece, .number = 8 }, -+ }; -+#define nloc_intreg 1 -+#define nloc_intregpair 4 -+ -+/* %fr4L, or pair %fr4L, %fr4R on pa-32 */ -+static const Dwarf_Op loc_fpreg32[] = -+ { -+ { .atom = DW_OP_regx, .number = 72 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_regx, .number = 73 }, { .atom = DW_OP_piece, .number = 4 }, -+ }; -+#define nloc_fpreg32 2 -+#define nloc_fpregpair32 4 -+ -+/* $fr4 */ -+static const Dwarf_Op loc_fpreg[] = -+ { -+ { .atom = DW_OP_regx, .number = 72 }, -+ }; -+#define nloc_fpreg 1 -+ -+#if 0 -+/* The return value is a structure and is actually stored in stack space -+ passed in a hidden argument by the caller. Address of the location is stored -+ in %r28 before function call, but it may be changed by function. */ -+static const Dwarf_Op loc_aggregate[] = -+ { -+ { .atom = DW_OP_breg28 }, -+ }; -+#define nloc_aggregate 1 -+#endif -+ -+static int -+parisc_return_value_location_ (Dwarf_Die *functypedie, const Dwarf_Op **locp, int pa64) -+{ -+ Dwarf_Word regsize = pa64 ? 8 : 4; -+ -+ /* Start with the function's type, and get the DW_AT_type attribute, -+ which is the type of the return value. */ -+ -+ Dwarf_Attribute attr_mem; -+ Dwarf_Attribute *attr = dwarf_attr_integrate (functypedie, DW_AT_type, &attr_mem); -+ if (attr == NULL) -+ /* The function has no return value, like a `void' function in C. 
*/ -+ return 0; -+ -+ Dwarf_Die die_mem; -+ Dwarf_Die *typedie = dwarf_formref_die (attr, &die_mem); -+ int tag = dwarf_tag (typedie); -+ -+ /* Follow typedefs and qualifiers to get to the actual type. */ -+ while (tag == DW_TAG_typedef -+ || tag == DW_TAG_const_type || tag == DW_TAG_volatile_type -+ || tag == DW_TAG_restrict_type || tag == DW_TAG_mutable_type) -+ { -+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); -+ typedie = dwarf_formref_die (attr, &die_mem); -+ tag = dwarf_tag (typedie); -+ } -+ -+ switch (tag) -+ { -+ case -1: -+ return -1; -+ -+ case DW_TAG_subrange_type: -+ if (! dwarf_hasattr_integrate (typedie, DW_AT_byte_size)) -+ { -+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); -+ typedie = dwarf_formref_die (attr, &die_mem); -+ tag = dwarf_tag (typedie); -+ } -+ /* Fall through. */ -+ -+ case DW_TAG_base_type: -+ case DW_TAG_enumeration_type: -+ case DW_TAG_pointer_type: -+ case DW_TAG_ptr_to_member_type: -+ { -+ Dwarf_Word size; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_byte_size, -+ &attr_mem), &size) != 0) -+ { -+ if (tag == DW_TAG_pointer_type || tag == DW_TAG_ptr_to_member_type) -+ size = 4; -+ else -+ return -1; -+ } -+ if (tag == DW_TAG_base_type) -+ { -+ Dwarf_Word encoding; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding, -+ &attr_mem), &encoding) != 0) -+ return -1; -+ -+ if (encoding == DW_ATE_float) -+ { -+ if (pa64) { -+ *locp = loc_fpreg; -+ if (size <= 8) -+ return nloc_fpreg; -+ } -+ else { -+ *locp = loc_fpreg32; -+ if (size <= 4) -+ return nloc_fpreg32; -+ else if (size <= 8) -+ return nloc_fpregpair32; -+ } -+ goto aggregate; -+ } -+ } -+ if (pa64) -+ *locp = loc_intreg; -+ else -+ *locp = loc_intreg32; -+ if (size <= regsize) -+ return nloc_intreg; -+ if (size <= 2 * regsize) -+ return nloc_intregpair; -+ -+ /* Else fall through. */ -+ } -+ -+ case DW_TAG_structure_type: -+ case DW_TAG_class_type: -+ case DW_TAG_union_type: -+ case DW_TAG_array_type: -+ aggregate: { -+ Dwarf_Word size; -+ if (dwarf_aggregate_size (typedie, &size) != 0) -+ return -1; -+ if (pa64) -+ *locp = loc_intreg; -+ else -+ *locp = loc_intreg32; -+ if (size <= regsize) -+ return nloc_intreg; -+ if (size <= 2 * regsize) -+ return nloc_intregpair; -+#if 0 -+ /* there should be some way to know this location... But I do not see it. */ -+ *locp = loc_aggregate; -+ return nloc_aggregate; -+#endif -+ /* fall through. */ -+ } -+ } -+ -+ /* XXX We don't have a good way to return specific errors from ebl calls. -+ This value means we do not understand the type, but it is well-formed -+ DWARF and might be valid. */ -+ return -2; -+} -+ -+int -+parisc_return_value_location_32 (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ return parisc_return_value_location_ (functypedie, locp, 0); -+} -+ -+int -+parisc_return_value_location_64 (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ return parisc_return_value_location_ (functypedie, locp, 1); -+} -+ -Index: elfutils-0.146/backends/parisc_symbol.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/parisc_symbol.c 2010-04-24 10:10:50.000000000 +0000 -@@ -0,0 +1,112 @@ -+/* PA-RISC specific symbolic name handling. -+ Copyright (C) 2002, 2005 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ Written by Ulrich Drepper , 2002. 
-+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND parisc_ -+#include "libebl_CPU.h" -+ -+const char * -+parisc_segment_type_name (int segment, char *buf __attribute__ ((unused)), -+ size_t len __attribute__ ((unused))) -+{ -+ switch (segment) -+ { -+ case PT_PARISC_ARCHEXT: -+ return "PARISC_ARCHEXT"; -+ case PT_PARISC_UNWIND: -+ return "PARISC_UNWIND"; -+ default: -+ break; -+ } -+ return NULL; -+} -+ -+/* Return symbolic representation of symbol type. */ -+const char * -+parisc_symbol_type_name(int symbol, char *buf __attribute__ ((unused)), -+ size_t len __attribute__ ((unused))) -+{ -+ if (symbol == STT_PARISC_MILLICODE) -+ return "PARISC_MILLI"; -+ return NULL; -+} -+ -+/* Return symbolic representation of section type. */ -+const char * -+parisc_section_type_name (int type, -+ char *buf __attribute__ ((unused)), -+ size_t len __attribute__ ((unused))) -+{ -+ switch (type) -+ { -+ case SHT_PARISC_EXT: -+ return "PARISC_EXT"; -+ case SHT_PARISC_UNWIND: -+ return "PARISC_UNWIND"; -+ case SHT_PARISC_DOC: -+ return "PARISC_DOC"; -+ } -+ -+ return NULL; -+} -+ -+/* Check whether machine flags are valid. */ -+bool -+parisc_machine_flag_check (GElf_Word flags) -+{ -+ if (flags &~ (EF_PARISC_TRAPNIL | EF_PARISC_EXT | EF_PARISC_LSB | -+ EF_PARISC_WIDE | EF_PARISC_NO_KABP | -+ EF_PARISC_LAZYSWAP | EF_PARISC_ARCH)) -+ return 0; -+ -+ GElf_Word arch = flags & EF_PARISC_ARCH; -+ -+ return ((arch == EFA_PARISC_1_0) || (arch == EFA_PARISC_1_1) || -+ (arch == EFA_PARISC_2_0)); -+} -+ -+/* Check for the simple reloc types. 
*/ -+Elf_Type -+parisc_reloc_simple_type (Ebl *ebl __attribute__ ((unused)), int type) -+{ -+ switch (type) -+ { -+ case R_PARISC_DIR64: -+ case R_PARISC_SECREL64: -+ return ELF_T_XWORD; -+ case R_PARISC_DIR32: -+ case R_PARISC_SECREL32: -+ return ELF_T_WORD; -+ default: -+ return ELF_T_NUM; -+ } -+} -Index: elfutils-0.146/backends/libebl_parisc.h -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/libebl_parisc.h 2010-04-24 10:10:50.000000000 +0000 -@@ -0,0 +1,9 @@ -+#ifndef _LIBEBL_HPPA_H -+#define _LIBEBL_HPPA_H 1 -+ -+#include -+ -+extern int parisc_return_value_location_32(Dwarf_Die *, const Dwarf_Op **locp); -+extern int parisc_return_value_location_64(Dwarf_Die *, const Dwarf_Op **locp); -+ -+#endif -Index: elfutils-0.146/backends/Makefile.am -=================================================================== ---- elfutils-0.146.orig/backends/Makefile.am 2010-04-24 10:10:41.000000000 +0000 -+++ elfutils-0.146/backends/Makefile.am 2010-04-24 10:10:50.000000000 +0000 -@@ -29,11 +29,11 @@ - -I$(top_srcdir)/libelf -I$(top_srcdir)/libdw - - --modules = i386 sh x86_64 ia64 alpha arm sparc ppc ppc64 s390 -+modules = i386 sh x86_64 ia64 alpha arm sparc ppc ppc64 s390 parisc - libebl_pic = libebl_i386_pic.a libebl_sh_pic.a libebl_x86_64_pic.a \ - libebl_ia64_pic.a libebl_alpha_pic.a libebl_arm_pic.a \ - libebl_sparc_pic.a libebl_ppc_pic.a libebl_ppc64_pic.a \ -- libebl_s390_pic.a -+ libebl_s390_pic.a libebl_parisc_pic.a - noinst_LIBRARIES = $(libebl_pic) - noinst_DATA = $(libebl_pic:_pic.a=.so) - -@@ -95,6 +95,9 @@ - libebl_s390_pic_a_SOURCES = $(s390_SRCS) - am_libebl_s390_pic_a_OBJECTS = $(s390_SRCS:.c=.os) - -+parisc_SRCS = parisc_init.c parisc_symbol.c parisc_regs.c parisc_retval.c -+libebl_parisc_pic_a_SOURCES = $(parisc_SRCS) -+am_libebl_parisc_pic_a_OBJECTS = $(parisc_SRCS:.c=.os) - - libebl_%.so libebl_%.map: libebl_%_pic.a $(libelf) $(libdw) - @rm -f $(@:.so=.map) -Index: elfutils-0.146/libelf/elf.h -=================================================================== ---- elfutils-0.146.orig/libelf/elf.h 2010-04-13 20:08:02.000000000 +0000 -+++ elfutils-0.146/libelf/elf.h 2010-04-24 10:10:50.000000000 +0000 -@@ -1789,16 +1789,24 @@ - #define R_PARISC_PCREL17F 12 /* 17 bits of rel. address. */ - #define R_PARISC_PCREL14R 14 /* Right 14 bits of rel. address. */ - #define R_PARISC_DPREL21L 18 /* Left 21 bits of rel. address. */ -+#define R_PARISC_DPREL14WR 19 -+#define R_PARISC_DPREL14DR 20 - #define R_PARISC_DPREL14R 22 /* Right 14 bits of rel. address. */ - #define R_PARISC_GPREL21L 26 /* GP-relative, left 21 bits. */ - #define R_PARISC_GPREL14R 30 /* GP-relative, right 14 bits. */ - #define R_PARISC_LTOFF21L 34 /* LT-relative, left 21 bits. */ - #define R_PARISC_LTOFF14R 38 /* LT-relative, right 14 bits. */ -+#define R_PARISC_DLTIND14F 39 -+#define R_PARISC_SETBASE 40 - #define R_PARISC_SECREL32 41 /* 32 bits section rel. address. */ -+#define R_PARISC_BASEREL21L 42 -+#define R_PARISC_BASEREL17R 43 -+#define R_PARISC_BASEREL14R 46 - #define R_PARISC_SEGBASE 48 /* No relocation, set segment base. */ - #define R_PARISC_SEGREL32 49 /* 32 bits segment rel. address. */ - #define R_PARISC_PLTOFF21L 50 /* PLT rel. address, left 21 bits. */ - #define R_PARISC_PLTOFF14R 54 /* PLT rel. address, right 14 bits. */ -+#define R_PARISC_PLTOFF14F 55 - #define R_PARISC_LTOFF_FPTR32 57 /* 32 bits LT-rel. function pointer. */ - #define R_PARISC_LTOFF_FPTR21L 58 /* LT-rel. fct ptr, left 21 bits. 
*/ - #define R_PARISC_LTOFF_FPTR14R 62 /* LT-rel. fct ptr, right 14 bits. */ -@@ -1807,6 +1815,7 @@ - #define R_PARISC_PLABEL21L 66 /* Left 21 bits of fdesc address. */ - #define R_PARISC_PLABEL14R 70 /* Right 14 bits of fdesc address. */ - #define R_PARISC_PCREL64 72 /* 64 bits PC-rel. address. */ -+#define R_PARISC_PCREL22C 73 - #define R_PARISC_PCREL22F 74 /* 22 bits PC-rel. address. */ - #define R_PARISC_PCREL14WR 75 /* PC-rel. address, right 14 bits. */ - #define R_PARISC_PCREL14DR 76 /* PC rel. address, right 14 bits. */ -@@ -1832,6 +1841,8 @@ - #define R_PARISC_LTOFF16WF 102 /* 16 bits LT-rel. address. */ - #define R_PARISC_LTOFF16DF 103 /* 16 bits LT-rel. address. */ - #define R_PARISC_SECREL64 104 /* 64 bits section rel. address. */ -+#define R_PARISC_BASEREL14WR 107 -+#define R_PARISC_BASEREL14DR 108 - #define R_PARISC_SEGREL64 112 /* 64 bits segment rel. address. */ - #define R_PARISC_PLTOFF14WR 115 /* PLT-rel. address, right 14 bits. */ - #define R_PARISC_PLTOFF14DR 116 /* PLT-rel. address, right 14 bits. */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/i386_dis.h b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/i386_dis.h deleted file mode 100644 index a5cc01f91..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/i386_dis.h +++ /dev/null @@ -1,1657 +0,0 @@ -#define MNEMONIC_BITS 10 -#define SUFFIX_BITS 3 -#define FCT1_BITS 7 -#define STR1_BITS 4 -#define OFF1_1_BITS 7 -#define OFF1_1_BIAS 3 -#define OFF1_2_BITS 7 -#define OFF1_2_BIAS 4 -#define OFF1_3_BITS 1 -#define OFF1_3_BIAS 7 -#define FCT2_BITS 6 -#define STR2_BITS 2 -#define OFF2_1_BITS 7 -#define OFF2_1_BIAS 5 -#define OFF2_2_BITS 7 -#define OFF2_2_BIAS 4 -#define OFF2_3_BITS 4 -#define OFF2_3_BIAS 7 -#define FCT3_BITS 4 -#define STR3_BITS 1 -#define OFF3_1_BITS 6 -#define OFF3_1_BIAS 10 -#define OFF3_2_BITS 1 -#define OFF3_2_BIAS 21 - -#include - -#define suffix_none 0 -#define suffix_w 1 -#define suffix_w0 2 -#define suffix_W 3 -#define suffix_tttn 4 -#define suffix_D 7 -#define suffix_w1 5 -#define suffix_W1 6 - -static const opfct_t op1_fct[] = -{ - NULL, - FCT_MOD$R_M, - FCT_Mod$R_m, - FCT_abs, - FCT_ax, - FCT_ax$w, - FCT_ccc, - FCT_ddd, - FCT_disp8, - FCT_ds_bx, - FCT_ds_si, - FCT_dx, - FCT_es_di, - FCT_freg, - FCT_imm$s, - FCT_imm$w, - FCT_imm16, - FCT_imm8, - FCT_imms8, - FCT_mmxreg, - FCT_mod$16r_m, - FCT_mod$64r_m, - FCT_mod$8r_m, - FCT_mod$r_m, - FCT_mod$r_m$w, - FCT_reg, - FCT_reg$w, - FCT_reg16, - FCT_reg64, - FCT_rel, - FCT_sel, - FCT_sreg2, - FCT_sreg3, - FCT_string, - FCT_xmmreg, -}; -static const char op1_str[] = - "%ax\0" - "%cl\0" - "%eax\0" - "%st\0" - "%xmm0\0" - "*"; -static const uint8_t op1_str_idx[] = { - 0, - 4, - 8, - 13, - 17, - 23, -}; -static const opfct_t op2_fct[] = -{ - NULL, - FCT_MOD$R_M, - FCT_Mod$R_m, - FCT_abs, - FCT_absval, - FCT_ax$w, - FCT_ccc, - FCT_ddd, - FCT_ds_si, - FCT_dx, - FCT_es_di, - FCT_freg, - FCT_imm8, - FCT_mmxreg, - FCT_mod$64r_m, - FCT_mod$r_m, - FCT_mod$r_m$w, - FCT_moda$r_m, - FCT_reg, - FCT_reg$w, - FCT_reg64, - FCT_sreg3, - FCT_string, - FCT_xmmreg, -}; -static const char op2_str[] = - "%ecx\0" - "%st"; -static const uint8_t op2_str_idx[] = { - 0, - 5, -}; -static const opfct_t op3_fct[] = -{ - NULL, - FCT_mmxreg, - FCT_mod$r_m, - FCT_reg, - FCT_string, - FCT_xmmreg, -}; -static const char op3_str[] = - "%edx"; -static const uint8_t op3_str_idx[] = { - 0, -}; -static const struct instr_enc instrtab[] = -{ - { .mnemonic = MNE_aaa, .rep = 0, .repe = 0, .suffix = 0, 
.modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_aad, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_aam, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_aas, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_adc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_adc, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_adc, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_adc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_adc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_add, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_add, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_add, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_add, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_add, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addsubpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 
21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addsubps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_and, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_and, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_and, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_and, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_and, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andnpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andnps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_arpl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bound, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 7, .off1_2 = 0, .off1_3 = 0, .fct2 = 17, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bsf, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bsr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 
0, }, - { .mnemonic = MNE_bswap, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 25, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bt, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_btc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_btc, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_btr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_btr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bts, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bts, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_call, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 29, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_call, .rep = 0, .repe = 0, .suffix = 3, .modrm = 1, .fct1 = 21, .str1 = 6, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lcall, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 30, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 4, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lcall, .rep = 0, .repe = 0, .suffix = 3, .modrm = 1, .fct1 = 21, .str1 = 6, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, 
.fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_clc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cli, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_syscall, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_clts, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sysret, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sysenter, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sysexit, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmov, .rep = 0, .repe = 0, .suffix = 4, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmp, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmp, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 
9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmps, .rep = 0, .repe = 1, .suffix = 1, .modrm = 0, .fct1 = 12, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 8, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpxchg, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 15, .off1_2 = 11, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 8, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpxchg8b, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cpuid, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtdq2pd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtpd2dq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttpd2dq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_daa, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_das, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, 
.off3_2 = 0, }, - { .mnemonic = MNE_dec, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_dec, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 25, .str1 = 0, .off1_1 = 2, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_div, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_emms, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_enter, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 16, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 12, .str2 = 0, .off2_1 = 19, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnop, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fchs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fabs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ftst, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fxam, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fld1, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldl2t, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldl2e, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldpi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldlg2, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, 
.off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldln2, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldz, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_f2xm1, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fyl2x, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fptan, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fpatan, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fxtract, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fprem1, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdecstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fincstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fprem, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fyl2xp1, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsqrt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsincos, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 
0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_frndint, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fscale, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsin, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcos, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fadd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fadd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fadd, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fmul, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fmul, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fmul, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsub, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsubr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsubr, .rep = 0, .repe 
= 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsubr, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fst, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fst, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fstp, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldenv, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldcw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnstenv, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnstcw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fxch, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_faddp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fiadd, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmove, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 
0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fmulp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fimul, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsubp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fisub, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsubrp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fisubr, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnstsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 1, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fbld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcomip, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fbstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fchs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fclex, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_finit, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fwait, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, 
.off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnclex, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmove, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovbe, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovnb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovne, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovnbe, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovnu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcom, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcom, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcomp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcomp, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcompp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcomi, .rep = 0, 
.repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcomip, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fucomi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fucomip, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcos, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdecstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdiv, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdiv, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdiv, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fidivl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fidiv, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivrp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 
= 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivr, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fidivrl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fidivr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivrp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ffree, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovbe, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ficom, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ficomp, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fild, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fildl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fildll, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fincstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fninit, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, 
.fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fist, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fistp, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fistpll, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fisttp, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fisttpll, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fstpt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fld, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fucom, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_frstor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fucomp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnsave, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnstsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_hlt, .rep = 0, 
.repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_idiv, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_imul, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_imul, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_imul, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 14, .str1 = 0, .off1_1 = 13, .off1_2 = 2, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 3, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_in, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_in, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 11, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 3, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_inc, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_inc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 25, .str1 = 0, .off1_1 = 2, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ins, .rep = 1, .repe = 0, .suffix = 1, .modrm = 0, .fct1 = 11, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 10, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_int, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_int3, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_into, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_invd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_swapgs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, 
.str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_invlpg, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_iret, .rep = 0, .repe = 0, .suffix = 6, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_j, .rep = 0, .repe = 0, .suffix = 4, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_j, .rep = 0, .repe = 0, .suffix = 4, .modrm = 0, .fct1 = 29, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_set, .rep = 0, .repe = 0, .suffix = 4, .modrm = 1, .fct1 = 22, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_jmp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_jmp, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 29, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_jmp, .rep = 0, .repe = 0, .suffix = 3, .modrm = 1, .fct1 = 21, .str1 = 6, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ljmp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 30, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 4, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ljmp, .rep = 0, .repe = 0, .suffix = 3, .modrm = 1, .fct1 = 21, .str1 = 6, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lahf, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lar, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lds, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 5, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { 
.mnemonic = MNE_lea, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 5, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_leave, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_les, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 5, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lfs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lgs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lgdt, .rep = 0, .repe = 0, .suffix = 2, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lidt, .rep = 0, .repe = 0, .suffix = 2, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lldt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lmsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lock, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lods, .rep = 1, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 10, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 3, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_loop, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_loope, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_loopne, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lsl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 
= 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ltr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 3, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 35, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 5, .str1 = 0, .off1_1 = 37, .off1_2 = 3, .off1_3 = 0, .fct2 = 3, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 6, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 28, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 6, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 7, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 28, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 7, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 32, .str1 = 0, .off1_1 = 7, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 21, .str2 = 0, .off2_1 = 5, .off2_2 = 0, .off2_3 = 0, .fct3 = 
0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movs, .rep = 1, .repe = 0, .suffix = 1, .modrm = 0, .fct1 = 10, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 10, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movsbl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 22, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movswl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movzbl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 22, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movzwl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mul, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_neg, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pause, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_nop, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_popcnt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_not, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_or, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_or, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_or, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_or, .rep = 0, .repe = 
0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_or, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_out, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 5, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 12, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_out, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 5, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 9, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_outs, .rep = 1, .repe = 0, .suffix = 1, .modrm = 0, .fct1 = 10, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 9, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pop, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pop, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 32, .str1 = 0, .off1_1 = 7, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_popf, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_push, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_push, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 25, .str1 = 0, .off1_1 = 2, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pop, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 25, .str1 = 0, .off1_1 = 2, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_push, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 14, .str1 = 0, .off1_1 = 5, .off1_2 = 2, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_push, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 31, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_push, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 32, .str1 = 0, .off1_1 = 7, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pusha, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, 
.off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_popa, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pushf, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rdmsr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rdpmc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rdtsc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ret, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ret, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 16, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lret, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lret, 
.rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 16, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rol, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rol, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rol, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ror, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ror, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ror, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rsm, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sahf, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sar, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sar, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sar, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sbb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sbb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sbb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 
= 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sbb, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sbb, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_scas, .rep = 0, .repe = 1, .suffix = 0, .modrm = 0, .fct1 = 12, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 3, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_set, .rep = 0, .repe = 0, .suffix = 4, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 2, .str3 = 0, .off3_1 = 6, .off3_2 = 0, }, - { .mnemonic = MNE_shld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 2, .str3 = 0, .off3_1 = 6, .off3_2 = 0, }, - { .mnemonic = MNE_shr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shrd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 2, .str3 = 0, .off3_1 = 6, .off3_2 = 0, }, - { .mnemonic = MNE_shrd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 2, .str3 = 0, .off3_1 = 6, 
.off3_2 = 0, }, - { .mnemonic = MNE_vmcall, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmlaunch, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmresume, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmxoff, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmread, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 28, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 14, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmwrite, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 21, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sgdtl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_monitor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 3, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 1, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 4, .str3 = 1, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mwait, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 3, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 22, .str2 = 1, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sidtl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sldt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_smsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_stc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_std, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sti, .rep = 0, .repe = 0, .suffix = 0, 
.modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_stos, .rep = 1, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 5, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 10, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_str, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sub, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sub, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_test, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_test, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_test, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ud2a, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_verr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_verw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_wbinvd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, 
.off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetch, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 22, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetchw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 22, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetchnta, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetcht0, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetcht1, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetcht2, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_nop, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_wrmsr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xadd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 15, .off1_2 = 11, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 8, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xchg, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xchg, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 4, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xlat, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 9, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 
0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xor, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xor, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_emms, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pand, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pand, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pandn, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pandn, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaddwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaddwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_por, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_por, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pxor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pxor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andnps, .rep = 0, 
.repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpeqps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpltps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpleps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpunordps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpneqps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpnltps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpnleps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpordps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpeqss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpltss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpless, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpunordss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpneqss, .rep = 0, .repe = 0, 
.suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpnltss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpnless, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpordss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fxrstor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fxsave, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ldmxcsr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_stmxcsr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movupd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movups, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movupd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, 
.str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movups, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movddup, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movsldup, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhlps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhlpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhlps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_unpcklpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_unpcklps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_unpckhpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_unpckhps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, 
.off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movshdup, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlhps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlhpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlhps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movapd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movaps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movapd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movaps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtsi2sd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtsi2ss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, 
.fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtpi2pd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtpi2ps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movntpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movntps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttsd2si, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttss2si, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttpd2pi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttps2pi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtpd2pi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtsd2si, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtss2si, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtps2pi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ucomisd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ucomiss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 
23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_comisd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_comiss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_getsec, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movmskpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 26, .off1_2 = 0, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movmskps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sqrtpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sqrtsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sqrtss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sqrtps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rsqrtss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rsqrtps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcpss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcpps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, 
.off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andnpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andnps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_orpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_orps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xorpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xorps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mulsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mulss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mulpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, 
.off3_2 = 0, }, - { .mnemonic = MNE_mulps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtsd2ss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtss2sd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtpd2ps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtps2pd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtps2dq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttps2dq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtdq2ps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_subsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_subss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_subpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_subps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_minsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_minss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = 
MNE_minpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_minps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_divsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_divss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_divpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_divps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maxsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maxss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maxpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maxps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpcklbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpcklbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpcklwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpcklwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckldq, .rep = 0, .repe = 0, 
.suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckldq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packsswb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packsswb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packuswb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packuswb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhwd, .rep = 0, .repe = 0, .suffix = 0, 
.modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packssdw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packssdw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpcklqdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhqdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movdqa, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movdqu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pshufd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_pshuflw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 
17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_pshufhw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_pshufw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 1, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 1, .str3 = 0, .off3_1 = 8, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_haddpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_haddps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_hsubpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_hsubps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, 
.off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movdqa, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movdqu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 1, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movnti, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pinsrw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_pinsrw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 1, .str3 = 0, .off3_1 = 8, .off3_2 = 0, }, - { .mnemonic = MNE_pextrw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 3, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_pextrw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 3, .str3 = 0, .off3_1 = 8, .off3_2 = 0, }, - { .mnemonic = MNE_shufpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_shufps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 8, .off3_2 = 0, }, - { .mnemonic = MNE_psrlw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, 
.str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmullw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmullw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movdq2q, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 26, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movq2dq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 19, .str1 = 0, .off1_1 = 26, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovmskb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 26, .off1_2 = 0, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovmskb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 19, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubusb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubusb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, 
.off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubusw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubusw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddusb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddusb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddusw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddusw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pavgb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pavgb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psraw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psraw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, 
.off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrad, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrad, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pavgw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pavgw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhuw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhuw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movntdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movntq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 1, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { 
.mnemonic = MNE_pminsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lddqu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pslld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pslld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllq, .rep = 0, .repe = 0, 
.suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmuludq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmuludq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psadbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psadbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maskmovdqu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 26, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maskmovq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 19, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, 
.str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pshufb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pshufb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, 
.off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaddubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaddubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 
0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhrsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhrsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_palignr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_palignr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 1, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 1, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_vmclear, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmxon, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmptrld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmptrst, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 
0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psraw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psraw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrad, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrad, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pslld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pslld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, 
- { .mnemonic = MNE_psrldq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pslldq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lfence, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mfence, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sfence, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_clflush, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_blendps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_blendpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_blendvps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 33, .str1 = 5, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_blendvpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 33, .str1 = 5, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_dpps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_dppd, .rep = 0, .repe = 0, 
.suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_insertps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_movntdqa, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mpsadbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_packusdw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pblendvb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 33, .str1 = 5, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pblendw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpestri, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpestrm, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpistri, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpistrm, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phminposuw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pinsrb, .rep = 0, .repe = 0, 
.suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pinsrd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxud, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxuw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminud, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminuw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxbd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxbq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxwq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, 
.fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxbd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxbq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxwq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmuldq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ptest, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 23, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_roundps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_roundpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_roundss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_roundsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 17, .str1 = 
0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pop, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 31, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, -}; -static const uint8_t match_data[] = -{ - 0x11, 0x37, - 0x22, 0xd5, 0xa, - 0x22, 0xd4, 0xa, - 0x11, 0x3f, - 0x1, 0xfe, 0x14, - 0x2, 0xfe, 0x80, 0x38, 0x10, - 0x2, 0xfe, 0x82, 0x38, 0x10, - 0x2, 0xfe, 0x10, 0, 0, - 0x2, 0xfe, 0x12, 0, 0, - 0x1, 0xfe, 0x4, - 0x2, 0xfe, 0x80, 0x38, 0, - 0x12, 0x83, 0x38, 0, - 0x2, 0xfe, 0, 0, 0, - 0x2, 0xfe, 0x2, 0, 0, - 0x34, 0x66, 0xf, 0xd0, 0, 0, - 0x34, 0xf2, 0xf, 0xd0, 0, 0, - 0x1, 0xfe, 0x24, - 0x2, 0xfe, 0x80, 0x38, 0x20, - 0x2, 0xfe, 0x82, 0x38, 0x20, - 0x2, 0xfe, 0x20, 0, 0, - 0x2, 0xfe, 0x22, 0, 0, - 0x34, 0x66, 0xf, 0x54, 0, 0, - 0x23, 0xf, 0x54, 0, 0, - 0x34, 0x66, 0xf, 0x55, 0, 0, - 0x23, 0xf, 0x55, 0, 0, - 0x12, 0x63, 0, 0, - 0x12, 0x62, 0, 0, - 0x23, 0xf, 0xbc, 0, 0, - 0x23, 0xf, 0xbd, 0, 0, - 0x12, 0xf, 0xf8, 0xc8, - 0x23, 0xf, 0xa3, 0, 0, - 0x23, 0xf, 0xba, 0x38, 0x20, - 0x23, 0xf, 0xbb, 0, 0, - 0x23, 0xf, 0xba, 0x38, 0x38, - 0x23, 0xf, 0xb3, 0, 0, - 0x23, 0xf, 0xba, 0x38, 0x30, - 0x23, 0xf, 0xab, 0, 0, - 0x23, 0xf, 0xba, 0x38, 0x28, - 0x11, 0xe8, - 0x12, 0xff, 0x38, 0x10, - 0x11, 0x9a, - 0x12, 0xff, 0x38, 0x18, - 0x11, 0x98, - 0x11, 0x99, - 0x11, 0xf8, - 0x11, 0xfc, - 0x11, 0xfa, - 0x22, 0xf, 0x5, - 0x22, 0xf, 0x6, - 0x22, 0xf, 0x7, - 0x22, 0xf, 0x34, - 0x22, 0xf, 0x35, - 0x11, 0xf5, - 0x13, 0xf, 0xf0, 0x40, 0, 0, - 0x1, 0xfe, 0x3c, - 0x2, 0xfe, 0x80, 0x38, 0x38, - 0x12, 0x83, 0x38, 0x38, - 0x2, 0xfe, 0x38, 0, 0, - 0x2, 0xfe, 0x3a, 0, 0, - 0x34, 0xf2, 0xf, 0xc2, 0, 0, - 0x34, 0xf3, 0xf, 0xc2, 0, 0, - 0x34, 0x66, 0xf, 0xc2, 0, 0, - 0x23, 0xf, 0xc2, 0, 0, - 0x1, 0xfe, 0xa6, - 0x13, 0xf, 0xfe, 0xb0, 0, 0, - 0x23, 0xf, 0xc7, 0x38, 0x8, - 0x22, 0xf, 0xa2, - 0x34, 0xf3, 0xf, 0xe6, 0, 0, - 0x34, 0xf2, 0xf, 0xe6, 0, 0, - 0x34, 0x66, 0xf, 0xe6, 0, 0, - 0x11, 0x27, - 0x11, 0x2f, - 0x2, 0xfe, 0xfe, 0x38, 0x8, - 0x1, 0xf8, 0x48, - 0x2, 0xfe, 0xf6, 0x38, 0x30, - 0x22, 0xf, 0x77, - 0x11, 0xc8, - 0x22, 0xd9, 0xd0, - 0x22, 0xd9, 0xe0, - 0x22, 0xd9, 0xe1, - 0x22, 0xd9, 0xe4, - 0x22, 0xd9, 0xe5, - 0x22, 0xd9, 0xe8, - 0x22, 0xd9, 0xe9, - 0x22, 0xd9, 0xea, - 0x22, 0xd9, 0xeb, - 0x22, 0xd9, 0xec, - 0x22, 0xd9, 0xed, - 0x22, 0xd9, 0xee, - 0x22, 0xd9, 0xf0, - 0x22, 0xd9, 0xf1, - 0x22, 0xd9, 0xf2, - 0x22, 0xd9, 0xf3, - 0x22, 0xd9, 0xf4, - 0x22, 0xd9, 0xf5, - 0x22, 0xd9, 0xf6, - 0x22, 0xd9, 0xf7, - 0x22, 0xd9, 0xf8, - 0x22, 0xd9, 0xf9, - 0x22, 0xd9, 0xfa, - 0x22, 0xd9, 0xfb, - 0x22, 0xd9, 0xfc, - 0x22, 0xd9, 0xfd, - 0x22, 0xd9, 0xfe, - 0x22, 0xd9, 0xff, - 0x12, 0xd8, 0xf8, 0xc0, - 0x12, 0xdc, 0xf8, 0xc0, - 0x2, 0xfb, 0xd8, 0x38, 0, - 0x12, 0xd8, 0xf8, 0xc8, - 0x12, 0xdc, 0xf8, 0xc8, - 0x2, 0xfb, 0xd8, 0x38, 0x8, - 0x12, 0xd8, 0xf8, 0xe0, - 0x12, 0xdc, 0xf8, 0xe0, - 0x2, 0xfb, 0xd8, 0x38, 0x20, - 0x12, 0xd8, 0xf8, 0xe8, - 0x12, 0xdc, 0xf8, 0xe8, - 0x2, 0xfb, 0xd8, 0x38, 0x28, - 0x12, 0xdd, 0xf8, 0xd0, - 0x2, 0xfb, 0xd9, 0x38, 0x10, - 0x12, 0xdd, 0xf8, 0xd8, - 0x2, 0xfb, 0xd9, 0x38, 0x18, - 0x12, 0xd9, 0x38, 0x20, - 0x12, 0xd9, 0x38, 0x28, - 0x12, 0xd9, 0x38, 0x30, - 0x12, 0xd9, 0x38, 0x38, - 0x12, 0xd9, 0xf8, 0xc8, - 0x12, 0xde, 0xf8, 0xc0, - 0x12, 0xda, 0xf8, 0xc0, - 0x2, 0xfb, 0xda, 0x38, 0, - 0x12, 0xda, 0xf8, 0xc8, - 0x12, 0xde, 0xf8, 0xc8, - 0x2, 0xfb, 0xda, 
0x38, 0x8, - 0x12, 0xde, 0xf8, 0xe0, - 0x2, 0xfb, 0xda, 0x38, 0x20, - 0x12, 0xde, 0xf8, 0xe8, - 0x2, 0xfb, 0xda, 0x38, 0x28, - 0x22, 0xdf, 0xe0, - 0x12, 0xdf, 0x38, 0x20, - 0x12, 0xdf, 0xf8, 0xf0, - 0x12, 0xdf, 0x38, 0x30, - 0x22, 0xd9, 0xe0, - 0x33, 0x9b, 0xdb, 0xe2, - 0x33, 0x9b, 0xdb, 0xe3, - 0x11, 0x9b, - 0x22, 0xdb, 0xe2, - 0x12, 0xda, 0xf8, 0xc0, - 0x12, 0xda, 0xf8, 0xc8, - 0x12, 0xda, 0xf8, 0xd0, - 0x12, 0xda, 0xf8, 0xd8, - 0x12, 0xdb, 0xf8, 0xc0, - 0x12, 0xdb, 0xf8, 0xc8, - 0x12, 0xdb, 0xf8, 0xd0, - 0x12, 0xdb, 0xf8, 0xd8, - 0x12, 0xd8, 0xf8, 0xd0, - 0x2, 0xfb, 0xd8, 0x38, 0x10, - 0x12, 0xd8, 0xf8, 0xd8, - 0x2, 0xfb, 0xd8, 0x38, 0x18, - 0x22, 0xde, 0xd9, - 0x12, 0xdb, 0xf8, 0xf0, - 0x12, 0xdf, 0xf8, 0xf0, - 0x12, 0xdb, 0xf8, 0xe8, - 0x12, 0xdf, 0xf8, 0xe8, - 0x22, 0xd9, 0xff, - 0x22, 0xd9, 0xf6, - 0x12, 0xd8, 0xf8, 0xf0, - 0x12, 0xdc, 0xf8, 0xf0, - 0x2, 0xfb, 0xd8, 0x38, 0x30, - 0x12, 0xda, 0x38, 0x30, - 0x12, 0xde, 0xf8, 0xf0, - 0x12, 0xde, 0x38, 0x30, - 0x12, 0xde, 0xf8, 0xf8, - 0x12, 0xd8, 0xf8, 0xf8, - 0x12, 0xdc, 0xf8, 0xf8, - 0x2, 0xfb, 0xd8, 0x38, 0x38, - 0x12, 0xda, 0x38, 0x38, - 0x12, 0xde, 0x38, 0x38, - 0x12, 0xde, 0xf8, 0xf0, - 0x12, 0xdd, 0xf8, 0xc0, - 0x12, 0xda, 0xf8, 0xd0, - 0x2, 0xfb, 0xda, 0x38, 0x10, - 0x12, 0xda, 0xf8, 0xd8, - 0x2, 0xfb, 0xda, 0x38, 0x18, - 0x12, 0xdf, 0x38, 0, - 0x12, 0xdb, 0x38, 0, - 0x12, 0xdf, 0x38, 0x28, - 0x22, 0xd9, 0xf7, - 0x22, 0xdb, 0xe3, - 0x2, 0xfb, 0xdb, 0x38, 0x10, - 0x2, 0xfb, 0xdb, 0x38, 0x18, - 0x12, 0xdf, 0x38, 0x38, - 0x2, 0xfb, 0xdb, 0x38, 0x8, - 0x12, 0xdd, 0x38, 0x8, - 0x12, 0xdb, 0x38, 0x28, - 0x12, 0xdb, 0x38, 0x38, - 0x12, 0xd9, 0xf8, 0xc0, - 0x2, 0xfb, 0xd9, 0x38, 0, - 0x12, 0xdd, 0xf8, 0xe0, - 0x12, 0xdd, 0x38, 0x20, - 0x12, 0xdd, 0xf8, 0xe8, - 0x12, 0xdd, 0x38, 0x30, - 0x12, 0xdd, 0x38, 0x38, - 0x11, 0xf4, - 0x2, 0xfe, 0xf6, 0x38, 0x38, - 0x2, 0xfe, 0xf6, 0x38, 0x28, - 0x23, 0xf, 0xaf, 0, 0, - 0x2, 0xfd, 0x69, 0, 0, - 0x1, 0xfe, 0xe4, - 0x1, 0xfe, 0xec, - 0x2, 0xfe, 0xfe, 0x38, 0, - 0x1, 0xf8, 0x40, - 0x1, 0xfe, 0x6c, - 0x11, 0xcd, - 0x11, 0xcc, - 0x11, 0xce, - 0x22, 0xf, 0x8, - 0x33, 0xf, 0x1, 0xf8, - 0x23, 0xf, 0x1, 0x38, 0x38, - 0x11, 0xcf, - 0x1, 0xf0, 0x70, - 0x12, 0xf, 0xf0, 0x80, - 0x13, 0xf, 0xf0, 0x90, 0x38, 0, - 0x11, 0xe3, - 0x11, 0xeb, - 0x11, 0xe9, - 0x12, 0xff, 0x38, 0x20, - 0x11, 0xea, - 0x12, 0xff, 0x38, 0x28, - 0x11, 0x9f, - 0x23, 0xf, 0x2, 0, 0, - 0x12, 0xc5, 0, 0, - 0x12, 0x8d, 0, 0, - 0x11, 0xc9, - 0x12, 0xc4, 0, 0, - 0x23, 0xf, 0xb4, 0, 0, - 0x23, 0xf, 0xb5, 0, 0, - 0x23, 0xf, 0x1, 0x38, 0x10, - 0x23, 0xf, 0x1, 0x38, 0x18, - 0x23, 0xf, 0, 0x38, 0x10, - 0x23, 0xf, 0x1, 0x38, 0x30, - 0x11, 0xf0, - 0x1, 0xfe, 0xac, - 0x11, 0xe2, - 0x11, 0xe1, - 0x11, 0xe0, - 0x23, 0xf, 0x3, 0, 0, - 0x23, 0xf, 0xb2, 0, 0, - 0x23, 0xf, 0, 0x38, 0x18, - 0x2, 0xfe, 0x88, 0, 0, - 0x2, 0xfe, 0x8a, 0, 0, - 0x2, 0xfe, 0xc6, 0x38, 0, - 0x1, 0xf0, 0xb0, - 0x1, 0xfe, 0xa0, - 0x1, 0xfe, 0xa2, - 0x23, 0xf, 0x20, 0xc0, 0xc0, - 0x23, 0xf, 0x22, 0xc0, 0xc0, - 0x23, 0xf, 0x21, 0xc0, 0xc0, - 0x23, 0xf, 0x23, 0xc0, 0xc0, - 0x12, 0x8c, 0, 0, - 0x12, 0x8e, 0, 0, - 0x1, 0xfe, 0xa4, - 0x23, 0xf, 0xbe, 0, 0, - 0x23, 0xf, 0xbf, 0, 0, - 0x23, 0xf, 0xb6, 0, 0, - 0x23, 0xf, 0xb7, 0, 0, - 0x2, 0xfe, 0xf6, 0x38, 0x20, - 0x2, 0xfe, 0xf6, 0x38, 0x18, - 0x22, 0xf3, 0x90, - 0x11, 0x90, - 0x34, 0xf3, 0xf, 0xb8, 0, 0, - 0x2, 0xfe, 0xf6, 0x38, 0x10, - 0x2, 0xfe, 0x8, 0, 0, - 0x2, 0xfe, 0xa, 0, 0, - 0x2, 0xfe, 0x80, 0x38, 0x8, - 0x2, 0xfe, 0x82, 0x38, 0x8, - 0x1, 0xfe, 0xc, - 0x1, 0xfe, 0xe6, - 0x1, 0xfe, 0xee, - 0x1, 0xfe, 0x6e, - 0x12, 0x8f, 0x38, 0, - 
0x12, 0xf, 0xc7, 0x81, - 0x11, 0x9d, - 0x12, 0xff, 0x38, 0x30, - 0x1, 0xf8, 0x50, - 0x1, 0xf8, 0x58, - 0x1, 0xfd, 0x68, - 0x1, 0xe7, 0x6, - 0x12, 0xf, 0xc7, 0x80, - 0x11, 0x60, - 0x11, 0x61, - 0x11, 0x9c, - 0x2, 0xfe, 0xd0, 0x38, 0x10, - 0x2, 0xfe, 0xd2, 0x38, 0x10, - 0x2, 0xfe, 0xc0, 0x38, 0x10, - 0x2, 0xfe, 0xd0, 0x38, 0x18, - 0x2, 0xfe, 0xd2, 0x38, 0x18, - 0x2, 0xfe, 0xc0, 0x38, 0x18, - 0x22, 0xf, 0x32, - 0x22, 0xf, 0x33, - 0x22, 0xf, 0x31, - 0x11, 0xc3, - 0x11, 0xc2, - 0x11, 0xcb, - 0x11, 0xca, - 0x2, 0xfe, 0xd0, 0x38, 0, - 0x2, 0xfe, 0xd2, 0x38, 0, - 0x2, 0xfe, 0xc0, 0x38, 0, - 0x2, 0xfe, 0xd0, 0x38, 0x8, - 0x2, 0xfe, 0xd2, 0x38, 0x8, - 0x2, 0xfe, 0xc0, 0x38, 0x8, - 0x22, 0xf, 0xaa, - 0x11, 0x9e, - 0x2, 0xfe, 0xd0, 0x38, 0x38, - 0x2, 0xfe, 0xd2, 0x38, 0x38, - 0x2, 0xfe, 0xc0, 0x38, 0x38, - 0x2, 0xfe, 0x18, 0, 0, - 0x2, 0xfe, 0x1a, 0, 0, - 0x1, 0xfe, 0x1c, - 0x2, 0xfe, 0x80, 0x38, 0x18, - 0x2, 0xfe, 0x82, 0x38, 0x18, - 0x1, 0xfe, 0xae, - 0x13, 0xf, 0xf0, 0x90, 0x38, 0, - 0x2, 0xfe, 0xd0, 0x38, 0x20, - 0x2, 0xfe, 0xd2, 0x38, 0x20, - 0x2, 0xfe, 0xc0, 0x38, 0x20, - 0x2, 0xfe, 0xd0, 0x38, 0x28, - 0x23, 0xf, 0xa4, 0, 0, - 0x23, 0xf, 0xa5, 0, 0, - 0x2, 0xfe, 0xd2, 0x38, 0x28, - 0x2, 0xfe, 0xc0, 0x38, 0x28, - 0x23, 0xf, 0xac, 0, 0, - 0x23, 0xf, 0xad, 0, 0, - 0x33, 0xf, 0x1, 0xc1, - 0x33, 0xf, 0x1, 0xc2, - 0x33, 0xf, 0x1, 0xc3, - 0x33, 0xf, 0x1, 0xc4, - 0x23, 0xf, 0x78, 0, 0, - 0x23, 0xf, 0x79, 0, 0, - 0x23, 0xf, 0x1, 0x38, 0, - 0x33, 0xf, 0x1, 0xc8, - 0x33, 0xf, 0x1, 0xc9, - 0x23, 0xf, 0x1, 0x38, 0x8, - 0x23, 0xf, 0, 0x38, 0, - 0x23, 0xf, 0x1, 0x38, 0x20, - 0x11, 0xf9, - 0x11, 0xfd, - 0x11, 0xfb, - 0x1, 0xfe, 0xaa, - 0x23, 0xf, 0, 0x38, 0x8, - 0x2, 0xfe, 0x28, 0, 0, - 0x2, 0xfe, 0x2a, 0, 0, - 0x1, 0xfe, 0x2c, - 0x2, 0xfe, 0x80, 0x38, 0x28, - 0x2, 0xfe, 0x82, 0x38, 0x28, - 0x2, 0xfe, 0x84, 0, 0, - 0x1, 0xfe, 0xa8, - 0x2, 0xfe, 0xf6, 0x38, 0, - 0x22, 0xf, 0xb, - 0x23, 0xf, 0, 0x38, 0x20, - 0x23, 0xf, 0, 0x38, 0x28, - 0x22, 0xf, 0x9, - 0x23, 0xf, 0xd, 0x38, 0, - 0x23, 0xf, 0xd, 0x38, 0x8, - 0x23, 0xf, 0x18, 0x38, 0, - 0x23, 0xf, 0x18, 0x38, 0x8, - 0x23, 0xf, 0x18, 0x38, 0x10, - 0x23, 0xf, 0x18, 0x38, 0x18, - 0x23, 0xf, 0x1f, 0, 0, - 0x22, 0xf, 0x30, - 0x13, 0xf, 0xfe, 0xc0, 0, 0, - 0x2, 0xfe, 0x86, 0, 0, - 0x1, 0xf8, 0x90, - 0x11, 0xd7, - 0x2, 0xfe, 0x30, 0, 0, - 0x2, 0xfe, 0x32, 0, 0, - 0x1, 0xfe, 0x34, - 0x2, 0xfe, 0x80, 0x38, 0x30, - 0x2, 0xfe, 0x82, 0x38, 0x30, - 0x22, 0xf, 0x77, - 0x34, 0x66, 0xf, 0xdb, 0, 0, - 0x23, 0xf, 0xdb, 0, 0, - 0x34, 0x66, 0xf, 0xdf, 0, 0, - 0x23, 0xf, 0xdf, 0, 0, - 0x34, 0x66, 0xf, 0xf5, 0, 0, - 0x23, 0xf, 0xf5, 0, 0, - 0x34, 0x66, 0xf, 0xeb, 0, 0, - 0x23, 0xf, 0xeb, 0, 0, - 0x34, 0x66, 0xf, 0xef, 0, 0, - 0x23, 0xf, 0xef, 0, 0, - 0x23, 0xf, 0x55, 0, 0, - 0x23, 0xf, 0x54, 0, 0, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x1, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x2, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x3, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x4, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x5, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x6, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x7, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x1, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x2, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x3, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x4, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x5, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x6, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x7, - 0x23, 0xf, 0xae, 0x38, 0x8, - 0x23, 0xf, 0xae, 0x38, 0, - 0x23, 0xf, 0xae, 0x38, 0x10, - 0x23, 0xf, 0xae, 0x38, 0x18, - 0x34, 0xf2, 0xf, 0x10, 0, 0, - 0x34, 0xf3, 0xf, 0x10, 0, 0, - 
0x34, 0x66, 0xf, 0x10, 0, 0, - 0x23, 0xf, 0x10, 0, 0, - 0x34, 0xf2, 0xf, 0x11, 0, 0, - 0x34, 0xf3, 0xf, 0x11, 0, 0, - 0x34, 0x66, 0xf, 0x11, 0, 0, - 0x23, 0xf, 0x11, 0, 0, - 0x34, 0xf2, 0xf, 0x12, 0, 0, - 0x34, 0xf3, 0xf, 0x12, 0, 0, - 0x34, 0x66, 0xf, 0x12, 0, 0, - 0x23, 0xf, 0x12, 0xc0, 0xc0, - 0x23, 0xf, 0x12, 0, 0, - 0x34, 0x66, 0xf, 0x13, 0xc0, 0xc0, - 0x23, 0xf, 0x13, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0x13, 0, 0, - 0x23, 0xf, 0x13, 0, 0, - 0x34, 0x66, 0xf, 0x14, 0, 0, - 0x23, 0xf, 0x14, 0, 0, - 0x34, 0x66, 0xf, 0x15, 0, 0, - 0x23, 0xf, 0x15, 0, 0, - 0x34, 0xf3, 0xf, 0x16, 0, 0, - 0x34, 0x66, 0xf, 0x16, 0, 0, - 0x23, 0xf, 0x16, 0xc0, 0xc0, - 0x23, 0xf, 0x16, 0, 0, - 0x34, 0x66, 0xf, 0x17, 0xc0, 0xc0, - 0x23, 0xf, 0x17, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0x17, 0, 0, - 0x23, 0xf, 0x17, 0, 0, - 0x34, 0x66, 0xf, 0x28, 0, 0, - 0x23, 0xf, 0x28, 0, 0, - 0x34, 0x66, 0xf, 0x29, 0, 0, - 0x23, 0xf, 0x29, 0, 0, - 0x34, 0xf2, 0xf, 0x2a, 0, 0, - 0x34, 0xf3, 0xf, 0x2a, 0, 0, - 0x34, 0x66, 0xf, 0x2a, 0, 0, - 0x23, 0xf, 0x2a, 0, 0, - 0x34, 0x66, 0xf, 0x2b, 0, 0, - 0x23, 0xf, 0x2b, 0, 0, - 0x34, 0xf2, 0xf, 0x2c, 0, 0, - 0x34, 0xf3, 0xf, 0x2c, 0, 0, - 0x34, 0x66, 0xf, 0x2c, 0, 0, - 0x23, 0xf, 0x2c, 0, 0, - 0x34, 0x66, 0xf, 0x2d, 0, 0, - 0x34, 0xf2, 0xf, 0x2d, 0, 0, - 0x34, 0xf3, 0xf, 0x2d, 0, 0, - 0x23, 0xf, 0x2d, 0, 0, - 0x34, 0x66, 0xf, 0x2e, 0, 0, - 0x23, 0xf, 0x2e, 0, 0, - 0x34, 0x66, 0xf, 0x2f, 0, 0, - 0x23, 0xf, 0x2f, 0, 0, - 0x22, 0xf, 0x37, - 0x34, 0x66, 0xf, 0x50, 0xc0, 0xc0, - 0x23, 0xf, 0x50, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0x51, 0, 0, - 0x34, 0xf2, 0xf, 0x51, 0, 0, - 0x34, 0xf3, 0xf, 0x51, 0, 0, - 0x23, 0xf, 0x51, 0, 0, - 0x34, 0xf3, 0xf, 0x52, 0, 0, - 0x23, 0xf, 0x52, 0, 0, - 0x34, 0xf3, 0xf, 0x53, 0, 0, - 0x23, 0xf, 0x53, 0, 0, - 0x34, 0x66, 0xf, 0x54, 0, 0, - 0x23, 0xf, 0x54, 0, 0, - 0x34, 0x66, 0xf, 0x55, 0, 0, - 0x23, 0xf, 0x55, 0, 0, - 0x34, 0x66, 0xf, 0x56, 0, 0, - 0x23, 0xf, 0x56, 0, 0, - 0x34, 0x66, 0xf, 0x57, 0, 0, - 0x23, 0xf, 0x57, 0, 0, - 0x34, 0xf2, 0xf, 0x58, 0, 0, - 0x34, 0xf3, 0xf, 0x58, 0, 0, - 0x34, 0x66, 0xf, 0x58, 0, 0, - 0x23, 0xf, 0x58, 0, 0, - 0x34, 0xf2, 0xf, 0x59, 0, 0, - 0x34, 0xf3, 0xf, 0x59, 0, 0, - 0x34, 0x66, 0xf, 0x59, 0, 0, - 0x23, 0xf, 0x59, 0, 0, - 0x34, 0xf2, 0xf, 0x5a, 0, 0, - 0x34, 0xf3, 0xf, 0x5a, 0, 0, - 0x34, 0x66, 0xf, 0x5a, 0, 0, - 0x23, 0xf, 0x5a, 0, 0, - 0x34, 0x66, 0xf, 0x5b, 0, 0, - 0x34, 0xf3, 0xf, 0x5b, 0, 0, - 0x23, 0xf, 0x5b, 0, 0, - 0x34, 0xf2, 0xf, 0x5c, 0, 0, - 0x34, 0xf3, 0xf, 0x5c, 0, 0, - 0x34, 0x66, 0xf, 0x5c, 0, 0, - 0x23, 0xf, 0x5c, 0, 0, - 0x34, 0xf2, 0xf, 0x5d, 0, 0, - 0x34, 0xf3, 0xf, 0x5d, 0, 0, - 0x34, 0x66, 0xf, 0x5d, 0, 0, - 0x23, 0xf, 0x5d, 0, 0, - 0x34, 0xf2, 0xf, 0x5e, 0, 0, - 0x34, 0xf3, 0xf, 0x5e, 0, 0, - 0x34, 0x66, 0xf, 0x5e, 0, 0, - 0x23, 0xf, 0x5e, 0, 0, - 0x34, 0xf2, 0xf, 0x5f, 0, 0, - 0x34, 0xf3, 0xf, 0x5f, 0, 0, - 0x34, 0x66, 0xf, 0x5f, 0, 0, - 0x23, 0xf, 0x5f, 0, 0, - 0x34, 0x66, 0xf, 0x60, 0, 0, - 0x23, 0xf, 0x60, 0, 0, - 0x34, 0x66, 0xf, 0x61, 0, 0, - 0x23, 0xf, 0x61, 0, 0, - 0x34, 0x66, 0xf, 0x62, 0, 0, - 0x23, 0xf, 0x62, 0, 0, - 0x34, 0x66, 0xf, 0x63, 0, 0, - 0x23, 0xf, 0x63, 0, 0, - 0x34, 0x66, 0xf, 0x64, 0, 0, - 0x23, 0xf, 0x64, 0, 0, - 0x34, 0x66, 0xf, 0x65, 0, 0, - 0x23, 0xf, 0x65, 0, 0, - 0x34, 0x66, 0xf, 0x66, 0, 0, - 0x23, 0xf, 0x66, 0, 0, - 0x34, 0x66, 0xf, 0x67, 0, 0, - 0x23, 0xf, 0x67, 0, 0, - 0x34, 0x66, 0xf, 0x68, 0, 0, - 0x23, 0xf, 0x68, 0, 0, - 0x34, 0x66, 0xf, 0x69, 0, 0, - 0x23, 0xf, 0x69, 0, 0, - 0x34, 0x66, 0xf, 0x6a, 0, 0, - 0x23, 0xf, 0x6a, 0, 0, - 0x34, 0x66, 0xf, 0x6b, 0, 0, - 0x23, 0xf, 0x6b, 
0, 0, - 0x34, 0x66, 0xf, 0x6c, 0, 0, - 0x34, 0x66, 0xf, 0x6d, 0, 0, - 0x34, 0x66, 0xf, 0x6e, 0, 0, - 0x23, 0xf, 0x6e, 0, 0, - 0x34, 0x66, 0xf, 0x6f, 0, 0, - 0x34, 0xf3, 0xf, 0x6f, 0, 0, - 0x23, 0xf, 0x6f, 0, 0, - 0x34, 0x66, 0xf, 0x70, 0, 0, - 0x34, 0xf2, 0xf, 0x70, 0, 0, - 0x34, 0xf3, 0xf, 0x70, 0, 0, - 0x23, 0xf, 0x70, 0, 0, - 0x34, 0x66, 0xf, 0x74, 0, 0, - 0x23, 0xf, 0x74, 0, 0, - 0x34, 0x66, 0xf, 0x75, 0, 0, - 0x23, 0xf, 0x75, 0, 0, - 0x34, 0x66, 0xf, 0x76, 0, 0, - 0x23, 0xf, 0x76, 0, 0, - 0x34, 0x66, 0xf, 0x7c, 0, 0, - 0x34, 0xf2, 0xf, 0x7c, 0, 0, - 0x34, 0x66, 0xf, 0x7d, 0, 0, - 0x34, 0xf2, 0xf, 0x7d, 0, 0, - 0x34, 0x66, 0xf, 0x7e, 0, 0, - 0x34, 0xf3, 0xf, 0x7e, 0, 0, - 0x23, 0xf, 0x7e, 0, 0, - 0x34, 0x66, 0xf, 0x7f, 0, 0, - 0x34, 0xf3, 0xf, 0x7f, 0, 0, - 0x23, 0xf, 0x7f, 0, 0, - 0x23, 0xf, 0xc3, 0, 0, - 0x34, 0x66, 0xf, 0xc4, 0, 0, - 0x23, 0xf, 0xc4, 0, 0, - 0x34, 0x66, 0xf, 0xc5, 0xc0, 0xc0, - 0x23, 0xf, 0xc5, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0xc6, 0, 0, - 0x23, 0xf, 0xc6, 0, 0, - 0x34, 0x66, 0xf, 0xd1, 0, 0, - 0x23, 0xf, 0xd1, 0, 0, - 0x34, 0x66, 0xf, 0xd2, 0, 0, - 0x23, 0xf, 0xd2, 0, 0, - 0x34, 0x66, 0xf, 0xd3, 0, 0, - 0x23, 0xf, 0xd3, 0, 0, - 0x34, 0x66, 0xf, 0xd4, 0, 0, - 0x23, 0xf, 0xd4, 0, 0, - 0x34, 0x66, 0xf, 0xd5, 0, 0, - 0x23, 0xf, 0xd5, 0, 0, - 0x34, 0x66, 0xf, 0xd6, 0, 0, - 0x34, 0xf2, 0xf, 0xd6, 0xc0, 0xc0, - 0x34, 0xf3, 0xf, 0xd6, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0xd7, 0xc0, 0xc0, - 0x23, 0xf, 0xd7, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0xd8, 0, 0, - 0x23, 0xf, 0xd8, 0, 0, - 0x34, 0x66, 0xf, 0xd9, 0, 0, - 0x23, 0xf, 0xd9, 0, 0, - 0x34, 0x66, 0xf, 0xda, 0, 0, - 0x23, 0xf, 0xda, 0, 0, - 0x34, 0x66, 0xf, 0xdc, 0, 0, - 0x23, 0xf, 0xdc, 0, 0, - 0x34, 0x66, 0xf, 0xdd, 0, 0, - 0x23, 0xf, 0xdd, 0, 0, - 0x34, 0x66, 0xf, 0xde, 0, 0, - 0x23, 0xf, 0xde, 0, 0, - 0x34, 0x66, 0xf, 0xe0, 0, 0, - 0x23, 0xf, 0xe0, 0, 0, - 0x34, 0x66, 0xf, 0xe1, 0, 0, - 0x23, 0xf, 0xe1, 0, 0, - 0x34, 0x66, 0xf, 0xe2, 0, 0, - 0x23, 0xf, 0xe2, 0, 0, - 0x34, 0x66, 0xf, 0xe3, 0, 0, - 0x23, 0xf, 0xe3, 0, 0, - 0x34, 0x66, 0xf, 0xe4, 0, 0, - 0x23, 0xf, 0xe4, 0, 0, - 0x34, 0x66, 0xf, 0xe5, 0, 0, - 0x23, 0xf, 0xe5, 0, 0, - 0x34, 0x66, 0xf, 0xe7, 0, 0, - 0x23, 0xf, 0xe7, 0, 0, - 0x34, 0x66, 0xf, 0xe8, 0, 0, - 0x23, 0xf, 0xe8, 0, 0, - 0x34, 0x66, 0xf, 0xe9, 0, 0, - 0x23, 0xf, 0xe9, 0, 0, - 0x34, 0x66, 0xf, 0xea, 0, 0, - 0x23, 0xf, 0xea, 0, 0, - 0x34, 0x66, 0xf, 0xec, 0, 0, - 0x23, 0xf, 0xec, 0, 0, - 0x34, 0x66, 0xf, 0xed, 0, 0, - 0x23, 0xf, 0xed, 0, 0, - 0x34, 0x66, 0xf, 0xee, 0, 0, - 0x23, 0xf, 0xee, 0, 0, - 0x34, 0xf2, 0xf, 0xf0, 0, 0, - 0x34, 0x66, 0xf, 0xf1, 0, 0, - 0x23, 0xf, 0xf1, 0, 0, - 0x34, 0x66, 0xf, 0xf2, 0, 0, - 0x23, 0xf, 0xf2, 0, 0, - 0x34, 0x66, 0xf, 0xf3, 0, 0, - 0x23, 0xf, 0xf3, 0, 0, - 0x34, 0x66, 0xf, 0xf4, 0, 0, - 0x23, 0xf, 0xf4, 0, 0, - 0x34, 0x66, 0xf, 0xf6, 0, 0, - 0x23, 0xf, 0xf6, 0, 0, - 0x34, 0x66, 0xf, 0xf7, 0xc0, 0xc0, - 0x23, 0xf, 0xf7, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0xf8, 0, 0, - 0x23, 0xf, 0xf8, 0, 0, - 0x34, 0x66, 0xf, 0xf9, 0, 0, - 0x23, 0xf, 0xf9, 0, 0, - 0x34, 0x66, 0xf, 0xfa, 0, 0, - 0x23, 0xf, 0xfa, 0, 0, - 0x34, 0x66, 0xf, 0xfb, 0, 0, - 0x23, 0xf, 0xfb, 0, 0, - 0x34, 0x66, 0xf, 0xfc, 0, 0, - 0x23, 0xf, 0xfc, 0, 0, - 0x34, 0x66, 0xf, 0xfd, 0, 0, - 0x23, 0xf, 0xfd, 0, 0, - 0x34, 0x66, 0xf, 0xfe, 0, 0, - 0x23, 0xf, 0xfe, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0, 0, 0, - 0x34, 0xf, 0x38, 0, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x1, 0, 0, - 0x34, 0xf, 0x38, 0x1, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x2, 0, 0, - 0x34, 0xf, 0x38, 0x2, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3, 0, 0, - 0x34, 0xf, 0x38, 0x3, 0, 0, - 
0x45, 0x66, 0xf, 0x38, 0x4, 0, 0, - 0x34, 0xf, 0x38, 0x4, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x5, 0, 0, - 0x34, 0xf, 0x38, 0x5, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x6, 0, 0, - 0x34, 0xf, 0x38, 0x6, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x7, 0, 0, - 0x34, 0xf, 0x38, 0x7, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x8, 0, 0, - 0x34, 0xf, 0x38, 0x8, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x9, 0, 0, - 0x34, 0xf, 0x38, 0x9, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0xa, 0, 0, - 0x34, 0xf, 0x38, 0xa, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0xb, 0, 0, - 0x34, 0xf, 0x38, 0xb, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x1c, 0, 0, - 0x34, 0xf, 0x38, 0x1c, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x1d, 0, 0, - 0x34, 0xf, 0x38, 0x1d, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x1e, 0, 0, - 0x34, 0xf, 0x38, 0x1e, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xf, 0, 0, - 0x34, 0xf, 0x3a, 0xf, 0, 0, - 0x34, 0x66, 0xf, 0xc7, 0x38, 0x30, - 0x34, 0xf3, 0xf, 0xc7, 0x38, 0x30, - 0x23, 0xf, 0xc7, 0x38, 0x30, - 0x23, 0xf, 0xc7, 0x38, 0x38, - 0x34, 0x66, 0xf, 0x71, 0xf8, 0xd0, - 0x23, 0xf, 0x71, 0xf8, 0xd0, - 0x34, 0x66, 0xf, 0x71, 0xf8, 0xe0, - 0x23, 0xf, 0x71, 0xf8, 0xe0, - 0x34, 0x66, 0xf, 0x71, 0xf8, 0xf0, - 0x23, 0xf, 0x71, 0xf8, 0xf0, - 0x34, 0x66, 0xf, 0x72, 0xf8, 0xd0, - 0x23, 0xf, 0x72, 0xf8, 0xd0, - 0x34, 0x66, 0xf, 0x72, 0xf8, 0xe0, - 0x23, 0xf, 0x72, 0xf8, 0xe0, - 0x34, 0x66, 0xf, 0x72, 0xf8, 0xf0, - 0x23, 0xf, 0x72, 0xf8, 0xf0, - 0x34, 0x66, 0xf, 0x73, 0xf8, 0xd0, - 0x23, 0xf, 0x73, 0xf8, 0xd0, - 0x34, 0x66, 0xf, 0x73, 0xf8, 0xd8, - 0x34, 0x66, 0xf, 0x73, 0xf8, 0xf0, - 0x23, 0xf, 0x73, 0xf8, 0xf0, - 0x34, 0x66, 0xf, 0x73, 0xf8, 0xf8, - 0x33, 0xf, 0xae, 0xe8, - 0x33, 0xf, 0xae, 0xf0, - 0x33, 0xf, 0xae, 0xf8, - 0x23, 0xf, 0xae, 0x38, 0x38, - 0x23, 0xf, 0xf, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xc, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xd, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x14, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x15, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x40, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x41, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x21, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x2a, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x42, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x2b, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x10, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xe, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x29, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x61, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x60, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x63, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x62, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x37, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x41, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x20, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x22, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3c, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3d, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3f, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3e, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x38, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x39, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3b, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3a, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x20, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x21, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x22, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x23, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x24, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x25, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x30, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x31, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x32, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x33, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x34, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x35, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x28, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x40, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x17, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x8, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x9, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xa, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xb, 0, 0, - 0x1, 0xe7, 0x7, -}; diff --git 
a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/m68k_backend.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/m68k_backend.diff deleted file mode 100644 index 5b621f92f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/m68k_backend.diff +++ /dev/null @@ -1,309 +0,0 @@ -Upstream-Status: Backport - -Index: elfutils-0.146/backends/m68k_init.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/m68k_init.c 2010-04-24 10:11:38.000000000 +0000 -@@ -0,0 +1,49 @@ -+/* Initialization of m68k specific backend library. -+ Copyright (C) 2007 Kurt Roeckx -+ -+ This software is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ This software is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with this software; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+*/ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#define BACKEND m68k_ -+#define RELOC_PREFIX R_68K_ -+#include "libebl_CPU.h" -+ -+/* This defines the common reloc hooks based on m68k_reloc.def. */ -+#include "common-reloc.c" -+ -+ -+const char * -+m68k_init (elf, machine, eh, ehlen) -+ Elf *elf __attribute__ ((unused)); -+ GElf_Half machine __attribute__ ((unused)); -+ Ebl *eh; -+ size_t ehlen; -+{ -+ /* Check whether the Elf_BH object has a sufficient size. */ -+ if (ehlen < sizeof (Ebl)) -+ return NULL; -+ -+ /* We handle it. */ -+ eh->name = "m68k"; -+ m68k_init_reloc (eh); -+ HOOK (eh, reloc_simple_type); -+ HOOK (eh, register_info); -+ -+ return MODVERSION; -+} -Index: elfutils-0.146/backends/m68k_regs.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/m68k_regs.c 2010-04-24 10:11:38.000000000 +0000 -@@ -0,0 +1,106 @@ -+/* Register names and numbers for m68k DWARF. -+ Copyright (C) 2007 Kurt Roeckx -+ -+ This software is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ This software is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with this software; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA.
-+ -+ */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND m68k_ -+#include "libebl_CPU.h" -+ -+ssize_t -+m68k_register_info (Ebl *ebl __attribute__ ((unused)), -+ int regno, char *name, size_t namelen, -+ const char **prefix, const char **setname, -+ int *bits, int *type) -+{ -+ if (name == NULL) -+ return 25; -+ -+ if (regno < 0 || regno > 24 || namelen < 5) -+ return -1; -+ -+ *prefix = "%"; -+ *bits = 32; -+ *type = (regno < 8 ? DW_ATE_signed -+ : regno < 16 ? DW_ATE_address : DW_ATE_float); -+ -+ if (regno < 8) -+ { -+ *setname = "integer"; -+ } -+ else if (regno < 16) -+ { -+ *setname = "address"; -+ } -+ else if (regno < 24) -+ { -+ *setname = "FPU"; -+ } -+ else -+ { -+ *setname = "address"; -+ *type = DW_ATE_address; -+ } -+ -+ switch (regno) -+ { -+ case 0 ... 7: -+ name[0] = 'd'; -+ name[1] = regno + '0'; -+ namelen = 2; -+ break; -+ -+ case 8 ... 13: -+ name[0] = 'a'; -+ name[1] = regno - 8 + '0'; -+ namelen = 2; -+ break; -+ -+ case 14: -+ name[0] = 'f'; -+ name[1] = 'p'; -+ namelen = 2; -+ break; -+ -+ case 15: -+ name[0] = 's'; -+ name[1] = 'p'; -+ namelen = 2; -+ break; -+ -+ case 16 ... 23: -+ name[0] = 'f'; -+ name[1] = 'p'; -+ name[2] = regno - 16 + '0'; -+ namelen = 3; -+ break; -+ -+ case 24: -+ name[0] = 'p'; -+ name[1] = 'c'; -+ namelen = 2; -+ } -+ -+ name[namelen++] = '\0'; -+ return namelen; -+} -+ -Index: elfutils-0.146/backends/m68k_reloc.def -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/m68k_reloc.def 2010-04-24 10:11:38.000000000 +0000 -@@ -0,0 +1,45 @@ -+/* List the relocation types for m68k. -*- C -*- -+ Copyright (C) 2007 Kurt Roeckx -+ -+ This software is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ This software is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with this software; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. 
-+*/ -+ -+/* NAME, REL|EXEC|DYN */ -+ -+RELOC_TYPE (NONE, 0) -+RELOC_TYPE (32, REL|EXEC|DYN) -+RELOC_TYPE (16, REL) -+RELOC_TYPE (8, REL) -+RELOC_TYPE (PC32, REL|EXEC|DYN) -+RELOC_TYPE (PC16, REL) -+RELOC_TYPE (PC8, REL) -+RELOC_TYPE (GOT32, REL) -+RELOC_TYPE (GOT16, REL) -+RELOC_TYPE (GOT8, REL) -+RELOC_TYPE (GOT32O, REL) -+RELOC_TYPE (GOT16O, REL) -+RELOC_TYPE (GOT8O, REL) -+RELOC_TYPE (PLT32, REL) -+RELOC_TYPE (PLT16, REL) -+RELOC_TYPE (PLT8, REL) -+RELOC_TYPE (PLT32O, REL) -+RELOC_TYPE (PLT16O, REL) -+RELOC_TYPE (PLT8O, REL) -+RELOC_TYPE (COPY, EXEC) -+RELOC_TYPE (GLOB_DAT, EXEC|DYN) -+RELOC_TYPE (JMP_SLOT, EXEC|DYN) -+RELOC_TYPE (RELATIVE, EXEC|DYN) -+RELOC_TYPE (GNU_VTINHERIT, REL) -+RELOC_TYPE (GNU_VTENTRY, REL) -+ -Index: elfutils-0.146/libelf/elf.h -=================================================================== ---- elfutils-0.146.orig/libelf/elf.h 2010-04-24 10:11:13.000000000 +0000 -+++ elfutils-0.146/libelf/elf.h 2010-04-24 10:13:50.000000000 +0000 -@@ -1125,6 +1125,9 @@ - #define R_68K_GLOB_DAT 20 /* Create GOT entry */ - #define R_68K_JMP_SLOT 21 /* Create PLT entry */ - #define R_68K_RELATIVE 22 /* Adjust by program base */ -+/* The next 2 are GNU extensions to enable C++ vtable garbage collection. */ -+#define R_68K_GNU_VTINHERIT 23 -+#define R_68K_GNU_VTENTRY 24 - #define R_68K_TLS_GD32 25 /* 32 bit GOT offset for GD */ - #define R_68K_TLS_GD16 26 /* 16 bit GOT offset for GD */ - #define R_68K_TLS_GD8 27 /* 8 bit GOT offset for GD */ -Index: elfutils-0.146/backends/Makefile.am -=================================================================== ---- elfutils-0.146.orig/backends/Makefile.am 2010-04-24 10:11:23.000000000 +0000 -+++ elfutils-0.146/backends/Makefile.am 2010-04-24 10:11:38.000000000 +0000 -@@ -29,11 +29,12 @@ - -I$(top_srcdir)/libelf -I$(top_srcdir)/libdw - - --modules = i386 sh x86_64 ia64 alpha arm sparc ppc ppc64 s390 parisc mips -+modules = i386 sh x86_64 ia64 alpha arm sparc ppc ppc64 s390 parisc mips m68k - libebl_pic = libebl_i386_pic.a libebl_sh_pic.a libebl_x86_64_pic.a \ - libebl_ia64_pic.a libebl_alpha_pic.a libebl_arm_pic.a \ - libebl_sparc_pic.a libebl_ppc_pic.a libebl_ppc64_pic.a \ -- libebl_s390_pic.a libebl_parisc_pic.a libebl_mips_pic.a -+ libebl_s390_pic.a libebl_parisc_pic.a libebl_mips_pic.a \ -+ libebl_m68k_pic.a - noinst_LIBRARIES = $(libebl_pic) - noinst_DATA = $(libebl_pic:_pic.a=.so) - -@@ -103,6 +104,10 @@ - libebl_mips_pic_a_SOURCES = $(mips_SRCS) - am_libebl_mips_pic_a_OBJECTS = $(mips_SRCS:.c=.os) - -+m68k_SRCS = m68k_init.c m68k_symbol.c m68k_regs.c -+libebl_m68k_pic_a_SOURCES = $(m68k_SRCS) -+am_libebl_m68k_pic_a_OBJECTS = $(m68k_SRCS:.c=.os) -+ - libebl_%.so libebl_%.map: libebl_%_pic.a $(libelf) $(libdw) - @rm -f $(@:.so=.map) - echo 'ELFUTILS_$(PACKAGE_VERSION) { global: $*_init; local: *; };' \ -Index: elfutils-0.146/backends/m68k_symbol.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.146/backends/m68k_symbol.c 2010-04-24 10:11:38.000000000 +0000 -@@ -0,0 +1,43 @@ -+/* m68k specific symbolic name handling. -+ Copyright (C) 2007 Kurt Roeckx -+ -+ This software is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ This software is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with this software; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+*/ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND m68k_ -+#include "libebl_CPU.h" -+ -+/* Check for the simple reloc types. */ -+Elf_Type -+m68k_reloc_simple_type (Ebl *ebl __attribute__ ((unused)), int type) -+{ -+ switch (type) -+ { -+ case R_68K_32: -+ return ELF_T_SWORD; -+ case R_68K_16: -+ return ELF_T_HALF; -+ case R_68K_8: -+ return ELF_T_BYTE; -+ default: -+ return ELF_T_NUM; -+ } -+} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/mips_backend.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/mips_backend.diff deleted file mode 100644 index 3f81a75b1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/mips_backend.diff +++ /dev/null @@ -1,713 +0,0 @@ -Upstream-Status: Backport - -Index: elfutils-0.145/backends/mips_init.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.145/backends/mips_init.c 2010-02-24 18:57:35.000000000 +0000 -@@ -0,0 +1,60 @@ -+/* Initialization of mips specific backend library. -+ Copyright (C) 2006 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#define BACKEND mips_ -+#define RELOC_PREFIX R_MIPS_ -+#include "libebl_CPU.h" -+ -+/* This defines the common reloc hooks based on mips_reloc.def. */ -+#include "common-reloc.c" -+ -+const char * -+mips_init (elf, machine, eh, ehlen) -+ Elf *elf __attribute__ ((unused)); -+ GElf_Half machine __attribute__ ((unused)); -+ Ebl *eh; -+ size_t ehlen; -+{ -+ /* Check whether the Elf_BH object has a sufficient size. */ -+ if (ehlen < sizeof (Ebl)) -+ return NULL; -+ -+ /* We handle it.
*/ -+ if (machine == EM_MIPS) -+ eh->name = "MIPS R3000 big-endian"; -+ else if (machine == EM_MIPS_RS3_LE) -+ eh->name = "MIPS R3000 little-endian"; -+ -+ mips_init_reloc (eh); -+ HOOK (eh, reloc_simple_type); -+ HOOK (eh, return_value_location); -+ HOOK (eh, register_info); -+ -+ return MODVERSION; -+} -Index: elfutils-0.145/backends/mips_regs.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.145/backends/mips_regs.c 2010-02-24 18:57:35.000000000 +0000 -@@ -0,0 +1,104 @@ -+/* Register names and numbers for MIPS DWARF. -+ Copyright (C) 2006 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND mips_ -+#include "libebl_CPU.h" -+ -+ssize_t -+mips_register_info (Ebl *ebl __attribute__((unused)), -+ int regno, char *name, size_t namelen, -+ const char **prefix, const char **setname, -+ int *bits, int *type) -+{ -+ if (name == NULL) -+ return 66; -+ -+ if (regno < 0 || regno > 65 || namelen < 4) -+ return -1; -+ -+ *prefix = "$"; -+ -+ if (regno < 32) -+ { -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ if (regno < 32 + 10) -+ { -+ name[0] = regno + '0'; -+ namelen = 1; -+ } -+ else -+ { -+ name[0] = (regno / 10) + '0'; -+ name[1] = (regno % 10) + '0'; -+ namelen = 2; -+ } -+ } -+ else if (regno < 64) -+ { -+ *setname = "FPU"; -+ *type = DW_ATE_float; -+ *bits = 32; -+ name[0] = 'f'; -+ if (regno < 32 + 10) -+ { -+ name[1] = (regno - 32) + '0'; -+ namelen = 2; -+ } -+ else -+ { -+ name[1] = (regno - 32) / 10 + '0'; -+ name[2] = (regno - 32) % 10 + '0'; -+ namelen = 3; -+ } -+ } -+ else if (regno == 64) -+ { -+ *type = DW_ATE_signed; -+ *bits = 32; -+ name[0] = 'h'; -+ name[1] = 'i'; -+ namelen = 2; -+ } -+ else -+ { -+ *type = DW_ATE_signed; -+ *bits = 32; -+ name[0] = 'l'; -+ name[1] = 'o'; -+ namelen = 2; -+ } -+ -+ name[namelen++] = '\0'; -+ return namelen; -+} -Index: elfutils-0.145/backends/mips_reloc.def -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.145/backends/mips_reloc.def 2010-02-24 18:57:35.000000000 +0000 -@@ -0,0 +1,79 @@ -+/* List the relocation types for mips. -*- C -*- -+ Copyright (C) 2006 Red Hat, Inc. -+ This file is part of Red Hat elfutils. 
-+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+/* NAME, REL|EXEC|DYN */ -+ -+RELOC_TYPE (NONE, 0) -+RELOC_TYPE (16, 0) -+RELOC_TYPE (32, 0) -+RELOC_TYPE (REL32, 0) -+RELOC_TYPE (26, 0) -+RELOC_TYPE (HI16, 0) -+RELOC_TYPE (LO16, 0) -+RELOC_TYPE (GPREL16, 0) -+RELOC_TYPE (LITERAL, 0) -+RELOC_TYPE (GOT16, 0) -+RELOC_TYPE (PC16, 0) -+RELOC_TYPE (CALL16, 0) -+RELOC_TYPE (GPREL32, 0) -+ -+RELOC_TYPE (SHIFT5, 0) -+RELOC_TYPE (SHIFT6, 0) -+RELOC_TYPE (64, 0) -+RELOC_TYPE (GOT_DISP, 0) -+RELOC_TYPE (GOT_PAGE, 0) -+RELOC_TYPE (GOT_OFST, 0) -+RELOC_TYPE (GOT_HI16, 0) -+RELOC_TYPE (GOT_LO16, 0) -+RELOC_TYPE (SUB, 0) -+RELOC_TYPE (INSERT_A, 0) -+RELOC_TYPE (INSERT_B, 0) -+RELOC_TYPE (DELETE, 0) -+RELOC_TYPE (HIGHER, 0) -+RELOC_TYPE (HIGHEST, 0) -+RELOC_TYPE (CALL_HI16, 0) -+RELOC_TYPE (CALL_LO16, 0) -+RELOC_TYPE (SCN_DISP, 0) -+RELOC_TYPE (REL16, 0) -+RELOC_TYPE (ADD_IMMEDIATE, 0) -+RELOC_TYPE (PJUMP, 0) -+RELOC_TYPE (RELGOT, 0) -+RELOC_TYPE (JALR, 0) -+RELOC_TYPE (TLS_DTPMOD32, 0) -+RELOC_TYPE (TLS_DTPREL32, 0) -+RELOC_TYPE (TLS_DTPMOD64, 0) -+RELOC_TYPE (TLS_DTPREL64, 0) -+RELOC_TYPE (TLS_GD, 0) -+RELOC_TYPE (TLS_LDM, 0) -+RELOC_TYPE (TLS_DTPREL_HI16, 0) -+RELOC_TYPE (TLS_DTPREL_LO16, 0) -+RELOC_TYPE (TLS_GOTTPREL, 0) -+RELOC_TYPE (TLS_TPREL32, 0) -+RELOC_TYPE (TLS_TPREL64, 0) -+RELOC_TYPE (TLS_TPREL_HI16, 0) -+RELOC_TYPE (TLS_TPREL_LO16, 0) -+ -+#define NO_COPY_RELOC 1 -+#define NO_RELATIVE_RELOC 1 -Index: elfutils-0.145/backends/mips_retval.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.145/backends/mips_retval.c 2010-02-24 18:57:35.000000000 +0000 -@@ -0,0 +1,321 @@ -+/* Function return value location for Linux/mips ABI. -+ Copyright (C) 2005 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. 
-+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+#include -+#include -+ -+#include "../libebl/libeblP.h" -+#include "../libdw/libdwP.h" -+ -+#define BACKEND mips_ -+#include "libebl_CPU.h" -+ -+/* The ABI of the file. Also see EF_MIPS_ABI2 above. */ -+#define EF_MIPS_ABI 0x0000F000 -+ -+/* The original o32 abi. */ -+#define E_MIPS_ABI_O32 0x00001000 -+ -+/* O32 extended to work on 64 bit architectures */ -+#define E_MIPS_ABI_O64 0x00002000 -+ -+/* EABI in 32 bit mode */ -+#define E_MIPS_ABI_EABI32 0x00003000 -+ -+/* EABI in 64 bit mode */ -+#define E_MIPS_ABI_EABI64 0x00004000 -+ -+/* All the possible MIPS ABIs. */ -+enum mips_abi -+ { -+ MIPS_ABI_UNKNOWN = 0, -+ MIPS_ABI_N32, -+ MIPS_ABI_O32, -+ MIPS_ABI_N64, -+ MIPS_ABI_O64, -+ MIPS_ABI_EABI32, -+ MIPS_ABI_EABI64, -+ MIPS_ABI_LAST -+ }; -+ -+/* Find the mips ABI of the current file */ -+enum mips_abi find_mips_abi(Elf *elf) -+{ -+ GElf_Ehdr ehdr_mem; -+ GElf_Ehdr *ehdr = gelf_getehdr (elf, &ehdr_mem); -+ -+ if (ehdr == NULL) -+ return MIPS_ABI_LAST; -+ -+ GElf_Word elf_flags = ehdr->e_flags; -+ -+ /* Check elf_flags to see if it specifies the ABI being used. */ -+ switch ((elf_flags & EF_MIPS_ABI)) -+ { -+ case E_MIPS_ABI_O32: -+ return MIPS_ABI_O32; -+ case E_MIPS_ABI_O64: -+ return MIPS_ABI_O64; -+ case E_MIPS_ABI_EABI32: -+ return MIPS_ABI_EABI32; -+ case E_MIPS_ABI_EABI64: -+ return MIPS_ABI_EABI64; -+ default: -+ if ((elf_flags & EF_MIPS_ABI2)) -+ return MIPS_ABI_N32; -+ } -+ -+ /* GCC creates a pseudo-section whose name describes the ABI. 
*/ -+ size_t shstrndx; -+ if (elf_getshdrstrndx (elf, &shstrndx) < 0) -+ return MIPS_ABI_LAST; -+ -+ const char *name; -+ Elf_Scn *scn = NULL; -+ while ((scn = elf_nextscn (elf, scn)) != NULL) -+ { -+ GElf_Shdr shdr_mem; -+ GElf_Shdr *shdr = gelf_getshdr (scn, &shdr_mem); -+ if (shdr == NULL) -+ return MIPS_ABI_LAST; -+ -+ name = elf_strptr (elf, shstrndx, shdr->sh_name) ?: ""; -+ if (strncmp (name, ".mdebug.", 8) != 0) -+ continue; -+ -+ if (strcmp (name, ".mdebug.abi32") == 0) -+ return MIPS_ABI_O32; -+ else if (strcmp (name, ".mdebug.abiN32") == 0) -+ return MIPS_ABI_N32; -+ else if (strcmp (name, ".mdebug.abi64") == 0) -+ return MIPS_ABI_N64; -+ else if (strcmp (name, ".mdebug.abiO64") == 0) -+ return MIPS_ABI_O64; -+ else if (strcmp (name, ".mdebug.eabi32") == 0) -+ return MIPS_ABI_EABI32; -+ else if (strcmp (name, ".mdebug.eabi64") == 0) -+ return MIPS_ABI_EABI64; -+ else -+ return MIPS_ABI_UNKNOWN; -+ } -+ -+ return MIPS_ABI_UNKNOWN; -+} -+ -+unsigned int -+mips_abi_regsize (enum mips_abi abi) -+{ -+ switch (abi) -+ { -+ case MIPS_ABI_EABI32: -+ case MIPS_ABI_O32: -+ return 4; -+ case MIPS_ABI_N32: -+ case MIPS_ABI_N64: -+ case MIPS_ABI_O64: -+ case MIPS_ABI_EABI64: -+ return 8; -+ case MIPS_ABI_UNKNOWN: -+ case MIPS_ABI_LAST: -+ default: -+ return 0; -+ } -+} -+ -+ -+/* $v0 or pair $v0, $v1 */ -+static const Dwarf_Op loc_intreg_o32[] = -+ { -+ { .atom = DW_OP_reg2 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_reg3 }, { .atom = DW_OP_piece, .number = 4 }, -+ }; -+ -+static const Dwarf_Op loc_intreg[] = -+ { -+ { .atom = DW_OP_reg2 }, { .atom = DW_OP_piece, .number = 8 }, -+ { .atom = DW_OP_reg3 }, { .atom = DW_OP_piece, .number = 8 }, -+ }; -+#define nloc_intreg 1 -+#define nloc_intregpair 4 -+ -+/* $f0 (float), or pair $f0, $f1 (double). -+ * f2/f3 are used for COMPLEX (= 2 doubles) returns in Fortran */ -+static const Dwarf_Op loc_fpreg_o32[] = -+ { -+ { .atom = DW_OP_regx, .number = 32 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_regx, .number = 33 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_regx, .number = 34 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_regx, .number = 35 }, { .atom = DW_OP_piece, .number = 4 }, -+ }; -+ -+/* $f0, or pair $f0, $f2. */ -+static const Dwarf_Op loc_fpreg[] = -+ { -+ { .atom = DW_OP_regx, .number = 32 }, { .atom = DW_OP_piece, .number = 8 }, -+ { .atom = DW_OP_regx, .number = 34 }, { .atom = DW_OP_piece, .number = 8 }, -+ }; -+#define nloc_fpreg 1 -+#define nloc_fpregpair 4 -+#define nloc_fpregquad 8 -+ -+/* The return value is a structure and is actually stored in stack space -+ passed in a hidden argument by the caller. But, the compiler -+ helpfully returns the address of that space in $v0. 
*/ -+static const Dwarf_Op loc_aggregate[] = -+ { -+ { .atom = DW_OP_breg2, .number = 0 } -+ }; -+#define nloc_aggregate 1 -+ -+int -+mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ /* First find the ABI used by the elf object */ -+ enum mips_abi abi = find_mips_abi(functypedie->cu->dbg->elf); -+ -+ /* Something went seriously wrong while trying to figure out the ABI */ -+ if (abi == MIPS_ABI_LAST) -+ return -1; -+ -+ /* We couldn't identify the ABI, but the file seems valid */ -+ if (abi == MIPS_ABI_UNKNOWN) -+ return -2; -+ -+ /* Can't handle EABI variants */ -+ if ((abi == MIPS_ABI_EABI32) || (abi == MIPS_ABI_EABI64)) -+ return -2; -+ -+ unsigned int regsize = mips_abi_regsize (abi); -+ if (!regsize) -+ return -2; -+ -+ /* Start with the function's type, and get the DW_AT_type attribute, -+ which is the type of the return value. */ -+ -+ Dwarf_Attribute attr_mem; -+ Dwarf_Attribute *attr = dwarf_attr_integrate (functypedie, DW_AT_type, &attr_mem); -+ if (attr == NULL) -+ /* The function has no return value, like a `void' function in C. */ -+ return 0; -+ -+ Dwarf_Die die_mem; -+ Dwarf_Die *typedie = dwarf_formref_die (attr, &die_mem); -+ int tag = dwarf_tag (typedie); -+ -+ /* Follow typedefs and qualifiers to get to the actual type. */ -+ while (tag == DW_TAG_typedef -+ || tag == DW_TAG_const_type || tag == DW_TAG_volatile_type -+ || tag == DW_TAG_restrict_type || tag == DW_TAG_mutable_type) -+ { -+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); -+ typedie = dwarf_formref_die (attr, &die_mem); -+ tag = dwarf_tag (typedie); -+ } -+ -+ switch (tag) -+ { -+ case -1: -+ return -1; -+ -+ case DW_TAG_subrange_type: -+ if (! dwarf_hasattr_integrate (typedie, DW_AT_byte_size)) -+ { -+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); -+ typedie = dwarf_formref_die (attr, &die_mem); -+ tag = dwarf_tag (typedie); -+ } -+ /* Fall through. */ -+ -+ case DW_TAG_base_type: -+ case DW_TAG_enumeration_type: -+ case DW_TAG_pointer_type: -+ case DW_TAG_ptr_to_member_type: -+ { -+ Dwarf_Word size; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_byte_size, -+ &attr_mem), &size) != 0) -+ { -+ if (tag == DW_TAG_pointer_type || tag == DW_TAG_ptr_to_member_type) -+ size = regsize; -+ else -+ return -1; -+ } -+ if (tag == DW_TAG_base_type) -+ { -+ Dwarf_Word encoding; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding, -+ &attr_mem), &encoding) != 0) -+ return -1; -+ -+#define ABI_LOC(loc, regsize) ((regsize) == 4 ? (loc ## _o32) : (loc)) -+ -+ if (encoding == DW_ATE_float) -+ { -+ *locp = ABI_LOC(loc_fpreg, regsize); -+ if (size <= regsize) -+ return nloc_fpreg; -+ -+ if (size <= 2*regsize) -+ return nloc_fpregpair; -+ -+ if (size <= 4*regsize && abi == MIPS_ABI_O32) -+ return nloc_fpregquad; -+ -+ goto aggregate; -+ } -+ } -+ *locp = ABI_LOC(loc_intreg, regsize); -+ if (size <= regsize) -+ return nloc_intreg; -+ if (size <= 2*regsize) -+ return nloc_intregpair; -+ -+ /* Else fall through. Shouldn't happen though (at least with gcc) */ -+ } -+ -+ case DW_TAG_structure_type: -+ case DW_TAG_class_type: -+ case DW_TAG_union_type: -+ case DW_TAG_array_type: -+ aggregate: -+ /* XXX TODO: Can't handle structure return with other ABI's yet :-/ */ -+ if ((abi != MIPS_ABI_O32) && (abi != MIPS_ABI_O64)) -+ return -2; -+ -+ *locp = loc_aggregate; -+ return nloc_aggregate; -+ } -+ -+ /* XXX We don't have a good way to return specific errors from ebl calls. 
-+ This value means we do not understand the type, but it is well-formed -+ DWARF and might be valid. */ -+ return -2; -+} -Index: elfutils-0.145/backends/mips_symbol.c -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ elfutils-0.145/backends/mips_symbol.c 2010-02-24 18:57:35.000000000 +0000 -@@ -0,0 +1,52 @@ -+/* MIPS specific symbolic name handling. -+ Copyright (C) 2002, 2003, 2005 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ Written by Jakub Jelinek , 2002. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND mips_ -+#include "libebl_CPU.h" -+ -+/* Check for the simple reloc types. 
*/ -+Elf_Type -+mips_reloc_simple_type (Ebl *ebl __attribute__ ((unused)), int type) -+{ -+ switch (type) -+ { -+ case R_MIPS_16: -+ return ELF_T_HALF; -+ case R_MIPS_32: -+ return ELF_T_WORD; -+ case R_MIPS_64: -+ return ELF_T_XWORD; -+ default: -+ return ELF_T_NUM; -+ } -+} -Index: elfutils-0.145/libebl/eblopenbackend.c -=================================================================== ---- elfutils-0.145.orig/libebl/eblopenbackend.c 2010-02-24 18:55:51.000000000 +0000 -+++ elfutils-0.145/libebl/eblopenbackend.c 2010-02-24 18:57:35.000000000 +0000 -@@ -91,6 +91,8 @@ - { "sparc", "elf_sparc", "sparc", 5, EM_SPARC, 0, 0 }, - { "sparc", "elf_sparcv8plus", "sparc", 5, EM_SPARC32PLUS, 0, 0 }, - { "s390", "ebl_s390", "s390", 4, EM_S390, 0, 0 }, -+ { "mips", "elf_mips", "mips", 4, EM_MIPS, 0, 0 }, -+ { "mips", "elf_mipsel", "mipsel", 4, EM_MIPS_RS3_LE, 0, 0 }, - - { "m32", "elf_m32", "m32", 3, EM_M32, 0, 0 }, - { "m68k", "elf_m68k", "m68k", 4, EM_68K, 0, 0 }, -Index: elfutils-0.145/backends/common-reloc.c -=================================================================== ---- elfutils-0.145.orig/backends/common-reloc.c 2010-02-24 18:55:51.000000000 +0000 -+++ elfutils-0.145/backends/common-reloc.c 2010-02-24 18:57:35.000000000 +0000 -@@ -109,11 +109,13 @@ - } - - -+#ifndef NO_COPY_RELOC - bool - EBLHOOK(copy_reloc_p) (int reloc) - { - return reloc == R_TYPE (COPY); - } -+#endif - - bool - EBLHOOK(none_reloc_p) (int reloc) -@@ -135,7 +137,9 @@ - ebl->reloc_type_name = EBLHOOK(reloc_type_name); - ebl->reloc_type_check = EBLHOOK(reloc_type_check); - ebl->reloc_valid_use = EBLHOOK(reloc_valid_use); -+#ifndef NO_COPY_RELOC - ebl->copy_reloc_p = EBLHOOK(copy_reloc_p); -+#endif - ebl->none_reloc_p = EBLHOOK(none_reloc_p); - #ifndef NO_RELATIVE_RELOC - ebl->relative_reloc_p = EBLHOOK(relative_reloc_p); -Index: elfutils-0.145/backends/Makefile.am -=================================================================== ---- elfutils-0.145.orig/backends/Makefile.am 2010-02-24 18:57:26.000000000 +0000 -+++ elfutils-0.145/backends/Makefile.am 2010-02-24 18:57:57.000000000 +0000 -@@ -29,11 +29,11 @@ - -I$(top_srcdir)/libelf -I$(top_srcdir)/libdw - - --modules = i386 sh x86_64 ia64 alpha arm sparc ppc ppc64 s390 parisc -+modules = i386 sh x86_64 ia64 alpha arm sparc ppc ppc64 s390 parisc mips - libebl_pic = libebl_i386_pic.a libebl_sh_pic.a libebl_x86_64_pic.a \ - libebl_ia64_pic.a libebl_alpha_pic.a libebl_arm_pic.a \ - libebl_sparc_pic.a libebl_ppc_pic.a libebl_ppc64_pic.a \ -- libebl_s390_pic.a libebl_parisc_pic.a -+ libebl_s390_pic.a libebl_parisc_pic.a libebl_mips_pic.a - noinst_LIBRARIES = $(libebl_pic) - noinst_DATA = $(libebl_pic:_pic.a=.so) - -@@ -99,6 +99,10 @@ - libebl_parisc_pic_a_SOURCES = $(parisc_SRCS) - am_libebl_parisc_pic_a_OBJECTS = $(parisc_SRCS:.c=.os) - -+mips_SRCS = mips_init.c mips_symbol.c mips_regs.c mips_retval.c -+libebl_mips_pic_a_SOURCES = $(mips_SRCS) -+am_libebl_mips_pic_a_OBJECTS = $(mips_SRCS:.c=.os) -+ - libebl_%.so libebl_%.map: libebl_%_pic.a $(libelf) $(libdw) - @rm -f $(@:.so=.map) - echo 'ELFUTILS_$(PACKAGE_VERSION) { global: $*_init; local: *; };' \ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch deleted file mode 100644 index 2b5dad368..000000000 --- 
a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch +++ /dev/null @@ -1,27 +0,0 @@ -Upstream-Status: Backport - -nm: Fix size passed to snprintf for invalid sh_name case. -native build failed as following on Fedora18: -nm.c: In function 'show_symbols_sysv': -nm.c:756:27: error: argument to 'sizeof' in 'snprintf' call is the same expression as the destination; did you mean to provide an explicit length? [-Werror=sizeof-pointer-memaccess] - snprintf (name, sizeof name, "[invalid sh_name %#" PRIx32 "]", - ^ - -The original commit is http://git.fedorahosted.org/cgit/elfutils.git/commit/src/nm.c?id=57bd66cabf6e6b9ecf622cdbf350804897a8df58 - -Signed-off-by: Zhenhua Luo - ---- elfutils-0.148/src/nm.c.org 2013-03-11 22:36:11.000000000 -0500 -+++ elfutils-0.148/src/nm.c 2013-03-11 22:46:09.000000000 -0500 -@@ -752,8 +752,9 @@ - gelf_getshdr (scn, &shdr_mem)->sh_name); - if (unlikely (name == NULL)) - { -- name = alloca (sizeof "[invalid sh_name 0x12345678]"); -- snprintf (name, sizeof name, "[invalid sh_name %#" PRIx32 "]", -+ const size_t bufsz = sizeof "[invalid sh_name 0x12345678]"; -+ name = alloca (bufsz); -+ snprintf (name, bufsz, "[invalid sh_name %#" PRIx32 "]", - gelf_getshdr (scn, &shdr_mem)->sh_name); - } - scnnames[elf_ndxscn (scn)] = name; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-portability.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-portability.diff deleted file mode 100644 index b8a912c41..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-portability.diff +++ /dev/null @@ -1,756 +0,0 @@ -Upstream-Status: Backport - -Index: elfutils-0.148/backends/ChangeLog -=================================================================== ---- elfutils-0.148.orig/backends/ChangeLog 2010-04-13 20:08:02.000000000 +0000 -+++ elfutils-0.148/backends/ChangeLog 2010-07-03 13:04:07.000000000 +0000 -@@ -106,6 +106,10 @@ - * ppc_attrs.c (ppc_check_object_attribute): Handle tag - GNU_Power_ABI_Struct_Return. - -+2009-01-23 Roland McGrath -+ -+ * Makefile.am (libebl_%.so): Use $(LD_AS_NEEDED). -+ - 2008-10-04 Ulrich Drepper - - * i386_reloc.def: Fix entries for TLS_GOTDESC, TLS_DESC_CALL, and -@@ -433,6 +437,11 @@ - * sparc_init.c: Likewise. - * x86_64_init.c: Likewise. - -+2005-11-22 Roland McGrath -+ -+ * Makefile.am (LD_AS_NEEDED): New variable, substituted by configure. -+ (libebl_%.so rule): Use it in place of -Wl,--as-needed. -+ - 2005-11-19 Roland McGrath - - * ppc64_reloc.def: REL30 -> ADDR30. -@@ -455,6 +464,9 @@ - * Makefile.am (uninstall): Don't try to remove $(pkgincludedir). - (CLEANFILES): Add libebl_$(m).so. - -+ * Makefile.am (WEXTRA): New variable, substituted by configure. -+ (AM_CFLAGS): Use it in place of -Wextra. -+ - * ppc_reloc.def: Update bits per Alan Modra . - * ppc64_reloc.def: Likewise. 
- -Index: elfutils-0.148/backends/Makefile.am -=================================================================== ---- elfutils-0.148.orig/backends/Makefile.am 2010-04-13 20:08:02.000000000 +0000 -+++ elfutils-0.148/backends/Makefile.am 2010-07-03 13:04:07.000000000 +0000 -@@ -103,7 +103,7 @@ - $(LINK) -shared -o $(@:.map=.so) \ - -Wl,--whole-archive $< $(cpu_$*) -Wl,--no-whole-archive \ - -Wl,--version-script,$(@:.so=.map) \ -- -Wl,-z,defs -Wl,--as-needed $(libelf) $(libdw) $(libmudflap) -+ -Wl,-z,defs $(LD_AS_NEEDED) $(libelf) $(libdw) $(libmudflap) - $(textrel_check) - - libebl_i386.so: $(cpu_i386) -Index: elfutils-0.148/ChangeLog -=================================================================== ---- elfutils-0.148.orig/ChangeLog 2010-04-21 14:26:40.000000000 +0000 -+++ elfutils-0.148/ChangeLog 2010-07-03 13:04:07.000000000 +0000 -@@ -2,6 +2,10 @@ - - * configure.ac (LOCALEDIR, DATADIRNAME): Removed. - -+2009-11-22 Roland McGrath -+ -+ * configure.ac: Use sed and expr instead of modern bash extensions. -+ - 2009-09-21 Ulrich Drepper - - * configure.ac: Update for more modern autoconf. -@@ -10,6 +14,10 @@ - - * configure.ac (zip_LIBS): Check for liblzma too. - -+2009-08-17 Roland McGrath -+ -+ * configure.ac: Check for -fgnu89-inline; add it to WEXTRA if it works. -+ - 2009-04-19 Roland McGrath - - * configure.ac (eu_version): Round down here, not in version.h macros. -@@ -21,6 +29,8 @@ - - 2009-01-23 Roland McGrath - -+ * configure.ac: Check for __builtin_popcount. -+ - * configure.ac (zlib check): Check for gzdirect, need zlib >= 1.2.2.3. - - * configure.ac (__thread check): Use AC_LINK_IFELSE, in case of -@@ -101,6 +111,10 @@ - * configure.ac: Add dummy automake conditional to get dependencies - for non-generic linker right. See src/Makefile.am. - -+2005-11-22 Roland McGrath -+ -+ * configure.ac: Check for --as-needed linker option. -+ - 2005-11-18 Roland McGrath - - * Makefile.am (DISTCHECK_CONFIGURE_FLAGS): New variable. -@@ -148,6 +162,17 @@ - * Makefile.am (all_SUBDIRS): Add libdwfl. - * configure.ac: Write libdwfl/Makefile. - -+2005-05-31 Roland McGrath -+ -+ * configure.ac (WEXTRA): Check for -Wextra and set this substitution. -+ -+ * configure.ac: Check for struct stat st_?tim members. -+ * src/strip.c (process_file): Use st_?time if st_?tim are not there. -+ -+ * configure.ac: Check for futimes function. -+ * src/strip.c (handle_elf) [! HAVE_FUTIMES]: Use utimes instead. -+ (handle_ar) [! HAVE_FUTIMES]: Likewise. -+ - 2005-05-19 Roland McGrath - - * configure.ac [AH_BOTTOM] (INTDECL, _INTDECL): New macros. -Index: elfutils-0.148/config/eu.am -=================================================================== ---- elfutils-0.148.orig/config/eu.am 2010-04-21 14:26:40.000000000 +0000 -+++ elfutils-0.148/config/eu.am 2010-07-03 13:04:07.000000000 +0000 -@@ -25,11 +25,14 @@ - ## . - ## - -+WEXTRA = @WEXTRA@ -+LD_AS_NEEDED = @LD_AS_NEEDED@ -+ - DEFS = -D_GNU_SOURCE -DHAVE_CONFIG_H -DLOCALEDIR='"${localedir}"' - INCLUDES = -I. -I$(srcdir) -I$(top_srcdir)/lib -I.. 
- AM_CFLAGS = -std=gnu99 -Wall -Wshadow \ - $(if $($(*F)_no_Werror),,-Werror) \ -- $(if $($(*F)_no_Wunused),,-Wunused -Wextra) \ -+ $(if $($(*F)_no_Wunused),,-Wunused $(WEXTRA)) \ - $(if $($(*F)_no_Wformat),-Wno-format,-Wformat=2) \ - $($(*F)_CFLAGS) - -Index: elfutils-0.148/config.h.in -=================================================================== ---- elfutils-0.148.orig/config.h.in 2010-06-28 19:07:37.000000000 +0000 -+++ elfutils-0.148/config.h.in 2010-07-03 13:04:07.000000000 +0000 -@@ -1,5 +1,8 @@ - /* config.h.in. Generated from configure.ac by autoheader. */ - -+/* Have __builtin_popcount. */ -+#undef HAVE_BUILTIN_POPCOUNT -+ - /* $libdir subdirectory containing libebl modules. */ - #undef LIBEBL_SUBDIR - -@@ -55,4 +58,7 @@ - /* Define for large files, on AIX-style hosts. */ - #undef _LARGE_FILES - -+/* Stubbed out if missing compiler support. */ -+#undef __thread -+ - #include -Index: elfutils-0.148/configure.ac -=================================================================== ---- elfutils-0.148.orig/configure.ac 2010-06-28 19:07:26.000000000 +0000 -+++ elfutils-0.148/configure.ac 2010-07-03 13:04:07.000000000 +0000 -@@ -73,6 +73,54 @@ - AS_IF([test "x$ac_cv_c99" != xyes], - AC_MSG_ERROR([gcc with C99 support required])) - -+AC_CACHE_CHECK([for -Wextra option to $CC], ac_cv_cc_wextra, [dnl -+old_CFLAGS="$CFLAGS" -+CFLAGS="$CFLAGS -Wextra" -+AC_COMPILE_IFELSE([void foo (void) { }], -+ ac_cv_cc_wextra=yes, ac_cv_cc_wextra=no) -+CFLAGS="$old_CFLAGS"]) -+AC_SUBST(WEXTRA) -+AS_IF([test "x$ac_cv_cc_wextra" = xyes], [WEXTRA=-Wextra], [WEXTRA=-W]) -+ -+AC_CACHE_CHECK([for -fgnu89-inline option to $CC], ac_cv_cc_gnu89_inline, [dnl -+old_CFLAGS="$CFLAGS" -+CFLAGS="$CFLAGS -fgnu89-inline -Werror" -+AC_COMPILE_IFELSE([ -+void foo (void) -+{ -+ inline void bar (void) {} -+ bar (); -+} -+extern inline void baz (void) {} -+], ac_cv_cc_gnu89_inline=yes, ac_cv_cc_gnu89_inline=no) -+CFLAGS="$old_CFLAGS"]) -+AS_IF([test "x$ac_cv_cc_gnu89_inline" = xyes], -+ [WEXTRA="${WEXTRA:+$WEXTRA }-fgnu89-inline"]) -+ -+AC_CACHE_CHECK([for --as-needed linker option], -+ ac_cv_as_needed, [dnl -+cat > conftest.c <&AS_MESSAGE_LOG_FD]) -+then -+ ac_cv_as_needed=yes -+else -+ ac_cv_as_needed=no -+fi -+rm -f conftest*]) -+AS_IF([test "x$ac_cv_as_needed" = xyes], -+ [LD_AS_NEEDED=-Wl,--as-needed], [LD_AS_NEEDED=]) -+AC_SUBST(LD_AS_NEEDED) -+ -+AC_CACHE_CHECK([for __builtin_popcount], ac_cv_popcount, [dnl -+AC_LINK_IFELSE([AC_LANG_PROGRAM([], [[exit (__builtin_popcount (127));]])], -+ ac_cv_popcount=yes, ac_cv_popcount=no)]) -+AS_IF([test "x$ac_cv_popcount" = xyes], -+ [AC_DEFINE([HAVE_BUILTIN_POPCOUNT], [1], [Have __builtin_popcount.])]) -+ - AC_CACHE_CHECK([for __thread support], ac_cv_tls, [dnl - # Use the same flags that we use for our DSOs, so the test is representative. - # Some old compiler/linker/libc combinations fail some ways and not others. 
-@@ -88,7 +136,10 @@ - CFLAGS="$save_CFLAGS" - LDFLAGS="$save_LDFLAGS"]) - AS_IF([test "x$ac_cv_tls" != xyes], -- AC_MSG_ERROR([__thread support required])) -+ [AS_IF([test "$use_locks" = yes], -+ [AC_MSG_ERROR([--enable-thread-safety requires __thread support])], -+ [AC_DEFINE([__thread], [/* empty: no multi-thread support */], -+ [Stubbed out if missing compiler support.])])]) - - dnl This test must come as early as possible after the compiler configuration - dnl tests, because the choice of the file model can (in principle) affect -@@ -251,7 +302,7 @@ - - # 1.234 -> 1234 - case "$PACKAGE_VERSION" in --[[0-9]].*) eu_version="${PACKAGE_VERSION/./}" ;; -+[[0-9]].*) eu_version=`echo "$PACKAGE_VERSION" | sed 's@\.@@'` ;; - *) AC_MSG_ERROR([confused by version number '$PACKAGE_VERSION']) ;; - esac - case "$eu_version" in -@@ -280,6 +331,6 @@ - esac - - # Round up to the next release API (x.y) version. --[eu_version=$[($eu_version + 999) / 1000]] -+eu_version=`expr \( $eu_version + 999 \) / 1000` - - AC_OUTPUT -Index: elfutils-0.148/lib/ChangeLog -=================================================================== ---- elfutils-0.148.orig/lib/ChangeLog 2010-06-28 19:05:56.000000000 +0000 -+++ elfutils-0.148/lib/ChangeLog 2010-07-03 13:04:07.000000000 +0000 -@@ -14,6 +14,9 @@ - - 2009-01-23 Roland McGrath - -+ * eu-config.h [! HAVE_BUILTIN_POPCOUNT] -+ (__builtin_popcount): New inline function. -+ - * eu-config.h: Add multiple inclusion protection. - - 2009-01-17 Ulrich Drepper -@@ -70,6 +73,11 @@ - * Makefile.am (libeu_a_SOURCES): Add it. - * system.h: Declare crc32_file. - -+2005-02-07 Roland McGrath -+ -+ * Makefile.am (WEXTRA): New variable, substituted by configure. -+ (AM_CFLAGS): Use it in place of -Wextra. -+ - 2005-04-30 Ulrich Drepper - - * Makefile.am: Use -ffunction-sections for xmalloc.c. -Index: elfutils-0.148/lib/eu-config.h -=================================================================== ---- elfutils-0.148.orig/lib/eu-config.h 2009-08-12 14:23:22.000000000 +0000 -+++ elfutils-0.148/lib/eu-config.h 2010-07-03 13:04:07.000000000 +0000 -@@ -182,6 +182,17 @@ - /* This macro is used by the tests conditionalize for standalone building. */ - #define ELFUTILS_HEADER(name) - -+#ifndef HAVE_BUILTIN_POPCOUNT -+# define __builtin_popcount hakmem_popcount -+static inline unsigned int __attribute__ ((unused)) -+hakmem_popcount (unsigned int x) -+{ -+ /* HAKMEM 169 */ -+ unsigned int n = x - ((x >> 1) & 033333333333) - ((x >> 2) & 011111111111); -+ return ((n + (n >> 3)) & 030707070707) % 63; -+} -+#endif /* HAVE_BUILTIN_POPCOUNT */ -+ - - #ifdef SHARED - # define OLD_VERSION(name, version) \ -Index: elfutils-0.148/libasm/ChangeLog -=================================================================== ---- elfutils-0.148.orig/libasm/ChangeLog 2010-03-05 05:48:23.000000000 +0000 -+++ elfutils-0.148/libasm/ChangeLog 2010-07-03 13:04:07.000000000 +0000 -@@ -67,6 +67,11 @@ - * asm_error.c: Add new error ASM_E_IOERROR. - * libasmP.h: Add ASM_E_IOERROR definition. - -+2005-05-31 Roland McGrath -+ -+ * Makefile.am (WEXTRA): New variable, substituted by configure. -+ (AM_CFLAGS): Use it in place of -Wextra. -+ - 2005-02-15 Ulrich Drepper - - * Makefile.am (AM_CFLAGS): Add -Wunused -Wextra -Wformat=2. 
-Index: elfutils-0.148/libcpu/ChangeLog -=================================================================== ---- elfutils-0.148.orig/libcpu/ChangeLog 2010-03-05 05:48:23.000000000 +0000 -+++ elfutils-0.148/libcpu/ChangeLog 2010-07-03 13:04:07.000000000 +0000 -@@ -9,6 +9,9 @@ - - 2009-01-23 Roland McGrath - -+ * i386_disasm.c (i386_disasm): Add abort after assert-constant for old -+ compilers that don't realize it's noreturn. -+ - * Makefile.am (i386_parse_CFLAGS): Use quotes around command - substitution that can produce leading whitespace. - -@@ -338,6 +341,11 @@ - * defs/i386.doc: New file. - * defs/x86_64: New file. - -+2005-04-04 Roland McGrath -+ -+ * Makefile.am (WEXTRA): New variable, substituted by configure. -+ (AM_CFLAGS): Use it instead of -Wextra. -+ - 2005-02-15 Ulrich Drepper - - * Makefile (AM_CFLAGS): Add -Wunused -Wextra -Wformat=2. -Index: elfutils-0.148/libcpu/i386_disasm.c -=================================================================== ---- elfutils-0.148.orig/libcpu/i386_disasm.c 2009-01-08 20:56:36.000000000 +0000 -+++ elfutils-0.148/libcpu/i386_disasm.c 2010-07-03 13:04:07.000000000 +0000 -@@ -791,6 +791,7 @@ - - default: - assert (! "INVALID not handled"); -+ abort (); - } - } - else -Index: elfutils-0.148/libdw/ChangeLog -=================================================================== ---- elfutils-0.148.orig/libdw/ChangeLog 2010-06-28 19:05:56.000000000 +0000 -+++ elfutils-0.148/libdw/ChangeLog 2010-07-03 13:04:07.000000000 +0000 -@@ -276,6 +276,10 @@ - - * dwarf_hasattr_integrate.c: Integrate DW_AT_specification too. - -+2009-08-17 Roland McGrath -+ -+ * libdw.h: Disable extern inlines for GCC 4.2. -+ - 2009-08-10 Roland McGrath - - * dwarf_getscopevar.c: Use dwarf_diename. -@@ -1044,6 +1048,11 @@ - - 2005-05-31 Roland McGrath - -+ * Makefile.am (WEXTRA): New variable, substituted by configure. -+ (AM_CFLAGS): Use it in place of -Wextra. -+ -+2005-05-31 Roland McGrath -+ - * dwarf_formref_die.c (dwarf_formref_die): Add CU header offset to - formref offset. - -Index: elfutils-0.148/libdw/libdw.h -=================================================================== ---- elfutils-0.148.orig/libdw/libdw.h 2010-06-28 19:05:56.000000000 +0000 -+++ elfutils-0.148/libdw/libdw.h 2010-07-03 13:04:07.000000000 +0000 -@@ -842,7 +842,7 @@ - - - /* Inline optimizations. */ --#ifdef __OPTIMIZE__ -+#if defined __OPTIMIZE__ && !(__GNUC__ == 4 && __GNUC_MINOR__ == 2) - /* Return attribute code of given attribute. */ - __libdw_extern_inline unsigned int - dwarf_whatattr (Dwarf_Attribute *attr) -Index: elfutils-0.148/libdwfl/ChangeLog -=================================================================== ---- elfutils-0.148.orig/libdwfl/ChangeLog 2010-06-28 19:05:56.000000000 +0000 -+++ elfutils-0.148/libdwfl/ChangeLog 2010-07-03 13:04:07.000000000 +0000 -@@ -1265,6 +1265,11 @@ - - 2005-07-21 Roland McGrath - -+ * Makefile.am (WEXTRA): New variable, substituted by configure. -+ (AM_CFLAGS): Use it in place of -Wextra. -+ -+2005-07-21 Roland McGrath -+ - * Makefile.am (noinst_HEADERS): Add loc2c.c. - - * test2.c (main): Check sscanf result to quiet warning. -Index: elfutils-0.148/libebl/ChangeLog -=================================================================== ---- elfutils-0.148.orig/libebl/ChangeLog 2010-03-05 05:48:23.000000000 +0000 -+++ elfutils-0.148/libebl/ChangeLog 2010-07-03 13:04:07.000000000 +0000 -@@ -624,6 +624,11 @@ - * Makefile.am (libebl_*_so_SOURCES): Set to $(*_SRCS) so dependency - tracking works right. 
- -+2005-05-31 Roland McGrath -+ -+ * Makefile.am (WEXTRA): New variable, substituted by configure. -+ (AM_CFLAGS): Use it in place of -Wextra. -+ - 2005-05-21 Ulrich Drepper - - * libebl_x86_64.map: Add x86_64_core_note. -Index: elfutils-0.148/libelf/ChangeLog -=================================================================== ---- elfutils-0.148.orig/libelf/ChangeLog 2010-06-14 21:17:20.000000000 +0000 -+++ elfutils-0.148/libelf/ChangeLog 2010-07-03 13:04:07.000000000 +0000 -@@ -657,6 +657,11 @@ - - * elf.h: Update from glibc. - -+2005-05-31 Roland McGrath -+ -+ * Makefile.am (WEXTRA): New variable, substituted by configure. -+ (AM_CFLAGS): Use it in place of -Wextra. -+ - 2005-05-08 Roland McGrath - - * elf_begin.c (read_file) [_MUDFLAP]: Don't use mmap for now. -Index: elfutils-0.148/libelf/common.h -=================================================================== ---- elfutils-0.148.orig/libelf/common.h 2009-01-08 20:56:36.000000000 +0000 -+++ elfutils-0.148/libelf/common.h 2010-07-03 13:04:07.000000000 +0000 -@@ -160,7 +160,7 @@ - (Var) = (sizeof (Var) == 1 \ - ? (unsigned char) (Var) \ - : (sizeof (Var) == 2 \ -- ? bswap_16 (Var) \ -+ ? (unsigned short int) bswap_16 (Var) \ - : (sizeof (Var) == 4 \ - ? bswap_32 (Var) \ - : bswap_64 (Var)))) -@@ -169,7 +169,7 @@ - (Dst) = (sizeof (Var) == 1 \ - ? (unsigned char) (Var) \ - : (sizeof (Var) == 2 \ -- ? bswap_16 (Var) \ -+ ? (unsigned short int) bswap_16 (Var) \ - : (sizeof (Var) == 4 \ - ? bswap_32 (Var) \ - : bswap_64 (Var)))) -Index: elfutils-0.148/src/addr2line.c -=================================================================== ---- elfutils-0.148.orig/src/addr2line.c 2010-05-28 14:38:30.000000000 +0000 -+++ elfutils-0.148/src/addr2line.c 2010-07-03 13:05:40.000000000 +0000 -@@ -447,10 +447,10 @@ - bool parsed = false; - int i, j; - char *name = NULL; -- if (sscanf (string, "(%m[^)])%" PRIiMAX "%n", &name, &addr, &i) == 2 -+ if (sscanf (string, "(%a[^)])%" PRIiMAX "%n", &name, &addr, &i) == 2 - && string[i] == '\0') - parsed = adjust_to_section (name, &addr, dwfl); -- switch (sscanf (string, "%m[^-+]%n%" PRIiMAX "%n", &name, &i, &addr, &j)) -+ switch (sscanf (string, "%a[^-+]%n%" PRIiMAX "%n", &name, &i, &addr, &j)) - { - default: - break; -Index: elfutils-0.148/src/ChangeLog -=================================================================== ---- elfutils-0.148.orig/src/ChangeLog 2010-06-28 19:05:56.000000000 +0000 -+++ elfutils-0.148/src/ChangeLog 2010-07-03 13:04:08.000000000 +0000 -@@ -165,8 +165,16 @@ - * readelf.c (attr_callback): Use print_block only when we don't use - print_ops. - -+2009-08-17 Roland McGrath -+ -+ * ld.h: Disable extern inlines for GCC 4.2. -+ - 2009-08-14 Roland McGrath - -+ * strings.c (read_block): Conditionalize posix_fadvise use -+ on [POSIX_FADV_SEQUENTIAL]. -+ From Petr Salinger . -+ - * ar.c (do_oper_extract): Use pathconf instead of statfs. - - 2009-08-01 Ulrich Drepper -@@ -330,6 +338,8 @@ - * readelf.c (print_debug_frame_section): Use t instead of j formats - for ptrdiff_t OFFSET. - -+ * addr2line.c (handle_address): Use %a instead of %m for compatibility. -+ - 2009-01-21 Ulrich Drepper - - * elflint.c (check_program_header): Fix typo in .eh_frame_hdr section -@@ -513,6 +523,11 @@ - that matches its PT_LOAD's p_flags &~ PF_W. On sparc, PF_X really - is valid in RELRO. - -+2008-03-01 Roland McGrath -+ -+ * readelf.c (dump_archive_index): Tweak portability hack -+ to match [__GNUC__ < 4] too. -+ - 2008-02-29 Roland McGrath - - * readelf.c (print_attributes): Add a cast. 
-@@ -764,6 +779,8 @@ - - * readelf.c (hex_dump): Fix rounding error in whitespace calculation. - -+ * Makefile.am (readelf_no_Werror): New variable. -+ - 2007-10-15 Roland McGrath - - * make-debug-archive.in: New file. -@@ -1203,6 +1220,10 @@ - * elflint.c (valid_e_machine): Add EM_ALPHA. - Reported by Christian Aichinger . - -+ * strings.c (map_file): Define POSIX_MADV_SEQUENTIAL to -+ MADV_SEQUENTIAL if undefined. Don't call posix_madvise -+ if neither is defined. -+ - 2006-08-08 Ulrich Drepper - - * elflint.c (check_dynamic): Don't require DT_HASH for DT_SYMTAB. -@@ -1279,6 +1300,10 @@ - * Makefile.am: Add hacks to create dependency files for non-generic - linker. - -+2006-04-05 Roland McGrath -+ -+ * strings.c (MAP_POPULATE): Define to 0 if undefined. -+ - 2006-06-12 Ulrich Drepper - - * ldgeneric.c (ld_generic_generate_sections): Don't create .interp -@@ -1627,6 +1652,11 @@ - * readelf.c (print_debug_loc_section): Fix indentation for larger - address size. - -+2005-05-31 Roland McGrath -+ -+ * Makefile.am (WEXTRA): New variable, substituted by configure. -+ (AM_CFLAGS): Use it in place of -Wextra. -+ - 2005-05-30 Roland McGrath - - * readelf.c (print_debug_line_section): Print section offset of each -Index: elfutils-0.148/src/findtextrel.c -=================================================================== ---- elfutils-0.148.orig/src/findtextrel.c 2009-02-11 01:12:59.000000000 +0000 -+++ elfutils-0.148/src/findtextrel.c 2010-07-03 13:04:08.000000000 +0000 -@@ -490,7 +490,11 @@ - - - static void --check_rel (size_t nsegments, struct segments segments[nsegments], -+check_rel (size_t nsegments, struct segments segments[ -+#if __GNUC__ >= 4 -+ nsegments -+#endif -+ ], - GElf_Addr addr, Elf *elf, Elf_Scn *symscn, Dwarf *dw, - const char *fname, bool more_than_one, void **knownsrcs) - { -Index: elfutils-0.148/src/ld.h -=================================================================== ---- elfutils-0.148.orig/src/ld.h 2009-06-13 22:39:51.000000000 +0000 -+++ elfutils-0.148/src/ld.h 2010-07-03 13:04:08.000000000 +0000 -@@ -1122,6 +1122,7 @@ - - /* Checked whether the symbol is undefined and referenced from a DSO. */ - extern bool linked_from_dso_p (struct scninfo *scninfo, size_t symidx); -+#if defined __OPTIMIZE__ && !(__GNUC__ == 4 && __GNUC_MINOR__ == 2) - #ifdef __GNUC_STDC_INLINE__ - __attribute__ ((__gnu_inline__)) - #endif -@@ -1139,5 +1140,6 @@ - - return sym->defined && sym->in_dso; - } -+#endif /* Optimizing and not GCC 4.2. */ - - #endif /* ld.h */ -Index: elfutils-0.148/src/Makefile.am -=================================================================== ---- elfutils-0.148.orig/src/Makefile.am 2010-03-05 05:48:23.000000000 +0000 -+++ elfutils-0.148/src/Makefile.am 2010-07-03 13:04:08.000000000 +0000 -@@ -99,6 +99,9 @@ - # XXX While the file is not finished, don't warn about this - ldgeneric_no_Wunused = yes - -+# Buggy old compilers. 
-+readelf_no_Werror = yes -+ - readelf_LDADD = $(libdw) $(libebl) $(libelf) $(libeu) $(libmudflap) -ldl - nm_LDADD = $(libdw) $(libebl) $(libelf) $(libeu) $(libmudflap) -ldl - size_LDADD = $(libelf) $(libeu) $(libmudflap) - -Index: elfutils-0.148/src/readelf.c -=================================================================== ---- elfutils-0.148.orig/src/readelf.c 2010-06-28 19:05:56.000000000 +0000 -+++ elfutils-0.148/src/readelf.c 2010-07-03 13:04:08.000000000 +0000 -@@ -7845,7 +7845,7 @@ - if (unlikely (elf_rand (elf, as_off) == 0) - || unlikely ((subelf = elf_begin (-1, ELF_C_READ_MMAP, elf)) - == NULL)) --#if __GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ < 7) -+#if __GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ < 7) || __GNUC__ < 4 - while (1) - #endif - error (EXIT_FAILURE, 0, -Index: elfutils-0.148/src/strings.c -=================================================================== ---- elfutils-0.148.orig/src/strings.c 2009-02-11 01:12:59.000000000 +0000 -+++ elfutils-0.148/src/strings.c 2010-07-03 13:04:08.000000000 +0000 -@@ -51,6 +51,10 @@ - - #include - -+#ifndef MAP_POPULATE -+# define MAP_POPULATE 0 -+#endif -+ - - /* Prototypes of local functions. */ - static int read_fd (int fd, const char *fname, off64_t fdlen); -@@ -491,8 +495,13 @@ - fd, start_off); - if (mem != MAP_FAILED) - { -+#if !defined POSIX_MADV_SEQUENTIAL && defined MADV_SEQUENTIAL -+# define POSIX_MADV_SEQUENTIAL MADV_SEQUENTIAL -+#endif -+#ifdef POSIX_MADV_SEQUENTIAL - /* We will go through the mapping sequentially. */ - (void) posix_madvise (mem, map_size, POSIX_MADV_SEQUENTIAL); -+#endif - break; - } - if (errno != EINVAL && errno != ENOMEM) -@@ -586,9 +595,11 @@ - elfmap_off = from & ~(ps - 1); - elfmap_base = elfmap = map_file (fd, elfmap_off, fdlen, &elfmap_size); - -+#ifdef POSIX_FADV_SEQUENTIAL - if (unlikely (elfmap == MAP_FAILED)) - /* Let the kernel know we are going to read everything in sequence. */ - (void) posix_fadvise (fd, 0, 0, POSIX_FADV_SEQUENTIAL); -+#endif - } - - if (unlikely (elfmap == MAP_FAILED)) -Index: elfutils-0.148/src/strip.c -=================================================================== ---- elfutils-0.148.orig/src/strip.c 2010-01-15 09:05:55.000000000 +0000 -+++ elfutils-0.148/src/strip.c 2010-07-03 13:04:08.000000000 +0000 -@@ -53,6 +53,12 @@ - #include - #include - -+#ifdef HAVE_FUTIMES -+# define FUTIMES(fd, fname, tvp) futimes (fd, tvp) -+#else -+# define FUTIMES(fd, fname, tvp) utimes (fname, tvp) -+#endif -+ - - /* Name and version of program. */ - static void print_version (FILE *stream, struct argp_state *state); -@@ -301,8 +307,18 @@ - - /* If we have to preserve the timestamp, we need it in the - format utimes() understands. */ -+#ifdef HAVE_STRUCT_STAT_ST_ATIM - TIMESPEC_TO_TIMEVAL (&tv[0], &pre_st.st_atim); -+#else -+ tv[0].tv_sec = pre_st.st_atime; -+ tv[0].tv_usec = 0; -+#endif -+#ifdef HAVE_STRUCT_STAT_ST_MTIM - TIMESPEC_TO_TIMEVAL (&tv[1], &pre_st.st_mtim); -+#else -+ tv[1].tv_sec = pre_st.st_atime; -+ tv[1].tv_usec = 0; -+#endif - } - - /* Open the file. */ -@@ -1747,7 +1763,7 @@ - /* If requested, preserve the timestamp. 
*/ - if (tvp != NULL) - { -- if (futimes (fd, tvp) != 0) -+ if (FUTIMES (fd, output_fname, tvp) != 0) - { - error (0, errno, gettext ("\ - cannot set access and modification date of '%s'"), -@@ -1804,7 +1820,7 @@ - - if (tvp != NULL) - { -- if (unlikely (futimes (fd, tvp) != 0)) -+ if (unlikely (FUTIMES (fd, fname, tvp) != 0)) - { - error (0, errno, gettext ("\ - cannot set access and modification date of '%s'"), fname); -Index: elfutils-0.148/tests/ChangeLog -=================================================================== ---- elfutils-0.148.orig/tests/ChangeLog 2010-06-28 19:05:56.000000000 +0000 -+++ elfutils-0.148/tests/ChangeLog 2010-07-03 13:04:08.000000000 +0000 -@@ -154,6 +154,8 @@ - - 2008-01-21 Roland McGrath - -+ * line2addr.c (main): Revert last change. -+ - * testfile45.S.bz2: Add tests for cltq, cqto. - * testfile45.expect.bz2: Adjust. - -@@ -862,6 +864,11 @@ - * Makefile.am (TESTS): Add run-elflint-test.sh. - (EXTRA_DIST): Add run-elflint-test.sh and testfile18.bz2. - -+2005-05-31 Roland McGrath -+ -+ * Makefile.am (WEXTRA): New variable, substituted by configure. -+ (AM_CFLAGS): Use it in place of -Wextra. -+ - 2005-05-24 Ulrich Drepper - - * get-files.c (main): Use correct format specifier. -Index: elfutils-0.148/tests/line2addr.c -=================================================================== ---- elfutils-0.148.orig/tests/line2addr.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/tests/line2addr.c 2010-07-03 13:04:08.000000000 +0000 -@@ -132,7 +132,7 @@ - { - struct args a = { .arg = argv[cnt] }; - -- switch (sscanf (a.arg, "%m[^:]:%d", &a.file, &a.line)) -+ switch (sscanf (a.arg, "%a[^:]:%d", &a.file, &a.line)) - { - default: - case 0: diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-robustify.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-robustify.diff deleted file mode 100644 index cd398549d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/redhat-robustify.diff +++ /dev/null @@ -1,1709 +0,0 @@ -Upstream-Status: Backport - -Index: elfutils-0.148/libelf/ChangeLog -=================================================================== ---- elfutils-0.148.orig/libelf/ChangeLog 2010-07-03 13:07:10.000000000 +0000 -+++ elfutils-0.148/libelf/ChangeLog 2010-07-03 13:07:11.000000000 +0000 -@@ -649,10 +649,53 @@ - If section content hasn't been read yet, do it before looking for the - block size. If no section data present, infer size of section header. - -+2005-05-14 Jakub Jelinek -+ -+ * libelfP.h (INVALID_NDX): Define. -+ * gelf_getdyn.c (gelf_getdyn): Use it. Remove ndx < 0 test if any. -+ * gelf_getlib.c (gelf_getlib): Likewise. -+ * gelf_getmove.c (gelf_getmove): Likewise. -+ * gelf_getrel.c (gelf_getrel): Likewise. -+ * gelf_getrela.c (gelf_getrela): Likewise. -+ * gelf_getsym.c (gelf_getsym): Likewise. -+ * gelf_getsyminfo.c (gelf_getsyminfo): Likewise. -+ * gelf_getsymshndx.c (gelf_getsymshndx): Likewise. -+ * gelf_getversym.c (gelf_getversym): Likewise. -+ * gelf_update_dyn.c (gelf_update_dyn): Likewise. -+ * gelf_update_lib.c (gelf_update_lib): Likewise. -+ * gelf_update_move.c (gelf_update_move): Likewise. -+ * gelf_update_rel.c (gelf_update_rel): Likewise. -+ * gelf_update_rela.c (gelf_update_rela): Likewise. -+ * gelf_update_sym.c (gelf_update_sym): Likewise. -+ * gelf_update_syminfo.c (gelf_update_syminfo): Likewise. -+ * gelf_update_symshndx.c (gelf_update_symshndx): Likewise. 
-+ * gelf_update_versym.c (gelf_update_versym): Likewise. -+ * elf_newscn.c (elf_newscn): Check for overflow. -+ * elf32_updatefile.c (__elfw2(LIBELFBITS,updatemmap)): Likewise. -+ (__elfw2(LIBELFBITS,updatefile)): Likewise. -+ * elf_begin.c (file_read_elf): Likewise. -+ * elf32_newphdr.c (elfw2(LIBELFBITS,newphdr)): Likewise. -+ * elf_getarsym.c (elf_getarsym): Likewise. -+ * elf32_getshdr.c (elfw2(LIBELFBITS,getshdr)): Likewise. - 2005-05-11 Ulrich Drepper - - * elf.h: Update again. - -+2005-05-17 Jakub Jelinek -+ -+ * elf32_getphdr.c (elfw2(LIBELFBITS,getphdr)): Check if program header -+ table fits into object's bounds. -+ * elf_getshstrndx.c (elf_getshstrndx): Add elf->start_offset to -+ elf->map_address. Check if first section header fits into object's -+ bounds. -+ * elf32_getshdr.c (elfw2(LIBELFBITS,getshdr)): -+ Check if section header table fits into object's bounds. -+ * elf_begin.c (get_shnum): Ensure section headers fits into -+ object's bounds. -+ (file_read_elf): Make sure scncnt is small enough to allocate both -+ ElfXX_Shdr and Elf_Scn array. Make sure section and program header -+ tables fit into object's bounds. Avoid memory leak on failure. -+ - 2005-05-09 Ulrich Drepper - - * elf.h: Update from glibc. -Index: elfutils-0.148/libelf/elf32_getphdr.c -=================================================================== ---- elfutils-0.148.orig/libelf/elf32_getphdr.c 2010-04-21 14:26:40.000000000 +0000 -+++ elfutils-0.148/libelf/elf32_getphdr.c 2010-07-03 13:07:11.000000000 +0000 -@@ -114,6 +114,16 @@ - - if (elf->map_address != NULL) - { -+ /* First see whether the information in the ELF header is -+ valid and it does not ask for too much. */ -+ if (unlikely (ehdr->e_phoff >= elf->maximum_size) -+ || unlikely (elf->maximum_size - ehdr->e_phoff < size)) -+ { -+ /* Something is wrong. */ -+ __libelf_seterrno (ELF_E_INVALID_PHDR); -+ goto out; -+ } -+ - /* All the data is already mapped. Use it. */ - void *file_phdr = ((char *) elf->map_address - + elf->start_offset + ehdr->e_phoff); -Index: elfutils-0.148/libelf/elf32_getshdr.c -=================================================================== ---- elfutils-0.148.orig/libelf/elf32_getshdr.c 2009-06-13 22:41:42.000000000 +0000 -+++ elfutils-0.148/libelf/elf32_getshdr.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Return section header. -- Copyright (C) 1998, 1999, 2000, 2001, 2002, 2005, 2007, 2009 Red Hat, Inc. -+ Copyright (C) 1998-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 1998. - -@@ -81,7 +81,8 @@ - goto out; - - size_t shnum; -- if (__elf_getshdrnum_rdlock (elf, &shnum) != 0) -+ if (__elf_getshdrnum_rdlock (elf, &shnum) != 0 -+ || shnum > SIZE_MAX / sizeof (ElfW2(LIBELFBITS,Shdr))) - goto out; - size_t size = shnum * sizeof (ElfW2(LIBELFBITS,Shdr)); - -@@ -98,6 +99,16 @@ - - if (elf->map_address != NULL) - { -+ /* First see whether the information in the ELF header is -+ valid and it does not ask for too much. */ -+ if (unlikely (ehdr->e_shoff >= elf->maximum_size) -+ || unlikely (elf->maximum_size - ehdr->e_shoff < size)) -+ { -+ /* Something is wrong. */ -+ __libelf_seterrno (ELF_E_INVALID_SECTION_HEADER); -+ goto free_and_out; -+ } -+ - ElfW2(LIBELFBITS,Shdr) *notcvt; - - /* All the data is already mapped. 
If we could use it -Index: elfutils-0.148/libelf/elf32_newphdr.c -=================================================================== ---- elfutils-0.148.orig/libelf/elf32_newphdr.c 2010-01-12 16:57:54.000000000 +0000 -+++ elfutils-0.148/libelf/elf32_newphdr.c 2010-07-03 13:07:11.000000000 +0000 -@@ -135,6 +135,12 @@ - || count == PN_XNUM - || elf->state.ELFW(elf,LIBELFBITS).phdr == NULL) - { -+ if (unlikely (count > SIZE_MAX / sizeof (ElfW2(LIBELFBITS,Phdr)))) -+ { -+ result = NULL; -+ goto out; -+ } -+ - /* Allocate a new program header with the appropriate number of - elements. */ - result = (ElfW2(LIBELFBITS,Phdr) *) -Index: elfutils-0.148/libelf/elf32_updatefile.c -=================================================================== ---- elfutils-0.148.orig/libelf/elf32_updatefile.c 2010-01-12 16:57:54.000000000 +0000 -+++ elfutils-0.148/libelf/elf32_updatefile.c 2010-07-03 13:07:11.000000000 +0000 -@@ -223,6 +223,9 @@ - /* Write all the sections. Well, only those which are modified. */ - if (shnum > 0) - { -+ if (unlikely (shnum > SIZE_MAX / sizeof (Elf_Scn *))) -+ return 1; -+ - Elf_ScnList *list = &elf->state.ELFW(elf,LIBELFBITS).scns; - Elf_Scn **scns = (Elf_Scn **) alloca (shnum * sizeof (Elf_Scn *)); - char *const shdr_start = ((char *) elf->map_address + elf->start_offset -@@ -645,6 +648,10 @@ - /* Write all the sections. Well, only those which are modified. */ - if (shnum > 0) - { -+ if (unlikely (shnum > SIZE_MAX / (sizeof (Elf_Scn *) -+ + sizeof (ElfW2(LIBELFBITS,Shdr))))) -+ return 1; -+ - off_t shdr_offset = elf->start_offset + ehdr->e_shoff; - #if EV_NUM != 2 - xfct_t shdr_fctp = __elf_xfctstom[__libelf_version - 1][EV_CURRENT - 1][ELFW(ELFCLASS, LIBELFBITS) - 1][ELF_T_SHDR]; -Index: elfutils-0.148/libelf/elf_begin.c -=================================================================== ---- elfutils-0.148.orig/libelf/elf_begin.c 2010-04-21 14:26:40.000000000 +0000 -+++ elfutils-0.148/libelf/elf_begin.c 2010-07-03 13:07:11.000000000 +0000 -@@ -165,7 +165,8 @@ - - if (unlikely (result == 0) && ehdr.e32->e_shoff != 0) - { -- if (ehdr.e32->e_shoff + sizeof (Elf32_Shdr) > maxsize) -+ if (unlikely (ehdr.e32->e_shoff >= maxsize) -+ || unlikely (maxsize - ehdr.e32->e_shoff < sizeof (Elf32_Shdr))) - /* Cannot read the first section header. */ - return 0; - -@@ -213,7 +214,8 @@ - - if (unlikely (result == 0) && ehdr.e64->e_shoff != 0) - { -- if (ehdr.e64->e_shoff + sizeof (Elf64_Shdr) > maxsize) -+ if (unlikely (ehdr.e64->e_shoff >= maxsize) -+ || unlikely (ehdr.e64->e_shoff + sizeof (Elf64_Shdr) > maxsize)) - /* Cannot read the first section header. */ - return 0; - -@@ -285,6 +287,15 @@ - /* Could not determine the number of sections. */ - return NULL; - -+ /* Check for too many sections. */ -+ if (e_ident[EI_CLASS] == ELFCLASS32) -+ { -+ if (scncnt > SIZE_MAX / (sizeof (Elf_Scn) + sizeof (Elf32_Shdr))) -+ return NULL; -+ } -+ else if (scncnt > SIZE_MAX / (sizeof (Elf_Scn) + sizeof (Elf64_Shdr))) -+ return NULL; -+ - /* We can now allocate the memory. Even if there are no section headers, - we allocate space for a zeroth section in case we need it later. */ - const size_t scnmax = (scncnt ?: (cmd == ELF_C_RDWR || cmd == ELF_C_RDWR_MMAP) -@@ -324,6 +335,16 @@ - { - /* We can use the mmapped memory. 
*/ - elf->state.elf32.ehdr = ehdr; -+ -+ if (unlikely (ehdr->e_shoff >= maxsize) -+ || unlikely (maxsize - ehdr->e_shoff -+ < scncnt * sizeof (Elf32_Shdr))) -+ { -+ free_and_out: -+ free (elf); -+ __libelf_seterrno (ELF_E_INVALID_FILE); -+ return NULL; -+ } - elf->state.elf32.shdr - = (Elf32_Shdr *) ((char *) ehdr + ehdr->e_shoff); - -@@ -410,6 +431,11 @@ - { - /* We can use the mmapped memory. */ - elf->state.elf64.ehdr = ehdr; -+ -+ if (unlikely (ehdr->e_shoff >= maxsize) -+ || unlikely (ehdr->e_shoff -+ + scncnt * sizeof (Elf32_Shdr) > maxsize)) -+ goto free_and_out; - elf->state.elf64.shdr - = (Elf64_Shdr *) ((char *) ehdr + ehdr->e_shoff); - -Index: elfutils-0.148/libelf/elf_getarsym.c -=================================================================== ---- elfutils-0.148.orig/libelf/elf_getarsym.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/elf_getarsym.c 2010-07-03 13:07:11.000000000 +0000 -@@ -179,6 +179,9 @@ - size_t index_size = atol (tmpbuf); - - if (SARMAG + sizeof (struct ar_hdr) + index_size > elf->maximum_size -+#if SIZE_MAX <= 4294967295U -+ || n >= SIZE_MAX / sizeof (Elf_Arsym) -+#endif - || n * sizeof (uint32_t) > index_size) - { - /* This index table cannot be right since it does not fit into -Index: elfutils-0.148/libelf/elf_getshdrstrndx.c -=================================================================== ---- elfutils-0.148.orig/libelf/elf_getshdrstrndx.c 2009-06-13 22:31:35.000000000 +0000 -+++ elfutils-0.148/libelf/elf_getshdrstrndx.c 2010-07-03 13:07:11.000000000 +0000 -@@ -125,10 +125,25 @@ - if (elf->map_address != NULL - && elf->state.elf32.ehdr->e_ident[EI_DATA] == MY_ELFDATA - && (ALLOW_UNALIGNED -- || (((size_t) ((char *) elf->map_address + offset)) -+ || (((size_t) ((char *) elf->map_address -+ + elf->start_offset + offset)) - & (__alignof__ (Elf32_Shdr) - 1)) == 0)) -- /* We can directly access the memory. */ -- num = ((Elf32_Shdr *) (elf->map_address + offset))->sh_link; -+ { -+ /* First see whether the information in the ELF header is -+ valid and it does not ask for too much. */ -+ if (unlikely (elf->maximum_size - offset -+ < sizeof (Elf32_Shdr))) -+ { -+ /* Something is wrong. */ -+ __libelf_seterrno (ELF_E_INVALID_SECTION_HEADER); -+ result = -1; -+ goto out; -+ } -+ -+ /* We can directly access the memory. */ -+ num = ((Elf32_Shdr *) (elf->map_address + elf->start_offset -+ + offset))->sh_link; -+ } - else - { - /* We avoid reading in all the section headers. Just read -@@ -163,10 +178,25 @@ - if (elf->map_address != NULL - && elf->state.elf64.ehdr->e_ident[EI_DATA] == MY_ELFDATA - && (ALLOW_UNALIGNED -- || (((size_t) ((char *) elf->map_address + offset)) -+ || (((size_t) ((char *) elf->map_address -+ + elf->start_offset + offset)) - & (__alignof__ (Elf64_Shdr) - 1)) == 0)) -- /* We can directly access the memory. */ -- num = ((Elf64_Shdr *) (elf->map_address + offset))->sh_link; -+ { -+ /* First see whether the information in the ELF header is -+ valid and it does not ask for too much. */ -+ if (unlikely (elf->maximum_size - offset -+ < sizeof (Elf64_Shdr))) -+ { -+ /* Something is wrong. */ -+ __libelf_seterrno (ELF_E_INVALID_SECTION_HEADER); -+ result = -1; -+ goto out; -+ } -+ -+ /* We can directly access the memory. */ -+ num = ((Elf64_Shdr *) (elf->map_address + elf->start_offset -+ + offset))->sh_link; -+ } - else - { - /* We avoid reading in all the section headers. 
Just read -Index: elfutils-0.148/libelf/elf_newscn.c -=================================================================== ---- elfutils-0.148.orig/libelf/elf_newscn.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/elf_newscn.c 2010-07-03 13:07:11.000000000 +0000 -@@ -104,10 +104,18 @@ - else - { - /* We must allocate a new element. */ -- Elf_ScnList *newp; -+ Elf_ScnList *newp = NULL; - - assert (elf->state.elf.scnincr > 0); - -+ if ( -+#if SIZE_MAX <= 4294967295U -+ likely (elf->state.elf.scnincr -+ < SIZE_MAX / 2 / sizeof (Elf_Scn) - sizeof (Elf_ScnList)) -+#else -+ 1 -+#endif -+ ) - newp = (Elf_ScnList *) calloc (sizeof (Elf_ScnList) - + ((elf->state.elf.scnincr *= 2) - * sizeof (Elf_Scn)), 1); -Index: elfutils-0.148/libelf/gelf_getdyn.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_getdyn.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_getdyn.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Get information from dynamic table at the given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -93,7 +93,7 @@ - table entries has to be adopted. The user better has provided - a buffer where we can store the information. While copying the - data we are converting the format. */ -- if (unlikely ((ndx + 1) * sizeof (Elf32_Dyn) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf32_Dyn, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -@@ -114,7 +114,7 @@ - - /* The data is already in the correct form. Just make sure the - index is OK. */ -- if (unlikely ((ndx + 1) * sizeof (GElf_Dyn) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, GElf_Dyn, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_getlib.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_getlib.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_getlib.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Get library from table at the given index. -- Copyright (C) 2004 Red Hat, Inc. -+ Copyright (C) 2004-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2004. - -@@ -86,7 +86,7 @@ - /* The data is already in the correct form. Just make sure the - index is OK. */ - GElf_Lib *result = NULL; -- if (unlikely ((ndx + 1) * sizeof (GElf_Lib) > data->d_size)) -+ if (INVALID_NDX (ndx, GElf_Lib, data)) - __libelf_seterrno (ELF_E_INVALID_INDEX); - else - { -Index: elfutils-0.148/libelf/gelf_getmove.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_getmove.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_getmove.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Get move structure at the given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -83,7 +83,7 @@ - - /* The data is already in the correct form. Just make sure the - index is OK. 
*/ -- if (unlikely ((ndx + 1) * sizeof (GElf_Move) > data->d_size)) -+ if (INVALID_NDX (ndx, GElf_Move, data)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_getrela.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_getrela.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_getrela.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Get RELA relocation information at given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -71,12 +71,6 @@ - if (data_scn == NULL) - return NULL; - -- if (unlikely (ndx < 0)) -- { -- __libelf_seterrno (ELF_E_INVALID_INDEX); -- return NULL; -- } -- - if (unlikely (data_scn->d.d_type != ELF_T_RELA)) - { - __libelf_seterrno (ELF_E_INVALID_HANDLE); -@@ -93,7 +87,7 @@ - if (scn->elf->class == ELFCLASS32) - { - /* We have to convert the data. */ -- if (unlikely ((ndx + 1) * sizeof (Elf32_Rela) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf32_Rela, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - result = NULL; -@@ -114,7 +108,7 @@ - { - /* Simply copy the data after we made sure we are actually getting - correct data. */ -- if (unlikely ((ndx + 1) * sizeof (Elf64_Rela) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf64_Rela, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - result = NULL; -Index: elfutils-0.148/libelf/gelf_getrel.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_getrel.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_getrel.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Get REL relocation information at given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -71,12 +71,6 @@ - if (data_scn == NULL) - return NULL; - -- if (unlikely (ndx < 0)) -- { -- __libelf_seterrno (ELF_E_INVALID_INDEX); -- return NULL; -- } -- - if (unlikely (data_scn->d.d_type != ELF_T_REL)) - { - __libelf_seterrno (ELF_E_INVALID_HANDLE); -@@ -93,7 +87,7 @@ - if (scn->elf->class == ELFCLASS32) - { - /* We have to convert the data. */ -- if (unlikely ((ndx + 1) * sizeof (Elf32_Rel) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf32_Rel, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - result = NULL; -@@ -113,7 +107,7 @@ - { - /* Simply copy the data after we made sure we are actually getting - correct data. */ -- if (unlikely ((ndx + 1) * sizeof (Elf64_Rel) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf64_Rel, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - result = NULL; -Index: elfutils-0.148/libelf/gelf_getsym.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_getsym.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_getsym.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Get symbol information from symbol table at the given index. -- Copyright (C) 1999, 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 1999-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 1999. - -@@ -90,7 +90,7 @@ - table entries has to be adopted. The user better has provided - a buffer where we can store the information. 
While copying the - data we are converting the format. */ -- if (unlikely ((ndx + 1) * sizeof (Elf32_Sym) > data->d_size)) -+ if (INVALID_NDX (ndx, Elf32_Sym, data)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -@@ -119,7 +119,7 @@ - - /* The data is already in the correct form. Just make sure the - index is OK. */ -- if (unlikely ((ndx + 1) * sizeof (GElf_Sym) > data->d_size)) -+ if (INVALID_NDX (ndx, GElf_Sym, data)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_getsyminfo.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_getsyminfo.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_getsyminfo.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Get additional symbol information from symbol table at the given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -84,7 +84,7 @@ - - /* The data is already in the correct form. Just make sure the - index is OK. */ -- if (unlikely ((ndx + 1) * sizeof (GElf_Syminfo) > data->d_size)) -+ if (INVALID_NDX (ndx, GElf_Syminfo, data)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_getsymshndx.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_getsymshndx.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_getsymshndx.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,6 +1,6 @@ - /* Get symbol information and separate section index from symbol table - at the given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -90,7 +90,7 @@ - section index table. */ - if (likely (shndxdata_scn != NULL)) - { -- if (unlikely ((ndx + 1) * sizeof (Elf32_Word) > shndxdata_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf32_Word, &shndxdata_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -@@ -110,7 +110,7 @@ - table entries has to be adopted. The user better has provided - a buffer where we can store the information. While copying the - data we are converting the format. */ -- if (unlikely ((ndx + 1) * sizeof (Elf32_Sym) > symdata->d_size)) -+ if (INVALID_NDX (ndx, Elf32_Sym, symdata)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -@@ -139,7 +139,7 @@ - - /* The data is already in the correct form. Just make sure the - index is OK. */ -- if (unlikely ((ndx + 1) * sizeof (GElf_Sym) > symdata->d_size)) -+ if (INVALID_NDX (ndx, GElf_Sym, symdata)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_getversym.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_getversym.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_getversym.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Get symbol version information at the given index. -- Copyright (C) 1999, 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 1999-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 1999. - -@@ -92,7 +92,7 @@ - - /* The data is already in the correct form. Just make sure the - index is OK. 
*/ -- if (unlikely ((ndx + 1) * sizeof (GElf_Versym) > data->d_size)) -+ if (INVALID_NDX (ndx, GElf_Versym, data)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - result = NULL; -Index: elfutils-0.148/libelf/gelf_update_dyn.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_update_dyn.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_update_dyn.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Update information in dynamic table at the given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -71,12 +71,6 @@ - if (data == NULL) - return 0; - -- if (unlikely (ndx < 0)) -- { -- __libelf_seterrno (ELF_E_INVALID_INDEX); -- return 0; -- } -- - if (unlikely (data_scn->d.d_type != ELF_T_DYN)) - { - /* The type of the data better should match. */ -@@ -102,7 +96,7 @@ - } - - /* Check whether we have to resize the data buffer. */ -- if (unlikely ((ndx + 1) * sizeof (Elf32_Dyn) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf32_Dyn, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -@@ -116,7 +110,7 @@ - else - { - /* Check whether we have to resize the data buffer. */ -- if (unlikely ((ndx + 1) * sizeof (Elf64_Dyn) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf64_Dyn, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_update_lib.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_update_lib.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_update_lib.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Update library in table at the given index. -- Copyright (C) 2004 Red Hat, Inc. -+ Copyright (C) 2004-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2004. - -@@ -68,12 +68,6 @@ - if (data == NULL) - return 0; - -- if (unlikely (ndx < 0)) -- { -- __libelf_seterrno (ELF_E_INVALID_INDEX); -- return 0; -- } -- - Elf_Data_Scn *data_scn = (Elf_Data_Scn *) data; - if (unlikely (data_scn->d.d_type != ELF_T_LIB)) - { -@@ -87,7 +81,7 @@ - - /* Check whether we have to resize the data buffer. */ - int result = 0; -- if (unlikely ((ndx + 1) * sizeof (Elf64_Lib) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf64_Lib, &data_scn->d)) - __libelf_seterrno (ELF_E_INVALID_INDEX); - else - { -Index: elfutils-0.148/libelf/gelf_update_move.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_update_move.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_update_move.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Update move structure at the given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -75,8 +75,7 @@ - assert (sizeof (GElf_Move) == sizeof (Elf64_Move)); - - /* Check whether we have to resize the data buffer. 
*/ -- if (unlikely (ndx < 0) -- || unlikely ((ndx + 1) * sizeof (GElf_Move) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, GElf_Move, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - return 0; -Index: elfutils-0.148/libelf/gelf_update_rela.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_update_rela.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_update_rela.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Update RELA relocation information at given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -68,12 +68,6 @@ - if (dst == NULL) - return 0; - -- if (unlikely (ndx < 0)) -- { -- __libelf_seterrno (ELF_E_INVALID_INDEX); -- return 0; -- } -- - if (unlikely (data_scn->d.d_type != ELF_T_RELA)) - { - /* The type of the data better should match. */ -@@ -101,7 +95,7 @@ - } - - /* Check whether we have to resize the data buffer. */ -- if (unlikely ((ndx + 1) * sizeof (Elf32_Rela) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf32_Rela, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -@@ -117,7 +111,7 @@ - else - { - /* Check whether we have to resize the data buffer. */ -- if (unlikely ((ndx + 1) * sizeof (Elf64_Rela) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf64_Rela, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_update_rel.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_update_rel.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_update_rel.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Update REL relocation information at given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -68,12 +68,6 @@ - if (dst == NULL) - return 0; - -- if (unlikely (ndx < 0)) -- { -- __libelf_seterrno (ELF_E_INVALID_INDEX); -- return 0; -- } -- - if (unlikely (data_scn->d.d_type != ELF_T_REL)) - { - /* The type of the data better should match. */ -@@ -99,7 +93,7 @@ - } - - /* Check whether we have to resize the data buffer. */ -- if (unlikely ((ndx + 1) * sizeof (Elf32_Rel) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf32_Rel, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -@@ -114,7 +108,7 @@ - else - { - /* Check whether we have to resize the data buffer. */ -- if (unlikely ((ndx + 1) * sizeof (Elf64_Rel) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf64_Rel, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_update_sym.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_update_sym.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_update_sym.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Update symbol information in symbol table at the given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. 
- -@@ -72,12 +72,6 @@ - if (data == NULL) - return 0; - -- if (unlikely (ndx < 0)) -- { -- __libelf_seterrno (ELF_E_INVALID_INDEX); -- return 0; -- } -- - if (unlikely (data_scn->d.d_type != ELF_T_SYM)) - { - /* The type of the data better should match. */ -@@ -102,7 +96,7 @@ - } - - /* Check whether we have to resize the data buffer. */ -- if (unlikely ((ndx + 1) * sizeof (Elf32_Sym) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf32_Sym, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -@@ -125,7 +119,7 @@ - else - { - /* Check whether we have to resize the data buffer. */ -- if (unlikely ((ndx + 1) * sizeof (Elf64_Sym) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf64_Sym, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_update_syminfo.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_update_syminfo.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_update_syminfo.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Update additional symbol information in symbol table at the given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -72,12 +72,6 @@ - if (data == NULL) - return 0; - -- if (unlikely (ndx < 0)) -- { -- __libelf_seterrno (ELF_E_INVALID_INDEX); -- return 0; -- } -- - if (unlikely (data_scn->d.d_type != ELF_T_SYMINFO)) - { - /* The type of the data better should match. */ -@@ -93,7 +87,7 @@ - rwlock_wrlock (scn->elf->lock); - - /* Check whether we have to resize the data buffer. */ -- if (unlikely ((ndx + 1) * sizeof (GElf_Syminfo) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, GElf_Syminfo, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_update_symshndx.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_update_symshndx.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_update_symshndx.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,6 +1,6 @@ - /* Update symbol information and section index in symbol table at the - given index. -- Copyright (C) 2000, 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2000-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2000. - -@@ -77,12 +77,6 @@ - if (symdata == NULL) - return 0; - -- if (unlikely (ndx < 0)) -- { -- __libelf_seterrno (ELF_E_INVALID_INDEX); -- return 0; -- } -- - if (unlikely (symdata_scn->d.d_type != ELF_T_SYM)) - { - /* The type of the data better should match. */ -@@ -128,7 +122,7 @@ - } - - /* Check whether we have to resize the data buffer. */ -- if (unlikely ((ndx + 1) * sizeof (Elf32_Sym) > symdata_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf32_Sym, &symdata_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -@@ -151,7 +145,7 @@ - else - { - /* Check whether we have to resize the data buffer. 
*/ -- if (unlikely ((ndx + 1) * sizeof (Elf64_Sym) > symdata_scn->d.d_size)) -+ if (INVALID_NDX (ndx, Elf64_Sym, &symdata_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - goto out; -Index: elfutils-0.148/libelf/gelf_update_versym.c -=================================================================== ---- elfutils-0.148.orig/libelf/gelf_update_versym.c 2009-01-08 20:56:37.000000000 +0000 -+++ elfutils-0.148/libelf/gelf_update_versym.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1,5 +1,5 @@ - /* Update symbol version information. -- Copyright (C) 2001, 2002 Red Hat, Inc. -+ Copyright (C) 2001-2009 Red Hat, Inc. - This file is part of Red Hat elfutils. - Written by Ulrich Drepper , 2001. - -@@ -75,8 +75,7 @@ - assert (sizeof (GElf_Versym) == sizeof (Elf64_Versym)); - - /* Check whether we have to resize the data buffer. */ -- if (unlikely (ndx < 0) -- || unlikely ((ndx + 1) * sizeof (GElf_Versym) > data_scn->d.d_size)) -+ if (INVALID_NDX (ndx, GElf_Versym, &data_scn->d)) - { - __libelf_seterrno (ELF_E_INVALID_INDEX); - return 0; -Index: elfutils-0.148/libelf/libelfP.h -=================================================================== ---- elfutils-0.148.orig/libelf/libelfP.h 2010-01-12 16:57:54.000000000 +0000 -+++ elfutils-0.148/libelf/libelfP.h 2010-07-03 13:07:11.000000000 +0000 -@@ -608,4 +608,8 @@ - /* Align offset to 4 bytes as needed for note name and descriptor data. */ - #define NOTE_ALIGN(n) (((n) + 3) & -4U) - -+/* Convenience macro. */ -+#define INVALID_NDX(ndx, type, data) \ -+ unlikely ((data)->d_size / sizeof (type) <= (unsigned int) (ndx)) -+ - #endif /* libelfP.h */ -Index: elfutils-0.148/src/ChangeLog -=================================================================== ---- elfutils-0.148.orig/src/ChangeLog 2010-07-03 13:07:10.000000000 +0000 -+++ elfutils-0.148/src/ChangeLog 2010-07-03 13:07:11.000000000 +0000 -@@ -1640,6 +1640,16 @@ - object symbols or symbols with unknown type. - (check_rel): Likewise. - -+2005-06-09 Roland McGrath -+ -+ * readelf.c (handle_dynamic, handle_symtab): Check for bogus sh_link. -+ (handle_verneed, handle_verdef, handle_versym, handle_hash): Likewise. -+ (handle_scngrp): Check for bogus sh_info. -+ -+ * strip.c (handle_elf): Check for bogus values in sh_link, sh_info, -+ st_shndx, e_shstrndx, and SHT_GROUP or SHT_SYMTAB_SHNDX data. -+ Don't use assert on input values, instead bail with "illformed" error. -+ - 2005-06-08 Roland McGrath - - * readelf.c (print_ops): Add consts. -@@ -1690,6 +1700,19 @@ - - * readelf.c (dwarf_tag_string): Add new tags. - -+2005-05-17 Jakub Jelinek -+ -+ * elflint.c (check_hash): Don't check entries beyond end of section. -+ (check_note): Don't crash if gelf_rawchunk fails. -+ (section_name): Return if gelf_getshdr returns NULL. -+ -+2005-05-14 Jakub Jelinek -+ -+ * elflint.c (section_name): Return "" instead of -+ crashing on invalid section name. -+ (check_symtab, is_rel_dyn, check_rela, check_rel, check_dynamic, -+ check_symtab_shndx, check_hash, check_versym): Robustify. -+ - 2005-05-08 Roland McGrath - - * strip.c (handle_elf): Don't translate hash and versym data formats, -Index: elfutils-0.148/src/elflint.c -=================================================================== ---- elfutils-0.148.orig/src/elflint.c 2010-04-13 20:08:02.000000000 +0000 -+++ elfutils-0.148/src/elflint.c 2010-07-03 13:07:11.000000000 +0000 -@@ -131,6 +131,10 @@ - /* Array to count references in section groups. */ - static int *scnref; - -+/* Numbers of sections and program headers. 
*/ -+static unsigned int shnum; -+static unsigned int phnum; -+ - - int - main (int argc, char *argv[]) -@@ -319,10 +323,19 @@ - { - GElf_Shdr shdr_mem; - GElf_Shdr *shdr; -+ const char *ret; -+ -+ if ((unsigned int) idx > shnum) -+ return ""; - - shdr = gelf_getshdr (elf_getscn (ebl->elf, idx), &shdr_mem); -+ if (shdr == NULL) -+ return ""; - -- return elf_strptr (ebl->elf, shstrndx, shdr->sh_name); -+ ret = elf_strptr (ebl->elf, shstrndx, shdr->sh_name); -+ if (ret == NULL) -+ return ""; -+ return ret; - } - - -@@ -344,11 +357,6 @@ - (sizeof (valid_e_machine) / sizeof (valid_e_machine[0])) - - --/* Numbers of sections and program headers. */ --static unsigned int shnum; --static unsigned int phnum; -- -- - static void - check_elf_header (Ebl *ebl, GElf_Ehdr *ehdr, size_t size) - { -@@ -632,7 +640,8 @@ - } - } - -- if (shdr->sh_entsize != gelf_fsize (ebl->elf, ELF_T_SYM, 1, EV_CURRENT)) -+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_SYM, 1, EV_CURRENT); -+ if (shdr->sh_entsize != sh_entsize) - ERROR (gettext ("\ - section [%2u] '%s': entry size is does not match ElfXX_Sym\n"), - idx, section_name (ebl, idx)); -@@ -670,7 +679,7 @@ - xndxscnidx, section_name (ebl, xndxscnidx)); - } - -- for (size_t cnt = 1; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt) -+ for (size_t cnt = 1; cnt < shdr->sh_size / sh_entsize; ++cnt) - { - sym = gelf_getsymshndx (data, xndxdata, cnt, &sym_mem, &xndx); - if (sym == NULL) -@@ -690,7 +699,8 @@ - else - { - name = elf_strptr (ebl->elf, shdr->sh_link, sym->st_name); -- assert (name != NULL); -+ assert (name != NULL -+ || strshdr->sh_type != SHT_STRTAB); - } - - if (sym->st_shndx == SHN_XINDEX) -@@ -1038,9 +1048,11 @@ - { - GElf_Shdr rcshdr_mem; - const GElf_Shdr *rcshdr = gelf_getshdr (scn, &rcshdr_mem); -- assert (rcshdr != NULL); - -- if (rcshdr->sh_type == SHT_DYNAMIC) -+ if (rcshdr == NULL) -+ break; -+ -+ if (rcshdr->sh_type == SHT_DYNAMIC && rcshdr->sh_entsize) - { - /* Found the dynamic section. Look through it. */ - Elf_Data *d = elf_getdata (scn, NULL); -@@ -1050,7 +1062,9 @@ - { - GElf_Dyn dyn_mem; - GElf_Dyn *dyn = gelf_getdyn (d, cnt, &dyn_mem); -- assert (dyn != NULL); -+ -+ if (dyn == NULL) -+ break; - - if (dyn->d_tag == DT_RELCOUNT) - { -@@ -1064,7 +1078,9 @@ - /* Does the number specified number of relative - relocations exceed the total number of - relocations? */ -- if (dyn->d_un.d_val > shdr->sh_size / shdr->sh_entsize) -+ if (shdr->sh_entsize != 0 -+ && dyn->d_un.d_val > (shdr->sh_size -+ / shdr->sh_entsize)) - ERROR (gettext ("\ - section [%2d] '%s': DT_RELCOUNT value %d too high for this section\n"), - idx, section_name (ebl, idx), -@@ -1224,7 +1240,8 @@ - } - } - -- if (shdr->sh_entsize != gelf_fsize (ebl->elf, reltype, 1, EV_CURRENT)) -+ size_t sh_entsize = gelf_fsize (ebl->elf, reltype, 1, EV_CURRENT); -+ if (shdr->sh_entsize != sh_entsize) - ERROR (gettext (reltype == ELF_T_RELA ? 
"\ - section [%2d] '%s': section entry size does not match ElfXX_Rela\n" : "\ - section [%2d] '%s': section entry size does not match ElfXX_Rel\n"), -@@ -1447,7 +1464,8 @@ - Elf_Data *symdata = elf_getdata (symscn, NULL); - enum load_state state = state_undecided; - -- for (size_t cnt = 0; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt) -+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_RELA, 1, EV_CURRENT); -+ for (size_t cnt = 0; cnt < shdr->sh_size / sh_entsize; ++cnt) - { - GElf_Rela rela_mem; - GElf_Rela *rela = gelf_getrela (data, cnt, &rela_mem); -@@ -1497,7 +1515,8 @@ - Elf_Data *symdata = elf_getdata (symscn, NULL); - enum load_state state = state_undecided; - -- for (size_t cnt = 0; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt) -+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_REL, 1, EV_CURRENT); -+ for (size_t cnt = 0; cnt < shdr->sh_size / sh_entsize; ++cnt) - { - GElf_Rel rel_mem; - GElf_Rel *rel = gelf_getrel (data, cnt, &rel_mem); -@@ -1600,7 +1619,8 @@ - shdr->sh_link, section_name (ebl, shdr->sh_link), - idx, section_name (ebl, idx)); - -- if (shdr->sh_entsize != gelf_fsize (ebl->elf, ELF_T_DYN, 1, EV_CURRENT)) -+ size_t sh_entsize = gelf_fsize (ebl->elf, ELF_T_DYN, 1, EV_CURRENT); -+ if (shdr->sh_entsize != sh_entsize) - ERROR (gettext ("\ - section [%2d] '%s': section entry size does not match ElfXX_Dyn\n"), - idx, section_name (ebl, idx)); -@@ -1610,7 +1630,7 @@ - idx, section_name (ebl, idx)); - - bool non_null_warned = false; -- for (cnt = 0; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt) -+ for (cnt = 0; cnt < shdr->sh_size / sh_entsize; ++cnt) - { - GElf_Dyn dyn_mem; - GElf_Dyn *dyn = gelf_getdyn (data, cnt, &dyn_mem); -@@ -1891,6 +1911,8 @@ - idx, section_name (ebl, idx)); - - if (symshdr != NULL -+ && shdr->sh_entsize -+ && symshdr->sh_entsize - && (shdr->sh_size / shdr->sh_entsize - < symshdr->sh_size / symshdr->sh_entsize)) - ERROR (gettext ("\ -@@ -1917,6 +1939,12 @@ - } - - Elf_Data *data = elf_getdata (elf_getscn (ebl->elf, idx), NULL); -+ if (data == NULL) -+ { -+ ERROR (gettext ("section [%2d] '%s': cannot get section data\n"), -+ idx, section_name (ebl, idx)); -+ return; -+ } - - if (*((Elf32_Word *) data->d_buf) != 0) - ERROR (gettext ("symbol 0 should have zero extended section index\n")); -@@ -1959,7 +1987,7 @@ - - size_t maxidx = nchain; - -- if (symshdr != NULL) -+ if (symshdr != NULL && symshdr->sh_entsize != 0) - { - size_t symsize = symshdr->sh_size / symshdr->sh_entsize; - -@@ -1970,18 +1998,28 @@ - maxidx = symsize; - } - -+ Elf32_Word *buf = (Elf32_Word *) data->d_buf; -+ Elf32_Word *end = (Elf32_Word *) ((char *) data->d_buf + shdr->sh_size); - size_t cnt; - for (cnt = 2; cnt < 2 + nbucket; ++cnt) -- if (((Elf32_Word *) data->d_buf)[cnt] >= maxidx) -+ { -+ if (buf + cnt >= end) -+ break; -+ else if (buf[cnt] >= maxidx) - ERROR (gettext ("\ - section [%2d] '%s': hash bucket reference %zu out of bounds\n"), - idx, section_name (ebl, idx), cnt - 2); -+ } - - for (; cnt < 2 + nbucket + nchain; ++cnt) -- if (((Elf32_Word *) data->d_buf)[cnt] >= maxidx) -+ { -+ if (buf + cnt >= end) -+ break; -+ else if (buf[cnt] >= maxidx) - ERROR (gettext ("\ - section [%2d] '%s': hash chain reference %zu out of bounds\n"), - idx, section_name (ebl, idx), cnt - 2 - nbucket); -+ } - } - - -@@ -2011,18 +2049,28 @@ - maxidx = symsize; - } - -+ Elf64_Xword *buf = (Elf64_Xword *) data->d_buf; -+ Elf64_Xword *end = (Elf64_Xword *) ((char *) data->d_buf + shdr->sh_size); - size_t cnt; - for (cnt = 2; cnt < 2 + nbucket; ++cnt) -- if (((Elf64_Xword *) data->d_buf)[cnt] >= 
maxidx) -+ { -+ if (buf + cnt >= end) -+ break; -+ else if (buf[cnt] >= maxidx) - ERROR (gettext ("\ - section [%2d] '%s': hash bucket reference %zu out of bounds\n"), - idx, section_name (ebl, idx), cnt - 2); -+ } - - for (; cnt < 2 + nbucket + nchain; ++cnt) -- if (((Elf64_Xword *) data->d_buf)[cnt] >= maxidx) -+ { -+ if (buf + cnt >= end) -+ break; -+ else if (buf[cnt] >= maxidx) - ERROR (gettext ("\ - section [%2d] '%s': hash chain reference %" PRIu64 " out of bounds\n"), -- idx, section_name (ebl, idx), (uint64_t) (cnt - 2 - nbucket)); -+ idx, section_name (ebl, idx), (uint64_t) cnt - 2 - nbucket); -+ } - } - - -@@ -2047,7 +2095,7 @@ - if (shdr->sh_size < (4 + bitmask_words + nbuckets) * sizeof (Elf32_Word)) - { - ERROR (gettext ("\ --section [%2d] '%s': hash table section is too small (is %ld, expected at least%ld)\n"), -+section [%2d] '%s': hash table section is too small (is %ld, expected at least %ld)\n"), - idx, section_name (ebl, idx), (long int) shdr->sh_size, - (long int) ((4 + bitmask_words + nbuckets) * sizeof (Elf32_Word))); - return; -@@ -2719,8 +2767,9 @@ - - /* The number of elements in the version symbol table must be the - same as the number of symbols. */ -- if (shdr->sh_size / shdr->sh_entsize -- != symshdr->sh_size / symshdr->sh_entsize) -+ if (shdr->sh_entsize && symshdr->sh_entsize -+ && (shdr->sh_size / shdr->sh_entsize -+ != symshdr->sh_size / symshdr->sh_entsize)) - ERROR (gettext ("\ - section [%2d] '%s' has different number of entries than symbol table [%2d] '%s'\n"), - idx, section_name (ebl, idx), -Index: elfutils-0.148/src/readelf.c -=================================================================== ---- elfutils-0.148.orig/src/readelf.c 2010-07-03 13:07:10.000000000 +0000 -+++ elfutils-0.148/src/readelf.c 2010-07-03 13:07:11.000000000 +0000 -@@ -1172,6 +1172,8 @@ - Elf32_Word *grpref = (Elf32_Word *) data->d_buf; - - GElf_Sym sym_mem; -+ GElf_Sym *sym = gelf_getsym (symdata, shdr->sh_info, &sym_mem); -+ - printf ((grpref[0] & GRP_COMDAT) - ? ngettext ("\ - \nCOMDAT section group [%2zu] '%s' with signature '%s' contains %zu entry:\n", -@@ -1184,8 +1186,8 @@ - data->d_size / sizeof (Elf32_Word) - 1), - elf_ndxscn (scn), - elf_strptr (ebl->elf, shstrndx, shdr->sh_name), -- elf_strptr (ebl->elf, symshdr->sh_link, -- gelf_getsym (symdata, shdr->sh_info, &sym_mem)->st_name) -+ (sym == NULL ? NULL -+ : elf_strptr (ebl->elf, symshdr->sh_link, sym->st_name)) - ?: gettext (""), - data->d_size / sizeof (Elf32_Word) - 1); - -@@ -1336,7 +1338,8 @@ - handle_dynamic (Ebl *ebl, Elf_Scn *scn, GElf_Shdr *shdr) - { - int class = gelf_getclass (ebl->elf); -- GElf_Shdr glink; -+ GElf_Shdr glink_mem; -+ GElf_Shdr *glink; - Elf_Data *data; - size_t cnt; - size_t shstrndx; -@@ -1351,6 +1354,11 @@ - error (EXIT_FAILURE, 0, - gettext ("cannot get section header string table index")); - -+ glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), &glink_mem); -+ if (glink == NULL) -+ error (EXIT_FAILURE, 0, gettext ("invalid sh_link value in section %Zu"), -+ elf_ndxscn (scn)); -+ - printf (ngettext ("\ - \nDynamic segment contains %lu entry:\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'\n", - "\ -@@ -1360,9 +1368,7 @@ - class == ELFCLASS32 ? 
10 : 18, shdr->sh_addr, - shdr->sh_offset, - (int) shdr->sh_link, -- elf_strptr (ebl->elf, shstrndx, -- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), -- &glink)->sh_name)); -+ elf_strptr (ebl->elf, shstrndx, glink->sh_name)); - fputs_unlocked (gettext (" Type Value\n"), stdout); - - for (cnt = 0; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt) -@@ -1945,6 +1951,13 @@ - error (EXIT_FAILURE, 0, - gettext ("cannot get section header string table index")); - -+ GElf_Shdr glink_mem; -+ GElf_Shdr *glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), -+ &glink_mem); -+ if (glink == NULL) -+ error (EXIT_FAILURE, 0, gettext ("invalid sh_link value in section %Zu"), -+ elf_ndxscn (scn)); -+ - /* Now we can compute the number of entries in the section. */ - unsigned int nsyms = data->d_size / (class == ELFCLASS32 - ? sizeof (Elf32_Sym) -@@ -1955,15 +1968,12 @@ - nsyms), - (unsigned int) elf_ndxscn (scn), - elf_strptr (ebl->elf, shstrndx, shdr->sh_name), nsyms); -- GElf_Shdr glink; - printf (ngettext (" %lu local symbol String table: [%2u] '%s'\n", - " %lu local symbols String table: [%2u] '%s'\n", - shdr->sh_info), - (unsigned long int) shdr->sh_info, - (unsigned int) shdr->sh_link, -- elf_strptr (ebl->elf, shstrndx, -- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), -- &glink)->sh_name)); -+ elf_strptr (ebl->elf, shstrndx, glink->sh_name)); - - fputs_unlocked (class == ELFCLASS32 - ? gettext ("\ -@@ -2199,7 +2209,13 @@ - error (EXIT_FAILURE, 0, - gettext ("cannot get section header string table index")); - -- GElf_Shdr glink; -+ GElf_Shdr glink_mem; -+ GElf_Shdr *glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), -+ &glink_mem); -+ if (glink == NULL) -+ error (EXIT_FAILURE, 0, gettext ("invalid sh_link value in section %Zu"), -+ elf_ndxscn (scn)); -+ - printf (ngettext ("\ - \nVersion needs section [%2u] '%s' contains %d entry:\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'\n", - "\ -@@ -2210,9 +2226,7 @@ - class == ELFCLASS32 ? 10 : 18, shdr->sh_addr, - shdr->sh_offset, - (unsigned int) shdr->sh_link, -- elf_strptr (ebl->elf, shstrndx, -- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), -- &glink)->sh_name)); -+ elf_strptr (ebl->elf, shstrndx, glink->sh_name)); - - unsigned int offset = 0; - for (int cnt = shdr->sh_info; --cnt >= 0; ) -@@ -2265,8 +2279,14 @@ - error (EXIT_FAILURE, 0, - gettext ("cannot get section header string table index")); - -+ GElf_Shdr glink_mem; -+ GElf_Shdr *glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), -+ &glink_mem); -+ if (glink == NULL) -+ error (EXIT_FAILURE, 0, gettext ("invalid sh_link value in section %Zu"), -+ elf_ndxscn (scn)); -+ - int class = gelf_getclass (ebl->elf); -- GElf_Shdr glink; - printf (ngettext ("\ - \nVersion definition section [%2u] '%s' contains %d entry:\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'\n", - "\ -@@ -2278,9 +2298,7 @@ - class == ELFCLASS32 ? 
10 : 18, shdr->sh_addr, - shdr->sh_offset, - (unsigned int) shdr->sh_link, -- elf_strptr (ebl->elf, shstrndx, -- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), -- &glink)->sh_name)); -+ elf_strptr (ebl->elf, shstrndx, glink->sh_name)); - - unsigned int offset = 0; - for (int cnt = shdr->sh_info; --cnt >= 0; ) -@@ -2542,8 +2560,14 @@ - filename = NULL; - } - -+ GElf_Shdr glink_mem; -+ GElf_Shdr *glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), -+ &glink_mem); -+ if (glink == NULL) -+ error (EXIT_FAILURE, 0, gettext ("invalid sh_link value in section %Zu"), -+ elf_ndxscn (scn)); -+ - /* Print the header. */ -- GElf_Shdr glink; - printf (ngettext ("\ - \nVersion symbols section [%2u] '%s' contains %d entry:\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'", - "\ -@@ -2555,9 +2579,7 @@ - class == ELFCLASS32 ? 10 : 18, shdr->sh_addr, - shdr->sh_offset, - (unsigned int) shdr->sh_link, -- elf_strptr (ebl->elf, shstrndx, -- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), -- &glink)->sh_name)); -+ elf_strptr (ebl->elf, shstrndx, glink->sh_name)); - - /* Now we can finally look at the actual contents of this section. */ - for (unsigned int cnt = 0; cnt < shdr->sh_size / shdr->sh_entsize; ++cnt) -@@ -2609,7 +2631,17 @@ - for (Elf32_Word cnt = 0; cnt < nbucket; ++cnt) - ++counts[lengths[cnt]]; - -- GElf_Shdr glink; -+ GElf_Shdr glink_mem; -+ GElf_Shdr *glink = gelf_getshdr (elf_getscn (ebl->elf, -+ shdr->sh_link), -+ &glink_mem); -+ if (glink == NULL) -+ { -+ error (0, 0, gettext ("invalid sh_link value in section %Zu"), -+ elf_ndxscn (scn)); -+ return; -+ } -+ - printf (ngettext ("\ - \nHistogram for bucket list length in section [%2u] '%s' (total of %d bucket):\n Addr: %#0*" PRIx64 " Offset: %#08" PRIx64 " Link to section: [%2u] '%s'\n", - "\ -@@ -2622,9 +2654,7 @@ - shdr->sh_addr, - shdr->sh_offset, - (unsigned int) shdr->sh_link, -- elf_strptr (ebl->elf, shstrndx, -- gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), -- &glink)->sh_name)); -+ elf_strptr (ebl->elf, shstrndx, glink->sh_name)); - - if (extrastr != NULL) - fputs (extrastr, stdout); -@@ -4312,6 +4342,16 @@ - return; - } - -+ GElf_Shdr glink_mem; -+ GElf_Shdr *glink; -+ glink = gelf_getshdr (elf_getscn (ebl->elf, shdr->sh_link), &glink_mem); -+ if (glink == NULL) -+ { -+ error (0, 0, gettext ("invalid sh_link value in section %Zu"), -+ elf_ndxscn (scn)); -+ return; -+ } -+ - printf (ngettext ("\ - \nDWARF section [%2zu] '%s' at offset %#" PRIx64 " contains %zu entry:\n", - "\ -Index: elfutils-0.148/src/strip.c -=================================================================== ---- elfutils-0.148.orig/src/strip.c 2010-07-03 13:07:10.000000000 +0000 -+++ elfutils-0.148/src/strip.c 2010-07-03 13:07:11.000000000 +0000 -@@ -561,6 +561,11 @@ - goto fail_close; - } - -+ if (shstrndx >= shnum) -+ goto illformed; -+ -+#define elf_assert(test) do { if (!(test)) goto illformed; } while (0) -+ - /* Storage for section information. We leave room for two more - entries since we unconditionally create a section header string - table. Maybe some weird tool created an ELF file without one. -@@ -582,7 +587,7 @@ - { - /* This should always be true (i.e., there should not be any - holes in the numbering). 
*/ -- assert (elf_ndxscn (scn) == cnt); -+ elf_assert (elf_ndxscn (scn) == cnt); - - shdr_info[cnt].scn = scn; - -@@ -595,6 +600,7 @@ - shdr_info[cnt].shdr.sh_name); - if (shdr_info[cnt].name == NULL) - { -+ illformed: - error (0, 0, gettext ("illformed file '%s'"), fname); - goto fail_close; - } -@@ -604,6 +610,8 @@ - - /* Remember the shdr.sh_link value. */ - shdr_info[cnt].old_sh_link = shdr_info[cnt].shdr.sh_link; -+ if (shdr_info[cnt].old_sh_link >= shnum) -+ goto illformed; - - /* Sections in files other than relocatable object files which - are not loaded can be freely moved by us. In relocatable -@@ -616,7 +624,7 @@ - appropriate reference. */ - if (unlikely (shdr_info[cnt].shdr.sh_type == SHT_SYMTAB_SHNDX)) - { -- assert (shdr_info[shdr_info[cnt].shdr.sh_link].symtab_idx == 0); -+ elf_assert (shdr_info[shdr_info[cnt].shdr.sh_link].symtab_idx == 0); - shdr_info[shdr_info[cnt].shdr.sh_link].symtab_idx = cnt; - } - else if (unlikely (shdr_info[cnt].shdr.sh_type == SHT_GROUP)) -@@ -633,7 +641,12 @@ - for (inner = 1; - inner < shdr_info[cnt].data->d_size / sizeof (Elf32_Word); - ++inner) -+ { -+ if (grpref[inner] < shnum) - shdr_info[grpref[inner]].group_idx = cnt; -+ else -+ goto illformed; -+ } - - if (inner == 1 || (inner == 2 && (grpref[0] & GRP_COMDAT) == 0)) - /* If the section group contains only one element and this -@@ -644,7 +657,7 @@ - } - else if (unlikely (shdr_info[cnt].shdr.sh_type == SHT_GNU_versym)) - { -- assert (shdr_info[shdr_info[cnt].shdr.sh_link].version_idx == 0); -+ elf_assert (shdr_info[shdr_info[cnt].shdr.sh_link].version_idx == 0); - shdr_info[shdr_info[cnt].shdr.sh_link].version_idx = cnt; - } - -@@ -652,7 +665,7 @@ - discarded right away. */ - if ((shdr_info[cnt].shdr.sh_flags & SHF_GROUP) != 0) - { -- assert (shdr_info[cnt].group_idx != 0); -+ elf_assert (shdr_info[cnt].group_idx != 0); - - if (shdr_info[shdr_info[cnt].group_idx].idx == 0) - { -@@ -727,11 +740,15 @@ - { - /* If a relocation section is marked as being removed make - sure the section it is relocating is removed, too. */ -- if ((shdr_info[cnt].shdr.sh_type == SHT_REL -+ if (shdr_info[cnt].shdr.sh_type == SHT_REL - || shdr_info[cnt].shdr.sh_type == SHT_RELA) -- && shdr_info[shdr_info[cnt].shdr.sh_info].idx != 0) -+ { -+ if (shdr_info[cnt].shdr.sh_info >= shnum) -+ goto illformed; -+ else if (shdr_info[shdr_info[cnt].shdr.sh_info].idx != 0) - shdr_info[cnt].idx = 1; - } -+ } - - if (shdr_info[cnt].idx == 1) - { -@@ -758,7 +775,7 @@ - if (shdr_info[cnt].symtab_idx != 0 - && shdr_info[shdr_info[cnt].symtab_idx].data == NULL) - { -- assert (shdr_info[cnt].shdr.sh_type == SHT_SYMTAB); -+ elf_assert (shdr_info[cnt].shdr.sh_type == SHT_SYMTAB); - - shdr_info[shdr_info[cnt].symtab_idx].data - = elf_getdata (shdr_info[shdr_info[cnt].symtab_idx].scn, -@@ -798,6 +815,9 @@ - else if (scnidx == SHN_XINDEX) - scnidx = xndx; - -+ if (scnidx >= shnum) -+ goto illformed; -+ - if (shdr_info[scnidx].idx == 0) - /* This symbol table has a real symbol in - a discarded section. So preserve the -@@ -828,12 +848,16 @@ - } - - /* Handle references through sh_info. */ -- if (SH_INFO_LINK_P (&shdr_info[cnt].shdr) -- && shdr_info[shdr_info[cnt].shdr.sh_info].idx == 0) -+ if (SH_INFO_LINK_P (&shdr_info[cnt].shdr)) -+ { -+ if (shdr_info[cnt].shdr.sh_info >= shnum) -+ goto illformed; -+ else if ( shdr_info[shdr_info[cnt].shdr.sh_info].idx == 0) - { - shdr_info[shdr_info[cnt].shdr.sh_info].idx = 1; - changes |= shdr_info[cnt].shdr.sh_info < cnt; - } -+ } - - /* Mark the section as investigated. 
*/ - shdr_info[cnt].idx = 2; -@@ -972,7 +996,7 @@ - error (EXIT_FAILURE, 0, gettext ("while generating output file: %s"), - elf_errmsg (-1)); - -- assert (elf_ndxscn (shdr_info[cnt].newscn) == shdr_info[cnt].idx); -+ elf_assert (elf_ndxscn (shdr_info[cnt].newscn) == shdr_info[cnt].idx); - - /* Add this name to the section header string table. */ - shdr_info[cnt].se = ebl_strtabadd (shst, shdr_info[cnt].name, 0); -@@ -1009,7 +1033,7 @@ - error (EXIT_FAILURE, 0, - gettext ("while create section header section: %s"), - elf_errmsg (-1)); -- assert (elf_ndxscn (shdr_info[cnt].newscn) == shdr_info[cnt].idx); -+ elf_assert (elf_ndxscn (shdr_info[cnt].newscn) == shdr_info[cnt].idx); - - shdr_info[cnt].data = elf_newdata (shdr_info[cnt].newscn); - if (shdr_info[cnt].data == NULL) -@@ -1065,7 +1089,7 @@ - error (EXIT_FAILURE, 0, - gettext ("while create section header section: %s"), - elf_errmsg (-1)); -- assert (elf_ndxscn (shdr_info[cnt].newscn) == idx); -+ elf_assert (elf_ndxscn (shdr_info[cnt].newscn) == idx); - - /* Finalize the string table and fill in the correct indices in the - section headers. */ -@@ -1155,20 +1179,20 @@ - shndxdata = elf_getdata (shdr_info[shdr_info[cnt].symtab_idx].scn, - NULL); - -- assert ((versiondata->d_size / sizeof (Elf32_Word)) -+ elf_assert ((versiondata->d_size / sizeof (Elf32_Word)) - >= shdr_info[cnt].data->d_size / elsize); - } - - if (shdr_info[cnt].version_idx != 0) - { -- assert (shdr_info[cnt].shdr.sh_type == SHT_DYNSYM); -+ elf_assert (shdr_info[cnt].shdr.sh_type == SHT_DYNSYM); - /* This section has associated version - information. We have to modify that - information, too. */ - versiondata = elf_getdata (shdr_info[shdr_info[cnt].version_idx].scn, - NULL); - -- assert ((versiondata->d_size / sizeof (GElf_Versym)) -+ elf_assert ((versiondata->d_size / sizeof (GElf_Versym)) - >= shdr_info[cnt].data->d_size / elsize); - } - -@@ -1223,7 +1247,7 @@ - sec = shdr_info[sym->st_shndx].idx; - else - { -- assert (shndxdata != NULL); -+ elf_assert (shndxdata != NULL); - - sec = shdr_info[xshndx].idx; - } -@@ -1244,7 +1268,7 @@ - nxshndx = sec; - } - -- assert (sec < SHN_LORESERVE || shndxdata != NULL); -+ elf_assert (sec < SHN_LORESERVE || shndxdata != NULL); - - if ((inner != destidx || nshndx != sym->st_shndx - || (shndxdata != NULL && nxshndx != xshndx)) -@@ -1268,7 +1292,7 @@ - || shdr_info[cnt].debug_data == NULL) - /* This is a section symbol for a section which has - been removed. */ -- assert (GELF_ST_TYPE (sym->st_info) == STT_SECTION); -+ elf_assert (GELF_ST_TYPE (sym->st_info) == STT_SECTION); - } - - if (destidx != inner) -@@ -1455,11 +1479,11 @@ - { - GElf_Sym sym_mem; - GElf_Sym *sym = gelf_getsym (symd, inner, &sym_mem); -- assert (sym != NULL); -+ elf_assert (sym != NULL); - - const char *name = elf_strptr (elf, strshndx, - sym->st_name); -- assert (name != NULL); -+ elf_assert (name != NULL); - size_t hidx = elf_hash (name) % nbucket; - - if (bucket[hidx] == 0) -@@ -1478,7 +1502,7 @@ - else - { - /* Alpha and S390 64-bit use 64-bit SHT_HASH entries. 
*/ -- assert (shdr_info[cnt].shdr.sh_entsize -+ elf_assert (shdr_info[cnt].shdr.sh_entsize - == sizeof (Elf64_Xword)); - - Elf64_Xword *bucket = (Elf64_Xword *) hashd->d_buf; -@@ -1509,11 +1533,11 @@ - { - GElf_Sym sym_mem; - GElf_Sym *sym = gelf_getsym (symd, inner, &sym_mem); -- assert (sym != NULL); -+ elf_assert (sym != NULL); - - const char *name = elf_strptr (elf, strshndx, - sym->st_name); -- assert (name != NULL); -+ elf_assert (name != NULL); - size_t hidx = elf_hash (name) % nbucket; - - if (bucket[hidx] == 0) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/remove-unused.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/remove-unused.patch deleted file mode 100644 index 6a1979148..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/remove-unused.patch +++ /dev/null @@ -1,154 +0,0 @@ -Upstream-Status: Backport - -Remove unused variables from the code to prevent -Werror causing a build -failure on hosts with GCC 4.6. - -These changes are all upstream so should not be required once we've updated -to elfutils 0.152 or later. Therefore this patch consolidates several -changes from elfutils upstream by Roland McGrath into a single file so that -it's easier to remove later once we upgrade. -Links to upstream gitweb of the consolidated commits follow: -- http://git.fedorahosted.org/git?p=elfutils.git;a=commit;h=7094d00a169afb27e0323f8580e817798ae7c240 -- http://git.fedorahosted.org/git?p=elfutils.git;a=commit;h=fd992543185126eb0280c1ee0883e073020499b4 -- http://git.fedorahosted.org/git?p=elfutils.git;a=commit;h=4db89f04bb59327abd7a3b60e88f2e7e73c65c79 -- http://git.fedorahosted.org/git?p=elfutils.git;a=commit;h=8f6c1795ab9d41f03805eebd55767070ade55aac -- http://git.fedorahosted.org/git?p=elfutils.git;a=commit;h=240784b48aa276822c5a61c9ad6a4355051ce259 - -Joshua Lock - 06/04/11 - -Index: elfutils-0.148/libasm/asm_newscn.c -=================================================================== ---- elfutils-0.148.orig/libasm/asm_newscn.c -+++ elfutils-0.148/libasm/asm_newscn.c -@@ -162,7 +162,6 @@ asm_newscn (ctx, scnname, type, flags) - GElf_Xword flags; - { - size_t scnname_len = strlen (scnname) + 1; -- unsigned long int hval; - AsmScn_t *result; - - /* If no context is given there might be an earlier error. */ -@@ -180,8 +179,6 @@ asm_newscn (ctx, scnname, type, flags) - return NULL; - } - -- hval = elf_hash (scnname); -- - rwlock_wrlock (ctx->lock); - - /* This is a new section. 
*/ -Index: elfutils-0.148/src/elflint.c -=================================================================== ---- elfutils-0.148.orig/src/elflint.c -+++ elfutils-0.148/src/elflint.c -@@ -707,9 +707,10 @@ section [%2d] '%s': symbol %zu: invalid - { - if (xndxdata == NULL) - { -- ERROR (gettext ("\ -+ if (!no_xndx_warned) -+ ERROR (gettext ("\ - section [%2d] '%s': symbol %zu: too large section index but no extended section index section\n"), -- idx, section_name (ebl, idx), cnt); -+ idx, section_name (ebl, idx), cnt); - no_xndx_warned = true; - } - else if (xndx < SHN_LORESERVE) -@@ -1592,10 +1593,6 @@ check_dynamic (Ebl *ebl, GElf_Ehdr *ehdr - [DT_STRSZ] = true, - [DT_SYMENT] = true - }; -- GElf_Addr reladdr = 0; -- GElf_Word relsz = 0; -- GElf_Addr pltreladdr = 0; -- GElf_Word pltrelsz = 0; - - memset (has_dt, '\0', sizeof (has_dt)); - memset (has_val_dt, '\0', sizeof (has_val_dt)); -@@ -1694,15 +1691,6 @@ section [%2d] '%s': entry %zu: level 2 t - section [%2d] '%s': entry %zu: DT_PLTREL value must be DT_REL or DT_RELA\n"), - idx, section_name (ebl, idx), cnt); - -- if (dyn->d_tag == DT_REL) -- reladdr = dyn->d_un.d_ptr; -- if (dyn->d_tag == DT_RELSZ) -- relsz = dyn->d_un.d_val; -- if (dyn->d_tag == DT_JMPREL) -- pltreladdr = dyn->d_un.d_ptr; -- if (dyn->d_tag == DT_PLTRELSZ) -- pltrelsz = dyn->d_un.d_val; -- - /* Check that addresses for entries are in loaded segments. */ - switch (dyn->d_tag) - { -Index: elfutils-0.148/src/ldgeneric.c -=================================================================== ---- elfutils-0.148.orig/src/ldgeneric.c -+++ elfutils-0.148/src/ldgeneric.c -@@ -285,12 +285,10 @@ static int - check_for_duplicate2 (struct usedfiles *newp, struct usedfiles *list) - { - struct usedfiles *first; -- struct usedfiles *prevp; - - if (list == NULL) - return 0; - -- prevp = list; - list = first = list->next; - do - { -Index: elfutils-0.148/src/ldscript.y -=================================================================== ---- elfutils-0.148.orig/src/ldscript.y -+++ elfutils-0.148/src/ldscript.y -@@ -802,12 +802,9 @@ add_versions (struct version *versions) - - do - { -- struct version *oldp; -- - add_id_list (versions->versionname, versions->local_names, true); - add_id_list (versions->versionname, versions->global_names, false); - -- oldp = versions; - versions = versions->next; - } - while (versions != NULL); -Index: elfutils-0.148/src/unstrip.c -=================================================================== ---- elfutils-0.148.orig/src/unstrip.c -+++ elfutils-0.148/src/unstrip.c -@@ -1301,7 +1301,6 @@ more sections in stripped file than debu - /* Match each debuginfo section with its corresponding stripped section. 
*/ - bool check_prelink = false; - Elf_Scn *unstripped_symtab = NULL; -- size_t unstripped_strtab_ndx = SHN_UNDEF; - size_t alloc_avail = 0; - scn = NULL; - while ((scn = elf_nextscn (unstripped, scn)) != NULL) -@@ -1313,7 +1312,6 @@ more sections in stripped file than debu - if (shdr->sh_type == SHT_SYMTAB) - { - unstripped_symtab = scn; -- unstripped_strtab_ndx = shdr->sh_link; - continue; - } - -Index: elfutils-0.148/src/ldscript.c -=================================================================== ---- elfutils-0.148.orig/src/ldscript.c -+++ elfutils-0.148/src/ldscript.c -@@ -2728,12 +2728,9 @@ add_versions (struct version *versions) - - do - { -- struct version *oldp; -- - add_id_list (versions->versionname, versions->local_names, true); - add_id_list (versions->versionname, versions->global_names, false); - -- oldp = versions; - versions = versions->next; - } - while (versions != NULL); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/testsuite-ignore-elflint.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/testsuite-ignore-elflint.diff deleted file mode 100644 index d792d5fd7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/testsuite-ignore-elflint.diff +++ /dev/null @@ -1,21 +0,0 @@ -Upstream-Status: Backport - -On many architectures this test fails because binaries/libs produced by -binutils don't pass elflint. However elfutils shouldn't FTBFS because of this. - -So we run the tests on all archs to see what breaks, but if it breaks we ignore -the result (exitcode 77 means: this test was skipped). - -Index: elfutils-0.128/tests/run-elflint-self.sh -=================================================================== ---- elfutils-0.128.orig/tests/run-elflint-self.sh 2007-07-08 21:46:16.000000000 +0000 -+++ elfutils-0.128/tests/run-elflint-self.sh 2007-07-08 21:46:49.000000000 +0000 -@@ -32,7 +32,7 @@ - # echo $1 - if [ -f $1 ]; then - testrun ../src/elflint --quiet --gnu-ld $1 || -- { echo "*** failure in $1"; status=1; } -+ { echo "*** failure in $1"; status=77; } - fi - } - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/uclibc-support-for-elfutils-0.148.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/uclibc-support-for-elfutils-0.148.patch deleted file mode 100644 index 3cf16ac92..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/uclibc-support-for-elfutils-0.148.patch +++ /dev/null @@ -1,91 +0,0 @@ -on uclibc systems libintl and libuargp are separate from libc. -so they need to be specified on commandline when we use proxy-libintl -then libintl is a static archive so it should be listed last since -elfutils does not respect disable-nls we need to link in libintl - -We add a new option --enable-uclibc which will be used to control -the uclibc specific configurations during build. 
- -Signed-off-by: Khem Raj - -Upstream-Status: Inappropriate [uclibc specific] - -Index: elfutils-0.148/configure.ac -=================================================================== ---- elfutils-0.148.orig/configure.ac -+++ elfutils-0.148/configure.ac -@@ -55,9 +55,16 @@ AS_IF([test "$use_locks" = yes], [AC_DEF - - AH_TEMPLATE([USE_LOCKS], [Defined if libraries should be thread-safe.]) - -+AC_ARG_ENABLE([uclibc], -+AS_HELP_STRING([--enable-uclibc], [Use uclibc for system libraries]), -+use_uclibc=yes, use_uclibc=no) -+AM_CONDITIONAL(USE_UCLIBC, test "$use_uclibc" = yes) -+AS_IF([test "$use_uclibc" = yes], [AC_DEFINE(USE_UCLIBC)]) -+ -+AH_TEMPLATE([USE_UCLIBC], [Defined if uclibc libraries are used.]) -+ - dnl Add all the languages for which translations are available. - ALL_LINGUAS= -- - AC_PROG_CC - AC_PROG_RANLIB - AC_PROG_YACC -Index: elfutils-0.148/libelf/Makefile.am -=================================================================== ---- elfutils-0.148.orig/libelf/Makefile.am -+++ elfutils-0.148/libelf/Makefile.am -@@ -93,7 +93,12 @@ if !MUDFLAP - libelf_pic_a_SOURCES = - am_libelf_pic_a_OBJECTS = $(libelf_a_SOURCES:.c=.os) - -+ - libelf_so_LDLIBS = -+if USE_UCLIBC -+libelf_so_LDLIBS += -lintl -luargp -+endif -+ - if USE_LOCKS - libelf_so_LDLIBS += -lpthread - endif -Index: elfutils-0.148/libdw/Makefile.am -=================================================================== ---- elfutils-0.148.orig/libdw/Makefile.am -+++ elfutils-0.148/libdw/Makefile.am -@@ -98,6 +98,11 @@ if !MUDFLAP - libdw_pic_a_SOURCES = - am_libdw_pic_a_OBJECTS = $(libdw_a_SOURCES:.c=.os) - -+libdw_so_LDLIBS = -+if USE_UCLIBC -+libdw_so_LDLIBS += -lintl -luargp -+endif -+ - libdw_so_SOURCES = - libdw.so: $(srcdir)/libdw.map libdw_pic.a \ - ../libdwfl/libdwfl_pic.a ../libebl/libebl.a \ -@@ -108,7 +113,7 @@ libdw.so: $(srcdir)/libdw.map libdw_pic. 
- -Wl,--enable-new-dtags,-rpath,$(pkglibdir) \ - -Wl,--version-script,$<,--no-undefined \ - -Wl,--whole-archive $(filter-out $<,$^) -Wl,--no-whole-archive\ -- -ldl $(zip_LIBS) -+ -ldl $(zip_LIBS) $(libdw_so_LDLIBS) - if readelf -d $@ | fgrep -q TEXTREL; then exit 1; fi - ln -fs $@ $@.$(VERSION) - -Index: elfutils-0.148/libcpu/Makefile.am -=================================================================== ---- elfutils-0.148.orig/libcpu/Makefile.am -+++ elfutils-0.148/libcpu/Makefile.am -@@ -63,6 +63,10 @@ i386_parse_CFLAGS = -DNMNES="`wc -l < i3 - i386_lex.o: i386_parse.h - i386_gendis_LDADD = $(libeu) -lm $(libmudflap) - -+if USE_UCLIBC -+i386_gendis_LDADD += -luargp -lintl -+endif -+ - i386_parse.h: i386_parse.c ; - - noinst_HEADERS = memory-access.h i386_parse.h i386_data.h diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/x86_64_dis.h b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/x86_64_dis.h deleted file mode 100644 index a0198bed9..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.148/x86_64_dis.h +++ /dev/null @@ -1,1632 +0,0 @@ -#define MNEMONIC_BITS 10 -#define SUFFIX_BITS 3 -#define FCT1_BITS 7 -#define STR1_BITS 4 -#define OFF1_1_BITS 7 -#define OFF1_1_BIAS 3 -#define OFF1_2_BITS 7 -#define OFF1_2_BIAS 4 -#define OFF1_3_BITS 1 -#define OFF1_3_BIAS 7 -#define FCT2_BITS 6 -#define STR2_BITS 2 -#define OFF2_1_BITS 7 -#define OFF2_1_BIAS 5 -#define OFF2_2_BITS 7 -#define OFF2_2_BIAS 4 -#define OFF2_3_BITS 4 -#define OFF2_3_BIAS 7 -#define FCT3_BITS 4 -#define STR3_BITS 1 -#define OFF3_1_BITS 6 -#define OFF3_1_BIAS 10 -#define OFF3_2_BITS 1 -#define OFF3_2_BIAS 21 - -#include - -#define suffix_none 0 -#define suffix_w 1 -#define suffix_w0 2 -#define suffix_W 3 -#define suffix_tttn 4 -#define suffix_D 7 -#define suffix_w1 5 -#define suffix_W1 6 - -static const opfct_t op1_fct[] = -{ - NULL, - FCT_MOD$R_M, - FCT_Mod$R_m, - FCT_abs, - FCT_ax, - FCT_ax$w, - FCT_ccc, - FCT_ddd, - FCT_disp8, - FCT_ds_bx, - FCT_ds_si, - FCT_dx, - FCT_es_di, - FCT_freg, - FCT_imm$s, - FCT_imm$w, - FCT_imm16, - FCT_imm64$w, - FCT_imm8, - FCT_imms8, - FCT_mmxreg, - FCT_mod$16r_m, - FCT_mod$64r_m, - FCT_mod$8r_m, - FCT_mod$r_m, - FCT_mod$r_m$w, - FCT_reg, - FCT_reg$w, - FCT_reg64, - FCT_rel, - FCT_sel, - FCT_sreg2, - FCT_sreg3, - FCT_string, - FCT_xmmreg, -}; -static const char op1_str[] = - "%ax\0" - "%cl\0" - "%rax\0" - "%st\0" - "%xmm0\0" - "*"; -static const uint8_t op1_str_idx[] = { - 0, - 4, - 8, - 13, - 17, - 23, -}; -static const opfct_t op2_fct[] = -{ - NULL, - FCT_MOD$R_M, - FCT_Mod$R_m, - FCT_abs, - FCT_absval, - FCT_ax$w, - FCT_ccc, - FCT_ddd, - FCT_ds_si, - FCT_dx, - FCT_es_di, - FCT_freg, - FCT_imm8, - FCT_mmxreg, - FCT_mod$64r_m, - FCT_mod$r_m, - FCT_mod$r_m$w, - FCT_oreg, - FCT_oreg$w, - FCT_reg, - FCT_reg$w, - FCT_reg64, - FCT_sreg3, - FCT_string, - FCT_xmmreg, -}; -static const char op2_str[] = - "%rcx\0" - "%st"; -static const uint8_t op2_str_idx[] = { - 0, - 5, -}; -static const opfct_t op3_fct[] = -{ - NULL, - FCT_mmxreg, - FCT_mod$r_m, - FCT_reg, - FCT_string, - FCT_xmmreg, -}; -static const char op3_str[] = - "%rdx"; -static const uint8_t op3_str_idx[] = { - 0, -}; -static const struct instr_enc instrtab[] = -{ - { .mnemonic = MNE_adc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_adc, .rep = 0, .repe = 
0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_adc, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_adc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_adc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_add, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_add, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_add, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_add, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_add, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addsubpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addsubps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_and, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_and, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_and, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_and, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, 
.fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_and, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andnpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andnps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movslq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 21, .str2 = 0, .off2_1 = 5, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bsf, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bsr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bswap, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 26, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bt, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_btc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_btc, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_btr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 
= 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_btr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bts, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_bts, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_call, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 29, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_call, .rep = 0, .repe = 0, .suffix = 3, .modrm = 1, .fct1 = 22, .str1 = 6, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lcall, .rep = 0, .repe = 0, .suffix = 3, .modrm = 1, .fct1 = 22, .str1 = 6, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_clc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cli, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_syscall, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_clts, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sysret, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sysenter, .rep = 0, .repe = 0, 
.suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sysexit, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmov, .rep = 0, .repe = 0, .suffix = 4, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmp, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmp, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmps, .rep = 0, .repe = 1, .suffix = 1, .modrm = 0, .fct1 = 12, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 8, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpxchg, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 15, .off1_2 = 11, 
.off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 8, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cpuid, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtdq2pd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtpd2dq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttpd2dq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_dec, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_div, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_emms, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_enter, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 16, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 12, .str2 = 0, .off2_1 = 19, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnop, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fchs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fabs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ftst, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fxam, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, 
.str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fld1, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldl2t, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldl2e, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldpi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldlg2, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldln2, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldz, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_f2xm1, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fyl2x, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fptan, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fpatan, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fxtract, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fprem1, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdecstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fincstp, .rep = 0, .repe = 0, .suffix = 0, 
.modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fprem, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fyl2xp1, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsqrt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsincos, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_frndint, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fscale, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsin, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcos, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fadd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fadd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fadd, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fmul, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fmul, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fmul, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, 
.off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsub, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsubr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsubr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsubr, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fst, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fst, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fstp, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldenv, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldcw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnstenv, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnstcw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { 
.mnemonic = MNE_fxch, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_faddp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fiadd, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmove, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fmulp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fimul, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsubp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fisub, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fsubrp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fisubr, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnstsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 1, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fbld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcomip, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fbstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, 
.str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fchs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fclex, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_finit, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fwait, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnclex, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmove, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovbe, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovnb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovne, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovnbe, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovnu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcom, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, 
.off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcom, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcomp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcomp, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcompp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcomi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcomip, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fucomi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fucomip, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcos, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdecstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdiv, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdiv, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdiv, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fidivl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 
0, }, - { .mnemonic = MNE_fdivp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fidiv, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivrp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 2, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivr, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fidivrl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fidivr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fdivrp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 4, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 11, .str2 = 0, .off2_1 = 8, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ffree, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovbe, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ficom, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fcmovu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ficomp, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fild, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, 
.fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fildl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fildll, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fincstp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fninit, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fist, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fistp, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fistpll, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fisttp, .rep = 0, .repe = 0, .suffix = 5, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fisttpll, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fldt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fstpt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fld, .rep = 0, .repe = 0, .suffix = 7, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fucom, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 
= 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_frstor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fucomp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 13, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnsave, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fnstsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_hlt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_idiv, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_imul, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_imul, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_imul, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 14, .str1 = 0, .off1_1 = 13, .off1_2 = 2, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 3, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_in, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_in, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 11, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 3, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_inc, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ins, .rep = 1, .repe = 0, .suffix = 1, .modrm = 0, .fct1 = 11, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 10, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_int, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = 
MNE_int3, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_invd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_swapgs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_invlpg, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_iret, .rep = 0, .repe = 0, .suffix = 6, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_j, .rep = 0, .repe = 0, .suffix = 4, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_j, .rep = 0, .repe = 0, .suffix = 4, .modrm = 0, .fct1 = 29, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_set, .rep = 0, .repe = 0, .suffix = 4, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_jmp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_jmp, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 29, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_jmp, .rep = 0, .repe = 0, .suffix = 3, .modrm = 1, .fct1 = 22, .str1 = 6, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ljmp, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 30, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 4, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ljmp, .rep = 0, .repe = 0, .suffix = 3, .modrm = 1, .fct1 = 22, .str1 = 6, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lahf, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 
0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lar, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 21, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lea, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 5, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_leave, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lfs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lgs, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lgdt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lidt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lldt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 21, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lmsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 21, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lock, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lods, .rep = 1, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 10, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 3, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_loop, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_loope, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_loopne, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 8, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 
= 0, .off3_2 = 0, }, - { .mnemonic = MNE_lsl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 21, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ltr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 21, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 17, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 18, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 3, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 35, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 5, .str1 = 0, .off1_1 = 37, .off1_2 = 3, .off1_3 = 0, .fct2 = 3, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 6, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 21, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 28, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 6, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 7, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 21, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 28, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 7, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 32, .str1 = 0, .off1_1 = 7, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mov, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 
24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 22, .str2 = 0, .off2_1 = 5, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movs, .rep = 1, .repe = 0, .suffix = 1, .modrm = 0, .fct1 = 10, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 10, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movsbl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movswl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 21, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movzbl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movzwl, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 21, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mul, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_neg, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pause, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_popcnt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_not, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_or, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_or, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_or, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, 
.off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_or, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_or, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_out, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 5, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 12, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_out, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 5, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 9, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_outs, .rep = 1, .repe = 0, .suffix = 1, .modrm = 0, .fct1 = 10, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 9, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pop, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 28, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pop, .rep = 0, .repe = 0, .suffix = 3, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pop, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 32, .str1 = 0, .off1_1 = 7, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_popf, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_push, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 28, .str1 = 0, .off1_1 = 10, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pushq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_push, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 28, .str1 = 0, .off1_1 = 2, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pop, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 28, .str1 = 0, .off1_1 = 2, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_push, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 14, .str1 = 0, .off1_1 = 5, .off1_2 = 2, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_push, 
.rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 31, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_push, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 32, .str1 = 0, .off1_1 = 7, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pushf, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rdmsr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rdpmc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rdtsc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ret, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ret, .rep = 0, .repe = 0, .suffix = 3, .modrm = 0, .fct1 = 16, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lret, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 
= 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lret, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 16, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rol, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rol, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rol, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ror, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ror, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ror, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rsm, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sahf, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sar, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sar, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sar, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sbb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sbb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { 
.mnemonic = MNE_sbb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sbb, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sbb, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_scas, .rep = 0, .repe = 1, .suffix = 0, .modrm = 0, .fct1 = 12, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 3, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_set, .rep = 0, .repe = 0, .suffix = 4, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shl, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 2, .str3 = 0, .off3_1 = 6, .off3_2 = 0, }, - { .mnemonic = MNE_shld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 2, .str3 = 0, .off3_1 = 6, .off3_2 = 0, }, - { .mnemonic = MNE_shr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shr, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_shrd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 2, .str3 = 0, .off3_1 = 6, .off3_2 = 0, }, - { .mnemonic = MNE_shrd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 33, .str1 = 2, .off1_1 
= 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 2, .str3 = 0, .off3_1 = 6, .off3_2 = 0, }, - { .mnemonic = MNE_vmcall, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmlaunch, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmresume, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmxoff, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmread, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 28, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 14, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmwrite, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 22, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 21, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sgdt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_monitor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 3, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 1, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 4, .str3 = 1, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mwait, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 33, .str1 = 3, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 23, .str2 = 1, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sidt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sldt, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_smsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_stc, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_std, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 
0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sti, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_stos, .rep = 1, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 5, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 10, .str2 = 0, .off2_1 = 3, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_str, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sub, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sub, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_test, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_test, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_test, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ud2a, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_verr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 21, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_verw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 21, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_wbinvd, .rep = 
0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetch, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetchw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 23, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetchnta, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetcht0, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetcht1, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_prefetcht2, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_nop, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_wrmsr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xadd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 15, .off1_2 = 11, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 8, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xchg, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xchg, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 4, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 17, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xlat, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 9, .str1 = 0, .off1_1 = 5, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 27, .str1 = 0, .off1_1 = 7, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 25, .str1 = 
0, .off1_1 = 5, .off1_2 = 9, .off1_3 = 0, .fct2 = 20, .str2 = 0, .off2_1 = 5, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 15, .str1 = 0, .off1_1 = 5, .off1_2 = 3, .off1_3 = 0, .fct2 = 5, .str2 = 0, .off2_1 = 11, .off2_2 = 3, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xor, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 15, .str1 = 0, .off1_1 = 13, .off1_2 = 3, .off1_3 = 0, .fct2 = 16, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xor, .rep = 0, .repe = 0, .suffix = 1, .modrm = 1, .fct1 = 19, .str1 = 0, .off1_1 = 13, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 3, .off2_2 = 9, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_emms, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pand, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pand, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pandn, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pandn, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaddwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaddwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_por, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_por, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pxor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pxor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 
13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andnps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpeqps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpltps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpleps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpunordps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpneqps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpnltps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpnleps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpordps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpeqss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpltss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpless, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpunordss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, 
.off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpneqss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpnltss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpnless, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cmpordss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fxrstor, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_fxsave, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ldmxcsr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_stmxcsr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movupd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movups, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, 
.off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movupd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movups, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movddup, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movsldup, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhlps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhlpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhlps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_unpcklpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_unpcklps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_unpckhpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, 
- { .mnemonic = MNE_unpckhps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movshdup, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlhps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlhpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movlhps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movhps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movapd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movaps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movapd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movaps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtsi2sd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtsi2ss, 
.rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtpi2pd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtpi2ps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movntpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movntps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttsd2si, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttss2si, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttpd2pi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttps2pi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtpd2pi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtsd2si, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtss2si, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtps2pi, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ucomisd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ucomiss, .rep = 0, 
.repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_comisd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_comiss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_getsec, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movmskpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 26, .off1_2 = 0, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movmskps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sqrtpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sqrtsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sqrtss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sqrtps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rsqrtss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rsqrtps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcpss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_rcpps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 
= 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andnpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_andnps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_orpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_orps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xorpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_xorps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_addps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mulsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mulss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mulpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 
0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mulps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtsd2ss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtss2sd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtpd2ps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtps2pd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtps2dq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvttps2dq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_cvtdq2ps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_subsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_subss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_subpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_subps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_minsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_minss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, 
.off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_minpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_minps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_divsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_divss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_divpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_divps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maxsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maxss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maxpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maxps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpcklbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpcklbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpcklwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpcklwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 
0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckldq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckldq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packsswb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packsswb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packuswb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packuswb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, 
.str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packssdw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_packssdw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpcklqdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_punpckhqdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movdqa, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movdqu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pshufd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 
16, .off3_2 = 0, }, - { .mnemonic = MNE_pshuflw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_pshufhw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_pshufw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 1, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 1, .str3 = 0, .off3_1 = 8, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_haddpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_haddps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_hsubpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_hsubps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { 
.mnemonic = MNE_movq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movdqa, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movdqu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 1, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movnti, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 26, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pinsrw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_pinsrw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 1, .str3 = 0, .off3_1 = 8, .off3_2 = 0, }, - { .mnemonic = MNE_pextrw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 3, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_pextrw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 3, .str3 = 0, .off3_1 = 8, .off3_2 = 0, }, - { .mnemonic = MNE_shufpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_shufps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 8, .off3_2 = 0, }, - { .mnemonic = MNE_psrlw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrld, .rep = 0, .repe = 
0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmullw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmullw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movdq2q, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 26, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movq2dq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 20, .str1 = 0, .off1_1 = 26, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovmskb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 26, .off1_2 = 0, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovmskb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 20, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 19, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubusb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubusb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, 
.str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubusw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubusw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddusb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddusb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddusw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddusw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxub, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pavgb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pavgb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psraw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psraw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, 
.off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrad, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrad, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pavgw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pavgw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhuw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhuw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movntdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 34, .str1 = 0, .off1_1 = 23, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_movntq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 20, .str1 = 0, .off1_1 = 15, .off1_2 = 0, .off1_3 = 0, .fct2 = 1, .str2 = 0, .off2_1 = 11, .off2_2 = 17, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, 
.off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lddqu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pslld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pslld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 
= 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmuludq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmuludq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psadbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psadbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maskmovdqu, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 34, .str1 = 0, .off1_1 = 26, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_maskmovq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 20, .str1 = 0, .off1_1 = 18, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psubq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 
0, }, - { .mnemonic = MNE_psubq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_paddd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pshufb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pshufb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phaddsw, .rep = 0, 
.repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaddubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaddubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phsubsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psignd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 
1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhrsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulhrsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pabsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 21, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_palignr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_palignr, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 1, .str2 = 0, .off2_1 = 19, .off2_2 = 25, .off2_3 = 0, .fct3 = 1, .str3 = 0, .off3_1 = 16, .off3_2 = 0, }, - { .mnemonic = MNE_vmclear, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmxon, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 21, .off1_2 = 25, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmptrld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_vmptrst, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, 
.off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psraw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psraw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrad, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrad, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pslld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pslld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrlq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, 
.off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psrldq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_psllq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 21, .off1_2 = 0, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 16, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pslldq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 18, .str1 = 0, .off1_1 = 29, .off1_2 = 0, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 24, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_lfence, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mfence, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_sfence, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 0, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_clflush, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 24, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_INVALID, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 1, .str1 = 0, .off1_1 = 13, .off1_2 = 17, .off1_3 = 0, .fct2 = 13, .str2 = 0, .off2_1 = 13, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_blendps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_blendpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_blendvps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 33, .str1 = 5, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_blendvpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 33, .str1 = 5, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_dpps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 
5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_dppd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_insertps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_movntdqa, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_mpsadbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_packusdw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pblendvb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 33, .str1 = 5, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pblendw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpeqq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpestri, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpestrm, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpistri, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpistrm, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pcmpgtq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_phminposuw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, 
.str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pinsrb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pinsrd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 15, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxud, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmaxuw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminsb, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminud, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pminuw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxbd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxbq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, 
.off3_2 = 0, }, - { .mnemonic = MNE_pmovsxwq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovsxdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxbw, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxbd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxbq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxwd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxwq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmovzxdq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmuldq, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_pmulld, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_ptest, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 2, .str1 = 0, .off1_1 = 29, .off1_2 = 33, .off1_3 = 0, .fct2 = 24, .str2 = 0, .off2_1 = 29, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, - { .mnemonic = MNE_roundps, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_roundpd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_roundss, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { 
.mnemonic = MNE_roundsd, .rep = 0, .repe = 0, .suffix = 0, .modrm = 1, .fct1 = 18, .str1 = 0, .off1_1 = 37, .off1_2 = 0, .off1_3 = 0, .fct2 = 2, .str2 = 0, .off2_1 = 27, .off2_2 = 33, .off2_3 = 0, .fct3 = 5, .str3 = 0, .off3_1 = 24, .off3_2 = 0, }, - { .mnemonic = MNE_pop, .rep = 0, .repe = 0, .suffix = 0, .modrm = 0, .fct1 = 31, .str1 = 0, .off1_1 = 0, .off1_2 = 0, .off1_3 = 0, .fct2 = 0, .str2 = 0, .off2_1 = 0, .off2_2 = 0, .off2_3 = 0, .fct3 = 0, .str3 = 0, .off3_1 = 0, .off3_2 = 0, }, -}; -static const uint8_t match_data[] = -{ - 0x1, 0xfe, 0x14, - 0x2, 0xfe, 0x80, 0x38, 0x10, - 0x2, 0xfe, 0x82, 0x38, 0x10, - 0x2, 0xfe, 0x10, 0, 0, - 0x2, 0xfe, 0x12, 0, 0, - 0x1, 0xfe, 0x4, - 0x2, 0xfe, 0x80, 0x38, 0, - 0x12, 0x83, 0x38, 0, - 0x2, 0xfe, 0, 0, 0, - 0x2, 0xfe, 0x2, 0, 0, - 0x34, 0x66, 0xf, 0xd0, 0, 0, - 0x34, 0xf2, 0xf, 0xd0, 0, 0, - 0x1, 0xfe, 0x24, - 0x2, 0xfe, 0x80, 0x38, 0x20, - 0x2, 0xfe, 0x82, 0x38, 0x20, - 0x2, 0xfe, 0x20, 0, 0, - 0x2, 0xfe, 0x22, 0, 0, - 0x34, 0x66, 0xf, 0x54, 0, 0, - 0x23, 0xf, 0x54, 0, 0, - 0x34, 0x66, 0xf, 0x55, 0, 0, - 0x23, 0xf, 0x55, 0, 0, - 0x12, 0x63, 0, 0, - 0x23, 0xf, 0xbc, 0, 0, - 0x23, 0xf, 0xbd, 0, 0, - 0x12, 0xf, 0xf8, 0xc8, - 0x23, 0xf, 0xa3, 0, 0, - 0x23, 0xf, 0xba, 0x38, 0x20, - 0x23, 0xf, 0xbb, 0, 0, - 0x23, 0xf, 0xba, 0x38, 0x38, - 0x23, 0xf, 0xb3, 0, 0, - 0x23, 0xf, 0xba, 0x38, 0x30, - 0x23, 0xf, 0xab, 0, 0, - 0x23, 0xf, 0xba, 0x38, 0x28, - 0x11, 0xe8, - 0x12, 0xff, 0x38, 0x10, - 0x12, 0xff, 0x38, 0x18, - 0x11, 0x98, - 0x11, 0x99, - 0x11, 0xf8, - 0x11, 0xfc, - 0x11, 0xfa, - 0x22, 0xf, 0x5, - 0x22, 0xf, 0x6, - 0x22, 0xf, 0x7, - 0x22, 0xf, 0x34, - 0x22, 0xf, 0x35, - 0x11, 0xf5, - 0x13, 0xf, 0xf0, 0x40, 0, 0, - 0x1, 0xfe, 0x3c, - 0x2, 0xfe, 0x80, 0x38, 0x38, - 0x12, 0x83, 0x38, 0x38, - 0x2, 0xfe, 0x38, 0, 0, - 0x2, 0xfe, 0x3a, 0, 0, - 0x34, 0xf2, 0xf, 0xc2, 0, 0, - 0x34, 0xf3, 0xf, 0xc2, 0, 0, - 0x34, 0x66, 0xf, 0xc2, 0, 0, - 0x23, 0xf, 0xc2, 0, 0, - 0x1, 0xfe, 0xa6, - 0x13, 0xf, 0xfe, 0xb0, 0, 0, - 0x23, 0xf, 0xc7, 0x38, 0x8, - 0x22, 0xf, 0xa2, - 0x34, 0xf3, 0xf, 0xe6, 0, 0, - 0x34, 0xf2, 0xf, 0xe6, 0, 0, - 0x34, 0x66, 0xf, 0xe6, 0, 0, - 0x2, 0xfe, 0xfe, 0x38, 0x8, - 0x2, 0xfe, 0xf6, 0x38, 0x30, - 0x22, 0xf, 0x77, - 0x11, 0xc8, - 0x22, 0xd9, 0xd0, - 0x22, 0xd9, 0xe0, - 0x22, 0xd9, 0xe1, - 0x22, 0xd9, 0xe4, - 0x22, 0xd9, 0xe5, - 0x22, 0xd9, 0xe8, - 0x22, 0xd9, 0xe9, - 0x22, 0xd9, 0xea, - 0x22, 0xd9, 0xeb, - 0x22, 0xd9, 0xec, - 0x22, 0xd9, 0xed, - 0x22, 0xd9, 0xee, - 0x22, 0xd9, 0xf0, - 0x22, 0xd9, 0xf1, - 0x22, 0xd9, 0xf2, - 0x22, 0xd9, 0xf3, - 0x22, 0xd9, 0xf4, - 0x22, 0xd9, 0xf5, - 0x22, 0xd9, 0xf6, - 0x22, 0xd9, 0xf7, - 0x22, 0xd9, 0xf8, - 0x22, 0xd9, 0xf9, - 0x22, 0xd9, 0xfa, - 0x22, 0xd9, 0xfb, - 0x22, 0xd9, 0xfc, - 0x22, 0xd9, 0xfd, - 0x22, 0xd9, 0xfe, - 0x22, 0xd9, 0xff, - 0x12, 0xd8, 0xf8, 0xc0, - 0x12, 0xdc, 0xf8, 0xc0, - 0x2, 0xfb, 0xd8, 0x38, 0, - 0x12, 0xd8, 0xf8, 0xc8, - 0x12, 0xdc, 0xf8, 0xc8, - 0x2, 0xfb, 0xd8, 0x38, 0x8, - 0x12, 0xd8, 0xf8, 0xe0, - 0x12, 0xdc, 0xf8, 0xe0, - 0x2, 0xfb, 0xd8, 0x38, 0x20, - 0x12, 0xd8, 0xf8, 0xe8, - 0x12, 0xdc, 0xf8, 0xe8, - 0x2, 0xfb, 0xd8, 0x38, 0x28, - 0x12, 0xdd, 0xf8, 0xd0, - 0x2, 0xfb, 0xd9, 0x38, 0x10, - 0x12, 0xdd, 0xf8, 0xd8, - 0x2, 0xfb, 0xd9, 0x38, 0x18, - 0x12, 0xd9, 0x38, 0x20, - 0x12, 0xd9, 0x38, 0x28, - 0x12, 0xd9, 0x38, 0x30, - 0x12, 0xd9, 0x38, 0x38, - 0x12, 0xd9, 0xf8, 0xc8, - 0x12, 0xde, 0xf8, 0xc0, - 0x12, 0xda, 0xf8, 0xc0, - 0x2, 0xfb, 0xda, 0x38, 0, - 0x12, 0xda, 0xf8, 0xc8, - 0x12, 0xde, 0xf8, 0xc8, - 0x2, 0xfb, 0xda, 0x38, 0x8, - 0x12, 0xde, 0xf8, 0xe0, - 0x2, 0xfb, 0xda, 
0x38, 0x20, - 0x12, 0xde, 0xf8, 0xe8, - 0x2, 0xfb, 0xda, 0x38, 0x28, - 0x22, 0xdf, 0xe0, - 0x12, 0xdf, 0x38, 0x20, - 0x12, 0xdf, 0xf8, 0xf0, - 0x12, 0xdf, 0x38, 0x30, - 0x22, 0xd9, 0xe0, - 0x33, 0x9b, 0xdb, 0xe2, - 0x33, 0x9b, 0xdb, 0xe3, - 0x11, 0x9b, - 0x22, 0xdb, 0xe2, - 0x12, 0xda, 0xf8, 0xc0, - 0x12, 0xda, 0xf8, 0xc8, - 0x12, 0xda, 0xf8, 0xd0, - 0x12, 0xda, 0xf8, 0xd8, - 0x12, 0xdb, 0xf8, 0xc0, - 0x12, 0xdb, 0xf8, 0xc8, - 0x12, 0xdb, 0xf8, 0xd0, - 0x12, 0xdb, 0xf8, 0xd8, - 0x12, 0xd8, 0xf8, 0xd0, - 0x2, 0xfb, 0xd8, 0x38, 0x10, - 0x12, 0xd8, 0xf8, 0xd8, - 0x2, 0xfb, 0xd8, 0x38, 0x18, - 0x22, 0xde, 0xd9, - 0x12, 0xdb, 0xf8, 0xf0, - 0x12, 0xdf, 0xf8, 0xf0, - 0x12, 0xdb, 0xf8, 0xe8, - 0x12, 0xdf, 0xf8, 0xe8, - 0x22, 0xd9, 0xff, - 0x22, 0xd9, 0xf6, - 0x12, 0xd8, 0xf8, 0xf0, - 0x12, 0xdc, 0xf8, 0xf0, - 0x2, 0xfb, 0xd8, 0x38, 0x30, - 0x12, 0xda, 0x38, 0x30, - 0x12, 0xde, 0xf8, 0xf0, - 0x12, 0xde, 0x38, 0x30, - 0x12, 0xde, 0xf8, 0xf8, - 0x12, 0xd8, 0xf8, 0xf8, - 0x12, 0xdc, 0xf8, 0xf8, - 0x2, 0xfb, 0xd8, 0x38, 0x38, - 0x12, 0xda, 0x38, 0x38, - 0x12, 0xde, 0x38, 0x38, - 0x12, 0xde, 0xf8, 0xf0, - 0x12, 0xdd, 0xf8, 0xc0, - 0x12, 0xda, 0xf8, 0xd0, - 0x2, 0xfb, 0xda, 0x38, 0x10, - 0x12, 0xda, 0xf8, 0xd8, - 0x2, 0xfb, 0xda, 0x38, 0x18, - 0x12, 0xdf, 0x38, 0, - 0x12, 0xdb, 0x38, 0, - 0x12, 0xdf, 0x38, 0x28, - 0x22, 0xd9, 0xf7, - 0x22, 0xdb, 0xe3, - 0x2, 0xfb, 0xdb, 0x38, 0x10, - 0x2, 0xfb, 0xdb, 0x38, 0x18, - 0x12, 0xdf, 0x38, 0x38, - 0x2, 0xfb, 0xdb, 0x38, 0x8, - 0x12, 0xdd, 0x38, 0x8, - 0x12, 0xdb, 0x38, 0x28, - 0x12, 0xdb, 0x38, 0x38, - 0x12, 0xd9, 0xf8, 0xc0, - 0x2, 0xfb, 0xd9, 0x38, 0, - 0x12, 0xdd, 0xf8, 0xe0, - 0x12, 0xdd, 0x38, 0x20, - 0x12, 0xdd, 0xf8, 0xe8, - 0x12, 0xdd, 0x38, 0x30, - 0x12, 0xdd, 0x38, 0x38, - 0x11, 0xf4, - 0x2, 0xfe, 0xf6, 0x38, 0x38, - 0x2, 0xfe, 0xf6, 0x38, 0x28, - 0x23, 0xf, 0xaf, 0, 0, - 0x2, 0xfd, 0x69, 0, 0, - 0x1, 0xfe, 0xe4, - 0x1, 0xfe, 0xec, - 0x2, 0xfe, 0xfe, 0x38, 0, - 0x1, 0xfe, 0x6c, - 0x11, 0xcd, - 0x11, 0xcc, - 0x22, 0xf, 0x8, - 0x33, 0xf, 0x1, 0xf8, - 0x23, 0xf, 0x1, 0x38, 0x38, - 0x11, 0xcf, - 0x1, 0xf0, 0x70, - 0x12, 0xf, 0xf0, 0x80, - 0x13, 0xf, 0xf0, 0x90, 0x38, 0, - 0x11, 0xe3, - 0x11, 0xeb, - 0x11, 0xe9, - 0x12, 0xff, 0x38, 0x20, - 0x11, 0xea, - 0x12, 0xff, 0x38, 0x28, - 0x11, 0x9f, - 0x23, 0xf, 0x2, 0, 0, - 0x12, 0x8d, 0, 0, - 0x11, 0xc9, - 0x23, 0xf, 0xb4, 0, 0, - 0x23, 0xf, 0xb5, 0, 0, - 0x23, 0xf, 0x1, 0x38, 0x10, - 0x23, 0xf, 0x1, 0x38, 0x18, - 0x23, 0xf, 0, 0x38, 0x10, - 0x23, 0xf, 0x1, 0x38, 0x30, - 0x11, 0xf0, - 0x1, 0xfe, 0xac, - 0x11, 0xe2, - 0x11, 0xe1, - 0x11, 0xe0, - 0x23, 0xf, 0x3, 0, 0, - 0x23, 0xf, 0xb2, 0, 0, - 0x23, 0xf, 0, 0x38, 0x18, - 0x2, 0xfe, 0x88, 0, 0, - 0x2, 0xfe, 0x8a, 0, 0, - 0x2, 0xfe, 0xc6, 0x38, 0, - 0x1, 0xf0, 0xb0, - 0x1, 0xfe, 0xa0, - 0x1, 0xfe, 0xa2, - 0x23, 0xf, 0x20, 0xc0, 0xc0, - 0x23, 0xf, 0x22, 0xc0, 0xc0, - 0x23, 0xf, 0x21, 0xc0, 0xc0, - 0x23, 0xf, 0x23, 0xc0, 0xc0, - 0x12, 0x8c, 0, 0, - 0x12, 0x8e, 0, 0, - 0x1, 0xfe, 0xa4, - 0x23, 0xf, 0xbe, 0, 0, - 0x23, 0xf, 0xbf, 0, 0, - 0x23, 0xf, 0xb6, 0, 0, - 0x23, 0xf, 0xb7, 0, 0, - 0x2, 0xfe, 0xf6, 0x38, 0x20, - 0x2, 0xfe, 0xf6, 0x38, 0x18, - 0x22, 0xf3, 0x90, - 0x11, 0x90, - 0x34, 0xf3, 0xf, 0xb8, 0, 0, - 0x2, 0xfe, 0xf6, 0x38, 0x10, - 0x2, 0xfe, 0x8, 0, 0, - 0x2, 0xfe, 0xa, 0, 0, - 0x2, 0xfe, 0x80, 0x38, 0x8, - 0x2, 0xfe, 0x82, 0x38, 0x8, - 0x1, 0xfe, 0xc, - 0x1, 0xfe, 0xe6, - 0x1, 0xfe, 0xee, - 0x1, 0xfe, 0x6e, - 0x12, 0x8f, 0xf8, 0xc0, - 0x12, 0x8f, 0x38, 0, - 0x12, 0xf, 0xc7, 0x81, - 0x11, 0x9d, - 0x12, 0xff, 0xf8, 0xf0, - 0x12, 0xff, 0x38, 0x30, - 0x1, 0xf8, 
0x50, - 0x1, 0xf8, 0x58, - 0x1, 0xfd, 0x68, - 0x1, 0xe7, 0x6, - 0x12, 0xf, 0xc7, 0x80, - 0x11, 0x9c, - 0x2, 0xfe, 0xd0, 0x38, 0x10, - 0x2, 0xfe, 0xd2, 0x38, 0x10, - 0x2, 0xfe, 0xc0, 0x38, 0x10, - 0x2, 0xfe, 0xd0, 0x38, 0x18, - 0x2, 0xfe, 0xd2, 0x38, 0x18, - 0x2, 0xfe, 0xc0, 0x38, 0x18, - 0x22, 0xf, 0x32, - 0x22, 0xf, 0x33, - 0x22, 0xf, 0x31, - 0x11, 0xc3, - 0x11, 0xc2, - 0x11, 0xcb, - 0x11, 0xca, - 0x2, 0xfe, 0xd0, 0x38, 0, - 0x2, 0xfe, 0xd2, 0x38, 0, - 0x2, 0xfe, 0xc0, 0x38, 0, - 0x2, 0xfe, 0xd0, 0x38, 0x8, - 0x2, 0xfe, 0xd2, 0x38, 0x8, - 0x2, 0xfe, 0xc0, 0x38, 0x8, - 0x22, 0xf, 0xaa, - 0x11, 0x9e, - 0x2, 0xfe, 0xd0, 0x38, 0x38, - 0x2, 0xfe, 0xd2, 0x38, 0x38, - 0x2, 0xfe, 0xc0, 0x38, 0x38, - 0x2, 0xfe, 0x18, 0, 0, - 0x2, 0xfe, 0x1a, 0, 0, - 0x1, 0xfe, 0x1c, - 0x2, 0xfe, 0x80, 0x38, 0x18, - 0x2, 0xfe, 0x82, 0x38, 0x18, - 0x1, 0xfe, 0xae, - 0x13, 0xf, 0xf0, 0x90, 0x38, 0, - 0x2, 0xfe, 0xd0, 0x38, 0x20, - 0x2, 0xfe, 0xd2, 0x38, 0x20, - 0x2, 0xfe, 0xc0, 0x38, 0x20, - 0x2, 0xfe, 0xd0, 0x38, 0x28, - 0x23, 0xf, 0xa4, 0, 0, - 0x23, 0xf, 0xa5, 0, 0, - 0x2, 0xfe, 0xd2, 0x38, 0x28, - 0x2, 0xfe, 0xc0, 0x38, 0x28, - 0x23, 0xf, 0xac, 0, 0, - 0x23, 0xf, 0xad, 0, 0, - 0x33, 0xf, 0x1, 0xc1, - 0x33, 0xf, 0x1, 0xc2, - 0x33, 0xf, 0x1, 0xc3, - 0x33, 0xf, 0x1, 0xc4, - 0x23, 0xf, 0x78, 0, 0, - 0x23, 0xf, 0x79, 0, 0, - 0x23, 0xf, 0x1, 0x38, 0, - 0x33, 0xf, 0x1, 0xc8, - 0x33, 0xf, 0x1, 0xc9, - 0x23, 0xf, 0x1, 0x38, 0x8, - 0x23, 0xf, 0, 0x38, 0, - 0x23, 0xf, 0x1, 0x38, 0x20, - 0x11, 0xf9, - 0x11, 0xfd, - 0x11, 0xfb, - 0x1, 0xfe, 0xaa, - 0x23, 0xf, 0, 0x38, 0x8, - 0x2, 0xfe, 0x28, 0, 0, - 0x2, 0xfe, 0x2a, 0, 0, - 0x1, 0xfe, 0x2c, - 0x2, 0xfe, 0x80, 0x38, 0x28, - 0x2, 0xfe, 0x82, 0x38, 0x28, - 0x2, 0xfe, 0x84, 0, 0, - 0x1, 0xfe, 0xa8, - 0x2, 0xfe, 0xf6, 0x38, 0, - 0x22, 0xf, 0xb, - 0x23, 0xf, 0, 0x38, 0x20, - 0x23, 0xf, 0, 0x38, 0x28, - 0x22, 0xf, 0x9, - 0x23, 0xf, 0xd, 0x38, 0, - 0x23, 0xf, 0xd, 0x38, 0x8, - 0x23, 0xf, 0x18, 0x38, 0, - 0x23, 0xf, 0x18, 0x38, 0x8, - 0x23, 0xf, 0x18, 0x38, 0x10, - 0x23, 0xf, 0x18, 0x38, 0x18, - 0x23, 0xf, 0x1f, 0, 0, - 0x22, 0xf, 0x30, - 0x13, 0xf, 0xfe, 0xc0, 0, 0, - 0x2, 0xfe, 0x86, 0, 0, - 0x1, 0xf8, 0x90, - 0x11, 0xd7, - 0x2, 0xfe, 0x30, 0, 0, - 0x2, 0xfe, 0x32, 0, 0, - 0x1, 0xfe, 0x34, - 0x2, 0xfe, 0x80, 0x38, 0x30, - 0x2, 0xfe, 0x82, 0x38, 0x30, - 0x22, 0xf, 0x77, - 0x34, 0x66, 0xf, 0xdb, 0, 0, - 0x23, 0xf, 0xdb, 0, 0, - 0x34, 0x66, 0xf, 0xdf, 0, 0, - 0x23, 0xf, 0xdf, 0, 0, - 0x34, 0x66, 0xf, 0xf5, 0, 0, - 0x23, 0xf, 0xf5, 0, 0, - 0x34, 0x66, 0xf, 0xeb, 0, 0, - 0x23, 0xf, 0xeb, 0, 0, - 0x34, 0x66, 0xf, 0xef, 0, 0, - 0x23, 0xf, 0xef, 0, 0, - 0x23, 0xf, 0x55, 0, 0, - 0x23, 0xf, 0x54, 0, 0, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x1, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x2, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x3, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x4, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x5, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x6, - 0x24, 0xf, 0xc2, 0, 0, 0xff, 0x7, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x1, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x2, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x3, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x4, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x5, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x6, - 0x35, 0xf3, 0xf, 0xc2, 0, 0, 0xff, 0x7, - 0x23, 0xf, 0xae, 0x38, 0x8, - 0x23, 0xf, 0xae, 0x38, 0, - 0x23, 0xf, 0xae, 0x38, 0x10, - 0x23, 0xf, 0xae, 0x38, 0x18, - 0x34, 0xf2, 0xf, 0x10, 0, 0, - 0x34, 0xf3, 0xf, 0x10, 0, 0, - 0x34, 0x66, 0xf, 0x10, 0, 0, - 0x23, 0xf, 0x10, 0, 0, - 0x34, 0xf2, 0xf, 0x11, 0, 0, - 0x34, 0xf3, 0xf, 
0x11, 0, 0, - 0x34, 0x66, 0xf, 0x11, 0, 0, - 0x23, 0xf, 0x11, 0, 0, - 0x34, 0xf2, 0xf, 0x12, 0, 0, - 0x34, 0xf3, 0xf, 0x12, 0, 0, - 0x34, 0x66, 0xf, 0x12, 0, 0, - 0x23, 0xf, 0x12, 0xc0, 0xc0, - 0x23, 0xf, 0x12, 0, 0, - 0x34, 0x66, 0xf, 0x13, 0xc0, 0xc0, - 0x23, 0xf, 0x13, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0x13, 0, 0, - 0x23, 0xf, 0x13, 0, 0, - 0x34, 0x66, 0xf, 0x14, 0, 0, - 0x23, 0xf, 0x14, 0, 0, - 0x34, 0x66, 0xf, 0x15, 0, 0, - 0x23, 0xf, 0x15, 0, 0, - 0x34, 0xf3, 0xf, 0x16, 0, 0, - 0x34, 0x66, 0xf, 0x16, 0, 0, - 0x23, 0xf, 0x16, 0xc0, 0xc0, - 0x23, 0xf, 0x16, 0, 0, - 0x34, 0x66, 0xf, 0x17, 0xc0, 0xc0, - 0x23, 0xf, 0x17, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0x17, 0, 0, - 0x23, 0xf, 0x17, 0, 0, - 0x34, 0x66, 0xf, 0x28, 0, 0, - 0x23, 0xf, 0x28, 0, 0, - 0x34, 0x66, 0xf, 0x29, 0, 0, - 0x23, 0xf, 0x29, 0, 0, - 0x34, 0xf2, 0xf, 0x2a, 0, 0, - 0x34, 0xf3, 0xf, 0x2a, 0, 0, - 0x34, 0x66, 0xf, 0x2a, 0, 0, - 0x23, 0xf, 0x2a, 0, 0, - 0x34, 0x66, 0xf, 0x2b, 0, 0, - 0x23, 0xf, 0x2b, 0, 0, - 0x34, 0xf2, 0xf, 0x2c, 0, 0, - 0x34, 0xf3, 0xf, 0x2c, 0, 0, - 0x34, 0x66, 0xf, 0x2c, 0, 0, - 0x23, 0xf, 0x2c, 0, 0, - 0x34, 0x66, 0xf, 0x2d, 0, 0, - 0x34, 0xf2, 0xf, 0x2d, 0, 0, - 0x34, 0xf3, 0xf, 0x2d, 0, 0, - 0x23, 0xf, 0x2d, 0, 0, - 0x34, 0x66, 0xf, 0x2e, 0, 0, - 0x23, 0xf, 0x2e, 0, 0, - 0x34, 0x66, 0xf, 0x2f, 0, 0, - 0x23, 0xf, 0x2f, 0, 0, - 0x22, 0xf, 0x37, - 0x34, 0x66, 0xf, 0x50, 0xc0, 0xc0, - 0x23, 0xf, 0x50, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0x51, 0, 0, - 0x34, 0xf2, 0xf, 0x51, 0, 0, - 0x34, 0xf3, 0xf, 0x51, 0, 0, - 0x23, 0xf, 0x51, 0, 0, - 0x34, 0xf3, 0xf, 0x52, 0, 0, - 0x23, 0xf, 0x52, 0, 0, - 0x34, 0xf3, 0xf, 0x53, 0, 0, - 0x23, 0xf, 0x53, 0, 0, - 0x34, 0x66, 0xf, 0x54, 0, 0, - 0x23, 0xf, 0x54, 0, 0, - 0x34, 0x66, 0xf, 0x55, 0, 0, - 0x23, 0xf, 0x55, 0, 0, - 0x34, 0x66, 0xf, 0x56, 0, 0, - 0x23, 0xf, 0x56, 0, 0, - 0x34, 0x66, 0xf, 0x57, 0, 0, - 0x23, 0xf, 0x57, 0, 0, - 0x34, 0xf2, 0xf, 0x58, 0, 0, - 0x34, 0xf3, 0xf, 0x58, 0, 0, - 0x34, 0x66, 0xf, 0x58, 0, 0, - 0x23, 0xf, 0x58, 0, 0, - 0x34, 0xf2, 0xf, 0x59, 0, 0, - 0x34, 0xf3, 0xf, 0x59, 0, 0, - 0x34, 0x66, 0xf, 0x59, 0, 0, - 0x23, 0xf, 0x59, 0, 0, - 0x34, 0xf2, 0xf, 0x5a, 0, 0, - 0x34, 0xf3, 0xf, 0x5a, 0, 0, - 0x34, 0x66, 0xf, 0x5a, 0, 0, - 0x23, 0xf, 0x5a, 0, 0, - 0x34, 0x66, 0xf, 0x5b, 0, 0, - 0x34, 0xf3, 0xf, 0x5b, 0, 0, - 0x23, 0xf, 0x5b, 0, 0, - 0x34, 0xf2, 0xf, 0x5c, 0, 0, - 0x34, 0xf3, 0xf, 0x5c, 0, 0, - 0x34, 0x66, 0xf, 0x5c, 0, 0, - 0x23, 0xf, 0x5c, 0, 0, - 0x34, 0xf2, 0xf, 0x5d, 0, 0, - 0x34, 0xf3, 0xf, 0x5d, 0, 0, - 0x34, 0x66, 0xf, 0x5d, 0, 0, - 0x23, 0xf, 0x5d, 0, 0, - 0x34, 0xf2, 0xf, 0x5e, 0, 0, - 0x34, 0xf3, 0xf, 0x5e, 0, 0, - 0x34, 0x66, 0xf, 0x5e, 0, 0, - 0x23, 0xf, 0x5e, 0, 0, - 0x34, 0xf2, 0xf, 0x5f, 0, 0, - 0x34, 0xf3, 0xf, 0x5f, 0, 0, - 0x34, 0x66, 0xf, 0x5f, 0, 0, - 0x23, 0xf, 0x5f, 0, 0, - 0x34, 0x66, 0xf, 0x60, 0, 0, - 0x23, 0xf, 0x60, 0, 0, - 0x34, 0x66, 0xf, 0x61, 0, 0, - 0x23, 0xf, 0x61, 0, 0, - 0x34, 0x66, 0xf, 0x62, 0, 0, - 0x23, 0xf, 0x62, 0, 0, - 0x34, 0x66, 0xf, 0x63, 0, 0, - 0x23, 0xf, 0x63, 0, 0, - 0x34, 0x66, 0xf, 0x64, 0, 0, - 0x23, 0xf, 0x64, 0, 0, - 0x34, 0x66, 0xf, 0x65, 0, 0, - 0x23, 0xf, 0x65, 0, 0, - 0x34, 0x66, 0xf, 0x66, 0, 0, - 0x23, 0xf, 0x66, 0, 0, - 0x34, 0x66, 0xf, 0x67, 0, 0, - 0x23, 0xf, 0x67, 0, 0, - 0x34, 0x66, 0xf, 0x68, 0, 0, - 0x23, 0xf, 0x68, 0, 0, - 0x34, 0x66, 0xf, 0x69, 0, 0, - 0x23, 0xf, 0x69, 0, 0, - 0x34, 0x66, 0xf, 0x6a, 0, 0, - 0x23, 0xf, 0x6a, 0, 0, - 0x34, 0x66, 0xf, 0x6b, 0, 0, - 0x23, 0xf, 0x6b, 0, 0, - 0x34, 0x66, 0xf, 0x6c, 0, 0, - 0x34, 0x66, 0xf, 0x6d, 0, 0, - 0x34, 0x66, 0xf, 0x6e, 0, 0, - 
0x23, 0xf, 0x6e, 0, 0, - 0x34, 0x66, 0xf, 0x6f, 0, 0, - 0x34, 0xf3, 0xf, 0x6f, 0, 0, - 0x23, 0xf, 0x6f, 0, 0, - 0x34, 0x66, 0xf, 0x70, 0, 0, - 0x34, 0xf2, 0xf, 0x70, 0, 0, - 0x34, 0xf3, 0xf, 0x70, 0, 0, - 0x23, 0xf, 0x70, 0, 0, - 0x34, 0x66, 0xf, 0x74, 0, 0, - 0x23, 0xf, 0x74, 0, 0, - 0x34, 0x66, 0xf, 0x75, 0, 0, - 0x23, 0xf, 0x75, 0, 0, - 0x34, 0x66, 0xf, 0x76, 0, 0, - 0x23, 0xf, 0x76, 0, 0, - 0x34, 0x66, 0xf, 0x7c, 0, 0, - 0x34, 0xf2, 0xf, 0x7c, 0, 0, - 0x34, 0x66, 0xf, 0x7d, 0, 0, - 0x34, 0xf2, 0xf, 0x7d, 0, 0, - 0x34, 0x66, 0xf, 0x7e, 0, 0, - 0x34, 0xf3, 0xf, 0x7e, 0, 0, - 0x23, 0xf, 0x7e, 0, 0, - 0x34, 0x66, 0xf, 0x7f, 0, 0, - 0x34, 0xf3, 0xf, 0x7f, 0, 0, - 0x23, 0xf, 0x7f, 0, 0, - 0x23, 0xf, 0xc3, 0, 0, - 0x34, 0x66, 0xf, 0xc4, 0, 0, - 0x23, 0xf, 0xc4, 0, 0, - 0x34, 0x66, 0xf, 0xc5, 0xc0, 0xc0, - 0x23, 0xf, 0xc5, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0xc6, 0, 0, - 0x23, 0xf, 0xc6, 0, 0, - 0x34, 0x66, 0xf, 0xd1, 0, 0, - 0x23, 0xf, 0xd1, 0, 0, - 0x34, 0x66, 0xf, 0xd2, 0, 0, - 0x23, 0xf, 0xd2, 0, 0, - 0x34, 0x66, 0xf, 0xd3, 0, 0, - 0x23, 0xf, 0xd3, 0, 0, - 0x34, 0x66, 0xf, 0xd4, 0, 0, - 0x23, 0xf, 0xd4, 0, 0, - 0x34, 0x66, 0xf, 0xd5, 0, 0, - 0x23, 0xf, 0xd5, 0, 0, - 0x34, 0x66, 0xf, 0xd6, 0, 0, - 0x34, 0xf2, 0xf, 0xd6, 0xc0, 0xc0, - 0x34, 0xf3, 0xf, 0xd6, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0xd7, 0xc0, 0xc0, - 0x23, 0xf, 0xd7, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0xd8, 0, 0, - 0x23, 0xf, 0xd8, 0, 0, - 0x34, 0x66, 0xf, 0xd9, 0, 0, - 0x23, 0xf, 0xd9, 0, 0, - 0x34, 0x66, 0xf, 0xda, 0, 0, - 0x23, 0xf, 0xda, 0, 0, - 0x34, 0x66, 0xf, 0xdc, 0, 0, - 0x23, 0xf, 0xdc, 0, 0, - 0x34, 0x66, 0xf, 0xdd, 0, 0, - 0x23, 0xf, 0xdd, 0, 0, - 0x34, 0x66, 0xf, 0xde, 0, 0, - 0x23, 0xf, 0xde, 0, 0, - 0x34, 0x66, 0xf, 0xe0, 0, 0, - 0x23, 0xf, 0xe0, 0, 0, - 0x34, 0x66, 0xf, 0xe1, 0, 0, - 0x23, 0xf, 0xe1, 0, 0, - 0x34, 0x66, 0xf, 0xe2, 0, 0, - 0x23, 0xf, 0xe2, 0, 0, - 0x34, 0x66, 0xf, 0xe3, 0, 0, - 0x23, 0xf, 0xe3, 0, 0, - 0x34, 0x66, 0xf, 0xe4, 0, 0, - 0x23, 0xf, 0xe4, 0, 0, - 0x34, 0x66, 0xf, 0xe5, 0, 0, - 0x23, 0xf, 0xe5, 0, 0, - 0x34, 0x66, 0xf, 0xe7, 0, 0, - 0x23, 0xf, 0xe7, 0, 0, - 0x34, 0x66, 0xf, 0xe8, 0, 0, - 0x23, 0xf, 0xe8, 0, 0, - 0x34, 0x66, 0xf, 0xe9, 0, 0, - 0x23, 0xf, 0xe9, 0, 0, - 0x34, 0x66, 0xf, 0xea, 0, 0, - 0x23, 0xf, 0xea, 0, 0, - 0x34, 0x66, 0xf, 0xec, 0, 0, - 0x23, 0xf, 0xec, 0, 0, - 0x34, 0x66, 0xf, 0xed, 0, 0, - 0x23, 0xf, 0xed, 0, 0, - 0x34, 0x66, 0xf, 0xee, 0, 0, - 0x23, 0xf, 0xee, 0, 0, - 0x34, 0xf2, 0xf, 0xf0, 0, 0, - 0x34, 0x66, 0xf, 0xf1, 0, 0, - 0x23, 0xf, 0xf1, 0, 0, - 0x34, 0x66, 0xf, 0xf2, 0, 0, - 0x23, 0xf, 0xf2, 0, 0, - 0x34, 0x66, 0xf, 0xf3, 0, 0, - 0x23, 0xf, 0xf3, 0, 0, - 0x34, 0x66, 0xf, 0xf4, 0, 0, - 0x23, 0xf, 0xf4, 0, 0, - 0x34, 0x66, 0xf, 0xf6, 0, 0, - 0x23, 0xf, 0xf6, 0, 0, - 0x34, 0x66, 0xf, 0xf7, 0xc0, 0xc0, - 0x23, 0xf, 0xf7, 0xc0, 0xc0, - 0x34, 0x66, 0xf, 0xf8, 0, 0, - 0x23, 0xf, 0xf8, 0, 0, - 0x34, 0x66, 0xf, 0xf9, 0, 0, - 0x23, 0xf, 0xf9, 0, 0, - 0x34, 0x66, 0xf, 0xfa, 0, 0, - 0x23, 0xf, 0xfa, 0, 0, - 0x34, 0x66, 0xf, 0xfb, 0, 0, - 0x23, 0xf, 0xfb, 0, 0, - 0x34, 0x66, 0xf, 0xfc, 0, 0, - 0x23, 0xf, 0xfc, 0, 0, - 0x34, 0x66, 0xf, 0xfd, 0, 0, - 0x23, 0xf, 0xfd, 0, 0, - 0x34, 0x66, 0xf, 0xfe, 0, 0, - 0x23, 0xf, 0xfe, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0, 0, 0, - 0x34, 0xf, 0x38, 0, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x1, 0, 0, - 0x34, 0xf, 0x38, 0x1, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x2, 0, 0, - 0x34, 0xf, 0x38, 0x2, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3, 0, 0, - 0x34, 0xf, 0x38, 0x3, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x4, 0, 0, - 0x34, 0xf, 0x38, 0x4, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x5, 0, 0, - 
0x34, 0xf, 0x38, 0x5, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x6, 0, 0, - 0x34, 0xf, 0x38, 0x6, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x7, 0, 0, - 0x34, 0xf, 0x38, 0x7, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x8, 0, 0, - 0x34, 0xf, 0x38, 0x8, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x9, 0, 0, - 0x34, 0xf, 0x38, 0x9, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0xa, 0, 0, - 0x34, 0xf, 0x38, 0xa, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0xb, 0, 0, - 0x34, 0xf, 0x38, 0xb, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x1c, 0, 0, - 0x34, 0xf, 0x38, 0x1c, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x1d, 0, 0, - 0x34, 0xf, 0x38, 0x1d, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x1e, 0, 0, - 0x34, 0xf, 0x38, 0x1e, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xf, 0, 0, - 0x34, 0xf, 0x3a, 0xf, 0, 0, - 0x34, 0x66, 0xf, 0xc7, 0x38, 0x30, - 0x34, 0xf3, 0xf, 0xc7, 0x38, 0x30, - 0x23, 0xf, 0xc7, 0x38, 0x30, - 0x23, 0xf, 0xc7, 0x38, 0x38, - 0x34, 0x66, 0xf, 0x71, 0xf8, 0xd0, - 0x23, 0xf, 0x71, 0xf8, 0xd0, - 0x34, 0x66, 0xf, 0x71, 0xf8, 0xe0, - 0x23, 0xf, 0x71, 0xf8, 0xe0, - 0x34, 0x66, 0xf, 0x71, 0xf8, 0xf0, - 0x23, 0xf, 0x71, 0xf8, 0xf0, - 0x34, 0x66, 0xf, 0x72, 0xf8, 0xd0, - 0x23, 0xf, 0x72, 0xf8, 0xd0, - 0x34, 0x66, 0xf, 0x72, 0xf8, 0xe0, - 0x23, 0xf, 0x72, 0xf8, 0xe0, - 0x34, 0x66, 0xf, 0x72, 0xf8, 0xf0, - 0x23, 0xf, 0x72, 0xf8, 0xf0, - 0x34, 0x66, 0xf, 0x73, 0xf8, 0xd0, - 0x23, 0xf, 0x73, 0xf8, 0xd0, - 0x34, 0x66, 0xf, 0x73, 0xf8, 0xd8, - 0x34, 0x66, 0xf, 0x73, 0xf8, 0xf0, - 0x23, 0xf, 0x73, 0xf8, 0xf0, - 0x34, 0x66, 0xf, 0x73, 0xf8, 0xf8, - 0x33, 0xf, 0xae, 0xe8, - 0x33, 0xf, 0xae, 0xf0, - 0x33, 0xf, 0xae, 0xf8, - 0x23, 0xf, 0xae, 0x38, 0x38, - 0x23, 0xf, 0xf, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xc, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xd, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x14, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x15, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x40, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x41, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x21, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x2a, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x42, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x2b, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x10, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xe, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x29, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x61, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x60, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x63, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x62, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x37, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x41, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x20, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x22, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3c, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3d, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3f, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3e, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x38, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x39, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3b, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x3a, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x20, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x21, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x22, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x23, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x24, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x25, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x30, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x31, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x32, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x33, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x34, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x35, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x28, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x40, 0, 0, - 0x45, 0x66, 0xf, 0x38, 0x17, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x8, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0x9, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xa, 0, 0, - 0x45, 0x66, 0xf, 0x3a, 0xb, 0, 0, - 0x1, 0xe7, 0x7, -}; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-Add-GCC7-Wimplicit-fallthrough-support-fixes.patch 
b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-Add-GCC7-Wimplicit-fallthrough-support-fixes.patch deleted file mode 100644 index a240323f3..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-Add-GCC7-Wimplicit-fallthrough-support-fixes.patch +++ /dev/null @@ -1,318 +0,0 @@ -From 09949994e76eea3c1230a5c88ffa8fdf588b120f Mon Sep 17 00:00:00 2001 -From: Mark Wielaard -Date: Wed, 2 Nov 2016 13:29:26 +0100 -Subject: [PATCH] Add GCC7 -Wimplicit-fallthrough support/fixes. - -GCC7 will have a new -Wimplicit-fallthrough warning. It did catch one -small buglet in elflint option procession. So it seems useful to enable -to make sure all swatch case fallthroughs are deliberate. - -Add configure check to detect whether gcc support -Wimplicit-fallthrough -and enable it. Add fixes and explicit fallthrough comments where necessary. - -Signed-off-by: Mark Wielaard - -Upstream-Status: Backport -Upstream-Commit: a3cc8182b2ae05290b0eafa74b70746d7befc0e4 ---- - backends/alpha_retval.c | 4 +--- - backends/i386_regs.c | 1 + - backends/i386_retval.c | 3 +-- - backends/linux-core-note.c | 4 ++-- - backends/ppc_regs.c | 2 +- - backends/x86_64_regs.c | 1 + - config/eu.am | 8 +++++++- - configure.ac | 10 ++++++++++ - libcpu/i386_disasm.c | 2 +- - libdw/cfi.c | 2 ++ - libdw/encoded-value.h | 1 + - libdwfl/dwfl_report_elf.c | 2 +- - src/addr2line.c | 1 + - src/elfcompress.c | 3 ++- - src/elflint.c | 4 +++- - src/objdump.c | 4 +++- - tests/backtrace-data.c | 1 + - tests/backtrace.c | 2 +- - 18 files changed, 40 insertions(+), 15 deletions(-) - -diff --git a/backends/alpha_retval.c b/backends/alpha_retval.c -index 53dbfa45..7232b462 100644 ---- a/backends/alpha_retval.c -+++ b/backends/alpha_retval.c -@@ -130,9 +130,7 @@ alpha_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) - return nloc_intreg; - } - } -- -- /* Else fall through. */ -- -+ /* Fallthrough */ - case DW_TAG_structure_type: - case DW_TAG_class_type: - case DW_TAG_union_type: -diff --git a/backends/i386_regs.c b/backends/i386_regs.c -index fb8ded33..fd963a62 100644 ---- a/backends/i386_regs.c -+++ b/backends/i386_regs.c -@@ -92,6 +92,7 @@ i386_register_info (Ebl *ebl __attribute__ ((unused)), - case 5: - case 8: - *type = DW_ATE_address; -+ /* Fallthrough */ - case 0 ... 3: - case 6 ... 7: - name[0] = 'e'; -diff --git a/backends/i386_retval.c b/backends/i386_retval.c -index 9da797d5..4aa646fe 100644 ---- a/backends/i386_retval.c -+++ b/backends/i386_retval.c -@@ -122,9 +122,8 @@ i386_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) - return nloc_intreg; - if (size <= 8) - return nloc_intregpair; -- -- /* Else fall through. */ - } -+ /* Fallthrough */ - - case DW_TAG_structure_type: - case DW_TAG_class_type: -diff --git a/backends/linux-core-note.c b/backends/linux-core-note.c -index ff2b226f..321721f3 100644 ---- a/backends/linux-core-note.c -+++ b/backends/linux-core-note.c -@@ -219,8 +219,8 @@ EBLHOOK(core_note) (const GElf_Nhdr *nhdr, const char *name, - case sizeof "CORE": - if (memcmp (name, "CORE", nhdr->n_namesz) == 0) - break; -- /* Buggy old Linux kernels didn't terminate "LINUX". -- Fall through. */ -+ /* Buggy old Linux kernels didn't terminate "LINUX". */ -+ /* Fall through. 
*/ - - case sizeof "LINUX": - if (memcmp (name, "LINUX", nhdr->n_namesz) == 0) -diff --git a/backends/ppc_regs.c b/backends/ppc_regs.c -index 4b92a9aa..bcf4f7a3 100644 ---- a/backends/ppc_regs.c -+++ b/backends/ppc_regs.c -@@ -140,7 +140,7 @@ ppc_register_info (Ebl *ebl __attribute__ ((unused)), - case 100: - if (*bits == 32) - return stpcpy (name, "mq") + 1 - name; -- -+ /* Fallthrough */ - case 102 ... 107: - name[0] = 's'; - name[1] = 'p'; -diff --git a/backends/x86_64_regs.c b/backends/x86_64_regs.c -index 2172d9f1..84304407 100644 ---- a/backends/x86_64_regs.c -+++ b/backends/x86_64_regs.c -@@ -87,6 +87,7 @@ x86_64_register_info (Ebl *ebl __attribute__ ((unused)), - - case 6 ... 7: - *type = DW_ATE_address; -+ /* Fallthrough */ - case 0 ... 5: - name[0] = 'r'; - name[1] = baseregs[regno][0]; -diff --git a/config/eu.am b/config/eu.am -index 4998771d..8fe1e259 100644 ---- a/config/eu.am -+++ b/config/eu.am -@@ -61,10 +61,16 @@ else - NULL_DEREFERENCE_WARNING= - endif - -+if HAVE_IMPLICIT_FALLTHROUGH_WARNING -+IMPLICIT_FALLTHROUGH_WARNING=-Wimplicit-fallthrough -+else -+IMPLICIT_FALLTHROUGH_WARNING= -+endif -+ - AM_CFLAGS = -std=gnu99 -Wall -Wshadow -Wformat=2 \ - -Wold-style-definition -Wstrict-prototypes \ - $(LOGICAL_OP_WARNING) $(DUPLICATED_COND_WARNING) \ -- $(NULL_DEREFERENCE_WARNING) \ -+ $(NULL_DEREFERENCE_WARNING) $(IMPLICIT_FALLTHROUGH_WARNING) \ - $(if $($(*F)_no_Werror),,-Werror) \ - $(if $($(*F)_no_Wunused),,-Wunused -Wextra) \ - $(if $($(*F)_no_Wstack_usage),,$(STACK_USAGE_WARNING)) \ -diff --git a/configure.ac b/configure.ac -index 86a69c66..35850c64 100644 ---- a/configure.ac -+++ b/configure.ac -@@ -336,6 +336,16 @@ CFLAGS="$old_CFLAGS"]) - AM_CONDITIONAL(HAVE_NULL_DEREFERENCE_WARNING, - [test "x$ac_cv_null_dereference" != "xno"]) - -+# -Wimplicit-fallthrough was added by GCC7 -+AC_CACHE_CHECK([whether gcc accepts -Wimplicit-fallthrough], ac_cv_implicit_fallthrough, [dnl -+old_CFLAGS="$CFLAGS" -+CFLAGS="$CFLAGS -Wimplicit-fallthrough -Werror" -+AC_COMPILE_IFELSE([AC_LANG_SOURCE([])], -+ ac_cv_implicit_fallthrough=yes, ac_cv_implicit_fallthrough=no) -+CFLAGS="$old_CFLAGS"]) -+AM_CONDITIONAL(HAVE_IMPLICIT_FALLTHROUGH_WARNING, -+ [test "x$ac_cv_implicit_fallthrough" != "xno"]) -+ - dnl Check if we have argp available from our libc - AC_LINK_IFELSE( - [AC_LANG_PROGRAM( -diff --git a/libcpu/i386_disasm.c b/libcpu/i386_disasm.c -index 832241f2..1a584635 100644 ---- a/libcpu/i386_disasm.c -+++ b/libcpu/i386_disasm.c -@@ -819,7 +819,7 @@ i386_disasm (const uint8_t **startp, const uint8_t *end, GElf_Addr addr, - ++param_start; - break; - } -- -+ /* Fallthrough */ - default: - assert (! "INVALID not handled"); - } -diff --git a/libdw/cfi.c b/libdw/cfi.c -index 1fd668d7..daa845f3 100644 ---- a/libdw/cfi.c -+++ b/libdw/cfi.c -@@ -138,6 +138,7 @@ execute_cfi (Dwarf_CFI *cache, - - case DW_CFA_advance_loc1: - operand = *program++; -+ /* Fallthrough */ - case DW_CFA_advance_loc + 0 ... DW_CFA_advance_loc + CFI_PRIMARY_MAX: - advance_loc: - loc += operand * cie->code_alignment_factor; -@@ -300,6 +301,7 @@ execute_cfi (Dwarf_CFI *cache, - - case DW_CFA_restore_extended: - get_uleb128 (operand, program, end); -+ /* Fallthrough */ - case DW_CFA_restore + 0 ... 
DW_CFA_restore + CFI_PRIMARY_MAX: - - if (unlikely (abi_cfi) && likely (opcode == DW_CFA_restore)) -diff --git a/libdw/encoded-value.h b/libdw/encoded-value.h -index 48d868fb..f0df4cec 100644 ---- a/libdw/encoded-value.h -+++ b/libdw/encoded-value.h -@@ -64,6 +64,7 @@ encoded_value_size (const Elf_Data *data, const unsigned char e_ident[], - if (*end++ & 0x80u) - return end - p; - } -+ return 0; - - default: - return 0; -diff --git a/libdwfl/dwfl_report_elf.c b/libdwfl/dwfl_report_elf.c -index 1c6e401d..73a5511a 100644 ---- a/libdwfl/dwfl_report_elf.c -+++ b/libdwfl/dwfl_report_elf.c -@@ -170,7 +170,7 @@ __libdwfl_elf_address_range (Elf *elf, GElf_Addr base, bool add_p_vaddr, - /* An assigned base address is meaningless for these. */ - base = 0; - add_p_vaddr = true; -- -+ /* Fallthrough. */ - case ET_DYN: - default:; - size_t phnum; -diff --git a/src/addr2line.c b/src/addr2line.c -index 0ce854f6..bea24aea 100644 ---- a/src/addr2line.c -+++ b/src/addr2line.c -@@ -632,6 +632,7 @@ handle_address (const char *string, Dwfl *dwfl) - case 1: - addr = 0; - j = i; -+ /* Fallthrough */ - case 2: - if (string[j] != '\0') - break; -diff --git a/src/elfcompress.c b/src/elfcompress.c -index d0ca469c..57afa116 100644 ---- a/src/elfcompress.c -+++ b/src/elfcompress.c -@@ -153,7 +153,8 @@ parse_opt (int key, char *arg __attribute__ ((unused)), - argp_error (state, - N_("Only one input file allowed together with '-o'")); - /* We only use this for checking the number of arguments, we don't -- actually want to consume them, so fallthrough. */ -+ actually want to consume them. */ -+ /* Fallthrough */ - default: - return ARGP_ERR_UNKNOWN; - } -diff --git a/src/elflint.c b/src/elflint.c -index 15b12f6f..2c45fcb8 100644 ---- a/src/elflint.c -+++ b/src/elflint.c -@@ -210,6 +210,7 @@ parse_opt (int key, char *arg __attribute__ ((unused)), - - case 'd': - is_debuginfo = true; -+ break; - - case ARGP_gnuld: - gnuld = true; -@@ -3963,6 +3964,7 @@ section [%2zu] '%s': merge flag set but entry size is zero\n"), - case SHT_NOBITS: - if (is_debuginfo) - break; -+ /* Fallthrough */ - default: - ERROR (gettext ("\ - section [%2zu] '%s' has unexpected type %d for an executable section\n"), -@@ -4305,7 +4307,7 @@ section [%2d] '%s': unknown core file note type %" PRIu32 - if (nhdr.n_namesz == sizeof "Linux" - && !memcmp (data->d_buf + name_offset, "Linux", sizeof "Linux")) - break; -- -+ /* Fallthrough */ - default: - if (shndx == 0) - ERROR (gettext ("\ -diff --git a/src/objdump.c b/src/objdump.c -index 0aa41e89..94e9e021 100644 ---- a/src/objdump.c -+++ b/src/objdump.c -@@ -234,7 +234,9 @@ parse_opt (int key, char *arg, - program_invocation_short_name); - exit (EXIT_FAILURE); - } -- -+ /* We only use this for checking the number of arguments, we don't -+ actually want to consume them. 
*/ -+ /* Fallthrough */ - default: - return ARGP_ERR_UNKNOWN; - } -diff --git a/tests/backtrace-data.c b/tests/backtrace-data.c -index bc5ceba0..b7158dae 100644 ---- a/tests/backtrace-data.c -+++ b/tests/backtrace-data.c -@@ -250,6 +250,7 @@ thread_callback (Dwfl_Thread *thread, void *thread_arg __attribute__ ((unused))) - break; - case -1: - error (1, 0, "dwfl_thread_getframes: %s", dwfl_errmsg (-1)); -+ break; - default: - abort (); - } -diff --git a/tests/backtrace.c b/tests/backtrace.c -index 12476430..bf5995b4 100644 ---- a/tests/backtrace.c -+++ b/tests/backtrace.c -@@ -123,7 +123,7 @@ callback_verify (pid_t tid, unsigned frameno, Dwarf_Addr pc, - assert (symname2 == NULL || strcmp (symname2, "jmp") != 0); - break; - } -- /* PASSTHRU */ -+ /* FALLTHRU */ - case 4: - assert (symname != NULL && strcmp (symname, "stdarg") == 0); - break; --- -2.13.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-Ignore-differences-between-mips-machine-identifiers.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-Ignore-differences-between-mips-machine-identifiers.patch deleted file mode 100644 index 3f110f98f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-Ignore-differences-between-mips-machine-identifiers.patch +++ /dev/null @@ -1,34 +0,0 @@ -From 77cb4a53c270d5854d3af24f19547bc3de825233 Mon Sep 17 00:00:00 2001 -From: James Cowgill -Date: Mon, 5 Jan 2015 15:16:58 +0000 -Subject: [PATCH 1/3] Ignore differences between mips machine identifiers - -Little endian binaries actually use EM_MIPS so you can't tell the endianness -from the elf machine id. Also, the EM_MIPS_RS3_LE machine is dead anyway (the -kernel will not load binaries containing it). - -Signed-off-by: James Cowgill ---- - backends/mips_init.c | 6 +----- - 1 file changed, 1 insertion(+), 5 deletions(-) - -diff --git a/backends/mips_init.c b/backends/mips_init.c -index 7429a89..d10e940 100644 ---- a/backends/mips_init.c -+++ b/backends/mips_init.c -@@ -46,11 +46,7 @@ mips_init (elf, machine, eh, ehlen) - return NULL; - - /* We handle it. */ -- if (machine == EM_MIPS) -- eh->name = "MIPS R3000 big-endian"; -- else if (machine == EM_MIPS_RS3_LE) -- eh->name = "MIPS R3000 little-endian"; -- -+ eh->name = "MIPS"; - mips_init_reloc (eh); - HOOK (eh, reloc_simple_type); - HOOK (eh, return_value_location); --- -2.1.4 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-ar-Fix-GCC7-Wformat-length-issues.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-ar-Fix-GCC7-Wformat-length-issues.patch deleted file mode 100644 index 346547678..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-ar-Fix-GCC7-Wformat-length-issues.patch +++ /dev/null @@ -1,125 +0,0 @@ -From f090883ca61f0bf0f979c5b26d4e1a69e805156e Mon Sep 17 00:00:00 2001 -From: Mark Wielaard -Date: Thu, 10 Nov 2016 18:45:02 +0100 -Subject: [PATCH] ar: Fix GCC7 -Wformat-length issues. - -GCC7 adds warnings for snprintf formatting into too small buffers. -Fix the two issues pointed out by the new warning. The ar header -fields are fixed length containing left-justified strings without -zero terminator. snprintf always adds a '\0' char at the end (which -we then don't copy into the ar header field) and numbers are decimal -strings of fixed 10 chars (-Wformat-length thinks formatting -them as size_t might overflow the buffer on 64bit arches). 
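
As an aside on the pattern this deleted patch describes (a sketch, not code taken from the patch): the ar header fields are fixed width, left-justified and carry no NUL terminator, so the usual approach is to snprintf into a scratch buffer one byte larger than the field and then memcpy only the field-sized prefix, dropping the terminator. A minimal sketch, using a hypothetical 10-byte field in place of the real struct ar_hdr:

#include <inttypes.h>
#include <stdio.h>
#include <string.h>

struct hdr { char ar_size[10]; };   /* fixed width, left-justified, no '\0' (hypothetical stand-in) */

static void set_size (struct hdr *h, uint32_t size)
{
  /* One extra byte so snprintf has room for the '\0' it always writes;
     that terminator is deliberately not copied into the field.  */
  char tmp[sizeof h->ar_size + 1];
  snprintf (tmp, sizeof tmp, "%-*" PRIu32, (int) sizeof h->ar_size, size);
  memcpy (h->ar_size, tmp, sizeof h->ar_size);
}

Formatting the value as uint32_t rather than size_t keeps the decimal representation within the 10 characters the field can hold, which is exactly what the -Wformat-length change above is about.
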
- -Signed-off-by: Mark Wielaard - -Upstream-Status: Backport -Upstream-Commit: d5afff85e22b38949f3e7936231c67de16e180e8 ---- - src/ar.c | 15 +++++++++++---- - src/arlib.c | 16 ++++++++++------ - 2 files changed, 21 insertions(+), 10 deletions(-) - -diff --git a/src/ar.c b/src/ar.c -index 1320d07b..f2160d35 100644 ---- a/src/ar.c -+++ b/src/ar.c -@@ -1,5 +1,5 @@ - /* Create, modify, and extract from archives. -- Copyright (C) 2005-2012 Red Hat, Inc. -+ Copyright (C) 2005-2012, 2016 Red Hat, Inc. - This file is part of elfutils. - Written by Ulrich Drepper , 2005. - -@@ -853,7 +853,10 @@ write_member (struct armem *memb, off_t *startp, off_t *lenp, Elf *elf, - off_t end_off, int newfd) - { - struct ar_hdr arhdr; -- char tmpbuf[sizeof (arhdr.ar_name) + 1]; -+ /* The ar_name is not actually zero teminated, but we need that for -+ snprintf. Also if the name is too long, then the string starts -+ with '/' plus an index off number (decimal). */ -+ char tmpbuf[sizeof (arhdr.ar_name) + 2]; - - bool changed_header = memb->long_name_off != -1; - if (changed_header) -@@ -1455,7 +1458,11 @@ do_oper_insert (int oper, const char *arfname, char **argv, int argc, - - /* Create the header. */ - struct ar_hdr arhdr; -- char tmpbuf[sizeof (arhdr.ar_name) + 1]; -+ /* The ar_name is not actually zero teminated, but we -+ need that for snprintf. Also if the name is too -+ long, then the string starts with '/' plus an index -+ off number (decimal). */ -+ char tmpbuf[sizeof (arhdr.ar_name) + 2]; - if (all->long_name_off == -1) - { - size_t namelen = strlen (all->name); -@@ -1465,7 +1472,7 @@ do_oper_insert (int oper, const char *arfname, char **argv, int argc, - } - else - { -- snprintf (tmpbuf, sizeof (arhdr.ar_name) + 1, "/%-*ld", -+ snprintf (tmpbuf, sizeof (tmpbuf), "/%-*ld", - (int) sizeof (arhdr.ar_name), all->long_name_off); - memcpy (arhdr.ar_name, tmpbuf, sizeof (arhdr.ar_name)); - } -diff --git a/src/arlib.c b/src/arlib.c -index 43a9145b..0c2e4cde 100644 ---- a/src/arlib.c -+++ b/src/arlib.c -@@ -1,5 +1,5 @@ - /* Functions to handle creation of Linux archives. -- Copyright (C) 2007-2012 Red Hat, Inc. -+ Copyright (C) 2007-2012, 2016 Red Hat, Inc. - This file is part of elfutils. - Written by Ulrich Drepper , 2007. - -@@ -23,6 +23,7 @@ - #include - #include - #include -+#include - #include - #include - #include -@@ -107,6 +108,9 @@ arlib_init (void) - void - arlib_finalize (void) - { -+ /* Note that the size is stored as decimal string in 10 chars, -+ without zero terminator (we add + 1 here only so snprintf can -+ put it at the end, we then don't use it when we memcpy it). */ - char tmpbuf[sizeof (((struct ar_hdr *) NULL)->ar_size) + 1]; - - symtab.longnameslen = obstack_object_size (&symtab.longnamesob); -@@ -121,9 +125,9 @@ arlib_finalize (void) - - symtab.longnames = obstack_finish (&symtab.longnamesob); - -- int s = snprintf (tmpbuf, sizeof (tmpbuf), "%-*zu", -+ int s = snprintf (tmpbuf, sizeof (tmpbuf), "%-*" PRIu32 "", - (int) sizeof (((struct ar_hdr *) NULL)->ar_size), -- symtab.longnameslen - sizeof (struct ar_hdr)); -+ (uint32_t) (symtab.longnameslen - sizeof (struct ar_hdr))); - memcpy (&((struct ar_hdr *) symtab.longnames)->ar_size, tmpbuf, s); - } - -@@ -169,10 +173,10 @@ arlib_finalize (void) - - /* See comment for ar_date above. 
*/ - memcpy (&((struct ar_hdr *) symtab.symsoff)->ar_size, tmpbuf, -- snprintf (tmpbuf, sizeof (tmpbuf), "%-*zu", -+ snprintf (tmpbuf, sizeof (tmpbuf), "%-*" PRIu32 "", - (int) sizeof (((struct ar_hdr *) NULL)->ar_size), -- symtab.symsofflen + symtab.symsnamelen -- - sizeof (struct ar_hdr))); -+ (uint32_t) (symtab.symsofflen + symtab.symsnamelen -+ - sizeof (struct ar_hdr)))); - } - - --- -2.13.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-build-Provide-alternatives-for-glibc-assumptions-hel.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-build-Provide-alternatives-for-glibc-assumptions-hel.patch deleted file mode 100644 index 38b31f6a8..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-build-Provide-alternatives-for-glibc-assumptions-hel.patch +++ /dev/null @@ -1,1163 +0,0 @@ -From 4b0fe80b3951f044c1c1b14d1d7f5f0b8ab67507 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Thu, 31 Dec 2015 06:35:34 +0000 -Subject: [PATCH] build: Provide alternatives for glibc assumptions helps - compiling it on musl - -Signed-off-by: Khem Raj ---- -Upstream-Status: Pending - - Makefile.am | 2 +- - lib/color.c | 2 +- - lib/crc32_file.c | 4 +++- - lib/fixedsizehash.h | 4 ++-- - lib/system.h | 11 +++++++++++ - lib/xmalloc.c | 2 +- - libasm/asm_end.c | 2 +- - libasm/asm_newscn.c | 2 +- - libcpu/i386_gendis.c | 2 +- - libcpu/i386_lex.c | 2 +- - libcpu/i386_parse.c | 2 +- - libdw/Makefile.am | 2 +- - libdw/libdw_alloc.c | 3 ++- - libdwfl/Makefile.am | 2 ++ - libdwfl/dwfl_build_id_find_elf.c | 3 ++- - libdwfl/dwfl_error.c | 4 +++- - libdwfl/dwfl_module_getdwarf.c | 1 + - libdwfl/find-debuginfo.c | 2 +- - libdwfl/libdwfl_crc32_file.c | 10 ++++++++++ - libdwfl/linux-kernel-modules.c | 1 + - libebl/eblopenbackend.c | 2 +- - libebl/eblwstrtab.c | 2 +- - libelf/elf.h | 9 ++++++--- - libelf/libelf.h | 1 + - libelf/libelfP.h | 1 + - src/addr2line.c | 2 +- - src/ar.c | 2 +- - src/arlib.c | 2 +- - src/arlib2.c | 2 +- - src/elfcmp.c | 2 +- - src/elflint.c | 2 +- - src/findtextrel.c | 2 +- - src/i386_ld.c | 2 +- - src/ld.c | 2 +- - src/ldgeneric.c | 2 +- - src/ldlex.c | 2 +- - src/ldscript.c | 2 +- - src/nm.c | 2 +- - src/objdump.c | 2 +- - src/ranlib.c | 2 +- - src/readelf.c | 2 +- - src/size.c | 2 +- - src/stack.c | 2 +- - src/strings.c | 2 +- - src/strip.c | 2 +- - src/unstrip.c | 2 +- - tests/addrscopes.c | 2 +- - tests/allregs.c | 2 +- - tests/backtrace-data.c | 2 +- - tests/backtrace-dwarf.c | 2 +- - tests/backtrace.c | 2 +- - tests/buildid.c | 2 +- - tests/debugaltlink.c | 2 +- - tests/debuglink.c | 2 +- - tests/deleted.c | 2 +- - tests/dwfl-addr-sect.c | 2 +- - tests/dwfl-bug-addr-overflow.c | 2 +- - tests/dwfl-bug-fd-leak.c | 2 +- - tests/dwfl-bug-getmodules.c | 2 +- - tests/dwfl-report-elf-align.c | 2 +- - tests/dwfllines.c | 2 +- - tests/dwflmodtest.c | 2 +- - tests/dwflsyms.c | 2 +- - tests/early-offscn.c | 2 +- - tests/ecp.c | 2 +- - tests/find-prologues.c | 2 +- - tests/funcretval.c | 2 +- - tests/funcscopes.c | 2 +- - tests/getsrc_die.c | 2 +- - tests/line2addr.c | 2 +- - tests/low_high_pc.c | 2 +- - tests/md5-sha1-test.c | 2 +- - tests/rdwrmmap.c | 2 +- - tests/saridx.c | 2 +- - tests/sectiondump.c | 2 +- - tests/varlocs.c | 2 +- - tests/vdsosyms.c | 2 +- - 77 files changed, 109 insertions(+), 73 deletions(-) - -Index: elfutils-0.166/Makefile.am -=================================================================== ---- elfutils-0.166.orig/Makefile.am -+++ elfutils-0.166/Makefile.am -@@ 
-28,7 +28,7 @@ pkginclude_HEADERS = version.h - - # Add doc back when we have some real content. - SUBDIRS = config m4 lib libelf libebl libdwelf libdwfl libdw libcpu libasm \ -- backends src po tests -+ backends po tests - - EXTRA_DIST = elfutils.spec GPG-KEY NOTES CONTRIBUTING \ - COPYING COPYING-GPLV2 COPYING-LGPLV3 -Index: elfutils-0.166/lib/color.c -=================================================================== ---- elfutils-0.166.orig/lib/color.c -+++ elfutils-0.166/lib/color.c -@@ -32,7 +32,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/lib/crc32_file.c -=================================================================== ---- elfutils-0.166.orig/lib/crc32_file.c -+++ elfutils-0.166/lib/crc32_file.c -@@ -30,12 +30,14 @@ - # include - #endif - --#include "system.h" - #include - #include -+#include - #include - #include - -+#include "system.h" -+ - int - crc32_file (int fd, uint32_t *resp) - { -Index: elfutils-0.166/lib/fixedsizehash.h -=================================================================== ---- elfutils-0.166.orig/lib/fixedsizehash.h -+++ elfutils-0.166/lib/fixedsizehash.h -@@ -30,12 +30,12 @@ - #include - #include - #include --#include - #include - - #include - --#define CONCAT(t1,t2) __CONCAT (t1,t2) -+#define CONCAT1(x,y) x##y -+#define CONCAT(x,y) CONCAT1(x,y) - - /* Before including this file the following macros must be defined: - -Index: elfutils-0.166/lib/system.h -=================================================================== ---- elfutils-0.166.orig/lib/system.h -+++ elfutils-0.166/lib/system.h -@@ -49,6 +49,16 @@ - #else - # error "Unknown byte order" - #endif -+#ifndef TEMP_FAILURE_RETRY -+#define TEMP_FAILURE_RETRY(expression) \ -+ (__extension__ \ -+ ({ long int __result; \ -+ do __result = (long int) (expression); \ -+ while (__result == -1L && errno == EINTR); \ -+ __result; })) -+#endif -+ -+#define error(status, errno, ...) 
err(status, __VA_ARGS__) - - extern void *xmalloc (size_t) __attribute__ ((__malloc__)); - extern void *xcalloc (size_t, size_t) __attribute__ ((__malloc__)); -Index: elfutils-0.166/lib/xmalloc.c -=================================================================== ---- elfutils-0.166.orig/lib/xmalloc.c -+++ elfutils-0.166/lib/xmalloc.c -@@ -30,7 +30,7 @@ - # include - #endif - --#include -+#include - #include - #include - #include -Index: elfutils-0.166/libasm/asm_end.c -=================================================================== ---- elfutils-0.166.orig/libasm/asm_end.c -+++ elfutils-0.166/libasm/asm_end.c -@@ -32,7 +32,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/libasm/asm_newscn.c -=================================================================== ---- elfutils-0.166.orig/libasm/asm_newscn.c -+++ elfutils-0.166/libasm/asm_newscn.c -@@ -32,7 +32,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/libcpu/i386_gendis.c -=================================================================== ---- elfutils-0.166.orig/libcpu/i386_gendis.c -+++ elfutils-0.166/libcpu/i386_gendis.c -@@ -31,7 +31,7 @@ - # include - #endif - --#include -+#include - #include - #include - #include -Index: elfutils-0.166/libcpu/i386_lex.c -=================================================================== ---- elfutils-0.166.orig/libcpu/i386_lex.c -+++ elfutils-0.166/libcpu/i386_lex.c -@@ -578,7 +578,7 @@ char *i386_text; - #endif - - #include --#include -+#include - #include - - #include -Index: elfutils-0.166/libcpu/i386_parse.c -=================================================================== ---- elfutils-0.166.orig/libcpu/i386_parse.c -+++ elfutils-0.166/libcpu/i386_parse.c -@@ -107,7 +107,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/libdw/Makefile.am -=================================================================== ---- elfutils-0.166.orig/libdw/Makefile.am -+++ elfutils-0.166/libdw/Makefile.am -@@ -117,7 +117,7 @@ libdw.so$(EXEEXT): $(srcdir)/libdw.map l - -Wl,--enable-new-dtags,-rpath,$(pkglibdir) \ - -Wl,--version-script,$<,--no-undefined \ - -Wl,--whole-archive $(filter-out $<,$^) -Wl,--no-whole-archive\ -- -ldl -lz $(argp_LDADD) $(zip_LIBS) $(libdw_so_LDLIBS) -+ -ldl -lz $(argp_LDADD) $(zip_LIBS) $(libdw_so_LDLIBS) -lfts -largp - @$(textrel_check) - $(AM_V_at)ln -fs $@ $@.$(VERSION) - -Index: elfutils-0.166/libdw/libdw_alloc.c -=================================================================== ---- elfutils-0.166.orig/libdw/libdw_alloc.c -+++ elfutils-0.166/libdw/libdw_alloc.c -@@ -31,11 +31,12 @@ - # include - #endif - --#include -+#include - #include - #include - #include - #include "libdwP.h" -+#include "system.h" - - - void * -Index: elfutils-0.166/libdwfl/Makefile.am -=================================================================== ---- elfutils-0.166.orig/libdwfl/Makefile.am -+++ elfutils-0.166/libdwfl/Makefile.am -@@ -84,6 +84,8 @@ libelf = ../libelf/libelf.so - libebl = ../libebl/libebl.a - libeu = ../lib/libeu.a - -+LDADD = -lfts -+ - libdwfl_pic_a_SOURCES = - am_libdwfl_pic_a_OBJECTS = $(libdwfl_a_SOURCES:.c=.os) - -Index: elfutils-0.166/libdwfl/dwfl_build_id_find_elf.c -=================================================================== ---- elfutils-0.166.orig/libdwfl/dwfl_build_id_find_elf.c -+++ elfutils-0.166/libdwfl/dwfl_build_id_find_elf.c -@@ -27,6 +27,7 @@ - not, see . 
*/ - - #include "libdwflP.h" -+#include "system.h" - #include - #include - #include -@@ -94,7 +95,7 @@ __libdwfl_open_by_build_id (Dwfl_Module - { - if (*file_name != NULL) - free (*file_name); -- *file_name = canonicalize_file_name (name); -+ *file_name = realpath (name, NULL); - if (*file_name == NULL) - { - *file_name = name; -Index: elfutils-0.166/libdwfl/dwfl_error.c -=================================================================== ---- elfutils-0.166.orig/libdwfl/dwfl_error.c -+++ elfutils-0.166/libdwfl/dwfl_error.c -@@ -140,6 +140,7 @@ __libdwfl_seterrno (Dwfl_Error error) - const char * - dwfl_errmsg (int error) - { -+ static __thread char s[64] = ""; - if (error == 0 || error == -1) - { - int last_error = global_error; -@@ -154,7 +155,8 @@ dwfl_errmsg (int error) - switch (error &~ 0xffff) - { - case OTHER_ERROR (ERRNO): -- return strerror_r (error & 0xffff, "bad", 0); -+ strerror_r (error & 0xffff, s, sizeof(s)); -+ return s; - case OTHER_ERROR (LIBELF): - return elf_errmsg (error & 0xffff); - case OTHER_ERROR (LIBDW): -Index: elfutils-0.166/libdwfl/dwfl_module_getdwarf.c -=================================================================== ---- elfutils-0.166.orig/libdwfl/dwfl_module_getdwarf.c -+++ elfutils-0.166/libdwfl/dwfl_module_getdwarf.c -@@ -31,6 +31,7 @@ - #include - #include - #include -+#include "system.h" - #include "../libdw/libdwP.h" /* DWARF_E_* values are here. */ - #include "../libelf/libelfP.h" - -Index: elfutils-0.166/libdwfl/find-debuginfo.c -=================================================================== ---- elfutils-0.166.orig/libdwfl/find-debuginfo.c -+++ elfutils-0.166/libdwfl/find-debuginfo.c -@@ -385,7 +385,7 @@ dwfl_standard_find_debuginfo (Dwfl_Modul - /* If FILE_NAME is a symlink, the debug file might be associated - with the symlink target name instead. 
*/ - -- char *canon = canonicalize_file_name (file_name); -+ char *canon = realpath (file_name, NULL); - if (canon != NULL && strcmp (file_name, canon)) - fd = find_debuginfo_in_path (mod, canon, - debuglink_file, debuglink_crc, -Index: elfutils-0.166/libdwfl/libdwfl_crc32_file.c -=================================================================== ---- elfutils-0.166.orig/libdwfl/libdwfl_crc32_file.c -+++ elfutils-0.166/libdwfl/libdwfl_crc32_file.c -@@ -31,6 +31,16 @@ - - #define crc32_file attribute_hidden __libdwfl_crc32_file - #define crc32 __libdwfl_crc32 -+ -+#ifndef TEMP_FAILURE_RETRY -+#define TEMP_FAILURE_RETRY(expression) \ -+ (__extension__ \ -+ ({ long int __result; \ -+ do __result = (long int) (expression); \ -+ while (__result == -1L && errno == EINTR); \ -+ __result; })) -+#endif -+ - #define LIB_SYSTEM_H 1 - #include - #include "../lib/crc32_file.c" -Index: elfutils-0.166/libdwfl/linux-kernel-modules.c -=================================================================== ---- elfutils-0.166.orig/libdwfl/linux-kernel-modules.c -+++ elfutils-0.166/libdwfl/linux-kernel-modules.c -@@ -34,6 +34,7 @@ - #include - - #include "libdwflP.h" -+#include "system.h" - #include - #include - #include -Index: elfutils-0.166/libebl/eblopenbackend.c -=================================================================== ---- elfutils-0.166.orig/libebl/eblopenbackend.c -+++ elfutils-0.166/libebl/eblopenbackend.c -@@ -32,7 +32,7 @@ - - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/libebl/eblwstrtab.c -=================================================================== ---- elfutils-0.166.orig/libebl/eblwstrtab.c -+++ elfutils-0.166/libebl/eblwstrtab.c -@@ -305,7 +305,7 @@ copystrings (struct Ebl_WStrent *nodep, - - /* Process the current node. */ - nodep->offset = *offsetp; -- *freep = wmempcpy (*freep, nodep->string, nodep->len); -+ *freep = wmemcpy (*freep, nodep->string, nodep->len) + nodep->len; - *offsetp += nodep->len * sizeof (wchar_t); - - for (subs = nodep->next; subs != NULL; subs = subs->next) -Index: elfutils-0.166/libelf/elf.h -=================================================================== ---- elfutils-0.166.orig/libelf/elf.h -+++ elfutils-0.166/libelf/elf.h -@@ -21,7 +21,9 @@ - - #include - --__BEGIN_DECLS -+#ifdef __cplusplus -+extern "C" { -+#endif - - /* Standard ELF types. 
*/ - -@@ -3591,7 +3593,8 @@ enum - - #define R_TILEGX_NUM 130 - -- --__END_DECLS -+#ifdef __cplusplus -+} -+#endif - - #endif /* elf.h */ -Index: elfutils-0.166/libelf/libelf.h -=================================================================== ---- elfutils-0.166.orig/libelf/libelf.h -+++ elfutils-0.166/libelf/libelf.h -@@ -29,6 +29,7 @@ - #ifndef _LIBELF_H - #define _LIBELF_H 1 - -+#include - #include - #include - -Index: elfutils-0.166/libelf/libelfP.h -=================================================================== ---- elfutils-0.166.orig/libelf/libelfP.h -+++ elfutils-0.166/libelf/libelfP.h -@@ -36,6 +36,7 @@ - - #include - #include -+#include - - #include - #include -Index: elfutils-0.166/src/addr2line.c -=================================================================== ---- elfutils-0.166.orig/src/addr2line.c -+++ elfutils-0.166/src/addr2line.c -@@ -23,7 +23,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/ar.c -=================================================================== ---- elfutils-0.166.orig/src/ar.c -+++ elfutils-0.166/src/ar.c -@@ -22,7 +22,7 @@ - - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/arlib.c -=================================================================== ---- elfutils-0.166.orig/src/arlib.c -+++ elfutils-0.166/src/arlib.c -@@ -21,7 +21,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/arlib2.c -=================================================================== ---- elfutils-0.166.orig/src/arlib2.c -+++ elfutils-0.166/src/arlib2.c -@@ -20,7 +20,7 @@ - # include - #endif - --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/elfcmp.c -=================================================================== ---- elfutils-0.166.orig/src/elfcmp.c -+++ elfutils-0.166/src/elfcmp.c -@@ -23,7 +23,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/elflint.c -=================================================================== ---- elfutils-0.166.orig/src/elflint.c -+++ elfutils-0.166/src/elflint.c -@@ -24,7 +24,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/findtextrel.c -=================================================================== ---- elfutils-0.166.orig/src/findtextrel.c -+++ elfutils-0.166/src/findtextrel.c -@@ -23,7 +23,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/i386_ld.c -=================================================================== ---- elfutils-0.166.orig/src/i386_ld.c -+++ elfutils-0.166/src/i386_ld.c -@@ -20,7 +20,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/ld.c -=================================================================== ---- elfutils-0.166.orig/src/ld.c -+++ elfutils-0.166/src/ld.c -@@ -21,7 +21,7 @@ - - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/ldgeneric.c -=================================================================== ---- elfutils-0.166.orig/src/ldgeneric.c -+++ elfutils-0.166/src/ldgeneric.c -@@ -23,7 +23,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/ldlex.c 
-=================================================================== ---- elfutils-0.166.orig/src/ldlex.c -+++ elfutils-0.166/src/ldlex.c -@@ -1106,7 +1106,7 @@ char *ldtext; - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/ldscript.c -=================================================================== ---- elfutils-0.166.orig/src/ldscript.c -+++ elfutils-0.166/src/ldscript.c -@@ -95,7 +95,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/nm.c -=================================================================== ---- elfutils-0.166.orig/src/nm.c -+++ elfutils-0.166/src/nm.c -@@ -26,7 +26,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/objdump.c -=================================================================== ---- elfutils-0.166.orig/src/objdump.c -+++ elfutils-0.166/src/objdump.c -@@ -21,7 +21,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/ranlib.c -=================================================================== ---- elfutils-0.166.orig/src/ranlib.c -+++ elfutils-0.166/src/ranlib.c -@@ -24,7 +24,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/readelf.c -=================================================================== ---- elfutils-0.166.orig/src/readelf.c -+++ elfutils-0.166/src/readelf.c -@@ -25,7 +25,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/size.c -=================================================================== ---- elfutils-0.166.orig/src/size.c -+++ elfutils-0.166/src/size.c -@@ -21,7 +21,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/stack.c -=================================================================== ---- elfutils-0.166.orig/src/stack.c -+++ elfutils-0.166/src/stack.c -@@ -18,7 +18,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/strings.c -=================================================================== ---- elfutils-0.166.orig/src/strings.c -+++ elfutils-0.166/src/strings.c -@@ -25,7 +25,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/strip.c -=================================================================== ---- elfutils-0.166.orig/src/strip.c -+++ elfutils-0.166/src/strip.c -@@ -24,7 +24,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/src/unstrip.c -=================================================================== ---- elfutils-0.166.orig/src/unstrip.c -+++ elfutils-0.166/src/unstrip.c -@@ -31,7 +31,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/addrscopes.c -=================================================================== ---- elfutils-0.166.orig/tests/addrscopes.c -+++ elfutils-0.166/tests/addrscopes.c -@@ -25,7 +25,7 @@ - #include - #include - #include --#include -+#include - #include - - -Index: elfutils-0.166/tests/allregs.c -=================================================================== ---- elfutils-0.166.orig/tests/allregs.c -+++ elfutils-0.166/tests/allregs.c -@@ -21,7 +21,7 @@ - #include - 
#include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/backtrace-data.c -=================================================================== ---- elfutils-0.166.orig/tests/backtrace-data.c -+++ elfutils-0.166/tests/backtrace-data.c -@@ -27,7 +27,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #if defined(__x86_64__) && defined(__linux__) -Index: elfutils-0.166/tests/backtrace-dwarf.c -=================================================================== ---- elfutils-0.166.orig/tests/backtrace-dwarf.c -+++ elfutils-0.166/tests/backtrace-dwarf.c -@@ -22,7 +22,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/backtrace.c -=================================================================== ---- elfutils-0.166.orig/tests/backtrace.c -+++ elfutils-0.166/tests/backtrace.c -@@ -24,7 +24,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #ifdef __linux__ -Index: elfutils-0.166/tests/buildid.c -=================================================================== ---- elfutils-0.166.orig/tests/buildid.c -+++ elfutils-0.166/tests/buildid.c -@@ -23,7 +23,7 @@ - #include ELFUTILS_HEADER(elf) - #include ELFUTILS_HEADER(dwelf) - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/debugaltlink.c -=================================================================== ---- elfutils-0.166.orig/tests/debugaltlink.c -+++ elfutils-0.166/tests/debugaltlink.c -@@ -23,7 +23,7 @@ - #include ELFUTILS_HEADER(dw) - #include ELFUTILS_HEADER(dwelf) - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/debuglink.c -=================================================================== ---- elfutils-0.166.orig/tests/debuglink.c -+++ elfutils-0.166/tests/debuglink.c -@@ -21,7 +21,7 @@ - #include - #include ELFUTILS_HEADER(dwelf) - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/deleted.c -=================================================================== ---- elfutils-0.166.orig/tests/deleted.c -+++ elfutils-0.166/tests/deleted.c -@@ -21,7 +21,7 @@ - #include - #include - #include --#include -+#include - #include - #ifdef __linux__ - #include -Index: elfutils-0.166/tests/dwfl-addr-sect.c -=================================================================== ---- elfutils-0.166.orig/tests/dwfl-addr-sect.c -+++ elfutils-0.166/tests/dwfl-addr-sect.c -@@ -23,7 +23,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include ELFUTILS_HEADER(dwfl) -Index: elfutils-0.166/tests/dwfl-bug-addr-overflow.c -=================================================================== ---- elfutils-0.166.orig/tests/dwfl-bug-addr-overflow.c -+++ elfutils-0.166/tests/dwfl-bug-addr-overflow.c -@@ -20,7 +20,7 @@ - #include - #include - #include --#include -+#include - #include - #include ELFUTILS_HEADER(dwfl) - -Index: elfutils-0.166/tests/dwfl-bug-fd-leak.c -=================================================================== ---- elfutils-0.166.orig/tests/dwfl-bug-fd-leak.c -+++ elfutils-0.166/tests/dwfl-bug-fd-leak.c -@@ -24,7 +24,7 @@ - #include - #include - #include --#include -+#include - #include - #include - -Index: elfutils-0.166/tests/dwfl-bug-getmodules.c -=================================================================== ---- elfutils-0.166.orig/tests/dwfl-bug-getmodules.c -+++ 
elfutils-0.166/tests/dwfl-bug-getmodules.c -@@ -18,7 +18,7 @@ - #include - #include ELFUTILS_HEADER(dwfl) - --#include -+#include - - static const Dwfl_Callbacks callbacks = - { -Index: elfutils-0.166/tests/dwfl-report-elf-align.c -=================================================================== ---- elfutils-0.166.orig/tests/dwfl-report-elf-align.c -+++ elfutils-0.166/tests/dwfl-report-elf-align.c -@@ -20,7 +20,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/dwfllines.c -=================================================================== ---- elfutils-0.166.orig/tests/dwfllines.c -+++ elfutils-0.166/tests/dwfllines.c -@@ -27,7 +27,7 @@ - #include - #include - #include --#include -+#include - - int - main (int argc, char *argv[]) -Index: elfutils-0.166/tests/dwflmodtest.c -=================================================================== ---- elfutils-0.166.orig/tests/dwflmodtest.c -+++ elfutils-0.166/tests/dwflmodtest.c -@@ -23,7 +23,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include ELFUTILS_HEADER(dwfl) -Index: elfutils-0.166/tests/dwflsyms.c -=================================================================== ---- elfutils-0.166.orig/tests/dwflsyms.c -+++ elfutils-0.166/tests/dwflsyms.c -@@ -25,7 +25,7 @@ - #include - #include - #include --#include -+#include - #include - - static const char * -Index: elfutils-0.166/tests/early-offscn.c -=================================================================== ---- elfutils-0.166.orig/tests/early-offscn.c -+++ elfutils-0.166/tests/early-offscn.c -@@ -19,7 +19,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/ecp.c -=================================================================== ---- elfutils-0.166.orig/tests/ecp.c -+++ elfutils-0.166/tests/ecp.c -@@ -20,7 +20,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/find-prologues.c -=================================================================== ---- elfutils-0.166.orig/tests/find-prologues.c -+++ elfutils-0.166/tests/find-prologues.c -@@ -25,7 +25,7 @@ - #include - #include - #include --#include -+#include - #include - #include - -Index: elfutils-0.166/tests/funcretval.c -=================================================================== ---- elfutils-0.166.orig/tests/funcretval.c -+++ elfutils-0.166/tests/funcretval.c -@@ -25,7 +25,7 @@ - #include - #include - #include --#include -+#include - #include - #include - -Index: elfutils-0.166/tests/funcscopes.c -=================================================================== ---- elfutils-0.166.orig/tests/funcscopes.c -+++ elfutils-0.166/tests/funcscopes.c -@@ -25,7 +25,7 @@ - #include - #include - #include --#include -+#include - #include - #include - -Index: elfutils-0.166/tests/getsrc_die.c -=================================================================== ---- elfutils-0.166.orig/tests/getsrc_die.c -+++ elfutils-0.166/tests/getsrc_die.c -@@ -19,7 +19,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/line2addr.c -=================================================================== ---- elfutils-0.166.orig/tests/line2addr.c -+++ elfutils-0.166/tests/line2addr.c -@@ -26,7 +26,7 @@ - #include - #include - #include --#include -+#include - - - static void -Index: elfutils-0.166/tests/low_high_pc.c 
-=================================================================== ---- elfutils-0.166.orig/tests/low_high_pc.c -+++ elfutils-0.166/tests/low_high_pc.c -@@ -25,7 +25,7 @@ - #include - #include - #include --#include -+#include - #include - #include - -Index: elfutils-0.166/tests/md5-sha1-test.c -=================================================================== ---- elfutils-0.166.orig/tests/md5-sha1-test.c -+++ elfutils-0.166/tests/md5-sha1-test.c -@@ -19,7 +19,7 @@ - #endif - - #include --#include -+#include - - #include "md5.h" - #include "sha1.h" -Index: elfutils-0.166/tests/rdwrmmap.c -=================================================================== ---- elfutils-0.166.orig/tests/rdwrmmap.c -+++ elfutils-0.166/tests/rdwrmmap.c -@@ -19,7 +19,7 @@ - #endif - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/saridx.c -=================================================================== ---- elfutils-0.166.orig/tests/saridx.c -+++ elfutils-0.166/tests/saridx.c -@@ -17,7 +17,7 @@ - - #include - --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/sectiondump.c -=================================================================== ---- elfutils-0.166.orig/tests/sectiondump.c -+++ elfutils-0.166/tests/sectiondump.c -@@ -18,7 +18,7 @@ - #include - - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/varlocs.c -=================================================================== ---- elfutils-0.166.orig/tests/varlocs.c -+++ elfutils-0.166/tests/varlocs.c -@@ -25,7 +25,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/tests/vdsosyms.c -=================================================================== ---- elfutils-0.166.orig/tests/vdsosyms.c -+++ elfutils-0.166/tests/vdsosyms.c -@@ -18,7 +18,7 @@ - #include - #include - #include --#include -+#include - #include - #include - #include -Index: elfutils-0.166/libelf/elf_getarsym.c -=================================================================== ---- elfutils-0.166.orig/libelf/elf_getarsym.c -+++ elfutils-0.166/libelf/elf_getarsym.c -@@ -302,7 +302,7 @@ elf_getarsym (Elf *elf, size_t *ptr) - arsym[cnt].as_off = (*u32)[cnt]; - - arsym[cnt].as_hash = _dl_elf_hash (str_data); -- str_data = rawmemchr (str_data, '\0') + 1; -+ str_data = memchr (str_data, '\0', SIZE_MAX) + 1; - } - - /* At the end a special entry. 
*/ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-elf_getarsym-Silence-Werror-maybe-uninitialized-fals.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-elf_getarsym-Silence-Werror-maybe-uninitialized-fals.patch deleted file mode 100644 index 3754c1c36..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-elf_getarsym-Silence-Werror-maybe-uninitialized-fals.patch +++ /dev/null @@ -1,35 +0,0 @@ -From 668accf322fd7185e273bfd50b84320e71d9de5a Mon Sep 17 00:00:00 2001 -From: Martin Jansa -Date: Fri, 10 Apr 2015 00:29:18 +0200 -Subject: [PATCH] elf_getarsym: Silence -Werror=maybe-uninitialized false - positive - -Upstream-Status: Pending -Signed-off-by: Martin Jansa ---- - libelf/elf_getarsym.c | 9 +++++++-- - 1 file changed, 7 insertions(+), 2 deletions(-) - -diff --git a/libelf/elf_getarsym.c b/libelf/elf_getarsym.c -index d0bb28a..08954d2 100644 ---- a/libelf/elf_getarsym.c -+++ b/libelf/elf_getarsym.c -@@ -165,8 +165,13 @@ elf_getarsym (elf, ptr) - int w = index64_p ? 8 : 4; - - /* We have an archive. The first word in there is the number of -- entries in the table. */ -- uint64_t n; -+ entries in the table. -+ Set to SIZE_MAX just to silence -Werror=maybe-uninitialized -+ elf_getarsym.c:290:9: error: 'n' may be used uninitialized in this function -+ The read_number_entries function doesn't initialize n only when returning -+ -1 which in turn ensures to jump over usage of this uninitialized variable. -+ */ -+ uint64_t n = SIZE_MAX; - size_t off = elf->start_offset + SARMAG + sizeof (struct ar_hdr); - if (read_number_entries (&n, elf, &off, index64_p) < 0) - { --- -2.3.5 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-fix-a-stack-usage-warning.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-fix-a-stack-usage-warning.patch deleted file mode 100644 index 6923bf705..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-fix-a-stack-usage-warning.patch +++ /dev/null @@ -1,28 +0,0 @@ -[PATCH] fix a stack-usage warning - -Upstream-Status: Pending - -not use a variable to as a array size, otherwise the warning to error that -stack usage might be unbounded [-Werror=stack-usage=] will happen - -Signed-off-by: Roy Li ---- - backends/ppc_initreg.c | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/backends/ppc_initreg.c b/backends/ppc_initreg.c -index 64f5379..52dde3e 100644 ---- a/backends/ppc_initreg.c -+++ b/backends/ppc_initreg.c -@@ -93,7 +93,7 @@ ppc_set_initial_registers_tid (pid_t tid __attribute__ ((unused)), - return false; - } - const size_t gprs = sizeof (user_regs.r.gpr) / sizeof (*user_regs.r.gpr); -- Dwarf_Word dwarf_regs[gprs]; -+ Dwarf_Word dwarf_regs[sizeof (user_regs.r.gpr) / sizeof (*user_regs.r.gpr)]; - for (unsigned gpr = 0; gpr < gprs; gpr++) - dwarf_regs[gpr] = user_regs.r.gpr[gpr]; - if (! 
setfunc (0, gprs, dwarf_regs, arg)) --- -1.9.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-remove-the-unneed-checking.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-remove-the-unneed-checking.patch deleted file mode 100644 index 5be92d705..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0001-remove-the-unneed-checking.patch +++ /dev/null @@ -1,38 +0,0 @@ -Disable the test to convert euc-jp - -Remove the test "Test against HP-UX 11.11 bug: -No converter from EUC-JP to UTF-8 is provided" -since we don't support HP-UX and if the euc-jp is not -installed on the host, the dependence will be built without -iconv support and will cause guild-native building fail. - -Upstream-Status: Inappropriate [OE specific] - -Signed-off-by: Roy Li ---- - m4/iconv.m4 | 2 ++ - 1 file changed, 2 insertions(+) - -diff --git a/m4/iconv.m4 b/m4/iconv.m4 -index a503646..299f1eb 100644 ---- a/m4/iconv.m4 -+++ b/m4/iconv.m4 -@@ -159,6 +159,7 @@ int main () - } - } - #endif -+#if 0 - /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is - provided. */ - if (/* Try standardized names. */ -@@ -170,6 +171,7 @@ int main () - /* Try HP-UX names. */ - && iconv_open ("utf8", "eucJP") == (iconv_t)(-1)) - result |= 16; -+#endif - return result; - }]])], - [am_cv_func_iconv_works=yes], --- -2.0.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0002-Add-support-for-mips64-abis-in-mips_retval.c.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0002-Add-support-for-mips64-abis-in-mips_retval.c.patch deleted file mode 100644 index 72125c9ff..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0002-Add-support-for-mips64-abis-in-mips_retval.c.patch +++ /dev/null @@ -1,168 +0,0 @@ -From fdaab18a65ed2529656baa64cb6169f34d7e507b Mon Sep 17 00:00:00 2001 -From: James Cowgill -Date: Mon, 5 Jan 2015 15:17:01 +0000 -Subject: [PATCH 2/3] Add support for mips64 abis in mips_retval.c - -Signed-off-by: James Cowgill ---- - backends/mips_retval.c | 104 ++++++++++++++++++++++++++++++++++++++++++++----- - 1 file changed, 94 insertions(+), 10 deletions(-) - -diff --git a/backends/mips_retval.c b/backends/mips_retval.c -index 33f12a7..d5c6ef0 100644 ---- a/backends/mips_retval.c -+++ b/backends/mips_retval.c -@@ -91,6 +91,8 @@ enum mips_abi find_mips_abi(Elf *elf) - default: - if ((elf_flags & EF_MIPS_ABI2)) - return MIPS_ABI_N32; -+ else if ((ehdr->e_ident[EI_CLASS] == ELFCLASS64)) -+ return MIPS_ABI_N64; - } - - /* GCC creates a pseudo-section whose name describes the ABI. 
*/ -@@ -195,6 +197,57 @@ static const Dwarf_Op loc_aggregate[] = - }; - #define nloc_aggregate 1 - -+/* Test if a struct member is a float */ -+static int is_float_child(Dwarf_Die *childdie) -+{ -+ /* Test if this is actually a struct member */ -+ if (dwarf_tag(childdie) != DW_TAG_member) -+ return 0; -+ -+ /* Get type of member */ -+ Dwarf_Attribute attr_mem; -+ Dwarf_Die child_type_mem; -+ Dwarf_Die *child_typedie = -+ dwarf_formref_die(dwarf_attr_integrate(childdie, -+ DW_AT_type, -+ &attr_mem), &child_type_mem); -+ -+ if (dwarf_tag(child_typedie) != DW_TAG_base_type) -+ return 0; -+ -+ /* Get base subtype */ -+ Dwarf_Word encoding; -+ if (dwarf_formudata (dwarf_attr_integrate (child_typedie, -+ DW_AT_encoding, -+ &attr_mem), &encoding) != 0) -+ return 0; -+ -+ return encoding == DW_ATE_float; -+} -+ -+/* Returns the number of fpregs which can be returned in the given struct */ -+static int get_struct_fpregs(Dwarf_Die *structtypedie) -+{ -+ Dwarf_Die child_mem; -+ int fpregs = 0; -+ -+ /* Get first structure member */ -+ if (dwarf_child(structtypedie, &child_mem) != 0) -+ return 0; -+ -+ do -+ { -+ /* Ensure this register is a float */ -+ if (!is_float_child(&child_mem)) -+ return 0; -+ -+ fpregs++; -+ } -+ while (dwarf_siblingof (&child_mem, &child_mem) == 0); -+ -+ return fpregs; -+} -+ - int - mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) - { -@@ -240,6 +293,7 @@ mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) - tag = dwarf_tag (typedie); - } - -+ Dwarf_Word size; - switch (tag) - { - case -1: -@@ -258,8 +312,6 @@ mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) - case DW_TAG_enumeration_type: - case DW_TAG_pointer_type: - case DW_TAG_ptr_to_member_type: -- { -- Dwarf_Word size; - if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_byte_size, - &attr_mem), &size) != 0) - { -@@ -289,7 +341,7 @@ mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) - if (size <= 4*regsize && abi == MIPS_ABI_O32) - return nloc_fpregquad; - -- goto aggregate; -+ goto large; - } - } - *locp = ABI_LOC(loc_intreg, regsize); -@@ -298,18 +350,50 @@ mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) - if (size <= 2*regsize) - return nloc_intregpair; - -- /* Else fall through. Shouldn't happen though (at least with gcc) */ -- } -+ /* Else pass in memory. Shouldn't happen though (at least with gcc) */ -+ goto large; - - case DW_TAG_structure_type: - case DW_TAG_class_type: - case DW_TAG_union_type: -- case DW_TAG_array_type: -- aggregate: -- /* XXX TODO: Can't handle structure return with other ABI's yet :-/ */ -- if ((abi != MIPS_ABI_O32) && (abi != MIPS_ABI_O64)) -- return -2; -+ /* Handle special cases for structures <= 128 bytes in newer ABIs */ -+ if (abi == MIPS_ABI_EABI32 || abi == MIPS_ABI_EABI64 || -+ abi == MIPS_ABI_N32 || abi == MIPS_ABI_N64) -+ { -+ if (dwarf_aggregate_size (typedie, &size) == 0 && size <= 16) -+ { -+ /* -+ * Special case in N64 / N32 - -+ * structures containing only floats are returned in fp regs. -+ * Everything else is returned in integer regs. 
-+ */ -+ if (tag != DW_TAG_union_type && -+ (abi == MIPS_ABI_N32 || abi == MIPS_ABI_N64)) -+ { -+ int num_fpregs = get_struct_fpregs(typedie); -+ if (num_fpregs == 1 || num_fpregs == 2) -+ { -+ *locp = loc_fpreg; -+ if (num_fpregs == 1) -+ return nloc_fpreg; -+ else -+ return nloc_fpregpair; -+ } -+ } -+ -+ *locp = loc_intreg; -+ if (size <= 8) -+ return nloc_intreg; -+ else -+ return nloc_intregpair; -+ } -+ } -+ -+ /* Fallthrough to handle large types */ - -+ case DW_TAG_array_type: -+ large: -+ /* Return large structures in memory */ - *locp = loc_aggregate; - return nloc_aggregate; - } --- -2.1.4 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0003-Add-mips-n64-relocation-format-hack.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0003-Add-mips-n64-relocation-format-hack.patch deleted file mode 100644 index 14b7985ce..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/0003-Add-mips-n64-relocation-format-hack.patch +++ /dev/null @@ -1,226 +0,0 @@ -From 59d4b8c48e5040af7e02b34eb26ea602ec82a38e Mon Sep 17 00:00:00 2001 -From: James Cowgill -Date: Mon, 5 Jan 2015 15:17:02 +0000 -Subject: [PATCH 3/3] Add mips n64 relocation format hack - -MIPSEL N64 ELF files use a slightly different format for storing relocation -entries which is incompatible with the normal R_SYM / R_INFO macros. -To workaround this, we rearrange the bytes in the relocation's r_info field -when reading and writing the relocations. - -This patch also ensures that strip.c sets the correct value of e_machine -before manipulating relocations so that these changes take effect. - -Signed-off-by: James Cowgill ---- - libelf/gelf_getrel.c | 25 +++++++++++++++++++++++-- - libelf/gelf_getrela.c | 25 +++++++++++++++++++++++-- - libelf/gelf_update_rel.c | 20 +++++++++++++++++++- - libelf/gelf_update_rela.c | 20 +++++++++++++++++++- - src/strip.c | 17 +++++++++++++++++ - 5 files changed, 101 insertions(+), 6 deletions(-) - -Index: elfutils-0.164/libelf/gelf_getrel.c -=================================================================== ---- elfutils-0.164.orig/libelf/gelf_getrel.c -+++ elfutils-0.164/libelf/gelf_getrel.c -@@ -36,6 +36,7 @@ - - #include "libelfP.h" - -+#define EF_MIPS_ABI 0x0000F000 - - GElf_Rel * - gelf_getrel (Elf_Data *data, int ndx, GElf_Rel *dst) -@@ -89,8 +90,28 @@ gelf_getrel (Elf_Data *data, int ndx, GE - result = NULL; - } - else -- result = memcpy (dst, &((Elf64_Rel *) data_scn->d.d_buf)[ndx], -- sizeof (Elf64_Rel)); -+ { -+ GElf_Ehdr hdr; -+ result = memcpy (dst, &((Elf64_Rel *) data_scn->d.d_buf)[ndx], -+ sizeof (Elf64_Rel)); -+ -+ if (gelf_getehdr(scn->elf, &hdr) != NULL && -+ hdr.e_ident[EI_DATA] == ELFDATA2LSB && -+ hdr.e_machine == EM_MIPS && -+ (hdr.e_flags & EF_MIPS_ABI) == 0) -+ { -+ /* -+ * The relocation format is mangled on MIPSEL N64 -+ * We'll adjust it so at least R_SYM will work on it -+ */ -+ GElf_Xword r_info = dst->r_info; -+ dst->r_info = (r_info << 32) | -+ ((r_info >> 8) & 0xFF000000) | -+ ((r_info >> 24) & 0x00FF0000) | -+ ((r_info >> 40) & 0x0000FF00) | -+ ((r_info >> 56) & 0x000000FF); -+ } -+ } - } - - rwlock_unlock (scn->elf->lock); -Index: elfutils-0.164/libelf/gelf_getrela.c -=================================================================== ---- elfutils-0.164.orig/libelf/gelf_getrela.c -+++ elfutils-0.164/libelf/gelf_getrela.c -@@ -36,6 +36,7 @@ - - #include "libelfP.h" - -+#define EF_MIPS_ABI 0x0000F000 - - GElf_Rela * - gelf_getrela (Elf_Data *data, int ndx, GElf_Rela *dst) 
-@@ -90,8 +91,28 @@ gelf_getrela (Elf_Data *data, int ndx, G - result = NULL; - } - else -- result = memcpy (dst, &((Elf64_Rela *) data_scn->d.d_buf)[ndx], -- sizeof (Elf64_Rela)); -+ { -+ GElf_Ehdr hdr; -+ result = memcpy (dst, &((Elf64_Rela *) data_scn->d.d_buf)[ndx], -+ sizeof (Elf64_Rela)); -+ -+ if (gelf_getehdr(scn->elf, &hdr) != NULL && -+ hdr.e_ident[EI_DATA] == ELFDATA2LSB && -+ hdr.e_machine == EM_MIPS && -+ (hdr.e_flags & EF_MIPS_ABI) == 0) -+ { -+ /* -+ * The relocation format is mangled on MIPSEL N64 -+ * We'll adjust it so at least R_SYM will work on it -+ */ -+ GElf_Xword r_info = dst->r_info; -+ dst->r_info = (r_info << 32) | -+ ((r_info >> 8) & 0xFF000000) | -+ ((r_info >> 24) & 0x00FF0000) | -+ ((r_info >> 40) & 0x0000FF00) | -+ ((r_info >> 56) & 0x000000FF); -+ } -+ } - } - - rwlock_unlock (scn->elf->lock); -Index: elfutils-0.164/libelf/gelf_update_rel.c -=================================================================== ---- elfutils-0.164.orig/libelf/gelf_update_rel.c -+++ elfutils-0.164/libelf/gelf_update_rel.c -@@ -36,6 +36,7 @@ - - #include "libelfP.h" - -+#define EF_MIPS_ABI 0x0000F000 - - int - gelf_update_rel (Elf_Data *dst, int ndx, GElf_Rel *src) -@@ -86,6 +87,9 @@ gelf_update_rel (Elf_Data *dst, int ndx, - } - else - { -+ GElf_Ehdr hdr; -+ GElf_Rel value = *src; -+ - /* Check whether we have to resize the data buffer. */ - if (INVALID_NDX (ndx, Elf64_Rel, &data_scn->d)) - { -@@ -93,7 +97,21 @@ gelf_update_rel (Elf_Data *dst, int ndx, - goto out; - } - -- ((Elf64_Rel *) data_scn->d.d_buf)[ndx] = *src; -+ if (gelf_getehdr(scn->elf, &hdr) != NULL && -+ hdr.e_ident[EI_DATA] == ELFDATA2LSB && -+ hdr.e_machine == EM_MIPS && -+ (hdr.e_flags & EF_MIPS_ABI) == 0) -+ { -+ /* Undo the MIPSEL N64 hack from gelf_getrel */ -+ GElf_Xword r_info = value.r_info; -+ value.r_info = (r_info >> 32) | -+ ((r_info << 8) & 0x000000FF00000000) | -+ ((r_info << 24) & 0x0000FF0000000000) | -+ ((r_info << 40) & 0x00FF000000000000) | -+ ((r_info << 56) & 0xFF00000000000000); -+ } -+ -+ ((Elf64_Rel *) data_scn->d.d_buf)[ndx] = value; - } - - result = 1; -Index: elfutils-0.164/libelf/gelf_update_rela.c -=================================================================== ---- elfutils-0.164.orig/libelf/gelf_update_rela.c -+++ elfutils-0.164/libelf/gelf_update_rela.c -@@ -36,6 +36,7 @@ - - #include "libelfP.h" - -+#define EF_MIPS_ABI 0x0000F000 - - int - gelf_update_rela (Elf_Data *dst, int ndx, GElf_Rela *src) -@@ -89,6 +90,9 @@ gelf_update_rela (Elf_Data *dst, int ndx - } - else - { -+ GElf_Ehdr hdr; -+ GElf_Rela value = *src; -+ - /* Check whether we have to resize the data buffer. 
*/ - if (INVALID_NDX (ndx, Elf64_Rela, &data_scn->d)) - { -@@ -96,7 +100,21 @@ gelf_update_rela (Elf_Data *dst, int ndx - goto out; - } - -- ((Elf64_Rela *) data_scn->d.d_buf)[ndx] = *src; -+ if (gelf_getehdr(scn->elf, &hdr) != NULL && -+ hdr.e_ident[EI_DATA] == ELFDATA2LSB && -+ hdr.e_machine == EM_MIPS && -+ (hdr.e_flags & EF_MIPS_ABI) == 0) -+ { -+ /* Undo the MIPSEL N64 hack from gelf_getrel */ -+ GElf_Xword r_info = value.r_info; -+ value.r_info = (r_info >> 32) | -+ ((r_info << 8) & 0x000000FF00000000) | -+ ((r_info << 24) & 0x0000FF0000000000) | -+ ((r_info << 40) & 0x00FF000000000000) | -+ ((r_info << 56) & 0xFF00000000000000); -+ } -+ -+ ((Elf64_Rela *) data_scn->d.d_buf)[ndx] = value; - } - - result = 1; -Index: elfutils-0.164/src/strip.c -=================================================================== ---- elfutils-0.164.orig/src/strip.c -+++ elfutils-0.164/src/strip.c -@@ -546,6 +546,23 @@ handle_elf (int fd, Elf *elf, const char - goto fail; - } - -+ /* Copy identity part of the ELF header now */ -+ newehdr = gelf_getehdr (newelf, &newehdr_mem); -+ if (newehdr == NULL) -+ INTERNAL_ERROR (fname); -+ -+ memcpy (newehdr->e_ident, ehdr->e_ident, EI_NIDENT); -+ newehdr->e_type = ehdr->e_type; -+ newehdr->e_machine = ehdr->e_machine; -+ newehdr->e_version = ehdr->e_version; -+ -+ if (gelf_update_ehdr (newelf, newehdr) == 0) -+ { -+ error (0, 0, gettext ("%s: error while creating ELF header: %s"), -+ fname, elf_errmsg (-1)); -+ return 1; -+ } -+ - /* Copy over the old program header if needed. */ - if (ehdr->e_type != ET_REL) - for (cnt = 0; cnt < phnum; ++cnt) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/Fix_one_GCC7_warning.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/Fix_one_GCC7_warning.patch deleted file mode 100644 index 25f5e1482..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/Fix_one_GCC7_warning.patch +++ /dev/null @@ -1,45 +0,0 @@ -From 8e2ab18b874d1fda06243ad00209d44e2992928a Mon Sep 17 00:00:00 2001 -From: Mark Wielaard -Date: Sun, 12 Feb 2017 21:51:34 +0100 -Subject: [PATCH 1/2] libasm: Fix one GCC7 -Wformat-truncation=2 warning. - -Make sure that if we have really lots of labels the tempsym doesn't get -truncated because it is too small to hold the whole name. - -This doesn't enable -Wformat-truncation=2 or fix other "issues" pointed -out by enabling this warning because there are currently some issues -with it. https://gcc.gnu.org/bugzilla/show_bug.cgi?id=79448 - -Signed-off-by: Mark Wielaard - -Upstream-Status: Backport (https://sourceware.org/git/?p=elfutils.git;a=commit;h=93c51144c3f664d4e9709da75a1d0fa00ea0fe95) -Signed-off-by: Joshua Lock ---- - libasm/asm_newsym.c | 6 ++++-- - 1 file changed, 4 insertions(+), 2 deletions(-) - -diff --git a/libasm/asm_newsym.c b/libasm/asm_newsym.c -index 7f522910..76482bb2 100644 ---- a/libasm/asm_newsym.c -+++ b/libasm/asm_newsym.c -@@ -1,5 +1,5 @@ - /* Define new symbol for current position in given section. -- Copyright (C) 2002, 2005 Red Hat, Inc. -+ Copyright (C) 2002, 2005, 2017 Red Hat, Inc. - This file is part of elfutils. - Written by Ulrich Drepper , 2002. - -@@ -44,7 +44,9 @@ AsmSym_t * - asm_newsym (AsmScn_t *asmscn, const char *name, GElf_Xword size, - int type, int binding) - { --#define TEMPSYMLEN 10 -+/* We don't really expect labels with many digits, but in theory it could -+ be 10 digits (plus ".L" and a zero terminator). 
*/ -+#define TEMPSYMLEN 13 - char tempsym[TEMPSYMLEN]; - AsmSym_t *result; - --- -2.13.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/aarch64_uio.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/aarch64_uio.patch deleted file mode 100644 index 38dc57bef..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/aarch64_uio.patch +++ /dev/null @@ -1,47 +0,0 @@ -Fix build on aarch64/musl - -Errors - -invalid operands to binary & (have 'long double' and 'unsigned int') - -error: redefinition - of 'struct iovec' - struct iovec { void *iov_base; size_t iov_len; }; - ^ -Upstream-Status: Pending -Signed-off-by: Khem Raj -Index: elfutils-0.163/backends/aarch64_initreg.c -=================================================================== ---- elfutils-0.163.orig/backends/aarch64_initreg.c -+++ elfutils-0.163/backends/aarch64_initreg.c -@@ -33,7 +33,7 @@ - #include "system.h" - #include - #ifdef __aarch64__ --# include -+# include - # include - # include - /* Deal with old glibc defining user_pt_regs instead of user_regs_struct. */ -@@ -82,7 +82,7 @@ aarch64_set_initial_registers_tid (pid_t - - Dwarf_Word dwarf_fregs[32]; - for (int r = 0; r < 32; r++) -- dwarf_fregs[r] = fregs.vregs[r] & 0xFFFFFFFF; -+ dwarf_fregs[r] = (unsigned int)fregs.vregs[r] & 0xFFFFFFFF; - - if (! setfunc (64, 32, dwarf_fregs, arg)) - return false; -Index: elfutils-0.163/backends/arm_initreg.c -=================================================================== ---- elfutils-0.163.orig/backends/arm_initreg.c -+++ elfutils-0.163/backends/arm_initreg.c -@@ -37,7 +37,7 @@ - #endif - - #ifdef __aarch64__ --# include -+# include - # include - # include - /* Deal with old glibc defining user_pt_regs instead of user_regs_struct. */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/arm_backend.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/arm_backend.diff deleted file mode 100644 index 9d47f95f2..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/arm_backend.diff +++ /dev/null @@ -1,600 +0,0 @@ -Index: elfutils-0.164/backends/arm_init.c -=================================================================== ---- elfutils-0.164.orig/backends/arm_init.c -+++ elfutils-0.164/backends/arm_init.c -@@ -35,20 +35,31 @@ - #define RELOC_PREFIX R_ARM_ - #include "libebl_CPU.h" - -+#include "libebl_arm.h" -+ - /* This defines the common reloc hooks based on arm_reloc.def. */ - #include "common-reloc.c" - - - const char * --arm_init (Elf *elf __attribute__ ((unused)), -+arm_init (Elf *elf, - GElf_Half machine __attribute__ ((unused)), - Ebl *eh, - size_t ehlen) - { -+ int soft_float = 0; -+ - /* Check whether the Elf_BH object has a sufficent size. */ - if (ehlen < sizeof (Ebl)) - return NULL; - -+ if (elf) { -+ GElf_Ehdr ehdr_mem; -+ GElf_Ehdr *ehdr = gelf_getehdr (elf, &ehdr_mem); -+ if (ehdr && (ehdr->e_flags & EF_ARM_SOFT_FLOAT)) -+ soft_float = 1; -+ } -+ - /* We handle it. 
*/ - eh->name = "ARM"; - arm_init_reloc (eh); -@@ -60,7 +71,10 @@ arm_init (Elf *elf __attribute__ ((unuse - HOOK (eh, core_note); - HOOK (eh, auxv_info); - HOOK (eh, check_object_attribute); -- HOOK (eh, return_value_location); -+ if (soft_float) -+ eh->return_value_location = arm_return_value_location_soft; -+ else -+ eh->return_value_location = arm_return_value_location_hard; - HOOK (eh, abi_cfi); - HOOK (eh, check_reloc_target_type); - HOOK (eh, symbol_type_name); -Index: elfutils-0.164/backends/arm_regs.c -=================================================================== ---- elfutils-0.164.orig/backends/arm_regs.c -+++ elfutils-0.164/backends/arm_regs.c -@@ -31,6 +31,7 @@ - #endif - - #include -+#include - #include - - #define BACKEND arm_ -@@ -76,6 +77,9 @@ arm_register_info (Ebl *ebl __attribute_ - break; - - case 16 + 0 ... 16 + 7: -+ /* AADWARF says that there are no registers in that range, -+ * but gcc maps FPA registers here -+ */ - regno += 96 - 16; - /* Fall through. */ - case 96 + 0 ... 96 + 7: -@@ -87,11 +91,139 @@ arm_register_info (Ebl *ebl __attribute_ - namelen = 2; - break; - -+ case 64 + 0 ... 64 + 9: -+ *setname = "VFP"; -+ *bits = 32; -+ *type = DW_ATE_float; -+ name[0] = 's'; -+ name[1] = regno - 64 + '0'; -+ namelen = 2; -+ break; -+ -+ case 64 + 10 ... 64 + 31: -+ *setname = "VFP"; -+ *bits = 32; -+ *type = DW_ATE_float; -+ name[0] = 's'; -+ name[1] = (regno - 64) / 10 + '0'; -+ name[2] = (regno - 64) % 10 + '0'; -+ namelen = 3; -+ break; -+ -+ case 104 + 0 ... 104 + 7: -+ /* XXX TODO: -+ * This can be either intel wireless MMX general purpose/control -+ * registers or xscale accumulator, which have different usage. -+ * We only have the intel wireless MMX here now. -+ * The name needs to be changed for the xscale accumulator too. */ -+ *setname = "MMX"; -+ *type = DW_ATE_unsigned; -+ *bits = 32; -+ memcpy(name, "wcgr", 4); -+ name[4] = regno - 104 + '0'; -+ namelen = 5; -+ break; -+ -+ case 112 + 0 ... 112 + 9: -+ *setname = "MMX"; -+ *type = DW_ATE_unsigned; -+ *bits = 64; -+ name[0] = 'w'; -+ name[1] = 'r'; -+ name[2] = regno - 112 + '0'; -+ namelen = 3; -+ break; -+ -+ case 112 + 10 ... 112 + 15: -+ *setname = "MMX"; -+ *type = DW_ATE_unsigned; -+ *bits = 64; -+ name[0] = 'w'; -+ name[1] = 'r'; -+ name[2] = '1'; -+ name[3] = regno - 112 - 10 + '0'; -+ namelen = 4; -+ break; -+ - case 128: -+ *setname = "state"; - *type = DW_ATE_unsigned; - return stpcpy (name, "spsr") + 1 - name; - -+ case 129: -+ *setname = "state"; -+ *type = DW_ATE_unsigned; -+ return stpcpy(name, "spsr_fiq") + 1 - name; -+ -+ case 130: -+ *setname = "state"; -+ *type = DW_ATE_unsigned; -+ return stpcpy(name, "spsr_irq") + 1 - name; -+ -+ case 131: -+ *setname = "state"; -+ *type = DW_ATE_unsigned; -+ return stpcpy(name, "spsr_abt") + 1 - name; -+ -+ case 132: -+ *setname = "state"; -+ *type = DW_ATE_unsigned; -+ return stpcpy(name, "spsr_und") + 1 - name; -+ -+ case 133: -+ *setname = "state"; -+ *type = DW_ATE_unsigned; -+ return stpcpy(name, "spsr_svc") + 1 - name; -+ -+ case 144 ... 150: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_usr", regno - 144 + 8) + 1; -+ -+ case 151 ... 157: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_fiq", regno - 151 + 8) + 1; -+ -+ case 158 ... 159: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_irq", regno - 158 + 13) + 1; -+ -+ case 160 ... 
161: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_abt", regno - 160 + 13) + 1; -+ -+ case 162 ... 163: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_und", regno - 162 + 13) + 1; -+ -+ case 164 ... 165: -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ return sprintf(name, "r%d_svc", regno - 164 + 13) + 1; -+ -+ case 192 ... 199: -+ *setname = "MMX"; -+ *bits = 32; -+ *type = DW_ATE_unsigned; -+ name[0] = 'w'; -+ name[1] = 'c'; -+ name[2] = regno - 192 + '0'; -+ namelen = 3; -+ break; -+ - case 256 + 0 ... 256 + 9: -+ /* XXX TODO: Neon also uses those registers and can contain -+ * both float and integers */ - *setname = "VFP"; - *type = DW_ATE_float; - *bits = 64; -Index: elfutils-0.164/backends/arm_retval.c -=================================================================== ---- elfutils-0.164.orig/backends/arm_retval.c -+++ elfutils-0.164/backends/arm_retval.c -@@ -48,6 +48,13 @@ static const Dwarf_Op loc_intreg[] = - #define nloc_intreg 1 - #define nloc_intregs(n) (2 * (n)) - -+/* f1 */ /* XXX TODO: f0 can also have number 96 if program was compiled with -mabi=aapcs */ -+static const Dwarf_Op loc_fpreg[] = -+ { -+ { .atom = DW_OP_reg16 }, -+ }; -+#define nloc_fpreg 1 -+ - /* The return value is a structure and is actually stored in stack space - passed in a hidden argument by the caller. But, the compiler - helpfully returns the address of that space in r0. */ -@@ -58,8 +65,9 @@ static const Dwarf_Op loc_aggregate[] = - #define nloc_aggregate 1 - - --int --arm_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+static int -+arm_return_value_location_ (Dwarf_Die *functypedie, const Dwarf_Op **locp, -+ int soft_float) - { - /* Start with the function's type, and get the DW_AT_type attribute, - which is the type of the return value. */ -@@ -98,6 +106,21 @@ arm_return_value_location (Dwarf_Die *fu - else - return -1; - } -+ if (tag == DW_TAG_base_type) -+ { -+ Dwarf_Word encoding; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding, -+ &attr_mem), &encoding) != 0) -+ return -1; -+ -+ if ((encoding == DW_ATE_float) && !soft_float) -+ { -+ *locp = loc_fpreg; -+ if (size <= 8) -+ return nloc_fpreg; -+ goto aggregate; -+ } -+ } - if (size <= 16) - { - intreg: -@@ -106,6 +129,7 @@ arm_return_value_location (Dwarf_Die *fu - } - - aggregate: -+ /* XXX TODO sometimes aggregates are returned in r0 (-mabi=aapcs) */ - *locp = loc_aggregate; - return nloc_aggregate; - } -@@ -125,3 +149,18 @@ arm_return_value_location (Dwarf_Die *fu - DWARF and might be valid. */ - return -2; - } -+ -+/* return location for -mabi=apcs-gnu -msoft-float */ -+int -+arm_return_value_location_soft (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ return arm_return_value_location_ (functypedie, locp, 1); -+} -+ -+/* return location for -mabi=apcs-gnu -mhard-float (current default) */ -+int -+arm_return_value_location_hard (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ return arm_return_value_location_ (functypedie, locp, 0); -+} -+ -Index: elfutils-0.164/libelf/elf.h -=================================================================== ---- elfutils-0.164.orig/libelf/elf.h -+++ elfutils-0.164/libelf/elf.h -@@ -2450,6 +2450,9 @@ enum - #define EF_ARM_EABI_VER4 0x04000000 - #define EF_ARM_EABI_VER5 0x05000000 - -+/* EI_OSABI values */ -+#define ELFOSABI_ARM_AEABI 64 /* Contains symbol versioning. */ -+ - /* Additional symbol types for Thumb. 
*/ - #define STT_ARM_TFUNC STT_LOPROC /* A Thumb function. */ - #define STT_ARM_16BIT STT_HIPROC /* A Thumb label. */ -@@ -2467,12 +2470,19 @@ enum - - /* Processor specific values for the Phdr p_type field. */ - #define PT_ARM_EXIDX (PT_LOPROC + 1) /* ARM unwind segment. */ -+#define PT_ARM_UNWIND PT_ARM_EXIDX - - /* Processor specific values for the Shdr sh_type field. */ - #define SHT_ARM_EXIDX (SHT_LOPROC + 1) /* ARM unwind section. */ - #define SHT_ARM_PREEMPTMAP (SHT_LOPROC + 2) /* Preemption details. */ - #define SHT_ARM_ATTRIBUTES (SHT_LOPROC + 3) /* ARM attributes section. */ - -+/* Processor specific values for the Dyn d_tag field. */ -+#define DT_ARM_RESERVED1 (DT_LOPROC + 0) -+#define DT_ARM_SYMTABSZ (DT_LOPROC + 1) -+#define DT_ARM_PREEMTMAB (DT_LOPROC + 2) -+#define DT_ARM_RESERVED2 (DT_LOPROC + 3) -+#define DT_ARM_NUM 4 - - /* AArch64 relocs. */ - -@@ -2765,6 +2775,7 @@ enum - TLS block (LDR, STR). */ - #define R_ARM_TLS_IE12GP 111 /* 12 bit GOT entry relative - to GOT origin (LDR). */ -+/* 112 - 127 private range */ - #define R_ARM_ME_TOO 128 /* Obsolete. */ - #define R_ARM_THM_TLS_DESCSEQ 129 - #define R_ARM_THM_TLS_DESCSEQ16 129 -Index: elfutils-0.164/backends/libebl_arm.h -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/libebl_arm.h -@@ -0,0 +1,9 @@ -+#ifndef _LIBEBL_ARM_H -+#define _LIBEBL_ARM_H 1 -+ -+#include -+ -+extern int arm_return_value_location_soft(Dwarf_Die *, const Dwarf_Op **locp); -+extern int arm_return_value_location_hard(Dwarf_Die *, const Dwarf_Op **locp); -+ -+#endif -Index: elfutils-0.164/tests/run-allregs.sh -=================================================================== ---- elfutils-0.164.orig/tests/run-allregs.sh -+++ elfutils-0.164/tests/run-allregs.sh -@@ -2672,7 +2672,28 @@ integer registers: - 13: sp (sp), address 32 bits - 14: lr (lr), address 32 bits - 15: pc (pc), address 32 bits -- 128: spsr (spsr), unsigned 32 bits -+ 144: r8_usr (r8_usr), signed 32 bits -+ 145: r9_usr (r9_usr), signed 32 bits -+ 146: r10_usr (r10_usr), signed 32 bits -+ 147: r11_usr (r11_usr), signed 32 bits -+ 148: r12_usr (r12_usr), signed 32 bits -+ 149: r13_usr (r13_usr), signed 32 bits -+ 150: r14_usr (r14_usr), signed 32 bits -+ 151: r8_fiq (r8_fiq), signed 32 bits -+ 152: r9_fiq (r9_fiq), signed 32 bits -+ 153: r10_fiq (r10_fiq), signed 32 bits -+ 154: r11_fiq (r11_fiq), signed 32 bits -+ 155: r12_fiq (r12_fiq), signed 32 bits -+ 156: r13_fiq (r13_fiq), signed 32 bits -+ 157: r14_fiq (r14_fiq), signed 32 bits -+ 158: r13_irq (r13_irq), signed 32 bits -+ 159: r14_irq (r14_irq), signed 32 bits -+ 160: r13_abt (r13_abt), signed 32 bits -+ 161: r14_abt (r14_abt), signed 32 bits -+ 162: r13_und (r13_und), signed 32 bits -+ 163: r14_und (r14_und), signed 32 bits -+ 164: r13_svc (r13_svc), signed 32 bits -+ 165: r14_svc (r14_svc), signed 32 bits - FPA registers: - 16: f0 (f0), float 96 bits - 17: f1 (f1), float 96 bits -@@ -2690,7 +2711,72 @@ FPA registers: - 101: f5 (f5), float 96 bits - 102: f6 (f6), float 96 bits - 103: f7 (f7), float 96 bits -+MMX registers: -+ 104: wcgr0 (wcgr0), unsigned 32 bits -+ 105: wcgr1 (wcgr1), unsigned 32 bits -+ 106: wcgr2 (wcgr2), unsigned 32 bits -+ 107: wcgr3 (wcgr3), unsigned 32 bits -+ 108: wcgr4 (wcgr4), unsigned 32 bits -+ 109: wcgr5 (wcgr5), unsigned 32 bits -+ 110: wcgr6 (wcgr6), unsigned 32 bits -+ 111: wcgr7 (wcgr7), unsigned 32 bits -+ 112: wr0 (wr0), unsigned 64 bits -+ 113: wr1 (wr1), unsigned 64 bits -+ 114: wr2 (wr2), unsigned 64 bits -+ 115: wr3 (wr3), 
unsigned 64 bits -+ 116: wr4 (wr4), unsigned 64 bits -+ 117: wr5 (wr5), unsigned 64 bits -+ 118: wr6 (wr6), unsigned 64 bits -+ 119: wr7 (wr7), unsigned 64 bits -+ 120: wr8 (wr8), unsigned 64 bits -+ 121: wr9 (wr9), unsigned 64 bits -+ 122: wr10 (wr10), unsigned 64 bits -+ 123: wr11 (wr11), unsigned 64 bits -+ 124: wr12 (wr12), unsigned 64 bits -+ 125: wr13 (wr13), unsigned 64 bits -+ 126: wr14 (wr14), unsigned 64 bits -+ 127: wr15 (wr15), unsigned 64 bits -+ 192: wc0 (wc0), unsigned 32 bits -+ 193: wc1 (wc1), unsigned 32 bits -+ 194: wc2 (wc2), unsigned 32 bits -+ 195: wc3 (wc3), unsigned 32 bits -+ 196: wc4 (wc4), unsigned 32 bits -+ 197: wc5 (wc5), unsigned 32 bits -+ 198: wc6 (wc6), unsigned 32 bits -+ 199: wc7 (wc7), unsigned 32 bits - VFP registers: -+ 64: s0 (s0), float 32 bits -+ 65: s1 (s1), float 32 bits -+ 66: s2 (s2), float 32 bits -+ 67: s3 (s3), float 32 bits -+ 68: s4 (s4), float 32 bits -+ 69: s5 (s5), float 32 bits -+ 70: s6 (s6), float 32 bits -+ 71: s7 (s7), float 32 bits -+ 72: s8 (s8), float 32 bits -+ 73: s9 (s9), float 32 bits -+ 74: s10 (s10), float 32 bits -+ 75: s11 (s11), float 32 bits -+ 76: s12 (s12), float 32 bits -+ 77: s13 (s13), float 32 bits -+ 78: s14 (s14), float 32 bits -+ 79: s15 (s15), float 32 bits -+ 80: s16 (s16), float 32 bits -+ 81: s17 (s17), float 32 bits -+ 82: s18 (s18), float 32 bits -+ 83: s19 (s19), float 32 bits -+ 84: s20 (s20), float 32 bits -+ 85: s21 (s21), float 32 bits -+ 86: s22 (s22), float 32 bits -+ 87: s23 (s23), float 32 bits -+ 88: s24 (s24), float 32 bits -+ 89: s25 (s25), float 32 bits -+ 90: s26 (s26), float 32 bits -+ 91: s27 (s27), float 32 bits -+ 92: s28 (s28), float 32 bits -+ 93: s29 (s29), float 32 bits -+ 94: s30 (s30), float 32 bits -+ 95: s31 (s31), float 32 bits - 256: d0 (d0), float 64 bits - 257: d1 (d1), float 64 bits - 258: d2 (d2), float 64 bits -@@ -2723,6 +2809,13 @@ VFP registers: - 285: d29 (d29), float 64 bits - 286: d30 (d30), float 64 bits - 287: d31 (d31), float 64 bits -+state registers: -+ 128: spsr (spsr), unsigned 32 bits -+ 129: spsr_fiq (spsr_fiq), unsigned 32 bits -+ 130: spsr_irq (spsr_irq), unsigned 32 bits -+ 131: spsr_abt (spsr_abt), unsigned 32 bits -+ 132: spsr_und (spsr_und), unsigned 32 bits -+ 133: spsr_svc (spsr_svc), unsigned 32 bits - EOF - - # See run-readelf-mixed-corenote.sh for instructions to regenerate -Index: elfutils-0.164/tests/run-readelf-mixed-corenote.sh -=================================================================== ---- elfutils-0.164.orig/tests/run-readelf-mixed-corenote.sh -+++ elfutils-0.164/tests/run-readelf-mixed-corenote.sh -@@ -31,12 +31,11 @@ Note segment of 892 bytes at offset 0x27 - pid: 11087, ppid: 11063, pgrp: 11087, sid: 11063 - utime: 0.000000, stime: 0.010000, cutime: 0.000000, cstime: 0.000000 - orig_r0: -1, fpvalid: 1 -- r0: 1 r1: -1091672508 r2: -1091672500 -- r3: 0 r4: 0 r5: 0 -- r6: 33728 r7: 0 r8: 0 -- r9: 0 r10: -1225703496 r11: -1091672844 -- r12: 0 sp: 0xbeee64f4 lr: 0xb6dc3f48 -- pc: 0x00008500 spsr: 0x60000010 -+ r0: 1 r1: -1091672508 r2: -1091672500 r3: 0 -+ r4: 0 r5: 0 r6: 33728 r7: 0 -+ r8: 0 r9: 0 r10: -1225703496 r11: -1091672844 -+ r12: 0 sp: 0xbeee64f4 lr: 0xb6dc3f48 pc: 0x00008500 -+ spsr: 0x60000010 - CORE 124 PRPSINFO - state: 0, sname: R, zomb: 0, nice: 0, flag: 0x00400500 - uid: 0, gid: 0, pid: 11087, ppid: 11063, pgrp: 11087, sid: 11063 -Index: elfutils-0.164/tests/run-addrcfi.sh -=================================================================== ---- elfutils-0.164.orig/tests/run-addrcfi.sh -+++ 
elfutils-0.164/tests/run-addrcfi.sh -@@ -3554,6 +3554,38 @@ dwarf_cfi_addrframe (.eh_frame): no matc - FPA reg21 (f5): undefined - FPA reg22 (f6): undefined - FPA reg23 (f7): undefined -+ VFP reg64 (s0): undefined -+ VFP reg65 (s1): undefined -+ VFP reg66 (s2): undefined -+ VFP reg67 (s3): undefined -+ VFP reg68 (s4): undefined -+ VFP reg69 (s5): undefined -+ VFP reg70 (s6): undefined -+ VFP reg71 (s7): undefined -+ VFP reg72 (s8): undefined -+ VFP reg73 (s9): undefined -+ VFP reg74 (s10): undefined -+ VFP reg75 (s11): undefined -+ VFP reg76 (s12): undefined -+ VFP reg77 (s13): undefined -+ VFP reg78 (s14): undefined -+ VFP reg79 (s15): undefined -+ VFP reg80 (s16): undefined -+ VFP reg81 (s17): undefined -+ VFP reg82 (s18): undefined -+ VFP reg83 (s19): undefined -+ VFP reg84 (s20): undefined -+ VFP reg85 (s21): undefined -+ VFP reg86 (s22): undefined -+ VFP reg87 (s23): undefined -+ VFP reg88 (s24): undefined -+ VFP reg89 (s25): undefined -+ VFP reg90 (s26): undefined -+ VFP reg91 (s27): undefined -+ VFP reg92 (s28): undefined -+ VFP reg93 (s29): undefined -+ VFP reg94 (s30): undefined -+ VFP reg95 (s31): undefined - FPA reg96 (f0): undefined - FPA reg97 (f1): undefined - FPA reg98 (f2): undefined -@@ -3562,7 +3594,66 @@ dwarf_cfi_addrframe (.eh_frame): no matc - FPA reg101 (f5): undefined - FPA reg102 (f6): undefined - FPA reg103 (f7): undefined -- integer reg128 (spsr): undefined -+ MMX reg104 (wcgr0): undefined -+ MMX reg105 (wcgr1): undefined -+ MMX reg106 (wcgr2): undefined -+ MMX reg107 (wcgr3): undefined -+ MMX reg108 (wcgr4): undefined -+ MMX reg109 (wcgr5): undefined -+ MMX reg110 (wcgr6): undefined -+ MMX reg111 (wcgr7): undefined -+ MMX reg112 (wr0): undefined -+ MMX reg113 (wr1): undefined -+ MMX reg114 (wr2): undefined -+ MMX reg115 (wr3): undefined -+ MMX reg116 (wr4): undefined -+ MMX reg117 (wr5): undefined -+ MMX reg118 (wr6): undefined -+ MMX reg119 (wr7): undefined -+ MMX reg120 (wr8): undefined -+ MMX reg121 (wr9): undefined -+ MMX reg122 (wr10): undefined -+ MMX reg123 (wr11): undefined -+ MMX reg124 (wr12): undefined -+ MMX reg125 (wr13): undefined -+ MMX reg126 (wr14): undefined -+ MMX reg127 (wr15): undefined -+ state reg128 (spsr): undefined -+ state reg129 (spsr_fiq): undefined -+ state reg130 (spsr_irq): undefined -+ state reg131 (spsr_abt): undefined -+ state reg132 (spsr_und): undefined -+ state reg133 (spsr_svc): undefined -+ integer reg144 (r8_usr): undefined -+ integer reg145 (r9_usr): undefined -+ integer reg146 (r10_usr): undefined -+ integer reg147 (r11_usr): undefined -+ integer reg148 (r12_usr): undefined -+ integer reg149 (r13_usr): undefined -+ integer reg150 (r14_usr): undefined -+ integer reg151 (r8_fiq): undefined -+ integer reg152 (r9_fiq): undefined -+ integer reg153 (r10_fiq): undefined -+ integer reg154 (r11_fiq): undefined -+ integer reg155 (r12_fiq): undefined -+ integer reg156 (r13_fiq): undefined -+ integer reg157 (r14_fiq): undefined -+ integer reg158 (r13_irq): undefined -+ integer reg159 (r14_irq): undefined -+ integer reg160 (r13_abt): undefined -+ integer reg161 (r14_abt): undefined -+ integer reg162 (r13_und): undefined -+ integer reg163 (r14_und): undefined -+ integer reg164 (r13_svc): undefined -+ integer reg165 (r14_svc): undefined -+ MMX reg192 (wc0): undefined -+ MMX reg193 (wc1): undefined -+ MMX reg194 (wc2): undefined -+ MMX reg195 (wc3): undefined -+ MMX reg196 (wc4): undefined -+ MMX reg197 (wc5): undefined -+ MMX reg198 (wc6): undefined -+ MMX reg199 (wc7): undefined - VFP reg256 (d0): undefined - VFP reg257 (d1): 
undefined - VFP reg258 (d2): undefined diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/elfcmp-fix-self-comparision.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/elfcmp-fix-self-comparision.patch deleted file mode 100644 index 6c150bb54..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/elfcmp-fix-self-comparision.patch +++ /dev/null @@ -1,41 +0,0 @@ -From 836a16fe5b5bab4a3afe2c991c104652775ce3a3 Mon Sep 17 00:00:00 2001 -From: David Abdurachmanov -Date: Mon, 11 Apr 2016 16:00:57 +0200 -Subject: elfcmp: fix self-comparison error with GCC 6 -MIME-Version: 1.0 -Content-Type: text/plain; charset=UTF-8 -Content-Transfer-Encoding: 8bit - -Noticed with Fedora 24 Alpha, gcc (GCC) 6.0.0 20160406 -(Red Hat 6.0.0-0.20). - -elfcmp.c: In function ‘main’: -elfcmp.c:364:199: error: self-comparison always evaluates -to false [-Werror=tautological-compare] - if (unlikely (name1 == NULL || name2 == NULL - -Signed-off-by: David Abdurachmanov ---- - -Upstream-Status: Backport - - src/ChangeLog | 4 ++++ - src/elfcmp.c | 2 +- - 2 files changed, 5 insertions(+), 1 deletion(-) - -diff --git a/src/elfcmp.c b/src/elfcmp.c -index 852b92f..7b5d39c 100644 ---- a/src/elfcmp.c -+++ b/src/elfcmp.c -@@ -368,7 +368,7 @@ main (int argc, char *argv[]) - && sym1->st_shndx != SHN_UNDEF) - || sym1->st_info != sym2->st_info - || sym1->st_other != sym2->st_other -- || sym1->st_shndx != sym1->st_shndx)) -+ || sym1->st_shndx != sym2->st_shndx)) - { - // XXX Do we want to allow reordered symbol tables? - symtab_mismatch: --- -cgit v0.12 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/fallthrough.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/fallthrough.patch deleted file mode 100644 index b2623f9d2..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/fallthrough.patch +++ /dev/null @@ -1,36 +0,0 @@ -GCC7 adds -Wimplicit-fallthrough to warn when a switch case falls through, -however this causes warnings (which are promoted to errors) with the elfutils -patches from Debian for mips and parisc, which use fallthrough's by design. - -Explicitly mark the intentional fallthrough switch cases with a comment to -disable the warnings where the fallthrough behaviour is desired. - -Upstream-Status: Pending [debian] -Signed-off-by: Joshua Lock - -Index: elfutils-0.168/backends/parisc_retval.c -=================================================================== ---- elfutils-0.168.orig/backends/parisc_retval.c -+++ elfutils-0.168/backends/parisc_retval.c -@@ -166,7 +166,7 @@ parisc_return_value_location_ (Dwarf_Die - return nloc_intregpair; - - /* Else fall through. 
*/ -- } -+ } // fallthrough - - case DW_TAG_structure_type: - case DW_TAG_class_type: -Index: elfutils-0.168/backends/mips_retval.c -=================================================================== ---- elfutils-0.168.orig/backends/mips_retval.c -+++ elfutils-0.168/backends/mips_retval.c -@@ -387,7 +387,7 @@ mips_return_value_location (Dwarf_Die *f - else - return nloc_intregpair; - } -- } -+ } // fallthrough - - /* Fallthrough to handle large types */ - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/fixheadercheck.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/fixheadercheck.patch deleted file mode 100644 index 5de3b24c8..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/fixheadercheck.patch +++ /dev/null @@ -1,23 +0,0 @@ -For some binaries we can get a invalid section alignment, for example if -sh_align = 1 and sh_addralign is 0. In the case of a zero size section like -".note.GNU-stack", this is irrelavent as far as I can tell and we shouldn't -error in this case. - -RP 2014/6/11 - -Upstream-Status: Pending - -diff --git a/libelf/elf32_updatenull.c b/libelf/elf32_updatenull.c ---- a/libelf/elf32_updatenull.c -+++ b/libelf/elf32_updatenull.c -@@ -339,8 +339,8 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum) - we test for the alignment of the section being large - enough for the largest alignment required by a data - block. */ -- if (unlikely (! powerof2 (shdr->sh_addralign)) -- || unlikely ((shdr->sh_addralign ?: 1) < sh_align)) -+ if (shdr->sh_size && (unlikely (! powerof2 (shdr->sh_addralign)) -+ || unlikely ((shdr->sh_addralign ?: 1) < sh_align))) - { - __libelf_seterrno (ELF_E_INVALID_ALIGN); - return -1; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/hppa_backend.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/hppa_backend.diff deleted file mode 100644 index 45456715a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/hppa_backend.diff +++ /dev/null @@ -1,799 +0,0 @@ -Index: elfutils-0.164/backends/parisc_init.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/parisc_init.c -@@ -0,0 +1,73 @@ -+/* Initialization of PA-RISC specific backend library. -+ Copyright (C) 2002, 2005, 2006 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ Written by Ulrich Drepper , 2002. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. 
Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#define BACKEND parisc_ -+#define RELOC_PREFIX R_PARISC_ -+#include "libebl_CPU.h" -+#include "libebl_parisc.h" -+ -+/* This defines the common reloc hooks based on parisc_reloc.def. */ -+#include "common-reloc.c" -+ -+ -+const char * -+parisc_init (Elf *elf __attribute__ ((unused)), -+ GElf_Half machine __attribute__ ((unused)), -+ Ebl *eh, -+ size_t ehlen) -+{ -+ int pa64 = 0; -+ -+ /* Check whether the Elf_BH object has a sufficent size. */ -+ if (ehlen < sizeof (Ebl)) -+ return NULL; -+ -+ if (elf) { -+ GElf_Ehdr ehdr_mem; -+ GElf_Ehdr *ehdr = gelf_getehdr (elf, &ehdr_mem); -+ if (ehdr && (ehdr->e_flags & EF_PARISC_WIDE)) -+ pa64 = 1; -+ } -+ /* We handle it. */ -+ eh->name = "PA-RISC"; -+ parisc_init_reloc (eh); -+ HOOK (eh, reloc_simple_type); -+ HOOK (eh, machine_flag_check); -+ HOOK (eh, symbol_type_name); -+ HOOK (eh, segment_type_name); -+ HOOK (eh, section_type_name); -+ HOOK (eh, register_info); -+ if (pa64) -+ eh->return_value_location = parisc_return_value_location_64; -+ else -+ eh->return_value_location = parisc_return_value_location_32; -+ -+ return MODVERSION; -+} -Index: elfutils-0.164/backends/parisc_regs.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/parisc_regs.c -@@ -0,0 +1,159 @@ -+/* Register names and numbers for PA-RISC DWARF. -+ Copyright (C) 2005, 2006 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND parisc_ -+#include "libebl_CPU.h" -+ -+ssize_t -+parisc_register_info (Ebl *ebl, int regno, char *name, size_t namelen, -+ const char **prefix, const char **setname, -+ int *bits, int *type) -+{ -+ int pa64 = 0; -+ -+ if (ebl->elf) { -+ GElf_Ehdr ehdr_mem; -+ GElf_Ehdr *ehdr = gelf_getehdr (ebl->elf, &ehdr_mem); -+ if (ehdr->e_flags & EF_PARISC_WIDE) -+ pa64 = 1; -+ } -+ -+ int nregs = pa64 ? 
127 : 128; -+ -+ if (name == NULL) -+ return nregs; -+ -+ if (regno < 0 || regno >= nregs || namelen < 6) -+ return -1; -+ -+ *prefix = "%"; -+ -+ if (regno < 32) -+ { -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ if (pa64) -+ { -+ *bits = 64; -+ } -+ else -+ { -+ *bits = 32; -+ } -+ } -+ else if (regno == 32) -+ { -+ *setname = "special"; -+ if (pa64) -+ { -+ *bits = 6; -+ } -+ else -+ { -+ *bits = 5; -+ } -+ *type = DW_ATE_unsigned; -+ } -+ else -+ { -+ *setname = "FPU"; -+ *type = DW_ATE_float; -+ if (pa64) -+ { -+ *bits = 64; -+ } -+ else -+ { -+ *bits = 32; -+ } -+ } -+ -+ if (regno < 33) { -+ switch (regno) -+ { -+ case 0 ... 9: -+ name[0] = 'r'; -+ name[1] = regno + '0'; -+ namelen = 2; -+ break; -+ case 10 ... 31: -+ name[0] = 'r'; -+ name[1] = regno / 10 + '0'; -+ name[2] = regno % 10 + '0'; -+ namelen = 3; -+ break; -+ case 32: -+ *prefix = NULL; -+ name[0] = 'S'; -+ name[1] = 'A'; -+ name[2] = 'R'; -+ namelen = 3; -+ break; -+ } -+ } -+ else { -+ if (pa64 && ((regno - 72) % 2)) { -+ *setname = NULL; -+ return 0; -+ } -+ -+ switch (regno) -+ { -+ case 72 + 0 ... 72 + 11: -+ name[0] = 'f'; -+ name[1] = 'r'; -+ name[2] = (regno + 8 - 72) / 2 + '0'; -+ namelen = 3; -+ if ((regno + 8 - 72) % 2) { -+ name[3] = 'R'; -+ namelen++; -+ } -+ break; -+ case 72 + 12 ... 72 + 55: -+ name[0] = 'f'; -+ name[1] = 'r'; -+ name[2] = (regno + 8 - 72) / 2 / 10 + '0'; -+ name[3] = (regno + 8 - 72) / 2 % 10 + '0'; -+ namelen = 4; -+ if ((regno + 8 - 72) % 2) { -+ name[4] = 'R'; -+ namelen++; -+ } -+ break; -+ default: -+ *setname = NULL; -+ return 0; -+ } -+ } -+ name[namelen++] = '\0'; -+ return namelen; -+} -Index: elfutils-0.164/backends/parisc_reloc.def -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/parisc_reloc.def -@@ -0,0 +1,128 @@ -+/* List the relocation types for PA-RISC. -*- C -*- -+ Copyright (C) 2005 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . 
*/ -+ -+/* NAME, REL|EXEC|DYN */ -+ -+RELOC_TYPE (NONE, EXEC|DYN) -+RELOC_TYPE (DIR32, REL|EXEC|DYN) -+RELOC_TYPE (DIR21L, REL|EXEC|DYN) -+RELOC_TYPE (DIR17R, REL) -+RELOC_TYPE (DIR17F, REL) -+RELOC_TYPE (DIR14R, REL|DYN) -+RELOC_TYPE (PCREL32, REL) -+RELOC_TYPE (PCREL21L, REL) -+RELOC_TYPE (PCREL17R, REL) -+RELOC_TYPE (PCREL17F, REL) -+RELOC_TYPE (PCREL14R, REL|EXEC) -+RELOC_TYPE (DPREL21L, REL) -+RELOC_TYPE (DPREL14WR, REL) -+RELOC_TYPE (DPREL14DR, REL) -+RELOC_TYPE (DPREL14R, REL) -+RELOC_TYPE (GPREL21L, 0) -+RELOC_TYPE (GPREL14R, 0) -+RELOC_TYPE (LTOFF21L, REL) -+RELOC_TYPE (LTOFF14R, REL) -+RELOC_TYPE (DLTIND14F, 0) -+RELOC_TYPE (SETBASE, 0) -+RELOC_TYPE (SECREL32, REL) -+RELOC_TYPE (BASEREL21L, 0) -+RELOC_TYPE (BASEREL17R, 0) -+RELOC_TYPE (BASEREL14R, 0) -+RELOC_TYPE (SEGBASE, 0) -+RELOC_TYPE (SEGREL32, REL) -+RELOC_TYPE (PLTOFF21L, 0) -+RELOC_TYPE (PLTOFF14R, 0) -+RELOC_TYPE (PLTOFF14F, 0) -+RELOC_TYPE (LTOFF_FPTR32, 0) -+RELOC_TYPE (LTOFF_FPTR21L, 0) -+RELOC_TYPE (LTOFF_FPTR14R, 0) -+RELOC_TYPE (FPTR64, 0) -+RELOC_TYPE (PLABEL32, REL|DYN) -+RELOC_TYPE (PCREL64, 0) -+RELOC_TYPE (PCREL22C, 0) -+RELOC_TYPE (PCREL22F, 0) -+RELOC_TYPE (PCREL14WR, 0) -+RELOC_TYPE (PCREL14DR, 0) -+RELOC_TYPE (PCREL16F, 0) -+RELOC_TYPE (PCREL16WF, 0) -+RELOC_TYPE (PCREL16DF, 0) -+RELOC_TYPE (DIR64, REL|DYN) -+RELOC_TYPE (DIR14WR, REL) -+RELOC_TYPE (DIR14DR, REL) -+RELOC_TYPE (DIR16F, REL) -+RELOC_TYPE (DIR16WF, REL) -+RELOC_TYPE (DIR16DF, REL) -+RELOC_TYPE (GPREL64, 0) -+RELOC_TYPE (GPREL14WR, 0) -+RELOC_TYPE (GPREL14DR, 0) -+RELOC_TYPE (GPREL16F, 0) -+RELOC_TYPE (GPREL16WF, 0) -+RELOC_TYPE (GPREL16DF, 0) -+RELOC_TYPE (LTOFF64, 0) -+RELOC_TYPE (LTOFF14WR, 0) -+RELOC_TYPE (LTOFF14DR, 0) -+RELOC_TYPE (LTOFF16F, 0) -+RELOC_TYPE (LTOFF16WF, 0) -+RELOC_TYPE (LTOFF16DF, 0) -+RELOC_TYPE (SECREL64, 0) -+RELOC_TYPE (BASEREL14WR, 0) -+RELOC_TYPE (BASEREL14DR, 0) -+RELOC_TYPE (SEGREL64, 0) -+RELOC_TYPE (PLTOFF14WR, 0) -+RELOC_TYPE (PLTOFF14DR, 0) -+RELOC_TYPE (PLTOFF16F, 0) -+RELOC_TYPE (PLTOFF16WF, 0) -+RELOC_TYPE (PLTOFF16DF, 0) -+RELOC_TYPE (LTOFF_FPTR64, 0) -+RELOC_TYPE (LTOFF_FPTR14WR, 0) -+RELOC_TYPE (LTOFF_FPTR14DR, 0) -+RELOC_TYPE (LTOFF_FPTR16F, 0) -+RELOC_TYPE (LTOFF_FPTR16WF, 0) -+RELOC_TYPE (LTOFF_FPTR16DF, 0) -+RELOC_TYPE (COPY, EXEC) -+RELOC_TYPE (IPLT, EXEC|DYN) -+RELOC_TYPE (EPLT, 0) -+RELOC_TYPE (TPREL32, DYN) -+RELOC_TYPE (TPREL21L, 0) -+RELOC_TYPE (TPREL14R, 0) -+RELOC_TYPE (LTOFF_TP21L, 0) -+RELOC_TYPE (LTOFF_TP14R, 0) -+RELOC_TYPE (LTOFF_TP14F, 0) -+RELOC_TYPE (TPREL64, 0) -+RELOC_TYPE (TPREL14WR, 0) -+RELOC_TYPE (TPREL14DR, 0) -+RELOC_TYPE (TPREL16F, 0) -+RELOC_TYPE (TPREL16WF, 0) -+RELOC_TYPE (TPREL16DF, 0) -+RELOC_TYPE (LTOFF_TP64, 0) -+RELOC_TYPE (LTOFF_TP14WR, 0) -+RELOC_TYPE (LTOFF_TP14DR, 0) -+RELOC_TYPE (LTOFF_TP16F, 0) -+RELOC_TYPE (LTOFF_TP16WF, 0) -+RELOC_TYPE (LTOFF_TP16DF, 0) -+RELOC_TYPE (TLS_DTPMOD32, DYN) -+RELOC_TYPE (TLS_DTPMOD64, DYN) -+ -+#define NO_RELATIVE_RELOC 1 -Index: elfutils-0.164/backends/parisc_retval.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/parisc_retval.c -@@ -0,0 +1,213 @@ -+/* Function return value location for Linux/PA-RISC ABI. -+ Copyright (C) 2005 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. 
-+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND parisc_ -+#include "libebl_CPU.h" -+#include "libebl_parisc.h" -+ -+/* %r28, or pair %r28, %r29. */ -+static const Dwarf_Op loc_intreg32[] = -+ { -+ { .atom = DW_OP_reg28 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_reg29 }, { .atom = DW_OP_piece, .number = 4 }, -+ }; -+ -+static const Dwarf_Op loc_intreg[] = -+ { -+ { .atom = DW_OP_reg28 }, { .atom = DW_OP_piece, .number = 8 }, -+ { .atom = DW_OP_reg29 }, { .atom = DW_OP_piece, .number = 8 }, -+ }; -+#define nloc_intreg 1 -+#define nloc_intregpair 4 -+ -+/* %fr4L, or pair %fr4L, %fr4R on pa-32 */ -+static const Dwarf_Op loc_fpreg32[] = -+ { -+ { .atom = DW_OP_regx, .number = 72 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_regx, .number = 73 }, { .atom = DW_OP_piece, .number = 4 }, -+ }; -+#define nloc_fpreg32 2 -+#define nloc_fpregpair32 4 -+ -+/* $fr4 */ -+static const Dwarf_Op loc_fpreg[] = -+ { -+ { .atom = DW_OP_regx, .number = 72 }, -+ }; -+#define nloc_fpreg 1 -+ -+#if 0 -+/* The return value is a structure and is actually stored in stack space -+ passed in a hidden argument by the caller. Address of the location is stored -+ in %r28 before function call, but it may be changed by function. */ -+static const Dwarf_Op loc_aggregate[] = -+ { -+ { .atom = DW_OP_breg28 }, -+ }; -+#define nloc_aggregate 1 -+#endif -+ -+static int -+parisc_return_value_location_ (Dwarf_Die *functypedie, const Dwarf_Op **locp, int pa64) -+{ -+ Dwarf_Word regsize = pa64 ? 8 : 4; -+ -+ /* Start with the function's type, and get the DW_AT_type attribute, -+ which is the type of the return value. */ -+ -+ Dwarf_Attribute attr_mem; -+ Dwarf_Attribute *attr = dwarf_attr_integrate (functypedie, DW_AT_type, &attr_mem); -+ if (attr == NULL) -+ /* The function has no return value, like a `void' function in C. */ -+ return 0; -+ -+ Dwarf_Die die_mem; -+ Dwarf_Die *typedie = dwarf_formref_die (attr, &die_mem); -+ int tag = dwarf_tag (typedie); -+ -+ /* Follow typedefs and qualifiers to get to the actual type. */ -+ while (tag == DW_TAG_typedef -+ || tag == DW_TAG_const_type || tag == DW_TAG_volatile_type -+ || tag == DW_TAG_restrict_type) -+ { -+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); -+ typedie = dwarf_formref_die (attr, &die_mem); -+ tag = dwarf_tag (typedie); -+ } -+ -+ switch (tag) -+ { -+ case -1: -+ return -1; -+ -+ case DW_TAG_subrange_type: -+ if (! 
dwarf_hasattr_integrate (typedie, DW_AT_byte_size)) -+ { -+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); -+ typedie = dwarf_formref_die (attr, &die_mem); -+ tag = dwarf_tag (typedie); -+ } -+ /* Fall through. */ -+ -+ case DW_TAG_base_type: -+ case DW_TAG_enumeration_type: -+ case DW_TAG_pointer_type: -+ case DW_TAG_ptr_to_member_type: -+ { -+ Dwarf_Word size; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_byte_size, -+ &attr_mem), &size) != 0) -+ { -+ if (tag == DW_TAG_pointer_type || tag == DW_TAG_ptr_to_member_type) -+ size = 4; -+ else -+ return -1; -+ } -+ if (tag == DW_TAG_base_type) -+ { -+ Dwarf_Word encoding; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding, -+ &attr_mem), &encoding) != 0) -+ return -1; -+ -+ if (encoding == DW_ATE_float) -+ { -+ if (pa64) { -+ *locp = loc_fpreg; -+ if (size <= 8) -+ return nloc_fpreg; -+ } -+ else { -+ *locp = loc_fpreg32; -+ if (size <= 4) -+ return nloc_fpreg32; -+ else if (size <= 8) -+ return nloc_fpregpair32; -+ } -+ goto aggregate; -+ } -+ } -+ if (pa64) -+ *locp = loc_intreg; -+ else -+ *locp = loc_intreg32; -+ if (size <= regsize) -+ return nloc_intreg; -+ if (size <= 2 * regsize) -+ return nloc_intregpair; -+ -+ /* Else fall through. */ -+ } -+ -+ case DW_TAG_structure_type: -+ case DW_TAG_class_type: -+ case DW_TAG_union_type: -+ case DW_TAG_array_type: -+ aggregate: { -+ Dwarf_Word size; -+ if (dwarf_aggregate_size (typedie, &size) != 0) -+ return -1; -+ if (pa64) -+ *locp = loc_intreg; -+ else -+ *locp = loc_intreg32; -+ if (size <= regsize) -+ return nloc_intreg; -+ if (size <= 2 * regsize) -+ return nloc_intregpair; -+#if 0 -+ /* there should be some way to know this location... But I do not see it. */ -+ *locp = loc_aggregate; -+ return nloc_aggregate; -+#endif -+ /* fall through. */ -+ } -+ } -+ -+ /* XXX We don't have a good way to return specific errors from ebl calls. -+ This value means we do not understand the type, but it is well-formed -+ DWARF and might be valid. */ -+ return -2; -+} -+ -+int -+parisc_return_value_location_32 (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ return parisc_return_value_location_ (functypedie, locp, 0); -+} -+ -+int -+parisc_return_value_location_64 (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ return parisc_return_value_location_ (functypedie, locp, 1); -+} -+ -Index: elfutils-0.164/backends/parisc_symbol.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/parisc_symbol.c -@@ -0,0 +1,112 @@ -+/* PA-RISC specific symbolic name handling. -+ Copyright (C) 2002, 2005 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ Written by Ulrich Drepper , 2002. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. 
-+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND parisc_ -+#include "libebl_CPU.h" -+ -+const char * -+parisc_segment_type_name (int segment, char *buf __attribute__ ((unused)), -+ size_t len __attribute__ ((unused))) -+{ -+ switch (segment) -+ { -+ case PT_PARISC_ARCHEXT: -+ return "PARISC_ARCHEXT"; -+ case PT_PARISC_UNWIND: -+ return "PARISC_UNWIND"; -+ default: -+ break; -+ } -+ return NULL; -+} -+ -+/* Return symbolic representation of symbol type. */ -+const char * -+parisc_symbol_type_name(int symbol, char *buf __attribute__ ((unused)), -+ size_t len __attribute__ ((unused))) -+{ -+ if (symbol == STT_PARISC_MILLICODE) -+ return "PARISC_MILLI"; -+ return NULL; -+} -+ -+/* Return symbolic representation of section type. */ -+const char * -+parisc_section_type_name (int type, -+ char *buf __attribute__ ((unused)), -+ size_t len __attribute__ ((unused))) -+{ -+ switch (type) -+ { -+ case SHT_PARISC_EXT: -+ return "PARISC_EXT"; -+ case SHT_PARISC_UNWIND: -+ return "PARISC_UNWIND"; -+ case SHT_PARISC_DOC: -+ return "PARISC_DOC"; -+ } -+ -+ return NULL; -+} -+ -+/* Check whether machine flags are valid. */ -+bool -+parisc_machine_flag_check (GElf_Word flags) -+{ -+ if (flags &~ (EF_PARISC_TRAPNIL | EF_PARISC_EXT | EF_PARISC_LSB | -+ EF_PARISC_WIDE | EF_PARISC_NO_KABP | -+ EF_PARISC_LAZYSWAP | EF_PARISC_ARCH)) -+ return 0; -+ -+ GElf_Word arch = flags & EF_PARISC_ARCH; -+ -+ return ((arch == EFA_PARISC_1_0) || (arch == EFA_PARISC_1_1) || -+ (arch == EFA_PARISC_2_0)); -+} -+ -+/* Check for the simple reloc types. 
*/ -+Elf_Type -+parisc_reloc_simple_type (Ebl *ebl __attribute__ ((unused)), int type) -+{ -+ switch (type) -+ { -+ case R_PARISC_DIR64: -+ case R_PARISC_SECREL64: -+ return ELF_T_XWORD; -+ case R_PARISC_DIR32: -+ case R_PARISC_SECREL32: -+ return ELF_T_WORD; -+ default: -+ return ELF_T_NUM; -+ } -+} -Index: elfutils-0.164/backends/libebl_parisc.h -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/libebl_parisc.h -@@ -0,0 +1,9 @@ -+#ifndef _LIBEBL_HPPA_H -+#define _LIBEBL_HPPA_H 1 -+ -+#include -+ -+extern int parisc_return_value_location_32(Dwarf_Die *, const Dwarf_Op **locp); -+extern int parisc_return_value_location_64(Dwarf_Die *, const Dwarf_Op **locp); -+ -+#endif -Index: elfutils-0.164/backends/Makefile.am -=================================================================== ---- elfutils-0.164.orig/backends/Makefile.am -+++ elfutils-0.164/backends/Makefile.am -@@ -33,11 +33,12 @@ AM_CPPFLAGS += -I$(top_srcdir)/libebl -I - - - modules = i386 sh x86_64 ia64 alpha arm aarch64 sparc ppc ppc64 s390 \ -- tilegx -+ tilegx parisc - libebl_pic = libebl_i386_pic.a libebl_sh_pic.a libebl_x86_64_pic.a \ - libebl_ia64_pic.a libebl_alpha_pic.a libebl_arm_pic.a \ - libebl_aarch64_pic.a libebl_sparc_pic.a libebl_ppc_pic.a \ -- libebl_ppc64_pic.a libebl_s390_pic.a libebl_tilegx_pic.a -+ libebl_ppc64_pic.a libebl_s390_pic.a libebl_tilegx_pic.a \ -+ libebl_parisc_pic.a - noinst_LIBRARIES = $(libebl_pic) - noinst_DATA = $(libebl_pic:_pic.a=.so) - -@@ -111,6 +112,9 @@ tilegx_SRCS = tilegx_init.c tilegx_symbo - libebl_tilegx_pic_a_SOURCES = $(tilegx_SRCS) - am_libebl_tilegx_pic_a_OBJECTS = $(tilegx_SRCS:.c=.os) - -+parisc_SRCS = parisc_init.c parisc_symbol.c parisc_regs.c parisc_retval.c -+libebl_parisc_pic_a_SOURCES = $(parisc_SRCS) -+am_libebl_parisc_pic_a_OBJECTS = $(parisc_SRCS:.c=.os) - - libebl_%.so libebl_%.map: libebl_%_pic.a $(libelf) $(libdw) - @rm -f $(@:.so=.map) -Index: elfutils-0.164/libelf/elf.h -=================================================================== ---- elfutils-0.164.orig/libelf/elf.h -+++ elfutils-0.164/libelf/elf.h -@@ -1912,16 +1912,24 @@ enum - #define R_PARISC_PCREL17F 12 /* 17 bits of rel. address. */ - #define R_PARISC_PCREL14R 14 /* Right 14 bits of rel. address. */ - #define R_PARISC_DPREL21L 18 /* Left 21 bits of rel. address. */ -+#define R_PARISC_DPREL14WR 19 -+#define R_PARISC_DPREL14DR 20 - #define R_PARISC_DPREL14R 22 /* Right 14 bits of rel. address. */ - #define R_PARISC_GPREL21L 26 /* GP-relative, left 21 bits. */ - #define R_PARISC_GPREL14R 30 /* GP-relative, right 14 bits. */ - #define R_PARISC_LTOFF21L 34 /* LT-relative, left 21 bits. */ - #define R_PARISC_LTOFF14R 38 /* LT-relative, right 14 bits. */ -+#define R_PARISC_DLTIND14F 39 -+#define R_PARISC_SETBASE 40 - #define R_PARISC_SECREL32 41 /* 32 bits section rel. address. */ -+#define R_PARISC_BASEREL21L 42 -+#define R_PARISC_BASEREL17R 43 -+#define R_PARISC_BASEREL14R 46 - #define R_PARISC_SEGBASE 48 /* No relocation, set segment base. */ - #define R_PARISC_SEGREL32 49 /* 32 bits segment rel. address. */ - #define R_PARISC_PLTOFF21L 50 /* PLT rel. address, left 21 bits. */ - #define R_PARISC_PLTOFF14R 54 /* PLT rel. address, right 14 bits. */ -+#define R_PARISC_PLTOFF14F 55 - #define R_PARISC_LTOFF_FPTR32 57 /* 32 bits LT-rel. function pointer. */ - #define R_PARISC_LTOFF_FPTR21L 58 /* LT-rel. fct ptr, left 21 bits. */ - #define R_PARISC_LTOFF_FPTR14R 62 /* LT-rel. fct ptr, right 14 bits. 
*/ -@@ -1930,6 +1938,7 @@ enum - #define R_PARISC_PLABEL21L 66 /* Left 21 bits of fdesc address. */ - #define R_PARISC_PLABEL14R 70 /* Right 14 bits of fdesc address. */ - #define R_PARISC_PCREL64 72 /* 64 bits PC-rel. address. */ -+#define R_PARISC_PCREL22C 73 - #define R_PARISC_PCREL22F 74 /* 22 bits PC-rel. address. */ - #define R_PARISC_PCREL14WR 75 /* PC-rel. address, right 14 bits. */ - #define R_PARISC_PCREL14DR 76 /* PC rel. address, right 14 bits. */ -@@ -1955,6 +1964,8 @@ enum - #define R_PARISC_LTOFF16WF 102 /* 16 bits LT-rel. address. */ - #define R_PARISC_LTOFF16DF 103 /* 16 bits LT-rel. address. */ - #define R_PARISC_SECREL64 104 /* 64 bits section rel. address. */ -+#define R_PARISC_BASEREL14WR 107 -+#define R_PARISC_BASEREL14DR 108 - #define R_PARISC_SEGREL64 112 /* 64 bits segment rel. address. */ - #define R_PARISC_PLTOFF14WR 115 /* PLT-rel. address, right 14 bits. */ - #define R_PARISC_PLTOFF14DR 116 /* PLT-rel. address, right 14 bits. */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/kfreebsd_path.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/kfreebsd_path.patch deleted file mode 100644 index ba454ee77..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/kfreebsd_path.patch +++ /dev/null @@ -1,15 +0,0 @@ ---- elfutils/tests/run-native-test.sh.orig -+++ elfutils/tests/run-native-test.sh -@@ -78,6 +78,12 @@ - test $native -eq 0 || testrun "$@" -p $native > /dev/null - } - -+# On the Debian buildds, GNU/kFreeBSD linprocfs /proc/$PID/maps does -+# not give absolute paths due to sbuild's bind mounts (bug #570805) -+# therefore the next two test programs are expected to fail with -+# "cannot attach to process: Function not implemented". -+[ "$(uname)" = "GNU/kFreeBSD" ] && exit 77 -+ - native_test ${abs_builddir}/allregs - native_test ${abs_builddir}/funcretval - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/m68k_backend.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/m68k_backend.diff deleted file mode 100644 index d73855b60..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/m68k_backend.diff +++ /dev/null @@ -1,492 +0,0 @@ -From: Kurt Roeckx -From: Thorsten Glaser -Subject: m68k support - -Written by Kurt Roeckx, except for the retval support which was written -by Thorsten Glaser - - -Index: elfutils-0.164/backends/m68k_init.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/m68k_init.c -@@ -0,0 +1,49 @@ -+/* Initialization of m68k specific backend library. -+ Copyright (C) 2007 Kurt Roeckx -+ -+ This software is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ This softare is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with this software; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. 
-+ -+*/ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#define BACKEND m68k_ -+#define RELOC_PREFIX R_68K_ -+#include "libebl_CPU.h" -+ -+/* This defines the common reloc hooks based on m68k_reloc.def. */ -+#include "common-reloc.c" -+ -+ -+const char * -+m68k_init (Elf *elf __attribute__ ((unused)), -+ GElf_Half machine __attribute__ ((unused)), -+ Ebl *eh, -+ size_t ehlen) -+{ -+ /* Check whether the Elf_BH object has a sufficent size. */ -+ if (ehlen < sizeof (Ebl)) -+ return NULL; -+ -+ /* We handle it. */ -+ eh->name = "m68k"; -+ m68k_init_reloc (eh); -+ HOOK (eh, reloc_simple_type); -+ HOOK (eh, return_value_location); -+ HOOK (eh, register_info); -+ -+ return MODVERSION; -+} -Index: elfutils-0.164/backends/m68k_regs.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/m68k_regs.c -@@ -0,0 +1,106 @@ -+/* Register names and numbers for m68k DWARF. -+ Copyright (C) 2007 Kurt Roeckx -+ -+ This software is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ This software is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with this software; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND m68k_ -+#include "libebl_CPU.h" -+ -+ssize_t -+m68k_register_info (Ebl *ebl __attribute__ ((unused)), -+ int regno, char *name, size_t namelen, -+ const char **prefix, const char **setname, -+ int *bits, int *type) -+{ -+ if (name == NULL) -+ return 25; -+ -+ if (regno < 0 || regno > 24 || namelen < 5) -+ return -1; -+ -+ *prefix = "%"; -+ *bits = 32; -+ *type = (regno < 8 ? DW_ATE_signed -+ : regno < 16 ? DW_ATE_address : DW_ATE_float); -+ -+ if (regno < 8) -+ { -+ *setname = "integer"; -+ } -+ else if (regno < 16) -+ { -+ *setname = "address"; -+ } -+ else if (regno < 24) -+ { -+ *setname = "FPU"; -+ } -+ else -+ { -+ *setname = "address"; -+ *type = DW_ATE_address; -+ } -+ -+ switch (regno) -+ { -+ case 0 ... 7: -+ name[0] = 'd'; -+ name[1] = regno + '0'; -+ namelen = 2; -+ break; -+ -+ case 8 ... 13: -+ name[0] = 'a'; -+ name[1] = regno - 8 + '0'; -+ namelen = 2; -+ break; -+ -+ case 14: -+ name[0] = 'f'; -+ name[1] = 'p'; -+ namelen = 2; -+ break; -+ -+ case 15: -+ name[0] = 's'; -+ name[1] = 'p'; -+ namelen = 2; -+ break; -+ -+ case 16 ... 23: -+ name[0] = 'f'; -+ name[1] = 'p'; -+ name[2] = regno - 16 + '0'; -+ namelen = 3; -+ break; -+ -+ case 24: -+ name[0] = 'p'; -+ name[1] = 'c'; -+ namelen = 2; -+ } -+ -+ name[namelen++] = '\0'; -+ return namelen; -+} -+ -Index: elfutils-0.164/backends/m68k_reloc.def -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/m68k_reloc.def -@@ -0,0 +1,45 @@ -+/* List the relocation types for m68k. -*- C -*- -+ Copyright (C) 2007 Kurt Roeckx -+ -+ This software is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. 
-+ -+ This software is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with this software; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+*/ -+ -+/* NAME, REL|EXEC|DYN */ -+ -+RELOC_TYPE (NONE, 0) -+RELOC_TYPE (32, REL|EXEC|DYN) -+RELOC_TYPE (16, REL) -+RELOC_TYPE (8, REL) -+RELOC_TYPE (PC32, REL|EXEC|DYN) -+RELOC_TYPE (PC16, REL) -+RELOC_TYPE (PC8, REL) -+RELOC_TYPE (GOT32, REL) -+RELOC_TYPE (GOT16, REL) -+RELOC_TYPE (GOT8, REL) -+RELOC_TYPE (GOT32O, REL) -+RELOC_TYPE (GOT16O, REL) -+RELOC_TYPE (GOT8O, REL) -+RELOC_TYPE (PLT32, REL) -+RELOC_TYPE (PLT16, REL) -+RELOC_TYPE (PLT8, REL) -+RELOC_TYPE (PLT32O, REL) -+RELOC_TYPE (PLT16O, REL) -+RELOC_TYPE (PLT8O, REL) -+RELOC_TYPE (COPY, EXEC) -+RELOC_TYPE (GLOB_DAT, EXEC|DYN) -+RELOC_TYPE (JMP_SLOT, EXEC|DYN) -+RELOC_TYPE (RELATIVE, EXEC|DYN) -+RELOC_TYPE (GNU_VTINHERIT, REL) -+RELOC_TYPE (GNU_VTENTRY, REL) -+ -Index: elfutils-0.164/libelf/elf.h -=================================================================== ---- elfutils-0.164.orig/libelf/elf.h -+++ elfutils-0.164/libelf/elf.h -@@ -1158,6 +1158,9 @@ typedef struct - #define R_68K_GLOB_DAT 20 /* Create GOT entry */ - #define R_68K_JMP_SLOT 21 /* Create PLT entry */ - #define R_68K_RELATIVE 22 /* Adjust by program base */ -+/* The next 2 are GNU extensions to enable C++ vtable garbage collection. */ -+#define R_68K_GNU_VTINHERIT 23 -+#define R_68K_GNU_VTENTRY 24 - #define R_68K_TLS_GD32 25 /* 32 bit GOT offset for GD */ - #define R_68K_TLS_GD16 26 /* 16 bit GOT offset for GD */ - #define R_68K_TLS_GD8 27 /* 8 bit GOT offset for GD */ -Index: elfutils-0.164/backends/Makefile.am -=================================================================== ---- elfutils-0.164.orig/backends/Makefile.am -+++ elfutils-0.164/backends/Makefile.am -@@ -33,12 +33,12 @@ AM_CPPFLAGS += -I$(top_srcdir)/libebl -I - - - modules = i386 sh x86_64 ia64 alpha arm aarch64 sparc ppc ppc64 s390 \ -- tilegx parisc mips -+ tilegx parisc mips m68k - libebl_pic = libebl_i386_pic.a libebl_sh_pic.a libebl_x86_64_pic.a \ - libebl_ia64_pic.a libebl_alpha_pic.a libebl_arm_pic.a \ - libebl_aarch64_pic.a libebl_sparc_pic.a libebl_ppc_pic.a \ - libebl_ppc64_pic.a libebl_s390_pic.a libebl_tilegx_pic.a \ -- libebl_parisc_pic.a libebl_mips_pic.a -+ libebl_parisc_pic.a libebl_mips_pic.a libebl_m68k_pic.a - noinst_LIBRARIES = $(libebl_pic) - noinst_DATA = $(libebl_pic:_pic.a=.so) - -@@ -120,6 +120,10 @@ mips_SRCS = mips_init.c mips_symbol.c mi - libebl_mips_pic_a_SOURCES = $(mips_SRCS) - am_libebl_mips_pic_a_OBJECTS = $(mips_SRCS:.c=.os) - -+m68k_SRCS = m68k_init.c m68k_symbol.c m68k_regs.c m68k_retval.c -+libebl_m68k_pic_a_SOURCES = $(m68k_SRCS) -+am_libebl_m68k_pic_a_OBJECTS = $(m68k_SRCS:.c=.os) -+ - libebl_%.so libebl_%.map: libebl_%_pic.a $(libelf) $(libdw) - @rm -f $(@:.so=.map) - $(AM_V_at)echo 'ELFUTILS_$(PACKAGE_VERSION) { global: $*_init; local: *; };' \ -Index: elfutils-0.164/backends/m68k_symbol.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/m68k_symbol.c -@@ -0,0 +1,43 @@ -+/* m68k specific symbolic name handling. 
-+ Copyright (C) 2007 Kurt Roeckx -+ -+ This software is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ This software distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with this software; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+*/ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND m68k_ -+#include "libebl_CPU.h" -+ -+/* Check for the simple reloc types. */ -+Elf_Type -+m68k_reloc_simple_type (Ebl *ebl __attribute__ ((unused)), int type) -+{ -+ switch (type) -+ { -+ case R_68K_32: -+ return ELF_T_SWORD; -+ case R_68K_16: -+ return ELF_T_HALF; -+ case R_68K_8: -+ return ELF_T_BYTE; -+ default: -+ return ELF_T_NUM; -+ } -+} -Index: elfutils-0.164/backends/m68k_retval.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/m68k_retval.c -@@ -0,0 +1,172 @@ -+/* Function return value location for Linux/m68k ABI. -+ Copyright (C) 2005-2010 Red Hat, Inc. -+ Copyright (c) 2011 Thorsten Glaser, Debian. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND m68k_ -+#include "libebl_CPU.h" -+ -+ -+/* %d0, or pair %d0, %d1, or %a0 */ -+static const Dwarf_Op loc_intreg[] = -+ { -+ { .atom = DW_OP_reg0 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_reg1 }, { .atom = DW_OP_piece, .number = 4 }, -+ }; -+static const Dwarf_Op loc_ptrreg[] = -+ { -+ { .atom = DW_OP_reg8 }, -+ }; -+#define nloc_intreg 1 -+#define nloc_intregpair 4 -+#define nloc_ptrreg 1 -+ -+/* %f0 */ -+static const Dwarf_Op loc_fpreg[] = -+ { -+ { .atom = DW_OP_reg16 } -+ }; -+#define nloc_fpreg 1 -+ -+/* Structures are a bit more complicated - small structures are returned -+ in %d0 / %d1 (-freg-struct-return which is enabled by default), large -+ structures use %a1 (in constrast to the SYSV psABI which says %a0) as -+ reentrant storage space indicator. 
*/ -+static const Dwarf_Op loc_aggregate[] = -+ { -+ { .atom = DW_OP_breg9, .number = 0 } -+ }; -+#define nloc_aggregate 1 -+ -+int -+m68k_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ Dwarf_Word size; -+ -+ /* Start with the function's type, and get the DW_AT_type attribute, -+ which is the type of the return value. */ -+ -+ Dwarf_Attribute attr_mem; -+ Dwarf_Attribute *attr = dwarf_attr_integrate (functypedie, DW_AT_type, -+ &attr_mem); -+ if (attr == NULL) -+ /* The function has no return value, like a `void' function in C. */ -+ return 0; -+ -+ Dwarf_Die die_mem; -+ Dwarf_Die *typedie = dwarf_formref_die (attr, &die_mem); -+ int tag = dwarf_tag (typedie); -+ -+ /* Follow typedefs and qualifiers to get to the actual type. */ -+ while (tag == DW_TAG_typedef -+ || tag == DW_TAG_const_type || tag == DW_TAG_volatile_type -+ || tag == DW_TAG_restrict_type) -+ { -+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); -+ typedie = dwarf_formref_die (attr, &die_mem); -+ tag = dwarf_tag (typedie); -+ } -+ -+ switch (tag) -+ { -+ case -1: -+ return -1; -+ -+ case DW_TAG_subrange_type: -+ if (! dwarf_hasattr_integrate (typedie, DW_AT_byte_size)) -+ { -+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); -+ typedie = dwarf_formref_die (attr, &die_mem); -+ tag = dwarf_tag (typedie); -+ } -+ /* Fall through. */ -+ -+ case DW_TAG_base_type: -+ case DW_TAG_enumeration_type: -+ case DW_TAG_pointer_type: -+ case DW_TAG_ptr_to_member_type: -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_byte_size, -+ &attr_mem), &size) != 0) -+ { -+ if (tag == DW_TAG_pointer_type || tag == DW_TAG_ptr_to_member_type) -+ size = 4; -+ else -+ return -1; -+ } -+ if (tag == DW_TAG_pointer_type || tag == DW_TAG_ptr_to_member_type) -+ { -+ *locp = loc_ptrreg; -+ return nloc_ptrreg; -+ } -+ if (tag == DW_TAG_base_type) -+ { -+ Dwarf_Word encoding; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding, -+ &attr_mem), -+ &encoding) != 0) -+ return -1; -+ if (encoding == DW_ATE_float) -+ { -+ /* XXX really 10? */ -+ if (size > 10) -+ return -2; -+ *locp = loc_fpreg; -+ return nloc_fpreg; -+ } -+ } -+ if (size <= 8) -+ { -+ intreg: -+ /* XXX check endianness of dword pair, int64 vs aggregate */ -+ *locp = loc_intreg; -+ return size <= 4 ? nloc_intreg : nloc_intregpair; -+ } -+ -+ aggregate: -+ *locp = loc_aggregate; -+ return nloc_aggregate; -+ -+ case DW_TAG_structure_type: -+ case DW_TAG_class_type: -+ case DW_TAG_union_type: -+ case DW_TAG_array_type: -+ if (dwarf_aggregate_size (typedie, &size) == 0 -+ && size > 0 && size <= 8) -+ /* not accurate for a struct whose only member is a float */ -+ goto intreg; -+ goto aggregate; -+ } -+ -+ /* XXX We don't have a good way to return specific errors from ebl calls. -+ This value means we do not understand the type, but it is well-formed -+ DWARF and might be valid. */ -+ return -2; -+} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/mips_backend.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/mips_backend.diff deleted file mode 100644 index de1237be0..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/mips_backend.diff +++ /dev/null @@ -1,711 +0,0 @@ -Index: elfutils-0.164/backends/mips_init.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/mips_init.c -@@ -0,0 +1,59 @@ -+/* Initialization of mips specific backend library. 
-+ Copyright (C) 2006 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#define BACKEND mips_ -+#define RELOC_PREFIX R_MIPS_ -+#include "libebl_CPU.h" -+ -+/* This defines the common reloc hooks based on mips_reloc.def. */ -+#include "common-reloc.c" -+ -+const char * -+mips_init (Elf *elf __attribute__ ((unused)), -+ GElf_Half machine __attribute__ ((unused)), -+ Ebl *eh, -+ size_t ehlen) -+{ -+ /* Check whether the Elf_BH object has a sufficent size. */ -+ if (ehlen < sizeof (Ebl)) -+ return NULL; -+ -+ /* We handle it. */ -+ if (machine == EM_MIPS) -+ eh->name = "MIPS R3000 big-endian"; -+ else if (machine == EM_MIPS_RS3_LE) -+ eh->name = "MIPS R3000 little-endian"; -+ -+ mips_init_reloc (eh); -+ HOOK (eh, reloc_simple_type); -+ HOOK (eh, return_value_location); -+ HOOK (eh, register_info); -+ -+ return MODVERSION; -+} -Index: elfutils-0.164/backends/mips_regs.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/mips_regs.c -@@ -0,0 +1,104 @@ -+/* Register names and numbers for MIPS DWARF. -+ Copyright (C) 2006 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . 
*/ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND mips_ -+#include "libebl_CPU.h" -+ -+ssize_t -+mips_register_info (Ebl *ebl __attribute__((unused)), -+ int regno, char *name, size_t namelen, -+ const char **prefix, const char **setname, -+ int *bits, int *type) -+{ -+ if (name == NULL) -+ return 66; -+ -+ if (regno < 0 || regno > 65 || namelen < 4) -+ return -1; -+ -+ *prefix = "$"; -+ -+ if (regno < 32) -+ { -+ *setname = "integer"; -+ *type = DW_ATE_signed; -+ *bits = 32; -+ if (regno < 32 + 10) -+ { -+ name[0] = regno + '0'; -+ namelen = 1; -+ } -+ else -+ { -+ name[0] = (regno / 10) + '0'; -+ name[1] = (regno % 10) + '0'; -+ namelen = 2; -+ } -+ } -+ else if (regno < 64) -+ { -+ *setname = "FPU"; -+ *type = DW_ATE_float; -+ *bits = 32; -+ name[0] = 'f'; -+ if (regno < 32 + 10) -+ { -+ name[1] = (regno - 32) + '0'; -+ namelen = 2; -+ } -+ else -+ { -+ name[1] = (regno - 32) / 10 + '0'; -+ name[2] = (regno - 32) % 10 + '0'; -+ namelen = 3; -+ } -+ } -+ else if (regno == 64) -+ { -+ *type = DW_ATE_signed; -+ *bits = 32; -+ name[0] = 'h'; -+ name[1] = 'i'; -+ namelen = 2; -+ } -+ else -+ { -+ *type = DW_ATE_signed; -+ *bits = 32; -+ name[0] = 'l'; -+ name[1] = 'o'; -+ namelen = 2; -+ } -+ -+ name[namelen++] = '\0'; -+ return namelen; -+} -Index: elfutils-0.164/backends/mips_reloc.def -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/mips_reloc.def -@@ -0,0 +1,79 @@ -+/* List the relocation types for mips. -*- C -*- -+ Copyright (C) 2006 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . 
*/ -+ -+/* NAME, REL|EXEC|DYN */ -+ -+RELOC_TYPE (NONE, 0) -+RELOC_TYPE (16, 0) -+RELOC_TYPE (32, 0) -+RELOC_TYPE (REL32, 0) -+RELOC_TYPE (26, 0) -+RELOC_TYPE (HI16, 0) -+RELOC_TYPE (LO16, 0) -+RELOC_TYPE (GPREL16, 0) -+RELOC_TYPE (LITERAL, 0) -+RELOC_TYPE (GOT16, 0) -+RELOC_TYPE (PC16, 0) -+RELOC_TYPE (CALL16, 0) -+RELOC_TYPE (GPREL32, 0) -+ -+RELOC_TYPE (SHIFT5, 0) -+RELOC_TYPE (SHIFT6, 0) -+RELOC_TYPE (64, 0) -+RELOC_TYPE (GOT_DISP, 0) -+RELOC_TYPE (GOT_PAGE, 0) -+RELOC_TYPE (GOT_OFST, 0) -+RELOC_TYPE (GOT_HI16, 0) -+RELOC_TYPE (GOT_LO16, 0) -+RELOC_TYPE (SUB, 0) -+RELOC_TYPE (INSERT_A, 0) -+RELOC_TYPE (INSERT_B, 0) -+RELOC_TYPE (DELETE, 0) -+RELOC_TYPE (HIGHER, 0) -+RELOC_TYPE (HIGHEST, 0) -+RELOC_TYPE (CALL_HI16, 0) -+RELOC_TYPE (CALL_LO16, 0) -+RELOC_TYPE (SCN_DISP, 0) -+RELOC_TYPE (REL16, 0) -+RELOC_TYPE (ADD_IMMEDIATE, 0) -+RELOC_TYPE (PJUMP, 0) -+RELOC_TYPE (RELGOT, 0) -+RELOC_TYPE (JALR, 0) -+RELOC_TYPE (TLS_DTPMOD32, 0) -+RELOC_TYPE (TLS_DTPREL32, 0) -+RELOC_TYPE (TLS_DTPMOD64, 0) -+RELOC_TYPE (TLS_DTPREL64, 0) -+RELOC_TYPE (TLS_GD, 0) -+RELOC_TYPE (TLS_LDM, 0) -+RELOC_TYPE (TLS_DTPREL_HI16, 0) -+RELOC_TYPE (TLS_DTPREL_LO16, 0) -+RELOC_TYPE (TLS_GOTTPREL, 0) -+RELOC_TYPE (TLS_TPREL32, 0) -+RELOC_TYPE (TLS_TPREL64, 0) -+RELOC_TYPE (TLS_TPREL_HI16, 0) -+RELOC_TYPE (TLS_TPREL_LO16, 0) -+ -+#define NO_COPY_RELOC 1 -+#define NO_RELATIVE_RELOC 1 -Index: elfutils-0.164/backends/mips_retval.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/mips_retval.c -@@ -0,0 +1,321 @@ -+/* Function return value location for Linux/mips ABI. -+ Copyright (C) 2005 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+#include -+#include -+ -+#include "../libebl/libeblP.h" -+#include "../libdw/libdwP.h" -+ -+#define BACKEND mips_ -+#include "libebl_CPU.h" -+ -+/* The ABI of the file. Also see EF_MIPS_ABI2 above. */ -+#define EF_MIPS_ABI 0x0000F000 -+ -+/* The original o32 abi. */ -+#define E_MIPS_ABI_O32 0x00001000 -+ -+/* O32 extended to work on 64 bit architectures */ -+#define E_MIPS_ABI_O64 0x00002000 -+ -+/* EABI in 32 bit mode */ -+#define E_MIPS_ABI_EABI32 0x00003000 -+ -+/* EABI in 64 bit mode */ -+#define E_MIPS_ABI_EABI64 0x00004000 -+ -+/* All the possible MIPS ABIs. 
*/ -+enum mips_abi -+ { -+ MIPS_ABI_UNKNOWN = 0, -+ MIPS_ABI_N32, -+ MIPS_ABI_O32, -+ MIPS_ABI_N64, -+ MIPS_ABI_O64, -+ MIPS_ABI_EABI32, -+ MIPS_ABI_EABI64, -+ MIPS_ABI_LAST -+ }; -+ -+/* Find the mips ABI of the current file */ -+enum mips_abi find_mips_abi(Elf *elf) -+{ -+ GElf_Ehdr ehdr_mem; -+ GElf_Ehdr *ehdr = gelf_getehdr (elf, &ehdr_mem); -+ -+ if (ehdr == NULL) -+ return MIPS_ABI_LAST; -+ -+ GElf_Word elf_flags = ehdr->e_flags; -+ -+ /* Check elf_flags to see if it specifies the ABI being used. */ -+ switch ((elf_flags & EF_MIPS_ABI)) -+ { -+ case E_MIPS_ABI_O32: -+ return MIPS_ABI_O32; -+ case E_MIPS_ABI_O64: -+ return MIPS_ABI_O64; -+ case E_MIPS_ABI_EABI32: -+ return MIPS_ABI_EABI32; -+ case E_MIPS_ABI_EABI64: -+ return MIPS_ABI_EABI64; -+ default: -+ if ((elf_flags & EF_MIPS_ABI2)) -+ return MIPS_ABI_N32; -+ } -+ -+ /* GCC creates a pseudo-section whose name describes the ABI. */ -+ size_t shstrndx; -+ if (elf_getshdrstrndx (elf, &shstrndx) < 0) -+ return MIPS_ABI_LAST; -+ -+ const char *name; -+ Elf_Scn *scn = NULL; -+ while ((scn = elf_nextscn (elf, scn)) != NULL) -+ { -+ GElf_Shdr shdr_mem; -+ GElf_Shdr *shdr = gelf_getshdr (scn, &shdr_mem); -+ if (shdr == NULL) -+ return MIPS_ABI_LAST; -+ -+ name = elf_strptr (elf, shstrndx, shdr->sh_name) ?: ""; -+ if (strncmp (name, ".mdebug.", 8) != 0) -+ continue; -+ -+ if (strcmp (name, ".mdebug.abi32") == 0) -+ return MIPS_ABI_O32; -+ else if (strcmp (name, ".mdebug.abiN32") == 0) -+ return MIPS_ABI_N32; -+ else if (strcmp (name, ".mdebug.abi64") == 0) -+ return MIPS_ABI_N64; -+ else if (strcmp (name, ".mdebug.abiO64") == 0) -+ return MIPS_ABI_O64; -+ else if (strcmp (name, ".mdebug.eabi32") == 0) -+ return MIPS_ABI_EABI32; -+ else if (strcmp (name, ".mdebug.eabi64") == 0) -+ return MIPS_ABI_EABI64; -+ else -+ return MIPS_ABI_UNKNOWN; -+ } -+ -+ return MIPS_ABI_UNKNOWN; -+} -+ -+unsigned int -+mips_abi_regsize (enum mips_abi abi) -+{ -+ switch (abi) -+ { -+ case MIPS_ABI_EABI32: -+ case MIPS_ABI_O32: -+ return 4; -+ case MIPS_ABI_N32: -+ case MIPS_ABI_N64: -+ case MIPS_ABI_O64: -+ case MIPS_ABI_EABI64: -+ return 8; -+ case MIPS_ABI_UNKNOWN: -+ case MIPS_ABI_LAST: -+ default: -+ return 0; -+ } -+} -+ -+ -+/* $v0 or pair $v0, $v1 */ -+static const Dwarf_Op loc_intreg_o32[] = -+ { -+ { .atom = DW_OP_reg2 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_reg3 }, { .atom = DW_OP_piece, .number = 4 }, -+ }; -+ -+static const Dwarf_Op loc_intreg[] = -+ { -+ { .atom = DW_OP_reg2 }, { .atom = DW_OP_piece, .number = 8 }, -+ { .atom = DW_OP_reg3 }, { .atom = DW_OP_piece, .number = 8 }, -+ }; -+#define nloc_intreg 1 -+#define nloc_intregpair 4 -+ -+/* $f0 (float), or pair $f0, $f1 (double). -+ * f2/f3 are used for COMPLEX (= 2 doubles) returns in Fortran */ -+static const Dwarf_Op loc_fpreg_o32[] = -+ { -+ { .atom = DW_OP_regx, .number = 32 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_regx, .number = 33 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_regx, .number = 34 }, { .atom = DW_OP_piece, .number = 4 }, -+ { .atom = DW_OP_regx, .number = 35 }, { .atom = DW_OP_piece, .number = 4 }, -+ }; -+ -+/* $f0, or pair $f0, $f2. 
*/ -+static const Dwarf_Op loc_fpreg[] = -+ { -+ { .atom = DW_OP_regx, .number = 32 }, { .atom = DW_OP_piece, .number = 8 }, -+ { .atom = DW_OP_regx, .number = 34 }, { .atom = DW_OP_piece, .number = 8 }, -+ }; -+#define nloc_fpreg 1 -+#define nloc_fpregpair 4 -+#define nloc_fpregquad 8 -+ -+/* The return value is a structure and is actually stored in stack space -+ passed in a hidden argument by the caller. But, the compiler -+ helpfully returns the address of that space in $v0. */ -+static const Dwarf_Op loc_aggregate[] = -+ { -+ { .atom = DW_OP_breg2, .number = 0 } -+ }; -+#define nloc_aggregate 1 -+ -+int -+mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) -+{ -+ /* First find the ABI used by the elf object */ -+ enum mips_abi abi = find_mips_abi(functypedie->cu->dbg->elf); -+ -+ /* Something went seriously wrong while trying to figure out the ABI */ -+ if (abi == MIPS_ABI_LAST) -+ return -1; -+ -+ /* We couldn't identify the ABI, but the file seems valid */ -+ if (abi == MIPS_ABI_UNKNOWN) -+ return -2; -+ -+ /* Can't handle EABI variants */ -+ if ((abi == MIPS_ABI_EABI32) || (abi == MIPS_ABI_EABI64)) -+ return -2; -+ -+ unsigned int regsize = mips_abi_regsize (abi); -+ if (!regsize) -+ return -2; -+ -+ /* Start with the function's type, and get the DW_AT_type attribute, -+ which is the type of the return value. */ -+ -+ Dwarf_Attribute attr_mem; -+ Dwarf_Attribute *attr = dwarf_attr_integrate (functypedie, DW_AT_type, &attr_mem); -+ if (attr == NULL) -+ /* The function has no return value, like a `void' function in C. */ -+ return 0; -+ -+ Dwarf_Die die_mem; -+ Dwarf_Die *typedie = dwarf_formref_die (attr, &die_mem); -+ int tag = dwarf_tag (typedie); -+ -+ /* Follow typedefs and qualifiers to get to the actual type. */ -+ while (tag == DW_TAG_typedef -+ || tag == DW_TAG_const_type || tag == DW_TAG_volatile_type -+ || tag == DW_TAG_restrict_type) -+ { -+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); -+ typedie = dwarf_formref_die (attr, &die_mem); -+ tag = dwarf_tag (typedie); -+ } -+ -+ switch (tag) -+ { -+ case -1: -+ return -1; -+ -+ case DW_TAG_subrange_type: -+ if (! dwarf_hasattr_integrate (typedie, DW_AT_byte_size)) -+ { -+ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); -+ typedie = dwarf_formref_die (attr, &die_mem); -+ tag = dwarf_tag (typedie); -+ } -+ /* Fall through. */ -+ -+ case DW_TAG_base_type: -+ case DW_TAG_enumeration_type: -+ case DW_TAG_pointer_type: -+ case DW_TAG_ptr_to_member_type: -+ { -+ Dwarf_Word size; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_byte_size, -+ &attr_mem), &size) != 0) -+ { -+ if (tag == DW_TAG_pointer_type || tag == DW_TAG_ptr_to_member_type) -+ size = regsize; -+ else -+ return -1; -+ } -+ if (tag == DW_TAG_base_type) -+ { -+ Dwarf_Word encoding; -+ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding, -+ &attr_mem), &encoding) != 0) -+ return -1; -+ -+#define ABI_LOC(loc, regsize) ((regsize) == 4 ? (loc ## _o32) : (loc)) -+ -+ if (encoding == DW_ATE_float) -+ { -+ *locp = ABI_LOC(loc_fpreg, regsize); -+ if (size <= regsize) -+ return nloc_fpreg; -+ -+ if (size <= 2*regsize) -+ return nloc_fpregpair; -+ -+ if (size <= 4*regsize && abi == MIPS_ABI_O32) -+ return nloc_fpregquad; -+ -+ goto aggregate; -+ } -+ } -+ *locp = ABI_LOC(loc_intreg, regsize); -+ if (size <= regsize) -+ return nloc_intreg; -+ if (size <= 2*regsize) -+ return nloc_intregpair; -+ -+ /* Else fall through. 
Shouldn't happen though (at least with gcc) */ -+ } -+ -+ case DW_TAG_structure_type: -+ case DW_TAG_class_type: -+ case DW_TAG_union_type: -+ case DW_TAG_array_type: -+ aggregate: -+ /* XXX TODO: Can't handle structure return with other ABI's yet :-/ */ -+ if ((abi != MIPS_ABI_O32) && (abi != MIPS_ABI_O64)) -+ return -2; -+ -+ *locp = loc_aggregate; -+ return nloc_aggregate; -+ } -+ -+ /* XXX We don't have a good way to return specific errors from ebl calls. -+ This value means we do not understand the type, but it is well-formed -+ DWARF and might be valid. */ -+ return -2; -+} -Index: elfutils-0.164/backends/mips_symbol.c -=================================================================== ---- /dev/null -+++ elfutils-0.164/backends/mips_symbol.c -@@ -0,0 +1,52 @@ -+/* MIPS specific symbolic name handling. -+ Copyright (C) 2002, 2003, 2005 Red Hat, Inc. -+ This file is part of Red Hat elfutils. -+ Written by Jakub Jelinek , 2002. -+ -+ Red Hat elfutils is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by the -+ Free Software Foundation; version 2 of the License. -+ -+ Red Hat elfutils is distributed in the hope that it will be useful, but -+ WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -+ General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License along -+ with Red Hat elfutils; if not, write to the Free Software Foundation, -+ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. -+ -+ Red Hat elfutils is an included package of the Open Invention Network. -+ An included package of the Open Invention Network is a package for which -+ Open Invention Network licensees cross-license their patents. No patent -+ license is granted, either expressly or impliedly, by designation as an -+ included package. Should you wish to participate in the Open Invention -+ Network licensing program, please visit www.openinventionnetwork.com -+ . */ -+ -+#ifdef HAVE_CONFIG_H -+# include -+#endif -+ -+#include -+#include -+ -+#define BACKEND mips_ -+#include "libebl_CPU.h" -+ -+/* Check for the simple reloc types. 
*/ -+Elf_Type -+mips_reloc_simple_type (Ebl *ebl __attribute__ ((unused)), int type) -+{ -+ switch (type) -+ { -+ case R_MIPS_16: -+ return ELF_T_HALF; -+ case R_MIPS_32: -+ return ELF_T_WORD; -+ case R_MIPS_64: -+ return ELF_T_XWORD; -+ default: -+ return ELF_T_NUM; -+ } -+} -Index: elfutils-0.164/libebl/eblopenbackend.c -=================================================================== ---- elfutils-0.164.orig/libebl/eblopenbackend.c -+++ elfutils-0.164/libebl/eblopenbackend.c -@@ -71,6 +71,8 @@ static const struct - { "sparc", "elf_sparc", "sparc", 5, EM_SPARC, 0, 0 }, - { "sparc", "elf_sparcv8plus", "sparc", 5, EM_SPARC32PLUS, 0, 0 }, - { "s390", "ebl_s390", "s390", 4, EM_S390, 0, 0 }, -+ { "mips", "elf_mips", "mips", 4, EM_MIPS, 0, 0 }, -+ { "mips", "elf_mipsel", "mipsel", 4, EM_MIPS_RS3_LE, 0, 0 }, - - { "m32", "elf_m32", "m32", 3, EM_M32, 0, 0 }, - { "m68k", "elf_m68k", "m68k", 4, EM_68K, 0, 0 }, -Index: elfutils-0.164/backends/common-reloc.c -=================================================================== ---- elfutils-0.164.orig/backends/common-reloc.c -+++ elfutils-0.164/backends/common-reloc.c -@@ -125,11 +125,13 @@ EBLHOOK(reloc_valid_use) (Elf *elf, int - } - - -+#ifndef NO_COPY_RELOC - bool - EBLHOOK(copy_reloc_p) (int reloc) - { - return reloc == R_TYPE (COPY); - } -+#endif - - bool - EBLHOOK(none_reloc_p) (int reloc) -@@ -151,7 +153,9 @@ EBLHOOK(init_reloc) (Ebl *ebl) - ebl->reloc_type_name = EBLHOOK(reloc_type_name); - ebl->reloc_type_check = EBLHOOK(reloc_type_check); - ebl->reloc_valid_use = EBLHOOK(reloc_valid_use); -+#ifndef NO_COPY_RELOC - ebl->copy_reloc_p = EBLHOOK(copy_reloc_p); -+#endif - ebl->none_reloc_p = EBLHOOK(none_reloc_p); - #ifndef NO_RELATIVE_RELOC - ebl->relative_reloc_p = EBLHOOK(relative_reloc_p); -Index: elfutils-0.164/backends/Makefile.am -=================================================================== ---- elfutils-0.164.orig/backends/Makefile.am -+++ elfutils-0.164/backends/Makefile.am -@@ -33,12 +33,12 @@ AM_CPPFLAGS += -I$(top_srcdir)/libebl -I - - - modules = i386 sh x86_64 ia64 alpha arm aarch64 sparc ppc ppc64 s390 \ -- tilegx parisc -+ tilegx parisc mips - libebl_pic = libebl_i386_pic.a libebl_sh_pic.a libebl_x86_64_pic.a \ - libebl_ia64_pic.a libebl_alpha_pic.a libebl_arm_pic.a \ - libebl_aarch64_pic.a libebl_sparc_pic.a libebl_ppc_pic.a \ - libebl_ppc64_pic.a libebl_s390_pic.a libebl_tilegx_pic.a \ -- libebl_parisc_pic.a -+ libebl_parisc_pic.a libebl_mips_pic.a - noinst_LIBRARIES = $(libebl_pic) - noinst_DATA = $(libebl_pic:_pic.a=.so) - -@@ -116,6 +116,10 @@ parisc_SRCS = parisc_init.c parisc_symbo - libebl_parisc_pic_a_SOURCES = $(parisc_SRCS) - am_libebl_parisc_pic_a_OBJECTS = $(parisc_SRCS:.c=.os) - -+mips_SRCS = mips_init.c mips_symbol.c mips_regs.c mips_retval.c -+libebl_mips_pic_a_SOURCES = $(mips_SRCS) -+am_libebl_mips_pic_a_OBJECTS = $(mips_SRCS:.c=.os) -+ - libebl_%.so libebl_%.map: libebl_%_pic.a $(libelf) $(libdw) - @rm -f $(@:.so=.map) - $(AM_V_at)echo 'ELFUTILS_$(PACKAGE_VERSION) { global: $*_init; local: *; };' \ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/mips_readelf_w.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/mips_readelf_w.patch deleted file mode 100644 index 930d6f664..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/mips_readelf_w.patch +++ /dev/null @@ -1,22 +0,0 @@ -From: Kurt Roeckx -Subject: Make readelf -w output debug information on mips -Bug-Debian: http://bugs.debian.org/662041 
-Forwarded: not-needed - -Upstreams wants a change where this is handled by a hook that needs -to be filled in by the backend for the arch. - -Index: elfutils-0.164/src/readelf.c -=================================================================== ---- elfutils-0.164.orig/src/readelf.c -+++ elfutils-0.164/src/readelf.c -@@ -8218,7 +8218,8 @@ print_debug (Dwfl_Module *dwflmod, Ebl * - GElf_Shdr shdr_mem; - GElf_Shdr *shdr = gelf_getshdr (scn, &shdr_mem); - -- if (shdr != NULL && shdr->sh_type == SHT_PROGBITS) -+ if (shdr != NULL && ( -+ (shdr->sh_type == SHT_PROGBITS) || (shdr->sh_type == SHT_MIPS_DWARF))) - { - static const struct - { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/shadow.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/shadow.patch deleted file mode 100644 index d31961f94..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/shadow.patch +++ /dev/null @@ -1,23 +0,0 @@ -Fix control path where we have str as uninitialized string - -| /home/ubuntu/work/oe/openembedded-core/build/tmp-musl/work/i586-oe-linux-musl/elfutils/0.164-r0/elfutils-0.164/libcpu/i386_disasm.c: In function 'i386_disasm': -| /home/ubuntu/work/oe/openembedded-core/build/tmp-musl/work/i586-oe-linux-musl/elfutils/0.164-r0/elfutils-0.164/libcpu/i386_disasm.c:310:5: error: 'str' may be used uninitialized in this function [-Werror=maybe-uninitialized] -| memcpy (buf + bufcnt, _str, _len); \ -| ^ -| /home/ubuntu/work/oe/openembedded-core/build/tmp-musl/work/i586-oe-linux-musl/elfutils/0.164-r0/elfutils-0.164/libcpu/i386_disasm.c:709:17: note: 'str' was declared here -| const char *str; - -Signed-off-by: Khem Raj -Upstream-Status: Pending -Index: elfutils-0.164/libcpu/i386_disasm.c -=================================================================== ---- elfutils-0.164.orig/libcpu/i386_disasm.c -+++ elfutils-0.164/libcpu/i386_disasm.c -@@ -821,6 +821,7 @@ i386_disasm (const uint8_t **startp, con - } - - default: -+ str = ""; - assert (! "INVALID not handled"); - } - } diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/testsuite-ignore-elflint.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/testsuite-ignore-elflint.diff deleted file mode 100644 index eae5796de..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/testsuite-ignore-elflint.diff +++ /dev/null @@ -1,39 +0,0 @@ -On many architectures this test fails because binaries/libs produced by -binutils don't pass elflint. However elfutils shouldn't FTBFS because of this. - -So we run the tests on all archs to see what breaks, but if it breaks we ignore -the result (exitcode 77 means: this test was skipped). - -Index: elfutils-0.156/tests/run-elflint-self.sh -=================================================================== ---- elfutils-0.156.orig/tests/run-elflint-self.sh 2013-07-28 14:35:36.000000000 +0200 -+++ elfutils-0.156/tests/run-elflint-self.sh 2013-07-28 14:36:10.000000000 +0200 -@@ -18,4 +18,4 @@ - - . 
$srcdir/test-subr.sh - --testrun_on_self ${abs_top_builddir}/src/elflint --quiet --gnu-ld -+testrun_on_self_skip ${abs_top_builddir}/src/elflint --quiet --gnu-ld -Index: elfutils-0.156/tests/test-subr.sh -=================================================================== ---- elfutils-0.156.orig/tests/test-subr.sh 2013-07-28 14:35:36.000000000 +0200 -+++ elfutils-0.156/tests/test-subr.sh 2013-07-28 14:35:36.000000000 +0200 -@@ -149,3 +149,18 @@ - # Only exit if something failed - if test $exit_status != 0; then exit $exit_status; fi - } -+ -+# Same as testrun_on_self(), but skip on failure. -+testrun_on_self_skip() -+{ -+ exit_status=0 -+ -+ for file in $self_test_files; do -+ testrun $* $file \ -+ || { echo "*** failure in $* $file"; exit_status=77; } -+ done -+ -+ # Only exit if something failed -+ if test $exit_status != 0; then exit $exit_status; fi -+} -+ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/uclibc-support.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/uclibc-support.patch deleted file mode 100644 index 62999985c..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.166/uclibc-support.patch +++ /dev/null @@ -1,128 +0,0 @@ -From 3daec2dd11a04955f95e8f65a48820103d84dbec Mon Sep 17 00:00:00 2001 -From: Junling Zheng -Date: Thu, 9 Apr 2015 12:12:49 +0000 -Subject: [PATCH] uclibc support for elfutils 0.161 - -on uclibc systems libintl and libuargp are separate from libc. -so they need to be specified on commandline when we use proxy-libintl -then libintl is a static archive so it should be listed last since -elfutils does not respect disable-nls we need to link in libintl - -We add a new option --enable-uclibc which will be used to control -the uclibc specific configurations during build. 
- -Upstream-Status: Inappropriate [uclibc specific] - -Signed-off-by: Khem Raj -Signed-off-by: Junling Zheng -[Junling Zheng: - - adjust context -] ---- - configure.ac | 8 ++++++++ - libcpu/Makefile.am | 4 ++++ - libdw/Makefile.am | 7 ++++++- - libelf/Makefile.am | 5 +++++ - 4 files changed, 23 insertions(+), 1 deletion(-) - -Index: elfutils-0.166/configure.ac -=================================================================== ---- elfutils-0.166.orig/configure.ac -+++ elfutils-0.166/configure.ac -@@ -79,6 +79,14 @@ AS_IF([test "$use_locks" = yes], - - AH_TEMPLATE([USE_LOCKS], [Defined if libraries should be thread-safe.]) - -+AC_ARG_ENABLE([uclibc], -+AS_HELP_STRING([--enable-uclibc], [Use uclibc for system libraries]), -+use_uclibc=yes, use_uclibc=no) -+AM_CONDITIONAL(USE_UCLIBC, test "$use_uclibc" = yes) -+AS_IF([test "$use_uclibc" = yes], [AC_DEFINE(USE_UCLIBC)]) -+ -+AH_TEMPLATE([USE_UCLIBC], [Defined if uclibc libraries are used.]) -+ - AC_PROG_CC - AC_PROG_RANLIB - AC_PROG_YACC -@@ -347,18 +355,13 @@ AC_LINK_IFELSE( - ) - - dnl If our libc doesn't provide argp, then test for libargp --if test "$libc_has_argp" = "false" ; then -- AC_MSG_WARN("libc does not have argp") -- AC_CHECK_LIB([argp], [argp_parse], [have_argp="true"], [have_argp="false"]) -- -- if test "$have_argp" = "false"; then -- AC_MSG_ERROR("no libargp found") -- else -- argp_LDADD="-largp" -- fi --else -- argp_LDADD="" --fi -+AS_IF([test "x$libc_has_argp" = "xfalse"], -+ [AC_MSG_WARN("libc does not have argp") -+ AC_CHECK_LIB([argp], [argp_parse], [have_argp="true" argp_LDADD="-largp"], [have_argp="false"])], [argp_LDADD=""]) -+ -+AS_IF([test "x$libc_has_argp" = "xfalse" -a "x$have_argp" = "xfalse"], -+ AC_CHECK_LIB([uargp], [argp_parse], [have_uargp="true" argp_LDADD="-luargp"], [have_uargp="false"])], [argp_LDADD=""]) -+ - AC_SUBST([argp_LDADD]) - - dnl The directories with content. 
-Index: elfutils-0.166/libcpu/Makefile.am -=================================================================== ---- elfutils-0.166.orig/libcpu/Makefile.am -+++ elfutils-0.166/libcpu/Makefile.am -@@ -80,6 +80,10 @@ i386_parse_CFLAGS = -DNMNES="`wc -l < i3 - i386_lex.o: i386_parse.h - i386_gendis_LDADD = $(libeu) -lm - -+if USE_UCLIBC -+i386_gendis_LDADD += -lintl -+endif -+ - i386_parse.h: i386_parse.c ; - - EXTRA_DIST = defs/i386 -Index: elfutils-0.166/libdw/Makefile.am -=================================================================== ---- elfutils-0.166.orig/libdw/Makefile.am -+++ elfutils-0.166/libdw/Makefile.am -@@ -102,6 +102,11 @@ endif - libdw_pic_a_SOURCES = - am_libdw_pic_a_OBJECTS = $(libdw_a_SOURCES:.c=.os) - -+libdw_so_LDLIBS = -+if USE_UCLIBC -+libdw_so_LDLIBS += -lintl -+endif -+ - libdw_so_SOURCES = - libdw.so$(EXEEXT): $(srcdir)/libdw.map libdw_pic.a ../libdwelf/libdwelf_pic.a \ - ../libdwfl/libdwfl_pic.a ../libebl/libebl.a \ -@@ -112,7 +117,7 @@ libdw.so$(EXEEXT): $(srcdir)/libdw.map l - -Wl,--enable-new-dtags,-rpath,$(pkglibdir) \ - -Wl,--version-script,$<,--no-undefined \ - -Wl,--whole-archive $(filter-out $<,$^) -Wl,--no-whole-archive\ -- -ldl -lz $(argp_LDADD) $(zip_LIBS) -+ -ldl -lz $(argp_LDADD) $(zip_LIBS) $(libdw_so_LDLIBS) - @$(textrel_check) - $(AM_V_at)ln -fs $@ $@.$(VERSION) - -Index: elfutils-0.166/libelf/Makefile.am -=================================================================== ---- elfutils-0.166.orig/libelf/Makefile.am -+++ elfutils-0.166/libelf/Makefile.am -@@ -96,6 +96,11 @@ libelf_pic_a_SOURCES = - am_libelf_pic_a_OBJECTS = $(libelf_a_SOURCES:.c=.os) - - libelf_so_LDLIBS = -lz -+ -+if USE_UCLIBC -+libelf_so_LDLIBS += -lintl -+endif -+ - if USE_LOCKS - libelf_so_LDLIBS += -lpthread - endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-build-Provide-alternatives-for-glibc-assumptions-hel.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-build-Provide-alternatives-for-glibc-assumptions-hel.patch new file mode 100644 index 000000000..020ffa141 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-build-Provide-alternatives-for-glibc-assumptions-hel.patch @@ -0,0 +1,1051 @@ +From 054fedda5ab9b84160d40d90cb967f2f5822b889 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Thu, 31 Dec 2015 06:35:34 +0000 +Subject: [PATCH] build: Provide alternatives for glibc assumptions helps + compiling it on musl + +Upstream-Status: Pending +Signed-off-by: Khem Raj + +Rebase to 0.68 +Signed-off-by: Hongxu Jia +--- + Makefile.am | 2 +- + lib/color.c | 3 ++- + lib/crc32_file.c | 1 + + lib/fixedsizehash.h | 1 - + lib/system.h | 10 ++++++++++ + lib/xmalloc.c | 2 +- + libasm/asm_end.c | 2 +- + libasm/asm_newscn.c | 2 +- + libcpu/i386_gendis.c | 2 +- + libcpu/i386_lex.c | 2 +- + libcpu/i386_parse.c | 2 +- + libdw/Makefile.am | 4 +++- + libdw/libdw_alloc.c | 2 +- + libdwfl/dwfl_build_id_find_elf.c | 3 ++- + libdwfl/dwfl_error.c | 4 +++- + libdwfl/dwfl_module_getdwarf.c | 1 + + libdwfl/find-debuginfo.c | 2 +- + libdwfl/libdwfl_crc32_file.c | 10 ++++++++++ + libdwfl/linux-kernel-modules.c | 1 + + libebl/eblopenbackend.c | 2 +- + libelf/elf.h | 8 ++++++-- + libelf/libelf.h | 1 + + libelf/libelfP.h | 1 + + src/addr2line.c | 2 +- + src/ar.c | 2 +- + src/arlib.c | 2 +- + src/arlib2.c | 2 +- + src/elfcmp.c | 2 +- + src/elflint.c | 2 +- + src/findtextrel.c | 2 +- + src/nm.c | 2 +- + src/objdump.c | 2 +- + src/ranlib.c | 2 +- + src/readelf.c | 2 +- + src/size.c | 2 +- + 
src/stack.c | 2 +- + src/strings.c | 2 +- + src/strip.c | 2 +- + src/unstrip.c | 2 +- + tests/addrscopes.c | 2 +- + tests/allregs.c | 2 +- + tests/backtrace-data.c | 2 +- + tests/backtrace-dwarf.c | 2 +- + tests/backtrace.c | 2 +- + tests/buildid.c | 2 +- + tests/debugaltlink.c | 2 +- + tests/debuglink.c | 2 +- + tests/deleted.c | 2 +- + tests/dwfl-addr-sect.c | 2 +- + tests/dwfl-bug-addr-overflow.c | 2 +- + tests/dwfl-bug-fd-leak.c | 2 +- + tests/dwfl-bug-getmodules.c | 2 +- + tests/dwfl-report-elf-align.c | 2 +- + tests/dwfllines.c | 2 +- + tests/dwflmodtest.c | 2 +- + tests/dwflsyms.c | 2 +- + tests/early-offscn.c | 2 +- + tests/ecp.c | 2 +- + tests/find-prologues.c | 2 +- + tests/funcretval.c | 2 +- + tests/funcscopes.c | 2 +- + tests/getsrc_die.c | 2 +- + tests/line2addr.c | 2 +- + tests/low_high_pc.c | 2 +- + tests/md5-sha1-test.c | 2 +- + tests/rdwrmmap.c | 2 +- + tests/saridx.c | 2 +- + tests/sectiondump.c | 2 +- + tests/varlocs.c | 2 +- + tests/vdsosyms.c | 2 +- + 70 files changed, 98 insertions(+), 64 deletions(-) + +diff --git a/Makefile.am b/Makefile.am +index 2ff444e..41f77df 100644 +--- a/Makefile.am ++++ b/Makefile.am +@@ -28,7 +28,7 @@ pkginclude_HEADERS = version.h + + # Add doc back when we have some real content. + SUBDIRS = config m4 lib libelf libebl libdwelf libdwfl libdw libcpu libasm \ +- backends src po tests ++ backends po tests + + EXTRA_DIST = elfutils.spec GPG-KEY NOTES CONTRIBUTING \ + COPYING COPYING-GPLV2 COPYING-LGPLV3 +diff --git a/lib/color.c b/lib/color.c +index fde2d9d..73292ac 100644 +--- a/lib/color.c ++++ b/lib/color.c +@@ -32,12 +32,13 @@ + #endif + + #include +-#include ++#include + #include + #include + #include + #include + #include "libeu.h" ++#include "system.h" + + + /* Prototype for option handler. */ +diff --git a/lib/crc32_file.c b/lib/crc32_file.c +index a8434d4..57e4298 100644 +--- a/lib/crc32_file.c ++++ b/lib/crc32_file.c +@@ -35,6 +35,7 @@ + #include + #include + #include ++#include "system.h" + + int + crc32_file (int fd, uint32_t *resp) +diff --git a/lib/fixedsizehash.h b/lib/fixedsizehash.h +index dac2a5f..43016fc 100644 +--- a/lib/fixedsizehash.h ++++ b/lib/fixedsizehash.h +@@ -30,7 +30,6 @@ + #include + #include + #include +-#include + + #include + +diff --git a/lib/system.h b/lib/system.h +index ccd99d6..0e93e60 100644 +--- a/lib/system.h ++++ b/lib/system.h +@@ -55,6 +55,16 @@ + #else + # error "Unknown byte order" + #endif ++#ifndef TEMP_FAILURE_RETRY ++#define TEMP_FAILURE_RETRY(expression) \ ++ (__extension__ \ ++ ({ long int __result; \ ++ do __result = (long int) (expression); \ ++ while (__result == -1L && errno == EINTR); \ ++ __result; })) ++#endif ++ ++#define error(status, errno, ...) err(status, __VA_ARGS__) + + #ifndef MAX + #define MAX(m, n) ((m) < (n) ? 
(n) : (m)) +diff --git a/lib/xmalloc.c b/lib/xmalloc.c +index 0cde384..217b054 100644 +--- a/lib/xmalloc.c ++++ b/lib/xmalloc.c +@@ -30,7 +30,7 @@ + # include + #endif + +-#include ++#include + #include + #include + #include +diff --git a/libasm/asm_end.c b/libasm/asm_end.c +index 191a535..bf5ab06 100644 +--- a/libasm/asm_end.c ++++ b/libasm/asm_end.c +@@ -32,7 +32,7 @@ + #endif + + #include +-#include ++#include + #include + #include + #include +diff --git a/libasm/asm_newscn.c b/libasm/asm_newscn.c +index ddbb25d..74a598d 100644 +--- a/libasm/asm_newscn.c ++++ b/libasm/asm_newscn.c +@@ -32,7 +32,7 @@ + #endif + + #include +-#include ++#include + #include + #include + #include +diff --git a/libcpu/i386_gendis.c b/libcpu/i386_gendis.c +index aae5eae..6d76016 100644 +--- a/libcpu/i386_gendis.c ++++ b/libcpu/i386_gendis.c +@@ -31,7 +31,7 @@ + # include + #endif + +-#include ++#include + #include + #include + #include +diff --git a/libcpu/i386_lex.c b/libcpu/i386_lex.c +index b670608..b842c25 100644 +--- a/libcpu/i386_lex.c ++++ b/libcpu/i386_lex.c +@@ -592,7 +592,7 @@ char *i386_text; + #endif + + #include +-#include ++#include + #include + + #include +diff --git a/libcpu/i386_parse.c b/libcpu/i386_parse.c +index 724addf..5b67802 100644 +--- a/libcpu/i386_parse.c ++++ b/libcpu/i386_parse.c +@@ -107,7 +107,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/libdw/Makefile.am b/libdw/Makefile.am +index 082d96c..51cbea0 100644 +--- a/libdw/Makefile.am ++++ b/libdw/Makefile.am +@@ -102,6 +102,8 @@ endif + libdw_pic_a_SOURCES = + am_libdw_pic_a_OBJECTS = $(libdw_a_SOURCES:.c=.os) + ++fts_LDADD = -lfts ++ + libdw_so_SOURCES = + libdw.so$(EXEEXT): $(srcdir)/libdw.map libdw_pic.a ../libdwelf/libdwelf_pic.a \ + ../libdwfl/libdwfl_pic.a ../libebl/libebl.a \ +@@ -112,7 +114,7 @@ libdw.so$(EXEEXT): $(srcdir)/libdw.map libdw_pic.a ../libdwelf/libdwelf_pic.a \ + -Wl,--enable-new-dtags,-rpath,$(pkglibdir) \ + -Wl,--version-script,$<,--no-undefined \ + -Wl,--whole-archive $(filter-out $<,$^) -Wl,--no-whole-archive\ +- -ldl -lz $(argp_LDADD) $(zip_LIBS) ++ -ldl -lz $(argp_LDADD) $(zip_LIBS) $(fts_LDADD) + @$(textrel_check) + $(AM_V_at)ln -fs $@ $@.$(VERSION) + +diff --git a/libdw/libdw_alloc.c b/libdw/libdw_alloc.c +index 28a8cf6..29aeb3f 100644 +--- a/libdw/libdw_alloc.c ++++ b/libdw/libdw_alloc.c +@@ -31,7 +31,7 @@ + # include + #endif + +-#include ++#include + #include + #include + #include "libdwP.h" +diff --git a/libdwfl/dwfl_build_id_find_elf.c b/libdwfl/dwfl_build_id_find_elf.c +index 903e193..b00d10c 100644 +--- a/libdwfl/dwfl_build_id_find_elf.c ++++ b/libdwfl/dwfl_build_id_find_elf.c +@@ -27,6 +27,7 @@ + not, see . 
*/ + + #include "libdwflP.h" ++#include "system.h" + #include + #include + #include +@@ -94,7 +95,7 @@ __libdwfl_open_by_build_id (Dwfl_Module *mod, bool debug, char **file_name, + { + if (*file_name != NULL) + free (*file_name); +- *file_name = canonicalize_file_name (name); ++ *file_name = realpath (name, NULL); + if (*file_name == NULL) + { + *file_name = name; +diff --git a/libdwfl/dwfl_error.c b/libdwfl/dwfl_error.c +index 7bcf61c..c345797 100644 +--- a/libdwfl/dwfl_error.c ++++ b/libdwfl/dwfl_error.c +@@ -140,6 +140,7 @@ __libdwfl_seterrno (Dwfl_Error error) + const char * + dwfl_errmsg (int error) + { ++ static __thread char s[64] = ""; + if (error == 0 || error == -1) + { + int last_error = global_error; +@@ -154,7 +155,8 @@ dwfl_errmsg (int error) + switch (error &~ 0xffff) + { + case OTHER_ERROR (ERRNO): +- return strerror_r (error & 0xffff, "bad", 0); ++ strerror_r (error & 0xffff, s, sizeof(s)); ++ return s; + case OTHER_ERROR (LIBELF): + return elf_errmsg (error & 0xffff); + case OTHER_ERROR (LIBDW): +diff --git a/libdwfl/dwfl_module_getdwarf.c b/libdwfl/dwfl_module_getdwarf.c +index 0e8810b..82ad665 100644 +--- a/libdwfl/dwfl_module_getdwarf.c ++++ b/libdwfl/dwfl_module_getdwarf.c +@@ -31,6 +31,7 @@ + #include + #include + #include ++#include "system.h" + #include "../libdw/libdwP.h" /* DWARF_E_* values are here. */ + #include "../libelf/libelfP.h" + +diff --git a/libdwfl/find-debuginfo.c b/libdwfl/find-debuginfo.c +index 80515db..80b0148 100644 +--- a/libdwfl/find-debuginfo.c ++++ b/libdwfl/find-debuginfo.c +@@ -385,7 +385,7 @@ dwfl_standard_find_debuginfo (Dwfl_Module *mod, + /* If FILE_NAME is a symlink, the debug file might be associated + with the symlink target name instead. */ + +- char *canon = canonicalize_file_name (file_name); ++ char *canon = realpath (file_name, NULL); + if (canon != NULL && strcmp (file_name, canon)) + fd = find_debuginfo_in_path (mod, canon, + debuglink_file, debuglink_crc, +diff --git a/libdwfl/libdwfl_crc32_file.c b/libdwfl/libdwfl_crc32_file.c +index 6b6b7d3..debc4a4 100644 +--- a/libdwfl/libdwfl_crc32_file.c ++++ b/libdwfl/libdwfl_crc32_file.c +@@ -31,6 +31,16 @@ + + #define crc32_file attribute_hidden __libdwfl_crc32_file + #define crc32 __libdwfl_crc32 ++ ++#ifndef TEMP_FAILURE_RETRY ++#define TEMP_FAILURE_RETRY(expression) \ ++ (__extension__ \ ++ ({ long int __result; \ ++ do __result = (long int) (expression); \ ++ while (__result == -1L && errno == EINTR); \ ++ __result; })) ++#endif ++ + #define LIB_SYSTEM_H 1 + #include + #include "../lib/crc32_file.c" +diff --git a/libdwfl/linux-kernel-modules.c b/libdwfl/linux-kernel-modules.c +index 9cd8ea9..4dbf4c5 100644 +--- a/libdwfl/linux-kernel-modules.c ++++ b/libdwfl/linux-kernel-modules.c +@@ -36,6 +36,7 @@ + #include + + #include "libdwflP.h" ++#include "system.h" + #include + #include + #include +diff --git a/libebl/eblopenbackend.c b/libebl/eblopenbackend.c +index 34d439a..56d2345 100644 +--- a/libebl/eblopenbackend.c ++++ b/libebl/eblopenbackend.c +@@ -32,7 +32,7 @@ + + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/libelf/elf.h b/libelf/elf.h +index 74654d6..81eee8b 100644 +--- a/libelf/elf.h ++++ b/libelf/elf.h +@@ -21,7 +21,9 @@ + + #include + +-__BEGIN_DECLS ++#ifdef __cplusplus ++extern "C" { ++#endif + + /* Standard ELF types. 
*/ + +@@ -3704,6 +3706,8 @@ enum + #define R_BPF_NONE 0 /* No reloc */ + #define R_BPF_MAP_FD 1 /* Map fd to pointer */ + +-__END_DECLS ++#ifdef __cplusplus ++} ++#endif + + #endif /* elf.h */ +diff --git a/libelf/libelf.h b/libelf/libelf.h +index c0d6389..38a68fd 100644 +--- a/libelf/libelf.h ++++ b/libelf/libelf.h +@@ -29,6 +29,7 @@ + #ifndef _LIBELF_H + #define _LIBELF_H 1 + ++#include + #include + #include + +diff --git a/libelf/libelfP.h b/libelf/libelfP.h +index 4459982..1296f20 100644 +--- a/libelf/libelfP.h ++++ b/libelf/libelfP.h +@@ -36,6 +36,7 @@ + + #include + #include ++#include + + #include + #include +diff --git a/src/addr2line.c b/src/addr2line.c +index 0222088..cd6a9a6 100644 +--- a/src/addr2line.c ++++ b/src/addr2line.c +@@ -23,7 +23,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/ar.c b/src/ar.c +index f2f322b..6e70031 100644 +--- a/src/ar.c ++++ b/src/ar.c +@@ -22,7 +22,7 @@ + + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/arlib.c b/src/arlib.c +index e0839aa..1143658 100644 +--- a/src/arlib.c ++++ b/src/arlib.c +@@ -21,7 +21,7 @@ + #endif + + #include +-#include ++#include + #include + #include + #include +diff --git a/src/arlib2.c b/src/arlib2.c +index 553fc57..46443d0 100644 +--- a/src/arlib2.c ++++ b/src/arlib2.c +@@ -20,7 +20,7 @@ + # include + #endif + +-#include ++#include + #include + #include + #include +diff --git a/src/elfcmp.c b/src/elfcmp.c +index 401ab31..873d253 100644 +--- a/src/elfcmp.c ++++ b/src/elfcmp.c +@@ -23,7 +23,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/elflint.c b/src/elflint.c +index 7d3f227..074d21c 100644 +--- a/src/elflint.c ++++ b/src/elflint.c +@@ -24,7 +24,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/findtextrel.c b/src/findtextrel.c +index dc41502..325888c 100644 +--- a/src/findtextrel.c ++++ b/src/findtextrel.c +@@ -23,7 +23,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/nm.c b/src/nm.c +index c54e96f..9e031d9 100644 +--- a/src/nm.c ++++ b/src/nm.c +@@ -26,7 +26,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/objdump.c b/src/objdump.c +index fff4b81..4b1f966 100644 +--- a/src/objdump.c ++++ b/src/objdump.c +@@ -21,7 +21,7 @@ + #endif + + #include +-#include ++#include + #include + #include + #include +diff --git a/src/ranlib.c b/src/ranlib.c +index 41a3bcf..0c7da2c 100644 +--- a/src/ranlib.c ++++ b/src/ranlib.c +@@ -24,7 +24,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/readelf.c b/src/readelf.c +index d18a4b7..a6cfb35 100644 +--- a/src/readelf.c ++++ b/src/readelf.c +@@ -25,7 +25,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/size.c b/src/size.c +index de0d791..4639d42 100644 +--- a/src/size.c ++++ b/src/size.c +@@ -21,7 +21,7 @@ + #endif + + #include +-#include ++#include + #include + #include + #include +diff --git a/src/stack.c b/src/stack.c +index a5a7beb..4c075bc 100644 +--- a/src/stack.c ++++ b/src/stack.c +@@ -18,7 +18,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/strings.c b/src/strings.c +index 49aab8b..09d5b1c 100644 +--- a/src/strings.c ++++ b/src/strings.c +@@ -25,7 
+25,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/strip.c b/src/strip.c +index a875ddf..fd76f7f 100644 +--- a/src/strip.c ++++ b/src/strip.c +@@ -24,7 +24,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/src/unstrip.c b/src/unstrip.c +index d838ae9..0108272 100644 +--- a/src/unstrip.c ++++ b/src/unstrip.c +@@ -31,7 +31,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/addrscopes.c b/tests/addrscopes.c +index 791569f..54f4311 100644 +--- a/tests/addrscopes.c ++++ b/tests/addrscopes.c +@@ -25,7 +25,7 @@ + #include + #include + #include +-#include ++#include + #include + + +diff --git a/tests/allregs.c b/tests/allregs.c +index 286f7e3..c9de089 100644 +--- a/tests/allregs.c ++++ b/tests/allregs.c +@@ -21,7 +21,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/backtrace-data.c b/tests/backtrace-data.c +index b7158da..354fa6a 100644 +--- a/tests/backtrace-data.c ++++ b/tests/backtrace-data.c +@@ -27,7 +27,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #if defined(__x86_64__) && defined(__linux__) +diff --git a/tests/backtrace-dwarf.c b/tests/backtrace-dwarf.c +index a644c8a..b8cbe27 100644 +--- a/tests/backtrace-dwarf.c ++++ b/tests/backtrace-dwarf.c +@@ -22,7 +22,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/backtrace.c b/tests/backtrace.c +index 1ff6353..47e3f7b 100644 +--- a/tests/backtrace.c ++++ b/tests/backtrace.c +@@ -24,7 +24,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #ifdef __linux__ +diff --git a/tests/buildid.c b/tests/buildid.c +index 87c1877..2953e6b 100644 +--- a/tests/buildid.c ++++ b/tests/buildid.c +@@ -23,7 +23,7 @@ + #include ELFUTILS_HEADER(elf) + #include ELFUTILS_HEADER(dwelf) + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/debugaltlink.c b/tests/debugaltlink.c +index 6d97d50..ee7e559 100644 +--- a/tests/debugaltlink.c ++++ b/tests/debugaltlink.c +@@ -23,7 +23,7 @@ + #include ELFUTILS_HEADER(dw) + #include ELFUTILS_HEADER(dwelf) + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/debuglink.c b/tests/debuglink.c +index 935d102..741cb81 100644 +--- a/tests/debuglink.c ++++ b/tests/debuglink.c +@@ -21,7 +21,7 @@ + #include + #include ELFUTILS_HEADER(dwelf) + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/deleted.c b/tests/deleted.c +index 6be35bc..0190711 100644 +--- a/tests/deleted.c ++++ b/tests/deleted.c +@@ -21,7 +21,7 @@ + #include + #include + #include +-#include ++#include + #include + #ifdef __linux__ + #include +diff --git a/tests/dwfl-addr-sect.c b/tests/dwfl-addr-sect.c +index 21e470a..1ea1e3b 100644 +--- a/tests/dwfl-addr-sect.c ++++ b/tests/dwfl-addr-sect.c +@@ -23,7 +23,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include ELFUTILS_HEADER(dwfl) +diff --git a/tests/dwfl-bug-addr-overflow.c b/tests/dwfl-bug-addr-overflow.c +index aa8030e..02c8bef 100644 +--- a/tests/dwfl-bug-addr-overflow.c ++++ b/tests/dwfl-bug-addr-overflow.c +@@ -20,7 +20,7 @@ + #include + #include + #include +-#include ++#include + #include + #include ELFUTILS_HEADER(dwfl) + +diff --git a/tests/dwfl-bug-fd-leak.c b/tests/dwfl-bug-fd-leak.c +index 689cdd7..5973da3 
100644 +--- a/tests/dwfl-bug-fd-leak.c ++++ b/tests/dwfl-bug-fd-leak.c +@@ -24,7 +24,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + +diff --git a/tests/dwfl-bug-getmodules.c b/tests/dwfl-bug-getmodules.c +index 1ee989f..fd62e65 100644 +--- a/tests/dwfl-bug-getmodules.c ++++ b/tests/dwfl-bug-getmodules.c +@@ -18,7 +18,7 @@ + #include + #include ELFUTILS_HEADER(dwfl) + +-#include ++#include + + static const Dwfl_Callbacks callbacks = + { +diff --git a/tests/dwfl-report-elf-align.c b/tests/dwfl-report-elf-align.c +index a4e97d3..f471587 100644 +--- a/tests/dwfl-report-elf-align.c ++++ b/tests/dwfl-report-elf-align.c +@@ -20,7 +20,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/dwfllines.c b/tests/dwfllines.c +index 90379dd..cbdf6c4 100644 +--- a/tests/dwfllines.c ++++ b/tests/dwfllines.c +@@ -27,7 +27,7 @@ + #include + #include + #include +-#include ++#include + + int + main (int argc, char *argv[]) +diff --git a/tests/dwflmodtest.c b/tests/dwflmodtest.c +index 0027f96..e68d3bc 100644 +--- a/tests/dwflmodtest.c ++++ b/tests/dwflmodtest.c +@@ -23,7 +23,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include ELFUTILS_HEADER(dwfl) +diff --git a/tests/dwflsyms.c b/tests/dwflsyms.c +index 49ac334..cf07830 100644 +--- a/tests/dwflsyms.c ++++ b/tests/dwflsyms.c +@@ -25,7 +25,7 @@ + #include + #include + #include +-#include ++#include + #include + + static const char * +diff --git a/tests/early-offscn.c b/tests/early-offscn.c +index 924cb9e..6f60d5a 100644 +--- a/tests/early-offscn.c ++++ b/tests/early-offscn.c +@@ -19,7 +19,7 @@ + #endif + + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/ecp.c b/tests/ecp.c +index 38a6859..743cea5 100644 +--- a/tests/ecp.c ++++ b/tests/ecp.c +@@ -20,7 +20,7 @@ + #endif + + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/find-prologues.c b/tests/find-prologues.c +index ba8ae37..76f5f04 100644 +--- a/tests/find-prologues.c ++++ b/tests/find-prologues.c +@@ -25,7 +25,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + +diff --git a/tests/funcretval.c b/tests/funcretval.c +index 8d19d11..c8aaa93 100644 +--- a/tests/funcretval.c ++++ b/tests/funcretval.c +@@ -25,7 +25,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + +diff --git a/tests/funcscopes.c b/tests/funcscopes.c +index 9c90185..dbccb89 100644 +--- a/tests/funcscopes.c ++++ b/tests/funcscopes.c +@@ -25,7 +25,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + +diff --git a/tests/getsrc_die.c b/tests/getsrc_die.c +index 055aede..9c394dd 100644 +--- a/tests/getsrc_die.c ++++ b/tests/getsrc_die.c +@@ -19,7 +19,7 @@ + #endif + + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/line2addr.c b/tests/line2addr.c +index e0d65d3..9bf0023 100644 +--- a/tests/line2addr.c ++++ b/tests/line2addr.c +@@ -26,7 +26,7 @@ + #include + #include + #include +-#include ++#include + + + static void +diff --git a/tests/low_high_pc.c b/tests/low_high_pc.c +index d0f4302..8da4fbd 100644 +--- a/tests/low_high_pc.c ++++ b/tests/low_high_pc.c +@@ -25,7 +25,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + +diff --git a/tests/md5-sha1-test.c b/tests/md5-sha1-test.c +index d50355e..3c41f40 100644 +--- a/tests/md5-sha1-test.c ++++ b/tests/md5-sha1-test.c +@@ -19,7 +19,7 @@ + 
#endif + + #include +-#include ++#include + + #include "md5.h" + #include "sha1.h" +diff --git a/tests/rdwrmmap.c b/tests/rdwrmmap.c +index 6f027df..1ce5e6e 100644 +--- a/tests/rdwrmmap.c ++++ b/tests/rdwrmmap.c +@@ -19,7 +19,7 @@ + #endif + + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/saridx.c b/tests/saridx.c +index 8a450d8..b387801 100644 +--- a/tests/saridx.c ++++ b/tests/saridx.c +@@ -17,7 +17,7 @@ + + #include + +-#include ++#include + #include + #include + #include +diff --git a/tests/sectiondump.c b/tests/sectiondump.c +index 3033fed..8e888db 100644 +--- a/tests/sectiondump.c ++++ b/tests/sectiondump.c +@@ -18,7 +18,7 @@ + #include + + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/varlocs.c b/tests/varlocs.c +index c3fba89..e043ea2 100644 +--- a/tests/varlocs.c ++++ b/tests/varlocs.c +@@ -25,7 +25,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +diff --git a/tests/vdsosyms.c b/tests/vdsosyms.c +index b876c10..afb2823 100644 +--- a/tests/vdsosyms.c ++++ b/tests/vdsosyms.c +@@ -18,7 +18,7 @@ + #include + #include + #include +-#include ++#include + #include + #include + #include +-- +2.8.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-elf_getarsym-Silence-Werror-maybe-uninitialized-fals.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-elf_getarsym-Silence-Werror-maybe-uninitialized-fals.patch new file mode 100644 index 000000000..3754c1c36 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-elf_getarsym-Silence-Werror-maybe-uninitialized-fals.patch @@ -0,0 +1,35 @@ +From 668accf322fd7185e273bfd50b84320e71d9de5a Mon Sep 17 00:00:00 2001 +From: Martin Jansa +Date: Fri, 10 Apr 2015 00:29:18 +0200 +Subject: [PATCH] elf_getarsym: Silence -Werror=maybe-uninitialized false + positive + +Upstream-Status: Pending +Signed-off-by: Martin Jansa +--- + libelf/elf_getarsym.c | 9 +++++++-- + 1 file changed, 7 insertions(+), 2 deletions(-) + +diff --git a/libelf/elf_getarsym.c b/libelf/elf_getarsym.c +index d0bb28a..08954d2 100644 +--- a/libelf/elf_getarsym.c ++++ b/libelf/elf_getarsym.c +@@ -165,8 +165,13 @@ elf_getarsym (elf, ptr) + int w = index64_p ? 8 : 4; + + /* We have an archive. The first word in there is the number of +- entries in the table. */ +- uint64_t n; ++ entries in the table. ++ Set to SIZE_MAX just to silence -Werror=maybe-uninitialized ++ elf_getarsym.c:290:9: error: 'n' may be used uninitialized in this function ++ The read_number_entries function doesn't initialize n only when returning ++ -1 which in turn ensures to jump over usage of this uninitialized variable. 
++ */ ++ uint64_t n = SIZE_MAX; + size_t off = elf->start_offset + SARMAG + sizeof (struct ar_hdr); + if (read_number_entries (&n, elf, &off, index64_p) < 0) + { +-- +2.3.5 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-fix-a-stack-usage-warning.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-fix-a-stack-usage-warning.patch new file mode 100644 index 000000000..6923bf705 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-fix-a-stack-usage-warning.patch @@ -0,0 +1,28 @@ +[PATCH] fix a stack-usage warning + +Upstream-Status: Pending + +not use a variable to as a array size, otherwise the warning to error that +stack usage might be unbounded [-Werror=stack-usage=] will happen + +Signed-off-by: Roy Li +--- + backends/ppc_initreg.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/backends/ppc_initreg.c b/backends/ppc_initreg.c +index 64f5379..52dde3e 100644 +--- a/backends/ppc_initreg.c ++++ b/backends/ppc_initreg.c +@@ -93,7 +93,7 @@ ppc_set_initial_registers_tid (pid_t tid __attribute__ ((unused)), + return false; + } + const size_t gprs = sizeof (user_regs.r.gpr) / sizeof (*user_regs.r.gpr); +- Dwarf_Word dwarf_regs[gprs]; ++ Dwarf_Word dwarf_regs[sizeof (user_regs.r.gpr) / sizeof (*user_regs.r.gpr)]; + for (unsigned gpr = 0; gpr < gprs; gpr++) + dwarf_regs[gpr] = user_regs.r.gpr[gpr]; + if (! setfunc (0, gprs, dwarf_regs, arg)) +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-remove-the-unneed-checking.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-remove-the-unneed-checking.patch new file mode 100644 index 000000000..5be92d705 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/0001-remove-the-unneed-checking.patch @@ -0,0 +1,38 @@ +Disable the test to convert euc-jp + +Remove the test "Test against HP-UX 11.11 bug: +No converter from EUC-JP to UTF-8 is provided" +since we don't support HP-UX and if the euc-jp is not +installed on the host, the dependence will be built without +iconv support and will cause guild-native building fail. + +Upstream-Status: Inappropriate [OE specific] + +Signed-off-by: Roy Li +--- + m4/iconv.m4 | 2 ++ + 1 file changed, 2 insertions(+) + +diff --git a/m4/iconv.m4 b/m4/iconv.m4 +index a503646..299f1eb 100644 +--- a/m4/iconv.m4 ++++ b/m4/iconv.m4 +@@ -159,6 +159,7 @@ int main () + } + } + #endif ++#if 0 + /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is + provided. */ + if (/* Try standardized names. */ +@@ -170,6 +171,7 @@ int main () + /* Try HP-UX names. */ + && iconv_open ("utf8", "eucJP") == (iconv_t)(-1)) + result |= 16; ++#endif + return result; + }]])], + [am_cv_func_iconv_works=yes], +-- +2.0.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/Fix_one_GCC7_warning.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/Fix_one_GCC7_warning.patch new file mode 100644 index 000000000..d88f4ebd0 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/Fix_one_GCC7_warning.patch @@ -0,0 +1,44 @@ +From 93c51144c3f664d4e9709da75a1d0fa00ea0fe95 Mon Sep 17 00:00:00 2001 +From: Mark Wielaard +Date: Sun, 12 Feb 2017 21:51:34 +0100 +Subject: [PATCH] libasm: Fix one GCC7 -Wformat-truncation=2 warning. 
+ +Make sure that if we have really lots of labels the tempsym doesn't get +truncated because it is too small to hold the whole name. + +This doesn't enable -Wformat-truncation=2 or fix other "issues" pointed +out by enabling this warning because there are currently some issues +with it. https://gcc.gnu.org/bugzilla/show_bug.cgi?id=79448 + +Signed-off-by: Mark Wielaard + +Upstream-Status: Backport (https://sourceware.org/git/?p=elfutils.git;a=commit;h=93c51144c3f664d4e9709da75a1d0fa00ea0fe95) +Signed-off-by: Joshua Lock + +--- + libasm/ChangeLog | 6 +++++- + libasm/asm_newsym.c | 6 ++++-- + 2 files changed, 9 insertions(+), 3 deletions(-) + +Index: elfutils-0.168/libasm/asm_newsym.c +=================================================================== +--- elfutils-0.168.orig/libasm/asm_newsym.c ++++ elfutils-0.168/libasm/asm_newsym.c +@@ -1,5 +1,5 @@ + /* Define new symbol for current position in given section. +- Copyright (C) 2002, 2005, 2016 Red Hat, Inc. ++ Copyright (C) 2002, 2005, 2016, 2017 Red Hat, Inc. + This file is part of elfutils. + Written by Ulrich Drepper , 2002. + +@@ -44,7 +44,9 @@ AsmSym_t * + asm_newsym (AsmScn_t *asmscn, const char *name, GElf_Xword size, + int type, int binding) + { +-#define TEMPSYMLEN 10 ++/* We don't really expect labels with many digits, but in theory it could ++ be 10 digits (plus ".L" and a zero terminator). */ ++#define TEMPSYMLEN 13 + char tempsym[TEMPSYMLEN]; + AsmSym_t *result; + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/aarch64_uio.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/aarch64_uio.patch new file mode 100644 index 000000000..38dc57bef --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/aarch64_uio.patch @@ -0,0 +1,47 @@ +Fix build on aarch64/musl + +Errors + +invalid operands to binary & (have 'long double' and 'unsigned int') + +error: redefinition + of 'struct iovec' + struct iovec { void *iov_base; size_t iov_len; }; + ^ +Upstream-Status: Pending +Signed-off-by: Khem Raj +Index: elfutils-0.163/backends/aarch64_initreg.c +=================================================================== +--- elfutils-0.163.orig/backends/aarch64_initreg.c ++++ elfutils-0.163/backends/aarch64_initreg.c +@@ -33,7 +33,7 @@ + #include "system.h" + #include + #ifdef __aarch64__ +-# include ++# include + # include + # include + /* Deal with old glibc defining user_pt_regs instead of user_regs_struct. */ +@@ -82,7 +82,7 @@ aarch64_set_initial_registers_tid (pid_t + + Dwarf_Word dwarf_fregs[32]; + for (int r = 0; r < 32; r++) +- dwarf_fregs[r] = fregs.vregs[r] & 0xFFFFFFFF; ++ dwarf_fregs[r] = (unsigned int)fregs.vregs[r] & 0xFFFFFFFF; + + if (! setfunc (64, 32, dwarf_fregs, arg)) + return false; +Index: elfutils-0.163/backends/arm_initreg.c +=================================================================== +--- elfutils-0.163.orig/backends/arm_initreg.c ++++ elfutils-0.163/backends/arm_initreg.c +@@ -37,7 +37,7 @@ + #endif + + #ifdef __aarch64__ +-# include ++# include + # include + # include + /* Deal with old glibc defining user_pt_regs instead of user_regs_struct. 
*/ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0001-Ignore-differences-between-mips-machine-identifiers.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0001-Ignore-differences-between-mips-machine-identifiers.patch new file mode 100644 index 000000000..e0291b4cf --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0001-Ignore-differences-between-mips-machine-identifiers.patch @@ -0,0 +1,35 @@ +From 77cb4a53c270d5854d3af24f19547bc3de825233 Mon Sep 17 00:00:00 2001 +From: James Cowgill +Date: Mon, 5 Jan 2015 15:16:58 +0000 +Subject: [PATCH 1/3] Ignore differences between mips machine identifiers + +Little endian binaries actually use EM_MIPS so you can't tell the endianness +from the elf machine id. Also, the EM_MIPS_RS3_LE machine is dead anyway (the +kernel will not load binaries containing it). + +Signed-off-by: James Cowgill + +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia + +--- + backends/mips_init.c | 6 +----- + 1 file changed, 1 insertion(+), 5 deletions(-) + +Index: b/backends/mips_init.c +=================================================================== +--- a/backends/mips_init.c ++++ b/backends/mips_init.c +@@ -45,11 +45,7 @@ mips_init (Elf *elf __attribute__ ((unus + return NULL; + + /* We handle it. */ +- if (machine == EM_MIPS) +- eh->name = "MIPS R3000 big-endian"; +- else if (machine == EM_MIPS_RS3_LE) +- eh->name = "MIPS R3000 little-endian"; +- ++ eh->name = "MIPS"; + mips_init_reloc (eh); + HOOK (eh, reloc_simple_type); + HOOK (eh, return_value_location); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0002-Add-support-for-mips64-abis-in-mips_retval.c.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0002-Add-support-for-mips64-abis-in-mips_retval.c.patch new file mode 100644 index 000000000..b17498f5f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0002-Add-support-for-mips64-abis-in-mips_retval.c.patch @@ -0,0 +1,171 @@ +From fdaab18a65ed2529656baa64cb6169f34d7e507b Mon Sep 17 00:00:00 2001 +From: James Cowgill +Date: Mon, 5 Jan 2015 15:17:01 +0000 +Subject: [PATCH 2/3] Add support for mips64 abis in mips_retval.c + +Signed-off-by: James Cowgill + +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia +--- + backends/mips_retval.c | 104 ++++++++++++++++++++++++++++++++++++++++++++----- + 1 file changed, 94 insertions(+), 10 deletions(-) + +diff --git a/backends/mips_retval.c b/backends/mips_retval.c +index 33f12a7..d5c6ef0 100644 +--- a/backends/mips_retval.c ++++ b/backends/mips_retval.c +@@ -91,6 +91,8 @@ enum mips_abi find_mips_abi(Elf *elf) + default: + if ((elf_flags & EF_MIPS_ABI2)) + return MIPS_ABI_N32; ++ else if ((ehdr->e_ident[EI_CLASS] == ELFCLASS64)) ++ return MIPS_ABI_N64; + } + + /* GCC creates a pseudo-section whose name describes the ABI. 
*/ +@@ -195,6 +197,57 @@ static const Dwarf_Op loc_aggregate[] = + }; + #define nloc_aggregate 1 + ++/* Test if a struct member is a float */ ++static int is_float_child(Dwarf_Die *childdie) ++{ ++ /* Test if this is actually a struct member */ ++ if (dwarf_tag(childdie) != DW_TAG_member) ++ return 0; ++ ++ /* Get type of member */ ++ Dwarf_Attribute attr_mem; ++ Dwarf_Die child_type_mem; ++ Dwarf_Die *child_typedie = ++ dwarf_formref_die(dwarf_attr_integrate(childdie, ++ DW_AT_type, ++ &attr_mem), &child_type_mem); ++ ++ if (dwarf_tag(child_typedie) != DW_TAG_base_type) ++ return 0; ++ ++ /* Get base subtype */ ++ Dwarf_Word encoding; ++ if (dwarf_formudata (dwarf_attr_integrate (child_typedie, ++ DW_AT_encoding, ++ &attr_mem), &encoding) != 0) ++ return 0; ++ ++ return encoding == DW_ATE_float; ++} ++ ++/* Returns the number of fpregs which can be returned in the given struct */ ++static int get_struct_fpregs(Dwarf_Die *structtypedie) ++{ ++ Dwarf_Die child_mem; ++ int fpregs = 0; ++ ++ /* Get first structure member */ ++ if (dwarf_child(structtypedie, &child_mem) != 0) ++ return 0; ++ ++ do ++ { ++ /* Ensure this register is a float */ ++ if (!is_float_child(&child_mem)) ++ return 0; ++ ++ fpregs++; ++ } ++ while (dwarf_siblingof (&child_mem, &child_mem) == 0); ++ ++ return fpregs; ++} ++ + int + mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) + { +@@ -240,6 +293,7 @@ mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) + tag = dwarf_tag (typedie); + } + ++ Dwarf_Word size; + switch (tag) + { + case -1: +@@ -258,8 +312,6 @@ mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) + case DW_TAG_enumeration_type: + case DW_TAG_pointer_type: + case DW_TAG_ptr_to_member_type: +- { +- Dwarf_Word size; + if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_byte_size, + &attr_mem), &size) != 0) + { +@@ -289,7 +341,7 @@ mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) + if (size <= 4*regsize && abi == MIPS_ABI_O32) + return nloc_fpregquad; + +- goto aggregate; ++ goto large; + } + } + *locp = ABI_LOC(loc_intreg, regsize); +@@ -298,18 +350,50 @@ mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) + if (size <= 2*regsize) + return nloc_intregpair; + +- /* Else fall through. Shouldn't happen though (at least with gcc) */ +- } ++ /* Else pass in memory. Shouldn't happen though (at least with gcc) */ ++ goto large; + + case DW_TAG_structure_type: + case DW_TAG_class_type: + case DW_TAG_union_type: +- case DW_TAG_array_type: +- aggregate: +- /* XXX TODO: Can't handle structure return with other ABI's yet :-/ */ +- if ((abi != MIPS_ABI_O32) && (abi != MIPS_ABI_O64)) +- return -2; ++ /* Handle special cases for structures <= 128 bytes in newer ABIs */ ++ if (abi == MIPS_ABI_EABI32 || abi == MIPS_ABI_EABI64 || ++ abi == MIPS_ABI_N32 || abi == MIPS_ABI_N64) ++ { ++ if (dwarf_aggregate_size (typedie, &size) == 0 && size <= 16) ++ { ++ /* ++ * Special case in N64 / N32 - ++ * structures containing only floats are returned in fp regs. ++ * Everything else is returned in integer regs. 
++ */ ++ if (tag != DW_TAG_union_type && ++ (abi == MIPS_ABI_N32 || abi == MIPS_ABI_N64)) ++ { ++ int num_fpregs = get_struct_fpregs(typedie); ++ if (num_fpregs == 1 || num_fpregs == 2) ++ { ++ *locp = loc_fpreg; ++ if (num_fpregs == 1) ++ return nloc_fpreg; ++ else ++ return nloc_fpregpair; ++ } ++ } ++ ++ *locp = loc_intreg; ++ if (size <= 8) ++ return nloc_intreg; ++ else ++ return nloc_intregpair; ++ } ++ } ++ ++ /* Fallthrough to handle large types */ + ++ case DW_TAG_array_type: ++ large: ++ /* Return large structures in memory */ + *locp = loc_aggregate; + return nloc_aggregate; + } +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0003-Add-mips-n64-relocation-format-hack.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0003-Add-mips-n64-relocation-format-hack.patch new file mode 100644 index 000000000..2a5f8628d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/0003-Add-mips-n64-relocation-format-hack.patch @@ -0,0 +1,229 @@ +From 59d4b8c48e5040af7e02b34eb26ea602ec82a38e Mon Sep 17 00:00:00 2001 +From: James Cowgill +Date: Mon, 5 Jan 2015 15:17:02 +0000 +Subject: [PATCH 3/3] Add mips n64 relocation format hack + +MIPSEL N64 ELF files use a slightly different format for storing relocation +entries which is incompatible with the normal R_SYM / R_INFO macros. +To workaround this, we rearrange the bytes in the relocation's r_info field +when reading and writing the relocations. + +This patch also ensures that strip.c sets the correct value of e_machine +before manipulating relocations so that these changes take effect. + +Signed-off-by: James Cowgill + +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia +--- + libelf/gelf_getrel.c | 25 +++++++++++++++++++++++-- + libelf/gelf_getrela.c | 25 +++++++++++++++++++++++-- + libelf/gelf_update_rel.c | 20 +++++++++++++++++++- + libelf/gelf_update_rela.c | 20 +++++++++++++++++++- + src/strip.c | 17 +++++++++++++++++ + 5 files changed, 101 insertions(+), 6 deletions(-) + +Index: b/libelf/gelf_getrel.c +=================================================================== +--- a/libelf/gelf_getrel.c ++++ b/libelf/gelf_getrel.c +@@ -36,6 +36,7 @@ + + #include "libelfP.h" + ++#define EF_MIPS_ABI 0x0000F000 + + GElf_Rel * + gelf_getrel (Elf_Data *data, int ndx, GElf_Rel *dst) +@@ -89,8 +90,28 @@ gelf_getrel (Elf_Data *data, int ndx, GE + result = NULL; + } + else +- result = memcpy (dst, &((Elf64_Rel *) data_scn->d.d_buf)[ndx], +- sizeof (Elf64_Rel)); ++ { ++ GElf_Ehdr hdr; ++ result = memcpy (dst, &((Elf64_Rel *) data_scn->d.d_buf)[ndx], ++ sizeof (Elf64_Rel)); ++ ++ if (gelf_getehdr(scn->elf, &hdr) != NULL && ++ hdr.e_ident[EI_DATA] == ELFDATA2LSB && ++ hdr.e_machine == EM_MIPS && ++ (hdr.e_flags & EF_MIPS_ABI) == 0) ++ { ++ /* ++ * The relocation format is mangled on MIPSEL N64 ++ * We'll adjust it so at least R_SYM will work on it ++ */ ++ GElf_Xword r_info = dst->r_info; ++ dst->r_info = (r_info << 32) | ++ ((r_info >> 8) & 0xFF000000) | ++ ((r_info >> 24) & 0x00FF0000) | ++ ((r_info >> 40) & 0x0000FF00) | ++ ((r_info >> 56) & 0x000000FF); ++ } ++ } + } + + rwlock_unlock (scn->elf->lock); +Index: b/libelf/gelf_getrela.c +=================================================================== +--- a/libelf/gelf_getrela.c ++++ b/libelf/gelf_getrela.c +@@ -36,6 +36,7 @@ + + #include "libelfP.h" + ++#define EF_MIPS_ABI 0x0000F000 + + GElf_Rela * + gelf_getrela (Elf_Data *data, int ndx, GElf_Rela *dst) 
+@@ -90,8 +91,28 @@ gelf_getrela (Elf_Data *data, int ndx, G + result = NULL; + } + else +- result = memcpy (dst, &((Elf64_Rela *) data_scn->d.d_buf)[ndx], +- sizeof (Elf64_Rela)); ++ { ++ GElf_Ehdr hdr; ++ result = memcpy (dst, &((Elf64_Rela *) data_scn->d.d_buf)[ndx], ++ sizeof (Elf64_Rela)); ++ ++ if (gelf_getehdr(scn->elf, &hdr) != NULL && ++ hdr.e_ident[EI_DATA] == ELFDATA2LSB && ++ hdr.e_machine == EM_MIPS && ++ (hdr.e_flags & EF_MIPS_ABI) == 0) ++ { ++ /* ++ * The relocation format is mangled on MIPSEL N64 ++ * We'll adjust it so at least R_SYM will work on it ++ */ ++ GElf_Xword r_info = dst->r_info; ++ dst->r_info = (r_info << 32) | ++ ((r_info >> 8) & 0xFF000000) | ++ ((r_info >> 24) & 0x00FF0000) | ++ ((r_info >> 40) & 0x0000FF00) | ++ ((r_info >> 56) & 0x000000FF); ++ } ++ } + } + + rwlock_unlock (scn->elf->lock); +Index: b/libelf/gelf_update_rel.c +=================================================================== +--- a/libelf/gelf_update_rel.c ++++ b/libelf/gelf_update_rel.c +@@ -36,6 +36,7 @@ + + #include "libelfP.h" + ++#define EF_MIPS_ABI 0x0000F000 + + int + gelf_update_rel (Elf_Data *dst, int ndx, GElf_Rel *src) +@@ -86,6 +87,9 @@ gelf_update_rel (Elf_Data *dst, int ndx, + } + else + { ++ GElf_Ehdr hdr; ++ GElf_Rel value = *src; ++ + /* Check whether we have to resize the data buffer. */ + if (INVALID_NDX (ndx, Elf64_Rel, &data_scn->d)) + { +@@ -93,7 +97,21 @@ gelf_update_rel (Elf_Data *dst, int ndx, + goto out; + } + +- ((Elf64_Rel *) data_scn->d.d_buf)[ndx] = *src; ++ if (gelf_getehdr(scn->elf, &hdr) != NULL && ++ hdr.e_ident[EI_DATA] == ELFDATA2LSB && ++ hdr.e_machine == EM_MIPS && ++ (hdr.e_flags & EF_MIPS_ABI) == 0) ++ { ++ /* Undo the MIPSEL N64 hack from gelf_getrel */ ++ GElf_Xword r_info = value.r_info; ++ value.r_info = (r_info >> 32) | ++ ((r_info << 8) & 0x000000FF00000000) | ++ ((r_info << 24) & 0x0000FF0000000000) | ++ ((r_info << 40) & 0x00FF000000000000) | ++ ((r_info << 56) & 0xFF00000000000000); ++ } ++ ++ ((Elf64_Rel *) data_scn->d.d_buf)[ndx] = value; + } + + result = 1; +Index: b/libelf/gelf_update_rela.c +=================================================================== +--- a/libelf/gelf_update_rela.c ++++ b/libelf/gelf_update_rela.c +@@ -36,6 +36,7 @@ + + #include "libelfP.h" + ++#define EF_MIPS_ABI 0x0000F000 + + int + gelf_update_rela (Elf_Data *dst, int ndx, GElf_Rela *src) +@@ -89,6 +90,9 @@ gelf_update_rela (Elf_Data *dst, int ndx + } + else + { ++ GElf_Ehdr hdr; ++ GElf_Rela value = *src; ++ + /* Check whether we have to resize the data buffer. 
*/ + if (INVALID_NDX (ndx, Elf64_Rela, &data_scn->d)) + { +@@ -96,7 +100,21 @@ gelf_update_rela (Elf_Data *dst, int ndx + goto out; + } + +- ((Elf64_Rela *) data_scn->d.d_buf)[ndx] = *src; ++ if (gelf_getehdr(scn->elf, &hdr) != NULL && ++ hdr.e_ident[EI_DATA] == ELFDATA2LSB && ++ hdr.e_machine == EM_MIPS && ++ (hdr.e_flags & EF_MIPS_ABI) == 0) ++ { ++ /* Undo the MIPSEL N64 hack from gelf_getrel */ ++ GElf_Xword r_info = value.r_info; ++ value.r_info = (r_info >> 32) | ++ ((r_info << 8) & 0x000000FF00000000) | ++ ((r_info << 24) & 0x0000FF0000000000) | ++ ((r_info << 40) & 0x00FF000000000000) | ++ ((r_info << 56) & 0xFF00000000000000); ++ } ++ ++ ((Elf64_Rela *) data_scn->d.d_buf)[ndx] = value; + } + + result = 1; +Index: b/src/strip.c +=================================================================== +--- a/src/strip.c ++++ b/src/strip.c +@@ -532,6 +532,23 @@ handle_elf (int fd, Elf *elf, const char + goto fail; + } + ++ /* Copy identity part of the ELF header now */ ++ newehdr = gelf_getehdr (newelf, &newehdr_mem); ++ if (newehdr == NULL) ++ INTERNAL_ERROR (fname); ++ ++ memcpy (newehdr->e_ident, ehdr->e_ident, EI_NIDENT); ++ newehdr->e_type = ehdr->e_type; ++ newehdr->e_machine = ehdr->e_machine; ++ newehdr->e_version = ehdr->e_version; ++ ++ if (gelf_update_ehdr (newelf, newehdr) == 0) ++ { ++ error (0, 0, gettext ("%s: error while creating ELF header: %s"), ++ fname, elf_errmsg (-1)); ++ return 1; ++ } ++ + /* Copy over the old program header if needed. */ + if (ehdr->e_type != ET_REL) + for (cnt = 0; cnt < phnum; ++cnt) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/arm_backend.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/arm_backend.diff new file mode 100644 index 000000000..50f4b059e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/arm_backend.diff @@ -0,0 +1,603 @@ +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia + +Index: b/backends/arm_init.c +=================================================================== +--- a/backends/arm_init.c ++++ b/backends/arm_init.c +@@ -35,20 +35,31 @@ + #define RELOC_PREFIX R_ARM_ + #include "libebl_CPU.h" + ++#include "libebl_arm.h" ++ + /* This defines the common reloc hooks based on arm_reloc.def. */ + #include "common-reloc.c" + + + const char * +-arm_init (Elf *elf __attribute__ ((unused)), ++arm_init (Elf *elf, + GElf_Half machine __attribute__ ((unused)), + Ebl *eh, + size_t ehlen) + { ++ int soft_float = 0; ++ + /* Check whether the Elf_BH object has a sufficent size. */ + if (ehlen < sizeof (Ebl)) + return NULL; + ++ if (elf) { ++ GElf_Ehdr ehdr_mem; ++ GElf_Ehdr *ehdr = gelf_getehdr (elf, &ehdr_mem); ++ if (ehdr && (ehdr->e_flags & EF_ARM_SOFT_FLOAT)) ++ soft_float = 1; ++ } ++ + /* We handle it. 
*/ + eh->name = "ARM"; + arm_init_reloc (eh); +@@ -60,7 +71,10 @@ arm_init (Elf *elf __attribute__ ((unuse + HOOK (eh, core_note); + HOOK (eh, auxv_info); + HOOK (eh, check_object_attribute); +- HOOK (eh, return_value_location); ++ if (soft_float) ++ eh->return_value_location = arm_return_value_location_soft; ++ else ++ eh->return_value_location = arm_return_value_location_hard; + HOOK (eh, abi_cfi); + HOOK (eh, check_reloc_target_type); + HOOK (eh, symbol_type_name); +Index: b/backends/arm_regs.c +=================================================================== +--- a/backends/arm_regs.c ++++ b/backends/arm_regs.c +@@ -31,6 +31,7 @@ + #endif + + #include ++#include + #include + + #define BACKEND arm_ +@@ -76,6 +77,9 @@ arm_register_info (Ebl *ebl __attribute_ + break; + + case 16 + 0 ... 16 + 7: ++ /* AADWARF says that there are no registers in that range, ++ * but gcc maps FPA registers here ++ */ + regno += 96 - 16; + /* Fall through. */ + case 96 + 0 ... 96 + 7: +@@ -87,11 +91,139 @@ arm_register_info (Ebl *ebl __attribute_ + namelen = 2; + break; + ++ case 64 + 0 ... 64 + 9: ++ *setname = "VFP"; ++ *bits = 32; ++ *type = DW_ATE_float; ++ name[0] = 's'; ++ name[1] = regno - 64 + '0'; ++ namelen = 2; ++ break; ++ ++ case 64 + 10 ... 64 + 31: ++ *setname = "VFP"; ++ *bits = 32; ++ *type = DW_ATE_float; ++ name[0] = 's'; ++ name[1] = (regno - 64) / 10 + '0'; ++ name[2] = (regno - 64) % 10 + '0'; ++ namelen = 3; ++ break; ++ ++ case 104 + 0 ... 104 + 7: ++ /* XXX TODO: ++ * This can be either intel wireless MMX general purpose/control ++ * registers or xscale accumulator, which have different usage. ++ * We only have the intel wireless MMX here now. ++ * The name needs to be changed for the xscale accumulator too. */ ++ *setname = "MMX"; ++ *type = DW_ATE_unsigned; ++ *bits = 32; ++ memcpy(name, "wcgr", 4); ++ name[4] = regno - 104 + '0'; ++ namelen = 5; ++ break; ++ ++ case 112 + 0 ... 112 + 9: ++ *setname = "MMX"; ++ *type = DW_ATE_unsigned; ++ *bits = 64; ++ name[0] = 'w'; ++ name[1] = 'r'; ++ name[2] = regno - 112 + '0'; ++ namelen = 3; ++ break; ++ ++ case 112 + 10 ... 112 + 15: ++ *setname = "MMX"; ++ *type = DW_ATE_unsigned; ++ *bits = 64; ++ name[0] = 'w'; ++ name[1] = 'r'; ++ name[2] = '1'; ++ name[3] = regno - 112 - 10 + '0'; ++ namelen = 4; ++ break; ++ + case 128: ++ *setname = "state"; + *type = DW_ATE_unsigned; + return stpcpy (name, "spsr") + 1 - name; + ++ case 129: ++ *setname = "state"; ++ *type = DW_ATE_unsigned; ++ return stpcpy(name, "spsr_fiq") + 1 - name; ++ ++ case 130: ++ *setname = "state"; ++ *type = DW_ATE_unsigned; ++ return stpcpy(name, "spsr_irq") + 1 - name; ++ ++ case 131: ++ *setname = "state"; ++ *type = DW_ATE_unsigned; ++ return stpcpy(name, "spsr_abt") + 1 - name; ++ ++ case 132: ++ *setname = "state"; ++ *type = DW_ATE_unsigned; ++ return stpcpy(name, "spsr_und") + 1 - name; ++ ++ case 133: ++ *setname = "state"; ++ *type = DW_ATE_unsigned; ++ return stpcpy(name, "spsr_svc") + 1 - name; ++ ++ case 144 ... 150: ++ *setname = "integer"; ++ *type = DW_ATE_signed; ++ *bits = 32; ++ return sprintf(name, "r%d_usr", regno - 144 + 8) + 1; ++ ++ case 151 ... 157: ++ *setname = "integer"; ++ *type = DW_ATE_signed; ++ *bits = 32; ++ return sprintf(name, "r%d_fiq", regno - 151 + 8) + 1; ++ ++ case 158 ... 159: ++ *setname = "integer"; ++ *type = DW_ATE_signed; ++ *bits = 32; ++ return sprintf(name, "r%d_irq", regno - 158 + 13) + 1; ++ ++ case 160 ... 
161: ++ *setname = "integer"; ++ *type = DW_ATE_signed; ++ *bits = 32; ++ return sprintf(name, "r%d_abt", regno - 160 + 13) + 1; ++ ++ case 162 ... 163: ++ *setname = "integer"; ++ *type = DW_ATE_signed; ++ *bits = 32; ++ return sprintf(name, "r%d_und", regno - 162 + 13) + 1; ++ ++ case 164 ... 165: ++ *setname = "integer"; ++ *type = DW_ATE_signed; ++ *bits = 32; ++ return sprintf(name, "r%d_svc", regno - 164 + 13) + 1; ++ ++ case 192 ... 199: ++ *setname = "MMX"; ++ *bits = 32; ++ *type = DW_ATE_unsigned; ++ name[0] = 'w'; ++ name[1] = 'c'; ++ name[2] = regno - 192 + '0'; ++ namelen = 3; ++ break; ++ + case 256 + 0 ... 256 + 9: ++ /* XXX TODO: Neon also uses those registers and can contain ++ * both float and integers */ + *setname = "VFP"; + *type = DW_ATE_float; + *bits = 64; +Index: b/backends/arm_retval.c +=================================================================== +--- a/backends/arm_retval.c ++++ b/backends/arm_retval.c +@@ -48,6 +48,13 @@ static const Dwarf_Op loc_intreg[] = + #define nloc_intreg 1 + #define nloc_intregs(n) (2 * (n)) + ++/* f1 */ /* XXX TODO: f0 can also have number 96 if program was compiled with -mabi=aapcs */ ++static const Dwarf_Op loc_fpreg[] = ++ { ++ { .atom = DW_OP_reg16 }, ++ }; ++#define nloc_fpreg 1 ++ + /* The return value is a structure and is actually stored in stack space + passed in a hidden argument by the caller. But, the compiler + helpfully returns the address of that space in r0. */ +@@ -58,8 +65,9 @@ static const Dwarf_Op loc_aggregate[] = + #define nloc_aggregate 1 + + +-int +-arm_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) ++static int ++arm_return_value_location_ (Dwarf_Die *functypedie, const Dwarf_Op **locp, ++ int soft_float) + { + /* Start with the function's type, and get the DW_AT_type attribute, + which is the type of the return value. */ +@@ -98,6 +106,21 @@ arm_return_value_location (Dwarf_Die *fu + else + return -1; + } ++ if (tag == DW_TAG_base_type) ++ { ++ Dwarf_Word encoding; ++ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding, ++ &attr_mem), &encoding) != 0) ++ return -1; ++ ++ if ((encoding == DW_ATE_float) && !soft_float) ++ { ++ *locp = loc_fpreg; ++ if (size <= 8) ++ return nloc_fpreg; ++ goto aggregate; ++ } ++ } + if (size <= 16) + { + intreg: +@@ -106,6 +129,7 @@ arm_return_value_location (Dwarf_Die *fu + } + + aggregate: ++ /* XXX TODO sometimes aggregates are returned in r0 (-mabi=aapcs) */ + *locp = loc_aggregate; + return nloc_aggregate; + } +@@ -125,3 +149,18 @@ arm_return_value_location (Dwarf_Die *fu + DWARF and might be valid. */ + return -2; + } ++ ++/* return location for -mabi=apcs-gnu -msoft-float */ ++int ++arm_return_value_location_soft (Dwarf_Die *functypedie, const Dwarf_Op **locp) ++{ ++ return arm_return_value_location_ (functypedie, locp, 1); ++} ++ ++/* return location for -mabi=apcs-gnu -mhard-float (current default) */ ++int ++arm_return_value_location_hard (Dwarf_Die *functypedie, const Dwarf_Op **locp) ++{ ++ return arm_return_value_location_ (functypedie, locp, 0); ++} ++ +Index: b/libelf/elf.h +=================================================================== +--- a/libelf/elf.h ++++ b/libelf/elf.h +@@ -2593,6 +2593,9 @@ enum + #define EF_ARM_EABI_VER4 0x04000000 + #define EF_ARM_EABI_VER5 0x05000000 + ++/* EI_OSABI values */ ++#define ELFOSABI_ARM_AEABI 64 /* Contains symbol versioning. */ ++ + /* Additional symbol types for Thumb. */ + #define STT_ARM_TFUNC STT_LOPROC /* A Thumb function. 
*/ + #define STT_ARM_16BIT STT_HIPROC /* A Thumb label. */ +@@ -2610,12 +2613,19 @@ enum + + /* Processor specific values for the Phdr p_type field. */ + #define PT_ARM_EXIDX (PT_LOPROC + 1) /* ARM unwind segment. */ ++#define PT_ARM_UNWIND PT_ARM_EXIDX + + /* Processor specific values for the Shdr sh_type field. */ + #define SHT_ARM_EXIDX (SHT_LOPROC + 1) /* ARM unwind section. */ + #define SHT_ARM_PREEMPTMAP (SHT_LOPROC + 2) /* Preemption details. */ + #define SHT_ARM_ATTRIBUTES (SHT_LOPROC + 3) /* ARM attributes section. */ + ++/* Processor specific values for the Dyn d_tag field. */ ++#define DT_ARM_RESERVED1 (DT_LOPROC + 0) ++#define DT_ARM_SYMTABSZ (DT_LOPROC + 1) ++#define DT_ARM_PREEMTMAB (DT_LOPROC + 2) ++#define DT_ARM_RESERVED2 (DT_LOPROC + 3) ++#define DT_ARM_NUM 4 + + /* AArch64 relocs. */ + +@@ -2908,6 +2918,7 @@ enum + TLS block (LDR, STR). */ + #define R_ARM_TLS_IE12GP 111 /* 12 bit GOT entry relative + to GOT origin (LDR). */ ++/* 112 - 127 private range */ + #define R_ARM_ME_TOO 128 /* Obsolete. */ + #define R_ARM_THM_TLS_DESCSEQ 129 + #define R_ARM_THM_TLS_DESCSEQ16 129 +Index: b/backends/libebl_arm.h +=================================================================== +--- /dev/null ++++ b/backends/libebl_arm.h +@@ -0,0 +1,9 @@ ++#ifndef _LIBEBL_ARM_H ++#define _LIBEBL_ARM_H 1 ++ ++#include ++ ++extern int arm_return_value_location_soft(Dwarf_Die *, const Dwarf_Op **locp); ++extern int arm_return_value_location_hard(Dwarf_Die *, const Dwarf_Op **locp); ++ ++#endif +Index: b/tests/run-allregs.sh +=================================================================== +--- a/tests/run-allregs.sh ++++ b/tests/run-allregs.sh +@@ -2672,7 +2672,28 @@ integer registers: + 13: sp (sp), address 32 bits + 14: lr (lr), address 32 bits + 15: pc (pc), address 32 bits +- 128: spsr (spsr), unsigned 32 bits ++ 144: r8_usr (r8_usr), signed 32 bits ++ 145: r9_usr (r9_usr), signed 32 bits ++ 146: r10_usr (r10_usr), signed 32 bits ++ 147: r11_usr (r11_usr), signed 32 bits ++ 148: r12_usr (r12_usr), signed 32 bits ++ 149: r13_usr (r13_usr), signed 32 bits ++ 150: r14_usr (r14_usr), signed 32 bits ++ 151: r8_fiq (r8_fiq), signed 32 bits ++ 152: r9_fiq (r9_fiq), signed 32 bits ++ 153: r10_fiq (r10_fiq), signed 32 bits ++ 154: r11_fiq (r11_fiq), signed 32 bits ++ 155: r12_fiq (r12_fiq), signed 32 bits ++ 156: r13_fiq (r13_fiq), signed 32 bits ++ 157: r14_fiq (r14_fiq), signed 32 bits ++ 158: r13_irq (r13_irq), signed 32 bits ++ 159: r14_irq (r14_irq), signed 32 bits ++ 160: r13_abt (r13_abt), signed 32 bits ++ 161: r14_abt (r14_abt), signed 32 bits ++ 162: r13_und (r13_und), signed 32 bits ++ 163: r14_und (r14_und), signed 32 bits ++ 164: r13_svc (r13_svc), signed 32 bits ++ 165: r14_svc (r14_svc), signed 32 bits + FPA registers: + 16: f0 (f0), float 96 bits + 17: f1 (f1), float 96 bits +@@ -2690,7 +2711,72 @@ FPA registers: + 101: f5 (f5), float 96 bits + 102: f6 (f6), float 96 bits + 103: f7 (f7), float 96 bits ++MMX registers: ++ 104: wcgr0 (wcgr0), unsigned 32 bits ++ 105: wcgr1 (wcgr1), unsigned 32 bits ++ 106: wcgr2 (wcgr2), unsigned 32 bits ++ 107: wcgr3 (wcgr3), unsigned 32 bits ++ 108: wcgr4 (wcgr4), unsigned 32 bits ++ 109: wcgr5 (wcgr5), unsigned 32 bits ++ 110: wcgr6 (wcgr6), unsigned 32 bits ++ 111: wcgr7 (wcgr7), unsigned 32 bits ++ 112: wr0 (wr0), unsigned 64 bits ++ 113: wr1 (wr1), unsigned 64 bits ++ 114: wr2 (wr2), unsigned 64 bits ++ 115: wr3 (wr3), unsigned 64 bits ++ 116: wr4 (wr4), unsigned 64 bits ++ 117: wr5 (wr5), unsigned 64 bits ++ 118: wr6 (wr6), unsigned 64 bits ++ 
119: wr7 (wr7), unsigned 64 bits ++ 120: wr8 (wr8), unsigned 64 bits ++ 121: wr9 (wr9), unsigned 64 bits ++ 122: wr10 (wr10), unsigned 64 bits ++ 123: wr11 (wr11), unsigned 64 bits ++ 124: wr12 (wr12), unsigned 64 bits ++ 125: wr13 (wr13), unsigned 64 bits ++ 126: wr14 (wr14), unsigned 64 bits ++ 127: wr15 (wr15), unsigned 64 bits ++ 192: wc0 (wc0), unsigned 32 bits ++ 193: wc1 (wc1), unsigned 32 bits ++ 194: wc2 (wc2), unsigned 32 bits ++ 195: wc3 (wc3), unsigned 32 bits ++ 196: wc4 (wc4), unsigned 32 bits ++ 197: wc5 (wc5), unsigned 32 bits ++ 198: wc6 (wc6), unsigned 32 bits ++ 199: wc7 (wc7), unsigned 32 bits + VFP registers: ++ 64: s0 (s0), float 32 bits ++ 65: s1 (s1), float 32 bits ++ 66: s2 (s2), float 32 bits ++ 67: s3 (s3), float 32 bits ++ 68: s4 (s4), float 32 bits ++ 69: s5 (s5), float 32 bits ++ 70: s6 (s6), float 32 bits ++ 71: s7 (s7), float 32 bits ++ 72: s8 (s8), float 32 bits ++ 73: s9 (s9), float 32 bits ++ 74: s10 (s10), float 32 bits ++ 75: s11 (s11), float 32 bits ++ 76: s12 (s12), float 32 bits ++ 77: s13 (s13), float 32 bits ++ 78: s14 (s14), float 32 bits ++ 79: s15 (s15), float 32 bits ++ 80: s16 (s16), float 32 bits ++ 81: s17 (s17), float 32 bits ++ 82: s18 (s18), float 32 bits ++ 83: s19 (s19), float 32 bits ++ 84: s20 (s20), float 32 bits ++ 85: s21 (s21), float 32 bits ++ 86: s22 (s22), float 32 bits ++ 87: s23 (s23), float 32 bits ++ 88: s24 (s24), float 32 bits ++ 89: s25 (s25), float 32 bits ++ 90: s26 (s26), float 32 bits ++ 91: s27 (s27), float 32 bits ++ 92: s28 (s28), float 32 bits ++ 93: s29 (s29), float 32 bits ++ 94: s30 (s30), float 32 bits ++ 95: s31 (s31), float 32 bits + 256: d0 (d0), float 64 bits + 257: d1 (d1), float 64 bits + 258: d2 (d2), float 64 bits +@@ -2723,6 +2809,13 @@ VFP registers: + 285: d29 (d29), float 64 bits + 286: d30 (d30), float 64 bits + 287: d31 (d31), float 64 bits ++state registers: ++ 128: spsr (spsr), unsigned 32 bits ++ 129: spsr_fiq (spsr_fiq), unsigned 32 bits ++ 130: spsr_irq (spsr_irq), unsigned 32 bits ++ 131: spsr_abt (spsr_abt), unsigned 32 bits ++ 132: spsr_und (spsr_und), unsigned 32 bits ++ 133: spsr_svc (spsr_svc), unsigned 32 bits + EOF + + # See run-readelf-mixed-corenote.sh for instructions to regenerate +Index: b/tests/run-readelf-mixed-corenote.sh +=================================================================== +--- a/tests/run-readelf-mixed-corenote.sh ++++ b/tests/run-readelf-mixed-corenote.sh +@@ -31,12 +31,11 @@ Note segment of 892 bytes at offset 0x27 + pid: 11087, ppid: 11063, pgrp: 11087, sid: 11063 + utime: 0.000000, stime: 0.010000, cutime: 0.000000, cstime: 0.000000 + orig_r0: -1, fpvalid: 1 +- r0: 1 r1: -1091672508 r2: -1091672500 +- r3: 0 r4: 0 r5: 0 +- r6: 33728 r7: 0 r8: 0 +- r9: 0 r10: -1225703496 r11: -1091672844 +- r12: 0 sp: 0xbeee64f4 lr: 0xb6dc3f48 +- pc: 0x00008500 spsr: 0x60000010 ++ r0: 1 r1: -1091672508 r2: -1091672500 r3: 0 ++ r4: 0 r5: 0 r6: 33728 r7: 0 ++ r8: 0 r9: 0 r10: -1225703496 r11: -1091672844 ++ r12: 0 sp: 0xbeee64f4 lr: 0xb6dc3f48 pc: 0x00008500 ++ spsr: 0x60000010 + CORE 124 PRPSINFO + state: 0, sname: R, zomb: 0, nice: 0, flag: 0x00400500 + uid: 0, gid: 0, pid: 11087, ppid: 11063, pgrp: 11087, sid: 11063 +Index: b/tests/run-addrcfi.sh +=================================================================== +--- a/tests/run-addrcfi.sh ++++ b/tests/run-addrcfi.sh +@@ -3554,6 +3554,38 @@ dwarf_cfi_addrframe (.eh_frame): no matc + FPA reg21 (f5): undefined + FPA reg22 (f6): undefined + FPA reg23 (f7): undefined ++ VFP reg64 (s0): undefined ++ VFP reg65 (s1): 
undefined ++ VFP reg66 (s2): undefined ++ VFP reg67 (s3): undefined ++ VFP reg68 (s4): undefined ++ VFP reg69 (s5): undefined ++ VFP reg70 (s6): undefined ++ VFP reg71 (s7): undefined ++ VFP reg72 (s8): undefined ++ VFP reg73 (s9): undefined ++ VFP reg74 (s10): undefined ++ VFP reg75 (s11): undefined ++ VFP reg76 (s12): undefined ++ VFP reg77 (s13): undefined ++ VFP reg78 (s14): undefined ++ VFP reg79 (s15): undefined ++ VFP reg80 (s16): undefined ++ VFP reg81 (s17): undefined ++ VFP reg82 (s18): undefined ++ VFP reg83 (s19): undefined ++ VFP reg84 (s20): undefined ++ VFP reg85 (s21): undefined ++ VFP reg86 (s22): undefined ++ VFP reg87 (s23): undefined ++ VFP reg88 (s24): undefined ++ VFP reg89 (s25): undefined ++ VFP reg90 (s26): undefined ++ VFP reg91 (s27): undefined ++ VFP reg92 (s28): undefined ++ VFP reg93 (s29): undefined ++ VFP reg94 (s30): undefined ++ VFP reg95 (s31): undefined + FPA reg96 (f0): undefined + FPA reg97 (f1): undefined + FPA reg98 (f2): undefined +@@ -3562,7 +3594,66 @@ dwarf_cfi_addrframe (.eh_frame): no matc + FPA reg101 (f5): undefined + FPA reg102 (f6): undefined + FPA reg103 (f7): undefined +- integer reg128 (spsr): undefined ++ MMX reg104 (wcgr0): undefined ++ MMX reg105 (wcgr1): undefined ++ MMX reg106 (wcgr2): undefined ++ MMX reg107 (wcgr3): undefined ++ MMX reg108 (wcgr4): undefined ++ MMX reg109 (wcgr5): undefined ++ MMX reg110 (wcgr6): undefined ++ MMX reg111 (wcgr7): undefined ++ MMX reg112 (wr0): undefined ++ MMX reg113 (wr1): undefined ++ MMX reg114 (wr2): undefined ++ MMX reg115 (wr3): undefined ++ MMX reg116 (wr4): undefined ++ MMX reg117 (wr5): undefined ++ MMX reg118 (wr6): undefined ++ MMX reg119 (wr7): undefined ++ MMX reg120 (wr8): undefined ++ MMX reg121 (wr9): undefined ++ MMX reg122 (wr10): undefined ++ MMX reg123 (wr11): undefined ++ MMX reg124 (wr12): undefined ++ MMX reg125 (wr13): undefined ++ MMX reg126 (wr14): undefined ++ MMX reg127 (wr15): undefined ++ state reg128 (spsr): undefined ++ state reg129 (spsr_fiq): undefined ++ state reg130 (spsr_irq): undefined ++ state reg131 (spsr_abt): undefined ++ state reg132 (spsr_und): undefined ++ state reg133 (spsr_svc): undefined ++ integer reg144 (r8_usr): undefined ++ integer reg145 (r9_usr): undefined ++ integer reg146 (r10_usr): undefined ++ integer reg147 (r11_usr): undefined ++ integer reg148 (r12_usr): undefined ++ integer reg149 (r13_usr): undefined ++ integer reg150 (r14_usr): undefined ++ integer reg151 (r8_fiq): undefined ++ integer reg152 (r9_fiq): undefined ++ integer reg153 (r10_fiq): undefined ++ integer reg154 (r11_fiq): undefined ++ integer reg155 (r12_fiq): undefined ++ integer reg156 (r13_fiq): undefined ++ integer reg157 (r14_fiq): undefined ++ integer reg158 (r13_irq): undefined ++ integer reg159 (r14_irq): undefined ++ integer reg160 (r13_abt): undefined ++ integer reg161 (r14_abt): undefined ++ integer reg162 (r13_und): undefined ++ integer reg163 (r14_und): undefined ++ integer reg164 (r13_svc): undefined ++ integer reg165 (r14_svc): undefined ++ MMX reg192 (wc0): undefined ++ MMX reg193 (wc1): undefined ++ MMX reg194 (wc2): undefined ++ MMX reg195 (wc3): undefined ++ MMX reg196 (wc4): undefined ++ MMX reg197 (wc5): undefined ++ MMX reg198 (wc6): undefined ++ MMX reg199 (wc7): undefined + VFP reg256 (d0): undefined + VFP reg257 (d1): undefined + VFP reg258 (d2): undefined diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/hppa_backend.diff 
b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/hppa_backend.diff new file mode 100644 index 000000000..44fda7f30 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/hppa_backend.diff @@ -0,0 +1,802 @@ +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia + +Index: b/backends/parisc_init.c +=================================================================== +--- /dev/null ++++ b/backends/parisc_init.c +@@ -0,0 +1,73 @@ ++/* Initialization of PA-RISC specific backend library. ++ Copyright (C) 2002, 2005, 2006 Red Hat, Inc. ++ This file is part of Red Hat elfutils. ++ Written by Ulrich Drepper , 2002. ++ ++ Red Hat elfutils is free software; you can redistribute it and/or modify ++ it under the terms of the GNU General Public License as published by the ++ Free Software Foundation; version 2 of the License. ++ ++ Red Hat elfutils is distributed in the hope that it will be useful, but ++ WITHOUT ANY WARRANTY; without even the implied warranty of ++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ General Public License for more details. ++ ++ You should have received a copy of the GNU General Public License along ++ with Red Hat elfutils; if not, write to the Free Software Foundation, ++ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. ++ ++ Red Hat elfutils is an included package of the Open Invention Network. ++ An included package of the Open Invention Network is a package for which ++ Open Invention Network licensees cross-license their patents. No patent ++ license is granted, either expressly or impliedly, by designation as an ++ included package. Should you wish to participate in the Open Invention ++ Network licensing program, please visit www.openinventionnetwork.com ++ . */ ++ ++#ifdef HAVE_CONFIG_H ++# include ++#endif ++ ++#define BACKEND parisc_ ++#define RELOC_PREFIX R_PARISC_ ++#include "libebl_CPU.h" ++#include "libebl_parisc.h" ++ ++/* This defines the common reloc hooks based on parisc_reloc.def. */ ++#include "common-reloc.c" ++ ++ ++const char * ++parisc_init (Elf *elf __attribute__ ((unused)), ++ GElf_Half machine __attribute__ ((unused)), ++ Ebl *eh, ++ size_t ehlen) ++{ ++ int pa64 = 0; ++ ++ /* Check whether the Elf_BH object has a sufficent size. */ ++ if (ehlen < sizeof (Ebl)) ++ return NULL; ++ ++ if (elf) { ++ GElf_Ehdr ehdr_mem; ++ GElf_Ehdr *ehdr = gelf_getehdr (elf, &ehdr_mem); ++ if (ehdr && (ehdr->e_flags & EF_PARISC_WIDE)) ++ pa64 = 1; ++ } ++ /* We handle it. */ ++ eh->name = "PA-RISC"; ++ parisc_init_reloc (eh); ++ HOOK (eh, reloc_simple_type); ++ HOOK (eh, machine_flag_check); ++ HOOK (eh, symbol_type_name); ++ HOOK (eh, segment_type_name); ++ HOOK (eh, section_type_name); ++ HOOK (eh, register_info); ++ if (pa64) ++ eh->return_value_location = parisc_return_value_location_64; ++ else ++ eh->return_value_location = parisc_return_value_location_32; ++ ++ return MODVERSION; ++} +Index: b/backends/parisc_regs.c +=================================================================== +--- /dev/null ++++ b/backends/parisc_regs.c +@@ -0,0 +1,159 @@ ++/* Register names and numbers for PA-RISC DWARF. ++ Copyright (C) 2005, 2006 Red Hat, Inc. ++ This file is part of Red Hat elfutils. ++ ++ Red Hat elfutils is free software; you can redistribute it and/or modify ++ it under the terms of the GNU General Public License as published by the ++ Free Software Foundation; version 2 of the License. 
++ ++ Red Hat elfutils is distributed in the hope that it will be useful, but ++ WITHOUT ANY WARRANTY; without even the implied warranty of ++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ General Public License for more details. ++ ++ You should have received a copy of the GNU General Public License along ++ with Red Hat elfutils; if not, write to the Free Software Foundation, ++ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. ++ ++ Red Hat elfutils is an included package of the Open Invention Network. ++ An included package of the Open Invention Network is a package for which ++ Open Invention Network licensees cross-license their patents. No patent ++ license is granted, either expressly or impliedly, by designation as an ++ included package. Should you wish to participate in the Open Invention ++ Network licensing program, please visit www.openinventionnetwork.com ++ . */ ++ ++#ifdef HAVE_CONFIG_H ++# include ++#endif ++ ++#include ++#include ++ ++#define BACKEND parisc_ ++#include "libebl_CPU.h" ++ ++ssize_t ++parisc_register_info (Ebl *ebl, int regno, char *name, size_t namelen, ++ const char **prefix, const char **setname, ++ int *bits, int *type) ++{ ++ int pa64 = 0; ++ ++ if (ebl->elf) { ++ GElf_Ehdr ehdr_mem; ++ GElf_Ehdr *ehdr = gelf_getehdr (ebl->elf, &ehdr_mem); ++ if (ehdr->e_flags & EF_PARISC_WIDE) ++ pa64 = 1; ++ } ++ ++ int nregs = pa64 ? 127 : 128; ++ ++ if (name == NULL) ++ return nregs; ++ ++ if (regno < 0 || regno >= nregs || namelen < 6) ++ return -1; ++ ++ *prefix = "%"; ++ ++ if (regno < 32) ++ { ++ *setname = "integer"; ++ *type = DW_ATE_signed; ++ if (pa64) ++ { ++ *bits = 64; ++ } ++ else ++ { ++ *bits = 32; ++ } ++ } ++ else if (regno == 32) ++ { ++ *setname = "special"; ++ if (pa64) ++ { ++ *bits = 6; ++ } ++ else ++ { ++ *bits = 5; ++ } ++ *type = DW_ATE_unsigned; ++ } ++ else ++ { ++ *setname = "FPU"; ++ *type = DW_ATE_float; ++ if (pa64) ++ { ++ *bits = 64; ++ } ++ else ++ { ++ *bits = 32; ++ } ++ } ++ ++ if (regno < 33) { ++ switch (regno) ++ { ++ case 0 ... 9: ++ name[0] = 'r'; ++ name[1] = regno + '0'; ++ namelen = 2; ++ break; ++ case 10 ... 31: ++ name[0] = 'r'; ++ name[1] = regno / 10 + '0'; ++ name[2] = regno % 10 + '0'; ++ namelen = 3; ++ break; ++ case 32: ++ *prefix = NULL; ++ name[0] = 'S'; ++ name[1] = 'A'; ++ name[2] = 'R'; ++ namelen = 3; ++ break; ++ } ++ } ++ else { ++ if (pa64 && ((regno - 72) % 2)) { ++ *setname = NULL; ++ return 0; ++ } ++ ++ switch (regno) ++ { ++ case 72 + 0 ... 72 + 11: ++ name[0] = 'f'; ++ name[1] = 'r'; ++ name[2] = (regno + 8 - 72) / 2 + '0'; ++ namelen = 3; ++ if ((regno + 8 - 72) % 2) { ++ name[3] = 'R'; ++ namelen++; ++ } ++ break; ++ case 72 + 12 ... 72 + 55: ++ name[0] = 'f'; ++ name[1] = 'r'; ++ name[2] = (regno + 8 - 72) / 2 / 10 + '0'; ++ name[3] = (regno + 8 - 72) / 2 % 10 + '0'; ++ namelen = 4; ++ if ((regno + 8 - 72) % 2) { ++ name[4] = 'R'; ++ namelen++; ++ } ++ break; ++ default: ++ *setname = NULL; ++ return 0; ++ } ++ } ++ name[namelen++] = '\0'; ++ return namelen; ++} +Index: b/backends/parisc_reloc.def +=================================================================== +--- /dev/null ++++ b/backends/parisc_reloc.def +@@ -0,0 +1,128 @@ ++/* List the relocation types for PA-RISC. -*- C -*- ++ Copyright (C) 2005 Red Hat, Inc. ++ This file is part of Red Hat elfutils. 
++ ++ Red Hat elfutils is free software; you can redistribute it and/or modify ++ it under the terms of the GNU General Public License as published by the ++ Free Software Foundation; version 2 of the License. ++ ++ Red Hat elfutils is distributed in the hope that it will be useful, but ++ WITHOUT ANY WARRANTY; without even the implied warranty of ++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ General Public License for more details. ++ ++ You should have received a copy of the GNU General Public License along ++ with Red Hat elfutils; if not, write to the Free Software Foundation, ++ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. ++ ++ Red Hat elfutils is an included package of the Open Invention Network. ++ An included package of the Open Invention Network is a package for which ++ Open Invention Network licensees cross-license their patents. No patent ++ license is granted, either expressly or impliedly, by designation as an ++ included package. Should you wish to participate in the Open Invention ++ Network licensing program, please visit www.openinventionnetwork.com ++ . */ ++ ++/* NAME, REL|EXEC|DYN */ ++ ++RELOC_TYPE (NONE, EXEC|DYN) ++RELOC_TYPE (DIR32, REL|EXEC|DYN) ++RELOC_TYPE (DIR21L, REL|EXEC|DYN) ++RELOC_TYPE (DIR17R, REL) ++RELOC_TYPE (DIR17F, REL) ++RELOC_TYPE (DIR14R, REL|DYN) ++RELOC_TYPE (PCREL32, REL) ++RELOC_TYPE (PCREL21L, REL) ++RELOC_TYPE (PCREL17R, REL) ++RELOC_TYPE (PCREL17F, REL) ++RELOC_TYPE (PCREL14R, REL|EXEC) ++RELOC_TYPE (DPREL21L, REL) ++RELOC_TYPE (DPREL14WR, REL) ++RELOC_TYPE (DPREL14DR, REL) ++RELOC_TYPE (DPREL14R, REL) ++RELOC_TYPE (GPREL21L, 0) ++RELOC_TYPE (GPREL14R, 0) ++RELOC_TYPE (LTOFF21L, REL) ++RELOC_TYPE (LTOFF14R, REL) ++RELOC_TYPE (DLTIND14F, 0) ++RELOC_TYPE (SETBASE, 0) ++RELOC_TYPE (SECREL32, REL) ++RELOC_TYPE (BASEREL21L, 0) ++RELOC_TYPE (BASEREL17R, 0) ++RELOC_TYPE (BASEREL14R, 0) ++RELOC_TYPE (SEGBASE, 0) ++RELOC_TYPE (SEGREL32, REL) ++RELOC_TYPE (PLTOFF21L, 0) ++RELOC_TYPE (PLTOFF14R, 0) ++RELOC_TYPE (PLTOFF14F, 0) ++RELOC_TYPE (LTOFF_FPTR32, 0) ++RELOC_TYPE (LTOFF_FPTR21L, 0) ++RELOC_TYPE (LTOFF_FPTR14R, 0) ++RELOC_TYPE (FPTR64, 0) ++RELOC_TYPE (PLABEL32, REL|DYN) ++RELOC_TYPE (PCREL64, 0) ++RELOC_TYPE (PCREL22C, 0) ++RELOC_TYPE (PCREL22F, 0) ++RELOC_TYPE (PCREL14WR, 0) ++RELOC_TYPE (PCREL14DR, 0) ++RELOC_TYPE (PCREL16F, 0) ++RELOC_TYPE (PCREL16WF, 0) ++RELOC_TYPE (PCREL16DF, 0) ++RELOC_TYPE (DIR64, REL|DYN) ++RELOC_TYPE (DIR14WR, REL) ++RELOC_TYPE (DIR14DR, REL) ++RELOC_TYPE (DIR16F, REL) ++RELOC_TYPE (DIR16WF, REL) ++RELOC_TYPE (DIR16DF, REL) ++RELOC_TYPE (GPREL64, 0) ++RELOC_TYPE (GPREL14WR, 0) ++RELOC_TYPE (GPREL14DR, 0) ++RELOC_TYPE (GPREL16F, 0) ++RELOC_TYPE (GPREL16WF, 0) ++RELOC_TYPE (GPREL16DF, 0) ++RELOC_TYPE (LTOFF64, 0) ++RELOC_TYPE (LTOFF14WR, 0) ++RELOC_TYPE (LTOFF14DR, 0) ++RELOC_TYPE (LTOFF16F, 0) ++RELOC_TYPE (LTOFF16WF, 0) ++RELOC_TYPE (LTOFF16DF, 0) ++RELOC_TYPE (SECREL64, 0) ++RELOC_TYPE (BASEREL14WR, 0) ++RELOC_TYPE (BASEREL14DR, 0) ++RELOC_TYPE (SEGREL64, 0) ++RELOC_TYPE (PLTOFF14WR, 0) ++RELOC_TYPE (PLTOFF14DR, 0) ++RELOC_TYPE (PLTOFF16F, 0) ++RELOC_TYPE (PLTOFF16WF, 0) ++RELOC_TYPE (PLTOFF16DF, 0) ++RELOC_TYPE (LTOFF_FPTR64, 0) ++RELOC_TYPE (LTOFF_FPTR14WR, 0) ++RELOC_TYPE (LTOFF_FPTR14DR, 0) ++RELOC_TYPE (LTOFF_FPTR16F, 0) ++RELOC_TYPE (LTOFF_FPTR16WF, 0) ++RELOC_TYPE (LTOFF_FPTR16DF, 0) ++RELOC_TYPE (COPY, EXEC) ++RELOC_TYPE (IPLT, EXEC|DYN) ++RELOC_TYPE (EPLT, 0) ++RELOC_TYPE (TPREL32, DYN) ++RELOC_TYPE (TPREL21L, 0) ++RELOC_TYPE (TPREL14R, 0) ++RELOC_TYPE 
(LTOFF_TP21L, 0) ++RELOC_TYPE (LTOFF_TP14R, 0) ++RELOC_TYPE (LTOFF_TP14F, 0) ++RELOC_TYPE (TPREL64, 0) ++RELOC_TYPE (TPREL14WR, 0) ++RELOC_TYPE (TPREL14DR, 0) ++RELOC_TYPE (TPREL16F, 0) ++RELOC_TYPE (TPREL16WF, 0) ++RELOC_TYPE (TPREL16DF, 0) ++RELOC_TYPE (LTOFF_TP64, 0) ++RELOC_TYPE (LTOFF_TP14WR, 0) ++RELOC_TYPE (LTOFF_TP14DR, 0) ++RELOC_TYPE (LTOFF_TP16F, 0) ++RELOC_TYPE (LTOFF_TP16WF, 0) ++RELOC_TYPE (LTOFF_TP16DF, 0) ++RELOC_TYPE (TLS_DTPMOD32, DYN) ++RELOC_TYPE (TLS_DTPMOD64, DYN) ++ ++#define NO_RELATIVE_RELOC 1 +Index: b/backends/parisc_retval.c +=================================================================== +--- /dev/null ++++ b/backends/parisc_retval.c +@@ -0,0 +1,213 @@ ++/* Function return value location for Linux/PA-RISC ABI. ++ Copyright (C) 2005 Red Hat, Inc. ++ This file is part of Red Hat elfutils. ++ ++ Red Hat elfutils is free software; you can redistribute it and/or modify ++ it under the terms of the GNU General Public License as published by the ++ Free Software Foundation; version 2 of the License. ++ ++ Red Hat elfutils is distributed in the hope that it will be useful, but ++ WITHOUT ANY WARRANTY; without even the implied warranty of ++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ General Public License for more details. ++ ++ You should have received a copy of the GNU General Public License along ++ with Red Hat elfutils; if not, write to the Free Software Foundation, ++ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. ++ ++ Red Hat elfutils is an included package of the Open Invention Network. ++ An included package of the Open Invention Network is a package for which ++ Open Invention Network licensees cross-license their patents. No patent ++ license is granted, either expressly or impliedly, by designation as an ++ included package. Should you wish to participate in the Open Invention ++ Network licensing program, please visit www.openinventionnetwork.com ++ . */ ++ ++#ifdef HAVE_CONFIG_H ++# include ++#endif ++ ++#include ++#include ++ ++#define BACKEND parisc_ ++#include "libebl_CPU.h" ++#include "libebl_parisc.h" ++ ++/* %r28, or pair %r28, %r29. */ ++static const Dwarf_Op loc_intreg32[] = ++ { ++ { .atom = DW_OP_reg28 }, { .atom = DW_OP_piece, .number = 4 }, ++ { .atom = DW_OP_reg29 }, { .atom = DW_OP_piece, .number = 4 }, ++ }; ++ ++static const Dwarf_Op loc_intreg[] = ++ { ++ { .atom = DW_OP_reg28 }, { .atom = DW_OP_piece, .number = 8 }, ++ { .atom = DW_OP_reg29 }, { .atom = DW_OP_piece, .number = 8 }, ++ }; ++#define nloc_intreg 1 ++#define nloc_intregpair 4 ++ ++/* %fr4L, or pair %fr4L, %fr4R on pa-32 */ ++static const Dwarf_Op loc_fpreg32[] = ++ { ++ { .atom = DW_OP_regx, .number = 72 }, { .atom = DW_OP_piece, .number = 4 }, ++ { .atom = DW_OP_regx, .number = 73 }, { .atom = DW_OP_piece, .number = 4 }, ++ }; ++#define nloc_fpreg32 2 ++#define nloc_fpregpair32 4 ++ ++/* $fr4 */ ++static const Dwarf_Op loc_fpreg[] = ++ { ++ { .atom = DW_OP_regx, .number = 72 }, ++ }; ++#define nloc_fpreg 1 ++ ++#if 0 ++/* The return value is a structure and is actually stored in stack space ++ passed in a hidden argument by the caller. Address of the location is stored ++ in %r28 before function call, but it may be changed by function. */ ++static const Dwarf_Op loc_aggregate[] = ++ { ++ { .atom = DW_OP_breg28 }, ++ }; ++#define nloc_aggregate 1 ++#endif ++ ++static int ++parisc_return_value_location_ (Dwarf_Die *functypedie, const Dwarf_Op **locp, int pa64) ++{ ++ Dwarf_Word regsize = pa64 ? 
8 : 4; ++ ++ /* Start with the function's type, and get the DW_AT_type attribute, ++ which is the type of the return value. */ ++ ++ Dwarf_Attribute attr_mem; ++ Dwarf_Attribute *attr = dwarf_attr_integrate (functypedie, DW_AT_type, &attr_mem); ++ if (attr == NULL) ++ /* The function has no return value, like a `void' function in C. */ ++ return 0; ++ ++ Dwarf_Die die_mem; ++ Dwarf_Die *typedie = dwarf_formref_die (attr, &die_mem); ++ int tag = dwarf_tag (typedie); ++ ++ /* Follow typedefs and qualifiers to get to the actual type. */ ++ while (tag == DW_TAG_typedef ++ || tag == DW_TAG_const_type || tag == DW_TAG_volatile_type ++ || tag == DW_TAG_restrict_type) ++ { ++ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); ++ typedie = dwarf_formref_die (attr, &die_mem); ++ tag = dwarf_tag (typedie); ++ } ++ ++ switch (tag) ++ { ++ case -1: ++ return -1; ++ ++ case DW_TAG_subrange_type: ++ if (! dwarf_hasattr_integrate (typedie, DW_AT_byte_size)) ++ { ++ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); ++ typedie = dwarf_formref_die (attr, &die_mem); ++ tag = dwarf_tag (typedie); ++ } ++ /* Fall through. */ ++ ++ case DW_TAG_base_type: ++ case DW_TAG_enumeration_type: ++ case DW_TAG_pointer_type: ++ case DW_TAG_ptr_to_member_type: ++ { ++ Dwarf_Word size; ++ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_byte_size, ++ &attr_mem), &size) != 0) ++ { ++ if (tag == DW_TAG_pointer_type || tag == DW_TAG_ptr_to_member_type) ++ size = 4; ++ else ++ return -1; ++ } ++ if (tag == DW_TAG_base_type) ++ { ++ Dwarf_Word encoding; ++ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding, ++ &attr_mem), &encoding) != 0) ++ return -1; ++ ++ if (encoding == DW_ATE_float) ++ { ++ if (pa64) { ++ *locp = loc_fpreg; ++ if (size <= 8) ++ return nloc_fpreg; ++ } ++ else { ++ *locp = loc_fpreg32; ++ if (size <= 4) ++ return nloc_fpreg32; ++ else if (size <= 8) ++ return nloc_fpregpair32; ++ } ++ goto aggregate; ++ } ++ } ++ if (pa64) ++ *locp = loc_intreg; ++ else ++ *locp = loc_intreg32; ++ if (size <= regsize) ++ return nloc_intreg; ++ if (size <= 2 * regsize) ++ return nloc_intregpair; ++ ++ /* Else fall through. */ ++ } ++ ++ case DW_TAG_structure_type: ++ case DW_TAG_class_type: ++ case DW_TAG_union_type: ++ case DW_TAG_array_type: ++ aggregate: { ++ Dwarf_Word size; ++ if (dwarf_aggregate_size (typedie, &size) != 0) ++ return -1; ++ if (pa64) ++ *locp = loc_intreg; ++ else ++ *locp = loc_intreg32; ++ if (size <= regsize) ++ return nloc_intreg; ++ if (size <= 2 * regsize) ++ return nloc_intregpair; ++#if 0 ++ /* there should be some way to know this location... But I do not see it. */ ++ *locp = loc_aggregate; ++ return nloc_aggregate; ++#endif ++ /* fall through. */ ++ } ++ } ++ ++ /* XXX We don't have a good way to return specific errors from ebl calls. ++ This value means we do not understand the type, but it is well-formed ++ DWARF and might be valid. */ ++ return -2; ++} ++ ++int ++parisc_return_value_location_32 (Dwarf_Die *functypedie, const Dwarf_Op **locp) ++{ ++ return parisc_return_value_location_ (functypedie, locp, 0); ++} ++ ++int ++parisc_return_value_location_64 (Dwarf_Die *functypedie, const Dwarf_Op **locp) ++{ ++ return parisc_return_value_location_ (functypedie, locp, 1); ++} ++ +Index: b/backends/parisc_symbol.c +=================================================================== +--- /dev/null ++++ b/backends/parisc_symbol.c +@@ -0,0 +1,112 @@ ++/* PA-RISC specific symbolic name handling. ++ Copyright (C) 2002, 2005 Red Hat, Inc. 
++ This file is part of Red Hat elfutils. ++ Written by Ulrich Drepper , 2002. ++ ++ Red Hat elfutils is free software; you can redistribute it and/or modify ++ it under the terms of the GNU General Public License as published by the ++ Free Software Foundation; version 2 of the License. ++ ++ Red Hat elfutils is distributed in the hope that it will be useful, but ++ WITHOUT ANY WARRANTY; without even the implied warranty of ++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ General Public License for more details. ++ ++ You should have received a copy of the GNU General Public License along ++ with Red Hat elfutils; if not, write to the Free Software Foundation, ++ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. ++ ++ Red Hat elfutils is an included package of the Open Invention Network. ++ An included package of the Open Invention Network is a package for which ++ Open Invention Network licensees cross-license their patents. No patent ++ license is granted, either expressly or impliedly, by designation as an ++ included package. Should you wish to participate in the Open Invention ++ Network licensing program, please visit www.openinventionnetwork.com ++ . */ ++ ++#ifdef HAVE_CONFIG_H ++# include ++#endif ++ ++#include ++#include ++ ++#define BACKEND parisc_ ++#include "libebl_CPU.h" ++ ++const char * ++parisc_segment_type_name (int segment, char *buf __attribute__ ((unused)), ++ size_t len __attribute__ ((unused))) ++{ ++ switch (segment) ++ { ++ case PT_PARISC_ARCHEXT: ++ return "PARISC_ARCHEXT"; ++ case PT_PARISC_UNWIND: ++ return "PARISC_UNWIND"; ++ default: ++ break; ++ } ++ return NULL; ++} ++ ++/* Return symbolic representation of symbol type. */ ++const char * ++parisc_symbol_type_name(int symbol, char *buf __attribute__ ((unused)), ++ size_t len __attribute__ ((unused))) ++{ ++ if (symbol == STT_PARISC_MILLICODE) ++ return "PARISC_MILLI"; ++ return NULL; ++} ++ ++/* Return symbolic representation of section type. */ ++const char * ++parisc_section_type_name (int type, ++ char *buf __attribute__ ((unused)), ++ size_t len __attribute__ ((unused))) ++{ ++ switch (type) ++ { ++ case SHT_PARISC_EXT: ++ return "PARISC_EXT"; ++ case SHT_PARISC_UNWIND: ++ return "PARISC_UNWIND"; ++ case SHT_PARISC_DOC: ++ return "PARISC_DOC"; ++ } ++ ++ return NULL; ++} ++ ++/* Check whether machine flags are valid. */ ++bool ++parisc_machine_flag_check (GElf_Word flags) ++{ ++ if (flags &~ (EF_PARISC_TRAPNIL | EF_PARISC_EXT | EF_PARISC_LSB | ++ EF_PARISC_WIDE | EF_PARISC_NO_KABP | ++ EF_PARISC_LAZYSWAP | EF_PARISC_ARCH)) ++ return 0; ++ ++ GElf_Word arch = flags & EF_PARISC_ARCH; ++ ++ return ((arch == EFA_PARISC_1_0) || (arch == EFA_PARISC_1_1) || ++ (arch == EFA_PARISC_2_0)); ++} ++ ++/* Check for the simple reloc types. 
*/ ++Elf_Type ++parisc_reloc_simple_type (Ebl *ebl __attribute__ ((unused)), int type) ++{ ++ switch (type) ++ { ++ case R_PARISC_DIR64: ++ case R_PARISC_SECREL64: ++ return ELF_T_XWORD; ++ case R_PARISC_DIR32: ++ case R_PARISC_SECREL32: ++ return ELF_T_WORD; ++ default: ++ return ELF_T_NUM; ++ } ++} +Index: b/backends/libebl_parisc.h +=================================================================== +--- /dev/null ++++ b/backends/libebl_parisc.h +@@ -0,0 +1,9 @@ ++#ifndef _LIBEBL_HPPA_H ++#define _LIBEBL_HPPA_H 1 ++ ++#include ++ ++extern int parisc_return_value_location_32(Dwarf_Die *, const Dwarf_Op **locp); ++extern int parisc_return_value_location_64(Dwarf_Die *, const Dwarf_Op **locp); ++ ++#endif +Index: b/backends/Makefile.am +=================================================================== +--- a/backends/Makefile.am ++++ b/backends/Makefile.am +@@ -33,12 +33,12 @@ AM_CPPFLAGS += -I$(top_srcdir)/libebl -I + + + modules = i386 sh x86_64 ia64 alpha arm aarch64 sparc ppc ppc64 s390 \ +- tilegx m68k bpf ++ tilegx m68k bpf parisc + libebl_pic = libebl_i386_pic.a libebl_sh_pic.a libebl_x86_64_pic.a \ + libebl_ia64_pic.a libebl_alpha_pic.a libebl_arm_pic.a \ + libebl_aarch64_pic.a libebl_sparc_pic.a libebl_ppc_pic.a \ + libebl_ppc64_pic.a libebl_s390_pic.a libebl_tilegx_pic.a \ +- libebl_m68k_pic.a libebl_bpf_pic.a ++ libebl_m68k_pic.a libebl_bpf_pic.a libebl_parisc_pic.a + noinst_LIBRARIES = $(libebl_pic) + noinst_DATA = $(libebl_pic:_pic.a=.so) + +@@ -128,6 +128,9 @@ endif + libebl_bpf_pic_a_SOURCES = $(bpf_SRCS) + am_libebl_bpf_pic_a_OBJECTS = $(bpf_SRCS:.c=.os) + ++parisc_SRCS = parisc_init.c parisc_symbol.c parisc_regs.c parisc_retval.c ++libebl_parisc_pic_a_SOURCES = $(parisc_SRCS) ++am_libebl_parisc_pic_a_OBJECTS = $(parisc_SRCS:.c=.os) + + libebl_%.so libebl_%.map: libebl_%_pic.a $(libelf) $(libdw) + @rm -f $(@:.so=.map) +Index: b/libelf/elf.h +=================================================================== +--- a/libelf/elf.h ++++ b/libelf/elf.h +@@ -2055,16 +2055,24 @@ enum + #define R_PARISC_PCREL17F 12 /* 17 bits of rel. address. */ + #define R_PARISC_PCREL14R 14 /* Right 14 bits of rel. address. */ + #define R_PARISC_DPREL21L 18 /* Left 21 bits of rel. address. */ ++#define R_PARISC_DPREL14WR 19 ++#define R_PARISC_DPREL14DR 20 + #define R_PARISC_DPREL14R 22 /* Right 14 bits of rel. address. */ + #define R_PARISC_GPREL21L 26 /* GP-relative, left 21 bits. */ + #define R_PARISC_GPREL14R 30 /* GP-relative, right 14 bits. */ + #define R_PARISC_LTOFF21L 34 /* LT-relative, left 21 bits. */ + #define R_PARISC_LTOFF14R 38 /* LT-relative, right 14 bits. */ ++#define R_PARISC_DLTIND14F 39 ++#define R_PARISC_SETBASE 40 + #define R_PARISC_SECREL32 41 /* 32 bits section rel. address. */ ++#define R_PARISC_BASEREL21L 42 ++#define R_PARISC_BASEREL17R 43 ++#define R_PARISC_BASEREL14R 46 + #define R_PARISC_SEGBASE 48 /* No relocation, set segment base. */ + #define R_PARISC_SEGREL32 49 /* 32 bits segment rel. address. */ + #define R_PARISC_PLTOFF21L 50 /* PLT rel. address, left 21 bits. */ + #define R_PARISC_PLTOFF14R 54 /* PLT rel. address, right 14 bits. */ ++#define R_PARISC_PLTOFF14F 55 + #define R_PARISC_LTOFF_FPTR32 57 /* 32 bits LT-rel. function pointer. */ + #define R_PARISC_LTOFF_FPTR21L 58 /* LT-rel. fct ptr, left 21 bits. */ + #define R_PARISC_LTOFF_FPTR14R 62 /* LT-rel. fct ptr, right 14 bits. */ +@@ -2073,6 +2081,7 @@ enum + #define R_PARISC_PLABEL21L 66 /* Left 21 bits of fdesc address. */ + #define R_PARISC_PLABEL14R 70 /* Right 14 bits of fdesc address. 
*/ + #define R_PARISC_PCREL64 72 /* 64 bits PC-rel. address. */ ++#define R_PARISC_PCREL22C 73 + #define R_PARISC_PCREL22F 74 /* 22 bits PC-rel. address. */ + #define R_PARISC_PCREL14WR 75 /* PC-rel. address, right 14 bits. */ + #define R_PARISC_PCREL14DR 76 /* PC rel. address, right 14 bits. */ +@@ -2098,6 +2107,8 @@ enum + #define R_PARISC_LTOFF16WF 102 /* 16 bits LT-rel. address. */ + #define R_PARISC_LTOFF16DF 103 /* 16 bits LT-rel. address. */ + #define R_PARISC_SECREL64 104 /* 64 bits section rel. address. */ ++#define R_PARISC_BASEREL14WR 107 ++#define R_PARISC_BASEREL14DR 108 + #define R_PARISC_SEGREL64 112 /* 64 bits segment rel. address. */ + #define R_PARISC_PLTOFF14WR 115 /* PLT-rel. address, right 14 bits. */ + #define R_PARISC_PLTOFF14DR 116 /* PLT-rel. address, right 14 bits. */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/hurd_path.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/hurd_path.patch new file mode 100644 index 000000000..a4d568b08 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/hurd_path.patch @@ -0,0 +1,17 @@ +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia + +Index: elfutils-0.165/tests/run-native-test.sh +=================================================================== +--- elfutils-0.165.orig/tests/run-native-test.sh ++++ elfutils-0.165/tests/run-native-test.sh +@@ -83,6 +83,9 @@ native_test() + # "cannot attach to process: Function not implemented". + [ "$(uname)" = "GNU/kFreeBSD" ] && exit 77 + ++# hurd's /proc/$PID/maps does not give paths yet. ++[ "$(uname)" = "GNU" ] && exit 77 ++ + native_test ${abs_builddir}/allregs + native_test ${abs_builddir}/funcretval + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/ignore_strmerge.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/ignore_strmerge.diff new file mode 100644 index 000000000..3570deca9 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/ignore_strmerge.diff @@ -0,0 +1,14 @@ +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia + +--- elfutils-0.165.orig/tests/run-strip-strmerge.sh ++++ elfutils-0.165/tests/run-strip-strmerge.sh +@@ -30,7 +30,7 @@ remerged=remerged.elf + tempfiles $merged $stripped $debugfile $remerged + + echo elflint $input +-testrun ${abs_top_builddir}/src/elflint --gnu $input ++testrun_on_self_skip ${abs_top_builddir}/src/elflint --gnu $input + echo elfstrmerge + testrun ${abs_top_builddir}/tests/elfstrmerge -o $merged $input + echo elflint $merged diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/kfreebsd_path.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/kfreebsd_path.patch new file mode 100644 index 000000000..49085d1c8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/kfreebsd_path.patch @@ -0,0 +1,20 @@ +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia + +Index: b/tests/run-native-test.sh +=================================================================== +--- a/tests/run-native-test.sh ++++ b/tests/run-native-test.sh +@@ -77,6 +77,12 @@ native_test() + test $native -eq 0 || testrun "$@" -p $native > /dev/null + } + ++# On the Debian buildds, GNU/kFreeBSD linprocfs /proc/$PID/maps does ++# not give absolute paths due to sbuild's bind mounts 
(bug #570805) ++# therefore the next two test programs are expected to fail with ++# "cannot attach to process: Function not implemented". ++[ "$(uname)" = "GNU/kFreeBSD" ] && exit 77 ++ + native_test ${abs_builddir}/allregs + native_test ${abs_builddir}/funcretval + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/mips_backend.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/mips_backend.diff new file mode 100644 index 000000000..a5e76dda4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/mips_backend.diff @@ -0,0 +1,686 @@ +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia + +Index: b/backends/mips_init.c +=================================================================== +--- /dev/null ++++ b/backends/mips_init.c +@@ -0,0 +1,59 @@ ++/* Initialization of mips specific backend library. ++ Copyright (C) 2006 Red Hat, Inc. ++ This file is part of Red Hat elfutils. ++ ++ Red Hat elfutils is free software; you can redistribute it and/or modify ++ it under the terms of the GNU General Public License as published by the ++ Free Software Foundation; version 2 of the License. ++ ++ Red Hat elfutils is distributed in the hope that it will be useful, but ++ WITHOUT ANY WARRANTY; without even the implied warranty of ++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ General Public License for more details. ++ ++ You should have received a copy of the GNU General Public License along ++ with Red Hat elfutils; if not, write to the Free Software Foundation, ++ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. ++ ++ Red Hat elfutils is an included package of the Open Invention Network. ++ An included package of the Open Invention Network is a package for which ++ Open Invention Network licensees cross-license their patents. No patent ++ license is granted, either expressly or impliedly, by designation as an ++ included package. Should you wish to participate in the Open Invention ++ Network licensing program, please visit www.openinventionnetwork.com ++ . */ ++ ++#ifdef HAVE_CONFIG_H ++# include ++#endif ++ ++#define BACKEND mips_ ++#define RELOC_PREFIX R_MIPS_ ++#include "libebl_CPU.h" ++ ++/* This defines the common reloc hooks based on mips_reloc.def. */ ++#include "common-reloc.c" ++ ++const char * ++mips_init (Elf *elf __attribute__ ((unused)), ++ GElf_Half machine __attribute__ ((unused)), ++ Ebl *eh, ++ size_t ehlen) ++{ ++ /* Check whether the Elf_BH object has a sufficent size. */ ++ if (ehlen < sizeof (Ebl)) ++ return NULL; ++ ++ /* We handle it. */ ++ if (machine == EM_MIPS) ++ eh->name = "MIPS R3000 big-endian"; ++ else if (machine == EM_MIPS_RS3_LE) ++ eh->name = "MIPS R3000 little-endian"; ++ ++ mips_init_reloc (eh); ++ HOOK (eh, reloc_simple_type); ++ HOOK (eh, return_value_location); ++ HOOK (eh, register_info); ++ ++ return MODVERSION; ++} +Index: b/backends/mips_regs.c +=================================================================== +--- /dev/null ++++ b/backends/mips_regs.c +@@ -0,0 +1,104 @@ ++/* Register names and numbers for MIPS DWARF. ++ Copyright (C) 2006 Red Hat, Inc. ++ This file is part of Red Hat elfutils. ++ ++ Red Hat elfutils is free software; you can redistribute it and/or modify ++ it under the terms of the GNU General Public License as published by the ++ Free Software Foundation; version 2 of the License. 
++ ++ Red Hat elfutils is distributed in the hope that it will be useful, but ++ WITHOUT ANY WARRANTY; without even the implied warranty of ++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ General Public License for more details. ++ ++ You should have received a copy of the GNU General Public License along ++ with Red Hat elfutils; if not, write to the Free Software Foundation, ++ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. ++ ++ Red Hat elfutils is an included package of the Open Invention Network. ++ An included package of the Open Invention Network is a package for which ++ Open Invention Network licensees cross-license their patents. No patent ++ license is granted, either expressly or impliedly, by designation as an ++ included package. Should you wish to participate in the Open Invention ++ Network licensing program, please visit www.openinventionnetwork.com ++ . */ ++ ++#ifdef HAVE_CONFIG_H ++# include ++#endif ++ ++#include ++#include ++ ++#define BACKEND mips_ ++#include "libebl_CPU.h" ++ ++ssize_t ++mips_register_info (Ebl *ebl __attribute__((unused)), ++ int regno, char *name, size_t namelen, ++ const char **prefix, const char **setname, ++ int *bits, int *type) ++{ ++ if (name == NULL) ++ return 66; ++ ++ if (regno < 0 || regno > 65 || namelen < 4) ++ return -1; ++ ++ *prefix = "$"; ++ ++ if (regno < 32) ++ { ++ *setname = "integer"; ++ *type = DW_ATE_signed; ++ *bits = 32; ++ if (regno < 32 + 10) ++ { ++ name[0] = regno + '0'; ++ namelen = 1; ++ } ++ else ++ { ++ name[0] = (regno / 10) + '0'; ++ name[1] = (regno % 10) + '0'; ++ namelen = 2; ++ } ++ } ++ else if (regno < 64) ++ { ++ *setname = "FPU"; ++ *type = DW_ATE_float; ++ *bits = 32; ++ name[0] = 'f'; ++ if (regno < 32 + 10) ++ { ++ name[1] = (regno - 32) + '0'; ++ namelen = 2; ++ } ++ else ++ { ++ name[1] = (regno - 32) / 10 + '0'; ++ name[2] = (regno - 32) % 10 + '0'; ++ namelen = 3; ++ } ++ } ++ else if (regno == 64) ++ { ++ *type = DW_ATE_signed; ++ *bits = 32; ++ name[0] = 'h'; ++ name[1] = 'i'; ++ namelen = 2; ++ } ++ else ++ { ++ *type = DW_ATE_signed; ++ *bits = 32; ++ name[0] = 'l'; ++ name[1] = 'o'; ++ namelen = 2; ++ } ++ ++ name[namelen++] = '\0'; ++ return namelen; ++} +Index: b/backends/mips_reloc.def +=================================================================== +--- /dev/null ++++ b/backends/mips_reloc.def +@@ -0,0 +1,79 @@ ++/* List the relocation types for mips. -*- C -*- ++ Copyright (C) 2006 Red Hat, Inc. ++ This file is part of Red Hat elfutils. ++ ++ Red Hat elfutils is free software; you can redistribute it and/or modify ++ it under the terms of the GNU General Public License as published by the ++ Free Software Foundation; version 2 of the License. ++ ++ Red Hat elfutils is distributed in the hope that it will be useful, but ++ WITHOUT ANY WARRANTY; without even the implied warranty of ++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ General Public License for more details. ++ ++ You should have received a copy of the GNU General Public License along ++ with Red Hat elfutils; if not, write to the Free Software Foundation, ++ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. ++ ++ Red Hat elfutils is an included package of the Open Invention Network. ++ An included package of the Open Invention Network is a package for which ++ Open Invention Network licensees cross-license their patents. No patent ++ license is granted, either expressly or impliedly, by designation as an ++ included package. 
Should you wish to participate in the Open Invention ++ Network licensing program, please visit www.openinventionnetwork.com ++ . */ ++ ++/* NAME, REL|EXEC|DYN */ ++ ++RELOC_TYPE (NONE, 0) ++RELOC_TYPE (16, 0) ++RELOC_TYPE (32, 0) ++RELOC_TYPE (REL32, 0) ++RELOC_TYPE (26, 0) ++RELOC_TYPE (HI16, 0) ++RELOC_TYPE (LO16, 0) ++RELOC_TYPE (GPREL16, 0) ++RELOC_TYPE (LITERAL, 0) ++RELOC_TYPE (GOT16, 0) ++RELOC_TYPE (PC16, 0) ++RELOC_TYPE (CALL16, 0) ++RELOC_TYPE (GPREL32, 0) ++ ++RELOC_TYPE (SHIFT5, 0) ++RELOC_TYPE (SHIFT6, 0) ++RELOC_TYPE (64, 0) ++RELOC_TYPE (GOT_DISP, 0) ++RELOC_TYPE (GOT_PAGE, 0) ++RELOC_TYPE (GOT_OFST, 0) ++RELOC_TYPE (GOT_HI16, 0) ++RELOC_TYPE (GOT_LO16, 0) ++RELOC_TYPE (SUB, 0) ++RELOC_TYPE (INSERT_A, 0) ++RELOC_TYPE (INSERT_B, 0) ++RELOC_TYPE (DELETE, 0) ++RELOC_TYPE (HIGHER, 0) ++RELOC_TYPE (HIGHEST, 0) ++RELOC_TYPE (CALL_HI16, 0) ++RELOC_TYPE (CALL_LO16, 0) ++RELOC_TYPE (SCN_DISP, 0) ++RELOC_TYPE (REL16, 0) ++RELOC_TYPE (ADD_IMMEDIATE, 0) ++RELOC_TYPE (PJUMP, 0) ++RELOC_TYPE (RELGOT, 0) ++RELOC_TYPE (JALR, 0) ++RELOC_TYPE (TLS_DTPMOD32, 0) ++RELOC_TYPE (TLS_DTPREL32, 0) ++RELOC_TYPE (TLS_DTPMOD64, 0) ++RELOC_TYPE (TLS_DTPREL64, 0) ++RELOC_TYPE (TLS_GD, 0) ++RELOC_TYPE (TLS_LDM, 0) ++RELOC_TYPE (TLS_DTPREL_HI16, 0) ++RELOC_TYPE (TLS_DTPREL_LO16, 0) ++RELOC_TYPE (TLS_GOTTPREL, 0) ++RELOC_TYPE (TLS_TPREL32, 0) ++RELOC_TYPE (TLS_TPREL64, 0) ++RELOC_TYPE (TLS_TPREL_HI16, 0) ++RELOC_TYPE (TLS_TPREL_LO16, 0) ++ ++#define NO_COPY_RELOC 1 ++#define NO_RELATIVE_RELOC 1 +Index: b/backends/mips_retval.c +=================================================================== +--- /dev/null ++++ b/backends/mips_retval.c +@@ -0,0 +1,321 @@ ++/* Function return value location for Linux/mips ABI. ++ Copyright (C) 2005 Red Hat, Inc. ++ This file is part of Red Hat elfutils. ++ ++ Red Hat elfutils is free software; you can redistribute it and/or modify ++ it under the terms of the GNU General Public License as published by the ++ Free Software Foundation; version 2 of the License. ++ ++ Red Hat elfutils is distributed in the hope that it will be useful, but ++ WITHOUT ANY WARRANTY; without even the implied warranty of ++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ General Public License for more details. ++ ++ You should have received a copy of the GNU General Public License along ++ with Red Hat elfutils; if not, write to the Free Software Foundation, ++ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. ++ ++ Red Hat elfutils is an included package of the Open Invention Network. ++ An included package of the Open Invention Network is a package for which ++ Open Invention Network licensees cross-license their patents. No patent ++ license is granted, either expressly or impliedly, by designation as an ++ included package. Should you wish to participate in the Open Invention ++ Network licensing program, please visit www.openinventionnetwork.com ++ . */ ++ ++#ifdef HAVE_CONFIG_H ++# include ++#endif ++ ++#include ++#include ++#include ++#include ++ ++#include "../libebl/libeblP.h" ++#include "../libdw/libdwP.h" ++ ++#define BACKEND mips_ ++#include "libebl_CPU.h" ++ ++/* The ABI of the file. Also see EF_MIPS_ABI2 above. */ ++#define EF_MIPS_ABI 0x0000F000 ++ ++/* The original o32 abi. 
*/ ++#define E_MIPS_ABI_O32 0x00001000 ++ ++/* O32 extended to work on 64 bit architectures */ ++#define E_MIPS_ABI_O64 0x00002000 ++ ++/* EABI in 32 bit mode */ ++#define E_MIPS_ABI_EABI32 0x00003000 ++ ++/* EABI in 64 bit mode */ ++#define E_MIPS_ABI_EABI64 0x00004000 ++ ++/* All the possible MIPS ABIs. */ ++enum mips_abi ++ { ++ MIPS_ABI_UNKNOWN = 0, ++ MIPS_ABI_N32, ++ MIPS_ABI_O32, ++ MIPS_ABI_N64, ++ MIPS_ABI_O64, ++ MIPS_ABI_EABI32, ++ MIPS_ABI_EABI64, ++ MIPS_ABI_LAST ++ }; ++ ++/* Find the mips ABI of the current file */ ++enum mips_abi find_mips_abi(Elf *elf) ++{ ++ GElf_Ehdr ehdr_mem; ++ GElf_Ehdr *ehdr = gelf_getehdr (elf, &ehdr_mem); ++ ++ if (ehdr == NULL) ++ return MIPS_ABI_LAST; ++ ++ GElf_Word elf_flags = ehdr->e_flags; ++ ++ /* Check elf_flags to see if it specifies the ABI being used. */ ++ switch ((elf_flags & EF_MIPS_ABI)) ++ { ++ case E_MIPS_ABI_O32: ++ return MIPS_ABI_O32; ++ case E_MIPS_ABI_O64: ++ return MIPS_ABI_O64; ++ case E_MIPS_ABI_EABI32: ++ return MIPS_ABI_EABI32; ++ case E_MIPS_ABI_EABI64: ++ return MIPS_ABI_EABI64; ++ default: ++ if ((elf_flags & EF_MIPS_ABI2)) ++ return MIPS_ABI_N32; ++ } ++ ++ /* GCC creates a pseudo-section whose name describes the ABI. */ ++ size_t shstrndx; ++ if (elf_getshdrstrndx (elf, &shstrndx) < 0) ++ return MIPS_ABI_LAST; ++ ++ const char *name; ++ Elf_Scn *scn = NULL; ++ while ((scn = elf_nextscn (elf, scn)) != NULL) ++ { ++ GElf_Shdr shdr_mem; ++ GElf_Shdr *shdr = gelf_getshdr (scn, &shdr_mem); ++ if (shdr == NULL) ++ return MIPS_ABI_LAST; ++ ++ name = elf_strptr (elf, shstrndx, shdr->sh_name) ?: ""; ++ if (strncmp (name, ".mdebug.", 8) != 0) ++ continue; ++ ++ if (strcmp (name, ".mdebug.abi32") == 0) ++ return MIPS_ABI_O32; ++ else if (strcmp (name, ".mdebug.abiN32") == 0) ++ return MIPS_ABI_N32; ++ else if (strcmp (name, ".mdebug.abi64") == 0) ++ return MIPS_ABI_N64; ++ else if (strcmp (name, ".mdebug.abiO64") == 0) ++ return MIPS_ABI_O64; ++ else if (strcmp (name, ".mdebug.eabi32") == 0) ++ return MIPS_ABI_EABI32; ++ else if (strcmp (name, ".mdebug.eabi64") == 0) ++ return MIPS_ABI_EABI64; ++ else ++ return MIPS_ABI_UNKNOWN; ++ } ++ ++ return MIPS_ABI_UNKNOWN; ++} ++ ++unsigned int ++mips_abi_regsize (enum mips_abi abi) ++{ ++ switch (abi) ++ { ++ case MIPS_ABI_EABI32: ++ case MIPS_ABI_O32: ++ return 4; ++ case MIPS_ABI_N32: ++ case MIPS_ABI_N64: ++ case MIPS_ABI_O64: ++ case MIPS_ABI_EABI64: ++ return 8; ++ case MIPS_ABI_UNKNOWN: ++ case MIPS_ABI_LAST: ++ default: ++ return 0; ++ } ++} ++ ++ ++/* $v0 or pair $v0, $v1 */ ++static const Dwarf_Op loc_intreg_o32[] = ++ { ++ { .atom = DW_OP_reg2 }, { .atom = DW_OP_piece, .number = 4 }, ++ { .atom = DW_OP_reg3 }, { .atom = DW_OP_piece, .number = 4 }, ++ }; ++ ++static const Dwarf_Op loc_intreg[] = ++ { ++ { .atom = DW_OP_reg2 }, { .atom = DW_OP_piece, .number = 8 }, ++ { .atom = DW_OP_reg3 }, { .atom = DW_OP_piece, .number = 8 }, ++ }; ++#define nloc_intreg 1 ++#define nloc_intregpair 4 ++ ++/* $f0 (float), or pair $f0, $f1 (double). ++ * f2/f3 are used for COMPLEX (= 2 doubles) returns in Fortran */ ++static const Dwarf_Op loc_fpreg_o32[] = ++ { ++ { .atom = DW_OP_regx, .number = 32 }, { .atom = DW_OP_piece, .number = 4 }, ++ { .atom = DW_OP_regx, .number = 33 }, { .atom = DW_OP_piece, .number = 4 }, ++ { .atom = DW_OP_regx, .number = 34 }, { .atom = DW_OP_piece, .number = 4 }, ++ { .atom = DW_OP_regx, .number = 35 }, { .atom = DW_OP_piece, .number = 4 }, ++ }; ++ ++/* $f0, or pair $f0, $f2. 
*/ ++static const Dwarf_Op loc_fpreg[] = ++ { ++ { .atom = DW_OP_regx, .number = 32 }, { .atom = DW_OP_piece, .number = 8 }, ++ { .atom = DW_OP_regx, .number = 34 }, { .atom = DW_OP_piece, .number = 8 }, ++ }; ++#define nloc_fpreg 1 ++#define nloc_fpregpair 4 ++#define nloc_fpregquad 8 ++ ++/* The return value is a structure and is actually stored in stack space ++ passed in a hidden argument by the caller. But, the compiler ++ helpfully returns the address of that space in $v0. */ ++static const Dwarf_Op loc_aggregate[] = ++ { ++ { .atom = DW_OP_breg2, .number = 0 } ++ }; ++#define nloc_aggregate 1 ++ ++int ++mips_return_value_location (Dwarf_Die *functypedie, const Dwarf_Op **locp) ++{ ++ /* First find the ABI used by the elf object */ ++ enum mips_abi abi = find_mips_abi(functypedie->cu->dbg->elf); ++ ++ /* Something went seriously wrong while trying to figure out the ABI */ ++ if (abi == MIPS_ABI_LAST) ++ return -1; ++ ++ /* We couldn't identify the ABI, but the file seems valid */ ++ if (abi == MIPS_ABI_UNKNOWN) ++ return -2; ++ ++ /* Can't handle EABI variants */ ++ if ((abi == MIPS_ABI_EABI32) || (abi == MIPS_ABI_EABI64)) ++ return -2; ++ ++ unsigned int regsize = mips_abi_regsize (abi); ++ if (!regsize) ++ return -2; ++ ++ /* Start with the function's type, and get the DW_AT_type attribute, ++ which is the type of the return value. */ ++ ++ Dwarf_Attribute attr_mem; ++ Dwarf_Attribute *attr = dwarf_attr_integrate (functypedie, DW_AT_type, &attr_mem); ++ if (attr == NULL) ++ /* The function has no return value, like a `void' function in C. */ ++ return 0; ++ ++ Dwarf_Die die_mem; ++ Dwarf_Die *typedie = dwarf_formref_die (attr, &die_mem); ++ int tag = dwarf_tag (typedie); ++ ++ /* Follow typedefs and qualifiers to get to the actual type. */ ++ while (tag == DW_TAG_typedef ++ || tag == DW_TAG_const_type || tag == DW_TAG_volatile_type ++ || tag == DW_TAG_restrict_type) ++ { ++ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); ++ typedie = dwarf_formref_die (attr, &die_mem); ++ tag = dwarf_tag (typedie); ++ } ++ ++ switch (tag) ++ { ++ case -1: ++ return -1; ++ ++ case DW_TAG_subrange_type: ++ if (! dwarf_hasattr_integrate (typedie, DW_AT_byte_size)) ++ { ++ attr = dwarf_attr_integrate (typedie, DW_AT_type, &attr_mem); ++ typedie = dwarf_formref_die (attr, &die_mem); ++ tag = dwarf_tag (typedie); ++ } ++ /* Fall through. */ ++ ++ case DW_TAG_base_type: ++ case DW_TAG_enumeration_type: ++ case DW_TAG_pointer_type: ++ case DW_TAG_ptr_to_member_type: ++ { ++ Dwarf_Word size; ++ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_byte_size, ++ &attr_mem), &size) != 0) ++ { ++ if (tag == DW_TAG_pointer_type || tag == DW_TAG_ptr_to_member_type) ++ size = regsize; ++ else ++ return -1; ++ } ++ if (tag == DW_TAG_base_type) ++ { ++ Dwarf_Word encoding; ++ if (dwarf_formudata (dwarf_attr_integrate (typedie, DW_AT_encoding, ++ &attr_mem), &encoding) != 0) ++ return -1; ++ ++#define ABI_LOC(loc, regsize) ((regsize) == 4 ? (loc ## _o32) : (loc)) ++ ++ if (encoding == DW_ATE_float) ++ { ++ *locp = ABI_LOC(loc_fpreg, regsize); ++ if (size <= regsize) ++ return nloc_fpreg; ++ ++ if (size <= 2*regsize) ++ return nloc_fpregpair; ++ ++ if (size <= 4*regsize && abi == MIPS_ABI_O32) ++ return nloc_fpregquad; ++ ++ goto aggregate; ++ } ++ } ++ *locp = ABI_LOC(loc_intreg, regsize); ++ if (size <= regsize) ++ return nloc_intreg; ++ if (size <= 2*regsize) ++ return nloc_intregpair; ++ ++ /* Else fall through. 
Shouldn't happen though (at least with gcc) */ ++ } ++ ++ case DW_TAG_structure_type: ++ case DW_TAG_class_type: ++ case DW_TAG_union_type: ++ case DW_TAG_array_type: ++ aggregate: ++ /* XXX TODO: Can't handle structure return with other ABI's yet :-/ */ ++ if ((abi != MIPS_ABI_O32) && (abi != MIPS_ABI_O64)) ++ return -2; ++ ++ *locp = loc_aggregate; ++ return nloc_aggregate; ++ } ++ ++ /* XXX We don't have a good way to return specific errors from ebl calls. ++ This value means we do not understand the type, but it is well-formed ++ DWARF and might be valid. */ ++ return -2; ++} +Index: b/backends/mips_symbol.c +=================================================================== +--- /dev/null ++++ b/backends/mips_symbol.c +@@ -0,0 +1,52 @@ ++/* MIPS specific symbolic name handling. ++ Copyright (C) 2002, 2003, 2005 Red Hat, Inc. ++ This file is part of Red Hat elfutils. ++ Written by Jakub Jelinek , 2002. ++ ++ Red Hat elfutils is free software; you can redistribute it and/or modify ++ it under the terms of the GNU General Public License as published by the ++ Free Software Foundation; version 2 of the License. ++ ++ Red Hat elfutils is distributed in the hope that it will be useful, but ++ WITHOUT ANY WARRANTY; without even the implied warranty of ++ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ++ General Public License for more details. ++ ++ You should have received a copy of the GNU General Public License along ++ with Red Hat elfutils; if not, write to the Free Software Foundation, ++ Inc., 51 Franklin Street, Fifth Floor, Boston MA 02110-1301 USA. ++ ++ Red Hat elfutils is an included package of the Open Invention Network. ++ An included package of the Open Invention Network is a package for which ++ Open Invention Network licensees cross-license their patents. No patent ++ license is granted, either expressly or impliedly, by designation as an ++ included package. Should you wish to participate in the Open Invention ++ Network licensing program, please visit www.openinventionnetwork.com ++ . */ ++ ++#ifdef HAVE_CONFIG_H ++# include ++#endif ++ ++#include ++#include ++ ++#define BACKEND mips_ ++#include "libebl_CPU.h" ++ ++/* Check for the simple reloc types. 
*/ ++Elf_Type ++mips_reloc_simple_type (Ebl *ebl __attribute__ ((unused)), int type) ++{ ++ switch (type) ++ { ++ case R_MIPS_16: ++ return ELF_T_HALF; ++ case R_MIPS_32: ++ return ELF_T_WORD; ++ case R_MIPS_64: ++ return ELF_T_XWORD; ++ default: ++ return ELF_T_NUM; ++ } ++} +Index: b/libebl/eblopenbackend.c +=================================================================== +--- a/libebl/eblopenbackend.c ++++ b/libebl/eblopenbackend.c +@@ -71,6 +71,8 @@ static const struct + { "sparc", "elf_sparc", "sparc", 5, EM_SPARC, 0, 0 }, + { "sparc", "elf_sparcv8plus", "sparc", 5, EM_SPARC32PLUS, 0, 0 }, + { "s390", "ebl_s390", "s390", 4, EM_S390, 0, 0 }, ++ { "mips", "elf_mips", "mips", 4, EM_MIPS, 0, 0 }, ++ { "mips", "elf_mipsel", "mipsel", 4, EM_MIPS_RS3_LE, 0, 0 }, + + { "m32", "elf_m32", "m32", 3, EM_M32, 0, 0 }, + { "m68k", "elf_m68k", "m68k", 4, EM_68K, ELFCLASS32, ELFDATA2MSB }, +Index: b/backends/Makefile.am +=================================================================== +--- a/backends/Makefile.am ++++ b/backends/Makefile.am +@@ -33,12 +33,12 @@ AM_CPPFLAGS += -I$(top_srcdir)/libebl -I + + + modules = i386 sh x86_64 ia64 alpha arm aarch64 sparc ppc ppc64 s390 \ +- tilegx m68k bpf parisc ++ tilegx m68k bpf parisc mips + libebl_pic = libebl_i386_pic.a libebl_sh_pic.a libebl_x86_64_pic.a \ + libebl_ia64_pic.a libebl_alpha_pic.a libebl_arm_pic.a \ + libebl_aarch64_pic.a libebl_sparc_pic.a libebl_ppc_pic.a \ + libebl_ppc64_pic.a libebl_s390_pic.a libebl_tilegx_pic.a \ +- libebl_m68k_pic.a libebl_bpf_pic.a libebl_parisc_pic.a ++ libebl_m68k_pic.a libebl_bpf_pic.a libebl_parisc_pic.a libebl_mips_pic.a + noinst_LIBRARIES = $(libebl_pic) + noinst_DATA = $(libebl_pic:_pic.a=.so) + +@@ -132,6 +132,10 @@ parisc_SRCS = parisc_init.c parisc_symbo + libebl_parisc_pic_a_SOURCES = $(parisc_SRCS) + am_libebl_parisc_pic_a_OBJECTS = $(parisc_SRCS:.c=.os) + ++mips_SRCS = mips_init.c mips_symbol.c mips_regs.c mips_retval.c ++libebl_mips_pic_a_SOURCES = $(mips_SRCS) ++am_libebl_mips_pic_a_OBJECTS = $(mips_SRCS:.c=.os) ++ + libebl_%.so libebl_%.map: libebl_%_pic.a $(libelf) $(libdw) + @rm -f $(@:.so=.map) + $(AM_V_at)echo 'ELFUTILS_$(PACKAGE_VERSION) { global: $*_init; local: *; };' \ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/mips_readelf_w.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/mips_readelf_w.patch new file mode 100644 index 000000000..790930cf3 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/mips_readelf_w.patch @@ -0,0 +1,25 @@ +From: Kurt Roeckx +Subject: Make readelf -w output debug information on mips +Bug-Debian: http://bugs.debian.org/662041 +Forwarded: not-needed + +Upstreams wants a change where this is handled by a hook that needs +to be filled in by the backend for the arch. 
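The hunk further below widens readelf's filter because MIPS toolchains commonly tag the .debug_* sections with the processor-specific section type SHT_MIPS_DWARF instead of SHT_PROGBITS, so a type-only check never visits them. The following is a minimal libelf sketch of the same idea, shown only for illustration and not part of the patch (the input handling is invented; build with -lelf):

/* Illustrative only: list DWARF debug sections, accepting both
   SHT_PROGBITS and the MIPS-specific SHT_MIPS_DWARF section type. */
#include <elf.h>
#include <fcntl.h>
#include <gelf.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>

int main (int argc, char **argv)
{
  if (argc < 2 || elf_version (EV_CURRENT) == EV_NONE)
    return 1;
  int fd = open (argv[1], O_RDONLY);
  Elf *elf = elf_begin (fd, ELF_C_READ, NULL);
  size_t shstrndx;
  if (elf == NULL || elf_getshdrstrndx (elf, &shstrndx) < 0)
    return 1;
  Elf_Scn *scn = NULL;
  while ((scn = elf_nextscn (elf, scn)) != NULL)
    {
      GElf_Shdr shdr_mem;
      GElf_Shdr *shdr = gelf_getshdr (scn, &shdr_mem);
      if (shdr == NULL)
        continue;
      const char *name = elf_strptr (elf, shstrndx, shdr->sh_name) ?: "";
      if (strncmp (name, ".debug_", 7) != 0)
        continue;
      /* Without the SHT_MIPS_DWARF case the MIPS debug sections are skipped. */
      if (shdr->sh_type == SHT_PROGBITS || shdr->sh_type == SHT_MIPS_DWARF)
        printf ("%s (sh_type 0x%x)\n", name, (unsigned) shdr->sh_type);
    }
  elf_end (elf);
  close (fd);
  return 0;
}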
+ +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia + +Index: b/src/readelf.c +=================================================================== +--- a/src/readelf.c ++++ b/src/readelf.c +@@ -8343,7 +8343,8 @@ print_debug (Dwfl_Module *dwflmod, Ebl * + GElf_Shdr shdr_mem; + GElf_Shdr *shdr = gelf_getshdr (scn, &shdr_mem); + +- if (shdr != NULL && shdr->sh_type == SHT_PROGBITS) ++ if (shdr != NULL && ( ++ (shdr->sh_type == SHT_PROGBITS) || (shdr->sh_type == SHT_MIPS_DWARF))) + { + static const struct + { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/testsuite-ignore-elflint.diff b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/testsuite-ignore-elflint.diff new file mode 100644 index 000000000..3df35768b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/debian/testsuite-ignore-elflint.diff @@ -0,0 +1,42 @@ +On many architectures this test fails because binaries/libs produced by +binutils don't pass elflint. However elfutils shouldn't FTBFS because of this. + +So we run the tests on all archs to see what breaks, but if it breaks we ignore +the result (exitcode 77 means: this test was skipped). + +Upstream-Status: Backport [from debian] +Signed-off-by: Hongxu Jia + +Index: b/tests/run-elflint-self.sh +=================================================================== +--- a/tests/run-elflint-self.sh ++++ b/tests/run-elflint-self.sh +@@ -18,4 +18,4 @@ + + . $srcdir/test-subr.sh + +-testrun_on_self ${abs_top_builddir}/src/elflint --quiet --gnu-ld ++testrun_on_self_skip ${abs_top_builddir}/src/elflint --quiet --gnu-ld +Index: b/tests/test-subr.sh +=================================================================== +--- a/tests/test-subr.sh ++++ b/tests/test-subr.sh +@@ -152,3 +152,18 @@ testrun_on_self_quiet() + # Only exit if something failed + if test $exit_status != 0; then exit $exit_status; fi + } ++ ++# Same as testrun_on_self(), but skip on failure. ++testrun_on_self_skip() ++{ ++ exit_status=0 ++ ++ for file in $self_test_files; do ++ testrun $* $file \ ++ || { echo "*** failure in $* $file"; exit_status=77; } ++ done ++ ++ # Only exit if something failed ++ if test $exit_status != 0; then exit $exit_status; fi ++} ++ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/fallthrough.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/fallthrough.patch new file mode 100644 index 000000000..b2623f9d2 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/fallthrough.patch @@ -0,0 +1,36 @@ +GCC7 adds -Wimplicit-fallthrough to warn when a switch case falls through, +however this causes warnings (which are promoted to errors) with the elfutils +patches from Debian for mips and parisc, which use fallthrough's by design. + +Explicitly mark the intentional fallthrough switch cases with a comment to +disable the warnings where the fallthrough behaviour is desired. + +Upstream-Status: Pending [debian] +Signed-off-by: Joshua Lock + +Index: elfutils-0.168/backends/parisc_retval.c +=================================================================== +--- elfutils-0.168.orig/backends/parisc_retval.c ++++ elfutils-0.168/backends/parisc_retval.c +@@ -166,7 +166,7 @@ parisc_return_value_location_ (Dwarf_Die + return nloc_intregpair; + + /* Else fall through. 
*/ +- } ++ } // fallthrough + + case DW_TAG_structure_type: + case DW_TAG_class_type: +Index: elfutils-0.168/backends/mips_retval.c +=================================================================== +--- elfutils-0.168.orig/backends/mips_retval.c ++++ elfutils-0.168/backends/mips_retval.c +@@ -387,7 +387,7 @@ mips_return_value_location (Dwarf_Die *f + else + return nloc_intregpair; + } +- } ++ } // fallthrough + + /* Fallthrough to handle large types */ + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/fixheadercheck.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/fixheadercheck.patch new file mode 100644 index 000000000..5de3b24c8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/fixheadercheck.patch @@ -0,0 +1,23 @@ +For some binaries we can get a invalid section alignment, for example if +sh_align = 1 and sh_addralign is 0. In the case of a zero size section like +".note.GNU-stack", this is irrelavent as far as I can tell and we shouldn't +error in this case. + +RP 2014/6/11 + +Upstream-Status: Pending + +diff --git a/libelf/elf32_updatenull.c b/libelf/elf32_updatenull.c +--- a/libelf/elf32_updatenull.c ++++ b/libelf/elf32_updatenull.c +@@ -339,8 +339,8 @@ __elfw2(LIBELFBITS,updatenull_wrlock) (Elf *elf, int *change_bop, size_t shnum) + we test for the alignment of the section being large + enough for the largest alignment required by a data + block. */ +- if (unlikely (! powerof2 (shdr->sh_addralign)) +- || unlikely ((shdr->sh_addralign ?: 1) < sh_align)) ++ if (shdr->sh_size && (unlikely (! powerof2 (shdr->sh_addralign)) ++ || unlikely ((shdr->sh_addralign ?: 1) < sh_align))) + { + __libelf_seterrno (ELF_E_INVALID_ALIGN); + return -1; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/shadow.patch b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/shadow.patch new file mode 100644 index 000000000..d31961f94 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils-0.168/shadow.patch @@ -0,0 +1,23 @@ +Fix control path where we have str as uninitialized string + +| /home/ubuntu/work/oe/openembedded-core/build/tmp-musl/work/i586-oe-linux-musl/elfutils/0.164-r0/elfutils-0.164/libcpu/i386_disasm.c: In function 'i386_disasm': +| /home/ubuntu/work/oe/openembedded-core/build/tmp-musl/work/i586-oe-linux-musl/elfutils/0.164-r0/elfutils-0.164/libcpu/i386_disasm.c:310:5: error: 'str' may be used uninitialized in this function [-Werror=maybe-uninitialized] +| memcpy (buf + bufcnt, _str, _len); \ +| ^ +| /home/ubuntu/work/oe/openembedded-core/build/tmp-musl/work/i586-oe-linux-musl/elfutils/0.164-r0/elfutils-0.164/libcpu/i386_disasm.c:709:17: note: 'str' was declared here +| const char *str; + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: elfutils-0.164/libcpu/i386_disasm.c +=================================================================== +--- elfutils-0.164.orig/libcpu/i386_disasm.c ++++ elfutils-0.164/libcpu/i386_disasm.c +@@ -821,6 +821,7 @@ i386_disasm (const uint8_t **startp, con + } + + default: ++ str = ""; + assert (! 
"INVALID not handled"); + } + } diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.148.bb b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.148.bb deleted file mode 100644 index d18b732fe..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.148.bb +++ /dev/null @@ -1,90 +0,0 @@ -SUMMARY = "Utilities and libraries for handling compiled object files" -HOMEPAGE = "https://fedorahosted.org/elfutils" -SECTION = "base" -LICENSE = "(GPL-2+ & Elfutils-Exception)" -LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3\ - file://EXCEPTION;md5=570adcb0c1218ab57f2249c67d0ce417" -DEPENDS = "libtool bzip2 zlib virtual/libintl" - -PR = "r11" - -SRC_URI = "ftp://sourceware.org/pub/elfutils/${PV}/${BP}.tar.bz2" -SRC_URI[md5sum] = "a0bed1130135f17ad27533b0034dba8d" -SRC_URI[sha256sum] = "8aebfa4a745db21cf5429c9541fe482729b62efc7e53e9110151b4169fe887da" - -# pick the patch from debian -# http://ftp.de.debian.org/debian/pool/main/e/elfutils/elfutils_0.148-1.debian.tar.gz - -SRC_URI += "\ - file://redhat-portability.diff \ - file://redhat-robustify.diff \ - file://hppa_backend.diff \ - file://arm_backend.diff \ - file://mips_backend.diff \ - file://m68k_backend.diff \ - file://testsuite-ignore-elflint.diff \ - file://elf_additions.diff \ - file://elfutils-fsize.patch \ - file://remove-unused.patch \ - file://fix_for_gcc-4.7.patch \ - file://dso-link-change.patch \ - file://nm-Fix-size-passed-to-snprintf-for-invalid-sh_name-case.patch \ - file://elfutils-ar-c-fix-num-passed-to-memset.patch \ - file://Fix_elf_cvt_gunhash.patch \ - file://elf_begin.c-CVE-2014-9447-fix.patch \ - file://fix-build-gcc-4.8.patch \ - file://gcc6.patch \ -" -# Only apply when building uclibc based target recipe -SRC_URI_append_libc-uclibc = " file://uclibc-support-for-elfutils-0.148.patch" - -# The buildsystem wants to generate 2 .h files from source using a binary it just built, -# which can not pass the cross compiling, so let's work around it by adding 2 .h files -# along with the do_configure_prepend() - -SRC_URI += "\ - file://i386_dis.h \ - file://x86_64_dis.h \ -" -inherit autotools gettext - -EXTRA_OECONF = "--program-prefix=eu- --without-lzma" -EXTRA_OECONF_append_class-native = " --without-bzlib" -EXTRA_OECONF_append_libc-uclibc = " --enable-uclibc" - -do_configure_prepend() { - sed -i '/^i386_dis.h:/,+4 {/.*/d}' ${S}/libcpu/Makefile.am - - cp ${WORKDIR}/*dis.h ${S}/libcpu -} - -# we can not build complete elfutils when using uclibc -# but some recipes e.g. gcc 4.5 depends on libelf so we -# build only libelf for uclibc case - -EXTRA_OEMAKE_libc-uclibc = "-C libelf" -EXTRA_OEMAKE_class-native = "" -EXTRA_OEMAKE_class-nativesdk = "" - -BBCLASSEXTEND = "native nativesdk" - -# Package utilities separately -PACKAGES =+ "${PN}-binutils libelf libasm libdw" -FILES_${PN}-binutils = "\ - ${bindir}/eu-addr2line \ - ${bindir}/eu-ld \ - ${bindir}/eu-nm \ - ${bindir}/eu-readelf \ - ${bindir}/eu-size \ - ${bindir}/eu-strip" - -FILES_libelf = "${libdir}/libelf-${PV}.so ${libdir}/libelf.so.*" -FILES_libasm = "${libdir}/libasm-${PV}.so ${libdir}/libasm.so.*" -FILES_libdw = "${libdir}/libdw-${PV}.so ${libdir}/libdw.so.* ${libdir}/elfutils/lib*" -# Some packages have the version preceeding the .so instead properly -# versioned .so., so we need to reorder and repackage. 
-#FILES_${PN} += "${libdir}/*-${PV}.so ${base_libdir}/*-${PV}.so" -#FILES_SOLIBSDEV = "${libdir}/libasm.so ${libdir}/libdw.so ${libdir}/libelf.so" - -# The package contains symlinks that trip up insane -INSANE_SKIP_${MLPREFIX}libdw = "dev-so" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.166.bb b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.166.bb deleted file mode 100644 index 3593c1c1d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.166.bb +++ /dev/null @@ -1,94 +0,0 @@ -SUMMARY = "Utilities and libraries for handling compiled object files" -HOMEPAGE = "https://sourceware.org/elfutils" -SECTION = "base" -LICENSE = "(GPLv3 & Elfutils-Exception)" -LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" -DEPENDS = "libtool bzip2 zlib virtual/libintl" -DEPENDS_append_libc-musl = " argp-standalone fts " - -SRC_URI = "ftp://sourceware.org/pub/elfutils/${PV}/${BP}.tar.bz2" -SRC_URI[md5sum] = "d4e462b7891915dc5326bccefa2024ff" -SRC_URI[sha256sum] = "3c056914c8a438b210be0d790463b960fc79d234c3f05ce707cbff80e94cba30" - -SRC_URI += "\ - file://dso-link-change.patch \ - file://Fix_elf_cvt_gunhash.patch \ - file://fixheadercheck.patch \ - file://0001-elf_getarsym-Silence-Werror-maybe-uninitialized-fals.patch \ - file://0001-remove-the-unneed-checking.patch \ - file://0001-fix-a-stack-usage-warning.patch \ - file://aarch64_uio.patch \ - file://Fix_one_GCC7_warning.patch \ - file://0001-Add-GCC7-Wimplicit-fallthrough-support-fixes.patch \ - file://shadow.patch \ - file://0001-ar-Fix-GCC7-Wformat-length-issues.patch \ -" - -# pick the patch from debian -# http://ftp.de.debian.org/debian/pool/main/e/elfutils/elfutils_0.164-1.debian.tar.xz -SRC_URI += "\ - file://hppa_backend.diff \ - file://arm_backend.diff \ - file://mips_backend.diff \ - file://m68k_backend.diff \ - file://testsuite-ignore-elflint.diff \ - file://mips_readelf_w.patch \ - file://kfreebsd_path.patch \ - file://0001-Ignore-differences-between-mips-machine-identifiers.patch \ - file://0002-Add-support-for-mips64-abis-in-mips_retval.c.patch \ - file://0003-Add-mips-n64-relocation-format-hack.patch \ - file://uclibc-support.patch \ - file://elfcmp-fix-self-comparision.patch \ -" -# Fix the patches from Debian with GCC7 -SRC_URI += "file://fallthrough.patch" -SRC_URI_append_libc-musl = " file://0001-build-Provide-alternatives-for-glibc-assumptions-hel.patch " - -# The buildsystem wants to generate 2 .h files from source using a binary it just built, -# which can not pass the cross compiling, so let's work around it by adding 2 .h files -# along with the do_configure_prepend() - -inherit autotools gettext - -EXTRA_OECONF = "--program-prefix=eu- --without-lzma" -EXTRA_OECONF_append_class-native = " --without-bzlib" -EXTRA_OECONF_append_libc-uclibc = " --enable-uclibc" - -do_install_append() { - if [ "${TARGET_ARCH}" != "x86_64" ] && [ -z `echo "${TARGET_ARCH}"|grep 'i.86'` ];then - rm -f ${D}${bindir}/eu-objdump - fi -} - -# we can not build complete elfutils when using uclibc -# but some recipes e.g. 
gcc 4.5 depends on libelf so we -# build only libelf for uclibc case - -EXTRA_OEMAKE_libc-uclibc = "-C libelf" -EXTRA_OEMAKE_class-native = "" -EXTRA_OEMAKE_class-nativesdk = "" - -ALLOW_EMPTY_${PN}_libc-musl = "1" - -BBCLASSEXTEND = "native nativesdk" - -# Package utilities separately -PACKAGES =+ "${PN}-binutils libelf libasm libdw" -FILES_${PN}-binutils = "\ - ${bindir}/eu-addr2line \ - ${bindir}/eu-ld \ - ${bindir}/eu-nm \ - ${bindir}/eu-readelf \ - ${bindir}/eu-size \ - ${bindir}/eu-strip" - -FILES_libelf = "${libdir}/libelf-${PV}.so ${libdir}/libelf.so.*" -FILES_libasm = "${libdir}/libasm-${PV}.so ${libdir}/libasm.so.*" -FILES_libdw = "${libdir}/libdw-${PV}.so ${libdir}/libdw.so.* ${libdir}/elfutils/lib*" -# Some packages have the version preceeding the .so instead properly -# versioned .so., so we need to reorder and repackage. -#FILES_${PN} += "${libdir}/*-${PV}.so ${base_libdir}/*-${PV}.so" -#FILES_SOLIBSDEV = "${libdir}/libasm.so ${libdir}/libdw.so ${libdir}/libelf.so" - -# The package contains symlinks that trip up insane -INSANE_SKIP_${MLPREFIX}libdw = "dev-so" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.168.bb b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.168.bb new file mode 100644 index 000000000..b977ce0ea --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/elfutils/elfutils_0.168.bb @@ -0,0 +1,90 @@ +SUMMARY = "Utilities and libraries for handling compiled object files" +HOMEPAGE = "https://sourceware.org/elfutils" +SECTION = "base" +LICENSE = "(GPLv3 & Elfutils-Exception)" +LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" +DEPENDS = "libtool bzip2 zlib virtual/libintl" +DEPENDS_append_libc-musl = " argp-standalone fts " +SRC_URI = "https://sourceware.org/elfutils/ftp/${PV}/${BP}.tar.bz2" +SRC_URI[md5sum] = "52adfa40758d0d39e5d5c57689bf38d6" +SRC_URI[sha256sum] = "b88d07893ba1373c7dd69a7855974706d05377766568a7d9002706d5de72c276" + +SRC_URI += "\ + file://dso-link-change.patch \ + file://Fix_elf_cvt_gunhash.patch \ + file://fixheadercheck.patch \ + file://0001-elf_getarsym-Silence-Werror-maybe-uninitialized-fals.patch \ + file://0001-remove-the-unneed-checking.patch \ + file://0001-fix-a-stack-usage-warning.patch \ + file://aarch64_uio.patch \ + file://Fix_one_GCC7_warning.patch \ + file://shadow.patch \ +" + +# pick the patch from debian +# http://ftp.de.debian.org/debian/pool/main/e/elfutils/elfutils_0.168-0.2.debian.tar.xz +SRC_URI += "\ + file://debian/hppa_backend.diff \ + file://debian/arm_backend.diff \ + file://debian/mips_backend.diff \ + file://debian/testsuite-ignore-elflint.diff \ + file://debian/mips_readelf_w.patch \ + file://debian/kfreebsd_path.patch \ + file://debian/0001-Ignore-differences-between-mips-machine-identifiers.patch \ + file://debian/0002-Add-support-for-mips64-abis-in-mips_retval.c.patch \ + file://debian/0003-Add-mips-n64-relocation-format-hack.patch \ + file://debian/hurd_path.patch \ + file://debian/ignore_strmerge.diff \ +" +# Fix the patches from Debian with GCC7 +SRC_URI += "file://fallthrough.patch" +SRC_URI_append_libc-musl = " file://0001-build-Provide-alternatives-for-glibc-assumptions-hel.patch " + +# The buildsystem wants to generate 2 .h files from source using a binary it just built, +# which can not pass the cross compiling, so let's work around it by adding 2 .h files +# along with the do_configure_prepend() + +inherit autotools gettext + +EXTRA_OECONF = "--program-prefix=eu- --without-lzma" +EXTRA_OECONF_append_class-native = " 
--without-bzlib" +EXTRA_OECONF_append_libc-uclibc = " --enable-uclibc" + +do_install_append() { + if [ "${TARGET_ARCH}" != "x86_64" ] && [ -z `echo "${TARGET_ARCH}"|grep 'i.86'` ];then + rm -f ${D}${bindir}/eu-objdump + fi +} + +# we can not build complete elfutils when using uclibc +# but some recipes e.g. gcc 4.5 depends on libelf so we +# build only libelf for uclibc case + +EXTRA_OEMAKE_libc-uclibc = "-C libelf" +EXTRA_OEMAKE_class-native = "" +EXTRA_OEMAKE_class-nativesdk = "" + +ALLOW_EMPTY_${PN}_libc-musl = "1" + +BBCLASSEXTEND = "native nativesdk" + +# Package utilities separately +PACKAGES =+ "${PN}-binutils libelf libasm libdw" +FILES_${PN}-binutils = "\ + ${bindir}/eu-addr2line \ + ${bindir}/eu-ld \ + ${bindir}/eu-nm \ + ${bindir}/eu-readelf \ + ${bindir}/eu-size \ + ${bindir}/eu-strip" + +FILES_libelf = "${libdir}/libelf-${PV}.so ${libdir}/libelf.so.*" +FILES_libasm = "${libdir}/libasm-${PV}.so ${libdir}/libasm.so.*" +FILES_libdw = "${libdir}/libdw-${PV}.so ${libdir}/libdw.so.* ${libdir}/elfutils/lib*" +# Some packages have the version preceeding the .so instead properly +# versioned .so., so we need to reorder and repackage. +#FILES_${PN} += "${libdir}/*-${PV}.so ${base_libdir}/*-${PV}.so" +#FILES_SOLIBSDEV = "${libdir}/libasm.so ${libdir}/libdw.so ${libdir}/libelf.so" + +# The package contains symlinks that trip up insane +INSANE_SKIP_${MLPREFIX}libdw = "dev-so" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/expect/expect/0001-Resolve-string-formatting-issues.patch b/import-layers/yocto-poky/meta/recipes-devtools/expect/expect/0001-Resolve-string-formatting-issues.patch new file mode 100644 index 000000000..af1d8c626 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/expect/expect/0001-Resolve-string-formatting-issues.patch @@ -0,0 +1,29 @@ +From 107cc370705d8520ba42f1416d89ed3544277c83 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Thu, 23 Mar 2017 13:44:41 +0200 +Subject: [PATCH] Resolve string formatting issues. + +Upstream-Status: Inappropriate [upstream seems dead] +Signed-off-by: Alexander Kanavin +--- + exp_clib.c | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/exp_clib.c b/exp_clib.c +index 172c05e..809200e 100644 +--- a/exp_clib.c ++++ b/exp_clib.c +@@ -1476,8 +1476,8 @@ expDiagLogU(str) + char *str; + { + if (exp_is_debugging) { +- fprintf(stderr,str); +- if (exp_logfile) fprintf(exp_logfile,str); ++ fprintf(stderr, "%s", str); ++ if (exp_logfile) fprintf(exp_logfile, "%s", str); + } + } + +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/expect/expect_5.45.bb b/import-layers/yocto-poky/meta/recipes-devtools/expect/expect_5.45.bb index b4dfe157c..630f2e464 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/expect/expect_5.45.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/expect/expect_5.45.bb @@ -25,6 +25,7 @@ SRC_URI = "${SOURCEFORGE_MIRROR}/expect/Expect/${PV}/${BPN}${PV}.tar.gz \ file://0002-tcl.m4.patch \ file://01-example-shebang.patch \ file://0001-expect-install-scripts-without-using-the-fixline1-tc.patch \ + file://0001-Resolve-string-formatting-issues.patch \ " SRC_URI[md5sum] = "44e1a4f4c877e9ddc5a542dfa7ecc92b" SRC_URI[sha256sum] = "b28dca90428a3b30e650525cdc16255d76bb6ccd65d448be53e620d95d5cc040" @@ -43,11 +44,16 @@ do_install_append() { sed -e 's|$dir|${libdir}|' -i ${D}${libdir}/expect${PV}/pkgIndex.tcl } +# Apparently the public Tcl headers are only in /usr/include/tcl8.6 +# when building for the target. 
+TCL_INCLUDE_PATH = "" +TCL_INCLUDE_PATH_class-target = "--with-tclinclude=${STAGING_INCDIR}/tcl8.6" + EXTRA_OECONF += "--with-tcl=${STAGING_LIBDIR} \ - --with-tclinclude=${STAGING_INCDIR}/tcl8.6 \ --enable-shared \ --enable-threads \ --disable-rpath \ + ${TCL_INCLUDE_PATH} \ " EXTRA_OEMAKE_install = " 'SCRIPTS=' " @@ -62,3 +68,5 @@ FILES_${PN}-dev = "${libdir_native}/expect${PV}/libexpect*.so \ FILES_${PN} += "${libdir}/libexpect${PV}.so \ ${libdir}/expect${PV}/* \ " + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/fdisk/gptfdisk_1.0.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/fdisk/gptfdisk_1.0.1.bb index 8fab28f33..d62a903a7 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/fdisk/gptfdisk_1.0.1.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/fdisk/gptfdisk_1.0.1.bb @@ -13,7 +13,7 @@ SRC_URI[sha256sum] = "864c8aee2efdda50346804d7e6230407d5f42a8ae754df70404dd8b2fd UPSTREAM_CHECK_URI = "http://sourceforge.net/projects/gptfdisk/files/gptfdisk/" UPSTREAM_CHECK_REGEX = "/gptfdisk/(?P(\d+[\.\-_]*)+)/" -EXTRA_OEMAKE = "-e MAKEFLAGS=" +EXTRA_OEMAKE = "'CC=${CC}' 'CXX=${CXX}'" do_install() { install -d ${D}${sbindir} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/file/file_5.28.bb b/import-layers/yocto-poky/meta/recipes-devtools/file/file_5.28.bb deleted file mode 100644 index e64a89c80..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/file/file_5.28.bb +++ /dev/null @@ -1,48 +0,0 @@ -SUMMARY = "File classification tool" -DESCRIPTION = "File attempts to classify files depending \ -on their contents and prints a description if a match is found." -HOMEPAGE = "http://www.darwinsys.com/file/" -SECTION = "console/utils" - -# two clause BSD -LICENSE = "BSD" -LIC_FILES_CHKSUM = "file://COPYING;beginline=2;md5=6a7382872edb68d33e1a9398b6e03188" - -DEPENDS = "zlib file-replacement-native" -DEPENDS_class-native = "zlib-native" - -# Blacklist a bogus tag in upstream check -UPSTREAM_CHECK_GITTAGREGEX = "FILE(?P(?!6_23).+)" - -SRC_URI = "git://github.com/file/file.git \ - file://debian-742262.patch \ - file://0001-Add-P-prompt-into-Usage-info.patch \ - " - -SRCREV = "3c521817322a6bf5160cfeb09b9145ccde587b2a" -S = "${WORKDIR}/git" - -inherit autotools - -EXTRA_OEMAKE_append_class-target = "-e FILE_COMPILE=${STAGING_BINDIR_NATIVE}/file-native/file" -EXTRA_OEMAKE_append_class-nativesdk = "-e FILE_COMPILE=${STAGING_BINDIR_NATIVE}/file-native/file" - -CFLAGS_append = " -std=c99" - -FILES_${PN} += "${datadir}/misc/*.mgc" - -do_install_append_class-native() { - create_cmdline_wrapper ${D}/${bindir}/file \ - --magic-file ${datadir}/misc/magic.mgc -} - -do_install_append_class-nativesdk() { - create_cmdline_wrapper ${D}/${bindir}/file \ - --magic-file ${datadir}/misc/magic.mgc -} - -BBCLASSEXTEND = "native nativesdk" -PROVIDES_append_class-native = " file-replacement-native" -# Don't use NATIVE_PACKAGE_PATH_SUFFIX as that hides libmagic from anyone who -# depends on file-replacement-native. -bindir_append_class-native = "/file-native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/file/file_5.30.bb b/import-layers/yocto-poky/meta/recipes-devtools/file/file_5.30.bb new file mode 100644 index 000000000..0998fcfa8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/file/file_5.30.bb @@ -0,0 +1,48 @@ +SUMMARY = "File classification tool" +DESCRIPTION = "File attempts to classify files depending \ +on their contents and prints a description if a match is found." 
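For reference, the content-based classification described in the DESCRIPTION above is what the bundled libmagic library exposes to C programs; the magic.mgc database this recipe installs under ${datadir}/misc is typically what the default load path resolves to. A minimal illustrative sketch, not part of the recipe (build with -lmagic; the argument handling is invented):

/* Illustrative only: classify a file by content with libmagic,
   the library packaged alongside the file(1) tool. */
#include <magic.h>
#include <stdio.h>

int main (int argc, char **argv)
{
  if (argc < 2)
    return 1;

  magic_t cookie = magic_open (MAGIC_NONE);
  if (cookie == NULL)
    return 1;

  /* NULL means the default database, typically the installed magic.mgc. */
  if (magic_load (cookie, NULL) != 0)
    {
      fprintf (stderr, "magic_load: %s\n", magic_error (cookie));
      magic_close (cookie);
      return 1;
    }

  const char *desc = magic_file (cookie, argv[1]);
  printf ("%s: %s\n", argv[1], desc ? desc : magic_error (cookie));
  magic_close (cookie);
  return 0;
}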
+HOMEPAGE = "http://www.darwinsys.com/file/" +SECTION = "console/utils" + +# two clause BSD +LICENSE = "BSD" +LIC_FILES_CHKSUM = "file://COPYING;beginline=2;md5=6a7382872edb68d33e1a9398b6e03188" + +DEPENDS = "zlib file-replacement-native" +DEPENDS_class-native = "zlib-native" + +# Blacklist a bogus tag in upstream check +UPSTREAM_CHECK_GITTAGREGEX = "FILE(?P(?!6_23).+)" + +SRC_URI = "git://github.com/file/file.git \ + file://debian-742262.patch \ + file://0001-Add-P-prompt-into-Usage-info.patch \ + " + +SRCREV = "79814950aafb81ecd6a910c2a8a3b8ec12f3e4a6" +S = "${WORKDIR}/git" + +inherit autotools + +EXTRA_OEMAKE_append_class-target = "-e FILE_COMPILE=${STAGING_BINDIR_NATIVE}/file-native/file" +EXTRA_OEMAKE_append_class-nativesdk = "-e FILE_COMPILE=${STAGING_BINDIR_NATIVE}/file-native/file" + +CFLAGS_append = " -std=c99" + +FILES_${PN} += "${datadir}/misc/*.mgc" + +do_install_append_class-native() { + create_cmdline_wrapper ${D}/${bindir}/file \ + --magic-file ${datadir}/misc/magic.mgc +} + +do_install_append_class-nativesdk() { + create_cmdline_wrapper ${D}/${bindir}/file \ + --magic-file ${datadir}/misc/magic.mgc +} + +BBCLASSEXTEND = "native nativesdk" +PROVIDES_append_class-native = " file-replacement-native" +# Don't use NATIVE_PACKAGE_PATH_SUFFIX as that hides libmagic from anyone who +# depends on file-replacement-native. +bindir_append_class-native = "/file-native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-5.4.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-5.4.inc index 338530fd6..b7696756a 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-5.4.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-5.4.inc @@ -89,6 +89,7 @@ SRC_URI = "\ file://0057-unwind-fix-for-musl.patch \ file://0058-fdebug-prefix-map-support-to-remap-relative-path.patch \ file://0059-libgcc-use-ldflags.patch \ + file://CVE-2016-6131.patch \ " BACKPORTS = "" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-5.4/CVE-2016-6131.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-5.4/CVE-2016-6131.patch new file mode 100644 index 000000000..88524c342 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-5.4/CVE-2016-6131.patch @@ -0,0 +1,251 @@ +From b3f6b32165d3f437bd0ac6269c3c499b68ecf036 Mon Sep 17 00:00:00 2001 +From: law +Date: Thu, 4 Aug 2016 16:53:18 +0000 +Subject: [PATCH] Fix for PR71696 in Libiberty Demangler +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +[BZ #71696] -- https://gcc.gnu.org/bugzilla/show_bug.cgi?id=71696 + +2016-08-04 Marcel Böhme + + PR c++/71696 + * cplus-dem.c: Prevent infinite recursion when there is a cycle + in the referencing of remembered mangled types. + (work_stuff): New stack to keep track of the remembered mangled + types that are currently being processed. + (push_processed_type): New method to push currently processed + remembered type onto the stack. + (pop_processed_type): New method to pop currently processed + remembered type from the stack. + (work_stuff_copy_to_from): Copy values of new variables. + (delete_non_B_K_work_stuff): Free stack memory. + (demangle_args): Push/Pop currently processed remembered type. + (do_type): Do not demangle a cyclic reference and push/pop + referenced remembered type. 
+ +cherry-picked from commit of +git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@239143 138bc75d-0d04-0410-961f-82ee72b054a4 + +Upstream-Status: Backport [master] +CVE: CVE-2016-6131 +Signed-off-by: Yuanjie Huang +--- + libiberty/ChangeLog | 17 ++++++++ + libiberty/cplus-dem.c | 78 ++++++++++++++++++++++++++++++++--- + libiberty/testsuite/demangle-expected | 18 ++++++++ + 3 files changed, 108 insertions(+), 5 deletions(-) + +diff --git a/libiberty/ChangeLog b/libiberty/ChangeLog +index 9859ad3..7939480 100644 +--- a/libiberty/ChangeLog ++++ b/libiberty/ChangeLog +@@ -1,3 +1,20 @@ ++2016-08-04 Marcel Böhme ++ ++ PR c++/71696 ++ * cplus-dem.c: Prevent infinite recursion when there is a cycle ++ in the referencing of remembered mangled types. ++ (work_stuff): New stack to keep track of the remembered mangled ++ types that are currently being processed. ++ (push_processed_type): New method to push currently processed ++ remembered type onto the stack. ++ (pop_processed_type): New method to pop currently processed ++ remembered type from the stack. ++ (work_stuff_copy_to_from): Copy values of new variables. ++ (delete_non_B_K_work_stuff): Free stack memory. ++ (demangle_args): Push/Pop currently processed remembered type. ++ (do_type): Do not demangle a cyclic reference and push/pop ++ referenced remembered type. ++ + 2016-06-03 Release Manager + + * GCC 5.4.0 released. +diff --git a/libiberty/cplus-dem.c b/libiberty/cplus-dem.c +index 7514e57..f21e630 100644 +--- a/libiberty/cplus-dem.c ++++ b/libiberty/cplus-dem.c +@@ -144,6 +144,9 @@ struct work_stuff + string* previous_argument; /* The last function argument demangled. */ + int nrepeats; /* The number of times to repeat the previous + argument. */ ++ int *proctypevec; /* Indices of currently processed remembered typevecs. */ ++ int proctypevec_size; ++ int nproctypes; + }; + + #define PRINT_ANSI_QUALIFIERS (work -> options & DMGL_ANSI) +@@ -435,6 +438,10 @@ iterate_demangle_function (struct work_stuff *, + + static void remember_type (struct work_stuff *, const char *, int); + ++static void push_processed_type (struct work_stuff *, int); ++ ++static void pop_processed_type (struct work_stuff *); ++ + static void remember_Btype (struct work_stuff *, const char *, int, int); + + static int register_Btype (struct work_stuff *); +@@ -1301,6 +1308,10 @@ work_stuff_copy_to_from (struct work_stuff *to, struct work_stuff *from) + memcpy (to->btypevec[i], from->btypevec[i], len); + } + ++ if (from->proctypevec) ++ to->proctypevec = ++ XDUPVEC (int, from->proctypevec, from->proctypevec_size); ++ + if (from->ntmpl_args) + to->tmpl_argvec = XNEWVEC (char *, from->ntmpl_args); + +@@ -1329,11 +1340,17 @@ delete_non_B_K_work_stuff (struct work_stuff *work) + /* Discard the remembered types, if any. 
*/ + + forget_types (work); +- if (work -> typevec != NULL) ++ if (work->typevec != NULL) + { +- free ((char *) work -> typevec); +- work -> typevec = NULL; +- work -> typevec_size = 0; ++ free ((char *) work->typevec); ++ work->typevec = NULL; ++ work->typevec_size = 0; ++ } ++ if (work->proctypevec != NULL) ++ { ++ free (work->proctypevec); ++ work->proctypevec = NULL; ++ work->proctypevec_size = 0; + } + if (work->tmpl_argvec) + { +@@ -3552,6 +3569,8 @@ static int + do_type (struct work_stuff *work, const char **mangled, string *result) + { + int n; ++ int i; ++ int is_proctypevec; + int done; + int success; + string decl; +@@ -3564,6 +3583,7 @@ do_type (struct work_stuff *work, const char **mangled, string *result) + + done = 0; + success = 1; ++ is_proctypevec = 0; + while (success && !done) + { + int member; +@@ -3616,8 +3636,15 @@ do_type (struct work_stuff *work, const char **mangled, string *result) + success = 0; + } + else ++ for (i = 0; i < work->nproctypes; i++) ++ if (work -> proctypevec [i] == n) ++ success = 0; ++ ++ if (success) + { +- remembered_type = work -> typevec[n]; ++ is_proctypevec = 1; ++ push_processed_type (work, n); ++ remembered_type = work->typevec[n]; + mangled = &remembered_type; + } + break; +@@ -3840,6 +3867,9 @@ do_type (struct work_stuff *work, const char **mangled, string *result) + string_delete (result); + string_delete (&decl); + ++ if (is_proctypevec) ++ pop_processed_type (work); ++ + if (success) + /* Assume an integral type, if we're not sure. */ + return (int) ((tk == tk_none) ? tk_integral : tk); +@@ -4252,6 +4282,41 @@ do_arg (struct work_stuff *work, const char **mangled, string *result) + } + + static void ++push_processed_type (struct work_stuff *work, int typevec_index) ++{ ++ if (work->nproctypes >= work->proctypevec_size) ++ { ++ if (!work->proctypevec_size) ++ { ++ work->proctypevec_size = 4; ++ work->proctypevec = XNEWVEC (int, work->proctypevec_size); ++ } ++ else ++ { ++ if (work->proctypevec_size < 16) ++ /* Double when small. */ ++ work->proctypevec_size *= 2; ++ else ++ { ++ /* Grow slower when large. 
*/ ++ if (work->proctypevec_size > (INT_MAX / 3) * 2) ++ xmalloc_failed (INT_MAX); ++ work->proctypevec_size = (work->proctypevec_size * 3 / 2); ++ } ++ work->proctypevec ++ = XRESIZEVEC (int, work->proctypevec, work->proctypevec_size); ++ } ++ } ++ work->proctypevec [work->nproctypes++] = typevec_index; ++} ++ ++static void ++pop_processed_type (struct work_stuff *work) ++{ ++ work->nproctypes--; ++} ++ ++static void + remember_type (struct work_stuff *work, const char *start, int len) + { + char *tem; +@@ -4515,10 +4580,13 @@ demangle_args (struct work_stuff *work, const char **mangled, + { + string_append (declp, ", "); + } ++ push_processed_type (work, t); + if (!do_arg (work, &tem, &arg)) + { ++ pop_processed_type (work); + return (0); + } ++ pop_processed_type (work); + if (PRINT_ARG_TYPES) + { + string_appends (declp, &arg); +diff --git a/libiberty/testsuite/demangle-expected b/libiberty/testsuite/demangle-expected +index 1d8b771..d690b23 100644 +--- a/libiberty/testsuite/demangle-expected ++++ b/libiberty/testsuite/demangle-expected +@@ -4429,3 +4429,21 @@ __vt_90000000000cafebabe + + _Z80800000000000000000000 + _Z80800000000000000000000 ++# ++# Tests write access violation PR70926 ++ ++0__Ot2m02R5T0000500000 ++0__Ot2m02R5T0000500000 ++# ++ ++0__GT50000000000_ ++0__GT50000000000_ ++# ++ ++__t2m05B500000000000000000_ ++__t2m05B500000000000000000_ ++# ++# Tests stack overflow PR71696 ++ ++__10%0__S4_0T0T0 ++%0<>::%0(%0<>) +-- +2.9.3 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2.inc deleted file mode 100644 index 39ae65380..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2.inc +++ /dev/null @@ -1,134 +0,0 @@ -require gcc-common.inc - -# Third digit in PV should be incremented after a minor release - -PV = "6.2.0" - -# BINV should be incremented to a revision after a minor gcc release - -BINV = "6.2.0" - -FILESEXTRAPATHS =. 
"${FILE_DIRNAME}/gcc-6.2:${FILE_DIRNAME}/gcc-6.2/backport:" - -DEPENDS =+ "mpfr gmp libmpc zlib" -NATIVEDEPS = "mpfr-native gmp-native libmpc-native zlib-native" - -LICENSE = "GPL-3.0-with-GCC-exception & GPLv3" - -LIC_FILES_CHKSUM = "\ - file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \ - file://COPYING3;md5=d32239bcb673463ab874e80d47fae504 \ - file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6 \ - file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1 \ - file://COPYING.RUNTIME;md5=fe60d87048567d4fe8c8a0ed2448bcc8 \ -" - - -BASEURI ?= "${GNU_MIRROR}/gcc/gcc-${PV}/gcc-${PV}.tar.bz2" -#SRCREV = "bd9a826d5448db11d29d2ec5884e7e679066f140" -#BASEURI ?= "git://github.com/gcc-mirror/gcc;branch=gcc-6-branch;protocol=git" -#BASEURI ?= "ftp://sourceware.org/pub/gcc/snapshots/6.2.0-RC-20160815/gcc-6.2.0-RC-20160815.tar.bz2" - -SRC_URI = "\ - ${BASEURI} \ - file://0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch \ - file://0002-uclibc-conf.patch \ - file://0003-gcc-uclibc-locale-ctype_touplow_t.patch \ - file://0004-uclibc-locale.patch \ - file://0005-uclibc-locale-no__x.patch \ - file://0006-uclibc-locale-wchar_fix.patch \ - file://0007-uclibc-locale-update.patch \ - file://0008-missing-execinfo_h.patch \ - file://0009-c99-snprintf.patch \ - file://0010-gcc-poison-system-directories.patch \ - file://0011-gcc-poison-dir-extend.patch \ - file://0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch \ - file://0013-64-bit-multilib-hack.patch \ - file://0014-optional-libstdc.patch \ - file://0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch \ - file://0016-COLLECT_GCC_OPTIONS.patch \ - file://0017-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch \ - file://0018-fortran-cross-compile-hack.patch \ - file://0019-cpp-honor-sysroot.patch \ - file://0020-MIPS64-Default-to-N64-ABI.patch \ - file://0021-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch \ - file://0022-gcc-Fix-argument-list-too-long-error.patch \ - file://0023-Disable-sdt.patch \ - file://0024-libtool.patch \ - file://0025-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch \ - file://0026-Use-the-multilib-config-files-from-B-instead-of-usin.patch \ - file://0027-Avoid-using-libdir-from-.la-which-usually-points-to-.patch \ - file://0028-export-CPP.patch \ - file://0029-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch \ - file://0030-Disable-the-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch \ - file://0031-Ensure-target-gcc-headers-can-be-included.patch \ - file://0032-gcc-4.8-won-t-build-with-disable-dependency-tracking.patch \ - file://0033-Don-t-search-host-directory-during-relink-if-inst_pr.patch \ - file://0034-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch \ - file://0035-aarch64-Add-support-for-musl-ldso.patch \ - file://0036-libcc1-fix-libcc1-s-install-path-and-rpath.patch \ - file://0037-handle-sysroot-support-for-nativesdk-gcc.patch \ - file://0038-Search-target-sysroot-gcc-version-specific-dirs-with.patch \ - file://0039-Fix-various-_FOR_BUILD-and-related-variables.patch \ - file://0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch \ - file://0041-ssp_nonshared.patch \ - file://0042-gcc-libcpp-support-ffile-prefix-map-old-new.patch \ - file://0043-Reuse-fdebug-prefix-map-to-replace-ffile-prefix-map.patch \ - file://0044-gcc-final.c-fdebug-prefix-map-support-to-remap-sourc.patch \ - file://0045-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch \ - file://0046-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch \ - file://0047-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch \ - ${BACKPORTS} \ -" 
-BACKPORTS = "\ - file://ubsan-fix-check-empty-string.patch \ -" -SRC_URI[md5sum] = "9768625159663b300ae4de2f4745fcc4" -SRC_URI[sha256sum] = "9944589fc722d3e66308c0ce5257788ebd7872982a718aa2516123940671b7c5" - -S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/gcc-${PV}" -#S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/git" -B = "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}" - -# Language Overrides -FORTRAN = "" -JAVA = "" - -LTO = "--enable-lto" - -EXTRA_OECONF_BASE = "\ - ${LTO} \ - --enable-libssp \ - --enable-libitm \ - --disable-bootstrap \ - --disable-libmudflap \ - --with-system-zlib \ - --with-linker-hash-style=${LINKER_HASH_STYLE} \ - --enable-linker-build-id \ - --with-ppl=no \ - --with-cloog=no \ - --enable-checking=release \ - --enable-cheaders=c_global \ - --without-isl \ -" - -EXTRA_OECONF_INITIAL = "\ - --disable-libmudflap \ - --disable-libgomp \ - --disable-libitm \ - --disable-libquadmath \ - --with-system-zlib \ - --disable-lto \ - --disable-plugin \ - --enable-decimal-float=no \ - --without-isl \ - gcc_cv_libc_provides_ssp=yes \ -" - -EXTRA_OECONF_append_libc-uclibc = " --disable-decimal-float " - -EXTRA_OECONF_PATHS = "\ - --with-gxx-include-dir=/not/exist{target_includedir}/c++/${BINV} \ - --with-sysroot=/not/exist \ - --with-build-sysroot=${STAGING_DIR_TARGET} \ -" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch deleted file mode 100644 index 415f091ee..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch +++ /dev/null @@ -1,42 +0,0 @@ -From 92ed30da16b7487b334f739be177eb39885ab772 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 08:37:11 +0400 -Subject: [PATCH 01/46] gcc-4.3.1: ARCH_FLAGS_FOR_TARGET - -Signed-off-by: Khem Raj - -Upstream-Status: Inappropriate [embedded specific] ---- - configure | 2 +- - configure.ac | 2 +- - 2 files changed, 2 insertions(+), 2 deletions(-) - -diff --git a/configure b/configure -index 35f231e..bfadc33 100755 ---- a/configure -+++ b/configure -@@ -7550,7 +7550,7 @@ fi - # for target_alias and gcc doesn't manage it consistently. - target_configargs="--cache-file=./config.cache ${target_configargs}" - --FLAGS_FOR_TARGET= -+FLAGS_FOR_TARGET="$ARCH_FLAGS_FOR_TARGET" - case " $target_configdirs " in - *" newlib "*) - case " $target_configargs " in -diff --git a/configure.ac b/configure.ac -index 74bf58a..197d61b 100644 ---- a/configure.ac -+++ b/configure.ac -@@ -3149,7 +3149,7 @@ fi - # for target_alias and gcc doesn't manage it consistently. 
- target_configargs="--cache-file=./config.cache ${target_configargs}" - --FLAGS_FOR_TARGET= -+FLAGS_FOR_TARGET="$ARCH_FLAGS_FOR_TARGET" - case " $target_configdirs " in - *" newlib "*) - case " $target_configargs " in --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0002-uclibc-conf.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0002-uclibc-conf.patch deleted file mode 100644 index 4d284ef86..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0002-uclibc-conf.patch +++ /dev/null @@ -1,53 +0,0 @@ -From 4efc5a258c812875743647d756f75c93c4d514a5 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 08:38:25 +0400 -Subject: [PATCH 02/46] uclibc-conf - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - contrib/regression/objs-gcc.sh | 4 ++++ - libjava/classpath/ltconfig | 4 ++-- - 2 files changed, 6 insertions(+), 2 deletions(-) - -diff --git a/contrib/regression/objs-gcc.sh b/contrib/regression/objs-gcc.sh -index 60b0497..6dc7ead 100755 ---- a/contrib/regression/objs-gcc.sh -+++ b/contrib/regression/objs-gcc.sh -@@ -106,6 +106,10 @@ if [ $H_REAL_TARGET = $H_REAL_HOST -a $H_REAL_TARGET = i686-pc-linux-gnu ] - then - make all-gdb all-dejagnu all-ld || exit 1 - make install-gdb install-dejagnu install-ld || exit 1 -+elif [ $H_REAL_TARGET = $H_REAL_HOST -a $H_REAL_TARGET = i686-pc-linux-uclibc ] -+ then -+ make all-gdb all-dejagnu all-ld || exit 1 -+ make install-gdb install-dejagnu install-ld || exit 1 - elif [ $H_REAL_TARGET = $H_REAL_HOST ] ; then - make bootstrap || exit 1 - make install || exit 1 -diff --git a/libjava/classpath/ltconfig b/libjava/classpath/ltconfig -index d318957..df55950 100755 ---- a/libjava/classpath/ltconfig -+++ b/libjava/classpath/ltconfig -@@ -603,7 +603,7 @@ host_os=`echo $host | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` - - # Transform linux* to *-*-linux-gnu*, to support old configure scripts. - case $host_os in --linux-gnu*) ;; -+linux-gnu*|linux-uclibc*) ;; - linux*) host=`echo $host | sed 's/^\(.*-.*-linux\)\(.*\)$/\1-gnu\2/'` - esac - -@@ -1247,7 +1247,7 @@ linux-gnuoldld* | linux-gnuaout* | linux-gnucoff*) - ;; - - # This must be Linux ELF. 
--linux-gnu*) -+linux*) - version_type=linux - need_lib_prefix=no - need_version=no --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0003-gcc-uclibc-locale-ctype_touplow_t.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0003-gcc-uclibc-locale-ctype_touplow_t.patch deleted file mode 100644 index df07febee..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0003-gcc-uclibc-locale-ctype_touplow_t.patch +++ /dev/null @@ -1,87 +0,0 @@ -From ad5fd283fc7ef04f66c7fb003805364ea3bd34e9 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 08:40:12 +0400 -Subject: [PATCH 03/46] gcc-uclibc-locale-ctype_touplow_t - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - libstdc++-v3/config/locale/generic/c_locale.cc | 5 +++++ - libstdc++-v3/config/locale/generic/c_locale.h | 9 +++++++++ - libstdc++-v3/config/os/gnu-linux/ctype_base.h | 9 +++++++++ - 3 files changed, 23 insertions(+) - -diff --git a/libstdc++-v3/config/locale/generic/c_locale.cc b/libstdc++-v3/config/locale/generic/c_locale.cc -index ef6ce8f..4740636 100644 ---- a/libstdc++-v3/config/locale/generic/c_locale.cc -+++ b/libstdc++-v3/config/locale/generic/c_locale.cc -@@ -273,5 +273,10 @@ _GLIBCXX_END_NAMESPACE_VERSION - #ifdef _GLIBCXX_LONG_DOUBLE_COMPAT - #define _GLIBCXX_LDBL_COMPAT(dbl, ldbl) \ - extern "C" void ldbl (void) __attribute__ ((alias (#dbl))) -+#ifdef __UCLIBC__ -+// This is because __c_locale is of type __ctype_touplow_t* which is short on uclibc. for glibc its int* -+_GLIBCXX_LDBL_COMPAT(_ZSt14__convert_to_vIdEvPKcRT_RSt12_Ios_IostateRKPs, _ZSt14__convert_to_vIeEvPKcRT_RSt12_Ios_IostateRKPs); -+#else - _GLIBCXX_LDBL_COMPAT(_ZSt14__convert_to_vIdEvPKcRT_RSt12_Ios_IostateRKPi, _ZSt14__convert_to_vIeEvPKcRT_RSt12_Ios_IostateRKPi); -+#endif - #endif // _GLIBCXX_LONG_DOUBLE_COMPAT -diff --git a/libstdc++-v3/config/locale/generic/c_locale.h b/libstdc++-v3/config/locale/generic/c_locale.h -index 794471e..d65f955 100644 ---- a/libstdc++-v3/config/locale/generic/c_locale.h -+++ b/libstdc++-v3/config/locale/generic/c_locale.h -@@ -40,13 +40,22 @@ - - #include - -+#ifdef __UCLIBC__ -+#include -+#include -+#endif -+ - #define _GLIBCXX_NUM_CATEGORIES 0 - - namespace std _GLIBCXX_VISIBILITY(default) - { - _GLIBCXX_BEGIN_NAMESPACE_VERSION - -+#ifdef __UCLIBC__ -+ typedef __ctype_touplow_t* __c_locale; -+#else - typedef int* __c_locale; -+#endif - - // Convert numeric value of type double and long double to string and - // return length of string. If vsnprintf is available use it, otherwise -diff --git a/libstdc++-v3/config/os/gnu-linux/ctype_base.h b/libstdc++-v3/config/os/gnu-linux/ctype_base.h -index 591c793..55eb0e9 100644 ---- a/libstdc++-v3/config/os/gnu-linux/ctype_base.h -+++ b/libstdc++-v3/config/os/gnu-linux/ctype_base.h -@@ -33,6 +33,11 @@ - - // Information as gleaned from /usr/include/ctype.h - -+#ifdef __UCLIBC__ -+#include -+#include -+#endif -+ - namespace std _GLIBCXX_VISIBILITY(default) - { - _GLIBCXX_BEGIN_NAMESPACE_VERSION -@@ -41,7 +46,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION - struct ctype_base - { - // Non-standard typedefs. -+#ifdef __UCLIBC__ -+ typedef const __ctype_touplow_t* __to_type; -+#else - typedef const int* __to_type; -+#endif - - // NB: Offsets into ctype::_M_table force a particular size - // on the mask type. Because of this, we don't use an enum. 
--- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0004-uclibc-locale.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0004-uclibc-locale.patch deleted file mode 100644 index ae2627c2e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0004-uclibc-locale.patch +++ /dev/null @@ -1,2862 +0,0 @@ -From 68bd083357e78678a9baac760beb2a31f00954a5 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 08:41:39 +0400 -Subject: [PATCH 04/46] uclibc-locale - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - libstdc++-v3/acinclude.m4 | 37 ++ - .../config/locale/uclibc/c++locale_internal.h | 63 ++ - libstdc++-v3/config/locale/uclibc/c_locale.cc | 160 +++++ - libstdc++-v3/config/locale/uclibc/c_locale.h | 117 ++++ - .../config/locale/uclibc/codecvt_members.cc | 308 +++++++++ - .../config/locale/uclibc/collate_members.cc | 80 +++ - libstdc++-v3/config/locale/uclibc/ctype_members.cc | 300 +++++++++ - .../config/locale/uclibc/messages_members.cc | 100 +++ - .../config/locale/uclibc/messages_members.h | 118 ++++ - .../config/locale/uclibc/monetary_members.cc | 692 +++++++++++++++++++++ - .../config/locale/uclibc/numeric_members.cc | 160 +++++ - libstdc++-v3/config/locale/uclibc/time_members.cc | 406 ++++++++++++ - libstdc++-v3/config/locale/uclibc/time_members.h | 68 ++ - libstdc++-v3/configure | 75 +++ - libstdc++-v3/include/c_compatibility/wchar.h | 2 + - libstdc++-v3/include/c_std/cwchar | 2 + - 16 files changed, 2688 insertions(+) - create mode 100644 libstdc++-v3/config/locale/uclibc/c++locale_internal.h - create mode 100644 libstdc++-v3/config/locale/uclibc/c_locale.cc - create mode 100644 libstdc++-v3/config/locale/uclibc/c_locale.h - create mode 100644 libstdc++-v3/config/locale/uclibc/codecvt_members.cc - create mode 100644 libstdc++-v3/config/locale/uclibc/collate_members.cc - create mode 100644 libstdc++-v3/config/locale/uclibc/ctype_members.cc - create mode 100644 libstdc++-v3/config/locale/uclibc/messages_members.cc - create mode 100644 libstdc++-v3/config/locale/uclibc/messages_members.h - create mode 100644 libstdc++-v3/config/locale/uclibc/monetary_members.cc - create mode 100644 libstdc++-v3/config/locale/uclibc/numeric_members.cc - create mode 100644 libstdc++-v3/config/locale/uclibc/time_members.cc - create mode 100644 libstdc++-v3/config/locale/uclibc/time_members.h - -diff --git a/libstdc++-v3/acinclude.m4 b/libstdc++-v3/acinclude.m4 -index b0f88cb..a0ee36b 100644 ---- a/libstdc++-v3/acinclude.m4 -+++ b/libstdc++-v3/acinclude.m4 -@@ -2358,6 +2358,9 @@ AC_DEFUN([GLIBCXX_ENABLE_CLOCALE], [ - # Default to "generic". - if test $enable_clocale_flag = auto; then - case ${target_os} in -+ *-uclibc*) -+ enable_clocale_flag=uclibc -+ ;; - linux* | gnu* | kfreebsd*-gnu | knetbsd*-gnu) - enable_clocale_flag=gnu - ;; -@@ -2542,6 +2545,40 @@ AC_DEFUN([GLIBCXX_ENABLE_CLOCALE], [ - CTIME_CC=config/locale/generic/time_members.cc - CLOCALE_INTERNAL_H=config/locale/generic/c++locale_internal.h - ;; -+ uclibc) -+ AC_MSG_RESULT(uclibc) -+ -+ # Declare intention to use gettext, and add support for specific -+ # languages. -+ # For some reason, ALL_LINGUAS has to be before AM-GNU-GETTEXT -+ ALL_LINGUAS="de fr" -+ -+ # Don't call AM-GNU-GETTEXT here. Instead, assume glibc. -+ AC_CHECK_PROG(check_msgfmt, msgfmt, yes, no) -+ if test x"$check_msgfmt" = x"yes" && test x"$enable_nls" = x"yes"; then -+ USE_NLS=yes -+ fi -+ # Export the build objects. 
-+ for ling in $ALL_LINGUAS; do \ -+ glibcxx_MOFILES="$glibcxx_MOFILES $ling.mo"; \ -+ glibcxx_POFILES="$glibcxx_POFILES $ling.po"; \ -+ done -+ AC_SUBST(glibcxx_MOFILES) -+ AC_SUBST(glibcxx_POFILES) -+ -+ CLOCALE_H=config/locale/uclibc/c_locale.h -+ CLOCALE_CC=config/locale/uclibc/c_locale.cc -+ CCODECVT_CC=config/locale/uclibc/codecvt_members.cc -+ CCOLLATE_CC=config/locale/uclibc/collate_members.cc -+ CCTYPE_CC=config/locale/uclibc/ctype_members.cc -+ CMESSAGES_H=config/locale/uclibc/messages_members.h -+ CMESSAGES_CC=config/locale/uclibc/messages_members.cc -+ CMONEY_CC=config/locale/uclibc/monetary_members.cc -+ CNUMERIC_CC=config/locale/uclibc/numeric_members.cc -+ CTIME_H=config/locale/uclibc/time_members.h -+ CTIME_CC=config/locale/uclibc/time_members.cc -+ CLOCALE_INTERNAL_H=config/locale/uclibc/c++locale_internal.h -+ ;; - esac - - # This is where the testsuite looks for locale catalogs, using the -diff --git a/libstdc++-v3/config/locale/uclibc/c++locale_internal.h b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h -new file mode 100644 -index 0000000..2ae3e4a ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h -@@ -0,0 +1,63 @@ -+// Prototypes for GLIBC thread locale __-prefixed functions -*- C++ -*- -+ -+// Copyright (C) 2002, 2004, 2005 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. -+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. 
-+ -+// Written by Jakub Jelinek -+ -+#include -+#include -+ -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning clean this up -+#endif -+ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ -+extern "C" __typeof(nl_langinfo_l) __nl_langinfo_l; -+extern "C" __typeof(strcoll_l) __strcoll_l; -+extern "C" __typeof(strftime_l) __strftime_l; -+extern "C" __typeof(strtod_l) __strtod_l; -+extern "C" __typeof(strtof_l) __strtof_l; -+extern "C" __typeof(strtold_l) __strtold_l; -+extern "C" __typeof(strxfrm_l) __strxfrm_l; -+extern "C" __typeof(newlocale) __newlocale; -+extern "C" __typeof(freelocale) __freelocale; -+extern "C" __typeof(duplocale) __duplocale; -+extern "C" __typeof(uselocale) __uselocale; -+ -+#ifdef _GLIBCXX_USE_WCHAR_T -+extern "C" __typeof(iswctype_l) __iswctype_l; -+extern "C" __typeof(towlower_l) __towlower_l; -+extern "C" __typeof(towupper_l) __towupper_l; -+extern "C" __typeof(wcscoll_l) __wcscoll_l; -+extern "C" __typeof(wcsftime_l) __wcsftime_l; -+extern "C" __typeof(wcsxfrm_l) __wcsxfrm_l; -+extern "C" __typeof(wctype_l) __wctype_l; -+#endif -+ -+#endif // GLIBC 2.3 and later -diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.cc b/libstdc++-v3/config/locale/uclibc/c_locale.cc -new file mode 100644 -index 0000000..5081dc1 ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/c_locale.cc -@@ -0,0 +1,160 @@ -+// Wrapper for underlying C-language localization -*- C++ -*- -+ -+// Copyright (C) 2001, 2002, 2003 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. -+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. -+ -+// -+// ISO C++ 14882: 22.8 Standard locale categories. -+// -+ -+// Written by Benjamin Kosnik -+ -+#include // For errno -+#include -+#include -+#include -+#include -+ -+#ifndef __UCLIBC_HAS_XLOCALE__ -+#define __strtol_l(S, E, B, L) strtol((S), (E), (B)) -+#define __strtoul_l(S, E, B, L) strtoul((S), (E), (B)) -+#define __strtoll_l(S, E, B, L) strtoll((S), (E), (B)) -+#define __strtoull_l(S, E, B, L) strtoull((S), (E), (B)) -+#define __strtof_l(S, E, L) strtof((S), (E)) -+#define __strtod_l(S, E, L) strtod((S), (E)) -+#define __strtold_l(S, E, L) strtold((S), (E)) -+#warning should dummy __newlocale check for C|POSIX ? 
-+#define __newlocale(a, b, c) NULL -+#define __freelocale(a) ((void)0) -+#define __duplocale(a) __c_locale() -+#endif -+ -+namespace std -+{ -+ template<> -+ void -+ __convert_to_v(const char* __s, float& __v, ios_base::iostate& __err, -+ const __c_locale& __cloc) -+ { -+ if (!(__err & ios_base::failbit)) -+ { -+ char* __sanity; -+ errno = 0; -+ float __f = __strtof_l(__s, &__sanity, __cloc); -+ if (__sanity != __s && errno != ERANGE) -+ __v = __f; -+ else -+ __err |= ios_base::failbit; -+ } -+ } -+ -+ template<> -+ void -+ __convert_to_v(const char* __s, double& __v, ios_base::iostate& __err, -+ const __c_locale& __cloc) -+ { -+ if (!(__err & ios_base::failbit)) -+ { -+ char* __sanity; -+ errno = 0; -+ double __d = __strtod_l(__s, &__sanity, __cloc); -+ if (__sanity != __s && errno != ERANGE) -+ __v = __d; -+ else -+ __err |= ios_base::failbit; -+ } -+ } -+ -+ template<> -+ void -+ __convert_to_v(const char* __s, long double& __v, ios_base::iostate& __err, -+ const __c_locale& __cloc) -+ { -+ if (!(__err & ios_base::failbit)) -+ { -+ char* __sanity; -+ errno = 0; -+ long double __ld = __strtold_l(__s, &__sanity, __cloc); -+ if (__sanity != __s && errno != ERANGE) -+ __v = __ld; -+ else -+ __err |= ios_base::failbit; -+ } -+ } -+ -+ void -+ locale::facet::_S_create_c_locale(__c_locale& __cloc, const char* __s, -+ __c_locale __old) -+ { -+ __cloc = __newlocale(1 << LC_ALL, __s, __old); -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ if (!__cloc) -+ { -+ // This named locale is not supported by the underlying OS. -+ __throw_runtime_error(__N("locale::facet::_S_create_c_locale " -+ "name not valid")); -+ } -+#endif -+ } -+ -+ void -+ locale::facet::_S_destroy_c_locale(__c_locale& __cloc) -+ { -+ if (_S_get_c_locale() != __cloc) -+ __freelocale(__cloc); -+ } -+ -+ __c_locale -+ locale::facet::_S_clone_c_locale(__c_locale& __cloc) -+ { return __duplocale(__cloc); } -+} // namespace std -+ -+namespace __gnu_cxx -+{ -+ const char* const category_names[6 + _GLIBCXX_NUM_CATEGORIES] = -+ { -+ "LC_CTYPE", -+ "LC_NUMERIC", -+ "LC_TIME", -+ "LC_COLLATE", -+ "LC_MONETARY", -+ "LC_MESSAGES", -+#if _GLIBCXX_NUM_CATEGORIES != 0 -+ "LC_PAPER", -+ "LC_NAME", -+ "LC_ADDRESS", -+ "LC_TELEPHONE", -+ "LC_MEASUREMENT", -+ "LC_IDENTIFICATION" -+#endif -+ }; -+} -+ -+namespace std -+{ -+ const char* const* const locale::_S_categories = __gnu_cxx::category_names; -+} // namespace std -diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.h b/libstdc++-v3/config/locale/uclibc/c_locale.h -new file mode 100644 -index 0000000..da07c1f ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/c_locale.h -@@ -0,0 +1,117 @@ -+// Wrapper for underlying C-language localization -*- C++ -*- -+ -+// Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. -+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. 
If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. -+ -+// -+// ISO C++ 14882: 22.8 Standard locale categories. -+// -+ -+// Written by Benjamin Kosnik -+ -+#ifndef _C_LOCALE_H -+#define _C_LOCALE_H 1 -+ -+#pragma GCC system_header -+ -+#include // get std::strlen -+#include // get std::snprintf or std::sprintf -+#include -+#include // For codecvt -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning fix this -+#endif -+#ifdef __UCLIBC_HAS_LOCALE__ -+#include // For codecvt using iconv, iconv_t -+#endif -+#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ -+#include // For messages -+#endif -+ -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning what is _GLIBCXX_C_LOCALE_GNU for -+#endif -+#define _GLIBCXX_C_LOCALE_GNU 1 -+ -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning fix categories -+#endif -+// #define _GLIBCXX_NUM_CATEGORIES 6 -+#define _GLIBCXX_NUM_CATEGORIES 0 -+ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+namespace __gnu_cxx -+{ -+ extern "C" __typeof(uselocale) __uselocale; -+} -+#endif -+ -+namespace std -+{ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ typedef __locale_t __c_locale; -+#else -+ typedef int* __c_locale; -+#endif -+ -+ // Convert numeric value of type _Tv to string and return length of -+ // string. If snprintf is available use it, otherwise fall back to -+ // the unsafe sprintf which, in general, can be dangerous and should -+ // be avoided. -+ template -+ int -+ __convert_from_v(char* __out, -+ const int __size __attribute__ ((__unused__)), -+ const char* __fmt, -+#ifdef __UCLIBC_HAS_XCLOCALE__ -+ _Tv __v, const __c_locale& __cloc, int __prec) -+ { -+ __c_locale __old = __gnu_cxx::__uselocale(__cloc); -+#else -+ _Tv __v, const __c_locale&, int __prec) -+ { -+# ifdef __UCLIBC_HAS_LOCALE__ -+ char* __old = std::setlocale(LC_ALL, NULL); -+ char* __sav = new char[std::strlen(__old) + 1]; -+ std::strcpy(__sav, __old); -+ std::setlocale(LC_ALL, "C"); -+# endif -+#endif -+ -+ const int __ret = std::snprintf(__out, __size, __fmt, __prec, __v); -+ -+#ifdef __UCLIBC_HAS_XCLOCALE__ -+ __gnu_cxx::__uselocale(__old); -+#elif defined __UCLIBC_HAS_LOCALE__ -+ std::setlocale(LC_ALL, __sav); -+ delete [] __sav; -+#endif -+ return __ret; -+ } -+} -+ -+#endif -diff --git a/libstdc++-v3/config/locale/uclibc/codecvt_members.cc b/libstdc++-v3/config/locale/uclibc/codecvt_members.cc -new file mode 100644 -index 0000000..64aa962 ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/codecvt_members.cc -@@ -0,0 +1,308 @@ -+// std::codecvt implementation details, GNU version -*- C++ -*- -+ -+// Copyright (C) 2002, 2003 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. 
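The c_locale.cc and c_locale.h pieces above follow two small idioms worth spelling out: __convert_to_v accepts a strtod-family result only if the parser consumed input and errno stayed clear of ERANGE (otherwise failbit is raised), and __convert_from_v formats through snprintf with the precision passed as the first variadic argument. The sketch below shows both with the locale plumbing stripped out; the function names are illustrative, not the libstdc++ internals themselves.

#include <cerrno>
#include <cstdio>
#include <cstdlib>
#include <ios>

void convert_to_double(const char* s, double& v, std::ios_base::iostate& err)
{
    if (err & std::ios_base::failbit)
        return;                              // already failed: do nothing
    char* end = nullptr;
    errno = 0;
    const double d = std::strtod(s, &end);
    if (end != s && errno != ERANGE)
        v = d;                               // parsed and in range
    else
        err |= std::ios_base::failbit;
}

int convert_from_double(char* out, int size, const char* fmt, int prec, double v)
{
    // Same argument order as the patch's snprintf(__out, __size, __fmt, __prec, __v).
    return std::snprintf(out, size, fmt, prec, v);
}

int main()
{
    double v = 0.0;
    std::ios_base::iostate err = std::ios_base::goodbit;
    convert_to_double("2.5e3", v, err);

    char buf[32];
    convert_from_double(buf, sizeof buf, "%.*f", 2, v);
    std::printf("%s (failbit=%d)\n", buf, (err & std::ios_base::failbit) != 0);
    return 0;
}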
-+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. -+ -+// -+// ISO C++ 14882: 22.2.1.5 - Template class codecvt -+// -+ -+// Written by Benjamin Kosnik -+ -+#include -+#include // For MB_CUR_MAX -+#include // For MB_LEN_MAX -+#include -+ -+namespace std -+{ -+ // Specializations. -+#ifdef _GLIBCXX_USE_WCHAR_T -+ codecvt_base::result -+ codecvt:: -+ do_out(state_type& __state, const intern_type* __from, -+ const intern_type* __from_end, const intern_type*& __from_next, -+ extern_type* __to, extern_type* __to_end, -+ extern_type*& __to_next) const -+ { -+ result __ret = ok; -+ state_type __tmp_state(__state); -+ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(_M_c_locale_codecvt); -+#endif -+ -+ // wcsnrtombs is *very* fast but stops if encounters NUL characters: -+ // in case we fall back to wcrtomb and then continue, in a loop. -+ // NB: wcsnrtombs is a GNU extension -+ for (__from_next = __from, __to_next = __to; -+ __from_next < __from_end && __to_next < __to_end -+ && __ret == ok;) -+ { -+ const intern_type* __from_chunk_end = wmemchr(__from_next, L'\0', -+ __from_end - __from_next); -+ if (!__from_chunk_end) -+ __from_chunk_end = __from_end; -+ -+ __from = __from_next; -+ const size_t __conv = wcsnrtombs(__to_next, &__from_next, -+ __from_chunk_end - __from_next, -+ __to_end - __to_next, &__state); -+ if (__conv == static_cast(-1)) -+ { -+ // In case of error, in order to stop at the exact place we -+ // have to start again from the beginning with a series of -+ // wcrtomb. 
-+ for (; __from < __from_next; ++__from) -+ __to_next += wcrtomb(__to_next, *__from, &__tmp_state); -+ __state = __tmp_state; -+ __ret = error; -+ } -+ else if (__from_next && __from_next < __from_chunk_end) -+ { -+ __to_next += __conv; -+ __ret = partial; -+ } -+ else -+ { -+ __from_next = __from_chunk_end; -+ __to_next += __conv; -+ } -+ -+ if (__from_next < __from_end && __ret == ok) -+ { -+ extern_type __buf[MB_LEN_MAX]; -+ __tmp_state = __state; -+ const size_t __conv = wcrtomb(__buf, *__from_next, &__tmp_state); -+ if (__conv > static_cast(__to_end - __to_next)) -+ __ret = partial; -+ else -+ { -+ memcpy(__to_next, __buf, __conv); -+ __state = __tmp_state; -+ __to_next += __conv; -+ ++__from_next; -+ } -+ } -+ } -+ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#endif -+ -+ return __ret; -+ } -+ -+ codecvt_base::result -+ codecvt:: -+ do_in(state_type& __state, const extern_type* __from, -+ const extern_type* __from_end, const extern_type*& __from_next, -+ intern_type* __to, intern_type* __to_end, -+ intern_type*& __to_next) const -+ { -+ result __ret = ok; -+ state_type __tmp_state(__state); -+ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(_M_c_locale_codecvt); -+#endif -+ -+ // mbsnrtowcs is *very* fast but stops if encounters NUL characters: -+ // in case we store a L'\0' and then continue, in a loop. -+ // NB: mbsnrtowcs is a GNU extension -+ for (__from_next = __from, __to_next = __to; -+ __from_next < __from_end && __to_next < __to_end -+ && __ret == ok;) -+ { -+ const extern_type* __from_chunk_end; -+ __from_chunk_end = static_cast(memchr(__from_next, '\0', -+ __from_end -+ - __from_next)); -+ if (!__from_chunk_end) -+ __from_chunk_end = __from_end; -+ -+ __from = __from_next; -+ size_t __conv = mbsnrtowcs(__to_next, &__from_next, -+ __from_chunk_end - __from_next, -+ __to_end - __to_next, &__state); -+ if (__conv == static_cast(-1)) -+ { -+ // In case of error, in order to stop at the exact place we -+ // have to start again from the beginning with a series of -+ // mbrtowc. -+ for (;; ++__to_next, __from += __conv) -+ { -+ __conv = mbrtowc(__to_next, __from, __from_end - __from, -+ &__tmp_state); -+ if (__conv == static_cast(-1) -+ || __conv == static_cast(-2)) -+ break; -+ } -+ __from_next = __from; -+ __state = __tmp_state; -+ __ret = error; -+ } -+ else if (__from_next && __from_next < __from_chunk_end) -+ { -+ // It is unclear what to return in this case (see DR 382). -+ __to_next += __conv; -+ __ret = partial; -+ } -+ else -+ { -+ __from_next = __from_chunk_end; -+ __to_next += __conv; -+ } -+ -+ if (__from_next < __from_end && __ret == ok) -+ { -+ if (__to_next < __to_end) -+ { -+ // XXX Probably wrong for stateful encodings -+ __tmp_state = __state; -+ ++__from_next; -+ *__to_next++ = L'\0'; -+ } -+ else -+ __ret = partial; -+ } -+ } -+ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#endif -+ -+ return __ret; -+ } -+ -+ int -+ codecvt:: -+ do_encoding() const throw() -+ { -+ // XXX This implementation assumes that the encoding is -+ // stateless and is either single-byte or variable-width. -+ int __ret = 0; -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(_M_c_locale_codecvt); -+#endif -+ if (MB_CUR_MAX == 1) -+ __ret = 1; -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#endif -+ return __ret; -+ } -+ -+ int -+ codecvt:: -+ do_max_length() const throw() -+ { -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(_M_c_locale_codecvt); -+#endif -+ // XXX Probably wrong for stateful encodings. 
-+ int __ret = MB_CUR_MAX; -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#endif -+ return __ret; -+ } -+ -+ int -+ codecvt:: -+ do_length(state_type& __state, const extern_type* __from, -+ const extern_type* __end, size_t __max) const -+ { -+ int __ret = 0; -+ state_type __tmp_state(__state); -+ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(_M_c_locale_codecvt); -+#endif -+ -+ // mbsnrtowcs is *very* fast but stops if encounters NUL characters: -+ // in case we advance past it and then continue, in a loop. -+ // NB: mbsnrtowcs is a GNU extension -+ -+ // A dummy internal buffer is needed in order for mbsnrtocws to consider -+ // its fourth parameter (it wouldn't with NULL as first parameter). -+ wchar_t* __to = static_cast(__builtin_alloca(sizeof(wchar_t) -+ * __max)); -+ while (__from < __end && __max) -+ { -+ const extern_type* __from_chunk_end; -+ __from_chunk_end = static_cast(memchr(__from, '\0', -+ __end -+ - __from)); -+ if (!__from_chunk_end) -+ __from_chunk_end = __end; -+ -+ const extern_type* __tmp_from = __from; -+ size_t __conv = mbsnrtowcs(__to, &__from, -+ __from_chunk_end - __from, -+ __max, &__state); -+ if (__conv == static_cast(-1)) -+ { -+ // In case of error, in order to stop at the exact place we -+ // have to start again from the beginning with a series of -+ // mbrtowc. -+ for (__from = __tmp_from;; __from += __conv) -+ { -+ __conv = mbrtowc(NULL, __from, __end - __from, -+ &__tmp_state); -+ if (__conv == static_cast(-1) -+ || __conv == static_cast(-2)) -+ break; -+ } -+ __state = __tmp_state; -+ __ret += __from - __tmp_from; -+ break; -+ } -+ if (!__from) -+ __from = __from_chunk_end; -+ -+ __ret += __from - __tmp_from; -+ __max -= __conv; -+ -+ if (__from < __end && __max) -+ { -+ // XXX Probably wrong for stateful encodings -+ __tmp_state = __state; -+ ++__from; -+ ++__ret; -+ --__max; -+ } -+ } -+ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#endif -+ -+ return __ret; -+ } -+#endif -+} -diff --git a/libstdc++-v3/config/locale/uclibc/collate_members.cc b/libstdc++-v3/config/locale/uclibc/collate_members.cc -new file mode 100644 -index 0000000..c2664a7 ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/collate_members.cc -@@ -0,0 +1,80 @@ -+// std::collate implementation details, GNU version -*- C++ -*- -+ -+// Copyright (C) 2001, 2002 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. -+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. 
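The codecvt_members.cc functions above all share one strategy: convert in bulk with the fast C routine first and, only if it reports an error, redo the work character by character so the exact stopping point can be reported. Below is a small standalone sketch of that idea using the standard wcsrtombs/wcrtomb pair; the patch itself relies on the GNU wcsnrtombs/mbsnrtowcs extensions so it can also hop over embedded L'\0' chunks, which this sketch does not attempt.

#include <clocale>
#include <cstdio>
#include <cwchar>

int main()
{
    std::setlocale(LC_ALL, "C");
    const wchar_t* src  = L"abc";
    const wchar_t* from = src;
    char out[16];
    std::mbstate_t state{};

    std::size_t n = std::wcsrtombs(out, &from, sizeof out, &state);
    if (n == static_cast<std::size_t>(-1))
    {
        // Error: fall back to wcrtomb to find the first character that
        // cannot be converted (this is what becomes the 'error' result).
        std::mbstate_t tmp{};
        char* to = out;
        for (const wchar_t* p = src; *p; ++p)
        {
            std::size_t k = std::wcrtomb(to, *p, &tmp);
            if (k == static_cast<std::size_t>(-1))
                break;
            to += k;
        }
        std::puts("partial conversion");
    }
    else
    {
        std::printf("converted %zu bytes: %s\n", n, out);
    }
    return 0;
}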
Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. -+ -+// -+// ISO C++ 14882: 22.2.4.1.2 collate virtual functions -+// -+ -+// Written by Benjamin Kosnik -+ -+#include -+#include -+ -+#ifndef __UCLIBC_HAS_XLOCALE__ -+#define __strcoll_l(S1, S2, L) strcoll((S1), (S2)) -+#define __strxfrm_l(S1, S2, N, L) strxfrm((S1), (S2), (N)) -+#define __wcscoll_l(S1, S2, L) wcscoll((S1), (S2)) -+#define __wcsxfrm_l(S1, S2, N, L) wcsxfrm((S1), (S2), (N)) -+#endif -+ -+namespace std -+{ -+ // These are basically extensions to char_traits, and perhaps should -+ // be put there instead of here. -+ template<> -+ int -+ collate::_M_compare(const char* __one, const char* __two) const -+ { -+ int __cmp = __strcoll_l(__one, __two, _M_c_locale_collate); -+ return (__cmp >> (8 * sizeof (int) - 2)) | (__cmp != 0); -+ } -+ -+ template<> -+ size_t -+ collate::_M_transform(char* __to, const char* __from, -+ size_t __n) const -+ { return __strxfrm_l(__to, __from, __n, _M_c_locale_collate); } -+ -+#ifdef _GLIBCXX_USE_WCHAR_T -+ template<> -+ int -+ collate::_M_compare(const wchar_t* __one, -+ const wchar_t* __two) const -+ { -+ int __cmp = __wcscoll_l(__one, __two, _M_c_locale_collate); -+ return (__cmp >> (8 * sizeof (int) - 2)) | (__cmp != 0); -+ } -+ -+ template<> -+ size_t -+ collate::_M_transform(wchar_t* __to, const wchar_t* __from, -+ size_t __n) const -+ { return __wcsxfrm_l(__to, __from, __n, _M_c_locale_collate); } -+#endif -+} -diff --git a/libstdc++-v3/config/locale/uclibc/ctype_members.cc b/libstdc++-v3/config/locale/uclibc/ctype_members.cc -new file mode 100644 -index 0000000..7294e3a ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/ctype_members.cc -@@ -0,0 +1,300 @@ -+// std::ctype implementation details, GNU version -*- C++ -*- -+ -+// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. -+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. 
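The collate_members.cc _M_compare specialisations above use a compact sign-folding expression: strcoll/wcscoll may return any negative, zero, or positive value, and the shift-or below collapses it to -1, 0 or 1. Like the original, this sketch assumes arithmetic right shift of a negative int, which is what GCC does but is implementation-defined in ISO C++.

#include <cstdio>
#include <cstring>

int fold_sign(int cmp)
{
    return (cmp >> (8 * sizeof(int) - 2)) | (cmp != 0);
}

int main()
{
    std::printf("%d %d %d\n",
                fold_sign(std::strcmp("a", "b")),   // -1
                fold_sign(std::strcmp("a", "a")),   //  0
                fold_sign(std::strcmp("b", "a")));  //  1
    return 0;
}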
This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. -+ -+// -+// ISO C++ 14882: 22.2.1.1.2 ctype virtual functions. -+// -+ -+// Written by Benjamin Kosnik -+ -+#define _LIBC -+#include -+#undef _LIBC -+#include -+ -+#ifndef __UCLIBC_HAS_XLOCALE__ -+#define __wctype_l(S, L) wctype((S)) -+#define __towupper_l(C, L) towupper((C)) -+#define __towlower_l(C, L) towlower((C)) -+#define __iswctype_l(C, M, L) iswctype((C), (M)) -+#endif -+ -+namespace std -+{ -+ // NB: The other ctype specializations are in src/locale.cc and -+ // various /config/os/* files. -+ template<> -+ ctype_byname::ctype_byname(const char* __s, size_t __refs) -+ : ctype(0, false, __refs) -+ { -+ if (std::strcmp(__s, "C") != 0 && std::strcmp(__s, "POSIX") != 0) -+ { -+ this->_S_destroy_c_locale(this->_M_c_locale_ctype); -+ this->_S_create_c_locale(this->_M_c_locale_ctype, __s); -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ this->_M_toupper = this->_M_c_locale_ctype->__ctype_toupper; -+ this->_M_tolower = this->_M_c_locale_ctype->__ctype_tolower; -+ this->_M_table = this->_M_c_locale_ctype->__ctype_b; -+#endif -+ } -+ } -+ -+#ifdef _GLIBCXX_USE_WCHAR_T -+ ctype::__wmask_type -+ ctype::_M_convert_to_wmask(const mask __m) const -+ { -+ __wmask_type __ret; -+ switch (__m) -+ { -+ case space: -+ __ret = __wctype_l("space", _M_c_locale_ctype); -+ break; -+ case print: -+ __ret = __wctype_l("print", _M_c_locale_ctype); -+ break; -+ case cntrl: -+ __ret = __wctype_l("cntrl", _M_c_locale_ctype); -+ break; -+ case upper: -+ __ret = __wctype_l("upper", _M_c_locale_ctype); -+ break; -+ case lower: -+ __ret = __wctype_l("lower", _M_c_locale_ctype); -+ break; -+ case alpha: -+ __ret = __wctype_l("alpha", _M_c_locale_ctype); -+ break; -+ case digit: -+ __ret = __wctype_l("digit", _M_c_locale_ctype); -+ break; -+ case punct: -+ __ret = __wctype_l("punct", _M_c_locale_ctype); -+ break; -+ case xdigit: -+ __ret = __wctype_l("xdigit", _M_c_locale_ctype); -+ break; -+ case alnum: -+ __ret = __wctype_l("alnum", _M_c_locale_ctype); -+ break; -+ case graph: -+ __ret = __wctype_l("graph", _M_c_locale_ctype); -+ break; -+ default: -+ __ret = __wmask_type(); -+ } -+ return __ret; -+ } -+ -+ wchar_t -+ ctype::do_toupper(wchar_t __c) const -+ { return __towupper_l(__c, _M_c_locale_ctype); } -+ -+ const wchar_t* -+ ctype::do_toupper(wchar_t* __lo, const wchar_t* __hi) const -+ { -+ while (__lo < __hi) -+ { -+ *__lo = __towupper_l(*__lo, _M_c_locale_ctype); -+ ++__lo; -+ } -+ return __hi; -+ } -+ -+ wchar_t -+ ctype::do_tolower(wchar_t __c) const -+ { return __towlower_l(__c, _M_c_locale_ctype); } -+ -+ const wchar_t* -+ ctype::do_tolower(wchar_t* __lo, const wchar_t* __hi) const -+ { -+ while (__lo < __hi) -+ { -+ *__lo = __towlower_l(*__lo, _M_c_locale_ctype); -+ ++__lo; -+ } -+ return __hi; -+ } -+ -+ bool -+ ctype:: -+ do_is(mask __m, wchar_t __c) const -+ { -+ // Highest bitmask in ctype_base == 10, but extra in "C" -+ // library for blank. -+ bool __ret = false; -+ const size_t __bitmasksize = 11; -+ for (size_t __bitcur = 0; __bitcur <= __bitmasksize; ++__bitcur) -+ if (__m & _M_bit[__bitcur] -+ && __iswctype_l(__c, _M_wmask[__bitcur], _M_c_locale_ctype)) -+ { -+ __ret = true; -+ break; -+ } -+ return __ret; -+ } -+ -+ const wchar_t* -+ ctype:: -+ do_is(const wchar_t* __lo, const wchar_t* __hi, mask* __vec) const -+ { -+ for (; __lo < __hi; ++__vec, ++__lo) -+ { -+ // Highest bitmask in ctype_base == 10, but extra in "C" -+ // library for blank. 
-+ const size_t __bitmasksize = 11; -+ mask __m = 0; -+ for (size_t __bitcur = 0; __bitcur <= __bitmasksize; ++__bitcur) -+ if (__iswctype_l(*__lo, _M_wmask[__bitcur], _M_c_locale_ctype)) -+ __m |= _M_bit[__bitcur]; -+ *__vec = __m; -+ } -+ return __hi; -+ } -+ -+ const wchar_t* -+ ctype:: -+ do_scan_is(mask __m, const wchar_t* __lo, const wchar_t* __hi) const -+ { -+ while (__lo < __hi && !this->do_is(__m, *__lo)) -+ ++__lo; -+ return __lo; -+ } -+ -+ const wchar_t* -+ ctype:: -+ do_scan_not(mask __m, const char_type* __lo, const char_type* __hi) const -+ { -+ while (__lo < __hi && this->do_is(__m, *__lo) != 0) -+ ++__lo; -+ return __lo; -+ } -+ -+ wchar_t -+ ctype:: -+ do_widen(char __c) const -+ { return _M_widen[static_cast(__c)]; } -+ -+ const char* -+ ctype:: -+ do_widen(const char* __lo, const char* __hi, wchar_t* __dest) const -+ { -+ while (__lo < __hi) -+ { -+ *__dest = _M_widen[static_cast(*__lo)]; -+ ++__lo; -+ ++__dest; -+ } -+ return __hi; -+ } -+ -+ char -+ ctype:: -+ do_narrow(wchar_t __wc, char __dfault) const -+ { -+ if (__wc >= 0 && __wc < 128 && _M_narrow_ok) -+ return _M_narrow[__wc]; -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(_M_c_locale_ctype); -+#endif -+ const int __c = wctob(__wc); -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#endif -+ return (__c == EOF ? __dfault : static_cast(__c)); -+ } -+ -+ const wchar_t* -+ ctype:: -+ do_narrow(const wchar_t* __lo, const wchar_t* __hi, char __dfault, -+ char* __dest) const -+ { -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(_M_c_locale_ctype); -+#endif -+ if (_M_narrow_ok) -+ while (__lo < __hi) -+ { -+ if (*__lo >= 0 && *__lo < 128) -+ *__dest = _M_narrow[*__lo]; -+ else -+ { -+ const int __c = wctob(*__lo); -+ *__dest = (__c == EOF ? __dfault : static_cast(__c)); -+ } -+ ++__lo; -+ ++__dest; -+ } -+ else -+ while (__lo < __hi) -+ { -+ const int __c = wctob(*__lo); -+ *__dest = (__c == EOF ? __dfault : static_cast(__c)); -+ ++__lo; -+ ++__dest; -+ } -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#endif -+ return __hi; -+ } -+ -+ void -+ ctype::_M_initialize_ctype() -+ { -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(_M_c_locale_ctype); -+#endif -+ wint_t __i; -+ for (__i = 0; __i < 128; ++__i) -+ { -+ const int __c = wctob(__i); -+ if (__c == EOF) -+ break; -+ else -+ _M_narrow[__i] = static_cast(__c); -+ } -+ if (__i == 128) -+ _M_narrow_ok = true; -+ else -+ _M_narrow_ok = false; -+ for (size_t __j = 0; -+ __j < sizeof(_M_widen) / sizeof(wint_t); ++__j) -+ _M_widen[__j] = btowc(__j); -+ -+ for (size_t __k = 0; __k <= 11; ++__k) -+ { -+ _M_bit[__k] = static_cast(_ISbit(__k)); -+ _M_wmask[__k] = _M_convert_to_wmask(_M_bit[__k]); -+ } -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#endif -+ } -+#endif // _GLIBCXX_USE_WCHAR_T -+} -diff --git a/libstdc++-v3/config/locale/uclibc/messages_members.cc b/libstdc++-v3/config/locale/uclibc/messages_members.cc -new file mode 100644 -index 0000000..13594d9 ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/messages_members.cc -@@ -0,0 +1,100 @@ -+// std::messages implementation details, GNU version -*- C++ -*- -+ -+// Copyright (C) 2001, 2002 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. 
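The ctype<wchar_t> members in ctype_members.cc classify characters by resolving each category name ("alpha", "digit", ...) to a wctype_t handle once (_M_convert_to_wmask caches them in _M_wmask) and then testing with iswctype. A minimal standalone sketch of that scheme, using only the portable <cwctype> subset:

#include <cwctype>
#include <cstdio>

int main()
{
    const std::wctype_t alpha = std::wctype("alpha");
    const std::wctype_t digit = std::wctype("digit");
    std::printf("L'7': alpha=%d digit=%d\n",
                std::iswctype(L'7', alpha) != 0,
                std::iswctype(L'7', digit) != 0);
    return 0;
}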
-+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. -+ -+// -+// ISO C++ 14882: 22.2.7.1.2 messages virtual functions -+// -+ -+// Written by Benjamin Kosnik -+ -+#include -+#include -+ -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning fix gettext stuff -+#endif -+#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ -+extern "C" char *__dcgettext(const char *domainname, -+ const char *msgid, int category); -+#undef gettext -+#define gettext(msgid) __dcgettext(NULL, msgid, LC_MESSAGES) -+#else -+#undef gettext -+#define gettext(msgid) (msgid) -+#endif -+ -+namespace std -+{ -+ // Specializations. -+ template<> -+ string -+ messages::do_get(catalog, int, int, const string& __dfault) const -+ { -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(_M_c_locale_messages); -+ const char* __msg = const_cast(gettext(__dfault.c_str())); -+ __uselocale(__old); -+ return string(__msg); -+#elif defined __UCLIBC_HAS_LOCALE__ -+ char* __old = strdup(setlocale(LC_ALL, NULL)); -+ setlocale(LC_ALL, _M_name_messages); -+ const char* __msg = gettext(__dfault.c_str()); -+ setlocale(LC_ALL, __old); -+ free(__old); -+ return string(__msg); -+#else -+ const char* __msg = gettext(__dfault.c_str()); -+ return string(__msg); -+#endif -+ } -+ -+#ifdef _GLIBCXX_USE_WCHAR_T -+ template<> -+ wstring -+ messages::do_get(catalog, int, int, const wstring& __dfault) const -+ { -+# ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(_M_c_locale_messages); -+ char* __msg = gettext(_M_convert_to_char(__dfault)); -+ __uselocale(__old); -+ return _M_convert_from_char(__msg); -+# elif defined __UCLIBC_HAS_LOCALE__ -+ char* __old = strdup(setlocale(LC_ALL, NULL)); -+ setlocale(LC_ALL, _M_name_messages); -+ char* __msg = gettext(_M_convert_to_char(__dfault)); -+ setlocale(LC_ALL, __old); -+ free(__old); -+ return _M_convert_from_char(__msg); -+# else -+ char* __msg = gettext(_M_convert_to_char(__dfault)); -+ return _M_convert_from_char(__msg); -+# endif -+ } -+#endif -+} -diff --git a/libstdc++-v3/config/locale/uclibc/messages_members.h b/libstdc++-v3/config/locale/uclibc/messages_members.h -new file mode 100644 -index 0000000..1424078 ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/messages_members.h -@@ -0,0 +1,118 @@ -+// std::messages implementation details, GNU version -*- C++ -*- -+ -+// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. 
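The messages<>::do_get specialisations in messages_members.cc above boil down to handing the untranslated default string to gettext, which either returns a catalog translation or the argument unchanged. A minimal sketch of that lookup follows; it needs GNU libintl, and the domain name and directory are placeholders for the sketch only, not values from the patch.

#include <libintl.h>
#include <clocale>
#include <cstdio>
#include <string>

std::string lookup(const std::string& dflt)
{
    return std::string(gettext(dflt.c_str()));
}

int main()
{
    std::setlocale(LC_ALL, "");
    bindtextdomain("example", "/usr/share/locale");   // placeholder domain/path
    textdomain("example");
    std::printf("%s\n", lookup("hello").c_str());     // "hello" unless translated
    return 0;
}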
This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. -+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. -+ -+// -+// ISO C++ 14882: 22.2.7.1.2 messages functions -+// -+ -+// Written by Benjamin Kosnik -+ -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning fix prototypes for *textdomain funcs -+#endif -+#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ -+extern "C" char *__textdomain(const char *domainname); -+extern "C" char *__bindtextdomain(const char *domainname, -+ const char *dirname); -+#else -+#undef __textdomain -+#undef __bindtextdomain -+#define __textdomain(D) ((void)0) -+#define __bindtextdomain(D,P) ((void)0) -+#endif -+ -+ // Non-virtual member functions. -+ template -+ messages<_CharT>::messages(size_t __refs) -+ : facet(__refs), _M_c_locale_messages(_S_get_c_locale()), -+ _M_name_messages(_S_get_c_name()) -+ { } -+ -+ template -+ messages<_CharT>::messages(__c_locale __cloc, const char* __s, -+ size_t __refs) -+ : facet(__refs), _M_c_locale_messages(_S_clone_c_locale(__cloc)), -+ _M_name_messages(__s) -+ { -+ char* __tmp = new char[std::strlen(__s) + 1]; -+ std::strcpy(__tmp, __s); -+ _M_name_messages = __tmp; -+ } -+ -+ template -+ typename messages<_CharT>::catalog -+ messages<_CharT>::open(const basic_string& __s, const locale& __loc, -+ const char* __dir) const -+ { -+ __bindtextdomain(__s.c_str(), __dir); -+ return this->do_open(__s, __loc); -+ } -+ -+ // Virtual member functions. -+ template -+ messages<_CharT>::~messages() -+ { -+ if (_M_name_messages != _S_get_c_name()) -+ delete [] _M_name_messages; -+ _S_destroy_c_locale(_M_c_locale_messages); -+ } -+ -+ template -+ typename messages<_CharT>::catalog -+ messages<_CharT>::do_open(const basic_string& __s, -+ const locale&) const -+ { -+ // No error checking is done, assume the catalog exists and can -+ // be used. 
-+ __textdomain(__s.c_str()); -+ return 0; -+ } -+ -+ template -+ void -+ messages<_CharT>::do_close(catalog) const -+ { } -+ -+ // messages_byname -+ template -+ messages_byname<_CharT>::messages_byname(const char* __s, size_t __refs) -+ : messages<_CharT>(__refs) -+ { -+ if (this->_M_name_messages != locale::facet::_S_get_c_name()) -+ delete [] this->_M_name_messages; -+ char* __tmp = new char[std::strlen(__s) + 1]; -+ std::strcpy(__tmp, __s); -+ this->_M_name_messages = __tmp; -+ -+ if (std::strcmp(__s, "C") != 0 && std::strcmp(__s, "POSIX") != 0) -+ { -+ this->_S_destroy_c_locale(this->_M_c_locale_messages); -+ this->_S_create_c_locale(this->_M_c_locale_messages, __s); -+ } -+ } -diff --git a/libstdc++-v3/config/locale/uclibc/monetary_members.cc b/libstdc++-v3/config/locale/uclibc/monetary_members.cc -new file mode 100644 -index 0000000..aa52731 ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/monetary_members.cc -@@ -0,0 +1,692 @@ -+// std::moneypunct implementation details, GNU version -*- C++ -*- -+ -+// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. -+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. -+ -+// -+// ISO C++ 14882: 22.2.6.3.2 moneypunct virtual functions -+// -+ -+// Written by Benjamin Kosnik -+ -+#define _LIBC -+#include -+#undef _LIBC -+#include -+ -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning optimize this for uclibc -+#warning tailor for stub locale support -+#endif -+ -+#ifndef __UCLIBC_HAS_XLOCALE__ -+#define __nl_langinfo_l(N, L) nl_langinfo((N)) -+#endif -+ -+namespace std -+{ -+ // Construct and return valid pattern consisting of some combination of: -+ // space none symbol sign value -+ money_base::pattern -+ money_base::_S_construct_pattern(char __precedes, char __space, char __posn) -+ { -+ pattern __ret; -+ -+ // This insanely complicated routine attempts to construct a valid -+ // pattern for use with monyepunct. A couple of invariants: -+ -+ // if (__precedes) symbol -> value -+ // else value -> symbol -+ -+ // if (__space) space -+ // else none -+ -+ // none == never first -+ // space never first or last -+ -+ // Any elegant implementations of this are welcome. 
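The _S_construct_pattern routine whose comment block appears above folds three POSIX monetary knobs into a money_base::pattern: whether the currency symbol precedes the value, whether a space separates them, and the sign position (1 = before everything, 2 = after everything, 3 = just before the symbol, 4 = just after the symbol; 0 means parentheses, which the patch instead encodes by substituting "()" as the negative sign). A tiny sketch that just prints those knobs for the current locale, so the switch below is easier to follow:

#include <clocale>
#include <cstdio>

int main()
{
    std::setlocale(LC_ALL, "");                 // pick up the user's locale
    const std::lconv* lc = std::localeconv();
    std::printf("p_cs_precedes=%d p_sep_by_space=%d p_sign_posn=%d\n",
                lc->p_cs_precedes, lc->p_sep_by_space, lc->p_sign_posn);
    return 0;
}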
-+ switch (__posn) -+ { -+ case 0: -+ case 1: -+ // 1 The sign precedes the value and symbol. -+ __ret.field[0] = sign; -+ if (__space) -+ { -+ // Pattern starts with sign. -+ if (__precedes) -+ { -+ __ret.field[1] = symbol; -+ __ret.field[3] = value; -+ } -+ else -+ { -+ __ret.field[1] = value; -+ __ret.field[3] = symbol; -+ } -+ __ret.field[2] = space; -+ } -+ else -+ { -+ // Pattern starts with sign and ends with none. -+ if (__precedes) -+ { -+ __ret.field[1] = symbol; -+ __ret.field[2] = value; -+ } -+ else -+ { -+ __ret.field[1] = value; -+ __ret.field[2] = symbol; -+ } -+ __ret.field[3] = none; -+ } -+ break; -+ case 2: -+ // 2 The sign follows the value and symbol. -+ if (__space) -+ { -+ // Pattern either ends with sign. -+ if (__precedes) -+ { -+ __ret.field[0] = symbol; -+ __ret.field[2] = value; -+ } -+ else -+ { -+ __ret.field[0] = value; -+ __ret.field[2] = symbol; -+ } -+ __ret.field[1] = space; -+ __ret.field[3] = sign; -+ } -+ else -+ { -+ // Pattern ends with sign then none. -+ if (__precedes) -+ { -+ __ret.field[0] = symbol; -+ __ret.field[1] = value; -+ } -+ else -+ { -+ __ret.field[0] = value; -+ __ret.field[1] = symbol; -+ } -+ __ret.field[2] = sign; -+ __ret.field[3] = none; -+ } -+ break; -+ case 3: -+ // 3 The sign immediately precedes the symbol. -+ if (__precedes) -+ { -+ __ret.field[0] = sign; -+ __ret.field[1] = symbol; -+ if (__space) -+ { -+ __ret.field[2] = space; -+ __ret.field[3] = value; -+ } -+ else -+ { -+ __ret.field[2] = value; -+ __ret.field[3] = none; -+ } -+ } -+ else -+ { -+ __ret.field[0] = value; -+ if (__space) -+ { -+ __ret.field[1] = space; -+ __ret.field[2] = sign; -+ __ret.field[3] = symbol; -+ } -+ else -+ { -+ __ret.field[1] = sign; -+ __ret.field[2] = symbol; -+ __ret.field[3] = none; -+ } -+ } -+ break; -+ case 4: -+ // 4 The sign immediately follows the symbol. -+ if (__precedes) -+ { -+ __ret.field[0] = symbol; -+ __ret.field[1] = sign; -+ if (__space) -+ { -+ __ret.field[2] = space; -+ __ret.field[3] = value; -+ } -+ else -+ { -+ __ret.field[2] = value; -+ __ret.field[3] = none; -+ } -+ } -+ else -+ { -+ __ret.field[0] = value; -+ if (__space) -+ { -+ __ret.field[1] = space; -+ __ret.field[2] = symbol; -+ __ret.field[3] = sign; -+ } -+ else -+ { -+ __ret.field[1] = symbol; -+ __ret.field[2] = sign; -+ __ret.field[3] = none; -+ } -+ } -+ break; -+ default: -+ ; -+ } -+ return __ret; -+ } -+ -+ template<> -+ void -+ moneypunct::_M_initialize_moneypunct(__c_locale __cloc, -+ const char*) -+ { -+ if (!_M_data) -+ _M_data = new __moneypunct_cache; -+ -+ if (!__cloc) -+ { -+ // "C" locale -+ _M_data->_M_decimal_point = '.'; -+ _M_data->_M_thousands_sep = ','; -+ _M_data->_M_grouping = ""; -+ _M_data->_M_grouping_size = 0; -+ _M_data->_M_curr_symbol = ""; -+ _M_data->_M_curr_symbol_size = 0; -+ _M_data->_M_positive_sign = ""; -+ _M_data->_M_positive_sign_size = 0; -+ _M_data->_M_negative_sign = ""; -+ _M_data->_M_negative_sign_size = 0; -+ _M_data->_M_frac_digits = 0; -+ _M_data->_M_pos_format = money_base::_S_default_pattern; -+ _M_data->_M_neg_format = money_base::_S_default_pattern; -+ -+ for (size_t __i = 0; __i < money_base::_S_end; ++__i) -+ _M_data->_M_atoms[__i] = money_base::_S_atoms[__i]; -+ } -+ else -+ { -+ // Named locale. 
-+ _M_data->_M_decimal_point = *(__nl_langinfo_l(__MON_DECIMAL_POINT, -+ __cloc)); -+ _M_data->_M_thousands_sep = *(__nl_langinfo_l(__MON_THOUSANDS_SEP, -+ __cloc)); -+ _M_data->_M_grouping = __nl_langinfo_l(__MON_GROUPING, __cloc); -+ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); -+ _M_data->_M_positive_sign = __nl_langinfo_l(__POSITIVE_SIGN, __cloc); -+ _M_data->_M_positive_sign_size = strlen(_M_data->_M_positive_sign); -+ -+ char __nposn = *(__nl_langinfo_l(__INT_N_SIGN_POSN, __cloc)); -+ if (!__nposn) -+ _M_data->_M_negative_sign = "()"; -+ else -+ _M_data->_M_negative_sign = __nl_langinfo_l(__NEGATIVE_SIGN, -+ __cloc); -+ _M_data->_M_negative_sign_size = strlen(_M_data->_M_negative_sign); -+ -+ // _Intl == true -+ _M_data->_M_curr_symbol = __nl_langinfo_l(__INT_CURR_SYMBOL, __cloc); -+ _M_data->_M_curr_symbol_size = strlen(_M_data->_M_curr_symbol); -+ _M_data->_M_frac_digits = *(__nl_langinfo_l(__INT_FRAC_DIGITS, -+ __cloc)); -+ char __pprecedes = *(__nl_langinfo_l(__INT_P_CS_PRECEDES, __cloc)); -+ char __pspace = *(__nl_langinfo_l(__INT_P_SEP_BY_SPACE, __cloc)); -+ char __pposn = *(__nl_langinfo_l(__INT_P_SIGN_POSN, __cloc)); -+ _M_data->_M_pos_format = _S_construct_pattern(__pprecedes, __pspace, -+ __pposn); -+ char __nprecedes = *(__nl_langinfo_l(__INT_N_CS_PRECEDES, __cloc)); -+ char __nspace = *(__nl_langinfo_l(__INT_N_SEP_BY_SPACE, __cloc)); -+ _M_data->_M_neg_format = _S_construct_pattern(__nprecedes, __nspace, -+ __nposn); -+ } -+ } -+ -+ template<> -+ void -+ moneypunct::_M_initialize_moneypunct(__c_locale __cloc, -+ const char*) -+ { -+ if (!_M_data) -+ _M_data = new __moneypunct_cache; -+ -+ if (!__cloc) -+ { -+ // "C" locale -+ _M_data->_M_decimal_point = '.'; -+ _M_data->_M_thousands_sep = ','; -+ _M_data->_M_grouping = ""; -+ _M_data->_M_grouping_size = 0; -+ _M_data->_M_curr_symbol = ""; -+ _M_data->_M_curr_symbol_size = 0; -+ _M_data->_M_positive_sign = ""; -+ _M_data->_M_positive_sign_size = 0; -+ _M_data->_M_negative_sign = ""; -+ _M_data->_M_negative_sign_size = 0; -+ _M_data->_M_frac_digits = 0; -+ _M_data->_M_pos_format = money_base::_S_default_pattern; -+ _M_data->_M_neg_format = money_base::_S_default_pattern; -+ -+ for (size_t __i = 0; __i < money_base::_S_end; ++__i) -+ _M_data->_M_atoms[__i] = money_base::_S_atoms[__i]; -+ } -+ else -+ { -+ // Named locale. 
-+ _M_data->_M_decimal_point = *(__nl_langinfo_l(__MON_DECIMAL_POINT, -+ __cloc)); -+ _M_data->_M_thousands_sep = *(__nl_langinfo_l(__MON_THOUSANDS_SEP, -+ __cloc)); -+ _M_data->_M_grouping = __nl_langinfo_l(__MON_GROUPING, __cloc); -+ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); -+ _M_data->_M_positive_sign = __nl_langinfo_l(__POSITIVE_SIGN, __cloc); -+ _M_data->_M_positive_sign_size = strlen(_M_data->_M_positive_sign); -+ -+ char __nposn = *(__nl_langinfo_l(__N_SIGN_POSN, __cloc)); -+ if (!__nposn) -+ _M_data->_M_negative_sign = "()"; -+ else -+ _M_data->_M_negative_sign = __nl_langinfo_l(__NEGATIVE_SIGN, -+ __cloc); -+ _M_data->_M_negative_sign_size = strlen(_M_data->_M_negative_sign); -+ -+ // _Intl == false -+ _M_data->_M_curr_symbol = __nl_langinfo_l(__CURRENCY_SYMBOL, __cloc); -+ _M_data->_M_curr_symbol_size = strlen(_M_data->_M_curr_symbol); -+ _M_data->_M_frac_digits = *(__nl_langinfo_l(__FRAC_DIGITS, __cloc)); -+ char __pprecedes = *(__nl_langinfo_l(__P_CS_PRECEDES, __cloc)); -+ char __pspace = *(__nl_langinfo_l(__P_SEP_BY_SPACE, __cloc)); -+ char __pposn = *(__nl_langinfo_l(__P_SIGN_POSN, __cloc)); -+ _M_data->_M_pos_format = _S_construct_pattern(__pprecedes, __pspace, -+ __pposn); -+ char __nprecedes = *(__nl_langinfo_l(__N_CS_PRECEDES, __cloc)); -+ char __nspace = *(__nl_langinfo_l(__N_SEP_BY_SPACE, __cloc)); -+ _M_data->_M_neg_format = _S_construct_pattern(__nprecedes, __nspace, -+ __nposn); -+ } -+ } -+ -+ template<> -+ moneypunct::~moneypunct() -+ { delete _M_data; } -+ -+ template<> -+ moneypunct::~moneypunct() -+ { delete _M_data; } -+ -+#ifdef _GLIBCXX_USE_WCHAR_T -+ template<> -+ void -+ moneypunct::_M_initialize_moneypunct(__c_locale __cloc, -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ const char*) -+#else -+ const char* __name) -+#endif -+ { -+ if (!_M_data) -+ _M_data = new __moneypunct_cache; -+ -+ if (!__cloc) -+ { -+ // "C" locale -+ _M_data->_M_decimal_point = L'.'; -+ _M_data->_M_thousands_sep = L','; -+ _M_data->_M_grouping = ""; -+ _M_data->_M_grouping_size = 0; -+ _M_data->_M_curr_symbol = L""; -+ _M_data->_M_curr_symbol_size = 0; -+ _M_data->_M_positive_sign = L""; -+ _M_data->_M_positive_sign_size = 0; -+ _M_data->_M_negative_sign = L""; -+ _M_data->_M_negative_sign_size = 0; -+ _M_data->_M_frac_digits = 0; -+ _M_data->_M_pos_format = money_base::_S_default_pattern; -+ _M_data->_M_neg_format = money_base::_S_default_pattern; -+ -+ // Use ctype::widen code without the facet... -+ for (size_t __i = 0; __i < money_base::_S_end; ++__i) -+ _M_data->_M_atoms[__i] = -+ static_cast(money_base::_S_atoms[__i]); -+ } -+ else -+ { -+ // Named locale. -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(__cloc); -+#else -+ // Switch to named locale so that mbsrtowcs will work. -+ char* __old = strdup(setlocale(LC_ALL, NULL)); -+ setlocale(LC_ALL, __name); -+#endif -+ -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning fix this... 
should be monetary -+#endif -+#ifdef __UCLIBC__ -+# ifdef __UCLIBC_HAS_XLOCALE__ -+ _M_data->_M_decimal_point = __cloc->decimal_point_wc; -+ _M_data->_M_thousands_sep = __cloc->thousands_sep_wc; -+# else -+ _M_data->_M_decimal_point = __global_locale->decimal_point_wc; -+ _M_data->_M_thousands_sep = __global_locale->thousands_sep_wc; -+# endif -+#else -+ union { char *__s; wchar_t __w; } __u; -+ __u.__s = __nl_langinfo_l(_NL_MONETARY_DECIMAL_POINT_WC, __cloc); -+ _M_data->_M_decimal_point = __u.__w; -+ -+ __u.__s = __nl_langinfo_l(_NL_MONETARY_THOUSANDS_SEP_WC, __cloc); -+ _M_data->_M_thousands_sep = __u.__w; -+#endif -+ _M_data->_M_grouping = __nl_langinfo_l(__MON_GROUPING, __cloc); -+ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); -+ -+ const char* __cpossign = __nl_langinfo_l(__POSITIVE_SIGN, __cloc); -+ const char* __cnegsign = __nl_langinfo_l(__NEGATIVE_SIGN, __cloc); -+ const char* __ccurr = __nl_langinfo_l(__INT_CURR_SYMBOL, __cloc); -+ -+ wchar_t* __wcs_ps = 0; -+ wchar_t* __wcs_ns = 0; -+ const char __nposn = *(__nl_langinfo_l(__INT_N_SIGN_POSN, __cloc)); -+ try -+ { -+ mbstate_t __state; -+ size_t __len = strlen(__cpossign); -+ if (__len) -+ { -+ ++__len; -+ memset(&__state, 0, sizeof(mbstate_t)); -+ __wcs_ps = new wchar_t[__len]; -+ mbsrtowcs(__wcs_ps, &__cpossign, __len, &__state); -+ _M_data->_M_positive_sign = __wcs_ps; -+ } -+ else -+ _M_data->_M_positive_sign = L""; -+ _M_data->_M_positive_sign_size = wcslen(_M_data->_M_positive_sign); -+ -+ __len = strlen(__cnegsign); -+ if (!__nposn) -+ _M_data->_M_negative_sign = L"()"; -+ else if (__len) -+ { -+ ++__len; -+ memset(&__state, 0, sizeof(mbstate_t)); -+ __wcs_ns = new wchar_t[__len]; -+ mbsrtowcs(__wcs_ns, &__cnegsign, __len, &__state); -+ _M_data->_M_negative_sign = __wcs_ns; -+ } -+ else -+ _M_data->_M_negative_sign = L""; -+ _M_data->_M_negative_sign_size = wcslen(_M_data->_M_negative_sign); -+ -+ // _Intl == true. -+ __len = strlen(__ccurr); -+ if (__len) -+ { -+ ++__len; -+ memset(&__state, 0, sizeof(mbstate_t)); -+ wchar_t* __wcs = new wchar_t[__len]; -+ mbsrtowcs(__wcs, &__ccurr, __len, &__state); -+ _M_data->_M_curr_symbol = __wcs; -+ } -+ else -+ _M_data->_M_curr_symbol = L""; -+ _M_data->_M_curr_symbol_size = wcslen(_M_data->_M_curr_symbol); -+ } -+ catch (...) 
-+ { -+ delete _M_data; -+ _M_data = 0; -+ delete __wcs_ps; -+ delete __wcs_ns; -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#else -+ setlocale(LC_ALL, __old); -+ free(__old); -+#endif -+ __throw_exception_again; -+ } -+ -+ _M_data->_M_frac_digits = *(__nl_langinfo_l(__INT_FRAC_DIGITS, -+ __cloc)); -+ char __pprecedes = *(__nl_langinfo_l(__INT_P_CS_PRECEDES, __cloc)); -+ char __pspace = *(__nl_langinfo_l(__INT_P_SEP_BY_SPACE, __cloc)); -+ char __pposn = *(__nl_langinfo_l(__INT_P_SIGN_POSN, __cloc)); -+ _M_data->_M_pos_format = _S_construct_pattern(__pprecedes, __pspace, -+ __pposn); -+ char __nprecedes = *(__nl_langinfo_l(__INT_N_CS_PRECEDES, __cloc)); -+ char __nspace = *(__nl_langinfo_l(__INT_N_SEP_BY_SPACE, __cloc)); -+ _M_data->_M_neg_format = _S_construct_pattern(__nprecedes, __nspace, -+ __nposn); -+ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#else -+ setlocale(LC_ALL, __old); -+ free(__old); -+#endif -+ } -+ } -+ -+ template<> -+ void -+ moneypunct::_M_initialize_moneypunct(__c_locale __cloc, -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ const char*) -+#else -+ const char* __name) -+#endif -+ { -+ if (!_M_data) -+ _M_data = new __moneypunct_cache; -+ -+ if (!__cloc) -+ { -+ // "C" locale -+ _M_data->_M_decimal_point = L'.'; -+ _M_data->_M_thousands_sep = L','; -+ _M_data->_M_grouping = ""; -+ _M_data->_M_grouping_size = 0; -+ _M_data->_M_curr_symbol = L""; -+ _M_data->_M_curr_symbol_size = 0; -+ _M_data->_M_positive_sign = L""; -+ _M_data->_M_positive_sign_size = 0; -+ _M_data->_M_negative_sign = L""; -+ _M_data->_M_negative_sign_size = 0; -+ _M_data->_M_frac_digits = 0; -+ _M_data->_M_pos_format = money_base::_S_default_pattern; -+ _M_data->_M_neg_format = money_base::_S_default_pattern; -+ -+ // Use ctype::widen code without the facet... -+ for (size_t __i = 0; __i < money_base::_S_end; ++__i) -+ _M_data->_M_atoms[__i] = -+ static_cast(money_base::_S_atoms[__i]); -+ } -+ else -+ { -+ // Named locale. -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __c_locale __old = __uselocale(__cloc); -+#else -+ // Switch to named locale so that mbsrtowcs will work. -+ char* __old = strdup(setlocale(LC_ALL, NULL)); -+ setlocale(LC_ALL, __name); -+#endif -+ -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning fix this... 
should be monetary -+#endif -+#ifdef __UCLIBC__ -+# ifdef __UCLIBC_HAS_XLOCALE__ -+ _M_data->_M_decimal_point = __cloc->decimal_point_wc; -+ _M_data->_M_thousands_sep = __cloc->thousands_sep_wc; -+# else -+ _M_data->_M_decimal_point = __global_locale->decimal_point_wc; -+ _M_data->_M_thousands_sep = __global_locale->thousands_sep_wc; -+# endif -+#else -+ union { char *__s; wchar_t __w; } __u; -+ __u.__s = __nl_langinfo_l(_NL_MONETARY_DECIMAL_POINT_WC, __cloc); -+ _M_data->_M_decimal_point = __u.__w; -+ -+ __u.__s = __nl_langinfo_l(_NL_MONETARY_THOUSANDS_SEP_WC, __cloc); -+ _M_data->_M_thousands_sep = __u.__w; -+#endif -+ _M_data->_M_grouping = __nl_langinfo_l(__MON_GROUPING, __cloc); -+ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); -+ -+ const char* __cpossign = __nl_langinfo_l(__POSITIVE_SIGN, __cloc); -+ const char* __cnegsign = __nl_langinfo_l(__NEGATIVE_SIGN, __cloc); -+ const char* __ccurr = __nl_langinfo_l(__CURRENCY_SYMBOL, __cloc); -+ -+ wchar_t* __wcs_ps = 0; -+ wchar_t* __wcs_ns = 0; -+ const char __nposn = *(__nl_langinfo_l(__N_SIGN_POSN, __cloc)); -+ try -+ { -+ mbstate_t __state; -+ size_t __len; -+ __len = strlen(__cpossign); -+ if (__len) -+ { -+ ++__len; -+ memset(&__state, 0, sizeof(mbstate_t)); -+ __wcs_ps = new wchar_t[__len]; -+ mbsrtowcs(__wcs_ps, &__cpossign, __len, &__state); -+ _M_data->_M_positive_sign = __wcs_ps; -+ } -+ else -+ _M_data->_M_positive_sign = L""; -+ _M_data->_M_positive_sign_size = wcslen(_M_data->_M_positive_sign); -+ -+ __len = strlen(__cnegsign); -+ if (!__nposn) -+ _M_data->_M_negative_sign = L"()"; -+ else if (__len) -+ { -+ ++__len; -+ memset(&__state, 0, sizeof(mbstate_t)); -+ __wcs_ns = new wchar_t[__len]; -+ mbsrtowcs(__wcs_ns, &__cnegsign, __len, &__state); -+ _M_data->_M_negative_sign = __wcs_ns; -+ } -+ else -+ _M_data->_M_negative_sign = L""; -+ _M_data->_M_negative_sign_size = wcslen(_M_data->_M_negative_sign); -+ -+ // _Intl == true. -+ __len = strlen(__ccurr); -+ if (__len) -+ { -+ ++__len; -+ memset(&__state, 0, sizeof(mbstate_t)); -+ wchar_t* __wcs = new wchar_t[__len]; -+ mbsrtowcs(__wcs, &__ccurr, __len, &__state); -+ _M_data->_M_curr_symbol = __wcs; -+ } -+ else -+ _M_data->_M_curr_symbol = L""; -+ _M_data->_M_curr_symbol_size = wcslen(_M_data->_M_curr_symbol); -+ } -+ catch (...) 
-+ { -+ delete _M_data; -+ _M_data = 0; -+ delete __wcs_ps; -+ delete __wcs_ns; -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#else -+ setlocale(LC_ALL, __old); -+ free(__old); -+#endif -+ __throw_exception_again; -+ } -+ -+ _M_data->_M_frac_digits = *(__nl_langinfo_l(__FRAC_DIGITS, __cloc)); -+ char __pprecedes = *(__nl_langinfo_l(__P_CS_PRECEDES, __cloc)); -+ char __pspace = *(__nl_langinfo_l(__P_SEP_BY_SPACE, __cloc)); -+ char __pposn = *(__nl_langinfo_l(__P_SIGN_POSN, __cloc)); -+ _M_data->_M_pos_format = _S_construct_pattern(__pprecedes, __pspace, -+ __pposn); -+ char __nprecedes = *(__nl_langinfo_l(__N_CS_PRECEDES, __cloc)); -+ char __nspace = *(__nl_langinfo_l(__N_SEP_BY_SPACE, __cloc)); -+ _M_data->_M_neg_format = _S_construct_pattern(__nprecedes, __nspace, -+ __nposn); -+ -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __uselocale(__old); -+#else -+ setlocale(LC_ALL, __old); -+ free(__old); -+#endif -+ } -+ } -+ -+ template<> -+ moneypunct::~moneypunct() -+ { -+ if (_M_data->_M_positive_sign_size) -+ delete [] _M_data->_M_positive_sign; -+ if (_M_data->_M_negative_sign_size -+ && wcscmp(_M_data->_M_negative_sign, L"()") != 0) -+ delete [] _M_data->_M_negative_sign; -+ if (_M_data->_M_curr_symbol_size) -+ delete [] _M_data->_M_curr_symbol; -+ delete _M_data; -+ } -+ -+ template<> -+ moneypunct::~moneypunct() -+ { -+ if (_M_data->_M_positive_sign_size) -+ delete [] _M_data->_M_positive_sign; -+ if (_M_data->_M_negative_sign_size -+ && wcscmp(_M_data->_M_negative_sign, L"()") != 0) -+ delete [] _M_data->_M_negative_sign; -+ if (_M_data->_M_curr_symbol_size) -+ delete [] _M_data->_M_curr_symbol; -+ delete _M_data; -+ } -+#endif -+} -diff --git a/libstdc++-v3/config/locale/uclibc/numeric_members.cc b/libstdc++-v3/config/locale/uclibc/numeric_members.cc -new file mode 100644 -index 0000000..883ec1a ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/numeric_members.cc -@@ -0,0 +1,160 @@ -+// std::numpunct implementation details, GNU version -*- C++ -*- -+ -+// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. -+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. 
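The wchar_t moneypunct initialisation above spends most of its effort widening the narrow strings reported by the C locale: each sign and currency string is converted with mbsrtowcs into a heap buffer the facet then owns, with a try/catch to release everything if an allocation or conversion fails. A compact sketch of that step, using std::wstring in place of the raw new[]/delete[] ownership the original performs:

#include <clocale>
#include <cstring>
#include <cwchar>
#include <stdexcept>
#include <string>

std::wstring widen(const char* narrow)
{
    const std::size_t len = std::strlen(narrow) + 1;   // room for the terminator
    std::wstring out(len, L'\0');
    std::mbstate_t state{};
    const char* src = narrow;
    const std::size_t n = std::mbsrtowcs(&out[0], &src, len, &state);
    if (n == static_cast<std::size_t>(-1))
        throw std::runtime_error("invalid multibyte sequence");
    out.resize(n);
    return out;
}

int main()
{
    std::setlocale(LC_ALL, "");
    return widen("$").size() == 1 ? 0 : 1;
}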
-+ -+// -+// ISO C++ 14882: 22.2.3.1.2 numpunct virtual functions -+// -+ -+// Written by Benjamin Kosnik -+ -+#define _LIBC -+#include -+#undef _LIBC -+#include -+ -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning tailor for stub locale support -+#endif -+#ifndef __UCLIBC_HAS_XLOCALE__ -+#define __nl_langinfo_l(N, L) nl_langinfo((N)) -+#endif -+ -+namespace std -+{ -+ template<> -+ void -+ numpunct::_M_initialize_numpunct(__c_locale __cloc) -+ { -+ if (!_M_data) -+ _M_data = new __numpunct_cache; -+ -+ if (!__cloc) -+ { -+ // "C" locale -+ _M_data->_M_grouping = ""; -+ _M_data->_M_grouping_size = 0; -+ _M_data->_M_use_grouping = false; -+ -+ _M_data->_M_decimal_point = '.'; -+ _M_data->_M_thousands_sep = ','; -+ -+ for (size_t __i = 0; __i < __num_base::_S_oend; ++__i) -+ _M_data->_M_atoms_out[__i] = __num_base::_S_atoms_out[__i]; -+ -+ for (size_t __j = 0; __j < __num_base::_S_iend; ++__j) -+ _M_data->_M_atoms_in[__j] = __num_base::_S_atoms_in[__j]; -+ } -+ else -+ { -+ // Named locale. -+ _M_data->_M_decimal_point = *(__nl_langinfo_l(DECIMAL_POINT, -+ __cloc)); -+ _M_data->_M_thousands_sep = *(__nl_langinfo_l(THOUSANDS_SEP, -+ __cloc)); -+ -+ // Check for NULL, which implies no grouping. -+ if (_M_data->_M_thousands_sep == '\0') -+ _M_data->_M_grouping = ""; -+ else -+ _M_data->_M_grouping = __nl_langinfo_l(GROUPING, __cloc); -+ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); -+ } -+ -+ // NB: There is no way to extact this info from posix locales. -+ // _M_truename = __nl_langinfo_l(YESSTR, __cloc); -+ _M_data->_M_truename = "true"; -+ _M_data->_M_truename_size = 4; -+ // _M_falsename = __nl_langinfo_l(NOSTR, __cloc); -+ _M_data->_M_falsename = "false"; -+ _M_data->_M_falsename_size = 5; -+ } -+ -+ template<> -+ numpunct::~numpunct() -+ { delete _M_data; } -+ -+#ifdef _GLIBCXX_USE_WCHAR_T -+ template<> -+ void -+ numpunct::_M_initialize_numpunct(__c_locale __cloc) -+ { -+ if (!_M_data) -+ _M_data = new __numpunct_cache; -+ -+ if (!__cloc) -+ { -+ // "C" locale -+ _M_data->_M_grouping = ""; -+ _M_data->_M_grouping_size = 0; -+ _M_data->_M_use_grouping = false; -+ -+ _M_data->_M_decimal_point = L'.'; -+ _M_data->_M_thousands_sep = L','; -+ -+ // Use ctype::widen code without the facet... -+ for (size_t __i = 0; __i < __num_base::_S_oend; ++__i) -+ _M_data->_M_atoms_out[__i] = -+ static_cast(__num_base::_S_atoms_out[__i]); -+ -+ for (size_t __j = 0; __j < __num_base::_S_iend; ++__j) -+ _M_data->_M_atoms_in[__j] = -+ static_cast(__num_base::_S_atoms_in[__j]); -+ } -+ else -+ { -+ // Named locale. -+ // NB: In the GNU model wchar_t is always 32 bit wide. -+ union { char *__s; wchar_t __w; } __u; -+ __u.__s = __nl_langinfo_l(_NL_NUMERIC_DECIMAL_POINT_WC, __cloc); -+ _M_data->_M_decimal_point = __u.__w; -+ -+ __u.__s = __nl_langinfo_l(_NL_NUMERIC_THOUSANDS_SEP_WC, __cloc); -+ _M_data->_M_thousands_sep = __u.__w; -+ -+ if (_M_data->_M_thousands_sep == L'\0') -+ _M_data->_M_grouping = ""; -+ else -+ _M_data->_M_grouping = __nl_langinfo_l(GROUPING, __cloc); -+ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); -+ } -+ -+ // NB: There is no way to extact this info from posix locales. 
-+ // _M_truename = __nl_langinfo_l(YESSTR, __cloc); -+ _M_data->_M_truename = L"true"; -+ _M_data->_M_truename_size = 4; -+ // _M_falsename = __nl_langinfo_l(NOSTR, __cloc); -+ _M_data->_M_falsename = L"false"; -+ _M_data->_M_falsename_size = 5; -+ } -+ -+ template<> -+ numpunct::~numpunct() -+ { delete _M_data; } -+ #endif -+} -diff --git a/libstdc++-v3/config/locale/uclibc/time_members.cc b/libstdc++-v3/config/locale/uclibc/time_members.cc -new file mode 100644 -index 0000000..e0707d7 ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/time_members.cc -@@ -0,0 +1,406 @@ -+// std::time_get, std::time_put implementation, GNU version -*- C++ -*- -+ -+// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. -+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. -+ -+// -+// ISO C++ 14882: 22.2.5.1.2 - time_get virtual functions -+// ISO C++ 14882: 22.2.5.3.2 - time_put virtual functions -+// -+ -+// Written by Benjamin Kosnik -+ -+#include -+#include -+ -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning tailor for stub locale support -+#endif -+#ifndef __UCLIBC_HAS_XLOCALE__ -+#define __nl_langinfo_l(N, L) nl_langinfo((N)) -+#endif -+ -+namespace std -+{ -+ template<> -+ void -+ __timepunct:: -+ _M_put(char* __s, size_t __maxlen, const char* __format, -+ const tm* __tm) const -+ { -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ const size_t __len = __strftime_l(__s, __maxlen, __format, __tm, -+ _M_c_locale_timepunct); -+#else -+ char* __old = strdup(setlocale(LC_ALL, NULL)); -+ setlocale(LC_ALL, _M_name_timepunct); -+ const size_t __len = strftime(__s, __maxlen, __format, __tm); -+ setlocale(LC_ALL, __old); -+ free(__old); -+#endif -+ // Make sure __s is null terminated. 
-+ if (__len == 0) -+ __s[0] = '\0'; -+ } -+ -+ template<> -+ void -+ __timepunct::_M_initialize_timepunct(__c_locale __cloc) -+ { -+ if (!_M_data) -+ _M_data = new __timepunct_cache; -+ -+ if (!__cloc) -+ { -+ // "C" locale -+ _M_c_locale_timepunct = _S_get_c_locale(); -+ -+ _M_data->_M_date_format = "%m/%d/%y"; -+ _M_data->_M_date_era_format = "%m/%d/%y"; -+ _M_data->_M_time_format = "%H:%M:%S"; -+ _M_data->_M_time_era_format = "%H:%M:%S"; -+ _M_data->_M_date_time_format = ""; -+ _M_data->_M_date_time_era_format = ""; -+ _M_data->_M_am = "AM"; -+ _M_data->_M_pm = "PM"; -+ _M_data->_M_am_pm_format = ""; -+ -+ // Day names, starting with "C"'s Sunday. -+ _M_data->_M_day1 = "Sunday"; -+ _M_data->_M_day2 = "Monday"; -+ _M_data->_M_day3 = "Tuesday"; -+ _M_data->_M_day4 = "Wednesday"; -+ _M_data->_M_day5 = "Thursday"; -+ _M_data->_M_day6 = "Friday"; -+ _M_data->_M_day7 = "Saturday"; -+ -+ // Abbreviated day names, starting with "C"'s Sun. -+ _M_data->_M_aday1 = "Sun"; -+ _M_data->_M_aday2 = "Mon"; -+ _M_data->_M_aday3 = "Tue"; -+ _M_data->_M_aday4 = "Wed"; -+ _M_data->_M_aday5 = "Thu"; -+ _M_data->_M_aday6 = "Fri"; -+ _M_data->_M_aday7 = "Sat"; -+ -+ // Month names, starting with "C"'s January. -+ _M_data->_M_month01 = "January"; -+ _M_data->_M_month02 = "February"; -+ _M_data->_M_month03 = "March"; -+ _M_data->_M_month04 = "April"; -+ _M_data->_M_month05 = "May"; -+ _M_data->_M_month06 = "June"; -+ _M_data->_M_month07 = "July"; -+ _M_data->_M_month08 = "August"; -+ _M_data->_M_month09 = "September"; -+ _M_data->_M_month10 = "October"; -+ _M_data->_M_month11 = "November"; -+ _M_data->_M_month12 = "December"; -+ -+ // Abbreviated month names, starting with "C"'s Jan. -+ _M_data->_M_amonth01 = "Jan"; -+ _M_data->_M_amonth02 = "Feb"; -+ _M_data->_M_amonth03 = "Mar"; -+ _M_data->_M_amonth04 = "Apr"; -+ _M_data->_M_amonth05 = "May"; -+ _M_data->_M_amonth06 = "Jun"; -+ _M_data->_M_amonth07 = "Jul"; -+ _M_data->_M_amonth08 = "Aug"; -+ _M_data->_M_amonth09 = "Sep"; -+ _M_data->_M_amonth10 = "Oct"; -+ _M_data->_M_amonth11 = "Nov"; -+ _M_data->_M_amonth12 = "Dec"; -+ } -+ else -+ { -+ _M_c_locale_timepunct = _S_clone_c_locale(__cloc); -+ -+ _M_data->_M_date_format = __nl_langinfo_l(D_FMT, __cloc); -+ _M_data->_M_date_era_format = __nl_langinfo_l(ERA_D_FMT, __cloc); -+ _M_data->_M_time_format = __nl_langinfo_l(T_FMT, __cloc); -+ _M_data->_M_time_era_format = __nl_langinfo_l(ERA_T_FMT, __cloc); -+ _M_data->_M_date_time_format = __nl_langinfo_l(D_T_FMT, __cloc); -+ _M_data->_M_date_time_era_format = __nl_langinfo_l(ERA_D_T_FMT, -+ __cloc); -+ _M_data->_M_am = __nl_langinfo_l(AM_STR, __cloc); -+ _M_data->_M_pm = __nl_langinfo_l(PM_STR, __cloc); -+ _M_data->_M_am_pm_format = __nl_langinfo_l(T_FMT_AMPM, __cloc); -+ -+ // Day names, starting with "C"'s Sunday. -+ _M_data->_M_day1 = __nl_langinfo_l(DAY_1, __cloc); -+ _M_data->_M_day2 = __nl_langinfo_l(DAY_2, __cloc); -+ _M_data->_M_day3 = __nl_langinfo_l(DAY_3, __cloc); -+ _M_data->_M_day4 = __nl_langinfo_l(DAY_4, __cloc); -+ _M_data->_M_day5 = __nl_langinfo_l(DAY_5, __cloc); -+ _M_data->_M_day6 = __nl_langinfo_l(DAY_6, __cloc); -+ _M_data->_M_day7 = __nl_langinfo_l(DAY_7, __cloc); -+ -+ // Abbreviated day names, starting with "C"'s Sun. 
-+ _M_data->_M_aday1 = __nl_langinfo_l(ABDAY_1, __cloc); -+ _M_data->_M_aday2 = __nl_langinfo_l(ABDAY_2, __cloc); -+ _M_data->_M_aday3 = __nl_langinfo_l(ABDAY_3, __cloc); -+ _M_data->_M_aday4 = __nl_langinfo_l(ABDAY_4, __cloc); -+ _M_data->_M_aday5 = __nl_langinfo_l(ABDAY_5, __cloc); -+ _M_data->_M_aday6 = __nl_langinfo_l(ABDAY_6, __cloc); -+ _M_data->_M_aday7 = __nl_langinfo_l(ABDAY_7, __cloc); -+ -+ // Month names, starting with "C"'s January. -+ _M_data->_M_month01 = __nl_langinfo_l(MON_1, __cloc); -+ _M_data->_M_month02 = __nl_langinfo_l(MON_2, __cloc); -+ _M_data->_M_month03 = __nl_langinfo_l(MON_3, __cloc); -+ _M_data->_M_month04 = __nl_langinfo_l(MON_4, __cloc); -+ _M_data->_M_month05 = __nl_langinfo_l(MON_5, __cloc); -+ _M_data->_M_month06 = __nl_langinfo_l(MON_6, __cloc); -+ _M_data->_M_month07 = __nl_langinfo_l(MON_7, __cloc); -+ _M_data->_M_month08 = __nl_langinfo_l(MON_8, __cloc); -+ _M_data->_M_month09 = __nl_langinfo_l(MON_9, __cloc); -+ _M_data->_M_month10 = __nl_langinfo_l(MON_10, __cloc); -+ _M_data->_M_month11 = __nl_langinfo_l(MON_11, __cloc); -+ _M_data->_M_month12 = __nl_langinfo_l(MON_12, __cloc); -+ -+ // Abbreviated month names, starting with "C"'s Jan. -+ _M_data->_M_amonth01 = __nl_langinfo_l(ABMON_1, __cloc); -+ _M_data->_M_amonth02 = __nl_langinfo_l(ABMON_2, __cloc); -+ _M_data->_M_amonth03 = __nl_langinfo_l(ABMON_3, __cloc); -+ _M_data->_M_amonth04 = __nl_langinfo_l(ABMON_4, __cloc); -+ _M_data->_M_amonth05 = __nl_langinfo_l(ABMON_5, __cloc); -+ _M_data->_M_amonth06 = __nl_langinfo_l(ABMON_6, __cloc); -+ _M_data->_M_amonth07 = __nl_langinfo_l(ABMON_7, __cloc); -+ _M_data->_M_amonth08 = __nl_langinfo_l(ABMON_8, __cloc); -+ _M_data->_M_amonth09 = __nl_langinfo_l(ABMON_9, __cloc); -+ _M_data->_M_amonth10 = __nl_langinfo_l(ABMON_10, __cloc); -+ _M_data->_M_amonth11 = __nl_langinfo_l(ABMON_11, __cloc); -+ _M_data->_M_amonth12 = __nl_langinfo_l(ABMON_12, __cloc); -+ } -+ } -+ -+#ifdef _GLIBCXX_USE_WCHAR_T -+ template<> -+ void -+ __timepunct:: -+ _M_put(wchar_t* __s, size_t __maxlen, const wchar_t* __format, -+ const tm* __tm) const -+ { -+#ifdef __UCLIBC_HAS_XLOCALE__ -+ __wcsftime_l(__s, __maxlen, __format, __tm, _M_c_locale_timepunct); -+ const size_t __len = __wcsftime_l(__s, __maxlen, __format, __tm, -+ _M_c_locale_timepunct); -+#else -+ char* __old = strdup(setlocale(LC_ALL, NULL)); -+ setlocale(LC_ALL, _M_name_timepunct); -+ const size_t __len = wcsftime(__s, __maxlen, __format, __tm); -+ setlocale(LC_ALL, __old); -+ free(__old); -+#endif -+ // Make sure __s is null terminated. -+ if (__len == 0) -+ __s[0] = L'\0'; -+ } -+ -+ template<> -+ void -+ __timepunct::_M_initialize_timepunct(__c_locale __cloc) -+ { -+ if (!_M_data) -+ _M_data = new __timepunct_cache; -+ -+#warning wide time stuff -+// if (!__cloc) -+ { -+ // "C" locale -+ _M_c_locale_timepunct = _S_get_c_locale(); -+ -+ _M_data->_M_date_format = L"%m/%d/%y"; -+ _M_data->_M_date_era_format = L"%m/%d/%y"; -+ _M_data->_M_time_format = L"%H:%M:%S"; -+ _M_data->_M_time_era_format = L"%H:%M:%S"; -+ _M_data->_M_date_time_format = L""; -+ _M_data->_M_date_time_era_format = L""; -+ _M_data->_M_am = L"AM"; -+ _M_data->_M_pm = L"PM"; -+ _M_data->_M_am_pm_format = L""; -+ -+ // Day names, starting with "C"'s Sunday. -+ _M_data->_M_day1 = L"Sunday"; -+ _M_data->_M_day2 = L"Monday"; -+ _M_data->_M_day3 = L"Tuesday"; -+ _M_data->_M_day4 = L"Wednesday"; -+ _M_data->_M_day5 = L"Thursday"; -+ _M_data->_M_day6 = L"Friday"; -+ _M_data->_M_day7 = L"Saturday"; -+ -+ // Abbreviated day names, starting with "C"'s Sun. 
-+ _M_data->_M_aday1 = L"Sun"; -+ _M_data->_M_aday2 = L"Mon"; -+ _M_data->_M_aday3 = L"Tue"; -+ _M_data->_M_aday4 = L"Wed"; -+ _M_data->_M_aday5 = L"Thu"; -+ _M_data->_M_aday6 = L"Fri"; -+ _M_data->_M_aday7 = L"Sat"; -+ -+ // Month names, starting with "C"'s January. -+ _M_data->_M_month01 = L"January"; -+ _M_data->_M_month02 = L"February"; -+ _M_data->_M_month03 = L"March"; -+ _M_data->_M_month04 = L"April"; -+ _M_data->_M_month05 = L"May"; -+ _M_data->_M_month06 = L"June"; -+ _M_data->_M_month07 = L"July"; -+ _M_data->_M_month08 = L"August"; -+ _M_data->_M_month09 = L"September"; -+ _M_data->_M_month10 = L"October"; -+ _M_data->_M_month11 = L"November"; -+ _M_data->_M_month12 = L"December"; -+ -+ // Abbreviated month names, starting with "C"'s Jan. -+ _M_data->_M_amonth01 = L"Jan"; -+ _M_data->_M_amonth02 = L"Feb"; -+ _M_data->_M_amonth03 = L"Mar"; -+ _M_data->_M_amonth04 = L"Apr"; -+ _M_data->_M_amonth05 = L"May"; -+ _M_data->_M_amonth06 = L"Jun"; -+ _M_data->_M_amonth07 = L"Jul"; -+ _M_data->_M_amonth08 = L"Aug"; -+ _M_data->_M_amonth09 = L"Sep"; -+ _M_data->_M_amonth10 = L"Oct"; -+ _M_data->_M_amonth11 = L"Nov"; -+ _M_data->_M_amonth12 = L"Dec"; -+ } -+#if 0 -+ else -+ { -+ _M_c_locale_timepunct = _S_clone_c_locale(__cloc); -+ -+ union { char *__s; wchar_t *__w; } __u; -+ -+ __u.__s = __nl_langinfo_l(_NL_WD_FMT, __cloc); -+ _M_data->_M_date_format = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WERA_D_FMT, __cloc); -+ _M_data->_M_date_era_format = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WT_FMT, __cloc); -+ _M_data->_M_time_format = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WERA_T_FMT, __cloc); -+ _M_data->_M_time_era_format = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WD_T_FMT, __cloc); -+ _M_data->_M_date_time_format = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WERA_D_T_FMT, __cloc); -+ _M_data->_M_date_time_era_format = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WAM_STR, __cloc); -+ _M_data->_M_am = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WPM_STR, __cloc); -+ _M_data->_M_pm = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WT_FMT_AMPM, __cloc); -+ _M_data->_M_am_pm_format = __u.__w; -+ -+ // Day names, starting with "C"'s Sunday. -+ __u.__s = __nl_langinfo_l(_NL_WDAY_1, __cloc); -+ _M_data->_M_day1 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WDAY_2, __cloc); -+ _M_data->_M_day2 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WDAY_3, __cloc); -+ _M_data->_M_day3 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WDAY_4, __cloc); -+ _M_data->_M_day4 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WDAY_5, __cloc); -+ _M_data->_M_day5 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WDAY_6, __cloc); -+ _M_data->_M_day6 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WDAY_7, __cloc); -+ _M_data->_M_day7 = __u.__w; -+ -+ // Abbreviated day names, starting with "C"'s Sun. -+ __u.__s = __nl_langinfo_l(_NL_WABDAY_1, __cloc); -+ _M_data->_M_aday1 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABDAY_2, __cloc); -+ _M_data->_M_aday2 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABDAY_3, __cloc); -+ _M_data->_M_aday3 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABDAY_4, __cloc); -+ _M_data->_M_aday4 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABDAY_5, __cloc); -+ _M_data->_M_aday5 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABDAY_6, __cloc); -+ _M_data->_M_aday6 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABDAY_7, __cloc); -+ _M_data->_M_aday7 = __u.__w; -+ -+ // Month names, starting with "C"'s January. 
-+ __u.__s = __nl_langinfo_l(_NL_WMON_1, __cloc); -+ _M_data->_M_month01 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_2, __cloc); -+ _M_data->_M_month02 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_3, __cloc); -+ _M_data->_M_month03 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_4, __cloc); -+ _M_data->_M_month04 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_5, __cloc); -+ _M_data->_M_month05 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_6, __cloc); -+ _M_data->_M_month06 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_7, __cloc); -+ _M_data->_M_month07 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_8, __cloc); -+ _M_data->_M_month08 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_9, __cloc); -+ _M_data->_M_month09 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_10, __cloc); -+ _M_data->_M_month10 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_11, __cloc); -+ _M_data->_M_month11 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WMON_12, __cloc); -+ _M_data->_M_month12 = __u.__w; -+ -+ // Abbreviated month names, starting with "C"'s Jan. -+ __u.__s = __nl_langinfo_l(_NL_WABMON_1, __cloc); -+ _M_data->_M_amonth01 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_2, __cloc); -+ _M_data->_M_amonth02 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_3, __cloc); -+ _M_data->_M_amonth03 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_4, __cloc); -+ _M_data->_M_amonth04 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_5, __cloc); -+ _M_data->_M_amonth05 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_6, __cloc); -+ _M_data->_M_amonth06 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_7, __cloc); -+ _M_data->_M_amonth07 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_8, __cloc); -+ _M_data->_M_amonth08 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_9, __cloc); -+ _M_data->_M_amonth09 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_10, __cloc); -+ _M_data->_M_amonth10 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_11, __cloc); -+ _M_data->_M_amonth11 = __u.__w; -+ __u.__s = __nl_langinfo_l(_NL_WABMON_12, __cloc); -+ _M_data->_M_amonth12 = __u.__w; -+ } -+#endif // 0 -+ } -+#endif -+} -diff --git a/libstdc++-v3/config/locale/uclibc/time_members.h b/libstdc++-v3/config/locale/uclibc/time_members.h -new file mode 100644 -index 0000000..ba8e858 ---- /dev/null -+++ b/libstdc++-v3/config/locale/uclibc/time_members.h -@@ -0,0 +1,68 @@ -+// std::time_get, std::time_put implementation, GNU version -*- C++ -*- -+ -+// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. -+// -+// This file is part of the GNU ISO C++ Library. This library is free -+// software; you can redistribute it and/or modify it under the -+// terms of the GNU General Public License as published by the -+// Free Software Foundation; either version 2, or (at your option) -+// any later version. -+ -+// This library is distributed in the hope that it will be useful, -+// but WITHOUT ANY WARRANTY; without even the implied warranty of -+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+// GNU General Public License for more details. -+ -+// You should have received a copy of the GNU General Public License along -+// with this library; see the file COPYING. If not, write to the Free -+// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, -+// USA. -+ -+// As a special exception, you may use this file as part of a free software -+// library without restriction. 
Specifically, if other files instantiate -+// templates or use macros or inline functions from this file, or you compile -+// this file and link it with other files to produce an executable, this -+// file does not by itself cause the resulting executable to be covered by -+// the GNU General Public License. This exception does not however -+// invalidate any other reasons why the executable file might be covered by -+// the GNU General Public License. -+ -+// -+// ISO C++ 14882: 22.2.5.1.2 - time_get functions -+// ISO C++ 14882: 22.2.5.3.2 - time_put functions -+// -+ -+// Written by Benjamin Kosnik -+ -+ template -+ __timepunct<_CharT>::__timepunct(size_t __refs) -+ : facet(__refs), _M_data(NULL), _M_c_locale_timepunct(NULL), -+ _M_name_timepunct(_S_get_c_name()) -+ { _M_initialize_timepunct(); } -+ -+ template -+ __timepunct<_CharT>::__timepunct(__cache_type* __cache, size_t __refs) -+ : facet(__refs), _M_data(__cache), _M_c_locale_timepunct(NULL), -+ _M_name_timepunct(_S_get_c_name()) -+ { _M_initialize_timepunct(); } -+ -+ template -+ __timepunct<_CharT>::__timepunct(__c_locale __cloc, const char* __s, -+ size_t __refs) -+ : facet(__refs), _M_data(NULL), _M_c_locale_timepunct(NULL), -+ _M_name_timepunct(__s) -+ { -+ char* __tmp = new char[std::strlen(__s) + 1]; -+ std::strcpy(__tmp, __s); -+ _M_name_timepunct = __tmp; -+ _M_initialize_timepunct(__cloc); -+ } -+ -+ template -+ __timepunct<_CharT>::~__timepunct() -+ { -+ if (_M_name_timepunct != _S_get_c_name()) -+ delete [] _M_name_timepunct; -+ delete _M_data; -+ _S_destroy_c_locale(_M_c_locale_timepunct); -+ } -diff --git a/libstdc++-v3/configure b/libstdc++-v3/configure -index 41797a9..8a5481c 100755 ---- a/libstdc++-v3/configure -+++ b/libstdc++-v3/configure -@@ -15830,6 +15830,9 @@ fi - # Default to "generic". - if test $enable_clocale_flag = auto; then - case ${target_os} in -+ *-uclibc*) -+ enable_clocale_flag=uclibc -+ ;; - linux* | gnu* | kfreebsd*-gnu | knetbsd*-gnu) - enable_clocale_flag=gnu - ;; -@@ -16108,6 +16111,78 @@ $as_echo "newlib" >&6; } - CTIME_CC=config/locale/generic/time_members.cc - CLOCALE_INTERNAL_H=config/locale/generic/c++locale_internal.h - ;; -+ uclibc) -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: uclibc" >&5 -+$as_echo "uclibc" >&6; } -+ -+ # Declare intention to use gettext, and add support for specific -+ # languages. -+ # For some reason, ALL_LINGUAS has to be before AM-GNU-GETTEXT -+ ALL_LINGUAS="de fr" -+ -+ # Don't call AM-GNU-GETTEXT here. Instead, assume glibc. -+ # Extract the first word of "msgfmt", so it can be a program name with args. -+set dummy msgfmt; ac_word=$2 -+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -+$as_echo_n "checking for $ac_word... " >&6; } -+if test "${ac_cv_prog_check_msgfmt+set}" = set; then : -+ $as_echo_n "(cached) " >&6 -+else -+ if test -n "$check_msgfmt"; then -+ ac_cv_prog_check_msgfmt="$check_msgfmt" # Let the user override the test. -+else -+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -+for as_dir in $PATH -+do -+ IFS=$as_save_IFS -+ test -z "$as_dir" && as_dir=. 
-+ for ac_exec_ext in '' $ac_executable_extensions; do -+ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then -+ ac_cv_prog_check_msgfmt="yes" -+ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 -+ break 2 -+ fi -+done -+ done -+IFS=$as_save_IFS -+ -+ test -z "$ac_cv_prog_check_msgfmt" && ac_cv_prog_check_msgfmt="no" -+fi -+fi -+check_msgfmt=$ac_cv_prog_check_msgfmt -+if test -n "$check_msgfmt"; then -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $check_msgfmt" >&5 -+$as_echo "$check_msgfmt" >&6; } -+else -+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -+$as_echo "no" >&6; } -+fi -+ -+ -+ if test x"$check_msgfmt" = x"yes" && test x"$enable_nls" = x"yes"; then -+ USE_NLS=yes -+ fi -+ # Export the build objects. -+ for ling in $ALL_LINGUAS; do \ -+ glibcxx_MOFILES="$glibcxx_MOFILES $ling.mo"; \ -+ glibcxx_POFILES="$glibcxx_POFILES $ling.po"; \ -+ done -+ -+ -+ -+ CLOCALE_H=config/locale/uclibc/c_locale.h -+ CLOCALE_CC=config/locale/uclibc/c_locale.cc -+ CCODECVT_CC=config/locale/uclibc/codecvt_members.cc -+ CCOLLATE_CC=config/locale/uclibc/collate_members.cc -+ CCTYPE_CC=config/locale/uclibc/ctype_members.cc -+ CMESSAGES_H=config/locale/uclibc/messages_members.h -+ CMESSAGES_CC=config/locale/uclibc/messages_members.cc -+ CMONEY_CC=config/locale/uclibc/monetary_members.cc -+ CNUMERIC_CC=config/locale/uclibc/numeric_members.cc -+ CTIME_H=config/locale/uclibc/time_members.h -+ CTIME_CC=config/locale/uclibc/time_members.cc -+ CLOCALE_INTERNAL_H=config/locale/uclibc/c++locale_internal.h -+ ;; - esac - - # This is where the testsuite looks for locale catalogs, using the -diff --git a/libstdc++-v3/include/c_compatibility/wchar.h b/libstdc++-v3/include/c_compatibility/wchar.h -index 55a0b52..7d8bb15 100644 ---- a/libstdc++-v3/include/c_compatibility/wchar.h -+++ b/libstdc++-v3/include/c_compatibility/wchar.h -@@ -101,7 +101,9 @@ using std::wmemcmp; - using std::wmemcpy; - using std::wmemmove; - using std::wmemset; -+#if _GLIBCXX_HAVE_WCSFTIME - using std::wcsftime; -+#endif - - #if _GLIBCXX_USE_C99_WCHAR - using std::wcstold; -diff --git a/libstdc++-v3/include/c_std/cwchar b/libstdc++-v3/include/c_std/cwchar -index dc4cef02..256d126 100644 ---- a/libstdc++-v3/include/c_std/cwchar -+++ b/libstdc++-v3/include/c_std/cwchar -@@ -175,7 +175,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION - using ::wcscoll; - using ::wcscpy; - using ::wcscspn; -+#if _GLIBCXX_HAVE_WCSFTIME - using ::wcsftime; -+#endif - using ::wcslen; - using ::wcsncat; - using ::wcsncmp; --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0005-uclibc-locale-no__x.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0005-uclibc-locale-no__x.patch deleted file mode 100644 index 3275016e7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0005-uclibc-locale-no__x.patch +++ /dev/null @@ -1,257 +0,0 @@ -From c01c14e8e9be382ecd4121ee70f5003b4cb0f904 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 08:42:36 +0400 -Subject: [PATCH 05/46] uclibc-locale-no__x - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - .../config/locale/uclibc/c++locale_internal.h | 45 ++++++++++++++++++++++ - libstdc++-v3/config/locale/uclibc/c_locale.cc | 14 ------- - libstdc++-v3/config/locale/uclibc/c_locale.h | 1 + - .../config/locale/uclibc/collate_members.cc | 7 ---- - libstdc++-v3/config/locale/uclibc/ctype_members.cc | 7 ---- - .../config/locale/uclibc/messages_members.cc | 7 +--- - 
.../config/locale/uclibc/messages_members.h | 18 ++++----- - .../config/locale/uclibc/monetary_members.cc | 4 -- - .../config/locale/uclibc/numeric_members.cc | 3 -- - libstdc++-v3/config/locale/uclibc/time_members.cc | 3 -- - 10 files changed, 55 insertions(+), 54 deletions(-) - -diff --git a/libstdc++-v3/config/locale/uclibc/c++locale_internal.h b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h -index 2ae3e4a..e74fddf 100644 ---- a/libstdc++-v3/config/locale/uclibc/c++locale_internal.h -+++ b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h -@@ -60,4 +60,49 @@ extern "C" __typeof(wcsxfrm_l) __wcsxfrm_l; - extern "C" __typeof(wctype_l) __wctype_l; - #endif - -+# define __nl_langinfo_l nl_langinfo_l -+# define __strcoll_l strcoll_l -+# define __strftime_l strftime_l -+# define __strtod_l strtod_l -+# define __strtof_l strtof_l -+# define __strtold_l strtold_l -+# define __strxfrm_l strxfrm_l -+# define __newlocale newlocale -+# define __freelocale freelocale -+# define __duplocale duplocale -+# define __uselocale uselocale -+ -+# ifdef _GLIBCXX_USE_WCHAR_T -+# define __iswctype_l iswctype_l -+# define __towlower_l towlower_l -+# define __towupper_l towupper_l -+# define __wcscoll_l wcscoll_l -+# define __wcsftime_l wcsftime_l -+# define __wcsxfrm_l wcsxfrm_l -+# define __wctype_l wctype_l -+# endif -+ -+#else -+# define __nl_langinfo_l(N, L) nl_langinfo((N)) -+# define __strcoll_l(S1, S2, L) strcoll((S1), (S2)) -+# define __strtod_l(S, E, L) strtod((S), (E)) -+# define __strtof_l(S, E, L) strtof((S), (E)) -+# define __strtold_l(S, E, L) strtold((S), (E)) -+# define __strxfrm_l(S1, S2, N, L) strxfrm((S1), (S2), (N)) -+# warning should dummy __newlocale check for C|POSIX ? -+# define __newlocale(a, b, c) NULL -+# define __freelocale(a) ((void)0) -+# define __duplocale(a) __c_locale() -+//# define __uselocale ? -+// -+# ifdef _GLIBCXX_USE_WCHAR_T -+# define __iswctype_l(C, M, L) iswctype((C), (M)) -+# define __towlower_l(C, L) towlower((C)) -+# define __towupper_l(C, L) towupper((C)) -+# define __wcscoll_l(S1, S2, L) wcscoll((S1), (S2)) -+//# define __wcsftime_l(S, M, F, T, L) wcsftime((S), (M), (F), (T)) -+# define __wcsxfrm_l(S1, S2, N, L) wcsxfrm((S1), (S2), (N)) -+# define __wctype_l(S, L) wctype((S)) -+# endif -+ - #endif // GLIBC 2.3 and later -diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.cc b/libstdc++-v3/config/locale/uclibc/c_locale.cc -index 5081dc1..21430d0 100644 ---- a/libstdc++-v3/config/locale/uclibc/c_locale.cc -+++ b/libstdc++-v3/config/locale/uclibc/c_locale.cc -@@ -39,20 +39,6 @@ - #include - #include - --#ifndef __UCLIBC_HAS_XLOCALE__ --#define __strtol_l(S, E, B, L) strtol((S), (E), (B)) --#define __strtoul_l(S, E, B, L) strtoul((S), (E), (B)) --#define __strtoll_l(S, E, B, L) strtoll((S), (E), (B)) --#define __strtoull_l(S, E, B, L) strtoull((S), (E), (B)) --#define __strtof_l(S, E, L) strtof((S), (E)) --#define __strtod_l(S, E, L) strtod((S), (E)) --#define __strtold_l(S, E, L) strtold((S), (E)) --#warning should dummy __newlocale check for C|POSIX ? 
--#define __newlocale(a, b, c) NULL --#define __freelocale(a) ((void)0) --#define __duplocale(a) __c_locale() --#endif -- - namespace std - { - template<> -diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.h b/libstdc++-v3/config/locale/uclibc/c_locale.h -index da07c1f..4bca5f1 100644 ---- a/libstdc++-v3/config/locale/uclibc/c_locale.h -+++ b/libstdc++-v3/config/locale/uclibc/c_locale.h -@@ -68,6 +68,7 @@ namespace __gnu_cxx - { - extern "C" __typeof(uselocale) __uselocale; - } -+#define __uselocale uselocale - #endif - - namespace std -diff --git a/libstdc++-v3/config/locale/uclibc/collate_members.cc b/libstdc++-v3/config/locale/uclibc/collate_members.cc -index c2664a7..ec5c329 100644 ---- a/libstdc++-v3/config/locale/uclibc/collate_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/collate_members.cc -@@ -36,13 +36,6 @@ - #include - #include - --#ifndef __UCLIBC_HAS_XLOCALE__ --#define __strcoll_l(S1, S2, L) strcoll((S1), (S2)) --#define __strxfrm_l(S1, S2, N, L) strxfrm((S1), (S2), (N)) --#define __wcscoll_l(S1, S2, L) wcscoll((S1), (S2)) --#define __wcsxfrm_l(S1, S2, N, L) wcsxfrm((S1), (S2), (N)) --#endif -- - namespace std - { - // These are basically extensions to char_traits, and perhaps should -diff --git a/libstdc++-v3/config/locale/uclibc/ctype_members.cc b/libstdc++-v3/config/locale/uclibc/ctype_members.cc -index 7294e3a..7b12861 100644 ---- a/libstdc++-v3/config/locale/uclibc/ctype_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/ctype_members.cc -@@ -38,13 +38,6 @@ - #undef _LIBC - #include - --#ifndef __UCLIBC_HAS_XLOCALE__ --#define __wctype_l(S, L) wctype((S)) --#define __towupper_l(C, L) towupper((C)) --#define __towlower_l(C, L) towlower((C)) --#define __iswctype_l(C, M, L) iswctype((C), (M)) --#endif -- - namespace std - { - // NB: The other ctype specializations are in src/locale.cc and -diff --git a/libstdc++-v3/config/locale/uclibc/messages_members.cc b/libstdc++-v3/config/locale/uclibc/messages_members.cc -index 13594d9..d7693b4 100644 ---- a/libstdc++-v3/config/locale/uclibc/messages_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/messages_members.cc -@@ -39,13 +39,10 @@ - #ifdef __UCLIBC_MJN3_ONLY__ - #warning fix gettext stuff - #endif --#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ --extern "C" char *__dcgettext(const char *domainname, -- const char *msgid, int category); - #undef gettext --#define gettext(msgid) __dcgettext(NULL, msgid, LC_MESSAGES) -+#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ -+#define gettext(msgid) dcgettext(NULL, msgid, LC_MESSAGES) - #else --#undef gettext - #define gettext(msgid) (msgid) - #endif - -diff --git a/libstdc++-v3/config/locale/uclibc/messages_members.h b/libstdc++-v3/config/locale/uclibc/messages_members.h -index 1424078..d89da33 100644 ---- a/libstdc++-v3/config/locale/uclibc/messages_members.h -+++ b/libstdc++-v3/config/locale/uclibc/messages_members.h -@@ -36,15 +36,11 @@ - #ifdef __UCLIBC_MJN3_ONLY__ - #warning fix prototypes for *textdomain funcs - #endif --#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ --extern "C" char *__textdomain(const char *domainname); --extern "C" char *__bindtextdomain(const char *domainname, -- const char *dirname); --#else --#undef __textdomain --#undef __bindtextdomain --#define __textdomain(D) ((void)0) --#define __bindtextdomain(D,P) ((void)0) -+#ifndef __UCLIBC_HAS_GETTEXT_AWARENESS__ -+#undef textdomain -+#undef bindtextdomain -+#define textdomain(D) ((void)0) -+#define bindtextdomain(D,P) ((void)0) - #endif - - // Non-virtual member functions. 
-@@ -70,7 +66,7 @@ extern "C" char *__bindtextdomain(const char *domainname, - messages<_CharT>::open(const basic_string& __s, const locale& __loc, - const char* __dir) const - { -- __bindtextdomain(__s.c_str(), __dir); -+ bindtextdomain(__s.c_str(), __dir); - return this->do_open(__s, __loc); - } - -@@ -90,7 +86,7 @@ extern "C" char *__bindtextdomain(const char *domainname, - { - // No error checking is done, assume the catalog exists and can - // be used. -- __textdomain(__s.c_str()); -+ textdomain(__s.c_str()); - return 0; - } - -diff --git a/libstdc++-v3/config/locale/uclibc/monetary_members.cc b/libstdc++-v3/config/locale/uclibc/monetary_members.cc -index aa52731..2e6f80a 100644 ---- a/libstdc++-v3/config/locale/uclibc/monetary_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/monetary_members.cc -@@ -43,10 +43,6 @@ - #warning tailor for stub locale support - #endif - --#ifndef __UCLIBC_HAS_XLOCALE__ --#define __nl_langinfo_l(N, L) nl_langinfo((N)) --#endif -- - namespace std - { - // Construct and return valid pattern consisting of some combination of: -diff --git a/libstdc++-v3/config/locale/uclibc/numeric_members.cc b/libstdc++-v3/config/locale/uclibc/numeric_members.cc -index 883ec1a..2c70642 100644 ---- a/libstdc++-v3/config/locale/uclibc/numeric_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/numeric_members.cc -@@ -41,9 +41,6 @@ - #ifdef __UCLIBC_MJN3_ONLY__ - #warning tailor for stub locale support - #endif --#ifndef __UCLIBC_HAS_XLOCALE__ --#define __nl_langinfo_l(N, L) nl_langinfo((N)) --#endif - - namespace std - { -diff --git a/libstdc++-v3/config/locale/uclibc/time_members.cc b/libstdc++-v3/config/locale/uclibc/time_members.cc -index e0707d7..d848ed5 100644 ---- a/libstdc++-v3/config/locale/uclibc/time_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/time_members.cc -@@ -40,9 +40,6 @@ - #ifdef __UCLIBC_MJN3_ONLY__ - #warning tailor for stub locale support - #endif --#ifndef __UCLIBC_HAS_XLOCALE__ --#define __nl_langinfo_l(N, L) nl_langinfo((N)) --#endif - - namespace std - { --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0006-uclibc-locale-wchar_fix.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0006-uclibc-locale-wchar_fix.patch deleted file mode 100644 index e45a482d5..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0006-uclibc-locale-wchar_fix.patch +++ /dev/null @@ -1,68 +0,0 @@ -From e7a4760fb40008cae33e6fc7dc4cfef6c2fd5f93 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 08:45:57 +0400 -Subject: [PATCH 06/46] uclibc-locale-wchar_fix - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - libstdc++-v3/config/locale/uclibc/monetary_members.cc | 4 ++-- - libstdc++-v3/config/locale/uclibc/numeric_members.cc | 13 +++++++++++++ - 2 files changed, 15 insertions(+), 2 deletions(-) - -diff --git a/libstdc++-v3/config/locale/uclibc/monetary_members.cc b/libstdc++-v3/config/locale/uclibc/monetary_members.cc -index 2e6f80a..31ebb9f 100644 ---- a/libstdc++-v3/config/locale/uclibc/monetary_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/monetary_members.cc -@@ -401,7 +401,7 @@ namespace std - # ifdef __UCLIBC_HAS_XLOCALE__ - _M_data->_M_decimal_point = __cloc->decimal_point_wc; - _M_data->_M_thousands_sep = __cloc->thousands_sep_wc; --# else -+# elif defined __UCLIBC_HAS_LOCALE__ - _M_data->_M_decimal_point = __global_locale->decimal_point_wc; - _M_data->_M_thousands_sep = __global_locale->thousands_sep_wc; - # endif -@@ -556,7 +556,7 @@ namespace std - # ifdef 
__UCLIBC_HAS_XLOCALE__ - _M_data->_M_decimal_point = __cloc->decimal_point_wc; - _M_data->_M_thousands_sep = __cloc->thousands_sep_wc; --# else -+# elif defined __UCLIBC_HAS_LOCALE__ - _M_data->_M_decimal_point = __global_locale->decimal_point_wc; - _M_data->_M_thousands_sep = __global_locale->thousands_sep_wc; - # endif -diff --git a/libstdc++-v3/config/locale/uclibc/numeric_members.cc b/libstdc++-v3/config/locale/uclibc/numeric_members.cc -index 2c70642..d5c8961 100644 ---- a/libstdc++-v3/config/locale/uclibc/numeric_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/numeric_members.cc -@@ -127,12 +127,25 @@ namespace std - { - // Named locale. - // NB: In the GNU model wchar_t is always 32 bit wide. -+#ifdef __UCLIBC_MJN3_ONLY__ -+#warning fix this... should be numeric -+#endif -+#ifdef __UCLIBC__ -+# ifdef __UCLIBC_HAS_XLOCALE__ -+ _M_data->_M_decimal_point = __cloc->decimal_point_wc; -+ _M_data->_M_thousands_sep = __cloc->thousands_sep_wc; -+# elif defined __UCLIBC_HAS_LOCALE__ -+ _M_data->_M_decimal_point = __global_locale->decimal_point_wc; -+ _M_data->_M_thousands_sep = __global_locale->thousands_sep_wc; -+# endif -+#else - union { char *__s; wchar_t __w; } __u; - __u.__s = __nl_langinfo_l(_NL_NUMERIC_DECIMAL_POINT_WC, __cloc); - _M_data->_M_decimal_point = __u.__w; - - __u.__s = __nl_langinfo_l(_NL_NUMERIC_THOUSANDS_SEP_WC, __cloc); - _M_data->_M_thousands_sep = __u.__w; -+#endif - - if (_M_data->_M_thousands_sep == L'\0') - _M_data->_M_grouping = ""; --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0007-uclibc-locale-update.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0007-uclibc-locale-update.patch deleted file mode 100644 index b73e5914e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0007-uclibc-locale-update.patch +++ /dev/null @@ -1,542 +0,0 @@ -From 8d53a38a3038104e6830ecea5e4beadce54457c1 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 08:46:58 +0400 -Subject: [PATCH 07/46] uclibc-locale-update - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - .../config/locale/uclibc/c++locale_internal.h | 3 + - libstdc++-v3/config/locale/uclibc/c_locale.cc | 74 ++++++++++------------ - libstdc++-v3/config/locale/uclibc/c_locale.h | 42 ++++++------ - libstdc++-v3/config/locale/uclibc/ctype_members.cc | 51 +++++++++++---- - .../config/locale/uclibc/messages_members.h | 12 ++-- - .../config/locale/uclibc/monetary_members.cc | 34 ++++++---- - .../config/locale/uclibc/numeric_members.cc | 5 ++ - libstdc++-v3/config/locale/uclibc/time_members.cc | 18 ++++-- - libstdc++-v3/config/locale/uclibc/time_members.h | 17 +++-- - 9 files changed, 158 insertions(+), 98 deletions(-) - -diff --git a/libstdc++-v3/config/locale/uclibc/c++locale_internal.h b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h -index e74fddf..971a6b4 100644 ---- a/libstdc++-v3/config/locale/uclibc/c++locale_internal.h -+++ b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h -@@ -31,6 +31,9 @@ - - #include - #include -+#include -+#include -+#include - - #ifdef __UCLIBC_MJN3_ONLY__ - #warning clean this up -diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.cc b/libstdc++-v3/config/locale/uclibc/c_locale.cc -index 21430d0..1b9d8e1 100644 ---- a/libstdc++-v3/config/locale/uclibc/c_locale.cc -+++ b/libstdc++-v3/config/locale/uclibc/c_locale.cc -@@ -39,23 +39,20 @@ - #include - #include - --namespace std --{ -+_GLIBCXX_BEGIN_NAMESPACE(std) -+ - template<> - void - __convert_to_v(const char* __s, float& 
__v, ios_base::iostate& __err, - const __c_locale& __cloc) - { -- if (!(__err & ios_base::failbit)) -- { -- char* __sanity; -- errno = 0; -- float __f = __strtof_l(__s, &__sanity, __cloc); -- if (__sanity != __s && errno != ERANGE) -- __v = __f; -- else -- __err |= ios_base::failbit; -- } -+ char* __sanity; -+ errno = 0; -+ float __f = __strtof_l(__s, &__sanity, __cloc); -+ if (__sanity != __s && errno != ERANGE) -+ __v = __f; -+ else -+ __err |= ios_base::failbit; - } - - template<> -@@ -63,16 +60,13 @@ namespace std - __convert_to_v(const char* __s, double& __v, ios_base::iostate& __err, - const __c_locale& __cloc) - { -- if (!(__err & ios_base::failbit)) -- { -- char* __sanity; -- errno = 0; -- double __d = __strtod_l(__s, &__sanity, __cloc); -- if (__sanity != __s && errno != ERANGE) -- __v = __d; -- else -- __err |= ios_base::failbit; -- } -+ char* __sanity; -+ errno = 0; -+ double __d = __strtod_l(__s, &__sanity, __cloc); -+ if (__sanity != __s && errno != ERANGE) -+ __v = __d; -+ else -+ __err |= ios_base::failbit; - } - - template<> -@@ -80,16 +74,13 @@ namespace std - __convert_to_v(const char* __s, long double& __v, ios_base::iostate& __err, - const __c_locale& __cloc) - { -- if (!(__err & ios_base::failbit)) -- { -- char* __sanity; -- errno = 0; -- long double __ld = __strtold_l(__s, &__sanity, __cloc); -- if (__sanity != __s && errno != ERANGE) -- __v = __ld; -- else -- __err |= ios_base::failbit; -- } -+ char* __sanity; -+ errno = 0; -+ long double __ld = __strtold_l(__s, &__sanity, __cloc); -+ if (__sanity != __s && errno != ERANGE) -+ __v = __ld; -+ else -+ __err |= ios_base::failbit; - } - - void -@@ -110,17 +101,18 @@ namespace std - void - locale::facet::_S_destroy_c_locale(__c_locale& __cloc) - { -- if (_S_get_c_locale() != __cloc) -+ if (__cloc && _S_get_c_locale() != __cloc) - __freelocale(__cloc); - } - - __c_locale - locale::facet::_S_clone_c_locale(__c_locale& __cloc) - { return __duplocale(__cloc); } --} // namespace std - --namespace __gnu_cxx --{ -+_GLIBCXX_END_NAMESPACE -+ -+_GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx) -+ - const char* const category_names[6 + _GLIBCXX_NUM_CATEGORIES] = - { - "LC_CTYPE", -@@ -138,9 +130,11 @@ namespace __gnu_cxx - "LC_IDENTIFICATION" - #endif - }; --} - --namespace std --{ -+_GLIBCXX_END_NAMESPACE -+ -+_GLIBCXX_BEGIN_NAMESPACE(std) -+ - const char* const* const locale::_S_categories = __gnu_cxx::category_names; --} // namespace std -+ -+_GLIBCXX_END_NAMESPACE -diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.h b/libstdc++-v3/config/locale/uclibc/c_locale.h -index 4bca5f1..64a6d46 100644 ---- a/libstdc++-v3/config/locale/uclibc/c_locale.h -+++ b/libstdc++-v3/config/locale/uclibc/c_locale.h -@@ -39,21 +39,23 @@ - #pragma GCC system_header - - #include // get std::strlen --#include // get std::snprintf or std::sprintf -+#include // get std::vsnprintf or std::vsprintf - #include - #include // For codecvt - #ifdef __UCLIBC_MJN3_ONLY__ - #warning fix this - #endif --#ifdef __UCLIBC_HAS_LOCALE__ -+#ifdef _GLIBCXX_USE_ICONV - #include // For codecvt using iconv, iconv_t - #endif --#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ --#include // For messages -+#ifdef HAVE_LIBINTL_H -+#include // For messages - #endif -+#include - - #ifdef __UCLIBC_MJN3_ONLY__ - #warning what is _GLIBCXX_C_LOCALE_GNU for -+// psm: used in os/gnu-linux/ctype_noninline.h - #endif - #define _GLIBCXX_C_LOCALE_GNU 1 - -@@ -78,23 +80,25 @@ namespace std - #else - typedef int* __c_locale; - #endif -- -- // Convert numeric value of type _Tv to string and return length of -- 
// string. If snprintf is available use it, otherwise fall back to -- // the unsafe sprintf which, in general, can be dangerous and should -+ // Convert numeric value of type double to string and return length of -+ // string. If vsnprintf is available use it, otherwise fall back to -+ // the unsafe vsprintf which, in general, can be dangerous and should - // be avoided. -- template -- int -- __convert_from_v(char* __out, -- const int __size __attribute__ ((__unused__)), -- const char* __fmt, --#ifdef __UCLIBC_HAS_XCLOCALE__ -- _Tv __v, const __c_locale& __cloc, int __prec) -+ inline int -+ __convert_from_v(const __c_locale& -+#ifndef __UCLIBC_HAS_XCLOCALE__ -+ __cloc __attribute__ ((__unused__)) -+#endif -+ , -+ char* __out, -+ const int __size, -+ const char* __fmt, ...) - { -+ va_list __args; -+#ifdef __UCLIBC_HAS_XCLOCALE__ -+ - __c_locale __old = __gnu_cxx::__uselocale(__cloc); - #else -- _Tv __v, const __c_locale&, int __prec) -- { - # ifdef __UCLIBC_HAS_LOCALE__ - char* __old = std::setlocale(LC_ALL, NULL); - char* __sav = new char[std::strlen(__old) + 1]; -@@ -103,7 +107,9 @@ namespace std - # endif - #endif - -- const int __ret = std::snprintf(__out, __size, __fmt, __prec, __v); -+ va_start(__args, __fmt); -+ const int __ret = std::vsnprintf(__out, __size, __fmt, __args); -+ va_end(__args); - - #ifdef __UCLIBC_HAS_XCLOCALE__ - __gnu_cxx::__uselocale(__old); -diff --git a/libstdc++-v3/config/locale/uclibc/ctype_members.cc b/libstdc++-v3/config/locale/uclibc/ctype_members.cc -index 7b12861..13e011d 100644 ---- a/libstdc++-v3/config/locale/uclibc/ctype_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/ctype_members.cc -@@ -33,16 +33,20 @@ - - // Written by Benjamin Kosnik - -+#include -+#ifdef __UCLIBC_HAS_LOCALE__ - #define _LIBC - #include - #undef _LIBC -+#else -+#include -+#endif - #include - --namespace std --{ -+_GLIBCXX_BEGIN_NAMESPACE(std) -+ - // NB: The other ctype specializations are in src/locale.cc and - // various /config/os/* files. -- template<> - ctype_byname::ctype_byname(const char* __s, size_t __refs) - : ctype(0, false, __refs) - { -@@ -57,6 +61,8 @@ namespace std - #endif - } - } -+ ctype_byname::~ctype_byname() -+ { } - - #ifdef _GLIBCXX_USE_WCHAR_T - ctype::__wmask_type -@@ -138,17 +144,33 @@ namespace std - ctype:: - do_is(mask __m, wchar_t __c) const - { -- // Highest bitmask in ctype_base == 10, but extra in "C" -- // library for blank. -+ // The case of __m == ctype_base::space is particularly important, -+ // due to its use in many istream functions. Therefore we deal with -+ // it first, exploiting the knowledge that on GNU systems _M_bit[5] -+ // is the mask corresponding to ctype_base::space. NB: an encoding -+ // change would not affect correctness! -+ - bool __ret = false; -- const size_t __bitmasksize = 11; -- for (size_t __bitcur = 0; __bitcur <= __bitmasksize; ++__bitcur) -- if (__m & _M_bit[__bitcur] -- && __iswctype_l(__c, _M_wmask[__bitcur], _M_c_locale_ctype)) -- { -- __ret = true; -- break; -- } -+ if (__m == _M_bit[5]) -+ __ret = __iswctype_l(__c, _M_wmask[5], _M_c_locale_ctype); -+ else -+ { -+ // Highest bitmask in ctype_base == 10, but extra in "C" -+ // library for blank. 
-+ const size_t __bitmasksize = 11; -+ for (size_t __bitcur = 0; __bitcur <= __bitmasksize; ++__bitcur) -+ if (__m & _M_bit[__bitcur]) -+ { -+ if (__iswctype_l(__c, _M_wmask[__bitcur], _M_c_locale_ctype)) -+ { -+ __ret = true; -+ break; -+ } -+ else if (__m == _M_bit[__bitcur]) -+ break; -+ } -+ } -+ - return __ret; - } - -@@ -290,4 +312,5 @@ namespace std - #endif - } - #endif // _GLIBCXX_USE_WCHAR_T --} -+ -+_GLIBCXX_END_NAMESPACE -diff --git a/libstdc++-v3/config/locale/uclibc/messages_members.h b/libstdc++-v3/config/locale/uclibc/messages_members.h -index d89da33..067657a 100644 ---- a/libstdc++-v3/config/locale/uclibc/messages_members.h -+++ b/libstdc++-v3/config/locale/uclibc/messages_members.h -@@ -53,12 +53,16 @@ - template - messages<_CharT>::messages(__c_locale __cloc, const char* __s, - size_t __refs) -- : facet(__refs), _M_c_locale_messages(_S_clone_c_locale(__cloc)), -- _M_name_messages(__s) -+ : facet(__refs), _M_c_locale_messages(NULL), -+ _M_name_messages(NULL) - { -- char* __tmp = new char[std::strlen(__s) + 1]; -- std::strcpy(__tmp, __s); -+ const size_t __len = std::strlen(__s) + 1; -+ char* __tmp = new char[__len]; -+ std::memcpy(__tmp, __s, __len); - _M_name_messages = __tmp; -+ -+ // Last to avoid leaking memory if new throws. -+ _M_c_locale_messages = _S_clone_c_locale(__cloc); - } - - template -diff --git a/libstdc++-v3/config/locale/uclibc/monetary_members.cc b/libstdc++-v3/config/locale/uclibc/monetary_members.cc -index 31ebb9f..7679b9c 100644 ---- a/libstdc++-v3/config/locale/uclibc/monetary_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/monetary_members.cc -@@ -33,9 +33,14 @@ - - // Written by Benjamin Kosnik - -+#include -+#ifdef __UCLIBC_HAS_LOCALE__ - #define _LIBC - #include - #undef _LIBC -+#else -+#include -+#endif - #include - - #ifdef __UCLIBC_MJN3_ONLY__ -@@ -206,7 +211,7 @@ namespace std - } - break; - default: -- ; -+ __ret = pattern(); - } - return __ret; - } -@@ -390,7 +395,9 @@ namespace std - __c_locale __old = __uselocale(__cloc); - #else - // Switch to named locale so that mbsrtowcs will work. -- char* __old = strdup(setlocale(LC_ALL, NULL)); -+ char* __old = setlocale(LC_ALL, NULL); -+ const size_t __llen = strlen(__old) + 1; -+ char* __sav = new char[__llen]; - setlocale(LC_ALL, __name); - #endif - -@@ -477,8 +484,8 @@ namespace std - #ifdef __UCLIBC_HAS_XLOCALE__ - __uselocale(__old); - #else -- setlocale(LC_ALL, __old); -- free(__old); -+ setlocale(LC_ALL, __sav); -+ delete [] __sav; - #endif - __throw_exception_again; - } -@@ -498,8 +505,8 @@ namespace std - #ifdef __UCLIBC_HAS_XLOCALE__ - __uselocale(__old); - #else -- setlocale(LC_ALL, __old); -- free(__old); -+ setlocale(LC_ALL, __sav); -+ delete [] __sav; - #endif - } - } -@@ -545,8 +552,11 @@ namespace std - __c_locale __old = __uselocale(__cloc); - #else - // Switch to named locale so that mbsrtowcs will work. 
-- char* __old = strdup(setlocale(LC_ALL, NULL)); -- setlocale(LC_ALL, __name); -+ char* __old = setlocale(LC_ALL, NULL); -+ const size_t __llen = strlen(__old) + 1; -+ char* __sav = new char[__llen]; -+ memcpy(__sav, __old, __llen); -+ setlocale(LC_ALL, __name); - #endif - - #ifdef __UCLIBC_MJN3_ONLY__ -@@ -633,8 +643,8 @@ namespace std - #ifdef __UCLIBC_HAS_XLOCALE__ - __uselocale(__old); - #else -- setlocale(LC_ALL, __old); -- free(__old); -+ setlocale(LC_ALL, __sav); -+ delete [] __sav; - #endif - __throw_exception_again; - } -@@ -653,8 +663,8 @@ namespace std - #ifdef __UCLIBC_HAS_XLOCALE__ - __uselocale(__old); - #else -- setlocale(LC_ALL, __old); -- free(__old); -+ setlocale(LC_ALL, __sav); -+ delete [] __sav; - #endif - } - } -diff --git a/libstdc++-v3/config/locale/uclibc/numeric_members.cc b/libstdc++-v3/config/locale/uclibc/numeric_members.cc -index d5c8961..8ae8969 100644 ---- a/libstdc++-v3/config/locale/uclibc/numeric_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/numeric_members.cc -@@ -33,9 +33,14 @@ - - // Written by Benjamin Kosnik - -+#include -+#ifdef __UCLIBC_HAS_LOCALE__ - #define _LIBC - #include - #undef _LIBC -+#else -+#include -+#endif - #include - - #ifdef __UCLIBC_MJN3_ONLY__ -diff --git a/libstdc++-v3/config/locale/uclibc/time_members.cc b/libstdc++-v3/config/locale/uclibc/time_members.cc -index d848ed5..f24d53e 100644 ---- a/libstdc++-v3/config/locale/uclibc/time_members.cc -+++ b/libstdc++-v3/config/locale/uclibc/time_members.cc -@@ -53,11 +53,14 @@ namespace std - const size_t __len = __strftime_l(__s, __maxlen, __format, __tm, - _M_c_locale_timepunct); - #else -- char* __old = strdup(setlocale(LC_ALL, NULL)); -+ char* __old = setlocale(LC_ALL, NULL); -+ const size_t __llen = strlen(__old) + 1; -+ char* __sav = new char[__llen]; -+ memcpy(__sav, __old, __llen); - setlocale(LC_ALL, _M_name_timepunct); - const size_t __len = strftime(__s, __maxlen, __format, __tm); -- setlocale(LC_ALL, __old); -- free(__old); -+ setlocale(LC_ALL, __sav); -+ delete [] __sav; - #endif - // Make sure __s is null terminated. - if (__len == 0) -@@ -207,11 +210,14 @@ namespace std - const size_t __len = __wcsftime_l(__s, __maxlen, __format, __tm, - _M_c_locale_timepunct); - #else -- char* __old = strdup(setlocale(LC_ALL, NULL)); -+ char* __old = setlocale(LC_ALL, NULL); -+ const size_t __llen = strlen(__old) + 1; -+ char* __sav = new char[__llen]; -+ memcpy(__sav, __old, __llen); - setlocale(LC_ALL, _M_name_timepunct); - const size_t __len = wcsftime(__s, __maxlen, __format, __tm); -- setlocale(LC_ALL, __old); -- free(__old); -+ setlocale(LC_ALL, __sav); -+ delete [] __sav; - #endif - // Make sure __s is null terminated. - if (__len == 0) -diff --git a/libstdc++-v3/config/locale/uclibc/time_members.h b/libstdc++-v3/config/locale/uclibc/time_members.h -index ba8e858..1665dde 100644 ---- a/libstdc++-v3/config/locale/uclibc/time_members.h -+++ b/libstdc++-v3/config/locale/uclibc/time_members.h -@@ -50,12 +50,21 @@ - __timepunct<_CharT>::__timepunct(__c_locale __cloc, const char* __s, - size_t __refs) - : facet(__refs), _M_data(NULL), _M_c_locale_timepunct(NULL), -- _M_name_timepunct(__s) -+ _M_name_timepunct(NULL) - { -- char* __tmp = new char[std::strlen(__s) + 1]; -- std::strcpy(__tmp, __s); -+ const size_t __len = std::strlen(__s) + 1; -+ char* __tmp = new char[__len]; -+ std::memcpy(__tmp, __s, __len); - _M_name_timepunct = __tmp; -- _M_initialize_timepunct(__cloc); -+ -+ try -+ { _M_initialize_timepunct(__cloc); } -+ catch(...) 
-+ { -+ delete [] _M_name_timepunct; -+ __throw_exception_again; -+ } -+ - } - - template --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0008-missing-execinfo_h.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0008-missing-execinfo_h.patch deleted file mode 100644 index 01e7c9549..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0008-missing-execinfo_h.patch +++ /dev/null @@ -1,28 +0,0 @@ -From 6dde3da24ef4b9b357bca670d8551cab3fdda843 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 08:48:10 +0400 -Subject: [PATCH 08/46] missing-execinfo_h - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - boehm-gc/include/gc.h | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/boehm-gc/include/gc.h b/boehm-gc/include/gc.h -index 6b38f2d..fca98ff 100644 ---- a/boehm-gc/include/gc.h -+++ b/boehm-gc/include/gc.h -@@ -503,7 +503,7 @@ GC_API GC_PTR GC_malloc_atomic_ignore_off_page GC_PROTO((size_t lb)); - #if defined(__linux__) || defined(__GLIBC__) - # include - # if (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 1 || __GLIBC__ > 2) \ -- && !defined(__ia64__) -+ && !defined(__ia64__) && !defined(__UCLIBC__) - # ifndef GC_HAVE_BUILTIN_BACKTRACE - # define GC_HAVE_BUILTIN_BACKTRACE - # endif --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0009-c99-snprintf.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0009-c99-snprintf.patch deleted file mode 100644 index d62341ac6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0009-c99-snprintf.patch +++ /dev/null @@ -1,28 +0,0 @@ -From b794f1f1c1c7c06f3f0d78cf76c4fb90c2ab8dfb Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 08:49:03 +0400 -Subject: [PATCH 09/46] c99-snprintf - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - libstdc++-v3/include/c_std/cstdio | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/libstdc++-v3/include/c_std/cstdio b/libstdc++-v3/include/c_std/cstdio -index a4119ba..8396f43 100644 ---- a/libstdc++-v3/include/c_std/cstdio -+++ b/libstdc++-v3/include/c_std/cstdio -@@ -144,7 +144,7 @@ namespace std - using ::vsprintf; - } // namespace std - --#if _GLIBCXX_USE_C99_STDIO -+#if _GLIBCXX_USE_C99_STDIO || defined(__UCLIBC__) - - #undef snprintf - #undef vfscanf --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0010-gcc-poison-system-directories.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0010-gcc-poison-system-directories.patch deleted file mode 100644 index ac4cf442d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0010-gcc-poison-system-directories.patch +++ /dev/null @@ -1,192 +0,0 @@ -From d76250323dad69212c958e4857a98d99ab51a39e Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 08:59:00 +0400 -Subject: [PATCH 10/46] gcc: poison-system-directories - -Signed-off-by: Khem Raj - -Upstream-Status: Inappropriate [distribution: codesourcery] ---- - gcc/common.opt | 4 ++++ - gcc/config.in | 6 ++++++ - gcc/configure | 16 ++++++++++++++++ - gcc/configure.ac | 10 ++++++++++ - gcc/doc/invoke.texi | 9 +++++++++ - gcc/gcc.c | 2 ++ - gcc/incpath.c | 19 +++++++++++++++++++ - 7 files changed, 66 insertions(+) - -diff --git a/gcc/common.opt b/gcc/common.opt -index 67048db..733185c 100644 ---- a/gcc/common.opt -+++ b/gcc/common.opt -@@ -659,6 +659,10 @@ Wreturn-local-addr - Common Var(warn_return_local_addr) Init(1) Warning 
- Warn about returning a pointer/reference to a local or temporary variable. - -+Wpoison-system-directories -+Common Var(flag_poison_system_directories) Init(1) Warning -+Warn for -I and -L options using system directories if cross compiling -+ - Wshadow - Common Var(warn_shadow) Warning - Warn when one local variable shadows another. -diff --git a/gcc/config.in b/gcc/config.in -index 115cb61..105b30f 100644 ---- a/gcc/config.in -+++ b/gcc/config.in -@@ -187,6 +187,12 @@ - #endif - - -+/* Define to warn for use of native system header directories */ -+#ifndef USED_FOR_TARGET -+#undef ENABLE_POISON_SYSTEM_DIRECTORIES -+#endif -+ -+ - /* Define if you want all operations on RTL (the basic data structure of the - optimizer and back end) to be checked for dynamic type safety at runtime. - This is quite expensive. */ -diff --git a/gcc/configure b/gcc/configure -index 1c6e340..8f83152 100755 ---- a/gcc/configure -+++ b/gcc/configure -@@ -942,6 +942,7 @@ with_system_zlib - enable_maintainer_mode - enable_link_mutex - enable_version_specific_runtime_libs -+enable_poison_system_directories - enable_plugin - enable_host_shared - enable_libquadmath_support -@@ -1681,6 +1682,8 @@ Optional Features: - --enable-version-specific-runtime-libs - specify that runtime libraries should be installed - in a compiler-specific directory -+ --enable-poison-system-directories -+ warn for use of native system header directories - --enable-plugin enable plugin support - --enable-host-shared build host code as shared libraries - --disable-libquadmath-support -@@ -28908,6 +28911,19 @@ if test "${enable_version_specific_runtime_libs+set}" = set; then : - fi - - -+# Check whether --enable-poison-system-directories was given. -+if test "${enable_poison_system_directories+set}" = set; then : -+ enableval=$enable_poison_system_directories; -+else -+ enable_poison_system_directories=no -+fi -+ -+if test "x${enable_poison_system_directories}" = "xyes"; then -+ -+$as_echo "#define ENABLE_POISON_SYSTEM_DIRECTORIES 1" >>confdefs.h -+ -+fi -+ - # Substitute configuration variables - - -diff --git a/gcc/configure.ac b/gcc/configure.ac -index 6c1dcd9..0fccaef 100644 ---- a/gcc/configure.ac -+++ b/gcc/configure.ac -@@ -5861,6 +5861,16 @@ AC_ARG_ENABLE(version-specific-runtime-libs, - [specify that runtime libraries should be - installed in a compiler-specific directory])]) - -+AC_ARG_ENABLE([poison-system-directories], -+ AS_HELP_STRING([--enable-poison-system-directories], -+ [warn for use of native system header directories]),, -+ [enable_poison_system_directories=no]) -+if test "x${enable_poison_system_directories}" = "xyes"; then -+ AC_DEFINE([ENABLE_POISON_SYSTEM_DIRECTORIES], -+ [1], -+ [Define to warn for use of native system header directories]) -+fi -+ - # Substitute configuration variables - AC_SUBST(subdirs) - AC_SUBST(srcdir) -diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi -index 821f8fd..8bb49e7 100644 ---- a/gcc/doc/invoke.texi -+++ b/gcc/doc/invoke.texi -@@ -284,6 +284,7 @@ Objective-C and Objective-C++ Dialects}. - -Wparentheses -Wno-pedantic-ms-format @gol - -Wplacement-new -Wplacement-new=@var{n} @gol - -Wpointer-arith -Wno-pointer-to-int-cast @gol -+-Wno-poison-system-directories @gol - -Wno-pragmas -Wredundant-decls -Wno-return-local-addr @gol - -Wreturn-type -Wsequence-point -Wshadow -Wno-shadow-ivar @gol - -Wshift-overflow -Wshift-overflow=@var{n} @gol -@@ -4723,6 +4724,14 @@ made up of data only and thus requires no special treatment. 
But, for - most targets, it is made up of code and thus requires the stack to be - made executable in order for the program to work properly. - -+@item -Wno-poison-system-directories -+@opindex Wno-poison-system-directories -+Do not warn for @option{-I} or @option{-L} options using system -+directories such as @file{/usr/include} when cross compiling. This -+option is intended for use in chroot environments when such -+directories contain the correct headers and libraries for the target -+system rather than the host. -+ - @item -Wfloat-equal - @opindex Wfloat-equal - @opindex Wno-float-equal -diff --git a/gcc/gcc.c b/gcc/gcc.c -index 1af5920..4cfef7f 100644 ---- a/gcc/gcc.c -+++ b/gcc/gcc.c -@@ -1017,6 +1017,8 @@ proper position among the other output files. */ - "%{fuse-ld=*:-fuse-ld=%*} " LINK_COMPRESS_DEBUG_SPEC \ - "%X %{o*} %{e*} %{N} %{n} %{r}\ - %{s} %{t} %{u*} %{z} %{Z} %{!nostdlib:%{!nostartfiles:%S}} \ -+ %{Wno-poison-system-directories:--no-poison-system-directories} \ -+ %{Werror=poison-system-directories:--error-poison-system-directories} \ - %{static:} %{L*} %(mfwrap) %(link_libgcc) " \ - VTABLE_VERIFICATION_SPEC " " SANITIZER_EARLY_SPEC " %o " CHKP_SPEC " \ - %{fopenacc|fopenmp|%:gt(%{ftree-parallelize-loops=*:%*} 1):\ -diff --git a/gcc/incpath.c b/gcc/incpath.c -index ea40f4a..856da41 100644 ---- a/gcc/incpath.c -+++ b/gcc/incpath.c -@@ -26,6 +26,7 @@ - #include "intl.h" - #include "incpath.h" - #include "cppdefault.h" -+#include "diagnostic-core.h" - - /* Microsoft Windows does not natively support inodes. - VMS has non-numeric inodes. */ -@@ -381,6 +382,24 @@ merge_include_chains (const char *sysroot, cpp_reader *pfile, int verbose) - } - fprintf (stderr, _("End of search list.\n")); - } -+ -+#ifdef ENABLE_POISON_SYSTEM_DIRECTORIES -+ if (flag_poison_system_directories) -+ { -+ struct cpp_dir *p; -+ -+ for (p = heads[QUOTE]; p; p = p->next) -+ { -+ if ((!strncmp (p->name, "/usr/include", 12)) -+ || (!strncmp (p->name, "/usr/local/include", 18)) -+ || (!strncmp (p->name, "/usr/X11R6/include", 18))) -+ warning (OPT_Wpoison_system_directories, -+ "include location \"%s\" is unsafe for " -+ "cross-compilation", -+ p->name); -+ } -+ } -+#endif - } - - /* Use given -I paths for #include "..." but not #include <...>, and --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0011-gcc-poison-dir-extend.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0011-gcc-poison-dir-extend.patch deleted file mode 100644 index a1736aea1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0011-gcc-poison-dir-extend.patch +++ /dev/null @@ -1,39 +0,0 @@ -From a1c24b59def393e43cd50b6768604a212c788ed3 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:00:34 +0400 -Subject: [PATCH 11/46] gcc-poison-dir-extend - -Add /sw/include and /opt/include based on the original -zecke-no-host-includes.patch patch. The original patch checked for -/usr/include, /sw/include and /opt/include and then triggered a failure and -aborted. - -Instead, we add the two missing items to the current scan. If the user -wants this to be a failure, they can add "-Werror=poison-system-directories". 
- -Signed-off-by: Mark Hatle -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - gcc/incpath.c | 4 +++- - 1 file changed, 3 insertions(+), 1 deletion(-) - -diff --git a/gcc/incpath.c b/gcc/incpath.c -index 856da41..d2fc82c 100644 ---- a/gcc/incpath.c -+++ b/gcc/incpath.c -@@ -392,7 +392,9 @@ merge_include_chains (const char *sysroot, cpp_reader *pfile, int verbose) - { - if ((!strncmp (p->name, "/usr/include", 12)) - || (!strncmp (p->name, "/usr/local/include", 18)) -- || (!strncmp (p->name, "/usr/X11R6/include", 18))) -+ || (!strncmp (p->name, "/usr/X11R6/include", 18)) -+ || (!strncmp (p->name, "/sw/include", 11)) -+ || (!strncmp (p->name, "/opt/include", 12))) - warning (OPT_Wpoison_system_directories, - "include location \"%s\" is unsafe for " - "cross-compilation", --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch deleted file mode 100644 index 939b0705f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch +++ /dev/null @@ -1,73 +0,0 @@ -From 00ef5f0f2a8d3b33aeb1e55c0d23439f4dd495af Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:08:31 +0400 -Subject: [PATCH 12/46] gcc-4.3.3: SYSROOT_CFLAGS_FOR_TARGET - -Before committing, I noticed that PR/32161 was marked as a dup of PR/32009, but my previous patch did not fix it. - -This alternative patch is better because it lets you just use CFLAGS_FOR_TARGET to set the compilation flags for libgcc. Since bootstrapped target libraries are never compiled with the native compiler, it makes little sense to use different flags for stage1 and later stages. And it also makes little sense to use a different variable than CFLAGS_FOR_TARGET. - -Other changes I had to do include: - -- moving the creation of default CFLAGS_FOR_TARGET from Makefile.am to configure.ac, because otherwise the BOOT_CFLAGS are substituted into CFLAGS_FOR_TARGET (which is "-O2 -g $(CFLAGS)") via $(CFLAGS). It is also cleaner this way though. - -- passing the right CFLAGS to configure scripts as exported environment variables - -I also stopped passing LIBCFLAGS to configure scripts since they are unused in the whole src tree. And I updated the documentation as H-P reminded me to do. - -Bootstrapped/regtested i686-pc-linux-gnu, will commit to 4.4 shortly. Ok for 4.3? - -Signed-off-by: Paolo Bonzini -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - configure | 32 ++++++++++++++++++++++++++++++++ - 1 file changed, 32 insertions(+) - -diff --git a/configure b/configure -index bfadc33..755d382 100755 ---- a/configure -+++ b/configure -@@ -6819,6 +6819,38 @@ fi - - - -+# During gcc bootstrap, if we use some random cc for stage1 then CFLAGS -+# might be empty or "-g". We don't require a C++ compiler, so CXXFLAGS -+# might also be empty (or "-g", if a non-GCC C++ compiler is in the path). -+# We want to ensure that TARGET libraries (which we know are built with -+# gcc) are built with "-O2 -g", so include those options when setting -+# CFLAGS_FOR_TARGET and CXXFLAGS_FOR_TARGET. 
-+if test "x$CFLAGS_FOR_TARGET" = x; then -+ CFLAGS_FOR_TARGET=$CFLAGS -+ case " $CFLAGS " in -+ *" -O2 "*) ;; -+ *) CFLAGS_FOR_TARGET="-O2 $CFLAGS" ;; -+ esac -+ case " $CFLAGS " in -+ *" -g "* | *" -g3 "*) ;; -+ *) CFLAGS_FOR_TARGET="-g $CFLAGS" ;; -+ esac -+fi -+ -+ -+if test "x$CXXFLAGS_FOR_TARGET" = x; then -+ CXXFLAGS_FOR_TARGET=$CXXFLAGS -+ case " $CXXFLAGS " in -+ *" -O2 "*) ;; -+ *) CXXFLAGS_FOR_TARGET="-O2 $CXXFLAGS" ;; -+ esac -+ case " $CXXFLAGS " in -+ *" -g "* | *" -g3 "*) ;; -+ *) CXXFLAGS_FOR_TARGET="-g $CXXFLAGS" ;; -+ esac -+fi -+ -+ - # Handle --with-headers=XXX. If the value is not "yes", the contents of - # the named directory are copied to $(tooldir)/sys-include. - if test x"${with_headers}" != x && test x"${with_headers}" != xno ; then --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0013-64-bit-multilib-hack.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0013-64-bit-multilib-hack.patch deleted file mode 100644 index e31cde431..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0013-64-bit-multilib-hack.patch +++ /dev/null @@ -1,85 +0,0 @@ -From 7fc7a070ac53a55950a1eac76f02877d4106b4b3 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:10:06 +0400 -Subject: [PATCH 13/46] 64-bit multilib hack. - -GCC has internal multilib handling code but it assumes a very specific rigid directory -layout. The build system implementation of multilib layout is very generic and allows -complete customisation of the library directories. - -This patch is a partial solution to allow any custom directories to be passed into gcc -and handled correctly. It forces gcc to use the base_libdir (which is the current -directory, "."). We need to do this for each multilib that is configured as we don't -know which compiler options may be being passed into the compiler. Since we have a compiler -per mulitlib at this point that isn't an issue. - -The one problem is the target compiler is only going to work for the default multlilib at -this point. Ideally we'd figure out which multilibs were being enabled with which paths -and be able to patch these entries with a complete set of correct paths but this we -don't have such code at this point. This is something the target gcc recipe should do -and override these platform defaults in its build config. - -RP 15/8/11 - -Signed-off-by: Khem Raj -Signed-off-by: Elvis Dowson - -Upstream-Status: Pending ---- - gcc/config/i386/t-linux64 | 6 ++---- - gcc/config/mips/t-linux64 | 10 +++------- - gcc/config/rs6000/t-linux64 | 5 ++--- - 3 files changed, 7 insertions(+), 14 deletions(-) - -diff --git a/gcc/config/i386/t-linux64 b/gcc/config/i386/t-linux64 -index c0cc8a3..365a5d6 100644 ---- a/gcc/config/i386/t-linux64 -+++ b/gcc/config/i386/t-linux64 -@@ -32,7 +32,5 @@ - # - comma=, - MULTILIB_OPTIONS = $(subst $(comma),/,$(TM_MULTILIB_CONFIG)) --MULTILIB_DIRNAMES = $(patsubst m%, %, $(subst /, ,$(MULTILIB_OPTIONS))) --MULTILIB_OSDIRNAMES = m64=../lib64$(call if_multiarch,:x86_64-linux-gnu) --MULTILIB_OSDIRNAMES+= m32=$(if $(wildcard $(shell echo $(SYSTEM_HEADER_DIR))/../../usr/lib32),../lib32,../lib)$(call if_multiarch,:i386-linux-gnu) --MULTILIB_OSDIRNAMES+= mx32=../libx32$(call if_multiarch,:x86_64-linux-gnux32) -+MULTILIB_DIRNAMES = . . 
-+MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) -diff --git a/gcc/config/mips/t-linux64 b/gcc/config/mips/t-linux64 -index 16c8adf..b932ace 100644 ---- a/gcc/config/mips/t-linux64 -+++ b/gcc/config/mips/t-linux64 -@@ -17,10 +17,6 @@ - # . - - MULTILIB_OPTIONS = mabi=n32/mabi=32/mabi=64 --MULTILIB_DIRNAMES = n32 32 64 --MIPS_EL = $(if $(filter %el, $(firstword $(subst -, ,$(target)))),el) --MIPS_SOFT = $(if $(strip $(filter MASK_SOFT_FLOAT_ABI, $(target_cpu_default)) $(filter soft, $(with_float))),soft) --MULTILIB_OSDIRNAMES = \ -- ../lib32$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabin32$(MIPS_SOFT)) \ -- ../lib$(call if_multiarch,:mips$(MIPS_EL)-linux-gnu$(MIPS_SOFT)) \ -- ../lib64$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabi64$(MIPS_SOFT)) -+MULTILIB_DIRNAMES = . . . -+MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) -+ -diff --git a/gcc/config/rs6000/t-linux64 b/gcc/config/rs6000/t-linux64 -index fa7550b..9b5d9ef 100644 ---- a/gcc/config/rs6000/t-linux64 -+++ b/gcc/config/rs6000/t-linux64 -@@ -26,10 +26,9 @@ - # MULTILIB_OSDIRNAMES according to what is found on the target. - - MULTILIB_OPTIONS := m64/m32 --MULTILIB_DIRNAMES := 64 32 -+MULTILIB_DIRNAMES := . . - MULTILIB_EXTRA_OPTS := --MULTILIB_OSDIRNAMES := m64=../lib64$(call if_multiarch,:powerpc64-linux-gnu) --MULTILIB_OSDIRNAMES += m32=$(if $(wildcard $(shell echo $(SYSTEM_HEADER_DIR))/../../usr/lib32),../lib32,../lib)$(call if_multiarch,:powerpc-linux-gnu) -+MULTILIB_OSDIRNAMES := ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) - - rs6000-linux.o: $(srcdir)/config/rs6000/rs6000-linux.c - $(COMPILE) $< --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0014-optional-libstdc.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0014-optional-libstdc.patch deleted file mode 100644 index 44b0cc7d6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0014-optional-libstdc.patch +++ /dev/null @@ -1,125 +0,0 @@ -From 36275f7981bdaf919bbc9b51a7c7fae1e192adb3 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:12:56 +0400 -Subject: [PATCH 14/46] optional libstdc - -gcc-runtime builds libstdc++ separately from gcc-cross-*. Its configure tests using g++ -will not run correctly since by default the linker will try to link against libstdc++ -which shouldn't exist yet. We need an option to disable -lstdc++ -option whilst leaving -lc, -lgcc and other automatic library dependencies added by gcc -driver. This patch adds such an option which only disables the -lstdc++. - -A "standard" gcc build uses xgcc and hence avoids this. We should ask upstream how to -do this officially, the likely answer is don't build libstdc++ separately. - -RP 29/6/10 - -Signed-off-by: Khem Raj - -Upstream-Status: Inappropriate [embedded specific] ---- - gcc/c-family/c.opt | 4 ++++ - gcc/cp/g++spec.c | 1 + - gcc/doc/invoke.texi | 32 +++++++++++++++++++++++++++++++- - gcc/gcc.c | 1 + - 4 files changed, 37 insertions(+), 1 deletion(-) - -diff --git a/gcc/c-family/c.opt b/gcc/c-family/c.opt -index 4f86876..660da6c 100644 ---- a/gcc/c-family/c.opt -+++ b/gcc/c-family/c.opt -@@ -1647,6 +1647,10 @@ nostdinc++ - C++ ObjC++ - Do not search standard system include directories for C++. 
- -+nostdlib++ -+Driver -+Do not link standard C++ runtime library -+ - o - C ObjC C++ ObjC++ Joined Separate - ; Documented in common.opt -diff --git a/gcc/cp/g++spec.c b/gcc/cp/g++spec.c -index 03cbde0..3642540 100644 ---- a/gcc/cp/g++spec.c -+++ b/gcc/cp/g++spec.c -@@ -137,6 +137,7 @@ lang_specific_driver (struct cl_decoded_option **in_decoded_options, - switch (decoded_options[i].opt_index) - { - case OPT_nostdlib: -+ case OPT_nostdlib__: - case OPT_nodefaultlibs: - library = -1; - break; -diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi -index 8bb49e7..84d68aa 100644 ---- a/gcc/doc/invoke.texi -+++ b/gcc/doc/invoke.texi -@@ -207,6 +207,9 @@ in the following sections. - -fno-weak -nostdinc++ @gol - -fvisibility-inlines-hidden @gol - -fvisibility-ms-compat @gol -+-fvtable-verify=@r{[}std@r{|}preinit@r{|}none@r{]} @gol -+-fvtv-counts -fvtv-debug @gol -+-nostdlib++ @gol - -fext-numeric-literals @gol - -Wabi=@var{n} -Wabi-tag -Wconversion-null -Wctor-dtor-privacy @gol - -Wdelete-non-virtual-dtor -Wliteral-suffix -Wmultiple-inheritance @gol -@@ -470,7 +473,7 @@ Objective-C and Objective-C++ Dialects}. - -s -static -static-libgcc -static-libstdc++ @gol - -static-libasan -static-libtsan -static-liblsan -static-libubsan @gol - -static-libmpx -static-libmpxwrappers @gol ---shared -shared-libgcc -symbolic @gol -+-shared -shared-libgcc -symbolic -nostdlib++ @gol - -T @var{script} -Wl,@var{option} -Xlinker @var{option} @gol - -u @var{symbol} -z @var{keyword}} - -@@ -10601,6 +10604,33 @@ library subroutines. - constructors are called; @pxref{Collect2,,@code{collect2}, gccint, - GNU Compiler Collection (GCC) Internals}.) - -+@item -nostdlib++ -+@opindex nostdlib++ -+Do not use the standard system C++ runtime libraries when linking. -+Only the libraries you specify will be passed to the linker. -+ -+@cindex @option{-lgcc}, use with @option{-nostdlib} -+@cindex @option{-nostdlib} and unresolved references -+@cindex unresolved references and @option{-nostdlib} -+@cindex @option{-lgcc}, use with @option{-nodefaultlibs} -+@cindex @option{-nodefaultlibs} and unresolved references -+@cindex unresolved references and @option{-nodefaultlibs} -+One of the standard libraries bypassed by @option{-nostdlib} and -+@option{-nodefaultlibs} is @file{libgcc.a}, a library of internal subroutines -+which GCC uses to overcome shortcomings of particular machines, or special -+needs for some languages. -+(@xref{Interface,,Interfacing to GCC Output,gccint,GNU Compiler -+Collection (GCC) Internals}, -+for more discussion of @file{libgcc.a}.) -+In most cases, you need @file{libgcc.a} even when you want to avoid -+other standard libraries. In other words, when you specify @option{-nostdlib} -+or @option{-nodefaultlibs} you should usually specify @option{-lgcc} as well. -+This ensures that you have no unresolved references to internal GCC -+library subroutines. -+(An example of such an internal subroutine is @code{__main}, used to ensure C++ -+constructors are called; @pxref{Collect2,,@code{collect2}, gccint, -+GNU Compiler Collection (GCC) Internals}.) -+ - @item -pie - @opindex pie - Produce a position independent executable on targets that support it. -diff --git a/gcc/gcc.c b/gcc/gcc.c -index 4cfef7f..da0b482 100644 ---- a/gcc/gcc.c -+++ b/gcc/gcc.c -@@ -1028,6 +1028,7 @@ proper position among the other output files. 
*/ - %(mflib) " STACK_SPLIT_SPEC "\ - %{fprofile-arcs|fprofile-generate*|coverage:-lgcov} " SANITIZER_SPEC " \ - %{!nostdlib:%{!nodefaultlibs:%(link_ssp) %(link_gcc_c_sequence)}}\ -+ %{!nostdlib++:}\ - %{!nostdlib:%{!nostartfiles:%E}} %{T*} \n%(post_link) }}}}}}" - #endif - --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch deleted file mode 100644 index 6fc7346f6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch +++ /dev/null @@ -1,59 +0,0 @@ -From 9346f6042d8f7f85a75ca2af15f3b8d234985165 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:14:20 +0400 -Subject: [PATCH 15/46] gcc: disable MASK_RELAX_PIC_CALLS bit - -The new feature added after 4.3.3 -"http://www.pubbs.net/200909/gcc/94048-patch-add-support-for-rmipsjalr.html" -will cause cc1plus eat up all the system memory when build webkit-gtk. -The function mips_get_pic_call_symbol keeps on recursively calling itself. -Disable this feature to walk aside the bug. - -Signed-off-by: Dongxiao Xu -Signed-off-by: Khem Raj - -Upstream-Status: Inappropriate [configuration] ---- - gcc/configure | 7 ------- - gcc/configure.ac | 7 ------- - 2 files changed, 14 deletions(-) - -diff --git a/gcc/configure b/gcc/configure -index 8f83152..5e72f17 100755 ---- a/gcc/configure -+++ b/gcc/configure -@@ -27018,13 +27018,6 @@ $as_echo_n "checking assembler and linker for explicit JALR relocation... " >&6; - rm -f conftest.* - fi - fi -- if test $gcc_cv_as_ld_jalr_reloc = yes; then -- if test x$target_cpu_default = x; then -- target_cpu_default=MASK_RELAX_PIC_CALLS -- else -- target_cpu_default="($target_cpu_default)|MASK_RELAX_PIC_CALLS" -- fi -- fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gcc_cv_as_ld_jalr_reloc" >&5 - $as_echo "$gcc_cv_as_ld_jalr_reloc" >&6; } - -diff --git a/gcc/configure.ac b/gcc/configure.ac -index 0fccaef..dd8f6fa 100644 ---- a/gcc/configure.ac -+++ b/gcc/configure.ac -@@ -4522,13 +4522,6 @@ x: - rm -f conftest.* - fi - fi -- if test $gcc_cv_as_ld_jalr_reloc = yes; then -- if test x$target_cpu_default = x; then -- target_cpu_default=MASK_RELAX_PIC_CALLS -- else -- target_cpu_default="($target_cpu_default)|MASK_RELAX_PIC_CALLS" -- fi -- fi - AC_MSG_RESULT($gcc_cv_as_ld_jalr_reloc) - - AC_CACHE_CHECK([linker for .eh_frame personality relaxation], --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0016-COLLECT_GCC_OPTIONS.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0016-COLLECT_GCC_OPTIONS.patch deleted file mode 100644 index c1548647c..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0016-COLLECT_GCC_OPTIONS.patch +++ /dev/null @@ -1,38 +0,0 @@ -From 1033bc2d4efc5c301bb822b607a673f5b10cc69f Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:16:28 +0400 -Subject: [PATCH 16/46] COLLECT_GCC_OPTIONS - -This patch adds --sysroot into COLLECT_GCC_OPTIONS which is used to -invoke collect2. 
- -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - gcc/gcc.c | 9 +++++++++ - 1 file changed, 9 insertions(+) - -diff --git a/gcc/gcc.c b/gcc/gcc.c -index da0b482..7ca129f 100644 ---- a/gcc/gcc.c -+++ b/gcc/gcc.c -@@ -4591,6 +4591,15 @@ set_collect_gcc_options (void) - sizeof ("COLLECT_GCC_OPTIONS=") - 1); - - first_time = TRUE; -+#ifdef HAVE_LD_SYSROOT -+ if (target_system_root_changed && target_system_root) -+ { -+ obstack_grow (&collect_obstack, "'--sysroot=", sizeof("'--sysroot=")-1); -+ obstack_grow (&collect_obstack, target_system_root,strlen(target_system_root)); -+ obstack_grow (&collect_obstack, "'", 1); -+ first_time = FALSE; -+ } -+#endif - for (i = 0; (int) i < n_switches; i++) - { - const char *const *args; --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0017-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0017-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch deleted file mode 100644 index 0dbabd9e9..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0017-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch +++ /dev/null @@ -1,96 +0,0 @@ -From 67123a306880af997a99ae514677c2da4973bd1a Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:17:25 +0400 -Subject: [PATCH 17/46] Use the defaults.h in ${B} instead of ${S}, and t-oe in - ${B} - -Use the defaults.h in ${B} instead of ${S}, and t-oe in ${B}, so that -the source can be shared between gcc-cross-initial, -gcc-cross-intermediate, gcc-cross, gcc-runtime, and also the sdk build. - -Signed-off-by: Khem Raj - -Upstream-Status: Pending - -While compiling gcc-crosssdk-initial-x86_64 on some host, there is -occasionally failure that test the existance of default.h doesn't -work, the reason is tm_include_list='** defaults.h' rather than -tm_include_list='** ./defaults.h' - -So we add the test condition for this situation. 
-Signed-off-by: Hongxu Jia ---- - gcc/Makefile.in | 2 +- - gcc/configure | 4 ++-- - gcc/configure.ac | 4 ++-- - gcc/mkconfig.sh | 4 ++-- - 4 files changed, 7 insertions(+), 7 deletions(-) - -diff --git a/gcc/Makefile.in b/gcc/Makefile.in -index 6c5adc0..9a1d466 100644 ---- a/gcc/Makefile.in -+++ b/gcc/Makefile.in -@@ -517,7 +517,7 @@ TARGET_SYSTEM_ROOT = @TARGET_SYSTEM_ROOT@ - TARGET_SYSTEM_ROOT_DEFINE = @TARGET_SYSTEM_ROOT_DEFINE@ - - xmake_file=@xmake_file@ --tmake_file=@tmake_file@ -+tmake_file=@tmake_file@ ./t-oe - TM_ENDIAN_CONFIG=@TM_ENDIAN_CONFIG@ - TM_MULTILIB_CONFIG=@TM_MULTILIB_CONFIG@ - TM_MULTILIB_EXCEPTIONS_CONFIG=@TM_MULTILIB_EXCEPTIONS_CONFIG@ -diff --git a/gcc/configure b/gcc/configure -index 5e72f17..389b6d5 100755 ---- a/gcc/configure -+++ b/gcc/configure -@@ -12130,8 +12130,8 @@ for f in $tm_file; do - tm_include_list="${tm_include_list} $f" - ;; - defaults.h ) -- tm_file_list="${tm_file_list} \$(srcdir)/$f" -- tm_include_list="${tm_include_list} $f" -+ tm_file_list="${tm_file_list} ./$f" -+ tm_include_list="${tm_include_list} ./$f" - ;; - * ) - tm_file_list="${tm_file_list} \$(srcdir)/config/$f" -diff --git a/gcc/configure.ac b/gcc/configure.ac -index dd8f6fa..91ac800 100644 ---- a/gcc/configure.ac -+++ b/gcc/configure.ac -@@ -1883,8 +1883,8 @@ for f in $tm_file; do - tm_include_list="${tm_include_list} $f" - ;; - defaults.h ) -- tm_file_list="${tm_file_list} \$(srcdir)/$f" -- tm_include_list="${tm_include_list} $f" -+ tm_file_list="${tm_file_list} ./$f" -+ tm_include_list="${tm_include_list} ./$f" - ;; - * ) - tm_file_list="${tm_file_list} \$(srcdir)/config/$f" -diff --git a/gcc/mkconfig.sh b/gcc/mkconfig.sh -index 67dfac6..b73e08d 100644 ---- a/gcc/mkconfig.sh -+++ b/gcc/mkconfig.sh -@@ -77,7 +77,7 @@ if [ -n "$HEADERS" ]; then - if [ $# -ge 1 ]; then - echo '#ifdef IN_GCC' >> ${output}T - for file in "$@"; do -- if test x"$file" = x"defaults.h"; then -+ if test x"$file" = x"./defaults.h" -o x"$file" = x"defaults.h"; then - postpone_defaults_h="yes" - else - echo "# include \"$file\"" >> ${output}T -@@ -109,7 +109,7 @@ esac - - # If we postponed including defaults.h, add the #include now. - if test x"$postpone_defaults_h" = x"yes"; then -- echo "# include \"defaults.h\"" >> ${output}T -+ echo "# include \"./defaults.h\"" >> ${output}T - fi - - # Add multiple inclusion protection guard, part two. --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0018-fortran-cross-compile-hack.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0018-fortran-cross-compile-hack.patch deleted file mode 100644 index b43d89ea8..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0018-fortran-cross-compile-hack.patch +++ /dev/null @@ -1,46 +0,0 @@ -From e360dc3e0f1e0b9b001ef722fcf66f8120a03dbc Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:20:01 +0400 -Subject: [PATCH 18/46] fortran cross-compile hack. - -* Fortran would have searched for arm-angstrom-gnueabi-gfortran but would have used -used gfortan. For gcc_4.2.2.bb we want to use the gfortran compiler from our cross -directory. 
- -Signed-off-by: Khem Raj - -Upstream-Status: Inappropriate [embedded specific] ---- - libgfortran/configure | 2 +- - libgfortran/configure.ac | 2 +- - 2 files changed, 2 insertions(+), 2 deletions(-) - -diff --git a/libgfortran/configure b/libgfortran/configure -index f746f6f..b4f3278 100755 ---- a/libgfortran/configure -+++ b/libgfortran/configure -@@ -12734,7 +12734,7 @@ esac - - # We need gfortran to compile parts of the library - #AC_PROG_FC(gfortran) --FC="$GFORTRAN" -+#FC="$GFORTRAN" - ac_ext=${ac_fc_srcext-f} - ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' - ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' -diff --git a/libgfortran/configure.ac b/libgfortran/configure.ac -index 8f377bb..67710b5 100644 ---- a/libgfortran/configure.ac -+++ b/libgfortran/configure.ac -@@ -240,7 +240,7 @@ AC_SUBST(enable_static) - - # We need gfortran to compile parts of the library - #AC_PROG_FC(gfortran) --FC="$GFORTRAN" -+#FC="$GFORTRAN" - AC_PROG_FC(gfortran) - - # extra LD Flags which are required for targets --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0019-cpp-honor-sysroot.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0019-cpp-honor-sysroot.patch deleted file mode 100644 index 417a5ede4..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0019-cpp-honor-sysroot.patch +++ /dev/null @@ -1,54 +0,0 @@ -From 98d9c0c1b5552294b2130f7304bfb522da323442 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:22:00 +0400 -Subject: [PATCH 19/46] cpp: honor sysroot. - -Currently, if the gcc toolchain is relocated and installed from sstate, then you try and compile -preprocessed source (.i or .ii files), the compiler will try and access the builtin sysroot location -rather than the --sysroot option specified on the commandline. If access to that directory is -permission denied (unreadable), gcc will error. - -This happens when ccache is in use due to the fact it uses preprocessed source files. - -The fix below adds %I to the cpp-output spec macro so the default substitutions for -iprefix, --isystem, -isysroot happen and the correct sysroot is used. - -[YOCTO #2074] - -RP 2012/04/13 - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - gcc/cp/lang-specs.h | 2 +- - gcc/gcc.c | 2 +- - 2 files changed, 2 insertions(+), 2 deletions(-) - -diff --git a/gcc/cp/lang-specs.h b/gcc/cp/lang-specs.h -index 9707fac..fe487a2 100644 ---- a/gcc/cp/lang-specs.h -+++ b/gcc/cp/lang-specs.h -@@ -64,5 +64,5 @@ along with GCC; see the file COPYING3. 
If not see - {".ii", "@c++-cpp-output", 0, 0, 0}, - {"@c++-cpp-output", - "%{!M:%{!MM:%{!E:\ -- cc1plus -fpreprocessed %i %(cc1_options) %2\ -+ cc1plus -fpreprocessed %i %I %(cc1_options) %2\ - %{!fsyntax-only:%(invoke_as)}}}}", 0, 0, 0}, -diff --git a/gcc/gcc.c b/gcc/gcc.c -index 7ca129f..04fa81d 100644 ---- a/gcc/gcc.c -+++ b/gcc/gcc.c -@@ -1329,7 +1329,7 @@ static const struct compiler default_compilers[] = - %W{o*:--output-pch=%*}}%V}}}}}}}", 0, 0, 0}, - {".i", "@cpp-output", 0, 0, 0}, - {"@cpp-output", -- "%{!M:%{!MM:%{!E:cc1 -fpreprocessed %i %(cc1_options) %{!fsyntax-only:%(invoke_as)}}}}", 0, 0, 0}, -+ "%{!M:%{!MM:%{!E:cc1 -fpreprocessed %i %I %(cc1_options) %{!fsyntax-only:%(invoke_as)}}}}", 0, 0, 0}, - {".s", "@assembler", 0, 0, 0}, - {"@assembler", - "%{!M:%{!MM:%{!E:%{!S:as %(asm_debug) %(asm_options) %i %A }}}}", 0, 0, 0}, --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0020-MIPS64-Default-to-N64-ABI.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0020-MIPS64-Default-to-N64-ABI.patch deleted file mode 100644 index ba612f545..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0020-MIPS64-Default-to-N64-ABI.patch +++ /dev/null @@ -1,57 +0,0 @@ -From 1f15447fbcf65142627af8a9694761534da8d0d1 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:23:08 +0400 -Subject: [PATCH 20/46] MIPS64: Default to N64 ABI - -MIPS64 defaults to n32 ABI, this patch makes it -so that it defaults to N64 ABI - -Signed-off-by: Khem Raj - -Upstream-Status: Inappropriate [OE config specific] ---- - gcc/config.gcc | 10 +++++----- - 1 file changed, 5 insertions(+), 5 deletions(-) - -diff --git a/gcc/config.gcc b/gcc/config.gcc -index f66e48c..9c6d156 100644 ---- a/gcc/config.gcc -+++ b/gcc/config.gcc -@@ -2065,29 +2065,29 @@ mips*-*-linux*) # Linux MIPS, either endian. 
- default_mips_arch=mips32 - ;; - mips64el-st-linux-gnu) -- default_mips_abi=n32 -+ default_mips_abi=64 - tm_file="${tm_file} mips/st.h" - tmake_file="${tmake_file} mips/t-st" - enable_mips_multilibs="yes" - ;; - mips64octeon*-*-linux*) -- default_mips_abi=n32 -+ default_mips_abi=64 - tm_defines="${tm_defines} MIPS_CPU_STRING_DEFAULT=\\\"octeon\\\"" - target_cpu_default=MASK_SOFT_FLOAT_ABI - enable_mips_multilibs="yes" - ;; - mipsisa64r6*-*-linux*) -- default_mips_abi=n32 -+ default_mips_abi=64 - default_mips_arch=mips64r6 - enable_mips_multilibs="yes" - ;; - mipsisa64r2*-*-linux*) -- default_mips_abi=n32 -+ default_mips_abi=64 - default_mips_arch=mips64r2 - enable_mips_multilibs="yes" - ;; - mips64*-*-linux* | mipsisa64*-*-linux*) -- default_mips_abi=n32 -+ default_mips_abi=64 - enable_mips_multilibs="yes" - ;; - esac --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0021-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0021-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch deleted file mode 100644 index 6675ce34f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0021-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch +++ /dev/null @@ -1,234 +0,0 @@ -From 9f73c8918b63e6a1c9b79384fac411d1056eec1c Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:24:50 +0400 -Subject: [PATCH 21/46] Define GLIBC_DYNAMIC_LINKER and UCLIBC_DYNAMIC_LINKER - relative to SYSTEMLIBS_DIR - -This patch defines GLIBC_DYNAMIC_LINKER and UCLIBC_DYNAMIC_LINKER -relative to SYSTEMLIBS_DIR which can be set in generated headers -This breaks the assumption of hardcoded multilib in gcc -Change is only for the supported architectures in OE including -SH, sparc, alpha for possible future support (if any) - -Removes the do_headerfix task in metadata - -Signed-off-by: Khem Raj - -Upstream-Status: Inappropriate [OE configuration] ---- - gcc/config/alpha/linux-elf.h | 4 ++-- - gcc/config/arm/linux-eabi.h | 4 ++-- - gcc/config/arm/linux-elf.h | 2 +- - gcc/config/i386/linux.h | 2 +- - gcc/config/i386/linux64.h | 6 +++--- - gcc/config/linux.h | 8 ++++---- - gcc/config/mips/linux.h | 12 ++++++------ - gcc/config/rs6000/linux64.h | 16 ++++++---------- - gcc/config/sh/linux.h | 2 +- - gcc/config/sparc/linux.h | 2 +- - gcc/config/sparc/linux64.h | 4 ++-- - 11 files changed, 29 insertions(+), 33 deletions(-) - -diff --git a/gcc/config/alpha/linux-elf.h b/gcc/config/alpha/linux-elf.h -index a0764d3..02f7a7c 100644 ---- a/gcc/config/alpha/linux-elf.h -+++ b/gcc/config/alpha/linux-elf.h -@@ -23,8 +23,8 @@ along with GCC; see the file COPYING3. If not see - #define EXTRA_SPECS \ - { "elf_dynamic_linker", ELF_DYNAMIC_LINKER }, - --#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2" --#define UCLIBC_DYNAMIC_LINKER "/lib/ld-uClibc.so.0" -+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2" -+#define UCLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-uClibc.so.0" - #if DEFAULT_LIBC == LIBC_UCLIBC - #define CHOOSE_DYNAMIC_LINKER(G, U) "%{mglibc:" G ";:" U "}" - #elif DEFAULT_LIBC == LIBC_GLIBC -diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h -index ace8481..4010435 100644 ---- a/gcc/config/arm/linux-eabi.h -+++ b/gcc/config/arm/linux-eabi.h -@@ -68,8 +68,8 @@ - GLIBC_DYNAMIC_LINKER_DEFAULT and TARGET_DEFAULT_FLOAT_ABI. 
*/ - - #undef GLIBC_DYNAMIC_LINKER --#define GLIBC_DYNAMIC_LINKER_SOFT_FLOAT "/lib/ld-linux.so.3" --#define GLIBC_DYNAMIC_LINKER_HARD_FLOAT "/lib/ld-linux-armhf.so.3" -+#define GLIBC_DYNAMIC_LINKER_SOFT_FLOAT SYSTEMLIBS_DIR "ld-linux.so.3" -+#define GLIBC_DYNAMIC_LINKER_HARD_FLOAT SYSTEMLIBS_DIR "ld-linux-armhf.so.3" - #define GLIBC_DYNAMIC_LINKER_DEFAULT GLIBC_DYNAMIC_LINKER_SOFT_FLOAT - - #define GLIBC_DYNAMIC_LINKER \ -diff --git a/gcc/config/arm/linux-elf.h b/gcc/config/arm/linux-elf.h -index a94bd2d..0220628 100644 ---- a/gcc/config/arm/linux-elf.h -+++ b/gcc/config/arm/linux-elf.h -@@ -62,7 +62,7 @@ - - #define LIBGCC_SPEC "%{mfloat-abi=soft*:-lfloat} -lgcc" - --#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2" -+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2" - - #define LINUX_TARGET_LINK_SPEC "%{h*} \ - %{static:-Bstatic} \ -diff --git a/gcc/config/i386/linux.h b/gcc/config/i386/linux.h -index d37a875..edde586 100644 ---- a/gcc/config/i386/linux.h -+++ b/gcc/config/i386/linux.h -@@ -20,7 +20,7 @@ along with GCC; see the file COPYING3. If not see - . */ - - #define GNU_USER_LINK_EMULATION "elf_i386" --#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2" -+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2" - - #undef MUSL_DYNAMIC_LINKER - #define MUSL_DYNAMIC_LINKER "/lib/ld-musl-i386.so.1" -diff --git a/gcc/config/i386/linux64.h b/gcc/config/i386/linux64.h -index 73d22e3..f4b5615 100644 ---- a/gcc/config/i386/linux64.h -+++ b/gcc/config/i386/linux64.h -@@ -27,9 +27,9 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see - #define GNU_USER_LINK_EMULATION64 "elf_x86_64" - #define GNU_USER_LINK_EMULATIONX32 "elf32_x86_64" - --#define GLIBC_DYNAMIC_LINKER32 "/lib/ld-linux.so.2" --#define GLIBC_DYNAMIC_LINKER64 "/lib64/ld-linux-x86-64.so.2" --#define GLIBC_DYNAMIC_LINKERX32 "/libx32/ld-linux-x32.so.2" -+#define GLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld-linux.so.2" -+#define GLIBC_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld-linux-x86-64.so.2" -+#define GLIBC_DYNAMIC_LINKERX32 SYSTEMLIBS_DIR "ld-linux-x32.so.2" - - #undef MUSL_DYNAMIC_LINKER32 - #define MUSL_DYNAMIC_LINKER32 "/lib/ld-musl-i386.so.1" -diff --git a/gcc/config/linux.h b/gcc/config/linux.h -index 9aeeb94..b055652 100644 ---- a/gcc/config/linux.h -+++ b/gcc/config/linux.h -@@ -81,10 +81,10 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see - GLIBC_DYNAMIC_LINKER must be defined for each target using them, or - GLIBC_DYNAMIC_LINKER32 and GLIBC_DYNAMIC_LINKER64 for targets - supporting both 32-bit and 64-bit compilation. */ --#define UCLIBC_DYNAMIC_LINKER "/lib/ld-uClibc.so.0" --#define UCLIBC_DYNAMIC_LINKER32 "/lib/ld-uClibc.so.0" --#define UCLIBC_DYNAMIC_LINKER64 "/lib/ld64-uClibc.so.0" --#define UCLIBC_DYNAMIC_LINKERX32 "/lib/ldx32-uClibc.so.0" -+#define UCLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-uClibc.so.0" -+#define UCLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld-uClibc.so.0" -+#define UCLIBC_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld64-uClibc.so.0" -+#define UCLIBC_DYNAMIC_LINKERX32 SYSTEMLIBS_DIR "ldx32-uClibc.so.0" - #define BIONIC_DYNAMIC_LINKER "/system/bin/linker" - #define BIONIC_DYNAMIC_LINKER32 "/system/bin/linker" - #define BIONIC_DYNAMIC_LINKER64 "/system/bin/linker64" -diff --git a/gcc/config/mips/linux.h b/gcc/config/mips/linux.h -index fa253b6..da02c8d 100644 ---- a/gcc/config/mips/linux.h -+++ b/gcc/config/mips/linux.h -@@ -22,20 +22,20 @@ along with GCC; see the file COPYING3. 
If not see - #define GNU_USER_LINK_EMULATIONN32 "elf32%{EB:b}%{EL:l}tsmipn32" - - #define GLIBC_DYNAMIC_LINKER32 \ -- "%{mnan=2008:/lib/ld-linux-mipsn8.so.1;:/lib/ld.so.1}" -+ "%{mnan=2008:" SYSTEMLIBS_DIR "ld-linux-mipsn8.so.1;:" SYSTEMLIBS_DIR "ld.so.1}" - #define GLIBC_DYNAMIC_LINKER64 \ -- "%{mnan=2008:/lib64/ld-linux-mipsn8.so.1;:/lib64/ld.so.1}" -+ "%{mnan=2008:" SYSTEMLIBS_DIR "ld-linux-mipsn8.so.1;:" SYSTEMLIBS_DIR "ld.so.1}" - #define GLIBC_DYNAMIC_LINKERN32 \ -- "%{mnan=2008:/lib32/ld-linux-mipsn8.so.1;:/lib32/ld.so.1}" -+ "%{mnan=2008:" SYSTEMLIBS_DIR "ld-linux-mipsn8.so.1;:" SYSTEMLIBS_DIR "ld.so.1}" - - #undef UCLIBC_DYNAMIC_LINKER32 - #define UCLIBC_DYNAMIC_LINKER32 \ -- "%{mnan=2008:/lib/ld-uClibc-mipsn8.so.0;:/lib/ld-uClibc.so.0}" -+ "%{mnan=2008:" SYSTEMLIBS_DIR "ld-uClibc-mipsn8.so.0;:" SYSTEMLIBS_DIR "ld-uClibc.so.0}" - #undef UCLIBC_DYNAMIC_LINKER64 - #define UCLIBC_DYNAMIC_LINKER64 \ -- "%{mnan=2008:/lib/ld64-uClibc-mipsn8.so.0;:/lib/ld64-uClibc.so.0}" -+ "%{mnan=2008:" SYSTEMLIBS_DIR "ld64-uClibc-mipsn8.so.0;:" SYSTEMLIBS_DIR "ld64-uClibc.so.0}" - #define UCLIBC_DYNAMIC_LINKERN32 \ -- "%{mnan=2008:/lib32/ld-uClibc-mipsn8.so.0;:/lib32/ld-uClibc.so.0}" -+ "%{mnan=2008:" SYSTEMLIBS_DIR "ld-uClibc-mipsn8.so.0;:" SYSTEMLIBS_DIR "ld-uClibc.so.0}" - - #undef MUSL_DYNAMIC_LINKER32 - #define MUSL_DYNAMIC_LINKER32 "/lib/ld-musl-mips%{EL:el}%{msoft-float:-sf}.so.1" -diff --git a/gcc/config/rs6000/linux64.h b/gcc/config/rs6000/linux64.h -index fefa0c4..7173938 100644 ---- a/gcc/config/rs6000/linux64.h -+++ b/gcc/config/rs6000/linux64.h -@@ -412,16 +412,11 @@ extern int dot_symbols; - #undef LINK_OS_DEFAULT_SPEC - #define LINK_OS_DEFAULT_SPEC "%(link_os_linux)" - --#define GLIBC_DYNAMIC_LINKER32 "%(dynamic_linker_prefix)/lib/ld.so.1" -- -+#define GLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld.so.1" - #ifdef LINUX64_DEFAULT_ABI_ELFv2 --#define GLIBC_DYNAMIC_LINKER64 \ --"%{mabi=elfv1:%(dynamic_linker_prefix)/lib64/ld64.so.1;" \ --":%(dynamic_linker_prefix)/lib64/ld64.so.2}" -+#define GLIBC_DYNAMIC_LINKER64 "%{mabi=elfv1:" SYSTEMLIBS_DIR "ld64.so.1;:" SYSTEMLIBS_DIR "ld64.so.2}" - #else --#define GLIBC_DYNAMIC_LINKER64 \ --"%{mabi=elfv2:%(dynamic_linker_prefix)/lib64/ld64.so.2;" \ --":%(dynamic_linker_prefix)/lib64/ld64.so.1}" -+#define GLIBC_DYNAMIC_LINKER64 "%{mabi=elfv2:" SYSTEMLIBS_DIR "ld64.so.2;:" SYSTEMLIBS_DIR "ld64.so.1}" - #endif - - #define MUSL_DYNAMIC_LINKER32 \ -@@ -429,8 +424,9 @@ extern int dot_symbols; - #define MUSL_DYNAMIC_LINKER64 \ - "/lib/ld-musl-powerpc64" MUSL_DYNAMIC_LINKER_E "%{msoft-float:-sf}.so.1" - --#define UCLIBC_DYNAMIC_LINKER32 "/lib/ld-uClibc.so.0" --#define UCLIBC_DYNAMIC_LINKER64 "/lib/ld64-uClibc.so.0" -+#define UCLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld-uClibc.so.0" -+#define UCLIBC_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld64-uClibc.so.0" -+ - #if DEFAULT_LIBC == LIBC_UCLIBC - #define CHOOSE_DYNAMIC_LINKER(G, U, M) \ - "%{mglibc:" G ";:%{mmusl:" M ";:" U "}}" -diff --git a/gcc/config/sh/linux.h b/gcc/config/sh/linux.h -index 2a036ac..50fb735 100644 ---- a/gcc/config/sh/linux.h -+++ b/gcc/config/sh/linux.h -@@ -64,7 +64,7 @@ along with GCC; see the file COPYING3. 
If not see - "/lib/ld-musl-sh" MUSL_DYNAMIC_LINKER_E MUSL_DYNAMIC_LINKER_FP \ - "%{mfdpic:-fdpic}.so.1" - --#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2" -+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2" - - #undef SUBTARGET_LINK_EMUL_SUFFIX - #define SUBTARGET_LINK_EMUL_SUFFIX "%{mfdpic:_fd;:_linux}" -diff --git a/gcc/config/sparc/linux.h b/gcc/config/sparc/linux.h -index 9b32577..7bd66de 100644 ---- a/gcc/config/sparc/linux.h -+++ b/gcc/config/sparc/linux.h -@@ -83,7 +83,7 @@ extern const char *host_detect_local_cpu (int argc, const char **argv); - When the -shared link option is used a final link is not being - done. */ - --#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2" -+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2" - - #undef LINK_SPEC - #define LINK_SPEC "-m elf32_sparc %{shared:-shared} \ -diff --git a/gcc/config/sparc/linux64.h b/gcc/config/sparc/linux64.h -index a1ef325..3bae3d5 100644 ---- a/gcc/config/sparc/linux64.h -+++ b/gcc/config/sparc/linux64.h -@@ -84,8 +84,8 @@ along with GCC; see the file COPYING3. If not see - When the -shared link option is used a final link is not being - done. */ - --#define GLIBC_DYNAMIC_LINKER32 "/lib/ld-linux.so.2" --#define GLIBC_DYNAMIC_LINKER64 "/lib64/ld-linux.so.2" -+#define GLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld-linux.so.2" -+#define GLIBC_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld-linux.so.2" - - #ifdef SPARC_BI_ARCH - --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0022-gcc-Fix-argument-list-too-long-error.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0022-gcc-Fix-argument-list-too-long-error.patch deleted file mode 100644 index fab6e4aeb..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0022-gcc-Fix-argument-list-too-long-error.patch +++ /dev/null @@ -1,40 +0,0 @@ -From acce5157f6b3a1dc9a3676b7118ac887dc5693be Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:26:37 +0400 -Subject: [PATCH 22/46] gcc: Fix argument list too long error. - -There would be an "Argument list too long" error when the -build directory is longer than 200, this is caused by: - -headers=`echo $(PLUGIN_HEADERS) | tr ' ' '\012' | sort -u` - -The PLUGIN_HEADERS is too long before sort, so the "echo" can't handle -it, use the $(sort list) of GNU make which can handle the too long list -would fix the problem, the header would be short enough after sorted. -The "tr ' ' '\012'" was used for translating the space to "\n", the -$(sort list) doesn't need this. - -Signed-off-by: Robert Yang -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - gcc/Makefile.in | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/gcc/Makefile.in b/gcc/Makefile.in -index 9a1d466..450cb79 100644 ---- a/gcc/Makefile.in -+++ b/gcc/Makefile.in -@@ -3349,7 +3349,7 @@ install-plugin: installdirs lang.install-plugin s-header-vars install-gengtype - # We keep the directory structure for files in config or c-family and .def - # files. All other files are flattened to a single directory. 
- $(mkinstalldirs) $(DESTDIR)$(plugin_includedir) -- headers=`echo $(PLUGIN_HEADERS) $$(cd $(srcdir); echo *.h *.def) | tr ' ' '\012' | sort -u`; \ -+ headers="$(sort $(PLUGIN_HEADERS) $$(cd $(srcdir); echo *.h *.def))"; \ - srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`; \ - for file in $$headers; do \ - if [ -f $$file ] ; then \ --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0023-Disable-sdt.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0023-Disable-sdt.patch deleted file mode 100644 index 0efd890aa..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0023-Disable-sdt.patch +++ /dev/null @@ -1,113 +0,0 @@ -From 6573aec00ada35c48c1838c8491ce8f7798ae993 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:28:10 +0400 -Subject: [PATCH 23/46] Disable sdt. - -We don't list dtrace in DEPENDS so we shouldn't be depending on this header. -It may or may not exist from preivous builds though. To be determinstic, disable -sdt.h usage always. This avoids build failures if the header is removed after configure -but before libgcc is compiled for example. - -RP 2012/8/7 - -Signed-off-by: Khem Raj - -Disable sdt for libstdc++-v3. - -Signed-off-by: Robert Yang - -Upstream-Status: Inappropriate [hack] ---- - gcc/configure | 12 ++++++------ - gcc/configure.ac | 18 +++++++++--------- - libstdc++-v3/configure | 6 +++--- - libstdc++-v3/configure.ac | 2 +- - 4 files changed, 19 insertions(+), 19 deletions(-) - -diff --git a/gcc/configure b/gcc/configure -index 389b6d5..73c264d 100755 ---- a/gcc/configure -+++ b/gcc/configure -@@ -28528,12 +28528,12 @@ fi - { $as_echo "$as_me:${as_lineno-$LINENO}: checking sys/sdt.h in the target C library" >&5 - $as_echo_n "checking sys/sdt.h in the target C library... " >&6; } - have_sys_sdt_h=no --if test -f $target_header_dir/sys/sdt.h; then -- have_sys_sdt_h=yes -- --$as_echo "#define HAVE_SYS_SDT_H 1" >>confdefs.h -- --fi -+#if test -f $target_header_dir/sys/sdt.h; then -+# have_sys_sdt_h=yes -+# -+#$as_echo "#define HAVE_SYS_SDT_H 1" >>confdefs.h -+# -+#fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $have_sys_sdt_h" >&5 - $as_echo "$have_sys_sdt_h" >&6; } - -diff --git a/gcc/configure.ac b/gcc/configure.ac -index 91ac800..cecf121 100644 ---- a/gcc/configure.ac -+++ b/gcc/configure.ac -@@ -5514,15 +5514,15 @@ fi - AC_SUBST([enable_default_ssp]) - - # Test for on the target. --GCC_TARGET_TEMPLATE([HAVE_SYS_SDT_H]) --AC_MSG_CHECKING(sys/sdt.h in the target C library) --have_sys_sdt_h=no --if test -f $target_header_dir/sys/sdt.h; then -- have_sys_sdt_h=yes -- AC_DEFINE(HAVE_SYS_SDT_H, 1, -- [Define if your target C library provides sys/sdt.h]) --fi --AC_MSG_RESULT($have_sys_sdt_h) -+#GCC_TARGET_TEMPLATE([HAVE_SYS_SDT_H]) -+#AC_MSG_CHECKING(sys/sdt.h in the target C library) -+#have_sys_sdt_h=no -+#if test -f $target_header_dir/sys/sdt.h; then -+# have_sys_sdt_h=yes -+# AC_DEFINE(HAVE_SYS_SDT_H, 1, -+# [Define if your target C library provides sys/sdt.h]) -+#fi -+#AC_MSG_RESULT($have_sys_sdt_h) - - # Check if TFmode long double should be used by default or not. 
- # Some glibc targets used DFmode long double, but with glibc 2.4 -diff --git a/libstdc++-v3/configure b/libstdc++-v3/configure -index 8a5481c..6a40e92 100755 ---- a/libstdc++-v3/configure -+++ b/libstdc++-v3/configure -@@ -21735,11 +21735,11 @@ ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' - ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' - ac_compiler_gnu=$ac_cv_c_compiler_gnu - -- if test $glibcxx_cv_sys_sdt_h = yes; then -+# if test $glibcxx_cv_sys_sdt_h = yes; then - --$as_echo "#define HAVE_SYS_SDT_H 1" >>confdefs.h -+#$as_echo "#define HAVE_SYS_SDT_H 1" >>confdefs.h - -- fi -+# fi - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $glibcxx_cv_sys_sdt_h" >&5 - $as_echo "$glibcxx_cv_sys_sdt_h" >&6; } - -diff --git a/libstdc++-v3/configure.ac b/libstdc++-v3/configure.ac -index 9e19e99..0077ffd 100644 ---- a/libstdc++-v3/configure.ac -+++ b/libstdc++-v3/configure.ac -@@ -230,7 +230,7 @@ GLIBCXX_CHECK_SC_NPROCESSORS_ONLN - GLIBCXX_CHECK_SC_NPROC_ONLN - GLIBCXX_CHECK_PTHREADS_NUM_PROCESSORS_NP - GLIBCXX_CHECK_SYSCTL_HW_NCPU --GLIBCXX_CHECK_SDT_H -+#GLIBCXX_CHECK_SDT_H - - # Check for available headers. - AC_CHECK_HEADERS([endian.h execinfo.h float.h fp.h ieeefp.h inttypes.h \ --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0024-libtool.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0024-libtool.patch deleted file mode 100644 index 1f73b5db5..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0024-libtool.patch +++ /dev/null @@ -1,42 +0,0 @@ -From 6c0aa5c2538829248547197718037ff0b9788676 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:29:11 +0400 -Subject: [PATCH 24/46] libtool - -libstdc++ from gcc-runtime gets created with -rpath=/usr/lib/../lib for qemux86-64 -when running on am x86_64 build host. - -This patch stops this speading to libdir in the libstdc++.la file within libtool. -Arguably, it shouldn't be passing this into libtool in the first place but -for now this resolves the nastiest problems this causes. - -func_normal_abspath would resolve an empty path to `pwd` so we need -to filter the zero case. - -RP 2012/8/24 - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - ltmain.sh | 4 ++++ - 1 file changed, 4 insertions(+) - -diff --git a/ltmain.sh b/ltmain.sh -index 9503ec8..0121fba 100644 ---- a/ltmain.sh -+++ b/ltmain.sh -@@ -6359,6 +6359,10 @@ func_mode_link () - func_warning "ignoring multiple \`-rpath's for a libtool library" - - install_libdir="$1" -+ if test -n "$install_libdir"; then -+ func_normal_abspath "$install_libdir" -+ install_libdir=$func_normal_abspath_result -+ fi - - oldlibs= - if test -z "$rpath"; then --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0025-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0025-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch deleted file mode 100644 index 3b7ee497f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0025-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch +++ /dev/null @@ -1,43 +0,0 @@ -From b1263a48553ce75f8c3bed4fe12122af57845567 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:30:32 +0400 -Subject: [PATCH 25/46] gcc: armv4: pass fix-v4bx to linker to support EABI. 
- -The LINK_SPEC for linux gets overwritten by linux-eabi.h which -means the value of TARGET_FIX_V4BX_SPEC gets lost and as a result -the option is not passed to linker when chosing march=armv4 -This patch redefines this in linux-eabi.h and reinserts it -for eabi defaulting toolchains. - -We might want to send it upstream. - -Signed-off-by: Khem Raj - -Upstream-Status: Pending ---- - gcc/config/arm/linux-eabi.h | 6 +++++- - 1 file changed, 5 insertions(+), 1 deletion(-) - -diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h -index 4010435..aaea1c9 100644 ---- a/gcc/config/arm/linux-eabi.h -+++ b/gcc/config/arm/linux-eabi.h -@@ -94,10 +94,14 @@ - #define MUSL_DYNAMIC_LINKER \ - "/lib/ld-musl-arm" MUSL_DYNAMIC_LINKER_E "%{mfloat-abi=hard:hf}.so.1" - -+/* For armv4 we pass --fix-v4bx to linker to support EABI */ -+#undef TARGET_FIX_V4BX_SPEC -+#define TARGET_FIX_V4BX_SPEC "%{mcpu=arm8|mcpu=arm810|mcpu=strongarm*|march=armv4: --fix-v4bx}" -+ - /* At this point, bpabi.h will have clobbered LINK_SPEC. We want to - use the GNU/Linux version, not the generic BPABI version. */ - #undef LINK_SPEC --#define LINK_SPEC EABI_LINK_SPEC \ -+#define LINK_SPEC TARGET_FIX_V4BX_SPEC EABI_LINK_SPEC \ - LINUX_OR_ANDROID_LD (LINUX_TARGET_LINK_SPEC, \ - LINUX_TARGET_LINK_SPEC " " ANDROID_LINK_SPEC) - --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0026-Use-the-multilib-config-files-from-B-instead-of-usin.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0026-Use-the-multilib-config-files-from-B-instead-of-usin.patch deleted file mode 100644 index be25be616..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0026-Use-the-multilib-config-files-from-B-instead-of-usin.patch +++ /dev/null @@ -1,102 +0,0 @@ -From b5c305fc251299f2e328410b18cfb55c75b5f038 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Mar 2013 09:33:04 +0400 -Subject: [PATCH 26/46] Use the multilib config files from ${B} instead of - using the ones from ${S} - -Use the multilib config files from ${B} instead of using the ones from ${S} -so that the source can be shared between gcc-cross-initial, -gcc-cross-intermediate, gcc-cross, gcc-runtime, and also the sdk build. 
- -Signed-off-by: Khem Raj -Signed-off-by: Constantin Musca - -Upstream-Status: Inappropriate [configuration] ---- - gcc/configure | 22 ++++++++++++++++++---- - gcc/configure.ac | 22 ++++++++++++++++++---- - 2 files changed, 36 insertions(+), 8 deletions(-) - -diff --git a/gcc/configure b/gcc/configure -index 73c264d..377253e 100755 ---- a/gcc/configure -+++ b/gcc/configure -@@ -12110,10 +12110,20 @@ done - tmake_file_= - for f in ${tmake_file} - do -- if test -f ${srcdir}/config/$f -- then -- tmake_file_="${tmake_file_} \$(srcdir)/config/$f" -- fi -+ case $f in -+ */t-linux64 ) -+ if test -f ./config/$f -+ then -+ tmake_file_="${tmake_file_} ./config/$f" -+ fi -+ ;; -+ * ) -+ if test -f ${srcdir}/config/$f -+ then -+ tmake_file_="${tmake_file_} \$(srcdir)/config/$f" -+ fi -+ ;; -+ esac - done - tmake_file="${tmake_file_}" - -@@ -12124,6 +12134,10 @@ tm_file_list="options.h" - tm_include_list="options.h insn-constants.h" - for f in $tm_file; do - case $f in -+ */linux64.h ) -+ tm_file_list="${tm_file_list} ./config/$f" -+ tm_include_list="${tm_include_list} ./config/$f" -+ ;; - ./* ) - f=`echo $f | sed 's/^..//'` - tm_file_list="${tm_file_list} $f" -diff --git a/gcc/configure.ac b/gcc/configure.ac -index cecf121..54e7619 100644 ---- a/gcc/configure.ac -+++ b/gcc/configure.ac -@@ -1863,10 +1863,20 @@ done - tmake_file_= - for f in ${tmake_file} - do -- if test -f ${srcdir}/config/$f -- then -- tmake_file_="${tmake_file_} \$(srcdir)/config/$f" -- fi -+ case $f in -+ */t-linux64 ) -+ if test -f ./config/$f -+ then -+ tmake_file_="${tmake_file_} ./config/$f" -+ fi -+ ;; -+ * ) -+ if test -f ${srcdir}/config/$f -+ then -+ tmake_file_="${tmake_file_} \$(srcdir)/config/$f" -+ fi -+ ;; -+ esac - done - tmake_file="${tmake_file_}" - -@@ -1877,6 +1887,10 @@ tm_file_list="options.h" - tm_include_list="options.h insn-constants.h" - for f in $tm_file; do - case $f in -+ */linux64.h ) -+ tm_file_list="${tm_file_list} ./config/$f" -+ tm_include_list="${tm_include_list} ./config/$f" -+ ;; - ./* ) - f=`echo $f | sed 's/^..//'` - tm_file_list="${tm_file_list} $f" --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0027-Avoid-using-libdir-from-.la-which-usually-points-to-.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0027-Avoid-using-libdir-from-.la-which-usually-points-to-.patch deleted file mode 100644 index d1bbebc0a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0027-Avoid-using-libdir-from-.la-which-usually-points-to-.patch +++ /dev/null @@ -1,31 +0,0 @@ -From eb6178b7fb466ae429c56380c6dbc564a16d900a Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 20 Feb 2015 09:39:38 +0000 -Subject: [PATCH 27/46] Avoid using libdir from .la which usually points to a - host path - -Upstream-Status: Inappropriate [embedded specific] - -Signed-off-by: Jonathan Liu -Signed-off-by: Khem Raj ---- - ltmain.sh | 3 +++ - 1 file changed, 3 insertions(+) - -diff --git a/ltmain.sh b/ltmain.sh -index 0121fba..52bdbdb 100644 ---- a/ltmain.sh -+++ b/ltmain.sh -@@ -5628,6 +5628,9 @@ func_mode_link () - absdir="$abs_ladir" - libdir="$abs_ladir" - else -+ # Instead of using libdir from .la which usually points to a host path, -+ # use the path the .la is contained in. 
-+ libdir="$abs_ladir" - dir="$libdir" - absdir="$libdir" - fi --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0028-export-CPP.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0028-export-CPP.patch deleted file mode 100644 index c21253938..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0028-export-CPP.patch +++ /dev/null @@ -1,53 +0,0 @@ -From 617184f35e97934d9e6268e71378574e2b776c2b Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 20 Feb 2015 09:40:59 +0000 -Subject: [PATCH 28/46] export CPP - -The OE environment sets and exports CPP as being the target gcc. When -building gcc-cross-canadian for a mingw targetted sdk, the following can be found -in build.x86_64-pokysdk-mingw32.i586-poky-linux/build-x86_64-linux/libiberty/config.log: - -configure:3641: checking for _FILE_OFFSET_BITS value needed for large files -configure:3666: gcc -c -isystem/media/build1/poky/build/tmp/sysroots/x86_64-linux/usr/include -O2 -pipe conftest.c >&5 -configure:3666: $? = 0 -configure:3698: result: no -configure:3786: checking how to run the C preprocessor -configure:3856: result: x86_64-pokysdk-mingw32-gcc -E --sysroot=/media/build1/poky/build/tmp/sysroots/x86_64-nativesdk-mingw32-pokysdk-mingw32 -configure:3876: x86_64-pokysdk-mingw32-gcc -E --sysroot=/media/build1/poky/build/tmp/sysroots/x86_64-nativesdk-mingw32-pokysdk-mingw32 conftest.c -configure:3876: $? = 0 - -Note this is a *build* target (in build-x86_64-linux) so it should be -using the host "gcc", not x86_64-pokysdk-mingw32-gcc. Since the mingw32 -headers are very different, using the wrong cpp is a real problem. It is leaking -into configure through the CPP variable. Ultimately this leads to build -failures related to not being able to include a process.h file for pem-unix.c. - -The fix is to ensure we export a sane CPP value into the build -environment when using build targets. We could define a CPP_FOR_BUILD value which may be -the version which needs to be upstreamed but for now, this fix is good enough to -avoid the problem. - -RP 22/08/2013 - -Upstream-Status: Pending - -Signed-off-by: Khem Raj ---- - Makefile.in | 1 + - 1 file changed, 1 insertion(+) - -diff --git a/Makefile.in b/Makefile.in -index 1522e39..beb9b9a 100644 ---- a/Makefile.in -+++ b/Makefile.in -@@ -149,6 +149,7 @@ BUILD_EXPORTS = \ - AR="$(AR_FOR_BUILD)"; export AR; \ - AS="$(AS_FOR_BUILD)"; export AS; \ - CC="$(CC_FOR_BUILD)"; export CC; \ -+ CPP="$(CC_FOR_BUILD) -E"; export CPP; \ - CFLAGS="$(CFLAGS_FOR_BUILD)"; export CFLAGS; \ - CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ - CXX="$(CXX_FOR_BUILD)"; export CXX; \ --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0029-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0029-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch deleted file mode 100644 index 47b9c0d1b..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0029-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch +++ /dev/null @@ -1,56 +0,0 @@ -From e140700976e3b7eb4250b1ffde9bc16494456903 Mon Sep 17 00:00:00 2001 -From: Alexandru-Cezar Sardan -Date: Wed, 5 Feb 2014 16:52:31 +0200 -Subject: [PATCH 29/46] Enable SPE & AltiVec generation on powepc*linux target - -When is configured with --target=powerpc-linux, the resulting GCC will -not be able to generate code for SPE targets (e500v1/v2). 
-GCC configured with --target=powerpc-linuxspe will not be able to -generate AltiVec instructions (for e6500). -This patch modifies the configured file such that SPE or AltiVec code -can be generated when gcc is configured with --target=powerpc-linux. -The ABI and speciffic instructions can be selected through the -"-mabi=spe or -mabi=altivec" and the "-mspe or -maltivec" parameters. - -Upstream-Status: Inappropriate [configuration] - -Signed-off-by: Alexandru-Cezar Sardan ---- - gcc/config.gcc | 9 ++++++++- - gcc/config/rs6000/linuxspe.h | 3 --- - 2 files changed, 8 insertions(+), 4 deletions(-) - -diff --git a/gcc/config.gcc b/gcc/config.gcc -index 9c6d156..18cff5a 100644 ---- a/gcc/config.gcc -+++ b/gcc/config.gcc -@@ -2392,7 +2392,14 @@ powerpc-*-rtems*) - tmake_file="${tmake_file} rs6000/t-fprules rs6000/t-rtems rs6000/t-ppccomm" - ;; - powerpc*-*-linux*) -- tm_file="${tm_file} dbxelf.h elfos.h gnu-user.h freebsd-spec.h rs6000/sysv4.h" -+ case ${target} in -+ powerpc*-*-linux*spe* | powerpc*-*-linux*altivec*) -+ tm_file="${tm_file} dbxelf.h elfos.h gnu-user.h freebsd-spec.h rs6000/sysv4.h" -+ ;; -+ *) -+ tm_file="${tm_file} dbxelf.h elfos.h gnu-user.h freebsd-spec.h rs6000/sysv4.h rs6000/linuxaltivec.h rs6000/linuxspe.h rs6000/e500.h" -+ ;; -+ esac - extra_options="${extra_options} rs6000/sysv4.opt" - tmake_file="${tmake_file} rs6000/t-fprules rs6000/t-ppccomm" - extra_objs="$extra_objs rs6000-linux.o" -diff --git a/gcc/config/rs6000/linuxspe.h b/gcc/config/rs6000/linuxspe.h -index 35623cd..f74e00d 100644 ---- a/gcc/config/rs6000/linuxspe.h -+++ b/gcc/config/rs6000/linuxspe.h -@@ -27,6 +27,3 @@ - #undef TARGET_DEFAULT - #define TARGET_DEFAULT MASK_STRICT_ALIGN - #endif -- --#undef ASM_DEFAULT_SPEC --#define ASM_DEFAULT_SPEC "-mppc -mspe -me500" --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0030-Disable-the-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0030-Disable-the-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch deleted file mode 100644 index c09d0192e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0030-Disable-the-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch +++ /dev/null @@ -1,42 +0,0 @@ -From 0ddcb95a86830766fd02122f19384fc929b377c5 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 20 Feb 2015 10:21:55 +0000 -Subject: [PATCH 30/46] Disable the MULTILIB_OSDIRNAMES and other multilib - options. - -Hard coding the MULTILIB_OSDIRNAMES with ../lib64 is causing problems on -systems where the libdir is NOT set to /lib64. This is allowed by the -ABI, as -long as the dynamic loader is present in /lib. - -We simply want to use the default rules in gcc to find and configure the -normal libdir. 
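A quick way to observe what this change affects — sketched here with a placeholder cross-compiler name, since the actual triplet depends on the build — is to ask the compiler itself which OS multilib directory it will search:

    # Placeholder tool name; substitute the aarch64 cross gcc from the build.
    CC=aarch64-poky-linux-gcc

    # With the hardcoded MULTILIB_OSDIRNAMES this prints "../lib64",
    # pushing library lookups into $sysroot/usr/lib64 even on systems
    # whose libdir is plain /usr/lib; with those lines commented out the
    # compiler falls back to its default multilib rules.
    $CC -print-multi-os-directory

    # The complete effective library search order can be dumped with:
    $CC -print-search-dirs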
- -Upstream-Status: Inappropriate[OE-Specific] - -Signed-off-by: Mark Hatle -Signed-off-by: Khem Raj ---- - gcc/config/aarch64/t-aarch64-linux | 8 ++++---- - 1 file changed, 4 insertions(+), 4 deletions(-) - -diff --git a/gcc/config/aarch64/t-aarch64-linux b/gcc/config/aarch64/t-aarch64-linux -index 1cfe9f3..d688ac9 100644 ---- a/gcc/config/aarch64/t-aarch64-linux -+++ b/gcc/config/aarch64/t-aarch64-linux -@@ -21,8 +21,8 @@ - LIB1ASMSRC = aarch64/lib1funcs.asm - LIB1ASMFUNCS = _aarch64_sync_cache_range - --AARCH_BE = $(if $(findstring TARGET_BIG_ENDIAN_DEFAULT=1, $(tm_defines)),_be) --MULTILIB_OSDIRNAMES = mabi.lp64=../lib64$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu) --MULTIARCH_DIRNAME = $(call if_multiarch,aarch64$(AARCH_BE)-linux-gnu) -+#AARCH_BE = $(if $(findstring TARGET_BIG_ENDIAN_DEFAULT=1, $(tm_defines)),_be) -+#MULTILIB_OSDIRNAMES = mabi.lp64=../lib64$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu) -+#MULTIARCH_DIRNAME = $(call if_multiarch,aarch64$(AARCH_BE)-linux-gnu) - --MULTILIB_OSDIRNAMES += mabi.ilp32=../libilp32 -+#MULTILIB_OSDIRNAMES += mabi.ilp32=../libilp32 --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0031-Ensure-target-gcc-headers-can-be-included.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0031-Ensure-target-gcc-headers-can-be-included.patch deleted file mode 100644 index fb1cd0f16..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0031-Ensure-target-gcc-headers-can-be-included.patch +++ /dev/null @@ -1,98 +0,0 @@ -From fc6621435a64a9d69aa251b70361da94cf2db6be Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 20 Feb 2015 10:25:11 +0000 -Subject: [PATCH 31/46] Ensure target gcc headers can be included - -There are a few headers installed as part of the OpenEmbedded -gcc-runtime target (omp.h, ssp/*.h). Being installed from a recipe -built for the target architecture, these are within the target -sysroot and not cross/nativesdk; thus they weren't able to be -found by gcc with the existing search paths. Add support for -picking up these headers under the sysroot supplied on the gcc -command line in order to resolve this. - -Upstream-Status: Pending - -Signed-off-by: Paul Eggleton -Signed-off-by: Khem Raj ---- - gcc/Makefile.in | 2 ++ - gcc/cppdefault.c | 4 ++++ - gcc/defaults.h | 9 +++++++++ - gcc/gcc.c | 7 ------- - 4 files changed, 15 insertions(+), 7 deletions(-) - -diff --git a/gcc/Makefile.in b/gcc/Makefile.in -index 450cb79..cc75536 100644 ---- a/gcc/Makefile.in -+++ b/gcc/Makefile.in -@@ -593,6 +593,7 @@ libexecdir = @libexecdir@ - - # Directory in which the compiler finds libraries etc. 
- libsubdir = $(libdir)/gcc/$(real_target_noncanonical)/$(version)$(accel_dir_suffix) -+libsubdir_target = gcc/$(target_noncanonical)/$(version) - # Directory in which the compiler finds executables - libexecsubdir = $(libexecdir)/gcc/$(real_target_noncanonical)/$(version)$(accel_dir_suffix) - # Directory in which all plugin resources are installed -@@ -2688,6 +2689,7 @@ CFLAGS-intl.o += -DLOCALEDIR=\"$(localedir)\" - - PREPROCESSOR_DEFINES = \ - -DGCC_INCLUDE_DIR=\"$(libsubdir)/include\" \ -+ -DGCC_INCLUDE_SUBDIR_TARGET=\"$(libsubdir_target)/include\" \ - -DFIXED_INCLUDE_DIR=\"$(libsubdir)/include-fixed\" \ - -DGPLUSPLUS_INCLUDE_DIR=\"$(gcc_gxx_include_dir)\" \ - -DGPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT=$(gcc_gxx_include_dir_add_sysroot) \ -diff --git a/gcc/cppdefault.c b/gcc/cppdefault.c -index 54aaf06..7b4dd51 100644 ---- a/gcc/cppdefault.c -+++ b/gcc/cppdefault.c -@@ -59,6 +59,10 @@ const struct default_include cpp_include_defaults[] - /* This is the dir for gcc's private headers. */ - { GCC_INCLUDE_DIR, "GCC", 0, 0, 0, 0 }, - #endif -+#ifdef GCC_INCLUDE_SUBDIR_TARGET -+ /* This is the dir for gcc's private headers under the specified sysroot. */ -+ { STANDARD_STARTFILE_PREFIX_2 GCC_INCLUDE_SUBDIR_TARGET, "GCC", 0, 0, 1, 0 }, -+#endif - #ifdef LOCAL_INCLUDE_DIR - /* /usr/local/include comes before the fixincluded header files. */ - { LOCAL_INCLUDE_DIR, 0, 0, 1, 1, 2 }, -diff --git a/gcc/defaults.h b/gcc/defaults.h -index 3e18338..0f317f2 100644 ---- a/gcc/defaults.h -+++ b/gcc/defaults.h -@@ -1492,4 +1492,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see - #define DWARF_GNAT_ENCODINGS_DEFAULT DWARF_GNAT_ENCODINGS_GDB - #endif - -+/* Default prefixes to attach to command names. */ -+ -+#ifndef STANDARD_STARTFILE_PREFIX_1 -+#define STANDARD_STARTFILE_PREFIX_1 "/lib/" -+#endif -+#ifndef STANDARD_STARTFILE_PREFIX_2 -+#define STANDARD_STARTFILE_PREFIX_2 "/usr/lib/" -+#endif -+ - #endif /* ! GCC_DEFAULTS_H */ -diff --git a/gcc/gcc.c b/gcc/gcc.c -index 04fa81d..9750cc2 100644 ---- a/gcc/gcc.c -+++ b/gcc/gcc.c -@@ -1450,13 +1450,6 @@ static const char *gcc_libexec_prefix; - - /* Default prefixes to attach to command names. */ - --#ifndef STANDARD_STARTFILE_PREFIX_1 --#define STANDARD_STARTFILE_PREFIX_1 "/lib/" --#endif --#ifndef STANDARD_STARTFILE_PREFIX_2 --#define STANDARD_STARTFILE_PREFIX_2 "/usr/lib/" --#endif -- - #ifdef CROSS_DIRECTORY_STRUCTURE /* Don't use these prefixes for a cross compiler. */ - #undef MD_EXEC_PREFIX - #undef MD_STARTFILE_PREFIX --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0032-gcc-4.8-won-t-build-with-disable-dependency-tracking.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0032-gcc-4.8-won-t-build-with-disable-dependency-tracking.patch deleted file mode 100644 index c0b001db5..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0032-gcc-4.8-won-t-build-with-disable-dependency-tracking.patch +++ /dev/null @@ -1,54 +0,0 @@ -From ff939c5063d8f8d444bdb25651a0a48e608efaa4 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 20 Feb 2015 11:17:19 +0000 -Subject: [PATCH 32/46] gcc 4.8+ won't build with --disable-dependency-tracking - -since the *.Ppo files don't get created unless --enable-dependency-tracking is true. - -This patch ensures we only use those compiler options when its enabled. 
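The failure mode is easy to reproduce outside the gcc tree; this is only an illustration with made-up file names, not libatomic's real rule:

    cat > demo.c <<'EOF'
    int f (void) { return 0; }
    EOF

    # What an AMDEP (dependency-tracking) build does: -MF writes the
    # .Ppo side file while the object is compiled.
    mkdir -p .deps
    gcc -MT demo.o -MD -MP -MF .deps/demo.Ppo -c demo.c -o demo.o

    # In a --disable-dependency-tracking build automake never creates
    # .deps/, so the same compile line aborts because the compiler cannot
    # open the dependency file -- which is why M_DEPS has to be emptied
    # out in that configuration.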
- -Upstream-Status: Submitted - -(Problem was already reported upstream, attached this patch there -http://gcc.gnu.org/bugzilla/show_bug.cgi?id=55930) - -RP -2012/09/22 - -Signed-off-by: Khem Raj ---- - libatomic/Makefile.am | 3 ++- - libatomic/Makefile.in | 3 ++- - 2 files changed, 4 insertions(+), 2 deletions(-) - -diff --git a/libatomic/Makefile.am b/libatomic/Makefile.am -index b351244..399ce18 100644 ---- a/libatomic/Makefile.am -+++ b/libatomic/Makefile.am -@@ -101,7 +101,8 @@ PAT_S = $(word 3,$(PAT_SPLIT)) - IFUNC_DEF = -DIFUNC_ALT=$(PAT_S) - IFUNC_OPT = $(word $(PAT_S),$(IFUNC_OPTIONS)) - --M_DEPS = -MT $@ -MD -MP -MF $(DEPDIR)/$(@F).Ppo -+@AMDEP_TRUE@M_DEPS = -MT $@ -MD -MP -MF $(DEPDIR)/$(@F).Ppo -+@AMDEP_FALSE@M_DEPS = - M_SIZE = -DN=$(PAT_N) - M_IFUNC = $(if $(PAT_S),$(IFUNC_DEF) $(IFUNC_OPT)) - M_FILE = $(PAT_BASE)_n.c -diff --git a/libatomic/Makefile.in b/libatomic/Makefile.in -index a083d87..a92cfce 100644 ---- a/libatomic/Makefile.in -+++ b/libatomic/Makefile.in -@@ -330,7 +330,8 @@ PAT_N = $(word 2,$(PAT_SPLIT)) - PAT_S = $(word 3,$(PAT_SPLIT)) - IFUNC_DEF = -DIFUNC_ALT=$(PAT_S) - IFUNC_OPT = $(word $(PAT_S),$(IFUNC_OPTIONS)) --M_DEPS = -MT $@ -MD -MP -MF $(DEPDIR)/$(@F).Ppo -+@AMDEP_TRUE@M_DEPS = -MT $@ -MD -MP -MF $(DEPDIR)/$(@F).Ppo -+@AMDEP_FALSE@M_DEPS = - M_SIZE = -DN=$(PAT_N) - M_IFUNC = $(if $(PAT_S),$(IFUNC_DEF) $(IFUNC_OPT)) - M_FILE = $(PAT_BASE)_n.c --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0033-Don-t-search-host-directory-during-relink-if-inst_pr.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0033-Don-t-search-host-directory-during-relink-if-inst_pr.patch deleted file mode 100644 index e425d7146..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0033-Don-t-search-host-directory-during-relink-if-inst_pr.patch +++ /dev/null @@ -1,38 +0,0 @@ -From 5092f5389d02e78cd59690cf3fca24b56a97aff2 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Tue, 3 Mar 2015 08:21:19 +0000 -Subject: [PATCH 33/46] Don't search host directory during "relink" if - $inst_prefix is provided - -http://lists.gnu.org/archive/html/libtool-patches/2011-01/msg00026.html - -Upstream-Status: Submitted - -Signed-off-by: Khem Raj ---- - ltmain.sh | 5 +++-- - 1 file changed, 3 insertions(+), 2 deletions(-) - -diff --git a/ltmain.sh b/ltmain.sh -index 52bdbdb..82bcec3 100644 ---- a/ltmain.sh -+++ b/ltmain.sh -@@ -6004,12 +6004,13 @@ func_mode_link () - fi - else - # We cannot seem to hardcode it, guess we'll fake it. -+ # Default if $libdir is not relative to the prefix: - add_dir="-L$libdir" -- # Try looking first in the location we're being installed to. 
-+ - if test -n "$inst_prefix_dir"; then - case $libdir in - [\\/]*) -- add_dir="$add_dir -L$inst_prefix_dir$libdir" -+ add_dir="-L$inst_prefix_dir$libdir" - ;; - esac - fi --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0034-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0034-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch deleted file mode 100644 index 922a8555b..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0034-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch +++ /dev/null @@ -1,29 +0,0 @@ -From 1faa6f69f93bb95af2b2b2bd24e181b50fb5b37c Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Tue, 28 Apr 2015 23:15:27 -0700 -Subject: [PATCH 34/46] Use SYSTEMLIBS_DIR replacement instead of hardcoding - base_libdir - -Signed-off-by: Khem Raj ---- -Upstream-Status: Inappropriate [OE Configuration] - - gcc/config/aarch64/aarch64-linux.h | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/gcc/config/aarch64/aarch64-linux.h b/gcc/config/aarch64/aarch64-linux.h -index 5fcaa59..8588ac0 100644 ---- a/gcc/config/aarch64/aarch64-linux.h -+++ b/gcc/config/aarch64/aarch64-linux.h -@@ -21,7 +21,7 @@ - #ifndef GCC_AARCH64_LINUX_H - #define GCC_AARCH64_LINUX_H - --#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" -+#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" - - #undef MUSL_DYNAMIC_LINKER - #define MUSL_DYNAMIC_LINKER "/lib/ld-musl-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0035-aarch64-Add-support-for-musl-ldso.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0035-aarch64-Add-support-for-musl-ldso.patch deleted file mode 100644 index 9dfc47276..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0035-aarch64-Add-support-for-musl-ldso.patch +++ /dev/null @@ -1,28 +0,0 @@ -From 3768468c1a6cc170fff88c03b808c975ac653811 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Tue, 28 Apr 2015 23:18:39 -0700 -Subject: [PATCH 35/46] aarch64: Add support for musl ldso - -Signed-off-by: Khem Raj ---- -Upstream-Status: Inappropriate [OE Configuration] - - gcc/config/aarch64/aarch64-linux.h | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/gcc/config/aarch64/aarch64-linux.h b/gcc/config/aarch64/aarch64-linux.h -index 8588ac0..946b3ca 100644 ---- a/gcc/config/aarch64/aarch64-linux.h -+++ b/gcc/config/aarch64/aarch64-linux.h -@@ -24,7 +24,7 @@ - #define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" - - #undef MUSL_DYNAMIC_LINKER --#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" -+#define MUSL_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-musl-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" - - #undef ASAN_CC1_SPEC - #define ASAN_CC1_SPEC "%{%:sanitize(address):-funwind-tables}" --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0036-libcc1-fix-libcc1-s-install-path-and-rpath.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0036-libcc1-fix-libcc1-s-install-path-and-rpath.patch deleted file mode 100644 index f89a8860f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0036-libcc1-fix-libcc1-s-install-path-and-rpath.patch +++ /dev/null 
@@ -1,54 +0,0 @@ -From f4d3c8e970d42a43cd3d2f751e13324efa936ff8 Mon Sep 17 00:00:00 2001 -From: Robert Yang -Date: Sun, 5 Jul 2015 20:25:18 -0700 -Subject: [PATCH 36/46] libcc1: fix libcc1's install path and rpath - -* Install libcc1.so and libcc1plugin.so into - $(libexecdir)/gcc/$(target_noncanonical)/$(gcc_version), as what we - had done to lto-plugin. -* Fix bad RPATH iussue: - gcc-5.2.0: package gcc-plugins contains bad RPATH /patht/to/tmp/sysroots/qemux86-64/usr/lib64/../lib64 in file - /path/to/gcc/5.2.0-r0/packages-split/gcc-plugins/usr/lib64/gcc/x86_64-poky-linux/5.2.0/plugin/libcc1plugin.so.0.0.0 - [rpaths] - -Upstream-Status: Inappropriate [OE configuration] - -Signed-off-by: Robert Yang ---- - libcc1/Makefile.am | 4 ++-- - libcc1/Makefile.in | 4 ++-- - 2 files changed, 4 insertions(+), 4 deletions(-) - -diff --git a/libcc1/Makefile.am b/libcc1/Makefile.am -index b40820b..32930c5 100644 ---- a/libcc1/Makefile.am -+++ b/libcc1/Makefile.am -@@ -35,8 +35,8 @@ libiberty = $(if $(wildcard $(libiberty_noasan)),$(Wc)$(libiberty_noasan), \ - $(Wc)$(libiberty_normal))) - libiberty_dep = $(patsubst $(Wc)%,%,$(libiberty)) - --plugindir = $(libdir)/gcc/$(target_noncanonical)/$(gcc_version)/plugin --cc1libdir = $(libdir)/$(libsuffix) -+cc1libdir = $(libexecdir)/gcc/$(target_noncanonical)/$(gcc_version) -+plugindir = $(cc1libdir) - - if ENABLE_PLUGIN - plugin_LTLIBRARIES = libcc1plugin.la -diff --git a/libcc1/Makefile.in b/libcc1/Makefile.in -index 79d39d3..227ec22 100644 ---- a/libcc1/Makefile.in -+++ b/libcc1/Makefile.in -@@ -291,8 +291,8 @@ libiberty = $(if $(wildcard $(libiberty_noasan)),$(Wc)$(libiberty_noasan), \ - $(Wc)$(libiberty_normal))) - - libiberty_dep = $(patsubst $(Wc)%,%,$(libiberty)) --plugindir = $(libdir)/gcc/$(target_noncanonical)/$(gcc_version)/plugin --cc1libdir = $(libdir)/$(libsuffix) -+cc1libdir = $(libexecdir)/gcc/$(target_noncanonical)/$(gcc_version) -+plugindir = $(cc1libdir) - @ENABLE_PLUGIN_TRUE@plugin_LTLIBRARIES = libcc1plugin.la - @ENABLE_PLUGIN_TRUE@cc1lib_LTLIBRARIES = libcc1.la - BUILT_SOURCES = compiler-name.h --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0037-handle-sysroot-support-for-nativesdk-gcc.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0037-handle-sysroot-support-for-nativesdk-gcc.patch deleted file mode 100644 index 15efcb12e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0037-handle-sysroot-support-for-nativesdk-gcc.patch +++ /dev/null @@ -1,213 +0,0 @@ -From 1475b941d7a9c9874b0fb0558d01805945467331 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 7 Dec 2015 23:39:54 +0000 -Subject: [PATCH 37/46] handle sysroot support for nativesdk-gcc - -Being able to build a nativesdk gcc is useful, particularly in cases -where the host compiler may be of an incompatible version (or a 32 -bit compiler is needed). - -Sadly, building nativesdk-gcc is not straight forward. We install -nativesdk-gcc into a relocatable location and this means that its -library locations can change. "Normal" sysroot support doesn't help -in this case since the values of paths like "libdir" change, not just -base root directory of the system. - -In order to handle this we do two things: - -a) Add %r into spec file markup which can be used for injected paths - such as SYSTEMLIBS_DIR (see gcc_multilib_setup()). -b) Add other paths which need relocation into a .gccrelocprefix section - which the relocation code will notice and adjust automatically. 
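As a sketch of mechanism (b) — stand-alone demo code, not an excerpt from gcc — a path stored in a dedicated ELF section can be located and rewritten in the installed binary without rebuilding it:

    cat > reloc-demo.c <<'EOF'
    /* Oversized buffer so a longer replacement path still fits. */
    static char install_prefix[4096]
        __attribute__ ((section (".gccrelocprefix"))) = "/opt/example-sdk/sysroots";

    const char *get_prefix (void) { return install_prefix; }
    int main (void) { return 0; }
    EOF
    gcc -o reloc-demo reloc-demo.c

    # The SDK relocation step only has to find this section and patch the
    # string in place:
    objdump -s -j .gccrelocprefix reloc-demo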
- -Upstream-Status: Inappropriate -RP 2015/7/28 - -Signed-off-by: Khem Raj ---- - gcc/cppdefault.c | 50 +++++++++++++++++++++++++++++++++++++------------- - gcc/cppdefault.h | 3 ++- - gcc/gcc.c | 20 ++++++++++++++------ - 3 files changed, 53 insertions(+), 20 deletions(-) - -diff --git a/gcc/cppdefault.c b/gcc/cppdefault.c -index 7b4dd51..9d1166c 100644 ---- a/gcc/cppdefault.c -+++ b/gcc/cppdefault.c -@@ -35,6 +35,30 @@ - # undef CROSS_INCLUDE_DIR - #endif - -+static char GPLUSPLUS_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = GPLUSPLUS_INCLUDE_DIR; -+static char GCC_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = GCC_INCLUDE_DIR; -+static char GPLUSPLUS_TOOL_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = GPLUSPLUS_TOOL_INCLUDE_DIR; -+static char GPLUSPLUS_BACKWARD_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = GPLUSPLUS_BACKWARD_INCLUDE_DIR; -+static char STANDARD_STARTFILE_PREFIX_2VAR[4096] __attribute__ ((section (".gccrelocprefix"))) = STANDARD_STARTFILE_PREFIX_2 GCC_INCLUDE_SUBDIR_TARGET; -+#ifdef LOCAL_INCLUDE_DIR -+static char LOCAL_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = LOCAL_INCLUDE_DIR; -+#endif -+#ifdef PREFIX_INCLUDE_DIR -+static char PREFIX_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = PREFIX_INCLUDE_DIR; -+#endif -+#ifdef FIXED_INCLUDE_DIR -+static char FIXED_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = FIXED_INCLUDE_DIR; -+#endif -+#ifdef CROSS_INCLUDE_DIR -+static char CROSS_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = CROSS_INCLUDE_DIR; -+#endif -+#ifdef TOOL_INCLUDE_DIR -+static char TOOL_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = TOOL_INCLUDE_DIR; -+#endif -+#ifdef NATIVE_SYSTEM_HEADER_DIR -+static char NATIVE_SYSTEM_HEADER_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = NATIVE_SYSTEM_HEADER_DIR; -+#endif -+ - const struct default_include cpp_include_defaults[] - #ifdef INCLUDE_DEFAULTS - = INCLUDE_DEFAULTS; -@@ -42,38 +66,38 @@ const struct default_include cpp_include_defaults[] - = { - #ifdef GPLUSPLUS_INCLUDE_DIR - /* Pick up GNU C++ generic include files. */ -- { GPLUSPLUS_INCLUDE_DIR, "G++", 1, 1, -+ { GPLUSPLUS_INCLUDE_DIRVAR, "G++", 1, 1, - GPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT, 0 }, - #endif - #ifdef GPLUSPLUS_TOOL_INCLUDE_DIR - /* Pick up GNU C++ target-dependent include files. */ -- { GPLUSPLUS_TOOL_INCLUDE_DIR, "G++", 1, 1, -+ { GPLUSPLUS_TOOL_INCLUDE_DIRVAR, "G++", 1, 1, - GPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT, 1 }, - #endif - #ifdef GPLUSPLUS_BACKWARD_INCLUDE_DIR - /* Pick up GNU C++ backward and deprecated include files. */ -- { GPLUSPLUS_BACKWARD_INCLUDE_DIR, "G++", 1, 1, -+ { GPLUSPLUS_BACKWARD_INCLUDE_DIRVAR, "G++", 1, 1, - GPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT, 0 }, - #endif - #ifdef GCC_INCLUDE_DIR - /* This is the dir for gcc's private headers. */ -- { GCC_INCLUDE_DIR, "GCC", 0, 0, 0, 0 }, -+ { GCC_INCLUDE_DIRVAR, "GCC", 0, 0, 0, 0 }, - #endif - #ifdef GCC_INCLUDE_SUBDIR_TARGET - /* This is the dir for gcc's private headers under the specified sysroot. */ -- { STANDARD_STARTFILE_PREFIX_2 GCC_INCLUDE_SUBDIR_TARGET, "GCC", 0, 0, 1, 0 }, -+ { STANDARD_STARTFILE_PREFIX_2VAR, "GCC", 0, 0, 1, 0 }, - #endif - #ifdef LOCAL_INCLUDE_DIR - /* /usr/local/include comes before the fixincluded header files. 
*/ -- { LOCAL_INCLUDE_DIR, 0, 0, 1, 1, 2 }, -- { LOCAL_INCLUDE_DIR, 0, 0, 1, 1, 0 }, -+ { LOCAL_INCLUDE_DIRVAR, 0, 0, 1, 1, 2 }, -+ { LOCAL_INCLUDE_DIRVAR, 0, 0, 1, 1, 0 }, - #endif - #ifdef PREFIX_INCLUDE_DIR -- { PREFIX_INCLUDE_DIR, 0, 0, 1, 0, 0 }, -+ { PREFIX_INCLUDE_DIRVAR, 0, 0, 1, 0, 0 }, - #endif - #ifdef FIXED_INCLUDE_DIR - /* This is the dir for fixincludes. */ -- { FIXED_INCLUDE_DIR, "GCC", 0, 0, 0, -+ { FIXED_INCLUDE_DIRVAR, "GCC", 0, 0, 0, - /* A multilib suffix needs adding if different multilibs use - different headers. */ - #ifdef SYSROOT_HEADERS_SUFFIX_SPEC -@@ -85,16 +109,16 @@ const struct default_include cpp_include_defaults[] - #endif - #ifdef CROSS_INCLUDE_DIR - /* One place the target system's headers might be. */ -- { CROSS_INCLUDE_DIR, "GCC", 0, 0, 0, 0 }, -+ { CROSS_INCLUDE_DIRVAR, "GCC", 0, 0, 0, 0 }, - #endif - #ifdef TOOL_INCLUDE_DIR - /* Another place the target system's headers might be. */ -- { TOOL_INCLUDE_DIR, "BINUTILS", 0, 1, 0, 0 }, -+ { TOOL_INCLUDE_DIRVAR, "BINUTILS", 0, 1, 0, 0 }, - #endif - #ifdef NATIVE_SYSTEM_HEADER_DIR - /* /usr/include comes dead last. */ -- { NATIVE_SYSTEM_HEADER_DIR, NATIVE_SYSTEM_HEADER_COMPONENT, 0, 0, 1, 2 }, -- { NATIVE_SYSTEM_HEADER_DIR, NATIVE_SYSTEM_HEADER_COMPONENT, 0, 0, 1, 0 }, -+ { NATIVE_SYSTEM_HEADER_DIRVAR, NATIVE_SYSTEM_HEADER_COMPONENT, 0, 0, 1, 2 }, -+ { NATIVE_SYSTEM_HEADER_DIRVAR, NATIVE_SYSTEM_HEADER_COMPONENT, 0, 0, 1, 0 }, - #endif - { 0, 0, 0, 0, 0, 0 } - }; -diff --git a/gcc/cppdefault.h b/gcc/cppdefault.h -index 8a81b45..9759efd 100644 ---- a/gcc/cppdefault.h -+++ b/gcc/cppdefault.h -@@ -33,7 +33,8 @@ - - struct default_include - { -- const char *const fname; /* The name of the directory. */ -+ const char *fname; /* The name of the directory. */ -+ - const char *const component; /* The component containing the directory - (see update_path in prefix.c) */ - const char cplusplus; /* Only look here if we're compiling C++. */ -diff --git a/gcc/gcc.c b/gcc/gcc.c -index 9750cc2..94c240e 100644 ---- a/gcc/gcc.c -+++ b/gcc/gcc.c -@@ -247,6 +247,8 @@ FILE *report_times_to_file = NULL; - #endif - static const char *target_system_root = DEFAULT_TARGET_SYSTEM_ROOT; - -+static char target_relocatable_prefix[4096] __attribute__ ((section (".gccrelocprefix"))) = SYSTEMLIBS_DIR; -+ - /* Nonzero means pass the updated target_system_root to the compiler. */ - - static int target_system_root_changed; -@@ -517,6 +519,7 @@ or with constant text in a single argument. - %G process LIBGCC_SPEC as a spec. - %R Output the concatenation of target_system_root and - target_sysroot_suffix. -+ %r Output the base path target_relocatable_prefix - %S process STARTFILE_SPEC as a spec. A capital S is actually used here. - %E process ENDFILE_SPEC as a spec. A capital E is actually used here. - %C process CPP_SPEC as a spec. -@@ -1473,10 +1476,10 @@ static const char *gcc_libexec_prefix; - gcc_exec_prefix is set because, in that case, we know where the - compiler has been installed, and use paths relative to that - location instead. 
*/ --static const char *const standard_exec_prefix = STANDARD_EXEC_PREFIX; --static const char *const standard_libexec_prefix = STANDARD_LIBEXEC_PREFIX; --static const char *const standard_bindir_prefix = STANDARD_BINDIR_PREFIX; --static const char *const standard_startfile_prefix = STANDARD_STARTFILE_PREFIX; -+static char standard_exec_prefix[4096] __attribute__ ((section (".gccrelocprefix"))) = STANDARD_EXEC_PREFIX; -+static char standard_libexec_prefix[4096] __attribute__ ((section (".gccrelocprefix"))) = STANDARD_LIBEXEC_PREFIX; -+static char standard_bindir_prefix[4096] __attribute__ ((section (".gccrelocprefix"))) = STANDARD_BINDIR_PREFIX; -+static char *const standard_startfile_prefix = STANDARD_STARTFILE_PREFIX; - - /* For native compilers, these are well-known paths containing - components that may be provided by the system. For cross -@@ -1484,9 +1487,9 @@ static const char *const standard_startfile_prefix = STANDARD_STARTFILE_PREFIX; - static const char *md_exec_prefix = MD_EXEC_PREFIX; - static const char *md_startfile_prefix = MD_STARTFILE_PREFIX; - static const char *md_startfile_prefix_1 = MD_STARTFILE_PREFIX_1; --static const char *const standard_startfile_prefix_1 -+static char standard_startfile_prefix_1[4096] __attribute__ ((section (".gccrelocprefix"))) - = STANDARD_STARTFILE_PREFIX_1; --static const char *const standard_startfile_prefix_2 -+static char standard_startfile_prefix_2[4096] __attribute__ ((section (".gccrelocprefix"))) - = STANDARD_STARTFILE_PREFIX_2; - - /* A relative path to be used in finding the location of tools -@@ -5762,6 +5765,11 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part) - } - break; - -+ case 'r': -+ obstack_grow (&obstack, target_relocatable_prefix, -+ strlen (target_relocatable_prefix)); -+ break; -+ - case 'S': - value = do_spec_1 (startfile_spec, 0, NULL); - if (value != 0) --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0038-Search-target-sysroot-gcc-version-specific-dirs-with.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0038-Search-target-sysroot-gcc-version-specific-dirs-with.patch deleted file mode 100644 index 89ee79db8..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0038-Search-target-sysroot-gcc-version-specific-dirs-with.patch +++ /dev/null @@ -1,102 +0,0 @@ -From 42e4cdcaad590536246866b0846ec279e124fa16 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 7 Dec 2015 23:41:45 +0000 -Subject: [PATCH 38/46] Search target sysroot gcc version specific dirs with - multilib. - -We install the gcc libraries (such as crtbegin.p) into -//5.2.0/ -which is a default search path for GCC (aka multi_suffix in the -code below). is 'machine' in gcc's terminology. We use -these directories so that multiple gcc versions could in theory -co-exist on target. - -We only want to build one gcc-cross-canadian per arch and have this work -for all multilibs. can be handled by mapping the multilib - to the one used by gcc-cross-canadian, e.g. -mips64-polkmllib32-linux -is symlinked to by mips64-poky-linux. - -The default gcc search path in the target sysroot for a "lib64" mutlilib -is: - -/lib32/mips64-poky-linux/5.2.0/ -/lib32/../lib64/ -/usr/lib32/mips64-poky-linux/5.2.0/ -/usr/lib32/../lib64/ -/lib32/ -/usr/lib32/ - -which means that the lib32 crtbegin.o will be found and the lib64 ones -will not which leads to compiler failures. - -This patch injects a multilib version of that path first so the lib64 -binaries can be found first. 
With this change the search path becomes: - -/lib32/../lib64/mips64-poky-linux/5.2.0/ -/lib32/mips64-poky-linux/5.2.0/ -/lib32/../lib64/ -/usr/lib32/../lib64/mips64-poky-linux/5.2.0/ -/usr/lib32/mips64-poky-linux/5.2.0/ -/usr/lib32/../lib64/ -/lib32/ -/usr/lib32/ - -Upstream-Status: Pending -RP 2015/7/31 - -Signed-off-by: Khem Raj ---- - gcc/gcc.c | 29 ++++++++++++++++++++++++++++- - 1 file changed, 28 insertions(+), 1 deletion(-) - -diff --git a/gcc/gcc.c b/gcc/gcc.c -index 94c240e..2812819 100644 ---- a/gcc/gcc.c -+++ b/gcc/gcc.c -@@ -2507,7 +2507,7 @@ for_each_path (const struct path_prefix *paths, - if (path == NULL) - { - len = paths->max_len + extra_space + 1; -- len += MAX (MAX (suffix_len, multi_os_dir_len), multiarch_len); -+ len += MAX ((suffix_len + multi_os_dir_len), multiarch_len); - path = XNEWVEC (char, len); - } - -@@ -2519,6 +2519,33 @@ for_each_path (const struct path_prefix *paths, - /* Look first in MACHINE/VERSION subdirectory. */ - if (!skip_multi_dir) - { -+ if (!(pl->os_multilib ? skip_multi_os_dir : skip_multi_dir)) -+ { -+ const char *this_multi; -+ size_t this_multi_len; -+ -+ if (pl->os_multilib) -+ { -+ this_multi = multi_os_dir; -+ this_multi_len = multi_os_dir_len; -+ } -+ else -+ { -+ this_multi = multi_dir; -+ this_multi_len = multi_dir_len; -+ } -+ -+ /* Look in multilib MACHINE/VERSION subdirectory first */ -+ if (this_multi_len) -+ { -+ memcpy (path + len, this_multi, this_multi_len + 1); -+ memcpy (path + len + this_multi_len, multi_suffix, suffix_len + 1); -+ ret = callback (path, callback_info); -+ if (ret) -+ break; -+ } -+ } -+ - memcpy (path + len, multi_suffix, suffix_len + 1); - ret = callback (path, callback_info); - if (ret) --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0039-Fix-various-_FOR_BUILD-and-related-variables.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0039-Fix-various-_FOR_BUILD-and-related-variables.patch deleted file mode 100644 index 0ce7aec79..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0039-Fix-various-_FOR_BUILD-and-related-variables.patch +++ /dev/null @@ -1,137 +0,0 @@ -From 9ced49e459ccf1887feb58adf1e8836dcb4b1bdf Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Mon, 7 Dec 2015 23:42:45 +0000 -Subject: [PATCH 39/46] Fix various _FOR_BUILD and related variables - -When doing a FOR_BUILD thing, you have to override CFLAGS with -CFLAGS_FOR_BUILD. And if you use C++, you also have to override -CXXFLAGS with CXXFLAGS_FOR_BUILD. -Without this, when building for mingw, you end up trying to use -the mingw headers for a host build. - -The same goes for other variables as well, such as CPPFLAGS, -CPP, and GMPINC. 
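For context, the build/host split these variables protect looks roughly like this (triplets and paths are placeholders, not taken from any particular build):

    # Tools and flags for programs that run on the *build* machine
    # (generators, build-side libiberty, and so on):
    export CC_FOR_BUILD=gcc
    export CXX_FOR_BUILD=g++
    export CFLAGS_FOR_BUILD="-O2 -pipe"
    export CXXFLAGS_FOR_BUILD="-O2 -pipe"
    export CPPFLAGS_FOR_BUILD=""

    # Tools for the *host* the compiler will eventually run on:
    ../gcc-6.2.0/configure \
        --build=x86_64-linux \
        --host=x86_64-pokysdk-mingw32 \
        --target=i586-poky-linux \
        CC=x86_64-pokysdk-mingw32-gcc \
        CXX=x86_64-pokysdk-mingw32-g++

Without the extra exports in BUILD_EXPORTS and EXTRA_BUILD_FLAGS, build-side configure runs inherit the mingw CPP, CXXFLAGS and CPPFLAGS and pick up the wrong headers, which is the mix-up described above.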
- -Upstream-Status: Pending - -Signed-off-by: Peter Seebach -Signed-off-by: Mark Hatle -Signed-off-by: Khem Raj ---- - Makefile.in | 6 ++++++ - Makefile.tpl | 5 +++++ - gcc/Makefile.in | 2 +- - gcc/configure | 2 +- - gcc/configure.ac | 2 +- - 5 files changed, 14 insertions(+), 3 deletions(-) - -diff --git a/Makefile.in b/Makefile.in -index beb9b9a..3e1c6bc 100644 ---- a/Makefile.in -+++ b/Makefile.in -@@ -152,6 +152,7 @@ BUILD_EXPORTS = \ - CPP="$(CC_FOR_BUILD) -E"; export CPP; \ - CFLAGS="$(CFLAGS_FOR_BUILD)"; export CFLAGS; \ - CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ -+ CPPFLAGS="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS; \ - CXX="$(CXX_FOR_BUILD)"; export CXX; \ - CXXFLAGS="$(CXXFLAGS_FOR_BUILD)"; export CXXFLAGS; \ - GCJ="$(GCJ_FOR_BUILD)"; export GCJ; \ -@@ -170,6 +171,9 @@ BUILD_EXPORTS = \ - # built for the build system to override those in BASE_FLAGS_TO_PASS. - EXTRA_BUILD_FLAGS = \ - CFLAGS="$(CFLAGS_FOR_BUILD)" \ -+ CXXFLAGS="$(CXXFLAGS_FOR_BUILD)" \ -+ CPP="$(CC_FOR_BUILD) -E" \ -+ CPPFLAGS="$(CPPFLAGS_FOR_BUILD)" \ - LDFLAGS="$(LDFLAGS_FOR_BUILD)" - - # This is the list of directories to built for the host system. -@@ -187,6 +191,7 @@ HOST_SUBDIR = @host_subdir@ - HOST_EXPORTS = \ - $(BASE_EXPORTS) \ - CC="$(CC)"; export CC; \ -+ CPP="$(CC) -E"; export CPP; \ - ADA_CFLAGS="$(ADA_CFLAGS)"; export ADA_CFLAGS; \ - CFLAGS="$(CFLAGS)"; export CFLAGS; \ - CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ -@@ -713,6 +718,7 @@ BASE_FLAGS_TO_PASS = \ - "CC_FOR_BUILD=$(CC_FOR_BUILD)" \ - "CFLAGS_FOR_BUILD=$(CFLAGS_FOR_BUILD)" \ - "CXX_FOR_BUILD=$(CXX_FOR_BUILD)" \ -+ "CXXFLAGS_FOR_BUILD=$(CXXFLAGS_FOR_BUILD)" \ - "EXPECT=$(EXPECT)" \ - "FLEX=$(FLEX)" \ - "INSTALL=$(INSTALL)" \ -diff --git a/Makefile.tpl b/Makefile.tpl -index 6b2eb6a..114e462 100644 ---- a/Makefile.tpl -+++ b/Makefile.tpl -@@ -154,6 +154,7 @@ BUILD_EXPORTS = \ - CC="$(CC_FOR_BUILD)"; export CC; \ - CFLAGS="$(CFLAGS_FOR_BUILD)"; export CFLAGS; \ - CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ -+ CPPFLAGS="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS; \ - CXX="$(CXX_FOR_BUILD)"; export CXX; \ - CXXFLAGS="$(CXXFLAGS_FOR_BUILD)"; export CXXFLAGS; \ - GCJ="$(GCJ_FOR_BUILD)"; export GCJ; \ -@@ -172,6 +173,9 @@ BUILD_EXPORTS = \ - # built for the build system to override those in BASE_FLAGS_TO_PASS. - EXTRA_BUILD_FLAGS = \ - CFLAGS="$(CFLAGS_FOR_BUILD)" \ -+ CXXFLAGS="$(CXXFLAGS_FOR_BUILD)" \ -+ CPP="$(CC_FOR_BUILD) -E" \ -+ CPPFLAGS="$(CPPFLAGS_FOR_BUILD)" \ - LDFLAGS="$(LDFLAGS_FOR_BUILD)" - - # This is the list of directories to built for the host system. -@@ -189,6 +193,7 @@ HOST_SUBDIR = @host_subdir@ - HOST_EXPORTS = \ - $(BASE_EXPORTS) \ - CC="$(CC)"; export CC; \ -+ CPP="$(CC) -E"; export CPP; \ - ADA_CFLAGS="$(ADA_CFLAGS)"; export ADA_CFLAGS; \ - CFLAGS="$(CFLAGS)"; export CFLAGS; \ - CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ -diff --git a/gcc/Makefile.in b/gcc/Makefile.in -index cc75536..0ad2dc8 100644 ---- a/gcc/Makefile.in -+++ b/gcc/Makefile.in -@@ -780,7 +780,7 @@ BUILD_LDFLAGS=@BUILD_LDFLAGS@ - BUILD_NO_PIE_FLAG = @BUILD_NO_PIE_FLAG@ - BUILD_LDFLAGS += $(BUILD_NO_PIE_FLAG) - BUILD_CPPFLAGS= -I. -I$(@D) -I$(srcdir) -I$(srcdir)/$(@D) \ -- -I$(srcdir)/../include @INCINTL@ $(CPPINC) $(CPPFLAGS) -+ -I$(srcdir)/../include @INCINTL@ $(CPPINC) $(CPPFLAGS_FOR_BUILD) - - # Actual name to use when installing a native compiler. 
- GCC_INSTALL_NAME := $(shell echo gcc|sed '$(program_transform_name)') -diff --git a/gcc/configure b/gcc/configure -index 377253e..78fc64a 100755 ---- a/gcc/configure -+++ b/gcc/configure -@@ -11799,7 +11799,7 @@ else - CC="${CC_FOR_BUILD}" CFLAGS="${CFLAGS_FOR_BUILD}" \ - CXX="${CXX_FOR_BUILD}" CXXFLAGS="${CXXFLAGS_FOR_BUILD}" \ - LD="${LD_FOR_BUILD}" LDFLAGS="${LDFLAGS_FOR_BUILD}" \ -- GMPINC="" CPPFLAGS="${CPPFLAGS} -DGENERATOR_FILE" \ -+ GMPINC="" CPPFLAGS="${CPPFLAGS_FOR_BUILD} -DGENERATOR_FILE" \ - ${realsrcdir}/configure \ - --enable-languages=${enable_languages-all} \ - --target=$target_alias --host=$build_alias --build=$build_alias -diff --git a/gcc/configure.ac b/gcc/configure.ac -index 54e7619..a94666e 100644 ---- a/gcc/configure.ac -+++ b/gcc/configure.ac -@@ -1682,7 +1682,7 @@ else - CC="${CC_FOR_BUILD}" CFLAGS="${CFLAGS_FOR_BUILD}" \ - CXX="${CXX_FOR_BUILD}" CXXFLAGS="${CXXFLAGS_FOR_BUILD}" \ - LD="${LD_FOR_BUILD}" LDFLAGS="${LDFLAGS_FOR_BUILD}" \ -- GMPINC="" CPPFLAGS="${CPPFLAGS} -DGENERATOR_FILE" \ -+ GMPINC="" CPPFLAGS="${CPPFLAGS_FOR_BUILD} -DGENERATOR_FILE" \ - ${realsrcdir}/configure \ - --enable-languages=${enable_languages-all} \ - --target=$target_alias --host=$build_alias --build=$build_alias --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch deleted file mode 100644 index c9a6fd0eb..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch +++ /dev/null @@ -1,28 +0,0 @@ -From b0412c01c275aaeb6b458461cd2425120c8bcec8 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Tue, 2 Feb 2016 10:26:10 -0800 -Subject: [PATCH 40/46] nios2: Define MUSL_DYNAMIC_LINKER - -Signed-off-by: Marek Vasut -Signed-off-by: Khem Raj ---- -Upstream-Status: Pending - - gcc/config/nios2/linux.h | 1 + - 1 file changed, 1 insertion(+) - -diff --git a/gcc/config/nios2/linux.h b/gcc/config/nios2/linux.h -index 4ef55b5..62bc1e7 100644 ---- a/gcc/config/nios2/linux.h -+++ b/gcc/config/nios2/linux.h -@@ -30,6 +30,7 @@ - #define CPP_SPEC "%{posix:-D_POSIX_SOURCE} %{pthread:-D_REENTRANT}" - - #define GLIBC_DYNAMIC_LINKER "/lib/ld-linux-nios2.so.1" -+#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-nios2.so.1" - - #undef LINK_SPEC - #define LINK_SPEC LINK_SPEC_ENDIAN \ --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0041-ssp_nonshared.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0041-ssp_nonshared.patch deleted file mode 100644 index 074452974..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0041-ssp_nonshared.patch +++ /dev/null @@ -1,28 +0,0 @@ -From 551a5db7acb56e085a101f1c222d51b2c1b039a4 Mon Sep 17 00:00:00 2001 -From: Szabolcs Nagy -Date: Sat, 7 Nov 2015 14:58:40 +0000 -Subject: [PATCH 41/46] ssp_nonshared - ---- -Upstream-Status: Inappropriate [OE Configuration] - - gcc/gcc.c | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) - -diff --git a/gcc/gcc.c b/gcc/gcc.c -index 2812819..9de96ee 100644 ---- a/gcc/gcc.c -+++ b/gcc/gcc.c -@@ -863,7 +863,8 @@ proper position among the other output files. 
*/ - #ifndef LINK_SSP_SPEC - #ifdef TARGET_LIBC_PROVIDES_SSP - #define LINK_SSP_SPEC "%{fstack-protector|fstack-protector-all" \ -- "|fstack-protector-strong|fstack-protector-explicit:}" -+ "|fstack-protector-strong|fstack-protector-explicit" \ -+ ":-lssp_nonshared}" - #else - #define LINK_SSP_SPEC "%{fstack-protector|fstack-protector-all" \ - "|fstack-protector-strong|fstack-protector-explicit" \ --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0042-gcc-libcpp-support-ffile-prefix-map-old-new.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0042-gcc-libcpp-support-ffile-prefix-map-old-new.patch deleted file mode 100644 index 861f0fd7f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0042-gcc-libcpp-support-ffile-prefix-map-old-new.patch +++ /dev/null @@ -1,292 +0,0 @@ -From ba738cc411c9a54e389e336bcaa0a2428dd4a9d2 Mon Sep 17 00:00:00 2001 -From: Hongxu Jia -Date: Wed, 16 Mar 2016 02:27:43 -0400 -Subject: [PATCH 42/46] gcc/libcpp: support -ffile-prefix-map== - -Similar -fdebug-prefix-map, add option -ffile-prefix-map to map one -directory name (old) to another (new) in __FILE__, __BASE_FILE__ and -__builtin_FILE (). - -https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70268 - -Upstream-Status: Submitted [gcc-patches@gcc.gnu.org] -Signed-off-by: Hongxu Jia ---- - gcc/c-family/c-opts.c | 13 +++++++ - gcc/c-family/c.opt | 4 +++ - gcc/dwarf2out.c | 1 + - gcc/gimplify.c | 3 ++ - libcpp/Makefile.in | 10 +++--- - libcpp/file-map.c | 92 +++++++++++++++++++++++++++++++++++++++++++++++ - libcpp/include/file-map.h | 30 ++++++++++++++++ - libcpp/macro.c | 2 ++ - 8 files changed, 150 insertions(+), 5 deletions(-) - create mode 100644 libcpp/file-map.c - create mode 100644 libcpp/include/file-map.h - -diff --git a/gcc/c-family/c-opts.c b/gcc/c-family/c-opts.c -index fec58bc..7a0af43 100644 ---- a/gcc/c-family/c-opts.c -+++ b/gcc/c-family/c-opts.c -@@ -38,6 +38,14 @@ along with GCC; see the file COPYING3. If not see - #include "opts.h" - #include "plugin.h" /* For PLUGIN_INCLUDE_FILE event. */ - #include "mkdeps.h" -+#include "file-map.h" -+#include "c-target.h" -+#include "tm.h" /* For BYTES_BIG_ENDIAN, -+ DOLLARS_IN_IDENTIFIERS, -+ STDC_0_IN_SYSTEM_HEADERS, -+ TARGET_FLT_EVAL_METHOD_NON_DEFAULT and -+ TARGET_OPTF. */ -+#include "tm_p.h" /* For C_COMMON_OVERRIDE_OPTIONS. */ - #include "dumpfile.h" - - #ifndef DOLLARS_IN_IDENTIFIERS -@@ -503,6 +511,11 @@ c_common_handle_option (size_t scode, const char *arg, int value, - cpp_opts->narrow_charset = arg; - break; - -+ case OPT_ffile_prefix_map_: -+ if (add_file_prefix_map (arg) < 0) -+ error ("invalid argument %qs to -ffile-prefix-map", arg); -+ break; -+ - case OPT_fwide_exec_charset_: - cpp_opts->wide_charset = arg; - break; -diff --git a/gcc/c-family/c.opt b/gcc/c-family/c.opt -index 660da6c..31f7b34 100644 ---- a/gcc/c-family/c.opt -+++ b/gcc/c-family/c.opt -@@ -1208,6 +1208,10 @@ fexec-charset= - C ObjC C++ ObjC++ Joined RejectNegative - -fexec-charset= Convert all strings and character constants to character set . - -+ffile-prefix-map= -+C ObjC C++ ObjC++ Joined RejectNegative -+-ffile-prefix-map= Map one directory name to another in __FILE__, __BASE_FILE__ and __builtin_FILE () -+ - fextended-identifiers - C ObjC C++ ObjC++ - Permit universal character names (\\u and \\U) in identifiers. 
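A minimal check of what the new option does (it needs a compiler carrying this patch, or a later upstream gcc where -ffile-prefix-map was eventually merged):

    cat > where.c <<'EOF'
    #include <stdio.h>
    int main (void) { printf ("%s\n", __FILE__); return 0; }
    EOF

    # Compiling by absolute path makes the remapping visible in __FILE__:
    gcc -ffile-prefix-map=$PWD=/usr/src/debug $PWD/where.c -o where
    ./where    # prints /usr/src/debug/where.c instead of the build path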
-diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c -index 80f2df5..a2bfcc0 100644 ---- a/gcc/dwarf2out.c -+++ b/gcc/dwarf2out.c -@@ -21672,6 +21672,7 @@ gen_producer_string (void) - case OPT_fltrans_output_list_: - case OPT_fresolution_: - case OPT_fdebug_prefix_map_: -+ case OPT_ffile_prefix_map_: - /* Ignore these. */ - continue; - default: -diff --git a/gcc/gimplify.c b/gcc/gimplify.c -index e223e59..1433c25 100644 ---- a/gcc/gimplify.c -+++ b/gcc/gimplify.c -@@ -57,6 +57,8 @@ along with GCC; see the file COPYING3. If not see - #include "gomp-constants.h" - #include "tree-dump.h" - #include "gimple-walk.h" -+#include "file-map.h" -+ - #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */ - #include "builtins.h" - -@@ -2432,6 +2434,7 @@ gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) - case BUILT_IN_FILE: - { - const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p)); -+ locfile = remap_file_filename (locfile); - *expr_p = build_string_literal (strlen (locfile) + 1, locfile); - return GS_OK; - } -diff --git a/libcpp/Makefile.in b/libcpp/Makefile.in -index a7d7828..3d29572 100644 ---- a/libcpp/Makefile.in -+++ b/libcpp/Makefile.in -@@ -84,12 +84,12 @@ DEPMODE = $(CXXDEPMODE) - - - libcpp_a_OBJS = charset.o directives.o directives-only.o errors.o \ -- expr.o files.o identifiers.o init.o lex.o line-map.o macro.o \ -- mkdeps.o pch.o symtab.o traditional.o -+ expr.o file-map.o files.o identifiers.o init.o lex.o line-map.o \ -+ macro.o mkdeps.o pch.o symtab.o traditional.o - - libcpp_a_SOURCES = charset.c directives.c directives-only.c errors.c \ -- expr.c files.c identifiers.c init.c lex.c line-map.c macro.c \ -- mkdeps.c pch.c symtab.c traditional.c -+ expr.c file-map.c files.c identifiers.c init.c lex.c line-map.c \ -+ macro.c mkdeps.c pch.c symtab.c traditional.c - - all: libcpp.a $(USED_CATALOGS) - -@@ -263,7 +263,7 @@ po/$(PACKAGE).pot: $(libcpp_a_SOURCES) - - TAGS_SOURCES = $(libcpp_a_SOURCES) internal.h ucnid.h \ - include/line-map.h include/symtab.h include/cpp-id-data.h \ -- include/cpplib.h include/mkdeps.h system.h -+ include/cpplib.h include/mkdeps.h system.h include/file-map.h - - TAGS: $(TAGS_SOURCES) - cd $(srcdir) && etags $(TAGS_SOURCES) -diff --git a/libcpp/file-map.c b/libcpp/file-map.c -new file mode 100644 -index 0000000..18035ef ---- /dev/null -+++ b/libcpp/file-map.c -@@ -0,0 +1,92 @@ -+/* Map one directory name to another in __FILE__, __BASE_FILE__ -+ and __builtin_FILE (). -+ Copyright (C) 2001-2016 Free Software Foundation, Inc. -+ -+This program is free software; you can redistribute it and/or modify it -+under the terms of the GNU General Public License as published by the -+Free Software Foundation; either version 3, or (at your option) any -+later version. -+ -+This program is distributed in the hope that it will be useful, -+but WITHOUT ANY WARRANTY; without even the implied warranty of -+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+GNU General Public License for more details. -+ -+You should have received a copy of the GNU General Public License -+along with this program; see the file COPYING3. If not see -+. -+ -+ In other words, you are welcome to use, share and improve this program. -+ You are forbidden to forbid anyone else to use, share and improve -+ what you give them. Help stamp out software-hoarding! 
*/ -+ -+#include "config.h" -+#include "system.h" -+#include "file-map.h" -+ -+/* Structure recording the mapping from source file and directory -+ names at compile time to __FILE__ */ -+typedef struct file_prefix_map -+{ -+ const char *old_prefix; -+ const char *new_prefix; -+ size_t old_len; -+ size_t new_len; -+ struct file_prefix_map *next; -+} file_prefix_map; -+ -+/* Linked list of such structures. */ -+static file_prefix_map *file_prefix_maps; -+ -+/* Record prefix mapping of __FILE__. ARG is the argument to -+ -ffile-prefix-map and must be of the form OLD=NEW. */ -+int -+add_file_prefix_map (const char *arg) -+{ -+ file_prefix_map *map; -+ const char *p; -+ -+ p = strchr (arg, '='); -+ if (!p) -+ { -+ fprintf(stderr, "invalid argument %qs to -ffile-prefix-map", arg); -+ return -1; -+ } -+ map = XNEW (file_prefix_map); -+ map->old_prefix = xstrndup (arg, p - arg); -+ map->old_len = p - arg; -+ p++; -+ map->new_prefix = xstrdup (p); -+ map->new_len = strlen (p); -+ map->next = file_prefix_maps; -+ file_prefix_maps = map; -+ -+ return 0; -+} -+ -+/* Perform user-specified mapping of __FILE__ prefixes. Return -+ the new name corresponding to filename. */ -+ -+const char * -+remap_file_filename (const char *filename) -+{ -+ file_prefix_map *map; -+ char *s; -+ const char *name; -+ size_t name_len; -+ -+ for (map = file_prefix_maps; map; map = map->next) -+ if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0) -+ break; -+ if (!map) -+ return filename; -+ name = filename + map->old_len; -+ name_len = strlen (name) + 1; -+ s = (char *) alloca (name_len + map->new_len); -+ memcpy (s, map->new_prefix, map->new_len); -+ memcpy (s + map->new_len, name, name_len); -+ -+ return xstrdup (s); -+} -+ -+ -diff --git a/libcpp/include/file-map.h b/libcpp/include/file-map.h -new file mode 100644 -index 0000000..8750315 ---- /dev/null -+++ b/libcpp/include/file-map.h -@@ -0,0 +1,30 @@ -+/* Map one directory name to another in __FILE__, __BASE_FILE__ -+ and __builtin_FILE (). -+ Copyright (C) 2001-2016 Free Software Foundation, Inc. -+ -+This program is free software; you can redistribute it and/or modify it -+under the terms of the GNU General Public License as published by the -+Free Software Foundation; either version 3, or (at your option) any -+later version. -+ -+This program is distributed in the hope that it will be useful, -+but WITHOUT ANY WARRANTY; without even the implied warranty of -+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+GNU General Public License for more details. -+ -+You should have received a copy of the GNU General Public License -+along with this program; see the file COPYING3. If not see -+. -+ -+ In other words, you are welcome to use, share and improve this program. -+ You are forbidden to forbid anyone else to use, share and improve -+ what you give them. Help stamp out software-hoarding! */ -+ -+#ifndef LIBCPP_FILE_MAP_H -+#define LIBCPP_FILE_MAP_H -+ -+const char * remap_file_filename (const char *filename); -+ -+int add_file_prefix_map (const char *arg); -+ -+#endif /* !LIBCPP_FILE_MAP_H */ -diff --git a/libcpp/macro.c b/libcpp/macro.c -index c251553..3ceec3d 100644 ---- a/libcpp/macro.c -+++ b/libcpp/macro.c -@@ -26,6 +26,7 @@ along with this program; see the file COPYING3. If not see - #include "system.h" - #include "cpplib.h" - #include "internal.h" -+#include "file-map.h" - - typedef struct macro_arg macro_arg; - /* This structure represents the tokens of a macro argument. 
These -@@ -301,6 +302,7 @@ _cpp_builtin_macro_text (cpp_reader *pfile, cpp_hashnode *node, - if (!name) - abort (); - } -+ name = remap_file_filename (name); - len = strlen (name); - buf = _cpp_unaligned_alloc (pfile, len * 2 + 3); - result = buf; --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0043-Reuse-fdebug-prefix-map-to-replace-ffile-prefix-map.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0043-Reuse-fdebug-prefix-map-to-replace-ffile-prefix-map.patch deleted file mode 100644 index 0077f80e4..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0043-Reuse-fdebug-prefix-map-to-replace-ffile-prefix-map.patch +++ /dev/null @@ -1,43 +0,0 @@ -From 25c87c6cc40ec5cc6965f8bfb215bec01abd6d82 Mon Sep 17 00:00:00 2001 -From: Hongxu Jia -Date: Wed, 16 Mar 2016 05:39:59 -0400 -Subject: [PATCH 43/46] Reuse -fdebug-prefix-map to replace -ffile-prefix-map - -The oe-core may use external toolchain to compile, -which may not support -ffile-prefix-map. - -Since we use -fdebug-prefix-map to do the same thing, -so we could reuse it to replace -ffile-prefix-map. - -Upstream-Status: Inappropriate[oe-core specific] - -Signed-off-by: Hongxu Jia ---- - gcc/opts-global.c | 4 ++++ - 1 file changed, 4 insertions(+) - -diff --git a/gcc/opts-global.c b/gcc/opts-global.c -index b7e5232..121d7b9 100644 ---- a/gcc/opts-global.c -+++ b/gcc/opts-global.c -@@ -31,6 +31,7 @@ along with GCC; see the file COPYING3. If not see - #include "langhooks.h" - #include "dbgcnt.h" - #include "debug.h" -+#include "file-map.h" - #include "output.h" - #include "plugin.h" - #include "toplev.h" -@@ -357,6 +358,9 @@ handle_common_deferred_options (void) - - case OPT_fdebug_prefix_map_: - add_debug_prefix_map (opt->arg); -+ -+ /* Reuse -fdebug-prefix-map to replace -ffile-prefix-map */ -+ add_file_prefix_map (opt->arg); - break; - - case OPT_fdump_: --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0044-gcc-final.c-fdebug-prefix-map-support-to-remap-sourc.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0044-gcc-final.c-fdebug-prefix-map-support-to-remap-sourc.patch deleted file mode 100644 index 5d41af44a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0044-gcc-final.c-fdebug-prefix-map-support-to-remap-sourc.patch +++ /dev/null @@ -1,54 +0,0 @@ -From 6ab23e88aef22bbabee7b9600c459ff39547bb66 Mon Sep 17 00:00:00 2001 -From: Hongxu Jia -Date: Thu, 24 Mar 2016 11:23:14 -0400 -Subject: [PATCH 44/46] gcc/final.c: -fdebug-prefix-map support to remap - sources with relative path - -PR other/70428 -* final.c (remap_debug_filename): Use lrealpath to translate -relative path before remapping - -https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70428 -Upstream-Status: Submitted [gcc-patches@gcc.gnu.org] - -Signed-off-by: Hongxu Jia ---- - gcc/final.c | 15 ++++++++++++--- - 1 file changed, 12 insertions(+), 3 deletions(-) - -diff --git a/gcc/final.c b/gcc/final.c -index 55cf509..23293e5 100644 ---- a/gcc/final.c -+++ b/gcc/final.c -@@ -1554,16 +1554,25 @@ remap_debug_filename (const char *filename) - const char *name; - size_t name_len; - -+ /* Support to remap filename with relative path */ -+ char *realpath = lrealpath (filename); -+ if (realpath == NULL) -+ return filename; -+ - for (map = debug_prefix_maps; map; map = map->next) -- if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0) -+ if (filename_ncmp (realpath, map->old_prefix, map->old_len) == 0) - break; - if (!map) -- 
return filename; -- name = filename + map->old_len; -+ { -+ free (realpath); -+ return filename; -+ } -+ name = realpath + map->old_len; - name_len = strlen (name) + 1; - s = (char *) alloca (name_len + map->new_len); - memcpy (s, map->new_prefix, map->new_len); - memcpy (s + map->new_len, name, name_len); -+ free (realpath); - return ggc_strdup (s); - } - --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0045-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0045-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch deleted file mode 100644 index c62b727d6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0045-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch +++ /dev/null @@ -1,125 +0,0 @@ -From 5a47d404ea29e2547269e3ddf38754462d93f903 Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Fri, 29 Apr 2016 20:03:28 +0000 -Subject: [PATCH 45/46] libgcc: Add knob to use ldbl-128 on ppc - -musl does not support ldbl 128 so we can not assume -that linux as a whole supports ldbl-128 bits, instead -act upon configure option passed to gcc and assume no -on musl and yes otherwise if no option is passed since -default behaviour is to assume ldbl128 it does not -change the defaults - -Signed-off-by: Khem Raj ---- -Upstream-Status: Pending - - libgcc/Makefile.in | 1 + - libgcc/config/rs6000/t-linux | 5 ++++- - libgcc/configure | 18 ++++++++++++++++++ - libgcc/configure.ac | 12 ++++++++++++ - 4 files changed, 35 insertions(+), 1 deletion(-) - mode change 100644 => 100755 libgcc/configure - -diff --git a/libgcc/Makefile.in b/libgcc/Makefile.in -index f09b39b..296cf0f 100644 ---- a/libgcc/Makefile.in -+++ b/libgcc/Makefile.in -@@ -43,6 +43,7 @@ enable_vtable_verify = @enable_vtable_verify@ - enable_decimal_float = @enable_decimal_float@ - fixed_point = @fixed_point@ - with_aix_soname = @with_aix_soname@ -+with_ldbl128 = @with_ldbl128@ - - host_noncanonical = @host_noncanonical@ - real_host_noncanonical = @real_host_noncanonical@ -diff --git a/libgcc/config/rs6000/t-linux b/libgcc/config/rs6000/t-linux -index 4f6d4c4..c50dd94 100644 ---- a/libgcc/config/rs6000/t-linux -+++ b/libgcc/config/rs6000/t-linux -@@ -1,3 +1,6 @@ - SHLIB_MAPFILES += $(srcdir)/config/rs6000/libgcc-glibc.ver - --HOST_LIBGCC2_CFLAGS += -mlong-double-128 -mno-minimal-toc -+ifeq ($(with_ldbl128),yes) -+HOST_LIBGCC2_CFLAGS += -mlong-double-128 -+endif -+HOST_LIBGCC2_CFLAGS += -mno-minimal-toc -diff --git a/libgcc/configure b/libgcc/configure -old mode 100644 -new mode 100755 -index e7d6c75..e9a9019 ---- a/libgcc/configure -+++ b/libgcc/configure -@@ -614,6 +614,7 @@ build_vendor - build_cpu - build - with_aix_soname -+with_ldbl128 - enable_vtable_verify - enable_shared - libgcc_topdir -@@ -663,6 +664,7 @@ with_cross_host - with_ld - enable_shared - enable_vtable_verify -+with_long_double_128 - with_aix_soname - enable_version_specific_runtime_libs - with_slibdir -@@ -1319,6 +1321,7 @@ Optional Packages: - --with-target-subdir=SUBDIR Configuring in a subdirectory for target - --with-cross-host=HOST Configuring with a cross compiler - --with-ld arrange to use the specified ld (full pathname) -+ --with-long-double-128 use 128-bit long double by default - --with-aix-soname=aix|svr4|both - shared library versioning (aka "SONAME") variant to - provide on AIX -@@ -2201,6 +2204,21 @@ fi - - - -+# Check whether --with-long-double-128 was given. 
-+if test "${with_long_double_128+set}" = set; then : -+ withval=$with_long_double_128; with_ldbl128="$with_long_double_128" -+else -+ case "${host}" in -+ power*-*-musl*) -+ with_ldbl128="no";; -+ *) with_ldbl128="yes";; -+ esac -+ -+fi -+ -+ -+ -+ - # Check whether --with-aix-soname was given. - if test "${with_aix_soname+set}" = set; then : - withval=$with_aix_soname; case "${host}:${enable_shared}" in -diff --git a/libgcc/configure.ac b/libgcc/configure.ac -index 269997f..81dc3ba 100644 ---- a/libgcc/configure.ac -+++ b/libgcc/configure.ac -@@ -77,6 +77,18 @@ AC_ARG_ENABLE(vtable-verify, - [enable_vtable_verify=no]) - AC_SUBST(enable_vtable_verify) - -+AC_ARG_WITH(long-double-128, -+[AS_HELP_STRING([--with-long-double-128], -+ [use 128-bit long double by default])], -+ with_ldbl128="$with_long_double_128", -+[case "${host}" in -+ power*-*-musl*) -+ with_ldbl128="no";; -+ *) with_ldbl128="yes";; -+ esac -+]) -+AC_SUBST(with_ldbl128) -+ - AC_ARG_WITH(aix-soname, - [AS_HELP_STRING([--with-aix-soname=aix|svr4|both], - [shared library versioning (aka "SONAME") variant to provide on AIX])], --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0046-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0046-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch deleted file mode 100644 index 390037f7b..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0046-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch +++ /dev/null @@ -1,29 +0,0 @@ -From 513bf3c33e2f551f08bd57605091d5ddeba3536b Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Wed, 4 May 2016 21:11:34 -0700 -Subject: [PATCH 46/46] Link libgcc using LDFLAGS, not just SHLIB_LDFLAGS - -Upstream-Status: Pending - -Signed-off-by: Christopher Larson -Signed-off-by: Khem Raj ---- - libgcc/config/t-slibgcc | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/libgcc/config/t-slibgcc b/libgcc/config/t-slibgcc -index 8c5f890..29be909 100644 ---- a/libgcc/config/t-slibgcc -+++ b/libgcc/config/t-slibgcc -@@ -32,7 +32,7 @@ SHLIB_INSTALL_SOLINK = $(LN_S) $(SHLIB_SONAME) \ - $(DESTDIR)$(slibdir)$(SHLIB_SLIBDIR_QUAL)/$(SHLIB_SOLINK) - - SHLIB_LINK = $(CC) $(LIBGCC2_CFLAGS) -shared -nodefaultlibs \ -- $(SHLIB_LDFLAGS) \ -+ $(LDFLAGS) $(SHLIB_LDFLAGS) \ - -o $(SHLIB_DIR)/$(SHLIB_SONAME).tmp @multilib_flags@ \ - $(SHLIB_OBJS) $(SHLIB_LC) && \ - rm -f $(SHLIB_DIR)/$(SHLIB_SOLINK) && \ --- -2.8.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0047-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0047-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch deleted file mode 100644 index ed6cd6905..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0047-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch +++ /dev/null @@ -1,85 +0,0 @@ -From 0a9ed0479203cb7e69c3745b0c259007410f39ba Mon Sep 17 00:00:00 2001 -From: Szabolcs Nagy -Date: Sat, 24 Oct 2015 20:09:53 +0000 -Subject: [PATCH 47/47] libgcc_s: Use alias for __cpu_indicator_init instead of - symver - -Adapter from - -https://gcc.gnu.org/ml/gcc-patches/2015-05/msg00899.html - -This fix was debated but hasnt been applied gcc upstream since -they expect musl to support '@' in symbol versioning which is -a sun/gnu versioning extention. 
This patch however avoids the -need for the '@' symbols at all - -libgcc/Changelog: - -2015-05-11 Szabolcs Nagy - - * config/i386/cpuinfo.c (__cpu_indicator_init_local): Add. - (__cpu_indicator_init@GCC_4.8.0, __cpu_model@GCC_4.8.0): Remove. - - * config/i386/t-linux (HOST_LIBGCC2_CFLAGS): Remove -DUSE_ELF_SYMVER. - -gcc/Changelog: - -2015-05-11 Szabolcs Nagy - - * config/i386/i386.c (ix86_expand_builtin): Make __builtin_cpu_init - call __cpu_indicator_init_local instead of __cpu_indicator_init. - -Signed-off-by: Khem Raj ---- -Upstream-Status: Rejected - - gcc/config/i386/i386.c | 4 ++-- - libgcc/config/i386/cpuinfo.c | 6 +++--- - libgcc/config/i386/t-linux | 2 +- - 3 files changed, 6 insertions(+), 6 deletions(-) - -diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c -index 861a029..1c97d72 100644 ---- a/gcc/config/i386/i386.c -+++ b/gcc/config/i386/i386.c -@@ -40323,10 +40323,10 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget, - { - case IX86_BUILTIN_CPU_INIT: - { -- /* Make it call __cpu_indicator_init in libgcc. */ -+ /* Make it call __cpu_indicator_init_local in libgcc.a. */ - tree call_expr, fndecl, type; - type = build_function_type_list (integer_type_node, NULL_TREE); -- fndecl = build_fn_decl ("__cpu_indicator_init", type); -+ fndecl = build_fn_decl ("__cpu_indicator_init_local", type); - call_expr = build_call_expr (fndecl, 0); - return expand_expr (call_expr, target, mode, EXPAND_NORMAL); - } -diff --git a/libgcc/config/i386/cpuinfo.c b/libgcc/config/i386/cpuinfo.c -index 8c2248d..6c82f15 100644 ---- a/libgcc/config/i386/cpuinfo.c -+++ b/libgcc/config/i386/cpuinfo.c -@@ -485,7 +485,7 @@ __cpu_indicator_init (void) - return 0; - } - --#if defined SHARED && defined USE_ELF_SYMVER --__asm__ (".symver __cpu_indicator_init, __cpu_indicator_init@GCC_4.8.0"); --__asm__ (".symver __cpu_model, __cpu_model@GCC_4.8.0"); -+#ifndef SHARED -+int __cpu_indicator_init_local (void) -+ __attribute__ ((weak, alias ("__cpu_indicator_init"))); - #endif -diff --git a/libgcc/config/i386/t-linux b/libgcc/config/i386/t-linux -index 11bb46e..4f47f7b 100644 ---- a/libgcc/config/i386/t-linux -+++ b/libgcc/config/i386/t-linux -@@ -3,4 +3,4 @@ - # t-slibgcc-elf-ver and t-linux - SHLIB_MAPFILES = libgcc-std.ver $(srcdir)/config/i386/libgcc-glibc.ver - --HOST_LIBGCC2_CFLAGS += -mlong-double-80 -DUSE_ELF_SYMVER -+HOST_LIBGCC2_CFLAGS += -mlong-double-80 --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0048-ARM-PR-target-71056-Don-t-use-vectorized-builtins-wh.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0048-ARM-PR-target-71056-Don-t-use-vectorized-builtins-wh.patch deleted file mode 100644 index 9c39c7f7a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/0048-ARM-PR-target-71056-Don-t-use-vectorized-builtins-wh.patch +++ /dev/null @@ -1,92 +0,0 @@ -From 84d2a5509892b65ed60d39e6e2f9719e3762e40e Mon Sep 17 00:00:00 2001 -From: ktkachov -Date: Tue, 31 May 2016 08:29:39 +0000 -Subject: [PATCH] [ARM] PR target/71056: Don't use vectorized builtins when - NEON is not available - - PR target/71056 - * config/arm/arm-builtins.c (arm_builtin_vectorized_function): Return - NULL_TREE early if NEON is not available. Remove now redundant check - in ARM_CHECK_BUILTIN_MODE. - - * gcc.target/arm/pr71056.c: New test. 
- - - -git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/gcc-6-branch@236910 138bc75d-0d04-0410-961f-82ee72b054a4 ---- -Upstream-Status: Backport -Signed-off-by: Khem Raj - - gcc/ChangeLog | 7 +++++++ - gcc/config/arm/arm-builtins.c | 6 +++++- - gcc/testsuite/ChangeLog | 5 +++++ - gcc/testsuite/gcc.target/arm/pr71056.c | 32 ++++++++++++++++++++++++++++++++ - 4 files changed, 49 insertions(+), 1 deletion(-) - create mode 100644 gcc/testsuite/gcc.target/arm/pr71056.c - -diff --git a/gcc/config/arm/arm-builtins.c b/gcc/config/arm/arm-builtins.c -index 90fb40f..68b2839 100644 ---- a/gcc/config/arm/arm-builtins.c -+++ b/gcc/config/arm/arm-builtins.c -@@ -2861,6 +2861,10 @@ arm_builtin_vectorized_function (unsigned int fn, tree type_out, tree type_in) - int in_n, out_n; - bool out_unsigned_p = TYPE_UNSIGNED (type_out); - -+ /* Can't provide any vectorized builtins when we can't use NEON. */ -+ if (!TARGET_NEON) -+ return NULL_TREE; -+ - if (TREE_CODE (type_out) != VECTOR_TYPE - || TREE_CODE (type_in) != VECTOR_TYPE) - return NULL_TREE; -@@ -2875,7 +2879,7 @@ arm_builtin_vectorized_function (unsigned int fn, tree type_out, tree type_in) - NULL_TREE is returned if no such builtin is available. */ - #undef ARM_CHECK_BUILTIN_MODE - #define ARM_CHECK_BUILTIN_MODE(C) \ -- (TARGET_NEON && TARGET_FPU_ARMV8 \ -+ (TARGET_FPU_ARMV8 \ - && flag_unsafe_math_optimizations \ - && ARM_CHECK_BUILTIN_MODE_1 (C)) - -diff --git a/gcc/testsuite/gcc.target/arm/pr71056.c b/gcc/testsuite/gcc.target/arm/pr71056.c -new file mode 100644 -index 0000000..136754e ---- /dev/null -+++ b/gcc/testsuite/gcc.target/arm/pr71056.c -@@ -0,0 +1,32 @@ -+/* PR target/71056. */ -+/* { dg-do compile } */ -+/* { dg-require-effective-target arm_vfp3_ok } */ -+/* { dg-options "-O3 -mfpu=vfpv3" } */ -+ -+/* Check that compiling for a non-NEON target doesn't try to introduce -+ a NEON vectorized builtin. */ -+ -+extern char *buff; -+int f2 (); -+struct T1 -+{ -+ int reserved[2]; -+ unsigned int ip; -+ unsigned short cs; -+ unsigned short rsrv2; -+}; -+void -+f3 (const char *p) -+{ -+ struct T1 x; -+ __builtin_memcpy (&x, p, sizeof (struct T1)); -+ x.reserved[0] = __builtin_bswap32 (x.reserved[0]); -+ x.reserved[1] = __builtin_bswap32 (x.reserved[1]); -+ x.ip = __builtin_bswap32 (x.ip); -+ x.cs = x.cs << 8 | x.cs >> 8; -+ x.rsrv2 = x.rsrv2 << 8 | x.rsrv2 >> 8; -+ if (f2 ()) -+ { -+ __builtin_memcpy (buff, "\n", 1); -+ } -+} --- -2.9.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/CVE-2016-4490.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/CVE-2016-4490.patch deleted file mode 100644 index f32e91d4f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/CVE-2016-4490.patch +++ /dev/null @@ -1,290 +0,0 @@ -From 7d235b1b5ea35352c54957ef5530d9a02c46962f Mon Sep 17 00:00:00 2001 -From: bernds -Date: Mon, 2 May 2016 17:06:40 +0000 -Subject: [PATCH] =?UTF-8?q?Demangler=20integer=20overflow=20fixes=20from?= - =?UTF-8?q?=20Marcel=20B=C3=B6hme.?= -MIME-Version: 1.0 -Content-Type: text/plain; charset=UTF-8 -Content-Transfer-Encoding: 8bit - - PR c++/70498 - * cp-demangle.c: Parse numbers as integer instead of long to avoid - overflow after sanity checks. Include if available. - (INT_MAX): Define if necessary. - (d_make_template_param): Takes integer argument instead of long. - (d_make_function_param): Likewise. - (d_append_num): Likewise. - (d_identifier): Likewise. - (d_number): Parse as and return integer. - (d_compact_number): Handle overflow. 
- (d_source_name): Change variable type to integer for parsed number. - (d_java_resource): Likewise. - (d_special_name): Likewise. - (d_discriminator): Likewise. - (d_unnamed_type): Likewise. - * testsuite/demangle-expected: Add regression test cases. - - - -git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@235767 138bc75d-0d04-0410-961f-82ee72b054a4 - -Upstream-Status: Backport -CVE: CVE-2016-4490 -[Yocto #9632] - -Signed-off-by: Armin Kuster - ---- - libiberty/ChangeLog | 19 +++++++++++++ - libiberty/cp-demangle.c | 52 ++++++++++++++++++++--------------- - libiberty/testsuite/demangle-expected | 14 ++++++++-- - 3 files changed, 61 insertions(+), 24 deletions(-) - -Index: git/libiberty/ChangeLog -=================================================================== ---- git.orig/libiberty/ChangeLog -+++ git/libiberty/ChangeLog -@@ -1,3 +1,22 @@ -+2016-05-02 Marcel Böhme -+ -+ PR c++/70498 -+ * cp-demangle.c: Parse numbers as integer instead of long to avoid -+ overflow after sanity checks. Include if available. -+ (INT_MAX): Define if necessary. -+ (d_make_template_param): Takes integer argument instead of long. -+ (d_make_function_param): Likewise. -+ (d_append_num): Likewise. -+ (d_identifier): Likewise. -+ (d_number): Parse as and return integer. -+ (d_compact_number): Handle overflow. -+ (d_source_name): Change variable type to integer for parsed number. -+ (d_java_resource): Likewise. -+ (d_special_name): Likewise. -+ (d_discriminator): Likewise. -+ (d_unnamed_type): Likewise. -+ * testsuite/demangle-expected: Add regression test cases. -+ - 2016-04-27 Release Manager - - * GCC 6.1.0 released. -Index: git/libiberty/cp-demangle.c -=================================================================== ---- git.orig/libiberty/cp-demangle.c -+++ git/libiberty/cp-demangle.c -@@ -128,6 +128,13 @@ extern char *alloca (); - # endif /* alloca */ - #endif /* HAVE_ALLOCA_H */ - -+#ifdef HAVE_LIMITS_H -+#include -+#endif -+#ifndef INT_MAX -+# define INT_MAX (int)(((unsigned int) ~0) >> 1) /* 0x7FFFFFFF */ -+#endif -+ - #include "ansidecl.h" - #include "libiberty.h" - #include "demangle.h" -@@ -398,7 +405,7 @@ d_make_dtor (struct d_info *, enum gnu_v - struct demangle_component *); - - static struct demangle_component * --d_make_template_param (struct d_info *, long); -+d_make_template_param (struct d_info *, int); - - static struct demangle_component * - d_make_sub (struct d_info *, const char *, int); -@@ -421,9 +428,9 @@ static struct demangle_component *d_unqu - - static struct demangle_component *d_source_name (struct d_info *); - --static long d_number (struct d_info *); -+static int d_number (struct d_info *); - --static struct demangle_component *d_identifier (struct d_info *, long); -+static struct demangle_component *d_identifier (struct d_info *, int); - - static struct demangle_component *d_operator_name (struct d_info *); - -@@ -1119,7 +1126,7 @@ d_make_dtor (struct d_info *di, enum gnu - /* Add a new template parameter. */ - - static struct demangle_component * --d_make_template_param (struct d_info *di, long i) -+d_make_template_param (struct d_info *di, int i) - { - struct demangle_component *p; - -@@ -1135,7 +1142,7 @@ d_make_template_param (struct d_info *di - /* Add a new function parameter. 
*/ - - static struct demangle_component * --d_make_function_param (struct d_info *di, long i) -+d_make_function_param (struct d_info *di, int i) - { - struct demangle_component *p; - -@@ -1620,7 +1627,7 @@ d_unqualified_name (struct d_info *di) - static struct demangle_component * - d_source_name (struct d_info *di) - { -- long len; -+ int len; - struct demangle_component *ret; - - len = d_number (di); -@@ -1633,12 +1640,12 @@ d_source_name (struct d_info *di) - - /* number ::= [n] <(non-negative decimal integer)> */ - --static long -+static int - d_number (struct d_info *di) - { - int negative; - char peek; -- long ret; -+ int ret; - - negative = 0; - peek = d_peek_char (di); -@@ -1681,7 +1688,7 @@ d_number_component (struct d_info *di) - /* identifier ::= <(unqualified source code identifier)> */ - - static struct demangle_component * --d_identifier (struct d_info *di, long len) -+d_identifier (struct d_info *di, int len) - { - const char *name; - -@@ -1702,7 +1709,7 @@ d_identifier (struct d_info *di, long le - /* Look for something which looks like a gcc encoding of an - anonymous namespace, and replace it with a more user friendly - name. */ -- if (len >= (long) ANONYMOUS_NAMESPACE_PREFIX_LEN + 2 -+ if (len >= (int) ANONYMOUS_NAMESPACE_PREFIX_LEN + 2 - && memcmp (name, ANONYMOUS_NAMESPACE_PREFIX, - ANONYMOUS_NAMESPACE_PREFIX_LEN) == 0) - { -@@ -1870,7 +1877,7 @@ d_java_resource (struct d_info *di) - { - struct demangle_component *p = NULL; - struct demangle_component *next = NULL; -- long len, i; -+ int len, i; - char c; - const char *str; - -@@ -2012,7 +2019,7 @@ d_special_name (struct d_info *di) - case 'C': - { - struct demangle_component *derived_type; -- long offset; -+ int offset; - struct demangle_component *base_type; - - derived_type = cplus_demangle_type (di); -@@ -2946,10 +2953,10 @@ d_pointer_to_member_type (struct d_info - - /* _ */ - --static long -+static int - d_compact_number (struct d_info *di) - { -- long num; -+ int num; - if (d_peek_char (di) == '_') - num = 0; - else if (d_peek_char (di) == 'n') -@@ -2957,7 +2964,7 @@ d_compact_number (struct d_info *di) - else - num = d_number (di) + 1; - -- if (! d_check_char (di, '_')) -+ if (num < 0 || ! d_check_char (di, '_')) - return -1; - return num; - } -@@ -2969,7 +2976,7 @@ d_compact_number (struct d_info *di) - static struct demangle_component * - d_template_param (struct d_info *di) - { -- long param; -+ int param; - - if (! d_check_char (di, 'T')) - return NULL; -@@ -3171,9 +3178,10 @@ d_expression_1 (struct d_info *di) - } - else - { -- index = d_compact_number (di) + 1; -- if (index == 0) -+ index = d_compact_number (di); -+ if (index == INT_MAX || index == -1) - return NULL; -+ index ++; - } - return d_make_function_param (di, index); - } -@@ -3502,7 +3510,7 @@ d_local_name (struct d_info *di) - static int - d_discriminator (struct d_info *di) - { -- long discrim; -+ int discrim; - - if (d_peek_char (di) != '_') - return 1; -@@ -3558,7 +3566,7 @@ static struct demangle_component * - d_unnamed_type (struct d_info *di) - { - struct demangle_component *ret; -- long num; -+ int num; - - if (! 
d_check_char (di, 'U')) - return NULL; -@@ -4086,10 +4094,10 @@ d_append_string (struct d_print_info *dp - } - - static inline void --d_append_num (struct d_print_info *dpi, long l) -+d_append_num (struct d_print_info *dpi, int l) - { - char buf[25]; -- sprintf (buf,"%ld", l); -+ sprintf (buf,"%d", l); - d_append_string (dpi, buf); - } - -Index: git/libiberty/testsuite/demangle-expected -=================================================================== ---- git.orig/libiberty/testsuite/demangle-expected -+++ git/libiberty/testsuite/demangle-expected -@@ -4422,12 +4422,22 @@ void baz(A::Type foo() - # --# Tests a use-after-free problem -+# Tests a use-after-free problem PR70481 - - _Q.__0 - ::Q.(void) - # --# Tests a use-after-free problem -+# Tests a use-after-free problem PR70481 - - _Q10-__9cafebabe. - cafebabe.::-(void) -+# -+# Tests integer overflow problem PR70492 -+ -+__vt_90000000000cafebabe -+__vt_90000000000cafebabe -+# -+# Tests write access violation PR70498 -+ -+_Z80800000000000000000000 -+_Z80800000000000000000000 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/ubsan-fix-check-empty-string.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/ubsan-fix-check-empty-string.patch deleted file mode 100644 index c0127198e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.2/ubsan-fix-check-empty-string.patch +++ /dev/null @@ -1,28 +0,0 @@ -From 8db2cf6353c13f2a84cbe49b689654897906c499 Mon Sep 17 00:00:00 2001 -From: kyukhin -Date: Sat, 3 Sep 2016 10:57:05 +0000 -Subject: [PATCH] gcc/ * ubsan.c (ubsan_use_new_style_p): Fix check for empty - string. - -git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@239971 138bc75d-0d04-0410-961f-82ee72b054a4 - -Upstream-Status: Backport -Signed-off-by: Joshua Lock - ---- - gcc/ubsan.c | 2 +- - 2 files changed, 5 insertions(+), 1 deletion(-) - -Index: gcc-6.3.0/gcc/ubsan.c -=================================================================== ---- gcc-6.3.0.orig/gcc/ubsan.c -+++ gcc-6.3.0/gcc/ubsan.c -@@ -1471,7 +1471,7 @@ ubsan_use_new_style_p (location_t loc) - - expanded_location xloc = expand_location (loc); - if (xloc.file == NULL || strncmp (xloc.file, "\1", 2) == 0 -- || xloc.file == '\0' || xloc.file[0] == '\xff' -+ || xloc.file[0] == '\0' || xloc.file[0] == '\xff' - || xloc.file[1] == '\xff') - return false; - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3.inc new file mode 100644 index 000000000..5c81a33af --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3.inc @@ -0,0 +1,138 @@ +require gcc-common.inc + +# Third digit in PV should be incremented after a minor release + +PV = "6.3.0" + +# BINV should be incremented to a revision after a minor gcc release + +BINV = "6.3.0" + +FILESEXTRAPATHS =. 
"${FILE_DIRNAME}/gcc-6.3:${FILE_DIRNAME}/gcc-6.3/backport:" + +DEPENDS =+ "mpfr gmp libmpc zlib" +NATIVEDEPS = "mpfr-native gmp-native libmpc-native zlib-native" + +LICENSE = "GPL-3.0-with-GCC-exception & GPLv3" + +LIC_FILES_CHKSUM = "\ + file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \ + file://COPYING3;md5=d32239bcb673463ab874e80d47fae504 \ + file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6 \ + file://COPYING.LIB;md5=2d5025d4aa3495befef8f17206a5b0a1 \ + file://COPYING.RUNTIME;md5=fe60d87048567d4fe8c8a0ed2448bcc8 \ +" + + +BASEURI ?= "${GNU_MIRROR}/gcc/gcc-${PV}/gcc-${PV}.tar.bz2" +#SRCREV = "bd9a826d5448db11d29d2ec5884e7e679066f140" +#BASEURI ?= "git://github.com/gcc-mirror/gcc;branch=gcc-6-branch;protocol=git" +#BASEURI ?= "ftp://sourceware.org/pub/gcc/snapshots/6.2.0-RC-20160815/gcc-6.2.0-RC-20160815.tar.bz2" + +SRC_URI = "\ + ${BASEURI} \ + file://0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch \ + file://0002-uclibc-conf.patch \ + file://0003-gcc-uclibc-locale-ctype_touplow_t.patch \ + file://0004-uclibc-locale.patch \ + file://0005-uclibc-locale-no__x.patch \ + file://0006-uclibc-locale-wchar_fix.patch \ + file://0007-uclibc-locale-update.patch \ + file://0008-missing-execinfo_h.patch \ + file://0009-c99-snprintf.patch \ + file://0010-gcc-poison-system-directories.patch \ + file://0011-gcc-poison-dir-extend.patch \ + file://0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch \ + file://0013-64-bit-multilib-hack.patch \ + file://0014-optional-libstdc.patch \ + file://0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch \ + file://0016-COLLECT_GCC_OPTIONS.patch \ + file://0017-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch \ + file://0018-fortran-cross-compile-hack.patch \ + file://0019-cpp-honor-sysroot.patch \ + file://0020-MIPS64-Default-to-N64-ABI.patch \ + file://0021-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch \ + file://0022-gcc-Fix-argument-list-too-long-error.patch \ + file://0023-Disable-sdt.patch \ + file://0024-libtool.patch \ + file://0025-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch \ + file://0026-Use-the-multilib-config-files-from-B-instead-of-usin.patch \ + file://0027-Avoid-using-libdir-from-.la-which-usually-points-to-.patch \ + file://0028-export-CPP.patch \ + file://0029-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch \ + file://0030-Disable-the-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch \ + file://0031-Ensure-target-gcc-headers-can-be-included.patch \ + file://0032-gcc-4.8-won-t-build-with-disable-dependency-tracking.patch \ + file://0033-Don-t-search-host-directory-during-relink-if-inst_pr.patch \ + file://0034-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch \ + file://0035-aarch64-Add-support-for-musl-ldso.patch \ + file://0036-libcc1-fix-libcc1-s-install-path-and-rpath.patch \ + file://0037-handle-sysroot-support-for-nativesdk-gcc.patch \ + file://0038-Search-target-sysroot-gcc-version-specific-dirs-with.patch \ + file://0039-Fix-various-_FOR_BUILD-and-related-variables.patch \ + file://0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch \ + file://0041-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch \ + file://0042-gcc-libcpp-support-ffile-prefix-map-old-new.patch \ + file://0043-Reuse-fdebug-prefix-map-to-replace-ffile-prefix-map.patch \ + file://0044-gcc-final.c-fdebug-prefix-map-support-to-remap-sourc.patch \ + file://0045-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch \ + file://0046-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch \ + 
file://0047-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch \ + file://0048-sync-gcc-stddef.h-with-musl.patch \ + file://0054_all_nopie-all-flags.patch \ + file://0055-unwind_h-glibc26.patch \ + ${BACKPORTS} \ +" +BACKPORTS = "\ + file://CVE-2016-6131.patch \ + file://ubsan-fix-check-empty-string.patch \ +" +SRC_URI[md5sum] = "677a7623c7ef6ab99881bc4e048debb6" +SRC_URI[sha256sum] = "f06ae7f3f790fbf0f018f6d40e844451e6bc3b7bc96e128e63b09825c1f8b29f" + +S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/gcc-${PV}" +#S = "${TMPDIR}/work-shared/gcc-${PV}-${PR}/git" +B = "${WORKDIR}/gcc-${PV}/build.${HOST_SYS}.${TARGET_SYS}" + +# Language Overrides +FORTRAN = "" +JAVA = "" + +LTO = "--enable-lto" + +EXTRA_OECONF_BASE = "\ + ${LTO} \ + --enable-libssp \ + --enable-libitm \ + --disable-bootstrap \ + --disable-libmudflap \ + --with-system-zlib \ + --with-linker-hash-style=${LINKER_HASH_STYLE} \ + --enable-linker-build-id \ + --with-ppl=no \ + --with-cloog=no \ + --enable-checking=release \ + --enable-cheaders=c_global \ + --without-isl \ +" + +EXTRA_OECONF_INITIAL = "\ + --disable-libmudflap \ + --disable-libgomp \ + --disable-libitm \ + --disable-libquadmath \ + --with-system-zlib \ + --disable-lto \ + --disable-plugin \ + --enable-decimal-float=no \ + --without-isl \ + gcc_cv_libc_provides_ssp=yes \ +" + +EXTRA_OECONF_append_libc-uclibc = " --disable-decimal-float " + +EXTRA_OECONF_PATHS = "\ + --with-gxx-include-dir=/not/exist{target_includedir}/c++/${BINV} \ + --with-sysroot=/not/exist \ + --with-build-sysroot=${STAGING_DIR_TARGET} \ +" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch new file mode 100644 index 000000000..415f091ee --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0001-gcc-4.3.1-ARCH_FLAGS_FOR_TARGET.patch @@ -0,0 +1,42 @@ +From 92ed30da16b7487b334f739be177eb39885ab772 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 08:37:11 +0400 +Subject: [PATCH 01/46] gcc-4.3.1: ARCH_FLAGS_FOR_TARGET + +Signed-off-by: Khem Raj + +Upstream-Status: Inappropriate [embedded specific] +--- + configure | 2 +- + configure.ac | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/configure b/configure +index 35f231e..bfadc33 100755 +--- a/configure ++++ b/configure +@@ -7550,7 +7550,7 @@ fi + # for target_alias and gcc doesn't manage it consistently. + target_configargs="--cache-file=./config.cache ${target_configargs}" + +-FLAGS_FOR_TARGET= ++FLAGS_FOR_TARGET="$ARCH_FLAGS_FOR_TARGET" + case " $target_configdirs " in + *" newlib "*) + case " $target_configargs " in +diff --git a/configure.ac b/configure.ac +index 74bf58a..197d61b 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -3149,7 +3149,7 @@ fi + # for target_alias and gcc doesn't manage it consistently. 
+ target_configargs="--cache-file=./config.cache ${target_configargs}" + +-FLAGS_FOR_TARGET= ++FLAGS_FOR_TARGET="$ARCH_FLAGS_FOR_TARGET" + case " $target_configdirs " in + *" newlib "*) + case " $target_configargs " in +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0002-uclibc-conf.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0002-uclibc-conf.patch new file mode 100644 index 000000000..4d284ef86 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0002-uclibc-conf.patch @@ -0,0 +1,53 @@ +From 4efc5a258c812875743647d756f75c93c4d514a5 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 08:38:25 +0400 +Subject: [PATCH 02/46] uclibc-conf + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + contrib/regression/objs-gcc.sh | 4 ++++ + libjava/classpath/ltconfig | 4 ++-- + 2 files changed, 6 insertions(+), 2 deletions(-) + +diff --git a/contrib/regression/objs-gcc.sh b/contrib/regression/objs-gcc.sh +index 60b0497..6dc7ead 100755 +--- a/contrib/regression/objs-gcc.sh ++++ b/contrib/regression/objs-gcc.sh +@@ -106,6 +106,10 @@ if [ $H_REAL_TARGET = $H_REAL_HOST -a $H_REAL_TARGET = i686-pc-linux-gnu ] + then + make all-gdb all-dejagnu all-ld || exit 1 + make install-gdb install-dejagnu install-ld || exit 1 ++elif [ $H_REAL_TARGET = $H_REAL_HOST -a $H_REAL_TARGET = i686-pc-linux-uclibc ] ++ then ++ make all-gdb all-dejagnu all-ld || exit 1 ++ make install-gdb install-dejagnu install-ld || exit 1 + elif [ $H_REAL_TARGET = $H_REAL_HOST ] ; then + make bootstrap || exit 1 + make install || exit 1 +diff --git a/libjava/classpath/ltconfig b/libjava/classpath/ltconfig +index d318957..df55950 100755 +--- a/libjava/classpath/ltconfig ++++ b/libjava/classpath/ltconfig +@@ -603,7 +603,7 @@ host_os=`echo $host | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` + + # Transform linux* to *-*-linux-gnu*, to support old configure scripts. + case $host_os in +-linux-gnu*) ;; ++linux-gnu*|linux-uclibc*) ;; + linux*) host=`echo $host | sed 's/^\(.*-.*-linux\)\(.*\)$/\1-gnu\2/'` + esac + +@@ -1247,7 +1247,7 @@ linux-gnuoldld* | linux-gnuaout* | linux-gnucoff*) + ;; + + # This must be Linux ELF. 
+-linux-gnu*) ++linux*) + version_type=linux + need_lib_prefix=no + need_version=no +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0003-gcc-uclibc-locale-ctype_touplow_t.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0003-gcc-uclibc-locale-ctype_touplow_t.patch new file mode 100644 index 000000000..df07febee --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0003-gcc-uclibc-locale-ctype_touplow_t.patch @@ -0,0 +1,87 @@ +From ad5fd283fc7ef04f66c7fb003805364ea3bd34e9 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 08:40:12 +0400 +Subject: [PATCH 03/46] gcc-uclibc-locale-ctype_touplow_t + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + libstdc++-v3/config/locale/generic/c_locale.cc | 5 +++++ + libstdc++-v3/config/locale/generic/c_locale.h | 9 +++++++++ + libstdc++-v3/config/os/gnu-linux/ctype_base.h | 9 +++++++++ + 3 files changed, 23 insertions(+) + +diff --git a/libstdc++-v3/config/locale/generic/c_locale.cc b/libstdc++-v3/config/locale/generic/c_locale.cc +index ef6ce8f..4740636 100644 +--- a/libstdc++-v3/config/locale/generic/c_locale.cc ++++ b/libstdc++-v3/config/locale/generic/c_locale.cc +@@ -273,5 +273,10 @@ _GLIBCXX_END_NAMESPACE_VERSION + #ifdef _GLIBCXX_LONG_DOUBLE_COMPAT + #define _GLIBCXX_LDBL_COMPAT(dbl, ldbl) \ + extern "C" void ldbl (void) __attribute__ ((alias (#dbl))) ++#ifdef __UCLIBC__ ++// This is because __c_locale is of type __ctype_touplow_t* which is short on uclibc. for glibc its int* ++_GLIBCXX_LDBL_COMPAT(_ZSt14__convert_to_vIdEvPKcRT_RSt12_Ios_IostateRKPs, _ZSt14__convert_to_vIeEvPKcRT_RSt12_Ios_IostateRKPs); ++#else + _GLIBCXX_LDBL_COMPAT(_ZSt14__convert_to_vIdEvPKcRT_RSt12_Ios_IostateRKPi, _ZSt14__convert_to_vIeEvPKcRT_RSt12_Ios_IostateRKPi); ++#endif + #endif // _GLIBCXX_LONG_DOUBLE_COMPAT +diff --git a/libstdc++-v3/config/locale/generic/c_locale.h b/libstdc++-v3/config/locale/generic/c_locale.h +index 794471e..d65f955 100644 +--- a/libstdc++-v3/config/locale/generic/c_locale.h ++++ b/libstdc++-v3/config/locale/generic/c_locale.h +@@ -40,13 +40,22 @@ + + #include + ++#ifdef __UCLIBC__ ++#include ++#include ++#endif ++ + #define _GLIBCXX_NUM_CATEGORIES 0 + + namespace std _GLIBCXX_VISIBILITY(default) + { + _GLIBCXX_BEGIN_NAMESPACE_VERSION + ++#ifdef __UCLIBC__ ++ typedef __ctype_touplow_t* __c_locale; ++#else + typedef int* __c_locale; ++#endif + + // Convert numeric value of type double and long double to string and + // return length of string. If vsnprintf is available use it, otherwise +diff --git a/libstdc++-v3/config/os/gnu-linux/ctype_base.h b/libstdc++-v3/config/os/gnu-linux/ctype_base.h +index 591c793..55eb0e9 100644 +--- a/libstdc++-v3/config/os/gnu-linux/ctype_base.h ++++ b/libstdc++-v3/config/os/gnu-linux/ctype_base.h +@@ -33,6 +33,11 @@ + + // Information as gleaned from /usr/include/ctype.h + ++#ifdef __UCLIBC__ ++#include ++#include ++#endif ++ + namespace std _GLIBCXX_VISIBILITY(default) + { + _GLIBCXX_BEGIN_NAMESPACE_VERSION +@@ -41,7 +46,11 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION + struct ctype_base + { + // Non-standard typedefs. ++#ifdef __UCLIBC__ ++ typedef const __ctype_touplow_t* __to_type; ++#else + typedef const int* __to_type; ++#endif + + // NB: Offsets into ctype::_M_table force a particular size + // on the mask type. Because of this, we don't use an enum. 
+-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0004-uclibc-locale.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0004-uclibc-locale.patch new file mode 100644 index 000000000..ae2627c2e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0004-uclibc-locale.patch @@ -0,0 +1,2862 @@ +From 68bd083357e78678a9baac760beb2a31f00954a5 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 08:41:39 +0400 +Subject: [PATCH 04/46] uclibc-locale + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + libstdc++-v3/acinclude.m4 | 37 ++ + .../config/locale/uclibc/c++locale_internal.h | 63 ++ + libstdc++-v3/config/locale/uclibc/c_locale.cc | 160 +++++ + libstdc++-v3/config/locale/uclibc/c_locale.h | 117 ++++ + .../config/locale/uclibc/codecvt_members.cc | 308 +++++++++ + .../config/locale/uclibc/collate_members.cc | 80 +++ + libstdc++-v3/config/locale/uclibc/ctype_members.cc | 300 +++++++++ + .../config/locale/uclibc/messages_members.cc | 100 +++ + .../config/locale/uclibc/messages_members.h | 118 ++++ + .../config/locale/uclibc/monetary_members.cc | 692 +++++++++++++++++++++ + .../config/locale/uclibc/numeric_members.cc | 160 +++++ + libstdc++-v3/config/locale/uclibc/time_members.cc | 406 ++++++++++++ + libstdc++-v3/config/locale/uclibc/time_members.h | 68 ++ + libstdc++-v3/configure | 75 +++ + libstdc++-v3/include/c_compatibility/wchar.h | 2 + + libstdc++-v3/include/c_std/cwchar | 2 + + 16 files changed, 2688 insertions(+) + create mode 100644 libstdc++-v3/config/locale/uclibc/c++locale_internal.h + create mode 100644 libstdc++-v3/config/locale/uclibc/c_locale.cc + create mode 100644 libstdc++-v3/config/locale/uclibc/c_locale.h + create mode 100644 libstdc++-v3/config/locale/uclibc/codecvt_members.cc + create mode 100644 libstdc++-v3/config/locale/uclibc/collate_members.cc + create mode 100644 libstdc++-v3/config/locale/uclibc/ctype_members.cc + create mode 100644 libstdc++-v3/config/locale/uclibc/messages_members.cc + create mode 100644 libstdc++-v3/config/locale/uclibc/messages_members.h + create mode 100644 libstdc++-v3/config/locale/uclibc/monetary_members.cc + create mode 100644 libstdc++-v3/config/locale/uclibc/numeric_members.cc + create mode 100644 libstdc++-v3/config/locale/uclibc/time_members.cc + create mode 100644 libstdc++-v3/config/locale/uclibc/time_members.h + +diff --git a/libstdc++-v3/acinclude.m4 b/libstdc++-v3/acinclude.m4 +index b0f88cb..a0ee36b 100644 +--- a/libstdc++-v3/acinclude.m4 ++++ b/libstdc++-v3/acinclude.m4 +@@ -2358,6 +2358,9 @@ AC_DEFUN([GLIBCXX_ENABLE_CLOCALE], [ + # Default to "generic". + if test $enable_clocale_flag = auto; then + case ${target_os} in ++ *-uclibc*) ++ enable_clocale_flag=uclibc ++ ;; + linux* | gnu* | kfreebsd*-gnu | knetbsd*-gnu) + enable_clocale_flag=gnu + ;; +@@ -2542,6 +2545,40 @@ AC_DEFUN([GLIBCXX_ENABLE_CLOCALE], [ + CTIME_CC=config/locale/generic/time_members.cc + CLOCALE_INTERNAL_H=config/locale/generic/c++locale_internal.h + ;; ++ uclibc) ++ AC_MSG_RESULT(uclibc) ++ ++ # Declare intention to use gettext, and add support for specific ++ # languages. ++ # For some reason, ALL_LINGUAS has to be before AM-GNU-GETTEXT ++ ALL_LINGUAS="de fr" ++ ++ # Don't call AM-GNU-GETTEXT here. Instead, assume glibc. ++ AC_CHECK_PROG(check_msgfmt, msgfmt, yes, no) ++ if test x"$check_msgfmt" = x"yes" && test x"$enable_nls" = x"yes"; then ++ USE_NLS=yes ++ fi ++ # Export the build objects. 
++ for ling in $ALL_LINGUAS; do \ ++ glibcxx_MOFILES="$glibcxx_MOFILES $ling.mo"; \ ++ glibcxx_POFILES="$glibcxx_POFILES $ling.po"; \ ++ done ++ AC_SUBST(glibcxx_MOFILES) ++ AC_SUBST(glibcxx_POFILES) ++ ++ CLOCALE_H=config/locale/uclibc/c_locale.h ++ CLOCALE_CC=config/locale/uclibc/c_locale.cc ++ CCODECVT_CC=config/locale/uclibc/codecvt_members.cc ++ CCOLLATE_CC=config/locale/uclibc/collate_members.cc ++ CCTYPE_CC=config/locale/uclibc/ctype_members.cc ++ CMESSAGES_H=config/locale/uclibc/messages_members.h ++ CMESSAGES_CC=config/locale/uclibc/messages_members.cc ++ CMONEY_CC=config/locale/uclibc/monetary_members.cc ++ CNUMERIC_CC=config/locale/uclibc/numeric_members.cc ++ CTIME_H=config/locale/uclibc/time_members.h ++ CTIME_CC=config/locale/uclibc/time_members.cc ++ CLOCALE_INTERNAL_H=config/locale/uclibc/c++locale_internal.h ++ ;; + esac + + # This is where the testsuite looks for locale catalogs, using the +diff --git a/libstdc++-v3/config/locale/uclibc/c++locale_internal.h b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h +new file mode 100644 +index 0000000..2ae3e4a +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h +@@ -0,0 +1,63 @@ ++// Prototypes for GLIBC thread locale __-prefixed functions -*- C++ -*- ++ ++// Copyright (C) 2002, 2004, 2005 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. ++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. 
++ ++// Written by Jakub Jelinek ++ ++#include ++#include ++ ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning clean this up ++#endif ++ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ ++extern "C" __typeof(nl_langinfo_l) __nl_langinfo_l; ++extern "C" __typeof(strcoll_l) __strcoll_l; ++extern "C" __typeof(strftime_l) __strftime_l; ++extern "C" __typeof(strtod_l) __strtod_l; ++extern "C" __typeof(strtof_l) __strtof_l; ++extern "C" __typeof(strtold_l) __strtold_l; ++extern "C" __typeof(strxfrm_l) __strxfrm_l; ++extern "C" __typeof(newlocale) __newlocale; ++extern "C" __typeof(freelocale) __freelocale; ++extern "C" __typeof(duplocale) __duplocale; ++extern "C" __typeof(uselocale) __uselocale; ++ ++#ifdef _GLIBCXX_USE_WCHAR_T ++extern "C" __typeof(iswctype_l) __iswctype_l; ++extern "C" __typeof(towlower_l) __towlower_l; ++extern "C" __typeof(towupper_l) __towupper_l; ++extern "C" __typeof(wcscoll_l) __wcscoll_l; ++extern "C" __typeof(wcsftime_l) __wcsftime_l; ++extern "C" __typeof(wcsxfrm_l) __wcsxfrm_l; ++extern "C" __typeof(wctype_l) __wctype_l; ++#endif ++ ++#endif // GLIBC 2.3 and later +diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.cc b/libstdc++-v3/config/locale/uclibc/c_locale.cc +new file mode 100644 +index 0000000..5081dc1 +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/c_locale.cc +@@ -0,0 +1,160 @@ ++// Wrapper for underlying C-language localization -*- C++ -*- ++ ++// Copyright (C) 2001, 2002, 2003 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. ++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. ++ ++// ++// ISO C++ 14882: 22.8 Standard locale categories. ++// ++ ++// Written by Benjamin Kosnik ++ ++#include // For errno ++#include ++#include ++#include ++#include ++ ++#ifndef __UCLIBC_HAS_XLOCALE__ ++#define __strtol_l(S, E, B, L) strtol((S), (E), (B)) ++#define __strtoul_l(S, E, B, L) strtoul((S), (E), (B)) ++#define __strtoll_l(S, E, B, L) strtoll((S), (E), (B)) ++#define __strtoull_l(S, E, B, L) strtoull((S), (E), (B)) ++#define __strtof_l(S, E, L) strtof((S), (E)) ++#define __strtod_l(S, E, L) strtod((S), (E)) ++#define __strtold_l(S, E, L) strtold((S), (E)) ++#warning should dummy __newlocale check for C|POSIX ? 
++#define __newlocale(a, b, c) NULL ++#define __freelocale(a) ((void)0) ++#define __duplocale(a) __c_locale() ++#endif ++ ++namespace std ++{ ++ template<> ++ void ++ __convert_to_v(const char* __s, float& __v, ios_base::iostate& __err, ++ const __c_locale& __cloc) ++ { ++ if (!(__err & ios_base::failbit)) ++ { ++ char* __sanity; ++ errno = 0; ++ float __f = __strtof_l(__s, &__sanity, __cloc); ++ if (__sanity != __s && errno != ERANGE) ++ __v = __f; ++ else ++ __err |= ios_base::failbit; ++ } ++ } ++ ++ template<> ++ void ++ __convert_to_v(const char* __s, double& __v, ios_base::iostate& __err, ++ const __c_locale& __cloc) ++ { ++ if (!(__err & ios_base::failbit)) ++ { ++ char* __sanity; ++ errno = 0; ++ double __d = __strtod_l(__s, &__sanity, __cloc); ++ if (__sanity != __s && errno != ERANGE) ++ __v = __d; ++ else ++ __err |= ios_base::failbit; ++ } ++ } ++ ++ template<> ++ void ++ __convert_to_v(const char* __s, long double& __v, ios_base::iostate& __err, ++ const __c_locale& __cloc) ++ { ++ if (!(__err & ios_base::failbit)) ++ { ++ char* __sanity; ++ errno = 0; ++ long double __ld = __strtold_l(__s, &__sanity, __cloc); ++ if (__sanity != __s && errno != ERANGE) ++ __v = __ld; ++ else ++ __err |= ios_base::failbit; ++ } ++ } ++ ++ void ++ locale::facet::_S_create_c_locale(__c_locale& __cloc, const char* __s, ++ __c_locale __old) ++ { ++ __cloc = __newlocale(1 << LC_ALL, __s, __old); ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ if (!__cloc) ++ { ++ // This named locale is not supported by the underlying OS. ++ __throw_runtime_error(__N("locale::facet::_S_create_c_locale " ++ "name not valid")); ++ } ++#endif ++ } ++ ++ void ++ locale::facet::_S_destroy_c_locale(__c_locale& __cloc) ++ { ++ if (_S_get_c_locale() != __cloc) ++ __freelocale(__cloc); ++ } ++ ++ __c_locale ++ locale::facet::_S_clone_c_locale(__c_locale& __cloc) ++ { return __duplocale(__cloc); } ++} // namespace std ++ ++namespace __gnu_cxx ++{ ++ const char* const category_names[6 + _GLIBCXX_NUM_CATEGORIES] = ++ { ++ "LC_CTYPE", ++ "LC_NUMERIC", ++ "LC_TIME", ++ "LC_COLLATE", ++ "LC_MONETARY", ++ "LC_MESSAGES", ++#if _GLIBCXX_NUM_CATEGORIES != 0 ++ "LC_PAPER", ++ "LC_NAME", ++ "LC_ADDRESS", ++ "LC_TELEPHONE", ++ "LC_MEASUREMENT", ++ "LC_IDENTIFICATION" ++#endif ++ }; ++} ++ ++namespace std ++{ ++ const char* const* const locale::_S_categories = __gnu_cxx::category_names; ++} // namespace std +diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.h b/libstdc++-v3/config/locale/uclibc/c_locale.h +new file mode 100644 +index 0000000..da07c1f +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/c_locale.h +@@ -0,0 +1,117 @@ ++// Wrapper for underlying C-language localization -*- C++ -*- ++ ++// Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. ++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. 
If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. ++ ++// ++// ISO C++ 14882: 22.8 Standard locale categories. ++// ++ ++// Written by Benjamin Kosnik ++ ++#ifndef _C_LOCALE_H ++#define _C_LOCALE_H 1 ++ ++#pragma GCC system_header ++ ++#include // get std::strlen ++#include // get std::snprintf or std::sprintf ++#include ++#include // For codecvt ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning fix this ++#endif ++#ifdef __UCLIBC_HAS_LOCALE__ ++#include // For codecvt using iconv, iconv_t ++#endif ++#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ ++#include // For messages ++#endif ++ ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning what is _GLIBCXX_C_LOCALE_GNU for ++#endif ++#define _GLIBCXX_C_LOCALE_GNU 1 ++ ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning fix categories ++#endif ++// #define _GLIBCXX_NUM_CATEGORIES 6 ++#define _GLIBCXX_NUM_CATEGORIES 0 ++ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++namespace __gnu_cxx ++{ ++ extern "C" __typeof(uselocale) __uselocale; ++} ++#endif ++ ++namespace std ++{ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ typedef __locale_t __c_locale; ++#else ++ typedef int* __c_locale; ++#endif ++ ++ // Convert numeric value of type _Tv to string and return length of ++ // string. If snprintf is available use it, otherwise fall back to ++ // the unsafe sprintf which, in general, can be dangerous and should ++ // be avoided. ++ template ++ int ++ __convert_from_v(char* __out, ++ const int __size __attribute__ ((__unused__)), ++ const char* __fmt, ++#ifdef __UCLIBC_HAS_XCLOCALE__ ++ _Tv __v, const __c_locale& __cloc, int __prec) ++ { ++ __c_locale __old = __gnu_cxx::__uselocale(__cloc); ++#else ++ _Tv __v, const __c_locale&, int __prec) ++ { ++# ifdef __UCLIBC_HAS_LOCALE__ ++ char* __old = std::setlocale(LC_ALL, NULL); ++ char* __sav = new char[std::strlen(__old) + 1]; ++ std::strcpy(__sav, __old); ++ std::setlocale(LC_ALL, "C"); ++# endif ++#endif ++ ++ const int __ret = std::snprintf(__out, __size, __fmt, __prec, __v); ++ ++#ifdef __UCLIBC_HAS_XCLOCALE__ ++ __gnu_cxx::__uselocale(__old); ++#elif defined __UCLIBC_HAS_LOCALE__ ++ std::setlocale(LC_ALL, __sav); ++ delete [] __sav; ++#endif ++ return __ret; ++ } ++} ++ ++#endif +diff --git a/libstdc++-v3/config/locale/uclibc/codecvt_members.cc b/libstdc++-v3/config/locale/uclibc/codecvt_members.cc +new file mode 100644 +index 0000000..64aa962 +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/codecvt_members.cc +@@ -0,0 +1,308 @@ ++// std::codecvt implementation details, GNU version -*- C++ -*- ++ ++// Copyright (C) 2002, 2003 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. 
++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. ++ ++// ++// ISO C++ 14882: 22.2.1.5 - Template class codecvt ++// ++ ++// Written by Benjamin Kosnik ++ ++#include ++#include // For MB_CUR_MAX ++#include // For MB_LEN_MAX ++#include ++ ++namespace std ++{ ++ // Specializations. ++#ifdef _GLIBCXX_USE_WCHAR_T ++ codecvt_base::result ++ codecvt:: ++ do_out(state_type& __state, const intern_type* __from, ++ const intern_type* __from_end, const intern_type*& __from_next, ++ extern_type* __to, extern_type* __to_end, ++ extern_type*& __to_next) const ++ { ++ result __ret = ok; ++ state_type __tmp_state(__state); ++ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(_M_c_locale_codecvt); ++#endif ++ ++ // wcsnrtombs is *very* fast but stops if encounters NUL characters: ++ // in case we fall back to wcrtomb and then continue, in a loop. ++ // NB: wcsnrtombs is a GNU extension ++ for (__from_next = __from, __to_next = __to; ++ __from_next < __from_end && __to_next < __to_end ++ && __ret == ok;) ++ { ++ const intern_type* __from_chunk_end = wmemchr(__from_next, L'\0', ++ __from_end - __from_next); ++ if (!__from_chunk_end) ++ __from_chunk_end = __from_end; ++ ++ __from = __from_next; ++ const size_t __conv = wcsnrtombs(__to_next, &__from_next, ++ __from_chunk_end - __from_next, ++ __to_end - __to_next, &__state); ++ if (__conv == static_cast(-1)) ++ { ++ // In case of error, in order to stop at the exact place we ++ // have to start again from the beginning with a series of ++ // wcrtomb. 
++ for (; __from < __from_next; ++__from) ++ __to_next += wcrtomb(__to_next, *__from, &__tmp_state); ++ __state = __tmp_state; ++ __ret = error; ++ } ++ else if (__from_next && __from_next < __from_chunk_end) ++ { ++ __to_next += __conv; ++ __ret = partial; ++ } ++ else ++ { ++ __from_next = __from_chunk_end; ++ __to_next += __conv; ++ } ++ ++ if (__from_next < __from_end && __ret == ok) ++ { ++ extern_type __buf[MB_LEN_MAX]; ++ __tmp_state = __state; ++ const size_t __conv = wcrtomb(__buf, *__from_next, &__tmp_state); ++ if (__conv > static_cast(__to_end - __to_next)) ++ __ret = partial; ++ else ++ { ++ memcpy(__to_next, __buf, __conv); ++ __state = __tmp_state; ++ __to_next += __conv; ++ ++__from_next; ++ } ++ } ++ } ++ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#endif ++ ++ return __ret; ++ } ++ ++ codecvt_base::result ++ codecvt:: ++ do_in(state_type& __state, const extern_type* __from, ++ const extern_type* __from_end, const extern_type*& __from_next, ++ intern_type* __to, intern_type* __to_end, ++ intern_type*& __to_next) const ++ { ++ result __ret = ok; ++ state_type __tmp_state(__state); ++ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(_M_c_locale_codecvt); ++#endif ++ ++ // mbsnrtowcs is *very* fast but stops if encounters NUL characters: ++ // in case we store a L'\0' and then continue, in a loop. ++ // NB: mbsnrtowcs is a GNU extension ++ for (__from_next = __from, __to_next = __to; ++ __from_next < __from_end && __to_next < __to_end ++ && __ret == ok;) ++ { ++ const extern_type* __from_chunk_end; ++ __from_chunk_end = static_cast(memchr(__from_next, '\0', ++ __from_end ++ - __from_next)); ++ if (!__from_chunk_end) ++ __from_chunk_end = __from_end; ++ ++ __from = __from_next; ++ size_t __conv = mbsnrtowcs(__to_next, &__from_next, ++ __from_chunk_end - __from_next, ++ __to_end - __to_next, &__state); ++ if (__conv == static_cast(-1)) ++ { ++ // In case of error, in order to stop at the exact place we ++ // have to start again from the beginning with a series of ++ // mbrtowc. ++ for (;; ++__to_next, __from += __conv) ++ { ++ __conv = mbrtowc(__to_next, __from, __from_end - __from, ++ &__tmp_state); ++ if (__conv == static_cast(-1) ++ || __conv == static_cast(-2)) ++ break; ++ } ++ __from_next = __from; ++ __state = __tmp_state; ++ __ret = error; ++ } ++ else if (__from_next && __from_next < __from_chunk_end) ++ { ++ // It is unclear what to return in this case (see DR 382). ++ __to_next += __conv; ++ __ret = partial; ++ } ++ else ++ { ++ __from_next = __from_chunk_end; ++ __to_next += __conv; ++ } ++ ++ if (__from_next < __from_end && __ret == ok) ++ { ++ if (__to_next < __to_end) ++ { ++ // XXX Probably wrong for stateful encodings ++ __tmp_state = __state; ++ ++__from_next; ++ *__to_next++ = L'\0'; ++ } ++ else ++ __ret = partial; ++ } ++ } ++ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#endif ++ ++ return __ret; ++ } ++ ++ int ++ codecvt:: ++ do_encoding() const throw() ++ { ++ // XXX This implementation assumes that the encoding is ++ // stateless and is either single-byte or variable-width. ++ int __ret = 0; ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(_M_c_locale_codecvt); ++#endif ++ if (MB_CUR_MAX == 1) ++ __ret = 1; ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#endif ++ return __ret; ++ } ++ ++ int ++ codecvt:: ++ do_max_length() const throw() ++ { ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(_M_c_locale_codecvt); ++#endif ++ // XXX Probably wrong for stateful encodings. 
++ int __ret = MB_CUR_MAX; ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#endif ++ return __ret; ++ } ++ ++ int ++ codecvt:: ++ do_length(state_type& __state, const extern_type* __from, ++ const extern_type* __end, size_t __max) const ++ { ++ int __ret = 0; ++ state_type __tmp_state(__state); ++ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(_M_c_locale_codecvt); ++#endif ++ ++ // mbsnrtowcs is *very* fast but stops if encounters NUL characters: ++ // in case we advance past it and then continue, in a loop. ++ // NB: mbsnrtowcs is a GNU extension ++ ++ // A dummy internal buffer is needed in order for mbsnrtocws to consider ++ // its fourth parameter (it wouldn't with NULL as first parameter). ++ wchar_t* __to = static_cast(__builtin_alloca(sizeof(wchar_t) ++ * __max)); ++ while (__from < __end && __max) ++ { ++ const extern_type* __from_chunk_end; ++ __from_chunk_end = static_cast(memchr(__from, '\0', ++ __end ++ - __from)); ++ if (!__from_chunk_end) ++ __from_chunk_end = __end; ++ ++ const extern_type* __tmp_from = __from; ++ size_t __conv = mbsnrtowcs(__to, &__from, ++ __from_chunk_end - __from, ++ __max, &__state); ++ if (__conv == static_cast(-1)) ++ { ++ // In case of error, in order to stop at the exact place we ++ // have to start again from the beginning with a series of ++ // mbrtowc. ++ for (__from = __tmp_from;; __from += __conv) ++ { ++ __conv = mbrtowc(NULL, __from, __end - __from, ++ &__tmp_state); ++ if (__conv == static_cast(-1) ++ || __conv == static_cast(-2)) ++ break; ++ } ++ __state = __tmp_state; ++ __ret += __from - __tmp_from; ++ break; ++ } ++ if (!__from) ++ __from = __from_chunk_end; ++ ++ __ret += __from - __tmp_from; ++ __max -= __conv; ++ ++ if (__from < __end && __max) ++ { ++ // XXX Probably wrong for stateful encodings ++ __tmp_state = __state; ++ ++__from; ++ ++__ret; ++ --__max; ++ } ++ } ++ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#endif ++ ++ return __ret; ++ } ++#endif ++} +diff --git a/libstdc++-v3/config/locale/uclibc/collate_members.cc b/libstdc++-v3/config/locale/uclibc/collate_members.cc +new file mode 100644 +index 0000000..c2664a7 +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/collate_members.cc +@@ -0,0 +1,80 @@ ++// std::collate implementation details, GNU version -*- C++ -*- ++ ++// Copyright (C) 2001, 2002 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. ++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. 
Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. ++ ++// ++// ISO C++ 14882: 22.2.4.1.2 collate virtual functions ++// ++ ++// Written by Benjamin Kosnik ++ ++#include ++#include ++ ++#ifndef __UCLIBC_HAS_XLOCALE__ ++#define __strcoll_l(S1, S2, L) strcoll((S1), (S2)) ++#define __strxfrm_l(S1, S2, N, L) strxfrm((S1), (S2), (N)) ++#define __wcscoll_l(S1, S2, L) wcscoll((S1), (S2)) ++#define __wcsxfrm_l(S1, S2, N, L) wcsxfrm((S1), (S2), (N)) ++#endif ++ ++namespace std ++{ ++ // These are basically extensions to char_traits, and perhaps should ++ // be put there instead of here. ++ template<> ++ int ++ collate::_M_compare(const char* __one, const char* __two) const ++ { ++ int __cmp = __strcoll_l(__one, __two, _M_c_locale_collate); ++ return (__cmp >> (8 * sizeof (int) - 2)) | (__cmp != 0); ++ } ++ ++ template<> ++ size_t ++ collate::_M_transform(char* __to, const char* __from, ++ size_t __n) const ++ { return __strxfrm_l(__to, __from, __n, _M_c_locale_collate); } ++ ++#ifdef _GLIBCXX_USE_WCHAR_T ++ template<> ++ int ++ collate::_M_compare(const wchar_t* __one, ++ const wchar_t* __two) const ++ { ++ int __cmp = __wcscoll_l(__one, __two, _M_c_locale_collate); ++ return (__cmp >> (8 * sizeof (int) - 2)) | (__cmp != 0); ++ } ++ ++ template<> ++ size_t ++ collate::_M_transform(wchar_t* __to, const wchar_t* __from, ++ size_t __n) const ++ { return __wcsxfrm_l(__to, __from, __n, _M_c_locale_collate); } ++#endif ++} +diff --git a/libstdc++-v3/config/locale/uclibc/ctype_members.cc b/libstdc++-v3/config/locale/uclibc/ctype_members.cc +new file mode 100644 +index 0000000..7294e3a +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/ctype_members.cc +@@ -0,0 +1,300 @@ ++// std::ctype implementation details, GNU version -*- C++ -*- ++ ++// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. ++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. 
This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. ++ ++// ++// ISO C++ 14882: 22.2.1.1.2 ctype virtual functions. ++// ++ ++// Written by Benjamin Kosnik ++ ++#define _LIBC ++#include ++#undef _LIBC ++#include ++ ++#ifndef __UCLIBC_HAS_XLOCALE__ ++#define __wctype_l(S, L) wctype((S)) ++#define __towupper_l(C, L) towupper((C)) ++#define __towlower_l(C, L) towlower((C)) ++#define __iswctype_l(C, M, L) iswctype((C), (M)) ++#endif ++ ++namespace std ++{ ++ // NB: The other ctype specializations are in src/locale.cc and ++ // various /config/os/* files. ++ template<> ++ ctype_byname::ctype_byname(const char* __s, size_t __refs) ++ : ctype(0, false, __refs) ++ { ++ if (std::strcmp(__s, "C") != 0 && std::strcmp(__s, "POSIX") != 0) ++ { ++ this->_S_destroy_c_locale(this->_M_c_locale_ctype); ++ this->_S_create_c_locale(this->_M_c_locale_ctype, __s); ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ this->_M_toupper = this->_M_c_locale_ctype->__ctype_toupper; ++ this->_M_tolower = this->_M_c_locale_ctype->__ctype_tolower; ++ this->_M_table = this->_M_c_locale_ctype->__ctype_b; ++#endif ++ } ++ } ++ ++#ifdef _GLIBCXX_USE_WCHAR_T ++ ctype::__wmask_type ++ ctype::_M_convert_to_wmask(const mask __m) const ++ { ++ __wmask_type __ret; ++ switch (__m) ++ { ++ case space: ++ __ret = __wctype_l("space", _M_c_locale_ctype); ++ break; ++ case print: ++ __ret = __wctype_l("print", _M_c_locale_ctype); ++ break; ++ case cntrl: ++ __ret = __wctype_l("cntrl", _M_c_locale_ctype); ++ break; ++ case upper: ++ __ret = __wctype_l("upper", _M_c_locale_ctype); ++ break; ++ case lower: ++ __ret = __wctype_l("lower", _M_c_locale_ctype); ++ break; ++ case alpha: ++ __ret = __wctype_l("alpha", _M_c_locale_ctype); ++ break; ++ case digit: ++ __ret = __wctype_l("digit", _M_c_locale_ctype); ++ break; ++ case punct: ++ __ret = __wctype_l("punct", _M_c_locale_ctype); ++ break; ++ case xdigit: ++ __ret = __wctype_l("xdigit", _M_c_locale_ctype); ++ break; ++ case alnum: ++ __ret = __wctype_l("alnum", _M_c_locale_ctype); ++ break; ++ case graph: ++ __ret = __wctype_l("graph", _M_c_locale_ctype); ++ break; ++ default: ++ __ret = __wmask_type(); ++ } ++ return __ret; ++ } ++ ++ wchar_t ++ ctype::do_toupper(wchar_t __c) const ++ { return __towupper_l(__c, _M_c_locale_ctype); } ++ ++ const wchar_t* ++ ctype::do_toupper(wchar_t* __lo, const wchar_t* __hi) const ++ { ++ while (__lo < __hi) ++ { ++ *__lo = __towupper_l(*__lo, _M_c_locale_ctype); ++ ++__lo; ++ } ++ return __hi; ++ } ++ ++ wchar_t ++ ctype::do_tolower(wchar_t __c) const ++ { return __towlower_l(__c, _M_c_locale_ctype); } ++ ++ const wchar_t* ++ ctype::do_tolower(wchar_t* __lo, const wchar_t* __hi) const ++ { ++ while (__lo < __hi) ++ { ++ *__lo = __towlower_l(*__lo, _M_c_locale_ctype); ++ ++__lo; ++ } ++ return __hi; ++ } ++ ++ bool ++ ctype:: ++ do_is(mask __m, wchar_t __c) const ++ { ++ // Highest bitmask in ctype_base == 10, but extra in "C" ++ // library for blank. ++ bool __ret = false; ++ const size_t __bitmasksize = 11; ++ for (size_t __bitcur = 0; __bitcur <= __bitmasksize; ++__bitcur) ++ if (__m & _M_bit[__bitcur] ++ && __iswctype_l(__c, _M_wmask[__bitcur], _M_c_locale_ctype)) ++ { ++ __ret = true; ++ break; ++ } ++ return __ret; ++ } ++ ++ const wchar_t* ++ ctype:: ++ do_is(const wchar_t* __lo, const wchar_t* __hi, mask* __vec) const ++ { ++ for (; __lo < __hi; ++__vec, ++__lo) ++ { ++ // Highest bitmask in ctype_base == 10, but extra in "C" ++ // library for blank. 
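	// A sketch of the effect (illustrative only; the exact bits depend
	// on the C library's classification tables): for each character in
	// [__lo, __hi) the loop below ORs together every classification bit
	// that __iswctype_l reports, so an alphabetic letter typically ends
	// up with something like (alpha | print | graph | upper-or-lower)
	// set in its mask entry.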
++ const size_t __bitmasksize = 11; ++ mask __m = 0; ++ for (size_t __bitcur = 0; __bitcur <= __bitmasksize; ++__bitcur) ++ if (__iswctype_l(*__lo, _M_wmask[__bitcur], _M_c_locale_ctype)) ++ __m |= _M_bit[__bitcur]; ++ *__vec = __m; ++ } ++ return __hi; ++ } ++ ++ const wchar_t* ++ ctype:: ++ do_scan_is(mask __m, const wchar_t* __lo, const wchar_t* __hi) const ++ { ++ while (__lo < __hi && !this->do_is(__m, *__lo)) ++ ++__lo; ++ return __lo; ++ } ++ ++ const wchar_t* ++ ctype:: ++ do_scan_not(mask __m, const char_type* __lo, const char_type* __hi) const ++ { ++ while (__lo < __hi && this->do_is(__m, *__lo) != 0) ++ ++__lo; ++ return __lo; ++ } ++ ++ wchar_t ++ ctype:: ++ do_widen(char __c) const ++ { return _M_widen[static_cast(__c)]; } ++ ++ const char* ++ ctype:: ++ do_widen(const char* __lo, const char* __hi, wchar_t* __dest) const ++ { ++ while (__lo < __hi) ++ { ++ *__dest = _M_widen[static_cast(*__lo)]; ++ ++__lo; ++ ++__dest; ++ } ++ return __hi; ++ } ++ ++ char ++ ctype:: ++ do_narrow(wchar_t __wc, char __dfault) const ++ { ++ if (__wc >= 0 && __wc < 128 && _M_narrow_ok) ++ return _M_narrow[__wc]; ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(_M_c_locale_ctype); ++#endif ++ const int __c = wctob(__wc); ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#endif ++ return (__c == EOF ? __dfault : static_cast(__c)); ++ } ++ ++ const wchar_t* ++ ctype:: ++ do_narrow(const wchar_t* __lo, const wchar_t* __hi, char __dfault, ++ char* __dest) const ++ { ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(_M_c_locale_ctype); ++#endif ++ if (_M_narrow_ok) ++ while (__lo < __hi) ++ { ++ if (*__lo >= 0 && *__lo < 128) ++ *__dest = _M_narrow[*__lo]; ++ else ++ { ++ const int __c = wctob(*__lo); ++ *__dest = (__c == EOF ? __dfault : static_cast(__c)); ++ } ++ ++__lo; ++ ++__dest; ++ } ++ else ++ while (__lo < __hi) ++ { ++ const int __c = wctob(*__lo); ++ *__dest = (__c == EOF ? __dfault : static_cast(__c)); ++ ++__lo; ++ ++__dest; ++ } ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#endif ++ return __hi; ++ } ++ ++ void ++ ctype::_M_initialize_ctype() ++ { ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(_M_c_locale_ctype); ++#endif ++ wint_t __i; ++ for (__i = 0; __i < 128; ++__i) ++ { ++ const int __c = wctob(__i); ++ if (__c == EOF) ++ break; ++ else ++ _M_narrow[__i] = static_cast(__c); ++ } ++ if (__i == 128) ++ _M_narrow_ok = true; ++ else ++ _M_narrow_ok = false; ++ for (size_t __j = 0; ++ __j < sizeof(_M_widen) / sizeof(wint_t); ++__j) ++ _M_widen[__j] = btowc(__j); ++ ++ for (size_t __k = 0; __k <= 11; ++__k) ++ { ++ _M_bit[__k] = static_cast(_ISbit(__k)); ++ _M_wmask[__k] = _M_convert_to_wmask(_M_bit[__k]); ++ } ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#endif ++ } ++#endif // _GLIBCXX_USE_WCHAR_T ++} +diff --git a/libstdc++-v3/config/locale/uclibc/messages_members.cc b/libstdc++-v3/config/locale/uclibc/messages_members.cc +new file mode 100644 +index 0000000..13594d9 +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/messages_members.cc +@@ -0,0 +1,100 @@ ++// std::messages implementation details, GNU version -*- C++ -*- ++ ++// Copyright (C) 2001, 2002 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. 
++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. ++ ++// ++// ISO C++ 14882: 22.2.7.1.2 messages virtual functions ++// ++ ++// Written by Benjamin Kosnik ++ ++#include ++#include ++ ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning fix gettext stuff ++#endif ++#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ ++extern "C" char *__dcgettext(const char *domainname, ++ const char *msgid, int category); ++#undef gettext ++#define gettext(msgid) __dcgettext(NULL, msgid, LC_MESSAGES) ++#else ++#undef gettext ++#define gettext(msgid) (msgid) ++#endif ++ ++namespace std ++{ ++ // Specializations. ++ template<> ++ string ++ messages::do_get(catalog, int, int, const string& __dfault) const ++ { ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(_M_c_locale_messages); ++ const char* __msg = const_cast(gettext(__dfault.c_str())); ++ __uselocale(__old); ++ return string(__msg); ++#elif defined __UCLIBC_HAS_LOCALE__ ++ char* __old = strdup(setlocale(LC_ALL, NULL)); ++ setlocale(LC_ALL, _M_name_messages); ++ const char* __msg = gettext(__dfault.c_str()); ++ setlocale(LC_ALL, __old); ++ free(__old); ++ return string(__msg); ++#else ++ const char* __msg = gettext(__dfault.c_str()); ++ return string(__msg); ++#endif ++ } ++ ++#ifdef _GLIBCXX_USE_WCHAR_T ++ template<> ++ wstring ++ messages::do_get(catalog, int, int, const wstring& __dfault) const ++ { ++# ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(_M_c_locale_messages); ++ char* __msg = gettext(_M_convert_to_char(__dfault)); ++ __uselocale(__old); ++ return _M_convert_from_char(__msg); ++# elif defined __UCLIBC_HAS_LOCALE__ ++ char* __old = strdup(setlocale(LC_ALL, NULL)); ++ setlocale(LC_ALL, _M_name_messages); ++ char* __msg = gettext(_M_convert_to_char(__dfault)); ++ setlocale(LC_ALL, __old); ++ free(__old); ++ return _M_convert_from_char(__msg); ++# else ++ char* __msg = gettext(_M_convert_to_char(__dfault)); ++ return _M_convert_from_char(__msg); ++# endif ++ } ++#endif ++} +diff --git a/libstdc++-v3/config/locale/uclibc/messages_members.h b/libstdc++-v3/config/locale/uclibc/messages_members.h +new file mode 100644 +index 0000000..1424078 +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/messages_members.h +@@ -0,0 +1,118 @@ ++// std::messages implementation details, GNU version -*- C++ -*- ++ ++// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. 
This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. ++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. ++ ++// ++// ISO C++ 14882: 22.2.7.1.2 messages functions ++// ++ ++// Written by Benjamin Kosnik ++ ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning fix prototypes for *textdomain funcs ++#endif ++#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ ++extern "C" char *__textdomain(const char *domainname); ++extern "C" char *__bindtextdomain(const char *domainname, ++ const char *dirname); ++#else ++#undef __textdomain ++#undef __bindtextdomain ++#define __textdomain(D) ((void)0) ++#define __bindtextdomain(D,P) ((void)0) ++#endif ++ ++ // Non-virtual member functions. ++ template ++ messages<_CharT>::messages(size_t __refs) ++ : facet(__refs), _M_c_locale_messages(_S_get_c_locale()), ++ _M_name_messages(_S_get_c_name()) ++ { } ++ ++ template ++ messages<_CharT>::messages(__c_locale __cloc, const char* __s, ++ size_t __refs) ++ : facet(__refs), _M_c_locale_messages(_S_clone_c_locale(__cloc)), ++ _M_name_messages(__s) ++ { ++ char* __tmp = new char[std::strlen(__s) + 1]; ++ std::strcpy(__tmp, __s); ++ _M_name_messages = __tmp; ++ } ++ ++ template ++ typename messages<_CharT>::catalog ++ messages<_CharT>::open(const basic_string& __s, const locale& __loc, ++ const char* __dir) const ++ { ++ __bindtextdomain(__s.c_str(), __dir); ++ return this->do_open(__s, __loc); ++ } ++ ++ // Virtual member functions. ++ template ++ messages<_CharT>::~messages() ++ { ++ if (_M_name_messages != _S_get_c_name()) ++ delete [] _M_name_messages; ++ _S_destroy_c_locale(_M_c_locale_messages); ++ } ++ ++ template ++ typename messages<_CharT>::catalog ++ messages<_CharT>::do_open(const basic_string& __s, ++ const locale&) const ++ { ++ // No error checking is done, assume the catalog exists and can ++ // be used. 
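      // (In this model do_open() merely switches the default gettext
      //  domain and always returns catalog 0; do_get() later ignores the
      //  catalog id and translates the supplied default string directly.)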
++ __textdomain(__s.c_str()); ++ return 0; ++ } ++ ++ template ++ void ++ messages<_CharT>::do_close(catalog) const ++ { } ++ ++ // messages_byname ++ template ++ messages_byname<_CharT>::messages_byname(const char* __s, size_t __refs) ++ : messages<_CharT>(__refs) ++ { ++ if (this->_M_name_messages != locale::facet::_S_get_c_name()) ++ delete [] this->_M_name_messages; ++ char* __tmp = new char[std::strlen(__s) + 1]; ++ std::strcpy(__tmp, __s); ++ this->_M_name_messages = __tmp; ++ ++ if (std::strcmp(__s, "C") != 0 && std::strcmp(__s, "POSIX") != 0) ++ { ++ this->_S_destroy_c_locale(this->_M_c_locale_messages); ++ this->_S_create_c_locale(this->_M_c_locale_messages, __s); ++ } ++ } +diff --git a/libstdc++-v3/config/locale/uclibc/monetary_members.cc b/libstdc++-v3/config/locale/uclibc/monetary_members.cc +new file mode 100644 +index 0000000..aa52731 +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/monetary_members.cc +@@ -0,0 +1,692 @@ ++// std::moneypunct implementation details, GNU version -*- C++ -*- ++ ++// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. ++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. ++ ++// ++// ISO C++ 14882: 22.2.6.3.2 moneypunct virtual functions ++// ++ ++// Written by Benjamin Kosnik ++ ++#define _LIBC ++#include ++#undef _LIBC ++#include ++ ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning optimize this for uclibc ++#warning tailor for stub locale support ++#endif ++ ++#ifndef __UCLIBC_HAS_XLOCALE__ ++#define __nl_langinfo_l(N, L) nl_langinfo((N)) ++#endif ++ ++namespace std ++{ ++ // Construct and return valid pattern consisting of some combination of: ++ // space none symbol sign value ++ money_base::pattern ++ money_base::_S_construct_pattern(char __precedes, char __space, char __posn) ++ { ++ pattern __ret; ++ ++ // This insanely complicated routine attempts to construct a valid ++ // pattern for use with monyepunct. A couple of invariants: ++ ++ // if (__precedes) symbol -> value ++ // else value -> symbol ++ ++ // if (__space) space ++ // else none ++ ++ // none == never first ++ // space never first or last ++ ++ // Any elegant implementations of this are welcome. 
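    // Worked example (inputs assumed purely for illustration): with
    // __precedes = 1, __space = 1 and __posn = 1 -- "sign comes first,
    // symbol precedes the value, separated by a space" -- the case below
    // fills in
    //   __ret.field = { sign, symbol, space, value }
    // which money_put would render along the lines of "-$ 1.23"; with
    // __precedes = 0 the same case yields { sign, value, space, symbol }.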
++ switch (__posn) ++ { ++ case 0: ++ case 1: ++ // 1 The sign precedes the value and symbol. ++ __ret.field[0] = sign; ++ if (__space) ++ { ++ // Pattern starts with sign. ++ if (__precedes) ++ { ++ __ret.field[1] = symbol; ++ __ret.field[3] = value; ++ } ++ else ++ { ++ __ret.field[1] = value; ++ __ret.field[3] = symbol; ++ } ++ __ret.field[2] = space; ++ } ++ else ++ { ++ // Pattern starts with sign and ends with none. ++ if (__precedes) ++ { ++ __ret.field[1] = symbol; ++ __ret.field[2] = value; ++ } ++ else ++ { ++ __ret.field[1] = value; ++ __ret.field[2] = symbol; ++ } ++ __ret.field[3] = none; ++ } ++ break; ++ case 2: ++ // 2 The sign follows the value and symbol. ++ if (__space) ++ { ++ // Pattern either ends with sign. ++ if (__precedes) ++ { ++ __ret.field[0] = symbol; ++ __ret.field[2] = value; ++ } ++ else ++ { ++ __ret.field[0] = value; ++ __ret.field[2] = symbol; ++ } ++ __ret.field[1] = space; ++ __ret.field[3] = sign; ++ } ++ else ++ { ++ // Pattern ends with sign then none. ++ if (__precedes) ++ { ++ __ret.field[0] = symbol; ++ __ret.field[1] = value; ++ } ++ else ++ { ++ __ret.field[0] = value; ++ __ret.field[1] = symbol; ++ } ++ __ret.field[2] = sign; ++ __ret.field[3] = none; ++ } ++ break; ++ case 3: ++ // 3 The sign immediately precedes the symbol. ++ if (__precedes) ++ { ++ __ret.field[0] = sign; ++ __ret.field[1] = symbol; ++ if (__space) ++ { ++ __ret.field[2] = space; ++ __ret.field[3] = value; ++ } ++ else ++ { ++ __ret.field[2] = value; ++ __ret.field[3] = none; ++ } ++ } ++ else ++ { ++ __ret.field[0] = value; ++ if (__space) ++ { ++ __ret.field[1] = space; ++ __ret.field[2] = sign; ++ __ret.field[3] = symbol; ++ } ++ else ++ { ++ __ret.field[1] = sign; ++ __ret.field[2] = symbol; ++ __ret.field[3] = none; ++ } ++ } ++ break; ++ case 4: ++ // 4 The sign immediately follows the symbol. ++ if (__precedes) ++ { ++ __ret.field[0] = symbol; ++ __ret.field[1] = sign; ++ if (__space) ++ { ++ __ret.field[2] = space; ++ __ret.field[3] = value; ++ } ++ else ++ { ++ __ret.field[2] = value; ++ __ret.field[3] = none; ++ } ++ } ++ else ++ { ++ __ret.field[0] = value; ++ if (__space) ++ { ++ __ret.field[1] = space; ++ __ret.field[2] = symbol; ++ __ret.field[3] = sign; ++ } ++ else ++ { ++ __ret.field[1] = symbol; ++ __ret.field[2] = sign; ++ __ret.field[3] = none; ++ } ++ } ++ break; ++ default: ++ ; ++ } ++ return __ret; ++ } ++ ++ template<> ++ void ++ moneypunct::_M_initialize_moneypunct(__c_locale __cloc, ++ const char*) ++ { ++ if (!_M_data) ++ _M_data = new __moneypunct_cache; ++ ++ if (!__cloc) ++ { ++ // "C" locale ++ _M_data->_M_decimal_point = '.'; ++ _M_data->_M_thousands_sep = ','; ++ _M_data->_M_grouping = ""; ++ _M_data->_M_grouping_size = 0; ++ _M_data->_M_curr_symbol = ""; ++ _M_data->_M_curr_symbol_size = 0; ++ _M_data->_M_positive_sign = ""; ++ _M_data->_M_positive_sign_size = 0; ++ _M_data->_M_negative_sign = ""; ++ _M_data->_M_negative_sign_size = 0; ++ _M_data->_M_frac_digits = 0; ++ _M_data->_M_pos_format = money_base::_S_default_pattern; ++ _M_data->_M_neg_format = money_base::_S_default_pattern; ++ ++ for (size_t __i = 0; __i < money_base::_S_end; ++__i) ++ _M_data->_M_atoms[__i] = money_base::_S_atoms[__i]; ++ } ++ else ++ { ++ // Named locale. 
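	  // Everything below is taken from the C library's LC_MONETARY data
	  // via __nl_langinfo_l.  Illustrative values only (assumed, not
	  // read from any particular locale database): a typical European
	  // locale might report __INT_CURR_SYMBOL "EUR ",
	  // __MON_DECIMAL_POINT ",", __MON_THOUSANDS_SEP "." and
	  // __INT_FRAC_DIGITS 2, giving "1.234,56 EUR"-style output.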
++ _M_data->_M_decimal_point = *(__nl_langinfo_l(__MON_DECIMAL_POINT, ++ __cloc)); ++ _M_data->_M_thousands_sep = *(__nl_langinfo_l(__MON_THOUSANDS_SEP, ++ __cloc)); ++ _M_data->_M_grouping = __nl_langinfo_l(__MON_GROUPING, __cloc); ++ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); ++ _M_data->_M_positive_sign = __nl_langinfo_l(__POSITIVE_SIGN, __cloc); ++ _M_data->_M_positive_sign_size = strlen(_M_data->_M_positive_sign); ++ ++ char __nposn = *(__nl_langinfo_l(__INT_N_SIGN_POSN, __cloc)); ++ if (!__nposn) ++ _M_data->_M_negative_sign = "()"; ++ else ++ _M_data->_M_negative_sign = __nl_langinfo_l(__NEGATIVE_SIGN, ++ __cloc); ++ _M_data->_M_negative_sign_size = strlen(_M_data->_M_negative_sign); ++ ++ // _Intl == true ++ _M_data->_M_curr_symbol = __nl_langinfo_l(__INT_CURR_SYMBOL, __cloc); ++ _M_data->_M_curr_symbol_size = strlen(_M_data->_M_curr_symbol); ++ _M_data->_M_frac_digits = *(__nl_langinfo_l(__INT_FRAC_DIGITS, ++ __cloc)); ++ char __pprecedes = *(__nl_langinfo_l(__INT_P_CS_PRECEDES, __cloc)); ++ char __pspace = *(__nl_langinfo_l(__INT_P_SEP_BY_SPACE, __cloc)); ++ char __pposn = *(__nl_langinfo_l(__INT_P_SIGN_POSN, __cloc)); ++ _M_data->_M_pos_format = _S_construct_pattern(__pprecedes, __pspace, ++ __pposn); ++ char __nprecedes = *(__nl_langinfo_l(__INT_N_CS_PRECEDES, __cloc)); ++ char __nspace = *(__nl_langinfo_l(__INT_N_SEP_BY_SPACE, __cloc)); ++ _M_data->_M_neg_format = _S_construct_pattern(__nprecedes, __nspace, ++ __nposn); ++ } ++ } ++ ++ template<> ++ void ++ moneypunct::_M_initialize_moneypunct(__c_locale __cloc, ++ const char*) ++ { ++ if (!_M_data) ++ _M_data = new __moneypunct_cache; ++ ++ if (!__cloc) ++ { ++ // "C" locale ++ _M_data->_M_decimal_point = '.'; ++ _M_data->_M_thousands_sep = ','; ++ _M_data->_M_grouping = ""; ++ _M_data->_M_grouping_size = 0; ++ _M_data->_M_curr_symbol = ""; ++ _M_data->_M_curr_symbol_size = 0; ++ _M_data->_M_positive_sign = ""; ++ _M_data->_M_positive_sign_size = 0; ++ _M_data->_M_negative_sign = ""; ++ _M_data->_M_negative_sign_size = 0; ++ _M_data->_M_frac_digits = 0; ++ _M_data->_M_pos_format = money_base::_S_default_pattern; ++ _M_data->_M_neg_format = money_base::_S_default_pattern; ++ ++ for (size_t __i = 0; __i < money_base::_S_end; ++__i) ++ _M_data->_M_atoms[__i] = money_base::_S_atoms[__i]; ++ } ++ else ++ { ++ // Named locale. 
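	  // Same shape as the international specialization above, except
	  // the local LC_MONETARY items (__CURRENCY_SYMBOL, __FRAC_DIGITS
	  // and the __P_*/__N_* sign/position fields) are queried instead
	  // of the __INT_* ones, so a locale whose int_curr_symbol is
	  // "USD " would typically hand back "$" here (values assumed for
	  // illustration).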
++ _M_data->_M_decimal_point = *(__nl_langinfo_l(__MON_DECIMAL_POINT, ++ __cloc)); ++ _M_data->_M_thousands_sep = *(__nl_langinfo_l(__MON_THOUSANDS_SEP, ++ __cloc)); ++ _M_data->_M_grouping = __nl_langinfo_l(__MON_GROUPING, __cloc); ++ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); ++ _M_data->_M_positive_sign = __nl_langinfo_l(__POSITIVE_SIGN, __cloc); ++ _M_data->_M_positive_sign_size = strlen(_M_data->_M_positive_sign); ++ ++ char __nposn = *(__nl_langinfo_l(__N_SIGN_POSN, __cloc)); ++ if (!__nposn) ++ _M_data->_M_negative_sign = "()"; ++ else ++ _M_data->_M_negative_sign = __nl_langinfo_l(__NEGATIVE_SIGN, ++ __cloc); ++ _M_data->_M_negative_sign_size = strlen(_M_data->_M_negative_sign); ++ ++ // _Intl == false ++ _M_data->_M_curr_symbol = __nl_langinfo_l(__CURRENCY_SYMBOL, __cloc); ++ _M_data->_M_curr_symbol_size = strlen(_M_data->_M_curr_symbol); ++ _M_data->_M_frac_digits = *(__nl_langinfo_l(__FRAC_DIGITS, __cloc)); ++ char __pprecedes = *(__nl_langinfo_l(__P_CS_PRECEDES, __cloc)); ++ char __pspace = *(__nl_langinfo_l(__P_SEP_BY_SPACE, __cloc)); ++ char __pposn = *(__nl_langinfo_l(__P_SIGN_POSN, __cloc)); ++ _M_data->_M_pos_format = _S_construct_pattern(__pprecedes, __pspace, ++ __pposn); ++ char __nprecedes = *(__nl_langinfo_l(__N_CS_PRECEDES, __cloc)); ++ char __nspace = *(__nl_langinfo_l(__N_SEP_BY_SPACE, __cloc)); ++ _M_data->_M_neg_format = _S_construct_pattern(__nprecedes, __nspace, ++ __nposn); ++ } ++ } ++ ++ template<> ++ moneypunct::~moneypunct() ++ { delete _M_data; } ++ ++ template<> ++ moneypunct::~moneypunct() ++ { delete _M_data; } ++ ++#ifdef _GLIBCXX_USE_WCHAR_T ++ template<> ++ void ++ moneypunct::_M_initialize_moneypunct(__c_locale __cloc, ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ const char*) ++#else ++ const char* __name) ++#endif ++ { ++ if (!_M_data) ++ _M_data = new __moneypunct_cache; ++ ++ if (!__cloc) ++ { ++ // "C" locale ++ _M_data->_M_decimal_point = L'.'; ++ _M_data->_M_thousands_sep = L','; ++ _M_data->_M_grouping = ""; ++ _M_data->_M_grouping_size = 0; ++ _M_data->_M_curr_symbol = L""; ++ _M_data->_M_curr_symbol_size = 0; ++ _M_data->_M_positive_sign = L""; ++ _M_data->_M_positive_sign_size = 0; ++ _M_data->_M_negative_sign = L""; ++ _M_data->_M_negative_sign_size = 0; ++ _M_data->_M_frac_digits = 0; ++ _M_data->_M_pos_format = money_base::_S_default_pattern; ++ _M_data->_M_neg_format = money_base::_S_default_pattern; ++ ++ // Use ctype::widen code without the facet... ++ for (size_t __i = 0; __i < money_base::_S_end; ++__i) ++ _M_data->_M_atoms[__i] = ++ static_cast(money_base::_S_atoms[__i]); ++ } ++ else ++ { ++ // Named locale. ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(__cloc); ++#else ++ // Switch to named locale so that mbsrtowcs will work. ++ char* __old = strdup(setlocale(LC_ALL, NULL)); ++ setlocale(LC_ALL, __name); ++#endif ++ ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning fix this... 
should be monetary ++#endif ++#ifdef __UCLIBC__ ++# ifdef __UCLIBC_HAS_XLOCALE__ ++ _M_data->_M_decimal_point = __cloc->decimal_point_wc; ++ _M_data->_M_thousands_sep = __cloc->thousands_sep_wc; ++# else ++ _M_data->_M_decimal_point = __global_locale->decimal_point_wc; ++ _M_data->_M_thousands_sep = __global_locale->thousands_sep_wc; ++# endif ++#else ++ union { char *__s; wchar_t __w; } __u; ++ __u.__s = __nl_langinfo_l(_NL_MONETARY_DECIMAL_POINT_WC, __cloc); ++ _M_data->_M_decimal_point = __u.__w; ++ ++ __u.__s = __nl_langinfo_l(_NL_MONETARY_THOUSANDS_SEP_WC, __cloc); ++ _M_data->_M_thousands_sep = __u.__w; ++#endif ++ _M_data->_M_grouping = __nl_langinfo_l(__MON_GROUPING, __cloc); ++ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); ++ ++ const char* __cpossign = __nl_langinfo_l(__POSITIVE_SIGN, __cloc); ++ const char* __cnegsign = __nl_langinfo_l(__NEGATIVE_SIGN, __cloc); ++ const char* __ccurr = __nl_langinfo_l(__INT_CURR_SYMBOL, __cloc); ++ ++ wchar_t* __wcs_ps = 0; ++ wchar_t* __wcs_ns = 0; ++ const char __nposn = *(__nl_langinfo_l(__INT_N_SIGN_POSN, __cloc)); ++ try ++ { ++ mbstate_t __state; ++ size_t __len = strlen(__cpossign); ++ if (__len) ++ { ++ ++__len; ++ memset(&__state, 0, sizeof(mbstate_t)); ++ __wcs_ps = new wchar_t[__len]; ++ mbsrtowcs(__wcs_ps, &__cpossign, __len, &__state); ++ _M_data->_M_positive_sign = __wcs_ps; ++ } ++ else ++ _M_data->_M_positive_sign = L""; ++ _M_data->_M_positive_sign_size = wcslen(_M_data->_M_positive_sign); ++ ++ __len = strlen(__cnegsign); ++ if (!__nposn) ++ _M_data->_M_negative_sign = L"()"; ++ else if (__len) ++ { ++ ++__len; ++ memset(&__state, 0, sizeof(mbstate_t)); ++ __wcs_ns = new wchar_t[__len]; ++ mbsrtowcs(__wcs_ns, &__cnegsign, __len, &__state); ++ _M_data->_M_negative_sign = __wcs_ns; ++ } ++ else ++ _M_data->_M_negative_sign = L""; ++ _M_data->_M_negative_sign_size = wcslen(_M_data->_M_negative_sign); ++ ++ // _Intl == true. ++ __len = strlen(__ccurr); ++ if (__len) ++ { ++ ++__len; ++ memset(&__state, 0, sizeof(mbstate_t)); ++ wchar_t* __wcs = new wchar_t[__len]; ++ mbsrtowcs(__wcs, &__ccurr, __len, &__state); ++ _M_data->_M_curr_symbol = __wcs; ++ } ++ else ++ _M_data->_M_curr_symbol = L""; ++ _M_data->_M_curr_symbol_size = wcslen(_M_data->_M_curr_symbol); ++ } ++ catch (...) 
++ { ++ delete _M_data; ++ _M_data = 0; ++ delete __wcs_ps; ++ delete __wcs_ns; ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#else ++ setlocale(LC_ALL, __old); ++ free(__old); ++#endif ++ __throw_exception_again; ++ } ++ ++ _M_data->_M_frac_digits = *(__nl_langinfo_l(__INT_FRAC_DIGITS, ++ __cloc)); ++ char __pprecedes = *(__nl_langinfo_l(__INT_P_CS_PRECEDES, __cloc)); ++ char __pspace = *(__nl_langinfo_l(__INT_P_SEP_BY_SPACE, __cloc)); ++ char __pposn = *(__nl_langinfo_l(__INT_P_SIGN_POSN, __cloc)); ++ _M_data->_M_pos_format = _S_construct_pattern(__pprecedes, __pspace, ++ __pposn); ++ char __nprecedes = *(__nl_langinfo_l(__INT_N_CS_PRECEDES, __cloc)); ++ char __nspace = *(__nl_langinfo_l(__INT_N_SEP_BY_SPACE, __cloc)); ++ _M_data->_M_neg_format = _S_construct_pattern(__nprecedes, __nspace, ++ __nposn); ++ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#else ++ setlocale(LC_ALL, __old); ++ free(__old); ++#endif ++ } ++ } ++ ++ template<> ++ void ++ moneypunct::_M_initialize_moneypunct(__c_locale __cloc, ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ const char*) ++#else ++ const char* __name) ++#endif ++ { ++ if (!_M_data) ++ _M_data = new __moneypunct_cache; ++ ++ if (!__cloc) ++ { ++ // "C" locale ++ _M_data->_M_decimal_point = L'.'; ++ _M_data->_M_thousands_sep = L','; ++ _M_data->_M_grouping = ""; ++ _M_data->_M_grouping_size = 0; ++ _M_data->_M_curr_symbol = L""; ++ _M_data->_M_curr_symbol_size = 0; ++ _M_data->_M_positive_sign = L""; ++ _M_data->_M_positive_sign_size = 0; ++ _M_data->_M_negative_sign = L""; ++ _M_data->_M_negative_sign_size = 0; ++ _M_data->_M_frac_digits = 0; ++ _M_data->_M_pos_format = money_base::_S_default_pattern; ++ _M_data->_M_neg_format = money_base::_S_default_pattern; ++ ++ // Use ctype::widen code without the facet... ++ for (size_t __i = 0; __i < money_base::_S_end; ++__i) ++ _M_data->_M_atoms[__i] = ++ static_cast(money_base::_S_atoms[__i]); ++ } ++ else ++ { ++ // Named locale. ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __c_locale __old = __uselocale(__cloc); ++#else ++ // Switch to named locale so that mbsrtowcs will work. ++ char* __old = strdup(setlocale(LC_ALL, NULL)); ++ setlocale(LC_ALL, __name); ++#endif ++ ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning fix this... 
should be monetary ++#endif ++#ifdef __UCLIBC__ ++# ifdef __UCLIBC_HAS_XLOCALE__ ++ _M_data->_M_decimal_point = __cloc->decimal_point_wc; ++ _M_data->_M_thousands_sep = __cloc->thousands_sep_wc; ++# else ++ _M_data->_M_decimal_point = __global_locale->decimal_point_wc; ++ _M_data->_M_thousands_sep = __global_locale->thousands_sep_wc; ++# endif ++#else ++ union { char *__s; wchar_t __w; } __u; ++ __u.__s = __nl_langinfo_l(_NL_MONETARY_DECIMAL_POINT_WC, __cloc); ++ _M_data->_M_decimal_point = __u.__w; ++ ++ __u.__s = __nl_langinfo_l(_NL_MONETARY_THOUSANDS_SEP_WC, __cloc); ++ _M_data->_M_thousands_sep = __u.__w; ++#endif ++ _M_data->_M_grouping = __nl_langinfo_l(__MON_GROUPING, __cloc); ++ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); ++ ++ const char* __cpossign = __nl_langinfo_l(__POSITIVE_SIGN, __cloc); ++ const char* __cnegsign = __nl_langinfo_l(__NEGATIVE_SIGN, __cloc); ++ const char* __ccurr = __nl_langinfo_l(__CURRENCY_SYMBOL, __cloc); ++ ++ wchar_t* __wcs_ps = 0; ++ wchar_t* __wcs_ns = 0; ++ const char __nposn = *(__nl_langinfo_l(__N_SIGN_POSN, __cloc)); ++ try ++ { ++ mbstate_t __state; ++ size_t __len; ++ __len = strlen(__cpossign); ++ if (__len) ++ { ++ ++__len; ++ memset(&__state, 0, sizeof(mbstate_t)); ++ __wcs_ps = new wchar_t[__len]; ++ mbsrtowcs(__wcs_ps, &__cpossign, __len, &__state); ++ _M_data->_M_positive_sign = __wcs_ps; ++ } ++ else ++ _M_data->_M_positive_sign = L""; ++ _M_data->_M_positive_sign_size = wcslen(_M_data->_M_positive_sign); ++ ++ __len = strlen(__cnegsign); ++ if (!__nposn) ++ _M_data->_M_negative_sign = L"()"; ++ else if (__len) ++ { ++ ++__len; ++ memset(&__state, 0, sizeof(mbstate_t)); ++ __wcs_ns = new wchar_t[__len]; ++ mbsrtowcs(__wcs_ns, &__cnegsign, __len, &__state); ++ _M_data->_M_negative_sign = __wcs_ns; ++ } ++ else ++ _M_data->_M_negative_sign = L""; ++ _M_data->_M_negative_sign_size = wcslen(_M_data->_M_negative_sign); ++ ++ // _Intl == true. ++ __len = strlen(__ccurr); ++ if (__len) ++ { ++ ++__len; ++ memset(&__state, 0, sizeof(mbstate_t)); ++ wchar_t* __wcs = new wchar_t[__len]; ++ mbsrtowcs(__wcs, &__ccurr, __len, &__state); ++ _M_data->_M_curr_symbol = __wcs; ++ } ++ else ++ _M_data->_M_curr_symbol = L""; ++ _M_data->_M_curr_symbol_size = wcslen(_M_data->_M_curr_symbol); ++ } ++ catch (...) 
++ { ++ delete _M_data; ++ _M_data = 0; ++ delete __wcs_ps; ++ delete __wcs_ns; ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#else ++ setlocale(LC_ALL, __old); ++ free(__old); ++#endif ++ __throw_exception_again; ++ } ++ ++ _M_data->_M_frac_digits = *(__nl_langinfo_l(__FRAC_DIGITS, __cloc)); ++ char __pprecedes = *(__nl_langinfo_l(__P_CS_PRECEDES, __cloc)); ++ char __pspace = *(__nl_langinfo_l(__P_SEP_BY_SPACE, __cloc)); ++ char __pposn = *(__nl_langinfo_l(__P_SIGN_POSN, __cloc)); ++ _M_data->_M_pos_format = _S_construct_pattern(__pprecedes, __pspace, ++ __pposn); ++ char __nprecedes = *(__nl_langinfo_l(__N_CS_PRECEDES, __cloc)); ++ char __nspace = *(__nl_langinfo_l(__N_SEP_BY_SPACE, __cloc)); ++ _M_data->_M_neg_format = _S_construct_pattern(__nprecedes, __nspace, ++ __nposn); ++ ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __uselocale(__old); ++#else ++ setlocale(LC_ALL, __old); ++ free(__old); ++#endif ++ } ++ } ++ ++ template<> ++ moneypunct::~moneypunct() ++ { ++ if (_M_data->_M_positive_sign_size) ++ delete [] _M_data->_M_positive_sign; ++ if (_M_data->_M_negative_sign_size ++ && wcscmp(_M_data->_M_negative_sign, L"()") != 0) ++ delete [] _M_data->_M_negative_sign; ++ if (_M_data->_M_curr_symbol_size) ++ delete [] _M_data->_M_curr_symbol; ++ delete _M_data; ++ } ++ ++ template<> ++ moneypunct::~moneypunct() ++ { ++ if (_M_data->_M_positive_sign_size) ++ delete [] _M_data->_M_positive_sign; ++ if (_M_data->_M_negative_sign_size ++ && wcscmp(_M_data->_M_negative_sign, L"()") != 0) ++ delete [] _M_data->_M_negative_sign; ++ if (_M_data->_M_curr_symbol_size) ++ delete [] _M_data->_M_curr_symbol; ++ delete _M_data; ++ } ++#endif ++} +diff --git a/libstdc++-v3/config/locale/uclibc/numeric_members.cc b/libstdc++-v3/config/locale/uclibc/numeric_members.cc +new file mode 100644 +index 0000000..883ec1a +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/numeric_members.cc +@@ -0,0 +1,160 @@ ++// std::numpunct implementation details, GNU version -*- C++ -*- ++ ++// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. ++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. 
++ ++// ++// ISO C++ 14882: 22.2.3.1.2 numpunct virtual functions ++// ++ ++// Written by Benjamin Kosnik ++ ++#define _LIBC ++#include ++#undef _LIBC ++#include ++ ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning tailor for stub locale support ++#endif ++#ifndef __UCLIBC_HAS_XLOCALE__ ++#define __nl_langinfo_l(N, L) nl_langinfo((N)) ++#endif ++ ++namespace std ++{ ++ template<> ++ void ++ numpunct::_M_initialize_numpunct(__c_locale __cloc) ++ { ++ if (!_M_data) ++ _M_data = new __numpunct_cache; ++ ++ if (!__cloc) ++ { ++ // "C" locale ++ _M_data->_M_grouping = ""; ++ _M_data->_M_grouping_size = 0; ++ _M_data->_M_use_grouping = false; ++ ++ _M_data->_M_decimal_point = '.'; ++ _M_data->_M_thousands_sep = ','; ++ ++ for (size_t __i = 0; __i < __num_base::_S_oend; ++__i) ++ _M_data->_M_atoms_out[__i] = __num_base::_S_atoms_out[__i]; ++ ++ for (size_t __j = 0; __j < __num_base::_S_iend; ++__j) ++ _M_data->_M_atoms_in[__j] = __num_base::_S_atoms_in[__j]; ++ } ++ else ++ { ++ // Named locale. ++ _M_data->_M_decimal_point = *(__nl_langinfo_l(DECIMAL_POINT, ++ __cloc)); ++ _M_data->_M_thousands_sep = *(__nl_langinfo_l(THOUSANDS_SEP, ++ __cloc)); ++ ++ // Check for NULL, which implies no grouping. ++ if (_M_data->_M_thousands_sep == '\0') ++ _M_data->_M_grouping = ""; ++ else ++ _M_data->_M_grouping = __nl_langinfo_l(GROUPING, __cloc); ++ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); ++ } ++ ++ // NB: There is no way to extact this info from posix locales. ++ // _M_truename = __nl_langinfo_l(YESSTR, __cloc); ++ _M_data->_M_truename = "true"; ++ _M_data->_M_truename_size = 4; ++ // _M_falsename = __nl_langinfo_l(NOSTR, __cloc); ++ _M_data->_M_falsename = "false"; ++ _M_data->_M_falsename_size = 5; ++ } ++ ++ template<> ++ numpunct::~numpunct() ++ { delete _M_data; } ++ ++#ifdef _GLIBCXX_USE_WCHAR_T ++ template<> ++ void ++ numpunct::_M_initialize_numpunct(__c_locale __cloc) ++ { ++ if (!_M_data) ++ _M_data = new __numpunct_cache; ++ ++ if (!__cloc) ++ { ++ // "C" locale ++ _M_data->_M_grouping = ""; ++ _M_data->_M_grouping_size = 0; ++ _M_data->_M_use_grouping = false; ++ ++ _M_data->_M_decimal_point = L'.'; ++ _M_data->_M_thousands_sep = L','; ++ ++ // Use ctype::widen code without the facet... ++ for (size_t __i = 0; __i < __num_base::_S_oend; ++__i) ++ _M_data->_M_atoms_out[__i] = ++ static_cast(__num_base::_S_atoms_out[__i]); ++ ++ for (size_t __j = 0; __j < __num_base::_S_iend; ++__j) ++ _M_data->_M_atoms_in[__j] = ++ static_cast(__num_base::_S_atoms_in[__j]); ++ } ++ else ++ { ++ // Named locale. ++ // NB: In the GNU model wchar_t is always 32 bit wide. ++ union { char *__s; wchar_t __w; } __u; ++ __u.__s = __nl_langinfo_l(_NL_NUMERIC_DECIMAL_POINT_WC, __cloc); ++ _M_data->_M_decimal_point = __u.__w; ++ ++ __u.__s = __nl_langinfo_l(_NL_NUMERIC_THOUSANDS_SEP_WC, __cloc); ++ _M_data->_M_thousands_sep = __u.__w; ++ ++ if (_M_data->_M_thousands_sep == L'\0') ++ _M_data->_M_grouping = ""; ++ else ++ _M_data->_M_grouping = __nl_langinfo_l(GROUPING, __cloc); ++ _M_data->_M_grouping_size = strlen(_M_data->_M_grouping); ++ } ++ ++ // NB: There is no way to extact this info from posix locales. 
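      // The commented-out YESSTR/NOSTR lookups below are kept only as a
      // hint; the boolean names are hard-coded instead.  They are what
      // num_put emits under std::boolalpha, e.g. (sketch):
      //   std::wcout << std::boolalpha << true;   // prints "true"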
++ // _M_truename = __nl_langinfo_l(YESSTR, __cloc); ++ _M_data->_M_truename = L"true"; ++ _M_data->_M_truename_size = 4; ++ // _M_falsename = __nl_langinfo_l(NOSTR, __cloc); ++ _M_data->_M_falsename = L"false"; ++ _M_data->_M_falsename_size = 5; ++ } ++ ++ template<> ++ numpunct::~numpunct() ++ { delete _M_data; } ++ #endif ++} +diff --git a/libstdc++-v3/config/locale/uclibc/time_members.cc b/libstdc++-v3/config/locale/uclibc/time_members.cc +new file mode 100644 +index 0000000..e0707d7 +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/time_members.cc +@@ -0,0 +1,406 @@ ++// std::time_get, std::time_put implementation, GNU version -*- C++ -*- ++ ++// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. ++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. ++ ++// ++// ISO C++ 14882: 22.2.5.1.2 - time_get virtual functions ++// ISO C++ 14882: 22.2.5.3.2 - time_put virtual functions ++// ++ ++// Written by Benjamin Kosnik ++ ++#include ++#include ++ ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning tailor for stub locale support ++#endif ++#ifndef __UCLIBC_HAS_XLOCALE__ ++#define __nl_langinfo_l(N, L) nl_langinfo((N)) ++#endif ++ ++namespace std ++{ ++ template<> ++ void ++ __timepunct:: ++ _M_put(char* __s, size_t __maxlen, const char* __format, ++ const tm* __tm) const ++ { ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ const size_t __len = __strftime_l(__s, __maxlen, __format, __tm, ++ _M_c_locale_timepunct); ++#else ++ char* __old = strdup(setlocale(LC_ALL, NULL)); ++ setlocale(LC_ALL, _M_name_timepunct); ++ const size_t __len = strftime(__s, __maxlen, __format, __tm); ++ setlocale(LC_ALL, __old); ++ free(__old); ++#endif ++ // Make sure __s is null terminated. 
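    // (strftime-family calls return 0 both when the formatted result is
    //  empty and when __maxlen is too small, so an empty string is
    //  stored in either case rather than leaving __s unterminated.)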
++ if (__len == 0) ++ __s[0] = '\0'; ++ } ++ ++ template<> ++ void ++ __timepunct::_M_initialize_timepunct(__c_locale __cloc) ++ { ++ if (!_M_data) ++ _M_data = new __timepunct_cache; ++ ++ if (!__cloc) ++ { ++ // "C" locale ++ _M_c_locale_timepunct = _S_get_c_locale(); ++ ++ _M_data->_M_date_format = "%m/%d/%y"; ++ _M_data->_M_date_era_format = "%m/%d/%y"; ++ _M_data->_M_time_format = "%H:%M:%S"; ++ _M_data->_M_time_era_format = "%H:%M:%S"; ++ _M_data->_M_date_time_format = ""; ++ _M_data->_M_date_time_era_format = ""; ++ _M_data->_M_am = "AM"; ++ _M_data->_M_pm = "PM"; ++ _M_data->_M_am_pm_format = ""; ++ ++ // Day names, starting with "C"'s Sunday. ++ _M_data->_M_day1 = "Sunday"; ++ _M_data->_M_day2 = "Monday"; ++ _M_data->_M_day3 = "Tuesday"; ++ _M_data->_M_day4 = "Wednesday"; ++ _M_data->_M_day5 = "Thursday"; ++ _M_data->_M_day6 = "Friday"; ++ _M_data->_M_day7 = "Saturday"; ++ ++ // Abbreviated day names, starting with "C"'s Sun. ++ _M_data->_M_aday1 = "Sun"; ++ _M_data->_M_aday2 = "Mon"; ++ _M_data->_M_aday3 = "Tue"; ++ _M_data->_M_aday4 = "Wed"; ++ _M_data->_M_aday5 = "Thu"; ++ _M_data->_M_aday6 = "Fri"; ++ _M_data->_M_aday7 = "Sat"; ++ ++ // Month names, starting with "C"'s January. ++ _M_data->_M_month01 = "January"; ++ _M_data->_M_month02 = "February"; ++ _M_data->_M_month03 = "March"; ++ _M_data->_M_month04 = "April"; ++ _M_data->_M_month05 = "May"; ++ _M_data->_M_month06 = "June"; ++ _M_data->_M_month07 = "July"; ++ _M_data->_M_month08 = "August"; ++ _M_data->_M_month09 = "September"; ++ _M_data->_M_month10 = "October"; ++ _M_data->_M_month11 = "November"; ++ _M_data->_M_month12 = "December"; ++ ++ // Abbreviated month names, starting with "C"'s Jan. ++ _M_data->_M_amonth01 = "Jan"; ++ _M_data->_M_amonth02 = "Feb"; ++ _M_data->_M_amonth03 = "Mar"; ++ _M_data->_M_amonth04 = "Apr"; ++ _M_data->_M_amonth05 = "May"; ++ _M_data->_M_amonth06 = "Jun"; ++ _M_data->_M_amonth07 = "Jul"; ++ _M_data->_M_amonth08 = "Aug"; ++ _M_data->_M_amonth09 = "Sep"; ++ _M_data->_M_amonth10 = "Oct"; ++ _M_data->_M_amonth11 = "Nov"; ++ _M_data->_M_amonth12 = "Dec"; ++ } ++ else ++ { ++ _M_c_locale_timepunct = _S_clone_c_locale(__cloc); ++ ++ _M_data->_M_date_format = __nl_langinfo_l(D_FMT, __cloc); ++ _M_data->_M_date_era_format = __nl_langinfo_l(ERA_D_FMT, __cloc); ++ _M_data->_M_time_format = __nl_langinfo_l(T_FMT, __cloc); ++ _M_data->_M_time_era_format = __nl_langinfo_l(ERA_T_FMT, __cloc); ++ _M_data->_M_date_time_format = __nl_langinfo_l(D_T_FMT, __cloc); ++ _M_data->_M_date_time_era_format = __nl_langinfo_l(ERA_D_T_FMT, ++ __cloc); ++ _M_data->_M_am = __nl_langinfo_l(AM_STR, __cloc); ++ _M_data->_M_pm = __nl_langinfo_l(PM_STR, __cloc); ++ _M_data->_M_am_pm_format = __nl_langinfo_l(T_FMT_AMPM, __cloc); ++ ++ // Day names, starting with "C"'s Sunday. ++ _M_data->_M_day1 = __nl_langinfo_l(DAY_1, __cloc); ++ _M_data->_M_day2 = __nl_langinfo_l(DAY_2, __cloc); ++ _M_data->_M_day3 = __nl_langinfo_l(DAY_3, __cloc); ++ _M_data->_M_day4 = __nl_langinfo_l(DAY_4, __cloc); ++ _M_data->_M_day5 = __nl_langinfo_l(DAY_5, __cloc); ++ _M_data->_M_day6 = __nl_langinfo_l(DAY_6, __cloc); ++ _M_data->_M_day7 = __nl_langinfo_l(DAY_7, __cloc); ++ ++ // Abbreviated day names, starting with "C"'s Sun. 
++ _M_data->_M_aday1 = __nl_langinfo_l(ABDAY_1, __cloc); ++ _M_data->_M_aday2 = __nl_langinfo_l(ABDAY_2, __cloc); ++ _M_data->_M_aday3 = __nl_langinfo_l(ABDAY_3, __cloc); ++ _M_data->_M_aday4 = __nl_langinfo_l(ABDAY_4, __cloc); ++ _M_data->_M_aday5 = __nl_langinfo_l(ABDAY_5, __cloc); ++ _M_data->_M_aday6 = __nl_langinfo_l(ABDAY_6, __cloc); ++ _M_data->_M_aday7 = __nl_langinfo_l(ABDAY_7, __cloc); ++ ++ // Month names, starting with "C"'s January. ++ _M_data->_M_month01 = __nl_langinfo_l(MON_1, __cloc); ++ _M_data->_M_month02 = __nl_langinfo_l(MON_2, __cloc); ++ _M_data->_M_month03 = __nl_langinfo_l(MON_3, __cloc); ++ _M_data->_M_month04 = __nl_langinfo_l(MON_4, __cloc); ++ _M_data->_M_month05 = __nl_langinfo_l(MON_5, __cloc); ++ _M_data->_M_month06 = __nl_langinfo_l(MON_6, __cloc); ++ _M_data->_M_month07 = __nl_langinfo_l(MON_7, __cloc); ++ _M_data->_M_month08 = __nl_langinfo_l(MON_8, __cloc); ++ _M_data->_M_month09 = __nl_langinfo_l(MON_9, __cloc); ++ _M_data->_M_month10 = __nl_langinfo_l(MON_10, __cloc); ++ _M_data->_M_month11 = __nl_langinfo_l(MON_11, __cloc); ++ _M_data->_M_month12 = __nl_langinfo_l(MON_12, __cloc); ++ ++ // Abbreviated month names, starting with "C"'s Jan. ++ _M_data->_M_amonth01 = __nl_langinfo_l(ABMON_1, __cloc); ++ _M_data->_M_amonth02 = __nl_langinfo_l(ABMON_2, __cloc); ++ _M_data->_M_amonth03 = __nl_langinfo_l(ABMON_3, __cloc); ++ _M_data->_M_amonth04 = __nl_langinfo_l(ABMON_4, __cloc); ++ _M_data->_M_amonth05 = __nl_langinfo_l(ABMON_5, __cloc); ++ _M_data->_M_amonth06 = __nl_langinfo_l(ABMON_6, __cloc); ++ _M_data->_M_amonth07 = __nl_langinfo_l(ABMON_7, __cloc); ++ _M_data->_M_amonth08 = __nl_langinfo_l(ABMON_8, __cloc); ++ _M_data->_M_amonth09 = __nl_langinfo_l(ABMON_9, __cloc); ++ _M_data->_M_amonth10 = __nl_langinfo_l(ABMON_10, __cloc); ++ _M_data->_M_amonth11 = __nl_langinfo_l(ABMON_11, __cloc); ++ _M_data->_M_amonth12 = __nl_langinfo_l(ABMON_12, __cloc); ++ } ++ } ++ ++#ifdef _GLIBCXX_USE_WCHAR_T ++ template<> ++ void ++ __timepunct:: ++ _M_put(wchar_t* __s, size_t __maxlen, const wchar_t* __format, ++ const tm* __tm) const ++ { ++#ifdef __UCLIBC_HAS_XLOCALE__ ++ __wcsftime_l(__s, __maxlen, __format, __tm, _M_c_locale_timepunct); ++ const size_t __len = __wcsftime_l(__s, __maxlen, __format, __tm, ++ _M_c_locale_timepunct); ++#else ++ char* __old = strdup(setlocale(LC_ALL, NULL)); ++ setlocale(LC_ALL, _M_name_timepunct); ++ const size_t __len = wcsftime(__s, __maxlen, __format, __tm); ++ setlocale(LC_ALL, __old); ++ free(__old); ++#endif ++ // Make sure __s is null terminated. ++ if (__len == 0) ++ __s[0] = L'\0'; ++ } ++ ++ template<> ++ void ++ __timepunct::_M_initialize_timepunct(__c_locale __cloc) ++ { ++ if (!_M_data) ++ _M_data = new __timepunct_cache; ++ ++#warning wide time stuff ++// if (!__cloc) ++ { ++ // "C" locale ++ _M_c_locale_timepunct = _S_get_c_locale(); ++ ++ _M_data->_M_date_format = L"%m/%d/%y"; ++ _M_data->_M_date_era_format = L"%m/%d/%y"; ++ _M_data->_M_time_format = L"%H:%M:%S"; ++ _M_data->_M_time_era_format = L"%H:%M:%S"; ++ _M_data->_M_date_time_format = L""; ++ _M_data->_M_date_time_era_format = L""; ++ _M_data->_M_am = L"AM"; ++ _M_data->_M_pm = L"PM"; ++ _M_data->_M_am_pm_format = L""; ++ ++ // Day names, starting with "C"'s Sunday. ++ _M_data->_M_day1 = L"Sunday"; ++ _M_data->_M_day2 = L"Monday"; ++ _M_data->_M_day3 = L"Tuesday"; ++ _M_data->_M_day4 = L"Wednesday"; ++ _M_data->_M_day5 = L"Thursday"; ++ _M_data->_M_day6 = L"Friday"; ++ _M_data->_M_day7 = L"Saturday"; ++ ++ // Abbreviated day names, starting with "C"'s Sun. 
++ _M_data->_M_aday1 = L"Sun"; ++ _M_data->_M_aday2 = L"Mon"; ++ _M_data->_M_aday3 = L"Tue"; ++ _M_data->_M_aday4 = L"Wed"; ++ _M_data->_M_aday5 = L"Thu"; ++ _M_data->_M_aday6 = L"Fri"; ++ _M_data->_M_aday7 = L"Sat"; ++ ++ // Month names, starting with "C"'s January. ++ _M_data->_M_month01 = L"January"; ++ _M_data->_M_month02 = L"February"; ++ _M_data->_M_month03 = L"March"; ++ _M_data->_M_month04 = L"April"; ++ _M_data->_M_month05 = L"May"; ++ _M_data->_M_month06 = L"June"; ++ _M_data->_M_month07 = L"July"; ++ _M_data->_M_month08 = L"August"; ++ _M_data->_M_month09 = L"September"; ++ _M_data->_M_month10 = L"October"; ++ _M_data->_M_month11 = L"November"; ++ _M_data->_M_month12 = L"December"; ++ ++ // Abbreviated month names, starting with "C"'s Jan. ++ _M_data->_M_amonth01 = L"Jan"; ++ _M_data->_M_amonth02 = L"Feb"; ++ _M_data->_M_amonth03 = L"Mar"; ++ _M_data->_M_amonth04 = L"Apr"; ++ _M_data->_M_amonth05 = L"May"; ++ _M_data->_M_amonth06 = L"Jun"; ++ _M_data->_M_amonth07 = L"Jul"; ++ _M_data->_M_amonth08 = L"Aug"; ++ _M_data->_M_amonth09 = L"Sep"; ++ _M_data->_M_amonth10 = L"Oct"; ++ _M_data->_M_amonth11 = L"Nov"; ++ _M_data->_M_amonth12 = L"Dec"; ++ } ++#if 0 ++ else ++ { ++ _M_c_locale_timepunct = _S_clone_c_locale(__cloc); ++ ++ union { char *__s; wchar_t *__w; } __u; ++ ++ __u.__s = __nl_langinfo_l(_NL_WD_FMT, __cloc); ++ _M_data->_M_date_format = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WERA_D_FMT, __cloc); ++ _M_data->_M_date_era_format = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WT_FMT, __cloc); ++ _M_data->_M_time_format = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WERA_T_FMT, __cloc); ++ _M_data->_M_time_era_format = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WD_T_FMT, __cloc); ++ _M_data->_M_date_time_format = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WERA_D_T_FMT, __cloc); ++ _M_data->_M_date_time_era_format = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WAM_STR, __cloc); ++ _M_data->_M_am = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WPM_STR, __cloc); ++ _M_data->_M_pm = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WT_FMT_AMPM, __cloc); ++ _M_data->_M_am_pm_format = __u.__w; ++ ++ // Day names, starting with "C"'s Sunday. ++ __u.__s = __nl_langinfo_l(_NL_WDAY_1, __cloc); ++ _M_data->_M_day1 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WDAY_2, __cloc); ++ _M_data->_M_day2 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WDAY_3, __cloc); ++ _M_data->_M_day3 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WDAY_4, __cloc); ++ _M_data->_M_day4 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WDAY_5, __cloc); ++ _M_data->_M_day5 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WDAY_6, __cloc); ++ _M_data->_M_day6 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WDAY_7, __cloc); ++ _M_data->_M_day7 = __u.__w; ++ ++ // Abbreviated day names, starting with "C"'s Sun. ++ __u.__s = __nl_langinfo_l(_NL_WABDAY_1, __cloc); ++ _M_data->_M_aday1 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABDAY_2, __cloc); ++ _M_data->_M_aday2 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABDAY_3, __cloc); ++ _M_data->_M_aday3 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABDAY_4, __cloc); ++ _M_data->_M_aday4 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABDAY_5, __cloc); ++ _M_data->_M_aday5 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABDAY_6, __cloc); ++ _M_data->_M_aday6 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABDAY_7, __cloc); ++ _M_data->_M_aday7 = __u.__w; ++ ++ // Month names, starting with "C"'s January. 
++ __u.__s = __nl_langinfo_l(_NL_WMON_1, __cloc); ++ _M_data->_M_month01 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_2, __cloc); ++ _M_data->_M_month02 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_3, __cloc); ++ _M_data->_M_month03 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_4, __cloc); ++ _M_data->_M_month04 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_5, __cloc); ++ _M_data->_M_month05 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_6, __cloc); ++ _M_data->_M_month06 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_7, __cloc); ++ _M_data->_M_month07 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_8, __cloc); ++ _M_data->_M_month08 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_9, __cloc); ++ _M_data->_M_month09 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_10, __cloc); ++ _M_data->_M_month10 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_11, __cloc); ++ _M_data->_M_month11 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WMON_12, __cloc); ++ _M_data->_M_month12 = __u.__w; ++ ++ // Abbreviated month names, starting with "C"'s Jan. ++ __u.__s = __nl_langinfo_l(_NL_WABMON_1, __cloc); ++ _M_data->_M_amonth01 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_2, __cloc); ++ _M_data->_M_amonth02 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_3, __cloc); ++ _M_data->_M_amonth03 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_4, __cloc); ++ _M_data->_M_amonth04 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_5, __cloc); ++ _M_data->_M_amonth05 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_6, __cloc); ++ _M_data->_M_amonth06 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_7, __cloc); ++ _M_data->_M_amonth07 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_8, __cloc); ++ _M_data->_M_amonth08 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_9, __cloc); ++ _M_data->_M_amonth09 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_10, __cloc); ++ _M_data->_M_amonth10 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_11, __cloc); ++ _M_data->_M_amonth11 = __u.__w; ++ __u.__s = __nl_langinfo_l(_NL_WABMON_12, __cloc); ++ _M_data->_M_amonth12 = __u.__w; ++ } ++#endif // 0 ++ } ++#endif ++} +diff --git a/libstdc++-v3/config/locale/uclibc/time_members.h b/libstdc++-v3/config/locale/uclibc/time_members.h +new file mode 100644 +index 0000000..ba8e858 +--- /dev/null ++++ b/libstdc++-v3/config/locale/uclibc/time_members.h +@@ -0,0 +1,68 @@ ++// std::time_get, std::time_put implementation, GNU version -*- C++ -*- ++ ++// Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc. ++// ++// This file is part of the GNU ISO C++ Library. This library is free ++// software; you can redistribute it and/or modify it under the ++// terms of the GNU General Public License as published by the ++// Free Software Foundation; either version 2, or (at your option) ++// any later version. ++ ++// This library is distributed in the hope that it will be useful, ++// but WITHOUT ANY WARRANTY; without even the implied warranty of ++// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++// GNU General Public License for more details. ++ ++// You should have received a copy of the GNU General Public License along ++// with this library; see the file COPYING. If not, write to the Free ++// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, ++// USA. ++ ++// As a special exception, you may use this file as part of a free software ++// library without restriction. 
Specifically, if other files instantiate ++// templates or use macros or inline functions from this file, or you compile ++// this file and link it with other files to produce an executable, this ++// file does not by itself cause the resulting executable to be covered by ++// the GNU General Public License. This exception does not however ++// invalidate any other reasons why the executable file might be covered by ++// the GNU General Public License. ++ ++// ++// ISO C++ 14882: 22.2.5.1.2 - time_get functions ++// ISO C++ 14882: 22.2.5.3.2 - time_put functions ++// ++ ++// Written by Benjamin Kosnik ++ ++ template ++ __timepunct<_CharT>::__timepunct(size_t __refs) ++ : facet(__refs), _M_data(NULL), _M_c_locale_timepunct(NULL), ++ _M_name_timepunct(_S_get_c_name()) ++ { _M_initialize_timepunct(); } ++ ++ template ++ __timepunct<_CharT>::__timepunct(__cache_type* __cache, size_t __refs) ++ : facet(__refs), _M_data(__cache), _M_c_locale_timepunct(NULL), ++ _M_name_timepunct(_S_get_c_name()) ++ { _M_initialize_timepunct(); } ++ ++ template ++ __timepunct<_CharT>::__timepunct(__c_locale __cloc, const char* __s, ++ size_t __refs) ++ : facet(__refs), _M_data(NULL), _M_c_locale_timepunct(NULL), ++ _M_name_timepunct(__s) ++ { ++ char* __tmp = new char[std::strlen(__s) + 1]; ++ std::strcpy(__tmp, __s); ++ _M_name_timepunct = __tmp; ++ _M_initialize_timepunct(__cloc); ++ } ++ ++ template ++ __timepunct<_CharT>::~__timepunct() ++ { ++ if (_M_name_timepunct != _S_get_c_name()) ++ delete [] _M_name_timepunct; ++ delete _M_data; ++ _S_destroy_c_locale(_M_c_locale_timepunct); ++ } +diff --git a/libstdc++-v3/configure b/libstdc++-v3/configure +index 41797a9..8a5481c 100755 +--- a/libstdc++-v3/configure ++++ b/libstdc++-v3/configure +@@ -15830,6 +15830,9 @@ fi + # Default to "generic". + if test $enable_clocale_flag = auto; then + case ${target_os} in ++ *-uclibc*) ++ enable_clocale_flag=uclibc ++ ;; + linux* | gnu* | kfreebsd*-gnu | knetbsd*-gnu) + enable_clocale_flag=gnu + ;; +@@ -16108,6 +16111,78 @@ $as_echo "newlib" >&6; } + CTIME_CC=config/locale/generic/time_members.cc + CLOCALE_INTERNAL_H=config/locale/generic/c++locale_internal.h + ;; ++ uclibc) ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: uclibc" >&5 ++$as_echo "uclibc" >&6; } ++ ++ # Declare intention to use gettext, and add support for specific ++ # languages. ++ # For some reason, ALL_LINGUAS has to be before AM-GNU-GETTEXT ++ ALL_LINGUAS="de fr" ++ ++ # Don't call AM-GNU-GETTEXT here. Instead, assume glibc. ++ # Extract the first word of "msgfmt", so it can be a program name with args. ++set dummy msgfmt; ac_word=$2 ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 ++$as_echo_n "checking for $ac_word... " >&6; } ++if test "${ac_cv_prog_check_msgfmt+set}" = set; then : ++ $as_echo_n "(cached) " >&6 ++else ++ if test -n "$check_msgfmt"; then ++ ac_cv_prog_check_msgfmt="$check_msgfmt" # Let the user override the test. ++else ++as_save_IFS=$IFS; IFS=$PATH_SEPARATOR ++for as_dir in $PATH ++do ++ IFS=$as_save_IFS ++ test -z "$as_dir" && as_dir=. 
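# (Stock autoconf PATH walk: it only records "yes" in
#  ac_cv_prog_check_msgfmt when an msgfmt executable is found; together
#  with --enable-nls this decides whether USE_NLS is set and the .mo
#  catalogs listed in ALL_LINGUAS are built for the uclibc locale model.)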
++ for ac_exec_ext in '' $ac_executable_extensions; do ++ if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ++ ac_cv_prog_check_msgfmt="yes" ++ $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 ++ break 2 ++ fi ++done ++ done ++IFS=$as_save_IFS ++ ++ test -z "$ac_cv_prog_check_msgfmt" && ac_cv_prog_check_msgfmt="no" ++fi ++fi ++check_msgfmt=$ac_cv_prog_check_msgfmt ++if test -n "$check_msgfmt"; then ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $check_msgfmt" >&5 ++$as_echo "$check_msgfmt" >&6; } ++else ++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 ++$as_echo "no" >&6; } ++fi ++ ++ ++ if test x"$check_msgfmt" = x"yes" && test x"$enable_nls" = x"yes"; then ++ USE_NLS=yes ++ fi ++ # Export the build objects. ++ for ling in $ALL_LINGUAS; do \ ++ glibcxx_MOFILES="$glibcxx_MOFILES $ling.mo"; \ ++ glibcxx_POFILES="$glibcxx_POFILES $ling.po"; \ ++ done ++ ++ ++ ++ CLOCALE_H=config/locale/uclibc/c_locale.h ++ CLOCALE_CC=config/locale/uclibc/c_locale.cc ++ CCODECVT_CC=config/locale/uclibc/codecvt_members.cc ++ CCOLLATE_CC=config/locale/uclibc/collate_members.cc ++ CCTYPE_CC=config/locale/uclibc/ctype_members.cc ++ CMESSAGES_H=config/locale/uclibc/messages_members.h ++ CMESSAGES_CC=config/locale/uclibc/messages_members.cc ++ CMONEY_CC=config/locale/uclibc/monetary_members.cc ++ CNUMERIC_CC=config/locale/uclibc/numeric_members.cc ++ CTIME_H=config/locale/uclibc/time_members.h ++ CTIME_CC=config/locale/uclibc/time_members.cc ++ CLOCALE_INTERNAL_H=config/locale/uclibc/c++locale_internal.h ++ ;; + esac + + # This is where the testsuite looks for locale catalogs, using the +diff --git a/libstdc++-v3/include/c_compatibility/wchar.h b/libstdc++-v3/include/c_compatibility/wchar.h +index 55a0b52..7d8bb15 100644 +--- a/libstdc++-v3/include/c_compatibility/wchar.h ++++ b/libstdc++-v3/include/c_compatibility/wchar.h +@@ -101,7 +101,9 @@ using std::wmemcmp; + using std::wmemcpy; + using std::wmemmove; + using std::wmemset; ++#if _GLIBCXX_HAVE_WCSFTIME + using std::wcsftime; ++#endif + + #if _GLIBCXX_USE_C99_WCHAR + using std::wcstold; +diff --git a/libstdc++-v3/include/c_std/cwchar b/libstdc++-v3/include/c_std/cwchar +index dc4cef02..256d126 100644 +--- a/libstdc++-v3/include/c_std/cwchar ++++ b/libstdc++-v3/include/c_std/cwchar +@@ -175,7 +175,9 @@ _GLIBCXX_BEGIN_NAMESPACE_VERSION + using ::wcscoll; + using ::wcscpy; + using ::wcscspn; ++#if _GLIBCXX_HAVE_WCSFTIME + using ::wcsftime; ++#endif + using ::wcslen; + using ::wcsncat; + using ::wcsncmp; +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0005-uclibc-locale-no__x.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0005-uclibc-locale-no__x.patch new file mode 100644 index 000000000..3275016e7 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0005-uclibc-locale-no__x.patch @@ -0,0 +1,257 @@ +From c01c14e8e9be382ecd4121ee70f5003b4cb0f904 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 08:42:36 +0400 +Subject: [PATCH 05/46] uclibc-locale-no__x + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + .../config/locale/uclibc/c++locale_internal.h | 45 ++++++++++++++++++++++ + libstdc++-v3/config/locale/uclibc/c_locale.cc | 14 ------- + libstdc++-v3/config/locale/uclibc/c_locale.h | 1 + + .../config/locale/uclibc/collate_members.cc | 7 ---- + libstdc++-v3/config/locale/uclibc/ctype_members.cc | 7 ---- + .../config/locale/uclibc/messages_members.cc | 7 +--- + 
.../config/locale/uclibc/messages_members.h | 18 ++++----- + .../config/locale/uclibc/monetary_members.cc | 4 -- + .../config/locale/uclibc/numeric_members.cc | 3 -- + libstdc++-v3/config/locale/uclibc/time_members.cc | 3 -- + 10 files changed, 55 insertions(+), 54 deletions(-) + +diff --git a/libstdc++-v3/config/locale/uclibc/c++locale_internal.h b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h +index 2ae3e4a..e74fddf 100644 +--- a/libstdc++-v3/config/locale/uclibc/c++locale_internal.h ++++ b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h +@@ -60,4 +60,49 @@ extern "C" __typeof(wcsxfrm_l) __wcsxfrm_l; + extern "C" __typeof(wctype_l) __wctype_l; + #endif + ++# define __nl_langinfo_l nl_langinfo_l ++# define __strcoll_l strcoll_l ++# define __strftime_l strftime_l ++# define __strtod_l strtod_l ++# define __strtof_l strtof_l ++# define __strtold_l strtold_l ++# define __strxfrm_l strxfrm_l ++# define __newlocale newlocale ++# define __freelocale freelocale ++# define __duplocale duplocale ++# define __uselocale uselocale ++ ++# ifdef _GLIBCXX_USE_WCHAR_T ++# define __iswctype_l iswctype_l ++# define __towlower_l towlower_l ++# define __towupper_l towupper_l ++# define __wcscoll_l wcscoll_l ++# define __wcsftime_l wcsftime_l ++# define __wcsxfrm_l wcsxfrm_l ++# define __wctype_l wctype_l ++# endif ++ ++#else ++# define __nl_langinfo_l(N, L) nl_langinfo((N)) ++# define __strcoll_l(S1, S2, L) strcoll((S1), (S2)) ++# define __strtod_l(S, E, L) strtod((S), (E)) ++# define __strtof_l(S, E, L) strtof((S), (E)) ++# define __strtold_l(S, E, L) strtold((S), (E)) ++# define __strxfrm_l(S1, S2, N, L) strxfrm((S1), (S2), (N)) ++# warning should dummy __newlocale check for C|POSIX ? ++# define __newlocale(a, b, c) NULL ++# define __freelocale(a) ((void)0) ++# define __duplocale(a) __c_locale() ++//# define __uselocale ? ++// ++# ifdef _GLIBCXX_USE_WCHAR_T ++# define __iswctype_l(C, M, L) iswctype((C), (M)) ++# define __towlower_l(C, L) towlower((C)) ++# define __towupper_l(C, L) towupper((C)) ++# define __wcscoll_l(S1, S2, L) wcscoll((S1), (S2)) ++//# define __wcsftime_l(S, M, F, T, L) wcsftime((S), (M), (F), (T)) ++# define __wcsxfrm_l(S1, S2, N, L) wcsxfrm((S1), (S2), (N)) ++# define __wctype_l(S, L) wctype((S)) ++# endif ++ + #endif // GLIBC 2.3 and later +diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.cc b/libstdc++-v3/config/locale/uclibc/c_locale.cc +index 5081dc1..21430d0 100644 +--- a/libstdc++-v3/config/locale/uclibc/c_locale.cc ++++ b/libstdc++-v3/config/locale/uclibc/c_locale.cc +@@ -39,20 +39,6 @@ + #include + #include + +-#ifndef __UCLIBC_HAS_XLOCALE__ +-#define __strtol_l(S, E, B, L) strtol((S), (E), (B)) +-#define __strtoul_l(S, E, B, L) strtoul((S), (E), (B)) +-#define __strtoll_l(S, E, B, L) strtoll((S), (E), (B)) +-#define __strtoull_l(S, E, B, L) strtoull((S), (E), (B)) +-#define __strtof_l(S, E, L) strtof((S), (E)) +-#define __strtod_l(S, E, L) strtod((S), (E)) +-#define __strtold_l(S, E, L) strtold((S), (E)) +-#warning should dummy __newlocale check for C|POSIX ? 
+-#define __newlocale(a, b, c) NULL +-#define __freelocale(a) ((void)0) +-#define __duplocale(a) __c_locale() +-#endif +- + namespace std + { + template<> +diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.h b/libstdc++-v3/config/locale/uclibc/c_locale.h +index da07c1f..4bca5f1 100644 +--- a/libstdc++-v3/config/locale/uclibc/c_locale.h ++++ b/libstdc++-v3/config/locale/uclibc/c_locale.h +@@ -68,6 +68,7 @@ namespace __gnu_cxx + { + extern "C" __typeof(uselocale) __uselocale; + } ++#define __uselocale uselocale + #endif + + namespace std +diff --git a/libstdc++-v3/config/locale/uclibc/collate_members.cc b/libstdc++-v3/config/locale/uclibc/collate_members.cc +index c2664a7..ec5c329 100644 +--- a/libstdc++-v3/config/locale/uclibc/collate_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/collate_members.cc +@@ -36,13 +36,6 @@ + #include + #include + +-#ifndef __UCLIBC_HAS_XLOCALE__ +-#define __strcoll_l(S1, S2, L) strcoll((S1), (S2)) +-#define __strxfrm_l(S1, S2, N, L) strxfrm((S1), (S2), (N)) +-#define __wcscoll_l(S1, S2, L) wcscoll((S1), (S2)) +-#define __wcsxfrm_l(S1, S2, N, L) wcsxfrm((S1), (S2), (N)) +-#endif +- + namespace std + { + // These are basically extensions to char_traits, and perhaps should +diff --git a/libstdc++-v3/config/locale/uclibc/ctype_members.cc b/libstdc++-v3/config/locale/uclibc/ctype_members.cc +index 7294e3a..7b12861 100644 +--- a/libstdc++-v3/config/locale/uclibc/ctype_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/ctype_members.cc +@@ -38,13 +38,6 @@ + #undef _LIBC + #include + +-#ifndef __UCLIBC_HAS_XLOCALE__ +-#define __wctype_l(S, L) wctype((S)) +-#define __towupper_l(C, L) towupper((C)) +-#define __towlower_l(C, L) towlower((C)) +-#define __iswctype_l(C, M, L) iswctype((C), (M)) +-#endif +- + namespace std + { + // NB: The other ctype specializations are in src/locale.cc and +diff --git a/libstdc++-v3/config/locale/uclibc/messages_members.cc b/libstdc++-v3/config/locale/uclibc/messages_members.cc +index 13594d9..d7693b4 100644 +--- a/libstdc++-v3/config/locale/uclibc/messages_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/messages_members.cc +@@ -39,13 +39,10 @@ + #ifdef __UCLIBC_MJN3_ONLY__ + #warning fix gettext stuff + #endif +-#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ +-extern "C" char *__dcgettext(const char *domainname, +- const char *msgid, int category); + #undef gettext +-#define gettext(msgid) __dcgettext(NULL, msgid, LC_MESSAGES) ++#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ ++#define gettext(msgid) dcgettext(NULL, msgid, LC_MESSAGES) + #else +-#undef gettext + #define gettext(msgid) (msgid) + #endif + +diff --git a/libstdc++-v3/config/locale/uclibc/messages_members.h b/libstdc++-v3/config/locale/uclibc/messages_members.h +index 1424078..d89da33 100644 +--- a/libstdc++-v3/config/locale/uclibc/messages_members.h ++++ b/libstdc++-v3/config/locale/uclibc/messages_members.h +@@ -36,15 +36,11 @@ + #ifdef __UCLIBC_MJN3_ONLY__ + #warning fix prototypes for *textdomain funcs + #endif +-#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ +-extern "C" char *__textdomain(const char *domainname); +-extern "C" char *__bindtextdomain(const char *domainname, +- const char *dirname); +-#else +-#undef __textdomain +-#undef __bindtextdomain +-#define __textdomain(D) ((void)0) +-#define __bindtextdomain(D,P) ((void)0) ++#ifndef __UCLIBC_HAS_GETTEXT_AWARENESS__ ++#undef textdomain ++#undef bindtextdomain ++#define textdomain(D) ((void)0) ++#define bindtextdomain(D,P) ((void)0) + #endif + + // Non-virtual member functions. 
+@@ -70,7 +66,7 @@ extern "C" char *__bindtextdomain(const char *domainname, + messages<_CharT>::open(const basic_string& __s, const locale& __loc, + const char* __dir) const + { +- __bindtextdomain(__s.c_str(), __dir); ++ bindtextdomain(__s.c_str(), __dir); + return this->do_open(__s, __loc); + } + +@@ -90,7 +86,7 @@ extern "C" char *__bindtextdomain(const char *domainname, + { + // No error checking is done, assume the catalog exists and can + // be used. +- __textdomain(__s.c_str()); ++ textdomain(__s.c_str()); + return 0; + } + +diff --git a/libstdc++-v3/config/locale/uclibc/monetary_members.cc b/libstdc++-v3/config/locale/uclibc/monetary_members.cc +index aa52731..2e6f80a 100644 +--- a/libstdc++-v3/config/locale/uclibc/monetary_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/monetary_members.cc +@@ -43,10 +43,6 @@ + #warning tailor for stub locale support + #endif + +-#ifndef __UCLIBC_HAS_XLOCALE__ +-#define __nl_langinfo_l(N, L) nl_langinfo((N)) +-#endif +- + namespace std + { + // Construct and return valid pattern consisting of some combination of: +diff --git a/libstdc++-v3/config/locale/uclibc/numeric_members.cc b/libstdc++-v3/config/locale/uclibc/numeric_members.cc +index 883ec1a..2c70642 100644 +--- a/libstdc++-v3/config/locale/uclibc/numeric_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/numeric_members.cc +@@ -41,9 +41,6 @@ + #ifdef __UCLIBC_MJN3_ONLY__ + #warning tailor for stub locale support + #endif +-#ifndef __UCLIBC_HAS_XLOCALE__ +-#define __nl_langinfo_l(N, L) nl_langinfo((N)) +-#endif + + namespace std + { +diff --git a/libstdc++-v3/config/locale/uclibc/time_members.cc b/libstdc++-v3/config/locale/uclibc/time_members.cc +index e0707d7..d848ed5 100644 +--- a/libstdc++-v3/config/locale/uclibc/time_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/time_members.cc +@@ -40,9 +40,6 @@ + #ifdef __UCLIBC_MJN3_ONLY__ + #warning tailor for stub locale support + #endif +-#ifndef __UCLIBC_HAS_XLOCALE__ +-#define __nl_langinfo_l(N, L) nl_langinfo((N)) +-#endif + + namespace std + { +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0006-uclibc-locale-wchar_fix.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0006-uclibc-locale-wchar_fix.patch new file mode 100644 index 000000000..e45a482d5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0006-uclibc-locale-wchar_fix.patch @@ -0,0 +1,68 @@ +From e7a4760fb40008cae33e6fc7dc4cfef6c2fd5f93 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 08:45:57 +0400 +Subject: [PATCH 06/46] uclibc-locale-wchar_fix + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + libstdc++-v3/config/locale/uclibc/monetary_members.cc | 4 ++-- + libstdc++-v3/config/locale/uclibc/numeric_members.cc | 13 +++++++++++++ + 2 files changed, 15 insertions(+), 2 deletions(-) + +diff --git a/libstdc++-v3/config/locale/uclibc/monetary_members.cc b/libstdc++-v3/config/locale/uclibc/monetary_members.cc +index 2e6f80a..31ebb9f 100644 +--- a/libstdc++-v3/config/locale/uclibc/monetary_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/monetary_members.cc +@@ -401,7 +401,7 @@ namespace std + # ifdef __UCLIBC_HAS_XLOCALE__ + _M_data->_M_decimal_point = __cloc->decimal_point_wc; + _M_data->_M_thousands_sep = __cloc->thousands_sep_wc; +-# else ++# elif defined __UCLIBC_HAS_LOCALE__ + _M_data->_M_decimal_point = __global_locale->decimal_point_wc; + _M_data->_M_thousands_sep = __global_locale->thousands_sep_wc; + # endif +@@ -556,7 +556,7 @@ namespace std + # ifdef 
__UCLIBC_HAS_XLOCALE__ + _M_data->_M_decimal_point = __cloc->decimal_point_wc; + _M_data->_M_thousands_sep = __cloc->thousands_sep_wc; +-# else ++# elif defined __UCLIBC_HAS_LOCALE__ + _M_data->_M_decimal_point = __global_locale->decimal_point_wc; + _M_data->_M_thousands_sep = __global_locale->thousands_sep_wc; + # endif +diff --git a/libstdc++-v3/config/locale/uclibc/numeric_members.cc b/libstdc++-v3/config/locale/uclibc/numeric_members.cc +index 2c70642..d5c8961 100644 +--- a/libstdc++-v3/config/locale/uclibc/numeric_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/numeric_members.cc +@@ -127,12 +127,25 @@ namespace std + { + // Named locale. + // NB: In the GNU model wchar_t is always 32 bit wide. ++#ifdef __UCLIBC_MJN3_ONLY__ ++#warning fix this... should be numeric ++#endif ++#ifdef __UCLIBC__ ++# ifdef __UCLIBC_HAS_XLOCALE__ ++ _M_data->_M_decimal_point = __cloc->decimal_point_wc; ++ _M_data->_M_thousands_sep = __cloc->thousands_sep_wc; ++# elif defined __UCLIBC_HAS_LOCALE__ ++ _M_data->_M_decimal_point = __global_locale->decimal_point_wc; ++ _M_data->_M_thousands_sep = __global_locale->thousands_sep_wc; ++# endif ++#else + union { char *__s; wchar_t __w; } __u; + __u.__s = __nl_langinfo_l(_NL_NUMERIC_DECIMAL_POINT_WC, __cloc); + _M_data->_M_decimal_point = __u.__w; + + __u.__s = __nl_langinfo_l(_NL_NUMERIC_THOUSANDS_SEP_WC, __cloc); + _M_data->_M_thousands_sep = __u.__w; ++#endif + + if (_M_data->_M_thousands_sep == L'\0') + _M_data->_M_grouping = ""; +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0007-uclibc-locale-update.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0007-uclibc-locale-update.patch new file mode 100644 index 000000000..b73e5914e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0007-uclibc-locale-update.patch @@ -0,0 +1,542 @@ +From 8d53a38a3038104e6830ecea5e4beadce54457c1 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 08:46:58 +0400 +Subject: [PATCH 07/46] uclibc-locale-update + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + .../config/locale/uclibc/c++locale_internal.h | 3 + + libstdc++-v3/config/locale/uclibc/c_locale.cc | 74 ++++++++++------------ + libstdc++-v3/config/locale/uclibc/c_locale.h | 42 ++++++------ + libstdc++-v3/config/locale/uclibc/ctype_members.cc | 51 +++++++++++---- + .../config/locale/uclibc/messages_members.h | 12 ++-- + .../config/locale/uclibc/monetary_members.cc | 34 ++++++---- + .../config/locale/uclibc/numeric_members.cc | 5 ++ + libstdc++-v3/config/locale/uclibc/time_members.cc | 18 ++++-- + libstdc++-v3/config/locale/uclibc/time_members.h | 17 +++-- + 9 files changed, 158 insertions(+), 98 deletions(-) + +diff --git a/libstdc++-v3/config/locale/uclibc/c++locale_internal.h b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h +index e74fddf..971a6b4 100644 +--- a/libstdc++-v3/config/locale/uclibc/c++locale_internal.h ++++ b/libstdc++-v3/config/locale/uclibc/c++locale_internal.h +@@ -31,6 +31,9 @@ + + #include + #include ++#include ++#include ++#include + + #ifdef __UCLIBC_MJN3_ONLY__ + #warning clean this up +diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.cc b/libstdc++-v3/config/locale/uclibc/c_locale.cc +index 21430d0..1b9d8e1 100644 +--- a/libstdc++-v3/config/locale/uclibc/c_locale.cc ++++ b/libstdc++-v3/config/locale/uclibc/c_locale.cc +@@ -39,23 +39,20 @@ + #include + #include + +-namespace std +-{ ++_GLIBCXX_BEGIN_NAMESPACE(std) ++ + template<> + void + __convert_to_v(const char* __s, float& __v, 
ios_base::iostate& __err, + const __c_locale& __cloc) + { +- if (!(__err & ios_base::failbit)) +- { +- char* __sanity; +- errno = 0; +- float __f = __strtof_l(__s, &__sanity, __cloc); +- if (__sanity != __s && errno != ERANGE) +- __v = __f; +- else +- __err |= ios_base::failbit; +- } ++ char* __sanity; ++ errno = 0; ++ float __f = __strtof_l(__s, &__sanity, __cloc); ++ if (__sanity != __s && errno != ERANGE) ++ __v = __f; ++ else ++ __err |= ios_base::failbit; + } + + template<> +@@ -63,16 +60,13 @@ namespace std + __convert_to_v(const char* __s, double& __v, ios_base::iostate& __err, + const __c_locale& __cloc) + { +- if (!(__err & ios_base::failbit)) +- { +- char* __sanity; +- errno = 0; +- double __d = __strtod_l(__s, &__sanity, __cloc); +- if (__sanity != __s && errno != ERANGE) +- __v = __d; +- else +- __err |= ios_base::failbit; +- } ++ char* __sanity; ++ errno = 0; ++ double __d = __strtod_l(__s, &__sanity, __cloc); ++ if (__sanity != __s && errno != ERANGE) ++ __v = __d; ++ else ++ __err |= ios_base::failbit; + } + + template<> +@@ -80,16 +74,13 @@ namespace std + __convert_to_v(const char* __s, long double& __v, ios_base::iostate& __err, + const __c_locale& __cloc) + { +- if (!(__err & ios_base::failbit)) +- { +- char* __sanity; +- errno = 0; +- long double __ld = __strtold_l(__s, &__sanity, __cloc); +- if (__sanity != __s && errno != ERANGE) +- __v = __ld; +- else +- __err |= ios_base::failbit; +- } ++ char* __sanity; ++ errno = 0; ++ long double __ld = __strtold_l(__s, &__sanity, __cloc); ++ if (__sanity != __s && errno != ERANGE) ++ __v = __ld; ++ else ++ __err |= ios_base::failbit; + } + + void +@@ -110,17 +101,18 @@ namespace std + void + locale::facet::_S_destroy_c_locale(__c_locale& __cloc) + { +- if (_S_get_c_locale() != __cloc) ++ if (__cloc && _S_get_c_locale() != __cloc) + __freelocale(__cloc); + } + + __c_locale + locale::facet::_S_clone_c_locale(__c_locale& __cloc) + { return __duplocale(__cloc); } +-} // namespace std + +-namespace __gnu_cxx +-{ ++_GLIBCXX_END_NAMESPACE ++ ++_GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx) ++ + const char* const category_names[6 + _GLIBCXX_NUM_CATEGORIES] = + { + "LC_CTYPE", +@@ -138,9 +130,11 @@ namespace __gnu_cxx + "LC_IDENTIFICATION" + #endif + }; +-} + +-namespace std +-{ ++_GLIBCXX_END_NAMESPACE ++ ++_GLIBCXX_BEGIN_NAMESPACE(std) ++ + const char* const* const locale::_S_categories = __gnu_cxx::category_names; +-} // namespace std ++ ++_GLIBCXX_END_NAMESPACE +diff --git a/libstdc++-v3/config/locale/uclibc/c_locale.h b/libstdc++-v3/config/locale/uclibc/c_locale.h +index 4bca5f1..64a6d46 100644 +--- a/libstdc++-v3/config/locale/uclibc/c_locale.h ++++ b/libstdc++-v3/config/locale/uclibc/c_locale.h +@@ -39,21 +39,23 @@ + #pragma GCC system_header + + #include // get std::strlen +-#include // get std::snprintf or std::sprintf ++#include // get std::vsnprintf or std::vsprintf + #include + #include // For codecvt + #ifdef __UCLIBC_MJN3_ONLY__ + #warning fix this + #endif +-#ifdef __UCLIBC_HAS_LOCALE__ ++#ifdef _GLIBCXX_USE_ICONV + #include // For codecvt using iconv, iconv_t + #endif +-#ifdef __UCLIBC_HAS_GETTEXT_AWARENESS__ +-#include // For messages ++#ifdef HAVE_LIBINTL_H ++#include // For messages + #endif ++#include + + #ifdef __UCLIBC_MJN3_ONLY__ + #warning what is _GLIBCXX_C_LOCALE_GNU for ++// psm: used in os/gnu-linux/ctype_noninline.h + #endif + #define _GLIBCXX_C_LOCALE_GNU 1 + +@@ -78,23 +80,25 @@ namespace std + #else + typedef int* __c_locale; + #endif +- +- // Convert numeric value of type _Tv to string and return length of +- // 
string. If snprintf is available use it, otherwise fall back to +- // the unsafe sprintf which, in general, can be dangerous and should ++ // Convert numeric value of type double to string and return length of ++ // string. If vsnprintf is available use it, otherwise fall back to ++ // the unsafe vsprintf which, in general, can be dangerous and should + // be avoided. +- template +- int +- __convert_from_v(char* __out, +- const int __size __attribute__ ((__unused__)), +- const char* __fmt, +-#ifdef __UCLIBC_HAS_XCLOCALE__ +- _Tv __v, const __c_locale& __cloc, int __prec) ++ inline int ++ __convert_from_v(const __c_locale& ++#ifndef __UCLIBC_HAS_XCLOCALE__ ++ __cloc __attribute__ ((__unused__)) ++#endif ++ , ++ char* __out, ++ const int __size, ++ const char* __fmt, ...) + { ++ va_list __args; ++#ifdef __UCLIBC_HAS_XCLOCALE__ ++ + __c_locale __old = __gnu_cxx::__uselocale(__cloc); + #else +- _Tv __v, const __c_locale&, int __prec) +- { + # ifdef __UCLIBC_HAS_LOCALE__ + char* __old = std::setlocale(LC_ALL, NULL); + char* __sav = new char[std::strlen(__old) + 1]; +@@ -103,7 +107,9 @@ namespace std + # endif + #endif + +- const int __ret = std::snprintf(__out, __size, __fmt, __prec, __v); ++ va_start(__args, __fmt); ++ const int __ret = std::vsnprintf(__out, __size, __fmt, __args); ++ va_end(__args); + + #ifdef __UCLIBC_HAS_XCLOCALE__ + __gnu_cxx::__uselocale(__old); +diff --git a/libstdc++-v3/config/locale/uclibc/ctype_members.cc b/libstdc++-v3/config/locale/uclibc/ctype_members.cc +index 7b12861..13e011d 100644 +--- a/libstdc++-v3/config/locale/uclibc/ctype_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/ctype_members.cc +@@ -33,16 +33,20 @@ + + // Written by Benjamin Kosnik + ++#include ++#ifdef __UCLIBC_HAS_LOCALE__ + #define _LIBC + #include + #undef _LIBC ++#else ++#include ++#endif + #include + +-namespace std +-{ ++_GLIBCXX_BEGIN_NAMESPACE(std) ++ + // NB: The other ctype specializations are in src/locale.cc and + // various /config/os/* files. +- template<> + ctype_byname::ctype_byname(const char* __s, size_t __refs) + : ctype(0, false, __refs) + { +@@ -57,6 +61,8 @@ namespace std + #endif + } + } ++ ctype_byname::~ctype_byname() ++ { } + + #ifdef _GLIBCXX_USE_WCHAR_T + ctype::__wmask_type +@@ -138,17 +144,33 @@ namespace std + ctype:: + do_is(mask __m, wchar_t __c) const + { +- // Highest bitmask in ctype_base == 10, but extra in "C" +- // library for blank. ++ // The case of __m == ctype_base::space is particularly important, ++ // due to its use in many istream functions. Therefore we deal with ++ // it first, exploiting the knowledge that on GNU systems _M_bit[5] ++ // is the mask corresponding to ctype_base::space. NB: an encoding ++ // change would not affect correctness! ++ + bool __ret = false; +- const size_t __bitmasksize = 11; +- for (size_t __bitcur = 0; __bitcur <= __bitmasksize; ++__bitcur) +- if (__m & _M_bit[__bitcur] +- && __iswctype_l(__c, _M_wmask[__bitcur], _M_c_locale_ctype)) +- { +- __ret = true; +- break; +- } ++ if (__m == _M_bit[5]) ++ __ret = __iswctype_l(__c, _M_wmask[5], _M_c_locale_ctype); ++ else ++ { ++ // Highest bitmask in ctype_base == 10, but extra in "C" ++ // library for blank. 
++ const size_t __bitmasksize = 11; ++ for (size_t __bitcur = 0; __bitcur <= __bitmasksize; ++__bitcur) ++ if (__m & _M_bit[__bitcur]) ++ { ++ if (__iswctype_l(__c, _M_wmask[__bitcur], _M_c_locale_ctype)) ++ { ++ __ret = true; ++ break; ++ } ++ else if (__m == _M_bit[__bitcur]) ++ break; ++ } ++ } ++ + return __ret; + } + +@@ -290,4 +312,5 @@ namespace std + #endif + } + #endif // _GLIBCXX_USE_WCHAR_T +-} ++ ++_GLIBCXX_END_NAMESPACE +diff --git a/libstdc++-v3/config/locale/uclibc/messages_members.h b/libstdc++-v3/config/locale/uclibc/messages_members.h +index d89da33..067657a 100644 +--- a/libstdc++-v3/config/locale/uclibc/messages_members.h ++++ b/libstdc++-v3/config/locale/uclibc/messages_members.h +@@ -53,12 +53,16 @@ + template + messages<_CharT>::messages(__c_locale __cloc, const char* __s, + size_t __refs) +- : facet(__refs), _M_c_locale_messages(_S_clone_c_locale(__cloc)), +- _M_name_messages(__s) ++ : facet(__refs), _M_c_locale_messages(NULL), ++ _M_name_messages(NULL) + { +- char* __tmp = new char[std::strlen(__s) + 1]; +- std::strcpy(__tmp, __s); ++ const size_t __len = std::strlen(__s) + 1; ++ char* __tmp = new char[__len]; ++ std::memcpy(__tmp, __s, __len); + _M_name_messages = __tmp; ++ ++ // Last to avoid leaking memory if new throws. ++ _M_c_locale_messages = _S_clone_c_locale(__cloc); + } + + template +diff --git a/libstdc++-v3/config/locale/uclibc/monetary_members.cc b/libstdc++-v3/config/locale/uclibc/monetary_members.cc +index 31ebb9f..7679b9c 100644 +--- a/libstdc++-v3/config/locale/uclibc/monetary_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/monetary_members.cc +@@ -33,9 +33,14 @@ + + // Written by Benjamin Kosnik + ++#include ++#ifdef __UCLIBC_HAS_LOCALE__ + #define _LIBC + #include + #undef _LIBC ++#else ++#include ++#endif + #include + + #ifdef __UCLIBC_MJN3_ONLY__ +@@ -206,7 +211,7 @@ namespace std + } + break; + default: +- ; ++ __ret = pattern(); + } + return __ret; + } +@@ -390,7 +395,9 @@ namespace std + __c_locale __old = __uselocale(__cloc); + #else + // Switch to named locale so that mbsrtowcs will work. +- char* __old = strdup(setlocale(LC_ALL, NULL)); ++ char* __old = setlocale(LC_ALL, NULL); ++ const size_t __llen = strlen(__old) + 1; ++ char* __sav = new char[__llen]; + setlocale(LC_ALL, __name); + #endif + +@@ -477,8 +484,8 @@ namespace std + #ifdef __UCLIBC_HAS_XLOCALE__ + __uselocale(__old); + #else +- setlocale(LC_ALL, __old); +- free(__old); ++ setlocale(LC_ALL, __sav); ++ delete [] __sav; + #endif + __throw_exception_again; + } +@@ -498,8 +505,8 @@ namespace std + #ifdef __UCLIBC_HAS_XLOCALE__ + __uselocale(__old); + #else +- setlocale(LC_ALL, __old); +- free(__old); ++ setlocale(LC_ALL, __sav); ++ delete [] __sav; + #endif + } + } +@@ -545,8 +552,11 @@ namespace std + __c_locale __old = __uselocale(__cloc); + #else + // Switch to named locale so that mbsrtowcs will work. 
+- char* __old = strdup(setlocale(LC_ALL, NULL)); +- setlocale(LC_ALL, __name); ++ char* __old = setlocale(LC_ALL, NULL); ++ const size_t __llen = strlen(__old) + 1; ++ char* __sav = new char[__llen]; ++ memcpy(__sav, __old, __llen); ++ setlocale(LC_ALL, __name); + #endif + + #ifdef __UCLIBC_MJN3_ONLY__ +@@ -633,8 +643,8 @@ namespace std + #ifdef __UCLIBC_HAS_XLOCALE__ + __uselocale(__old); + #else +- setlocale(LC_ALL, __old); +- free(__old); ++ setlocale(LC_ALL, __sav); ++ delete [] __sav; + #endif + __throw_exception_again; + } +@@ -653,8 +663,8 @@ namespace std + #ifdef __UCLIBC_HAS_XLOCALE__ + __uselocale(__old); + #else +- setlocale(LC_ALL, __old); +- free(__old); ++ setlocale(LC_ALL, __sav); ++ delete [] __sav; + #endif + } + } +diff --git a/libstdc++-v3/config/locale/uclibc/numeric_members.cc b/libstdc++-v3/config/locale/uclibc/numeric_members.cc +index d5c8961..8ae8969 100644 +--- a/libstdc++-v3/config/locale/uclibc/numeric_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/numeric_members.cc +@@ -33,9 +33,14 @@ + + // Written by Benjamin Kosnik + ++#include ++#ifdef __UCLIBC_HAS_LOCALE__ + #define _LIBC + #include + #undef _LIBC ++#else ++#include ++#endif + #include + + #ifdef __UCLIBC_MJN3_ONLY__ +diff --git a/libstdc++-v3/config/locale/uclibc/time_members.cc b/libstdc++-v3/config/locale/uclibc/time_members.cc +index d848ed5..f24d53e 100644 +--- a/libstdc++-v3/config/locale/uclibc/time_members.cc ++++ b/libstdc++-v3/config/locale/uclibc/time_members.cc +@@ -53,11 +53,14 @@ namespace std + const size_t __len = __strftime_l(__s, __maxlen, __format, __tm, + _M_c_locale_timepunct); + #else +- char* __old = strdup(setlocale(LC_ALL, NULL)); ++ char* __old = setlocale(LC_ALL, NULL); ++ const size_t __llen = strlen(__old) + 1; ++ char* __sav = new char[__llen]; ++ memcpy(__sav, __old, __llen); + setlocale(LC_ALL, _M_name_timepunct); + const size_t __len = strftime(__s, __maxlen, __format, __tm); +- setlocale(LC_ALL, __old); +- free(__old); ++ setlocale(LC_ALL, __sav); ++ delete [] __sav; + #endif + // Make sure __s is null terminated. + if (__len == 0) +@@ -207,11 +210,14 @@ namespace std + const size_t __len = __wcsftime_l(__s, __maxlen, __format, __tm, + _M_c_locale_timepunct); + #else +- char* __old = strdup(setlocale(LC_ALL, NULL)); ++ char* __old = setlocale(LC_ALL, NULL); ++ const size_t __llen = strlen(__old) + 1; ++ char* __sav = new char[__llen]; ++ memcpy(__sav, __old, __llen); + setlocale(LC_ALL, _M_name_timepunct); + const size_t __len = wcsftime(__s, __maxlen, __format, __tm); +- setlocale(LC_ALL, __old); +- free(__old); ++ setlocale(LC_ALL, __sav); ++ delete [] __sav; + #endif + // Make sure __s is null terminated. + if (__len == 0) +diff --git a/libstdc++-v3/config/locale/uclibc/time_members.h b/libstdc++-v3/config/locale/uclibc/time_members.h +index ba8e858..1665dde 100644 +--- a/libstdc++-v3/config/locale/uclibc/time_members.h ++++ b/libstdc++-v3/config/locale/uclibc/time_members.h +@@ -50,12 +50,21 @@ + __timepunct<_CharT>::__timepunct(__c_locale __cloc, const char* __s, + size_t __refs) + : facet(__refs), _M_data(NULL), _M_c_locale_timepunct(NULL), +- _M_name_timepunct(__s) ++ _M_name_timepunct(NULL) + { +- char* __tmp = new char[std::strlen(__s) + 1]; +- std::strcpy(__tmp, __s); ++ const size_t __len = std::strlen(__s) + 1; ++ char* __tmp = new char[__len]; ++ std::memcpy(__tmp, __s, __len); + _M_name_timepunct = __tmp; +- _M_initialize_timepunct(__cloc); ++ ++ try ++ { _M_initialize_timepunct(__cloc); } ++ catch(...) 
++ { ++ delete [] _M_name_timepunct; ++ __throw_exception_again; ++ } ++ + } + + template +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0008-missing-execinfo_h.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0008-missing-execinfo_h.patch new file mode 100644 index 000000000..01e7c9549 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0008-missing-execinfo_h.patch @@ -0,0 +1,28 @@ +From 6dde3da24ef4b9b357bca670d8551cab3fdda843 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 08:48:10 +0400 +Subject: [PATCH 08/46] missing-execinfo_h + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + boehm-gc/include/gc.h | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/boehm-gc/include/gc.h b/boehm-gc/include/gc.h +index 6b38f2d..fca98ff 100644 +--- a/boehm-gc/include/gc.h ++++ b/boehm-gc/include/gc.h +@@ -503,7 +503,7 @@ GC_API GC_PTR GC_malloc_atomic_ignore_off_page GC_PROTO((size_t lb)); + #if defined(__linux__) || defined(__GLIBC__) + # include + # if (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 1 || __GLIBC__ > 2) \ +- && !defined(__ia64__) ++ && !defined(__ia64__) && !defined(__UCLIBC__) + # ifndef GC_HAVE_BUILTIN_BACKTRACE + # define GC_HAVE_BUILTIN_BACKTRACE + # endif +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0009-c99-snprintf.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0009-c99-snprintf.patch new file mode 100644 index 000000000..d62341ac6 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0009-c99-snprintf.patch @@ -0,0 +1,28 @@ +From b794f1f1c1c7c06f3f0d78cf76c4fb90c2ab8dfb Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 08:49:03 +0400 +Subject: [PATCH 09/46] c99-snprintf + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + libstdc++-v3/include/c_std/cstdio | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/libstdc++-v3/include/c_std/cstdio b/libstdc++-v3/include/c_std/cstdio +index a4119ba..8396f43 100644 +--- a/libstdc++-v3/include/c_std/cstdio ++++ b/libstdc++-v3/include/c_std/cstdio +@@ -144,7 +144,7 @@ namespace std + using ::vsprintf; + } // namespace std + +-#if _GLIBCXX_USE_C99_STDIO ++#if _GLIBCXX_USE_C99_STDIO || defined(__UCLIBC__) + + #undef snprintf + #undef vfscanf +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0010-gcc-poison-system-directories.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0010-gcc-poison-system-directories.patch new file mode 100644 index 000000000..ac4cf442d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0010-gcc-poison-system-directories.patch @@ -0,0 +1,192 @@ +From d76250323dad69212c958e4857a98d99ab51a39e Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 08:59:00 +0400 +Subject: [PATCH 10/46] gcc: poison-system-directories + +Signed-off-by: Khem Raj + +Upstream-Status: Inappropriate [distribution: codesourcery] +--- + gcc/common.opt | 4 ++++ + gcc/config.in | 6 ++++++ + gcc/configure | 16 ++++++++++++++++ + gcc/configure.ac | 10 ++++++++++ + gcc/doc/invoke.texi | 9 +++++++++ + gcc/gcc.c | 2 ++ + gcc/incpath.c | 19 +++++++++++++++++++ + 7 files changed, 66 insertions(+) + +diff --git a/gcc/common.opt b/gcc/common.opt +index 67048db..733185c 100644 +--- a/gcc/common.opt ++++ b/gcc/common.opt +@@ -659,6 +659,10 @@ Wreturn-local-addr + Common Var(warn_return_local_addr) Init(1) Warning + Warn 
about returning a pointer/reference to a local or temporary variable. + ++Wpoison-system-directories ++Common Var(flag_poison_system_directories) Init(1) Warning ++Warn for -I and -L options using system directories if cross compiling ++ + Wshadow + Common Var(warn_shadow) Warning + Warn when one local variable shadows another. +diff --git a/gcc/config.in b/gcc/config.in +index 115cb61..105b30f 100644 +--- a/gcc/config.in ++++ b/gcc/config.in +@@ -187,6 +187,12 @@ + #endif + + ++/* Define to warn for use of native system header directories */ ++#ifndef USED_FOR_TARGET ++#undef ENABLE_POISON_SYSTEM_DIRECTORIES ++#endif ++ ++ + /* Define if you want all operations on RTL (the basic data structure of the + optimizer and back end) to be checked for dynamic type safety at runtime. + This is quite expensive. */ +diff --git a/gcc/configure b/gcc/configure +index 1c6e340..8f83152 100755 +--- a/gcc/configure ++++ b/gcc/configure +@@ -942,6 +942,7 @@ with_system_zlib + enable_maintainer_mode + enable_link_mutex + enable_version_specific_runtime_libs ++enable_poison_system_directories + enable_plugin + enable_host_shared + enable_libquadmath_support +@@ -1681,6 +1682,8 @@ Optional Features: + --enable-version-specific-runtime-libs + specify that runtime libraries should be installed + in a compiler-specific directory ++ --enable-poison-system-directories ++ warn for use of native system header directories + --enable-plugin enable plugin support + --enable-host-shared build host code as shared libraries + --disable-libquadmath-support +@@ -28908,6 +28911,19 @@ if test "${enable_version_specific_runtime_libs+set}" = set; then : + fi + + ++# Check whether --enable-poison-system-directories was given. ++if test "${enable_poison_system_directories+set}" = set; then : ++ enableval=$enable_poison_system_directories; ++else ++ enable_poison_system_directories=no ++fi ++ ++if test "x${enable_poison_system_directories}" = "xyes"; then ++ ++$as_echo "#define ENABLE_POISON_SYSTEM_DIRECTORIES 1" >>confdefs.h ++ ++fi ++ + # Substitute configuration variables + + +diff --git a/gcc/configure.ac b/gcc/configure.ac +index 6c1dcd9..0fccaef 100644 +--- a/gcc/configure.ac ++++ b/gcc/configure.ac +@@ -5861,6 +5861,16 @@ AC_ARG_ENABLE(version-specific-runtime-libs, + [specify that runtime libraries should be + installed in a compiler-specific directory])]) + ++AC_ARG_ENABLE([poison-system-directories], ++ AS_HELP_STRING([--enable-poison-system-directories], ++ [warn for use of native system header directories]),, ++ [enable_poison_system_directories=no]) ++if test "x${enable_poison_system_directories}" = "xyes"; then ++ AC_DEFINE([ENABLE_POISON_SYSTEM_DIRECTORIES], ++ [1], ++ [Define to warn for use of native system header directories]) ++fi ++ + # Substitute configuration variables + AC_SUBST(subdirs) + AC_SUBST(srcdir) +diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi +index 821f8fd..8bb49e7 100644 +--- a/gcc/doc/invoke.texi ++++ b/gcc/doc/invoke.texi +@@ -284,6 +284,7 @@ Objective-C and Objective-C++ Dialects}. + -Wparentheses -Wno-pedantic-ms-format @gol + -Wplacement-new -Wplacement-new=@var{n} @gol + -Wpointer-arith -Wno-pointer-to-int-cast @gol ++-Wno-poison-system-directories @gol + -Wno-pragmas -Wredundant-decls -Wno-return-local-addr @gol + -Wreturn-type -Wsequence-point -Wshadow -Wno-shadow-ivar @gol + -Wshift-overflow -Wshift-overflow=@var{n} @gol +@@ -4723,6 +4724,14 @@ made up of data only and thus requires no special treatment. 
But, for + most targets, it is made up of code and thus requires the stack to be + made executable in order for the program to work properly. + ++@item -Wno-poison-system-directories ++@opindex Wno-poison-system-directories ++Do not warn for @option{-I} or @option{-L} options using system ++directories such as @file{/usr/include} when cross compiling. This ++option is intended for use in chroot environments when such ++directories contain the correct headers and libraries for the target ++system rather than the host. ++ + @item -Wfloat-equal + @opindex Wfloat-equal + @opindex Wno-float-equal +diff --git a/gcc/gcc.c b/gcc/gcc.c +index 1af5920..4cfef7f 100644 +--- a/gcc/gcc.c ++++ b/gcc/gcc.c +@@ -1017,6 +1017,8 @@ proper position among the other output files. */ + "%{fuse-ld=*:-fuse-ld=%*} " LINK_COMPRESS_DEBUG_SPEC \ + "%X %{o*} %{e*} %{N} %{n} %{r}\ + %{s} %{t} %{u*} %{z} %{Z} %{!nostdlib:%{!nostartfiles:%S}} \ ++ %{Wno-poison-system-directories:--no-poison-system-directories} \ ++ %{Werror=poison-system-directories:--error-poison-system-directories} \ + %{static:} %{L*} %(mfwrap) %(link_libgcc) " \ + VTABLE_VERIFICATION_SPEC " " SANITIZER_EARLY_SPEC " %o " CHKP_SPEC " \ + %{fopenacc|fopenmp|%:gt(%{ftree-parallelize-loops=*:%*} 1):\ +diff --git a/gcc/incpath.c b/gcc/incpath.c +index ea40f4a..856da41 100644 +--- a/gcc/incpath.c ++++ b/gcc/incpath.c +@@ -26,6 +26,7 @@ + #include "intl.h" + #include "incpath.h" + #include "cppdefault.h" ++#include "diagnostic-core.h" + + /* Microsoft Windows does not natively support inodes. + VMS has non-numeric inodes. */ +@@ -381,6 +382,24 @@ merge_include_chains (const char *sysroot, cpp_reader *pfile, int verbose) + } + fprintf (stderr, _("End of search list.\n")); + } ++ ++#ifdef ENABLE_POISON_SYSTEM_DIRECTORIES ++ if (flag_poison_system_directories) ++ { ++ struct cpp_dir *p; ++ ++ for (p = heads[QUOTE]; p; p = p->next) ++ { ++ if ((!strncmp (p->name, "/usr/include", 12)) ++ || (!strncmp (p->name, "/usr/local/include", 18)) ++ || (!strncmp (p->name, "/usr/X11R6/include", 18))) ++ warning (OPT_Wpoison_system_directories, ++ "include location \"%s\" is unsafe for " ++ "cross-compilation", ++ p->name); ++ } ++ } ++#endif + } + + /* Use given -I paths for #include "..." but not #include <...>, and +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0011-gcc-poison-dir-extend.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0011-gcc-poison-dir-extend.patch new file mode 100644 index 000000000..a1736aea1 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0011-gcc-poison-dir-extend.patch @@ -0,0 +1,39 @@ +From a1c24b59def393e43cd50b6768604a212c788ed3 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:00:34 +0400 +Subject: [PATCH 11/46] gcc-poison-dir-extend + +Add /sw/include and /opt/include based on the original +zecke-no-host-includes.patch patch. The original patch checked for +/usr/include, /sw/include and /opt/include and then triggered a failure and +aborted. + +Instead, we add the two missing items to the current scan. If the user +wants this to be a failure, they can add "-Werror=poison-system-directories". 
+ +Signed-off-by: Mark Hatle +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + gcc/incpath.c | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/gcc/incpath.c b/gcc/incpath.c +index 856da41..d2fc82c 100644 +--- a/gcc/incpath.c ++++ b/gcc/incpath.c +@@ -392,7 +392,9 @@ merge_include_chains (const char *sysroot, cpp_reader *pfile, int verbose) + { + if ((!strncmp (p->name, "/usr/include", 12)) + || (!strncmp (p->name, "/usr/local/include", 18)) +- || (!strncmp (p->name, "/usr/X11R6/include", 18))) ++ || (!strncmp (p->name, "/usr/X11R6/include", 18)) ++ || (!strncmp (p->name, "/sw/include", 11)) ++ || (!strncmp (p->name, "/opt/include", 12))) + warning (OPT_Wpoison_system_directories, + "include location \"%s\" is unsafe for " + "cross-compilation", +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch new file mode 100644 index 000000000..939b0705f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0012-gcc-4.3.3-SYSROOT_CFLAGS_FOR_TARGET.patch @@ -0,0 +1,73 @@ +From 00ef5f0f2a8d3b33aeb1e55c0d23439f4dd495af Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:08:31 +0400 +Subject: [PATCH 12/46] gcc-4.3.3: SYSROOT_CFLAGS_FOR_TARGET + +Before committing, I noticed that PR/32161 was marked as a dup of PR/32009, but my previous patch did not fix it. + +This alternative patch is better because it lets you just use CFLAGS_FOR_TARGET to set the compilation flags for libgcc. Since bootstrapped target libraries are never compiled with the native compiler, it makes little sense to use different flags for stage1 and later stages. And it also makes little sense to use a different variable than CFLAGS_FOR_TARGET. + +Other changes I had to do include: + +- moving the creation of default CFLAGS_FOR_TARGET from Makefile.am to configure.ac, because otherwise the BOOT_CFLAGS are substituted into CFLAGS_FOR_TARGET (which is "-O2 -g $(CFLAGS)") via $(CFLAGS). It is also cleaner this way though. + +- passing the right CFLAGS to configure scripts as exported environment variables + +I also stopped passing LIBCFLAGS to configure scripts since they are unused in the whole src tree. And I updated the documentation as H-P reminded me to do. + +Bootstrapped/regtested i686-pc-linux-gnu, will commit to 4.4 shortly. Ok for 4.3? + +Signed-off-by: Paolo Bonzini +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + configure | 32 ++++++++++++++++++++++++++++++++ + 1 file changed, 32 insertions(+) + +diff --git a/configure b/configure +index bfadc33..755d382 100755 +--- a/configure ++++ b/configure +@@ -6819,6 +6819,38 @@ fi + + + ++# During gcc bootstrap, if we use some random cc for stage1 then CFLAGS ++# might be empty or "-g". We don't require a C++ compiler, so CXXFLAGS ++# might also be empty (or "-g", if a non-GCC C++ compiler is in the path). ++# We want to ensure that TARGET libraries (which we know are built with ++# gcc) are built with "-O2 -g", so include those options when setting ++# CFLAGS_FOR_TARGET and CXXFLAGS_FOR_TARGET. 
++if test "x$CFLAGS_FOR_TARGET" = x; then ++ CFLAGS_FOR_TARGET=$CFLAGS ++ case " $CFLAGS " in ++ *" -O2 "*) ;; ++ *) CFLAGS_FOR_TARGET="-O2 $CFLAGS" ;; ++ esac ++ case " $CFLAGS " in ++ *" -g "* | *" -g3 "*) ;; ++ *) CFLAGS_FOR_TARGET="-g $CFLAGS" ;; ++ esac ++fi ++ ++ ++if test "x$CXXFLAGS_FOR_TARGET" = x; then ++ CXXFLAGS_FOR_TARGET=$CXXFLAGS ++ case " $CXXFLAGS " in ++ *" -O2 "*) ;; ++ *) CXXFLAGS_FOR_TARGET="-O2 $CXXFLAGS" ;; ++ esac ++ case " $CXXFLAGS " in ++ *" -g "* | *" -g3 "*) ;; ++ *) CXXFLAGS_FOR_TARGET="-g $CXXFLAGS" ;; ++ esac ++fi ++ ++ + # Handle --with-headers=XXX. If the value is not "yes", the contents of + # the named directory are copied to $(tooldir)/sys-include. + if test x"${with_headers}" != x && test x"${with_headers}" != xno ; then +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0013-64-bit-multilib-hack.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0013-64-bit-multilib-hack.patch new file mode 100644 index 000000000..e31cde431 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0013-64-bit-multilib-hack.patch @@ -0,0 +1,85 @@ +From 7fc7a070ac53a55950a1eac76f02877d4106b4b3 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:10:06 +0400 +Subject: [PATCH 13/46] 64-bit multilib hack. + +GCC has internal multilib handling code but it assumes a very specific rigid directory +layout. The build system implementation of multilib layout is very generic and allows +complete customisation of the library directories. + +This patch is a partial solution to allow any custom directories to be passed into gcc +and handled correctly. It forces gcc to use the base_libdir (which is the current +directory, "."). We need to do this for each multilib that is configured as we don't +know which compiler options may be being passed into the compiler. Since we have a compiler +per mulitlib at this point that isn't an issue. + +The one problem is the target compiler is only going to work for the default multlilib at +this point. Ideally we'd figure out which multilibs were being enabled with which paths +and be able to patch these entries with a complete set of correct paths but this we +don't have such code at this point. This is something the target gcc recipe should do +and override these platform defaults in its build config. + +RP 15/8/11 + +Signed-off-by: Khem Raj +Signed-off-by: Elvis Dowson + +Upstream-Status: Pending +--- + gcc/config/i386/t-linux64 | 6 ++---- + gcc/config/mips/t-linux64 | 10 +++------- + gcc/config/rs6000/t-linux64 | 5 ++--- + 3 files changed, 7 insertions(+), 14 deletions(-) + +diff --git a/gcc/config/i386/t-linux64 b/gcc/config/i386/t-linux64 +index c0cc8a3..365a5d6 100644 +--- a/gcc/config/i386/t-linux64 ++++ b/gcc/config/i386/t-linux64 +@@ -32,7 +32,5 @@ + # + comma=, + MULTILIB_OPTIONS = $(subst $(comma),/,$(TM_MULTILIB_CONFIG)) +-MULTILIB_DIRNAMES = $(patsubst m%, %, $(subst /, ,$(MULTILIB_OPTIONS))) +-MULTILIB_OSDIRNAMES = m64=../lib64$(call if_multiarch,:x86_64-linux-gnu) +-MULTILIB_OSDIRNAMES+= m32=$(if $(wildcard $(shell echo $(SYSTEM_HEADER_DIR))/../../usr/lib32),../lib32,../lib)$(call if_multiarch,:i386-linux-gnu) +-MULTILIB_OSDIRNAMES+= mx32=../libx32$(call if_multiarch,:x86_64-linux-gnux32) ++MULTILIB_DIRNAMES = . . 
++MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) +diff --git a/gcc/config/mips/t-linux64 b/gcc/config/mips/t-linux64 +index 16c8adf..b932ace 100644 +--- a/gcc/config/mips/t-linux64 ++++ b/gcc/config/mips/t-linux64 +@@ -17,10 +17,6 @@ + # . + + MULTILIB_OPTIONS = mabi=n32/mabi=32/mabi=64 +-MULTILIB_DIRNAMES = n32 32 64 +-MIPS_EL = $(if $(filter %el, $(firstword $(subst -, ,$(target)))),el) +-MIPS_SOFT = $(if $(strip $(filter MASK_SOFT_FLOAT_ABI, $(target_cpu_default)) $(filter soft, $(with_float))),soft) +-MULTILIB_OSDIRNAMES = \ +- ../lib32$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabin32$(MIPS_SOFT)) \ +- ../lib$(call if_multiarch,:mips$(MIPS_EL)-linux-gnu$(MIPS_SOFT)) \ +- ../lib64$(call if_multiarch,:mips64$(MIPS_EL)-linux-gnuabi64$(MIPS_SOFT)) ++MULTILIB_DIRNAMES = . . . ++MULTILIB_OSDIRNAMES = ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) ++ +diff --git a/gcc/config/rs6000/t-linux64 b/gcc/config/rs6000/t-linux64 +index fa7550b..9b5d9ef 100644 +--- a/gcc/config/rs6000/t-linux64 ++++ b/gcc/config/rs6000/t-linux64 +@@ -26,10 +26,9 @@ + # MULTILIB_OSDIRNAMES according to what is found on the target. + + MULTILIB_OPTIONS := m64/m32 +-MULTILIB_DIRNAMES := 64 32 ++MULTILIB_DIRNAMES := . . + MULTILIB_EXTRA_OPTS := +-MULTILIB_OSDIRNAMES := m64=../lib64$(call if_multiarch,:powerpc64-linux-gnu) +-MULTILIB_OSDIRNAMES += m32=$(if $(wildcard $(shell echo $(SYSTEM_HEADER_DIR))/../../usr/lib32),../lib32,../lib)$(call if_multiarch,:powerpc-linux-gnu) ++MULTILIB_OSDIRNAMES := ../$(shell basename $(base_libdir)) ../$(shell basename $(base_libdir)) + + rs6000-linux.o: $(srcdir)/config/rs6000/rs6000-linux.c + $(COMPILE) $< +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0014-optional-libstdc.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0014-optional-libstdc.patch new file mode 100644 index 000000000..44b0cc7d6 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0014-optional-libstdc.patch @@ -0,0 +1,125 @@ +From 36275f7981bdaf919bbc9b51a7c7fae1e192adb3 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:12:56 +0400 +Subject: [PATCH 14/46] optional libstdc + +gcc-runtime builds libstdc++ separately from gcc-cross-*. Its configure tests using g++ +will not run correctly since by default the linker will try to link against libstdc++ +which shouldn't exist yet. We need an option to disable -lstdc++ +option whilst leaving -lc, -lgcc and other automatic library dependencies added by gcc +driver. This patch adds such an option which only disables the -lstdc++. + +A "standard" gcc build uses xgcc and hence avoids this. We should ask upstream how to +do this officially, the likely answer is don't build libstdc++ separately. + +RP 29/6/10 + +Signed-off-by: Khem Raj + +Upstream-Status: Inappropriate [embedded specific] +--- + gcc/c-family/c.opt | 4 ++++ + gcc/cp/g++spec.c | 1 + + gcc/doc/invoke.texi | 32 +++++++++++++++++++++++++++++++- + gcc/gcc.c | 1 + + 4 files changed, 37 insertions(+), 1 deletion(-) + +diff --git a/gcc/c-family/c.opt b/gcc/c-family/c.opt +index 4f86876..660da6c 100644 +--- a/gcc/c-family/c.opt ++++ b/gcc/c-family/c.opt +@@ -1647,6 +1647,10 @@ nostdinc++ + C++ ObjC++ + Do not search standard system include directories for C++. 
+ ++nostdlib++ ++Driver ++Do not link standard C++ runtime library ++ + o + C ObjC C++ ObjC++ Joined Separate + ; Documented in common.opt +diff --git a/gcc/cp/g++spec.c b/gcc/cp/g++spec.c +index 03cbde0..3642540 100644 +--- a/gcc/cp/g++spec.c ++++ b/gcc/cp/g++spec.c +@@ -137,6 +137,7 @@ lang_specific_driver (struct cl_decoded_option **in_decoded_options, + switch (decoded_options[i].opt_index) + { + case OPT_nostdlib: ++ case OPT_nostdlib__: + case OPT_nodefaultlibs: + library = -1; + break; +diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi +index 8bb49e7..84d68aa 100644 +--- a/gcc/doc/invoke.texi ++++ b/gcc/doc/invoke.texi +@@ -207,6 +207,9 @@ in the following sections. + -fno-weak -nostdinc++ @gol + -fvisibility-inlines-hidden @gol + -fvisibility-ms-compat @gol ++-fvtable-verify=@r{[}std@r{|}preinit@r{|}none@r{]} @gol ++-fvtv-counts -fvtv-debug @gol ++-nostdlib++ @gol + -fext-numeric-literals @gol + -Wabi=@var{n} -Wabi-tag -Wconversion-null -Wctor-dtor-privacy @gol + -Wdelete-non-virtual-dtor -Wliteral-suffix -Wmultiple-inheritance @gol +@@ -470,7 +473,7 @@ Objective-C and Objective-C++ Dialects}. + -s -static -static-libgcc -static-libstdc++ @gol + -static-libasan -static-libtsan -static-liblsan -static-libubsan @gol + -static-libmpx -static-libmpxwrappers @gol +--shared -shared-libgcc -symbolic @gol ++-shared -shared-libgcc -symbolic -nostdlib++ @gol + -T @var{script} -Wl,@var{option} -Xlinker @var{option} @gol + -u @var{symbol} -z @var{keyword}} + +@@ -10601,6 +10604,33 @@ library subroutines. + constructors are called; @pxref{Collect2,,@code{collect2}, gccint, + GNU Compiler Collection (GCC) Internals}.) + ++@item -nostdlib++ ++@opindex nostdlib++ ++Do not use the standard system C++ runtime libraries when linking. ++Only the libraries you specify will be passed to the linker. ++ ++@cindex @option{-lgcc}, use with @option{-nostdlib} ++@cindex @option{-nostdlib} and unresolved references ++@cindex unresolved references and @option{-nostdlib} ++@cindex @option{-lgcc}, use with @option{-nodefaultlibs} ++@cindex @option{-nodefaultlibs} and unresolved references ++@cindex unresolved references and @option{-nodefaultlibs} ++One of the standard libraries bypassed by @option{-nostdlib} and ++@option{-nodefaultlibs} is @file{libgcc.a}, a library of internal subroutines ++which GCC uses to overcome shortcomings of particular machines, or special ++needs for some languages. ++(@xref{Interface,,Interfacing to GCC Output,gccint,GNU Compiler ++Collection (GCC) Internals}, ++for more discussion of @file{libgcc.a}.) ++In most cases, you need @file{libgcc.a} even when you want to avoid ++other standard libraries. In other words, when you specify @option{-nostdlib} ++or @option{-nodefaultlibs} you should usually specify @option{-lgcc} as well. ++This ensures that you have no unresolved references to internal GCC ++library subroutines. ++(An example of such an internal subroutine is @code{__main}, used to ensure C++ ++constructors are called; @pxref{Collect2,,@code{collect2}, gccint, ++GNU Compiler Collection (GCC) Internals}.) ++ + @item -pie + @opindex pie + Produce a position independent executable on targets that support it. +diff --git a/gcc/gcc.c b/gcc/gcc.c +index 4cfef7f..da0b482 100644 +--- a/gcc/gcc.c ++++ b/gcc/gcc.c +@@ -1028,6 +1028,7 @@ proper position among the other output files. 
*/ + %(mflib) " STACK_SPLIT_SPEC "\ + %{fprofile-arcs|fprofile-generate*|coverage:-lgcov} " SANITIZER_SPEC " \ + %{!nostdlib:%{!nodefaultlibs:%(link_ssp) %(link_gcc_c_sequence)}}\ ++ %{!nostdlib++:}\ + %{!nostdlib:%{!nostartfiles:%E}} %{T*} \n%(post_link) }}}}}}" + #endif + +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch new file mode 100644 index 000000000..6fc7346f6 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0015-gcc-disable-MASK_RELAX_PIC_CALLS-bit.patch @@ -0,0 +1,59 @@ +From 9346f6042d8f7f85a75ca2af15f3b8d234985165 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:14:20 +0400 +Subject: [PATCH 15/46] gcc: disable MASK_RELAX_PIC_CALLS bit + +The new feature added after 4.3.3 +"http://www.pubbs.net/200909/gcc/94048-patch-add-support-for-rmipsjalr.html" +will cause cc1plus eat up all the system memory when build webkit-gtk. +The function mips_get_pic_call_symbol keeps on recursively calling itself. +Disable this feature to walk aside the bug. + +Signed-off-by: Dongxiao Xu +Signed-off-by: Khem Raj + +Upstream-Status: Inappropriate [configuration] +--- + gcc/configure | 7 ------- + gcc/configure.ac | 7 ------- + 2 files changed, 14 deletions(-) + +diff --git a/gcc/configure b/gcc/configure +index 8f83152..5e72f17 100755 +--- a/gcc/configure ++++ b/gcc/configure +@@ -27018,13 +27018,6 @@ $as_echo_n "checking assembler and linker for explicit JALR relocation... " >&6; + rm -f conftest.* + fi + fi +- if test $gcc_cv_as_ld_jalr_reloc = yes; then +- if test x$target_cpu_default = x; then +- target_cpu_default=MASK_RELAX_PIC_CALLS +- else +- target_cpu_default="($target_cpu_default)|MASK_RELAX_PIC_CALLS" +- fi +- fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gcc_cv_as_ld_jalr_reloc" >&5 + $as_echo "$gcc_cv_as_ld_jalr_reloc" >&6; } + +diff --git a/gcc/configure.ac b/gcc/configure.ac +index 0fccaef..dd8f6fa 100644 +--- a/gcc/configure.ac ++++ b/gcc/configure.ac +@@ -4522,13 +4522,6 @@ x: + rm -f conftest.* + fi + fi +- if test $gcc_cv_as_ld_jalr_reloc = yes; then +- if test x$target_cpu_default = x; then +- target_cpu_default=MASK_RELAX_PIC_CALLS +- else +- target_cpu_default="($target_cpu_default)|MASK_RELAX_PIC_CALLS" +- fi +- fi + AC_MSG_RESULT($gcc_cv_as_ld_jalr_reloc) + + AC_CACHE_CHECK([linker for .eh_frame personality relaxation], +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0016-COLLECT_GCC_OPTIONS.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0016-COLLECT_GCC_OPTIONS.patch new file mode 100644 index 000000000..c1548647c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0016-COLLECT_GCC_OPTIONS.patch @@ -0,0 +1,38 @@ +From 1033bc2d4efc5c301bb822b607a673f5b10cc69f Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:16:28 +0400 +Subject: [PATCH 16/46] COLLECT_GCC_OPTIONS + +This patch adds --sysroot into COLLECT_GCC_OPTIONS which is used to +invoke collect2. 
+
+Signed-off-by: Khem Raj
+
+Upstream-Status: Pending
+---
+ gcc/gcc.c | 9 +++++++++
+ 1 file changed, 9 insertions(+)
+
+diff --git a/gcc/gcc.c b/gcc/gcc.c
+index da0b482..7ca129f 100644
+--- a/gcc/gcc.c
++++ b/gcc/gcc.c
+@@ -4591,6 +4591,15 @@ set_collect_gcc_options (void)
+ sizeof ("COLLECT_GCC_OPTIONS=") - 1);
+ 
+ first_time = TRUE;
++#ifdef HAVE_LD_SYSROOT
++ if (target_system_root_changed && target_system_root)
++ {
++ obstack_grow (&collect_obstack, "'--sysroot=", sizeof("'--sysroot=")-1);
++ obstack_grow (&collect_obstack, target_system_root,strlen(target_system_root));
++ obstack_grow (&collect_obstack, "'", 1);
++ first_time = FALSE;
++ }
++#endif
+ for (i = 0; (int) i < n_switches; i++)
+ {
+ const char *const *args;
+--
+2.8.2
+
diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0017-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0017-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch
new file mode 100644
index 000000000..0dbabd9e9
--- /dev/null
+++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0017-Use-the-defaults.h-in-B-instead-of-S-and-t-oe-in-B.patch
@@ -0,0 +1,96 @@
+From 67123a306880af997a99ae514677c2da4973bd1a Mon Sep 17 00:00:00 2001
+From: Khem Raj
+Date: Fri, 29 Mar 2013 09:17:25 +0400
+Subject: [PATCH 17/46] Use the defaults.h in ${B} instead of ${S}, and t-oe in
+ ${B}
+
+Use the defaults.h in ${B} instead of ${S}, and t-oe in ${B}, so that
+the source can be shared between gcc-cross-initial,
+gcc-cross-intermediate, gcc-cross, gcc-runtime, and also the sdk build.
+
+Signed-off-by: Khem Raj
+
+Upstream-Status: Pending
+
+While compiling gcc-crosssdk-initial-x86_64 on some hosts, there is
+occasionally a failure where the test for the existence of defaults.h doesn't
+work; the reason is tm_include_list='** defaults.h' rather than
+tm_include_list='** ./defaults.h'
+
+So we add the test condition for this situation.
+Signed-off-by: Hongxu Jia +--- + gcc/Makefile.in | 2 +- + gcc/configure | 4 ++-- + gcc/configure.ac | 4 ++-- + gcc/mkconfig.sh | 4 ++-- + 4 files changed, 7 insertions(+), 7 deletions(-) + +diff --git a/gcc/Makefile.in b/gcc/Makefile.in +index 6c5adc0..9a1d466 100644 +--- a/gcc/Makefile.in ++++ b/gcc/Makefile.in +@@ -517,7 +517,7 @@ TARGET_SYSTEM_ROOT = @TARGET_SYSTEM_ROOT@ + TARGET_SYSTEM_ROOT_DEFINE = @TARGET_SYSTEM_ROOT_DEFINE@ + + xmake_file=@xmake_file@ +-tmake_file=@tmake_file@ ++tmake_file=@tmake_file@ ./t-oe + TM_ENDIAN_CONFIG=@TM_ENDIAN_CONFIG@ + TM_MULTILIB_CONFIG=@TM_MULTILIB_CONFIG@ + TM_MULTILIB_EXCEPTIONS_CONFIG=@TM_MULTILIB_EXCEPTIONS_CONFIG@ +diff --git a/gcc/configure b/gcc/configure +index 5e72f17..389b6d5 100755 +--- a/gcc/configure ++++ b/gcc/configure +@@ -12130,8 +12130,8 @@ for f in $tm_file; do + tm_include_list="${tm_include_list} $f" + ;; + defaults.h ) +- tm_file_list="${tm_file_list} \$(srcdir)/$f" +- tm_include_list="${tm_include_list} $f" ++ tm_file_list="${tm_file_list} ./$f" ++ tm_include_list="${tm_include_list} ./$f" + ;; + * ) + tm_file_list="${tm_file_list} \$(srcdir)/config/$f" +diff --git a/gcc/configure.ac b/gcc/configure.ac +index dd8f6fa..91ac800 100644 +--- a/gcc/configure.ac ++++ b/gcc/configure.ac +@@ -1883,8 +1883,8 @@ for f in $tm_file; do + tm_include_list="${tm_include_list} $f" + ;; + defaults.h ) +- tm_file_list="${tm_file_list} \$(srcdir)/$f" +- tm_include_list="${tm_include_list} $f" ++ tm_file_list="${tm_file_list} ./$f" ++ tm_include_list="${tm_include_list} ./$f" + ;; + * ) + tm_file_list="${tm_file_list} \$(srcdir)/config/$f" +diff --git a/gcc/mkconfig.sh b/gcc/mkconfig.sh +index 67dfac6..b73e08d 100644 +--- a/gcc/mkconfig.sh ++++ b/gcc/mkconfig.sh +@@ -77,7 +77,7 @@ if [ -n "$HEADERS" ]; then + if [ $# -ge 1 ]; then + echo '#ifdef IN_GCC' >> ${output}T + for file in "$@"; do +- if test x"$file" = x"defaults.h"; then ++ if test x"$file" = x"./defaults.h" -o x"$file" = x"defaults.h"; then + postpone_defaults_h="yes" + else + echo "# include \"$file\"" >> ${output}T +@@ -109,7 +109,7 @@ esac + + # If we postponed including defaults.h, add the #include now. + if test x"$postpone_defaults_h" = x"yes"; then +- echo "# include \"defaults.h\"" >> ${output}T ++ echo "# include \"./defaults.h\"" >> ${output}T + fi + + # Add multiple inclusion protection guard, part two. +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0018-fortran-cross-compile-hack.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0018-fortran-cross-compile-hack.patch new file mode 100644 index 000000000..b43d89ea8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0018-fortran-cross-compile-hack.patch @@ -0,0 +1,46 @@ +From e360dc3e0f1e0b9b001ef722fcf66f8120a03dbc Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:20:01 +0400 +Subject: [PATCH 18/46] fortran cross-compile hack. + +* Fortran would have searched for arm-angstrom-gnueabi-gfortran but would have used +used gfortan. For gcc_4.2.2.bb we want to use the gfortran compiler from our cross +directory. 
+ +Signed-off-by: Khem Raj + +Upstream-Status: Inappropriate [embedded specific] +--- + libgfortran/configure | 2 +- + libgfortran/configure.ac | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/libgfortran/configure b/libgfortran/configure +index f746f6f..b4f3278 100755 +--- a/libgfortran/configure ++++ b/libgfortran/configure +@@ -12734,7 +12734,7 @@ esac + + # We need gfortran to compile parts of the library + #AC_PROG_FC(gfortran) +-FC="$GFORTRAN" ++#FC="$GFORTRAN" + ac_ext=${ac_fc_srcext-f} + ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' + ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' +diff --git a/libgfortran/configure.ac b/libgfortran/configure.ac +index 8f377bb..67710b5 100644 +--- a/libgfortran/configure.ac ++++ b/libgfortran/configure.ac +@@ -240,7 +240,7 @@ AC_SUBST(enable_static) + + # We need gfortran to compile parts of the library + #AC_PROG_FC(gfortran) +-FC="$GFORTRAN" ++#FC="$GFORTRAN" + AC_PROG_FC(gfortran) + + # extra LD Flags which are required for targets +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0019-cpp-honor-sysroot.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0019-cpp-honor-sysroot.patch new file mode 100644 index 000000000..417a5ede4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0019-cpp-honor-sysroot.patch @@ -0,0 +1,54 @@ +From 98d9c0c1b5552294b2130f7304bfb522da323442 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:22:00 +0400 +Subject: [PATCH 19/46] cpp: honor sysroot. + +Currently, if the gcc toolchain is relocated and installed from sstate, then you try and compile +preprocessed source (.i or .ii files), the compiler will try and access the builtin sysroot location +rather than the --sysroot option specified on the commandline. If access to that directory is +permission denied (unreadable), gcc will error. + +This happens when ccache is in use due to the fact it uses preprocessed source files. + +The fix below adds %I to the cpp-output spec macro so the default substitutions for -iprefix, +-isystem, -isysroot happen and the correct sysroot is used. + +[YOCTO #2074] + +RP 2012/04/13 + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + gcc/cp/lang-specs.h | 2 +- + gcc/gcc.c | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/gcc/cp/lang-specs.h b/gcc/cp/lang-specs.h +index 9707fac..fe487a2 100644 +--- a/gcc/cp/lang-specs.h ++++ b/gcc/cp/lang-specs.h +@@ -64,5 +64,5 @@ along with GCC; see the file COPYING3. 
If not see + {".ii", "@c++-cpp-output", 0, 0, 0}, + {"@c++-cpp-output", + "%{!M:%{!MM:%{!E:\ +- cc1plus -fpreprocessed %i %(cc1_options) %2\ ++ cc1plus -fpreprocessed %i %I %(cc1_options) %2\ + %{!fsyntax-only:%(invoke_as)}}}}", 0, 0, 0}, +diff --git a/gcc/gcc.c b/gcc/gcc.c +index 7ca129f..04fa81d 100644 +--- a/gcc/gcc.c ++++ b/gcc/gcc.c +@@ -1329,7 +1329,7 @@ static const struct compiler default_compilers[] = + %W{o*:--output-pch=%*}}%V}}}}}}}", 0, 0, 0}, + {".i", "@cpp-output", 0, 0, 0}, + {"@cpp-output", +- "%{!M:%{!MM:%{!E:cc1 -fpreprocessed %i %(cc1_options) %{!fsyntax-only:%(invoke_as)}}}}", 0, 0, 0}, ++ "%{!M:%{!MM:%{!E:cc1 -fpreprocessed %i %I %(cc1_options) %{!fsyntax-only:%(invoke_as)}}}}", 0, 0, 0}, + {".s", "@assembler", 0, 0, 0}, + {"@assembler", + "%{!M:%{!MM:%{!E:%{!S:as %(asm_debug) %(asm_options) %i %A }}}}", 0, 0, 0}, +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0020-MIPS64-Default-to-N64-ABI.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0020-MIPS64-Default-to-N64-ABI.patch new file mode 100644 index 000000000..ba612f545 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0020-MIPS64-Default-to-N64-ABI.patch @@ -0,0 +1,57 @@ +From 1f15447fbcf65142627af8a9694761534da8d0d1 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:23:08 +0400 +Subject: [PATCH 20/46] MIPS64: Default to N64 ABI + +MIPS64 defaults to n32 ABI, this patch makes it +so that it defaults to N64 ABI + +Signed-off-by: Khem Raj + +Upstream-Status: Inappropriate [OE config specific] +--- + gcc/config.gcc | 10 +++++----- + 1 file changed, 5 insertions(+), 5 deletions(-) + +diff --git a/gcc/config.gcc b/gcc/config.gcc +index f66e48c..9c6d156 100644 +--- a/gcc/config.gcc ++++ b/gcc/config.gcc +@@ -2065,29 +2065,29 @@ mips*-*-linux*) # Linux MIPS, either endian. 
+ default_mips_arch=mips32 + ;; + mips64el-st-linux-gnu) +- default_mips_abi=n32 ++ default_mips_abi=64 + tm_file="${tm_file} mips/st.h" + tmake_file="${tmake_file} mips/t-st" + enable_mips_multilibs="yes" + ;; + mips64octeon*-*-linux*) +- default_mips_abi=n32 ++ default_mips_abi=64 + tm_defines="${tm_defines} MIPS_CPU_STRING_DEFAULT=\\\"octeon\\\"" + target_cpu_default=MASK_SOFT_FLOAT_ABI + enable_mips_multilibs="yes" + ;; + mipsisa64r6*-*-linux*) +- default_mips_abi=n32 ++ default_mips_abi=64 + default_mips_arch=mips64r6 + enable_mips_multilibs="yes" + ;; + mipsisa64r2*-*-linux*) +- default_mips_abi=n32 ++ default_mips_abi=64 + default_mips_arch=mips64r2 + enable_mips_multilibs="yes" + ;; + mips64*-*-linux* | mipsisa64*-*-linux*) +- default_mips_abi=n32 ++ default_mips_abi=64 + enable_mips_multilibs="yes" + ;; + esac +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0021-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0021-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch new file mode 100644 index 000000000..6675ce34f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0021-Define-GLIBC_DYNAMIC_LINKER-and-UCLIBC_DYNAMIC_LINKE.patch @@ -0,0 +1,234 @@ +From 9f73c8918b63e6a1c9b79384fac411d1056eec1c Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:24:50 +0400 +Subject: [PATCH 21/46] Define GLIBC_DYNAMIC_LINKER and UCLIBC_DYNAMIC_LINKER + relative to SYSTEMLIBS_DIR + +This patch defines GLIBC_DYNAMIC_LINKER and UCLIBC_DYNAMIC_LINKER +relative to SYSTEMLIBS_DIR which can be set in generated headers +This breaks the assumption of hardcoded multilib in gcc +Change is only for the supported architectures in OE including +SH, sparc, alpha for possible future support (if any) + +Removes the do_headerfix task in metadata + +Signed-off-by: Khem Raj + +Upstream-Status: Inappropriate [OE configuration] +--- + gcc/config/alpha/linux-elf.h | 4 ++-- + gcc/config/arm/linux-eabi.h | 4 ++-- + gcc/config/arm/linux-elf.h | 2 +- + gcc/config/i386/linux.h | 2 +- + gcc/config/i386/linux64.h | 6 +++--- + gcc/config/linux.h | 8 ++++---- + gcc/config/mips/linux.h | 12 ++++++------ + gcc/config/rs6000/linux64.h | 16 ++++++---------- + gcc/config/sh/linux.h | 2 +- + gcc/config/sparc/linux.h | 2 +- + gcc/config/sparc/linux64.h | 4 ++-- + 11 files changed, 29 insertions(+), 33 deletions(-) + +diff --git a/gcc/config/alpha/linux-elf.h b/gcc/config/alpha/linux-elf.h +index a0764d3..02f7a7c 100644 +--- a/gcc/config/alpha/linux-elf.h ++++ b/gcc/config/alpha/linux-elf.h +@@ -23,8 +23,8 @@ along with GCC; see the file COPYING3. If not see + #define EXTRA_SPECS \ + { "elf_dynamic_linker", ELF_DYNAMIC_LINKER }, + +-#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2" +-#define UCLIBC_DYNAMIC_LINKER "/lib/ld-uClibc.so.0" ++#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2" ++#define UCLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-uClibc.so.0" + #if DEFAULT_LIBC == LIBC_UCLIBC + #define CHOOSE_DYNAMIC_LINKER(G, U) "%{mglibc:" G ";:" U "}" + #elif DEFAULT_LIBC == LIBC_GLIBC +diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h +index ace8481..4010435 100644 +--- a/gcc/config/arm/linux-eabi.h ++++ b/gcc/config/arm/linux-eabi.h +@@ -68,8 +68,8 @@ + GLIBC_DYNAMIC_LINKER_DEFAULT and TARGET_DEFAULT_FLOAT_ABI. 
*/ + + #undef GLIBC_DYNAMIC_LINKER +-#define GLIBC_DYNAMIC_LINKER_SOFT_FLOAT "/lib/ld-linux.so.3" +-#define GLIBC_DYNAMIC_LINKER_HARD_FLOAT "/lib/ld-linux-armhf.so.3" ++#define GLIBC_DYNAMIC_LINKER_SOFT_FLOAT SYSTEMLIBS_DIR "ld-linux.so.3" ++#define GLIBC_DYNAMIC_LINKER_HARD_FLOAT SYSTEMLIBS_DIR "ld-linux-armhf.so.3" + #define GLIBC_DYNAMIC_LINKER_DEFAULT GLIBC_DYNAMIC_LINKER_SOFT_FLOAT + + #define GLIBC_DYNAMIC_LINKER \ +diff --git a/gcc/config/arm/linux-elf.h b/gcc/config/arm/linux-elf.h +index a94bd2d..0220628 100644 +--- a/gcc/config/arm/linux-elf.h ++++ b/gcc/config/arm/linux-elf.h +@@ -62,7 +62,7 @@ + + #define LIBGCC_SPEC "%{mfloat-abi=soft*:-lfloat} -lgcc" + +-#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2" ++#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2" + + #define LINUX_TARGET_LINK_SPEC "%{h*} \ + %{static:-Bstatic} \ +diff --git a/gcc/config/i386/linux.h b/gcc/config/i386/linux.h +index d37a875..edde586 100644 +--- a/gcc/config/i386/linux.h ++++ b/gcc/config/i386/linux.h +@@ -20,7 +20,7 @@ along with GCC; see the file COPYING3. If not see + . */ + + #define GNU_USER_LINK_EMULATION "elf_i386" +-#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2" ++#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2" + + #undef MUSL_DYNAMIC_LINKER + #define MUSL_DYNAMIC_LINKER "/lib/ld-musl-i386.so.1" +diff --git a/gcc/config/i386/linux64.h b/gcc/config/i386/linux64.h +index 73d22e3..f4b5615 100644 +--- a/gcc/config/i386/linux64.h ++++ b/gcc/config/i386/linux64.h +@@ -27,9 +27,9 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see + #define GNU_USER_LINK_EMULATION64 "elf_x86_64" + #define GNU_USER_LINK_EMULATIONX32 "elf32_x86_64" + +-#define GLIBC_DYNAMIC_LINKER32 "/lib/ld-linux.so.2" +-#define GLIBC_DYNAMIC_LINKER64 "/lib64/ld-linux-x86-64.so.2" +-#define GLIBC_DYNAMIC_LINKERX32 "/libx32/ld-linux-x32.so.2" ++#define GLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld-linux.so.2" ++#define GLIBC_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld-linux-x86-64.so.2" ++#define GLIBC_DYNAMIC_LINKERX32 SYSTEMLIBS_DIR "ld-linux-x32.so.2" + + #undef MUSL_DYNAMIC_LINKER32 + #define MUSL_DYNAMIC_LINKER32 "/lib/ld-musl-i386.so.1" +diff --git a/gcc/config/linux.h b/gcc/config/linux.h +index 9aeeb94..b055652 100644 +--- a/gcc/config/linux.h ++++ b/gcc/config/linux.h +@@ -81,10 +81,10 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see + GLIBC_DYNAMIC_LINKER must be defined for each target using them, or + GLIBC_DYNAMIC_LINKER32 and GLIBC_DYNAMIC_LINKER64 for targets + supporting both 32-bit and 64-bit compilation. */ +-#define UCLIBC_DYNAMIC_LINKER "/lib/ld-uClibc.so.0" +-#define UCLIBC_DYNAMIC_LINKER32 "/lib/ld-uClibc.so.0" +-#define UCLIBC_DYNAMIC_LINKER64 "/lib/ld64-uClibc.so.0" +-#define UCLIBC_DYNAMIC_LINKERX32 "/lib/ldx32-uClibc.so.0" ++#define UCLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-uClibc.so.0" ++#define UCLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld-uClibc.so.0" ++#define UCLIBC_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld64-uClibc.so.0" ++#define UCLIBC_DYNAMIC_LINKERX32 SYSTEMLIBS_DIR "ldx32-uClibc.so.0" + #define BIONIC_DYNAMIC_LINKER "/system/bin/linker" + #define BIONIC_DYNAMIC_LINKER32 "/system/bin/linker" + #define BIONIC_DYNAMIC_LINKER64 "/system/bin/linker64" +diff --git a/gcc/config/mips/linux.h b/gcc/config/mips/linux.h +index fa253b6..da02c8d 100644 +--- a/gcc/config/mips/linux.h ++++ b/gcc/config/mips/linux.h +@@ -22,20 +22,20 @@ along with GCC; see the file COPYING3. 
If not see + #define GNU_USER_LINK_EMULATIONN32 "elf32%{EB:b}%{EL:l}tsmipn32" + + #define GLIBC_DYNAMIC_LINKER32 \ +- "%{mnan=2008:/lib/ld-linux-mipsn8.so.1;:/lib/ld.so.1}" ++ "%{mnan=2008:" SYSTEMLIBS_DIR "ld-linux-mipsn8.so.1;:" SYSTEMLIBS_DIR "ld.so.1}" + #define GLIBC_DYNAMIC_LINKER64 \ +- "%{mnan=2008:/lib64/ld-linux-mipsn8.so.1;:/lib64/ld.so.1}" ++ "%{mnan=2008:" SYSTEMLIBS_DIR "ld-linux-mipsn8.so.1;:" SYSTEMLIBS_DIR "ld.so.1}" + #define GLIBC_DYNAMIC_LINKERN32 \ +- "%{mnan=2008:/lib32/ld-linux-mipsn8.so.1;:/lib32/ld.so.1}" ++ "%{mnan=2008:" SYSTEMLIBS_DIR "ld-linux-mipsn8.so.1;:" SYSTEMLIBS_DIR "ld.so.1}" + + #undef UCLIBC_DYNAMIC_LINKER32 + #define UCLIBC_DYNAMIC_LINKER32 \ +- "%{mnan=2008:/lib/ld-uClibc-mipsn8.so.0;:/lib/ld-uClibc.so.0}" ++ "%{mnan=2008:" SYSTEMLIBS_DIR "ld-uClibc-mipsn8.so.0;:" SYSTEMLIBS_DIR "ld-uClibc.so.0}" + #undef UCLIBC_DYNAMIC_LINKER64 + #define UCLIBC_DYNAMIC_LINKER64 \ +- "%{mnan=2008:/lib/ld64-uClibc-mipsn8.so.0;:/lib/ld64-uClibc.so.0}" ++ "%{mnan=2008:" SYSTEMLIBS_DIR "ld64-uClibc-mipsn8.so.0;:" SYSTEMLIBS_DIR "ld64-uClibc.so.0}" + #define UCLIBC_DYNAMIC_LINKERN32 \ +- "%{mnan=2008:/lib32/ld-uClibc-mipsn8.so.0;:/lib32/ld-uClibc.so.0}" ++ "%{mnan=2008:" SYSTEMLIBS_DIR "ld-uClibc-mipsn8.so.0;:" SYSTEMLIBS_DIR "ld-uClibc.so.0}" + + #undef MUSL_DYNAMIC_LINKER32 + #define MUSL_DYNAMIC_LINKER32 "/lib/ld-musl-mips%{EL:el}%{msoft-float:-sf}.so.1" +diff --git a/gcc/config/rs6000/linux64.h b/gcc/config/rs6000/linux64.h +index fefa0c4..7173938 100644 +--- a/gcc/config/rs6000/linux64.h ++++ b/gcc/config/rs6000/linux64.h +@@ -412,16 +412,11 @@ extern int dot_symbols; + #undef LINK_OS_DEFAULT_SPEC + #define LINK_OS_DEFAULT_SPEC "%(link_os_linux)" + +-#define GLIBC_DYNAMIC_LINKER32 "%(dynamic_linker_prefix)/lib/ld.so.1" +- ++#define GLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld.so.1" + #ifdef LINUX64_DEFAULT_ABI_ELFv2 +-#define GLIBC_DYNAMIC_LINKER64 \ +-"%{mabi=elfv1:%(dynamic_linker_prefix)/lib64/ld64.so.1;" \ +-":%(dynamic_linker_prefix)/lib64/ld64.so.2}" ++#define GLIBC_DYNAMIC_LINKER64 "%{mabi=elfv1:" SYSTEMLIBS_DIR "ld64.so.1;:" SYSTEMLIBS_DIR "ld64.so.2}" + #else +-#define GLIBC_DYNAMIC_LINKER64 \ +-"%{mabi=elfv2:%(dynamic_linker_prefix)/lib64/ld64.so.2;" \ +-":%(dynamic_linker_prefix)/lib64/ld64.so.1}" ++#define GLIBC_DYNAMIC_LINKER64 "%{mabi=elfv2:" SYSTEMLIBS_DIR "ld64.so.2;:" SYSTEMLIBS_DIR "ld64.so.1}" + #endif + + #define MUSL_DYNAMIC_LINKER32 \ +@@ -429,8 +424,9 @@ extern int dot_symbols; + #define MUSL_DYNAMIC_LINKER64 \ + "/lib/ld-musl-powerpc64" MUSL_DYNAMIC_LINKER_E "%{msoft-float:-sf}.so.1" + +-#define UCLIBC_DYNAMIC_LINKER32 "/lib/ld-uClibc.so.0" +-#define UCLIBC_DYNAMIC_LINKER64 "/lib/ld64-uClibc.so.0" ++#define UCLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld-uClibc.so.0" ++#define UCLIBC_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld64-uClibc.so.0" ++ + #if DEFAULT_LIBC == LIBC_UCLIBC + #define CHOOSE_DYNAMIC_LINKER(G, U, M) \ + "%{mglibc:" G ";:%{mmusl:" M ";:" U "}}" +diff --git a/gcc/config/sh/linux.h b/gcc/config/sh/linux.h +index 2a036ac..50fb735 100644 +--- a/gcc/config/sh/linux.h ++++ b/gcc/config/sh/linux.h +@@ -64,7 +64,7 @@ along with GCC; see the file COPYING3. 
If not see + "/lib/ld-musl-sh" MUSL_DYNAMIC_LINKER_E MUSL_DYNAMIC_LINKER_FP \ + "%{mfdpic:-fdpic}.so.1" + +-#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2" ++#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2" + + #undef SUBTARGET_LINK_EMUL_SUFFIX + #define SUBTARGET_LINK_EMUL_SUFFIX "%{mfdpic:_fd;:_linux}" +diff --git a/gcc/config/sparc/linux.h b/gcc/config/sparc/linux.h +index 9b32577..7bd66de 100644 +--- a/gcc/config/sparc/linux.h ++++ b/gcc/config/sparc/linux.h +@@ -83,7 +83,7 @@ extern const char *host_detect_local_cpu (int argc, const char **argv); + When the -shared link option is used a final link is not being + done. */ + +-#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux.so.2" ++#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux.so.2" + + #undef LINK_SPEC + #define LINK_SPEC "-m elf32_sparc %{shared:-shared} \ +diff --git a/gcc/config/sparc/linux64.h b/gcc/config/sparc/linux64.h +index a1ef325..3bae3d5 100644 +--- a/gcc/config/sparc/linux64.h ++++ b/gcc/config/sparc/linux64.h +@@ -84,8 +84,8 @@ along with GCC; see the file COPYING3. If not see + When the -shared link option is used a final link is not being + done. */ + +-#define GLIBC_DYNAMIC_LINKER32 "/lib/ld-linux.so.2" +-#define GLIBC_DYNAMIC_LINKER64 "/lib64/ld-linux.so.2" ++#define GLIBC_DYNAMIC_LINKER32 SYSTEMLIBS_DIR "ld-linux.so.2" ++#define GLIBC_DYNAMIC_LINKER64 SYSTEMLIBS_DIR "ld-linux.so.2" + + #ifdef SPARC_BI_ARCH + +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0022-gcc-Fix-argument-list-too-long-error.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0022-gcc-Fix-argument-list-too-long-error.patch new file mode 100644 index 000000000..fab6e4aeb --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0022-gcc-Fix-argument-list-too-long-error.patch @@ -0,0 +1,40 @@ +From acce5157f6b3a1dc9a3676b7118ac887dc5693be Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:26:37 +0400 +Subject: [PATCH 22/46] gcc: Fix argument list too long error. + +There would be an "Argument list too long" error when the +build directory is longer than 200, this is caused by: + +headers=`echo $(PLUGIN_HEADERS) | tr ' ' '\012' | sort -u` + +The PLUGIN_HEADERS is too long before sort, so the "echo" can't handle +it, use the $(sort list) of GNU make which can handle the too long list +would fix the problem, the header would be short enough after sorted. +The "tr ' ' '\012'" was used for translating the space to "\n", the +$(sort list) doesn't need this. + +Signed-off-by: Robert Yang +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + gcc/Makefile.in | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/gcc/Makefile.in b/gcc/Makefile.in +index 9a1d466..450cb79 100644 +--- a/gcc/Makefile.in ++++ b/gcc/Makefile.in +@@ -3349,7 +3349,7 @@ install-plugin: installdirs lang.install-plugin s-header-vars install-gengtype + # We keep the directory structure for files in config or c-family and .def + # files. All other files are flattened to a single directory. 
+ $(mkinstalldirs) $(DESTDIR)$(plugin_includedir)
+- headers=`echo $(PLUGIN_HEADERS) $$(cd $(srcdir); echo *.h *.def) | tr ' ' '\012' | sort -u`; \
++ headers="$(sort $(PLUGIN_HEADERS) $$(cd $(srcdir); echo *.h *.def))"; \
+ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`; \
+ for file in $$headers; do \
+ if [ -f $$file ] ; then \
+--
+2.8.2
+
diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0023-Disable-sdt.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0023-Disable-sdt.patch
new file mode 100644
index 000000000..0efd890aa
--- /dev/null
+++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0023-Disable-sdt.patch
@@ -0,0 +1,113 @@
+From 6573aec00ada35c48c1838c8491ce8f7798ae993 Mon Sep 17 00:00:00 2001
+From: Khem Raj
+Date: Fri, 29 Mar 2013 09:28:10 +0400
+Subject: [PATCH 23/46] Disable sdt.
+
+We don't list dtrace in DEPENDS so we shouldn't be depending on this header.
+It may or may not exist from previous builds though. To be deterministic, disable
+sdt.h usage always. This avoids build failures if the header is removed after configure
+but before libgcc is compiled for example.
+
+RP 2012/8/7
+
+Signed-off-by: Khem Raj
+
+Disable sdt for libstdc++-v3.
+
+Signed-off-by: Robert Yang
+
+Upstream-Status: Inappropriate [hack]
+---
+ gcc/configure | 12 ++++++------
+ gcc/configure.ac | 18 +++++++++---------
+ libstdc++-v3/configure | 6 +++---
+ libstdc++-v3/configure.ac | 2 +-
+ 4 files changed, 19 insertions(+), 19 deletions(-)
+
+diff --git a/gcc/configure b/gcc/configure
+index 389b6d5..73c264d 100755
+--- a/gcc/configure
++++ b/gcc/configure
+@@ -28528,12 +28528,12 @@ fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: checking sys/sdt.h in the target C library" >&5
+ $as_echo_n "checking sys/sdt.h in the target C library... " >&6; }
+ have_sys_sdt_h=no
+-if test -f $target_header_dir/sys/sdt.h; then
+- have_sys_sdt_h=yes
+-
+-$as_echo "#define HAVE_SYS_SDT_H 1" >>confdefs.h
+-
+-fi
++#if test -f $target_header_dir/sys/sdt.h; then
++# have_sys_sdt_h=yes
++#
++#$as_echo "#define HAVE_SYS_SDT_H 1" >>confdefs.h
++#
++#fi
+ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $have_sys_sdt_h" >&5
+ $as_echo "$have_sys_sdt_h" >&6; }
+
+diff --git a/gcc/configure.ac b/gcc/configure.ac
+index 91ac800..cecf121 100644
+--- a/gcc/configure.ac
++++ b/gcc/configure.ac
+@@ -5514,15 +5514,15 @@ fi
+ AC_SUBST([enable_default_ssp])
+ 
+ # Test for <sys/sdt.h> on the target.
+-GCC_TARGET_TEMPLATE([HAVE_SYS_SDT_H])
+-AC_MSG_CHECKING(sys/sdt.h in the target C library)
+-have_sys_sdt_h=no
+-if test -f $target_header_dir/sys/sdt.h; then
+- have_sys_sdt_h=yes
+- AC_DEFINE(HAVE_SYS_SDT_H, 1,
+- [Define if your target C library provides sys/sdt.h])
+-fi
+-AC_MSG_RESULT($have_sys_sdt_h)
++#GCC_TARGET_TEMPLATE([HAVE_SYS_SDT_H])
++#AC_MSG_CHECKING(sys/sdt.h in the target C library)
++#have_sys_sdt_h=no
++#if test -f $target_header_dir/sys/sdt.h; then
++# have_sys_sdt_h=yes
++# AC_DEFINE(HAVE_SYS_SDT_H, 1,
++# [Define if your target C library provides sys/sdt.h])
++#fi
++#AC_MSG_RESULT($have_sys_sdt_h)
+ 
+ # Check if TFmode long double should be used by default or not.
+ # Some glibc targets used DFmode long double, but with glibc 2.4 +diff --git a/libstdc++-v3/configure b/libstdc++-v3/configure +index 8a5481c..6a40e92 100755 +--- a/libstdc++-v3/configure ++++ b/libstdc++-v3/configure +@@ -21735,11 +21735,11 @@ ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' + ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' + ac_compiler_gnu=$ac_cv_c_compiler_gnu + +- if test $glibcxx_cv_sys_sdt_h = yes; then ++# if test $glibcxx_cv_sys_sdt_h = yes; then + +-$as_echo "#define HAVE_SYS_SDT_H 1" >>confdefs.h ++#$as_echo "#define HAVE_SYS_SDT_H 1" >>confdefs.h + +- fi ++# fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $glibcxx_cv_sys_sdt_h" >&5 + $as_echo "$glibcxx_cv_sys_sdt_h" >&6; } + +diff --git a/libstdc++-v3/configure.ac b/libstdc++-v3/configure.ac +index 9e19e99..0077ffd 100644 +--- a/libstdc++-v3/configure.ac ++++ b/libstdc++-v3/configure.ac +@@ -230,7 +230,7 @@ GLIBCXX_CHECK_SC_NPROCESSORS_ONLN + GLIBCXX_CHECK_SC_NPROC_ONLN + GLIBCXX_CHECK_PTHREADS_NUM_PROCESSORS_NP + GLIBCXX_CHECK_SYSCTL_HW_NCPU +-GLIBCXX_CHECK_SDT_H ++#GLIBCXX_CHECK_SDT_H + + # Check for available headers. + AC_CHECK_HEADERS([endian.h execinfo.h float.h fp.h ieeefp.h inttypes.h \ +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0024-libtool.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0024-libtool.patch new file mode 100644 index 000000000..1f73b5db5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0024-libtool.patch @@ -0,0 +1,42 @@ +From 6c0aa5c2538829248547197718037ff0b9788676 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:29:11 +0400 +Subject: [PATCH 24/46] libtool + +libstdc++ from gcc-runtime gets created with -rpath=/usr/lib/../lib for qemux86-64 +when running on am x86_64 build host. + +This patch stops this speading to libdir in the libstdc++.la file within libtool. +Arguably, it shouldn't be passing this into libtool in the first place but +for now this resolves the nastiest problems this causes. + +func_normal_abspath would resolve an empty path to `pwd` so we need +to filter the zero case. + +RP 2012/8/24 + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + ltmain.sh | 4 ++++ + 1 file changed, 4 insertions(+) + +diff --git a/ltmain.sh b/ltmain.sh +index 9503ec8..0121fba 100644 +--- a/ltmain.sh ++++ b/ltmain.sh +@@ -6359,6 +6359,10 @@ func_mode_link () + func_warning "ignoring multiple \`-rpath's for a libtool library" + + install_libdir="$1" ++ if test -n "$install_libdir"; then ++ func_normal_abspath "$install_libdir" ++ install_libdir=$func_normal_abspath_result ++ fi + + oldlibs= + if test -z "$rpath"; then +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0025-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0025-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch new file mode 100644 index 000000000..3b7ee497f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0025-gcc-armv4-pass-fix-v4bx-to-linker-to-support-EABI.patch @@ -0,0 +1,43 @@ +From b1263a48553ce75f8c3bed4fe12122af57845567 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:30:32 +0400 +Subject: [PATCH 25/46] gcc: armv4: pass fix-v4bx to linker to support EABI. 
+ +The LINK_SPEC for linux gets overwritten by linux-eabi.h which +means the value of TARGET_FIX_V4BX_SPEC gets lost and as a result +the option is not passed to linker when chosing march=armv4 +This patch redefines this in linux-eabi.h and reinserts it +for eabi defaulting toolchains. + +We might want to send it upstream. + +Signed-off-by: Khem Raj + +Upstream-Status: Pending +--- + gcc/config/arm/linux-eabi.h | 6 +++++- + 1 file changed, 5 insertions(+), 1 deletion(-) + +diff --git a/gcc/config/arm/linux-eabi.h b/gcc/config/arm/linux-eabi.h +index 4010435..aaea1c9 100644 +--- a/gcc/config/arm/linux-eabi.h ++++ b/gcc/config/arm/linux-eabi.h +@@ -94,10 +94,14 @@ + #define MUSL_DYNAMIC_LINKER \ + "/lib/ld-musl-arm" MUSL_DYNAMIC_LINKER_E "%{mfloat-abi=hard:hf}.so.1" + ++/* For armv4 we pass --fix-v4bx to linker to support EABI */ ++#undef TARGET_FIX_V4BX_SPEC ++#define TARGET_FIX_V4BX_SPEC "%{mcpu=arm8|mcpu=arm810|mcpu=strongarm*|march=armv4: --fix-v4bx}" ++ + /* At this point, bpabi.h will have clobbered LINK_SPEC. We want to + use the GNU/Linux version, not the generic BPABI version. */ + #undef LINK_SPEC +-#define LINK_SPEC EABI_LINK_SPEC \ ++#define LINK_SPEC TARGET_FIX_V4BX_SPEC EABI_LINK_SPEC \ + LINUX_OR_ANDROID_LD (LINUX_TARGET_LINK_SPEC, \ + LINUX_TARGET_LINK_SPEC " " ANDROID_LINK_SPEC) + +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0026-Use-the-multilib-config-files-from-B-instead-of-usin.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0026-Use-the-multilib-config-files-from-B-instead-of-usin.patch new file mode 100644 index 000000000..be25be616 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0026-Use-the-multilib-config-files-from-B-instead-of-usin.patch @@ -0,0 +1,102 @@ +From b5c305fc251299f2e328410b18cfb55c75b5f038 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Mar 2013 09:33:04 +0400 +Subject: [PATCH 26/46] Use the multilib config files from ${B} instead of + using the ones from ${S} + +Use the multilib config files from ${B} instead of using the ones from ${S} +so that the source can be shared between gcc-cross-initial, +gcc-cross-intermediate, gcc-cross, gcc-runtime, and also the sdk build. 
+ +Signed-off-by: Khem Raj +Signed-off-by: Constantin Musca + +Upstream-Status: Inappropriate [configuration] +--- + gcc/configure | 22 ++++++++++++++++++---- + gcc/configure.ac | 22 ++++++++++++++++++---- + 2 files changed, 36 insertions(+), 8 deletions(-) + +diff --git a/gcc/configure b/gcc/configure +index 73c264d..377253e 100755 +--- a/gcc/configure ++++ b/gcc/configure +@@ -12110,10 +12110,20 @@ done + tmake_file_= + for f in ${tmake_file} + do +- if test -f ${srcdir}/config/$f +- then +- tmake_file_="${tmake_file_} \$(srcdir)/config/$f" +- fi ++ case $f in ++ */t-linux64 ) ++ if test -f ./config/$f ++ then ++ tmake_file_="${tmake_file_} ./config/$f" ++ fi ++ ;; ++ * ) ++ if test -f ${srcdir}/config/$f ++ then ++ tmake_file_="${tmake_file_} \$(srcdir)/config/$f" ++ fi ++ ;; ++ esac + done + tmake_file="${tmake_file_}" + +@@ -12124,6 +12134,10 @@ tm_file_list="options.h" + tm_include_list="options.h insn-constants.h" + for f in $tm_file; do + case $f in ++ */linux64.h ) ++ tm_file_list="${tm_file_list} ./config/$f" ++ tm_include_list="${tm_include_list} ./config/$f" ++ ;; + ./* ) + f=`echo $f | sed 's/^..//'` + tm_file_list="${tm_file_list} $f" +diff --git a/gcc/configure.ac b/gcc/configure.ac +index cecf121..54e7619 100644 +--- a/gcc/configure.ac ++++ b/gcc/configure.ac +@@ -1863,10 +1863,20 @@ done + tmake_file_= + for f in ${tmake_file} + do +- if test -f ${srcdir}/config/$f +- then +- tmake_file_="${tmake_file_} \$(srcdir)/config/$f" +- fi ++ case $f in ++ */t-linux64 ) ++ if test -f ./config/$f ++ then ++ tmake_file_="${tmake_file_} ./config/$f" ++ fi ++ ;; ++ * ) ++ if test -f ${srcdir}/config/$f ++ then ++ tmake_file_="${tmake_file_} \$(srcdir)/config/$f" ++ fi ++ ;; ++ esac + done + tmake_file="${tmake_file_}" + +@@ -1877,6 +1887,10 @@ tm_file_list="options.h" + tm_include_list="options.h insn-constants.h" + for f in $tm_file; do + case $f in ++ */linux64.h ) ++ tm_file_list="${tm_file_list} ./config/$f" ++ tm_include_list="${tm_include_list} ./config/$f" ++ ;; + ./* ) + f=`echo $f | sed 's/^..//'` + tm_file_list="${tm_file_list} $f" +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0027-Avoid-using-libdir-from-.la-which-usually-points-to-.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0027-Avoid-using-libdir-from-.la-which-usually-points-to-.patch new file mode 100644 index 000000000..d1bbebc0a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0027-Avoid-using-libdir-from-.la-which-usually-points-to-.patch @@ -0,0 +1,31 @@ +From eb6178b7fb466ae429c56380c6dbc564a16d900a Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 20 Feb 2015 09:39:38 +0000 +Subject: [PATCH 27/46] Avoid using libdir from .la which usually points to a + host path + +Upstream-Status: Inappropriate [embedded specific] + +Signed-off-by: Jonathan Liu +Signed-off-by: Khem Raj +--- + ltmain.sh | 3 +++ + 1 file changed, 3 insertions(+) + +diff --git a/ltmain.sh b/ltmain.sh +index 0121fba..52bdbdb 100644 +--- a/ltmain.sh ++++ b/ltmain.sh +@@ -5628,6 +5628,9 @@ func_mode_link () + absdir="$abs_ladir" + libdir="$abs_ladir" + else ++ # Instead of using libdir from .la which usually points to a host path, ++ # use the path the .la is contained in. 
++ libdir="$abs_ladir" + dir="$libdir" + absdir="$libdir" + fi +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0028-export-CPP.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0028-export-CPP.patch new file mode 100644 index 000000000..c21253938 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0028-export-CPP.patch @@ -0,0 +1,53 @@ +From 617184f35e97934d9e6268e71378574e2b776c2b Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 20 Feb 2015 09:40:59 +0000 +Subject: [PATCH 28/46] export CPP + +The OE environment sets and exports CPP as being the target gcc. When +building gcc-cross-canadian for a mingw targetted sdk, the following can be found +in build.x86_64-pokysdk-mingw32.i586-poky-linux/build-x86_64-linux/libiberty/config.log: + +configure:3641: checking for _FILE_OFFSET_BITS value needed for large files +configure:3666: gcc -c -isystem/media/build1/poky/build/tmp/sysroots/x86_64-linux/usr/include -O2 -pipe conftest.c >&5 +configure:3666: $? = 0 +configure:3698: result: no +configure:3786: checking how to run the C preprocessor +configure:3856: result: x86_64-pokysdk-mingw32-gcc -E --sysroot=/media/build1/poky/build/tmp/sysroots/x86_64-nativesdk-mingw32-pokysdk-mingw32 +configure:3876: x86_64-pokysdk-mingw32-gcc -E --sysroot=/media/build1/poky/build/tmp/sysroots/x86_64-nativesdk-mingw32-pokysdk-mingw32 conftest.c +configure:3876: $? = 0 + +Note this is a *build* target (in build-x86_64-linux) so it should be +using the host "gcc", not x86_64-pokysdk-mingw32-gcc. Since the mingw32 +headers are very different, using the wrong cpp is a real problem. It is leaking +into configure through the CPP variable. Ultimately this leads to build +failures related to not being able to include a process.h file for pem-unix.c. + +The fix is to ensure we export a sane CPP value into the build +environment when using build targets. We could define a CPP_FOR_BUILD value which may be +the version which needs to be upstreamed but for now, this fix is good enough to +avoid the problem. + +RP 22/08/2013 + +Upstream-Status: Pending + +Signed-off-by: Khem Raj +--- + Makefile.in | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/Makefile.in b/Makefile.in +index 1522e39..beb9b9a 100644 +--- a/Makefile.in ++++ b/Makefile.in +@@ -149,6 +149,7 @@ BUILD_EXPORTS = \ + AR="$(AR_FOR_BUILD)"; export AR; \ + AS="$(AS_FOR_BUILD)"; export AS; \ + CC="$(CC_FOR_BUILD)"; export CC; \ ++ CPP="$(CC_FOR_BUILD) -E"; export CPP; \ + CFLAGS="$(CFLAGS_FOR_BUILD)"; export CFLAGS; \ + CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ + CXX="$(CXX_FOR_BUILD)"; export CXX; \ +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0029-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0029-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch new file mode 100644 index 000000000..47b9c0d1b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0029-Enable-SPE-AltiVec-generation-on-powepc-linux-target.patch @@ -0,0 +1,56 @@ +From e140700976e3b7eb4250b1ffde9bc16494456903 Mon Sep 17 00:00:00 2001 +From: Alexandru-Cezar Sardan +Date: Wed, 5 Feb 2014 16:52:31 +0200 +Subject: [PATCH 29/46] Enable SPE & AltiVec generation on powepc*linux target + +When is configured with --target=powerpc-linux, the resulting GCC will +not be able to generate code for SPE targets (e500v1/v2). 
+GCC configured with --target=powerpc-linuxspe will not be able to +generate AltiVec instructions (for e6500). +This patch modifies the configured file such that SPE or AltiVec code +can be generated when gcc is configured with --target=powerpc-linux. +The ABI and speciffic instructions can be selected through the +"-mabi=spe or -mabi=altivec" and the "-mspe or -maltivec" parameters. + +Upstream-Status: Inappropriate [configuration] + +Signed-off-by: Alexandru-Cezar Sardan +--- + gcc/config.gcc | 9 ++++++++- + gcc/config/rs6000/linuxspe.h | 3 --- + 2 files changed, 8 insertions(+), 4 deletions(-) + +diff --git a/gcc/config.gcc b/gcc/config.gcc +index 9c6d156..18cff5a 100644 +--- a/gcc/config.gcc ++++ b/gcc/config.gcc +@@ -2392,7 +2392,14 @@ powerpc-*-rtems*) + tmake_file="${tmake_file} rs6000/t-fprules rs6000/t-rtems rs6000/t-ppccomm" + ;; + powerpc*-*-linux*) +- tm_file="${tm_file} dbxelf.h elfos.h gnu-user.h freebsd-spec.h rs6000/sysv4.h" ++ case ${target} in ++ powerpc*-*-linux*spe* | powerpc*-*-linux*altivec*) ++ tm_file="${tm_file} dbxelf.h elfos.h gnu-user.h freebsd-spec.h rs6000/sysv4.h" ++ ;; ++ *) ++ tm_file="${tm_file} dbxelf.h elfos.h gnu-user.h freebsd-spec.h rs6000/sysv4.h rs6000/linuxaltivec.h rs6000/linuxspe.h rs6000/e500.h" ++ ;; ++ esac + extra_options="${extra_options} rs6000/sysv4.opt" + tmake_file="${tmake_file} rs6000/t-fprules rs6000/t-ppccomm" + extra_objs="$extra_objs rs6000-linux.o" +diff --git a/gcc/config/rs6000/linuxspe.h b/gcc/config/rs6000/linuxspe.h +index 35623cd..f74e00d 100644 +--- a/gcc/config/rs6000/linuxspe.h ++++ b/gcc/config/rs6000/linuxspe.h +@@ -27,6 +27,3 @@ + #undef TARGET_DEFAULT + #define TARGET_DEFAULT MASK_STRICT_ALIGN + #endif +- +-#undef ASM_DEFAULT_SPEC +-#define ASM_DEFAULT_SPEC "-mppc -mspe -me500" +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0030-Disable-the-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0030-Disable-the-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch new file mode 100644 index 000000000..c09d0192e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0030-Disable-the-MULTILIB_OSDIRNAMES-and-other-multilib-o.patch @@ -0,0 +1,42 @@ +From 0ddcb95a86830766fd02122f19384fc929b377c5 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 20 Feb 2015 10:21:55 +0000 +Subject: [PATCH 30/46] Disable the MULTILIB_OSDIRNAMES and other multilib + options. + +Hard coding the MULTILIB_OSDIRNAMES with ../lib64 is causing problems on +systems where the libdir is NOT set to /lib64. This is allowed by the +ABI, as +long as the dynamic loader is present in /lib. + +We simply want to use the default rules in gcc to find and configure the +normal libdir. 
+ +Upstream-Status: Inappropriate[OE-Specific] + +Signed-off-by: Mark Hatle +Signed-off-by: Khem Raj +--- + gcc/config/aarch64/t-aarch64-linux | 8 ++++---- + 1 file changed, 4 insertions(+), 4 deletions(-) + +diff --git a/gcc/config/aarch64/t-aarch64-linux b/gcc/config/aarch64/t-aarch64-linux +index 1cfe9f3..d688ac9 100644 +--- a/gcc/config/aarch64/t-aarch64-linux ++++ b/gcc/config/aarch64/t-aarch64-linux +@@ -21,8 +21,8 @@ + LIB1ASMSRC = aarch64/lib1funcs.asm + LIB1ASMFUNCS = _aarch64_sync_cache_range + +-AARCH_BE = $(if $(findstring TARGET_BIG_ENDIAN_DEFAULT=1, $(tm_defines)),_be) +-MULTILIB_OSDIRNAMES = mabi.lp64=../lib64$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu) +-MULTIARCH_DIRNAME = $(call if_multiarch,aarch64$(AARCH_BE)-linux-gnu) ++#AARCH_BE = $(if $(findstring TARGET_BIG_ENDIAN_DEFAULT=1, $(tm_defines)),_be) ++#MULTILIB_OSDIRNAMES = mabi.lp64=../lib64$(call if_multiarch,:aarch64$(AARCH_BE)-linux-gnu) ++#MULTIARCH_DIRNAME = $(call if_multiarch,aarch64$(AARCH_BE)-linux-gnu) + +-MULTILIB_OSDIRNAMES += mabi.ilp32=../libilp32 ++#MULTILIB_OSDIRNAMES += mabi.ilp32=../libilp32 +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0031-Ensure-target-gcc-headers-can-be-included.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0031-Ensure-target-gcc-headers-can-be-included.patch new file mode 100644 index 000000000..fb1cd0f16 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0031-Ensure-target-gcc-headers-can-be-included.patch @@ -0,0 +1,98 @@ +From fc6621435a64a9d69aa251b70361da94cf2db6be Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 20 Feb 2015 10:25:11 +0000 +Subject: [PATCH 31/46] Ensure target gcc headers can be included + +There are a few headers installed as part of the OpenEmbedded +gcc-runtime target (omp.h, ssp/*.h). Being installed from a recipe +built for the target architecture, these are within the target +sysroot and not cross/nativesdk; thus they weren't able to be +found by gcc with the existing search paths. Add support for +picking up these headers under the sysroot supplied on the gcc +command line in order to resolve this. + +Upstream-Status: Pending + +Signed-off-by: Paul Eggleton +Signed-off-by: Khem Raj +--- + gcc/Makefile.in | 2 ++ + gcc/cppdefault.c | 4 ++++ + gcc/defaults.h | 9 +++++++++ + gcc/gcc.c | 7 ------- + 4 files changed, 15 insertions(+), 7 deletions(-) + +diff --git a/gcc/Makefile.in b/gcc/Makefile.in +index 450cb79..cc75536 100644 +--- a/gcc/Makefile.in ++++ b/gcc/Makefile.in +@@ -593,6 +593,7 @@ libexecdir = @libexecdir@ + + # Directory in which the compiler finds libraries etc. 
+ libsubdir = $(libdir)/gcc/$(real_target_noncanonical)/$(version)$(accel_dir_suffix) ++libsubdir_target = gcc/$(target_noncanonical)/$(version) + # Directory in which the compiler finds executables + libexecsubdir = $(libexecdir)/gcc/$(real_target_noncanonical)/$(version)$(accel_dir_suffix) + # Directory in which all plugin resources are installed +@@ -2688,6 +2689,7 @@ CFLAGS-intl.o += -DLOCALEDIR=\"$(localedir)\" + + PREPROCESSOR_DEFINES = \ + -DGCC_INCLUDE_DIR=\"$(libsubdir)/include\" \ ++ -DGCC_INCLUDE_SUBDIR_TARGET=\"$(libsubdir_target)/include\" \ + -DFIXED_INCLUDE_DIR=\"$(libsubdir)/include-fixed\" \ + -DGPLUSPLUS_INCLUDE_DIR=\"$(gcc_gxx_include_dir)\" \ + -DGPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT=$(gcc_gxx_include_dir_add_sysroot) \ +diff --git a/gcc/cppdefault.c b/gcc/cppdefault.c +index 54aaf06..7b4dd51 100644 +--- a/gcc/cppdefault.c ++++ b/gcc/cppdefault.c +@@ -59,6 +59,10 @@ const struct default_include cpp_include_defaults[] + /* This is the dir for gcc's private headers. */ + { GCC_INCLUDE_DIR, "GCC", 0, 0, 0, 0 }, + #endif ++#ifdef GCC_INCLUDE_SUBDIR_TARGET ++ /* This is the dir for gcc's private headers under the specified sysroot. */ ++ { STANDARD_STARTFILE_PREFIX_2 GCC_INCLUDE_SUBDIR_TARGET, "GCC", 0, 0, 1, 0 }, ++#endif + #ifdef LOCAL_INCLUDE_DIR + /* /usr/local/include comes before the fixincluded header files. */ + { LOCAL_INCLUDE_DIR, 0, 0, 1, 1, 2 }, +diff --git a/gcc/defaults.h b/gcc/defaults.h +index 3e18338..0f317f2 100644 +--- a/gcc/defaults.h ++++ b/gcc/defaults.h +@@ -1492,4 +1492,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see + #define DWARF_GNAT_ENCODINGS_DEFAULT DWARF_GNAT_ENCODINGS_GDB + #endif + ++/* Default prefixes to attach to command names. */ ++ ++#ifndef STANDARD_STARTFILE_PREFIX_1 ++#define STANDARD_STARTFILE_PREFIX_1 "/lib/" ++#endif ++#ifndef STANDARD_STARTFILE_PREFIX_2 ++#define STANDARD_STARTFILE_PREFIX_2 "/usr/lib/" ++#endif ++ + #endif /* ! GCC_DEFAULTS_H */ +diff --git a/gcc/gcc.c b/gcc/gcc.c +index 04fa81d..9750cc2 100644 +--- a/gcc/gcc.c ++++ b/gcc/gcc.c +@@ -1450,13 +1450,6 @@ static const char *gcc_libexec_prefix; + + /* Default prefixes to attach to command names. */ + +-#ifndef STANDARD_STARTFILE_PREFIX_1 +-#define STANDARD_STARTFILE_PREFIX_1 "/lib/" +-#endif +-#ifndef STANDARD_STARTFILE_PREFIX_2 +-#define STANDARD_STARTFILE_PREFIX_2 "/usr/lib/" +-#endif +- + #ifdef CROSS_DIRECTORY_STRUCTURE /* Don't use these prefixes for a cross compiler. */ + #undef MD_EXEC_PREFIX + #undef MD_STARTFILE_PREFIX +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0032-gcc-4.8-won-t-build-with-disable-dependency-tracking.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0032-gcc-4.8-won-t-build-with-disable-dependency-tracking.patch new file mode 100644 index 000000000..c0b001db5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0032-gcc-4.8-won-t-build-with-disable-dependency-tracking.patch @@ -0,0 +1,54 @@ +From ff939c5063d8f8d444bdb25651a0a48e608efaa4 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 20 Feb 2015 11:17:19 +0000 +Subject: [PATCH 32/46] gcc 4.8+ won't build with --disable-dependency-tracking + +since the *.Ppo files don't get created unless --enable-dependency-tracking is true. + +This patch ensures we only use those compiler options when its enabled. 
+ +Upstream-Status: Submitted + +(Problem was already reported upstream, attached this patch there +http://gcc.gnu.org/bugzilla/show_bug.cgi?id=55930) + +RP +2012/09/22 + +Signed-off-by: Khem Raj +--- + libatomic/Makefile.am | 3 ++- + libatomic/Makefile.in | 3 ++- + 2 files changed, 4 insertions(+), 2 deletions(-) + +diff --git a/libatomic/Makefile.am b/libatomic/Makefile.am +index b351244..399ce18 100644 +--- a/libatomic/Makefile.am ++++ b/libatomic/Makefile.am +@@ -101,7 +101,8 @@ PAT_S = $(word 3,$(PAT_SPLIT)) + IFUNC_DEF = -DIFUNC_ALT=$(PAT_S) + IFUNC_OPT = $(word $(PAT_S),$(IFUNC_OPTIONS)) + +-M_DEPS = -MT $@ -MD -MP -MF $(DEPDIR)/$(@F).Ppo ++@AMDEP_TRUE@M_DEPS = -MT $@ -MD -MP -MF $(DEPDIR)/$(@F).Ppo ++@AMDEP_FALSE@M_DEPS = + M_SIZE = -DN=$(PAT_N) + M_IFUNC = $(if $(PAT_S),$(IFUNC_DEF) $(IFUNC_OPT)) + M_FILE = $(PAT_BASE)_n.c +diff --git a/libatomic/Makefile.in b/libatomic/Makefile.in +index a083d87..a92cfce 100644 +--- a/libatomic/Makefile.in ++++ b/libatomic/Makefile.in +@@ -330,7 +330,8 @@ PAT_N = $(word 2,$(PAT_SPLIT)) + PAT_S = $(word 3,$(PAT_SPLIT)) + IFUNC_DEF = -DIFUNC_ALT=$(PAT_S) + IFUNC_OPT = $(word $(PAT_S),$(IFUNC_OPTIONS)) +-M_DEPS = -MT $@ -MD -MP -MF $(DEPDIR)/$(@F).Ppo ++@AMDEP_TRUE@M_DEPS = -MT $@ -MD -MP -MF $(DEPDIR)/$(@F).Ppo ++@AMDEP_FALSE@M_DEPS = + M_SIZE = -DN=$(PAT_N) + M_IFUNC = $(if $(PAT_S),$(IFUNC_DEF) $(IFUNC_OPT)) + M_FILE = $(PAT_BASE)_n.c +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0033-Don-t-search-host-directory-during-relink-if-inst_pr.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0033-Don-t-search-host-directory-during-relink-if-inst_pr.patch new file mode 100644 index 000000000..e425d7146 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0033-Don-t-search-host-directory-during-relink-if-inst_pr.patch @@ -0,0 +1,38 @@ +From 5092f5389d02e78cd59690cf3fca24b56a97aff2 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Tue, 3 Mar 2015 08:21:19 +0000 +Subject: [PATCH 33/46] Don't search host directory during "relink" if + $inst_prefix is provided + +http://lists.gnu.org/archive/html/libtool-patches/2011-01/msg00026.html + +Upstream-Status: Submitted + +Signed-off-by: Khem Raj +--- + ltmain.sh | 5 +++-- + 1 file changed, 3 insertions(+), 2 deletions(-) + +diff --git a/ltmain.sh b/ltmain.sh +index 52bdbdb..82bcec3 100644 +--- a/ltmain.sh ++++ b/ltmain.sh +@@ -6004,12 +6004,13 @@ func_mode_link () + fi + else + # We cannot seem to hardcode it, guess we'll fake it. ++ # Default if $libdir is not relative to the prefix: + add_dir="-L$libdir" +- # Try looking first in the location we're being installed to. 
++ + if test -n "$inst_prefix_dir"; then + case $libdir in + [\\/]*) +- add_dir="$add_dir -L$inst_prefix_dir$libdir" ++ add_dir="-L$inst_prefix_dir$libdir" + ;; + esac + fi +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0034-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0034-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch new file mode 100644 index 000000000..922a8555b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0034-Use-SYSTEMLIBS_DIR-replacement-instead-of-hardcoding.patch @@ -0,0 +1,29 @@ +From 1faa6f69f93bb95af2b2b2bd24e181b50fb5b37c Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Tue, 28 Apr 2015 23:15:27 -0700 +Subject: [PATCH 34/46] Use SYSTEMLIBS_DIR replacement instead of hardcoding + base_libdir + +Signed-off-by: Khem Raj +--- +Upstream-Status: Inappropriate [OE Configuration] + + gcc/config/aarch64/aarch64-linux.h | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/gcc/config/aarch64/aarch64-linux.h b/gcc/config/aarch64/aarch64-linux.h +index 5fcaa59..8588ac0 100644 +--- a/gcc/config/aarch64/aarch64-linux.h ++++ b/gcc/config/aarch64/aarch64-linux.h +@@ -21,7 +21,7 @@ + #ifndef GCC_AARCH64_LINUX_H + #define GCC_AARCH64_LINUX_H + +-#define GLIBC_DYNAMIC_LINKER "/lib/ld-linux-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" ++#define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" + + #undef MUSL_DYNAMIC_LINKER + #define MUSL_DYNAMIC_LINKER "/lib/ld-musl-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0035-aarch64-Add-support-for-musl-ldso.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0035-aarch64-Add-support-for-musl-ldso.patch new file mode 100644 index 000000000..9dfc47276 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0035-aarch64-Add-support-for-musl-ldso.patch @@ -0,0 +1,28 @@ +From 3768468c1a6cc170fff88c03b808c975ac653811 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Tue, 28 Apr 2015 23:18:39 -0700 +Subject: [PATCH 35/46] aarch64: Add support for musl ldso + +Signed-off-by: Khem Raj +--- +Upstream-Status: Inappropriate [OE Configuration] + + gcc/config/aarch64/aarch64-linux.h | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/gcc/config/aarch64/aarch64-linux.h b/gcc/config/aarch64/aarch64-linux.h +index 8588ac0..946b3ca 100644 +--- a/gcc/config/aarch64/aarch64-linux.h ++++ b/gcc/config/aarch64/aarch64-linux.h +@@ -24,7 +24,7 @@ + #define GLIBC_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-linux-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" + + #undef MUSL_DYNAMIC_LINKER +-#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" ++#define MUSL_DYNAMIC_LINKER SYSTEMLIBS_DIR "ld-musl-aarch64%{mbig-endian:_be}%{mabi=ilp32:_ilp32}.so.1" + + #undef ASAN_CC1_SPEC + #define ASAN_CC1_SPEC "%{%:sanitize(address):-funwind-tables}" +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0036-libcc1-fix-libcc1-s-install-path-and-rpath.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0036-libcc1-fix-libcc1-s-install-path-and-rpath.patch new file mode 100644 index 000000000..f89a8860f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0036-libcc1-fix-libcc1-s-install-path-and-rpath.patch @@ -0,0 
+1,54 @@ +From f4d3c8e970d42a43cd3d2f751e13324efa936ff8 Mon Sep 17 00:00:00 2001 +From: Robert Yang +Date: Sun, 5 Jul 2015 20:25:18 -0700 +Subject: [PATCH 36/46] libcc1: fix libcc1's install path and rpath + +* Install libcc1.so and libcc1plugin.so into + $(libexecdir)/gcc/$(target_noncanonical)/$(gcc_version), as what we + had done to lto-plugin. +* Fix bad RPATH iussue: + gcc-5.2.0: package gcc-plugins contains bad RPATH /patht/to/tmp/sysroots/qemux86-64/usr/lib64/../lib64 in file + /path/to/gcc/5.2.0-r0/packages-split/gcc-plugins/usr/lib64/gcc/x86_64-poky-linux/5.2.0/plugin/libcc1plugin.so.0.0.0 + [rpaths] + +Upstream-Status: Inappropriate [OE configuration] + +Signed-off-by: Robert Yang +--- + libcc1/Makefile.am | 4 ++-- + libcc1/Makefile.in | 4 ++-- + 2 files changed, 4 insertions(+), 4 deletions(-) + +diff --git a/libcc1/Makefile.am b/libcc1/Makefile.am +index b40820b..32930c5 100644 +--- a/libcc1/Makefile.am ++++ b/libcc1/Makefile.am +@@ -35,8 +35,8 @@ libiberty = $(if $(wildcard $(libiberty_noasan)),$(Wc)$(libiberty_noasan), \ + $(Wc)$(libiberty_normal))) + libiberty_dep = $(patsubst $(Wc)%,%,$(libiberty)) + +-plugindir = $(libdir)/gcc/$(target_noncanonical)/$(gcc_version)/plugin +-cc1libdir = $(libdir)/$(libsuffix) ++cc1libdir = $(libexecdir)/gcc/$(target_noncanonical)/$(gcc_version) ++plugindir = $(cc1libdir) + + if ENABLE_PLUGIN + plugin_LTLIBRARIES = libcc1plugin.la +diff --git a/libcc1/Makefile.in b/libcc1/Makefile.in +index 79d39d3..227ec22 100644 +--- a/libcc1/Makefile.in ++++ b/libcc1/Makefile.in +@@ -291,8 +291,8 @@ libiberty = $(if $(wildcard $(libiberty_noasan)),$(Wc)$(libiberty_noasan), \ + $(Wc)$(libiberty_normal))) + + libiberty_dep = $(patsubst $(Wc)%,%,$(libiberty)) +-plugindir = $(libdir)/gcc/$(target_noncanonical)/$(gcc_version)/plugin +-cc1libdir = $(libdir)/$(libsuffix) ++cc1libdir = $(libexecdir)/gcc/$(target_noncanonical)/$(gcc_version) ++plugindir = $(cc1libdir) + @ENABLE_PLUGIN_TRUE@plugin_LTLIBRARIES = libcc1plugin.la + @ENABLE_PLUGIN_TRUE@cc1lib_LTLIBRARIES = libcc1.la + BUILT_SOURCES = compiler-name.h +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0037-handle-sysroot-support-for-nativesdk-gcc.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0037-handle-sysroot-support-for-nativesdk-gcc.patch new file mode 100644 index 000000000..15efcb12e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0037-handle-sysroot-support-for-nativesdk-gcc.patch @@ -0,0 +1,213 @@ +From 1475b941d7a9c9874b0fb0558d01805945467331 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 7 Dec 2015 23:39:54 +0000 +Subject: [PATCH 37/46] handle sysroot support for nativesdk-gcc + +Being able to build a nativesdk gcc is useful, particularly in cases +where the host compiler may be of an incompatible version (or a 32 +bit compiler is needed). + +Sadly, building nativesdk-gcc is not straight forward. We install +nativesdk-gcc into a relocatable location and this means that its +library locations can change. "Normal" sysroot support doesn't help +in this case since the values of paths like "libdir" change, not just +base root directory of the system. + +In order to handle this we do two things: + +a) Add %r into spec file markup which can be used for injected paths + such as SYSTEMLIBS_DIR (see gcc_multilib_setup()). +b) Add other paths which need relocation into a .gccrelocprefix section + which the relocation code will notice and adjust automatically. 
+ +Upstream-Status: Inappropriate +RP 2015/7/28 + +Signed-off-by: Khem Raj +--- + gcc/cppdefault.c | 50 +++++++++++++++++++++++++++++++++++++------------- + gcc/cppdefault.h | 3 ++- + gcc/gcc.c | 20 ++++++++++++++------ + 3 files changed, 53 insertions(+), 20 deletions(-) + +diff --git a/gcc/cppdefault.c b/gcc/cppdefault.c +index 7b4dd51..9d1166c 100644 +--- a/gcc/cppdefault.c ++++ b/gcc/cppdefault.c +@@ -35,6 +35,30 @@ + # undef CROSS_INCLUDE_DIR + #endif + ++static char GPLUSPLUS_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = GPLUSPLUS_INCLUDE_DIR; ++static char GCC_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = GCC_INCLUDE_DIR; ++static char GPLUSPLUS_TOOL_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = GPLUSPLUS_TOOL_INCLUDE_DIR; ++static char GPLUSPLUS_BACKWARD_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = GPLUSPLUS_BACKWARD_INCLUDE_DIR; ++static char STANDARD_STARTFILE_PREFIX_2VAR[4096] __attribute__ ((section (".gccrelocprefix"))) = STANDARD_STARTFILE_PREFIX_2 GCC_INCLUDE_SUBDIR_TARGET; ++#ifdef LOCAL_INCLUDE_DIR ++static char LOCAL_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = LOCAL_INCLUDE_DIR; ++#endif ++#ifdef PREFIX_INCLUDE_DIR ++static char PREFIX_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = PREFIX_INCLUDE_DIR; ++#endif ++#ifdef FIXED_INCLUDE_DIR ++static char FIXED_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = FIXED_INCLUDE_DIR; ++#endif ++#ifdef CROSS_INCLUDE_DIR ++static char CROSS_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = CROSS_INCLUDE_DIR; ++#endif ++#ifdef TOOL_INCLUDE_DIR ++static char TOOL_INCLUDE_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = TOOL_INCLUDE_DIR; ++#endif ++#ifdef NATIVE_SYSTEM_HEADER_DIR ++static char NATIVE_SYSTEM_HEADER_DIRVAR[4096] __attribute__ ((section (".gccrelocprefix"))) = NATIVE_SYSTEM_HEADER_DIR; ++#endif ++ + const struct default_include cpp_include_defaults[] + #ifdef INCLUDE_DEFAULTS + = INCLUDE_DEFAULTS; +@@ -42,38 +66,38 @@ const struct default_include cpp_include_defaults[] + = { + #ifdef GPLUSPLUS_INCLUDE_DIR + /* Pick up GNU C++ generic include files. */ +- { GPLUSPLUS_INCLUDE_DIR, "G++", 1, 1, ++ { GPLUSPLUS_INCLUDE_DIRVAR, "G++", 1, 1, + GPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT, 0 }, + #endif + #ifdef GPLUSPLUS_TOOL_INCLUDE_DIR + /* Pick up GNU C++ target-dependent include files. */ +- { GPLUSPLUS_TOOL_INCLUDE_DIR, "G++", 1, 1, ++ { GPLUSPLUS_TOOL_INCLUDE_DIRVAR, "G++", 1, 1, + GPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT, 1 }, + #endif + #ifdef GPLUSPLUS_BACKWARD_INCLUDE_DIR + /* Pick up GNU C++ backward and deprecated include files. */ +- { GPLUSPLUS_BACKWARD_INCLUDE_DIR, "G++", 1, 1, ++ { GPLUSPLUS_BACKWARD_INCLUDE_DIRVAR, "G++", 1, 1, + GPLUSPLUS_INCLUDE_DIR_ADD_SYSROOT, 0 }, + #endif + #ifdef GCC_INCLUDE_DIR + /* This is the dir for gcc's private headers. */ +- { GCC_INCLUDE_DIR, "GCC", 0, 0, 0, 0 }, ++ { GCC_INCLUDE_DIRVAR, "GCC", 0, 0, 0, 0 }, + #endif + #ifdef GCC_INCLUDE_SUBDIR_TARGET + /* This is the dir for gcc's private headers under the specified sysroot. */ +- { STANDARD_STARTFILE_PREFIX_2 GCC_INCLUDE_SUBDIR_TARGET, "GCC", 0, 0, 1, 0 }, ++ { STANDARD_STARTFILE_PREFIX_2VAR, "GCC", 0, 0, 1, 0 }, + #endif + #ifdef LOCAL_INCLUDE_DIR + /* /usr/local/include comes before the fixincluded header files. 
*/ +- { LOCAL_INCLUDE_DIR, 0, 0, 1, 1, 2 }, +- { LOCAL_INCLUDE_DIR, 0, 0, 1, 1, 0 }, ++ { LOCAL_INCLUDE_DIRVAR, 0, 0, 1, 1, 2 }, ++ { LOCAL_INCLUDE_DIRVAR, 0, 0, 1, 1, 0 }, + #endif + #ifdef PREFIX_INCLUDE_DIR +- { PREFIX_INCLUDE_DIR, 0, 0, 1, 0, 0 }, ++ { PREFIX_INCLUDE_DIRVAR, 0, 0, 1, 0, 0 }, + #endif + #ifdef FIXED_INCLUDE_DIR + /* This is the dir for fixincludes. */ +- { FIXED_INCLUDE_DIR, "GCC", 0, 0, 0, ++ { FIXED_INCLUDE_DIRVAR, "GCC", 0, 0, 0, + /* A multilib suffix needs adding if different multilibs use + different headers. */ + #ifdef SYSROOT_HEADERS_SUFFIX_SPEC +@@ -85,16 +109,16 @@ const struct default_include cpp_include_defaults[] + #endif + #ifdef CROSS_INCLUDE_DIR + /* One place the target system's headers might be. */ +- { CROSS_INCLUDE_DIR, "GCC", 0, 0, 0, 0 }, ++ { CROSS_INCLUDE_DIRVAR, "GCC", 0, 0, 0, 0 }, + #endif + #ifdef TOOL_INCLUDE_DIR + /* Another place the target system's headers might be. */ +- { TOOL_INCLUDE_DIR, "BINUTILS", 0, 1, 0, 0 }, ++ { TOOL_INCLUDE_DIRVAR, "BINUTILS", 0, 1, 0, 0 }, + #endif + #ifdef NATIVE_SYSTEM_HEADER_DIR + /* /usr/include comes dead last. */ +- { NATIVE_SYSTEM_HEADER_DIR, NATIVE_SYSTEM_HEADER_COMPONENT, 0, 0, 1, 2 }, +- { NATIVE_SYSTEM_HEADER_DIR, NATIVE_SYSTEM_HEADER_COMPONENT, 0, 0, 1, 0 }, ++ { NATIVE_SYSTEM_HEADER_DIRVAR, NATIVE_SYSTEM_HEADER_COMPONENT, 0, 0, 1, 2 }, ++ { NATIVE_SYSTEM_HEADER_DIRVAR, NATIVE_SYSTEM_HEADER_COMPONENT, 0, 0, 1, 0 }, + #endif + { 0, 0, 0, 0, 0, 0 } + }; +diff --git a/gcc/cppdefault.h b/gcc/cppdefault.h +index 8a81b45..9759efd 100644 +--- a/gcc/cppdefault.h ++++ b/gcc/cppdefault.h +@@ -33,7 +33,8 @@ + + struct default_include + { +- const char *const fname; /* The name of the directory. */ ++ const char *fname; /* The name of the directory. */ ++ + const char *const component; /* The component containing the directory + (see update_path in prefix.c) */ + const char cplusplus; /* Only look here if we're compiling C++. */ +diff --git a/gcc/gcc.c b/gcc/gcc.c +index 9750cc2..94c240e 100644 +--- a/gcc/gcc.c ++++ b/gcc/gcc.c +@@ -247,6 +247,8 @@ FILE *report_times_to_file = NULL; + #endif + static const char *target_system_root = DEFAULT_TARGET_SYSTEM_ROOT; + ++static char target_relocatable_prefix[4096] __attribute__ ((section (".gccrelocprefix"))) = SYSTEMLIBS_DIR; ++ + /* Nonzero means pass the updated target_system_root to the compiler. */ + + static int target_system_root_changed; +@@ -517,6 +519,7 @@ or with constant text in a single argument. + %G process LIBGCC_SPEC as a spec. + %R Output the concatenation of target_system_root and + target_sysroot_suffix. ++ %r Output the base path target_relocatable_prefix + %S process STARTFILE_SPEC as a spec. A capital S is actually used here. + %E process ENDFILE_SPEC as a spec. A capital E is actually used here. + %C process CPP_SPEC as a spec. +@@ -1473,10 +1476,10 @@ static const char *gcc_libexec_prefix; + gcc_exec_prefix is set because, in that case, we know where the + compiler has been installed, and use paths relative to that + location instead. 
*/ +-static const char *const standard_exec_prefix = STANDARD_EXEC_PREFIX; +-static const char *const standard_libexec_prefix = STANDARD_LIBEXEC_PREFIX; +-static const char *const standard_bindir_prefix = STANDARD_BINDIR_PREFIX; +-static const char *const standard_startfile_prefix = STANDARD_STARTFILE_PREFIX; ++static char standard_exec_prefix[4096] __attribute__ ((section (".gccrelocprefix"))) = STANDARD_EXEC_PREFIX; ++static char standard_libexec_prefix[4096] __attribute__ ((section (".gccrelocprefix"))) = STANDARD_LIBEXEC_PREFIX; ++static char standard_bindir_prefix[4096] __attribute__ ((section (".gccrelocprefix"))) = STANDARD_BINDIR_PREFIX; ++static char *const standard_startfile_prefix = STANDARD_STARTFILE_PREFIX; + + /* For native compilers, these are well-known paths containing + components that may be provided by the system. For cross +@@ -1484,9 +1487,9 @@ static const char *const standard_startfile_prefix = STANDARD_STARTFILE_PREFIX; + static const char *md_exec_prefix = MD_EXEC_PREFIX; + static const char *md_startfile_prefix = MD_STARTFILE_PREFIX; + static const char *md_startfile_prefix_1 = MD_STARTFILE_PREFIX_1; +-static const char *const standard_startfile_prefix_1 ++static char standard_startfile_prefix_1[4096] __attribute__ ((section (".gccrelocprefix"))) + = STANDARD_STARTFILE_PREFIX_1; +-static const char *const standard_startfile_prefix_2 ++static char standard_startfile_prefix_2[4096] __attribute__ ((section (".gccrelocprefix"))) + = STANDARD_STARTFILE_PREFIX_2; + + /* A relative path to be used in finding the location of tools +@@ -5762,6 +5765,11 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part) + } + break; + ++ case 'r': ++ obstack_grow (&obstack, target_relocatable_prefix, ++ strlen (target_relocatable_prefix)); ++ break; ++ + case 'S': + value = do_spec_1 (startfile_spec, 0, NULL); + if (value != 0) +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0038-Search-target-sysroot-gcc-version-specific-dirs-with.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0038-Search-target-sysroot-gcc-version-specific-dirs-with.patch new file mode 100644 index 000000000..89ee79db8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0038-Search-target-sysroot-gcc-version-specific-dirs-with.patch @@ -0,0 +1,102 @@ +From 42e4cdcaad590536246866b0846ec279e124fa16 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 7 Dec 2015 23:41:45 +0000 +Subject: [PATCH 38/46] Search target sysroot gcc version specific dirs with + multilib. + +We install the gcc libraries (such as crtbegin.p) into +//5.2.0/ +which is a default search path for GCC (aka multi_suffix in the +code below). is 'machine' in gcc's terminology. We use +these directories so that multiple gcc versions could in theory +co-exist on target. + +We only want to build one gcc-cross-canadian per arch and have this work +for all multilibs. can be handled by mapping the multilib + to the one used by gcc-cross-canadian, e.g. +mips64-polkmllib32-linux +is symlinked to by mips64-poky-linux. + +The default gcc search path in the target sysroot for a "lib64" mutlilib +is: + +/lib32/mips64-poky-linux/5.2.0/ +/lib32/../lib64/ +/usr/lib32/mips64-poky-linux/5.2.0/ +/usr/lib32/../lib64/ +/lib32/ +/usr/lib32/ + +which means that the lib32 crtbegin.o will be found and the lib64 ones +will not which leads to compiler failures. + +This patch injects a multilib version of that path first so the lib64 +binaries can be found first. 
With this change the search path becomes: + +/lib32/../lib64/mips64-poky-linux/5.2.0/ +/lib32/mips64-poky-linux/5.2.0/ +/lib32/../lib64/ +/usr/lib32/../lib64/mips64-poky-linux/5.2.0/ +/usr/lib32/mips64-poky-linux/5.2.0/ +/usr/lib32/../lib64/ +/lib32/ +/usr/lib32/ + +Upstream-Status: Pending +RP 2015/7/31 + +Signed-off-by: Khem Raj +--- + gcc/gcc.c | 29 ++++++++++++++++++++++++++++- + 1 file changed, 28 insertions(+), 1 deletion(-) + +diff --git a/gcc/gcc.c b/gcc/gcc.c +index 94c240e..2812819 100644 +--- a/gcc/gcc.c ++++ b/gcc/gcc.c +@@ -2507,7 +2507,7 @@ for_each_path (const struct path_prefix *paths, + if (path == NULL) + { + len = paths->max_len + extra_space + 1; +- len += MAX (MAX (suffix_len, multi_os_dir_len), multiarch_len); ++ len += MAX ((suffix_len + multi_os_dir_len), multiarch_len); + path = XNEWVEC (char, len); + } + +@@ -2519,6 +2519,33 @@ for_each_path (const struct path_prefix *paths, + /* Look first in MACHINE/VERSION subdirectory. */ + if (!skip_multi_dir) + { ++ if (!(pl->os_multilib ? skip_multi_os_dir : skip_multi_dir)) ++ { ++ const char *this_multi; ++ size_t this_multi_len; ++ ++ if (pl->os_multilib) ++ { ++ this_multi = multi_os_dir; ++ this_multi_len = multi_os_dir_len; ++ } ++ else ++ { ++ this_multi = multi_dir; ++ this_multi_len = multi_dir_len; ++ } ++ ++ /* Look in multilib MACHINE/VERSION subdirectory first */ ++ if (this_multi_len) ++ { ++ memcpy (path + len, this_multi, this_multi_len + 1); ++ memcpy (path + len + this_multi_len, multi_suffix, suffix_len + 1); ++ ret = callback (path, callback_info); ++ if (ret) ++ break; ++ } ++ } ++ + memcpy (path + len, multi_suffix, suffix_len + 1); + ret = callback (path, callback_info); + if (ret) +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0039-Fix-various-_FOR_BUILD-and-related-variables.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0039-Fix-various-_FOR_BUILD-and-related-variables.patch new file mode 100644 index 000000000..0ce7aec79 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0039-Fix-various-_FOR_BUILD-and-related-variables.patch @@ -0,0 +1,137 @@ +From 9ced49e459ccf1887feb58adf1e8836dcb4b1bdf Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 7 Dec 2015 23:42:45 +0000 +Subject: [PATCH 39/46] Fix various _FOR_BUILD and related variables + +When doing a FOR_BUILD thing, you have to override CFLAGS with +CFLAGS_FOR_BUILD. And if you use C++, you also have to override +CXXFLAGS with CXXFLAGS_FOR_BUILD. +Without this, when building for mingw, you end up trying to use +the mingw headers for a host build. + +The same goes for other variables as well, such as CPPFLAGS, +CPP, and GMPINC. 
+ +Upstream-Status: Pending + +Signed-off-by: Peter Seebach +Signed-off-by: Mark Hatle +Signed-off-by: Khem Raj +--- + Makefile.in | 6 ++++++ + Makefile.tpl | 5 +++++ + gcc/Makefile.in | 2 +- + gcc/configure | 2 +- + gcc/configure.ac | 2 +- + 5 files changed, 14 insertions(+), 3 deletions(-) + +diff --git a/Makefile.in b/Makefile.in +index beb9b9a..3e1c6bc 100644 +--- a/Makefile.in ++++ b/Makefile.in +@@ -152,6 +152,7 @@ BUILD_EXPORTS = \ + CPP="$(CC_FOR_BUILD) -E"; export CPP; \ + CFLAGS="$(CFLAGS_FOR_BUILD)"; export CFLAGS; \ + CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ ++ CPPFLAGS="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS; \ + CXX="$(CXX_FOR_BUILD)"; export CXX; \ + CXXFLAGS="$(CXXFLAGS_FOR_BUILD)"; export CXXFLAGS; \ + GCJ="$(GCJ_FOR_BUILD)"; export GCJ; \ +@@ -170,6 +171,9 @@ BUILD_EXPORTS = \ + # built for the build system to override those in BASE_FLAGS_TO_PASS. + EXTRA_BUILD_FLAGS = \ + CFLAGS="$(CFLAGS_FOR_BUILD)" \ ++ CXXFLAGS="$(CXXFLAGS_FOR_BUILD)" \ ++ CPP="$(CC_FOR_BUILD) -E" \ ++ CPPFLAGS="$(CPPFLAGS_FOR_BUILD)" \ + LDFLAGS="$(LDFLAGS_FOR_BUILD)" + + # This is the list of directories to built for the host system. +@@ -187,6 +191,7 @@ HOST_SUBDIR = @host_subdir@ + HOST_EXPORTS = \ + $(BASE_EXPORTS) \ + CC="$(CC)"; export CC; \ ++ CPP="$(CC) -E"; export CPP; \ + ADA_CFLAGS="$(ADA_CFLAGS)"; export ADA_CFLAGS; \ + CFLAGS="$(CFLAGS)"; export CFLAGS; \ + CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ +@@ -713,6 +718,7 @@ BASE_FLAGS_TO_PASS = \ + "CC_FOR_BUILD=$(CC_FOR_BUILD)" \ + "CFLAGS_FOR_BUILD=$(CFLAGS_FOR_BUILD)" \ + "CXX_FOR_BUILD=$(CXX_FOR_BUILD)" \ ++ "CXXFLAGS_FOR_BUILD=$(CXXFLAGS_FOR_BUILD)" \ + "EXPECT=$(EXPECT)" \ + "FLEX=$(FLEX)" \ + "INSTALL=$(INSTALL)" \ +diff --git a/Makefile.tpl b/Makefile.tpl +index 6b2eb6a..114e462 100644 +--- a/Makefile.tpl ++++ b/Makefile.tpl +@@ -154,6 +154,7 @@ BUILD_EXPORTS = \ + CC="$(CC_FOR_BUILD)"; export CC; \ + CFLAGS="$(CFLAGS_FOR_BUILD)"; export CFLAGS; \ + CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ ++ CPPFLAGS="$(CPPFLAGS_FOR_BUILD)"; export CPPFLAGS; \ + CXX="$(CXX_FOR_BUILD)"; export CXX; \ + CXXFLAGS="$(CXXFLAGS_FOR_BUILD)"; export CXXFLAGS; \ + GCJ="$(GCJ_FOR_BUILD)"; export GCJ; \ +@@ -172,6 +173,9 @@ BUILD_EXPORTS = \ + # built for the build system to override those in BASE_FLAGS_TO_PASS. + EXTRA_BUILD_FLAGS = \ + CFLAGS="$(CFLAGS_FOR_BUILD)" \ ++ CXXFLAGS="$(CXXFLAGS_FOR_BUILD)" \ ++ CPP="$(CC_FOR_BUILD) -E" \ ++ CPPFLAGS="$(CPPFLAGS_FOR_BUILD)" \ + LDFLAGS="$(LDFLAGS_FOR_BUILD)" + + # This is the list of directories to built for the host system. +@@ -189,6 +193,7 @@ HOST_SUBDIR = @host_subdir@ + HOST_EXPORTS = \ + $(BASE_EXPORTS) \ + CC="$(CC)"; export CC; \ ++ CPP="$(CC) -E"; export CPP; \ + ADA_CFLAGS="$(ADA_CFLAGS)"; export ADA_CFLAGS; \ + CFLAGS="$(CFLAGS)"; export CFLAGS; \ + CONFIG_SHELL="$(SHELL)"; export CONFIG_SHELL; \ +diff --git a/gcc/Makefile.in b/gcc/Makefile.in +index cc75536..0ad2dc8 100644 +--- a/gcc/Makefile.in ++++ b/gcc/Makefile.in +@@ -780,7 +780,7 @@ BUILD_LDFLAGS=@BUILD_LDFLAGS@ + BUILD_NO_PIE_FLAG = @BUILD_NO_PIE_FLAG@ + BUILD_LDFLAGS += $(BUILD_NO_PIE_FLAG) + BUILD_CPPFLAGS= -I. -I$(@D) -I$(srcdir) -I$(srcdir)/$(@D) \ +- -I$(srcdir)/../include @INCINTL@ $(CPPINC) $(CPPFLAGS) ++ -I$(srcdir)/../include @INCINTL@ $(CPPINC) $(CPPFLAGS_FOR_BUILD) + + # Actual name to use when installing a native compiler. 
+ GCC_INSTALL_NAME := $(shell echo gcc|sed '$(program_transform_name)') +diff --git a/gcc/configure b/gcc/configure +index 377253e..78fc64a 100755 +--- a/gcc/configure ++++ b/gcc/configure +@@ -11799,7 +11799,7 @@ else + CC="${CC_FOR_BUILD}" CFLAGS="${CFLAGS_FOR_BUILD}" \ + CXX="${CXX_FOR_BUILD}" CXXFLAGS="${CXXFLAGS_FOR_BUILD}" \ + LD="${LD_FOR_BUILD}" LDFLAGS="${LDFLAGS_FOR_BUILD}" \ +- GMPINC="" CPPFLAGS="${CPPFLAGS} -DGENERATOR_FILE" \ ++ GMPINC="" CPPFLAGS="${CPPFLAGS_FOR_BUILD} -DGENERATOR_FILE" \ + ${realsrcdir}/configure \ + --enable-languages=${enable_languages-all} \ + --target=$target_alias --host=$build_alias --build=$build_alias +diff --git a/gcc/configure.ac b/gcc/configure.ac +index 54e7619..a94666e 100644 +--- a/gcc/configure.ac ++++ b/gcc/configure.ac +@@ -1682,7 +1682,7 @@ else + CC="${CC_FOR_BUILD}" CFLAGS="${CFLAGS_FOR_BUILD}" \ + CXX="${CXX_FOR_BUILD}" CXXFLAGS="${CXXFLAGS_FOR_BUILD}" \ + LD="${LD_FOR_BUILD}" LDFLAGS="${LDFLAGS_FOR_BUILD}" \ +- GMPINC="" CPPFLAGS="${CPPFLAGS} -DGENERATOR_FILE" \ ++ GMPINC="" CPPFLAGS="${CPPFLAGS_FOR_BUILD} -DGENERATOR_FILE" \ + ${realsrcdir}/configure \ + --enable-languages=${enable_languages-all} \ + --target=$target_alias --host=$build_alias --build=$build_alias +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch new file mode 100644 index 000000000..c9a6fd0eb --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0040-nios2-Define-MUSL_DYNAMIC_LINKER.patch @@ -0,0 +1,28 @@ +From b0412c01c275aaeb6b458461cd2425120c8bcec8 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Tue, 2 Feb 2016 10:26:10 -0800 +Subject: [PATCH 40/46] nios2: Define MUSL_DYNAMIC_LINKER + +Signed-off-by: Marek Vasut +Signed-off-by: Khem Raj +--- +Upstream-Status: Pending + + gcc/config/nios2/linux.h | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/gcc/config/nios2/linux.h b/gcc/config/nios2/linux.h +index 4ef55b5..62bc1e7 100644 +--- a/gcc/config/nios2/linux.h ++++ b/gcc/config/nios2/linux.h +@@ -30,6 +30,7 @@ + #define CPP_SPEC "%{posix:-D_POSIX_SOURCE} %{pthread:-D_REENTRANT}" + + #define GLIBC_DYNAMIC_LINKER "/lib/ld-linux-nios2.so.1" ++#define MUSL_DYNAMIC_LINKER "/lib/ld-musl-nios2.so.1" + + #undef LINK_SPEC + #define LINK_SPEC LINK_SPEC_ENDIAN \ +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0041-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0041-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch new file mode 100644 index 000000000..29b7ce72d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0041-Add-ssp_nonshared-to-link-commandline-for-musl-targe.patch @@ -0,0 +1,87 @@ +From 210f6b3b82084cc756e02b8bc12f909a43b14ee8 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Tue, 27 Jun 2017 18:10:54 -0700 +Subject: [PATCH 40/49] Add ssp_nonshared to link commandline for musl targets + +when -fstack-protector options are enabled we need to +link with ssp_shared on musl since it does not provide +the __stack_chk_fail_local() so essentially it provides +libssp but not libssp_nonshared something like +TARGET_LIBC_PROVIDES_SSP_BUT_NOT_SSP_NONSHARED + where-as for glibc the needed symbols +are already present in libc_nonshared library therefore +we do not need any library helper on glibc based systems +but musl needs the 
libssp_noshared from gcc + +Upstream-Status: Pending + +Signed-off-by: Khem Raj +--- + gcc/config/linux.h | 7 +++++++ + gcc/config/rs6000/linux.h | 10 ++++++++++ + gcc/config/rs6000/linux64.h | 10 ++++++++++ + 3 files changed, 27 insertions(+) + +diff --git a/gcc/config/linux.h b/gcc/config/linux.h +index 2e683d0c430..1b4df798671 100644 +--- a/gcc/config/linux.h ++++ b/gcc/config/linux.h +@@ -182,6 +182,13 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see + { GCC_INCLUDE_DIR, "GCC", 0, 1, 0, 0 }, \ + { 0, 0, 0, 0, 0, 0 } \ + } ++#ifdef TARGET_LIBC_PROVIDES_SSP ++#undef LINK_SSP_SPEC ++#define LINK_SSP_SPEC "%{fstack-protector|fstack-protector-all" \ ++ "|fstack-protector-strong|fstack-protector-explicit" \ ++ ":-lssp_nonshared}" ++#endif ++ + #endif + + #if (DEFAULT_LIBC == LIBC_UCLIBC) && defined (SINGLE_LIBC) /* uClinux */ +diff --git a/gcc/config/rs6000/linux.h b/gcc/config/rs6000/linux.h +index 684afd6c190..22cfa391b89 100644 +--- a/gcc/config/rs6000/linux.h ++++ b/gcc/config/rs6000/linux.h +@@ -91,6 +91,16 @@ + " -m elf32ppclinux") + #endif + ++/* link libssp_nonshared.a with musl */ ++#if DEFAULT_LIBC == LIBC_MUSL ++#ifdef TARGET_LIBC_PROVIDES_SSP ++#undef LINK_SSP_SPEC ++#define LINK_SSP_SPEC "%{fstack-protector|fstack-protector-all" \ ++ "|fstack-protector-strong|fstack-protector-explicit" \ ++ ":-lssp_nonshared}" ++#endif ++#endif ++ + #undef LINK_OS_LINUX_SPEC + #define LINK_OS_LINUX_SPEC LINK_OS_LINUX_EMUL " %{!shared: %{!static: \ + %{rdynamic:-export-dynamic} \ +diff --git a/gcc/config/rs6000/linux64.h b/gcc/config/rs6000/linux64.h +index 3b00ec0fcf0..8371f8d7b6b 100644 +--- a/gcc/config/rs6000/linux64.h ++++ b/gcc/config/rs6000/linux64.h +@@ -465,6 +465,16 @@ extern int dot_symbols; + " -m elf64ppc") + #endif + ++/* link libssp_nonshared.a with musl */ ++#if DEFAULT_LIBC == LIBC_MUSL ++#ifdef TARGET_LIBC_PROVIDES_SSP ++#undef LINK_SSP_SPEC ++#define LINK_SSP_SPEC "%{fstack-protector|fstack-protector-all" \ ++ "|fstack-protector-strong|fstack-protector-explicit" \ ++ ":-lssp_nonshared}" ++#endif ++#endif ++ + #define LINK_OS_LINUX_SPEC32 LINK_OS_LINUX_EMUL32 " %{!shared: %{!static: \ + %{rdynamic:-export-dynamic} \ + -dynamic-linker " GNU_USER_DYNAMIC_LINKER32 "}} \ +-- +2.13.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0042-gcc-libcpp-support-ffile-prefix-map-old-new.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0042-gcc-libcpp-support-ffile-prefix-map-old-new.patch new file mode 100644 index 000000000..861f0fd7f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0042-gcc-libcpp-support-ffile-prefix-map-old-new.patch @@ -0,0 +1,292 @@ +From ba738cc411c9a54e389e336bcaa0a2428dd4a9d2 Mon Sep 17 00:00:00 2001 +From: Hongxu Jia +Date: Wed, 16 Mar 2016 02:27:43 -0400 +Subject: [PATCH 42/46] gcc/libcpp: support -ffile-prefix-map== + +Similar -fdebug-prefix-map, add option -ffile-prefix-map to map one +directory name (old) to another (new) in __FILE__, __BASE_FILE__ and +__builtin_FILE (). 
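As a rough usage sketch (the paths below are hypothetical; only the option name and its OLD=NEW form come from the patch description), the new flag rewrites the prefix of the file names that get embedded into objects:

/* demo.c -- compiled, for example, as:
 *   gcc -ffile-prefix-map=/home/user/work=/usr/src -c /home/user/work/demo.c
 * __FILE__ then expands to "/usr/src/demo.c" rather than the absolute
 * build-tree path, which keeps the resulting object reproducible. */
#include <stdio.h>

int main (void)
{
  printf ("built from: %s\n", __FILE__);
  return 0;
}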
+ +https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70268 + +Upstream-Status: Submitted [gcc-patches@gcc.gnu.org] +Signed-off-by: Hongxu Jia +--- + gcc/c-family/c-opts.c | 13 +++++++ + gcc/c-family/c.opt | 4 +++ + gcc/dwarf2out.c | 1 + + gcc/gimplify.c | 3 ++ + libcpp/Makefile.in | 10 +++--- + libcpp/file-map.c | 92 +++++++++++++++++++++++++++++++++++++++++++++++ + libcpp/include/file-map.h | 30 ++++++++++++++++ + libcpp/macro.c | 2 ++ + 8 files changed, 150 insertions(+), 5 deletions(-) + create mode 100644 libcpp/file-map.c + create mode 100644 libcpp/include/file-map.h + +diff --git a/gcc/c-family/c-opts.c b/gcc/c-family/c-opts.c +index fec58bc..7a0af43 100644 +--- a/gcc/c-family/c-opts.c ++++ b/gcc/c-family/c-opts.c +@@ -38,6 +38,14 @@ along with GCC; see the file COPYING3. If not see + #include "opts.h" + #include "plugin.h" /* For PLUGIN_INCLUDE_FILE event. */ + #include "mkdeps.h" ++#include "file-map.h" ++#include "c-target.h" ++#include "tm.h" /* For BYTES_BIG_ENDIAN, ++ DOLLARS_IN_IDENTIFIERS, ++ STDC_0_IN_SYSTEM_HEADERS, ++ TARGET_FLT_EVAL_METHOD_NON_DEFAULT and ++ TARGET_OPTF. */ ++#include "tm_p.h" /* For C_COMMON_OVERRIDE_OPTIONS. */ + #include "dumpfile.h" + + #ifndef DOLLARS_IN_IDENTIFIERS +@@ -503,6 +511,11 @@ c_common_handle_option (size_t scode, const char *arg, int value, + cpp_opts->narrow_charset = arg; + break; + ++ case OPT_ffile_prefix_map_: ++ if (add_file_prefix_map (arg) < 0) ++ error ("invalid argument %qs to -ffile-prefix-map", arg); ++ break; ++ + case OPT_fwide_exec_charset_: + cpp_opts->wide_charset = arg; + break; +diff --git a/gcc/c-family/c.opt b/gcc/c-family/c.opt +index 660da6c..31f7b34 100644 +--- a/gcc/c-family/c.opt ++++ b/gcc/c-family/c.opt +@@ -1208,6 +1208,10 @@ fexec-charset= + C ObjC C++ ObjC++ Joined RejectNegative + -fexec-charset= Convert all strings and character constants to character set . + ++ffile-prefix-map= ++C ObjC C++ ObjC++ Joined RejectNegative ++-ffile-prefix-map= Map one directory name to another in __FILE__, __BASE_FILE__ and __builtin_FILE () ++ + fextended-identifiers + C ObjC C++ ObjC++ + Permit universal character names (\\u and \\U) in identifiers. +diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c +index 80f2df5..a2bfcc0 100644 +--- a/gcc/dwarf2out.c ++++ b/gcc/dwarf2out.c +@@ -21672,6 +21672,7 @@ gen_producer_string (void) + case OPT_fltrans_output_list_: + case OPT_fresolution_: + case OPT_fdebug_prefix_map_: ++ case OPT_ffile_prefix_map_: + /* Ignore these. */ + continue; + default: +diff --git a/gcc/gimplify.c b/gcc/gimplify.c +index e223e59..1433c25 100644 +--- a/gcc/gimplify.c ++++ b/gcc/gimplify.c +@@ -57,6 +57,8 @@ along with GCC; see the file COPYING3. 
If not see + #include "gomp-constants.h" + #include "tree-dump.h" + #include "gimple-walk.h" ++#include "file-map.h" ++ + #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */ + #include "builtins.h" + +@@ -2432,6 +2434,7 @@ gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) + case BUILT_IN_FILE: + { + const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p)); ++ locfile = remap_file_filename (locfile); + *expr_p = build_string_literal (strlen (locfile) + 1, locfile); + return GS_OK; + } +diff --git a/libcpp/Makefile.in b/libcpp/Makefile.in +index a7d7828..3d29572 100644 +--- a/libcpp/Makefile.in ++++ b/libcpp/Makefile.in +@@ -84,12 +84,12 @@ DEPMODE = $(CXXDEPMODE) + + + libcpp_a_OBJS = charset.o directives.o directives-only.o errors.o \ +- expr.o files.o identifiers.o init.o lex.o line-map.o macro.o \ +- mkdeps.o pch.o symtab.o traditional.o ++ expr.o file-map.o files.o identifiers.o init.o lex.o line-map.o \ ++ macro.o mkdeps.o pch.o symtab.o traditional.o + + libcpp_a_SOURCES = charset.c directives.c directives-only.c errors.c \ +- expr.c files.c identifiers.c init.c lex.c line-map.c macro.c \ +- mkdeps.c pch.c symtab.c traditional.c ++ expr.c file-map.c files.c identifiers.c init.c lex.c line-map.c \ ++ macro.c mkdeps.c pch.c symtab.c traditional.c + + all: libcpp.a $(USED_CATALOGS) + +@@ -263,7 +263,7 @@ po/$(PACKAGE).pot: $(libcpp_a_SOURCES) + + TAGS_SOURCES = $(libcpp_a_SOURCES) internal.h ucnid.h \ + include/line-map.h include/symtab.h include/cpp-id-data.h \ +- include/cpplib.h include/mkdeps.h system.h ++ include/cpplib.h include/mkdeps.h system.h include/file-map.h + + TAGS: $(TAGS_SOURCES) + cd $(srcdir) && etags $(TAGS_SOURCES) +diff --git a/libcpp/file-map.c b/libcpp/file-map.c +new file mode 100644 +index 0000000..18035ef +--- /dev/null ++++ b/libcpp/file-map.c +@@ -0,0 +1,92 @@ ++/* Map one directory name to another in __FILE__, __BASE_FILE__ ++ and __builtin_FILE (). ++ Copyright (C) 2001-2016 Free Software Foundation, Inc. ++ ++This program is free software; you can redistribute it and/or modify it ++under the terms of the GNU General Public License as published by the ++Free Software Foundation; either version 3, or (at your option) any ++later version. ++ ++This program is distributed in the hope that it will be useful, ++but WITHOUT ANY WARRANTY; without even the implied warranty of ++MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++GNU General Public License for more details. ++ ++You should have received a copy of the GNU General Public License ++along with this program; see the file COPYING3. If not see ++. ++ ++ In other words, you are welcome to use, share and improve this program. ++ You are forbidden to forbid anyone else to use, share and improve ++ what you give them. Help stamp out software-hoarding! */ ++ ++#include "config.h" ++#include "system.h" ++#include "file-map.h" ++ ++/* Structure recording the mapping from source file and directory ++ names at compile time to __FILE__ */ ++typedef struct file_prefix_map ++{ ++ const char *old_prefix; ++ const char *new_prefix; ++ size_t old_len; ++ size_t new_len; ++ struct file_prefix_map *next; ++} file_prefix_map; ++ ++/* Linked list of such structures. */ ++static file_prefix_map *file_prefix_maps; ++ ++/* Record prefix mapping of __FILE__. ARG is the argument to ++ -ffile-prefix-map and must be of the form OLD=NEW. 
*/ ++int ++add_file_prefix_map (const char *arg) ++{ ++ file_prefix_map *map; ++ const char *p; ++ ++ p = strchr (arg, '='); ++ if (!p) ++ { ++ fprintf(stderr, "invalid argument %qs to -ffile-prefix-map", arg); ++ return -1; ++ } ++ map = XNEW (file_prefix_map); ++ map->old_prefix = xstrndup (arg, p - arg); ++ map->old_len = p - arg; ++ p++; ++ map->new_prefix = xstrdup (p); ++ map->new_len = strlen (p); ++ map->next = file_prefix_maps; ++ file_prefix_maps = map; ++ ++ return 0; ++} ++ ++/* Perform user-specified mapping of __FILE__ prefixes. Return ++ the new name corresponding to filename. */ ++ ++const char * ++remap_file_filename (const char *filename) ++{ ++ file_prefix_map *map; ++ char *s; ++ const char *name; ++ size_t name_len; ++ ++ for (map = file_prefix_maps; map; map = map->next) ++ if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0) ++ break; ++ if (!map) ++ return filename; ++ name = filename + map->old_len; ++ name_len = strlen (name) + 1; ++ s = (char *) alloca (name_len + map->new_len); ++ memcpy (s, map->new_prefix, map->new_len); ++ memcpy (s + map->new_len, name, name_len); ++ ++ return xstrdup (s); ++} ++ ++ +diff --git a/libcpp/include/file-map.h b/libcpp/include/file-map.h +new file mode 100644 +index 0000000..8750315 +--- /dev/null ++++ b/libcpp/include/file-map.h +@@ -0,0 +1,30 @@ ++/* Map one directory name to another in __FILE__, __BASE_FILE__ ++ and __builtin_FILE (). ++ Copyright (C) 2001-2016 Free Software Foundation, Inc. ++ ++This program is free software; you can redistribute it and/or modify it ++under the terms of the GNU General Public License as published by the ++Free Software Foundation; either version 3, or (at your option) any ++later version. ++ ++This program is distributed in the hope that it will be useful, ++but WITHOUT ANY WARRANTY; without even the implied warranty of ++MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ++GNU General Public License for more details. ++ ++You should have received a copy of the GNU General Public License ++along with this program; see the file COPYING3. If not see ++. ++ ++ In other words, you are welcome to use, share and improve this program. ++ You are forbidden to forbid anyone else to use, share and improve ++ what you give them. Help stamp out software-hoarding! */ ++ ++#ifndef LIBCPP_FILE_MAP_H ++#define LIBCPP_FILE_MAP_H ++ ++const char * remap_file_filename (const char *filename); ++ ++int add_file_prefix_map (const char *arg); ++ ++#endif /* !LIBCPP_FILE_MAP_H */ +diff --git a/libcpp/macro.c b/libcpp/macro.c +index c251553..3ceec3d 100644 +--- a/libcpp/macro.c ++++ b/libcpp/macro.c +@@ -26,6 +26,7 @@ along with this program; see the file COPYING3. If not see + #include "system.h" + #include "cpplib.h" + #include "internal.h" ++#include "file-map.h" + + typedef struct macro_arg macro_arg; + /* This structure represents the tokens of a macro argument. 
These +@@ -301,6 +302,7 @@ _cpp_builtin_macro_text (cpp_reader *pfile, cpp_hashnode *node, + if (!name) + abort (); + } ++ name = remap_file_filename (name); + len = strlen (name); + buf = _cpp_unaligned_alloc (pfile, len * 2 + 3); + result = buf; +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0043-Reuse-fdebug-prefix-map-to-replace-ffile-prefix-map.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0043-Reuse-fdebug-prefix-map-to-replace-ffile-prefix-map.patch new file mode 100644 index 000000000..0077f80e4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0043-Reuse-fdebug-prefix-map-to-replace-ffile-prefix-map.patch @@ -0,0 +1,43 @@ +From 25c87c6cc40ec5cc6965f8bfb215bec01abd6d82 Mon Sep 17 00:00:00 2001 +From: Hongxu Jia +Date: Wed, 16 Mar 2016 05:39:59 -0400 +Subject: [PATCH 43/46] Reuse -fdebug-prefix-map to replace -ffile-prefix-map + +The oe-core may use external toolchain to compile, +which may not support -ffile-prefix-map. + +Since we use -fdebug-prefix-map to do the same thing, +so we could reuse it to replace -ffile-prefix-map. + +Upstream-Status: Inappropriate[oe-core specific] + +Signed-off-by: Hongxu Jia +--- + gcc/opts-global.c | 4 ++++ + 1 file changed, 4 insertions(+) + +diff --git a/gcc/opts-global.c b/gcc/opts-global.c +index b7e5232..121d7b9 100644 +--- a/gcc/opts-global.c ++++ b/gcc/opts-global.c +@@ -31,6 +31,7 @@ along with GCC; see the file COPYING3. If not see + #include "langhooks.h" + #include "dbgcnt.h" + #include "debug.h" ++#include "file-map.h" + #include "output.h" + #include "plugin.h" + #include "toplev.h" +@@ -357,6 +358,9 @@ handle_common_deferred_options (void) + + case OPT_fdebug_prefix_map_: + add_debug_prefix_map (opt->arg); ++ ++ /* Reuse -fdebug-prefix-map to replace -ffile-prefix-map */ ++ add_file_prefix_map (opt->arg); + break; + + case OPT_fdump_: +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0044-gcc-final.c-fdebug-prefix-map-support-to-remap-sourc.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0044-gcc-final.c-fdebug-prefix-map-support-to-remap-sourc.patch new file mode 100644 index 000000000..5d41af44a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0044-gcc-final.c-fdebug-prefix-map-support-to-remap-sourc.patch @@ -0,0 +1,54 @@ +From 6ab23e88aef22bbabee7b9600c459ff39547bb66 Mon Sep 17 00:00:00 2001 +From: Hongxu Jia +Date: Thu, 24 Mar 2016 11:23:14 -0400 +Subject: [PATCH 44/46] gcc/final.c: -fdebug-prefix-map support to remap + sources with relative path + +PR other/70428 +* final.c (remap_debug_filename): Use lrealpath to translate +relative path before remapping + +https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70428 +Upstream-Status: Submitted [gcc-patches@gcc.gnu.org] + +Signed-off-by: Hongxu Jia +--- + gcc/final.c | 15 ++++++++++++--- + 1 file changed, 12 insertions(+), 3 deletions(-) + +diff --git a/gcc/final.c b/gcc/final.c +index 55cf509..23293e5 100644 +--- a/gcc/final.c ++++ b/gcc/final.c +@@ -1554,16 +1554,25 @@ remap_debug_filename (const char *filename) + const char *name; + size_t name_len; + ++ /* Support to remap filename with relative path */ ++ char *realpath = lrealpath (filename); ++ if (realpath == NULL) ++ return filename; ++ + for (map = debug_prefix_maps; map; map = map->next) +- if (filename_ncmp (filename, map->old_prefix, map->old_len) == 0) ++ if (filename_ncmp (realpath, map->old_prefix, map->old_len) == 0) + break; + if (!map) +- return 
filename; +- name = filename + map->old_len; ++ { ++ free (realpath); ++ return filename; ++ } ++ name = realpath + map->old_len; + name_len = strlen (name) + 1; + s = (char *) alloca (name_len + map->new_len); + memcpy (s, map->new_prefix, map->new_len); + memcpy (s + map->new_len, name, name_len); ++ free (realpath); + return ggc_strdup (s); + } + +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0045-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0045-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch new file mode 100644 index 000000000..c62b727d6 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0045-libgcc-Add-knob-to-use-ldbl-128-on-ppc.patch @@ -0,0 +1,125 @@ +From 5a47d404ea29e2547269e3ddf38754462d93f903 Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 29 Apr 2016 20:03:28 +0000 +Subject: [PATCH 45/46] libgcc: Add knob to use ldbl-128 on ppc + +musl does not support ldbl 128 so we can not assume +that linux as a whole supports ldbl-128 bits, instead +act upon configure option passed to gcc and assume no +on musl and yes otherwise if no option is passed since +default behaviour is to assume ldbl128 it does not +change the defaults + +Signed-off-by: Khem Raj +--- +Upstream-Status: Pending + + libgcc/Makefile.in | 1 + + libgcc/config/rs6000/t-linux | 5 ++++- + libgcc/configure | 18 ++++++++++++++++++ + libgcc/configure.ac | 12 ++++++++++++ + 4 files changed, 35 insertions(+), 1 deletion(-) + mode change 100644 => 100755 libgcc/configure + +diff --git a/libgcc/Makefile.in b/libgcc/Makefile.in +index f09b39b..296cf0f 100644 +--- a/libgcc/Makefile.in ++++ b/libgcc/Makefile.in +@@ -43,6 +43,7 @@ enable_vtable_verify = @enable_vtable_verify@ + enable_decimal_float = @enable_decimal_float@ + fixed_point = @fixed_point@ + with_aix_soname = @with_aix_soname@ ++with_ldbl128 = @with_ldbl128@ + + host_noncanonical = @host_noncanonical@ + real_host_noncanonical = @real_host_noncanonical@ +diff --git a/libgcc/config/rs6000/t-linux b/libgcc/config/rs6000/t-linux +index 4f6d4c4..c50dd94 100644 +--- a/libgcc/config/rs6000/t-linux ++++ b/libgcc/config/rs6000/t-linux +@@ -1,3 +1,6 @@ + SHLIB_MAPFILES += $(srcdir)/config/rs6000/libgcc-glibc.ver + +-HOST_LIBGCC2_CFLAGS += -mlong-double-128 -mno-minimal-toc ++ifeq ($(with_ldbl128),yes) ++HOST_LIBGCC2_CFLAGS += -mlong-double-128 ++endif ++HOST_LIBGCC2_CFLAGS += -mno-minimal-toc +diff --git a/libgcc/configure b/libgcc/configure +old mode 100644 +new mode 100755 +index e7d6c75..e9a9019 +--- a/libgcc/configure ++++ b/libgcc/configure +@@ -614,6 +614,7 @@ build_vendor + build_cpu + build + with_aix_soname ++with_ldbl128 + enable_vtable_verify + enable_shared + libgcc_topdir +@@ -663,6 +664,7 @@ with_cross_host + with_ld + enable_shared + enable_vtable_verify ++with_long_double_128 + with_aix_soname + enable_version_specific_runtime_libs + with_slibdir +@@ -1319,6 +1321,7 @@ Optional Packages: + --with-target-subdir=SUBDIR Configuring in a subdirectory for target + --with-cross-host=HOST Configuring with a cross compiler + --with-ld arrange to use the specified ld (full pathname) ++ --with-long-double-128 use 128-bit long double by default + --with-aix-soname=aix|svr4|both + shared library versioning (aka "SONAME") variant to + provide on AIX +@@ -2201,6 +2204,21 @@ fi + + + ++# Check whether --with-long-double-128 was given. 
++if test "${with_long_double_128+set}" = set; then : ++ withval=$with_long_double_128; with_ldbl128="$with_long_double_128" ++else ++ case "${host}" in ++ power*-*-musl*) ++ with_ldbl128="no";; ++ *) with_ldbl128="yes";; ++ esac ++ ++fi ++ ++ ++ ++ + # Check whether --with-aix-soname was given. + if test "${with_aix_soname+set}" = set; then : + withval=$with_aix_soname; case "${host}:${enable_shared}" in +diff --git a/libgcc/configure.ac b/libgcc/configure.ac +index 269997f..81dc3ba 100644 +--- a/libgcc/configure.ac ++++ b/libgcc/configure.ac +@@ -77,6 +77,18 @@ AC_ARG_ENABLE(vtable-verify, + [enable_vtable_verify=no]) + AC_SUBST(enable_vtable_verify) + ++AC_ARG_WITH(long-double-128, ++[AS_HELP_STRING([--with-long-double-128], ++ [use 128-bit long double by default])], ++ with_ldbl128="$with_long_double_128", ++[case "${host}" in ++ power*-*-musl*) ++ with_ldbl128="no";; ++ *) with_ldbl128="yes";; ++ esac ++]) ++AC_SUBST(with_ldbl128) ++ + AC_ARG_WITH(aix-soname, + [AS_HELP_STRING([--with-aix-soname=aix|svr4|both], + [shared library versioning (aka "SONAME") variant to provide on AIX])], +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0046-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0046-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch new file mode 100644 index 000000000..390037f7b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0046-Link-libgcc-using-LDFLAGS-not-just-SHLIB_LDFLAGS.patch @@ -0,0 +1,29 @@ +From 513bf3c33e2f551f08bd57605091d5ddeba3536b Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Wed, 4 May 2016 21:11:34 -0700 +Subject: [PATCH 46/46] Link libgcc using LDFLAGS, not just SHLIB_LDFLAGS + +Upstream-Status: Pending + +Signed-off-by: Christopher Larson +Signed-off-by: Khem Raj +--- + libgcc/config/t-slibgcc | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/libgcc/config/t-slibgcc b/libgcc/config/t-slibgcc +index 8c5f890..29be909 100644 +--- a/libgcc/config/t-slibgcc ++++ b/libgcc/config/t-slibgcc +@@ -32,7 +32,7 @@ SHLIB_INSTALL_SOLINK = $(LN_S) $(SHLIB_SONAME) \ + $(DESTDIR)$(slibdir)$(SHLIB_SLIBDIR_QUAL)/$(SHLIB_SOLINK) + + SHLIB_LINK = $(CC) $(LIBGCC2_CFLAGS) -shared -nodefaultlibs \ +- $(SHLIB_LDFLAGS) \ ++ $(LDFLAGS) $(SHLIB_LDFLAGS) \ + -o $(SHLIB_DIR)/$(SHLIB_SONAME).tmp @multilib_flags@ \ + $(SHLIB_OBJS) $(SHLIB_LC) && \ + rm -f $(SHLIB_DIR)/$(SHLIB_SOLINK) && \ +-- +2.8.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0047-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0047-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch new file mode 100644 index 000000000..ed6cd6905 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0047-libgcc_s-Use-alias-for-__cpu_indicator_init-instead-.patch @@ -0,0 +1,85 @@ +From 0a9ed0479203cb7e69c3745b0c259007410f39ba Mon Sep 17 00:00:00 2001 +From: Szabolcs Nagy +Date: Sat, 24 Oct 2015 20:09:53 +0000 +Subject: [PATCH 47/47] libgcc_s: Use alias for __cpu_indicator_init instead of + symver + +Adapter from + +https://gcc.gnu.org/ml/gcc-patches/2015-05/msg00899.html + +This fix was debated but hasnt been applied gcc upstream since +they expect musl to support '@' in symbol versioning which is +a sun/gnu versioning extention. 
This patch however avoids the +need for the '@' symbols at all + +libgcc/Changelog: + +2015-05-11 Szabolcs Nagy + + * config/i386/cpuinfo.c (__cpu_indicator_init_local): Add. + (__cpu_indicator_init@GCC_4.8.0, __cpu_model@GCC_4.8.0): Remove. + + * config/i386/t-linux (HOST_LIBGCC2_CFLAGS): Remove -DUSE_ELF_SYMVER. + +gcc/Changelog: + +2015-05-11 Szabolcs Nagy + + * config/i386/i386.c (ix86_expand_builtin): Make __builtin_cpu_init + call __cpu_indicator_init_local instead of __cpu_indicator_init. + +Signed-off-by: Khem Raj +--- +Upstream-Status: Rejected + + gcc/config/i386/i386.c | 4 ++-- + libgcc/config/i386/cpuinfo.c | 6 +++--- + libgcc/config/i386/t-linux | 2 +- + 3 files changed, 6 insertions(+), 6 deletions(-) + +diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c +index 861a029..1c97d72 100644 +--- a/gcc/config/i386/i386.c ++++ b/gcc/config/i386/i386.c +@@ -40323,10 +40323,10 @@ ix86_expand_builtin (tree exp, rtx target, rtx subtarget, + { + case IX86_BUILTIN_CPU_INIT: + { +- /* Make it call __cpu_indicator_init in libgcc. */ ++ /* Make it call __cpu_indicator_init_local in libgcc.a. */ + tree call_expr, fndecl, type; + type = build_function_type_list (integer_type_node, NULL_TREE); +- fndecl = build_fn_decl ("__cpu_indicator_init", type); ++ fndecl = build_fn_decl ("__cpu_indicator_init_local", type); + call_expr = build_call_expr (fndecl, 0); + return expand_expr (call_expr, target, mode, EXPAND_NORMAL); + } +diff --git a/libgcc/config/i386/cpuinfo.c b/libgcc/config/i386/cpuinfo.c +index 8c2248d..6c82f15 100644 +--- a/libgcc/config/i386/cpuinfo.c ++++ b/libgcc/config/i386/cpuinfo.c +@@ -485,7 +485,7 @@ __cpu_indicator_init (void) + return 0; + } + +-#if defined SHARED && defined USE_ELF_SYMVER +-__asm__ (".symver __cpu_indicator_init, __cpu_indicator_init@GCC_4.8.0"); +-__asm__ (".symver __cpu_model, __cpu_model@GCC_4.8.0"); ++#ifndef SHARED ++int __cpu_indicator_init_local (void) ++ __attribute__ ((weak, alias ("__cpu_indicator_init"))); + #endif +diff --git a/libgcc/config/i386/t-linux b/libgcc/config/i386/t-linux +index 11bb46e..4f47f7b 100644 +--- a/libgcc/config/i386/t-linux ++++ b/libgcc/config/i386/t-linux +@@ -3,4 +3,4 @@ + # t-slibgcc-elf-ver and t-linux + SHLIB_MAPFILES = libgcc-std.ver $(srcdir)/config/i386/libgcc-glibc.ver + +-HOST_LIBGCC2_CFLAGS += -mlong-double-80 -DUSE_ELF_SYMVER ++HOST_LIBGCC2_CFLAGS += -mlong-double-80 +-- +2.9.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0048-sync-gcc-stddef.h-with-musl.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0048-sync-gcc-stddef.h-with-musl.patch new file mode 100644 index 000000000..30c158d7d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0048-sync-gcc-stddef.h-with-musl.patch @@ -0,0 +1,91 @@ +From 10595c03c39b4e980d2a00e16fc84e9caf82292e Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Fri, 3 Feb 2017 12:56:00 -0800 +Subject: [PATCH 48/48] sync gcc stddef.h with musl + +musl defines ptrdiff_t size_t and wchar_t +so dont define them here if musl is definining them + +Signed-off-by: Khem Raj +--- +Upstream-Status: Pending + + gcc/ginclude/stddef.h | 9 +++++++++ + 1 file changed, 9 insertions(+) + +diff --git a/gcc/ginclude/stddef.h b/gcc/ginclude/stddef.h +index d711530d053..c315b7a97c1 100644 +--- a/gcc/ginclude/stddef.h ++++ b/gcc/ginclude/stddef.h +@@ -134,6 +134,7 @@ _TYPE_wchar_t; + #ifndef ___int_ptrdiff_t_h + #ifndef _GCC_PTRDIFF_T + #ifndef _PTRDIFF_T_DECLARED /* DragonFly */ ++#ifndef __DEFINED_ptrdiff_t /* musl */ + 
#define _PTRDIFF_T + #define _T_PTRDIFF_ + #define _T_PTRDIFF +@@ -143,10 +144,12 @@ _TYPE_wchar_t; + #define ___int_ptrdiff_t_h + #define _GCC_PTRDIFF_T + #define _PTRDIFF_T_DECLARED ++#define __DEFINED_ptrdiff_t /* musl */ + #ifndef __PTRDIFF_TYPE__ + #define __PTRDIFF_TYPE__ long int + #endif + typedef __PTRDIFF_TYPE__ ptrdiff_t; ++#endif /* __DEFINED_ptrdiff_t */ + #endif /* _PTRDIFF_T_DECLARED */ + #endif /* _GCC_PTRDIFF_T */ + #endif /* ___int_ptrdiff_t_h */ +@@ -184,6 +187,7 @@ typedef __PTRDIFF_TYPE__ ptrdiff_t; + #ifndef _GCC_SIZE_T + #ifndef _SIZET_ + #ifndef __size_t ++#ifndef __DEFINED_size_t /* musl */ + #define __size_t__ /* BeOS */ + #define __SIZE_T__ /* Cray Unicos/Mk */ + #define _SIZE_T +@@ -200,6 +204,7 @@ typedef __PTRDIFF_TYPE__ ptrdiff_t; + #define ___int_size_t_h + #define _GCC_SIZE_T + #define _SIZET_ ++#define __DEFINED_size_t /* musl */ + #if (defined (__FreeBSD__) && (__FreeBSD__ >= 5)) \ + || defined(__DragonFly__) \ + || defined(__FreeBSD_kernel__) +@@ -235,6 +240,7 @@ typedef long ssize_t; + #endif /* _SIZE_T */ + #endif /* __SIZE_T__ */ + #endif /* __size_t__ */ ++#endif /* __DEFINED_size_t */ + #undef __need_size_t + #endif /* _STDDEF_H or __need_size_t. */ + +@@ -264,6 +270,7 @@ typedef long ssize_t; + #ifndef ___int_wchar_t_h + #ifndef __INT_WCHAR_T_H + #ifndef _GCC_WCHAR_T ++#ifndef __DEFINED_wchar_t /* musl */ + #define __wchar_t__ /* BeOS */ + #define __WCHAR_T__ /* Cray Unicos/Mk */ + #define _WCHAR_T +@@ -279,6 +286,7 @@ typedef long ssize_t; + #define __INT_WCHAR_T_H + #define _GCC_WCHAR_T + #define _WCHAR_T_DECLARED ++#define __DEFINED_wchar_t /* musl */ + + /* On BSD/386 1.1, at least, machine/ansi.h defines _BSD_WCHAR_T_ + instead of _WCHAR_T_, and _BSD_RUNE_T_ (which, unlike the other +@@ -344,6 +352,7 @@ typedef __WCHAR_TYPE__ wchar_t; + #endif + #endif /* __WCHAR_T__ */ + #endif /* __wchar_t__ */ ++#endif /* __DEFINED_wchar_t musl */ + #undef __need_wchar_t + #endif /* _STDDEF_H or __need_wchar_t. */ + +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0054_all_nopie-all-flags.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0054_all_nopie-all-flags.patch new file mode 100644 index 000000000..73ab9502d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0054_all_nopie-all-flags.patch @@ -0,0 +1,22 @@ +Need to pass NO_PIE_CFLAGS to ALL_* so gcc doesn't fail when +we compile it with older gcc and pie. + +Upstream-Status: Inappropriate [configuration] + +Maintained by: Gentoo Toolchain Project +Signed-off-by: Stephen Arnold + +--- a/gcc/Makefile.in 2015-06-25 19:18:12.000000000 +0200 ++++ b/gcc/Makefile.in 2016-04-22 00:12:54.029178860 +0200 +@@ -991,10 +991,10 @@ ALL_CXXFLAGS = $(T_CFLAGS) $(CFLAGS-$@) + ALL_CPPFLAGS = $(INCLUDES) $(CPPFLAGS) + + # This is the variable to use when using $(COMPILER). +-ALL_COMPILERFLAGS = $(ALL_CXXFLAGS) ++ALL_COMPILERFLAGS = $(NO_PIE_CFLAGS) $(ALL_CXXFLAGS) + + # This is the variable to use when using $(LINKER). +-ALL_LINKERFLAGS = $(ALL_CXXFLAGS) ++ALL_LINKERFLAGS = $(NO_PIE_CFLAGS) $(ALL_CXXFLAGS) + + # Build and host support libraries. 
diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0055-unwind_h-glibc26.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0055-unwind_h-glibc26.patch new file mode 100644 index 000000000..c266cfe21 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/0055-unwind_h-glibc26.patch @@ -0,0 +1,139 @@ +Backport and edit of patches from: +https://gcc.gnu.org/viewcvs/gcc?limit_changes=0&view=revision&revision=249957 +by jsm28 (Joseph Myers) + +Current glibc no longer gives the ucontext_t type the tag struct +ucontext, to conform with POSIX namespace rules. This requires +various linux-unwind.h files in libgcc, that were previously using +struct ucontext, to be fixed to use ucontext_t instead. This is +similar to the removal of the struct siginfo tag from siginfo_t some +years ago. + +This patch changes those files to use ucontext_t instead. As the +standard name that should be unconditionally safe, so this is not +restricted to architectures supported by glibc, or conditioned on the +glibc version. + +Upstream-Status: Backport + +Signed-off-by: Juro Bystricky + +--- branches/gcc-6-branch/libgcc/config/aarch64/linux-unwind.h 2017/07/04 10:22:56 249956 +--- b/libgcc/config/aarch64/linux-unwind.h 2017/07/04 10:23:57 249957 +@@ -52,7 +52,7 @@ + struct rt_sigframe + { + siginfo_t info; +- struct ucontext uc; ++ ucontext_t uc; + }; + + struct rt_sigframe *rt_; +--- branches/gcc-6-branch/libgcc/config/alpha/linux-unwind.h 2017/07/04 10:22:56 249956 +--- b/libgcc/config/alpha/linux-unwind.h 2017/07/04 10:23:57 249957 +@@ -51,7 +51,7 @@ + { + struct rt_sigframe { + siginfo_t info; +- struct ucontext uc; ++ ucontext_t uc; + } *rt_ = context->cfa; + sc = &rt_->uc.uc_mcontext; + } +--- branches/gcc-6-branch/libgcc/config/bfin/linux-unwind.h 2017/07/04 10:22:56 249956 +--- b/libgcc/config/bfin/linux-unwind.h 2017/07/04 10:23:57 249957 +@@ -52,7 +52,7 @@ + void *puc; + char retcode[8]; + siginfo_t info; +- struct ucontext uc; ++ ucontext_t uc; + } *rt_ = context->cfa; + + /* The void * cast is necessary to avoid an aliasing warning. +--- branches/gcc-6-branch/libgcc/config/i386/linux-unwind.h 2017/07/04 10:22:56 249956 +--- b/libgcc/config/i386/linux-unwind.h 2017/07/04 10:23:57 249957 +@@ -58,7 +58,7 @@ + if (*(unsigned char *)(pc+0) == 0x48 + && *(unsigned long long *)(pc+1) == RT_SIGRETURN_SYSCALL) + { +- struct ucontext *uc_ = context->cfa; ++ ucontext_t *uc_ = context->cfa; + /* The void * cast is necessary to avoid an aliasing warning. + The aliasing warning is correct, but should not be a problem + because it does not alias anything. */ +@@ -138,7 +138,7 @@ + siginfo_t *pinfo; + void *puc; + siginfo_t info; +- struct ucontext uc; ++ ucontext_t uc; + } *rt_ = context->cfa; + /* The void * cast is necessary to avoid an aliasing warning. + The aliasing warning is correct, but should not be a problem +--- branches/gcc-6-branch/libgcc/config/m68k/linux-unwind.h 2017/07/04 10:22:56 249956 +--- b/libgcc/config/m68k/linux-unwind.h 2017/07/04 10:23:57 249957 +@@ -33,7 +33,7 @@ + /* is unfortunately broken right now. 
*/ + struct uw_ucontext { + unsigned long uc_flags; +- struct ucontext *uc_link; ++ ucontext_t *uc_link; + stack_t uc_stack; + mcontext_t uc_mcontext; + unsigned long uc_filler[80]; +--- branches/gcc-6-branch/libgcc/config/nios2/linux-unwind.h 2017/07/04 10:22:56 249956 +--- b/libgcc/config/nios2/linux-unwind.h 2017/07/04 10:23:57 249957 +@@ -38,7 +38,7 @@ + + struct nios2_ucontext { + unsigned long uc_flags; +- struct ucontext *uc_link; ++ ucontext_t *uc_link; + stack_t uc_stack; + struct nios2_mcontext uc_mcontext; + sigset_t uc_sigmask; /* mask last for extensibility */ +--- branches/gcc-6-branch/libgcc/config/pa/linux-unwind.h 2017/07/04 10:22:56 249956 +--- b/libgcc/config/pa/linux-unwind.h 2017/07/04 10:23:57 249957 +@@ -80,7 +80,7 @@ + struct sigcontext *sc; + struct rt_sigframe { + siginfo_t info; +- struct ucontext uc; ++ ucontext_t uc; + } *frame; + + /* rt_sigreturn trampoline: +--- branches/gcc-6-branch/libgcc/config/sh/linux-unwind.h 2017/07/04 10:22:56 249956 +--- b/libgcc/config/sh/linux-unwind.h 2017/07/04 10:23:57 249957 +@@ -180,7 +180,7 @@ + { + struct rt_sigframe { + siginfo_t info; +- struct ucontext uc; ++ ucontext_t uc; + } *rt_ = context->cfa; + /* The void * cast is necessary to avoid an aliasing warning. + The aliasing warning is correct, but should not be a problem +--- branches/gcc-6-branch/libgcc/config/tilepro/linux-unwind.h 2017/07/04 10:22:56 249956 +--- b/libgcc/config/tilepro/linux-unwind.h 2017/07/04 10:23:57 249957 +@@ -61,7 +61,7 @@ + struct rt_sigframe { + unsigned char save_area[C_ABI_SAVE_AREA_SIZE]; + siginfo_t info; +- struct ucontext uc; ++ ucontext_t uc; + } *rt_; + + /* Return if this is not a signal handler. */ +--- branches/gcc-6-branch/libgcc/config/xtensa/linux-unwind.h 2017/07/04 10:22:56 249956 +--- b/libgcc/config/xtensa/linux-unwind.h 2017/07/04 10:23:57 249957 +@@ -67,7 +67,7 @@ + + struct rt_sigframe { + siginfo_t info; +- struct ucontext uc; ++ ucontext_t uc; + } *rt_; + + /* movi a2, __NR_rt_sigreturn; syscall */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/CVE-2016-6131.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/CVE-2016-6131.patch new file mode 100644 index 000000000..e873cc6e8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/CVE-2016-6131.patch @@ -0,0 +1,251 @@ +From 59a0e4bd8391962f62600ae3ac95ab0fba74d464 Mon Sep 17 00:00:00 2001 +From: law +Date: Thu, 4 Aug 2016 16:53:18 +0000 +Subject: [PATCH] Fix for PR71696 in Libiberty Demangler +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +[BZ #71696] -- https://gcc.gnu.org/bugzilla/show_bug.cgi?id=71696 + +2016-08-04 Marcel Böhme + + PR c++/71696 + * cplus-dem.c: Prevent infinite recursion when there is a cycle + in the referencing of remembered mangled types. + (work_stuff): New stack to keep track of the remembered mangled + types that are currently being processed. + (push_processed_type): New method to push currently processed + remembered type onto the stack. + (pop_processed_type): New method to pop currently processed + remembered type from the stack. + (work_stuff_copy_to_from): Copy values of new variables. + (delete_non_B_K_work_stuff): Free stack memory. + (demangle_args): Push/Pop currently processed remembered type. + (do_type): Do not demangle a cyclic reference and push/pop + referenced remembered type. 
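The fix is essentially a recursion guard; a stripped-down sketch of the same pattern follows (all names below are invented for the example, this is not the libiberty code):

#include <stdio.h>

#define MAX_DEPTH 64

/* Indices of remembered types currently being expanded. */
static int in_progress[MAX_DEPTH];
static int n_in_progress;

static int already_processing (int index)
{
  for (int i = 0; i < n_in_progress; i++)
    if (in_progress[i] == index)
      return 1;
  return 0;
}

/* Stand-in for do_type(): refuse to follow a back-reference that is
   already being expanded, which is what breaks the cycle. */
static int demangle_type (int index)
{
  if (already_processing (index) || n_in_progress >= MAX_DEPTH)
    return 0;
  in_progress[n_in_progress++] = index;
  /* ... expand the remembered type here, possibly recursing ... */
  n_in_progress--;
  return 1;
}

int main (void)
{
  printf ("%d\n", demangle_type (3));
  return 0;
}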
+ +cherry-picked from commit of +git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@239143 138bc75d-0d04-0410-961f-82ee72b054a4 + +Upstream-Status: Backport [master] +CVE: CVE-2016-6131 +Signed-off-by: Yuanjie Huang +--- + libiberty/ChangeLog | 17 ++++++++ + libiberty/cplus-dem.c | 78 ++++++++++++++++++++++++++++++++--- + libiberty/testsuite/demangle-expected | 18 ++++++++ + 3 files changed, 108 insertions(+), 5 deletions(-) + +diff --git a/libiberty/ChangeLog b/libiberty/ChangeLog +index 240138f..adf1d72 100644 +--- a/libiberty/ChangeLog ++++ b/libiberty/ChangeLog +@@ -1,3 +1,20 @@ ++2016-08-04 Marcel Böhme ++ ++ PR c++/71696 ++ * cplus-dem.c: Prevent infinite recursion when there is a cycle ++ in the referencing of remembered mangled types. ++ (work_stuff): New stack to keep track of the remembered mangled ++ types that are currently being processed. ++ (push_processed_type): New method to push currently processed ++ remembered type onto the stack. ++ (pop_processed_type): New method to pop currently processed ++ remembered type from the stack. ++ (work_stuff_copy_to_from): Copy values of new variables. ++ (delete_non_B_K_work_stuff): Free stack memory. ++ (demangle_args): Push/Pop currently processed remembered type. ++ (do_type): Do not demangle a cyclic reference and push/pop ++ referenced remembered type. ++ + 2016-12-21 Release Manager + + * GCC 6.3.0 released. +diff --git a/libiberty/cplus-dem.c b/libiberty/cplus-dem.c +index 7514e57..f21e630 100644 +--- a/libiberty/cplus-dem.c ++++ b/libiberty/cplus-dem.c +@@ -144,6 +144,9 @@ struct work_stuff + string* previous_argument; /* The last function argument demangled. */ + int nrepeats; /* The number of times to repeat the previous + argument. */ ++ int *proctypevec; /* Indices of currently processed remembered typevecs. */ ++ int proctypevec_size; ++ int nproctypes; + }; + + #define PRINT_ANSI_QUALIFIERS (work -> options & DMGL_ANSI) +@@ -435,6 +438,10 @@ iterate_demangle_function (struct work_stuff *, + + static void remember_type (struct work_stuff *, const char *, int); + ++static void push_processed_type (struct work_stuff *, int); ++ ++static void pop_processed_type (struct work_stuff *); ++ + static void remember_Btype (struct work_stuff *, const char *, int, int); + + static int register_Btype (struct work_stuff *); +@@ -1301,6 +1308,10 @@ work_stuff_copy_to_from (struct work_stuff *to, struct work_stuff *from) + memcpy (to->btypevec[i], from->btypevec[i], len); + } + ++ if (from->proctypevec) ++ to->proctypevec = ++ XDUPVEC (int, from->proctypevec, from->proctypevec_size); ++ + if (from->ntmpl_args) + to->tmpl_argvec = XNEWVEC (char *, from->ntmpl_args); + +@@ -1329,11 +1340,17 @@ delete_non_B_K_work_stuff (struct work_stuff *work) + /* Discard the remembered types, if any. 
*/ + + forget_types (work); +- if (work -> typevec != NULL) ++ if (work->typevec != NULL) + { +- free ((char *) work -> typevec); +- work -> typevec = NULL; +- work -> typevec_size = 0; ++ free ((char *) work->typevec); ++ work->typevec = NULL; ++ work->typevec_size = 0; ++ } ++ if (work->proctypevec != NULL) ++ { ++ free (work->proctypevec); ++ work->proctypevec = NULL; ++ work->proctypevec_size = 0; + } + if (work->tmpl_argvec) + { +@@ -3552,6 +3569,8 @@ static int + do_type (struct work_stuff *work, const char **mangled, string *result) + { + int n; ++ int i; ++ int is_proctypevec; + int done; + int success; + string decl; +@@ -3564,6 +3583,7 @@ do_type (struct work_stuff *work, const char **mangled, string *result) + + done = 0; + success = 1; ++ is_proctypevec = 0; + while (success && !done) + { + int member; +@@ -3616,8 +3636,15 @@ do_type (struct work_stuff *work, const char **mangled, string *result) + success = 0; + } + else ++ for (i = 0; i < work->nproctypes; i++) ++ if (work -> proctypevec [i] == n) ++ success = 0; ++ ++ if (success) + { +- remembered_type = work -> typevec[n]; ++ is_proctypevec = 1; ++ push_processed_type (work, n); ++ remembered_type = work->typevec[n]; + mangled = &remembered_type; + } + break; +@@ -3840,6 +3867,9 @@ do_type (struct work_stuff *work, const char **mangled, string *result) + string_delete (result); + string_delete (&decl); + ++ if (is_proctypevec) ++ pop_processed_type (work); ++ + if (success) + /* Assume an integral type, if we're not sure. */ + return (int) ((tk == tk_none) ? tk_integral : tk); +@@ -4252,6 +4282,41 @@ do_arg (struct work_stuff *work, const char **mangled, string *result) + } + + static void ++push_processed_type (struct work_stuff *work, int typevec_index) ++{ ++ if (work->nproctypes >= work->proctypevec_size) ++ { ++ if (!work->proctypevec_size) ++ { ++ work->proctypevec_size = 4; ++ work->proctypevec = XNEWVEC (int, work->proctypevec_size); ++ } ++ else ++ { ++ if (work->proctypevec_size < 16) ++ /* Double when small. */ ++ work->proctypevec_size *= 2; ++ else ++ { ++ /* Grow slower when large. 
*/ ++ if (work->proctypevec_size > (INT_MAX / 3) * 2) ++ xmalloc_failed (INT_MAX); ++ work->proctypevec_size = (work->proctypevec_size * 3 / 2); ++ } ++ work->proctypevec ++ = XRESIZEVEC (int, work->proctypevec, work->proctypevec_size); ++ } ++ } ++ work->proctypevec [work->nproctypes++] = typevec_index; ++} ++ ++static void ++pop_processed_type (struct work_stuff *work) ++{ ++ work->nproctypes--; ++} ++ ++static void + remember_type (struct work_stuff *work, const char *start, int len) + { + char *tem; +@@ -4515,10 +4580,13 @@ demangle_args (struct work_stuff *work, const char **mangled, + { + string_append (declp, ", "); + } ++ push_processed_type (work, t); + if (!do_arg (work, &tem, &arg)) + { ++ pop_processed_type (work); + return (0); + } ++ pop_processed_type (work); + if (PRINT_ARG_TYPES) + { + string_appends (declp, &arg); +diff --git a/libiberty/testsuite/demangle-expected b/libiberty/testsuite/demangle-expected +index 157d2ee..8793a0b 100644 +--- a/libiberty/testsuite/demangle-expected ++++ b/libiberty/testsuite/demangle-expected +@@ -4491,3 +4491,21 @@ void eat(int*&, Foo()::{lambda(auto:1 + + _Z3eatIPiZ3BarIsEvvEUlPsPT_PT0_E0_EvRS3_RS5_ + void eat()::{lambda(short*, auto:1*, auto:2*)#2}>(int*&, void Bar()::{lambda(short*, auto:1*, auto:2*)#2}&) ++# ++# Tests write access violation PR70926 ++ ++0__Ot2m02R5T0000500000 ++0__Ot2m02R5T0000500000 ++# ++ ++0__GT50000000000_ ++0__GT50000000000_ ++# ++ ++__t2m05B500000000000000000_ ++__t2m05B500000000000000000_ ++# ++# Tests stack overflow PR71696 ++ ++__10%0__S4_0T0T0 ++%0<>::%0(%0<>) +-- +2.9.3 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/ubsan-fix-check-empty-string.patch b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/ubsan-fix-check-empty-string.patch new file mode 100644 index 000000000..c0127198e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-6.3/ubsan-fix-check-empty-string.patch @@ -0,0 +1,28 @@ +From 8db2cf6353c13f2a84cbe49b689654897906c499 Mon Sep 17 00:00:00 2001 +From: kyukhin +Date: Sat, 3 Sep 2016 10:57:05 +0000 +Subject: [PATCH] gcc/ * ubsan.c (ubsan_use_new_style_p): Fix check for empty + string. 
+ +git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@239971 138bc75d-0d04-0410-961f-82ee72b054a4 + +Upstream-Status: Backport +Signed-off-by: Joshua Lock + +--- + gcc/ubsan.c | 2 +- + 2 files changed, 5 insertions(+), 1 deletion(-) + +Index: gcc-6.3.0/gcc/ubsan.c +=================================================================== +--- gcc-6.3.0.orig/gcc/ubsan.c ++++ gcc-6.3.0/gcc/ubsan.c +@@ -1471,7 +1471,7 @@ ubsan_use_new_style_p (location_t loc) + + expanded_location xloc = expand_location (loc); + if (xloc.file == NULL || strncmp (xloc.file, "\1", 2) == 0 +- || xloc.file == '\0' || xloc.file[0] == '\xff' ++ || xloc.file[0] == '\0' || xloc.file[0] == '\xff' + || xloc.file[1] == '\xff') + return false; + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-common.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-common.inc index f540b4d96..857aa8f50 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-common.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-common.inc @@ -5,40 +5,52 @@ LICENSE = "GPL" NATIVEDEPS = "" +CVE_PRODUCT = "gcc" + inherit autotools gettext texinfo BPN = "gcc" +COMPILERINITIAL = "" +COMPILERDEP = "virtual/${MLPREFIX}${TARGET_PREFIX}gcc${COMPILERINITIAL}:do_gcc_stash_builddir" +COMPILERDEP_class-nativesdk = "virtual/${TARGET_PREFIX}gcc${COMPILERINITIAL}-crosssdk:do_gcc_stash_builddir" + +python extract_stashed_builddir () { + src = d.expand("${COMPONENTS_DIR}/${BUILD_ARCH}/gcc-stashed-builddir${COMPILERINITIAL}-${TARGET_SYS}") + dest = d.getVar("B") + oe.path.copyhardlinktree(src, dest) + staging_processfixme([src + "/fixmepath"], dest, dest, dest, d) +} def get_gcc_float_setting(bb, d): - if d.getVar('ARMPKGSFX_EABI', True) == "hf" and d.getVar('TRANSLATED_TARGET_ARCH', True) == "arm": + if d.getVar('ARMPKGSFX_EABI') == "hf" and d.getVar('TRANSLATED_TARGET_ARCH') == "arm": return "--with-float=hard" - if d.getVar('TARGET_FPU', True) in [ 'soft' ]: + if d.getVar('TARGET_FPU') in [ 'soft' ]: return "--with-float=soft" - if d.getVar('TARGET_FPU', True) in [ 'ppc-efd' ]: + if d.getVar('TARGET_FPU') in [ 'ppc-efd' ]: return "--enable-e500_double" return "" get_gcc_float_setting[vardepvalue] = "${@get_gcc_float_setting(bb, d)}" def get_gcc_mips_plt_setting(bb, d): - if d.getVar('TRANSLATED_TARGET_ARCH', True) in [ 'mips', 'mipsel' ] and bb.utils.contains('DISTRO_FEATURES', 'mplt', True, False, d): + if d.getVar('TRANSLATED_TARGET_ARCH') in [ 'mips', 'mipsel' ] and bb.utils.contains('DISTRO_FEATURES', 'mplt', True, False, d): return "--with-mips-plt" return "" def get_gcc_ppc_plt_settings(bb, d): - if d.getVar('TRANSLATED_TARGET_ARCH', True) in [ 'powerpc' ] and not bb.utils.contains('DISTRO_FEATURES', 'bssplt', True, False, d): + if d.getVar('TRANSLATED_TARGET_ARCH') in [ 'powerpc' ] and not bb.utils.contains('DISTRO_FEATURES', 'bssplt', True, False, d): return "--enable-secureplt" return "" def get_long_double_setting(bb, d): - if d.getVar('TRANSLATED_TARGET_ARCH', True) in [ 'powerpc', 'powerpc64' ] and d.getVar('TCLIBC', True) in [ 'uclibc', 'glibc' ]: + if d.getVar('TRANSLATED_TARGET_ARCH') in [ 'powerpc', 'powerpc64' ] and d.getVar('TCLIBC') in [ 'uclibc', 'glibc' ]: return "--with-long-double-128" else: return "--without-long-double-128" return "" def get_gcc_multiarch_setting(bb, d): - target_arch = d.getVar('TRANSLATED_TARGET_ARCH', True) + target_arch = d.getVar('TRANSLATED_TARGET_ARCH') multiarch_options = { "i586": "--enable-targets=all", "i686": "--enable-targets=all", @@ -54,26 +66,25 @@ def 
get_gcc_multiarch_setting(bb, d): # this is used by the multilib setup of gcc def get_tune_parameters(tune, d): - availtunes = d.getVar('AVAILTUNES', True) + availtunes = d.getVar('AVAILTUNES') if tune not in availtunes.split(): bb.error('The tune: %s is not one of the available tunes: %s' % (tune or None, availtunes)) localdata = bb.data.createCopy(d) override = ':tune-' + tune localdata.setVar('OVERRIDES', localdata.getVar('OVERRIDES', False) + override) - bb.data.update_data(localdata) retdict = {} retdict['tune'] = tune - retdict['ccargs'] = localdata.getVar('TUNE_CCARGS', True) - retdict['features'] = localdata.getVar('TUNE_FEATURES', True) + retdict['ccargs'] = localdata.getVar('TUNE_CCARGS') + retdict['features'] = localdata.getVar('TUNE_FEATURES') # BASELIB is used by the multilib code to change library paths - retdict['baselib'] = localdata.getVar('BASE_LIB', True) or localdata.getVar('BASELIB', True) - retdict['arch'] = localdata.getVar('TUNE_ARCH', True) - retdict['abiextension'] = localdata.getVar('ABIEXTENSION', True) - retdict['target_fpu'] = localdata.getVar('TARGET_FPU', True) - retdict['pkgarch'] = localdata.getVar('TUNE_PKGARCH', True) - retdict['package_extra_archs'] = localdata.getVar('PACKAGE_EXTRA_ARCHS', True) + retdict['baselib'] = localdata.getVar('BASE_LIB') or localdata.getVar('BASELIB') + retdict['arch'] = localdata.getVar('TUNE_ARCH') + retdict['abiextension'] = localdata.getVar('ABIEXTENSION') + retdict['target_fpu'] = localdata.getVar('TARGET_FPU') + retdict['pkgarch'] = localdata.getVar('TUNE_PKGARCH') + retdict['package_extra_archs'] = localdata.getVar('PACKAGE_EXTRA_ARCHS') return retdict get_tune_parameters[vardepsexclude] = "AVAILTUNES TUNE_CCARGS OVERRIDES TUNE_FEATURES BASE_LIB BASELIB TUNE_ARCH ABIEXTENSION TARGET_FPU TUNE_PKGARCH PACKAGE_EXTRA_ARCHS" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-configure-common.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-configure-common.inc index ddebbb841..00ef89ec5 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-configure-common.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-configure-common.inc @@ -23,7 +23,7 @@ GCCMULTILIB ?= "--disable-multilib" GCCTHREADS ?= "posix" EXTRA_OECONF = "\ - ${@['--enable-clocale=generic', ''][d.getVar('USE_NLS', True) != 'no']} \ + ${@['--enable-clocale=generic', ''][d.getVar('USE_NLS') != 'no']} \ --with-gnu-ld \ --enable-shared \ --enable-languages=${LANGUAGES} \ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-canadian_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-canadian_6.2.bb deleted file mode 100644 index bf53c5cd7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-canadian_6.2.bb +++ /dev/null @@ -1,5 +0,0 @@ -require recipes-devtools/gcc/gcc-${PV}.inc -require gcc-cross-canadian.inc - - - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-canadian_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-canadian_6.3.bb new file mode 100644 index 000000000..bf53c5cd7 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-canadian_6.3.bb @@ -0,0 +1,5 @@ +require recipes-devtools/gcc/gcc-${PV}.inc +require gcc-cross-canadian.inc + + + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial.inc index dcf22710a..9502c2b1a 100644 --- 
a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial.inc @@ -24,7 +24,7 @@ EXTRA_OECONF = "\ --enable-languages=c \ --program-prefix=${TARGET_PREFIX} \ --with-sysroot=/not/exist \ - --with-build-sysroot=${GCCCROSS_BUILDSYSROOT} \ + --with-build-sysroot=${STAGING_DIR_TARGET} \ ${EXTRA_OECONF_INITIAL} \ ${@bb.utils.contains('DISTRO_FEATURES', 'ld-is-gold', '--with-ld=${STAGING_BINDIR_TOOLCHAIN}/${TARGET_PREFIX}ld.bfd', '', d)} \ ${EXTRA_OECONF_GCC_FLOAT} \ @@ -33,20 +33,10 @@ EXTRA_OECONF = "\ EXTRA_OECONF += "--with-native-system-header-dir=${SYSTEMHEADERS}" -GCCCROSS_BUILDSYSROOT = "${B}/tmpsysroot" - -do_configure_prepend () { - sysr=${GCCCROSS_BUILDSYSROOT}${target_includedir} - mkdir -p $sysr - for t in linux asm asm-generic; do - rm -f $sysr/$t - ln -s ${STAGING_DIR_TARGET}${target_includedir}/$t $sysr/ - done -} - do_compile () { oe_runmake all-gcc configure-target-libgcc } + do_install () { ( cd ${B}/${TARGET_SYS}/libgcc; oe_runmake 'DESTDIR=${D}' install-unwind_h ) oe_runmake 'DESTDIR=${D}' install-gcc @@ -74,14 +64,6 @@ do_install () { # so we overwirte the generated include-fixed/limits.h for gcc-cross-initial # to get rid references to real limits.h cp gcc/include-fixed/limits.h ${D}${gcclibdir}/${TARGET_SYS}/${BINV}/include/limits.h - - # gcc-runtime installs libgcc into a special location in staging since it breaks doing a standalone build - case ${PN} in - *gcc-cross-initial-${TARGET_ARCH}|*gcc-crosssdk-initial-${SDK_SYS}) - dest=${D}/${includedir}/gcc-build-internal-initial-${TARGET_SYS} - hardlinkdir . $dest - ;; - esac } # # Override the default sysroot staging copy since this won't look like a target system @@ -94,7 +76,13 @@ sysroot_stage_all() { mv ${SYSROOT_DESTDIR}${target_libdir}/* ${SYSROOT_DESTDIR}${STAGING_DIR_TARGET}${target_libdir}/ || true } -do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}/${STAGING_DIR_HOST}/ ${SYSROOT_DESTDIR}/${STAGING_DIR_TARGET}/${target_base_libdir}/" -do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_HOST}/ ${STAGING_DIR_TCBOOTSTRAP}/${target_base_libdir}/" +do_populate_sysroot[sstate-inputdirs] = "${SYSROOT_DESTDIR}/${STAGING_DIR_HOST}/" +do_populate_sysroot[sstate-outputdirs] = "${COMPONENTS_DIR}/${PACKAGE_ARCH}/${PN}" inherit nopackages + +COMPILERINITIAL = "-initial" + + +# We really only want this built by things that need it, not any recrdeptask +deltask do_build diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial_6.2.bb deleted file mode 100644 index 4c73e5ce6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial_6.2.bb +++ /dev/null @@ -1,2 +0,0 @@ -require recipes-devtools/gcc/gcc-cross_${PV}.bb -require gcc-cross-initial.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial_6.3.bb new file mode 100644 index 000000000..4c73e5ce6 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross-initial_6.3.bb @@ -0,0 +1,2 @@ +require recipes-devtools/gcc/gcc-cross_${PV}.bb +require gcc-cross-initial.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross.inc index cc465a279..45985c384 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross.inc +++ 
b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross.inc @@ -5,12 +5,15 @@ EXTRADEPENDS = "" DEPENDS = "virtual/${TARGET_PREFIX}binutils virtual/${TARGET_PREFIX}libc-for-gcc ${EXTRADEPENDS} ${NATIVEDEPS}" PROVIDES = "virtual/${TARGET_PREFIX}gcc virtual/${TARGET_PREFIX}g++" python () { - if d.getVar("TARGET_OS", True).startswith("linux"): + if d.getVar("TARGET_OS").startswith("linux"): d.setVar("EXTRADEPENDS", "linux-libc-headers") } PN = "gcc-cross-${TARGET_ARCH}" +# Ignore how TARGET_ARCH is computed. +TARGET_ARCH[vardepvalue] = "${TARGET_ARCH}" + require gcc-configure-common.inc # While we want the 'gnu' hash style, we explicitly set it to sysv here to @@ -185,21 +188,28 @@ do_install () { # We use libiberty from binutils find ${D}${exec_prefix}/lib -name libiberty.a | xargs rm -f find ${D}${exec_prefix}/lib -name libiberty.h | xargs rm -f - - # gcc-runtime installs libgcc into a special location in staging since it breaks doing a standalone build - case ${PN} in - *gcc-cross-${TARGET_ARCH}|*gcc-crosssdk-${SDK_SYS}) - dest=${D}/${includedir}/gcc-build-internal-${TARGET_SYS} - hardlinkdir . $dest - ;; - esac } -# This is reflected in the recipe name and target gcc shouldn't depend -# on SDK settings either -do_install[vardepsexclude] += "SDK_SYS" do_package[noexec] = "1" do_packagedata[noexec] = "1" do_package_write_ipk[noexec] = "1" do_package_write_rpm[noexec] = "1" do_package_write_deb[noexec] = "1" + +BUILDDIRSTASH = "${WORKDIR}/stashed-builddir" +do_gcc_stash_builddir[dirs] = "${B}" +do_gcc_stash_builddir[cleandirs] = "${BUILDDIRSTASH}" +do_gcc_stash_builddir () { + dest=${BUILDDIRSTASH} + hardlinkdir . $dest +} +addtask do_gcc_stash_builddir after do_compile before do_install +SSTATETASKS += "do_gcc_stash_builddir" +do_gcc_stash_builddir[sstate-inputdirs] = "${BUILDDIRSTASH}" +do_gcc_stash_builddir[sstate-outputdirs] = "${COMPONENTS_DIR}/${BUILD_ARCH}/gcc-stashed-builddir${COMPILERINITIAL}-${TARGET_SYS}" +do_gcc_stash_builddir[sstate-fixmedir] = "${COMPONENTS_DIR}/${BUILD_ARCH}/gcc-stashed-builddir${COMPILERINITIAL}-${TARGET_SYS}" + +python do_gcc_stash_builddir_setscene () { + sstate_setscene(d) +} +addtask do_gcc_stash_builddir_setscene diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross_6.2.bb deleted file mode 100644 index b43cca0c5..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross_6.2.bb +++ /dev/null @@ -1,3 +0,0 @@ -require recipes-devtools/gcc/gcc-${PV}.inc -require gcc-cross.inc - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross_6.3.bb new file mode 100644 index 000000000..b43cca0c5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-cross_6.3.bb @@ -0,0 +1,3 @@ +require recipes-devtools/gcc/gcc-${PV}.inc +require gcc-cross.inc + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk-initial_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk-initial_6.2.bb deleted file mode 100644 index fd90e1140..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk-initial_6.2.bb +++ /dev/null @@ -1,3 +0,0 @@ -require recipes-devtools/gcc/gcc-cross-initial_${PV}.bb -require gcc-crosssdk-initial.inc - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk-initial_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk-initial_6.3.bb new 
file mode 100644 index 000000000..fd90e1140 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk-initial_6.3.bb @@ -0,0 +1,3 @@ +require recipes-devtools/gcc/gcc-cross-initial_${PV}.bb +require gcc-crosssdk-initial.inc + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk_6.2.bb deleted file mode 100644 index 40a6c4fef..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk_6.2.bb +++ /dev/null @@ -1,2 +0,0 @@ -require recipes-devtools/gcc/gcc-cross_${PV}.bb -require gcc-crosssdk.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk_6.3.bb new file mode 100644 index 000000000..40a6c4fef --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-crosssdk_6.3.bb @@ -0,0 +1,2 @@ +require recipes-devtools/gcc/gcc-cross_${PV}.bb +require gcc-crosssdk.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-multilib-config.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-multilib-config.inc index a0a2ac09a..31b8619be 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-multilib-config.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-multilib-config.inc @@ -21,8 +21,8 @@ python gcc_multilib_setup() { import shutil import glob - srcdir = d.getVar('S', True) - builddir = d.getVar('B', True) + srcdir = d.getVar('S') + builddir = d.getVar('B') src_conf_dir = '%s/gcc/config' % srcdir build_conf_dir = '%s/gcc/config' % builddir @@ -43,12 +43,12 @@ python gcc_multilib_setup() { bb.utils.mkdirhier('%s/%s' % (build_conf_dir, parent_dir)) bb.utils.copyfile(fn, '%s/%s' % (build_conf_dir, rel_path)) - pn = d.getVar('PN', True) - multilibs = (d.getVar('MULTILIB_VARIANTS', True) or '').split() + pn = d.getVar('PN') + multilibs = (d.getVar('MULTILIB_VARIANTS') or '').split() if not multilibs and pn != "nativesdk-gcc": return - mlprefix = d.getVar('MLPREFIX', True) + mlprefix = d.getVar('MLPREFIX') if ('%sgcc' % mlprefix) != pn and (not pn.startswith('gcc-cross-canadian')) and pn != "nativesdk-gcc": return @@ -155,10 +155,10 @@ python gcc_multilib_setup() { libdirn32 = 'SYSTEMLIBS_DIR' - target_arch = (d.getVar('TARGET_ARCH_MULTILIB_ORIGINAL', True) if mlprefix - else d.getVar('TARGET_ARCH', True)) + target_arch = (d.getVar('TARGET_ARCH_MULTILIB_ORIGINAL') if mlprefix + else d.getVar('TARGET_ARCH')) if pn == "nativesdk-gcc": - header_config_files = gcc_header_config_files[d.getVar("SDK_ARCH", True)] + header_config_files = gcc_header_config_files[d.getVar("SDK_ARCH")] write_headers(builddir, header_config_files, libdir32, libdir64, libdirx32, libdirn32) return @@ -188,7 +188,7 @@ python gcc_multilib_setup() { optsets = [] for ml in ml_list: - tune = d.getVar(ml, True) + tune = d.getVar(ml) if not tune: bb.warn("%s doesn't have a corresponding tune. Skipping..." 
% ml) continue @@ -212,7 +212,7 @@ python gcc_multilib_setup() { # take out '-' mcpu='s and march='s from parameters opts = [] - whitelist = (d.getVar("MULTILIB_OPTION_WHITELIST", True) or "").split() + whitelist = (d.getVar("MULTILIB_OPTION_WHITELIST") or "").split() for i in d.expand(tune_parameters['ccargs']).split(): if i in whitelist: # Need to strip '-' from option diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime.inc index 15252f1a4..0dc405c59 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime.inc @@ -17,55 +17,48 @@ EXTRA_OECONF_PATHS = "\ EXTRA_OECONF_append_linuxstdbase = " --enable-clocale=gnu" RUNTIMELIBITM = "libitm" -RUNTIMELIBITM_mips = "" -RUNTIMELIBITM_mipsel = "" -RUNTIMELIBITM_mips64 = "" -RUNTIMELIBITM_mips64el = "" -RUNTIMELIBITM_mipsisa32r6 = "" -RUNTIMELIBITM_mipsisa32r6el = "" -RUNTIMELIBITM_mipsisa64r6 = "" -RUNTIMELIBITM_mipsisa64r6el = "" +RUNTIMELIBITM_mipsarch = "" RUNTIMELIBITM_nios2 = "" RUNTIMELIBITM_microblaze = "" RUNTIMETARGET = "libssp libstdc++-v3 libgomp libatomic ${RUNTIMELIBITM} \ ${@bb.utils.contains_any('FORTRAN', [',fortran',',f77'], 'libquadmath', '', d)} \ " +RUNTIMETARGET_append_x86 = " libmpx" +RUNTIMETARGET_append_x86-64 = " libmpx" +RUNTIMETARGET_remove_libc-musl = "libmpx" -# ? # libiberty # libmudflap # libgfortran needs separate recipe due to libquadmath dependency do_configure () { export CXX="${CXX} -nostdinc++ -nostdlib++" - mtarget=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` - hardlinkdir ${STAGING_INCDIR_NATIVE}/gcc-build-internal-$mtarget ${B} + for d in libgcc ${RUNTIMETARGET}; do echo "Configuring $d" - rm -rf ${B}/$target/$d/ - mkdir -p ${B}/$target/$d/ - cd ${B}/$target/$d/ + rm -rf ${B}/${TARGET_SYS}/$d/ + mkdir -p ${B}/${TARGET_SYS}/$d/ + cd ${B}/${TARGET_SYS}/$d/ chmod a+x ${S}/$d/configure - relpath=${@os.path.relpath("${S}/$d", "${B}/$target/$d")} + relpath=${@os.path.relpath("${S}/$d", "${B}/${TARGET_SYS}/$d")} $relpath/configure ${CONFIGUREOPTS} ${EXTRA_OECONF} done } +EXTRACONFFUNCS += "extract_stashed_builddir" +do_configure[depends] += "${COMPILERDEP}" do_compile () { - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` for d in libgcc ${RUNTIMETARGET}; do - cd ${B}/$target/$d/ - oe_runmake MULTIBUILDTOP=${B}/$target/$d/ + cd ${B}/${TARGET_SYS}/$d/ + oe_runmake MULTIBUILDTOP=${B}/${TARGET_SYS}/$d/ done } do_install () { - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` for d in ${RUNTIMETARGET}; do - cd ${B}/$target/$d/ - oe_runmake 'DESTDIR=${D}' MULTIBUILDTOP=${B}/$target/$d/ install + cd ${B}/${TARGET_SYS}/$d/ + oe_runmake 'DESTDIR=${D}' MULTIBUILDTOP=${B}/${TARGET_SYS}/$d/ install done rm -rf ${D}${infodir}/libgomp.info ${D}${infodir}/dir rm -rf ${D}${infodir}/libitm.info ${D}${infodir}/dir @@ -148,6 +141,17 @@ PACKAGES = "\ libitm-dev \ libitm-staticdev \ " +PACKAGES_append_x86 = "\ + libmpx \ + libmpx-dev \ + libmpx-staticdev \ +" + +PACKAGES_append_x86-64 = "\ + libmpx \ + libmpx-dev \ + libmpx-staticdev \ +" # The base package doesn't exist, so we clear the recommends. 
RRECOMMENDS_${PN}-dbg = "" @@ -252,6 +256,20 @@ SUMMARY_libitm-dev = "GNU transactional memory support library - development fil FILES_libitm-staticdev = "${libdir}/libitm.a" SUMMARY_libitm-staticdev = "GNU transactional memory support library - static development files" + +FILES_libmpx = "${libdir}/libmpx.so.* ${libdir}/libmpxwrappers.so.*" +SUMMARY_libmpx = "Intel Memory Protection Extension library" +FILES_libmpx-dev = "\ + ${libdir}/libmpxwrappers.so \ + ${libdir}/libmpxwrappers.la \ + ${libdir}/libmpx.so \ + ${libdir}/libmpx.la \ + ${libdir}/libmpx.spec \ +" +SUMMARY_libmpx-dev = "Intel Memory Protection Extension library - development files" +FILES_libmpx-staticdev = "${libdir}/libmpx.a ${libdir}/libmpxwrappers.a" +SUMMARY_libmpx-staticdev = "Intel Memory Protection Extension library - static development files" + do_package_write_ipk[depends] += "virtual/${MLPREFIX}libc:do_packagedata" do_package_write_deb[depends] += "virtual/${MLPREFIX}libc:do_packagedata" do_package_write_rpm[depends] += "virtual/${MLPREFIX}libc:do_packagedata" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime_6.2.bb deleted file mode 100644 index 8f31e7792..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime_6.2.bb +++ /dev/null @@ -1,7 +0,0 @@ -require recipes-devtools/gcc/gcc-${PV}.inc -require gcc-runtime.inc - -FILES_libgomp-dev += "\ - ${libdir}/gcc/${TARGET_SYS}/${BINV}/include/openacc.h \ -" - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime_6.3.bb new file mode 100644 index 000000000..8f31e7792 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-runtime_6.3.bb @@ -0,0 +1,7 @@ +require recipes-devtools/gcc/gcc-${PV}.inc +require gcc-runtime.inc + +FILES_libgomp-dev += "\ + ${libdir}/gcc/${TARGET_SYS}/${BINV}/include/openacc.h \ +" + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers.inc index df4e297a4..f97885b37 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers.inc @@ -12,36 +12,29 @@ EXTRA_OECONF_PATHS = "\ " do_configure () { - mtarget=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` - if [ -d ${STAGING_INCDIR_NATIVE}/gcc-build-internal-$mtarget ]; then - hardlinkdir ${STAGING_INCDIR_NATIVE}/gcc-build-internal-$mtarget ${B} - fi - - echo "Configuring libsanitizer" - rm -rf ${B}/$target/libsanitizer/ - mkdir -p ${B}/$target/libsanitizer/ - cd ${B}/$target/libsanitizer/ + rm -rf ${B}/${TARGET_SYS}/libsanitizer/ + mkdir -p ${B}/${TARGET_SYS}/libsanitizer/ + cd ${B}/${TARGET_SYS}/libsanitizer/ chmod a+x ${S}/libsanitizer/configure - relpath=${@os.path.relpath("${S}/libsanitizer", "${B}/$target/libsanitizer")} + relpath=${@os.path.relpath("${S}/libsanitizer", "${B}/${TARGET_SYS}/libsanitizer")} $relpath/configure ${CONFIGUREOPTS} ${EXTRA_OECONF} # Easiest way to stop bad RPATHs getting into the library since we have a # broken libtool here - sed -i -e 's/hardcode_into_libs=yes/hardcode_into_libs=no/' ${B}/$target/libsanitizer/libtool + sed -i -e 's/hardcode_into_libs=yes/hardcode_into_libs=no/' ${B}/${TARGET_SYS}/libsanitizer/libtool # Link to the sysroot's libstdc++ instead of one gcc thinks it just 
built - sed -i -e '/LIBSTDCXX_RAW_CXX_\(CXXFLAGS\|LDFLAGS\)\s*=/d' ${B}/$target/libsanitizer/*/Makefile + sed -i -e '/LIBSTDCXX_RAW_CXX_\(CXXFLAGS\|LDFLAGS\)\s*=/d' ${B}/${TARGET_SYS}/libsanitizer/*/Makefile } +EXTRACONFFUNCS += "extract_stashed_builddir" +do_configure[depends] += "${COMPILERDEP}" do_compile () { - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` - cd ${B}/$target/libsanitizer/ - oe_runmake MULTIBUILDTOP=${B}/$target/libsanitizer/ + cd ${B}/${TARGET_SYS}/libsanitizer/ + oe_runmake MULTIBUILDTOP=${B}/${TARGET_SYS}/libsanitizer/ } do_install () { - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` - cd ${B}/$target/libsanitizer/ - oe_runmake 'DESTDIR=${D}' MULTIBUILDTOP=${B}/$target/libsanitizer/ install + cd ${B}/${TARGET_SYS}/libsanitizer/ + oe_runmake 'DESTDIR=${D}' MULTIBUILDTOP=${B}/${TARGET_SYS}/libsanitizer/ install if [ -d ${D}${infodir} ]; then rmdir --ignore-fail-on-non-empty -p ${D}${infodir} fi diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers_6.2.bb deleted file mode 100644 index 601f66602..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers_6.2.bb +++ /dev/null @@ -1,2 +0,0 @@ -require recipes-devtools/gcc/gcc-${PV}.inc -require gcc-sanitizers.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers_6.3.bb new file mode 100644 index 000000000..601f66602 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-sanitizers_6.3.bb @@ -0,0 +1,2 @@ +require recipes-devtools/gcc/gcc-${PV}.inc +require gcc-sanitizers.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source.inc index 49bde92c4..03bab9781 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source.inc @@ -3,7 +3,7 @@ deltask do_compile deltask do_install deltask do_populate_sysroot deltask do_populate_lic -deltask do_rm_work +RM_WORK_EXCLUDE += "${PN}" inherit nopackages @@ -18,6 +18,8 @@ INHIBIT_DEFAULT_DEPS = "1" DEPENDS = "" PACKAGES = "" + +# This needs to be Python to avoid lots of shell variables becoming dependencies. 
python do_preconfigure () { import subprocess cmd = d.expand('cd ${S} && PATH=${PATH} gnu-configize') @@ -26,6 +28,11 @@ python do_preconfigure () { bb.utils.remove(d.expand("${S}/gcc/gengtype-lex.c")) cmd = d.expand("sed -i 's/BUILD_INFO=info/BUILD_INFO=/' ${S}/gcc/configure") subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) + + # Easiest way to stop bad RPATHs getting into the library since we have a + # broken libtool here (breaks cross-canadian and target at least) + cmd = d.expand("sed -i -e 's/hardcode_into_libs=yes/hardcode_into_libs=no/' ${S}/libcc1/configure") + subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) } addtask do_preconfigure after do_patch do_preconfigure[depends] += "gnu-config-native:do_populate_sysroot autoconf-native:do_populate_sysroot" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source_6.2.bb deleted file mode 100644 index b890fa33e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source_6.2.bb +++ /dev/null @@ -1,4 +0,0 @@ -require recipes-devtools/gcc/gcc-${PV}.inc -require recipes-devtools/gcc/gcc-source.inc - -EXCLUDE_FROM_WORLD = "1" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source_6.3.bb new file mode 100644 index 000000000..b890fa33e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-source_6.3.bb @@ -0,0 +1,4 @@ +require recipes-devtools/gcc/gcc-${PV}.inc +require recipes-devtools/gcc/gcc-source.inc + +EXCLUDE_FROM_WORLD = "1" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-target.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-target.inc index f436fa24f..eef4434db 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-target.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc-target.inc @@ -31,7 +31,7 @@ PACKAGES = "\ FILES_${PN} = "\ ${bindir}/${TARGET_PREFIX}gcc* \ ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/collect2* \ - ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/cc* \ + ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/cc1plus \ ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/lto* \ ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/lib*${SOLIBS} \ ${libexecdir}/gcc/${TARGET_SYS}/${BINV}/liblto*${SOLIBSDEV} \ @@ -46,6 +46,7 @@ RRECOMMENDS_${PN} += "\ libssp \ libssp-dev \ " +RDEPENDS_${PN} += "cpp" FILES_${PN}-dev = "\ ${gcclibdir}/${TARGET_SYS}/${BINV}/lib*${SOLIBSDEV} \ @@ -179,6 +180,8 @@ do_install () { ln -sf ${TARGET_PREFIX}g++ g++ ln -sf ${TARGET_PREFIX}gcc gcc ln -sf ${TARGET_PREFIX}cpp cpp + ln -sf ${TARGET_PREFIX}gcov gcov + ln -sf ${TARGET_PREFIX}gcov-tool gcov-tool install -d ${D}${base_libdir} ln -sf ${bindir}/${TARGET_PREFIX}cpp ${D}${base_libdir}/cpp ln -sf g++ c++ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_5.4.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_5.4.bb index b0a523cae..2c618dfb9 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_5.4.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_5.4.bb @@ -6,10 +6,4 @@ require gcc-target.inc # | gcc-4.8.1-r0/gcc-4.8.1/gcc/cp/decl.c:7442:(.text.unlikely+0x318): additional relocation overflows omitted from the output ARM_INSTRUCTION_SET_armv4 = "arm" -do_configure_prepend() { - # Easiest way to stop bad RPATHs getting into the library since we have a - # broken libtool here - sed -i -e 
's/hardcode_into_libs=yes/hardcode_into_libs=no/' ${S}/libcc1/configure -} - BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_6.2.bb deleted file mode 100644 index b0a523cae..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_6.2.bb +++ /dev/null @@ -1,15 +0,0 @@ -require recipes-devtools/gcc/gcc-${PV}.inc -require gcc-target.inc - -# Building with thumb enabled on armv4t fails with -# | gcc-4.8.1-r0/gcc-4.8.1/gcc/cp/decl.c:7438:(.text.unlikely+0x2fa): relocation truncated to fit: R_ARM_THM_CALL against symbol `fancy_abort(char const*, int, char const*)' defined in .glue_7 section in linker stubs -# | gcc-4.8.1-r0/gcc-4.8.1/gcc/cp/decl.c:7442:(.text.unlikely+0x318): additional relocation overflows omitted from the output -ARM_INSTRUCTION_SET_armv4 = "arm" - -do_configure_prepend() { - # Easiest way to stop bad RPATHs getting into the library since we have a - # broken libtool here - sed -i -e 's/hardcode_into_libs=yes/hardcode_into_libs=no/' ${S}/libcc1/configure -} - -BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_6.3.bb new file mode 100644 index 000000000..2c618dfb9 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/gcc_6.3.bb @@ -0,0 +1,9 @@ +require recipes-devtools/gcc/gcc-${PV}.inc +require gcc-target.inc + +# Building with thumb enabled on armv4t fails with +# | gcc-4.8.1-r0/gcc-4.8.1/gcc/cp/decl.c:7438:(.text.unlikely+0x2fa): relocation truncated to fit: R_ARM_THM_CALL against symbol `fancy_abort(char const*, int, char const*)' defined in .glue_7 section in linker stubs +# | gcc-4.8.1-r0/gcc-4.8.1/gcc/cp/decl.c:7442:(.text.unlikely+0x318): additional relocation overflows omitted from the output +ARM_INSTRUCTION_SET_armv4 = "arm" + +BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-common.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-common.inc index c4de31c34..848a47620 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-common.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-common.inc @@ -5,27 +5,25 @@ require gcc-configure-common.inc INHIBIT_DEFAULT_DEPS = "1" do_configure () { - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` install -d ${D}${base_libdir} ${D}${libdir} - hardlinkdir ${STAGING_INCDIR_NATIVE}/${LIBGCCBUILDTREENAME}$target/ ${B} mkdir -p ${B}/${BPN} - mkdir -p ${B}/$target/${BPN}/ + mkdir -p ${B}/${TARGET_SYS}/${BPN}/ cd ${B}/${BPN} chmod a+x ${S}/${BPN}/configure relpath=${@os.path.relpath("${S}/${BPN}", "${B}/${BPN}")} $relpath/configure ${CONFIGUREOPTS} ${EXTRA_OECONF} } +EXTRACONFFUNCS += "extract_stashed_builddir" +do_configure[depends] += "${COMPILERDEP}" do_compile () { - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` cd ${B}/${BPN} - oe_runmake MULTIBUILDTOP=${B}/$target/${BPN}/ + oe_runmake MULTIBUILDTOP=${B}/${TARGET_SYS}/${BPN}/ } do_install () { - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` cd ${B}/${BPN} - oe_runmake 'DESTDIR=${D}' MULTIBUILDTOP=${B}/$target/${BPN}/ install + oe_runmake 'DESTDIR=${D}' MULTIBUILDTOP=${B}/${TARGET_SYS}/${BPN}/ install # Move libgcc_s into /lib mkdir -p ${D}${base_libdir} @@ -64,18 +62,18 @@ addtask multilib_install after do_install before do_package do_populate_sysroot fakeroot python do_multilib_install() { import re - 
multilibs = d.getVar('MULTILIB_VARIANTS', True) + multilibs = d.getVar('MULTILIB_VARIANTS') if not multilibs or bb.data.inherits_class('nativesdk', d): return - binv = d.getVar('BINV', True) + binv = d.getVar('BINV') - mlprefix = d.getVar('MLPREFIX', True) - if ('%slibgcc' % mlprefix) != d.getVar('PN', True): + mlprefix = d.getVar('MLPREFIX') + if ('%slibgcc' % mlprefix) != d.getVar('PN'): return if mlprefix: - orig_tune = d.getVar('DEFAULTTUNE_MULTILIB_ORIGINAL', True) + orig_tune = d.getVar('DEFAULTTUNE_MULTILIB_ORIGINAL') orig_tune_params = get_tune_parameters(orig_tune, d) orig_tune_baselib = orig_tune_params['baselib'] orig_tune_bitness = orig_tune_baselib.replace('lib', '') @@ -83,10 +81,10 @@ fakeroot python do_multilib_install() { orig_tune_bitness = '32' src = '../../../' + orig_tune_baselib + '/' + \ - d.getVar('TARGET_SYS_MULTILIB_ORIGINAL', True) + '/' + binv + '/' + d.getVar('TARGET_SYS_MULTILIB_ORIGINAL') + '/' + binv + '/' - dest = d.getVar('D', True) + d.getVar('libdir', True) + '/' + \ - d.getVar('TARGET_SYS', True) + '/' + binv + '/' + orig_tune_bitness + dest = d.getVar('D') + d.getVar('libdir') + '/' + \ + d.getVar('TARGET_SYS') + '/' + binv + '/' + orig_tune_bitness if os.path.lexists(dest): os.unlink(dest) @@ -95,7 +93,7 @@ fakeroot python do_multilib_install() { for ml in multilibs.split(): - tune = d.getVar('DEFAULTTUNE_virtclass-multilib-' + ml, True) + tune = d.getVar('DEFAULTTUNE_virtclass-multilib-' + ml) if not tune: bb.warn('DEFAULTTUNE_virtclass-multilib-%s is not defined. Skipping...' % ml) continue @@ -118,11 +116,11 @@ fakeroot python do_multilib_install() { libcextension = '' src = '../../../' + tune_baselib + '/' + \ - tune_arch + d.getVar('TARGET_VENDOR', True) + 'ml' + ml + \ - '-' + d.getVar('TARGET_OS', True) + libcextension + '/' + binv + '/' + tune_arch + d.getVar('TARGET_VENDOR') + 'ml' + ml + \ + '-' + d.getVar('TARGET_OS') + libcextension + '/' + binv + '/' - dest = d.getVar('D', True) + d.getVar('libdir', True) + '/' + \ - d.getVar('TARGET_SYS', True) + '/' + binv + '/' + tune_bitness + dest = d.getVar('D') + d.getVar('libdir') + '/' + \ + d.getVar('TARGET_SYS') + '/' + binv + '/' + tune_bitness if os.path.lexists(dest): os.unlink(dest) @@ -131,7 +129,7 @@ fakeroot python do_multilib_install() { def get_original_os(d): vendoros = d.expand('${TARGET_ARCH}${ORIG_TARGET_VENDOR}-${TARGET_OS}') - for suffix in [d.getVar('ABIEXTENSION', True), d.getVar('LIBCEXTENSION', True)]: + for suffix in [d.getVar('ABIEXTENSION'), d.getVar('LIBCEXTENSION')]: if suffix and vendoros.endswith(suffix): vendoros = vendoros[:-len(suffix)] # Arm must use linux-gnueabi not linux as only the former is accepted by gcc @@ -147,11 +145,11 @@ fakeroot python do_extra_symlinks() { if bb.data.inherits_class('nativesdk', d): return - targetsys = d.getVar('BASETARGET_SYS', True) + targetsys = d.getVar('BASETARGET_SYS') - if targetsys != d.getVar('TARGET_SYS', True): - dest = d.getVar('D', True) + d.getVar('libdir', True) + '/' + targetsys - src = d.getVar('TARGET_SYS', True) - if not os.path.lexists(dest) and os.path.lexists(d.getVar('D', True) + d.getVar('libdir', True)): + if targetsys != d.getVar('TARGET_SYS'): + dest = d.getVar('D') + d.getVar('libdir') + '/' + targetsys + src = d.getVar('TARGET_SYS') + if not os.path.lexists(dest) and os.path.lexists(d.getVar('D') + d.getVar('libdir')): os.symlink(src, dest) } diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial.inc index 
687a8a0b0..950ad861e 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial.inc @@ -12,8 +12,9 @@ PACKAGES = "" EXTRA_OECONF += "--disable-shared" -LIBGCCBUILDTREENAME = "gcc-build-internal-initial-" - -do_populate_sysroot[sstate-outputdirs] = "${STAGING_DIR_TCBOOTSTRAP}/" +COMPILERINITIAL = "-initial" inherit nopackages + +# We really only want this built by things that need it, not any recrdeptask +deltask do_build diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial_6.2.bb deleted file mode 100644 index 19f253fce..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial_6.2.bb +++ /dev/null @@ -1,2 +0,0 @@ -require recipes-devtools/gcc/gcc-${PV}.inc -require libgcc-initial.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial_6.3.bb new file mode 100644 index 000000000..19f253fce --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc-initial_6.3.bb @@ -0,0 +1,2 @@ +require recipes-devtools/gcc/gcc-${PV}.inc +require libgcc-initial.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc.inc index 4770394c4..38d1643a9 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc.inc @@ -33,8 +33,6 @@ FILES_${PN}-dev = "\ ${libdir}/${TARGET_ARCH}${TARGET_VENDOR}* \ " -LIBGCCBUILDTREENAME = "gcc-build-internal-" - do_package[depends] += "virtual/${MLPREFIX}libc:do_packagedata" do_package_write_ipk[depends] += "virtual/${MLPREFIX}libc:do_packagedata" do_package_write_deb[depends] += "virtual/${MLPREFIX}libc:do_packagedata" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc_6.2.bb deleted file mode 100644 index a5152f28e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc_6.2.bb +++ /dev/null @@ -1,2 +0,0 @@ -require recipes-devtools/gcc/gcc-${PV}.inc -require libgcc.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc_6.3.bb new file mode 100644 index 000000000..a5152f28e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgcc_6.3.bb @@ -0,0 +1,2 @@ +require recipes-devtools/gcc/gcc-${PV}.inc +require libgcc.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran.inc b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran.inc index 58ceb2e07..4846decbb 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran.inc @@ -6,32 +6,27 @@ EXTRA_OECONF_PATHS = "\ " do_configure () { - mtarget=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` - hardlinkdir ${STAGING_INCDIR_NATIVE}/gcc-build-internal-$mtarget ${B} - - echo "Configuring libgfortran" - rm -rf ${B}/$target/libgfortran/ - mkdir -p ${B}/$target/libgfortran/ - cd ${B}/$target/libgfortran/ + rm -rf ${B}/${TARGET_SYS}/libgfortran/ + mkdir -p ${B}/${TARGET_SYS}/libgfortran/ + cd ${B}/${TARGET_SYS}/libgfortran/ chmod a+x 
${S}/libgfortran/configure - relpath=${@os.path.relpath("${S}/libgfortran", "${B}/$target/libgfortran")} + relpath=${@os.path.relpath("${S}/libgfortran", "${B}/${TARGET_SYS}/libgfortran")} $relpath/configure ${CONFIGUREOPTS} ${EXTRA_OECONF} # Easiest way to stop bad RPATHs getting into the library since we have a # broken libtool here - sed -i -e 's/hardcode_into_libs=yes/hardcode_into_libs=no/' ${B}/$target/libgfortran/libtool + sed -i -e 's/hardcode_into_libs=yes/hardcode_into_libs=no/' ${B}/${TARGET_SYS}/libgfortran/libtool } +EXTRACONFFUNCS += "extract_stashed_builddir" +do_configure[depends] += "${COMPILERDEP}" do_compile () { - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` - cd ${B}/$target/libgfortran/ - oe_runmake MULTIBUILDTOP=${B}/$target/libgfortran/ + cd ${B}/${TARGET_SYS}/libgfortran/ + oe_runmake MULTIBUILDTOP=${B}/${TARGET_SYS}/libgfortran/ } do_install () { - target=`echo ${TARGET_SYS} | sed -e s#-${SDKPKGSUFFIX}##` - cd ${B}/$target/libgfortran/ - oe_runmake 'DESTDIR=${D}' MULTIBUILDTOP=${B}/$target/libgfortran/ install + cd ${B}/${TARGET_SYS}/libgfortran/ + oe_runmake 'DESTDIR=${D}' MULTIBUILDTOP=${B}/${TARGET_SYS}/libgfortran/ install if [ -d ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/finclude ]; then rmdir --ignore-fail-on-non-empty -p ${D}${libdir}/gcc/${TARGET_SYS}/${BINV}/finclude fi @@ -69,7 +64,7 @@ do_package_write_deb[depends] += "virtual/${MLPREFIX}libc:do_packagedata" do_package_write_rpm[depends] += "virtual/${MLPREFIX}libc:do_packagedata" python __anonymous () { - f = d.getVar("FORTRAN", True) + f = d.getVar("FORTRAN") if "fortran" not in f: raise bb.parse.SkipPackage("libgfortran needs fortran support to be enabled in the compiler") } diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran_6.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran_6.2.bb deleted file mode 100644 index 71dd8b4bd..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran_6.2.bb +++ /dev/null @@ -1,3 +0,0 @@ -require recipes-devtools/gcc/gcc-${PV}.inc -require libgfortran.inc - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran_6.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran_6.3.bb new file mode 100644 index 000000000..71dd8b4bd --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gcc/libgfortran_6.3.bb @@ -0,0 +1,3 @@ +require recipes-devtools/gcc/gcc-${PV}.inc +require libgfortran.inc + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-7.11.1.inc b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-7.11.1.inc deleted file mode 100644 index d9dfe6f3f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-7.11.1.inc +++ /dev/null @@ -1,22 +0,0 @@ -LICENSE = "GPLv2 & GPLv3 & LGPLv2 & LGPLv3" -LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \ - file://COPYING3;md5=d32239bcb673463ab874e80d47fae504 \ - file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6 \ - file://COPYING.LIB;md5=9f604d8a4f8e74f4f5140845a21b6674" - -SRC_URI = "http://ftp.gnu.org/gnu/gdb/gdb-${PV}.tar.xz \ - file://0001-include-sys-types.h-for-mode_t.patch \ - file://0002-make-man-install-relative-to-DESTDIR.patch \ - file://0003-mips-linux-nat-Define-_ABIO32-if-not-defined.patch \ - file://0004-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch \ - file://0005-Add-support-for-Renesas-SH-sh4-architecture.patch \ - file://0006-Dont-disable-libreadline.a-when-using-disable-static.patch \ - 
file://0007-use-asm-sgidefs.h.patch \ - file://0008-Use-exorted-definitions-of-SIGRTMIN.patch \ - file://0009-Change-order-of-CFLAGS.patch \ - file://0010-resolve-restrict-keyword-conflict.patch \ - file://0011-avx_mpx.patch \ -" - -SRC_URI[md5sum] = "5aa71522e488e358243917967db87476" -SRC_URI[sha256sum] = "e9216da4e3755e9f414c1aa0026b626251dfc57ffe572a266e98da4f6988fc70" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-7.12.1.inc b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-7.12.1.inc new file mode 100644 index 000000000..634756ce4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-7.12.1.inc @@ -0,0 +1,22 @@ +LICENSE = "GPLv2 & GPLv3 & LGPLv2 & LGPLv3" +LIC_FILES_CHKSUM = "file://COPYING;md5=59530bdf33659b29e73d4adb9f9f6552 \ + file://COPYING3;md5=d32239bcb673463ab874e80d47fae504 \ + file://COPYING3.LIB;md5=6a6a8e020838b23406c81b19c1d46df6 \ + file://COPYING.LIB;md5=9f604d8a4f8e74f4f5140845a21b6674" + +SRC_URI = "http://ftp.gnu.org/gnu/gdb/gdb-${PV}.tar.xz \ + file://0001-include-sys-types.h-for-mode_t.patch \ + file://0002-make-man-install-relative-to-DESTDIR.patch \ + file://0003-mips-linux-nat-Define-_ABIO32-if-not-defined.patch \ + file://0004-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch \ + file://0005-Add-support-for-Renesas-SH-sh4-architecture.patch \ + file://0006-Dont-disable-libreadline.a-when-using-disable-static.patch \ + file://0007-use-asm-sgidefs.h.patch \ + file://0008-Use-exorted-definitions-of-SIGRTMIN.patch \ + file://0009-Change-order-of-CFLAGS.patch \ + file://0010-resolve-restrict-keyword-conflict.patch \ + file://package_devel_gdb_patches_120-sigprocmask-invalid-call.patch \ +" +SRC_URI[md5sum] = "193453347ddced7acb6b1cd2ee8f2e4b" +SRC_URI[sha256sum] = "4607680b973d3ec92c30ad029f1b7dbde3876869e6b3a117d8a7e90081113186" + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-common.inc b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-common.inc index 33a5ce983..239b37586 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-common.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-common.inc @@ -6,12 +6,7 @@ DEPENDS = "expat zlib ncurses virtual/libiconv ${LTTNGUST}" LTTNGUST = "lttng-ust" LTTNGUST_aarch64 = "" LTTNGUST_libc-uclibc = "" -LTTNGUST_mips = "" -LTTNGUST_mipsel = "" -LTTNGUST_mips64 = "" -LTTNGUST_mips64el = "" -LTTNGUST_mips64n32 = "" -LTTNGUST_mips64eln32 = "" +LTTNGUST_mipsarch = "" LTTNGUST_sh4 = "" LTTNGUST_libc-musl = "" @@ -52,11 +47,12 @@ do_configure () { } # we don't want gdb to provide bfd/iberty/opcodes, which instead will override the -# right bits installed by binutils. +# right bits installed by binutils. Same for bfd.info -- also from binutils. 
do_install_append() { rm -rf ${D}${libdir} rm -rf ${D}${includedir} rm -rf ${D}${datadir}/locale + rm -f ${D}${infodir}/bfd.info } RRECOMMENDS_gdb_append_linux = " glibc-thread-db " diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross-canadian_7.11.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross-canadian_7.11.1.bb deleted file mode 100644 index 301035940..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross-canadian_7.11.1.bb +++ /dev/null @@ -1,3 +0,0 @@ -require gdb-common.inc -require gdb-cross-canadian.inc -require gdb-${PV}.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross-canadian_7.12.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross-canadian_7.12.1.bb new file mode 100644 index 000000000..301035940 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross-canadian_7.12.1.bb @@ -0,0 +1,3 @@ +require gdb-common.inc +require gdb-cross-canadian.inc +require gdb-${PV}.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross.inc b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross.inc index 5fa7c3367..ebe329f6d 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross.inc @@ -1,6 +1,6 @@ require gdb-common.inc -DEPENDS = "expat-native ncurses-native" +DEPENDS = "expat-native ncurses-native flex-native bison-native" inherit python3native @@ -21,7 +21,10 @@ GDBPROPREFIX = "" PN = "gdb-cross-${TARGET_ARCH}" BPN = "gdb" +# Ignore how TARGET_ARCH is computed. +TARGET_ARCH[vardepvalue] = "${TARGET_ARCH}" + inherit cross inherit gettext -datadir .= "/gdb-${TUNE_PKGARCH}${TARGET_VENDOR}-${TARGET_OS}" +datadir .= "/gdb-${TARGET_SYS}${TARGET_VENDOR}-${TARGET_OS}" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross_7.11.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross_7.11.1.bb deleted file mode 100644 index 50cf159fd..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross_7.11.1.bb +++ /dev/null @@ -1,2 +0,0 @@ -require gdb-cross.inc -require gdb-${PV}.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross_7.12.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross_7.12.1.bb new file mode 100644 index 000000000..50cf159fd --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb-cross_7.12.1.bb @@ -0,0 +1,2 @@ +require gdb-cross.inc +require gdb-${PV}.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0001-include-sys-types.h-for-mode_t.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0001-include-sys-types.h-for-mode_t.patch index 0042c0063..fc6c92f18 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0001-include-sys-types.h-for-mode_t.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0001-include-sys-types.h-for-mode_t.patch @@ -1,4 +1,4 @@ -From fddd7178915968acf680814411b8b3cb137d0587 Mon Sep 17 00:00:00 2001 +From 2c81e17216b4e471a1ce0bddb50f374b0722a2ce Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Tue, 19 Jan 2016 18:18:52 -0800 Subject: [PATCH 01/10] include sys/types.h for mode_t @@ -14,7 +14,7 @@ Upstream-Status: Pending 1 file changed, 1 insertion(+) diff --git a/gdb/gdbserver/target.h b/gdb/gdbserver/target.h -index 5af2051..f42c510 100644 +index 4c14c204bb..bdab18f7f7 100644 --- a/gdb/gdbserver/target.h +++ b/gdb/gdbserver/target.h @@ -28,6 +28,7 @@ @@ 
-26,5 +26,5 @@ index 5af2051..f42c510 100644 struct emit_ops; struct buffer; -- -2.8.2 +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0002-make-man-install-relative-to-DESTDIR.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0002-make-man-install-relative-to-DESTDIR.patch index defed621f..9a9201b39 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0002-make-man-install-relative-to-DESTDIR.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0002-make-man-install-relative-to-DESTDIR.patch @@ -1,4 +1,4 @@ -From 3229cb09033eeb5003a08d91fa9d43be8ba4c86b Mon Sep 17 00:00:00 2001 +From f316d604b312bead78594f02e1355633eda9507b Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Mon, 2 Mar 2015 02:27:55 +0000 Subject: [PATCH 02/10] make man install relative to DESTDIR @@ -11,7 +11,7 @@ Signed-off-by: Khem Raj 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sim/common/Makefile.in b/sim/common/Makefile.in -index a05f507..8d0fa64 100644 +index a05f50767a..8d0fa64ea8 100644 --- a/sim/common/Makefile.in +++ b/sim/common/Makefile.in @@ -35,7 +35,7 @@ tooldir = $(libdir)/$(target_alias) @@ -24,5 +24,5 @@ index a05f507..8d0fa64 100644 includedir = @includedir@ -- -2.8.2 +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0003-mips-linux-nat-Define-_ABIO32-if-not-defined.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0003-mips-linux-nat-Define-_ABIO32-if-not-defined.patch index bac793964..74c00063e 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0003-mips-linux-nat-Define-_ABIO32-if-not-defined.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0003-mips-linux-nat-Define-_ABIO32-if-not-defined.patch @@ -1,4 +1,4 @@ -From 88e67caed662d8344c8db56176c9f1221e6cd2a2 Mon Sep 17 00:00:00 2001 +From f2912b1d2e5c854a112176682903b696da33e003 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Wed, 23 Mar 2016 06:30:09 +0000 Subject: [PATCH 03/10] mips-linux-nat: Define _ABIO32 if not defined @@ -17,7 +17,7 @@ Upstream-Status: Pending 1 file changed, 5 insertions(+) diff --git a/gdb/mips-linux-nat.c b/gdb/mips-linux-nat.c -index bfe9fcb..449b43a 100644 +index 0f20f16814..722532bb6c 100644 --- a/gdb/mips-linux-nat.c +++ b/gdb/mips-linux-nat.c @@ -46,6 +46,11 @@ @@ -33,5 +33,5 @@ index bfe9fcb..449b43a 100644 we'll clear this and use PTRACE_PEEKUSER instead. 
*/ static int have_ptrace_regsets = 1; -- -2.8.2 +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0004-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0004-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch index a5fe2a279..847f24f7f 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0004-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0004-ppc-ptrace-Define-pt_regs-uapi_pt_regs-on-GLIBC-syst.patch @@ -1,6 +1,6 @@ -From bec564eb454bc7fc6ecfcb573aa53040bf39c1d5 Mon Sep 17 00:00:00 2001 +From 7ef7b709885378279c424eab0510b93233400b24 Mon Sep 17 00:00:00 2001 From: Khem Raj -Date: Sat, 30 Apr 2016 16:46:03 +0000 +Date: Sat, 6 Aug 2016 17:32:50 -0700 Subject: [PATCH 04/10] ppc/ptrace: Define pt_regs uapi_pt_regs on !GLIBC systems @@ -13,7 +13,7 @@ Signed-off-by: Khem Raj 2 files changed, 12 insertions(+) diff --git a/gdb/gdbserver/linux-ppc-low.c b/gdb/gdbserver/linux-ppc-low.c -index 2145c50..777905d 100644 +index 1d013f185f..68098b3db9 100644 --- a/gdb/gdbserver/linux-ppc-low.c +++ b/gdb/gdbserver/linux-ppc-low.c @@ -21,7 +21,13 @@ @@ -29,9 +29,9 @@ index 2145c50..777905d 100644 +#endif #include "nat/ppc-linux.h" - + #include "linux-ppc-tdesc.h" diff --git a/gdb/nat/ppc-linux.h b/gdb/nat/ppc-linux.h -index 85fbcd8..cbec9c5 100644 +index 85fbcd84bb..cbec9c53b2 100644 --- a/gdb/nat/ppc-linux.h +++ b/gdb/nat/ppc-linux.h @@ -18,7 +18,13 @@ @@ -49,5 +49,5 @@ index 85fbcd8..cbec9c5 100644 /* This sometimes isn't defined. */ -- -2.8.2 +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0005-Add-support-for-Renesas-SH-sh4-architecture.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0005-Add-support-for-Renesas-SH-sh4-architecture.patch index 8809e6f8b..d0c15f628 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0005-Add-support-for-Renesas-SH-sh4-architecture.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0005-Add-support-for-Renesas-SH-sh4-architecture.patch @@ -1,4 +1,4 @@ -From 8c5fe58c5a0044ddb517a41b277ed27fb3d3bedc Mon Sep 17 00:00:00 2001 +From 6649e2cccfb11dec076abb02eae0afab95614829 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Mon, 2 Mar 2015 02:31:12 +0000 Subject: [PATCH 05/10] Add support for Renesas SH (sh4) architecture. 
@@ -27,10 +27,10 @@ Signed-off-by: Khem Raj 11 files changed, 617 insertions(+), 29 deletions(-) diff --git a/gdb/Makefile.in b/gdb/Makefile.in -index ec2af52..df8e84d 100644 +index 7b2df86878..10f1266fe3 100644 --- a/gdb/Makefile.in +++ b/gdb/Makefile.in -@@ -1736,6 +1736,7 @@ ALLDEPFILES = \ +@@ -1750,6 +1750,7 @@ ALLDEPFILES = \ score-tdep.c \ ser-go32.c ser-pipe.c ser-tcp.c ser-mingw.c \ sh-tdep.c sh64-tdep.c shnbsd-tdep.c shnbsd-nat.c \ @@ -39,7 +39,7 @@ index ec2af52..df8e84d 100644 solib-svr4.c \ sparc-linux-nat.c sparc-linux-tdep.c \ diff --git a/gdb/configure.host b/gdb/configure.host -index ef265eb..322a1e2 100644 +index ef265ebe29..322a1e2c67 100644 --- a/gdb/configure.host +++ b/gdb/configure.host @@ -149,6 +149,7 @@ powerpc*-*-linux*) gdb_host=linux ;; @@ -51,7 +51,7 @@ index ef265eb..322a1e2 100644 gdb_host=nbsd ;; sh*-*-openbsd*) gdb_host=nbsd ;; diff --git a/gdb/sh-linux-tdep.c b/gdb/sh-linux-tdep.c -index 2418d25..ac8ea9e 100644 +index 2418d25010..ac8ea9e2a4 100644 --- a/gdb/sh-linux-tdep.c +++ b/gdb/sh-linux-tdep.c @@ -18,14 +18,37 @@ @@ -599,7 +599,7 @@ index 2418d25..ac8ea9e 100644 /* GNU/Linux uses SVR4-style shared libraries. */ diff --git a/gdb/sh-tdep.c b/gdb/sh-tdep.c -index 336b48e..847b271 100644 +index 694f5f742d..8d54df7a1a 100644 --- a/gdb/sh-tdep.c +++ b/gdb/sh-tdep.c @@ -21,6 +21,9 @@ @@ -662,7 +662,7 @@ index 336b48e..847b271 100644 len = TYPE_LENGTH (type); val = sh_justify_value_in_reg (gdbarch, args[argnum], len); -@@ -1821,7 +1808,7 @@ sh_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum, +@@ -1819,7 +1806,7 @@ sh_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum, reg->how = DWARF2_FRAME_REG_UNDEFINED; } @@ -671,7 +671,7 @@ index 336b48e..847b271 100644 sh_alloc_frame_cache (void) { struct sh_frame_cache *cache; -@@ -1848,7 +1835,7 @@ sh_alloc_frame_cache (void) +@@ -1846,7 +1833,7 @@ sh_alloc_frame_cache (void) return cache; } @@ -680,7 +680,7 @@ index 336b48e..847b271 100644 sh_frame_cache (struct frame_info *this_frame, void **this_cache) { struct gdbarch *gdbarch = get_frame_arch (this_frame); -@@ -1915,9 +1902,9 @@ sh_frame_cache (struct frame_info *this_frame, void **this_cache) +@@ -1913,9 +1900,9 @@ sh_frame_cache (struct frame_info *this_frame, void **this_cache) return cache; } @@ -693,7 +693,7 @@ index 336b48e..847b271 100644 { struct gdbarch *gdbarch = get_frame_arch (this_frame); struct sh_frame_cache *cache = sh_frame_cache (this_frame, this_cache); -@@ -1931,7 +1918,7 @@ sh_frame_prev_register (struct frame_info *this_frame, +@@ -1929,7 +1916,7 @@ sh_frame_prev_register (struct frame_info *this_frame, the current frame. Frob regnum so that we pull the value from the correct place. */ if (regnum == gdbarch_pc_regnum (gdbarch)) @@ -702,7 +702,7 @@ index 336b48e..847b271 100644 if (regnum < SH_NUM_REGS && cache->saved_regs[regnum] != -1) return frame_unwind_got_memory (this_frame, regnum, -@@ -2240,8 +2227,8 @@ sh_return_in_first_hidden_param_p (struct gdbarch *gdbarch, +@@ -2238,8 +2225,8 @@ sh_return_in_first_hidden_param_p (struct gdbarch *gdbarch, static struct gdbarch * sh_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches) { @@ -712,7 +712,7 @@ index 336b48e..847b271 100644 /* SH5 is handled entirely in sh64-tdep.c. 
*/ if (info.bfd_arch_info->mach == bfd_mach_sh5) -@@ -2257,6 +2244,18 @@ sh_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches) +@@ -2255,6 +2242,18 @@ sh_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches) tdep = XCNEW (struct gdbarch_tdep); gdbarch = gdbarch_alloc (&info, tdep); @@ -731,7 +731,7 @@ index 336b48e..847b271 100644 set_gdbarch_short_bit (gdbarch, 2 * TARGET_CHAR_BIT); set_gdbarch_int_bit (gdbarch, 4 * TARGET_CHAR_BIT); set_gdbarch_long_bit (gdbarch, 4 * TARGET_CHAR_BIT); -@@ -2407,10 +2406,11 @@ sh_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches) +@@ -2405,10 +2404,11 @@ sh_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches) break; } @@ -745,7 +745,7 @@ index 336b48e..847b271 100644 frame_unwind_append_unwinder (gdbarch, &sh_frame_unwind); diff --git a/gdb/sh-tdep.h b/gdb/sh-tdep.h -index 666968f..62c65b5 100644 +index 666968f787..62c65b55ea 100644 --- a/gdb/sh-tdep.h +++ b/gdb/sh-tdep.h @@ -21,6 +21,12 @@ @@ -828,7 +828,7 @@ index 666968f..62c65b5 100644 where each general-purpose register is stored inside the associated core file section. */ diff --git a/gdb/testsuite/gdb.asm/asm-source.exp b/gdb/testsuite/gdb.asm/asm-source.exp -index 6d9aef8..5b66b42 100644 +index 6d9aef81bb..5b66b429d1 100644 --- a/gdb/testsuite/gdb.asm/asm-source.exp +++ b/gdb/testsuite/gdb.asm/asm-source.exp @@ -116,6 +116,11 @@ switch -glob -- [istarget] { @@ -844,7 +844,7 @@ index 6d9aef8..5b66b42 100644 set asm-arch sh set debug-flags "-gdwarf-2" diff --git a/gdb/testsuite/gdb.asm/sh.inc b/gdb/testsuite/gdb.asm/sh.inc -index a4a5fc5..89efed7 100644 +index a4a5fc545e..89efed7795 100644 --- a/gdb/testsuite/gdb.asm/sh.inc +++ b/gdb/testsuite/gdb.asm/sh.inc @@ -40,9 +40,8 @@ @@ -859,7 +859,7 @@ index a4a5fc5..89efed7 100644 .align 1 .Lafterconst\@: diff --git a/gdb/testsuite/gdb.base/annota1.c b/gdb/testsuite/gdb.base/annota1.c -index 424e1b8..0de2e7b 100644 +index 424e1b8327..0de2e7b633 100644 --- a/gdb/testsuite/gdb.base/annota1.c +++ b/gdb/testsuite/gdb.base/annota1.c @@ -1,6 +1,9 @@ @@ -873,7 +873,7 @@ index 424e1b8..0de2e7b 100644 void handle_USR1 (int sig) diff --git a/gdb/testsuite/gdb.base/annota3.c b/gdb/testsuite/gdb.base/annota3.c -index 424e1b8..952aaf21 100644 +index 424e1b8327..952aaf218a 100644 --- a/gdb/testsuite/gdb.base/annota3.c +++ b/gdb/testsuite/gdb.base/annota3.c @@ -1,6 +1,10 @@ @@ -888,7 +888,7 @@ index 424e1b8..952aaf21 100644 void handle_USR1 (int sig) diff --git a/gdb/testsuite/gdb.base/sigall.c b/gdb/testsuite/gdb.base/sigall.c -index 81f3b08..1574b2d 100644 +index 81f3b08d6b..1574b2d6cb 100644 --- a/gdb/testsuite/gdb.base/sigall.c +++ b/gdb/testsuite/gdb.base/sigall.c @@ -1,6 +1,9 @@ @@ -902,7 +902,7 @@ index 81f3b08..1574b2d 100644 /* Signal handlers, we set breakpoints in them to make sure that the signals really get delivered. 
*/ diff --git a/gdb/testsuite/gdb.base/signals.c b/gdb/testsuite/gdb.base/signals.c -index 7566068..1205a9b 100644 +index 756606880f..1205a9bc9c 100644 --- a/gdb/testsuite/gdb.base/signals.c +++ b/gdb/testsuite/gdb.base/signals.c @@ -3,6 +3,10 @@ @@ -917,5 +917,5 @@ index 7566068..1205a9b 100644 static int count = 0; -- -2.8.2 +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0006-Dont-disable-libreadline.a-when-using-disable-static.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0006-Dont-disable-libreadline.a-when-using-disable-static.patch index 394d26d06..5ed8e81e4 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0006-Dont-disable-libreadline.a-when-using-disable-static.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0006-Dont-disable-libreadline.a-when-using-disable-static.patch @@ -1,4 +1,4 @@ -From f3932cb2960fd54655a448b13d5a5b80f356f8de Mon Sep 17 00:00:00 2001 +From 2a6e28ad5c0cad189a3697d96de031e4713052b8 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Sat, 30 Apr 2016 15:25:03 -0700 Subject: [PATCH 06/10] Dont disable libreadline.a when using --disable-static @@ -19,7 +19,7 @@ Signed-off-by: Khem Raj 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/Makefile.def b/Makefile.def -index ea8453e..0fc66c6 100644 +index ea8453e851..0fc66c694b 100644 --- a/Makefile.def +++ b/Makefile.def @@ -104,7 +104,8 @@ host_modules= { module= libiconv; @@ -33,10 +33,10 @@ index ea8453e..0fc66c6 100644 host_modules= { module= sim; }; host_modules= { module= texinfo; no_install= true; }; diff --git a/Makefile.in b/Makefile.in -index 2733c4d..3e04e80 100644 +index cb0136e8f8..55f9085c16 100644 --- a/Makefile.in +++ b/Makefile.in -@@ -25380,7 +25380,7 @@ configure-readline: +@@ -25385,7 +25385,7 @@ configure-readline: $$s/$$module_srcdir/configure \ --srcdir=$${topdir}/$$module_srcdir \ $(HOST_CONFIGARGS) --build=${build_alias} --host=${host_alias} \ @@ -46,5 +46,5 @@ index 2733c4d..3e04e80 100644 @endif readline -- -2.8.2 +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0007-use-asm-sgidefs.h.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0007-use-asm-sgidefs.h.patch index f32f8ee41..a42c9fd2a 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0007-use-asm-sgidefs.h.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0007-use-asm-sgidefs.h.patch @@ -1,4 +1,4 @@ -From 048675a915a72989f2613386975730da016e7c5d Mon Sep 17 00:00:00 2001 +From d7543b44255da4ae71447d4e4d63e0b6aa4ed909 Mon Sep 17 00:00:00 2001 From: Andre McCurdy Date: Sat, 30 Apr 2016 15:29:06 -0700 Subject: [PATCH 07/10] use @@ -19,7 +19,7 @@ Signed-off-by: Khem Raj 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gdb/mips-linux-nat.c b/gdb/mips-linux-nat.c -index 449b43a..09603da 100644 +index 722532bb6c..51d8fc8f66 100644 --- a/gdb/mips-linux-nat.c +++ b/gdb/mips-linux-nat.c @@ -31,7 +31,7 @@ @@ -32,5 +32,5 @@ index 449b43a..09603da 100644 #include -- -2.8.2 +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0008-Use-exorted-definitions-of-SIGRTMIN.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0008-Use-exorted-definitions-of-SIGRTMIN.patch index 7e58b617a..ae9cb8c0e 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0008-Use-exorted-definitions-of-SIGRTMIN.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0008-Use-exorted-definitions-of-SIGRTMIN.patch @@ -1,4 +1,4 @@ -From 
e54ead9d81f4d38412751b815f909db3cb144bb1 Mon Sep 17 00:00:00 2001 +From aacd77184da1328908da41c9fdb55ad881fa0e99 Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Sat, 30 Apr 2016 15:31:40 -0700 Subject: [PATCH 08/10] Use exorted definitions of SIGRTMIN @@ -20,10 +20,10 @@ Signed-off-by: Khem Raj 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/gdb/linux-nat.c b/gdb/linux-nat.c -index 0829bcb..3b8a896 100644 +index 5d5efa0af4..e3420b49a0 100644 --- a/gdb/linux-nat.c +++ b/gdb/linux-nat.c -@@ -4845,6 +4845,6 @@ lin_thread_get_thread_signals (sigset_t *set) +@@ -5022,6 +5022,6 @@ lin_thread_get_thread_signals (sigset_t *set) /* NPTL reserves the first two RT signals, but does not provide any way for the debugger to query the signal numbers - fortunately they don't change. */ @@ -33,7 +33,7 @@ index 0829bcb..3b8a896 100644 + sigaddset (set, SIGRTMIN + 1); } diff --git a/gdb/nat/linux-nat.h b/gdb/nat/linux-nat.h -index 2b485db..d058afc 100644 +index 2b485db141..d058afcde8 100644 --- a/gdb/nat/linux-nat.h +++ b/gdb/nat/linux-nat.h @@ -85,4 +85,8 @@ extern enum target_stop_reason lwp_stop_reason (struct lwp_info *lwp); @@ -46,5 +46,5 @@ index 2b485db..d058afc 100644 + #endif /* LINUX_NAT_H */ -- -2.8.2 +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0009-Change-order-of-CFLAGS.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0009-Change-order-of-CFLAGS.patch index a230047af..ed6e0aeb7 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0009-Change-order-of-CFLAGS.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0009-Change-order-of-CFLAGS.patch @@ -1,4 +1,4 @@ -From ba0bbf887d4911ccee9df57cb13eafb1de34bb31 Mon Sep 17 00:00:00 2001 +From 8c35d5d1825ed017cc58ea91011412e54c002eeb Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Sat, 30 Apr 2016 15:35:39 -0700 Subject: [PATCH 09/10] Change order of CFLAGS @@ -13,7 +13,7 @@ Signed-off-by: Khem Raj 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gdb/gdbserver/Makefile.in b/gdb/gdbserver/Makefile.in -index 1e874e3..91e8550 100644 +index f844ab8853..3f88db52e3 100644 --- a/gdb/gdbserver/Makefile.in +++ b/gdb/gdbserver/Makefile.in @@ -138,10 +138,10 @@ CXXFLAGS = @CXXFLAGS@ @@ -30,5 +30,5 @@ index 1e874e3..91e8550 100644 # LDFLAGS is specifically reserved for setting from the command line # when running make. 
-- -2.8.2 +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0010-resolve-restrict-keyword-conflict.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0010-resolve-restrict-keyword-conflict.patch index 16c34c703..1938beb56 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0010-resolve-restrict-keyword-conflict.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0010-resolve-restrict-keyword-conflict.patch @@ -1,4 +1,4 @@ -From 5a9ccb8c0728b658fc4f7f0f7b36873c64274f10 Mon Sep 17 00:00:00 2001 +From 7816d3497266e55c1c921d7cc1c8bf81c8ed0b4a Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Tue, 10 May 2016 08:47:05 -0700 Subject: [PATCH 10/10] resolve restrict keyword conflict @@ -15,7 +15,7 @@ Signed-off-by: Khem Raj 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/gdb/gnulib/import/sys_time.in.h b/gdb/gnulib/import/sys_time.in.h -index c556c5d..2a6107f 100644 +index c556c5db23..2a6107fcf8 100644 --- a/gdb/gnulib/import/sys_time.in.h +++ b/gdb/gnulib/import/sys_time.in.h @@ -93,20 +93,20 @@ struct timeval @@ -44,5 +44,5 @@ index c556c5d..2a6107f 100644 _GL_CXXALIASWARN (gettimeofday); #elif defined GNULIB_POSIXCHECK -- -2.8.2 +2.11.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0011-avx_mpx.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0011-avx_mpx.patch deleted file mode 100644 index 209c4fcbd..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/0011-avx_mpx.patch +++ /dev/null @@ -1,2601 +0,0 @@ -gdb: Backport patch to support changes with AVX and MPX - -The current MPX target descriptions assume that MPX is always combined -with AVX, however that's not correct. We can have machines with MPX -and without AVX; or machines with AVX and without MPX. - -This patch adds new target descriptions for machines that support -both MPX and AVX, as duplicates of the existing MPX descriptions. - -The following commit will remove AVX from the MPX-only descriptions. - -Upstream-Status: Backport - -Signed-off-by: bavery - - -Orignal patch changelog and author attribution: - -2016-04-16 Walfred Tedeschi - -gdb/ChangeLog: - - * amd64-linux-tdep.c (features/i386/amd64-avx-mpx-linux.c): - New include. - (amd64_linux_core_read_description): Add case for - X86_XSTATE_AVX_MPX_MASK. - (_initialize_amd64_linux_tdep): Call initialize_tdesc_amd64_avx_mpx_linux. - * amd64-linux-tdep.h (tdesc_amd64_avx_mpx_linux): New definition. - * amd64-tdep.c (features/i386/amd64-avx-mpx.c): New include. - (amd64_target_description): Add case for X86_XSTATE_AVX_MPX_MASK. - (_initialize_amd64_tdep): Call initialize_tdesc_amd64_avx_mpx. - * common/x86-xstate.h (X86_XSTATE_MPX_MASK): Remove AVX bits. - (X86_XSTATE_AVX_MPX_MASK): New case. - * features/Makefile (i386/i386-avx-mpx, i386/i386-avx-mpx-linux) - (i386/amd64-avx-mpx, i386/amd64-avx-mpx-linux): New rules. - (i386/i386-avx-mpx-expedite, i386/i386-avx-mpx-linux-expedite) - (i386/amd64-avx-mpx-expedite, i386/amd64-avx-mpx-linux-expedite): - New expedites. - * i386-linux-tdep.c (features/i386/i386-avx-mpx-linux.c): New - include. - (i386_linux_core_read_description): Add case - X86_XSTATE_AVX_MPX_MASK. - (_initialize_i386_linux_tdep): Call - initialize_tdesc_i386_avx_mpx_linux. - * i386-linux-tdep.h (tdesc_i386_avx_mpx_linux): New include. - * i386-tdep.c (features/i386/i386-avx-mpx.c): New include. - (i386_target_description): Add case for X86_XSTATE_AVX_MPX_MASK. 
- * x86-linux-nat.c (x86_linux_read_description): Add case for - X86_XSTATE_AVX_MPX_MASK. - * features/i386/amd64-avx-mpx-linux.xml: New file. - * features/i386/i386-avx-mpx-linux.xml: New file. - * features/i386/i386-avx-mpx.xml: New file. - * features/i386/amd64-avx-mpx.xml: New file. - * features/i386/amd64-avx-mpx-linux.c: Generated. - * features/i386/amd64-avx-mpx.c: Generated. - * features/i386/i386-avx-mpx-linux.c: Generated. - * features/i386/i386-avx-mpx.c: Generated. - * regformats/i386/amd64-avx-mpx-linux.dat: Generated. - * regformats/i386/amd64-avx-mpx.dat: Generated. - * regformats/i386/i386-avx-mpx-linux.dat: Generated. - * regformats/i386/i386-avx-mpx.dat: Generated. - -gdb/gdbserver/ChangeLog: - - * Makefile.in (clean): Add removal for i386-avx-mpx.c, - i386-avx-mpx-linux.c, amd64-avx-mpx.c and amd64-avx-mpx-linux.c. - (i386-avx-mpx.c, i386-avx-mpx-linux.c, amd64-avx-mpx.c) - (amd64-avx-mpx-linux.c): New rules. - (amd64-avx-mpx-linux-ipa.o, i386-avx-mpx-linux-ipa.o): New rule. - * configure.srv (srv_i386_regobj): Add i386-avx-mpx.o. - (srv_i386_linux_regobj): Add i386-avx-mpx-linux.o. - (srv_amd64_regobj): Add amd64-avx-mpx.o. - (srv_amd64_linux_regobj): Add amd64-avx-mpx-linux.o. - (srv_i386_xmlfiles): Add i386/i386-avx-mpx.xml. - (srv_amd64_xmlfiles): Add i386/amd64-avx-mpx.xml. - (srv_i386_linux_xmlfiles): Add i386/i386-avx-mpx-linux.xml. - (srv_amd64_linux_xmlfiles): Add i386/amd64-avx-mpx-linux.xml. - (ipa_i386_linux_regobj): Add i386-avx-mpx-linux-ipa.o. - (ipa_amd64_linux_regobj): Add amd64-avx-mpx-linux-ipa.o. - * linux-x86-low.c (x86_linux_read_description): Add case for - X86_XSTATE_AVX_MPX_MASK. - (x86_get_ipa_tdesc_idx): Add cases for avx_mpx. - (initialize_low_arch): Call init_registers_amd64_avx_mpx_linux and - init_registers_i386_avx_mpx_linux. - * linux-i386-ipa.c (get_ipa_tdesc): Add case for avx_mpx. - (initialize_low_tracepoint): Call - init_registers_i386_avx_mpx_linux. - * linux-amd64-ipa.c (get_ipa_tdesc): Add case for avx_mpx. - (initialize_low_tracepoint): Call - init_registers_amd64_avx_mpx_linux. - * linux-x86-tdesc.h (X86_TDESC_AVX_MPX): New enum value. - (init_registers_amd64_avx_mpx_linux, tdesc_amd64_avx_mpx_linux) - (init_registers_i386_avx_mpx_linux, tdesc_i386_avx_mpx_linux): New - declarations. - - - - -diff --git a/gdb/amd64-linux-tdep.c b/gdb/amd64-linux-tdep.c -index 21bcd99..5327f7c 100644 ---- a/gdb/amd64-linux-tdep.c -+++ b/gdb/amd64-linux-tdep.c -@@ -43,6 +43,7 @@ - #include "features/i386/amd64-linux.c" - #include "features/i386/amd64-avx-linux.c" - #include "features/i386/amd64-mpx-linux.c" -+#include "features/i386/amd64-avx-mpx-linux.c" - #include "features/i386/amd64-avx512-linux.c" - - #include "features/i386/x32-linux.c" -@@ -1590,6 +1591,11 @@ amd64_linux_core_read_description (struct gdbarch *gdbarch, - return tdesc_x32_avx_linux; /* No x32 MPX falling back to AVX. */ - else - return tdesc_amd64_mpx_linux; -+ case X86_XSTATE_AVX_MPX_MASK: -+ if (gdbarch_ptr_bit (gdbarch) == 32) -+ return tdesc_x32_avx_linux; /* No x32 MPX falling back to AVX. 
*/ -+ else -+ return tdesc_amd64_avx_mpx_linux; - case X86_XSTATE_AVX_MASK: - if (gdbarch_ptr_bit (gdbarch) == 32) - return tdesc_x32_avx_linux; -@@ -2285,6 +2291,7 @@ _initialize_amd64_linux_tdep (void) - initialize_tdesc_amd64_linux (); - initialize_tdesc_amd64_avx_linux (); - initialize_tdesc_amd64_mpx_linux (); -+ initialize_tdesc_amd64_avx_mpx_linux (); - initialize_tdesc_amd64_avx512_linux (); - - initialize_tdesc_x32_linux (); -diff --git a/gdb/amd64-linux-tdep.h b/gdb/amd64-linux-tdep.h -index 8673442..d64d5d6 100644 ---- a/gdb/amd64-linux-tdep.h -+++ b/gdb/amd64-linux-tdep.h -@@ -35,6 +35,7 @@ - extern struct target_desc *tdesc_amd64_linux; - extern struct target_desc *tdesc_amd64_avx_linux; - extern struct target_desc *tdesc_amd64_mpx_linux; -+extern struct target_desc *tdesc_amd64_avx_mpx_linux; - extern struct target_desc *tdesc_amd64_avx512_linux; - - extern struct target_desc *tdesc_x32_linux; -diff --git a/gdb/amd64-tdep.c b/gdb/amd64-tdep.c -index fae92b2..88e3bf8 100644 ---- a/gdb/amd64-tdep.c -+++ b/gdb/amd64-tdep.c -@@ -44,6 +44,7 @@ - #include "features/i386/amd64.c" - #include "features/i386/amd64-avx.c" - #include "features/i386/amd64-mpx.c" -+#include "features/i386/amd64-avx-mpx.c" - #include "features/i386/amd64-avx512.c" - - #include "features/i386/x32.c" -@@ -3132,6 +3133,8 @@ amd64_target_description (uint64_t xcr0) - return tdesc_amd64_avx512; - case X86_XSTATE_MPX_MASK: - return tdesc_amd64_mpx; -+ case X86_XSTATE_AVX_MPX_MASK: -+ return tdesc_amd64_avx_mpx; - case X86_XSTATE_AVX_MASK: - return tdesc_amd64_avx; - default: -@@ -3148,6 +3151,7 @@ _initialize_amd64_tdep (void) - initialize_tdesc_amd64 (); - initialize_tdesc_amd64_avx (); - initialize_tdesc_amd64_mpx (); -+ initialize_tdesc_amd64_avx_mpx (); - initialize_tdesc_amd64_avx512 (); - - initialize_tdesc_x32 (); -diff --git a/gdb/common/x86-xstate.h b/gdb/common/x86-xstate.h -index 8386420..0aa9164 100644 ---- a/gdb/common/x86-xstate.h -+++ b/gdb/common/x86-xstate.h -@@ -39,9 +39,10 @@ - #define X86_XSTATE_X87_MASK X86_XSTATE_X87 - #define X86_XSTATE_SSE_MASK (X86_XSTATE_X87 | X86_XSTATE_SSE) - #define X86_XSTATE_AVX_MASK (X86_XSTATE_SSE_MASK | X86_XSTATE_AVX) --#define X86_XSTATE_MPX_MASK (X86_XSTATE_AVX_MASK | X86_XSTATE_MPX) -+#define X86_XSTATE_MPX_MASK (X86_XSTATE_SSE_MASK | X86_XSTATE_MPX) -+#define X86_XSTATE_AVX_MPX_MASK (X86_XSTATE_AVX_MASK | X86_XSTATE_MPX) - #define X86_XSTATE_AVX512_MASK (X86_XSTATE_AVX_MASK | X86_XSTATE_AVX512) --#define X86_XSTATE_MPX_AVX512_MASK (X86_XSTATE_MPX_MASK | X86_XSTATE_AVX512) -+#define X86_XSTATE_MPX_AVX512_MASK (X86_XSTATE_AVX_MPX_MASK | X86_XSTATE_AVX512) - - #define X86_XSTATE_ALL_MASK (X86_XSTATE_MPX_AVX512_MASK) - -diff --git a/gdb/features/Makefile b/gdb/features/Makefile -index 10173cf..e5c5154 100644 ---- a/gdb/features/Makefile -+++ b/gdb/features/Makefile -@@ -50,9 +50,11 @@ WHICH = aarch64 \ - i386/amd64 i386/amd64-linux \ - i386/i386-avx i386/i386-avx-linux \ - i386/i386-mpx i386/i386-mpx-linux \ -+ i386/i386-avx-mpx i386/i386-avx-mpx-linux \ - i386/i386-avx512 i386/i386-avx512-linux \ - i386/amd64-avx i386/amd64-avx-linux \ - i386/amd64-mpx i386/amd64-mpx-linux \ -+ i386/amd64-avx-mpx i386/amd64-avx-mpx-linux \ - i386/amd64-avx512 i386/amd64-avx512-linux \ - i386/x32 i386/x32-linux \ - i386/x32-avx i386/x32-avx-linux \ -@@ -83,7 +85,9 @@ i386/amd64-linux-expedite = rbp,rsp,rip - i386/i386-avx-expedite = ebp,esp,eip - i386/i386-avx-linux-expedite = ebp,esp,eip - i386/i386-mpx-expedite = ebp,esp,eip -+i386/i386-avx-mpx-expedite = ebp,esp,eip - 
i386/i386-mpx-linux-expedite = ebp,esp,eip -+i386/i386-avx-mpx-linux-expedite = ebp,esp,eip - i386/i386-avx512-expedite = ebp,esp,eip - i386/i386-avx512-linux-expedite = ebp,esp,eip - i386/i386-mmx-expedite = ebp,esp,eip -@@ -91,7 +95,9 @@ i386/i386-mmx-linux-expedite = ebp,esp,eip - i386/amd64-avx-expedite = rbp,rsp,rip - i386/amd64-avx-linux-expedite = rbp,rsp,rip - i386/amd64-mpx-expedite = rbp,rsp,rip -+i386/amd64-avx-mpx-expedite = rbp,rsp,rip - i386/amd64-mpx-linux-expedite = rbp,rsp,rip -+i386/amd64-avx-mpx-linux-expedite = rbp,rsp,rip - i386/amd64-avx512-expedite = rbp,rsp,rip - i386/amd64-avx512-linux-expedite = rbp,rsp,rip - i386/x32-expedite = rbp,rsp,rip -@@ -156,6 +162,8 @@ XMLTOC = \ - i386/amd64-linux.xml \ - i386/amd64-mpx-linux.xml \ - i386/amd64-mpx.xml \ -+ i386/amd64-avx-mpx-linux.xml \ -+ i386/amd64-avx-mpx.xml \ - i386/amd64.xml \ - i386/i386-avx-linux.xml \ - i386/i386-avx.xml \ -@@ -166,6 +174,8 @@ XMLTOC = \ - i386/i386-mmx.xml \ - i386/i386-mpx-linux.xml \ - i386/i386-mpx.xml \ -+ i386/i386-avx-mpx-linux.xml \ -+ i386/i386-avx-mpx.xml \ - i386/i386.xml \ - i386/x32-avx-linux.xml \ - i386/x32-avx.xml \ -@@ -271,6 +281,10 @@ $(outdir)/i386/i386-mpx.dat: i386/32bit-core.xml i386/32bit-avx.xml \ - i386/32bit-mpx.xml - $(outdir)/i386/i386-mpx-linux.dat: i386/32bit-core.xml i386/32bit-avx.xml \ - i386/32bit-linux.xml i386/32bit-mpx.xml -+$(outdir)/i386/i386-mpx-linux.dat: i386/32bit-core.xml \ -+ i386/32bit-linux.xml i386/32bit-mpx.xml -+$(outdir)/i386/i386-avx-mpx-linux.dat: i386/32bit-core.xml \ -+ i386/32bit-linux.xml i386/32bit-mpx.xml - $(outdir)/i386/i386-avx512.dat: i386/32bit-core.xml i386/32bit-avx.xml \ - i386/32bit-mpx.xml i386/32bit-avx512.xml - $(outdir)/i386/i386-avx512-linux.dat: i386/32bit-core.xml i386/32bit-avx.xml \ -@@ -282,8 +296,12 @@ $(outdir)/i386/amd64-avx-linux.dat: i386/64bit-core.xml i386/64bit-avx.xml \ - i386/64bit-linux.xml - $(outdir)/i386/amd64-mpx-linux.dat: i386/64bit-core.xml i386/64bit-avx.xml \ - i386/64bit-linux.xml i386/64bit-mpx.xml -+$(outdir)/i386/amd64-avx-mpx-linux.dat: i386/64bit-core.xml \ -+ i386/64bit-linux.xml i386/64bit-mpx.xml - $(outdir)/i386/amd64-mpx.dat: i386/64bit-core.xml i386/64bit-avx.xml \ - i386/64bit-mpx.xml -+$(outdir)/i386/amd64-avx-mpx.dat: i386/64bit-core.xml \ -+ i386/64bit-mpx.xml - $(outdir)/i386/amd64-avx512.dat: i386/64bit-core.xml i386/64bit-avx.xml \ - i386/64bit-mpx.xml i386/64bit-avx512.xml - $(outdir)/i386/amd64-avx512-linux.dat: i386/64bit-core.xml i386/64bit-avx.xml \ -diff --git a/gdb/features/i386/amd64-avx-mpx-linux.c b/gdb/features/i386/amd64-avx-mpx-linux.c -new file mode 100644 -index 0000000..37b4c81 ---- /dev/null -+++ b/gdb/features/i386/amd64-avx-mpx-linux.c -@@ -0,0 +1,211 @@ -+/* THIS FILE IS GENERATED. 
-*- buffer-read-only: t -*- vi:set ro: -+ Original: amd64-avx-mpx-linux.xml */ -+ -+#include "defs.h" -+#include "osabi.h" -+#include "target-descriptions.h" -+ -+struct target_desc *tdesc_amd64_avx_mpx_linux; -+static void -+initialize_tdesc_amd64_avx_mpx_linux (void) -+{ -+ struct target_desc *result = allocate_target_description (); -+ struct tdesc_feature *feature; -+ struct tdesc_type *field_type; -+ struct tdesc_type *type; -+ -+ set_tdesc_architecture (result, bfd_scan_arch ("i386:x86-64")); -+ -+ set_tdesc_osabi (result, osabi_from_tdesc_string ("GNU/Linux")); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.core"); -+ field_type = tdesc_create_flags (feature, "i386_eflags", 4); -+ tdesc_add_flag (field_type, 0, "CF"); -+ tdesc_add_flag (field_type, 1, ""); -+ tdesc_add_flag (field_type, 2, "PF"); -+ tdesc_add_flag (field_type, 4, "AF"); -+ tdesc_add_flag (field_type, 6, "ZF"); -+ tdesc_add_flag (field_type, 7, "SF"); -+ tdesc_add_flag (field_type, 8, "TF"); -+ tdesc_add_flag (field_type, 9, "IF"); -+ tdesc_add_flag (field_type, 10, "DF"); -+ tdesc_add_flag (field_type, 11, "OF"); -+ tdesc_add_flag (field_type, 14, "NT"); -+ tdesc_add_flag (field_type, 16, "RF"); -+ tdesc_add_flag (field_type, 17, "VM"); -+ tdesc_add_flag (field_type, 18, "AC"); -+ tdesc_add_flag (field_type, 19, "VIF"); -+ tdesc_add_flag (field_type, 20, "VIP"); -+ tdesc_add_flag (field_type, 21, "ID"); -+ -+ tdesc_create_reg (feature, "rax", 0, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rbx", 1, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rcx", 2, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rdx", 3, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rsi", 4, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rdi", 5, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rbp", 6, 1, NULL, 64, "data_ptr"); -+ tdesc_create_reg (feature, "rsp", 7, 1, NULL, 64, "data_ptr"); -+ tdesc_create_reg (feature, "r8", 8, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r9", 9, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r10", 10, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r11", 11, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r12", 12, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r13", 13, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r14", 14, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r15", 15, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rip", 16, 1, NULL, 64, "code_ptr"); -+ tdesc_create_reg (feature, "eflags", 17, 1, NULL, 32, "i386_eflags"); -+ tdesc_create_reg (feature, "cs", 18, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ss", 19, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ds", 20, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "es", 21, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "fs", 22, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "gs", 23, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "st0", 24, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st1", 25, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st2", 26, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st3", 27, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st4", 28, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st5", 29, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st6", 30, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st7", 31, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "fctrl", 32, 1, "float", 32, 
"int"); -+ tdesc_create_reg (feature, "fstat", 33, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "ftag", 34, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fiseg", 35, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fioff", 36, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "foseg", 37, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fooff", 38, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fop", 39, 1, "float", 32, "int"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.sse"); -+ field_type = tdesc_named_type (feature, "ieee_single"); -+ tdesc_create_vector (feature, "v4f", field_type, 4); -+ -+ field_type = tdesc_named_type (feature, "ieee_double"); -+ tdesc_create_vector (feature, "v2d", field_type, 2); -+ -+ field_type = tdesc_named_type (feature, "int8"); -+ tdesc_create_vector (feature, "v16i8", field_type, 16); -+ -+ field_type = tdesc_named_type (feature, "int16"); -+ tdesc_create_vector (feature, "v8i16", field_type, 8); -+ -+ field_type = tdesc_named_type (feature, "int32"); -+ tdesc_create_vector (feature, "v4i32", field_type, 4); -+ -+ field_type = tdesc_named_type (feature, "int64"); -+ tdesc_create_vector (feature, "v2i64", field_type, 2); -+ -+ type = tdesc_create_union (feature, "vec128"); -+ field_type = tdesc_named_type (feature, "v4f"); -+ tdesc_add_field (type, "v4_float", field_type); -+ field_type = tdesc_named_type (feature, "v2d"); -+ tdesc_add_field (type, "v2_double", field_type); -+ field_type = tdesc_named_type (feature, "v16i8"); -+ tdesc_add_field (type, "v16_int8", field_type); -+ field_type = tdesc_named_type (feature, "v8i16"); -+ tdesc_add_field (type, "v8_int16", field_type); -+ field_type = tdesc_named_type (feature, "v4i32"); -+ tdesc_add_field (type, "v4_int32", field_type); -+ field_type = tdesc_named_type (feature, "v2i64"); -+ tdesc_add_field (type, "v2_int64", field_type); -+ field_type = tdesc_named_type (feature, "uint128"); -+ tdesc_add_field (type, "uint128", field_type); -+ -+ field_type = tdesc_create_flags (feature, "i386_mxcsr", 4); -+ tdesc_add_flag (field_type, 0, "IE"); -+ tdesc_add_flag (field_type, 1, "DE"); -+ tdesc_add_flag (field_type, 2, "ZE"); -+ tdesc_add_flag (field_type, 3, "OE"); -+ tdesc_add_flag (field_type, 4, "UE"); -+ tdesc_add_flag (field_type, 5, "PE"); -+ tdesc_add_flag (field_type, 6, "DAZ"); -+ tdesc_add_flag (field_type, 7, "IM"); -+ tdesc_add_flag (field_type, 8, "DM"); -+ tdesc_add_flag (field_type, 9, "ZM"); -+ tdesc_add_flag (field_type, 10, "OM"); -+ tdesc_add_flag (field_type, 11, "UM"); -+ tdesc_add_flag (field_type, 12, "PM"); -+ tdesc_add_flag (field_type, 15, "FZ"); -+ -+ tdesc_create_reg (feature, "xmm0", 40, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm1", 41, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm2", 42, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm3", 43, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm4", 44, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm5", 45, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm6", 46, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm7", 47, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm8", 48, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm9", 49, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm10", 50, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm11", 51, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm12", 52, 1, NULL, 128, "vec128"); -+ tdesc_create_reg 
(feature, "xmm13", 53, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm14", 54, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm15", 55, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "mxcsr", 56, 1, "vector", 32, "i386_mxcsr"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.linux"); -+ tdesc_create_reg (feature, "orig_rax", 57, 1, NULL, 64, "int"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.avx"); -+ tdesc_create_reg (feature, "ymm0h", 58, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm1h", 59, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm2h", 60, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm3h", 61, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm4h", 62, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm5h", 63, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm6h", 64, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm7h", 65, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm8h", 66, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm9h", 67, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm10h", 68, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm11h", 69, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm12h", 70, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm13h", 71, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm14h", 72, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm15h", 73, 1, NULL, 128, "uint128"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.mpx"); -+ type = tdesc_create_struct (feature, "br128"); -+ field_type = tdesc_named_type (feature, "uint64"); -+ tdesc_add_field (type, "lbound", field_type); -+ field_type = tdesc_named_type (feature, "uint64"); -+ tdesc_add_field (type, "ubound_raw", field_type); -+ -+ type = tdesc_create_struct (feature, "_bndstatus"); -+ tdesc_set_struct_size (type, 8); -+ tdesc_add_bitfield (type, "bde", 2, 63); -+ tdesc_add_bitfield (type, "error", 0, 1); -+ -+ type = tdesc_create_union (feature, "status"); -+ field_type = tdesc_named_type (feature, "data_ptr"); -+ tdesc_add_field (type, "raw", field_type); -+ field_type = tdesc_named_type (feature, "_bndstatus"); -+ tdesc_add_field (type, "status", field_type); -+ -+ type = tdesc_create_struct (feature, "_bndcfgu"); -+ tdesc_set_struct_size (type, 8); -+ tdesc_add_bitfield (type, "base", 12, 63); -+ tdesc_add_bitfield (type, "reserved", 2, 11); -+ tdesc_add_bitfield (type, "preserved", 1, 1); -+ tdesc_add_bitfield (type, "enabled", 0, 0); -+ -+ type = tdesc_create_union (feature, "cfgu"); -+ field_type = tdesc_named_type (feature, "data_ptr"); -+ tdesc_add_field (type, "raw", field_type); -+ field_type = tdesc_named_type (feature, "_bndcfgu"); -+ tdesc_add_field (type, "config", field_type); -+ -+ tdesc_create_reg (feature, "bnd0raw", 74, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd1raw", 75, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd2raw", 76, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd3raw", 77, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bndcfgu", 78, 1, NULL, 64, "cfgu"); -+ tdesc_create_reg (feature, "bndstatus", 79, 1, NULL, 64, "status"); -+ -+ tdesc_amd64_avx_mpx_linux = result; -+} -diff --git a/gdb/features/i386/amd64-avx-mpx-linux.xml b/gdb/features/i386/amd64-avx-mpx-linux.xml -new file mode 100644 -index 0000000..526c700 ---- /dev/null -+++ 
b/gdb/features/i386/amd64-avx-mpx-linux.xml -@@ -0,0 +1,19 @@ -+ -+ -+ -+ -+ -+ -+ -+ i386:x86-64 -+ GNU/Linux -+ -+ -+ -+ -+ -+ -diff --git a/gdb/features/i386/amd64-avx-mpx.c b/gdb/features/i386/amd64-avx-mpx.c -new file mode 100644 -index 0000000..1279f73 ---- /dev/null -+++ b/gdb/features/i386/amd64-avx-mpx.c -@@ -0,0 +1,206 @@ -+/* THIS FILE IS GENERATED. -*- buffer-read-only: t -*- vi:set ro: -+ Original: amd64-avx-mpx.xml */ -+ -+#include "defs.h" -+#include "osabi.h" -+#include "target-descriptions.h" -+ -+struct target_desc *tdesc_amd64_avx_mpx; -+static void -+initialize_tdesc_amd64_avx_mpx (void) -+{ -+ struct target_desc *result = allocate_target_description (); -+ struct tdesc_feature *feature; -+ struct tdesc_type *field_type; -+ struct tdesc_type *type; -+ -+ set_tdesc_architecture (result, bfd_scan_arch ("i386:x86-64")); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.core"); -+ field_type = tdesc_create_flags (feature, "i386_eflags", 4); -+ tdesc_add_flag (field_type, 0, "CF"); -+ tdesc_add_flag (field_type, 1, ""); -+ tdesc_add_flag (field_type, 2, "PF"); -+ tdesc_add_flag (field_type, 4, "AF"); -+ tdesc_add_flag (field_type, 6, "ZF"); -+ tdesc_add_flag (field_type, 7, "SF"); -+ tdesc_add_flag (field_type, 8, "TF"); -+ tdesc_add_flag (field_type, 9, "IF"); -+ tdesc_add_flag (field_type, 10, "DF"); -+ tdesc_add_flag (field_type, 11, "OF"); -+ tdesc_add_flag (field_type, 14, "NT"); -+ tdesc_add_flag (field_type, 16, "RF"); -+ tdesc_add_flag (field_type, 17, "VM"); -+ tdesc_add_flag (field_type, 18, "AC"); -+ tdesc_add_flag (field_type, 19, "VIF"); -+ tdesc_add_flag (field_type, 20, "VIP"); -+ tdesc_add_flag (field_type, 21, "ID"); -+ -+ tdesc_create_reg (feature, "rax", 0, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rbx", 1, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rcx", 2, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rdx", 3, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rsi", 4, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rdi", 5, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rbp", 6, 1, NULL, 64, "data_ptr"); -+ tdesc_create_reg (feature, "rsp", 7, 1, NULL, 64, "data_ptr"); -+ tdesc_create_reg (feature, "r8", 8, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r9", 9, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r10", 10, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r11", 11, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r12", 12, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r13", 13, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r14", 14, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "r15", 15, 1, NULL, 64, "int64"); -+ tdesc_create_reg (feature, "rip", 16, 1, NULL, 64, "code_ptr"); -+ tdesc_create_reg (feature, "eflags", 17, 1, NULL, 32, "i386_eflags"); -+ tdesc_create_reg (feature, "cs", 18, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ss", 19, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ds", 20, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "es", 21, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "fs", 22, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "gs", 23, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "st0", 24, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st1", 25, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st2", 26, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st3", 27, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st4", 28, 1, NULL, 80, 
"i387_ext"); -+ tdesc_create_reg (feature, "st5", 29, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st6", 30, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st7", 31, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "fctrl", 32, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fstat", 33, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "ftag", 34, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fiseg", 35, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fioff", 36, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "foseg", 37, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fooff", 38, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fop", 39, 1, "float", 32, "int"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.sse"); -+ field_type = tdesc_named_type (feature, "ieee_single"); -+ tdesc_create_vector (feature, "v4f", field_type, 4); -+ -+ field_type = tdesc_named_type (feature, "ieee_double"); -+ tdesc_create_vector (feature, "v2d", field_type, 2); -+ -+ field_type = tdesc_named_type (feature, "int8"); -+ tdesc_create_vector (feature, "v16i8", field_type, 16); -+ -+ field_type = tdesc_named_type (feature, "int16"); -+ tdesc_create_vector (feature, "v8i16", field_type, 8); -+ -+ field_type = tdesc_named_type (feature, "int32"); -+ tdesc_create_vector (feature, "v4i32", field_type, 4); -+ -+ field_type = tdesc_named_type (feature, "int64"); -+ tdesc_create_vector (feature, "v2i64", field_type, 2); -+ -+ type = tdesc_create_union (feature, "vec128"); -+ field_type = tdesc_named_type (feature, "v4f"); -+ tdesc_add_field (type, "v4_float", field_type); -+ field_type = tdesc_named_type (feature, "v2d"); -+ tdesc_add_field (type, "v2_double", field_type); -+ field_type = tdesc_named_type (feature, "v16i8"); -+ tdesc_add_field (type, "v16_int8", field_type); -+ field_type = tdesc_named_type (feature, "v8i16"); -+ tdesc_add_field (type, "v8_int16", field_type); -+ field_type = tdesc_named_type (feature, "v4i32"); -+ tdesc_add_field (type, "v4_int32", field_type); -+ field_type = tdesc_named_type (feature, "v2i64"); -+ tdesc_add_field (type, "v2_int64", field_type); -+ field_type = tdesc_named_type (feature, "uint128"); -+ tdesc_add_field (type, "uint128", field_type); -+ -+ field_type = tdesc_create_flags (feature, "i386_mxcsr", 4); -+ tdesc_add_flag (field_type, 0, "IE"); -+ tdesc_add_flag (field_type, 1, "DE"); -+ tdesc_add_flag (field_type, 2, "ZE"); -+ tdesc_add_flag (field_type, 3, "OE"); -+ tdesc_add_flag (field_type, 4, "UE"); -+ tdesc_add_flag (field_type, 5, "PE"); -+ tdesc_add_flag (field_type, 6, "DAZ"); -+ tdesc_add_flag (field_type, 7, "IM"); -+ tdesc_add_flag (field_type, 8, "DM"); -+ tdesc_add_flag (field_type, 9, "ZM"); -+ tdesc_add_flag (field_type, 10, "OM"); -+ tdesc_add_flag (field_type, 11, "UM"); -+ tdesc_add_flag (field_type, 12, "PM"); -+ tdesc_add_flag (field_type, 15, "FZ"); -+ -+ tdesc_create_reg (feature, "xmm0", 40, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm1", 41, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm2", 42, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm3", 43, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm4", 44, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm5", 45, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm6", 46, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm7", 47, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm8", 48, 1, NULL, 128, "vec128"); -+ 
tdesc_create_reg (feature, "xmm9", 49, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm10", 50, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm11", 51, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm12", 52, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm13", 53, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm14", 54, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm15", 55, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "mxcsr", 56, 1, "vector", 32, "i386_mxcsr"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.avx"); -+ tdesc_create_reg (feature, "ymm0h", 57, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm1h", 58, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm2h", 59, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm3h", 60, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm4h", 61, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm5h", 62, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm6h", 63, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm7h", 64, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm8h", 65, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm9h", 66, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm10h", 67, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm11h", 68, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm12h", 69, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm13h", 70, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm14h", 71, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm15h", 72, 1, NULL, 128, "uint128"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.mpx"); -+ type = tdesc_create_struct (feature, "br128"); -+ field_type = tdesc_named_type (feature, "uint64"); -+ tdesc_add_field (type, "lbound", field_type); -+ field_type = tdesc_named_type (feature, "uint64"); -+ tdesc_add_field (type, "ubound_raw", field_type); -+ -+ type = tdesc_create_struct (feature, "_bndstatus"); -+ tdesc_set_struct_size (type, 8); -+ tdesc_add_bitfield (type, "bde", 2, 63); -+ tdesc_add_bitfield (type, "error", 0, 1); -+ -+ type = tdesc_create_union (feature, "status"); -+ field_type = tdesc_named_type (feature, "data_ptr"); -+ tdesc_add_field (type, "raw", field_type); -+ field_type = tdesc_named_type (feature, "_bndstatus"); -+ tdesc_add_field (type, "status", field_type); -+ -+ type = tdesc_create_struct (feature, "_bndcfgu"); -+ tdesc_set_struct_size (type, 8); -+ tdesc_add_bitfield (type, "base", 12, 63); -+ tdesc_add_bitfield (type, "reserved", 2, 11); -+ tdesc_add_bitfield (type, "preserved", 1, 1); -+ tdesc_add_bitfield (type, "enabled", 0, 0); -+ -+ type = tdesc_create_union (feature, "cfgu"); -+ field_type = tdesc_named_type (feature, "data_ptr"); -+ tdesc_add_field (type, "raw", field_type); -+ field_type = tdesc_named_type (feature, "_bndcfgu"); -+ tdesc_add_field (type, "config", field_type); -+ -+ tdesc_create_reg (feature, "bnd0raw", 73, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd1raw", 74, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd2raw", 75, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd3raw", 76, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bndcfgu", 77, 1, NULL, 64, "cfgu"); -+ tdesc_create_reg (feature, "bndstatus", 78, 1, NULL, 64, "status"); -+ -+ tdesc_amd64_avx_mpx = result; -+} -diff --git a/gdb/features/i386/amd64-avx-mpx.xml 
b/gdb/features/i386/amd64-avx-mpx.xml -new file mode 100644 -index 0000000..3eea6dd ---- /dev/null -+++ b/gdb/features/i386/amd64-avx-mpx.xml -@@ -0,0 +1,17 @@ -+ -+ -+ -+ -+ -+ -+ -+ i386:x86-64 -+ -+ -+ -+ -+ -diff --git a/gdb/features/i386/amd64-mpx-linux.c b/gdb/features/i386/amd64-mpx-linux.c -index 86a1774..5c1584b 100644 ---- a/gdb/features/i386/amd64-mpx-linux.c -+++ b/gdb/features/i386/amd64-mpx-linux.c -@@ -151,24 +151,6 @@ initialize_tdesc_amd64_mpx_linux (void) - feature = tdesc_create_feature (result, "org.gnu.gdb.i386.linux"); - tdesc_create_reg (feature, "orig_rax", 57, 1, NULL, 64, "int"); - -- feature = tdesc_create_feature (result, "org.gnu.gdb.i386.avx"); -- tdesc_create_reg (feature, "ymm0h", 58, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm1h", 59, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm2h", 60, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm3h", 61, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm4h", 62, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm5h", 63, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm6h", 64, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm7h", 65, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm8h", 66, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm9h", 67, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm10h", 68, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm11h", 69, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm12h", 70, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm13h", 71, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm14h", 72, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm15h", 73, 1, NULL, 128, "uint128"); -- - feature = tdesc_create_feature (result, "org.gnu.gdb.i386.mpx"); - type = tdesc_create_struct (feature, "br128"); - field_type = tdesc_named_type (feature, "uint64"); -@@ -200,12 +182,12 @@ initialize_tdesc_amd64_mpx_linux (void) - field_type = tdesc_named_type (feature, "_bndcfgu"); - tdesc_add_field (type, "config", field_type); - -- tdesc_create_reg (feature, "bnd0raw", 74, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd1raw", 75, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd2raw", 76, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd3raw", 77, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bndcfgu", 78, 1, NULL, 64, "cfgu"); -- tdesc_create_reg (feature, "bndstatus", 79, 1, NULL, 64, "status"); -+ tdesc_create_reg (feature, "bnd0raw", 58, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd1raw", 59, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd2raw", 60, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd3raw", 61, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bndcfgu", 62, 1, NULL, 64, "cfgu"); -+ tdesc_create_reg (feature, "bndstatus", 63, 1, NULL, 64, "status"); - - tdesc_amd64_mpx_linux = result; - } -diff --git a/gdb/features/i386/amd64-mpx-linux.xml b/gdb/features/i386/amd64-mpx-linux.xml -index 15e87b4..835126b 100644 ---- a/gdb/features/i386/amd64-mpx-linux.xml -+++ b/gdb/features/i386/amd64-mpx-linux.xml -@@ -14,6 +14,5 @@ - - - -- - - -diff --git a/gdb/features/i386/amd64-mpx.c b/gdb/features/i386/amd64-mpx.c -index 15ae5f7..4c852ef 100644 ---- a/gdb/features/i386/amd64-mpx.c -+++ b/gdb/features/i386/amd64-mpx.c -@@ -146,24 +146,6 @@ initialize_tdesc_amd64_mpx (void) - tdesc_create_reg (feature, "xmm15", 55, 1, NULL, 128, "vec128"); - 
tdesc_create_reg (feature, "mxcsr", 56, 1, "vector", 32, "i386_mxcsr"); - -- feature = tdesc_create_feature (result, "org.gnu.gdb.i386.avx"); -- tdesc_create_reg (feature, "ymm0h", 57, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm1h", 58, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm2h", 59, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm3h", 60, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm4h", 61, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm5h", 62, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm6h", 63, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm7h", 64, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm8h", 65, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm9h", 66, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm10h", 67, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm11h", 68, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm12h", 69, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm13h", 70, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm14h", 71, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm15h", 72, 1, NULL, 128, "uint128"); -- - feature = tdesc_create_feature (result, "org.gnu.gdb.i386.mpx"); - type = tdesc_create_struct (feature, "br128"); - field_type = tdesc_named_type (feature, "uint64"); -@@ -195,12 +177,12 @@ initialize_tdesc_amd64_mpx (void) - field_type = tdesc_named_type (feature, "_bndcfgu"); - tdesc_add_field (type, "config", field_type); - -- tdesc_create_reg (feature, "bnd0raw", 73, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd1raw", 74, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd2raw", 75, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd3raw", 76, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bndcfgu", 77, 1, NULL, 64, "cfgu"); -- tdesc_create_reg (feature, "bndstatus", 78, 1, NULL, 64, "status"); -+ tdesc_create_reg (feature, "bnd0raw", 57, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd1raw", 58, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd2raw", 59, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd3raw", 60, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bndcfgu", 61, 1, NULL, 64, "cfgu"); -+ tdesc_create_reg (feature, "bndstatus", 62, 1, NULL, 64, "status"); - - tdesc_amd64_mpx = result; - } -diff --git a/gdb/features/i386/amd64-mpx.xml b/gdb/features/i386/amd64-mpx.xml -index 21b6df3..e458736 100644 ---- a/gdb/features/i386/amd64-mpx.xml -+++ b/gdb/features/i386/amd64-mpx.xml -@@ -12,6 +12,5 @@ - i386:x86-64 - - -- - - -diff --git a/gdb/features/i386/i386-avx-mpx-linux.c b/gdb/features/i386/i386-avx-mpx-linux.c -new file mode 100644 -index 0000000..dd75403 ---- /dev/null -+++ b/gdb/features/i386/i386-avx-mpx-linux.c -@@ -0,0 +1,187 @@ -+/* THIS FILE IS GENERATED. 
-*- buffer-read-only: t -*- vi:set ro: -+ Original: i386-avx-mpx-linux.xml */ -+ -+#include "defs.h" -+#include "osabi.h" -+#include "target-descriptions.h" -+ -+struct target_desc *tdesc_i386_avx_mpx_linux; -+static void -+initialize_tdesc_i386_avx_mpx_linux (void) -+{ -+ struct target_desc *result = allocate_target_description (); -+ struct tdesc_feature *feature; -+ struct tdesc_type *field_type; -+ struct tdesc_type *type; -+ -+ set_tdesc_architecture (result, bfd_scan_arch ("i386")); -+ -+ set_tdesc_osabi (result, osabi_from_tdesc_string ("GNU/Linux")); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.core"); -+ field_type = tdesc_create_flags (feature, "i386_eflags", 4); -+ tdesc_add_flag (field_type, 0, "CF"); -+ tdesc_add_flag (field_type, 1, ""); -+ tdesc_add_flag (field_type, 2, "PF"); -+ tdesc_add_flag (field_type, 4, "AF"); -+ tdesc_add_flag (field_type, 6, "ZF"); -+ tdesc_add_flag (field_type, 7, "SF"); -+ tdesc_add_flag (field_type, 8, "TF"); -+ tdesc_add_flag (field_type, 9, "IF"); -+ tdesc_add_flag (field_type, 10, "DF"); -+ tdesc_add_flag (field_type, 11, "OF"); -+ tdesc_add_flag (field_type, 14, "NT"); -+ tdesc_add_flag (field_type, 16, "RF"); -+ tdesc_add_flag (field_type, 17, "VM"); -+ tdesc_add_flag (field_type, 18, "AC"); -+ tdesc_add_flag (field_type, 19, "VIF"); -+ tdesc_add_flag (field_type, 20, "VIP"); -+ tdesc_add_flag (field_type, 21, "ID"); -+ -+ tdesc_create_reg (feature, "eax", 0, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ecx", 1, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "edx", 2, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ebx", 3, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "esp", 4, 1, NULL, 32, "data_ptr"); -+ tdesc_create_reg (feature, "ebp", 5, 1, NULL, 32, "data_ptr"); -+ tdesc_create_reg (feature, "esi", 6, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "edi", 7, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "eip", 8, 1, NULL, 32, "code_ptr"); -+ tdesc_create_reg (feature, "eflags", 9, 1, NULL, 32, "i386_eflags"); -+ tdesc_create_reg (feature, "cs", 10, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ss", 11, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ds", 12, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "es", 13, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "fs", 14, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "gs", 15, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "st0", 16, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st1", 17, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st2", 18, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st3", 19, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st4", 20, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st5", 21, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st6", 22, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st7", 23, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "fctrl", 24, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fstat", 25, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "ftag", 26, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fiseg", 27, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fioff", 28, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "foseg", 29, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fooff", 30, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fop", 31, 1, "float", 32, "int"); -+ -+ feature = tdesc_create_feature (result, 
"org.gnu.gdb.i386.sse"); -+ field_type = tdesc_named_type (feature, "ieee_single"); -+ tdesc_create_vector (feature, "v4f", field_type, 4); -+ -+ field_type = tdesc_named_type (feature, "ieee_double"); -+ tdesc_create_vector (feature, "v2d", field_type, 2); -+ -+ field_type = tdesc_named_type (feature, "int8"); -+ tdesc_create_vector (feature, "v16i8", field_type, 16); -+ -+ field_type = tdesc_named_type (feature, "int16"); -+ tdesc_create_vector (feature, "v8i16", field_type, 8); -+ -+ field_type = tdesc_named_type (feature, "int32"); -+ tdesc_create_vector (feature, "v4i32", field_type, 4); -+ -+ field_type = tdesc_named_type (feature, "int64"); -+ tdesc_create_vector (feature, "v2i64", field_type, 2); -+ -+ type = tdesc_create_union (feature, "vec128"); -+ field_type = tdesc_named_type (feature, "v4f"); -+ tdesc_add_field (type, "v4_float", field_type); -+ field_type = tdesc_named_type (feature, "v2d"); -+ tdesc_add_field (type, "v2_double", field_type); -+ field_type = tdesc_named_type (feature, "v16i8"); -+ tdesc_add_field (type, "v16_int8", field_type); -+ field_type = tdesc_named_type (feature, "v8i16"); -+ tdesc_add_field (type, "v8_int16", field_type); -+ field_type = tdesc_named_type (feature, "v4i32"); -+ tdesc_add_field (type, "v4_int32", field_type); -+ field_type = tdesc_named_type (feature, "v2i64"); -+ tdesc_add_field (type, "v2_int64", field_type); -+ field_type = tdesc_named_type (feature, "uint128"); -+ tdesc_add_field (type, "uint128", field_type); -+ -+ field_type = tdesc_create_flags (feature, "i386_mxcsr", 4); -+ tdesc_add_flag (field_type, 0, "IE"); -+ tdesc_add_flag (field_type, 1, "DE"); -+ tdesc_add_flag (field_type, 2, "ZE"); -+ tdesc_add_flag (field_type, 3, "OE"); -+ tdesc_add_flag (field_type, 4, "UE"); -+ tdesc_add_flag (field_type, 5, "PE"); -+ tdesc_add_flag (field_type, 6, "DAZ"); -+ tdesc_add_flag (field_type, 7, "IM"); -+ tdesc_add_flag (field_type, 8, "DM"); -+ tdesc_add_flag (field_type, 9, "ZM"); -+ tdesc_add_flag (field_type, 10, "OM"); -+ tdesc_add_flag (field_type, 11, "UM"); -+ tdesc_add_flag (field_type, 12, "PM"); -+ tdesc_add_flag (field_type, 15, "FZ"); -+ -+ tdesc_create_reg (feature, "xmm0", 32, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm1", 33, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm2", 34, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm3", 35, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm4", 36, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm5", 37, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm6", 38, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm7", 39, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "mxcsr", 40, 1, "vector", 32, "i386_mxcsr"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.linux"); -+ tdesc_create_reg (feature, "orig_eax", 41, 1, NULL, 32, "int"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.avx"); -+ tdesc_create_reg (feature, "ymm0h", 42, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm1h", 43, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm2h", 44, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm3h", 45, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm4h", 46, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm5h", 47, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm6h", 48, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm7h", 49, 1, NULL, 128, "uint128"); -+ -+ feature = tdesc_create_feature 
(result, "org.gnu.gdb.i386.mpx"); -+ type = tdesc_create_struct (feature, "br128"); -+ field_type = tdesc_named_type (feature, "uint64"); -+ tdesc_add_field (type, "lbound", field_type); -+ field_type = tdesc_named_type (feature, "uint64"); -+ tdesc_add_field (type, "ubound_raw", field_type); -+ -+ type = tdesc_create_struct (feature, "_bndstatus"); -+ tdesc_set_struct_size (type, 8); -+ tdesc_add_bitfield (type, "bde", 2, 31); -+ tdesc_add_bitfield (type, "error", 0, 1); -+ -+ type = tdesc_create_union (feature, "status"); -+ field_type = tdesc_named_type (feature, "data_ptr"); -+ tdesc_add_field (type, "raw", field_type); -+ field_type = tdesc_named_type (feature, "_bndstatus"); -+ tdesc_add_field (type, "status", field_type); -+ -+ type = tdesc_create_struct (feature, "_bndcfgu"); -+ tdesc_set_struct_size (type, 8); -+ tdesc_add_bitfield (type, "base", 12, 31); -+ tdesc_add_bitfield (type, "reserved", 2, 11); -+ tdesc_add_bitfield (type, "preserved", 1, 1); -+ tdesc_add_bitfield (type, "enabled", 0, 1); -+ -+ type = tdesc_create_union (feature, "cfgu"); -+ field_type = tdesc_named_type (feature, "data_ptr"); -+ tdesc_add_field (type, "raw", field_type); -+ field_type = tdesc_named_type (feature, "_bndcfgu"); -+ tdesc_add_field (type, "config", field_type); -+ -+ tdesc_create_reg (feature, "bnd0raw", 50, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd1raw", 51, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd2raw", 52, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd3raw", 53, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bndcfgu", 54, 1, NULL, 64, "cfgu"); -+ tdesc_create_reg (feature, "bndstatus", 55, 1, NULL, 64, "status"); -+ -+ tdesc_i386_avx_mpx_linux = result; -+} -diff --git a/gdb/features/i386/i386-avx-mpx-linux.xml b/gdb/features/i386/i386-avx-mpx-linux.xml -new file mode 100644 -index 0000000..c9a1a61 ---- /dev/null -+++ b/gdb/features/i386/i386-avx-mpx-linux.xml -@@ -0,0 +1,19 @@ -+ -+ -+ -+ -+ -+ -+ -+ i386 -+ GNU/Linux -+ -+ -+ -+ -+ -+ -diff --git a/gdb/features/i386/i386-avx-mpx.c b/gdb/features/i386/i386-avx-mpx.c -new file mode 100644 -index 0000000..f104e4b ---- /dev/null -+++ b/gdb/features/i386/i386-avx-mpx.c -@@ -0,0 +1,182 @@ -+/* THIS FILE IS GENERATED. 
-*- buffer-read-only: t -*- vi:set ro: -+ Original: i386-avx-mpx.xml */ -+ -+#include "defs.h" -+#include "osabi.h" -+#include "target-descriptions.h" -+ -+struct target_desc *tdesc_i386_avx_mpx; -+static void -+initialize_tdesc_i386_avx_mpx (void) -+{ -+ struct target_desc *result = allocate_target_description (); -+ struct tdesc_feature *feature; -+ struct tdesc_type *field_type; -+ struct tdesc_type *type; -+ -+ set_tdesc_architecture (result, bfd_scan_arch ("i386")); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.core"); -+ field_type = tdesc_create_flags (feature, "i386_eflags", 4); -+ tdesc_add_flag (field_type, 0, "CF"); -+ tdesc_add_flag (field_type, 1, ""); -+ tdesc_add_flag (field_type, 2, "PF"); -+ tdesc_add_flag (field_type, 4, "AF"); -+ tdesc_add_flag (field_type, 6, "ZF"); -+ tdesc_add_flag (field_type, 7, "SF"); -+ tdesc_add_flag (field_type, 8, "TF"); -+ tdesc_add_flag (field_type, 9, "IF"); -+ tdesc_add_flag (field_type, 10, "DF"); -+ tdesc_add_flag (field_type, 11, "OF"); -+ tdesc_add_flag (field_type, 14, "NT"); -+ tdesc_add_flag (field_type, 16, "RF"); -+ tdesc_add_flag (field_type, 17, "VM"); -+ tdesc_add_flag (field_type, 18, "AC"); -+ tdesc_add_flag (field_type, 19, "VIF"); -+ tdesc_add_flag (field_type, 20, "VIP"); -+ tdesc_add_flag (field_type, 21, "ID"); -+ -+ tdesc_create_reg (feature, "eax", 0, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ecx", 1, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "edx", 2, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ebx", 3, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "esp", 4, 1, NULL, 32, "data_ptr"); -+ tdesc_create_reg (feature, "ebp", 5, 1, NULL, 32, "data_ptr"); -+ tdesc_create_reg (feature, "esi", 6, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "edi", 7, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "eip", 8, 1, NULL, 32, "code_ptr"); -+ tdesc_create_reg (feature, "eflags", 9, 1, NULL, 32, "i386_eflags"); -+ tdesc_create_reg (feature, "cs", 10, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ss", 11, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "ds", 12, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "es", 13, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "fs", 14, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "gs", 15, 1, NULL, 32, "int32"); -+ tdesc_create_reg (feature, "st0", 16, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st1", 17, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st2", 18, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st3", 19, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st4", 20, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st5", 21, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st6", 22, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "st7", 23, 1, NULL, 80, "i387_ext"); -+ tdesc_create_reg (feature, "fctrl", 24, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fstat", 25, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "ftag", 26, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fiseg", 27, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fioff", 28, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "foseg", 29, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fooff", 30, 1, "float", 32, "int"); -+ tdesc_create_reg (feature, "fop", 31, 1, "float", 32, "int"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.sse"); -+ field_type = tdesc_named_type (feature, "ieee_single"); -+ 
tdesc_create_vector (feature, "v4f", field_type, 4); -+ -+ field_type = tdesc_named_type (feature, "ieee_double"); -+ tdesc_create_vector (feature, "v2d", field_type, 2); -+ -+ field_type = tdesc_named_type (feature, "int8"); -+ tdesc_create_vector (feature, "v16i8", field_type, 16); -+ -+ field_type = tdesc_named_type (feature, "int16"); -+ tdesc_create_vector (feature, "v8i16", field_type, 8); -+ -+ field_type = tdesc_named_type (feature, "int32"); -+ tdesc_create_vector (feature, "v4i32", field_type, 4); -+ -+ field_type = tdesc_named_type (feature, "int64"); -+ tdesc_create_vector (feature, "v2i64", field_type, 2); -+ -+ type = tdesc_create_union (feature, "vec128"); -+ field_type = tdesc_named_type (feature, "v4f"); -+ tdesc_add_field (type, "v4_float", field_type); -+ field_type = tdesc_named_type (feature, "v2d"); -+ tdesc_add_field (type, "v2_double", field_type); -+ field_type = tdesc_named_type (feature, "v16i8"); -+ tdesc_add_field (type, "v16_int8", field_type); -+ field_type = tdesc_named_type (feature, "v8i16"); -+ tdesc_add_field (type, "v8_int16", field_type); -+ field_type = tdesc_named_type (feature, "v4i32"); -+ tdesc_add_field (type, "v4_int32", field_type); -+ field_type = tdesc_named_type (feature, "v2i64"); -+ tdesc_add_field (type, "v2_int64", field_type); -+ field_type = tdesc_named_type (feature, "uint128"); -+ tdesc_add_field (type, "uint128", field_type); -+ -+ field_type = tdesc_create_flags (feature, "i386_mxcsr", 4); -+ tdesc_add_flag (field_type, 0, "IE"); -+ tdesc_add_flag (field_type, 1, "DE"); -+ tdesc_add_flag (field_type, 2, "ZE"); -+ tdesc_add_flag (field_type, 3, "OE"); -+ tdesc_add_flag (field_type, 4, "UE"); -+ tdesc_add_flag (field_type, 5, "PE"); -+ tdesc_add_flag (field_type, 6, "DAZ"); -+ tdesc_add_flag (field_type, 7, "IM"); -+ tdesc_add_flag (field_type, 8, "DM"); -+ tdesc_add_flag (field_type, 9, "ZM"); -+ tdesc_add_flag (field_type, 10, "OM"); -+ tdesc_add_flag (field_type, 11, "UM"); -+ tdesc_add_flag (field_type, 12, "PM"); -+ tdesc_add_flag (field_type, 15, "FZ"); -+ -+ tdesc_create_reg (feature, "xmm0", 32, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm1", 33, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm2", 34, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm3", 35, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm4", 36, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm5", 37, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm6", 38, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "xmm7", 39, 1, NULL, 128, "vec128"); -+ tdesc_create_reg (feature, "mxcsr", 40, 1, "vector", 32, "i386_mxcsr"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.avx"); -+ tdesc_create_reg (feature, "ymm0h", 41, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm1h", 42, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm2h", 43, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm3h", 44, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm4h", 45, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm5h", 46, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm6h", 47, 1, NULL, 128, "uint128"); -+ tdesc_create_reg (feature, "ymm7h", 48, 1, NULL, 128, "uint128"); -+ -+ feature = tdesc_create_feature (result, "org.gnu.gdb.i386.mpx"); -+ type = tdesc_create_struct (feature, "br128"); -+ field_type = tdesc_named_type (feature, "uint64"); -+ tdesc_add_field (type, "lbound", field_type); -+ field_type = tdesc_named_type (feature, 
"uint64"); -+ tdesc_add_field (type, "ubound_raw", field_type); -+ -+ type = tdesc_create_struct (feature, "_bndstatus"); -+ tdesc_set_struct_size (type, 8); -+ tdesc_add_bitfield (type, "bde", 2, 31); -+ tdesc_add_bitfield (type, "error", 0, 1); -+ -+ type = tdesc_create_union (feature, "status"); -+ field_type = tdesc_named_type (feature, "data_ptr"); -+ tdesc_add_field (type, "raw", field_type); -+ field_type = tdesc_named_type (feature, "_bndstatus"); -+ tdesc_add_field (type, "status", field_type); -+ -+ type = tdesc_create_struct (feature, "_bndcfgu"); -+ tdesc_set_struct_size (type, 8); -+ tdesc_add_bitfield (type, "base", 12, 31); -+ tdesc_add_bitfield (type, "reserved", 2, 11); -+ tdesc_add_bitfield (type, "preserved", 1, 1); -+ tdesc_add_bitfield (type, "enabled", 0, 1); -+ -+ type = tdesc_create_union (feature, "cfgu"); -+ field_type = tdesc_named_type (feature, "data_ptr"); -+ tdesc_add_field (type, "raw", field_type); -+ field_type = tdesc_named_type (feature, "_bndcfgu"); -+ tdesc_add_field (type, "config", field_type); -+ -+ tdesc_create_reg (feature, "bnd0raw", 49, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd1raw", 50, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd2raw", 51, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd3raw", 52, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bndcfgu", 53, 1, NULL, 64, "cfgu"); -+ tdesc_create_reg (feature, "bndstatus", 54, 1, NULL, 64, "status"); -+ -+ tdesc_i386_avx_mpx = result; -+} -diff --git a/gdb/features/i386/i386-avx-mpx.xml b/gdb/features/i386/i386-avx-mpx.xml -new file mode 100644 -index 0000000..ab97367 ---- /dev/null -+++ b/gdb/features/i386/i386-avx-mpx.xml -@@ -0,0 +1,17 @@ -+ -+ -+ -+ -+ -+ -+ -+ i386 -+ -+ -+ -+ -+ -diff --git a/gdb/features/i386/i386-mpx-linux.c b/gdb/features/i386/i386-mpx-linux.c -index dbf8789..c6bafea 100644 ---- a/gdb/features/i386/i386-mpx-linux.c -+++ b/gdb/features/i386/i386-mpx-linux.c -@@ -135,16 +135,6 @@ initialize_tdesc_i386_mpx_linux (void) - feature = tdesc_create_feature (result, "org.gnu.gdb.i386.linux"); - tdesc_create_reg (feature, "orig_eax", 41, 1, NULL, 32, "int"); - -- feature = tdesc_create_feature (result, "org.gnu.gdb.i386.avx"); -- tdesc_create_reg (feature, "ymm0h", 42, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm1h", 43, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm2h", 44, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm3h", 45, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm4h", 46, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm5h", 47, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm6h", 48, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm7h", 49, 1, NULL, 128, "uint128"); -- - feature = tdesc_create_feature (result, "org.gnu.gdb.i386.mpx"); - type = tdesc_create_struct (feature, "br128"); - field_type = tdesc_named_type (feature, "uint64"); -@@ -176,12 +166,12 @@ initialize_tdesc_i386_mpx_linux (void) - field_type = tdesc_named_type (feature, "_bndcfgu"); - tdesc_add_field (type, "config", field_type); - -- tdesc_create_reg (feature, "bnd0raw", 50, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd1raw", 51, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd2raw", 52, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd3raw", 53, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bndcfgu", 54, 1, NULL, 64, "cfgu"); -- tdesc_create_reg (feature, "bndstatus", 55, 1, NULL, 64, "status"); -+ tdesc_create_reg 
(feature, "bnd0raw", 42, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd1raw", 43, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd2raw", 44, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd3raw", 45, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bndcfgu", 46, 1, NULL, 64, "cfgu"); -+ tdesc_create_reg (feature, "bndstatus", 47, 1, NULL, 64, "status"); - - tdesc_i386_mpx_linux = result; - } -diff --git a/gdb/features/i386/i386-mpx-linux.xml b/gdb/features/i386/i386-mpx-linux.xml -index c4004d6..4228cf5 100644 ---- a/gdb/features/i386/i386-mpx-linux.xml -+++ b/gdb/features/i386/i386-mpx-linux.xml -@@ -14,6 +14,5 @@ - - - -- - - -diff --git a/gdb/features/i386/i386-mpx.c b/gdb/features/i386/i386-mpx.c -index 1e04afd..430db3f 100644 ---- a/gdb/features/i386/i386-mpx.c -+++ b/gdb/features/i386/i386-mpx.c -@@ -130,16 +130,6 @@ initialize_tdesc_i386_mpx (void) - tdesc_create_reg (feature, "xmm7", 39, 1, NULL, 128, "vec128"); - tdesc_create_reg (feature, "mxcsr", 40, 1, "vector", 32, "i386_mxcsr"); - -- feature = tdesc_create_feature (result, "org.gnu.gdb.i386.avx"); -- tdesc_create_reg (feature, "ymm0h", 41, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm1h", 42, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm2h", 43, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm3h", 44, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm4h", 45, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm5h", 46, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm6h", 47, 1, NULL, 128, "uint128"); -- tdesc_create_reg (feature, "ymm7h", 48, 1, NULL, 128, "uint128"); -- - feature = tdesc_create_feature (result, "org.gnu.gdb.i386.mpx"); - type = tdesc_create_struct (feature, "br128"); - field_type = tdesc_named_type (feature, "uint64"); -@@ -171,12 +161,12 @@ initialize_tdesc_i386_mpx (void) - field_type = tdesc_named_type (feature, "_bndcfgu"); - tdesc_add_field (type, "config", field_type); - -- tdesc_create_reg (feature, "bnd0raw", 49, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd1raw", 50, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd2raw", 51, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bnd3raw", 52, 1, NULL, 128, "br128"); -- tdesc_create_reg (feature, "bndcfgu", 53, 1, NULL, 64, "cfgu"); -- tdesc_create_reg (feature, "bndstatus", 54, 1, NULL, 64, "status"); -+ tdesc_create_reg (feature, "bnd0raw", 41, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd1raw", 42, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd2raw", 43, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bnd3raw", 44, 1, NULL, 128, "br128"); -+ tdesc_create_reg (feature, "bndcfgu", 45, 1, NULL, 64, "cfgu"); -+ tdesc_create_reg (feature, "bndstatus", 46, 1, NULL, 64, "status"); - - tdesc_i386_mpx = result; - } -diff --git a/gdb/features/i386/i386-mpx.xml b/gdb/features/i386/i386-mpx.xml -index 52a68db..c1806cb 100644 ---- a/gdb/features/i386/i386-mpx.xml -+++ b/gdb/features/i386/i386-mpx.xml -@@ -12,6 +12,5 @@ - i386 - - -- - - -diff --git a/gdb/gdbserver/Makefile.in b/gdb/gdbserver/Makefile.in -index 1e874e3..dee2ea1 100644 ---- a/gdb/gdbserver/Makefile.in -+++ b/gdb/gdbserver/Makefile.in -@@ -376,9 +376,11 @@ clean: - rm -f xml-builtin.c stamp-xml - rm -f i386-avx.c i386-avx-linux.c - rm -f i386-mpx.c i386-mpx-linux.c -+ rm -f i386-avx-mpx.c i386-avx-mpx-linux.c - rm -f i386-avx512.c i386-avx512-linux.c - rm -f amd64-avx.c amd64-avx-linux.c - rm -f amd64-mpx.c amd64-mpx-linux.c -+ rm -f 
amd64-avx-mpx.c amd64-avx-mpx-linux.c - rm -f amd64-avx512.c amd64-avx512-linux.c - rm -f i386-mmx.c i386-mmx-linux.c - rm -f x32.c x32-linux.c -@@ -495,6 +497,21 @@ regcache-ipa.o: regcache.c - i386-linux-ipa.o: i386-linux.c - $(IPAGENT_COMPILE) $< - $(POSTCOMPILE) -+i386-mmx-linux-ipa.o: i386-mmx-linux.c -+ $(IPAGENT_COMPILE) $< -+ $(POSTCOMPILE) -+i386-avx-linux-ipa.o: i386-avx-linux.c -+ $(IPAGENT_COMPILE) $< -+ $(POSTCOMPILE) -+i386-mpx-linux-ipa.o: i386-mpx-linux.c -+ $(IPAGENT_COMPILE) $< -+ $(POSTCOMPILE) -+i386-avx-mpx-linux-ipa.o: i386-avx-mpx-linux.c -+ $(IPAGENT_COMPILE) $< -+ $(POSTCOMPILE) -+i386-avx512-linux-ipa.o: i386-avx512-linux.c -+ $(IPAGENT_COMPILE) $< -+ $(POSTCOMPILE) - linux-i386-ipa.o: linux-i386-ipa.c - $(IPAGENT_COMPILE) $< - $(POSTCOMPILE) -@@ -504,6 +521,18 @@ linux-amd64-ipa.o: linux-amd64-ipa.c - amd64-linux-ipa.o: amd64-linux.c - $(IPAGENT_COMPILE) $< - $(POSTCOMPILE) -+amd64-avx-linux-ipa.o: amd64-avx-linux.c -+ $(IPAGENT_COMPILE) $< -+ $(POSTCOMPILE) -+amd64-mpx-linux-ipa.o: amd64-mpx-linux.c -+ $(IPAGENT_COMPILE) $< -+ $(POSTCOMPILE) -+amd64-avx-mpx-linux-ipa.o: amd64-avx-mpx-linux.c -+ $(IPAGENT_COMPILE) $< -+ $(POSTCOMPILE) -+amd64-avx512-linux-ipa.o: amd64-avx512-linux.c -+ $(IPAGENT_COMPILE) $< -+ $(POSTCOMPILE) - linux-aarch64-ipa.o: linux-aarch64-ipa.c - $(IPAGENT_COMPILE) $< - $(POSTCOMPILE) -@@ -694,6 +723,10 @@ i386-mpx.c : $(srcdir)/../regformats/i386/i386-mpx.dat $(regdat_sh) - $(SHELL) $(regdat_sh) $(srcdir)/../regformats/i386/i386-mpx.dat i386-mpx.c - i386-mpx-linux.c : $(srcdir)/../regformats/i386/i386-mpx-linux.dat $(regdat_sh) - $(SHELL) $(regdat_sh) $(srcdir)/../regformats/i386/i386-mpx-linux.dat i386-mpx-linux.c -+i386-avx-mpx.c : $(srcdir)/../regformats/i386/i386-avx-mpx.dat $(regdat_sh) -+ $(SHELL) $(regdat_sh) $(srcdir)/../regformats/i386/i386-avx-mpx.dat i386-avx-mpx.c -+i386-avx-mpx-linux.c : $(srcdir)/../regformats/i386/i386-avx-mpx-linux.dat $(regdat_sh) -+ $(SHELL) $(regdat_sh) $(srcdir)/../regformats/i386/i386-avx-mpx-linux.dat i386-avx-mpx-linux.c - i386-mmx.c : $(srcdir)/../regformats/i386/i386-mmx.dat $(regdat_sh) - $(SHELL) $(regdat_sh) $(srcdir)/../regformats/i386/i386-mmx.dat i386-mmx.c - i386-mmx-linux.c : $(srcdir)/../regformats/i386/i386-mmx-linux.dat $(regdat_sh) -@@ -808,6 +841,10 @@ amd64-mpx.c : $(srcdir)/../regformats/i386/amd64-mpx.dat $(regdat_sh) - $(SHELL) $(regdat_sh) $(srcdir)/../regformats/i386/amd64-mpx.dat amd64-mpx.c - amd64-mpx-linux.c : $(srcdir)/../regformats/i386/amd64-mpx-linux.dat $(regdat_sh) - $(SHELL) $(regdat_sh) $(srcdir)/../regformats/i386/amd64-mpx-linux.dat amd64-mpx-linux.c -+amd64-avx-mpx.c : $(srcdir)/../regformats/i386/amd64-avx-mpx.dat $(regdat_sh) -+ $(SHELL) $(regdat_sh) $(srcdir)/../regformats/i386/amd64-avx-mpx.dat amd64-avx-mpx.c -+amd64-avx-mpx-linux.c : $(srcdir)/../regformats/i386/amd64-avx-mpx-linux.dat $(regdat_sh) -+ $(SHELL) $(regdat_sh) $(srcdir)/../regformats/i386/amd64-avx-mpx-linux.dat amd64-avx-mpx-linux.c - x32.c : $(srcdir)/../regformats/i386/x32.dat $(regdat_sh) - $(SHELL) $(regdat_sh) $(srcdir)/../regformats/i386/x32.dat x32.c - x32-linux.c : $(srcdir)/../regformats/i386/x32-linux.dat $(regdat_sh) -diff --git a/gdb/gdbserver/configure.srv b/gdb/gdbserver/configure.srv -index a54b9e7..4935a36 100644 ---- a/gdb/gdbserver/configure.srv -+++ b/gdb/gdbserver/configure.srv -@@ -24,20 +24,23 @@ - # Default hostio_last_error implementation - srv_hostio_err_objs="hostio-errno.o" - --srv_i386_regobj="i386.o i386-avx.o i386-avx512.o i386-mpx.o i386-mmx.o" 
--srv_i386_linux_regobj="i386-linux.o i386-avx-linux.o i386-avx512-linux.o i386-mpx-linux.o i386-mmx-linux.o" --srv_amd64_regobj="amd64.o amd64-avx.o amd64-avx512.o amd64-mpx.o x32.o x32-avx.o x32-avx512.o" --srv_amd64_linux_regobj="amd64-linux.o amd64-avx-linux.o amd64-avx512-linux.o amd64-mpx-linux.o x32-linux.o x32-avx-linux.o x32-avx512-linux.o" -+srv_i386_regobj="i386.o i386-avx.o i386-avx512.o i386-mpx.o i386-avx-mpx.o i386-mmx.o" -+srv_i386_linux_regobj="i386-linux.o i386-avx-linux.o i386-avx512-linux.o i386-mpx-linux.o i386-avx-mpx-linux.o i386-mmx-linux.o" -+srv_amd64_regobj="amd64.o amd64-avx.o amd64-avx512.o amd64-mpx.o amd64-avx-mpx.o x32.o x32-avx.o x32-avx512.o" -+srv_amd64_linux_regobj="amd64-linux.o amd64-avx-linux.o amd64-avx512-linux.o amd64-mpx-linux.o amd64-avx-mpx-linux.o x32-linux.o x32-avx-linux.o x32-avx512-linux.o" -+ -+ -+ipa_i386_linux_regobj="i386-linux-ipa.o i386-avx-linux-ipa.o i386-avx-mpx-linux-ipa.o i386-avx512-linux-ipa.o i386-mpx-linux-ipa.o i386-mmx-linux-ipa.o" -+ipa_amd64_linux_regobj="amd64-linux-ipa.o amd64-avx-linux-ipa.o amd64-avx-mpx-linux-ipa.o amd64-avx512-linux-ipa.o amd64-mpx-linux-ipa.o" - --ipa_i386_linux_regobj=i386-linux-ipa.o --ipa_amd64_linux_regobj=amd64-linux-ipa.o - - srv_i386_32bit_xmlfiles="i386/32bit-core.xml i386/32bit-sse.xml i386/32bit-avx.xml i386/32bit-avx512.xml i386/32bit-mpx.xml" - srv_i386_64bit_xmlfiles="i386/64bit-core.xml i386/64bit-sse.xml i386/64bit-avx.xml i386/64bit-avx512.xml i386/x32-core.xml i386/64bit-mpx.xml" --srv_i386_xmlfiles="i386/i386.xml i386/i386-avx.xml i386/i386-avx512.xml i386/i386-mpx.xml i386/i386-mmx.xml $srv_i386_32bit_xmlfiles" --srv_amd64_xmlfiles="i386/amd64.xml i386/amd64-avx.xml i386/amd64-avx512.xml i386/x32.xml i386/x32-avx.xml i386/x32-avx512.xml i386/amd64-mpx.xml $srv_i386_64bit_xmlfiles" --srv_i386_linux_xmlfiles="i386/i386-linux.xml i386/i386-avx-linux.xml i386/i386-avx512-linux.xml i386/i386-mmx-linux.xml i386/32bit-linux.xml i386/i386-mpx-linux.xml $srv_i386_32bit_xmlfiles" --srv_amd64_linux_xmlfiles="i386/amd64-linux.xml i386/amd64-avx-linux.xml i386/amd64-avx512-linux.xml i386/64bit-linux.xml i386/amd64-mpx-linux.xml i386/x32-linux.xml i386/x32-avx-linux.xml i386/x32-avx512-linux.xml $srv_i386_64bit_xmlfiles" -+srv_i386_xmlfiles="i386/i386.xml i386/i386-avx.xml i386/i386-avx512.xml i386/i386-mpx.xml i386/i386-avx-mpx.xml i386/i386-mmx.xml $srv_i386_32bit_xmlfiles" -+srv_amd64_xmlfiles="i386/amd64.xml i386/amd64-avx.xml i386/amd64-avx512.xml i386/x32.xml i386/x32-avx.xml i386/x32-avx512.xml i386/amd64-mpx.xml i386/amd64-avx-mpx.xml $srv_i386_64bit_xmlfiles" -+srv_i386_linux_xmlfiles="i386/i386-linux.xml i386/i386-avx-linux.xml i386/i386-avx512-linux.xml i386/i386-mmx-linux.xml i386/32bit-linux.xml i386/i386-mpx-linux.xml i386/i386-avx-mpx-linux.xml $srv_i386_32bit_xmlfiles" -+srv_amd64_linux_xmlfiles="i386/amd64-linux.xml i386/amd64-avx-linux.xml i386/amd64-avx512-linux.xml i386/64bit-linux.xml i386/amd64-mpx-linux.xml i386/amd64-avx-mpx-linux.xml i386/x32-linux.xml i386/x32-avx-linux.xml i386/x32-avx512-linux.xml $srv_i386_64bit_xmlfiles" -+ - - - # Linux object files. 
This is so we don't have to repeat -diff --git a/gdb/gdbserver/linux-aarch64-ipa.c b/gdb/gdbserver/linux-aarch64-ipa.c -index 758708d..f1eaa70 100644 ---- a/gdb/gdbserver/linux-aarch64-ipa.c -+++ b/gdb/gdbserver/linux-aarch64-ipa.c -@@ -143,9 +143,18 @@ gdb_agent_get_raw_reg (const unsigned char *raw_regs, int regnum) - + aarch64_ft_collect_regmap[regnum] * FT_CR_SIZE); - } - -+/* Return target_desc to use for IPA, given the tdesc index passed by -+ gdbserver. Index is ignored, since we have only one tdesc -+ at the moment. */ -+ -+const struct target_desc * -+get_ipa_tdesc (int idx) -+{ -+ return tdesc_aarch64; -+} -+ - void - initialize_low_tracepoint (void) - { - init_registers_aarch64 (); -- ipa_tdesc = tdesc_aarch64; - } -diff --git a/gdb/gdbserver/linux-amd64-ipa.c b/gdb/gdbserver/linux-amd64-ipa.c -index 2eeedcd..f9c72a0 100644 ---- a/gdb/gdbserver/linux-amd64-ipa.c -+++ b/gdb/gdbserver/linux-amd64-ipa.c -@@ -20,6 +20,7 @@ - - #include "server.h" - #include "tracepoint.h" -+#include "linux-x86-tdesc.h" - - /* Defined in auto-generated file amd64-linux.c. */ - void init_registers_amd64_linux (void); -@@ -166,9 +167,37 @@ supply_static_tracepoint_registers (struct regcache *regcache, - - #endif /* HAVE_UST */ - -+/* Return target_desc to use for IPA, given the tdesc index passed by -+ gdbserver. */ -+ -+const struct target_desc * -+get_ipa_tdesc (int idx) -+{ -+ switch (idx) -+ { -+ case X86_TDESC_SSE: -+ return tdesc_amd64_linux; -+ case X86_TDESC_AVX: -+ return tdesc_amd64_avx_linux; -+ case X86_TDESC_MPX: -+ return tdesc_amd64_mpx_linux; -+ case X86_TDESC_AVX_MPX: -+ return tdesc_amd64_avx_mpx_linux; -+ case X86_TDESC_AVX512: -+ return tdesc_amd64_avx512_linux; -+ default: -+ internal_error (__FILE__, __LINE__, -+ "unknown ipa tdesc index: %d", idx); -+ return tdesc_amd64_linux; -+ } -+} -+ - void - initialize_low_tracepoint (void) - { - init_registers_amd64_linux (); -- ipa_tdesc = tdesc_amd64_linux; -+ init_registers_amd64_avx_linux (); -+ init_registers_amd64_avx_mpx_linux (); -+ init_registers_amd64_mpx_linux (); -+ init_registers_amd64_avx512_linux (); - } -diff --git a/gdb/gdbserver/linux-i386-ipa.c b/gdb/gdbserver/linux-i386-ipa.c -index 11dc038..d7a8e7d 100644 ---- a/gdb/gdbserver/linux-i386-ipa.c -+++ b/gdb/gdbserver/linux-i386-ipa.c -@@ -21,6 +21,7 @@ - #include "server.h" - #include - #include "tracepoint.h" -+#include "linux-x86-tdesc.h" - - /* GDB register numbers. */ - -@@ -47,10 +48,6 @@ enum i386_gdb_regnum - - #define i386_num_regs 16 - --/* Defined in auto-generated file i386-linux.c. */ --void init_registers_i386_linux (void); --extern const struct target_desc *tdesc_i386_linux; -- - #define FT_CR_EAX 15 - #define FT_CR_ECX 14 - #define FT_CR_EDX 13 -@@ -247,10 +244,40 @@ initialize_fast_tracepoint_trampoline_buffer (void) - } - } - -+/* Return target_desc to use for IPA, given the tdesc index passed by -+ gdbserver. 
*/ -+ -+const struct target_desc * -+get_ipa_tdesc (int idx) -+{ -+ switch (idx) -+ { -+ case X86_TDESC_MMX: -+ return tdesc_i386_mmx_linux; -+ case X86_TDESC_SSE: -+ return tdesc_i386_linux; -+ case X86_TDESC_AVX: -+ return tdesc_i386_avx_linux; -+ case X86_TDESC_MPX: -+ return tdesc_i386_mpx_linux; -+ case X86_TDESC_AVX_MPX: -+ return tdesc_i386_avx_mpx_linux; -+ case X86_TDESC_AVX512: -+ return tdesc_i386_avx512_linux; -+ default: -+ internal_error (__FILE__, __LINE__, -+ "unknown ipa tdesc index: %d", idx); -+ return tdesc_i386_linux; -+ } -+} -+ - void - initialize_low_tracepoint (void) - { -+ init_registers_i386_mmx_linux (); - init_registers_i386_linux (); -- ipa_tdesc = tdesc_i386_linux; -+ init_registers_i386_avx_linux (); -+ init_registers_i386_mpx_linux (); -+ init_registers_i386_avx512_linux (); - initialize_fast_tracepoint_trampoline_buffer (); - } -diff --git a/gdb/gdbserver/linux-low.c b/gdb/gdbserver/linux-low.c -index 8b025bd..a142c2c 100644 ---- a/gdb/gdbserver/linux-low.c -+++ b/gdb/gdbserver/linux-low.c -@@ -6432,6 +6432,15 @@ linux_supports_catch_syscall (void) - } - - static int -+linux_get_ipa_tdesc_idx (void) -+{ -+ if (the_low_target.get_ipa_tdesc_idx == NULL) -+ return 0; -+ -+ return (*the_low_target.get_ipa_tdesc_idx) (); -+} -+ -+static int - linux_supports_tracepoints (void) - { - if (*the_low_target.supports_tracepoints == NULL) -@@ -7408,6 +7417,7 @@ static struct target_ops linux_target_ops = { - linux_breakpoint_kind_from_current_state, - linux_supports_software_single_step, - linux_supports_catch_syscall, -+ linux_get_ipa_tdesc_idx, - }; - - #ifdef HAVE_LINUX_REGSETS -diff --git a/gdb/gdbserver/linux-low.h b/gdb/gdbserver/linux-low.h -index 4ec8550..d4946c1 100644 ---- a/gdb/gdbserver/linux-low.h -+++ b/gdb/gdbserver/linux-low.h -@@ -246,6 +246,9 @@ struct linux_target_ops - due to SYSCALL_SIGTRAP. */ - void (*get_syscall_trapinfo) (struct regcache *regcache, - int *sysno, int *sysret); -+ -+ /* See target.h. */ -+ int (*get_ipa_tdesc_idx) (void); - }; - - extern struct linux_target_ops the_low_target; -diff --git a/gdb/gdbserver/linux-x86-low.c b/gdb/gdbserver/linux-x86-low.c -index 0c4954a..2535959 100644 ---- a/gdb/gdbserver/linux-x86-low.c -+++ b/gdb/gdbserver/linux-x86-low.c -@@ -45,57 +45,7 @@ - #include "nat/linux-nat.h" - #include "nat/x86-linux.h" - #include "nat/x86-linux-dregs.h" -- --#ifdef __x86_64__ --/* Defined in auto-generated file amd64-linux.c. */ --void init_registers_amd64_linux (void); --extern const struct target_desc *tdesc_amd64_linux; -- --/* Defined in auto-generated file amd64-avx-linux.c. */ --void init_registers_amd64_avx_linux (void); --extern const struct target_desc *tdesc_amd64_avx_linux; -- --/* Defined in auto-generated file amd64-avx512-linux.c. */ --void init_registers_amd64_avx512_linux (void); --extern const struct target_desc *tdesc_amd64_avx512_linux; -- --/* Defined in auto-generated file amd64-mpx-linux.c. */ --void init_registers_amd64_mpx_linux (void); --extern const struct target_desc *tdesc_amd64_mpx_linux; -- --/* Defined in auto-generated file x32-linux.c. */ --void init_registers_x32_linux (void); --extern const struct target_desc *tdesc_x32_linux; -- --/* Defined in auto-generated file x32-avx-linux.c. */ --void init_registers_x32_avx_linux (void); --extern const struct target_desc *tdesc_x32_avx_linux; -- --/* Defined in auto-generated file x32-avx512-linux.c. 
*/ --void init_registers_x32_avx512_linux (void); --extern const struct target_desc *tdesc_x32_avx512_linux; -- --#endif -- --/* Defined in auto-generated file i386-linux.c. */ --void init_registers_i386_linux (void); --extern const struct target_desc *tdesc_i386_linux; -- --/* Defined in auto-generated file i386-mmx-linux.c. */ --void init_registers_i386_mmx_linux (void); --extern const struct target_desc *tdesc_i386_mmx_linux; -- --/* Defined in auto-generated file i386-avx-linux.c. */ --void init_registers_i386_avx_linux (void); --extern const struct target_desc *tdesc_i386_avx_linux; -- --/* Defined in auto-generated file i386-avx512-linux.c. */ --void init_registers_i386_avx512_linux (void); --extern const struct target_desc *tdesc_i386_avx512_linux; -- --/* Defined in auto-generated file i386-mpx-linux.c. */ --void init_registers_i386_mpx_linux (void); --extern const struct target_desc *tdesc_i386_mpx_linux; -+#include "linux-x86-tdesc.h" - - #ifdef __x86_64__ - static struct target_desc *tdesc_amd64_linux_no_xml; -@@ -839,6 +789,9 @@ x86_linux_read_description (void) - case X86_XSTATE_AVX512_MASK: - return tdesc_amd64_avx512_linux; - -+ case X86_XSTATE_AVX_MPX_MASK: -+ return tdesc_amd64_avx_mpx_linux; -+ - case X86_XSTATE_MPX_MASK: - return tdesc_amd64_mpx_linux; - -@@ -886,6 +839,9 @@ x86_linux_read_description (void) - case (X86_XSTATE_MPX_MASK): - return tdesc_i386_mpx_linux; - -+ case (X86_XSTATE_AVX_MPX_MASK): -+ return tdesc_i386_avx_mpx_linux; -+ - case (X86_XSTATE_AVX_MASK): - return tdesc_i386_avx_linux; - -@@ -2891,6 +2847,42 @@ x86_supports_hardware_single_step (void) - return 1; - } - -+static int -+x86_get_ipa_tdesc_idx (void) -+{ -+ struct regcache *regcache = get_thread_regcache (current_thread, 0); -+ const struct target_desc *tdesc = regcache->tdesc; -+ -+#ifdef __x86_64__ -+ if (tdesc == tdesc_amd64_linux || tdesc == tdesc_amd64_linux_no_xml -+ || tdesc == tdesc_x32_linux) -+ return X86_TDESC_SSE; -+ if (tdesc == tdesc_amd64_avx_linux || tdesc == tdesc_x32_avx_linux) -+ return X86_TDESC_AVX; -+ if (tdesc == tdesc_amd64_mpx_linux) -+ return X86_TDESC_MPX; -+ if (tdesc == tdesc_amd64_avx_mpx_linux) -+ return X86_TDESC_AVX_MPX; -+ if (tdesc == tdesc_amd64_avx512_linux || tdesc == tdesc_x32_avx512_linux) -+ return X86_TDESC_AVX512; -+#endif -+ -+ if (tdesc == tdesc_i386_mmx_linux) -+ return X86_TDESC_MMX; -+ if (tdesc == tdesc_i386_linux || tdesc == tdesc_i386_linux_no_xml) -+ return X86_TDESC_SSE; -+ if (tdesc == tdesc_i386_avx_linux) -+ return X86_TDESC_AVX; -+ if (tdesc == tdesc_i386_mpx_linux) -+ return X86_TDESC_MPX; -+ if (tdesc == tdesc_i386_avx_mpx_linux) -+ return X86_TDESC_AVX_MPX; -+ if (tdesc == tdesc_i386_avx512_linux) -+ return X86_TDESC_AVX512; -+ -+ return 0; -+} -+ - /* This is initialized assuming an amd64 target. - x86_arch_setup will correct it for i386 or amd64 targets. 
*/ - -@@ -2934,6 +2926,7 @@ struct linux_target_ops the_low_target = - NULL, /* breakpoint_kind_from_current_state */ - x86_supports_hardware_single_step, - x86_get_syscall_trapinfo, -+ x86_get_ipa_tdesc_idx, - }; - - void -@@ -2945,6 +2938,7 @@ initialize_low_arch (void) - init_registers_amd64_avx_linux (); - init_registers_amd64_avx512_linux (); - init_registers_amd64_mpx_linux (); -+ init_registers_amd64_avx_mpx_linux (); - - init_registers_x32_linux (); - init_registers_x32_avx_linux (); -@@ -2959,6 +2953,7 @@ initialize_low_arch (void) - init_registers_i386_avx_linux (); - init_registers_i386_avx512_linux (); - init_registers_i386_mpx_linux (); -+ init_registers_i386_avx_mpx_linux (); - - tdesc_i386_linux_no_xml = XNEW (struct target_desc); - copy_target_description (tdesc_i386_linux_no_xml, tdesc_i386_linux); -diff --git a/gdb/gdbserver/linux-x86-tdesc.h b/gdb/gdbserver/linux-x86-tdesc.h -new file mode 100644 -index 0000000..720f50c ---- /dev/null -+++ b/gdb/gdbserver/linux-x86-tdesc.h -@@ -0,0 +1,98 @@ -+/* Low level support for x86 (i386 and x86-64), shared between gdbserver -+ and IPA. -+ -+ Copyright (C) 2016 Free Software Foundation, Inc. -+ -+ This file is part of GDB. -+ -+ This program is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by -+ the Free Software Foundation; either version 3 of the License, or -+ (at your option) any later version. -+ -+ This program is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License -+ along with this program. If not, see . */ -+ -+/* Note: since IPA obviously knows what ABI it's running on (i386 vs x86_64 -+ vs x32), it's sufficient to pass only the register set here. This, -+ together with the ABI known at IPA compile time, maps to a tdesc. */ -+ -+enum x86_linux_tdesc { -+ X86_TDESC_MMX = 0, -+ X86_TDESC_SSE = 1, -+ X86_TDESC_AVX = 2, -+ X86_TDESC_MPX = 3, -+ X86_TDESC_AVX_MPX = 4, -+ X86_TDESC_AVX512 = 5, -+}; -+ -+#ifdef __x86_64__ -+ -+#if defined __LP64__ || !defined IN_PROCESS_AGENT -+/* Defined in auto-generated file amd64-linux.c. */ -+void init_registers_amd64_linux (void); -+extern const struct target_desc *tdesc_amd64_linux; -+ -+/* Defined in auto-generated file amd64-avx-linux.c. */ -+void init_registers_amd64_avx_linux (void); -+extern const struct target_desc *tdesc_amd64_avx_linux; -+ -+/* Defined in auto-generated file amd64-avx512-linux.c. */ -+void init_registers_amd64_avx512_linux (void); -+extern const struct target_desc *tdesc_amd64_avx512_linux; -+ -+/* Defined in auto-generated file amd64-avx-mpx-linux.c. */ -+void init_registers_amd64_avx_mpx_linux (void); -+extern const struct target_desc *tdesc_amd64_avx_mpx_linux; -+ -+/* Defined in auto-generated file amd64-mpx-linux.c. */ -+void init_registers_amd64_mpx_linux (void); -+extern const struct target_desc *tdesc_amd64_mpx_linux; -+#endif -+ -+#if defined __ILP32__ || !defined IN_PROCESS_AGENT -+/* Defined in auto-generated file x32-linux.c. */ -+void init_registers_x32_linux (void); -+extern const struct target_desc *tdesc_x32_linux; -+ -+/* Defined in auto-generated file x32-avx-linux.c. */ -+void init_registers_x32_avx_linux (void); -+extern const struct target_desc *tdesc_x32_avx_linux; -+ -+/* Defined in auto-generated file x32-avx512-linux.c. 
*/ -+void init_registers_x32_avx512_linux (void); -+extern const struct target_desc *tdesc_x32_avx512_linux; -+#endif -+ -+#endif -+ -+#if defined __i386__ || !defined IN_PROCESS_AGENT -+/* Defined in auto-generated file i386-linux.c. */ -+void init_registers_i386_linux (void); -+extern const struct target_desc *tdesc_i386_linux; -+ -+/* Defined in auto-generated file i386-mmx-linux.c. */ -+void init_registers_i386_mmx_linux (void); -+extern const struct target_desc *tdesc_i386_mmx_linux; -+ -+/* Defined in auto-generated file i386-avx-linux.c. */ -+void init_registers_i386_avx_linux (void); -+extern const struct target_desc *tdesc_i386_avx_linux; -+ -+/* Defined in auto-generated file i386-avx-mpx-linux.c. */ -+void init_registers_i386_avx_mpx_linux (void); -+extern const struct target_desc *tdesc_i386_avx_mpx_linux; -+ -+/* Defined in auto-generated file i386-avx512-linux.c. */ -+void init_registers_i386_avx512_linux (void); -+extern const struct target_desc *tdesc_i386_avx512_linux; -+ -+/* Defined in auto-generated file i386-mpx-linux.c. */ -+void init_registers_i386_mpx_linux (void); -+extern const struct target_desc *tdesc_i386_mpx_linux; -+#endif -diff --git a/gdb/gdbserver/target.h b/gdb/gdbserver/target.h -index 5af2051..4c14c20 100644 ---- a/gdb/gdbserver/target.h -+++ b/gdb/gdbserver/target.h -@@ -471,6 +471,9 @@ struct target_ops - /* Return 1 if the target supports catch syscall, 0 (or leave the - callback NULL) otherwise. */ - int (*supports_catch_syscall) (void); -+ -+ /* Return tdesc index for IPA. */ -+ int (*get_ipa_tdesc_idx) (void); - }; - - extern struct target_ops *the_target; -@@ -550,6 +553,10 @@ int kill_inferior (int); - (the_target->supports_catch_syscall ? \ - (*the_target->supports_catch_syscall) () : 0) - -+#define target_get_ipa_tdesc_idx() \ -+ (the_target->get_ipa_tdesc_idx \ -+ ? (*the_target->get_ipa_tdesc_idx) () : 0) -+ - #define target_supports_tracepoints() \ - (the_target->supports_tracepoints \ - ? (*the_target->supports_tracepoints) () : 0) -diff --git a/gdb/gdbserver/tracepoint.c b/gdb/gdbserver/tracepoint.c -index 0671999..cc86677 100644 ---- a/gdb/gdbserver/tracepoint.c -+++ b/gdb/gdbserver/tracepoint.c -@@ -134,6 +134,7 @@ trace_vdebug (const char *fmt, ...) - # define ust_loaded IPA_SYM_EXPORTED_NAME (ust_loaded) - # define helper_thread_id IPA_SYM_EXPORTED_NAME (helper_thread_id) - # define cmd_buf IPA_SYM_EXPORTED_NAME (cmd_buf) -+# define ipa_tdesc_idx IPA_SYM_EXPORTED_NAME (ipa_tdesc_idx) - #endif - - #ifndef IN_PROCESS_AGENT -@@ -171,6 +172,7 @@ struct ipa_sym_addresses - CORE_ADDR addr_get_trace_state_variable_value; - CORE_ADDR addr_set_trace_state_variable_value; - CORE_ADDR addr_ust_loaded; -+ CORE_ADDR addr_ipa_tdesc_idx; - }; - - static struct -@@ -207,6 +209,7 @@ static struct - IPA_SYM(get_trace_state_variable_value), - IPA_SYM(set_trace_state_variable_value), - IPA_SYM(ust_loaded), -+ IPA_SYM(ipa_tdesc_idx), - }; - - static struct ipa_sym_addresses ipa_sym_addrs; -@@ -3231,6 +3234,11 @@ cmd_qtstart (char *packet) - - *packet = '\0'; - -+ /* Tell IPA about the correct tdesc. */ -+ if (write_inferior_integer (ipa_sym_addrs.addr_ipa_tdesc_idx, -+ target_get_ipa_tdesc_idx ())) -+ error ("Error setting ipa_tdesc_idx variable in lib"); -+ - /* Start out empty. */ - if (agent_loaded_p ()) - write_inferior_data_ptr (ipa_sym_addrs.addr_tracepoints, 0); -@@ -4714,19 +4722,20 @@ collect_data_at_step (struct tracepoint_hit_ctx *ctx, - #endif - - #ifdef IN_PROCESS_AGENT --/* The target description used by the IPA. 
Given that the IPA library -- is built for a specific architecture that is loaded into the -- inferior, there only needs to be one such description per -- build. */ --const struct target_desc *ipa_tdesc; -+/* The target description index for IPA. Passed from gdbserver, used -+ to select ipa_tdesc. */ -+EXTERN_C_PUSH -+IP_AGENT_EXPORT_VAR int ipa_tdesc_idx; -+EXTERN_C_POP - #endif - - static struct regcache * - get_context_regcache (struct tracepoint_hit_ctx *ctx) - { - struct regcache *regcache = NULL; -- - #ifdef IN_PROCESS_AGENT -+ const struct target_desc *ipa_tdesc = get_ipa_tdesc (ipa_tdesc_idx); -+ - if (ctx->type == fast_tracepoint) - { - struct fast_tracepoint_ctx *fctx = (struct fast_tracepoint_ctx *) ctx; -@@ -5799,11 +5808,13 @@ IP_AGENT_EXPORT_FUNC void - gdb_collect (struct tracepoint *tpoint, unsigned char *regs) - { - struct fast_tracepoint_ctx ctx; -+ const struct target_desc *ipa_tdesc; - - /* Don't do anything until the trace run is completely set up. */ - if (!tracing) - return; - -+ ipa_tdesc = get_ipa_tdesc (ipa_tdesc_idx); - ctx.base.type = fast_tracepoint; - ctx.regs = regs; - ctx.regcache_initted = 0; -@@ -6660,6 +6671,7 @@ gdb_probe (const struct marker *mdata, void *probe_private, - { - struct tracepoint *tpoint; - struct static_tracepoint_ctx ctx; -+ const struct target_desc *ipa_tdesc; - - /* Don't do anything until the trace run is completely set up. */ - if (!tracing) -@@ -6668,6 +6680,7 @@ gdb_probe (const struct marker *mdata, void *probe_private, - return; - } - -+ ipa_tdesc = get_ipa_tdesc (ipa_tdesc_idx); - ctx.base.type = static_tracepoint; - ctx.regcache_initted = 0; - ctx.regs = regs; -diff --git a/gdb/gdbserver/tracepoint.h b/gdb/gdbserver/tracepoint.h -index cab89cf..e30f4f7 100644 ---- a/gdb/gdbserver/tracepoint.h -+++ b/gdb/gdbserver/tracepoint.h -@@ -124,6 +124,7 @@ int handle_tracepoint_bkpts (struct thread_info *tinfo, CORE_ADDR stop_pc); - - #ifdef IN_PROCESS_AGENT - void initialize_low_tracepoint (void); -+const struct target_desc *get_ipa_tdesc (int idx); - void supply_fast_tracepoint_registers (struct regcache *regcache, - const unsigned char *regs); - void supply_static_tracepoint_registers (struct regcache *regcache, -@@ -131,9 +132,6 @@ void supply_static_tracepoint_registers (struct regcache *regcache, - CORE_ADDR pc); - void set_trampoline_buffer_space (CORE_ADDR begin, CORE_ADDR end, - char *errmsg); -- --extern const struct target_desc *ipa_tdesc; -- - #else - void stop_tracing (void); - -diff --git a/gdb/i386-linux-tdep.c b/gdb/i386-linux-tdep.c -index af39e78..df47efd 100644 ---- a/gdb/i386-linux-tdep.c -+++ b/gdb/i386-linux-tdep.c -@@ -47,6 +47,7 @@ - #include "features/i386/i386-linux.c" - #include "features/i386/i386-mmx-linux.c" - #include "features/i386/i386-mpx-linux.c" -+#include "features/i386/i386-avx-mpx-linux.c" - #include "features/i386/i386-avx-linux.c" - #include "features/i386/i386-avx512-linux.c" - -@@ -630,6 +631,8 @@ i386_linux_core_read_description (struct gdbarch *gdbarch, - return tdesc_i386_avx512_linux; - case X86_XSTATE_MPX_MASK: - return tdesc_i386_mpx_linux; -+ case X86_XSTATE_AVX_MPX_MASK: -+ return tdesc_i386_avx_mpx_linux; - case X86_XSTATE_AVX_MASK: - return tdesc_i386_avx_linux; - case X86_XSTATE_SSE_MASK: -@@ -1018,5 +1021,6 @@ _initialize_i386_linux_tdep (void) - initialize_tdesc_i386_mmx_linux (); - initialize_tdesc_i386_avx_linux (); - initialize_tdesc_i386_mpx_linux (); -+ initialize_tdesc_i386_avx_mpx_linux (); - initialize_tdesc_i386_avx512_linux (); - } -diff --git a/gdb/i386-linux-tdep.h 
b/gdb/i386-linux-tdep.h -index ecc9e31..0cb0c4d 100644 ---- a/gdb/i386-linux-tdep.h -+++ b/gdb/i386-linux-tdep.h -@@ -42,6 +42,7 @@ extern struct target_desc *tdesc_i386_linux; - extern struct target_desc *tdesc_i386_mmx_linux; - extern struct target_desc *tdesc_i386_avx_linux; - extern struct target_desc *tdesc_i386_mpx_linux; -+extern struct target_desc *tdesc_i386_avx_mpx_linux; - extern struct target_desc *tdesc_i386_avx512_linux; - - /* Format of XSAVE extended state is: -diff --git a/gdb/i386-tdep.c b/gdb/i386-tdep.c -index b706463..1ec0a08 100644 ---- a/gdb/i386-tdep.c -+++ b/gdb/i386-tdep.c -@@ -53,6 +53,7 @@ - #include "features/i386/i386.c" - #include "features/i386/i386-avx.c" - #include "features/i386/i386-mpx.c" -+#include "features/i386/i386-avx-mpx.c" - #include "features/i386/i386-avx512.c" - #include "features/i386/i386-mmx.c" - -@@ -8618,6 +8619,8 @@ i386_target_description (uint64_t xcr0) - case X86_XSTATE_MPX_AVX512_MASK: - case X86_XSTATE_AVX512_MASK: - return tdesc_i386_avx512; -+ case X86_XSTATE_AVX_MPX_MASK: -+ return tdesc_i386_avx_mpx; - case X86_XSTATE_MPX_MASK: - return tdesc_i386_mpx; - case X86_XSTATE_AVX_MASK: -@@ -8957,6 +8960,7 @@ Show Intel Memory Protection Extensions specific variables."), - initialize_tdesc_i386_mmx (); - initialize_tdesc_i386_avx (); - initialize_tdesc_i386_mpx (); -+ initialize_tdesc_i386_avx_mpx (); - initialize_tdesc_i386_avx512 (); - - /* Tell remote stub that we support XML target description. */ -diff --git a/gdb/regformats/i386/amd64-avx-mpx-linux.dat b/gdb/regformats/i386/amd64-avx-mpx-linux.dat -new file mode 100644 -index 0000000..6d45324 ---- /dev/null -+++ b/gdb/regformats/i386/amd64-avx-mpx-linux.dat -@@ -0,0 +1,85 @@ -+# THIS FILE IS GENERATED. -*- buffer-read-only: t -*- vi :set ro: -+# Generated from: i386/amd64-avx-mpx-linux.xml -+name:amd64_avx_mpx_linux -+xmltarget:amd64-avx-mpx-linux.xml -+expedite:rbp,rsp,rip -+64:rax -+64:rbx -+64:rcx -+64:rdx -+64:rsi -+64:rdi -+64:rbp -+64:rsp -+64:r8 -+64:r9 -+64:r10 -+64:r11 -+64:r12 -+64:r13 -+64:r14 -+64:r15 -+64:rip -+32:eflags -+32:cs -+32:ss -+32:ds -+32:es -+32:fs -+32:gs -+80:st0 -+80:st1 -+80:st2 -+80:st3 -+80:st4 -+80:st5 -+80:st6 -+80:st7 -+32:fctrl -+32:fstat -+32:ftag -+32:fiseg -+32:fioff -+32:foseg -+32:fooff -+32:fop -+128:xmm0 -+128:xmm1 -+128:xmm2 -+128:xmm3 -+128:xmm4 -+128:xmm5 -+128:xmm6 -+128:xmm7 -+128:xmm8 -+128:xmm9 -+128:xmm10 -+128:xmm11 -+128:xmm12 -+128:xmm13 -+128:xmm14 -+128:xmm15 -+32:mxcsr -+64:orig_rax -+128:ymm0h -+128:ymm1h -+128:ymm2h -+128:ymm3h -+128:ymm4h -+128:ymm5h -+128:ymm6h -+128:ymm7h -+128:ymm8h -+128:ymm9h -+128:ymm10h -+128:ymm11h -+128:ymm12h -+128:ymm13h -+128:ymm14h -+128:ymm15h -+128:bnd0raw -+128:bnd1raw -+128:bnd2raw -+128:bnd3raw -+64:bndcfgu -+64:bndstatus -diff --git a/gdb/regformats/i386/amd64-avx-mpx.dat b/gdb/regformats/i386/amd64-avx-mpx.dat -new file mode 100644 -index 0000000..d985641 ---- /dev/null -+++ b/gdb/regformats/i386/amd64-avx-mpx.dat -@@ -0,0 +1,84 @@ -+# THIS FILE IS GENERATED. 
-*- buffer-read-only: t -*- vi :set ro: -+# Generated from: i386/amd64-avx-mpx.xml -+name:amd64_avx_mpx -+xmltarget:amd64-avx-mpx.xml -+expedite:rbp,rsp,rip -+64:rax -+64:rbx -+64:rcx -+64:rdx -+64:rsi -+64:rdi -+64:rbp -+64:rsp -+64:r8 -+64:r9 -+64:r10 -+64:r11 -+64:r12 -+64:r13 -+64:r14 -+64:r15 -+64:rip -+32:eflags -+32:cs -+32:ss -+32:ds -+32:es -+32:fs -+32:gs -+80:st0 -+80:st1 -+80:st2 -+80:st3 -+80:st4 -+80:st5 -+80:st6 -+80:st7 -+32:fctrl -+32:fstat -+32:ftag -+32:fiseg -+32:fioff -+32:foseg -+32:fooff -+32:fop -+128:xmm0 -+128:xmm1 -+128:xmm2 -+128:xmm3 -+128:xmm4 -+128:xmm5 -+128:xmm6 -+128:xmm7 -+128:xmm8 -+128:xmm9 -+128:xmm10 -+128:xmm11 -+128:xmm12 -+128:xmm13 -+128:xmm14 -+128:xmm15 -+32:mxcsr -+128:ymm0h -+128:ymm1h -+128:ymm2h -+128:ymm3h -+128:ymm4h -+128:ymm5h -+128:ymm6h -+128:ymm7h -+128:ymm8h -+128:ymm9h -+128:ymm10h -+128:ymm11h -+128:ymm12h -+128:ymm13h -+128:ymm14h -+128:ymm15h -+128:bnd0raw -+128:bnd1raw -+128:bnd2raw -+128:bnd3raw -+64:bndcfgu -+64:bndstatus -diff --git a/gdb/regformats/i386/amd64-mpx-linux.dat b/gdb/regformats/i386/amd64-mpx-linux.dat -index 7a05cfe..523ae01 100644 ---- a/gdb/regformats/i386/amd64-mpx-linux.dat -+++ b/gdb/regformats/i386/amd64-mpx-linux.dat -@@ -61,22 +61,6 @@ expedite:rbp,rsp,rip - 128:xmm15 - 32:mxcsr - 64:orig_rax --128:ymm0h --128:ymm1h --128:ymm2h --128:ymm3h --128:ymm4h --128:ymm5h --128:ymm6h --128:ymm7h --128:ymm8h --128:ymm9h --128:ymm10h --128:ymm11h --128:ymm12h --128:ymm13h --128:ymm14h --128:ymm15h - 128:bnd0raw - 128:bnd1raw - 128:bnd2raw -diff --git a/gdb/regformats/i386/amd64-mpx.dat b/gdb/regformats/i386/amd64-mpx.dat -index 1b75f4d..43b8776 100644 ---- a/gdb/regformats/i386/amd64-mpx.dat -+++ b/gdb/regformats/i386/amd64-mpx.dat -@@ -60,22 +60,6 @@ expedite:rbp,rsp,rip - 128:xmm14 - 128:xmm15 - 32:mxcsr --128:ymm0h --128:ymm1h --128:ymm2h --128:ymm3h --128:ymm4h --128:ymm5h --128:ymm6h --128:ymm7h --128:ymm8h --128:ymm9h --128:ymm10h --128:ymm11h --128:ymm12h --128:ymm13h --128:ymm14h --128:ymm15h - 128:bnd0raw - 128:bnd1raw - 128:bnd2raw -diff --git a/gdb/regformats/i386/i386-avx-mpx-linux.dat b/gdb/regformats/i386/i386-avx-mpx-linux.dat -new file mode 100644 -index 0000000..831c476 ---- /dev/null -+++ b/gdb/regformats/i386/i386-avx-mpx-linux.dat -@@ -0,0 +1,61 @@ -+# THIS FILE IS GENERATED. -*- buffer-read-only: t -*- vi :set ro: -+# Generated from: i386/i386-avx-mpx-linux.xml -+name:i386_avx_mpx_linux -+xmltarget:i386-avx-mpx-linux.xml -+expedite:ebp,esp,eip -+32:eax -+32:ecx -+32:edx -+32:ebx -+32:esp -+32:ebp -+32:esi -+32:edi -+32:eip -+32:eflags -+32:cs -+32:ss -+32:ds -+32:es -+32:fs -+32:gs -+80:st0 -+80:st1 -+80:st2 -+80:st3 -+80:st4 -+80:st5 -+80:st6 -+80:st7 -+32:fctrl -+32:fstat -+32:ftag -+32:fiseg -+32:fioff -+32:foseg -+32:fooff -+32:fop -+128:xmm0 -+128:xmm1 -+128:xmm2 -+128:xmm3 -+128:xmm4 -+128:xmm5 -+128:xmm6 -+128:xmm7 -+32:mxcsr -+32:orig_eax -+128:ymm0h -+128:ymm1h -+128:ymm2h -+128:ymm3h -+128:ymm4h -+128:ymm5h -+128:ymm6h -+128:ymm7h -+128:bnd0raw -+128:bnd1raw -+128:bnd2raw -+128:bnd3raw -+64:bndcfgu -+64:bndstatus -diff --git a/gdb/regformats/i386/i386-avx-mpx.dat b/gdb/regformats/i386/i386-avx-mpx.dat -new file mode 100644 -index 0000000..8caef75 ---- /dev/null -+++ b/gdb/regformats/i386/i386-avx-mpx.dat -@@ -0,0 +1,60 @@ -+# THIS FILE IS GENERATED. 
-*- buffer-read-only: t -*- vi :set ro: -+# Generated from: i386/i386-avx-mpx.xml -+name:i386_avx_mpx -+xmltarget:i386-avx-mpx.xml -+expedite:ebp,esp,eip -+32:eax -+32:ecx -+32:edx -+32:ebx -+32:esp -+32:ebp -+32:esi -+32:edi -+32:eip -+32:eflags -+32:cs -+32:ss -+32:ds -+32:es -+32:fs -+32:gs -+80:st0 -+80:st1 -+80:st2 -+80:st3 -+80:st4 -+80:st5 -+80:st6 -+80:st7 -+32:fctrl -+32:fstat -+32:ftag -+32:fiseg -+32:fioff -+32:foseg -+32:fooff -+32:fop -+128:xmm0 -+128:xmm1 -+128:xmm2 -+128:xmm3 -+128:xmm4 -+128:xmm5 -+128:xmm6 -+128:xmm7 -+32:mxcsr -+128:ymm0h -+128:ymm1h -+128:ymm2h -+128:ymm3h -+128:ymm4h -+128:ymm5h -+128:ymm6h -+128:ymm7h -+128:bnd0raw -+128:bnd1raw -+128:bnd2raw -+128:bnd3raw -+64:bndcfgu -+64:bndstatus -diff --git a/gdb/regformats/i386/i386-mpx-linux.dat b/gdb/regformats/i386/i386-mpx-linux.dat -index b52b68d..1dcdce9 100644 ---- a/gdb/regformats/i386/i386-mpx-linux.dat -+++ b/gdb/regformats/i386/i386-mpx-linux.dat -@@ -45,14 +45,6 @@ expedite:ebp,esp,eip - 128:xmm7 - 32:mxcsr - 32:orig_eax --128:ymm0h --128:ymm1h --128:ymm2h --128:ymm3h --128:ymm4h --128:ymm5h --128:ymm6h --128:ymm7h - 128:bnd0raw - 128:bnd1raw - 128:bnd2raw -diff --git a/gdb/regformats/i386/i386-mpx.dat b/gdb/regformats/i386/i386-mpx.dat -index 0ea420b..dda5164 100644 ---- a/gdb/regformats/i386/i386-mpx.dat -+++ b/gdb/regformats/i386/i386-mpx.dat -@@ -44,14 +44,6 @@ expedite:ebp,esp,eip - 128:xmm6 - 128:xmm7 - 32:mxcsr --128:ymm0h --128:ymm1h --128:ymm2h --128:ymm3h --128:ymm4h --128:ymm5h --128:ymm6h --128:ymm7h - 128:bnd0raw - 128:bnd1raw - 128:bnd2raw -diff --git a/gdb/testsuite/gdb.trace/ftrace.exp b/gdb/testsuite/gdb.trace/ftrace.exp -index 15ad7e7..3a94471 100644 ---- a/gdb/testsuite/gdb.trace/ftrace.exp -+++ b/gdb/testsuite/gdb.trace/ftrace.exp -@@ -132,7 +132,6 @@ proc test_fast_tracepoints {} { - gdb_test "tfind pc *set_point" "Found trace frame .*" \ - "tfind set_point frame, first time" - -- setup_kfail "gdb/13808" "x86_64-*-linux*" - gdb_test "print globvar" " = 1" - - gdb_test "tfind pc *set_point" "Found trace frame .*" \ -diff --git a/gdb/x86-linux-nat.c b/gdb/x86-linux-nat.c -index 3cc18c1..107576f 100644 ---- a/gdb/x86-linux-nat.c -+++ b/gdb/x86-linux-nat.c -@@ -215,6 +215,11 @@ x86_linux_read_description (struct target_ops *ops) - return tdesc_x32_avx_linux; /* No MPX on x32 using AVX. */ - else - return tdesc_amd64_mpx_linux; -+ case X86_XSTATE_AVX_MPX_MASK: -+ if (is_x32) -+ return tdesc_x32_avx_linux; /* No MPX on x32 using AVX. 
*/ -+ else -+ return tdesc_amd64_avx_mpx_linux; - case X86_XSTATE_AVX_MASK: - if (is_x32) - return tdesc_x32_avx_linux; -@@ -237,6 +242,8 @@ x86_linux_read_description (struct target_ops *ops) - return tdesc_i386_avx512_linux; - case X86_XSTATE_MPX_MASK: - return tdesc_i386_mpx_linux; -+ case X86_XSTATE_AVX_MPX_MASK: -+ return tdesc_i386_avx_mpx_linux; - case X86_XSTATE_AVX_MASK: - return tdesc_i386_avx_linux; - default: diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/package_devel_gdb_patches_120-sigprocmask-invalid-call.patch b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/package_devel_gdb_patches_120-sigprocmask-invalid-call.patch new file mode 100644 index 000000000..c5484f789 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb/package_devel_gdb_patches_120-sigprocmask-invalid-call.patch @@ -0,0 +1,45 @@ +From 56893a61aa4f0270fa8d1197b9848247f90fce0d Mon Sep 17 00:00:00 2001 +From: Yousong Zhou +Date: Fri, 24 Mar 2017 10:36:03 +0800 +Subject: [PATCH] Fix invalid sigprocmask call + +The POSIX document says + + The pthread_sigmask() and sigprocmask() functions shall fail if: + + [EINVAL] + The value of the how argument is not equal to one of the defined values. + +and this is how musl-libc is currently doing. Fix the call to be safe +and correct + + [1] http://pubs.opengroup.org/onlinepubs/9699919799/functions/pthread_sigmask.html + +gdb/ChangeLog: +2017-03-24 Yousong Zhou + + * common/signals-state-save-restore.c (save_original_signals_state): + Fix invalid sigprocmask call. +--- +Upstream-Status: Pending [not author, cherry-picked from LEDE https://bugs.lede-project.org/index.php?do=details&task_id=637&openedfrom=-1%2Bweek] +Signed-off-by: André Draszik + gdb/ChangeLog | 5 +++++ + gdb/common/signals-state-save-restore.c | 2 +- + 2 files changed, 6 insertions(+), 1 deletion(-) + +diff --git a/gdb/common/signals-state-save-restore.c b/gdb/common/signals-state-save-restore.c +index d11a9ae..734335c 100644 +--- a/gdb/common/signals-state-save-restore.c ++++ b/gdb/common/signals-state-save-restore.c +@@ -41,7 +41,7 @@ save_original_signals_state (void) + int i; + int res; + +- res = sigprocmask (0, NULL, &original_signal_mask); ++ res = sigprocmask (SIG_BLOCK, NULL, &original_signal_mask); + if (res == -1) + perror_with_name (("sigprocmask")); + +-- +2.6.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb_7.11.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb_7.11.1.bb deleted file mode 100644 index 57cffc998..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb_7.11.1.bb +++ /dev/null @@ -1,26 +0,0 @@ -require gdb.inc -require gdb-${PV}.inc - -inherit python3-dir - -EXTRA_OEMAKE_append_libc-musl = "\ - gt_cv_func_gnugettext1_libc=yes \ - gt_cv_func_gnugettext2_libc=yes \ - " - -do_configure_prepend() { - if [ -n "${@bb.utils.contains('PACKAGECONFIG', 'python', 'python', '', d)}" ]; then - cat > ${WORKDIR}/python << EOF -#!/bin/sh -case "\$2" in - --includes) echo "-I${STAGING_INCDIR}/${PYTHON_DIR}${PYTHON_ABI}/" ;; - --ldflags) echo "-Wl,-rpath-link,${STAGING_LIBDIR}/.. -Wl,-rpath,${libdir}/.. 
-lpthread -ldl -lutil -lm -lpython${PYTHON_BASEVERSION}${PYTHON_ABI}" ;; - --exec-prefix) echo "${exec_prefix}" ;; - *) exit 1 ;; -esac -exit 0 -EOF - chmod +x ${WORKDIR}/python - fi -} -CFLAGS_append_libc-musl = " -Drpl_gettimeofday=gettimeofday" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb_7.12.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb_7.12.1.bb new file mode 100644 index 000000000..ea8fef122 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/gdb/gdb_7.12.1.bb @@ -0,0 +1,27 @@ +require gdb.inc +require gdb-${PV}.inc + +inherit python3-dir + +EXTRA_OEMAKE_append_libc-musl = "\ + gt_cv_func_gnugettext1_libc=yes \ + gt_cv_func_gnugettext2_libc=yes \ + gl_cv_func_gettimeofday_clobber=no \ + " + +do_configure_prepend() { + if [ "${@bb.utils.filter('PACKAGECONFIG', 'python', d)}" ]; then + cat > ${WORKDIR}/python << EOF +#!/bin/sh +case "\$2" in + --includes) echo "-I${STAGING_INCDIR}/${PYTHON_DIR}${PYTHON_ABI}/" ;; + --ldflags) echo "-Wl,-rpath-link,${STAGING_LIBDIR}/.. -Wl,-rpath,${libdir}/.. -lpthread -ldl -lutil -lm -lpython${PYTHON_BASEVERSION}${PYTHON_ABI}" ;; + --exec-prefix) echo "${exec_prefix}" ;; + *) exit 1 ;; +esac +exit 0 +EOF + chmod +x ${WORKDIR}/python + fi +} +CFLAGS_append_libc-musl = " -Drpl_gettimeofday=gettimeofday" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/git/git.inc b/import-layers/yocto-poky/meta/recipes-devtools/git/git.inc index 753b0472a..5c12ca8d4 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/git/git.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/git/git.inc @@ -13,6 +13,10 @@ S = "${WORKDIR}/git-${PV}" LIC_FILES_CHKSUM = "file://COPYING;md5=7c0d7ef03a7eb04ce795b0f60e68e7e1" +PACKAGECONFIG ??= "" +PACKAGECONFIG[cvsserver] = "" +PACKAGECONFIG[svn] = "" + EXTRA_OECONF = "--with-perl=${STAGING_BINDIR_NATIVE}/perl-native/perl \ --without-tcltk \ " @@ -46,7 +50,7 @@ do_install () { perl_native_fixup () { sed -i -e 's#${STAGING_BINDIR_NATIVE}/perl-native/#${bindir}/#' \ -e 's#${libdir}/perl-native/#${libdir}/#' \ - ${@d.getVar("PERLTOOLS", True).replace(' /',d.getVar('D', True) + '/')} + ${@d.getVar("PERLTOOLS").replace(' /',d.getVar('D') + '/')} # ${libdir} is not applicable here, perl-native files are always # installed to /usr/lib on both 32/64 bits targets. @@ -54,6 +58,23 @@ perl_native_fixup () { mkdir -p ${D}${libdir} mv ${D}${exec_prefix}/lib/perl-native/perl ${D}${libdir} rmdir -p ${D}${exec_prefix}/lib/perl-native || true + + if [ ! "${@bb.utils.filter('PACKAGECONFIG', 'cvsserver', d)}" ]; then + # Only install the git cvsserver command if explicitly requested + # as it requires the DBI Perl module, which does not exist in + # OE-Core. + rm ${D}${libexecdir}/git-core/git-cvsserver \ + ${D}${bindir}/git-cvsserver + fi + + if [ ! "${@bb.utils.filter('PACKAGECONFIG', 'svn', d)}" ]; then + # Only install the git svn command and all Git::SVN Perl modules + # if explicitly requested as they require the SVN::Core Perl + # module, which does not exist in OE-Core. 
+ rm -r ${D}${libexecdir}/git-core/git-svn \ + ${D}${libdir}/perl/site_perl/*/Git/SVN* + sed -i -e '/SVN/d' ${D}${libdir}/perl/site_perl/*/auto/Git/.packlist + fi } REL_GIT_EXEC_PATH = "${@os.path.relpath(libexecdir, bindir)}/git-core" @@ -118,6 +139,6 @@ FILES_${PN}-tk = " \ PACKAGES =+ "gitweb" FILES_gitweb = "${datadir}/gitweb/" - +RDEPENDS_gitweb = "perl" BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/git/git_2.11.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/git/git_2.11.1.bb new file mode 100644 index 000000000..f2f072c52 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/git/git_2.11.1.bb @@ -0,0 +1,11 @@ +require git.inc + +EXTRA_OECONF += "ac_cv_snprintf_returns_bogus=no \ + ac_cv_fread_reads_directories=${ac_cv_fread_reads_directories=yes} \ + " +EXTRA_OEMAKE += "NO_GETTEXT=1" + +SRC_URI[tarball.md5sum] = "6a7a73db076bb0514b602720669d685c" +SRC_URI[tarball.sha256sum] = "a1cdd7c820f92c44abb5003b36dc8cb7201ba38e8744802399f59c97285ca043" +SRC_URI[manpages.md5sum] = "e4268a6b514ccdb624b6450ff55881a3" +SRC_URI[manpages.sha256sum] = "ee567e7b0f95333816793714bb31c54e288cf8041f77a0092b85e62c9c2974f9" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/git/git_2.9.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/git/git_2.9.3.bb deleted file mode 100644 index d59f3d283..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/git/git_2.9.3.bb +++ /dev/null @@ -1,11 +0,0 @@ -require git.inc - -EXTRA_OECONF += "ac_cv_snprintf_returns_bogus=no \ - ac_cv_fread_reads_directories=${ac_cv_fread_reads_directories=yes} \ - " -EXTRA_OEMAKE += "NO_GETTEXT=1" - -SRC_URI[tarball.md5sum] = "c783361be894b8bfa5373811b1b65602" -SRC_URI[tarball.sha256sum] = "a252b6636b12d5ba57732c8469701544c26c2b1689933bd1b425e603cbb247c0" -SRC_URI[manpages.md5sum] = "66fafd61d65f9d2d99581133170eb186" -SRC_URI[manpages.sha256sum] = "8ea1a55b048fafbf0c0c6fcbca4b5b0f5e9917893221fc7345c09051d65832ce" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/gnu-config/gnu-config_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/gnu-config/gnu-config_git.bb index 072726f60..f1c77884e 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/gnu-config/gnu-config_git.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/gnu-config/gnu-config_git.bb @@ -11,7 +11,7 @@ INHIBIT_DEFAULT_DEPS = "1" SRCREV = "b576fa87c140b824466ef1638e945e87dc5c0343" PV = "20150728+git${SRCPV}" -SRC_URI = "git://git.sv.gnu.org/config.git \ +SRC_URI = "git://git.savannah.gnu.org/config.git \ file://gnu-configize.in" S = "${WORKDIR}/git" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4.inc b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4.inc new file mode 100644 index 000000000..2f500f32b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4.inc @@ -0,0 +1,16 @@ +require go-common.inc + +PV = "1.4.3" +GO_BASEVERSION = "1.4" +FILESEXTRAPATHS_prepend := "${FILE_DIRNAME}/go-${GO_BASEVERSION}:" + +SRC_URI += "\ + file://016-armhf-elf-header.patch \ + file://go-cross-backport-cmd-link-support-new-386-amd64-rel.patch \ + file://syslog.patch \ + file://0001-cmd-ld-set-alignment-for-the-.rel.plt-section-on-32-.patch \ +" + +LIC_FILES_CHKSUM = "file://LICENSE;md5=591778525c869cdde0ab5a1bf283cd81" +SRC_URI[md5sum] = "dfb604511115dd402a77a553a5923a04" +SRC_URI[sha256sum] = "9947fc705b0b841b5938c48b22dc33e9647ec0752bae66e50278df4f23f64959" diff --git 
a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/0001-cmd-ld-set-alignment-for-the-.rel.plt-section-on-32-.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/0001-cmd-ld-set-alignment-for-the-.rel.plt-section-on-32-.patch new file mode 100644 index 000000000..f2adc200b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/0001-cmd-ld-set-alignment-for-the-.rel.plt-section-on-32-.patch @@ -0,0 +1,33 @@ +From 855145d5c03c4b4faf60736c38d7a299c682af4a Mon Sep 17 00:00:00 2001 +From: Shenghou Ma +Date: Sat, 7 Feb 2015 14:06:02 -0500 +Subject: [PATCH] cmd/ld: set alignment for the .rel.plt section on 32-bit + architectures + +Fixes #9802. + +Change-Id: I22c52a37bdb23a14cc4615c9519431bb14ca81ca +Reviewed-on: https://go-review.googlesource.com/4170 +Reviewed-by: Ian Lance Taylor +--- +Upstream-Status: Backport +Signed-off-by: Khem Raj + + src/cmd/ld/elf.c | 1 + + 1 file changed, 1 insertion(+) + +diff --git a/src/cmd/ld/elf.c b/src/cmd/ld/elf.c +index 12ced98..97ed4bd 100644 +--- a/src/cmd/ld/elf.c ++++ b/src/cmd/ld/elf.c +@@ -1363,6 +1363,7 @@ asmbelf(vlong symo) + sh->type = SHT_REL; + sh->flags = SHF_ALLOC; + sh->entsize = ELF32RELSIZE; ++ sh->addralign = 4; + sh->link = elfshname(".dynsym")->shnum; + shsym(sh, linklookup(ctxt, ".rel.plt", 0)); + +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/016-armhf-elf-header.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/016-armhf-elf-header.patch new file mode 100644 index 000000000..e6e414e52 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/016-armhf-elf-header.patch @@ -0,0 +1,24 @@ +Description: Use correct ELF header for armhf binaries. +Author: Adam Conrad +Last-Update: 2013-07-08 + +Upstream-Status: Pending +Signed-off-by: Khem Raj + +Index: go/src/cmd/ld/elf.c +=================================================================== +--- go.orig/src/cmd/ld/elf.c 2015-02-20 10:49:58.763451586 -0800 ++++ go/src/cmd/ld/elf.c 2015-02-20 10:49:27.895478521 -0800 +@@ -57,7 +57,11 @@ + case '5': + // we use EABI on both linux/arm and freebsd/arm. + if(HEADTYPE == Hlinux || HEADTYPE == Hfreebsd) +- hdr.flags = 0x5000002; // has entry point, Version5 EABI ++#ifdef __ARM_PCS_VFP ++ hdr.flags = 0x5000402; // has entry point, Version5 EABI, hard-float ABI ++#else ++ hdr.flags = 0x5000202; // has entry point, Version5 EABI, soft-float ABI ++#endif + // fallthrough + default: + hdr.phoff = ELF32HDRSIZE; /* Must be be ELF32HDRSIZE: first PHdr must follow ELF header */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/go-cross-backport-cmd-link-support-new-386-amd64-rel.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/go-cross-backport-cmd-link-support-new-386-amd64-rel.patch new file mode 100644 index 000000000..95ca9d3aa --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/go-cross-backport-cmd-link-support-new-386-amd64-rel.patch @@ -0,0 +1,225 @@ +From d6eefad445831c161fca130f9bdf7b3848aac23c Mon Sep 17 00:00:00 2001 +From: Paul Gortmaker +Date: Tue, 29 Mar 2016 21:14:33 -0400 +Subject: [PATCH] go-cross: backport "cmd/link: support new 386/amd64 + relocations" + +Newer binutils won't support building older go-1.4.3 as per: + +https://github.com/golang/go/issues/13114 + +Upstream commit 914db9f060b1fd3eb1f74d48f3bd46a73d4ae9c7 (see subj) +was identified as the fix and nominated for 1.4.4 but that release +never happened. 
The paths in 1.4.3 aren't the same as go1.6beta1~662 +where this commit appeared, but the NetBSD folks indicated what a +1.4.3 backport would look like here: https://gnats.netbsd.org/50777 + +This is based on that, but without the BSD wrapper infrastructure +layer that makes things look like patches of patches. + +Signed-off-by: Paul Gortmaker + +Upstream-Status: Backport [ Partial ] + +diff --git a/src/cmd/6l/asm.c b/src/cmd/6l/asm.c +index 18b5aa311981..2e9d339aef87 100644 +--- a/src/cmd/6l/asm.c ++++ b/src/cmd/6l/asm.c +@@ -118,6 +118,8 @@ adddynrel(LSym *s, Reloc *r) + return; + + case 256 + R_X86_64_GOTPCREL: ++ case 256 + R_X86_64_GOTPCRELX: ++ case 256 + R_X86_64_REX_GOTPCRELX: + if(targ->type != SDYNIMPORT) { + // have symbol + if(r->off >= 2 && s->p[r->off-2] == 0x8b) { +diff --git a/src/cmd/8l/asm.c b/src/cmd/8l/asm.c +index 98c04240374f..cff29488e8af 100644 +--- a/src/cmd/8l/asm.c ++++ b/src/cmd/8l/asm.c +@@ -115,6 +115,7 @@ adddynrel(LSym *s, Reloc *r) + return; + + case 256 + R_386_GOT32: ++ case 256 + R_386_GOT32X: + if(targ->type != SDYNIMPORT) { + // have symbol + if(r->off >= 2 && s->p[r->off-2] == 0x8b) { +diff --git a/src/cmd/ld/elf.h b/src/cmd/ld/elf.h +index e84d996f2596..bbf2cfaa3cc0 100644 +--- a/src/cmd/ld/elf.h ++++ b/src/cmd/ld/elf.h +@@ -478,32 +478,47 @@ typedef struct { + * Relocation types. + */ + +-#define R_X86_64_NONE 0 /* No relocation. */ +-#define R_X86_64_64 1 /* Add 64 bit symbol value. */ +-#define R_X86_64_PC32 2 /* PC-relative 32 bit signed sym value. */ +-#define R_X86_64_GOT32 3 /* PC-relative 32 bit GOT offset. */ +-#define R_X86_64_PLT32 4 /* PC-relative 32 bit PLT offset. */ +-#define R_X86_64_COPY 5 /* Copy data from shared object. */ +-#define R_X86_64_GLOB_DAT 6 /* Set GOT entry to data address. */ +-#define R_X86_64_JMP_SLOT 7 /* Set GOT entry to code address. */ +-#define R_X86_64_RELATIVE 8 /* Add load address of shared object. */ +-#define R_X86_64_GOTPCREL 9 /* Add 32 bit signed pcrel offset to GOT. */ +-#define R_X86_64_32 10 /* Add 32 bit zero extended symbol value */ +-#define R_X86_64_32S 11 /* Add 32 bit sign extended symbol value */ +-#define R_X86_64_16 12 /* Add 16 bit zero extended symbol value */ +-#define R_X86_64_PC16 13 /* Add 16 bit signed extended pc relative symbol value */ +-#define R_X86_64_8 14 /* Add 8 bit zero extended symbol value */ +-#define R_X86_64_PC8 15 /* Add 8 bit signed extended pc relative symbol value */ +-#define R_X86_64_DTPMOD64 16 /* ID of module containing symbol */ +-#define R_X86_64_DTPOFF64 17 /* Offset in TLS block */ +-#define R_X86_64_TPOFF64 18 /* Offset in static TLS block */ +-#define R_X86_64_TLSGD 19 /* PC relative offset to GD GOT entry */ +-#define R_X86_64_TLSLD 20 /* PC relative offset to LD GOT entry */ +-#define R_X86_64_DTPOFF32 21 /* Offset in TLS block */ +-#define R_X86_64_GOTTPOFF 22 /* PC relative offset to IE GOT entry */ +-#define R_X86_64_TPOFF32 23 /* Offset in static TLS block */ +- +-#define R_X86_64_COUNT 24 /* Count of defined relocation types. 
*/ ++#define R_X86_64_NONE 0 ++#define R_X86_64_64 1 ++#define R_X86_64_PC32 2 ++#define R_X86_64_GOT32 3 ++#define R_X86_64_PLT32 4 ++#define R_X86_64_COPY 5 ++#define R_X86_64_GLOB_DAT 6 ++#define R_X86_64_JMP_SLOT 7 ++#define R_X86_64_RELATIVE 8 ++#define R_X86_64_GOTPCREL 9 ++#define R_X86_64_32 10 ++#define R_X86_64_32S 11 ++#define R_X86_64_16 12 ++#define R_X86_64_PC16 13 ++#define R_X86_64_8 14 ++#define R_X86_64_PC8 15 ++#define R_X86_64_DTPMOD64 16 ++#define R_X86_64_DTPOFF64 17 ++#define R_X86_64_TPOFF64 18 ++#define R_X86_64_TLSGD 19 ++#define R_X86_64_TLSLD 20 ++#define R_X86_64_DTPOFF32 21 ++#define R_X86_64_GOTTPOFF 22 ++#define R_X86_64_TPOFF32 23 ++#define R_X86_64_PC64 24 ++#define R_X86_64_GOTOFF64 25 ++#define R_X86_64_GOTPC32 26 ++#define R_X86_64_GOT64 27 ++#define R_X86_64_GOTPCREL64 28 ++#define R_X86_64_GOTPC64 29 ++#define R_X86_64_GOTPLT64 30 ++#define R_X86_64_PLTOFF64 31 ++#define R_X86_64_SIZE32 32 ++#define R_X86_64_SIZE64 33 ++#define R_X86_64_GOTPC32_TLSDEC 34 ++#define R_X86_64_TLSDESC_CALL 35 ++#define R_X86_64_TLSDESC 36 ++#define R_X86_64_IRELATIVE 37 ++#define R_X86_64_PC32_BND 40 ++#define R_X86_64_GOTPCRELX 41 ++#define R_X86_64_REX_GOTPCRELX 42 + + + #define R_ALPHA_NONE 0 /* No reloc */ +@@ -581,39 +596,42 @@ typedef struct { + #define R_ARM_COUNT 38 /* Count of defined relocation types. */ + + +-#define R_386_NONE 0 /* No relocation. */ +-#define R_386_32 1 /* Add symbol value. */ +-#define R_386_PC32 2 /* Add PC-relative symbol value. */ +-#define R_386_GOT32 3 /* Add PC-relative GOT offset. */ +-#define R_386_PLT32 4 /* Add PC-relative PLT offset. */ +-#define R_386_COPY 5 /* Copy data from shared object. */ +-#define R_386_GLOB_DAT 6 /* Set GOT entry to data address. */ +-#define R_386_JMP_SLOT 7 /* Set GOT entry to code address. */ +-#define R_386_RELATIVE 8 /* Add load address of shared object. */ +-#define R_386_GOTOFF 9 /* Add GOT-relative symbol address. */ +-#define R_386_GOTPC 10 /* Add PC-relative GOT table address. 
*/ +-#define R_386_TLS_TPOFF 14 /* Negative offset in static TLS block */ +-#define R_386_TLS_IE 15 /* Absolute address of GOT for -ve static TLS */ +-#define R_386_TLS_GOTIE 16 /* GOT entry for negative static TLS block */ +-#define R_386_TLS_LE 17 /* Negative offset relative to static TLS */ +-#define R_386_TLS_GD 18 /* 32 bit offset to GOT (index,off) pair */ +-#define R_386_TLS_LDM 19 /* 32 bit offset to GOT (index,zero) pair */ +-#define R_386_TLS_GD_32 24 /* 32 bit offset to GOT (index,off) pair */ +-#define R_386_TLS_GD_PUSH 25 /* pushl instruction for Sun ABI GD sequence */ +-#define R_386_TLS_GD_CALL 26 /* call instruction for Sun ABI GD sequence */ +-#define R_386_TLS_GD_POP 27 /* popl instruction for Sun ABI GD sequence */ +-#define R_386_TLS_LDM_32 28 /* 32 bit offset to GOT (index,zero) pair */ +-#define R_386_TLS_LDM_PUSH 29 /* pushl instruction for Sun ABI LD sequence */ +-#define R_386_TLS_LDM_CALL 30 /* call instruction for Sun ABI LD sequence */ +-#define R_386_TLS_LDM_POP 31 /* popl instruction for Sun ABI LD sequence */ +-#define R_386_TLS_LDO_32 32 /* 32 bit offset from start of TLS block */ +-#define R_386_TLS_IE_32 33 /* 32 bit offset to GOT static TLS offset entry */ +-#define R_386_TLS_LE_32 34 /* 32 bit offset within static TLS block */ +-#define R_386_TLS_DTPMOD32 35 /* GOT entry containing TLS index */ +-#define R_386_TLS_DTPOFF32 36 /* GOT entry containing TLS offset */ +-#define R_386_TLS_TPOFF32 37 /* GOT entry of -ve static TLS offset */ +- +-#define R_386_COUNT 38 /* Count of defined relocation types. */ ++#define R_386_NONE 0 ++#define R_386_32 1 ++#define R_386_PC32 2 ++#define R_386_GOT32 3 ++#define R_386_PLT32 4 ++#define R_386_COPY 5 ++#define R_386_GLOB_DAT 6 ++#define R_386_JMP_SLOT 7 ++#define R_386_RELATIVE 8 ++#define R_386_GOTOFF 9 ++#define R_386_GOTPC 10 ++#define R_386_TLS_TPOFF 14 ++#define R_386_TLS_IE 15 ++#define R_386_TLS_GOTIE 16 ++#define R_386_TLS_LE 17 ++#define R_386_TLS_GD 18 ++#define R_386_TLS_LDM 19 ++#define R_386_TLS_GD_32 24 ++#define R_386_TLS_GD_PUSH 25 ++#define R_386_TLS_GD_CALL 26 ++#define R_386_TLS_GD_POP 27 ++#define R_386_TLS_LDM_32 28 ++#define R_386_TLS_LDM_PUSH 29 ++#define R_386_TLS_LDM_CALL 30 ++#define R_386_TLS_LDM_POP 31 ++#define R_386_TLS_LDO_32 32 ++#define R_386_TLS_IE_32 33 ++#define R_386_TLS_LE_32 34 ++#define R_386_TLS_DTPMOD32 35 ++#define R_386_TLS_DTPOFF32 36 ++#define R_386_TLS_TPOFF32 37 ++#define R_386_TLS_GOTDESC 39 ++#define R_386_TLS_DESC_CALL 40 ++#define R_386_TLS_DESC 41 ++#define R_386_IRELATIVE 42 ++#define R_386_GOT32X 43 + + #define R_PPC_NONE 0 /* No relocation. 
*/ + #define R_PPC_ADDR32 1 +diff --git a/src/cmd/ld/ldelf.c b/src/cmd/ld/ldelf.c +index dd5fa0d2a839..2e2fbd17377f 100644 +--- a/src/cmd/ld/ldelf.c ++++ b/src/cmd/ld/ldelf.c +@@ -888,12 +888,15 @@ reltype(char *pn, int elftype, uchar *siz) + case R('6', R_X86_64_PC32): + case R('6', R_X86_64_PLT32): + case R('6', R_X86_64_GOTPCREL): ++ case R('6', R_X86_64_GOTPCRELX): ++ case R('6', R_X86_64_REX_GOTPCRELX): + case R('8', R_386_32): + case R('8', R_386_PC32): + case R('8', R_386_GOT32): + case R('8', R_386_PLT32): + case R('8', R_386_GOTOFF): + case R('8', R_386_GOTPC): ++ case R('8', R_386_GOT32X): + *siz = 4; + break; + case R('6', R_X86_64_64): +-- +2.7.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/syslog.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/syslog.patch new file mode 100644 index 000000000..29be06f1b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.4/syslog.patch @@ -0,0 +1,62 @@ +Add timeouts to logger + +Signed-off-by: Khem Raj +Upstream-Status: Pending + +diff -r -u go/src/log/syslog/syslog.go /home/achang/GOCOPY/go/src/log/syslog/syslog.go +--- go/src/log/syslog/syslog.go 2013-11-28 13:38:28.000000000 -0800 ++++ /home/achang/GOCOPY/go/src/log/syslog/syslog.go 2014-10-03 11:44:37.710403200 -0700 +@@ -33,6 +33,9 @@ + const severityMask = 0x07 + const facilityMask = 0xf8 + ++var writeTimeout = 1 * time.Second ++var connectTimeout = 1 * time.Second ++ + const ( + // Severity. + +@@ -100,6 +103,7 @@ + type serverConn interface { + writeString(p Priority, hostname, tag, s, nl string) error + close() error ++ setWriteDeadline(t time.Time) error + } + + type netConn struct { +@@ -273,7 +277,11 @@ + nl = "\n" + } + +- err := w.conn.writeString(p, w.hostname, w.tag, msg, nl) ++ err := w.conn.setWriteDeadline(time.Now().Add(writeTimeout)) ++ if err != nil { ++ return 0, err ++ } ++ err = w.conn.writeString(p, w.hostname, w.tag, msg, nl) + if err != nil { + return 0, err + } +@@ -305,6 +313,10 @@ + return n.conn.Close() + } + ++func (n *netConn) setWriteDeadline(t time.Time) error { ++ return n.conn.SetWriteDeadline(t) ++} ++ + // NewLogger creates a log.Logger whose output is written to + // the system log service with the specified priority. 
The logFlag + // argument is the flag set passed through to log.New to create +diff -r -u go/src/log/syslog/syslog_unix.go /home/achang/GOCOPY/go/src/log/syslog/syslog_unix.go +--- go/src/log/syslog/syslog_unix.go 2013-11-28 13:38:28.000000000 -0800 ++++ /home/achang/GOCOPY/go/src/log/syslog/syslog_unix.go 2014-10-03 11:44:39.010403175 -0700 +@@ -19,7 +19,7 @@ + logPaths := []string{"/dev/log", "/var/run/syslog"} + for _, network := range logTypes { + for _, path := range logPaths { +- conn, err := net.Dial(network, path) ++ conn, err := net.DialTimeout(network, path, connectTimeout) + if err != nil { + continue + } else { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6.inc b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6.inc new file mode 100644 index 000000000..769c1d8f2 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6.inc @@ -0,0 +1,19 @@ +require go-common.inc + +PV = "1.6.3" +GO_BASEVERSION = "1.6" +FILESEXTRAPATHS_prepend := "${FILE_DIRNAME}/go-${GO_BASEVERSION}:" + +LIC_FILES_CHKSUM = "file://LICENSE;md5=591778525c869cdde0ab5a1bf283cd81" + +SRC_URI += "\ + file://armhf-elf-header.patch \ + file://syslog.patch \ + file://fix-target-cc-for-build.patch \ + file://fix-cc-handling.patch \ + file://split-host-and-target-build.patch \ + file://gotooldir.patch \ +" +SRC_URI[md5sum] = "bf3fce6ccaadd310159c9e874220e2a2" +SRC_URI[sha256sum] = "6326aeed5f86cf18f16d6dc831405614f855e2d416a91fd3fdc334f772345b00" + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/armhf-elf-header.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/armhf-elf-header.patch new file mode 100644 index 000000000..1e3a16b31 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/armhf-elf-header.patch @@ -0,0 +1,23 @@ +Encode arm EABI ( hard/soft ) calling convention in ELF header + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/cmd/link/internal/ld/elf.go +=================================================================== +--- go.orig/src/cmd/link/internal/ld/elf.go ++++ go/src/cmd/link/internal/ld/elf.go +@@ -827,7 +827,13 @@ + // 32-bit architectures + case '5': + // we use EABI on both linux/arm and freebsd/arm. 
+- if HEADTYPE == obj.Hlinux || HEADTYPE == obj.Hfreebsd { ++ if HEADTYPE == obj.Hlinux { ++ if Ctxt.Goarm == 7 { ++ ehdr.flags = 0x5000402 // has entry point, Version5 EABI, hard float ++ } else { ++ ehdr.flags = 0x5000202 // has entry point, Version5 EABI, soft float ++ } ++ } else if HEADTYPE == obj.Hfreebsd { + // We set a value here that makes no indication of which + // float ABI the object uses, because this is information + // used by the dynamic linker to compare executables and diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/fix-cc-handling.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/fix-cc-handling.patch new file mode 100644 index 000000000..983323ace --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/fix-cc-handling.patch @@ -0,0 +1,50 @@ +Accept CC with multiple words in its name + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/cmd/go/build.go +=================================================================== +--- go.orig/src/cmd/go/build.go 2015-07-29 14:48:40.323185807 -0700 ++++ go/src/cmd/go/build.go 2015-07-30 07:37:40.529818586 -0700 +@@ -2805,12 +2805,24 @@ + return b.ccompilerCmd("CC", defaultCC, objdir) + } + ++// gccCmd returns a gcc command line prefix ++// defaultCC is defined in zdefaultcc.go, written by cmd/dist. ++func (b *builder) gccCmdForReal() []string { ++ return envList("CC", defaultCC) ++} ++ + // gxxCmd returns a g++ command line prefix + // defaultCXX is defined in zdefaultcc.go, written by cmd/dist. + func (b *builder) gxxCmd(objdir string) []string { + return b.ccompilerCmd("CXX", defaultCXX, objdir) + } + ++// gxxCmd returns a g++ command line prefix ++// defaultCXX is defined in zdefaultcc.go, written by cmd/dist. ++func (b *builder) gxxCmdForReal() []string { ++ return envList("CXX", defaultCXX) ++} ++ + // ccompilerCmd returns a command line prefix for the given environment + // variable and using the default command when the variable is empty. + func (b *builder) ccompilerCmd(envvar, defcmd, objdir string) []string { +Index: go/src/cmd/go/env.go +=================================================================== +--- go.orig/src/cmd/go/env.go 2015-07-29 14:48:40.323185807 -0700 ++++ go/src/cmd/go/env.go 2015-07-30 07:40:54.461655721 -0700 +@@ -52,10 +52,9 @@ + + if goos != "plan9" { + cmd := b.gccCmd(".") +- env = append(env, envVar{"CC", cmd[0]}) ++ env = append(env, envVar{"CC", strings.Join(b.gccCmdForReal(), " ")}) + env = append(env, envVar{"GOGCCFLAGS", strings.Join(cmd[3:], " ")}) +- cmd = b.gxxCmd(".") +- env = append(env, envVar{"CXX", cmd[0]}) ++ env = append(env, envVar{"CXX", strings.Join(b.gxxCmdForReal(), " ")}) + } + + if buildContext.CgoEnabled { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/fix-target-cc-for-build.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/fix-target-cc-for-build.patch new file mode 100644 index 000000000..2f6156ecd --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/fix-target-cc-for-build.patch @@ -0,0 +1,17 @@ +Put Quotes around CC_FOR_TARGET since it can be mutliple words e.g. in OE + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/make.bash +=================================================================== +--- go.orig/src/make.bash 2015-07-29 13:28:11.334031696 -0700 ++++ go/src/make.bash 2015-07-29 13:36:55.814465630 -0700 +@@ -158,7 +158,7 @@ + fi + + echo "##### Building packages and commands for $GOOS/$GOARCH." 
+-CC=$CC_FOR_TARGET "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v std cmd ++CC="$CC_FOR_TARGET" "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v std cmd + echo + + rm -f "$GOTOOLDIR"/go_bootstrap diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/gotooldir.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/gotooldir.patch new file mode 100644 index 000000000..94670259f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/gotooldir.patch @@ -0,0 +1,30 @@ +Define tooldir in relation to GOTOOLDIR env var + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/go/build/build.go +=================================================================== +--- go.orig/src/go/build/build.go ++++ go/src/go/build/build.go +@@ -1388,7 +1388,7 @@ func init() { + } + + // ToolDir is the directory containing build tools. +-var ToolDir = filepath.Join(runtime.GOROOT(), "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH) ++var ToolDir = envOr("GOTOOLDIR", filepath.Join(runtime.GOROOT(), "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH)) + + // IsLocalImport reports whether the import path is + // a local import path, like ".", "..", "./foo", or "../foo". +Index: go/src/cmd/go/build.go +=================================================================== +--- go.orig/src/cmd/go/build.go ++++ go/src/cmd/go/build.go +@@ -1312,7 +1312,7 @@ func (b *builder) build(a *action) (err + } + + cgoExe := tool("cgo") +- if a.cgo != nil && a.cgo.target != "" { ++ if a.cgo != nil && a.cgo.target != "" && os.Getenv("GOTOOLDIR") == "" { + cgoExe = a.cgo.target + } + outGo, outObj, err := b.cgo(a.p, cgoExe, obj, pcCFLAGS, pcLDFLAGS, cgofiles, gccfiles, cxxfiles, a.p.MFiles) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/split-host-and-target-build.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/split-host-and-target-build.patch new file mode 100644 index 000000000..afbae02b4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/split-host-and-target-build.patch @@ -0,0 +1,63 @@ +Add new option --target-only to build target components +Separates the host and target pieces of build + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/make.bash +=================================================================== +--- go.orig/src/make.bash ++++ go/src/make.bash +@@ -143,12 +143,23 @@ if [ "$1" = "--no-clean" ]; then + buildall="" + shift + fi +-./cmd/dist/dist bootstrap $buildall $GO_DISTFLAGS -v # builds go_bootstrap +-# Delay move of dist tool to now, because bootstrap may clear tool directory. +-mv cmd/dist/dist "$GOTOOLDIR"/dist +-echo + +-if [ "$GOHOSTARCH" != "$GOARCH" -o "$GOHOSTOS" != "$GOOS" ]; then ++do_host_build="yes" ++do_target_build="yes" ++if [ "$1" = "--target-only" ]; then ++ do_host_build="no" ++ shift ++elif [ "$1" = "--host-only" ]; then ++ do_target_build="no" ++ shift ++fi ++ ++if [ "$do_host_build" = "yes" ]; then ++ ./cmd/dist/dist bootstrap $buildall $GO_DISTFLAGS -v # builds go_bootstrap ++ # Delay move of dist tool to now, because bootstrap may clear tool directory. ++ mv cmd/dist/dist "$GOTOOLDIR"/dist ++ echo ++ + echo "##### Building packages and commands for host, $GOHOSTOS/$GOHOSTARCH." + # CC_FOR_TARGET is recorded as the default compiler for the go tool. When building for the host, however, + # use the host compiler, CC, from `cmd/dist/dist env` instead. 
+@@ -157,11 +168,20 @@ if [ "$GOHOSTARCH" != "$GOARCH" -o "$GOH + echo + fi + +-echo "##### Building packages and commands for $GOOS/$GOARCH." +-CC="$CC_FOR_TARGET" "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v std cmd +-echo ++if [ "$do_target_build" = "yes" ]; then ++ GO_INSTALL="${GO_TARGET_INSTALL:-std cmd}" ++ echo "##### Building packages and commands for $GOOS/$GOARCH." ++ if [ "$GOHOSTOS" = "$GOOS" -a "$GOHOSTARCH" = "$GOARCH" -a "$do_host_build" = "yes" ]; then ++ rm -rf ./host-tools ++ mkdir ./host-tools ++ mv "$GOTOOLDIR"/* ./host-tools ++ GOTOOLDIR="$PWD/host-tools" ++ fi ++ GOTOOLDIR="$GOTOOLDIR" CC="$CC_FOR_TARGET" "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v ${GO_INSTALL} ++ echo + +-rm -f "$GOTOOLDIR"/go_bootstrap ++ rm -f "$GOTOOLDIR"/go_bootstrap ++fi + + if [ "$1" != "--no-banner" ]; then + "$GOTOOLDIR"/dist banner diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/syslog.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/syslog.patch new file mode 100644 index 000000000..29be06f1b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.6/syslog.patch @@ -0,0 +1,62 @@ +Add timeouts to logger + +Signed-off-by: Khem Raj +Upstream-Status: Pending + +diff -r -u go/src/log/syslog/syslog.go /home/achang/GOCOPY/go/src/log/syslog/syslog.go +--- go/src/log/syslog/syslog.go 2013-11-28 13:38:28.000000000 -0800 ++++ /home/achang/GOCOPY/go/src/log/syslog/syslog.go 2014-10-03 11:44:37.710403200 -0700 +@@ -33,6 +33,9 @@ + const severityMask = 0x07 + const facilityMask = 0xf8 + ++var writeTimeout = 1 * time.Second ++var connectTimeout = 1 * time.Second ++ + const ( + // Severity. + +@@ -100,6 +103,7 @@ + type serverConn interface { + writeString(p Priority, hostname, tag, s, nl string) error + close() error ++ setWriteDeadline(t time.Time) error + } + + type netConn struct { +@@ -273,7 +277,11 @@ + nl = "\n" + } + +- err := w.conn.writeString(p, w.hostname, w.tag, msg, nl) ++ err := w.conn.setWriteDeadline(time.Now().Add(writeTimeout)) ++ if err != nil { ++ return 0, err ++ } ++ err = w.conn.writeString(p, w.hostname, w.tag, msg, nl) + if err != nil { + return 0, err + } +@@ -305,6 +313,10 @@ + return n.conn.Close() + } + ++func (n *netConn) setWriteDeadline(t time.Time) error { ++ return n.conn.SetWriteDeadline(t) ++} ++ + // NewLogger creates a log.Logger whose output is written to + // the system log service with the specified priority. 
The logFlag + // argument is the flag set passed through to log.New to create +diff -r -u go/src/log/syslog/syslog_unix.go /home/achang/GOCOPY/go/src/log/syslog/syslog_unix.go +--- go/src/log/syslog/syslog_unix.go 2013-11-28 13:38:28.000000000 -0800 ++++ /home/achang/GOCOPY/go/src/log/syslog/syslog_unix.go 2014-10-03 11:44:39.010403175 -0700 +@@ -19,7 +19,7 @@ + logPaths := []string{"/dev/log", "/var/run/syslog"} + for _, network := range logTypes { + for _, path := range logPaths { +- conn, err := net.Dial(network, path) ++ conn, err := net.DialTimeout(network, path, connectTimeout) + if err != nil { + continue + } else { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7.inc b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7.inc new file mode 100644 index 000000000..5c3004e87 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7.inc @@ -0,0 +1,19 @@ +require go-common.inc + +PV = "1.7.4" +GO_BASEVERSION = "1.7" +FILESEXTRAPATHS_prepend := "${FILE_DIRNAME}/go-${GO_BASEVERSION}:" + +LIC_FILES_CHKSUM = "file://LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707" + +SRC_URI += "\ + file://armhf-elf-header.patch \ + file://syslog.patch \ + file://fix-target-cc-for-build.patch \ + file://fix-cc-handling.patch \ + file://split-host-and-target-build.patch \ + file://gotooldir.patch \ +" +SRC_URI[md5sum] = "49c1076428a5d3b5ad7ac65233fcca2f" +SRC_URI[sha256sum] = "4c189111e9ba651a2bb3ee868aa881fab36b2f2da3409e80885ca758a6b614cc" + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/armhf-elf-header.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/armhf-elf-header.patch new file mode 100644 index 000000000..1e3a16b31 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/armhf-elf-header.patch @@ -0,0 +1,23 @@ +Encode arm EABI ( hard/soft ) calling convention in ELF header + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/cmd/link/internal/ld/elf.go +=================================================================== +--- go.orig/src/cmd/link/internal/ld/elf.go ++++ go/src/cmd/link/internal/ld/elf.go +@@ -827,7 +827,13 @@ + // 32-bit architectures + case '5': + // we use EABI on both linux/arm and freebsd/arm. 
+- if HEADTYPE == obj.Hlinux || HEADTYPE == obj.Hfreebsd { ++ if HEADTYPE == obj.Hlinux { ++ if Ctxt.Goarm == 7 { ++ ehdr.flags = 0x5000402 // has entry point, Version5 EABI, hard float ++ } else { ++ ehdr.flags = 0x5000202 // has entry point, Version5 EABI, soft float ++ } ++ } else if HEADTYPE == obj.Hfreebsd { + // We set a value here that makes no indication of which + // float ABI the object uses, because this is information + // used by the dynamic linker to compare executables and diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/fix-cc-handling.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/fix-cc-handling.patch new file mode 100644 index 000000000..a67caf41a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/fix-cc-handling.patch @@ -0,0 +1,50 @@ +Accept CC with multiple words in its name + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/cmd/go/build.go +=================================================================== +--- go.orig/src/cmd/go/build.go ++++ go/src/cmd/go/build.go +@@ -2991,12 +2991,24 @@ func (b *builder) gccCmd(objdir string) + return b.ccompilerCmd("CC", defaultCC, objdir) + } + ++// gccCmd returns a gcc command line prefix ++// defaultCC is defined in zdefaultcc.go, written by cmd/dist. ++func (b *builder) gccCmdForReal() []string { ++ return envList("CC", defaultCC) ++} ++ + // gxxCmd returns a g++ command line prefix + // defaultCXX is defined in zdefaultcc.go, written by cmd/dist. + func (b *builder) gxxCmd(objdir string) []string { + return b.ccompilerCmd("CXX", defaultCXX, objdir) + } + ++// gxxCmd returns a g++ command line prefix ++// defaultCXX is defined in zdefaultcc.go, written by cmd/dist. ++func (b *builder) gxxCmdForReal() []string { ++ return envList("CXX", defaultCXX) ++} ++ + // gfortranCmd returns a gfortran command line prefix. + func (b *builder) gfortranCmd(objdir string) []string { + return b.ccompilerCmd("FC", "gfortran", objdir) +Index: go/src/cmd/go/env.go +=================================================================== +--- go.orig/src/cmd/go/env.go ++++ go/src/cmd/go/env.go +@@ -51,10 +51,9 @@ func mkEnv() []envVar { + + if goos != "plan9" { + cmd := b.gccCmd(".") +- env = append(env, envVar{"CC", cmd[0]}) ++ env = append(env, envVar{"CC", strings.Join(b.gccCmdForReal(), " ")}) + env = append(env, envVar{"GOGCCFLAGS", strings.Join(cmd[3:], " ")}) +- cmd = b.gxxCmd(".") +- env = append(env, envVar{"CXX", cmd[0]}) ++ env = append(env, envVar{"CXX", strings.Join(b.gxxCmdForReal(), " ")}) + } + + if buildContext.CgoEnabled { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/fix-target-cc-for-build.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/fix-target-cc-for-build.patch new file mode 100644 index 000000000..2f6156ecd --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/fix-target-cc-for-build.patch @@ -0,0 +1,17 @@ +Put Quotes around CC_FOR_TARGET since it can be mutliple words e.g. in OE + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/make.bash +=================================================================== +--- go.orig/src/make.bash 2015-07-29 13:28:11.334031696 -0700 ++++ go/src/make.bash 2015-07-29 13:36:55.814465630 -0700 +@@ -158,7 +158,7 @@ + fi + + echo "##### Building packages and commands for $GOOS/$GOARCH." 
+-CC=$CC_FOR_TARGET "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v std cmd ++CC="$CC_FOR_TARGET" "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v std cmd + echo + + rm -f "$GOTOOLDIR"/go_bootstrap diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/gotooldir.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/gotooldir.patch new file mode 100644 index 000000000..94670259f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/gotooldir.patch @@ -0,0 +1,30 @@ +Define tooldir in relation to GOTOOLDIR env var + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/go/build/build.go +=================================================================== +--- go.orig/src/go/build/build.go ++++ go/src/go/build/build.go +@@ -1388,7 +1388,7 @@ func init() { + } + + // ToolDir is the directory containing build tools. +-var ToolDir = filepath.Join(runtime.GOROOT(), "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH) ++var ToolDir = envOr("GOTOOLDIR", filepath.Join(runtime.GOROOT(), "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH)) + + // IsLocalImport reports whether the import path is + // a local import path, like ".", "..", "./foo", or "../foo". +Index: go/src/cmd/go/build.go +=================================================================== +--- go.orig/src/cmd/go/build.go ++++ go/src/cmd/go/build.go +@@ -1312,7 +1312,7 @@ func (b *builder) build(a *action) (err + } + + cgoExe := tool("cgo") +- if a.cgo != nil && a.cgo.target != "" { ++ if a.cgo != nil && a.cgo.target != "" && os.Getenv("GOTOOLDIR") == "" { + cgoExe = a.cgo.target + } + outGo, outObj, err := b.cgo(a.p, cgoExe, obj, pcCFLAGS, pcLDFLAGS, cgofiles, gccfiles, cxxfiles, a.p.MFiles) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/split-host-and-target-build.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/split-host-and-target-build.patch new file mode 100644 index 000000000..b0dd95bbe --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/split-host-and-target-build.patch @@ -0,0 +1,62 @@ +Add new option --target-only to build target components +Separates the host and target pieces of build + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/make.bash +=================================================================== +--- go.orig/src/make.bash ++++ go/src/make.bash +@@ -154,13 +154,22 @@ if [ "$1" = "--no-clean" ]; then + buildall="" + shift + fi +-./cmd/dist/dist bootstrap $buildall $GO_DISTFLAGS -v # builds go_bootstrap ++do_host_build="yes" ++do_target_build="yes" ++if [ "$1" = "--target-only" ]; then ++ do_host_build="no" ++ shift ++elif [ "$1" = "--host-only" ]; then ++ do_target_build="no" ++ shift ++fi + +-# Delay move of dist tool to now, because bootstrap may clear tool directory. +-mv cmd/dist/dist "$GOTOOLDIR"/dist +-echo ++if [ "$do_host_build" = "yes" ]; then ++ ./cmd/dist/dist bootstrap $buildall $GO_DISTFLAGS -v # builds go_bootstrap ++ # Delay move of dist tool to now, because bootstrap may clear tool directory. ++ mv cmd/dist/dist "$GOTOOLDIR"/dist ++ echo + +-if [ "$GOHOSTARCH" != "$GOARCH" -o "$GOHOSTOS" != "$GOOS" ]; then + echo "##### Building packages and commands for host, $GOHOSTOS/$GOHOSTARCH." + # CC_FOR_TARGET is recorded as the default compiler for the go tool. When building for the host, however, + # use the host compiler, CC, from `cmd/dist/dist env` instead. 
+@@ -169,11 +178,20 @@ if [ "$GOHOSTARCH" != "$GOARCH" -o "$GOH + echo + fi + +-echo "##### Building packages and commands for $GOOS/$GOARCH." +-CC="$CC_FOR_TARGET" "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v std cmd +-echo ++if [ "$do_target_build" = "yes" ]; then ++ GO_INSTALL="${GO_TARGET_INSTALL:-std cmd}" ++ echo "##### Building packages and commands for $GOOS/$GOARCH." ++ if [ "$GOHOSTOS" = "$GOOS" -a "$GOHOSTARCH" = "$GOARCH" -a "$do_host_build" = "yes" ]; then ++ rm -rf ./host-tools ++ mkdir ./host-tools ++ mv "$GOTOOLDIR"/* ./host-tools ++ GOTOOLDIR="$PWD/host-tools" ++ fi ++ GOTOOLDIR="$GOTOOLDIR" CC="$CC_FOR_TARGET" "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v ${GO_INSTALL} ++ echo + +-rm -f "$GOTOOLDIR"/go_bootstrap ++ rm -f "$GOTOOLDIR"/go_bootstrap ++fi + + if [ "$1" != "--no-banner" ]; then + "$GOTOOLDIR"/dist banner diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/syslog.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/syslog.patch new file mode 100644 index 000000000..29be06f1b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.7/syslog.patch @@ -0,0 +1,62 @@ +Add timeouts to logger + +Signed-off-by: Khem Raj +Upstream-Status: Pending + +diff -r -u go/src/log/syslog/syslog.go /home/achang/GOCOPY/go/src/log/syslog/syslog.go +--- go/src/log/syslog/syslog.go 2013-11-28 13:38:28.000000000 -0800 ++++ /home/achang/GOCOPY/go/src/log/syslog/syslog.go 2014-10-03 11:44:37.710403200 -0700 +@@ -33,6 +33,9 @@ + const severityMask = 0x07 + const facilityMask = 0xf8 + ++var writeTimeout = 1 * time.Second ++var connectTimeout = 1 * time.Second ++ + const ( + // Severity. + +@@ -100,6 +103,7 @@ + type serverConn interface { + writeString(p Priority, hostname, tag, s, nl string) error + close() error ++ setWriteDeadline(t time.Time) error + } + + type netConn struct { +@@ -273,7 +277,11 @@ + nl = "\n" + } + +- err := w.conn.writeString(p, w.hostname, w.tag, msg, nl) ++ err := w.conn.setWriteDeadline(time.Now().Add(writeTimeout)) ++ if err != nil { ++ return 0, err ++ } ++ err = w.conn.writeString(p, w.hostname, w.tag, msg, nl) + if err != nil { + return 0, err + } +@@ -305,6 +313,10 @@ + return n.conn.Close() + } + ++func (n *netConn) setWriteDeadline(t time.Time) error { ++ return n.conn.SetWriteDeadline(t) ++} ++ + // NewLogger creates a log.Logger whose output is written to + // the system log service with the specified priority. 
The logFlag + // argument is the flag set passed through to log.New to create +diff -r -u go/src/log/syslog/syslog_unix.go /home/achang/GOCOPY/go/src/log/syslog/syslog_unix.go +--- go/src/log/syslog/syslog_unix.go 2013-11-28 13:38:28.000000000 -0800 ++++ /home/achang/GOCOPY/go/src/log/syslog/syslog_unix.go 2014-10-03 11:44:39.010403175 -0700 +@@ -19,7 +19,7 @@ + logPaths := []string{"/dev/log", "/var/run/syslog"} + for _, network := range logTypes { + for _, path := range logPaths { +- conn, err := net.Dial(network, path) ++ conn, err := net.DialTimeout(network, path, connectTimeout) + if err != nil { + continue + } else { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8.inc b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8.inc new file mode 100644 index 000000000..5c376a2eb --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8.inc @@ -0,0 +1,19 @@ +require go-common.inc + +GOMINOR = "" +GO_BASEVERSION = "1.8" +PV .= "${GOMINOR}" +FILESEXTRAPATHS_prepend := "${FILE_DIRNAME}/go-${GO_BASEVERSION}:" + +LIC_FILES_CHKSUM = "file://LICENSE;md5=5d4950ecb7b26d2c5e4e7b4e0dd74707" + +SRC_URI += "\ + file://armhf-elf-header.patch \ + file://syslog.patch \ + file://fix-target-cc-for-build.patch \ + file://fix-cc-handling.patch \ + file://split-host-and-target-build.patch \ + file://gotooldir.patch \ +" +SRC_URI[md5sum] = "7743960c968760437b6e39093cfe6f67" +SRC_URI[sha256sum] = "406865f587b44be7092f206d73fc1de252600b79b3cacc587b74b5ef5c623596" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/armhf-elf-header.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/armhf-elf-header.patch new file mode 100644 index 000000000..3508838e8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/armhf-elf-header.patch @@ -0,0 +1,23 @@ +Encode arm EABI ( hard/soft ) calling convention in ELF header + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/cmd/link/internal/ld/elf.go +=================================================================== +--- go.orig/src/cmd/link/internal/ld/elf.go ++++ go/src/cmd/link/internal/ld/elf.go +@@ -950,7 +950,13 @@ func Elfinit(ctxt *Link) { + case sys.ARM, sys.MIPS: + if SysArch.Family == sys.ARM { + // we use EABI on linux/arm, freebsd/arm, netbsd/arm. 
+- if Headtype == obj.Hlinux || Headtype == obj.Hfreebsd || Headtype == obj.Hnetbsd { ++ if Headtype == obj.Hlinux { ++ if obj.GOARM == 7 { ++ ehdr.flags = 0x5000402 // has entry point, Version5 EABI, hard float ++ } else { ++ ehdr.flags = 0x5000202 // has entry point, Version5 EABI, soft float ++ } ++ } else if Headtype == obj.Hfreebsd || Headtype == obj.Hnetbsd { + // We set a value here that makes no indication of which + // float ABI the object uses, because this is information + // used by the dynamic linker to compare executables and diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/fix-cc-handling.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/fix-cc-handling.patch new file mode 100644 index 000000000..dc9b811b2 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/fix-cc-handling.patch @@ -0,0 +1,50 @@ +Accept CC with multiple words in its name + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/cmd/go/build.go +=================================================================== +--- go.orig/src/cmd/go/build.go ++++ go/src/cmd/go/build.go +@@ -3100,12 +3100,24 @@ func (b *builder) gccCmd(objdir string) + return b.ccompilerCmd("CC", defaultCC, objdir) + } + ++// gccCmd returns a gcc command line prefix ++// defaultCC is defined in zdefaultcc.go, written by cmd/dist. ++func (b *builder) gccCmdForReal() []string { ++ return envList("CC", defaultCC) ++} ++ + // gxxCmd returns a g++ command line prefix + // defaultCXX is defined in zdefaultcc.go, written by cmd/dist. + func (b *builder) gxxCmd(objdir string) []string { + return b.ccompilerCmd("CXX", defaultCXX, objdir) + } + ++// gxxCmd returns a g++ command line prefix ++// defaultCXX is defined in zdefaultcc.go, written by cmd/dist. ++func (b *builder) gxxCmdForReal() []string { ++ return envList("CXX", defaultCXX) ++} ++ + // gfortranCmd returns a gfortran command line prefix. + func (b *builder) gfortranCmd(objdir string) []string { + return b.ccompilerCmd("FC", "gfortran", objdir) +Index: go/src/cmd/go/env.go +=================================================================== +--- go.orig/src/cmd/go/env.go ++++ go/src/cmd/go/env.go +@@ -63,10 +63,9 @@ func mkEnv() []envVar { + } + + cmd := b.gccCmd(".") +- env = append(env, envVar{"CC", cmd[0]}) ++ env = append(env, envVar{"CC", strings.Join(b.gccCmdForReal(), " ")}) + env = append(env, envVar{"GOGCCFLAGS", strings.Join(cmd[3:], " ")}) +- cmd = b.gxxCmd(".") +- env = append(env, envVar{"CXX", cmd[0]}) ++ env = append(env, envVar{"CXX", strings.Join(b.gxxCmdForReal(), " ")}) + + if buildContext.CgoEnabled { + env = append(env, envVar{"CGO_ENABLED", "1"}) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/fix-target-cc-for-build.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/fix-target-cc-for-build.patch new file mode 100644 index 000000000..2f6156ecd --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/fix-target-cc-for-build.patch @@ -0,0 +1,17 @@ +Put Quotes around CC_FOR_TARGET since it can be mutliple words e.g. in OE + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/make.bash +=================================================================== +--- go.orig/src/make.bash 2015-07-29 13:28:11.334031696 -0700 ++++ go/src/make.bash 2015-07-29 13:36:55.814465630 -0700 +@@ -158,7 +158,7 @@ + fi + + echo "##### Building packages and commands for $GOOS/$GOARCH." 
+-CC=$CC_FOR_TARGET "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v std cmd ++CC="$CC_FOR_TARGET" "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v std cmd + echo + + rm -f "$GOTOOLDIR"/go_bootstrap diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/gotooldir.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/gotooldir.patch new file mode 100644 index 000000000..94670259f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/gotooldir.patch @@ -0,0 +1,30 @@ +Define tooldir in relation to GOTOOLDIR env var + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/go/build/build.go +=================================================================== +--- go.orig/src/go/build/build.go ++++ go/src/go/build/build.go +@@ -1388,7 +1388,7 @@ func init() { + } + + // ToolDir is the directory containing build tools. +-var ToolDir = filepath.Join(runtime.GOROOT(), "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH) ++var ToolDir = envOr("GOTOOLDIR", filepath.Join(runtime.GOROOT(), "pkg/tool/"+runtime.GOOS+"_"+runtime.GOARCH)) + + // IsLocalImport reports whether the import path is + // a local import path, like ".", "..", "./foo", or "../foo". +Index: go/src/cmd/go/build.go +=================================================================== +--- go.orig/src/cmd/go/build.go ++++ go/src/cmd/go/build.go +@@ -1312,7 +1312,7 @@ func (b *builder) build(a *action) (err + } + + cgoExe := tool("cgo") +- if a.cgo != nil && a.cgo.target != "" { ++ if a.cgo != nil && a.cgo.target != "" && os.Getenv("GOTOOLDIR") == "" { + cgoExe = a.cgo.target + } + outGo, outObj, err := b.cgo(a.p, cgoExe, obj, pcCFLAGS, pcLDFLAGS, cgofiles, gccfiles, cxxfiles, a.p.MFiles) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/split-host-and-target-build.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/split-host-and-target-build.patch new file mode 100644 index 000000000..b0dd95bbe --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/split-host-and-target-build.patch @@ -0,0 +1,62 @@ +Add new option --target-only to build target components +Separates the host and target pieces of build + +Signed-off-by: Khem Raj +Upstream-Status: Pending +Index: go/src/make.bash +=================================================================== +--- go.orig/src/make.bash ++++ go/src/make.bash +@@ -154,13 +154,22 @@ if [ "$1" = "--no-clean" ]; then + buildall="" + shift + fi +-./cmd/dist/dist bootstrap $buildall $GO_DISTFLAGS -v # builds go_bootstrap ++do_host_build="yes" ++do_target_build="yes" ++if [ "$1" = "--target-only" ]; then ++ do_host_build="no" ++ shift ++elif [ "$1" = "--host-only" ]; then ++ do_target_build="no" ++ shift ++fi + +-# Delay move of dist tool to now, because bootstrap may clear tool directory. +-mv cmd/dist/dist "$GOTOOLDIR"/dist +-echo ++if [ "$do_host_build" = "yes" ]; then ++ ./cmd/dist/dist bootstrap $buildall $GO_DISTFLAGS -v # builds go_bootstrap ++ # Delay move of dist tool to now, because bootstrap may clear tool directory. ++ mv cmd/dist/dist "$GOTOOLDIR"/dist ++ echo + +-if [ "$GOHOSTARCH" != "$GOARCH" -o "$GOHOSTOS" != "$GOOS" ]; then + echo "##### Building packages and commands for host, $GOHOSTOS/$GOHOSTARCH." + # CC_FOR_TARGET is recorded as the default compiler for the go tool. When building for the host, however, + # use the host compiler, CC, from `cmd/dist/dist env` instead. 
+@@ -169,11 +178,20 @@ if [ "$GOHOSTARCH" != "$GOARCH" -o "$GOH + echo + fi + +-echo "##### Building packages and commands for $GOOS/$GOARCH." +-CC="$CC_FOR_TARGET" "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v std cmd +-echo ++if [ "$do_target_build" = "yes" ]; then ++ GO_INSTALL="${GO_TARGET_INSTALL:-std cmd}" ++ echo "##### Building packages and commands for $GOOS/$GOARCH." ++ if [ "$GOHOSTOS" = "$GOOS" -a "$GOHOSTARCH" = "$GOARCH" -a "$do_host_build" = "yes" ]; then ++ rm -rf ./host-tools ++ mkdir ./host-tools ++ mv "$GOTOOLDIR"/* ./host-tools ++ GOTOOLDIR="$PWD/host-tools" ++ fi ++ GOTOOLDIR="$GOTOOLDIR" CC="$CC_FOR_TARGET" "$GOTOOLDIR"/go_bootstrap install $GO_FLAGS -gcflags "$GO_GCFLAGS" -ldflags "$GO_LDFLAGS" -v ${GO_INSTALL} ++ echo + +-rm -f "$GOTOOLDIR"/go_bootstrap ++ rm -f "$GOTOOLDIR"/go_bootstrap ++fi + + if [ "$1" != "--no-banner" ]; then + "$GOTOOLDIR"/dist banner diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/syslog.patch b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/syslog.patch new file mode 100644 index 000000000..29be06f1b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-1.8/syslog.patch @@ -0,0 +1,62 @@ +Add timeouts to logger + +Signed-off-by: Khem Raj +Upstream-Status: Pending + +diff -r -u go/src/log/syslog/syslog.go /home/achang/GOCOPY/go/src/log/syslog/syslog.go +--- go/src/log/syslog/syslog.go 2013-11-28 13:38:28.000000000 -0800 ++++ /home/achang/GOCOPY/go/src/log/syslog/syslog.go 2014-10-03 11:44:37.710403200 -0700 +@@ -33,6 +33,9 @@ + const severityMask = 0x07 + const facilityMask = 0xf8 + ++var writeTimeout = 1 * time.Second ++var connectTimeout = 1 * time.Second ++ + const ( + // Severity. + +@@ -100,6 +103,7 @@ + type serverConn interface { + writeString(p Priority, hostname, tag, s, nl string) error + close() error ++ setWriteDeadline(t time.Time) error + } + + type netConn struct { +@@ -273,7 +277,11 @@ + nl = "\n" + } + +- err := w.conn.writeString(p, w.hostname, w.tag, msg, nl) ++ err := w.conn.setWriteDeadline(time.Now().Add(writeTimeout)) ++ if err != nil { ++ return 0, err ++ } ++ err = w.conn.writeString(p, w.hostname, w.tag, msg, nl) + if err != nil { + return 0, err + } +@@ -305,6 +313,10 @@ + return n.conn.Close() + } + ++func (n *netConn) setWriteDeadline(t time.Time) error { ++ return n.conn.SetWriteDeadline(t) ++} ++ + // NewLogger creates a log.Logger whose output is written to + // the system log service with the specified priority. 
The logFlag + // argument is the flag set passed through to log.New to create +diff -r -u go/src/log/syslog/syslog_unix.go /home/achang/GOCOPY/go/src/log/syslog/syslog_unix.go +--- go/src/log/syslog/syslog_unix.go 2013-11-28 13:38:28.000000000 -0800 ++++ /home/achang/GOCOPY/go/src/log/syslog/syslog_unix.go 2014-10-03 11:44:39.010403175 -0700 +@@ -19,7 +19,7 @@ + logPaths := []string{"/dev/log", "/var/run/syslog"} + for _, network := range logTypes { + for _, path := range logPaths { +- conn, err := net.Dial(network, path) ++ conn, err := net.DialTimeout(network, path, connectTimeout) + if err != nil { + continue + } else { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-bootstrap-native_1.4.bb b/import-layers/yocto-poky/meta/recipes-devtools/go/go-bootstrap-native_1.4.bb new file mode 100644 index 000000000..3d4141e87 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-bootstrap-native_1.4.bb @@ -0,0 +1,3 @@ +BOOTSTRAP = "1.4" +require go-native.inc +require go-${PV}.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-common.inc b/import-layers/yocto-poky/meta/recipes-devtools/go/go-common.inc new file mode 100644 index 000000000..f74b8b765 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-common.inc @@ -0,0 +1,22 @@ +SUMMARY = "Go programming language compiler" +DESCRIPTION = " The Go programming language is an open source project to make \ + programmers more productive. Go is expressive, concise, clean, and\ + efficient. Its concurrency mechanisms make it easy to write programs\ + that get the most out of multicore and networked machines, while its\ + novel type system enables flexible and modular program construction.\ + Go compiles quickly to machine code yet has the convenience of\ + garbage collection and the power of run-time reflection. It's a\ + fast, statically typed, compiled language that feels like a\ + dynamically typed, interpreted language." + +HOMEPAGE = " http://golang.org/" +LICENSE = "BSD-3-Clause" + +inherit goarch + +SRC_URI = "http://golang.org/dl/go${PV}.src.tar.gz" +S = "${WORKDIR}/go" +B = "${S}" + +INHIBIT_PACKAGE_DEBUG_SPLIT = "1" +SSTATE_SCAN_CMD = "true" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-cross.inc b/import-layers/yocto-poky/meta/recipes-devtools/go/go-cross.inc new file mode 100644 index 000000000..93206a5d0 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-cross.inc @@ -0,0 +1,18 @@ +inherit cross + +DEPENDS += "gcc-cross-${TARGET_ARCH}" + +PN = "go-cross-${TARGET_ARCH}" + +# Ignore how TARGET_ARCH is computed. +TARGET_ARCH[vardepvalue] = "${TARGET_ARCH}" + +FILESEXTRAPATHS =. "${FILE_DIRNAME}/go-cross:" + +GOROOT_FINAL = "${libdir}/go" +export GOROOT_FINAL + +# x32 ABI is not supported on go compiler so far +COMPATIBLE_HOST_linux-gnux32 = "null" +# ppc32 is not supported in go compilers +COMPATIBLE_HOST_powerpc = "null" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-cross_1.7.bb b/import-layers/yocto-poky/meta/recipes-devtools/go/go-cross_1.7.bb new file mode 100644 index 000000000..56ee084b1 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-cross_1.7.bb @@ -0,0 +1,5 @@ +require go-cross.inc +require go_${PV}.bb + +# Go binaries are not understood by the strip tool. 
+INHIBIT_SYSROOT_STRIP = "1" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-cross_1.8.bb b/import-layers/yocto-poky/meta/recipes-devtools/go/go-cross_1.8.bb new file mode 100644 index 000000000..56ee084b1 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-cross_1.8.bb @@ -0,0 +1,5 @@ +require go-cross.inc +require go_${PV}.bb + +# Go binaries are not understood by the strip tool. +INHIBIT_SYSROOT_STRIP = "1" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-native.inc b/import-layers/yocto-poky/meta/recipes-devtools/go/go-native.inc new file mode 100644 index 000000000..c21f8fda7 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-native.inc @@ -0,0 +1,56 @@ +inherit native + +BOOTSTRAP ?= "" +export GOOS = "${BUILD_GOOS}" +export GOARCH = "${BUILD_GOARCH}" +export GOROOT_FINAL = "${STAGING_LIBDIR_NATIVE}/go${BOOTSTRAP}" +export GOROOT_BOOTSTRAP = "${STAGING_LIBDIR_NATIVE}/go1.4" +export CGO_ENABLED = "1" + +do_configure[noexec] = "1" + +do_compile() { + export GOBIN="${B}/bin" + rm -rf ${GOBIN} + mkdir ${GOBIN} + + export TMPDIR=${WORKDIR}/build-tmp + mkdir -p ${WORKDIR}/build-tmp + + cd src + CGO_ENABLED=0 ./make.bash --host-only +} + +make_wrapper() { + rm -f ${D}${bindir}/$2$3 + cat <${D}${bindir}/$2$3 +#!/bin/bash +here=\`dirname \$0\` +export GOROOT="${GOROOT:-\`readlink -f \$here/../lib/go$3\`}" +\$here/../lib/go$3/bin/$1 "\$@" +END + chmod +x ${D}${bindir}/$2$3 +} + +do_install() { + install -d ${D}${libdir}/go${BOOTSTRAP} + cp -a ${B}/pkg ${D}${libdir}/go${BOOTSTRAP}/ + install -d ${D}${libdir}/go${BOOTSTRAP}/src + (cd ${S}/src; for d in *; do \ + [ -d $d ] && cp -a ${S}/src/$d ${D}${libdir}/go${BOOTSTRAP}/src/; \ + done) + + install -d ${D}${bindir} ${D}${libdir}/go${BOOTSTRAP}/bin + for f in ${B}/bin/* + do + base=`basename $f` + install -m755 $f ${D}${libdir}/go${BOOTSTRAP}/bin + make_wrapper $base $base ${BOOTSTRAP} + done +} + +do_package[noexec] = "1" +do_packagedata[noexec] = "1" +do_package_write_ipk[noexec] = "1" +do_package_write_deb[noexec] = "1" +do_package_write_rpm[noexec] = "1" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go-native_1.8.bb b/import-layers/yocto-poky/meta/recipes-devtools/go/go-native_1.8.bb new file mode 100644 index 000000000..182fca27a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go-native_1.8.bb @@ -0,0 +1,3 @@ +require ${PN}.inc +require go-${PV}.inc +DEPENDS += "go-bootstrap-native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go.inc b/import-layers/yocto-poky/meta/recipes-devtools/go/go.inc new file mode 100644 index 000000000..25437ddfe --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go.inc @@ -0,0 +1,86 @@ +inherit goarch +DEPENDS += "go-bootstrap-native" + +# libgcc is required for the target specific libraries to build +# properly, but apparently not for go-cross and, more importantly, +# also can't be used there because go-cross cannot depend on +# the tune-specific libgcc. Otherwise go-cross also would have +# to be tune-specific. 
+DEPENDS += "${@ 'libgcc' if not oe.utils.inherits(d, 'cross') else ''}" + +# Prevent runstrip from running because you get errors when the host arch != target arch +INHIBIT_PACKAGE_STRIP = "1" +INHIBIT_SYSROOT_STRIP = "1" + +# x32 ABI is not supported on go compiler so far +COMPATIBLE_HOST_linux-gnux32 = "null" +# ppc32 is not supported in go compilers +COMPATIBLE_HOST_powerpc = "null" + +export GOHOSTOS = "${BUILD_GOOS}" +export GOHOSTARCH = "${BUILD_GOARCH}" +export GOOS = "${TARGET_GOOS}" +export GOARCH = "${TARGET_GOARCH}" +export GOARM = "${TARGET_GOARM}" +export GOROOT_BOOTSTRAP = "${STAGING_LIBDIR_NATIVE}/go1.4" +export GOROOT_FINAL = "${libdir}/go" +export CGO_ENABLED = "1" +export CC_FOR_TARGET = "${CC}" +export CXX_FOR_TARGET = "${CXX}" + +do_configure[noexec] = "1" + +do_compile_prepend_class-cross() { + export CGO_ENABLED=0 +} + +do_compile() { + export GOBIN="${B}/bin" + export CC="${@d.getVar('BUILD_CC', True).strip()}" + rm -rf ${GOBIN} ${B}/pkg + mkdir ${GOBIN} + + export TMPDIR=${WORKDIR}/build-tmp + mkdir -p ${WORKDIR}/build-tmp + + cd src + ./make.bash --host-only + # Ensure cgo.a is built with the target toolchain + export GOBIN="${B}/target/bin" + rm -rf ${GOBIN} + mkdir -p ${GOBIN} + GO_FLAGS="-a" ./make.bash +} + +do_install_class-target() { + install -d ${D}${libdir}/go + cp -a ${B}/pkg ${D}${libdir}/go/ + install -d ${D}${libdir}/go/src + (cd ${S}/src; for d in *; do \ + [ -d $d ] && cp -a ${S}/src/$d ${D}${libdir}/go/src/; \ + done) + install -d ${D}${bindir} + if [ -d ${B}/bin/${GOOS}_${GOARCH} ] + then + install -m 0755 ${B}/bin/${GOOS}_${GOARCH}/* ${D}${bindir} + else + install -m 0755 ${B}/bin/* ${D}${bindir} + fi +} + +do_install_class-cross() { + install -d ${D}${libdir}/go + cp -a ${B}/pkg ${D}${libdir}/go/ + install -d ${D}${libdir}/go/src + (cd ${S}/src; for d in *; do \ + [ -d $d ] && cp -a ${S}/src/$d ${D}${libdir}/go/src/; \ + done) + install -d ${D}${bindir} + for f in ${B}/bin/go* + do + install -m755 $f ${D}${bindir} + done +} +do_package_qa[noexec] = "1" + +RDEPENDS_${PN} += "perl" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go_1.6.bb b/import-layers/yocto-poky/meta/recipes-devtools/go/go_1.6.bb new file mode 100644 index 000000000..2f590338c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go_1.6.bb @@ -0,0 +1,4 @@ +require go.inc +require go-${PV}.inc + +BBCLASSEXTEND = "cross" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go_1.7.bb b/import-layers/yocto-poky/meta/recipes-devtools/go/go_1.7.bb new file mode 100644 index 000000000..e7a6ab277 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go_1.7.bb @@ -0,0 +1,2 @@ +require go-${PV}.inc +require go.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/go/go_1.8.bb b/import-layers/yocto-poky/meta/recipes-devtools/go/go_1.8.bb new file mode 100644 index 000000000..091b1318a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/go/go_1.8.bb @@ -0,0 +1,3 @@ +require go-${PV}.inc +require go.inc +TUNE_CCARGS_remove = "-march=mips32r2" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/guile/files/0002-Recognize-nios2-as-compilation-target.patch b/import-layers/yocto-poky/meta/recipes-devtools/guile/files/0002-Recognize-nios2-as-compilation-target.patch deleted file mode 100644 index 8e8474770..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/guile/files/0002-Recognize-nios2-as-compilation-target.patch +++ /dev/null @@ -1,32 +0,0 @@ -From 76155065c70b5ab65c6c805423183b360141db84 
Mon Sep 17 00:00:00 2001 -From: Marek Vasut -Date: Thu, 28 Jan 2016 04:46:23 +0100 -Subject: [PATCH] Recognize nios2 as compilation target - -Signed-off-by: Marek Vasut -Upstream-Status: Submitted [ http://debbugs.gnu.org/cgi/bugreport.cgi?bug=22480 ] ---- - module/system/base/target.scm | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) - -diff --git a/module/system/base/target.scm b/module/system/base/target.scm ---- a/module/system/base/target.scm -+++ b/module/system/base/target.scm -@@ -65,7 +65,7 @@ - (cond ((string-match "^i[0-9]86$" cpu) - (endianness little)) - ((member cpu '("x86_64" "ia64" -- "powerpcle" "powerpc64le" "mipsel" "mips64el" "sh4")) -+ "powerpcle" "powerpc64le" "mipsel" "mips64el" "sh4" "nios2")) - (endianness little)) - ((member cpu '("sparc" "sparc64" "powerpc" "powerpc64" "spu" - "mips" "mips64" "m68k" "s390x")) -@@ -108,7 +108,7 @@ - - ((string-match "64$" cpu) 8) - ((string-match "64_?[lbe][lbe]$" cpu) 8) -- ((member cpu '("sparc" "powerpc" "mips" "mipsel" "m68k" "sh4")) 4) -+ ((member cpu '("sparc" "powerpc" "mips" "mipsel" "m68k" "sh4" "nios2")) 4) - ((member cpu '("s390x")) 8) - ((string-match "^arm.*" cpu) 4) - (else (error "unknown CPU word size" cpu))))) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/guile/guile_2.0.12.bb b/import-layers/yocto-poky/meta/recipes-devtools/guile/guile_2.0.12.bb deleted file mode 100644 index d2fe511ae..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/guile/guile_2.0.12.bb +++ /dev/null @@ -1,121 +0,0 @@ -SUMMARY = "Guile is the GNU Ubiquitous Intelligent Language for Extensions" -DESCRIPTION = "Guile is the GNU Ubiquitous Intelligent Language for Extensions,\ - the official extension language for the GNU operating system.\ - Guile is a library designed to help programmers create flexible applications.\ - Using Guile in an application allows the application's functionality to be\ - extended by users or other programmers with plug-ins, modules, or scripts.\ - Guile provides what might be described as 'practical software freedom,'\ - making it possible for users to customize an application to meet their\ - needs without digging into the application's internals." - -HOMEPAGE = "http://www.gnu.org/software/guile/" -SECTION = "devel" -LICENSE = "GPLv3" -LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" - -SRC_URI = "${GNU_MIRROR}/guile/guile-${PV}.tar.xz \ - file://debian/0002-Mark-Unused-modules-are-removed-gc-test-as-unresolve.patch \ - file://debian/0003-Mark-mutex-with-owner-not-retained-threads-test-as-u.patch \ - file://opensuse/guile-64bit.patch \ - file://guile_2.0.6_fix_sed_error.patch \ - file://arm_endianness.patch \ - file://arm_aarch64.patch \ - file://workaround-ice-ssa-corruption.patch \ - file://libguile-Makefile.am-hook.patch \ - file://0002-Recognize-nios2-as-compilation-target.patch \ - " - -SRC_URI[md5sum] = "081fdf80cd3a76f260a2a0d87f773d6b" -SRC_URI[sha256sum] = "de8187736f9b260f2fa776ed39b52cb74dd389ccf7039c042f0606270196b7e9" - -inherit autotools gettext pkgconfig texinfo -BBCLASSEXTEND = "native" - -DEPENDS = "libunistring bdwgc gmp libtool libffi ncurses readline" -# add guile-native only to the target recipe's DEPENDS -DEPENDS_append_class-target = " guile-native libatomic-ops" - -# The comment of the script guile-config said it has been deprecated but we should -# at least add the required dependency to make it work since we still provide the script. 
-RDEPENDS_${PN} = "pkgconfig" - -RDEPENDS_${PN}_append_libc-glibc_class-target = " glibc-gconv-iso8859-1" - -EXTRA_OECONF += "${@['--without-libltdl-prefix --without-libgmp-prefix --without-libreadline-prefix', ''][bb.data.inherits_class('native',d)]}" - -EXTRA_OECONF_append_class-target = " --with-libunistring-prefix=${STAGING_LIBDIR} \ - --with-libgmp-prefix=${STAGING_LIBDIR} \ - --with-libltdl-prefix=${STAGING_LIBDIR}" -EXTRA_OECONF_append_libc-uclibc = " guile_cv_use_csqrt=no " - -CFLAGS_append_libc-musl = " -DHAVE_GC_SET_FINALIZER_NOTIFIER \ - -DHAVE_GC_GET_HEAP_USAGE_SAFE \ - -DHAVE_GC_GET_FREE_SPACE_DIVISOR \ - -DHAVE_GC_SET_FINALIZE_ON_DEMAND \ - " - -do_configure_prepend() { - mkdir -p po -} - -export GUILE_FOR_BUILD="${BUILD_SYS}-guile" - -do_install_append_class-native() { - install -m 0755 ${D}${bindir}/guile ${D}${bindir}/${HOST_SYS}-guile - - create_wrapper ${D}/${bindir}/guile \ - GUILE_LOAD_PATH=${STAGING_DATADIR_NATIVE}/guile/2.0 \ - GUILE_LOAD_COMPILED_PATH=${STAGING_LIBDIR_NATIVE}/guile/2.0/ccache - create_wrapper ${D}${bindir}/${HOST_SYS}-guile \ - GUILE_LOAD_PATH=${STAGING_DATADIR_NATIVE}/guile/2.0 \ - GUILE_LOAD_COMPILED_PATH=${STAGING_LIBDIR_NATIVE}/guile/2.0/ccache -} - -do_install_append_class-target() { - # cleanup buildpaths in scripts - sed -i -e 's:${STAGING_DIR_NATIVE}::' ${D}${bindir}/guile-config - sed -i -e 's:${STAGING_DIR_HOST}::' ${D}${bindir}/guile-snarf - - sed -i -e 's:${STAGING_DIR_TARGET}::g' ${D}${libdir}/pkgconfig/guile-2.0.pc -} - -do_install_append_libc-musl() { - rm -f ${D}${libdir}/charset.alias -} - -SYSROOT_PREPROCESS_FUNCS = "guile_cross_config" - -guile_cross_config() { - # this is only for target recipe - if [ "${PN}" = "guile" ] - then - # Create guile-config returning target values instead of native values - install -d ${SYSROOT_DESTDIR}${STAGING_BINDIR_CROSS} - echo '#!'`which ${BUILD_SYS}-guile`$' \\\n--no-auto-compile -e main -s\n!#\n(define %guile-build-info '\'\( \ - > ${B}/guile-config.cross - sed -n -e 's:^[ \t]*{[ \t]*": (:' \ - -e 's:",[ \t]*": . ":' \ - -e 's:" *}, *\\:"):' \ - -e 's:^.*cachedir.*$::' \ - -e '/^ (/p' \ - < ${B}/libguile/libpath.h >> ${B}/guile-config.cross - echo '))' >> ${B}/guile-config.cross - cat ${B}/meta/guile-config >> ${B}/guile-config.cross - install ${B}/guile-config.cross ${STAGING_BINDIR_CROSS}/guile-config - fi -} - -# Guile needs the compiled files to be newer than the source, and it won't -# auto-compile into the prefix even if it can write there, so touch them here as -# sysroot is managed. 
-SSTATEPOSTINSTFUNCS += "guile_sstate_postinst" -guile_sstate_postinst() { - if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] - then - find ${STAGING_DIR_TARGET}/${libdir}/guile/2.0/ccache -type f | xargs touch - fi -} - -# http://errors.yoctoproject.org/Errors/Details/20491/ -ARM_INSTRUCTION_SET_armv4 = "arm" -ARM_INSTRUCTION_SET_armv5 = "arm" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/guile/guile_2.0.14.bb b/import-layers/yocto-poky/meta/recipes-devtools/guile/guile_2.0.14.bb new file mode 100644 index 000000000..7a01d0ffc --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/guile/guile_2.0.14.bb @@ -0,0 +1,125 @@ +SUMMARY = "Guile is the GNU Ubiquitous Intelligent Language for Extensions" +DESCRIPTION = "Guile is the GNU Ubiquitous Intelligent Language for Extensions,\ + the official extension language for the GNU operating system.\ + Guile is a library designed to help programmers create flexible applications.\ + Using Guile in an application allows the application's functionality to be\ + extended by users or other programmers with plug-ins, modules, or scripts.\ + Guile provides what might be described as 'practical software freedom,'\ + making it possible for users to customize an application to meet their\ + needs without digging into the application's internals." + +HOMEPAGE = "http://www.gnu.org/software/guile/" +SECTION = "devel" +LICENSE = "GPLv3" +LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" + +SRC_URI = "${GNU_MIRROR}/guile/guile-${PV}.tar.xz \ + file://debian/0002-Mark-Unused-modules-are-removed-gc-test-as-unresolve.patch \ + file://debian/0003-Mark-mutex-with-owner-not-retained-threads-test-as-u.patch \ + file://opensuse/guile-64bit.patch \ + file://guile_2.0.6_fix_sed_error.patch \ + file://arm_endianness.patch \ + file://arm_aarch64.patch \ + file://workaround-ice-ssa-corruption.patch \ + file://libguile-Makefile.am-hook.patch \ + " + +SRC_URI[md5sum] = "c64977c775effd19393364b3018fd8cd" +SRC_URI[sha256sum] = "e8442566256e1be14e51fc18839cd799b966bc5b16c6a1d7a7c35155a8619d82" + +inherit autotools gettext pkgconfig texinfo +BBCLASSEXTEND = "native" + +# Fix "Argument list too long" error when len(TMPDIR) = 410 +acpaths = "-I ./m4" + +DEPENDS = "libunistring bdwgc gmp libtool libffi ncurses readline" +# add guile-native only to the target recipe's DEPENDS +DEPENDS_append_class-target = " guile-native libatomic-ops" + +# The comment of the script guile-config said it has been deprecated but we should +# at least add the required dependency to make it work since we still provide the script. 
+RDEPENDS_${PN} = "pkgconfig" + +RDEPENDS_${PN}_append_libc-glibc_class-target = " glibc-gconv-iso8859-1" + +EXTRA_OECONF += "${@['--without-libltdl-prefix --without-libgmp-prefix --without-libreadline-prefix', ''][bb.data.inherits_class('native',d)]}" + +EXTRA_OECONF_append_class-target = " --with-libunistring-prefix=${STAGING_LIBDIR} \ + --with-libgmp-prefix=${STAGING_LIBDIR} \ + --with-libltdl-prefix=${STAGING_LIBDIR}" +EXTRA_OECONF_append_libc-uclibc = " guile_cv_use_csqrt=no " + +CFLAGS_append_libc-musl = " -DHAVE_GC_SET_FINALIZER_NOTIFIER \ + -DHAVE_GC_GET_HEAP_USAGE_SAFE \ + -DHAVE_GC_GET_FREE_SPACE_DIVISOR \ + -DHAVE_GC_SET_FINALIZE_ON_DEMAND \ + " + +do_configure_prepend() { + mkdir -p po +} + +export GUILE_FOR_BUILD="${BUILD_SYS}-guile" + +do_install_append_class-native() { + install -m 0755 ${D}${bindir}/guile ${D}${bindir}/${HOST_SYS}-guile + + create_wrapper ${D}/${bindir}/guile \ + GUILE_LOAD_PATH=${STAGING_DATADIR_NATIVE}/guile/2.0 \ + GUILE_LOAD_COMPILED_PATH=${STAGING_LIBDIR_NATIVE}/guile/2.0/ccache + create_wrapper ${D}${bindir}/${HOST_SYS}-guile \ + GUILE_LOAD_PATH=${STAGING_DATADIR_NATIVE}/guile/2.0 \ + GUILE_LOAD_COMPILED_PATH=${STAGING_LIBDIR_NATIVE}/guile/2.0/ccache +} + +do_install_append_class-target() { + # cleanup buildpaths in scripts + sed -i -e 's:${STAGING_DIR_NATIVE}::' ${D}${bindir}/guile-config + sed -i -e 's:${STAGING_DIR_HOST}::' ${D}${bindir}/guile-snarf + + sed -i -e 's:${STAGING_DIR_TARGET}::g' ${D}${libdir}/pkgconfig/guile-2.0.pc +} + +do_install_append_libc-musl() { + rm -f ${D}${libdir}/charset.alias +} + +SYSROOT_PREPROCESS_FUNCS = "guile_cross_config" + +guile_cross_config() { + # this is only for target recipe + if [ "${PN}" = "guile" ] + then + # Create guile-config returning target values instead of native values + install -d ${SYSROOT_DESTDIR}${STAGING_BINDIR_CROSS} + printf '#!%s \\\n--no-auto-compile -e main -s\n!#\n(define %%guile-build-info %s(\n' $(which ${BUILD_SYS}-guile) "'" \ + > ${B}/guile-config.cross + sed -n -e 's:^[ \t]*{[ \t]*": (:' \ + -e 's:",[ \t]*": . ":' \ + -e 's:" *}, *\\:"):' \ + -e 's:^.*cachedir.*$::' \ + -e '/^ (/p' \ + < ${B}/libguile/libpath.h >> ${B}/guile-config.cross + echo '))' >> ${B}/guile-config.cross + cat ${B}/meta/guile-config >> ${B}/guile-config.cross + install ${B}/guile-config.cross ${STAGING_BINDIR_CROSS}/guile-config + fi +} + +# Guile needs the compiled files to be newer than the source, and it won't +# auto-compile into the prefix even if it can write there, so touch them here as +# sysroot is managed. 
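For context on the hook used just below: sstate.bbclass runs every function listed in SSTATEPOSTINSTFUNCS after it unpacks a shared-state archive into the sysroot, which is what gives the recipe a chance to re-touch the compiled files so they stay newer than their sources. A minimal sketch of the idiom, with a hypothetical function name:

    SSTATEPOSTINSTFUNCS += "example_sstate_postinst"
    example_sstate_postinst() {
        if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ]
        then
            : # adjust the freshly staged files here, e.g. touch them
        fi
    }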
+SSTATEPOSTINSTFUNCS += "guile_sstate_postinst" +GUILESSTATEDIR = "${COMPONENTS_DIR}/${TUNE_PKGARCH}/${PN}/${libdir}/guile/2.0/ccache" +GUILESSTATEDIR_class-native = "${COMPONENTS_DIR}/${BUILD_ARCH}/${PN}/${libdir_native}/guile/2.0/ccache" +guile_sstate_postinst() { + if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] + then + find ${GUILESSTATEDIR} -type f | xargs touch + fi +} + +# http://errors.yoctoproject.org/Errors/Details/20491/ +ARM_INSTRUCTION_SET_armv4 = "arm" +ARM_INSTRUCTION_SET_armv5 = "arm" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/guilt/files/guilt-bash.patch b/import-layers/yocto-poky/meta/recipes-devtools/guilt/files/guilt-bash.patch deleted file mode 100644 index 70a439e81..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/guilt/files/guilt-bash.patch +++ /dev/null @@ -1,288 +0,0 @@ -guilt: explicitly call for bash - -Running complex guilt stacks of patches/commits can cause interesting -failures after ~20m of processing, with errors like "Bad substitution". - -These have been traced back to having /bin/sh --> /bin/dash on Ubuntu -systems. Putting a shell that actually *works* (such as /bin/bash) -in as /bin/sh makes the problem go away. So here we change the guilt -scripts to explicitly call for bash to ensure we don't have a similar -issue after deployment. - -Upstream-Status: Inappropriate [oe-specific] - -Signed-off-by: Paul Gortmaker - ---- - guilt | 2 +- - guilt-add | 2 +- - guilt-applied | 2 +- - guilt-branch | 2 +- - guilt-delete | 2 +- - guilt-diff | 2 +- - guilt-export | 2 +- - guilt-files | 2 +- - guilt-fold | 2 +- - guilt-fork | 2 +- - guilt-graph | 2 +- - guilt-header | 2 +- - guilt-help | 2 +- - guilt-import | 2 +- - guilt-import-commit | 2 +- - guilt-init | 4 ++-- - guilt-new | 2 +- - guilt-next | 2 +- - guilt-patchbomb | 2 +- - guilt-pop | 2 +- - guilt-prev | 2 +- - guilt-push | 2 +- - guilt-rebase | 2 +- - guilt-refresh | 2 +- - guilt-rm | 2 +- - guilt-series | 2 +- - guilt-status | 2 +- - guilt-top | 2 +- - guilt-unapplied | 2 +- - 29 files changed, 30 insertions(+), 30 deletions(-) - ---- a/guilt -+++ b/guilt -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006-2010 - # ---- a/guilt-add -+++ b/guilt-add -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-applied -+++ b/guilt-applied -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-branch -+++ b/guilt-branch -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2007-2008 - # ---- a/guilt-delete -+++ b/guilt-delete -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-diff -+++ b/guilt-diff -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (C) 2007 Josef 'Jeff' Sipek - # ---- a/guilt-export -+++ b/guilt-export -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Pierre Habouzit, 2007 - # ---- a/guilt-files -+++ b/guilt-files -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (C) 2007 Yasushi SHOJI - # ---- a/guilt-fold -+++ b/guilt-fold -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-fork -+++ b/guilt-fork -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2007 - # ---- a/guilt-graph -+++ b/guilt-graph -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # 
- # Copyright (c) Josef "Jeff" Sipek, 2007 - # ---- a/guilt-header -+++ b/guilt-header -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006-2010 - # ---- a/guilt-help -+++ b/guilt-help -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2007 - # ---- a/guilt-import -+++ b/guilt-import -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2007 - # ---- a/guilt-import-commit -+++ b/guilt-import-commit -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2007 - # ---- a/guilt-init -+++ b/guilt-init -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # -@@ -31,7 +31,7 @@ touch "$GUILT_DIR/$branch/status" - - mkdir -p "$GIT_DIR/hooks/guilt" - cat > "$GIT_DIR/hooks/guilt/delete" < - - echo "Removing patch '\$1'..." ---- a/guilt-new -+++ b/guilt-new -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-next -+++ b/guilt-next -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-patchbomb -+++ b/guilt-patchbomb -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2007 - # ---- a/guilt-pop -+++ b/guilt-pop -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-prev -+++ b/guilt-prev -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-push -+++ b/guilt-push -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-rebase -+++ b/guilt-rebase -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2007 - # ---- a/guilt-refresh -+++ b/guilt-refresh -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-rm -+++ b/guilt-rm -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-series -+++ b/guilt-series -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-status -+++ b/guilt-status -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-top -+++ b/guilt-top -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # ---- a/guilt-unapplied -+++ b/guilt-unapplied -@@ -1,4 +1,4 @@ --#!/bin/sh -+#!/bin/bash - # - # Copyright (c) Josef "Jeff" Sipek, 2006, 2007 - # diff --git a/import-layers/yocto-poky/meta/recipes-devtools/guilt/guilt-native_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/guilt/guilt-native_git.bb deleted file mode 100644 index b63c68c4d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/guilt/guilt-native_git.bb +++ /dev/null @@ -1,23 +0,0 @@ -SUMMARY = "quilt-like tool for Git" -LICENSE = "GPLv2" - -LIC_FILES_CHKSUM = "file://COPYING;md5=b6f3400dc1a01cebafe8a52b3f344135" - -inherit native - -SRC_URI = "git://repo.or.cz/guilt.git \ - file://guilt-bash.patch \ - " -PV = "0.35+git${SRCPV}" -SRCREV = "c2a5bae511c6d5354aa4e1cb59069c31df2b8eeb" - -S = "${WORKDIR}/git" - -# we don't compile, we just install -do_compile() { - : -} - -do_install() { - oe_runmake PREFIX=${D}/${prefix} install -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/intltool/intltool_0.51.0.bb 
b/import-layers/yocto-poky/meta/recipes-devtools/intltool/intltool_0.51.0.bb index a7ea3b727..551bdf061 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/intltool/intltool_0.51.0.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/intltool/intltool_0.51.0.bb @@ -22,6 +22,7 @@ inherit autotools pkgconfig perlnative export PERL = "${bindir}/env perl" PERL_class-native = "/usr/bin/env nativeperl" +PERL_class-nativesdk = "/usr/bin/env perl" # gettext is assumed to exist on the host RDEPENDS_${PN}_class-native = "libxml-parser-perl-native" @@ -33,4 +34,4 @@ FILES_${PN} += "${datadir}/aclocal" INSANE_SKIP_${PN} += "dev-deps" -BBCLASSEXTEND = "native" +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/json-c/json-c/0001-Add-FALLTHRU-comment-to-handle-GCC7-warnings.patch b/import-layers/yocto-poky/meta/recipes-devtools/json-c/json-c/0001-Add-FALLTHRU-comment-to-handle-GCC7-warnings.patch new file mode 100644 index 000000000..df3b6002a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/json-c/json-c/0001-Add-FALLTHRU-comment-to-handle-GCC7-warnings.patch @@ -0,0 +1,77 @@ +From 9522ac8e5d5b20a472f3ffc356d388d36f7f582c Mon Sep 17 00:00:00 2001 +From: marxin +Date: Tue, 21 Mar 2017 08:42:11 +0100 +Subject: [PATCH] Add FALLTHRU comment to handle GCC7 warnings. +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit + +--- +Upstream-Status: Backport [https://github.com/json-c/json-c/commit/014924ba899f659917bb64392bbff7d3c803afc2] +Signed-off-by: André Draszik + + json_object.c | 1 + + json_tokener.c | 1 + + linkhash.c | 22 +++++++++++----------- + 3 files changed, 13 insertions(+), 11 deletions(-) + +diff --git a/json_object.c b/json_object.c +index 6cc73bc..77e8b21 100644 +--- a/json_object.c ++++ b/json_object.c +@@ -552,6 +552,7 @@ int64_t json_object_get_int64(struct json_object *jso) + return jso->o.c_boolean; + case json_type_string: + if (json_parse_int64(jso->o.c_string.str, &cint) == 0) return cint; ++ /* FALLTHRU */ + default: + return 0; + } +diff --git a/json_tokener.c b/json_tokener.c +index 7fa32ae..b32d657 100644 +--- a/json_tokener.c ++++ b/json_tokener.c +@@ -306,6 +306,7 @@ struct json_object* json_tokener_parse_ex(struct json_tokener *tok, + tok->err = json_tokener_error_parse_unexpected; + goto out; + } ++ /* FALLTHRU */ + case '"': + state = json_tokener_state_string; + printbuf_reset(tok->pb); +diff --git a/linkhash.c b/linkhash.c +index 712c387..74e3b0f 100644 +--- a/linkhash.c ++++ b/linkhash.c +@@ -376,17 +376,17 @@ static uint32_t hashlittle( const void *key, size_t length, uint32_t initval) + /*-------------------------------- last block: affect all 32 bits of (c) */ + switch(length) /* all the case statements fall through */ + { +- case 12: c+=((uint32_t)k[11])<<24; +- case 11: c+=((uint32_t)k[10])<<16; +- case 10: c+=((uint32_t)k[9])<<8; +- case 9 : c+=k[8]; +- case 8 : b+=((uint32_t)k[7])<<24; +- case 7 : b+=((uint32_t)k[6])<<16; +- case 6 : b+=((uint32_t)k[5])<<8; +- case 5 : b+=k[4]; +- case 4 : a+=((uint32_t)k[3])<<24; +- case 3 : a+=((uint32_t)k[2])<<16; +- case 2 : a+=((uint32_t)k[1])<<8; ++ case 12: c+=((uint32_t)k[11])<<24; /* FALLTHRU */ ++ case 11: c+=((uint32_t)k[10])<<16; /* FALLTHRU */ ++ case 10: c+=((uint32_t)k[9])<<8; /* FALLTHRU */ ++ case 9 : c+=k[8]; /* FALLTHRU */ ++ case 8 : b+=((uint32_t)k[7])<<24; /* FALLTHRU */ ++ case 7 : b+=((uint32_t)k[6])<<16; /* FALLTHRU */ ++ case 6 : b+=((uint32_t)k[5])<<8; /* FALLTHRU */ ++ case 5 : b+=k[4]; /* 
FALLTHRU */ ++ case 4 : a+=((uint32_t)k[3])<<24; /* FALLTHRU */ ++ case 3 : a+=((uint32_t)k[2])<<16; /* FALLTHRU */ ++ case 2 : a+=((uint32_t)k[1])<<8; /* FALLTHRU */ + case 1 : a+=k[0]; + break; + case 0 : return c; +-- +2.14.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/json-c/json-c_0.12.bb b/import-layers/yocto-poky/meta/recipes-devtools/json-c/json-c_0.12.bb index a15455c9b..072c092c0 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/json-c/json-c_0.12.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/json-c/json-c_0.12.bb @@ -7,6 +7,7 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=de54b60fbbc35123ba193fea8ee216f2" SRC_URI = "https://s3.amazonaws.com/json-c_releases/releases/${BP}.tar.gz \ file://0001-json_tokener-requires-INF-and-NAN.patch \ file://0001-Link-against-libm-when-needed.patch \ + file://0001-Add-FALLTHRU-comment-to-handle-GCC7-warnings.patch \ " SRC_URI[md5sum] = "3ca4bbb881dfc4017e8021b5e0a8c491" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb index 9b65a991d..4ca0e4da3 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/kconfig-frontends/kconfig-frontends_3.12.0.0.bb @@ -13,14 +13,15 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=9b8cf60ff39767ff04b671fca8302408" SECTION = "devel" DEPENDS += "ncurses flex bison gperf-native" RDEPENDS_${PN} += "python bash" -SRC_URI = "http://ymorin.is-a-geek.org/download/${BPN}/${BP}.tar.xz" +SRC_URI = "git://ymorin.is-a-geek.org/kconfig-frontends" -SRC_URI[md5sum] = "b939280dcc83f8feabd87a1d5f9b00c2" -SRC_URI[sha256sum] = "ea2615a62c74bea6ce3b38402f00c7513858f307f6ba7aa9fdbf0bbc12bcf407" +SRCREV = "75d35b172fc0f7b6620dd659af41f2ce04edc4e6" + +S = "${WORKDIR}/git" inherit autotools pkgconfig do_configure_prepend () { - mkdir -p scripts/.autostuff/m4 + mkdir -p ${S}/scripts/.autostuff/m4 } do_install_append() { @@ -32,7 +33,7 @@ EXTRA_OECONF += "--disable-gconf --disable-qconf" # Some packages have the version preceeding the .so instead properly # versioned .so., so we need to reorder and repackage. -SOLIBS = "-${@d.getVar('PV', True)[:-2]}.so" +SOLIBS = "-${@d.getVar('PV')[:-2]}.so" FILES_SOLIBSDEV = "${libdir}/libkconfig-parser.so" BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch b/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch new file mode 100644 index 000000000..26e8b06f3 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch @@ -0,0 +1,41 @@ +From ff4aae4c8beaf17cb8e7a3431f6c541eccfae244 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:22:09 +0200 +Subject: [PATCH 1/2] Do not set PYTHON_INSTALL_DIR by running python. 
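For context, this change (and the matching librepo change further below) is needed because letting CMake run the build-host python to compute the site-packages path would bake a host path into a cross build. The value is passed in from the recipe metadata instead; a sketch of the recipe side, matching what the libcomps, libdnf and librepo recipes added by this patch do:

    EXTRA_OECMAKE = " -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DPYTHON_DESIRED=3"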
+ +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + libcomps/src/python/src/python2/CMakeLists.txt | 2 +- + libcomps/src/python/src/python3/CMakeLists.txt | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/libcomps/src/python/src/python2/CMakeLists.txt b/libcomps/src/python/src/python2/CMakeLists.txt +index 3ad9e18..0e7dd4b 100644 +--- a/libcomps/src/python/src/python2/CMakeLists.txt ++++ b/libcomps/src/python/src/python2/CMakeLists.txt +@@ -1,7 +1,7 @@ + find_package (PythonLibs 2.6) + find_package (PythonInterp 2.6 REQUIRED) + +-execute_process (COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) ++#execute_process (COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) + + include_directories(${PYTHON_INCLUDE_PATH}) + include_directories(${LIBCOMPS_INCLUDE_PATH}) +diff --git a/libcomps/src/python/src/python3/CMakeLists.txt b/libcomps/src/python/src/python3/CMakeLists.txt +index 7fafa9f..ed82d3d 100644 +--- a/libcomps/src/python/src/python3/CMakeLists.txt ++++ b/libcomps/src/python/src/python3/CMakeLists.txt +@@ -2,7 +2,7 @@ find_package (PythonLibs 3.0) + find_package (PythonInterp 3.0) + #add_custom_target(py3-copy) + +-execute_process (COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) ++#execute_process (COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) + + include_directories(${PYTHON_INCLUDE_PATH}) + include_directories(${LIBCOMPS_INCLUDE_PATH}) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0001-Make-__comps_objmrtree_all-static-inline.patch b/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0001-Make-__comps_objmrtree_all-static-inline.patch new file mode 100644 index 000000000..88469fb33 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0001-Make-__comps_objmrtree_all-static-inline.patch @@ -0,0 +1,35 @@ +From 91a324f8771818b81017fdf4daaad0c8c4b6987c Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Mon, 20 Mar 2017 11:38:54 -0700 +Subject: [PATCH] Make __comps_objmrtree_all() static inline + +This helps compilers to scope the symbol correctly +and apply the inlining optimizations, clang e.g. +emits the functions and calls in code which is +suboptimal, therefore give a little help to compiler +this function is not used anywhere else to have +a global scope. 
+ +Upstream-Status: Pending + +Signed-off-by: Khem Raj +--- + libcomps/src/comps_objmradix.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/libcomps/src/comps_objmradix.c b/libcomps/src/comps_objmradix.c +index 9be6648..55f7793 100644 +--- a/libcomps/src/comps_objmradix.c ++++ b/libcomps/src/comps_objmradix.c +@@ -604,7 +604,7 @@ inline void comps_objmrtree_pair_destroy_v(void * pair) { + free(pair); + } + +-inline COMPS_HSList* __comps_objmrtree_all(COMPS_ObjMRTree * rt, char keyvalpair) { ++static inline COMPS_HSList* __comps_objmrtree_all(COMPS_ObjMRTree * rt, char keyvalpair) { + COMPS_HSList *to_process, *ret; + COMPS_HSListItem *hsit, *oldit; + size_t x; +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0002-Set-library-installation-path-correctly.patch b/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0002-Set-library-installation-path-correctly.patch new file mode 100644 index 000000000..ec1fdc409 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps/0002-Set-library-installation-path-correctly.patch @@ -0,0 +1,27 @@ +From b1f61296e2f16c2b9a39c5501e4538628ff01ab4 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:26:00 +0200 +Subject: [PATCH 2/2] Set library installation path correctly + +Upstream-Status: Pending +Signed-off-by: Alexander Kanavin +--- + libcomps/src/CMakeLists.txt | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/libcomps/src/CMakeLists.txt b/libcomps/src/CMakeLists.txt +index e553d77..e2eef9c 100644 +--- a/libcomps/src/CMakeLists.txt ++++ b/libcomps/src/CMakeLists.txt +@@ -52,7 +52,7 @@ add_dependencies(libcomps src-copy) + IF (CMAKE_SIZEOF_VOID_P MATCHES "8") + SET (LIB_SUFFIX "64") + ENDIF (CMAKE_SIZEOF_VOID_P MATCHES "8") +-set (LIB_INST_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}) ++set (LIB_INST_DIR ${CMAKE_INSTALL_LIBDIR}) + + + install (FILES ${libcomps_HEADERS} DESTINATION include/libcomps) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps_git.bb new file mode 100644 index 000000000..e69bf6772 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/libcomps/libcomps_git.bb @@ -0,0 +1,24 @@ +SUMMARY = "Libcomps is alternative for yum.comps library (which is for managing rpm package groups)." 
+LICENSE = "GPLv2" +LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263" + +SRC_URI = "git://github.com/rpm-software-management/libcomps.git \ + file://0001-Do-not-set-PYTHON_INSTALL_DIR-by-running-python.patch \ + file://0002-Set-library-installation-path-correctly.patch \ + file://0001-Make-__comps_objmrtree_all-static-inline.patch \ + " + +PV = "0.1.8+git${SRCPV}" +SRCREV = "01a4759894cccff64d2561614a58281adf5ce859" + +S = "${WORKDIR}/git" + +inherit cmake distutils3-base + +DEPENDS += "libxml2 expat libcheck" + +EXTRA_OECMAKE = " -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DPYTHON_DESIRED=3" +OECMAKE_SOURCEPATH = "${S}/libcomps" + +BBCLASSEXTEND = "native nativesdk" + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch new file mode 100644 index 000000000..73acda6af --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch @@ -0,0 +1,31 @@ +From 9bb7630915c3e787732463a3e2064fe0e177101b Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Thu, 24 Nov 2016 14:33:07 +0200 +Subject: [PATCH 1/4] FindGtkDoc.cmake: drop the requirement for + GTKDOC_SCANGOBJ_WRAPPER + +For some reason cmake is not able to find it when building in openembedded, +and it's bundled with the source code anyway. + +Upstream-Status: Pending +Signed-off-by: Alexander Kanavin +--- + cmake/modules/FindGtkDoc.cmake | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/cmake/modules/FindGtkDoc.cmake b/cmake/modules/FindGtkDoc.cmake +index 92b2cc7..39f34bd 100644 +--- a/cmake/modules/FindGtkDoc.cmake ++++ b/cmake/modules/FindGtkDoc.cmake +@@ -52,7 +52,7 @@ find_program(GTKDOC_MKHTML_EXE gtkdoc-mkhtml PATH "${GLIB_PREFIX}/bin") + find_program(GTKDOC_FIXXREF_EXE gtkdoc-fixxref PATH "${GLIB_PREFIX}/bin") + + find_package_handle_standard_args(GtkDoc +- REQUIRED_VARS GTKDOC_SCAN_EXE GTKDOC_SCANGOBJ_EXE GTKDOC_SCANGOBJ_WRAPPER GTKDOC_MKDB_EXE GTKDOC_MKHTML_EXE GTKDOC_FIXXREF_EXE ++ REQUIRED_VARS GTKDOC_SCAN_EXE GTKDOC_SCANGOBJ_EXE GTKDOC_MKDB_EXE GTKDOC_MKHTML_EXE GTKDOC_FIXXREF_EXE + VERSION_VAR GtkDoc_VERSION) + + # :: +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch new file mode 100644 index 000000000..954add6e7 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch @@ -0,0 +1,28 @@ +From 5958b151a4dbb89114e90c971a34b74f873b7beb Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Tue, 7 Feb 2017 12:16:03 +0200 +Subject: [PATCH] Get parameters for both libsolv and libsolvext (libdnf is + using both) + +Upstream-Status: Pending [depends on whether https://github.com/openSUSE/libsolv/pull/177 is accepted] +Signed-off-by: Alexander Kanavin +--- + CMakeLists.txt | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index b531da1..e512da0 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -28,7 +28,7 @@ find_package (PkgConfig REQUIRED) + SET (CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake/modules) + PKG_CHECK_MODULES(GLIB 
gio-unix-2.0>=2.44.0 REQUIRED) + FIND_LIBRARY (RPMDB_LIBRARY NAMES rpmdb) +-PKG_CHECK_MODULES (LIBSOLV REQUIRED libsolv) ++PKG_CHECK_MODULES (LIBSOLV REQUIRED libsolv libsolvext) + set(LIBSOLV_LIBRARY ${LIBSOLV_LIBRARIES}) + pkg_check_modules (CHECK REQUIRED check) + pkg_check_modules (REPO REQUIRED librepo) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0002-Prefix-sysroot-path-to-introspection-tools-path.patch b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0002-Prefix-sysroot-path-to-introspection-tools-path.patch new file mode 100644 index 000000000..3d772a5f8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0002-Prefix-sysroot-path-to-introspection-tools-path.patch @@ -0,0 +1,36 @@ +From c8211ad99ccaa4af4a75e0ba639527267fcfd69e Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:17:19 +0200 +Subject: [PATCH 2/4] Prefix sysroot path to introspection tools path. + +Upstream-Status: Pending +Signed-off-by: Alexander Kanavin +--- + libdnf/CMakeLists.txt | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/libdnf/CMakeLists.txt b/libdnf/CMakeLists.txt +index 63f07bf..837792b 100644 +--- a/libdnf/CMakeLists.txt ++++ b/libdnf/CMakeLists.txt +@@ -133,7 +133,7 @@ if (GOBJECT_INTROSPECTION_FOUND) + set(GIR_TYPELIB "${GIR_PREFIX}.typelib") + + add_custom_command(OUTPUT ${GIR_XML} +- COMMAND env CFLAGS=${CMAKE_C_FLAGS} ${GOBJECT_INTROSPECTION_1.0_G_IR_SCANNER} ++ COMMAND env CFLAGS=${CMAKE_C_FLAGS} $ENV{PKG_CONFIG_SYSROOT_DIR}${GOBJECT_INTROSPECTION_1.0_G_IR_SCANNER} + --namespace=Dnf + --nsversion=${DNF_SO_VERSION}.0 + --library-path=${CMAKE_CURRENT_BINARY_DIR} +@@ -153,7 +153,7 @@ if (GOBJECT_INTROSPECTION_FOUND) + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}) + + add_custom_command(OUTPUT ${GIR_TYPELIB} +- COMMAND ${GOBJECT_INTROSPECTION_1.0_G_IR_COMPILER} ++ COMMAND $ENV{PKG_CONFIG_SYSROOT_DIR}${GOBJECT_INTROSPECTION_1.0_G_IR_COMPILER} + -o ${GIR_TYPELIB} + ${GIR_XML} + DEPENDS ${GIR_XML} +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0003-Set-the-library-installation-directory-correctly.patch b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0003-Set-the-library-installation-directory-correctly.patch new file mode 100644 index 000000000..d7e59d833 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0003-Set-the-library-installation-directory-correctly.patch @@ -0,0 +1,29 @@ +From 8d29879fe3606c78769c1bcdddf0bcfc7191c710 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:20:01 +0200 +Subject: [PATCH 3/4] Set the library installation directory correctly. 
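For context on this and the similar libcomps and librepo changes: upstream guesses lib versus lib64 from CMAKE_SIZEOF_VOID_P, which need not match the multilib layout OpenEmbedded selects, so these patches defer to CMAKE_INSTALL_LIBDIR instead. The OE cmake class is expected to supply that value; as a sketch only (assuming a recipe that had to pass it by hand), it amounts to:

    EXTRA_OECMAKE += " -DCMAKE_INSTALL_LIBDIR:PATH=${libdir}"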
+ +Upstream-Status: Pending +Signed-off-by: Alexander Kanavin +--- + CMakeLists.txt | 4 +++- + 1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index d35f0d7..8edb627 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -62,7 +62,9 @@ ADD_DEFINITIONS(-DPACKAGE_VERSION=\\"${LIBDNF_VERSION}\\") + IF (CMAKE_SIZEOF_VOID_P MATCHES "8") + SET (LIB_SUFFIX "64") + ENDIF (CMAKE_SIZEOF_VOID_P MATCHES "8") +-SET (LIB_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}") ++#SET (LIB_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}") ++SET (LIB_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}") ++ + + ADD_SUBDIRECTORY (libdnf) + ENABLE_TESTING() +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch new file mode 100644 index 000000000..931959b5f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf/0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch @@ -0,0 +1,29 @@ +From 6d2718b925453f9a6915001f489606eb8e4086c8 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:24:50 +0200 +Subject: [PATCH 4/4] Set libsolv variables with pkg-config (cmake's own module + doesn't work properly). + +Upstream-Status: Pending +Signed-off-by: Alexander Kanavin +--- + CMakeLists.txt | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index 8edb627..b531da1 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -28,7 +28,8 @@ find_package (PkgConfig REQUIRED) + SET (CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake/modules) + PKG_CHECK_MODULES(GLIB gio-unix-2.0>=2.44.0 REQUIRED) + FIND_LIBRARY (RPMDB_LIBRARY NAMES rpmdb) +-find_package (LibSolv 0.6.21 REQUIRED COMPONENTS ext) ++PKG_CHECK_MODULES (LIBSOLV REQUIRED libsolv) ++set(LIBSOLV_LIBRARY ${LIBSOLV_LIBRARIES}) + pkg_check_modules (CHECK REQUIRED check) + pkg_check_modules (REPO REQUIRED librepo) + FIND_PROGRAM (VALGRIND_PROGRAM NAMES valgrind PATH /usr/bin /usr/local/bin) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf_git.bb new file mode 100644 index 000000000..ef28611f8 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/libdnf/libdnf_git.bb @@ -0,0 +1,29 @@ +SUMMARY = "Library providing simplified C and Python API to libsolv" +LICENSE = "LGPLv2.1" +LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c" + +SRC_URI = "git://github.com/rpm-software-management/libdnf \ + file://0001-FindGtkDoc.cmake-drop-the-requirement-for-GTKDOC_SCA.patch \ + file://0002-Prefix-sysroot-path-to-introspection-tools-path.patch \ + file://0003-Set-the-library-installation-directory-correctly.patch \ + file://0004-Set-libsolv-variables-with-pkg-config-cmake-s-own-mo.patch \ + file://0001-Get-parameters-for-both-libsolv-and-libsolvext-libdn.patch \ + " + +PV = "0.2.3+git${SRCPV}" +SRCREV = "367545629cc01a8e622890d89bd13d062ce60d7b" + +S = "${WORKDIR}/git" + +DEPENDS = "glib-2.0 libsolv libcheck librepo rpm gtk-doc" + +inherit gtk-doc gobject-introspection cmake pkgconfig distutils3-base + +EXTRA_OECMAKE = " -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DWITH_MAN=OFF -DPYTHON_DESIRED=3 \ + ${@bb.utils.contains('GI_DATA_ENABLED', 'True', '-DWITH_GIR=ON', '-DWITH_GIR=OFF', d)} 
\ + " +EXTRA_OECMAKE_append_class-native = " -DWITH_GIR=OFF" +EXTRA_OECMAKE_append_class-nativesdk = " -DWITH_GIR=OFF" + +BBCLASSEXTEND = "native nativesdk" + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0001-Correctly-set-the-library-installation-directory.patch b/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0001-Correctly-set-the-library-installation-directory.patch new file mode 100644 index 000000000..01fea4046 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0001-Correctly-set-the-library-installation-directory.patch @@ -0,0 +1,28 @@ +From 36d87919223db9b054862ad38cdda8d9222a2bab Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:04:35 +0200 +Subject: [PATCH 1/4] Correctly set the library installation directory + +Upstream-Status: Pending +Signed-off-by: Alexander Kanavin +--- + librepo/CMakeLists.txt | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) + +diff --git a/librepo/CMakeLists.txt b/librepo/CMakeLists.txt +index 2fe76d8..5026def 100644 +--- a/librepo/CMakeLists.txt ++++ b/librepo/CMakeLists.txt +@@ -60,7 +60,8 @@ CONFIGURE_FILE("version.h.in" "${CMAKE_CURRENT_SOURCE_DIR}/version.h" @ONLY) + IF (CMAKE_SIZEOF_VOID_P MATCHES "8") + SET (LIB_SUFFIX "64") + ENDIF (CMAKE_SIZEOF_VOID_P MATCHES "8") +-SET (LIB_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}") ++#SET (LIB_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}") ++SET (LIB_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}") + + INSTALL(FILES ${librepo_HEADERS} DESTINATION include/librepo) + INSTALL(TARGETS librepo LIBRARY DESTINATION ${LIB_INSTALL_DIR}) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch b/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch new file mode 100644 index 000000000..7138dfce2 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch @@ -0,0 +1,41 @@ +From 1570ad33dd7e5d83f3ee80bd104b114709ac1e34 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:05:36 +0200 +Subject: [PATCH 2/4] Do not try to obtain PYTHON_INSTALL_DIR by running + python. 
+ +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + librepo/python/python2/CMakeLists.txt | 2 +- + librepo/python/python3/CMakeLists.txt | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/librepo/python/python2/CMakeLists.txt b/librepo/python/python2/CMakeLists.txt +index 3615e17..cffa99f 100644 +--- a/librepo/python/python2/CMakeLists.txt ++++ b/librepo/python/python2/CMakeLists.txt +@@ -1,6 +1,6 @@ + FIND_PACKAGE (PythonLibs 2 ) + FIND_PACKAGE (PythonInterp 2 REQUIRED) +-EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) ++#EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) + INCLUDE_DIRECTORIES (${PYTHON_INCLUDE_PATH}) + + MESSAGE(STATUS "Python install dir is ${PYTHON_INSTALL_DIR}") +diff --git a/librepo/python/python3/CMakeLists.txt b/librepo/python/python3/CMakeLists.txt +index dfecac9..38bcc72 100644 +--- a/librepo/python/python3/CMakeLists.txt ++++ b/librepo/python/python3/CMakeLists.txt +@@ -10,7 +10,7 @@ message("--- ${PYTHON_INCLUDE_DIR}") + + FIND_PACKAGE(PythonLibs 3.0) + FIND_PACKAGE(PythonInterp 3.0 REQUIRED) +-EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) ++#EXECUTE_PROCESS(COMMAND ${PYTHON_EXECUTABLE} -c "from sys import stdout; from distutils import sysconfig; stdout.write(sysconfig.get_python_lib(True))" OUTPUT_VARIABLE PYTHON_INSTALL_DIR) + INCLUDE_DIRECTORIES (${PYTHON_INCLUDE_PATH}) + + MESSAGE(STATUS "Python3 install dir is ${PYTHON_INSTALL_DIR}") +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0003-tests-fix-a-race-when-deleting-temporary-directories.patch b/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0003-tests-fix-a-race-when-deleting-temporary-directories.patch new file mode 100644 index 000000000..0d2fae434 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0003-tests-fix-a-race-when-deleting-temporary-directories.patch @@ -0,0 +1,41 @@ +From b1a5c92dbd1d11f1afdc094fccea64de334d2783 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:06:24 +0200 +Subject: [PATCH 3/4] tests: fix a race when deleting temporary directories + +Upstream-Status: Pending +Signed-off-by: Alexander Kanavin +--- + tests/python/tests/test_yum_repo_downloading.py | 2 +- + tests/python/tests/test_yum_repo_locating.py | 2 +- + 2 files changed, 2 insertions(+), 2 deletions(-) + +diff --git a/tests/python/tests/test_yum_repo_downloading.py b/tests/python/tests/test_yum_repo_downloading.py +index ad597dc..4a32519 100644 +--- a/tests/python/tests/test_yum_repo_downloading.py ++++ b/tests/python/tests/test_yum_repo_downloading.py +@@ -32,7 +32,7 @@ class TestCaseYumRepoDownloading(TestCaseWithFlask): + os.environ.pop('GNUPGHOME') + else: + os.environ['GNUPGHOME'] = self._gnupghome +- shutil.rmtree(self.tmpdir) ++ shutil.rmtree(self.tmpdir, True) + + def test_download_repo_01(self): + h = librepo.Handle() +diff --git a/tests/python/tests/test_yum_repo_locating.py b/tests/python/tests/test_yum_repo_locating.py +index 8f4bea5..db4294c 100644 +--- a/tests/python/tests/test_yum_repo_locating.py ++++ 
b/tests/python/tests/test_yum_repo_locating.py +@@ -34,7 +34,7 @@ class TestCaseYumRepoLocating(TestCase): + os.environ.pop('GNUPGHOME') + else: + os.environ['GNUPGHOME'] = self._gnupghome +- shutil.rmtree(self.tmpdir) ++ shutil.rmtree(self.tmpdir, True) + + def test_read_mirrorlist(self): + h = librepo.Handle() +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch b/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch new file mode 100644 index 000000000..6665b316c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo/0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch @@ -0,0 +1,29 @@ +From a36be8192615e2a1fb5a5856d44565277f15583b Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 30 Dec 2016 18:23:27 +0200 +Subject: [PATCH 4/4] Set gpgme variables with pkg-config, not with cmake + module (which doesn't work properly) + +Upstream-Status: Pending +Signed-off-by: Alexander Kanavin +--- + CMakeLists.txt | 3 ++- + 1 file changed, 2 insertions(+), 1 deletion(-) + +diff --git a/CMakeLists.txt b/CMakeLists.txt +index ef07d2d..f1fa09b 100644 +--- a/CMakeLists.txt ++++ b/CMakeLists.txt +@@ -31,7 +31,8 @@ PKG_CHECK_MODULES(GLIB2 glib-2.0 REQUIRED) + PKG_SEARCH_MODULE(LIBCRYPTO REQUIRED libcrypto openssl) + FIND_PACKAGE(EXPAT REQUIRED) + FIND_PACKAGE(CURL REQUIRED) +-FIND_PACKAGE(Gpgme REQUIRED) ++PKG_CHECK_MODULES(GPGME gpgme REQUIRED) ++set(GPGME_VANILLA_LIBRARIES ${GPGME_LIBRARIES}) + FIND_PACKAGE(Xattr REQUIRED) + + INCLUDE_DIRECTORIES(${GLIB2_INCLUDE_DIRS}) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo_git.bb new file mode 100644 index 000000000..2f194f143 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/librepo/librepo_git.bb @@ -0,0 +1,24 @@ +SUMMARY = " A library providing C and Python (libcURL like) API for downloading linux repository metadata and packages." 
+LICENSE = "LGPLv2.1" +LIC_FILES_CHKSUM = "file://COPYING;md5=4fbd65380cdd255951079008b364516c" + +SRC_URI = "git://github.com/rpm-software-management/librepo.git \ + file://0001-Correctly-set-the-library-installation-directory.patch \ + file://0002-Do-not-try-to-obtain-PYTHON_INSTALL_DIR-by-running-p.patch \ + file://0003-tests-fix-a-race-when-deleting-temporary-directories.patch \ + file://0004-Set-gpgme-variables-with-pkg-config-not-with-cmake-m.patch \ + " + +PV = "1.7.20+git${SRCPV}" +SRCREV = "e1137cbbda78fecb192146300790680a5bc811b1" + +S = "${WORKDIR}/git" + +DEPENDS = "curl expat glib-2.0 openssl attr libcheck gpgme" + +inherit cmake distutils3-base pkgconfig + +EXTRA_OECMAKE = " -DPYTHON_INSTALL_DIR=${PYTHON_SITEPACKAGES_DIR} -DPYTHON_DESIRED=3" + +BBCLASSEXTEND = "native nativesdk" + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/libtool/libtool_2.4.6.bb b/import-layers/yocto-poky/meta/recipes-devtools/libtool/libtool_2.4.6.bb index 8858f6eef..06abb0558 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/libtool/libtool_2.4.6.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/libtool/libtool_2.4.6.bb @@ -11,6 +11,8 @@ SYSROOT_DIRS_BLACKLIST += " \ ${datadir}/libtool/build-aux \ " +ACLOCALEXTRAPATH_class-target = "" + do_install_append () { sed -e 's@--sysroot=${STAGING_DIR_HOST}@@g' \ -e 's@${STAGING_DIR_HOST}@@g' \ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools-native_0.9.69.bb b/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools-native_0.9.69.bb deleted file mode 100644 index a5deb2e2d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools-native_0.9.69.bb +++ /dev/null @@ -1,26 +0,0 @@ -SUMMARY = "Convert LinuxDoc SGML source into other formats" -HOMEPAGE = "http://packages.debian.org/linuxdoc-tools" -LICENSE = "GPLv3+" -LIC_FILES_CHKSUM = "file://COPYING;md5=077ef64ec3ac257fb0d786531cf26931" - -DEPENDS = "groff-native openjade-native" - -SRC_URI = "http://snapshot.debian.org/archive/debian/20160728T043443Z/pool/main/l/${BPN}/${BPN}_${PV}.orig.tar.gz \ - file://disable_sgml2rtf.patch \ - file://disable_txt_doc.patch \ - file://disable_tex_doc.patch \ - file://disable_dvips_doc.patch" - -SRC_URI[md5sum] = "1d13d500918a7a145b0edc2f16f61dd1" -SRC_URI[sha256sum] = "7103facee18a2ea97186ca459d743d22f7f89ad4b5cd1dfd1c34f83d6bfd4101" - -UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/l/linuxdoc-tools/" -inherit autotools-brokensep native - -do_configure () { - oe_runconf -} - -do_install() { - oe_runmake 'DESTDIR=${D}' 'TMPDIR=${T}' install -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_dvips_doc.patch b/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_dvips_doc.patch deleted file mode 100644 index 490b36799..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_dvips_doc.patch +++ /dev/null @@ -1,33 +0,0 @@ -Disable building documentation which requires the dvips utility. -This patch should be dropped once we include a native version of -dvips. - -Upstream-Status: Inappropriate [Other] -Temporary workaround which disables documentation. 
- -Signed-off-by: Scott Garman - -diff -urN linuxdoc-tools-0.9.66.orig/doc/Makedoc.sh linuxdoc-tools-0.9.66/doc/Makedoc.sh ---- linuxdoc-tools-0.9.66.orig/doc/Makedoc.sh 2011-02-25 15:26:41.142917782 -0800 -+++ linuxdoc-tools-0.9.66/doc/Makedoc.sh 2011-02-25 15:27:25.141917472 -0800 -@@ -52,13 +52,13 @@ - # $TMPDIR/sgml2latex --pass="\usepackage{times}" -o dvi ./guide - #fi - --if [ -n "`which dvips`" ]; then -- echo " + dvips" >&2 -- dvips -t letter -o ./guide.ps ./guide.dvi -- if [ -n "`which gzip`" -a -f ./guide.ps ]; then -- gzip -fN ./guide.ps -- fi --fi -+#if [ -n "`which dvips`" ]; then -+# echo " + dvips" >&2 -+# dvips -t letter -o ./guide.ps ./guide.dvi -+# if [ -n "`which gzip`" -a -f ./guide.ps ]; then -+# gzip -fN ./guide.ps -+# fi -+#fi - - - echo "- Building info docs" >&2 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_sgml2rtf.patch b/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_sgml2rtf.patch deleted file mode 100644 index 50c8a8f3f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_sgml2rtf.patch +++ /dev/null @@ -1,64 +0,0 @@ -From 756f20e70a97ee2dea9b32c0955eabfc27f29be1 Mon Sep 17 00:00:00 2001 -From: Andrei Dinu -Date: Wed, 29 May 2013 16:50:17 +0300 -Subject: [PATCH] The build of sgml2rtf is problematic due to the way it wants - to link to a shared library version of flex. Flex only - ships with a static lib. Rather than diverging from - upstream flex, simply skip building this un-needed utility. - -Upstream-Status: Inappropriate [Other] -Workaround which disables a feature. - -Signed-off-by: Scott Garman -Signed-off-by: Andrei Dinu ---- - Makefile.in | 12 ++++++------ - 1 file changed, 6 insertions(+), 6 deletions(-) - -diff --git a/Makefile.in b/Makefile.in -index 359f14e..fc04020 100644 ---- a/Makefile.in -+++ b/Makefile.in -@@ -40,7 +40,7 @@ perl5lib_ddir = $(DESTDIR)$(perl5libdir) - pkgdata_ddir = $(DESTDIR)$(pkgdatadir) - tex_ddir = $(DESTDIR)$(texdir) - --progs := sgml2html sgml2info sgml2latex sgml2lyx sgml2rtf sgml2txt sgmlcheck -+progs := sgml2html sgml2info sgml2latex sgml2lyx sgml2txt sgmlcheck - - PROFILE = - INCLUDE = -@@ -71,9 +71,9 @@ endif - ( cd sgmlpre ; \ - $(MAKE) CFLAGS="$(CFLAGS)" LDFLAGS="$(LDFLAGS)" LEX=flex sgmlpre || exit -1 ) - -- @echo "Compiling RTF conversion tools (in rtf-fix/)..." -- ( cd rtf-fix ; \ -- $(MAKE) CFLAGS="$(CFLAGS)" LDFLAGS="$(LDFLAGS)" || exit -1 ) -+# @echo "Compiling RTF conversion tools (in rtf-fix/)..." -+# ( cd rtf-fix ; \ -+# $(MAKE) CFLAGS="$(CFLAGS)" LDFLAGS="$(LDFLAGS)" || exit -1 ) - - @echo "making man pages in genman ..." - if [ ! 
-d genman ]; then mkdir genman ; fi -@@ -117,7 +117,7 @@ endif - - # -- Install auxiliary programs - mkdir -p $(auxbin_ddir) -- for ii in sgmlpre/sgmlpre rtf-fix/rtf2rtf; do \ -+ for ii in sgmlpre/sgmlpre; do \ - bn=`basename $$ii`; \ - $(INSTALL_PROGRAM) $$ii $(auxbin_ddir)/$$bn; \ - done -@@ -206,7 +206,7 @@ bin/linuxdoc:: Makefile bin/linuxdoc.in - - clean:: - -rm -f *~ bin/*~ bin/linuxdoc -- for d in $(DDIRS) $(MDIRS) rtf-fix; do $(MAKE) -C $$d clean; done -+ for d in $(DDIRS) $(MDIRS); do $(MAKE) -C $$d clean; done - (cd sgmlpre ; rm -f sgmlpre.o sgmlpre) - -rm -rf genman/ - --- -1.7.9.5 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_tex_doc.patch b/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_tex_doc.patch deleted file mode 100644 index b62895c67..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_tex_doc.patch +++ /dev/null @@ -1,29 +0,0 @@ -Disable building documentation which requires the latex utility. -This patch should be dropped once we include a native version of -latex. - -Upstream-Status: Inappropriate [Other] -Temporary workaround which disables documentation. - -Signed-off-by: Scott Garman - -diff -urN linuxdoc-tools-0.9.66.orig/doc/Makedoc.sh linuxdoc-tools-0.9.66/doc/Makedoc.sh ---- linuxdoc-tools-0.9.66.orig/doc/Makedoc.sh 2009-11-09 11:58:25.000000000 -0800 -+++ linuxdoc-tools-0.9.66/doc/Makedoc.sh 2011-02-25 15:23:58.610016114 -0800 -@@ -46,11 +46,11 @@ - $TMPDIR/sgml2txt -b 1 ./guide - fi - --if [ -n "`which latex`" ]; then -- ln -s $TMPDIR/linuxdoc $TMPDIR/sgml2latex -- echo "- Building latex docs" >&2 -- $TMPDIR/sgml2latex --pass="\usepackage{times}" -o dvi ./guide --fi -+#if [ -n "`which latex`" ]; then -+# ln -s $TMPDIR/linuxdoc $TMPDIR/sgml2latex -+# echo "- Building latex docs" >&2 -+# $TMPDIR/sgml2latex --pass="\usepackage{times}" -o dvi ./guide -+#fi - - if [ -n "`which dvips`" ]; then - echo " + dvips" >&2 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_txt_doc.patch b/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_txt_doc.patch deleted file mode 100644 index 8d784110d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/linuxdoc-tools/linuxdoc-tools/disable_txt_doc.patch +++ /dev/null @@ -1,36 +0,0 @@ -Disable building txt documentation. This is a temporary workaround, -as I have found an Ubuntu 10.10 system which throws errors during -building this that I'd like to ultimately fix. The error manifests -itself from the end of LinuxDocTools.pm with the following messages -during do_install: - -| - Building txt docs -| Processing file ./guide -| troff: fatal error: can't find macro file s -| fmt_txt::postASP: Empty output file, error when calling groff. Aborting... - -Upstream-Status: Inappropriate [Other] -Temporary workaround which disables documentation. 
- -Signed-off-by: Scott Garman - -diff -urN linuxdoc-tools-0.9.66.orig//doc/Makedoc.sh linuxdoc-tools-0.9.66/doc/Makedoc.sh ---- linuxdoc-tools-0.9.66.orig//doc/Makedoc.sh 2009-11-09 11:58:25.000000000 -0800 -+++ linuxdoc-tools-0.9.66/doc/Makedoc.sh 2011-03-04 17:37:24.788923998 -0800 -@@ -40,11 +40,11 @@ - - chmod u+x $TMPDIR/linuxdoc - --if [ -n "`which groff`" ]; then -- ln -s $TMPDIR/linuxdoc $TMPDIR/sgml2txt -- echo "- Building txt docs" >&2 -- $TMPDIR/sgml2txt -b 1 ./guide --fi -+#if [ -n "`which groff`" ]; then -+# ln -s $TMPDIR/linuxdoc $TMPDIR/sgml2txt -+# echo "- Building txt docs" >&2 -+# $TMPDIR/sgml2txt -b 1 ./guide -+#fi - - if [ -n "`which latex`" ]; then - ln -s $TMPDIR/linuxdoc $TMPDIR/sgml2latex diff --git a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.17.inc b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.17.inc deleted file mode 100644 index 8ea4e0490..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.17.inc +++ /dev/null @@ -1,18 +0,0 @@ -require m4.inc - -EXTRA_OECONF += "--without-libsigsegv-prefix" - -LICENSE = "GPLv3" - -LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504\ - file://examples/COPYING;md5=4031593b2166d6c47cae282d944a7ede" - -SRC_URI += "file://ac_config_links.patch \ - file://remove-gets.patch \ - " - -SRC_URI_append_class-target = "\ - file://0001-Unset-need_charset_alias-when-building-for-musl.patch \ - " -SRC_URI[md5sum] = "a5e9954b1dae036762f7b13673a2cf76" -SRC_URI[sha256sum] = "3ce725133ee552b8b4baca7837fb772940b25e81b2a9dc92537aeaf733538c9e" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.18.inc b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.18.inc new file mode 100644 index 000000000..d7c864857 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.18.inc @@ -0,0 +1,22 @@ +require m4.inc + +EXTRA_OECONF += "--without-libsigsegv-prefix" + +LICENSE = "GPLv3" + +LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504\ + file://examples/COPYING;md5=4031593b2166d6c47cae282d944a7ede" + +SRC_URI += "file://ac_config_links.patch \ + file://remove-gets.patch \ + " + +SRC_URI_append_class-target = "\ + file://0001-Unset-need_charset_alias-when-building-for-musl.patch \ + " + +# Fix "Argument list too long" error when len(TMPDIR) = 410 +acpaths = "-I ./m4" + +SRC_URI[md5sum] = "a077779db287adf4e12a035029002d28" +SRC_URI[sha256sum] = "ab2633921a5cd38e48797bf5521ad259bdc4b979078034a3b790d7fec5493fab" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.9.inc b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.9.inc deleted file mode 100644 index aab2c1efa..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-1.4.9.inc +++ /dev/null @@ -1,13 +0,0 @@ -require m4.inc - -LICENSE = "GPLv2" - -LIC_FILES_CHKSUM = "file://COPYING;md5=751419260aa954499f7abaabaa882bbe\ - file://examples/COPYING;md5=1d49bd61dc590f014cae7173b43e3e5c" - -PR = "r2" -SRC_URI += "file://fix_for_circular_dependency.patch" - -SRC_URI[md5sum] = "1ba8e147aff5e79bd2bfb983d86b53d5" -SRC_URI[sha256sum] = "815ce53853fbf6493617f467389b799208b1ec98296b95be44a683f8bcfd7c47" - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-native_1.4.17.bb b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-native_1.4.17.bb deleted file mode 100644 index 06d8aa244..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-native_1.4.17.bb +++ /dev/null @@ -1,13 +0,0 @@ -require m4-${PV}.inc - -inherit native - 
-INHIBIT_AUTOTOOLS_DEPS = "1" -DEPENDS += "gnu-config-native" - -do_configure() { - install -m 0644 ${STAGING_DATADIR}/gnu-config/config.sub . - install -m 0644 ${STAGING_DATADIR}/gnu-config/config.guess . - oe_runconf -} - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-native_1.4.18.bb b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-native_1.4.18.bb new file mode 100644 index 000000000..06d8aa244 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4-native_1.4.18.bb @@ -0,0 +1,13 @@ +require m4-${PV}.inc + +inherit native + +INHIBIT_AUTOTOOLS_DEPS = "1" +DEPENDS += "gnu-config-native" + +do_configure() { + install -m 0644 ${STAGING_DATADIR}/gnu-config/config.sub . + install -m 0644 ${STAGING_DATADIR}/gnu-config/config.guess . + oe_runconf +} + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4/fix_for_circular_dependency.patch b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4/fix_for_circular_dependency.patch deleted file mode 100644 index 98774535d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4/fix_for_circular_dependency.patch +++ /dev/null @@ -1,77 +0,0 @@ -Upstream-Status: Inappropriate [licensing] - -The older GPLv2 m4 does not work well with newer autoconf. It causes the -circular dependency as seen bellow. - Removing this m4 file which was needed only forl older autoconf - -| configure.ac:34: error: AC_REQUIRE: circular dependency of AC_GNU_SOURCE -| /build_disk/poky_build/build1/tmp/work/i586-poky-linux/m4-1.4.9-r0/m4-1.4.9/m4/extensions.m4:19: AC_USE_SYSTEM_EXTENSIONS is expanded from... -| ../../lib/autoconf/specific.m4:310: AC_GNU_SOURCE is expanded from... -| /build_disk/poky_build/build1/tmp/work/i586-poky-linux/m4-1.4.9-r0/m4-1.4.9/m4/gnulib-comp.m4:21: M4_EARLY is expanded from... -| configure.ac:34: the top level -| autom4te: /build_disk/poky_build/build1/tmp/sysroots/x86_64-linux/usr/bin/m4 failed with exit status: 1 -| aclocal: /build_disk/poky_build/build1/tmp/sysroots/x86_64-linux/usr/bin/autom4te failed with exit status: 1 -| autoreconf: aclocal failed with exit status: 1 - -Nitin A Kamble -2011/03/16 - -Index: m4-1.4.9/m4/extensions.m4 -=================================================================== ---- m4-1.4.9.orig/m4/extensions.m4 -+++ m4-1.4.9/m4/extensions.m4 -@@ -6,53 +6,10 @@ - # gives unlimited permission to copy and/or distribute it, - # with or without modifications, as long as this notice is preserved. - --# This definition of AC_USE_SYSTEM_EXTENSIONS is stolen from CVS --# Autoconf. Perhaps we can remove this once we can assume Autoconf --# 2.61 or later everywhere, but since CVS Autoconf mutates rapidly --# enough in this area it's likely we'll need to redefine --# AC_USE_SYSTEM_EXTENSIONS for quite some time. -- --# AC_USE_SYSTEM_EXTENSIONS --# ------------------------ --# Enable extensions on systems that normally disable them, --# typically due to standards-conformance issues. --AC_DEFUN([AC_USE_SYSTEM_EXTENSIONS], --[ -- AC_BEFORE([$0], [AC_COMPILE_IFELSE]) -- AC_BEFORE([$0], [AC_RUN_IFELSE]) -- -- AC_REQUIRE([AC_GNU_SOURCE]) -- AC_REQUIRE([AC_AIX]) -- AC_REQUIRE([AC_MINIX]) -- -- AH_VERBATIM([__EXTENSIONS__], --[/* Enable extensions on Solaris. 
*/ --#ifndef __EXTENSIONS__ --# undef __EXTENSIONS__ --#endif --#ifndef _POSIX_PTHREAD_SEMANTICS --# undef _POSIX_PTHREAD_SEMANTICS --#endif --#ifndef _TANDEM_SOURCE --# undef _TANDEM_SOURCE --#endif]) -- AC_CACHE_CHECK([whether it is safe to define __EXTENSIONS__], -- [ac_cv_safe_to_define___extensions__], -- [AC_COMPILE_IFELSE( -- [AC_LANG_PROGRAM([ --# define __EXTENSIONS__ 1 -- AC_INCLUDES_DEFAULT])], -- [ac_cv_safe_to_define___extensions__=yes], -- [ac_cv_safe_to_define___extensions__=no])]) -- test $ac_cv_safe_to_define___extensions__ = yes && -- AC_DEFINE([__EXTENSIONS__]) -- AC_DEFINE([_POSIX_PTHREAD_SEMANTICS]) -- AC_DEFINE([_TANDEM_SOURCE]) --]) - - # gl_USE_SYSTEM_EXTENSIONS - # ------------------------ - # Enable extensions on systems that normally disable them, - # typically due to standards-conformance issues. - AC_DEFUN([gl_USE_SYSTEM_EXTENSIONS], -- [AC_REQUIRE([AC_USE_SYSTEM_EXTENSIONS])]) -+ []) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4/remove-gets.patch b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4/remove-gets.patch index effb353f1..abe82f3b2 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4/remove-gets.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4/remove-gets.patch @@ -4,20 +4,21 @@ undefining it. Upstream-Status: Pending Signed-off-by: Khem Raj -Index: m4-1.4.17/lib/stdio.in.h -=================================================================== ---- m4-1.4.17.orig/lib/stdio.in.h 2013-10-16 14:41:01.678496227 +0300 -+++ m4-1.4.17/lib/stdio.in.h 2013-10-16 14:41:48.849427839 +0300 -@@ -717,10 +717,12 @@ +--- +diff --git a/lib/stdio.in.h b/lib/stdio.in.h +index 5727452..f04a691 100644 +--- a/lib/stdio.in.h ++++ b/lib/stdio.in.h +@@ -742,10 +742,12 @@ _GL_WARN_ON_USE (getline, "getline is unportable - " /* It is very rare that the developer ever has full control of stdin, so any use of gets warrants an unconditional warning; besides, C11 removed it. 
*/ +#if defined gets #undef gets - #if HAVE_RAW_DECL_GETS + #if HAVE_RAW_DECL_GETS && !defined __cplusplus _GL_WARN_ON_USE (gets, "gets is a security hole - use fgets instead"); #endif +#endif - #if @GNULIB_OBSTACK_PRINTF@ || @GNULIB_OBSTACK_PRINTF_POSIX@ + struct obstack; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.17.bb b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.17.bb deleted file mode 100644 index b12c0adf3..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.17.bb +++ /dev/null @@ -1,3 +0,0 @@ -require m4-${PV}.inc - -BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.18.bb b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.18.bb new file mode 100644 index 000000000..b12c0adf3 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.18.bb @@ -0,0 +1,3 @@ +require m4-${PV}.inc + +BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.9.bb b/import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.9.bb deleted file mode 100644 index b12c0adf3..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/m4/m4_1.4.9.bb +++ /dev/null @@ -1,3 +0,0 @@ -require m4-${PV}.inc - -BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/make/make-3.81/make_fix_for_automake-1.12.patch b/import-layers/yocto-poky/meta/recipes-devtools/make/make-3.81/make_fix_for_automake-1.12.patch deleted file mode 100644 index 102fe79ab..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/make/make-3.81/make_fix_for_automake-1.12.patch +++ /dev/null @@ -1,43 +0,0 @@ -Upstream-Status: Pending - -automake 1.12 has depricated automatic de-ANSI-fication support - -this patch avoids these kinds of errors: - -| configure.in:48: error: automatic de-ANSI-fication support has been removed -... -| Makefile.am:19: error: automatic de-ANSI-fication support has been removed -| autoreconf: automake failed with exit status: 1 -| ERROR: autoreconf execution failed. - - -Signed-off-by: Nitin A Kamble -2012/05/04 - -Index: make-3.81/configure.in -=================================================================== ---- make-3.81.orig/configure.in -+++ make-3.81/configure.in -@@ -44,9 +44,6 @@ AC_AIX - AC_ISC_POSIX - AC_MINIX - --# Needed for ansi2knr --AM_C_PROTOTYPES -- - # Enable gettext, in "external" mode. - - AM_GNU_GETTEXT_VERSION(0.14.1) -Index: make-3.81/Makefile.am -=================================================================== ---- make-3.81.orig/Makefile.am -+++ make-3.81/Makefile.am -@@ -16,7 +16,7 @@ - # GNU Make; see the file COPYING. If not, write to the Free Software - # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 
- --AUTOMAKE_OPTIONS = 1.8 dist-bzip2 check-news ansi2knr -+AUTOMAKE_OPTIONS = 1.8 dist-bzip2 check-news - ACLOCAL_AMFLAGS = -I config - - MAKE_HOST = @MAKE_HOST@ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/make/make-3.81/makeinfo.patch b/import-layers/yocto-poky/meta/recipes-devtools/make/make-3.81/makeinfo.patch deleted file mode 100644 index 5dd760467..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/make/make-3.81/makeinfo.patch +++ /dev/null @@ -1,22 +0,0 @@ -Allow docs to build with makeinfo 5.X, fixing the error: - -doc/make.texi:8165: @itemx must follow @item - -Upstream-Status: Backport - -RP -2014/03/29 - -Index: make-3.81/doc/make.texi -=================================================================== ---- make-3.81.orig/doc/make.texi 2006-04-01 06:36:40.000000000 +0000 -+++ make-3.81/doc/make.texi 2014-03-29 09:39:51.007727012 +0000 -@@ -8162,7 +8162,7 @@ - rarely need to specify this option since @samp{make} does it for you; - see @ref{-w Option, ,The @samp{--print-directory} Option}.) - --@itemx --no-print-directory -+@item --no-print-directory - @cindex @code{--no-print-directory} - Disable printing of the working directory under @code{-w}. - This option is useful when @code{-w} is turned on automatically, diff --git a/import-layers/yocto-poky/meta/recipes-devtools/make/make_3.81.bb b/import-layers/yocto-poky/meta/recipes-devtools/make/make_3.81.bb deleted file mode 100644 index b8a79b0eb..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/make/make_3.81.bb +++ /dev/null @@ -1,15 +0,0 @@ -PR = "r1" - -LICENSE = "GPLv2 & LGPLv2" -LIC_FILES_CHKSUM = "file://COPYING;md5=361b6b837cad26c6900a926b62aada5f \ - file://tests/COPYING;md5=8ca43cbc842c2336e835926c2166c28b \ - file://glob/COPYING.LIB;md5=4a770b67e6be0f60da244beb2de0fce4" - -require make.inc - -SRC_URI += "file://make_fix_for_automake-1.12.patch" -SRC_URI += "file://makeinfo.patch" - -SRC_URI[md5sum] = "354853e0b2da90c527e35aabb8d6f1e6" -SRC_URI[sha256sum] = "f3e69023771e23908f5d5592954d8271d3d6af09693cecfd29cee6fde8550dc8" - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mkelfimage/mkelfimage/fix-makefile-to-find-libz.patch b/import-layers/yocto-poky/meta/recipes-devtools/mkelfimage/mkelfimage/fix-makefile-to-find-libz.patch index 756a65cd9..be547543d 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/mkelfimage/mkelfimage/fix-makefile-to-find-libz.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/mkelfimage/mkelfimage/fix-makefile-to-find-libz.patch @@ -3,8 +3,11 @@ Let makefile find libz and zlib.h by CFLAGS and LDFLAGS. Signed-off-by: Hongxu Jia Upstream-Status: Pending --- + configure.ac | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + diff --git a/configure.ac b/configure.ac -index d1acc36..6f865b0 100644 +index 0f2ac72..f9099a2 100644 --- a/configure.ac +++ b/configure.ac @@ -62,7 +62,7 @@ AC_CHECK_PROG([RPM], rpm, rpm, [], [$PATH]) @@ -16,15 +19,15 @@ index d1acc36..6f865b0 100644 dnl Find the default programs if test "with_default" != no ; then -@@ -175,7 +175,7 @@ fi - +@@ -176,7 +176,7 @@ fi dnl ---Output variables... 
--HOST_CFLAGS="$HOST_CFLAGS -O2 -Wall \$(HOST_CPPFLAGS)" -+HOST_CFLAGS="$HOST_CFLAGS -O2 -Wall \$(HOST_CPPFLAGS) $CFLAGS" + CFLAGS="${CFLAGS:--O2} -Wall \$(CPPFLAGS)" +-HOST_CFLAGS="${HOST_CFLAGS:--O2} -Wall \$(HOST_CPPFLAGS)" ++HOST_CFLAGS="${HOST_CFLAGS:--O2} -Wall \$(HOST_CPPFLAGS) $CFLAGS" dnl TODO: figure out how to set these appropriately for compilers other than gcc I386_CFLAGS="$I386_CFLAGS -Os -ffreestanding -Wall -W -Wno-format \$(I386_CPPFLAGS)" -- -1.7.10.4 +2.7.4 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mklibs/mklibs-native_0.1.41.bb b/import-layers/yocto-poky/meta/recipes-devtools/mklibs/mklibs-native_0.1.41.bb deleted file mode 100644 index b3c1b5ba7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/mklibs/mklibs-native_0.1.41.bb +++ /dev/null @@ -1,21 +0,0 @@ -SUMMARY = "Shared library optimisation tool" -DESCRIPTION = "mklibs produces cut-down shared libraries that contain only the routines required by a particular set of executables." -HOMEPAGE = "https://launchpad.net/mklibs" -SECTION = "devel" -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://debian/copyright;md5=98d31037b13d896e33890738ef01af64" -DEPENDS = "python-native" - -SRC_URI = "http://snapshot.debian.org/archive/debian/20160207T221625Z/pool/main/m/${BPN}/${BPN}_${PV}.tar.xz \ - file://ac_init_fix.patch\ - file://fix_STT_GNU_IFUNC.patch\ - file://sysrooted-ldso.patch \ - file://avoid-failure-on-symbol-provided-by-application.patch \ - file://show-GNU-unique-symbols-as-provided-symbols.patch \ - file://fix_cross_compile.patch \ -" - -SRC_URI[md5sum] = "6b2979876a611717df3d49e7f9cf291d" -SRC_URI[sha256sum] = "058c7349f8ec8a03b529c546a95cd6426741bd819f1e1211f499273eb4bf5d89" - -inherit autotools gettext native pythonnative diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mklibs/mklibs-native_0.1.43.bb b/import-layers/yocto-poky/meta/recipes-devtools/mklibs/mklibs-native_0.1.43.bb new file mode 100644 index 000000000..b9c6c6f32 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/mklibs/mklibs-native_0.1.43.bb @@ -0,0 +1,25 @@ +SUMMARY = "Shared library optimisation tool" +DESCRIPTION = "mklibs produces cut-down shared libraries that contain only the routines required by a particular set of executables." 
+HOMEPAGE = "https://launchpad.net/mklibs" +SECTION = "devel" +LICENSE = "GPLv2+" +LIC_FILES_CHKSUM = "file://debian/copyright;md5=98d31037b13d896e33890738ef01af64" +DEPENDS = "python-native" + +SRC_URI = "http://snapshot.debian.org/archive/debian/20161123T152011Z/pool/main/m/mklibs/mklibs_${PV}.tar.xz \ + file://ac_init_fix.patch\ + file://fix_STT_GNU_IFUNC.patch\ + file://sysrooted-ldso.patch \ + file://avoid-failure-on-symbol-provided-by-application.patch \ + file://show-GNU-unique-symbols-as-provided-symbols.patch \ + file://fix_cross_compile.patch \ +" + +SRC_URI[md5sum] = "39b08a173454e5210ab3f598e94179bf" +SRC_URI[sha256sum] = "6f0cf24ade13fff76e943c003413d85c3e497c984c95c1ecea1c9731ca86f13c" + +UPSTREAM_CHECK_URI = "${DEBIAN_MIRROR}/main/m/mklibs/" + +inherit autotools gettext native pythonnative + +S = "${WORKDIR}/mklibs" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mmc/mmc-utils_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/mmc/mmc-utils_git.bb index 0a8da19e0..7858819c3 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/mmc/mmc-utils_git.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/mmc/mmc-utils_git.bb @@ -4,7 +4,7 @@ LICENSE = "GPLv2" LIC_FILES_CHKSUM = "file://mmc.c;beginline=1;endline=20;md5=fae32792e20f4d27ade1c5a762d16b7d" SRCBRANCH ?= "master" -SRCREV = "bb779acfc385d135b32a6998c1d1fceef0491400" +SRCREV = "2cb6695e8dec00d887bdd5309d1b57d836fcd214" PV = "0.1" @@ -13,8 +13,8 @@ SRC_URI = "git://git.kernel.org/pub/scm/linux/kernel/git/cjb/mmc-utils.git;branc S = "${WORKDIR}/git" CFLAGS_append_powerpc64 = " -D__SANE_USERSPACE_TYPES__" -CFLAGS_append_mips64 = " -D__SANE_USERSPACE_TYPES__" -CFLAGS_append_mips64n32 = " -D__SANE_USERSPACE_TYPES__" +CFLAGS_append_mipsarchn64 = " -D__SANE_USERSPACE_TYPES__" +CFLAGS_append_mipsarchn32 = " -D__SANE_USERSPACE_TYPES__" do_install() { install -d ${D}${bindir} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/0001-Fix-build-with-musl.patch b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/0001-Fix-build-with-musl.patch index 305be5215..bf3f98f14 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/0001-Fix-build-with-musl.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/0001-Fix-build-with-musl.patch @@ -1,21 +1,20 @@ -From e16fa28bc57c29923ab60af2ac343da83e1992d8 Mon Sep 17 00:00:00 2001 +From 4dab9bed1033f797ef9b482c77342fe3fe26d0be Mon Sep 17 00:00:00 2001 From: Khem Raj Date: Tue, 6 Oct 2015 23:51:34 +0000 Subject: [PATCH] Fix build with musl -Upstream-Status: Pending +Upstream-Status: Backport Signed-off-by: Khem Raj +Signed-off-by: David Oberhollenzer --- - mkfs.jffs2.c | 44 ++++++++++++++++++++++++++++++++++++++++++-- - recv_image.c | 1 - - serve_image.c | 1 - - 3 files changed, 42 insertions(+), 4 deletions(-) + jffsX-utils/mkfs.jffs2.c | 1 + + 1 file changed, 1 insertion(+) -diff --git a/mkfs.jffs2.c b/mkfs.jffs2.c -index f09c0b2..ed2dc43 100644 ---- a/mkfs.jffs2.c -+++ b/mkfs.jffs2.c +diff --git a/jffsX-utils/mkfs.jffs2.c b/jffsX-utils/mkfs.jffs2.c +index 5446a16..ca5e0d5 100644 +--- a/jffsX-utils/mkfs.jffs2.c ++++ b/jffsX-utils/mkfs.jffs2.c @@ -72,6 +72,7 @@ #include #include diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/010-fix-rpmatch.patch b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/010-fix-rpmatch.patch index 7d783e7a5..853de6af9 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/010-fix-rpmatch.patch +++ 
b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/010-fix-rpmatch.patch @@ -1,11 +1,48 @@ -Replace rpmatch() usage with checking first character of line +From 82839c3c0371ca2a1643a99d7d01f5bc1c850b28 Mon Sep 17 00:00:00 2001 +From: David Oberhollenzer +Date: Thu, 2 Mar 2017 11:40:36 +0100 +Subject: [PATCH] Replace rpmatch() usage with checking first character of line + +This is based on the patch from Khem Raj used by openembedded. In +addition to the original patch, this also removes the fallback +implementation that was provided for C libraries that don't implement +rpmatch. + +Upstream-Status: Backport -Upstream-Status: Pending Signed-off-by: Khem Raj +Signed-off-by: David Oberhollenzer +--- + include/common.h | 25 ++++++------------------- + 1 file changed, 6 insertions(+), 19 deletions(-) +diff --git a/include/common.h b/include/common.h +index d0c706d..d609257 100644 --- a/include/common.h +++ b/include/common.h -@@ -122,10 +122,12 @@ +@@ -129,21 +129,6 @@ extern "C" { + fprintf(stderr, "%s: warning!: " fmt "\n", PROGRAM_NAME, ##__VA_ARGS__); \ + } while(0) + +-/* uClibc versions before 0.9.34 and musl don't have rpmatch() */ +-#if defined(__UCLIBC__) && \ +- (__UCLIBC_MAJOR__ == 0 && \ +- (__UCLIBC_MINOR__ < 9 || \ +- (__UCLIBC_MINOR__ == 9 && __UCLIBC_SUBLEVEL__ < 34))) || \ +- !defined(__GLIBC__) +-#undef rpmatch +-#define rpmatch __rpmatch +-static inline int __rpmatch(const char *resp) +-{ +- return (resp[0] == 'y' || resp[0] == 'Y') ? 1 : +- (resp[0] == 'n' || resp[0] == 'N') ? 0 : -1; +-} +-#endif +- + /** + * prompt the user for confirmation + */ +@@ -164,10 +149,12 @@ static inline bool prompt(const char *msg, bool def) } if (strcmp("\n", line) != 0) { @@ -22,3 +59,5 @@ Signed-off-by: Khem Raj puts("unknown response; please try again"); continue; } +-- +2.6.1 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/add-exclusion-to-mkfs-jffs2-git-2.patch b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/add-exclusion-to-mkfs-jffs2-git-2.patch index 57d6a30d8..0e3776af0 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/add-exclusion-to-mkfs-jffs2-git-2.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/add-exclusion-to-mkfs-jffs2-git-2.patch @@ -1,7 +1,7 @@ Upstream-Status: Pending --- /tmp/mkfs.jffs2.c 2009-01-11 15:28:41.000000000 +0100 -+++ git/mkfs.jffs2.c 2009-01-11 15:59:29.000000000 +0100 ++++ git/jffsX-utils/mkfs.jffs2.c 2009-01-11 15:59:29.000000000 +0100 @@ -100,6 +100,11 @@ struct rb_node hardlink_rb; }; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/fix-armv7-neon-alignment.patch b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/fix-armv7-neon-alignment.patch index 05f1629d5..6fc594f00 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/fix-armv7-neon-alignment.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/fix-armv7-neon-alignment.patch @@ -1,16 +1,25 @@ -Upstream-Status: Pending +From 7d026a85946a08b8167dcd792ea6660bf6a49e08 Mon Sep 17 00:00:00 2001 +From: Yuanjie Huang +Date: Thu, 2 Mar 2017 10:43:56 +0100 +Subject: [PATCH] Fix alignment trap triggered by NEON instructions NEON instruction VLD1.64 was used to copy 64 bits data after type casting, and they will trigger alignment trap. This patch uses memcpy to avoid alignment problem. 
+Upstream-Status: Backport + Signed-off-by: Yuanjie Huang +Signed-off-by: David Oberhollenzer +--- + ubifs-utils/mkfs.ubifs/key.h | 16 ++++++++++------ + 1 file changed, 10 insertions(+), 6 deletions(-) -diff --git a/mkfs.ubifs/key.h b/mkfs.ubifs/key.h -index d3a02d4..e7e9218 100644 ---- a/mkfs.ubifs/key.h -+++ b/mkfs.ubifs/key.h -@@ -141,10 +141,12 @@ static inline void data_key_init(union ubifs_key *key, ino_t inum, +diff --git a/ubifs-utils/mkfs.ubifs/key.h b/ubifs-utils/mkfs.ubifs/key.h +index 39379fd..118858b 100644 +--- a/ubifs-utils/mkfs.ubifs/key.h ++++ b/ubifs-utils/mkfs.ubifs/key.h +@@ -159,10 +159,12 @@ static inline void data_key_init(union ubifs_key *key, ino_t inum, */ static inline void key_write(const union ubifs_key *from, void *to) { @@ -26,7 +35,7 @@ index d3a02d4..e7e9218 100644 memset(to + 8, 0, UBIFS_MAX_KEY_LEN - 8); } -@@ -156,10 +158,12 @@ static inline void key_write(const union ubifs_key *from, void *to) +@@ -174,10 +176,12 @@ static inline void key_write(const union ubifs_key *from, void *to) */ static inline void key_write_idx(const union ubifs_key *from, void *to) { @@ -42,3 +51,5 @@ index d3a02d4..e7e9218 100644 } /** +-- +2.6.1 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/mtd-utils-fix-corrupt-cleanmarker-with-flash_erase--j-command.patch b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/mtd-utils-fix-corrupt-cleanmarker-with-flash_erase--j-command.patch index 7207cfcb3..0f42e73e9 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/mtd-utils-fix-corrupt-cleanmarker-with-flash_erase--j-command.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils/mtd-utils-fix-corrupt-cleanmarker-with-flash_erase--j-command.patch @@ -49,8 +49,8 @@ v2 : get length of availble freeoob bytes from oobinfo information, diff --git a/flash_erase.c b/flash_erase.c index 933373a..4b9d84b 100644 ---- a/flash_erase.c -+++ b/flash_erase.c +--- a/misc-utils/flash_erase.c ++++ b/misc-utils/flash_erase.c @@ -99,6 +99,7 @@ int main(int argc, char *argv[]) bool isNAND; int error = 0; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils_git.bb index 8a3afaf62..4fbc54f8f 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils_git.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/mtd/mtd-utils_git.bb @@ -5,26 +5,27 @@ LICENSE = "GPLv2+" LIC_FILES_CHKSUM = "file://COPYING;md5=0636e73ff0215e8d672dc4c32c317bb3 \ file://include/common.h;beginline=1;endline=17;md5=ba05b07912a44ea2bf81ce409380049c" +inherit autotools pkgconfig + DEPENDS = "zlib lzo e2fsprogs util-linux" -PV = "1.5.2" +PV = "2.0.0" -SRCREV = "aea36417067dade75192bafa03af70b6eb2677b1" +SRCREV = "1bfee8660131fca7a18f68e9548a18ca6b3378a0" SRC_URI = "git://git.infradead.org/mtd-utils.git \ file://add-exclusion-to-mkfs-jffs2-git-2.patch \ file://fix-armv7-neon-alignment.patch \ file://mtd-utils-fix-corrupt-cleanmarker-with-flash_erase--j-command.patch \ file://0001-Fix-build-with-musl.patch \ + file://010-fix-rpmatch.patch \ " -SRC_URI_append_libc-musl = " file://010-fix-rpmatch.patch " - S = "${WORKDIR}/git/" # xattr support creates an additional compile-time dependency on acl because # the sys/acl.h header is needed. libacl is not needed and thus enabling xattr # regardless whether acl is enabled or disabled in the distro should be okay. 
-PACKAGECONFIG ?= "${@bb.utils.contains('DISTRO_FEATURES', 'xattr', 'xattr', '', d)}" +PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'xattr', d)}" PACKAGECONFIG[xattr] = ",,acl," EXTRA_OEMAKE = "'CC=${CC}' 'RANLIB=${RANLIB}' 'AR=${AR}' 'CFLAGS=${CFLAGS} ${@bb.utils.contains('PACKAGECONFIG', 'xattr', '', '-DWITHOUT_XATTR', d)} -I${S}/include' 'BUILDDIR=${S}'" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/fix-broken-lz.patch b/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/fix-broken-lz.patch deleted file mode 100644 index cb454917f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/fix-broken-lz.patch +++ /dev/null @@ -1,23 +0,0 @@ -Upstream-Status: Backport - -Signed-off-by: Wenlin Kang -Signed-off-by: Jackie Huang ---- - Makefile.in | 1 + - 1 file changed, 1 insertion(+) - -diff --git a/Makefile.in b/Makefile.in -index 8f9305a..694e837 100644 ---- a/Makefile.in -+++ b/Makefile.in -@@ -251,6 +251,7 @@ install-scripts: ${DESTDIR}$(bindir)/mtools - @$(top_srcdir)/mkinstalldirs ${DESTDIR}$(bindir) - @for j in $(SCRIPTS) ; do \ - $(INSTALL_SCRIPT) $(srcdir)/scripts/$$j ${DESTDIR}$(bindir)/$$j ; \ -+ $(INSTALL_PROGRAM) $(srcdir)/scripts/$$j ${DESTDIR}$(bindir)/$$j ; \ - echo ${DESTDIR}$(bindir)/$$j ; \ - done - rm -f ${DESTDIR}$(bindir)/lz --- -2.0.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/mtools.patch b/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/mtools.patch deleted file mode 100644 index 15a32088d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/mtools.patch +++ /dev/null @@ -1,129 +0,0 @@ -$NetBSD: patch-aa,v 1.10 2007/08/17 20:55:34 joerg Exp $ - ---- - Makefile.in | 74 ++++++++++++++++++++++++++---------------------------------- - 1 file changed, 33 insertions(+), 41 deletions(-) - -Upstream-Status: Inappropriate [licensing] - -Index: mtools-3.9.9/Makefile.in -=================================================================== ---- mtools-3.9.9.orig/Makefile.in 2007-10-12 11:18:46.000000000 +0100 -+++ mtools-3.9.9/Makefile.in 2007-10-12 11:28:14.000000000 +0100 -@@ -195,30 +195,22 @@ html: mtools.html mtools_toc.html - - # Don't cd, to avoid breaking install-sh references. 
- install-info: info -- $(top_srcdir)/mkinstalldirs $(infodir) -+ $(top_srcdir)/mkinstalldirs ${DESTDIR}$(infodir) - if test -f mtools.info; then \ - for i in mtools.info*; do \ -- $(INSTALL_DATA) $$i $(infodir)/$$i; \ -+ $(INSTALL_DATA) $$i ${DESTDIR}$(infodir)/$$i; \ - done; \ - else \ - for i in $(srcdir)/mtools.info*; do \ -- $(INSTALL_DATA) $$i $(infodir)/`echo $$i | sed 's|^$(srcdir)/||'`; \ -+ $(INSTALL_DATA) $$i ${DESTDIR}$(infodir)/`echo $$i | sed 's|^$(srcdir)/||'`; \ - done; \ - fi; \ -- if [ -n "$(INSTALL_INFO)" ] ; then \ -- if [ -f $(infodir)/dir.info ] ; then \ -- $(INSTALL_INFO) $(infodir)/mtools.info $(infodir)/dir.info; \ -- fi; \ -- if [ -f $(infodir)/dir ] ; then \ -- $(INSTALL_INFO) $(infodir)/mtools.info $(infodir)/dir; \ -- fi; \ -- fi - - uninstall-info: - cd $(infodir) && rm -f mtools.info* - --install: $(bindir)/mtools @BINFLOPPYD@ install-man install-links \ -- $(bindir)/mkmanifest install-scripts install-info -+install: ${DESTDIR}$(bindir)/mtools ${DESTDIR}$(bindir)/floppyd install-man install-links \ -+ ${DESTDIR}$(bindir)/mkmanifest install-scripts install-info - - uninstall: uninstall-bin uninstall-man uninstall-links \ - uninstall-scripts -@@ -228,52 +220,52 @@ distclean: clean texclean - maintainer-clean: distclean - - --$(bindir)/floppyd: floppyd -- $(top_srcdir)/mkinstalldirs $(bindir) -- $(INSTALL_PROGRAM) floppyd $(bindir)/floppyd -+${DESTDIR}$(bindir)/floppyd: floppyd -+ $(top_srcdir)/mkinstalldirs ${DESTDIR}$(bindir) -+ $(INSTALL_PROGRAM) floppyd ${DESTDIR}$(bindir)/floppyd - --$(bindir)/floppyd_installtest: floppyd_installtest -- $(top_srcdir)/mkinstalldirs $(bindir) -- $(INSTALL_PROGRAM) floppyd_installtest $(bindir)/floppyd_installtest -+${DESTDIR}$(bindir)/floppyd_installtest: floppyd_installtest -+ $(top_srcdir)/mkinstalldirs ${DESTDIR}$(bindir) -+ $(INSTALL_PROGRAM) floppyd_installtest ${DESTDIR}$(bindir)/floppyd_installtest - --$(bindir)/mtools: mtools -- $(top_srcdir)/mkinstalldirs $(bindir) -- $(INSTALL_PROGRAM) mtools $(bindir)/mtools -+${DESTDIR}$(bindir)/mtools: mtools -+ $(top_srcdir)/mkinstalldirs ${DESTDIR}$(bindir) -+ $(INSTALL_PROGRAM) mtools ${DESTDIR}$(bindir)/mtools - --$(bindir)/mkmanifest: mkmanifest -- $(top_srcdir)/mkinstalldirs $(bindir) -- $(INSTALL_PROGRAM) mkmanifest $(bindir)/mkmanifest -+${DESTDIR}$(bindir)/mkmanifest: mkmanifest -+ $(top_srcdir)/mkinstalldirs ${DESTDIR}$(bindir) -+ $(INSTALL_PROGRAM) mkmanifest ${DESTDIR}$(bindir)/mkmanifest - - #$(ETCDIR)/mtools: mtools.etc - # cp mtools.etc $(ETCDIR)/mtools - --install-links: $(bindir)/mtools -+install-links: ${DESTDIR}$(bindir)/mtools - @for j in $(LINKS); do \ -- rm -f $(bindir)/$$j ; \ -- $(LN_S) mtools $(bindir)/$$j ; \ -- echo $(bindir)/$$j ; \ -+ rm -f ${DESTDIR}$(bindir)/$$j ; \ -+ $(LN_S) mtools ${DESTDIR}$(bindir)/$$j ; \ -+ echo ${DESTDIR}$(bindir)/$$j ; \ - done - - ## "z" is the older version of "gz"; the name is just *too* short --install-scripts: $(bindir)/mtools -- @$(top_srcdir)/mkinstalldirs $(bindir) -+install-scripts: ${DESTDIR}$(bindir)/mtools -+ @$(top_srcdir)/mkinstalldirs ${DESTDIR}$(bindir) - @for j in $(SCRIPTS) ; do \ -- $(INSTALL_PROGRAM) $(srcdir)/scripts/$$j $(bindir)/$$j ; \ -- echo $(bindir)/$$j ; \ -+ $(INSTALL_SCRIPT) $(srcdir)/scripts/$$j ${DESTDIR}$(bindir)/$$j ; \ -+ echo ${DESTDIR}$(bindir)/$$j ; \ - done -- rm -f $(bindir)/lz -- $(LN_S) uz $(bindir)/lz -+ rm -f ${DESTDIR}$(bindir)/lz -+ $(LN_S) uz ${DESTDIR}$(bindir)/lz - - install-man: -- @$(top_srcdir)/mkinstalldirs $(MAN1DIR) -+ @$(top_srcdir)/mkinstalldirs ${DESTDIR}$(MAN1DIR) 
- @for j in $(MAN1); do \ -- $(INSTALL_DATA) $(srcdir)/$$j $(MAN1DIR)/$$j ; \ -- echo $(MAN1DIR)/$$j ; \ -+ $(INSTALL_DATA) $(srcdir)/$$j ${DESTDIR}$(MAN1DIR)/$$j ; \ -+ echo ${DESTDIR}$(MAN1DIR)/$$j ; \ - done -- @$(top_srcdir)/mkinstalldirs $(MAN5DIR) -+ @$(top_srcdir)/mkinstalldirs ${DESTDIR}$(MAN5DIR) - @for j in $(MAN5); do \ -- $(INSTALL_DATA) $(srcdir)/$$j $(MAN5DIR)/$$j ; \ -- echo $(MAN5DIR)/$$j ; \ -+ $(INSTALL_DATA) $(srcdir)/$$j ${DESTDIR}$(MAN5DIR)/$$j ; \ -+ echo ${DESTDIR}$(MAN5DIR)/$$j ; \ - done - - uninstall-bin: diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/no-x11.patch b/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/no-x11.patch deleted file mode 100644 index 705b62944..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools/no-x11.patch +++ /dev/null @@ -1,21 +0,0 @@ ---- - Makefile.in | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -Upstream-Status: Inappropriate [licensing] - ---- mtools-3.9.9.orig/Makefile.in -+++ mtools-3.9.9/Makefile.in -@@ -128,11 +128,11 @@ X_EXTRA_LIBS = @X_EXTRA_LIBS@ - X_PRE_LIBS = @X_PRE_LIBS@ - CFLAGS = $(CPPFLAGS) $(DEFS) $(MYCFLAGS) -I. @extraincludedir@ -I@srcdir@ $(USERCFLAGS) - CXXFLAGS = $(CPPFLAGS) $(DEFS) $(MYCXXFLAGS) -I. @extraincludedir@ -I@srcdir@ $(USERCFLAGS) - LINK = $(CC) $(LDFLAGS) $(USERLDFLAGS) @extralibdir@ - ALLLIBS = $(USERLDLIBS) $(MACHDEPLIBS) $(SHLIB) $(LIBS) --X_LDFLAGS = $(X_EXTRA_LIBS) $(X_LIBS) $(X_PRE_LIBS) -lXau -lX11 $(LIBS) -+X_LDFLAGS = $(X_EXTRA_LIBS) $(X_LIBS) $(X_PRE_LIBS) $(LIBS) - X_CCFLAGS = $(X_CFLAGS) $(CFLAGS) - - all: mtools $(LINKS) mkmanifest @FLOPPYD@ - - %.o: %.c diff --git a/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools_3.9.9.bb b/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools_3.9.9.bb deleted file mode 100644 index 2904ff4f9..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/mtools/mtools_3.9.9.bb +++ /dev/null @@ -1,55 +0,0 @@ -SUMMARY = "Utilities to access MS-DOS disks without mounting them" -DESCRIPTION = "Mtools is a collection of utilities to access MS-DOS disks from GNU and Unix without mounting them." 
-HOMEPAGE = "http://www.gnu.org/software/mtools/" -SECTION = "optional" -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://COPYING;md5=92b58ec77696788ce278b044d2a8e9d3" -PR = "r6" - -DEPENDS += "virtual/libiconv" - -RDEPENDS_${PN}_libc-glibc = "glibc-gconv-ibm850" -RRECOMMENDS_${PN}_libc-glibc = "\ - glibc-gconv-ibm437 \ - glibc-gconv-ibm737 \ - glibc-gconv-ibm775 \ - glibc-gconv-ibm851 \ - glibc-gconv-ibm852 \ - glibc-gconv-ibm855 \ - glibc-gconv-ibm857 \ - glibc-gconv-ibm860 \ - glibc-gconv-ibm861 \ - glibc-gconv-ibm862 \ - glibc-gconv-ibm863 \ - glibc-gconv-ibm865 \ - glibc-gconv-ibm866 \ - glibc-gconv-ibm869 \ - " -SRC_URI[md5sum] = "3e68b857b4e1f3a6521d1dfefbd30a36" -SRC_URI[sha256sum] = "af083a73425d664d4607ef6c6564fd9319a0e47ee7c105259a45356cb834690e" - -#http://mtools.linux.lu/mtools-${PV}.tar.gz -SRC_URI = "http://downloads.yoctoproject.org/mirror/sources/mtools-${PV}.tar.gz \ - file://mtools-makeinfo.patch \ - file://mtools.patch \ - file://no-x11.patch \ - file://fix-broken-lz.patch \ - file://0001-Continue-even-if-fs-size-is-not-divisible-by-sectors.patch \ - " - -inherit autotools texinfo - -EXTRA_OECONF = "--without-x" - -LDFLAGS_append_libc-uclibc = " -liconv " - -BBCLASSEXTEND = "native nativesdk" - -PACKAGECONFIG ??= "" -PACKAGECONFIG[libbsd] = "ac_cv_lib_bsd_main=yes,ac_cv_lib_bsd_main=no,libbsd" - -do_install_prepend () { - # Create bindir to fix parallel installation issues - mkdir -p ${D}/${bindir} - mkdir -p ${D}/${datadir} -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/nasm/nasm_2.12.02.bb b/import-layers/yocto-poky/meta/recipes-devtools/nasm/nasm_2.12.02.bb index 9c4b60f84..3280b8458 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/nasm/nasm_2.12.02.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/nasm/nasm_2.12.02.bb @@ -2,7 +2,6 @@ SUMMARY = "General-purpose x86 assembler" SECTION = "devel" LICENSE = "BSD-2-Clause" LIC_FILES_CHKSUM = "file://LICENSE;md5=90904486f8fbf1861cf42752e1a39efe" -COMPATIBLE_HOST = '(x86_64|i.86).*-(linux|freebsd.*)' SRC_URI = "http://www.nasm.us/pub/nasm/releasebuilds/${PV}/nasm-${PV}.tar.bz2 " diff --git a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/fix-regex.patch b/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/fix-regex.patch deleted file mode 100644 index 6b40afdad..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/fix-regex.patch +++ /dev/null @@ -1,32 +0,0 @@ -From 55f6fd8f1958aa36584eefeecce782a505963c88 Mon Sep 17 00:00:00 2001 -From: benvm -Date: Wed, 9 Jan 2013 12:14:06 -0500 -Subject: [PATCH] Fix Makefile regular expression - -This patch modifies a regular expression within a Makefile to stop builds -from failing in the case where the path contains the characters ".a". 
- -Upstream-Status: Submitted - -Signed-off-by: Kai Kang - ---- - Makefile.prog.in | 2 +- - 1 files changed, 1 insertions(+), 1 deletions(-) - -diff --git a/Makefile.prog.in b/Makefile.prog.in -index 76310c9..44c3534 100644 ---- a/Makefile.prog.in -+++ b/Makefile.prog.in -@@ -12,7 +12,7 @@ LINKFLAGS = @LINKFLAGS@ - - ALL_LIBS = $(XLIBS) $(LIBS) - Makefile.lt: -- echo 'LT_LIBS='`echo $(ALL_LIBS)|sed 's/\.a/.la/g'` >Makefile.lt -+ echo 'LT_LIBS='`echo $(ALL_LIBS) | sed 's/\.a\s/\.la /g' | sed s/\.a$$/\.la/` > Makefile.lt - - PROG:=$(shell echo "$(PROG)" | sed '@program_transform_name@') - --- -1.7.0.4 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/makefile.patch b/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/makefile.patch deleted file mode 100644 index 968b9b47c..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/makefile.patch +++ /dev/null @@ -1,39 +0,0 @@ -This patch fixes libtool QA issues with WORKDIR creeping in to -libospgrove.la and libostyle.la. Patch obtained from OpenEmbedded. - -Upstream-Status: Inappropriate [Other] -Workaround is specific to our build system. - -Signed-off-by: Scott Garman - -Index: openjade-1.3.2/spgrove/Makefile.sub -=================================================================== ---- openjade-1.3.2.orig/spgrove/Makefile.sub -+++ openjade-1.3.2/spgrove/Makefile.sub -@@ -1,8 +1,8 @@ - LTVERSION=0:1:0 - LIB=ospgrove - INCLUDE=-I$(srcdir)/../grove --DEPLIBS=-lm -L$(TOP)/grove -L$(TOP)/grove/.libs \ -- -L$(TOP)/lib -L$(TOP)/lib/.libs \ -+DEPLIBS=-lm $(TOP)/grove \ -+ $(TOP)/lib \ - $(LIB_THREADS) - OBJS=GroveApp.o GroveBuilder.o SdNode.o - GENSRCS=grove_inst.cxx -Index: openjade-1.3.2/style/Makefile.sub -=================================================================== ---- openjade-1.3.2.orig/style/Makefile.sub -+++ openjade-1.3.2/style/Makefile.sub -@@ -1,8 +1,8 @@ - LTVERSION=0:1:0 - LIB=ostyle --DEPLIBS=-lm -L$(TOP)/grove -L$(TOP)/grove/.libs \ -- -L$(TOP)/lib -L$(TOP)/lib/.libs \ -- -L$(TOP)/spgrove -L$(TOP)/spgrove/.libs -+DEPLIBS=-lm $(TOP)/grove \ -+ $(TOP)/lib \ -+ $(TOP)/spgrove - OBJS=LangObj.o \ - Collector.o \ - DssslApp.o \ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/msggen.pl.patch b/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/msggen.pl.patch deleted file mode 100644 index b47fd4655..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/msggen.pl.patch +++ /dev/null @@ -1,44 +0,0 @@ -commit fcc5b94f118495b1a467edcda6c6f631691c3f69 -Author: Dennis Lan -Date: Tue Jul 3 09:25:42 2012 +0800 - - openjade: fix undefined Getopts error, use std namespace - - Using Gentoo Linux as the build host, it fails without this patch - Use Getopt::Std in place of getopts.pl. - - Upstream-Status: Inappropriate [no upstream] - Original-Author-By: Mike Gilbert - Signed-off-by: Dennis Lan - -diff --git a/msggen.pl b/msggen.pl -index 0c33968..2ee3f66 100644 ---- a/msggen.pl -+++ b/msggen.pl -@@ -4,6 +4,7 @@ - # See the file COPYING for copying permission. - - use POSIX; -+use Getopt::Std; - - # Package and version. 
- $package = 'openjade'; -@@ -18,8 +19,7 @@ $gen_c = 0; - undef $opt_l; - undef $opt_p; - undef $opt_t; --do 'getopts.pl'; --&Getopts('l:p:t:'); -+getopts('l:p:t:'); - $module = $opt_l; - $pot_file = $opt_p; - -@@ -72,7 +72,7 @@ while () { - else { - $field[0] =~ /^[IWQXE][0-9]$/ || &error("invalid first field");; - $type[$num] = substr($field[0], 0, 1); -- $argc = int(substr($field[0], 1, 1)); -+ $argc = substr($field[0], 1, 1); - } - $nargs[$num] = $argc; - $field[1] =~ /^[a-zA-Z_][a-zA-Z0-9_]+$/ || &error("invalid tag"); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/no-libtool.patch b/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/no-libtool.patch deleted file mode 100644 index 2f57c000f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/no-libtool.patch +++ /dev/null @@ -1,20 +0,0 @@ -The openjade build is fairly unique in auto-generating explicit dependencies to -installed .la files. As some distributions may delete these files unless -clearly required, change the Makefile fragment to depend on the .so instead. - -Patch originally by Phil Blundell . - -Signed-off-by: Ross Burton -Upstream-Status: Inappropriate - ---- openjade-1.3.2/Makefile.prog.in~ 2013-05-14 11:42:02.646782318 +0100 -+++ openjade-1.3.2/Makefile.prog.in 2013-05-14 11:54:55.051728343 +0100 -@@ -12,7 +12,7 @@ - - ALL_LIBS = $(XLIBS) $(LIBS) - Makefile.lt: -- echo 'LT_LIBS='`echo $(ALL_LIBS) | sed 's/\.a\s/\.la /g' | sed s/\.a$$/\.la/` > Makefile.lt -+ echo 'LT_LIBS='`for d in $(ALL_LIBS); do case $$d in ../*) echo $$d | sed s/\.a$$/.la/g ;; *) echo $$d | sed s/\.a$$/.so/g ;; esac ; done` >Makefile.lt - - PROG:=$(shell echo "$(PROG)" | sed '@program_transform_name@') - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/reautoconf.patch b/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/reautoconf.patch deleted file mode 100644 index 1a23a4a68..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/reautoconf.patch +++ /dev/null @@ -1,83 +0,0 @@ -Ensure we reautoconf the packag - -Currently since configure.in in is in a subdirectory, we don't reautoconf the -recipe. We really need to do this, to update things like the libtool script used -and fix various issues such as those that could creep in if a reautoconf is -triggered for some reason. Since this source only calls AM_INIT_AUTOMAKE to -gain the PACKAGE and VERSION definitions and that macro now errors if Makefile.am -doesn't exist, we need to add these definitions manually. - -These changes avoid failures like: ----- -| ... -| DssslApp.cxx:117:36: error: 'PACKAGE' was not declared in this scope -| DssslApp.cxx:118:36: error: 'VERSION' was not declared in this scope -| make[2]: *** [DssslApp.lo] Error 1 ---- - -Upstream-Status: Pending - -RP 2012/6/12 - -Index: openjade-1.3.2/acinclude.m4 -=================================================================== ---- /dev/null 1970-01-01 00:00:00.000000000 +0000 -+++ openjade-1.3.2/acinclude.m4 2012-06-12 12:48:54.871365344 +0000 -@@ -0,0 +1,39 @@ -+dnl Configure-time switch with default -+dnl -+dnl Each switch defines an --enable-FOO and --disable-FOO option in -+dnl the resulting configure script. 
-+dnl -+dnl Usage: -+dnl smr_SWITCH(name, description, default, pos-def, neg-def) -+dnl -+dnl where: -+dnl -+dnl name name of switch; generates --enable-name & --disable-name -+dnl options -+dnl description help string is set to this prefixed by "enable" or -+dnl "disable", whichever is the non-default value -+dnl default either "on" or "off"; specifies default if neither -+dnl --enable-name nor --disable-name is specified -+dnl pos-def a symbol to AC_DEFINE if switch is on (optional) -+dnl neg-def a symbol to AC_DEFINE if switch is off (optional) -+dnl -+AC_DEFUN(smr_SWITCH, [ -+ AC_MSG_CHECKING(whether to enable $2) -+ AC_ARG_ENABLE( -+ $1, -+ ifelse($3, on, -+ [ --disable-[$1] disable [$2]], -+ [ --enable-[$1] enable [$2]]), -+ [ if test "$enableval" = yes; then -+ AC_MSG_RESULT(yes) -+ ifelse($4, , , AC_DEFINE($4)) -+ else -+ AC_MSG_RESULT(no) -+ ifelse($5, , , AC_DEFINE($5)) -+ fi ], -+ ifelse($3, on, -+ [ AC_MSG_RESULT(yes) -+ ifelse($4, , , AC_DEFINE($4)) ], -+ [ AC_MSG_RESULT(no) -+ ifelse($5, , , AC_DEFINE($5))]))]) -+ -Index: openjade-1.3.2/config/configure.in -=================================================================== ---- openjade-1.3.2.orig/config/configure.in 2012-06-12 12:47:20.735365445 +0000 -+++ openjade-1.3.2/config/configure.in 2012-06-12 12:48:17.507364080 +0000 -@@ -12,9 +12,12 @@ - dnl Credits: this autoconf script was largely "inspired" by the - dnl autoconf script around SP made by Henry Thompson. - dnl --AC_INIT(dsssl) -+AC_INIT([openjade], [1.3.2]) - AC_CONFIG_AUX_DIR(config) --AM_INIT_AUTOMAKE( openjade, 1.3.2) -+AC_SUBST([PACKAGE], [openjade]) -+AC_SUBST([VERSION], [1.3.2]) -+AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE") -+AC_DEFINE_UNQUOTED(VERSION, "$VERSION") - TOP=`pwd` - AC_SUBST(TOP) - dnl diff --git a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/user-declared-default-constructor.patch b/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/user-declared-default-constructor.patch deleted file mode 100644 index 073af46fc..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-1.3.2/user-declared-default-constructor.patch +++ /dev/null @@ -1,92 +0,0 @@ -In GCC 4.6 the compiler no longer allows objects of const-qualified type to -be default initialized unless the type has a user-declared default -constructor. - -Patch from Gentoo bugzilla: http://bugs.gentoo.org/show_bug.cgi?id=358021 - -Gentoo Bugzilla description follows: -"If a class or struct has no user-defined default constructor, C++ doesn't -allow you to default construct a const instance of it. 
- -https://bugs.gentoo.org/358021 -http://clang.llvm.org/compatibility.html#default_init_const -http://gcc.gnu.org/PR44499" - -Upstream-Status: Pending - -Signed-off-by: Scott Garman - ---- a/jade/TeXFOTBuilder.cxx -+++ b/jade/TeXFOTBuilder.cxx -@@ -88,6 +88,8 @@ public: - value.convertString(nic_.placement); - } - ExtensionFlowObj *copy() const { return new PageFloatFlowObj(*this); } -+ public: -+ PageFloatFlowObj() {} - private: - PageFloatNIC nic_; - StringC name_; -@@ -101,6 +103,8 @@ public: - fotb.endPageFootnote(); - } - ExtensionFlowObj *copy() const { return new PageFootnoteFlowObj(*this); } -+ public: -+ PageFootnoteFlowObj() {} - private: - }; - ////////////////////////////////////////////////////////////////////// ---- a/jade/TransformFOTBuilder.cxx -+++ b/jade/TransformFOTBuilder.cxx -@@ -41,6 +41,7 @@ public: - }; - class EntityRefFlowObj : public TransformExtensionFlowObj { - public: -+ EntityRefFlowObj() {} - void atomic(TransformFOTBuilder &fotb, const NodePtr &) const { - fotb.entityRef(name_); - } -@@ -56,6 +57,7 @@ public: - }; - class ProcessingInstructionFlowObj : public TransformExtensionFlowObj { - public: -+ ProcessingInstructionFlowObj() {} - void atomic(TransformFOTBuilder &fotb, const NodePtr &) const { - fotb.processingInstruction(data_); - } -@@ -98,6 +100,8 @@ public: - } - } - ExtensionFlowObj *copy() const { return new EmptyElementFlowObj(*this); } -+ public: -+ EmptyElementFlowObj() {} - private: - ElementNIC nic_; - }; -@@ -133,6 +137,8 @@ public: - } - } - ExtensionFlowObj *copy() const { return new ElementFlowObj(*this); } -+ public: -+ ElementFlowObj() {} - private: - ElementNIC nic_; - }; -@@ -150,6 +156,8 @@ public: - value.convertString(systemId_); - } - ExtensionFlowObj *copy() const { return new EntityFlowObj(*this); } -+ public: -+ EntityFlowObj() {} - private: - StringC systemId_; - }; -@@ -174,6 +182,8 @@ public: - } - } - ExtensionFlowObj *copy() const { return new DocumentTypeFlowObj(*this); } -+ public: -+ DocumentTypeFlowObj() {} - private: - DocumentTypeNIC nic_; - }; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-native_1.3.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-native_1.3.2.bb deleted file mode 100644 index 8b15b0c7e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/openjade/openjade-native_1.3.2.bb +++ /dev/null @@ -1,121 +0,0 @@ -SUMMARY = "Tools for working with DSSSL stylesheets for SGML and XML documents" -DESCRIPTION = "OpenJade is a suite of tools for validating, \ -processing, and applying DSSSL (Document Style Semantics and \ -Specification Language) stylesheets to SGML and XML documents." 
-HOMEPAGE = "http://openjade.sourceforge.net" -SECTION = "base" -LICENSE = "BSD" -LIC_FILES_CHKSUM = "file://COPYING;md5=641ff1e4511f0a87044ad42f87cb1045" - -PR = "r5" - -DEPENDS = "opensp-native sgml-common-native" -RDEPENDS_${PN} = "sgml-common-native" - -SRC_URI = "${SOURCEFORGE_MIRROR}/openjade/openjade-${PV}.tar.gz \ - file://makefile.patch \ - file://msggen.pl.patch \ - file://reautoconf.patch \ - file://user-declared-default-constructor.patch \ - file://fix-regex.patch \ - file://no-libtool.patch" - -SRC_URI[md5sum] = "7df692e3186109cc00db6825b777201e" -SRC_URI[sha256sum] = "1d2d7996cc94f9b87d0c51cf0e028070ac177c4123ecbfd7ac1cb8d0b7d322d1" - -UPSTREAM_CHECK_URI = "http://openjade.sourceforge.net/download.html" - -inherit autotools-brokensep native - -# Statically link local libs to avoid gold link issue [YOCTO #2972] -PACKAGECONFIG ?= "static-only-libs" -PACKAGECONFIG[static-only-libs] = "--enable-static --disable-shared,--enable-static --enable-shared,," - -EXTRA_OECONF = "--enable-spincludedir=${STAGING_INCDIR}/OpenSP \ - --enable-splibdir=${STAGING_LIBDIR}" - -# We need to set datadir explicitly, but adding it to EXTRA_OECONF -# results in it being specified twice when configure is run. -CONFIGUREOPTS := "${@d.getVar('CONFIGUREOPTS', True).replace('--datadir=${datadir}', '--datadir=${STAGING_DATADIR}/sgml/openjade-${PV}')}" - -# CONFIGUREOPTS has hard coded paths so we need to ignore it's vardeps -# there are other bits in there too but they are picked up by other variable -# dependencies so it all works out -oe_runconf[vardepsexclude] += "CONFIGUREOPTS" - -CFLAGS =+ "-I${S}/include" -CXXFLAGS += "-fno-tree-dse" - -SSTATEPOSTINSTFUNCS += "openjade_sstate_postinst" -SYSROOT_PREPROCESS_FUNCS += "openjade_sysroot_preprocess" - -# configure.in needs to be reloacted to trigger reautoconf -do_extraunpack () { - cp ${S}/config/configure.in ${S}/ -} -addtask extraunpack after do_patch before do_configure - -# We need to do this else the source interdependencies aren't generated and -# build failures can result (e.g. zero size style/Makefile.dep file) -do_compile_prepend () { - oe_runmake depend -} - -do_install() { - # Refer to http://www.linuxfromscratch.org/blfs/view/stable/pst/openjade.html - # for details. 
- install -d ${D}${bindir} ${D}${libdir} - if ${@bb.utils.contains('PACKAGECONFIG', 'static-only-libs', 'true', 'false', d)}; then - install -m 0755 jade/openjade ${D}${bindir}/openjade - oe_libinstall -a -C style libostyle ${D}${libdir} - oe_libinstall -a -C spgrove libospgrove ${D}${libdir} - oe_libinstall -a -C grove libogrove ${D}${libdir} - else - install -m 0755 jade/.libs/openjade ${D}${bindir}/openjade - oe_libinstall -a -so -C style libostyle ${D}${libdir} - oe_libinstall -a -so -C spgrove libospgrove ${D}${libdir} - oe_libinstall -a -so -C grove libogrove ${D}${libdir} - fi - ln -sf openjade ${D}${bindir}/jade - - install -d ${D}${datadir}/sgml/openjade-${PV} - install -m 644 dsssl/catalog ${D}${datadir}/sgml/openjade-${PV} - install -m 644 dsssl/*.dtd ${D}${datadir}/sgml/openjade-${PV} - install -m 644 dsssl/*.dsl ${D}${datadir}/sgml/openjade-${PV} - install -m 644 dsssl/*.sgm ${D}${datadir}/sgml/openjade-${PV} - - install -d ${datadir}/sgml/openjade-${PV} - install -m 644 dsssl/catalog ${datadir}/sgml/openjade-${PV}/catalog - - install -d ${D}${sysconfdir}/sgml - echo "CATALOG ${datadir}/sgml/openjade-${PV}/catalog" > \ - ${D}${sysconfdir}/sgml/openjade-${PV}.cat -} - -openjade_sstate_postinst() { - if [ "${BB_CURRENTTASK}" = "populate_sysroot" -o "${BB_CURRENTTASK}" = "populate_sysroot_setscene" ] - then - # Ensure that the catalog file sgml-docbook.cat is properly - # updated when the package is installed from sstate cache. - ${SYSROOT_DESTDIR}${bindir_crossscripts}/install-catalog-openjade \ - --add ${sysconfdir}/sgml/sgml-docbook.bak \ - ${sysconfdir}/sgml/openjade-${PV}.cat - ${SYSROOT_DESTDIR}${bindir_crossscripts}/install-catalog-openjade \ - --add ${sysconfdir}/sgml/sgml-docbook.cat \ - ${sysconfdir}/sgml/openjade-${PV}.cat - cat << EOF > ${SSTATE_INST_POSTRM} -#!/bin/sh -# Ensure that the catalog file sgml-docbook.cat is properly -# updated when the package is removed from sstate cache. -files="${sysconfdir}/sgml/sgml-docbook.bak ${sysconfdir}/sgml/sgml-docbook.cat" -for f in \$files; do - [ ! 
-f \$f ] || sed -i '/\/sgml\/openjade-${PV}.cat/d' \$f -done -EOF - fi -} - -openjade_sysroot_preprocess () { - install -d ${SYSROOT_DESTDIR}${bindir_crossscripts}/ - install -m 755 ${STAGING_BINDIR_NATIVE}/install-catalog ${SYSROOT_DESTDIR}${bindir_crossscripts}/install-catalog-openjade -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/opensp/opensp-1.5.2/obsolete_automake_macros.patch b/import-layers/yocto-poky/meta/recipes-devtools/opensp/opensp-1.5.2/obsolete_automake_macros.patch deleted file mode 100644 index 42218a6d6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/opensp/opensp-1.5.2/obsolete_automake_macros.patch +++ /dev/null @@ -1,15 +0,0 @@ -Upstream-Status: Submitted [https://sourceforge.net/tracker/?func=detail&aid=3599291&group_id=2115&atid=102115] - -Signed-off-by: Marko Lindqvist -diff -Nurd OpenSP-1.5.2/configure.in OpenSP-1.5.2/configure.in ---- OpenSP-1.5.2/configure.in 2005-12-23 16:15:21.000000000 +0200 -+++ OpenSP-1.5.2/configure.in 2013-01-03 09:04:51.922645689 +0200 -@@ -16,7 +16,7 @@ - AM_INIT_AUTOMAKE(OpenSP, 1.5.2, no-define) - AM_MAINTAINER_MODE - AC_PREREQ(2.53) --AM_CONFIG_HEADER(config.h) -+AC_CONFIG_HEADERS(config.h) - AM_SANITY_CHECK - - dnl Autoheader definitions diff --git a/import-layers/yocto-poky/meta/recipes-devtools/opensp/opensp_1.5.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/opensp/opensp_1.5.2.bb deleted file mode 100644 index 60a7d2e47..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/opensp/opensp_1.5.2.bb +++ /dev/null @@ -1,59 +0,0 @@ -SUMMARY = "An SGML parser" -DESCRIPTION = "An SGML parser used by the OpenJade suite of utilities." -HOMEPAGE = "http://openjade.sourceforge.net" -SECTION = "libs" -LICENSE = "BSD" -LIC_FILES_CHKSUM = "file://COPYING;md5=641ff1e4511f0a87044ad42f87cb1045" - -PR = "r1" - -# At -Os it encounters calls to some inline functions which are then -# not found in any other objects with gcc 4.5 -FULL_OPTIMIZATION += "-O2" - -SRC_URI = "${SOURCEFORGE_MIRROR}/openjade/OpenSP-${PV}.tar.gz \ - file://obsolete_automake_macros.patch \ -" - -SRC_URI[md5sum] = "670b223c5d12cee40c9137be86b6c39b" -SRC_URI[sha256sum] = "57f4898498a368918b0d49c826aa434bb5b703d2c3b169beb348016ab25617ce" - -S = "${WORKDIR}/OpenSP-${PV}" - -inherit autotools gettext - -EXTRA_OECONF = "--disable-doc-build" - -EXTRA_OECONF_class-native = "\ - --disable-doc-build \ - --enable-default-catalog=${sysconfdir}/sgml/catalog \ - --enable-default-search-path=${datadir}/sgml \ - " - -do_install_append() { - # Set up symlinks to often-used alternate names. 
See - # http://www.linuxfromscratch.org/blfs/view/stable/pst/opensp.html - cd ${D}${libdir} - ln -sf libosp.so libsp.so - - cd ${D}${bindir} - for util in nsgmls sgmlnorm spam spcat spent sx; do - ln -sf o$util $util - done - ln -sf osx sgml2xml -} - -do_install_append_class-native() { - for util in nsgmls sgmlnorm spam spcat spent sx; do - create_cmdline_wrapper ${D}/${bindir}/$util \ - -D ${sysconfdir}/sgml - done -} - -FILES_${PN} += "${datadir}/OpenSP/" - -BBCLASSEXTEND = "native" - -# http://errors.yoctoproject.org/Errors/Details/20489/ -ARM_INSTRUCTION_SET_armv4 = "arm" -ARM_INSTRUCTION_SET_armv5 = "arm" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/opkg-utils/opkg-utils/tar_ignore_error.patch b/import-layers/yocto-poky/meta/recipes-devtools/opkg-utils/opkg-utils/tar_ignore_error.patch index 4dddb08c9..3824eb3b7 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/opkg-utils/opkg-utils/tar_ignore_error.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/opkg-utils/opkg-utils/tar_ignore_error.patch @@ -25,7 +25,7 @@ Index: git/opkg-build mkdir $tmp_dir echo $CONTROL > $tmp_dir/tarX --( cd $pkg_dir && tar $ogargs -X $tmp_dir/tarX -cz $tarformat -f $tmp_dir/data.tar.gz . ) +-( cd $pkg_dir && tar $ogargs -X $tmp_dir/tarX -c --$compressor $tarformat -f $tmp_dir/data.tar.$cext . ) -( cd $pkg_dir/$CONTROL && tar $ogargs -cz $tarformat -f $tmp_dir/control.tar.gz . ) + + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/opkg-utils/opkg-utils_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/opkg-utils/opkg-utils_git.bb index a7aec45f8..9deea0f5e 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/opkg-utils/opkg-utils_git.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/opkg-utils/opkg-utils_git.bb @@ -7,10 +7,11 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \ file://opkg.py;beginline=1;endline=18;md5=15917491ad6bf7acc666ca5f7cc1e083" PROVIDES += "${@bb.utils.contains('PACKAGECONFIG', 'update-alternatives', 'virtual/update-alternatives', '', d)}" -SRCREV = "3ffece9bf19a844edacc563aa092fd1fbfcffeee" -PV = "0.3.2+git${SRCPV}" +SRCREV = "1a708fd73d10c2b7677dd4cc4e017746ebbb9166" +PV = "0.3.4+git${SRCPV}" -SRC_URI = "git://git.yoctoproject.org/opkg-utils" +SRC_URI = "git://git.yoctoproject.org/opkg-utils \ +" SRC_URI_append_class-native = " file://tar_ignore_error.patch" S = "${WORKDIR}/git" @@ -43,4 +44,11 @@ RPROVIDES_update-alternatives-opkg = "update-alternatives update-alternatives-cw RREPLACES_update-alternatives-opkg = "update-alternatives-cworth" RCONFLICTS_update-alternatives-opkg = "update-alternatives-cworth" +pkg_postrm_update-alternatives-opkg() { + rm -rf $D${nonarch_libdir}/opkg/alternatives + rmdir $D${nonarch_libdir}/opkg || true +} + BBCLASSEXTEND = "native nativesdk" + +CLEANBROKEN = "1" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb index ace3de4f4..682142a5d 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg-arch-config_1.0.bb @@ -1,6 +1,5 @@ SUMMARY = "Architecture-dependent configuration for opkg" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" PACKAGE_ARCH = "${MACHINE_ARCH}" PR = "r1" diff --git 
a/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch b/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch index 255021b4b..e94a4123d 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg/0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch @@ -21,10 +21,10 @@ index 7bca948..5a1bc44 100644 --- a/libopkg/opkg_conf.h +++ b/libopkg/opkg_conf.h @@ -40,7 +40,7 @@ extern "C" { - #define OPKG_CONF_DEFAULT_STATUS_FILE "/var/lib/opkg/status" - #define OPKG_CONF_DEFAULT_CACHE_DIR "/var/cache/opkg" - #define OPKG_CONF_DEFAULT_CONF_FILE_DIR "/etc/opkg" --#define OPKG_CONF_DEFAULT_LOCK_FILE "/var/run/opkg.lock" + #define OPKG_CONF_DEFAULT_STATUS_FILE VARDIR "/lib/opkg/status" + #define OPKG_CONF_DEFAULT_CACHE_DIR VARDIR "/cache/opkg" + #define OPKG_CONF_DEFAULT_CONF_FILE_DIR SYSCONFDIR "/opkg" +-#define OPKG_CONF_DEFAULT_LOCK_FILE VARDIR "/run/opkg.lock" +#define OPKG_CONF_DEFAULT_LOCK_FILE "/run/opkg.lock" /* In case the config file defines no dest */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg/status-conffile.patch b/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg/status-conffile.patch new file mode 100644 index 000000000..6fc405b2f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg/status-conffile.patch @@ -0,0 +1,69 @@ +Upstream-Status: Submitted +Signed-off-by: Ross Burton + +From 086d5083dfe0102368cb7c8ce89b0c06b64ca773 Mon Sep 17 00:00:00 2001 +From: Ross Burton +Date: Tue, 10 Jan 2017 15:24:59 +0000 +Subject: [PATCH 1/2] opkg_cmd: only look at conffile status if we're going to + output it + +The loop to compare the recorded conffile hash with their hash on disk is +outputted at level INFO but the loop was executed at level NOTICE and higher. + +This means that if a conffile had been deleted the status operation would +produce error messages for output it isn't displaying. + +Signed-off-by: Ross Burton +--- + libopkg/opkg_cmd.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/libopkg/opkg_cmd.c b/libopkg/opkg_cmd.c +index ba57c6a..37416fd 100644 +--- a/libopkg/opkg_cmd.c ++++ b/libopkg/opkg_cmd.c +@@ -638,7 +638,7 @@ static int opkg_info_status_cmd(int argc, char **argv, int installed_only) + + pkg_formatted_info(stdout, pkg); + +- if (opkg_config->verbosity >= NOTICE) { ++ if (opkg_config->verbosity >= INFO) { + conffile_list_elt_t *iter; + for (iter = nv_pair_list_first(&pkg->conffiles); iter; + iter = nv_pair_list_next(&pkg->conffiles, iter)) { +-- +2.8.1 + +From 225e30e0f9fa7cfeaa3f89e2713e5147ab371def Mon Sep 17 00:00:00 2001 +From: Ross Burton +Date: Tue, 10 Jan 2017 15:28:47 +0000 +Subject: [PATCH 2/2] conffile: gracefully handle deleted conffiles in + conffile_has_been_modified + +Handle conffiles that don't exist gracefully so that instead of showing an error +message from file_md5sum_alloc() a notice that the file has been deleted is +shown instead. 
+ +Signed-off-by: Ross Burton +--- + libopkg/conffile.c | 5 +++++ + 1 file changed, 5 insertions(+) + +diff --git a/libopkg/conffile.c b/libopkg/conffile.c +index b2f2469..7b4b87b 100644 +--- a/libopkg/conffile.c ++++ b/libopkg/conffile.c +@@ -51,6 +51,11 @@ int conffile_has_been_modified(conffile_t * conffile) + } + + root_filename = root_filename_alloc(filename); ++ if (!file_exists(root_filename)) { ++ opkg_msg(INFO, "Conffile %s deleted\n", conffile->name); ++ free(root_filename); ++ return 1; ++ } + + md5sum = file_md5sum_alloc(root_filename); + +-- +2.8.1 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.3.bb deleted file mode 100644 index bce72fcd2..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.3.bb +++ /dev/null @@ -1,73 +0,0 @@ -SUMMARY = "Open Package Manager" -SUMMARY_libopkg = "Open Package Manager library" -SECTION = "base" -HOMEPAGE = "http://code.google.com/p/opkg/" -BUGTRACKER = "http://code.google.com/p/opkg/issues/list" -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \ - file://src/opkg.c;beginline=2;endline=21;md5=90435a519c6ea69ef22e4a88bcc52fa0" - -DEPENDS = "libarchive" - -PE = "1" - -SRC_URI = "http://downloads.yoctoproject.org/releases/${BPN}/${BPN}-${PV}.tar.gz \ - file://opkg-configure.service \ - file://opkg.conf \ - file://0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch \ -" - -SRC_URI[md5sum] = "a4613038c8afc7d8d482f5c53f137bdf" -SRC_URI[sha256sum] = "19db9e73121a5e4c91fa228b0a6a4c55cc3591056130cfb3c66c30aa32f8d00e" - -inherit autotools pkgconfig systemd - -SYSTEMD_SERVICE_${PN} = "opkg-configure.service" - -target_localstatedir := "${localstatedir}" -OPKGLIBDIR = "${target_localstatedir}/lib" - -PACKAGECONFIG ??= "" - -PACKAGECONFIG[gpg] = "--enable-gpg,--disable-gpg,gpgme libgpg-error,gnupg" -PACKAGECONFIG[curl] = "--enable-curl,--disable-curl,curl" -PACKAGECONFIG[ssl-curl] = "--enable-ssl-curl,--disable-ssl-curl,curl openssl" -PACKAGECONFIG[openssl] = "--enable-openssl,--disable-openssl,openssl" -PACKAGECONFIG[sha256] = "--enable-sha256,--disable-sha256" -PACKAGECONFIG[pathfinder] = "--enable-pathfinder,--disable-pathfinder,pathfinder" -PACKAGECONFIG[libsolv] = "--with-libsolv,--without-libsolv,libsolv" - -do_install_append () { - install -d ${D}${sysconfdir}/opkg - install -m 0644 ${WORKDIR}/opkg.conf ${D}${sysconfdir}/opkg/opkg.conf - echo "option lists_dir ${OPKGLIBDIR}/opkg/lists" >>${D}${sysconfdir}/opkg/opkg.conf - - # We need to create the lock directory - install -d ${D}${OPKGLIBDIR}/opkg - - if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)};then - install -d ${D}${systemd_unitdir}/system - install -m 0644 ${WORKDIR}/opkg-configure.service ${D}${systemd_unitdir}/system/ - sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \ - -e 's,@SYSCONFDIR@,${sysconfdir},g' \ - -e 's,@BINDIR@,${bindir},g' \ - -e 's,@SYSTEMD_UNITDIR@,${systemd_unitdir},g' \ - ${D}${systemd_unitdir}/system/opkg-configure.service - fi -} - -RDEPENDS_${PN} = "${VIRTUAL-RUNTIME_update-alternatives} opkg-arch-config run-postinsts libarchive" -RDEPENDS_${PN}_class-native = "" -RDEPENDS_${PN}_class-nativesdk = "" -RREPLACES_${PN} = "opkg-nogpg opkg-collateral" -RCONFLICTS_${PN} = "opkg-collateral" -RPROVIDES_${PN} = "opkg-collateral" - -PACKAGES =+ "libopkg" - -FILES_libopkg = "${libdir}/*.so.* ${OPKGLIBDIR}/opkg/" -FILES_${PN} += "${systemd_unitdir}/system/" - -BBCLASSEXTEND = "native 
nativesdk" - -CONFFILES_${PN} = "${sysconfdir}/opkg/opkg.conf" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.4.bb b/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.4.bb new file mode 100644 index 000000000..a21fde1cf --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/opkg/opkg_0.3.4.bb @@ -0,0 +1,76 @@ +SUMMARY = "Open Package Manager" +SUMMARY_libopkg = "Open Package Manager library" +SECTION = "base" +HOMEPAGE = "http://code.google.com/p/opkg/" +BUGTRACKER = "http://code.google.com/p/opkg/issues/list" +LICENSE = "GPLv2+" +LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f \ + file://src/opkg.c;beginline=2;endline=21;md5=90435a519c6ea69ef22e4a88bcc52fa0" + +DEPENDS = "libarchive" + +PE = "1" + +SRC_URI = "http://downloads.yoctoproject.org/releases/${BPN}/${BPN}-${PV}.tar.gz \ + file://opkg-configure.service \ + file://opkg.conf \ + file://0001-opkg_conf-create-opkg.lock-in-run-instead-of-var-run.patch \ + file://status-conffile.patch \ +" + +SRC_URI[md5sum] = "6c52a065499056a196e0b45a27e392de" +SRC_URI[sha256sum] = "750b900b53b62a9b280b601a196f02da81091eda2f3478c509512aa5a1ec93be" + +inherit autotools pkgconfig systemd + +SYSTEMD_SERVICE_${PN} = "opkg-configure.service" + +target_localstatedir := "${localstatedir}" +OPKGLIBDIR = "${target_localstatedir}/lib" + +PACKAGECONFIG ??= "libsolv" + +PACKAGECONFIG[gpg] = "--enable-gpg,--disable-gpg,gpgme libgpg-error,gnupg" +PACKAGECONFIG[curl] = "--enable-curl,--disable-curl,curl" +PACKAGECONFIG[ssl-curl] = "--enable-ssl-curl,--disable-ssl-curl,curl openssl" +PACKAGECONFIG[openssl] = "--enable-openssl,--disable-openssl,openssl" +PACKAGECONFIG[sha256] = "--enable-sha256,--disable-sha256" +PACKAGECONFIG[pathfinder] = "--enable-pathfinder,--disable-pathfinder,pathfinder" +PACKAGECONFIG[libsolv] = "--with-libsolv,--without-libsolv,libsolv" + +EXTRA_OECONF_class-native = "--localstatedir=/${@os.path.relpath('${localstatedir}', '${STAGING_DIR_NATIVE}')} --sysconfdir=/${@os.path.relpath('${sysconfdir}', '${STAGING_DIR_NATIVE}')}" + +do_install_append () { + install -d ${D}${sysconfdir}/opkg + install -m 0644 ${WORKDIR}/opkg.conf ${D}${sysconfdir}/opkg/opkg.conf + echo "option lists_dir ${OPKGLIBDIR}/opkg/lists" >>${D}${sysconfdir}/opkg/opkg.conf + + # We need to create the lock directory + install -d ${D}${OPKGLIBDIR}/opkg + + if ${@bb.utils.contains('DISTRO_FEATURES','systemd','true','false',d)};then + install -d ${D}${systemd_unitdir}/system + install -m 0644 ${WORKDIR}/opkg-configure.service ${D}${systemd_unitdir}/system/ + sed -i -e 's,@BASE_BINDIR@,${base_bindir},g' \ + -e 's,@SYSCONFDIR@,${sysconfdir},g' \ + -e 's,@BINDIR@,${bindir},g' \ + -e 's,@SYSTEMD_UNITDIR@,${systemd_unitdir},g' \ + ${D}${systemd_unitdir}/system/opkg-configure.service + fi +} + +RDEPENDS_${PN} = "${VIRTUAL-RUNTIME_update-alternatives} opkg-arch-config libarchive" +RDEPENDS_${PN}_class-native = "" +RDEPENDS_${PN}_class-nativesdk = "" +RREPLACES_${PN} = "opkg-nogpg opkg-collateral" +RCONFLICTS_${PN} = "opkg-collateral" +RPROVIDES_${PN} = "opkg-collateral" + +PACKAGES =+ "libopkg" + +FILES_libopkg = "${libdir}/*.so.* ${OPKGLIBDIR}/opkg/" +FILES_${PN} += "${systemd_unitdir}/system/" + +BBCLASSEXTEND = "native nativesdk" + +CONFFILES_${PN} = "${sysconfdir}/opkg/opkg.conf" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/orc/orc_0.4.25.bb b/import-layers/yocto-poky/meta/recipes-devtools/orc/orc_0.4.25.bb deleted file mode 100644 index 897493a7d..000000000 --- 
a/import-layers/yocto-poky/meta/recipes-devtools/orc/orc_0.4.25.bb +++ /dev/null @@ -1,27 +0,0 @@ -SUMMARY = "Optimised Inner Loop Runtime Compiler" -HOMEPAGE = "http://gstreamer.freedesktop.org/modules/orc.html" -LICENSE = "BSD-2-Clause & BSD-3-Clause" -LIC_FILES_CHKSUM = "file://COPYING;md5=1400bd9d09e8af56b9ec982b3d85797e" - -SRC_URI = "http://gstreamer.freedesktop.org/src/orc/orc-${PV}.tar.xz" - -SRC_URI[md5sum] = "8582a28b15f53110c88d8043d9f55bcf" -SRC_URI[sha256sum] = "c1b1d54a58f26d483f0b3881538984789fe5d5460ab8fab74a1cacbd3d1c53d1" - -inherit autotools pkgconfig gtk-doc - -BBCLASSEXTEND = "native nativesdk" - -PACKAGES =+ "orc-examples" -PACKAGES_DYNAMIC += "^liborc-.*" -FILES_orc-examples = "${libdir}/orc/*" -FILES_${PN} = "${bindir}/*" - -python populate_packages_prepend () { - libdir = d.expand('${libdir}') - do_split_packages(d, libdir, '^lib(.*)\.so\.*', 'lib%s', 'ORC %s library', extra_depends='', allow_links=True) -} - -do_compile_prepend_class-native () { - sed -i -e 's#/tmp#.#g' ${S}/orc/orccodemem.c -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/orc/orc_0.4.26.bb b/import-layers/yocto-poky/meta/recipes-devtools/orc/orc_0.4.26.bb new file mode 100644 index 000000000..e47342f3f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/orc/orc_0.4.26.bb @@ -0,0 +1,27 @@ +SUMMARY = "Optimised Inner Loop Runtime Compiler" +HOMEPAGE = "http://gstreamer.freedesktop.org/modules/orc.html" +LICENSE = "BSD-2-Clause & BSD-3-Clause" +LIC_FILES_CHKSUM = "file://COPYING;md5=1400bd9d09e8af56b9ec982b3d85797e" + +SRC_URI = "http://gstreamer.freedesktop.org/src/orc/orc-${PV}.tar.xz" + +SRC_URI[md5sum] = "8e9bef677bae289d3324d81c337a4507" +SRC_URI[sha256sum] = "7d52fa80ef84988359c3434e1eea302d077a08987abdde6905678ebcad4fa649" + +inherit autotools pkgconfig gtk-doc + +BBCLASSEXTEND = "native nativesdk" + +PACKAGES =+ "orc-examples" +PACKAGES_DYNAMIC += "^liborc-.*" +FILES_orc-examples = "${libdir}/orc/*" +FILES_${PN} = "${bindir}/*" + +python populate_packages_prepend () { + libdir = d.expand('${libdir}') + do_split_packages(d, libdir, '^lib(.*)\.so\.*', 'lib%s', 'ORC %s library', extra_depends='', allow_links=True) +} + +do_compile_prepend_class-native () { + sed -i -e 's#/tmp#.#g' ${S}/orc/orccodemem.c +} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/debian.patch b/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/debian.patch deleted file mode 100644 index 1a0764659..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/debian.patch +++ /dev/null @@ -1,10426 +0,0 @@ -Upstream-Status: Inappropriate [debian patch] - ---- patch-2.5.9.orig/m4/hash.m4 -+++ patch-2.5.9/m4/hash.m4 -@@ -0,0 +1,15 @@ -+# hash.m4 serial 1 -+dnl Copyright (C) 2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+AC_DEFUN([gl_HASH], -+[ -+ dnl Prerequisites of lib/hash.c. 
-+ AC_CHECK_HEADERS_ONCE(stdlib.h) -+ AC_HEADER_STDBOOL -+ AC_CHECK_DECLS_ONCE(free malloc) -+]) ---- patch-2.5.9.orig/m4/nanosecond_stat.m4 -+++ patch-2.5.9/m4/nanosecond_stat.m4 -@@ -0,0 +1,35 @@ -+AC_DEFUN([ag_CHECK_NANOSECOND_STAT], -+ [AC_CACHE_CHECK([for nanosecond timestamps in struct stat], -+ [ac_cv_stat_nsec], -+ [AC_TRY_COMPILE( -+ [ -+ #include -+ #include -+ #include -+ struct stat st; -+ ], -+ [ return st.st_atimensec + st.st_mtimensec + st.st_ctimensec; ], -+ [ac_cv_stat_nsec=yes], -+ [ac_cv_stat_nsec=no]) -+ ]) -+ if test $ac_cv_stat_nsec = yes; then -+ AC_DEFINE(HAVE_STAT_NSEC, 1, [Define to 1 if struct stat has nanosecond timestamps.]) -+ fi -+ -+ AC_CACHE_CHECK([for nanosecond timestamps in struct stat], -+ [ac_cv_stat_timeval], -+ [AC_TRY_COMPILE( -+ [ -+ #include -+ #include -+ #include -+ #include -+ struct stat st; -+ ], -+ [ return st.st_atim.tv_nsec + st.st_mtim.tv_nsec + st.st_ctim.tv_nsec; ], -+ [ac_cv_stat_timeval=yes], -+ [ac_cv_stat_timeval=no]) -+ ]) -+ if test $ac_cv_stat_timeval = yes; then -+ AC_DEFINE(HAVE_STAT_TIMEVAL, 1, [Define to 1 if struct stat comtains struct timeval's.]) -+ fi]) ---- patch-2.5.9.orig/Makefile.in -+++ patch-2.5.9/Makefile.in -@@ -62,7 +62,7 @@ - SHELL = /bin/sh - - LIBSRCS = error.c malloc.c memchr.c mkdir.c \ -- realloc.c rmdir.c strcasecmp.c strncasecmp.c -+ realloc.c rmdir.c strcasecmp.c strncasecmp.c hash.c - SRCS = $(LIBSRCS) \ - addext.c argmatch.c backupfile.c \ - basename.c dirname.c \ -@@ -78,12 +78,12 @@ - maketime.$(OBJEXT) partime.$(OBJEXT) \ - patch.$(OBJEXT) pch.$(OBJEXT) \ - quote.$(OBJEXT) quotearg.$(OBJEXT) quotesys.$(OBJEXT) \ -- util.$(OBJEXT) version.$(OBJEXT) xmalloc.$(OBJEXT) -+ util.$(OBJEXT) version.$(OBJEXT) xmalloc.$(OBJEXT) hash.$(OBJEXT) - HDRS = argmatch.h backupfile.h common.h dirname.h \ - error.h getopt.h gettext.h \ - inp.h maketime.h partime.h pch.h \ - quote.h quotearg.h quotesys.h \ -- unlocked-io.h util.h version.h xalloc.h -+ unlocked-io.h util.h version.h xalloc.h hash.h - MISC = AUTHORS COPYING ChangeLog INSTALL Makefile.in NEWS README \ - aclocal.m4 \ - config.hin configure configure.ac \ ---- patch-2.5.9.orig/aclocal.m4 -+++ patch-2.5.9/aclocal.m4 -@@ -1,3 +1,1058 @@ -+dnl aclocal.m4 generated automatically by aclocal 1.4-p6 -+ -+dnl Copyright (C) 1994, 1995-8, 1999, 2001 Free Software Foundation, Inc. -+dnl This file is free software; the Free Software Foundation -+dnl gives unlimited permission to copy and/or distribute it, -+dnl with or without modifications, as long as this notice is preserved. -+ -+dnl This program is distributed in the hope that it will be useful, -+dnl but WITHOUT ANY WARRANTY, to the extent permitted by law; without -+dnl even the implied warranty of MERCHANTABILITY or FITNESS FOR A -+dnl PARTICULAR PURPOSE. -+ -+# lib-prefix.m4 serial 3 (gettext-0.13) -+dnl Copyright (C) 2001-2003 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Bruno Haible. -+ -+dnl AC_LIB_ARG_WITH is synonymous to AC_ARG_WITH in autoconf-2.13, and -+dnl similar to AC_ARG_WITH in autoconf 2.52...2.57 except that is doesn't -+dnl require excessive bracketing. 
-+ifdef([AC_HELP_STRING], -+[AC_DEFUN([AC_LIB_ARG_WITH], [AC_ARG_WITH([$1],[[$2]],[$3],[$4])])], -+[AC_DEFUN([AC_][LIB_ARG_WITH], [AC_ARG_WITH([$1],[$2],[$3],[$4])])]) -+ -+dnl AC_LIB_PREFIX adds to the CPPFLAGS and LDFLAGS the flags that are needed -+dnl to access previously installed libraries. The basic assumption is that -+dnl a user will want packages to use other packages he previously installed -+dnl with the same --prefix option. -+dnl This macro is not needed if only AC_LIB_LINKFLAGS is used to locate -+dnl libraries, but is otherwise very convenient. -+AC_DEFUN([AC_LIB_PREFIX], -+[ -+ AC_BEFORE([$0], [AC_LIB_LINKFLAGS]) -+ AC_REQUIRE([AC_PROG_CC]) -+ AC_REQUIRE([AC_CANONICAL_HOST]) -+ AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) -+ dnl By default, look in $includedir and $libdir. -+ use_additional=yes -+ AC_LIB_WITH_FINAL_PREFIX([ -+ eval additional_includedir=\"$includedir\" -+ eval additional_libdir=\"$libdir\" -+ ]) -+ AC_LIB_ARG_WITH([lib-prefix], -+[ --with-lib-prefix[=DIR] search for libraries in DIR/include and DIR/lib -+ --without-lib-prefix don't search for libraries in includedir and libdir], -+[ -+ if test "X$withval" = "Xno"; then -+ use_additional=no -+ else -+ if test "X$withval" = "X"; then -+ AC_LIB_WITH_FINAL_PREFIX([ -+ eval additional_includedir=\"$includedir\" -+ eval additional_libdir=\"$libdir\" -+ ]) -+ else -+ additional_includedir="$withval/include" -+ additional_libdir="$withval/lib" -+ fi -+ fi -+]) -+ if test $use_additional = yes; then -+ dnl Potentially add $additional_includedir to $CPPFLAGS. -+ dnl But don't add it -+ dnl 1. if it's the standard /usr/include, -+ dnl 2. if it's already present in $CPPFLAGS, -+ dnl 3. if it's /usr/local/include and we are using GCC on Linux, -+ dnl 4. if it doesn't exist as a directory. -+ if test "X$additional_includedir" != "X/usr/include"; then -+ haveit= -+ for x in $CPPFLAGS; do -+ AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) -+ if test "X$x" = "X-I$additional_includedir"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ if test "X$additional_includedir" = "X/usr/local/include"; then -+ if test -n "$GCC"; then -+ case $host_os in -+ linux*) haveit=yes;; -+ esac -+ fi -+ fi -+ if test -z "$haveit"; then -+ if test -d "$additional_includedir"; then -+ dnl Really add $additional_includedir to $CPPFLAGS. -+ CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }-I$additional_includedir" -+ fi -+ fi -+ fi -+ fi -+ dnl Potentially add $additional_libdir to $LDFLAGS. -+ dnl But don't add it -+ dnl 1. if it's the standard /usr/lib, -+ dnl 2. if it's already present in $LDFLAGS, -+ dnl 3. if it's /usr/local/lib and we are using GCC on Linux, -+ dnl 4. if it doesn't exist as a directory. -+ if test "X$additional_libdir" != "X/usr/lib"; then -+ haveit= -+ for x in $LDFLAGS; do -+ AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) -+ if test "X$x" = "X-L$additional_libdir"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ if test "X$additional_libdir" = "X/usr/local/lib"; then -+ if test -n "$GCC"; then -+ case $host_os in -+ linux*) haveit=yes;; -+ esac -+ fi -+ fi -+ if test -z "$haveit"; then -+ if test -d "$additional_libdir"; then -+ dnl Really add $additional_libdir to $LDFLAGS. -+ LDFLAGS="${LDFLAGS}${LDFLAGS:+ }-L$additional_libdir" -+ fi -+ fi -+ fi -+ fi -+ fi -+]) -+ -+dnl AC_LIB_PREPARE_PREFIX creates variables acl_final_prefix, -+dnl acl_final_exec_prefix, containing the values to which $prefix and -+dnl $exec_prefix will expand at the end of the configure script. 
-+AC_DEFUN([AC_LIB_PREPARE_PREFIX], -+[ -+ dnl Unfortunately, prefix and exec_prefix get only finally determined -+ dnl at the end of configure. -+ if test "X$prefix" = "XNONE"; then -+ acl_final_prefix="$ac_default_prefix" -+ else -+ acl_final_prefix="$prefix" -+ fi -+ if test "X$exec_prefix" = "XNONE"; then -+ acl_final_exec_prefix='${prefix}' -+ else -+ acl_final_exec_prefix="$exec_prefix" -+ fi -+ acl_save_prefix="$prefix" -+ prefix="$acl_final_prefix" -+ eval acl_final_exec_prefix=\"$acl_final_exec_prefix\" -+ prefix="$acl_save_prefix" -+]) -+ -+dnl AC_LIB_WITH_FINAL_PREFIX([statement]) evaluates statement, with the -+dnl variables prefix and exec_prefix bound to the values they will have -+dnl at the end of the configure script. -+AC_DEFUN([AC_LIB_WITH_FINAL_PREFIX], -+[ -+ acl_save_prefix="$prefix" -+ prefix="$acl_final_prefix" -+ acl_save_exec_prefix="$exec_prefix" -+ exec_prefix="$acl_final_exec_prefix" -+ $1 -+ exec_prefix="$acl_save_exec_prefix" -+ prefix="$acl_save_prefix" -+]) -+ -+# lib-link.m4 serial 4 (gettext-0.12) -+dnl Copyright (C) 2001-2003 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Bruno Haible. -+ -+dnl AC_LIB_LINKFLAGS(name [, dependencies]) searches for libname and -+dnl the libraries corresponding to explicit and implicit dependencies. -+dnl Sets and AC_SUBSTs the LIB${NAME} and LTLIB${NAME} variables and -+dnl augments the CPPFLAGS variable. -+AC_DEFUN([AC_LIB_LINKFLAGS], -+[ -+ AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) -+ AC_REQUIRE([AC_LIB_RPATH]) -+ define([Name],[translit([$1],[./-], [___])]) -+ define([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], -+ [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) -+ AC_CACHE_CHECK([how to link with lib[]$1], [ac_cv_lib[]Name[]_libs], [ -+ AC_LIB_LINKFLAGS_BODY([$1], [$2]) -+ ac_cv_lib[]Name[]_libs="$LIB[]NAME" -+ ac_cv_lib[]Name[]_ltlibs="$LTLIB[]NAME" -+ ac_cv_lib[]Name[]_cppflags="$INC[]NAME" -+ ]) -+ LIB[]NAME="$ac_cv_lib[]Name[]_libs" -+ LTLIB[]NAME="$ac_cv_lib[]Name[]_ltlibs" -+ INC[]NAME="$ac_cv_lib[]Name[]_cppflags" -+ AC_LIB_APPENDTOVAR([CPPFLAGS], [$INC]NAME) -+ AC_SUBST([LIB]NAME) -+ AC_SUBST([LTLIB]NAME) -+ dnl Also set HAVE_LIB[]NAME so that AC_LIB_HAVE_LINKFLAGS can reuse the -+ dnl results of this search when this library appears as a dependency. -+ HAVE_LIB[]NAME=yes -+ undefine([Name]) -+ undefine([NAME]) -+]) -+ -+dnl AC_LIB_HAVE_LINKFLAGS(name, dependencies, includes, testcode) -+dnl searches for libname and the libraries corresponding to explicit and -+dnl implicit dependencies, together with the specified include files and -+dnl the ability to compile and link the specified testcode. If found, it -+dnl sets and AC_SUBSTs HAVE_LIB${NAME}=yes and the LIB${NAME} and -+dnl LTLIB${NAME} variables and augments the CPPFLAGS variable, and -+dnl #defines HAVE_LIB${NAME} to 1. Otherwise, it sets and AC_SUBSTs -+dnl HAVE_LIB${NAME}=no and LIB${NAME} and LTLIB${NAME} to empty. 
-+AC_DEFUN([AC_LIB_HAVE_LINKFLAGS], -+[ -+ AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) -+ AC_REQUIRE([AC_LIB_RPATH]) -+ define([Name],[translit([$1],[./-], [___])]) -+ define([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], -+ [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) -+ -+ dnl Search for lib[]Name and define LIB[]NAME, LTLIB[]NAME and INC[]NAME -+ dnl accordingly. -+ AC_LIB_LINKFLAGS_BODY([$1], [$2]) -+ -+ dnl Add $INC[]NAME to CPPFLAGS before performing the following checks, -+ dnl because if the user has installed lib[]Name and not disabled its use -+ dnl via --without-lib[]Name-prefix, he wants to use it. -+ ac_save_CPPFLAGS="$CPPFLAGS" -+ AC_LIB_APPENDTOVAR([CPPFLAGS], [$INC]NAME) -+ -+ AC_CACHE_CHECK([for lib[]$1], [ac_cv_lib[]Name], [ -+ ac_save_LIBS="$LIBS" -+ LIBS="$LIBS $LIB[]NAME" -+ AC_TRY_LINK([$3], [$4], [ac_cv_lib[]Name=yes], [ac_cv_lib[]Name=no]) -+ LIBS="$ac_save_LIBS" -+ ]) -+ if test "$ac_cv_lib[]Name" = yes; then -+ HAVE_LIB[]NAME=yes -+ AC_DEFINE([HAVE_LIB]NAME, 1, [Define if you have the $1 library.]) -+ AC_MSG_CHECKING([how to link with lib[]$1]) -+ AC_MSG_RESULT([$LIB[]NAME]) -+ else -+ HAVE_LIB[]NAME=no -+ dnl If $LIB[]NAME didn't lead to a usable library, we don't need -+ dnl $INC[]NAME either. -+ CPPFLAGS="$ac_save_CPPFLAGS" -+ LIB[]NAME= -+ LTLIB[]NAME= -+ fi -+ AC_SUBST([HAVE_LIB]NAME) -+ AC_SUBST([LIB]NAME) -+ AC_SUBST([LTLIB]NAME) -+ undefine([Name]) -+ undefine([NAME]) -+]) -+ -+dnl Determine the platform dependent parameters needed to use rpath: -+dnl libext, shlibext, hardcode_libdir_flag_spec, hardcode_libdir_separator, -+dnl hardcode_direct, hardcode_minus_L. -+AC_DEFUN([AC_LIB_RPATH], -+[ -+ AC_REQUIRE([AC_PROG_CC]) dnl we use $CC, $GCC, $LDFLAGS -+ AC_REQUIRE([AC_LIB_PROG_LD]) dnl we use $LD, $with_gnu_ld -+ AC_REQUIRE([AC_CANONICAL_HOST]) dnl we use $host -+ AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT]) dnl we use $ac_aux_dir -+ AC_CACHE_CHECK([for shared library run path origin], acl_cv_rpath, [ -+ CC="$CC" GCC="$GCC" LDFLAGS="$LDFLAGS" LD="$LD" with_gnu_ld="$with_gnu_ld" \ -+ ${CONFIG_SHELL-/bin/sh} "$ac_aux_dir/config.rpath" "$host" > conftest.sh -+ . ./conftest.sh -+ rm -f ./conftest.sh -+ acl_cv_rpath=done -+ ]) -+ wl="$acl_cv_wl" -+ libext="$acl_cv_libext" -+ shlibext="$acl_cv_shlibext" -+ hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" -+ hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" -+ hardcode_direct="$acl_cv_hardcode_direct" -+ hardcode_minus_L="$acl_cv_hardcode_minus_L" -+ dnl Determine whether the user wants rpath handling at all. -+ AC_ARG_ENABLE(rpath, -+ [ --disable-rpath do not hardcode runtime library paths], -+ :, enable_rpath=yes) -+]) -+ -+dnl AC_LIB_LINKFLAGS_BODY(name [, dependencies]) searches for libname and -+dnl the libraries corresponding to explicit and implicit dependencies. -+dnl Sets the LIB${NAME}, LTLIB${NAME} and INC${NAME} variables. -+AC_DEFUN([AC_LIB_LINKFLAGS_BODY], -+[ -+ define([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], -+ [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) -+ dnl By default, look in $includedir and $libdir. 
-+ use_additional=yes -+ AC_LIB_WITH_FINAL_PREFIX([ -+ eval additional_includedir=\"$includedir\" -+ eval additional_libdir=\"$libdir\" -+ ]) -+ AC_LIB_ARG_WITH([lib$1-prefix], -+[ --with-lib$1-prefix[=DIR] search for lib$1 in DIR/include and DIR/lib -+ --without-lib$1-prefix don't search for lib$1 in includedir and libdir], -+[ -+ if test "X$withval" = "Xno"; then -+ use_additional=no -+ else -+ if test "X$withval" = "X"; then -+ AC_LIB_WITH_FINAL_PREFIX([ -+ eval additional_includedir=\"$includedir\" -+ eval additional_libdir=\"$libdir\" -+ ]) -+ else -+ additional_includedir="$withval/include" -+ additional_libdir="$withval/lib" -+ fi -+ fi -+]) -+ dnl Search the library and its dependencies in $additional_libdir and -+ dnl $LDFLAGS. Using breadth-first-seach. -+ LIB[]NAME= -+ LTLIB[]NAME= -+ INC[]NAME= -+ rpathdirs= -+ ltrpathdirs= -+ names_already_handled= -+ names_next_round='$1 $2' -+ while test -n "$names_next_round"; do -+ names_this_round="$names_next_round" -+ names_next_round= -+ for name in $names_this_round; do -+ already_handled= -+ for n in $names_already_handled; do -+ if test "$n" = "$name"; then -+ already_handled=yes -+ break -+ fi -+ done -+ if test -z "$already_handled"; then -+ names_already_handled="$names_already_handled $name" -+ dnl See if it was already located by an earlier AC_LIB_LINKFLAGS -+ dnl or AC_LIB_HAVE_LINKFLAGS call. -+ uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` -+ eval value=\"\$HAVE_LIB$uppername\" -+ if test -n "$value"; then -+ if test "$value" = yes; then -+ eval value=\"\$LIB$uppername\" -+ test -z "$value" || LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$value" -+ eval value=\"\$LTLIB$uppername\" -+ test -z "$value" || LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$value" -+ else -+ dnl An earlier call to AC_LIB_HAVE_LINKFLAGS has determined -+ dnl that this library doesn't exist. So just drop it. -+ : -+ fi -+ else -+ dnl Search the library lib$name in $additional_libdir and $LDFLAGS -+ dnl and the already constructed $LIBNAME/$LTLIBNAME. -+ found_dir= -+ found_la= -+ found_so= -+ found_a= -+ if test $use_additional = yes; then -+ if test -n "$shlibext" && test -f "$additional_libdir/lib$name.$shlibext"; then -+ found_dir="$additional_libdir" -+ found_so="$additional_libdir/lib$name.$shlibext" -+ if test -f "$additional_libdir/lib$name.la"; then -+ found_la="$additional_libdir/lib$name.la" -+ fi -+ else -+ if test -f "$additional_libdir/lib$name.$libext"; then -+ found_dir="$additional_libdir" -+ found_a="$additional_libdir/lib$name.$libext" -+ if test -f "$additional_libdir/lib$name.la"; then -+ found_la="$additional_libdir/lib$name.la" -+ fi -+ fi -+ fi -+ fi -+ if test "X$found_dir" = "X"; then -+ for x in $LDFLAGS $LTLIB[]NAME; do -+ AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) -+ case "$x" in -+ -L*) -+ dir=`echo "X$x" | sed -e 's/^X-L//'` -+ if test -n "$shlibext" && test -f "$dir/lib$name.$shlibext"; then -+ found_dir="$dir" -+ found_so="$dir/lib$name.$shlibext" -+ if test -f "$dir/lib$name.la"; then -+ found_la="$dir/lib$name.la" -+ fi -+ else -+ if test -f "$dir/lib$name.$libext"; then -+ found_dir="$dir" -+ found_a="$dir/lib$name.$libext" -+ if test -f "$dir/lib$name.la"; then -+ found_la="$dir/lib$name.la" -+ fi -+ fi -+ fi -+ ;; -+ esac -+ if test "X$found_dir" != "X"; then -+ break -+ fi -+ done -+ fi -+ if test "X$found_dir" != "X"; then -+ dnl Found the library. 
-+ LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$found_dir -l$name" -+ if test "X$found_so" != "X"; then -+ dnl Linking with a shared library. We attempt to hardcode its -+ dnl directory into the executable's runpath, unless it's the -+ dnl standard /usr/lib. -+ if test "$enable_rpath" = no || test "X$found_dir" = "X/usr/lib"; then -+ dnl No hardcoding is needed. -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" -+ else -+ dnl Use an explicit option to hardcode DIR into the resulting -+ dnl binary. -+ dnl Potentially add DIR to ltrpathdirs. -+ dnl The ltrpathdirs will be appended to $LTLIBNAME at the end. -+ haveit= -+ for x in $ltrpathdirs; do -+ if test "X$x" = "X$found_dir"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ ltrpathdirs="$ltrpathdirs $found_dir" -+ fi -+ dnl The hardcoding into $LIBNAME is system dependent. -+ if test "$hardcode_direct" = yes; then -+ dnl Using DIR/libNAME.so during linking hardcodes DIR into the -+ dnl resulting binary. -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" -+ else -+ if test -n "$hardcode_libdir_flag_spec" && test "$hardcode_minus_L" = no; then -+ dnl Use an explicit option to hardcode DIR into the resulting -+ dnl binary. -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" -+ dnl Potentially add DIR to rpathdirs. -+ dnl The rpathdirs will be appended to $LIBNAME at the end. -+ haveit= -+ for x in $rpathdirs; do -+ if test "X$x" = "X$found_dir"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ rpathdirs="$rpathdirs $found_dir" -+ fi -+ else -+ dnl Rely on "-L$found_dir". -+ dnl But don't add it if it's already contained in the LDFLAGS -+ dnl or the already constructed $LIBNAME -+ haveit= -+ for x in $LDFLAGS $LIB[]NAME; do -+ AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) -+ if test "X$x" = "X-L$found_dir"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir" -+ fi -+ if test "$hardcode_minus_L" != no; then -+ dnl FIXME: Not sure whether we should use -+ dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" -+ dnl here. -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" -+ else -+ dnl We cannot use $hardcode_runpath_var and LD_RUN_PATH -+ dnl here, because this doesn't fit in flags passed to the -+ dnl compiler. So give up. No hardcoding. This affects only -+ dnl very old systems. -+ dnl FIXME: Not sure whether we should use -+ dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" -+ dnl here. -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name" -+ fi -+ fi -+ fi -+ fi -+ else -+ if test "X$found_a" != "X"; then -+ dnl Linking with a static library. -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_a" -+ else -+ dnl We shouldn't come here, but anyway it's good to have a -+ dnl fallback. -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir -l$name" -+ fi -+ fi -+ dnl Assume the include files are nearby. -+ additional_includedir= -+ case "$found_dir" in -+ */lib | */lib/) -+ basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e 's,/lib/*$,,'` -+ additional_includedir="$basedir/include" -+ ;; -+ esac -+ if test "X$additional_includedir" != "X"; then -+ dnl Potentially add $additional_includedir to $INCNAME. -+ dnl But don't add it -+ dnl 1. if it's the standard /usr/include, -+ dnl 2. if it's /usr/local/include and we are using GCC on Linux, -+ dnl 3. if it's already present in $CPPFLAGS or the already -+ dnl constructed $INCNAME, -+ dnl 4. if it doesn't exist as a directory. 
-+ if test "X$additional_includedir" != "X/usr/include"; then -+ haveit= -+ if test "X$additional_includedir" = "X/usr/local/include"; then -+ if test -n "$GCC"; then -+ case $host_os in -+ linux*) haveit=yes;; -+ esac -+ fi -+ fi -+ if test -z "$haveit"; then -+ for x in $CPPFLAGS $INC[]NAME; do -+ AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) -+ if test "X$x" = "X-I$additional_includedir"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ if test -d "$additional_includedir"; then -+ dnl Really add $additional_includedir to $INCNAME. -+ INC[]NAME="${INC[]NAME}${INC[]NAME:+ }-I$additional_includedir" -+ fi -+ fi -+ fi -+ fi -+ fi -+ dnl Look for dependencies. -+ if test -n "$found_la"; then -+ dnl Read the .la file. It defines the variables -+ dnl dlname, library_names, old_library, dependency_libs, current, -+ dnl age, revision, installed, dlopen, dlpreopen, libdir. -+ save_libdir="$libdir" -+ case "$found_la" in -+ */* | *\\*) . "$found_la" ;; -+ *) . "./$found_la" ;; -+ esac -+ libdir="$save_libdir" -+ dnl We use only dependency_libs. -+ for dep in $dependency_libs; do -+ case "$dep" in -+ -L*) -+ additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` -+ dnl Potentially add $additional_libdir to $LIBNAME and $LTLIBNAME. -+ dnl But don't add it -+ dnl 1. if it's the standard /usr/lib, -+ dnl 2. if it's /usr/local/lib and we are using GCC on Linux, -+ dnl 3. if it's already present in $LDFLAGS or the already -+ dnl constructed $LIBNAME, -+ dnl 4. if it doesn't exist as a directory. -+ if test "X$additional_libdir" != "X/usr/lib"; then -+ haveit= -+ if test "X$additional_libdir" = "X/usr/local/lib"; then -+ if test -n "$GCC"; then -+ case $host_os in -+ linux*) haveit=yes;; -+ esac -+ fi -+ fi -+ if test -z "$haveit"; then -+ haveit= -+ for x in $LDFLAGS $LIB[]NAME; do -+ AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) -+ if test "X$x" = "X-L$additional_libdir"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ if test -d "$additional_libdir"; then -+ dnl Really add $additional_libdir to $LIBNAME. -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$additional_libdir" -+ fi -+ fi -+ haveit= -+ for x in $LDFLAGS $LTLIB[]NAME; do -+ AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) -+ if test "X$x" = "X-L$additional_libdir"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ if test -d "$additional_libdir"; then -+ dnl Really add $additional_libdir to $LTLIBNAME. -+ LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$additional_libdir" -+ fi -+ fi -+ fi -+ fi -+ ;; -+ -R*) -+ dir=`echo "X$dep" | sed -e 's/^X-R//'` -+ if test "$enable_rpath" != no; then -+ dnl Potentially add DIR to rpathdirs. -+ dnl The rpathdirs will be appended to $LIBNAME at the end. -+ haveit= -+ for x in $rpathdirs; do -+ if test "X$x" = "X$dir"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ rpathdirs="$rpathdirs $dir" -+ fi -+ dnl Potentially add DIR to ltrpathdirs. -+ dnl The ltrpathdirs will be appended to $LTLIBNAME at the end. -+ haveit= -+ for x in $ltrpathdirs; do -+ if test "X$x" = "X$dir"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ ltrpathdirs="$ltrpathdirs $dir" -+ fi -+ fi -+ ;; -+ -l*) -+ dnl Handle this in the next round. -+ names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` -+ ;; -+ *.la) -+ dnl Handle this in the next round. Throw away the .la's -+ dnl directory; it is already contained in a preceding -L -+ dnl option. 
-+ names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` -+ ;; -+ *) -+ dnl Most likely an immediate library name. -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$dep" -+ LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$dep" -+ ;; -+ esac -+ done -+ fi -+ else -+ dnl Didn't find the library; assume it is in the system directories -+ dnl known to the linker and runtime loader. (All the system -+ dnl directories known to the linker should also be known to the -+ dnl runtime loader, otherwise the system is severely misconfigured.) -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name" -+ LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-l$name" -+ fi -+ fi -+ fi -+ done -+ done -+ if test "X$rpathdirs" != "X"; then -+ if test -n "$hardcode_libdir_separator"; then -+ dnl Weird platform: only the last -rpath option counts, the user must -+ dnl pass all path elements in one option. We can arrange that for a -+ dnl single library, but not when more than one $LIBNAMEs are used. -+ alldirs= -+ for found_dir in $rpathdirs; do -+ alldirs="${alldirs}${alldirs:+$hardcode_libdir_separator}$found_dir" -+ done -+ dnl Note: hardcode_libdir_flag_spec uses $libdir and $wl. -+ acl_save_libdir="$libdir" -+ libdir="$alldirs" -+ eval flag=\"$hardcode_libdir_flag_spec\" -+ libdir="$acl_save_libdir" -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" -+ else -+ dnl The -rpath options are cumulative. -+ for found_dir in $rpathdirs; do -+ acl_save_libdir="$libdir" -+ libdir="$found_dir" -+ eval flag=\"$hardcode_libdir_flag_spec\" -+ libdir="$acl_save_libdir" -+ LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" -+ done -+ fi -+ fi -+ if test "X$ltrpathdirs" != "X"; then -+ dnl When using libtool, the option that works for both libraries and -+ dnl executables is -R. The -R options are cumulative. -+ for found_dir in $ltrpathdirs; do -+ LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-R$found_dir" -+ done -+ fi -+]) -+ -+dnl AC_LIB_APPENDTOVAR(VAR, CONTENTS) appends the elements of CONTENTS to VAR, -+dnl unless already present in VAR. -+dnl Works only for CPPFLAGS, not for LIB* variables because that sometimes -+dnl contains two or three consecutive elements that belong together. -+AC_DEFUN([AC_LIB_APPENDTOVAR], -+[ -+ for element in [$2]; do -+ haveit= -+ for x in $[$1]; do -+ AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) -+ if test "X$x" = "X$element"; then -+ haveit=yes -+ break -+ fi -+ done -+ if test -z "$haveit"; then -+ [$1]="${[$1]}${[$1]:+ }$element" -+ fi -+ done -+]) -+ -+# lib-ld.m4 serial 3 (gettext-0.13) -+dnl Copyright (C) 1996-2003 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl Subroutines of libtool.m4, -+dnl with replacements s/AC_/AC_LIB/ and s/lt_cv/acl_cv/ to avoid collision -+dnl with libtool.m4. -+ -+dnl From libtool-1.4. Sets the variable with_gnu_ld to yes or no. -+AC_DEFUN([AC_LIB_PROG_LD_GNU], -+[AC_CACHE_CHECK([if the linker ($LD) is GNU ld], acl_cv_prog_gnu_ld, -+[# I'd rather use --version here, but apparently some GNU ld's only accept -v. 
-+case `$LD -v 2>&1 conf$$.sh -+ echo "exit 0" >>conf$$.sh -+ chmod +x conf$$.sh -+ if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then -+ PATH_SEPARATOR=';' -+ else -+ PATH_SEPARATOR=: -+ fi -+ rm -f conf$$.sh -+fi -+ac_prog=ld -+if test "$GCC" = yes; then -+ # Check if gcc -print-prog-name=ld gives a path. -+ AC_MSG_CHECKING([for ld used by GCC]) -+ case $host in -+ *-*-mingw*) -+ # gcc leaves a trailing carriage return which upsets mingw -+ ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; -+ *) -+ ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; -+ esac -+ case $ac_prog in -+ # Accept absolute paths. -+ [[\\/]* | [A-Za-z]:[\\/]*)] -+ [re_direlt='/[^/][^/]*/\.\./'] -+ # Canonicalize the path of ld -+ ac_prog=`echo $ac_prog| sed 's%\\\\%/%g'` -+ while echo $ac_prog | grep "$re_direlt" > /dev/null 2>&1; do -+ ac_prog=`echo $ac_prog| sed "s%$re_direlt%/%"` -+ done -+ test -z "$LD" && LD="$ac_prog" -+ ;; -+ "") -+ # If it fails, then pretend we aren't using GCC. -+ ac_prog=ld -+ ;; -+ *) -+ # If it is relative, then search for the first ld in PATH. -+ with_gnu_ld=unknown -+ ;; -+ esac -+elif test "$with_gnu_ld" = yes; then -+ AC_MSG_CHECKING([for GNU ld]) -+else -+ AC_MSG_CHECKING([for non-GNU ld]) -+fi -+AC_CACHE_VAL(acl_cv_path_LD, -+[if test -z "$LD"; then -+ IFS="${IFS= }"; ac_save_ifs="$IFS"; IFS="${IFS}${PATH_SEPARATOR-:}" -+ for ac_dir in $PATH; do -+ test -z "$ac_dir" && ac_dir=. -+ if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then -+ acl_cv_path_LD="$ac_dir/$ac_prog" -+ # Check to see if the program is GNU ld. I'd rather use --version, -+ # but apparently some GNU ld's only accept -v. -+ # Break only if it was the GNU/non-GNU ld that we prefer. -+ case `"$acl_cv_path_LD" -v 2>&1 < /dev/null` in -+ *GNU* | *'with BFD'*) -+ test "$with_gnu_ld" != no && break ;; -+ *) -+ test "$with_gnu_ld" != yes && break ;; -+ esac -+ fi -+ done -+ IFS="$ac_save_ifs" -+else -+ acl_cv_path_LD="$LD" # Let the user override the test with a path. -+fi]) -+LD="$acl_cv_path_LD" -+if test -n "$LD"; then -+ AC_MSG_RESULT($LD) -+else -+ AC_MSG_RESULT(no) -+fi -+test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH]) -+AC_LIB_PROG_LD_GNU -+]) -+ -+# isc-posix.m4 serial 2 (gettext-0.11.2) -+dnl Copyright (C) 1995-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+# This file is not needed with autoconf-2.53 and newer. Remove it in 2005. -+ -+# This test replaces the one in autoconf. -+# Currently this macro should have the same name as the autoconf macro -+# because gettext's gettext.m4 (distributed in the automake package) -+# still uses it. Otherwise, the use in gettext.m4 makes autoheader -+# give these diagnostics: -+# configure.in:556: AC_TRY_COMPILE was called before AC_ISC_POSIX -+# configure.in:556: AC_TRY_RUN was called before AC_ISC_POSIX -+ -+undefine([AC_ISC_POSIX]) -+ -+AC_DEFUN([AC_ISC_POSIX], -+ [ -+ dnl This test replaces the obsolescent AC_ISC_POSIX kludge. -+ AC_CHECK_LIB(cposix, strerror, [LIBS="$LIBS -lcposix"]) -+ ] -+) -+ -+# Check for stdbool.h that conforms to C99. -+ -+# Copyright (C) 2002-2003 Free Software Foundation, Inc. 
-+ -+# This program is free software; you can redistribute it and/or modify -+# it under the terms of the GNU General Public License as published by -+# the Free Software Foundation; either version 2, or (at your option) -+# any later version. -+ -+# This program is distributed in the hope that it will be useful, -+# but WITHOUT ANY WARRANTY; without even the implied warranty of -+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+# GNU General Public License for more details. -+ -+# You should have received a copy of the GNU General Public License -+# along with this program; if not, write to the Free Software -+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA -+# 02111-1307, USA. -+ -+# Prepare for substituting if it is not supported. -+ -+AC_DEFUN([AM_STDBOOL_H], -+[ -+ AC_REQUIRE([AC_HEADER_STDBOOL]) -+ -+ # Define two additional variables used in the Makefile substitution. -+ -+ if test "$ac_cv_header_stdbool_h" = yes; then -+ STDBOOL_H='' -+ else -+ STDBOOL_H='stdbool.h' -+ fi -+ AC_SUBST([STDBOOL_H]) -+ -+ if test "$ac_cv_type__Bool" = yes; then -+ HAVE__BOOL=1 -+ else -+ HAVE__BOOL=0 -+ fi -+ AC_SUBST([HAVE__BOOL]) -+]) -+ -+# This macro is only needed in autoconf <= 2.54. Newer versions of autoconf -+# have this macro built-in. -+ -+AC_DEFUN([AC_HEADER_STDBOOL], -+ [AC_CACHE_CHECK([for stdbool.h that conforms to C99], -+ [ac_cv_header_stdbool_h], -+ [AC_TRY_COMPILE( -+ [ -+ #include -+ #ifndef bool -+ "error: bool is not defined" -+ #endif -+ #ifndef false -+ "error: false is not defined" -+ #endif -+ #if false -+ "error: false is not 0" -+ #endif -+ #ifndef true -+ "error: false is not defined" -+ #endif -+ #if true != 1 -+ "error: true is not 1" -+ #endif -+ #ifndef __bool_true_false_are_defined -+ "error: __bool_true_false_are_defined is not defined" -+ #endif -+ -+ struct s { _Bool s: 1; _Bool t; } s; -+ -+ char a[true == 1 ? 1 : -1]; -+ char b[false == 0 ? 1 : -1]; -+ char c[__bool_true_false_are_defined == 1 ? 1 : -1]; -+ char d[(bool) -0.5 == true ? 1 : -1]; -+ bool e = &s; -+ char f[(_Bool) -0.0 == false ? 1 : -1]; -+ char g[true]; -+ char h[sizeof (_Bool)]; -+ char i[sizeof s.t]; -+ ], -+ [ return !a + !b + !c + !d + !e + !f + !g + !h + !i; ], -+ [ac_cv_header_stdbool_h=yes], -+ [ac_cv_header_stdbool_h=no])]) -+ AC_CHECK_TYPES([_Bool]) -+ if test $ac_cv_header_stdbool_h = yes; then -+ AC_DEFINE(HAVE_STDBOOL_H, 1, [Define to 1 if stdbool.h conforms to C99.]) -+ fi]) -+ -+#serial 5 -+ -+dnl From Jim Meyering -+ -+dnl Define HAVE_STRUCT_UTIMBUF if `struct utimbuf' is declared -- -+dnl usually in . -+dnl Some systems have utime.h but don't declare the struct anywhere. -+ -+AC_DEFUN([jm_CHECK_TYPE_STRUCT_UTIMBUF], -+[ -+ AC_CHECK_HEADERS_ONCE(sys/time.h utime.h) -+ AC_REQUIRE([AC_HEADER_TIME]) -+ AC_CACHE_CHECK([for struct utimbuf], fu_cv_sys_struct_utimbuf, -+ [AC_TRY_COMPILE( -+ [ -+#ifdef TIME_WITH_SYS_TIME -+# include -+# include -+#else -+# ifdef HAVE_SYS_TIME_H -+# include -+# else -+# include -+# endif -+#endif -+#ifdef HAVE_UTIME_H -+# include -+#endif -+ ], -+ [static struct utimbuf x; x.actime = x.modtime;], -+ fu_cv_sys_struct_utimbuf=yes, -+ fu_cv_sys_struct_utimbuf=no) -+ ]) -+ -+ if test $fu_cv_sys_struct_utimbuf = yes; then -+ AC_DEFINE(HAVE_STRUCT_UTIMBUF, 1, -+ [Define if struct utimbuf is declared -- usually in . -+ Some systems have utime.h but don't declare the struct anywhere. ]) -+ fi -+]) -+ -+# onceonly.m4 serial 3 -+dnl Copyright (C) 2002, 2003 Free Software Foundation, Inc. 
-+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl This file defines some "once only" variants of standard autoconf macros. -+dnl AC_CHECK_HEADERS_ONCE like AC_CHECK_HEADERS -+dnl AC_CHECK_FUNCS_ONCE like AC_CHECK_FUNCS -+dnl AC_CHECK_DECLS_ONCE like AC_CHECK_DECLS -+dnl AC_REQUIRE([AC_HEADER_STDC]) like AC_HEADER_STDC -+dnl The advantage is that the check for each of the headers/functions/decls -+dnl will be put only once into the 'configure' file. It keeps the size of -+dnl the 'configure' file down, and avoids redundant output when 'configure' -+dnl is run. -+dnl The drawback is that the checks cannot be conditionalized. If you write -+dnl if some_condition; then gl_CHECK_HEADERS(stdlib.h); fi -+dnl inside an AC_DEFUNed function, the gl_CHECK_HEADERS macro call expands to -+dnl empty, and the check will be inserted before the body of the AC_DEFUNed -+dnl function. -+ -+dnl Autoconf version 2.57 or newer is recommended. -+AC_PREREQ(2.54) -+ -+# AC_CHECK_HEADERS_ONCE(HEADER1 HEADER2 ...) is a once-only variant of -+# AC_CHECK_HEADERS(HEADER1 HEADER2 ...). -+AC_DEFUN([AC_CHECK_HEADERS_ONCE], [ -+ : -+ AC_FOREACH([gl_HEADER_NAME], [$1], [ -+ AC_DEFUN([gl_CHECK_HEADER_]m4_quote(translit(m4_defn([gl_HEADER_NAME]), -+ [-./], [___])), [ -+ AC_CHECK_HEADERS(gl_HEADER_NAME) -+ ]) -+ AC_REQUIRE([gl_CHECK_HEADER_]m4_quote(translit(gl_HEADER_NAME, -+ [-./], [___]))) -+ ]) -+]) -+ -+# AC_CHECK_FUNCS_ONCE(FUNC1 FUNC2 ...) is a once-only variant of -+# AC_CHECK_FUNCS(FUNC1 FUNC2 ...). -+AC_DEFUN([AC_CHECK_FUNCS_ONCE], [ -+ : -+ AC_FOREACH([gl_FUNC_NAME], [$1], [ -+ AC_DEFUN([gl_CHECK_FUNC_]m4_defn([gl_FUNC_NAME]), [ -+ AC_CHECK_FUNCS(m4_defn([gl_FUNC_NAME])) -+ ]) -+ AC_REQUIRE([gl_CHECK_FUNC_]m4_defn([gl_FUNC_NAME])) -+ ]) -+]) -+ -+# AC_CHECK_DECLS_ONCE(DECL1 DECL2 ...) is a once-only variant of -+# AC_CHECK_DECLS(DECL1, DECL2, ...). -+AC_DEFUN([AC_CHECK_DECLS_ONCE], [ -+ : -+ AC_FOREACH([gl_DECL_NAME], [$1], [ -+ AC_DEFUN([gl_CHECK_DECL_]m4_defn([gl_DECL_NAME]), [ -+ AC_CHECK_DECLS(m4_defn([gl_DECL_NAME])) -+ ]) -+ AC_REQUIRE([gl_CHECK_DECL_]m4_defn([gl_DECL_NAME])) -+ ]) -+]) -+ - # backupfile.m4 serial 1 - dnl Copyright (C) 2002 Free Software Foundation, Inc. - dnl This file is free software, distributed under the terms of the GNU -@@ -21,6 +1076,7 @@ - AC_CHECK_HEADERS_ONCE(limits.h string.h unistd.h) - AC_CHECK_FUNCS(pathconf) - ]) -+ - #serial 5 - - dnl From Jim Meyering. -@@ -63,31 +1119,7 @@ - fi - ] - ) --# dirname.m4 serial 1 --dnl Copyright (C) 2002 Free Software Foundation, Inc. --dnl This file is free software, distributed under the terms of the GNU --dnl General Public License. As a special exception to the GNU General --dnl Public License, this file may be distributed as part of a program --dnl that contains a configuration script generated by Autoconf, under --dnl the same distribution terms as the rest of that program. - --AC_DEFUN([gl_DIRNAME], --[ -- dnl Prerequisites of lib/dirname.h. -- AC_REQUIRE([jm_AC_DOS]) -- -- dnl Prerequisites of lib/dirname.c. -- AC_REQUIRE([AC_HEADER_STDC]) -- AC_CHECK_HEADERS_ONCE(string.h) -- -- dnl Prerequisites of lib/basename.c. -- AC_REQUIRE([AC_HEADER_STDC]) -- AC_CHECK_HEADERS_ONCE(string.h) -- -- dnl Prerequisites of lib/stripslash.c. 
-- AC_REQUIRE([AC_HEADER_STDC]) -- AC_CHECK_HEADERS_ONCE(string.h) --]) - #serial 5 - - # Define some macros required for proper operation of code in lib/*.c -@@ -141,6 +1173,33 @@ - [Define if the backslash character may also serve as a file name - component separator.]) - ]) -+ -+# dirname.m4 serial 1 -+dnl Copyright (C) 2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+AC_DEFUN([gl_DIRNAME], -+[ -+ dnl Prerequisites of lib/dirname.h. -+ AC_REQUIRE([jm_AC_DOS]) -+ -+ dnl Prerequisites of lib/dirname.c. -+ AC_REQUIRE([AC_HEADER_STDC]) -+ AC_CHECK_HEADERS_ONCE(string.h) -+ -+ dnl Prerequisites of lib/basename.c. -+ AC_REQUIRE([AC_HEADER_STDC]) -+ AC_CHECK_HEADERS_ONCE(string.h) -+ -+ dnl Prerequisites of lib/stripslash.c. -+ AC_REQUIRE([AC_HEADER_STDC]) -+ AC_CHECK_HEADERS_ONCE(string.h) -+]) -+ - #serial 7 - - AC_DEFUN([gl_ERROR], -@@ -159,7 +1218,67 @@ - AC_CHECK_DECLS([strerror]) - AC_FUNC_STRERROR_R - ]) --# getopt.m4 serial 1 -+ -+# memchr.m4 serial 1 -+dnl Copyright (C) 2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+AC_DEFUN([gl_FUNC_MEMCHR], -+[ -+ AC_REPLACE_FUNCS(memchr) -+ if test $ac_cv_func_memchr = no; then -+ jm_PREREQ_MEMCHR -+ fi -+]) -+ -+# Prerequisites of lib/memchr.c. -+AC_DEFUN([jm_PREREQ_MEMCHR], [ -+ AC_CHECK_HEADERS_ONCE(limits.h stdlib.h) -+ AC_CHECK_HEADERS(bp-sym.h) -+]) -+ -+# rmdir.m4 serial 1 -+dnl Copyright (C) 2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+AC_DEFUN([gl_FUNC_RMDIR], -+[ -+ AC_REPLACE_FUNCS(rmdir) -+ if test $ac_cv_func_rmdir = no; then -+ gl_PREREQ_RMDIR -+ fi -+]) -+ -+# Prerequisites of lib/rmdir.c. -+AC_DEFUN([gl_PREREQ_RMDIR], [ -+ AC_REQUIRE([AC_HEADER_STAT]) -+ : -+]) -+ -+ -+# getopt.m4 serial 1 -+dnl Copyright (C) 2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+AC_DEFUN([gl_GETOPT], -+[ -+ dnl Prerequisites of lib/getopt.c. -+ AC_CHECK_HEADERS_ONCE(string.h) -+]) -+ -+# xalloc.m4 serial 1 - dnl Copyright (C) 2002 Free Software Foundation, Inc. - dnl This file is free software, distributed under the terms of the GNU - dnl General Public License. 
As a special exception to the GNU General -@@ -167,11 +1286,25 @@ - dnl that contains a configuration script generated by Autoconf, under - dnl the same distribution terms as the rest of that program. - --AC_DEFUN([gl_GETOPT], -+AC_DEFUN([gl_XALLOC], - [ -- dnl Prerequisites of lib/getopt.c. -+ gl_PREREQ_XMALLOC -+ gl_PREREQ_XSTRDUP -+]) -+ -+# Prerequisites of lib/xmalloc.c. -+AC_DEFUN([gl_PREREQ_XMALLOC], [ -+ AC_REQUIRE([AC_HEADER_STDC]) -+ AC_REQUIRE([jm_FUNC_MALLOC]) -+ AC_REQUIRE([jm_FUNC_REALLOC]) -+]) -+ -+# Prerequisites of lib/xstrdup.c. -+AC_DEFUN([gl_PREREQ_XSTRDUP], [ -+ AC_REQUIRE([AC_HEADER_STDC]) - AC_CHECK_HEADERS_ONCE(string.h) - ]) -+ - # malloc.m4 serial 7 - dnl Copyright (C) 2002 Free Software Foundation, Inc. - dnl This file is free software, distributed under the terms of the GNU -@@ -197,66 +1330,8 @@ - AC_DEFUN([gl_PREREQ_MALLOC], [ - : - ]) --# mbrtowc.m4 serial 5 --dnl Copyright (C) 2001-2002 Free Software Foundation, Inc. --dnl This file is free software, distributed under the terms of the GNU --dnl General Public License. As a special exception to the GNU General --dnl Public License, this file may be distributed as part of a program --dnl that contains a configuration script generated by Autoconf, under --dnl the same distribution terms as the rest of that program. -- --dnl From Paul Eggert -- --dnl This file can be removed, and jm_FUNC_MBRTOWC replaced with --dnl AC_FUNC_MBRTOWC, when autoconf 2.57 can be assumed everywhere. -- --AC_DEFUN([jm_FUNC_MBRTOWC], --[ -- AC_CACHE_CHECK([whether mbrtowc and mbstate_t are properly declared], -- jm_cv_func_mbrtowc, -- [AC_TRY_LINK( -- [#include ], -- [mbstate_t state; return ! (sizeof state && mbrtowc);], -- jm_cv_func_mbrtowc=yes, -- jm_cv_func_mbrtowc=no)]) -- if test $jm_cv_func_mbrtowc = yes; then -- AC_DEFINE(HAVE_MBRTOWC, 1, -- [Define to 1 if mbrtowc and mbstate_t are properly declared.]) -- fi --]) --# mbstate_t.m4 serial 9 --dnl Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc. --dnl This file is free software, distributed under the terms of the GNU --dnl General Public License. As a special exception to the GNU General --dnl Public License, this file may be distributed as part of a program --dnl that contains a configuration script generated by Autoconf, under --dnl the same distribution terms as the rest of that program. -- --# From Paul Eggert. -- --# BeOS 5 has but does not define mbstate_t, --# so you can't declare an object of that type. --# Check for this incompatibility with Standard C. - --# AC_TYPE_MBSTATE_T --# ----------------- --AC_DEFUN([AC_TYPE_MBSTATE_T], -- [AC_CACHE_CHECK([for mbstate_t], ac_cv_type_mbstate_t, -- [AC_COMPILE_IFELSE( -- [AC_LANG_PROGRAM( -- [AC_INCLUDES_DEFAULT --# include ], -- [mbstate_t x; return sizeof x;])], -- [ac_cv_type_mbstate_t=yes], -- [ac_cv_type_mbstate_t=no])]) -- if test $ac_cv_type_mbstate_t = yes; then -- AC_DEFINE([HAVE_MBSTATE_T], 1, -- [Define to 1 if declares mbstate_t.]) -- else -- AC_DEFINE([mbstate_t], int, -- [Define to a type if does not define.]) -- fi]) --# memchr.m4 serial 1 -+# realloc.m4 serial 7 - dnl Copyright (C) 2002 Free Software Foundation, Inc. - dnl This file is free software, distributed under the terms of the GNU - dnl General Public License. As a special exception to the GNU General -@@ -264,116 +1339,24 @@ - dnl that contains a configuration script generated by Autoconf, under - dnl the same distribution terms as the rest of that program. - --AC_DEFUN([gl_FUNC_MEMCHR], -+dnl From Jim Meyering. 
-+dnl Determine whether realloc works when both arguments are 0. -+dnl If it doesn't, arrange to use the replacement function. -+ -+AC_DEFUN([jm_FUNC_REALLOC], - [ -- AC_REPLACE_FUNCS(memchr) -- if test $ac_cv_func_memchr = no; then -- jm_PREREQ_MEMCHR -+ AC_REQUIRE([AC_FUNC_REALLOC]) -+ dnl autoconf < 2.57 used the symbol ac_cv_func_realloc_works. -+ if test X"$ac_cv_func_realloc_0_nonnull" = Xno || test X"$ac_cv_func_realloc_works" = Xno; then -+ gl_PREREQ_REALLOC - fi - ]) - --# Prerequisites of lib/memchr.c. --AC_DEFUN([jm_PREREQ_MEMCHR], [ -- AC_CHECK_HEADERS_ONCE(limits.h stdlib.h) -- AC_CHECK_HEADERS(bp-sym.h) --]) --#serial 1 -- --dnl From Mumit Khan and Paul Eggert --dnl Determine whether mkdir accepts only one argument instead of the usual two. -- --AC_DEFUN([PATCH_FUNC_MKDIR_TAKES_ONE_ARG], -- [AC_CHECK_FUNCS(mkdir) -- AC_CACHE_CHECK([whether mkdir takes only one argument], -- patch_cv_mkdir_takes_one_arg, -- [patch_cv_mkdir_takes_one_arg=no -- if test $ac_cv_func_mkdir = yes; then -- AC_TRY_COMPILE([ --#include --#include -- ], -- [mkdir (".", 0);], -- , -- [AC_TRY_COMPILE([ --#include --#include -- ], -- [mkdir (".");], -- patch_cv_mkdir_takes_one_arg=yes -- )] -- ) -- fi -- ] -- ) -- if test $patch_cv_mkdir_takes_one_arg = yes; then -- AC_DEFINE([MKDIR_TAKES_ONE_ARG], 1, -- [Define if mkdir takes only one argument.]) -- fi -- ] --) --# onceonly.m4 serial 3 --dnl Copyright (C) 2002, 2003 Free Software Foundation, Inc. --dnl This file is free software, distributed under the terms of the GNU --dnl General Public License. As a special exception to the GNU General --dnl Public License, this file may be distributed as part of a program --dnl that contains a configuration script generated by Autoconf, under --dnl the same distribution terms as the rest of that program. -- --dnl This file defines some "once only" variants of standard autoconf macros. --dnl AC_CHECK_HEADERS_ONCE like AC_CHECK_HEADERS --dnl AC_CHECK_FUNCS_ONCE like AC_CHECK_FUNCS --dnl AC_CHECK_DECLS_ONCE like AC_CHECK_DECLS --dnl AC_REQUIRE([AC_HEADER_STDC]) like AC_HEADER_STDC --dnl The advantage is that the check for each of the headers/functions/decls --dnl will be put only once into the 'configure' file. It keeps the size of --dnl the 'configure' file down, and avoids redundant output when 'configure' --dnl is run. --dnl The drawback is that the checks cannot be conditionalized. If you write --dnl if some_condition; then gl_CHECK_HEADERS(stdlib.h); fi --dnl inside an AC_DEFUNed function, the gl_CHECK_HEADERS macro call expands to --dnl empty, and the check will be inserted before the body of the AC_DEFUNed --dnl function. -- --dnl Autoconf version 2.57 or newer is recommended. --AC_PREREQ(2.54) -- --# AC_CHECK_HEADERS_ONCE(HEADER1 HEADER2 ...) is a once-only variant of --# AC_CHECK_HEADERS(HEADER1 HEADER2 ...). --AC_DEFUN([AC_CHECK_HEADERS_ONCE], [ -- : -- AC_FOREACH([gl_HEADER_NAME], [$1], [ -- AC_DEFUN([gl_CHECK_HEADER_]m4_quote(translit(m4_defn([gl_HEADER_NAME]), -- [-./], [___])), [ -- AC_CHECK_HEADERS(gl_HEADER_NAME) -- ]) -- AC_REQUIRE([gl_CHECK_HEADER_]m4_quote(translit(gl_HEADER_NAME, -- [-./], [___]))) -- ]) --]) -- --# AC_CHECK_FUNCS_ONCE(FUNC1 FUNC2 ...) is a once-only variant of --# AC_CHECK_FUNCS(FUNC1 FUNC2 ...). --AC_DEFUN([AC_CHECK_FUNCS_ONCE], [ -+# Prerequisites of lib/realloc.c. 
-+AC_DEFUN([gl_PREREQ_REALLOC], [ - : -- AC_FOREACH([gl_FUNC_NAME], [$1], [ -- AC_DEFUN([gl_CHECK_FUNC_]m4_defn([gl_FUNC_NAME]), [ -- AC_CHECK_FUNCS(m4_defn([gl_FUNC_NAME])) -- ]) -- AC_REQUIRE([gl_CHECK_FUNC_]m4_defn([gl_FUNC_NAME])) -- ]) - ]) - --# AC_CHECK_DECLS_ONCE(DECL1 DECL2 ...) is a once-only variant of --# AC_CHECK_DECLS(DECL1, DECL2, ...). --AC_DEFUN([AC_CHECK_DECLS_ONCE], [ -- : -- AC_FOREACH([gl_DECL_NAME], [$1], [ -- AC_DEFUN([gl_CHECK_DECL_]m4_defn([gl_DECL_NAME]), [ -- AC_CHECK_DECLS(m4_defn([gl_DECL_NAME])) -- ]) -- AC_REQUIRE([gl_CHECK_DECL_]m4_defn([gl_DECL_NAME])) -- ]) --]) - # quote.m4 serial 1 - dnl Copyright (C) 2002 Free Software Foundation, Inc. - dnl This file is free software, distributed under the terms of the GNU -@@ -387,6 +1370,7 @@ - dnl Prerequisites of lib/quote.c. - AC_CHECK_HEADERS_ONCE(stddef.h) - ]) -+ - # quotearg.m4 serial 1 - dnl Copyright (C) 2002 Free Software Foundation, Inc. - dnl This file is free software, distributed under the terms of the GNU -@@ -403,32 +1387,69 @@ - AC_TYPE_MBSTATE_T - jm_FUNC_MBRTOWC - ]) --# realloc.m4 serial 7 --dnl Copyright (C) 2002 Free Software Foundation, Inc. -+ -+# mbstate_t.m4 serial 9 -+dnl Copyright (C) 2000, 2001, 2002 Free Software Foundation, Inc. - dnl This file is free software, distributed under the terms of the GNU - dnl General Public License. As a special exception to the GNU General - dnl Public License, this file may be distributed as part of a program - dnl that contains a configuration script generated by Autoconf, under - dnl the same distribution terms as the rest of that program. - --dnl From Jim Meyering. --dnl Determine whether realloc works when both arguments are 0. --dnl If it doesn't, arrange to use the replacement function. -+# From Paul Eggert. - --AC_DEFUN([jm_FUNC_REALLOC], -+# BeOS 5 has but does not define mbstate_t, -+# so you can't declare an object of that type. -+# Check for this incompatibility with Standard C. -+ -+# AC_TYPE_MBSTATE_T -+# ----------------- -+AC_DEFUN([AC_TYPE_MBSTATE_T], -+ [AC_CACHE_CHECK([for mbstate_t], ac_cv_type_mbstate_t, -+ [AC_COMPILE_IFELSE( -+ [AC_LANG_PROGRAM( -+ [AC_INCLUDES_DEFAULT -+# include ], -+ [mbstate_t x; return sizeof x;])], -+ [ac_cv_type_mbstate_t=yes], -+ [ac_cv_type_mbstate_t=no])]) -+ if test $ac_cv_type_mbstate_t = yes; then -+ AC_DEFINE([HAVE_MBSTATE_T], 1, -+ [Define to 1 if declares mbstate_t.]) -+ else -+ AC_DEFINE([mbstate_t], int, -+ [Define to a type if does not define.]) -+ fi]) -+ -+# mbrtowc.m4 serial 5 -+dnl Copyright (C) 2001-2002 Free Software Foundation, Inc. -+dnl This file is free software, distributed under the terms of the GNU -+dnl General Public License. As a special exception to the GNU General -+dnl Public License, this file may be distributed as part of a program -+dnl that contains a configuration script generated by Autoconf, under -+dnl the same distribution terms as the rest of that program. -+ -+dnl From Paul Eggert -+ -+dnl This file can be removed, and jm_FUNC_MBRTOWC replaced with -+dnl AC_FUNC_MBRTOWC, when autoconf 2.57 can be assumed everywhere. -+ -+AC_DEFUN([jm_FUNC_MBRTOWC], - [ -- AC_REQUIRE([AC_FUNC_REALLOC]) -- dnl autoconf < 2.57 used the symbol ac_cv_func_realloc_works. -- if test X"$ac_cv_func_realloc_0_nonnull" = Xno || test X"$ac_cv_func_realloc_works" = Xno; then -- gl_PREREQ_REALLOC -+ AC_CACHE_CHECK([whether mbrtowc and mbstate_t are properly declared], -+ jm_cv_func_mbrtowc, -+ [AC_TRY_LINK( -+ [#include ], -+ [mbstate_t state; return ! 
(sizeof state && mbrtowc);], -+ jm_cv_func_mbrtowc=yes, -+ jm_cv_func_mbrtowc=no)]) -+ if test $jm_cv_func_mbrtowc = yes; then -+ AC_DEFINE(HAVE_MBRTOWC, 1, -+ [Define to 1 if mbrtowc and mbstate_t are properly declared.]) - fi - ]) - --# Prerequisites of lib/realloc.c. --AC_DEFUN([gl_PREREQ_REALLOC], [ -- : --]) --# rmdir.m4 serial 1 -+# hash.m4 serial 1 - dnl Copyright (C) 2002 Free Software Foundation, Inc. - dnl This file is free software, distributed under the terms of the GNU - dnl General Public License. As a special exception to the GNU General -@@ -436,18 +1457,71 @@ - dnl that contains a configuration script generated by Autoconf, under - dnl the same distribution terms as the rest of that program. - --AC_DEFUN([gl_FUNC_RMDIR], -+AC_DEFUN([gl_HASH], - [ -- AC_REPLACE_FUNCS(rmdir) -- if test $ac_cv_func_rmdir = no; then -- gl_PREREQ_RMDIR -- fi -+ dnl Prerequisites of lib/hash.c. -+ AC_CHECK_HEADERS_ONCE(stdlib.h) -+ AC_HEADER_STDBOOL -+ AC_CHECK_DECLS_ONCE(free malloc) - ]) - --# Prerequisites of lib/rmdir.c. --AC_DEFUN([gl_PREREQ_RMDIR], [ -- AC_REQUIRE([AC_HEADER_STAT]) -- : -+AC_DEFUN([ag_CHECK_NANOSECOND_STAT], -+ [AC_CACHE_CHECK([for nanosecond timestamps in struct stat], -+ [ac_cv_stat_nsec], -+ [AC_TRY_COMPILE( -+ [ -+ #include -+ #include -+ #include -+ struct stat st; -+ ], -+ [ return st.st_atimensec + st.st_mtimensec + st.st_ctimensec; ], -+ [ac_cv_stat_nsec=yes], -+ [ac_cv_stat_nsec=no]) -+ ]) -+ if test $ac_cv_stat_nsec = yes; then -+ AC_DEFINE(HAVE_STAT_NSEC, 1, [Define to 1 if struct stat has nanosecond timestamps.]) -+ fi -+ -+ AC_CACHE_CHECK([for nanosecond timestamps in struct stat], -+ [ac_cv_stat_timeval], -+ [AC_TRY_COMPILE( -+ [ -+ #include -+ #include -+ #include -+ #include -+ struct stat st; -+ ], -+ [ return st.st_atim.tv_nsec + st.st_mtim.tv_nsec + st.st_ctim.tv_nsec; ], -+ [ac_cv_stat_timeval=yes], -+ [ac_cv_stat_timeval=no]) -+ ]) -+ if test $ac_cv_stat_timeval = yes; then -+ AC_DEFINE(HAVE_STAT_TIMEVAL, 1, [Define to 1 if struct stat comtains struct timeval's.]) -+ fi]) -+ -+#serial 7 -*- autoconf -*- -+ -+dnl From Jim Meyering. -+dnl -+dnl See if the glibc *_unlocked I/O macros or functions are available. -+dnl Use only those *_unlocked macros or functions that are declared -+dnl (because some of them were declared in Solaris 2.5.1 but were removed -+dnl in Solaris 2.6, whereas we want binaries built on Solaris 2.5.1 to run -+dnl on Solaris 2.6). -+ -+AC_DEFUN([jm_FUNC_GLIBC_UNLOCKED_IO], -+[ -+ dnl Persuade glibc to declare fgets_unlocked(), fputs_unlocked() -+ dnl etc. -+ AC_REQUIRE([AC_GNU_SOURCE]) -+ -+ AC_CHECK_DECLS_ONCE( -+ [clearerr_unlocked feof_unlocked ferror_unlocked -+ fflush_unlocked fgets_unlocked fputc_unlocked fputs_unlocked -+ fread_unlocked fwrite_unlocked getc_unlocked -+ getchar_unlocked putc_unlocked putchar_unlocked]) - ]) - - # Check for setmode, DOS style. -@@ -488,180 +1562,39 @@ - AC_DEFINE(HAVE_SETMODE_DOS, 1, - [Define to 1 if you have the DOS-style `setmode' function.]) - fi]) --# Check for stdbool.h that conforms to C99. -- --# Copyright (C) 2002-2003 Free Software Foundation, Inc. -- --# This program is free software; you can redistribute it and/or modify --# it under the terms of the GNU General Public License as published by --# the Free Software Foundation; either version 2, or (at your option) --# any later version. -- --# This program is distributed in the hope that it will be useful, --# but WITHOUT ANY WARRANTY; without even the implied warranty of --# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the --# GNU General Public License for more details. -- --# You should have received a copy of the GNU General Public License --# along with this program; if not, write to the Free Software --# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA --# 02111-1307, USA. -- --# Prepare for substituting if it is not supported. -- --AC_DEFUN([AM_STDBOOL_H], --[ -- AC_REQUIRE([AC_HEADER_STDBOOL]) -- -- # Define two additional variables used in the Makefile substitution. -- -- if test "$ac_cv_header_stdbool_h" = yes; then -- STDBOOL_H='' -- else -- STDBOOL_H='stdbool.h' -- fi -- AC_SUBST([STDBOOL_H]) -- -- if test "$ac_cv_type__Bool" = yes; then -- HAVE__BOOL=1 -- else -- HAVE__BOOL=0 -- fi -- AC_SUBST([HAVE__BOOL]) --]) -- --# This macro is only needed in autoconf <= 2.54. Newer versions of autoconf --# have this macro built-in. -- --AC_DEFUN([AC_HEADER_STDBOOL], -- [AC_CACHE_CHECK([for stdbool.h that conforms to C99], -- [ac_cv_header_stdbool_h], -- [AC_TRY_COMPILE( -- [ -- #include -- #ifndef bool -- "error: bool is not defined" -- #endif -- #ifndef false -- "error: false is not defined" -- #endif -- #if false -- "error: false is not 0" -- #endif -- #ifndef true -- "error: false is not defined" -- #endif -- #if true != 1 -- "error: true is not 1" -- #endif -- #ifndef __bool_true_false_are_defined -- "error: __bool_true_false_are_defined is not defined" -- #endif -- -- struct s { _Bool s: 1; _Bool t; } s; -- -- char a[true == 1 ? 1 : -1]; -- char b[false == 0 ? 1 : -1]; -- char c[__bool_true_false_are_defined == 1 ? 1 : -1]; -- char d[(bool) -0.5 == true ? 1 : -1]; -- bool e = &s; -- char f[(_Bool) -0.0 == false ? 1 : -1]; -- char g[true]; -- char h[sizeof (_Bool)]; -- char i[sizeof s.t]; -- ], -- [ return !a + !b + !c + !d + !e + !f + !g + !h + !i; ], -- [ac_cv_header_stdbool_h=yes], -- [ac_cv_header_stdbool_h=no])]) -- AC_CHECK_TYPES([_Bool]) -- if test $ac_cv_header_stdbool_h = yes; then -- AC_DEFINE(HAVE_STDBOOL_H, 1, [Define to 1 if stdbool.h conforms to C99.]) -- fi]) --#serial 7 -*- autoconf -*- -- --dnl From Jim Meyering. --dnl --dnl See if the glibc *_unlocked I/O macros or functions are available. --dnl Use only those *_unlocked macros or functions that are declared --dnl (because some of them were declared in Solaris 2.5.1 but were removed --dnl in Solaris 2.6, whereas we want binaries built on Solaris 2.5.1 to run --dnl on Solaris 2.6). -- --AC_DEFUN([jm_FUNC_GLIBC_UNLOCKED_IO], --[ -- dnl Persuade glibc to declare fgets_unlocked(), fputs_unlocked() -- dnl etc. -- AC_REQUIRE([AC_GNU_SOURCE]) -- -- AC_CHECK_DECLS_ONCE( -- [clearerr_unlocked feof_unlocked ferror_unlocked -- fflush_unlocked fgets_unlocked fputc_unlocked fputs_unlocked -- fread_unlocked fwrite_unlocked getc_unlocked -- getchar_unlocked putc_unlocked putchar_unlocked]) --]) --#serial 5 -- --dnl From Jim Meyering -- --dnl Define HAVE_STRUCT_UTIMBUF if `struct utimbuf' is declared -- --dnl usually in . --dnl Some systems have utime.h but don't declare the struct anywhere. 
-- --AC_DEFUN([jm_CHECK_TYPE_STRUCT_UTIMBUF], --[ -- AC_CHECK_HEADERS_ONCE(sys/time.h utime.h) -- AC_REQUIRE([AC_HEADER_TIME]) -- AC_CACHE_CHECK([for struct utimbuf], fu_cv_sys_struct_utimbuf, -- [AC_TRY_COMPILE( -- [ --#ifdef TIME_WITH_SYS_TIME --# include --# include --#else --# ifdef HAVE_SYS_TIME_H --# include --# else --# include --# endif --#endif --#ifdef HAVE_UTIME_H --# include --#endif -- ], -- [static struct utimbuf x; x.actime = x.modtime;], -- fu_cv_sys_struct_utimbuf=yes, -- fu_cv_sys_struct_utimbuf=no) -- ]) - -- if test $fu_cv_sys_struct_utimbuf = yes; then -- AC_DEFINE(HAVE_STRUCT_UTIMBUF, 1, -- [Define if struct utimbuf is declared -- usually in . -- Some systems have utime.h but don't declare the struct anywhere. ]) -- fi --]) --# xalloc.m4 serial 1 --dnl Copyright (C) 2002 Free Software Foundation, Inc. --dnl This file is free software, distributed under the terms of the GNU --dnl General Public License. As a special exception to the GNU General --dnl Public License, this file may be distributed as part of a program --dnl that contains a configuration script generated by Autoconf, under --dnl the same distribution terms as the rest of that program. -+#serial 1 - --AC_DEFUN([gl_XALLOC], --[ -- gl_PREREQ_XMALLOC -- gl_PREREQ_XSTRDUP --]) -+dnl From Mumit Khan and Paul Eggert -+dnl Determine whether mkdir accepts only one argument instead of the usual two. - --# Prerequisites of lib/xmalloc.c. --AC_DEFUN([gl_PREREQ_XMALLOC], [ -- AC_REQUIRE([AC_HEADER_STDC]) -- AC_REQUIRE([jm_FUNC_MALLOC]) -- AC_REQUIRE([jm_FUNC_REALLOC]) --]) -+AC_DEFUN([PATCH_FUNC_MKDIR_TAKES_ONE_ARG], -+ [AC_CHECK_FUNCS(mkdir) -+ AC_CACHE_CHECK([whether mkdir takes only one argument], -+ patch_cv_mkdir_takes_one_arg, -+ [patch_cv_mkdir_takes_one_arg=no -+ if test $ac_cv_func_mkdir = yes; then -+ AC_TRY_COMPILE([ -+#include -+#include -+ ], -+ [mkdir (".", 0);], -+ , -+ [AC_TRY_COMPILE([ -+#include -+#include -+ ], -+ [mkdir (".");], -+ patch_cv_mkdir_takes_one_arg=yes -+ )] -+ ) -+ fi -+ ] -+ ) -+ if test $patch_cv_mkdir_takes_one_arg = yes; then -+ AC_DEFINE([MKDIR_TAKES_ONE_ARG], 1, -+ [Define if mkdir takes only one argument.]) -+ fi -+ ] -+) - --# Prerequisites of lib/xstrdup.c. --AC_DEFUN([gl_PREREQ_XSTRDUP], [ -- AC_REQUIRE([AC_HEADER_STDC]) -- AC_CHECK_HEADERS_ONCE(string.h) --]) ---- patch-2.5.9.orig/configure -+++ patch-2.5.9/configure -@@ -1,11 +1,10 @@ - #! /bin/sh - # Guess values for system-dependent variables and create Makefiles. --# Generated by GNU Autoconf 2.57 for patch 2.5.9. -+# Generated by GNU Autoconf 2.59 for patch 2.5.9. - # - # Report bugs to . - # --# Copyright 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002 --# Free Software Foundation, Inc. -+# Copyright (C) 2003 Free Software Foundation, Inc. - # This configure script is free software; the Free Software Foundation - # gives unlimited permission to copy, distribute and modify it. - ## --------------------- ## -@@ -22,9 +21,10 @@ - elif test -n "${BASH_VERSION+set}" && (set -o posix) >/dev/null 2>&1; then - set -o posix - fi -+DUALCASE=1; export DUALCASE # for MKS sh - - # Support unset when possible. 
--if (FOO=FOO; unset FOO) >/dev/null 2>&1; then -+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then - as_unset=unset - else - as_unset=false -@@ -43,7 +43,7 @@ - LC_MEASUREMENT LC_MESSAGES LC_MONETARY LC_NAME LC_NUMERIC LC_PAPER \ - LC_TELEPHONE LC_TIME - do -- if (set +x; test -n "`(eval $as_var=C; export $as_var) 2>&1`"); then -+ if (set +x; test -z "`(eval $as_var=C; export $as_var) 2>&1`"); then - eval $as_var=C; export $as_var - else - $as_unset $as_var -@@ -220,16 +220,17 @@ - if mkdir -p . 2>/dev/null; then - as_mkdir_p=: - else -+ test -d ./-p && rmdir ./-p - as_mkdir_p=false - fi - - as_executable_p="test -f" - - # Sed expression to map a string onto a valid CPP name. --as_tr_cpp="sed y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g" -+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - - # Sed expression to map a string onto a valid variable name. --as_tr_sh="sed y%*+%pp%;s%[^_$as_cr_alnum]%_%g" -+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - - # IFS -@@ -669,7 +670,7 @@ - - # Be sure to have absolute paths. - for ac_var in bindir sbindir libexecdir datadir sysconfdir sharedstatedir \ -- localstatedir libdir includedir oldincludedir infodir mandir -+ localstatedir libdir includedir oldincludedir infodir mandir - do - eval ac_val=$`echo $ac_var` - case $ac_val in -@@ -709,10 +710,10 @@ - # Try the directory containing this script, then its parent. - ac_confdir=`(dirname "$0") 2>/dev/null || - $as_expr X"$0" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ -- X"$0" : 'X\(//\)[^/]' \| \ -- X"$0" : 'X\(//\)$' \| \ -- X"$0" : 'X\(/\)' \| \ -- . : '\(.\)' 2>/dev/null || -+ X"$0" : 'X\(//\)[^/]' \| \ -+ X"$0" : 'X\(//\)$' \| \ -+ X"$0" : 'X\(/\)' \| \ -+ . : '\(.\)' 2>/dev/null || - echo X"$0" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } - /^X\(\/\/\)[^/].*/{ s//\1/; q; } -@@ -804,9 +805,9 @@ - cat <<_ACEOF - Installation directories: - --prefix=PREFIX install architecture-independent files in PREFIX -- [$ac_default_prefix] -+ [$ac_default_prefix] - --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX -- [PREFIX] -+ [PREFIX] - - By default, \`make install' will install all the files in - \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify -@@ -896,12 +897,45 @@ - ac_srcdir=$ac_top_builddir$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_builddir$srcdir ;; - esac --# Don't blindly perform a `cd "$ac_dir"/$ac_foo && pwd` since $ac_foo can be --# absolute. --ac_abs_builddir=`cd "$ac_dir" && cd $ac_builddir && pwd` --ac_abs_top_builddir=`cd "$ac_dir" && cd ${ac_top_builddir}. && pwd` --ac_abs_srcdir=`cd "$ac_dir" && cd $ac_srcdir && pwd` --ac_abs_top_srcdir=`cd "$ac_dir" && cd $ac_top_srcdir && pwd` -+ -+# Do not use `cd foo && pwd` to compute absolute paths, because -+# the directories may not exist. -+case `pwd` in -+.) ac_abs_builddir="$ac_dir";; -+*) -+ case "$ac_dir" in -+ .) ac_abs_builddir=`pwd`;; -+ [\\/]* | ?:[\\/]* ) ac_abs_builddir="$ac_dir";; -+ *) ac_abs_builddir=`pwd`/"$ac_dir";; -+ esac;; -+esac -+case $ac_abs_builddir in -+.) ac_abs_top_builddir=${ac_top_builddir}.;; -+*) -+ case ${ac_top_builddir}. in -+ .) ac_abs_top_builddir=$ac_abs_builddir;; -+ [\\/]* | ?:[\\/]* ) ac_abs_top_builddir=${ac_top_builddir}.;; -+ *) ac_abs_top_builddir=$ac_abs_builddir/${ac_top_builddir}.;; -+ esac;; -+esac -+case $ac_abs_builddir in -+.) ac_abs_srcdir=$ac_srcdir;; -+*) -+ case $ac_srcdir in -+ .) 
ac_abs_srcdir=$ac_abs_builddir;; -+ [\\/]* | ?:[\\/]* ) ac_abs_srcdir=$ac_srcdir;; -+ *) ac_abs_srcdir=$ac_abs_builddir/$ac_srcdir;; -+ esac;; -+esac -+case $ac_abs_builddir in -+.) ac_abs_top_srcdir=$ac_top_srcdir;; -+*) -+ case $ac_top_srcdir in -+ .) ac_abs_top_srcdir=$ac_abs_builddir;; -+ [\\/]* | ?:[\\/]* ) ac_abs_top_srcdir=$ac_top_srcdir;; -+ *) ac_abs_top_srcdir=$ac_abs_builddir/$ac_top_srcdir;; -+ esac;; -+esac - - cd $ac_dir - # Check for guested configure; otherwise get Cygnus style configure. -@@ -912,13 +946,13 @@ - echo - $SHELL $ac_srcdir/configure --help=recursive - elif test -f $ac_srcdir/configure.ac || -- test -f $ac_srcdir/configure.in; then -+ test -f $ac_srcdir/configure.in; then - echo - $ac_configure --help - else - echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 - fi -- cd $ac_popdir -+ cd "$ac_popdir" - done - fi - -@@ -926,10 +960,9 @@ - if $ac_init_version; then - cat <<\_ACEOF - patch configure 2.5.9 --generated by GNU Autoconf 2.57 -+generated by GNU Autoconf 2.59 - --Copyright 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, 2002 --Free Software Foundation, Inc. -+Copyright (C) 2003 Free Software Foundation, Inc. - This configure script is free software; the Free Software Foundation - gives unlimited permission to copy, distribute and modify it. - _ACEOF -@@ -941,7 +974,7 @@ - running configure, to aid debugging if configure makes a mistake. - - It was created by patch $as_me 2.5.9, which was --generated by GNU Autoconf 2.57. Invocation command line was -+generated by GNU Autoconf 2.59. Invocation command line was - - $ $0 $@ - -@@ -1018,19 +1051,19 @@ - 2) - ac_configure_args1="$ac_configure_args1 '$ac_arg'" - if test $ac_must_keep_next = true; then -- ac_must_keep_next=false # Got value, back to normal. -+ ac_must_keep_next=false # Got value, back to normal. - else -- case $ac_arg in -- *=* | --config-cache | -C | -disable-* | --disable-* \ -- | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ -- | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ -- | -with-* | --with-* | -without-* | --without-* | --x) -- case "$ac_configure_args0 " in -- "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; -- esac -- ;; -- -* ) ac_must_keep_next=true ;; -- esac -+ case $ac_arg in -+ *=* | --config-cache | -C | -disable-* | --disable-* \ -+ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ -+ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ -+ | -with-* | --with-* | -without-* | --without-* | --x) -+ case "$ac_configure_args0 " in -+ "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; -+ esac -+ ;; -+ -* ) ac_must_keep_next=true ;; -+ esac - fi - ac_configure_args="$ac_configure_args$ac_sep'$ac_arg'" - # Get rid of the leading space. 
-@@ -1064,12 +1097,12 @@ - case `(ac_space='"'"' '"'"'; set | grep ac_space) 2>&1` in - *ac_space=\ *) - sed -n \ -- "s/'"'"'/'"'"'\\\\'"'"''"'"'/g; -- s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='"'"'\\2'"'"'/p" -+ "s/'"'"'/'"'"'\\\\'"'"''"'"'/g; -+ s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='"'"'\\2'"'"'/p" - ;; - *) - sed -n \ -- "s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1=\\2/p" -+ "s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1=\\2/p" - ;; - esac; - } -@@ -1098,7 +1131,7 @@ - for ac_var in $ac_subst_files - do - eval ac_val=$`echo $ac_var` -- echo "$ac_var='"'"'$ac_val'"'"'" -+ echo "$ac_var='"'"'$ac_val'"'"'" - done | sort - echo - fi -@@ -1117,7 +1150,7 @@ - echo "$as_me: caught signal $ac_signal" - echo "$as_me: exit $exit_status" - } >&5 -- rm -f core core.* *.core && -+ rm -f core *.core && - rm -rf conftest* confdefs* conf$$* $ac_clean_files && - exit $exit_status - ' 0 -@@ -1197,7 +1230,7 @@ - # value. - ac_cache_corrupted=false - for ac_var in `(set) 2>&1 | -- sed -n 's/^ac_env_\([a-zA-Z_0-9]*\)_set=.*/\1/p'`; do -+ sed -n 's/^ac_env_\([a-zA-Z_0-9]*\)_set=.*/\1/p'`; do - eval ac_old_set=\$ac_cv_env_${ac_var}_set - eval ac_new_set=\$ac_env_${ac_var}_set - eval ac_old_val="\$ac_cv_env_${ac_var}_value" -@@ -1214,13 +1247,13 @@ - ,);; - *) - if test "x$ac_old_val" != "x$ac_new_val"; then -- { echo "$as_me:$LINENO: error: \`$ac_var' has changed since the previous run:" >&5 -+ { echo "$as_me:$LINENO: error: \`$ac_var' has changed since the previous run:" >&5 - echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} -- { echo "$as_me:$LINENO: former value: $ac_old_val" >&5 -+ { echo "$as_me:$LINENO: former value: $ac_old_val" >&5 - echo "$as_me: former value: $ac_old_val" >&2;} -- { echo "$as_me:$LINENO: current value: $ac_new_val" >&5 -+ { echo "$as_me:$LINENO: current value: $ac_new_val" >&5 - echo "$as_me: current value: $ac_new_val" >&2;} -- ac_cache_corrupted=: -+ ac_cache_corrupted=: - fi;; - esac - # Pass precious variables to config.status. -@@ -1629,7 +1662,6 @@ - (exit $ac_status); } - - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -1649,8 +1681,8 @@ - # Try to create an executable without -o first, disregard a.out. - # It will help us diagnose broken compilers, and finding out an intuition - # of exeext. --echo "$as_me:$LINENO: checking for C compiler default output" >&5 --echo $ECHO_N "checking for C compiler default output... $ECHO_C" >&6 -+echo "$as_me:$LINENO: checking for C compiler default output file name" >&5 -+echo $ECHO_N "checking for C compiler default output file name... $ECHO_C" >&6 - ac_link_default=`echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` - if { (eval echo "$as_me:$LINENO: \"$ac_link_default\"") >&5 - (eval $ac_link_default) 2>&5 -@@ -1670,23 +1702,23 @@ - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.o | *.obj ) -- ;; -+ ;; - conftest.$ac_ext ) -- # This is the source file. -- ;; -+ # This is the source file. -+ ;; - [ab].out ) -- # We found the default executable, but exeext='' is most -- # certainly right. -- break;; -+ # We found the default executable, but exeext='' is most -+ # certainly right. -+ break;; - *.* ) -- ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` -- # FIXME: I believe we export ac_cv_exeext for Libtool, -- # but it would be cool to find out if it's true. Does anybody -- # maintain Libtool? 
--akim. -- export ac_cv_exeext -- break;; -+ ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` -+ # FIXME: I believe we export ac_cv_exeext for Libtool, -+ # but it would be cool to find out if it's true. Does anybody -+ # maintain Libtool? --akim. -+ export ac_cv_exeext -+ break;; - * ) -- break;; -+ break;; - esac - done - else -@@ -1760,8 +1792,8 @@ - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.o | *.obj ) ;; - *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` -- export ac_cv_exeext -- break;; -+ export ac_cv_exeext -+ break;; - * ) break;; - esac - done -@@ -1786,7 +1818,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -1837,7 +1868,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -1857,11 +1887,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -1874,7 +1913,7 @@ - - ac_compiler_gnu=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - ac_cv_c_compiler_gnu=$ac_compiler_gnu - - fi -@@ -1890,7 +1929,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -1907,11 +1945,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -1924,7 +1971,7 @@ - - ac_cv_prog_cc_g=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_prog_cc_g" >&5 - echo "${ECHO_T}$ac_cv_prog_cc_g" >&6 -@@ -1951,7 +1998,6 @@ - ac_cv_prog_cc_stdc=no - ac_save_CC=$CC - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -1979,6 +2025,16 @@ - va_end (v); - return s; - } -+ -+/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. 
It has -+ function prototypes and stuff, but not '\xHH' hex character constants. -+ These don't provoke an error unfortunately, instead are silently treated -+ as 'x'. The following induces an error, until -std1 is added to get -+ proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an -+ array size at least. It's necessary to write '\x00'==0 to get something -+ that's true only with -std1. */ -+int osf4_cc_array ['\x00' == 0 ? 1 : -1]; -+ - int test (int i, double x); - struct s1 {int (*f) (int a);}; - struct s2 {int (*f) (double a);}; -@@ -2005,11 +2061,20 @@ - CC="$ac_save_CC $ac_arg" - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -2022,7 +2087,7 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext -+rm -f conftest.err conftest.$ac_objext - done - rm -f conftest.$ac_ext conftest.$ac_objext - CC=$ac_save_CC -@@ -2050,19 +2115,27 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; }; then - for ac_declaration in \ -- ''\ -- '#include ' \ -+ '' \ - 'extern "C" void std::exit (int) throw (); using std::exit;' \ - 'extern "C" void std::exit (int); using std::exit;' \ - 'extern "C" void exit (int) throw ();' \ -@@ -2070,14 +2143,13 @@ - 'void exit (int);' - do - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ --#include - $ac_declaration -+#include - int - main () - { -@@ -2088,11 +2160,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -2105,9 +2186,8 @@ - - continue - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2124,11 +2204,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -2140,7 +2229,7 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - done - rm -f conftest* - if test -n "$ac_declaration"; then -@@ -2154,7 +2243,7 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - ac_ext=c - ac_cpp='$CPP $CPPFLAGS' - ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -@@ -2189,7 +2278,6 @@ - # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2200,7 +2288,7 @@ - #else - # include - #endif -- Syntax error -+ Syntax error - _ACEOF - if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 - (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 -@@ -2212,6 +2300,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -2232,7 +2321,6 @@ - # OK, works on sane cases. Now check whether non-existent headers - # can be detected and how. - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2250,6 +2338,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -2296,7 +2385,6 @@ - # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2307,7 +2395,7 @@ - #else - # include - #endif -- Syntax error -+ Syntax error - _ACEOF - if { (eval echo "$as_me:$LINENO: \"$ac_cpp conftest.$ac_ext\"") >&5 - (eval $ac_cpp conftest.$ac_ext) 2>conftest.er1 -@@ -2319,6 +2407,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -2339,7 +2428,6 @@ - # OK, works on sane cases. Now check whether non-existent headers - # can be detected and how. - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2357,6 +2445,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -2431,6 +2520,7 @@ - # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag - # AFS /usr/afsws/bin/install, which mishandles nonexistent args - # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" -+# OS/2's system install, which has a completely different semantic - # ./install, which can be erroneously created by make from ./install.sh. - echo "$as_me:$LINENO: checking for a BSD-compatible install" >&5 - echo $ECHO_N "checking for a BSD-compatible install... $ECHO_C" >&6 -@@ -2447,6 +2537,7 @@ - case $as_dir/ in - ./ | .// | /cC/* | \ - /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ -+ ?:\\/os2\\/install\\/* | ?:\\/OS2\\/INSTALL\\/* | \ - /usr/ucb/* ) ;; - *) - # OSF1 and SCO ODT 3.0 have their own names for install. -@@ -2454,20 +2545,20 @@ - # by default. - for ac_prog in ginstall scoinst install; do - for ac_exec_ext in '' $ac_executable_extensions; do -- if $as_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then -- if test $ac_prog = install && -- grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then -- # AIX install. It has an incompatible calling convention. -- : -- elif test $ac_prog = install && -- grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then -- # program-specific install script used by HP pwplus--don't use. -- : -- else -- ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" -- break 3 -- fi -- fi -+ if $as_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then -+ if test $ac_prog = install && -+ grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then -+ # AIX install. It has an incompatible calling convention. -+ : -+ elif test $ac_prog = install && -+ grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then -+ # program-specific install script used by HP pwplus--don't use. -+ : -+ else -+ ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" -+ break 3 -+ fi -+ fi - done - done - ;; -@@ -2499,7 +2590,7 @@ - - echo "$as_me:$LINENO: checking whether ${MAKE-make} sets \$(MAKE)" >&5 - echo $ECHO_N "checking whether ${MAKE-make} sets \$(MAKE)... $ECHO_C" >&6 --set dummy ${MAKE-make}; ac_make=`echo "$2" | sed 'y,./+-,__p_,'` -+set dummy ${MAKE-make}; ac_make=`echo "$2" | sed 'y,:./+-,___p_,'` - if eval "test \"\${ac_cv_prog_make_${ac_make}_set+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -@@ -2596,7 +2687,6 @@ - echo "$as_me:$LINENO: checking for AIX" >&5 - echo $ECHO_N "checking for AIX... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2628,7 +2718,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2649,11 +2738,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -2666,12 +2764,11 @@ - - ac_cv_header_stdc=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - - if test $ac_cv_header_stdc = yes; then - # SunOS 4.x string.h does not declare mem*, contrary to ANSI. - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2693,7 +2790,6 @@ - if test $ac_cv_header_stdc = yes; then - # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2718,7 +2814,6 @@ - : - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2730,9 +2825,9 @@ - # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) - #else - # define ISLOWER(c) \ -- (('a' <= (c) && (c) <= 'i') \ -- || ('j' <= (c) && (c) <= 'r') \ -- || ('s' <= (c) && (c) <= 'z')) -+ (('a' <= (c) && (c) <= 'i') \ -+ || ('j' <= (c) && (c) <= 'r') \ -+ || ('s' <= (c) && (c) <= 'z')) - # define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) - #endif - -@@ -2743,7 +2838,7 @@ - int i; - for (i = 0; i < 256; i++) - if (XOR (islower (i), ISLOWER (i)) -- || toupper (i) != TOUPPER (i)) -+ || toupper (i) != TOUPPER (i)) - exit(2); - exit (0); - } -@@ -2768,7 +2863,7 @@ - ( exit $ac_status ) - ac_cv_header_stdc=no - fi --rm -f core core.* *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext -+rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext - fi - fi - fi -@@ -2793,7 +2888,7 @@ - - - for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ -- inttypes.h stdint.h unistd.h -+ inttypes.h stdint.h unistd.h - do - as_ac_Header=`echo "ac_cv_header_$ac_header" | $as_tr_sh` - echo "$as_me:$LINENO: checking for $ac_header" >&5 -@@ -2802,7 +2897,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2814,11 +2908,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -2831,7 +2934,7 @@ - - eval "$as_ac_Header=no" - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -2858,7 +2961,6 @@ - echo "$as_me:$LINENO: checking minix/config.h usability" >&5 - echo $ECHO_N "checking minix/config.h usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2869,11 +2971,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -2886,7 +2997,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -2894,7 +3005,6 @@ - echo "$as_me:$LINENO: checking minix/config.h presence" >&5 - echo $ECHO_N "checking minix/config.h presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -2912,6 +3022,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -2931,33 +3042,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: minix/config.h: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: minix/config.h: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: minix/config.h: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: minix/config.h: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. 
## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: minix/config.h: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: minix/config.h: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: minix/config.h: present but cannot be compiled" >&5 - echo "$as_me: WARNING: minix/config.h: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: minix/config.h: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: minix/config.h: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: minix/config.h: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: minix/config.h: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: minix/config.h: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: minix/config.h: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: minix/config.h: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: minix/config.h: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: minix/config.h: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: minix/config.h: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: minix/config.h: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: minix/config.h: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -3000,15 +3110,15 @@ - fi - - --echo "$as_me:$LINENO: checking for library containing strerror" >&5 --echo $ECHO_N "checking for library containing strerror... $ECHO_C" >&6 --if test "${ac_cv_search_strerror+set}" = set; then -+ -+ echo "$as_me:$LINENO: checking for strerror in -lcposix" >&5 -+echo $ECHO_N "checking for strerror in -lcposix... $ECHO_C" >&6 -+if test "${ac_cv_lib_cposix_strerror+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- ac_func_search_save_LIBS=$LIBS --ac_cv_search_strerror=no -+ ac_check_lib_save_LIBS=$LIBS -+LIBS="-lcposix $LIBS" - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3032,79 +3142,43 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 -- (exit $ac_status); }; }; then -- ac_cv_search_strerror="none required" --else -- echo "$as_me: failed program was:" >&5 --sed 's/^/| /' conftest.$ac_ext >&5 -- --fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext --if test "$ac_cv_search_strerror" = no; then -- for ac_lib in cposix; do -- LIBS="-l$ac_lib $ac_func_search_save_LIBS" -- cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" --/* confdefs.h. */ --_ACEOF --cat confdefs.h >>conftest.$ac_ext --cat >>conftest.$ac_ext <<_ACEOF --/* end confdefs.h. */ -- --/* Override any gcc2 internal prototype to avoid an error. */ --#ifdef __cplusplus --extern "C" --#endif --/* We use char because int might match the return type of a gcc2 -- builtin and then its argument prototype would still apply. */ --char strerror (); --int --main () --{ --strerror (); -- ; -- return 0; --} --_ACEOF --rm -f conftest.$ac_objext conftest$ac_exeext --if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -- ac_status=$? -- echo "$as_me:$LINENO: \$? = $ac_status" >&5 -- (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); }; }; then -- ac_cv_search_strerror="-l$ac_lib" --break -+ ac_cv_lib_cposix_strerror=yes - else - echo "$as_me: failed program was:" >&5 - sed 's/^/| /' conftest.$ac_ext >&5 - -+ac_cv_lib_cposix_strerror=no - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -- done -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext -+LIBS=$ac_check_lib_save_LIBS - fi --LIBS=$ac_func_search_save_LIBS -+echo "$as_me:$LINENO: result: $ac_cv_lib_cposix_strerror" >&5 -+echo "${ECHO_T}$ac_cv_lib_cposix_strerror" >&6 -+if test $ac_cv_lib_cposix_strerror = yes; then -+ LIBS="$LIBS -lcposix" - fi --echo "$as_me:$LINENO: result: $ac_cv_search_strerror" >&5 --echo "${ECHO_T}$ac_cv_search_strerror" >&6 --if test "$ac_cv_search_strerror" != no; then -- test "$ac_cv_search_strerror" = "none required" || LIBS="$ac_cv_search_strerror $LIBS" - --fi -+ - - # Check whether --enable-largefile or --disable-largefile was given. - if test "${enable_largefile+set}" = set; then -@@ -3124,8 +3198,7 @@ - while :; do - # IRIX 6.2 and later do not support large files by default, - # so use the C compiler's -n32 option if that helps. -- cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" -+ cat >conftest.$ac_ext <<_ACEOF - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3150,11 +3223,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -3166,15 +3248,24 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext -+rm -f conftest.err conftest.$ac_objext - CC="$CC -n32" - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3186,8 +3277,8 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext -- break -+rm -f conftest.err conftest.$ac_objext -+ break - done - CC=$ac_save_CC - rm -f conftest.$ac_ext -@@ -3207,7 +3298,6 @@ - while :; do - ac_cv_sys_file_offset_bits=no - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3232,11 +3322,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3248,9 +3347,8 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3276,11 +3374,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3292,7 +3399,7 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - break - done - fi -@@ -3314,7 +3421,6 @@ - while :; do - ac_cv_sys_large_files=no - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3339,11 +3445,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3355,9 +3470,8 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3383,11 +3497,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3399,7 +3522,7 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - break - done - fi -@@ -3443,7 +3566,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3506,11 +3628,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3523,7 +3654,7 @@ - - ac_cv_c_const=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_c_const" >&5 - echo "${ECHO_T}$ac_cv_c_const" >&6 -@@ -3550,7 +3681,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3570,11 +3700,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3587,7 +3726,7 @@ - - eval "$as_ac_Header=no" - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -3610,7 +3749,6 @@ - ac_func_search_save_LIBS=$LIBS - ac_cv_search_opendir=no - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3634,11 +3772,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3650,12 +3797,12 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - if test "$ac_cv_search_opendir" = no; then - for ac_lib in dir; do - LIBS="-l$ac_lib $ac_func_search_save_LIBS" - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3679,11 +3826,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -3696,7 +3852,8 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - done - fi - LIBS=$ac_func_search_save_LIBS -@@ -3717,7 +3874,6 @@ - ac_func_search_save_LIBS=$LIBS - ac_cv_search_opendir=no - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3741,11 +3897,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3757,12 +3922,12 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - if test "$ac_cv_search_opendir" = no; then - for ac_lib in x; do - LIBS="-l$ac_lib $ac_func_search_save_LIBS" - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3786,11 +3951,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3803,7 +3977,8 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - done - fi - LIBS=$ac_func_search_save_LIBS -@@ -3823,7 +3998,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3844,11 +4018,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -3861,12 +4044,11 @@ - - ac_cv_header_stdc=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - - if test $ac_cv_header_stdc = yes; then - # SunOS 4.x string.h does not declare mem*, contrary to ANSI. - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3888,7 +4070,6 @@ - if test $ac_cv_header_stdc = yes; then - # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3913,7 +4094,6 @@ - : - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -3925,9 +4105,9 @@ - # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) - #else - # define ISLOWER(c) \ -- (('a' <= (c) && (c) <= 'i') \ -- || ('j' <= (c) && (c) <= 'r') \ -- || ('s' <= (c) && (c) <= 'z')) -+ (('a' <= (c) && (c) <= 'i') \ -+ || ('j' <= (c) && (c) <= 'r') \ -+ || ('s' <= (c) && (c) <= 'z')) - # define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) - #endif - -@@ -3938,7 +4118,7 @@ - int i; - for (i = 0; i < 256; i++) - if (XOR (islower (i), ISLOWER (i)) -- || toupper (i) != TOUPPER (i)) -+ || toupper (i) != TOUPPER (i)) - exit(2); - exit (0); - } -@@ -3963,7 +4143,7 @@ - ( exit $ac_status ) - ac_cv_header_stdc=no - fi --rm -f core core.* *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext -+rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext - fi - fi - fi -@@ -3999,7 +4179,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4010,11 +4189,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4027,7 +4215,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -4035,7 +4223,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4053,6 +4240,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -4072,33 +4260,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes -+ ;; -+ no:yes:* ) -+ { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 -+echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -- ;; -- no:yes ) -- { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 --echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -4109,7 +4296,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -4131,7 +4318,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4151,11 +4337,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4168,7 +4363,7 @@ - - ac_cv_type_mode_t=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_type_mode_t" >&5 - echo "${ECHO_T}$ac_cv_type_mode_t" >&6 -@@ -4188,7 +4383,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4208,11 +4402,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4225,7 +4428,7 @@ - - ac_cv_type_off_t=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_type_off_t" >&5 - echo "${ECHO_T}$ac_cv_type_off_t" >&6 -@@ -4245,7 +4448,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4265,11 +4467,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -4282,7 +4493,7 @@ - - ac_cv_type_pid_t=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_type_pid_t" >&5 - echo "${ECHO_T}$ac_cv_type_pid_t" >&6 -@@ -4302,7 +4513,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4329,11 +4539,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4346,7 +4565,7 @@ - - ac_cv_type_signal=int - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_type_signal" >&5 - echo "${ECHO_T}$ac_cv_type_signal" >&6 -@@ -4362,7 +4581,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4382,11 +4600,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4399,7 +4626,7 @@ - - ac_cv_type_size_t=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_type_size_t" >&5 - echo "${ECHO_T}$ac_cv_type_size_t" >&6 -@@ -4419,7 +4646,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4468,11 +4694,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4485,7 +4720,7 @@ - - ac_cv_header_stdbool_h=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_header_stdbool_h" >&5 - echo "${ECHO_T}$ac_cv_header_stdbool_h" >&6 -@@ -4495,7 +4730,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4515,11 +4749,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4532,7 +4775,7 @@ - - ac_cv_type__Bool=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_type__Bool" >&5 - echo "${ECHO_T}$ac_cv_type__Bool" >&6 -@@ -4589,7 +4832,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4600,11 +4842,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4617,7 +4868,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -4625,7 +4876,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4643,6 +4893,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -4662,33 +4913,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? 
--case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -4699,7 +4949,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -4733,7 +4983,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4744,11 +4993,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4761,7 +5019,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -4769,7 +5027,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4787,6 +5044,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -4806,33 +5064,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" 
>&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -4843,7 +5100,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -4865,7 +5122,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4886,11 +5142,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4903,7 +5168,7 @@ - - ac_cv_header_time=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_header_time" >&5 - echo "${ECHO_T}$ac_cv_header_time" >&6 -@@ -4933,7 +5198,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -4964,11 +5228,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -4981,7 +5254,7 @@ - - fu_cv_sys_struct_utimbuf=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - - fi - echo "$as_me:$LINENO: result: $fu_cv_sys_struct_utimbuf" >&5 -@@ -5005,7 +5278,6 @@ - ac_cv_func_closedir_void=yes - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5045,7 +5317,7 @@ - ( exit $ac_status ) - ac_cv_func_closedir_void=yes - fi --rm -f core core.* *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext -+rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext - fi - fi - echo "$as_me:$LINENO: result: $ac_cv_func_closedir_void" >&5 -@@ -5076,7 +5348,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5087,11 +5358,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -5104,7 +5384,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -5112,7 +5392,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5130,6 +5409,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -5149,33 +5429,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. 
## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -5186,7 +5465,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -5220,7 +5499,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5231,11 +5509,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -5248,7 +5535,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -5256,7 +5543,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5274,6 +5560,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -5293,33 +5580,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. 
## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -5330,7 +5616,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -5364,7 +5650,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5375,11 +5660,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -5392,7 +5686,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -5400,7 +5694,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5418,6 +5711,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -5437,33 +5731,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. 
## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -5474,7 +5767,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -5497,7 +5790,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5517,11 +5809,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -5534,7 +5835,7 @@ - - ac_cv_have_decl_getenv=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_getenv" >&5 - echo "${ECHO_T}$ac_cv_have_decl_getenv" >&6 -@@ -5562,7 +5863,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5582,11 +5882,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -5599,7 +5908,7 @@ - - ac_cv_have_decl_malloc=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_malloc" >&5 - echo "${ECHO_T}$ac_cv_have_decl_malloc" >&6 -@@ -5628,7 +5937,6 @@ - else - - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5647,11 +5955,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -5664,7 +5981,7 @@ - - ac_cv_win_or_dos=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - - fi - echo "$as_me:$LINENO: result: $ac_cv_win_or_dos" >&5 -@@ -5714,7 +6031,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5725,11 +6041,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -5742,7 +6067,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -5750,7 +6075,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5768,6 +6092,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -5787,33 +6112,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. 
## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -5824,7 +6148,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -5872,7 +6196,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -5905,11 +6228,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -5922,7 +6254,8 @@ - - jm_cv_struct_dirent_d_ino=no - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - - - fi -@@ -6011,21 +6344,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -6056,11 +6396,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -6073,7 +6422,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -6126,21 +6476,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -6171,11 +6528,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -6188,7 +6554,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -6203,21 +6570,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define _doprnt to an innocuous variant, in case declares _doprnt. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define _doprnt innocuous__doprnt -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char _doprnt (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef _doprnt -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -6248,11 +6622,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! 
-s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -6265,7 +6648,8 @@ - - ac_cv_func__doprnt=no - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_func__doprnt" >&5 - echo "${ECHO_T}$ac_cv_func__doprnt" >&6 -@@ -6288,13 +6672,12 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -- -+$ac_includes_default - int - main () - { -@@ -6305,11 +6688,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -6322,12 +6714,20 @@ - - ac_cv_lib_error_at_line=no - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_lib_error_at_line" >&5 - echo "${ECHO_T}$ac_cv_lib_error_at_line" >&6 - if test $ac_cv_lib_error_at_line = no; then -- LIBOBJS="$LIBOBJS error.$ac_objext" -+ case $LIBOBJS in -+ "error.$ac_objext" | \ -+ *" error.$ac_objext" | \ -+ "error.$ac_objext "* | \ -+ *" error.$ac_objext "* ) ;; -+ *) LIBOBJS="$LIBOBJS error.$ac_objext" ;; -+esac -+ - fi - - -@@ -6343,21 +6743,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -6388,11 +6795,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -- echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 -+ echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -6405,7 +6821,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -6423,7 +6840,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -6443,11 +6859,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -6460,7 +6885,7 @@ - - ac_cv_have_decl_strerror=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_strerror" >&5 - echo "${ECHO_T}$ac_cv_have_decl_strerror" >&6 -@@ -6486,7 +6911,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -6506,11 +6930,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -6523,7 +6956,7 @@ - - ac_cv_have_decl_strerror_r=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_strerror_r" >&5 - echo "${ECHO_T}$ac_cv_have_decl_strerror_r" >&6 -@@ -6553,21 +6986,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. 
*/ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -6598,11 +7038,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -6615,7 +7064,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -6636,7 +7086,6 @@ - ac_cv_func_strerror_r_char_p=no - if test $ac_cv_have_decl_strerror_r = yes; then - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -6657,11 +7106,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -6673,7 +7131,7 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - else - # strerror_r is not declared. Choose between - # systems that have relatively inaccessible declarations for the -@@ -6685,7 +7143,6 @@ - : - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -6721,7 +7178,7 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f core core.* *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext -+rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext - fi - fi - -@@ -6749,21 +7206,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -6794,11 +7258,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -6811,7 +7284,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -6821,7 +7295,14 @@ - _ACEOF - - else -- LIBOBJS="$LIBOBJS $ac_func.$ac_objext" -+ case $LIBOBJS in -+ "$ac_func.$ac_objext" | \ -+ *" $ac_func.$ac_objext" | \ -+ "$ac_func.$ac_objext "* | \ -+ *" $ac_func.$ac_objext "* ) ;; -+ *) LIBOBJS="$LIBOBJS $ac_func.$ac_objext" ;; -+esac -+ - fi - done - -@@ -6855,7 +7336,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -6866,11 +7346,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -6883,7 +7372,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -6891,7 +7380,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... 
$ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -6909,6 +7397,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -6928,33 +7417,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -6965,7 +7453,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -6989,7 +7477,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7053,21 +7540,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -7098,11 +7592,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -7115,7 +7618,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -7125,7 +7629,14 @@ - _ACEOF - - else -- LIBOBJS="$LIBOBJS $ac_func.$ac_objext" -+ case $LIBOBJS in -+ "$ac_func.$ac_objext" | \ -+ *" $ac_func.$ac_objext" | \ -+ "$ac_func.$ac_objext "* | \ -+ *" $ac_func.$ac_objext "* ) ;; -+ *) LIBOBJS="$LIBOBJS $ac_func.$ac_objext" ;; -+esac -+ - fi - done - -@@ -7163,7 +7674,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7174,11 +7684,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -7191,7 +7710,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -7199,7 +7718,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7217,6 +7735,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -7236,33 +7755,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. 
## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -7273,7 +7791,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -7297,7 +7815,6 @@ - ac_cv_func_malloc_0_nonnull=no - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7337,7 +7854,7 @@ - ( exit $ac_status ) - ac_cv_func_malloc_0_nonnull=no - fi --rm -f core core.* *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext -+rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext - fi - fi - echo "$as_me:$LINENO: result: $ac_cv_func_malloc_0_nonnull" >&5 -@@ -7353,7 +7870,14 @@ - #define HAVE_MALLOC 0 - _ACEOF - -- LIBOBJS="$LIBOBJS malloc.$ac_objext" -+ case $LIBOBJS in -+ "malloc.$ac_objext" | \ -+ *" malloc.$ac_objext" | \ -+ "malloc.$ac_objext "* | \ -+ *" malloc.$ac_objext "* ) ;; -+ *) LIBOBJS="$LIBOBJS malloc.$ac_objext" ;; -+esac -+ - - cat >>confdefs.h <<\_ACEOF - #define malloc rpl_malloc -@@ -7388,7 +7912,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7399,11 +7922,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -7416,7 +7948,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -7424,7 +7956,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7442,6 +7973,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -7461,33 +7993,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? 
--case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -7498,7 +8029,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -7522,7 +8053,6 @@ - ac_cv_func_realloc_0_nonnull=no - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7562,7 +8092,7 @@ - ( exit $ac_status ) - ac_cv_func_realloc_0_nonnull=no - fi --rm -f core core.* *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext -+rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext - fi - fi - echo "$as_me:$LINENO: result: $ac_cv_func_realloc_0_nonnull" >&5 -@@ -7578,7 +8108,14 @@ - #define HAVE_REALLOC 0 - _ACEOF - -- LIBOBJS="$LIBOBJS realloc.$ac_objext" -+ case $LIBOBJS in -+ "realloc.$ac_objext" | \ -+ *" realloc.$ac_objext" | \ -+ "realloc.$ac_objext "* | \ -+ *" realloc.$ac_objext "* ) ;; -+ *) LIBOBJS="$LIBOBJS realloc.$ac_objext" ;; -+esac -+ - - cat >>confdefs.h <<\_ACEOF - #define realloc rpl_realloc -@@ -7619,7 +8156,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7630,11 +8166,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -7647,7 +8192,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -7655,7 +8200,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7673,6 +8217,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -7692,33 +8237,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. 
## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -7729,7 +8273,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -7772,7 +8316,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7783,11 +8326,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -7800,7 +8352,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -7808,7 +8360,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7826,6 +8377,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -7845,33 +8397,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. 
## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -7882,7 +8433,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -7916,7 +8467,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7927,11 +8477,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -7944,7 +8503,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -7952,7 +8511,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -7970,6 +8528,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -7989,33 +8548,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. 
## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -8026,7 +8584,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -8053,21 +8611,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -8098,11 +8663,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! 
-s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -8115,7 +8689,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -8139,21 +8714,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -8184,11 +8766,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -8201,7 +8792,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -8241,7 +8833,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -8259,11 +8850,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -8276,7 +8876,7 @@ - - ac_cv_type_mbstate_t=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_type_mbstate_t" >&5 - echo "${ECHO_T}$ac_cv_type_mbstate_t" >&6 -@@ -8300,7 +8900,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -8317,11 +8916,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -8334,7 +8942,8 @@ - - jm_cv_func_mbrtowc=no - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $jm_cv_func_mbrtowc" >&5 - echo "${ECHO_T}$jm_cv_func_mbrtowc" >&6 -@@ -8348,69 +8957,465 @@ - - - -- -- -- -- --for ac_func in pathconf --do --as_ac_var=`echo "ac_cv_func_$ac_func" | $as_tr_sh` --echo "$as_me:$LINENO: checking for $ac_func" >&5 --echo $ECHO_N "checking for $ac_func... $ECHO_C" >&6 --if eval "test \"\${$as_ac_var+set}\" = set"; then -+ echo "$as_me:$LINENO: checking whether free is declared" >&5 -+echo $ECHO_N "checking whether free is declared... $ECHO_C" >&6 -+if test "${ac_cv_have_decl_free+set}" = set; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ --/* System header to define __stub macros and hopefully few prototypes, -- which can conflict with char $ac_func (); below. -- Prefer to if __STDC__ is defined, since -- exists even on freestanding compilers. */ --#ifdef __STDC__ --# include --#else --# include --#endif --/* Override any gcc2 internal prototype to avoid an error. */ --#ifdef __cplusplus --extern "C" --{ --#endif --/* We use char because int might match the return type of a gcc2 -- builtin and then its argument prototype would still apply. */ --char $ac_func (); --/* The GNU C library defines this for functions which it implements -- to always fail with ENOSYS. Some functions are actually named -- something starting with __ and the normal name is an alias. */ --#if defined (__stub_$ac_func) || defined (__stub___$ac_func) --choke me --#else --char (*f) () = $ac_func; --#endif --#ifdef __cplusplus --} --#endif -- -+$ac_includes_default - int - main () - { --return f != $ac_func; -+#ifndef free -+ char *p = (char *) free; -+#endif -+ - ; - return 0; - } - _ACEOF --rm -f conftest.$ac_objext conftest$ac_exeext --if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+rm -f conftest.$ac_objext -+if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -+ (eval $ac_compile) 2>conftest.er1 -+ ac_status=$? 
-+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); } && -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; }; then -+ ac_cv_have_decl_free=yes -+else -+ echo "$as_me: failed program was:" >&5 -+sed 's/^/| /' conftest.$ac_ext >&5 -+ -+ac_cv_have_decl_free=no -+fi -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext -+fi -+echo "$as_me:$LINENO: result: $ac_cv_have_decl_free" >&5 -+echo "${ECHO_T}$ac_cv_have_decl_free" >&6 -+if test $ac_cv_have_decl_free = yes; then -+ -+cat >>confdefs.h <<_ACEOF -+#define HAVE_DECL_FREE 1 -+_ACEOF -+ -+ -+else -+ cat >>confdefs.h <<_ACEOF -+#define HAVE_DECL_FREE 0 -+_ACEOF -+ -+ -+fi -+ -+ -+ -+ -+ -+ : -+ -+ -+ -+ -+ -+ echo "$as_me:$LINENO: checking for stdbool.h that conforms to C99" >&5 -+echo $ECHO_N "checking for stdbool.h that conforms to C99... $ECHO_C" >&6 -+if test "${ac_cv_header_stdbool_h+set}" = set; then -+ echo $ECHO_N "(cached) $ECHO_C" >&6 -+else -+ cat >conftest.$ac_ext <<_ACEOF -+/* confdefs.h. */ -+_ACEOF -+cat confdefs.h >>conftest.$ac_ext -+cat >>conftest.$ac_ext <<_ACEOF -+/* end confdefs.h. */ -+ -+ #include -+ #ifndef bool -+ "error: bool is not defined" -+ #endif -+ #ifndef false -+ "error: false is not defined" -+ #endif -+ #if false -+ "error: false is not 0" -+ #endif -+ #ifndef true -+ "error: false is not defined" -+ #endif -+ #if true != 1 -+ "error: true is not 1" -+ #endif -+ #ifndef __bool_true_false_are_defined -+ "error: __bool_true_false_are_defined is not defined" -+ #endif -+ -+ struct s { _Bool s: 1; _Bool t; } s; -+ -+ char a[true == 1 ? 1 : -1]; -+ char b[false == 0 ? 1 : -1]; -+ char c[__bool_true_false_are_defined == 1 ? 1 : -1]; -+ char d[(bool) -0.5 == true ? 1 : -1]; -+ bool e = &s; -+ char f[(_Bool) -0.0 == false ? 1 : -1]; -+ char g[true]; -+ char h[sizeof (_Bool)]; -+ char i[sizeof s.t]; -+ -+int -+main () -+{ -+ return !a + !b + !c + !d + !e + !f + !g + !h + !i; -+ ; -+ return 0; -+} -+_ACEOF -+rm -f conftest.$ac_objext -+if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -+ (eval $ac_compile) 2>conftest.er1 -+ ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); } && -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 -+ (exit $ac_status); }; }; then -+ ac_cv_header_stdbool_h=yes -+else -+ echo "$as_me: failed program was:" >&5 -+sed 's/^/| /' conftest.$ac_ext >&5 -+ -+ac_cv_header_stdbool_h=no -+fi -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext -+fi -+echo "$as_me:$LINENO: result: $ac_cv_header_stdbool_h" >&5 -+echo "${ECHO_T}$ac_cv_header_stdbool_h" >&6 -+ echo "$as_me:$LINENO: checking for _Bool" >&5 -+echo $ECHO_N "checking for _Bool... $ECHO_C" >&6 -+if test "${ac_cv_type__Bool+set}" = set; then -+ echo $ECHO_N "(cached) $ECHO_C" >&6 -+else -+ cat >conftest.$ac_ext <<_ACEOF -+/* confdefs.h. */ -+_ACEOF -+cat confdefs.h >>conftest.$ac_ext -+cat >>conftest.$ac_ext <<_ACEOF -+/* end confdefs.h. */ -+$ac_includes_default -+int -+main () -+{ -+if ((_Bool *) 0) -+ return 0; -+if (sizeof (_Bool)) -+ return 0; -+ ; -+ return 0; -+} -+_ACEOF -+rm -f conftest.$ac_objext -+if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -+ (eval $ac_compile) 2>conftest.er1 -+ ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); } && -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; }; then -+ ac_cv_type__Bool=yes -+else -+ echo "$as_me: failed program was:" >&5 -+sed 's/^/| /' conftest.$ac_ext >&5 -+ -+ac_cv_type__Bool=no -+fi -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext -+fi -+echo "$as_me:$LINENO: result: $ac_cv_type__Bool" >&5 -+echo "${ECHO_T}$ac_cv_type__Bool" >&6 -+if test $ac_cv_type__Bool = yes; then -+ -+cat >>confdefs.h <<_ACEOF -+#define HAVE__BOOL 1 -+_ACEOF -+ -+ -+fi -+ -+ if test $ac_cv_header_stdbool_h = yes; then -+ -+cat >>confdefs.h <<\_ACEOF -+#define HAVE_STDBOOL_H 1 -+_ACEOF -+ -+ fi -+ -+ : -+ -+ -+ -+ -+ -+ -+ -+ -+ -+ -+echo "$as_me:$LINENO: checking for nanosecond timestamps in struct stat" >&5 -+echo $ECHO_N "checking for nanosecond timestamps in struct stat... $ECHO_C" >&6 -+if test "${ac_cv_stat_nsec+set}" = set; then -+ echo $ECHO_N "(cached) $ECHO_C" >&6 -+else -+ cat >conftest.$ac_ext <<_ACEOF -+/* confdefs.h. */ -+_ACEOF -+cat confdefs.h >>conftest.$ac_ext -+cat >>conftest.$ac_ext <<_ACEOF -+/* end confdefs.h. */ -+ -+ #include -+ #include -+ #include -+ struct stat st; -+ -+int -+main () -+{ -+ return st.st_atimensec + st.st_mtimensec + st.st_ctimensec; -+ ; -+ return 0; -+} -+_ACEOF -+rm -f conftest.$ac_objext -+if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 -+ (exit $ac_status); }; }; then -+ ac_cv_stat_nsec=yes -+else -+ echo "$as_me: failed program was:" >&5 -+sed 's/^/| /' conftest.$ac_ext >&5 -+ -+ac_cv_stat_nsec=no -+fi -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext -+ -+fi -+echo "$as_me:$LINENO: result: $ac_cv_stat_nsec" >&5 -+echo "${ECHO_T}$ac_cv_stat_nsec" >&6 -+ if test $ac_cv_stat_nsec = yes; then -+ -+cat >>confdefs.h <<\_ACEOF -+#define HAVE_STAT_NSEC 1 -+_ACEOF -+ -+ fi -+ -+ echo "$as_me:$LINENO: checking for nanosecond timestamps in struct stat" >&5 -+echo $ECHO_N "checking for nanosecond timestamps in struct stat... $ECHO_C" >&6 -+if test "${ac_cv_stat_timeval+set}" = set; then -+ echo $ECHO_N "(cached) $ECHO_C" >&6 -+else -+ cat >conftest.$ac_ext <<_ACEOF -+/* confdefs.h. */ -+_ACEOF -+cat confdefs.h >>conftest.$ac_ext -+cat >>conftest.$ac_ext <<_ACEOF -+/* end confdefs.h. */ -+ -+ #include -+ #include -+ #include -+ #include -+ struct stat st; -+ -+int -+main () -+{ -+ return st.st_atim.tv_nsec + st.st_mtim.tv_nsec + st.st_ctim.tv_nsec; -+ ; -+ return 0; -+} -+_ACEOF -+rm -f conftest.$ac_objext -+if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -+ (eval $ac_compile) 2>conftest.er1 -+ ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); } && -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; }; then -+ ac_cv_stat_timeval=yes -+else -+ echo "$as_me: failed program was:" >&5 -+sed 's/^/| /' conftest.$ac_ext >&5 -+ -+ac_cv_stat_timeval=no -+fi -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext -+ -+fi -+echo "$as_me:$LINENO: result: $ac_cv_stat_timeval" >&5 -+echo "${ECHO_T}$ac_cv_stat_timeval" >&6 -+ if test $ac_cv_stat_timeval = yes; then -+ -+cat >>confdefs.h <<\_ACEOF -+#define HAVE_STAT_TIMEVAL 1 -+_ACEOF -+ -+ fi -+ -+ -+ -+ -+ -+for ac_func in pathconf -+do -+as_ac_var=`echo "ac_cv_func_$ac_func" | $as_tr_sh` -+echo "$as_me:$LINENO: checking for $ac_func" >&5 -+echo $ECHO_N "checking for $ac_func... $ECHO_C" >&6 -+if eval "test \"\${$as_ac_var+set}\" = set"; then -+ echo $ECHO_N "(cached) $ECHO_C" >&6 -+else -+ cat >conftest.$ac_ext <<_ACEOF -+/* confdefs.h. */ -+_ACEOF -+cat confdefs.h >>conftest.$ac_ext -+cat >>conftest.$ac_ext <<_ACEOF -+/* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ -+/* System header to define __stub macros and hopefully few prototypes, -+ which can conflict with char $ac_func (); below. -+ Prefer to if __STDC__ is defined, since -+ exists even on freestanding compilers. */ -+ -+#ifdef __STDC__ -+# include -+#else -+# include -+#endif -+ -+#undef $ac_func -+ -+/* Override any gcc2 internal prototype to avoid an error. */ -+#ifdef __cplusplus -+extern "C" -+{ -+#endif -+/* We use char because int might match the return type of a gcc2 -+ builtin and then its argument prototype would still apply. */ -+char $ac_func (); -+/* The GNU C library defines this for functions which it implements -+ to always fail with ENOSYS. 
Some functions are actually named -+ something starting with __ and the normal name is an alias. */ -+#if defined (__stub_$ac_func) || defined (__stub___$ac_func) -+choke me -+#else -+char (*f) () = $ac_func; -+#endif -+#ifdef __cplusplus -+} -+#endif -+ -+int -+main () -+{ -+return f != $ac_func; -+ ; -+ return 0; -+} -+_ACEOF -+rm -f conftest.$ac_objext conftest$ac_exeext -+if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -+ (eval $ac_link) 2>conftest.er1 -+ ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); } && -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -8423,7 +9428,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -8454,7 +9460,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -8465,11 +9470,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -8482,7 +9496,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -8490,7 +9504,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -8508,6 +9521,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -8527,33 +9541,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" 
>&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -8564,7 +9577,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -8587,7 +9600,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -8607,11 +9619,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -8624,7 +9645,7 @@ - - ac_cv_have_decl_free=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_free" >&5 - echo "${ECHO_T}$ac_cv_have_decl_free" >&6 -@@ -8648,7 +9669,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -8668,11 +9688,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -8685,7 +9714,7 @@ - - ac_cv_have_decl_getenv=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_getenv" >&5 - echo "${ECHO_T}$ac_cv_have_decl_getenv" >&6 -@@ -8709,7 +9738,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -8729,11 +9757,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -8746,7 +9783,7 @@ - - ac_cv_have_decl_malloc=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_malloc" >&5 - echo "${ECHO_T}$ac_cv_have_decl_malloc" >&6 -@@ -8770,7 +9807,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -8790,11 +9826,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -8807,7 +9852,7 @@ - - ac_cv_have_decl_mktemp=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_mktemp" >&5 - echo "${ECHO_T}$ac_cv_have_decl_mktemp" >&6 -@@ -8849,21 +9894,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -8894,11 +9946,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -8911,7 +9972,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -8934,21 +9996,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -8979,11 +10048,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -8996,7 +10074,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -9006,7 +10085,14 @@ - _ACEOF - - else -- LIBOBJS="$LIBOBJS $ac_func.$ac_objext" -+ case $LIBOBJS in -+ "$ac_func.$ac_objext" | \ -+ *" $ac_func.$ac_objext" | \ -+ "$ac_func.$ac_objext "* | \ -+ *" $ac_func.$ac_objext "* ) ;; -+ *) LIBOBJS="$LIBOBJS $ac_func.$ac_objext" ;; -+esac -+ - fi - done - -@@ -9019,7 +10105,6 @@ - while :; do - ac_cv_sys_largefile_source=no - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9036,11 +10121,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9052,9 +10146,8 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9072,11 +10165,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9088,7 +10190,7 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - break - done - fi -@@ -9112,7 +10214,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9129,11 +10230,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9146,7 +10256,8 @@ - - ac_cv_func_fseeko=no - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_func_fseeko" >&5 - echo "${ECHO_T}$ac_cv_func_fseeko" >&6 -@@ -9165,7 +10276,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9185,11 +10295,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9202,7 +10321,7 @@ - - ac_cv_have_decl_clearerr_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_clearerr_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_clearerr_unlocked" >&6 -@@ -9230,7 +10349,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9250,11 +10368,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -9267,7 +10394,7 @@ - - ac_cv_have_decl_feof_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_feof_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_feof_unlocked" >&6 -@@ -9295,7 +10422,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9315,11 +10441,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9332,7 +10467,7 @@ - - ac_cv_have_decl_ferror_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_ferror_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_ferror_unlocked" >&6 -@@ -9360,7 +10495,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9380,11 +10514,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9397,7 +10540,7 @@ - - ac_cv_have_decl_fflush_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_fflush_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_fflush_unlocked" >&6 -@@ -9425,7 +10568,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9445,11 +10587,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! 
-s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9462,7 +10613,7 @@ - - ac_cv_have_decl_fgets_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_fgets_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_fgets_unlocked" >&6 -@@ -9490,7 +10641,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9510,11 +10660,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9527,7 +10686,7 @@ - - ac_cv_have_decl_fputc_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_fputc_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_fputc_unlocked" >&6 -@@ -9555,7 +10714,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9575,11 +10733,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9592,7 +10759,7 @@ - - ac_cv_have_decl_fputs_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_fputs_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_fputs_unlocked" >&6 -@@ -9620,7 +10787,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9640,11 +10806,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? 
-+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9657,7 +10832,7 @@ - - ac_cv_have_decl_fread_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_fread_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_fread_unlocked" >&6 -@@ -9685,7 +10860,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9705,11 +10879,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9722,7 +10905,7 @@ - - ac_cv_have_decl_fwrite_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_fwrite_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_fwrite_unlocked" >&6 -@@ -9750,7 +10933,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9770,11 +10952,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9787,7 +10978,7 @@ - - ac_cv_have_decl_getc_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_getc_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_getc_unlocked" >&6 -@@ -9815,7 +11006,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. 
*/ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9835,11 +11025,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9852,7 +11051,7 @@ - - ac_cv_have_decl_getchar_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_getchar_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_getchar_unlocked" >&6 -@@ -9880,7 +11079,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9900,11 +11098,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -9917,7 +11124,7 @@ - - ac_cv_have_decl_putc_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_putc_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_putc_unlocked" >&6 -@@ -9945,7 +11152,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -9965,11 +11171,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -9982,7 +11197,7 @@ - - ac_cv_have_decl_putchar_unlocked=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_have_decl_putchar_unlocked" >&5 - echo "${ECHO_T}$ac_cv_have_decl_putchar_unlocked" >&6 -@@ -10075,7 +11290,6 @@ - ac_cv_func_closedir_void=yes - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -10115,7 +11329,7 @@ - ( exit $ac_status ) - ac_cv_func_closedir_void=yes - fi --rm -f core core.* *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext -+rm -f core *.core gmon.out bb.out conftest$ac_exeext conftest.$ac_objext conftest.$ac_ext - fi - fi - echo "$as_me:$LINENO: result: $ac_cv_func_closedir_void" >&5 -@@ -10146,7 +11360,6 @@ - echo "$as_me:$LINENO: checking $ac_header usability" >&5 - echo $ECHO_N "checking $ac_header usability... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -10157,11 +11370,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -10174,7 +11396,7 @@ - - ac_header_compiler=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - echo "$as_me:$LINENO: result: $ac_header_compiler" >&5 - echo "${ECHO_T}$ac_header_compiler" >&6 - -@@ -10182,7 +11404,6 @@ - echo "$as_me:$LINENO: checking $ac_header presence" >&5 - echo $ECHO_N "checking $ac_header presence... $ECHO_C" >&6 - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -10200,6 +11421,7 @@ - (exit $ac_status); } >/dev/null; then - if test -s conftest.err; then - ac_cpp_err=$ac_c_preproc_warn_flag -+ ac_cpp_err=$ac_cpp_err$ac_c_werror_flag - else - ac_cpp_err= - fi -@@ -10219,33 +11441,32 @@ - echo "${ECHO_T}$ac_header_preproc" >&6 - - # So? What about this header? --case $ac_header_compiler:$ac_header_preproc in -- yes:no ) -+case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in -+ yes:no: ) - { echo "$as_me:$LINENO: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&5 - echo "$as_me: WARNING: $ac_header: accepted by the compiler, rejected by the preprocessor!" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 --echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -- ( -- cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. 
## --## ------------------------------------ ## --_ASBOX -- ) | -- sed "s/^/$as_me: WARNING: /" >&2 -+ { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the compiler's result" >&5 -+echo "$as_me: WARNING: $ac_header: proceeding with the compiler's result" >&2;} -+ ac_header_preproc=yes - ;; -- no:yes ) -+ no:yes:* ) - { echo "$as_me:$LINENO: WARNING: $ac_header: present but cannot be compiled" >&5 - echo "$as_me: WARNING: $ac_header: present but cannot be compiled" >&2;} -- { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 --echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: check for missing prerequisite headers?" >&5 -+echo "$as_me: WARNING: $ac_header: check for missing prerequisite headers?" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: see the Autoconf documentation" >&5 -+echo "$as_me: WARNING: $ac_header: see the Autoconf documentation" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&5 -+echo "$as_me: WARNING: $ac_header: section \"Present But Cannot Be Compiled\"" >&2;} - { echo "$as_me:$LINENO: WARNING: $ac_header: proceeding with the preprocessor's result" >&5 - echo "$as_me: WARNING: $ac_header: proceeding with the preprocessor's result" >&2;} -+ { echo "$as_me:$LINENO: WARNING: $ac_header: in the future, the compiler will take precedence" >&5 -+echo "$as_me: WARNING: $ac_header: in the future, the compiler will take precedence" >&2;} - ( - cat <<\_ASBOX --## ------------------------------------ ## --## Report this to bug-autoconf@gnu.org. ## --## ------------------------------------ ## -+## -------------------------------- ## -+## Report this to bug-patch@gnu.org ## -+## -------------------------------- ## - _ASBOX - ) | - sed "s/^/$as_me: WARNING: /" >&2 -@@ -10256,7 +11477,7 @@ - if eval "test \"\${$as_ac_Header+set}\" = set"; then - echo $ECHO_N "(cached) $ECHO_C" >&6 - else -- eval "$as_ac_Header=$ac_header_preproc" -+ eval "$as_ac_Header=\$ac_header_preproc" - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_Header'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_Header'}'`" >&6 -@@ -10277,7 +11498,6 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -10300,11 +11520,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -10317,7 +11546,8 @@ - - ac_cv_func_setmode_dos=no - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_func_setmode_dos" >&5 - echo "${ECHO_T}$ac_cv_func_setmode_dos" >&6 -@@ -10338,21 +11568,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -10383,11 +11620,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -10400,7 +11646,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -10415,21 +11662,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define _doprnt to an innocuous variant, in case declares _doprnt. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define _doprnt innocuous__doprnt -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char _doprnt (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef _doprnt -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -10460,11 +11714,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! 
-s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -10477,7 +11740,8 @@ - - ac_cv_func__doprnt=no - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: $ac_cv_func__doprnt" >&5 - echo "${ECHO_T}$ac_cv_func__doprnt" >&6 -@@ -10503,21 +11767,28 @@ - echo $ECHO_N "(cached) $ECHO_C" >&6 - else - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext - cat >>conftest.$ac_ext <<_ACEOF - /* end confdefs.h. */ -+/* Define $ac_func to an innocuous variant, in case declares $ac_func. -+ For example, HP-UX 11i declares gettimeofday. */ -+#define $ac_func innocuous_$ac_func -+ - /* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $ac_func (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ -+ - #ifdef __STDC__ - # include - #else - # include - #endif -+ -+#undef $ac_func -+ - /* Override any gcc2 internal prototype to avoid an error. */ - #ifdef __cplusplus - extern "C" -@@ -10548,11 +11819,20 @@ - _ACEOF - rm -f conftest.$ac_objext conftest$ac_exeext - if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5 -- (eval $ac_link) 2>&5 -+ (eval $ac_link) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest$ac_exeext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest$ac_exeext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -10565,7 +11845,8 @@ - - eval "$as_ac_var=no" - fi --rm -f conftest.$ac_objext conftest$ac_exeext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext \ -+ conftest$ac_exeext conftest.$ac_ext - fi - echo "$as_me:$LINENO: result: `eval echo '${'$as_ac_var'}'`" >&5 - echo "${ECHO_T}`eval echo '${'$as_ac_var'}'`" >&6 -@@ -10585,7 +11866,6 @@ - patch_cv_mkdir_takes_one_arg=no - if test $ac_cv_func_mkdir = yes; then - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -10605,11 +11885,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? 
-@@ -10621,7 +11910,6 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -10641,11 +11929,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -10658,10 +11955,10 @@ - sed 's/^/| /' conftest.$ac_ext >&5 - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - fi - - -@@ -10686,7 +11983,6 @@ - else - - cat >conftest.$ac_ext <<_ACEOF --#line $LINENO "configure" - /* confdefs.h. */ - _ACEOF - cat confdefs.h >>conftest.$ac_ext -@@ -10705,11 +12001,20 @@ - _ACEOF - rm -f conftest.$ac_objext - if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5 -- (eval $ac_compile) 2>&5 -+ (eval $ac_compile) 2>conftest.er1 - ac_status=$? -+ grep -v '^ *+' conftest.er1 >conftest.err -+ rm -f conftest.er1 -+ cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - (exit $ac_status); } && -- { ac_try='test -s conftest.$ac_objext' -+ { ac_try='test -z "$ac_c_werror_flag" || test ! -s conftest.err' -+ { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 -+ (eval $ac_try) 2>&5 -+ ac_status=$? -+ echo "$as_me:$LINENO: \$? = $ac_status" >&5 -+ (exit $ac_status); }; } && -+ { ac_try='test -s conftest.$ac_objext' - { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5 - (eval $ac_try) 2>&5 - ac_status=$? -@@ -10722,7 +12027,7 @@ - - ac_cv_win_or_dos=no - fi --rm -f conftest.$ac_objext conftest.$ac_ext -+rm -f conftest.err conftest.$ac_objext conftest.$ac_ext - - fi - echo "$as_me:$LINENO: result: $ac_cv_win_or_dos" >&5 -@@ -10834,13 +12139,13 @@ - # `set' does not quote correctly, so add quotes (double-quote - # substitution turns \\\\ into \\, and sed turns \\ into \). - sed -n \ -- "s/'/'\\\\''/g; -- s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" -+ "s/'/'\\\\''/g; -+ s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" - ;; - *) - # `set' quotes correctly as required by POSIX, so do not add quotes. - sed -n \ -- "s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1=\\2/p" -+ "s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1=\\2/p" - ;; - esac; - } | -@@ -10870,13 +12175,13 @@ - # trailing colons and then remove the whole line if VPATH becomes empty - # (actually we leave an empty line to preserve line numbers). - if test "x$srcdir" = x.; then -- ac_vpsub='/^[ ]*VPATH[ ]*=/{ -+ ac_vpsub='/^[ ]*VPATH[ ]*=/{ - s/:*\$(srcdir):*/:/; - s/:*\${srcdir}:*/:/; - s/:*@srcdir@:*/:/; --s/^\([^=]*=[ ]*\):*/\1/; -+s/^\([^=]*=[ ]*\):*/\1/; - s/:*$//; --s/^[^=]*=[ ]*$//; -+s/^[^=]*=[ ]*$//; - }' - fi - -@@ -10887,7 +12192,7 @@ - for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue - # 1. 
Remove the extension, and $U if already installed. - ac_i=`echo "$ac_i" | -- sed 's/\$U\././;s/\.o$//;s/\.obj$//'` -+ sed 's/\$U\././;s/\.o$//;s/\.obj$//'` - # 2. Add them. - ac_libobjs="$ac_libobjs $ac_i\$U.$ac_objext" - ac_ltlibobjs="$ac_ltlibobjs $ac_i"'$U.lo' -@@ -10931,9 +12236,10 @@ - elif test -n "${BASH_VERSION+set}" && (set -o posix) >/dev/null 2>&1; then - set -o posix - fi -+DUALCASE=1; export DUALCASE # for MKS sh - - # Support unset when possible. --if (FOO=FOO; unset FOO) >/dev/null 2>&1; then -+if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then - as_unset=unset - else - as_unset=false -@@ -10952,7 +12258,7 @@ - LC_MEASUREMENT LC_MESSAGES LC_MONETARY LC_NAME LC_NUMERIC LC_PAPER \ - LC_TELEPHONE LC_TIME - do -- if (set +x; test -n "`(eval $as_var=C; export $as_var) 2>&1`"); then -+ if (set +x; test -z "`(eval $as_var=C; export $as_var) 2>&1`"); then - eval $as_var=C; export $as_var - else - $as_unset $as_var -@@ -11131,16 +12437,17 @@ - if mkdir -p . 2>/dev/null; then - as_mkdir_p=: - else -+ test -d ./-p && rmdir ./-p - as_mkdir_p=false - fi - - as_executable_p="test -f" - - # Sed expression to map a string onto a valid CPP name. --as_tr_cpp="sed y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g" -+as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - - # Sed expression to map a string onto a valid variable name. --as_tr_sh="sed y%*+%pp%;s%[^_$as_cr_alnum]%_%g" -+as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - - # IFS -@@ -11167,7 +12474,7 @@ - cat >&5 <<_CSEOF - - This file was extended by patch $as_me 2.5.9, which was --generated by GNU Autoconf 2.57. Invocation command line was -+generated by GNU Autoconf 2.59. Invocation command line was - - CONFIG_FILES = $CONFIG_FILES - CONFIG_HEADERS = $CONFIG_HEADERS -@@ -11211,9 +12518,9 @@ - -d, --debug don't remove temporary files - --recheck update $as_me by reconfiguring in the same conditions - --file=FILE[:TEMPLATE] -- instantiate the configuration file FILE -+ instantiate the configuration file FILE - --header=FILE[:TEMPLATE] -- instantiate the configuration header FILE -+ instantiate the configuration header FILE - - Configuration files: - $config_files -@@ -11227,11 +12534,10 @@ - cat >>$CONFIG_STATUS <<_ACEOF - ac_cs_version="\\ - patch config.status 2.5.9 --configured by $0, generated by GNU Autoconf 2.57, -+configured by $0, generated by GNU Autoconf 2.59, - with options \\"`echo "$ac_configure_args" | sed 's/[\\""\`\$]/\\\\&/g'`\\" - --Copyright 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001 --Free Software Foundation, Inc. -+Copyright (C) 2003 Free Software Foundation, Inc. - This config.status script is free software; the Free Software Foundation - gives unlimited permission to copy, distribute and modify it." - srcdir=$srcdir -@@ -11463,9 +12769,9 @@ - (echo ':t - /@[a-zA-Z_][a-zA-Z_0-9]*@/!b' && cat $tmp/subs.frag) >$tmp/subs-$ac_sed_frag.sed - if test -z "$ac_sed_cmds"; then -- ac_sed_cmds="sed -f $tmp/subs-$ac_sed_frag.sed" -+ ac_sed_cmds="sed -f $tmp/subs-$ac_sed_frag.sed" - else -- ac_sed_cmds="$ac_sed_cmds | sed -f $tmp/subs-$ac_sed_frag.sed" -+ ac_sed_cmds="$ac_sed_cmds | sed -f $tmp/subs-$ac_sed_frag.sed" - fi - ac_sed_frag=`expr $ac_sed_frag + 1` - ac_beg=$ac_end -@@ -11483,21 +12789,21 @@ - # Support "outfile[:infile[:infile...]]", defaulting infile="outfile.in". 
- case $ac_file in - - | *:- | *:-:* ) # input from stdin -- cat >$tmp/stdin -- ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` -- ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; -+ cat >$tmp/stdin -+ ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` -+ ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; - *:* ) ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` -- ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; -+ ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; - * ) ac_file_in=$ac_file.in ;; - esac - - # Compute @srcdir@, @top_srcdir@, and @INSTALL@ for subdirectories. - ac_dir=`(dirname "$ac_file") 2>/dev/null || - $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ -- X"$ac_file" : 'X\(//\)[^/]' \| \ -- X"$ac_file" : 'X\(//\)$' \| \ -- X"$ac_file" : 'X\(/\)' \| \ -- . : '\(.\)' 2>/dev/null || -+ X"$ac_file" : 'X\(//\)[^/]' \| \ -+ X"$ac_file" : 'X\(//\)$' \| \ -+ X"$ac_file" : 'X\(/\)' \| \ -+ . : '\(.\)' 2>/dev/null || - echo X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } - /^X\(\/\/\)[^/].*/{ s//\1/; q; } -@@ -11513,10 +12819,10 @@ - as_dirs="$as_dir $as_dirs" - as_dir=`(dirname "$as_dir") 2>/dev/null || - $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ -- X"$as_dir" : 'X\(//\)[^/]' \| \ -- X"$as_dir" : 'X\(//\)$' \| \ -- X"$as_dir" : 'X\(/\)' \| \ -- . : '\(.\)' 2>/dev/null || -+ X"$as_dir" : 'X\(//\)[^/]' \| \ -+ X"$as_dir" : 'X\(//\)$' \| \ -+ X"$as_dir" : 'X\(/\)' \| \ -+ . : '\(.\)' 2>/dev/null || - echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } - /^X\(\/\/\)[^/].*/{ s//\1/; q; } -@@ -11554,12 +12860,45 @@ - ac_srcdir=$ac_top_builddir$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_builddir$srcdir ;; - esac --# Don't blindly perform a `cd "$ac_dir"/$ac_foo && pwd` since $ac_foo can be --# absolute. --ac_abs_builddir=`cd "$ac_dir" && cd $ac_builddir && pwd` --ac_abs_top_builddir=`cd "$ac_dir" && cd ${ac_top_builddir}. && pwd` --ac_abs_srcdir=`cd "$ac_dir" && cd $ac_srcdir && pwd` --ac_abs_top_srcdir=`cd "$ac_dir" && cd $ac_top_srcdir && pwd` -+ -+# Do not use `cd foo && pwd` to compute absolute paths, because -+# the directories may not exist. -+case `pwd` in -+.) ac_abs_builddir="$ac_dir";; -+*) -+ case "$ac_dir" in -+ .) ac_abs_builddir=`pwd`;; -+ [\\/]* | ?:[\\/]* ) ac_abs_builddir="$ac_dir";; -+ *) ac_abs_builddir=`pwd`/"$ac_dir";; -+ esac;; -+esac -+case $ac_abs_builddir in -+.) ac_abs_top_builddir=${ac_top_builddir}.;; -+*) -+ case ${ac_top_builddir}. in -+ .) ac_abs_top_builddir=$ac_abs_builddir;; -+ [\\/]* | ?:[\\/]* ) ac_abs_top_builddir=${ac_top_builddir}.;; -+ *) ac_abs_top_builddir=$ac_abs_builddir/${ac_top_builddir}.;; -+ esac;; -+esac -+case $ac_abs_builddir in -+.) ac_abs_srcdir=$ac_srcdir;; -+*) -+ case $ac_srcdir in -+ .) ac_abs_srcdir=$ac_abs_builddir;; -+ [\\/]* | ?:[\\/]* ) ac_abs_srcdir=$ac_srcdir;; -+ *) ac_abs_srcdir=$ac_abs_builddir/$ac_srcdir;; -+ esac;; -+esac -+case $ac_abs_builddir in -+.) ac_abs_top_srcdir=$ac_top_srcdir;; -+*) -+ case $ac_top_srcdir in -+ .) ac_abs_top_srcdir=$ac_abs_builddir;; -+ [\\/]* | ?:[\\/]* ) ac_abs_top_srcdir=$ac_top_srcdir;; -+ *) ac_abs_top_srcdir=$ac_abs_builddir/$ac_top_srcdir;; -+ esac;; -+esac - - - case $INSTALL in -@@ -11567,11 +12906,6 @@ - *) ac_INSTALL=$ac_top_builddir$INSTALL ;; - esac - -- if test x"$ac_file" != x-; then -- { echo "$as_me:$LINENO: creating $ac_file" >&5 --echo "$as_me: creating $ac_file" >&6;} -- rm -f "$ac_file" -- fi - # Let's still pretend it is `configure' which instantiates (i.e., don't - # use $as_me), people would be surprised to read: - # /* config.h. 
Generated by config.status. */ -@@ -11581,7 +12915,7 @@ - configure_input="$ac_file. " - fi - configure_input=$configure_input"Generated from `echo $ac_file_in | -- sed 's,.*/,,'` by configure." -+ sed 's,.*/,,'` by configure." - - # First look for the input files in the build tree, otherwise in the - # src tree. -@@ -11590,26 +12924,32 @@ - case $f in - -) echo $tmp/stdin ;; - [\\/$]*) -- # Absolute (can't be DOS-style, as IFS=:) -- test -f "$f" || { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 -+ # Absolute (can't be DOS-style, as IFS=:) -+ test -f "$f" || { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 - echo "$as_me: error: cannot find input file: $f" >&2;} - { (exit 1); exit 1; }; } -- echo $f;; -+ echo "$f";; - *) # Relative -- if test -f "$f"; then -- # Build tree -- echo $f -- elif test -f "$srcdir/$f"; then -- # Source tree -- echo $srcdir/$f -- else -- # /dev/null tree -- { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 -+ if test -f "$f"; then -+ # Build tree -+ echo "$f" -+ elif test -f "$srcdir/$f"; then -+ # Source tree -+ echo "$srcdir/$f" -+ else -+ # /dev/null tree -+ { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 - echo "$as_me: error: cannot find input file: $f" >&2;} - { (exit 1); exit 1; }; } -- fi;; -+ fi;; - esac - done` || { (exit 1); exit 1; } -+ -+ if test x"$ac_file" != x-; then -+ { echo "$as_me:$LINENO: creating $ac_file" >&5 -+echo "$as_me: creating $ac_file" >&6;} -+ rm -f "$ac_file" -+ fi - _ACEOF - cat >>$CONFIG_STATUS <<_ACEOF - sed "$ac_vpsub -@@ -11649,12 +12989,12 @@ - # NAME is the cpp macro being defined and VALUE is the value it is being given. - # - # ac_d sets the value in "#define NAME VALUE" lines. --ac_dA='s,^\([ ]*\)#\([ ]*define[ ][ ]*\)' --ac_dB='[ ].*$,\1#\2' -+ac_dA='s,^\([ ]*\)#\([ ]*define[ ][ ]*\)' -+ac_dB='[ ].*$,\1#\2' - ac_dC=' ' - ac_dD=',;t' - # ac_u turns "#undef NAME" without trailing blanks into "#define NAME VALUE". --ac_uA='s,^\([ ]*\)#\([ ]*\)undef\([ ][ ]*\)' -+ac_uA='s,^\([ ]*\)#\([ ]*\)undef\([ ][ ]*\)' - ac_uB='$,\1#\2define\3' - ac_uC=' ' - ac_uD=',;t' -@@ -11663,11 +13003,11 @@ - # Support "outfile[:infile[:infile...]]", defaulting infile="outfile.in". - case $ac_file in - - | *:- | *:-:* ) # input from stdin -- cat >$tmp/stdin -- ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` -- ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; -+ cat >$tmp/stdin -+ ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` -+ ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; - *:* ) ac_file_in=`echo "$ac_file" | sed 's,[^:]*:,,'` -- ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; -+ ac_file=`echo "$ac_file" | sed 's,:.*,,'` ;; - * ) ac_file_in=$ac_file.in ;; - esac - -@@ -11681,28 +13021,29 @@ - case $f in - -) echo $tmp/stdin ;; - [\\/$]*) -- # Absolute (can't be DOS-style, as IFS=:) -- test -f "$f" || { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 -+ # Absolute (can't be DOS-style, as IFS=:) -+ test -f "$f" || { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 - echo "$as_me: error: cannot find input file: $f" >&2;} - { (exit 1); exit 1; }; } -- echo $f;; -+ # Do quote $f, to prevent DOS paths from being IFS'd. 
-+ echo "$f";; - *) # Relative -- if test -f "$f"; then -- # Build tree -- echo $f -- elif test -f "$srcdir/$f"; then -- # Source tree -- echo $srcdir/$f -- else -- # /dev/null tree -- { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 -+ if test -f "$f"; then -+ # Build tree -+ echo "$f" -+ elif test -f "$srcdir/$f"; then -+ # Source tree -+ echo "$srcdir/$f" -+ else -+ # /dev/null tree -+ { { echo "$as_me:$LINENO: error: cannot find input file: $f" >&5 - echo "$as_me: error: cannot find input file: $f" >&2;} - { (exit 1); exit 1; }; } -- fi;; -+ fi;; - esac - done` || { (exit 1); exit 1; } - # Remove the trailing spaces. -- sed 's/[ ]*$//' $ac_file_inputs >$tmp/in -+ sed 's/[ ]*$//' $ac_file_inputs >$tmp/in - - _ACEOF - -@@ -11725,9 +13066,9 @@ - s,[\\$`],\\&,g - t clear - : clear --s,^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*\)\(([^)]*)\)[ ]*\(.*\)$,${ac_dA}\1${ac_dB}\1\2${ac_dC}\3${ac_dD},gp -+s,^[ ]*#[ ]*define[ ][ ]*\([^ (][^ (]*\)\(([^)]*)\)[ ]*\(.*\)$,${ac_dA}\1${ac_dB}\1\2${ac_dC}\3${ac_dD},gp - t end --s,^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)$,${ac_dA}\1${ac_dB}\1${ac_dC}\2${ac_dD},gp -+s,^[ ]*#[ ]*define[ ][ ]*\([^ ][^ ]*\)[ ]*\(.*\)$,${ac_dA}\1${ac_dB}\1${ac_dC}\2${ac_dD},gp - : end - _ACEOF - # If some macros were called several times there might be several times -@@ -11741,13 +13082,13 @@ - # example, in the case of _POSIX_SOURCE, which is predefined and required - # on some systems where configure will not decide to define it. - cat >>conftest.undefs <<\_ACEOF --s,^[ ]*#[ ]*undef[ ][ ]*[a-zA-Z_][a-zA-Z_0-9]*,/* & */, -+s,^[ ]*#[ ]*undef[ ][ ]*[a-zA-Z_][a-zA-Z_0-9]*,/* & */, - _ACEOF - - # Break up conftest.defines because some shells have a limit on the size - # of here documents, and old seds have small limits too (100 cmds). - echo ' # Handle all the #define templates only if necessary.' >>$CONFIG_STATUS --echo ' if grep "^[ ]*#[ ]*define" $tmp/in >/dev/null; then' >>$CONFIG_STATUS -+echo ' if grep "^[ ]*#[ ]*define" $tmp/in >/dev/null; then' >>$CONFIG_STATUS - echo ' # If there are no defines, we may have an empty if/fi' >>$CONFIG_STATUS - echo ' :' >>$CONFIG_STATUS - rm -f conftest.tail -@@ -11756,7 +13097,7 @@ - # Write a limited-size here document to $tmp/defines.sed. - echo ' cat >$tmp/defines.sed <>$CONFIG_STATUS - # Speed up: don't consider the non `#define' lines. -- echo '/^[ ]*#[ ]*define/!b' >>$CONFIG_STATUS -+ echo '/^[ ]*#[ ]*define/!b' >>$CONFIG_STATUS - # Work around the forget-to-reset-the-flag bug. - echo 't clr' >>$CONFIG_STATUS - echo ': clr' >>$CONFIG_STATUS -@@ -11783,7 +13124,7 @@ - # Write a limited-size here document to $tmp/undefs.sed. - echo ' cat >$tmp/undefs.sed <>$CONFIG_STATUS - # Speed up: don't consider the non `#undef' -- echo '/^[ ]*#[ ]*undef/!b' >>$CONFIG_STATUS -+ echo '/^[ ]*#[ ]*undef/!b' >>$CONFIG_STATUS - # Work around the forget-to-reset-the-flag bug. - echo 't clr' >>$CONFIG_STATUS - echo ': clr' >>$CONFIG_STATUS -@@ -11817,10 +13158,10 @@ - else - ac_dir=`(dirname "$ac_file") 2>/dev/null || - $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ -- X"$ac_file" : 'X\(//\)[^/]' \| \ -- X"$ac_file" : 'X\(//\)$' \| \ -- X"$ac_file" : 'X\(/\)' \| \ -- . : '\(.\)' 2>/dev/null || -+ X"$ac_file" : 'X\(//\)[^/]' \| \ -+ X"$ac_file" : 'X\(//\)$' \| \ -+ X"$ac_file" : 'X\(/\)' \| \ -+ . 
: '\(.\)' 2>/dev/null || - echo X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } - /^X\(\/\/\)[^/].*/{ s//\1/; q; } -@@ -11836,10 +13177,10 @@ - as_dirs="$as_dir $as_dirs" - as_dir=`(dirname "$as_dir") 2>/dev/null || - $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ -- X"$as_dir" : 'X\(//\)[^/]' \| \ -- X"$as_dir" : 'X\(//\)$' \| \ -- X"$as_dir" : 'X\(/\)' \| \ -- . : '\(.\)' 2>/dev/null || -+ X"$as_dir" : 'X\(//\)[^/]' \| \ -+ X"$as_dir" : 'X\(//\)$' \| \ -+ X"$as_dir" : 'X\(/\)' \| \ -+ . : '\(.\)' 2>/dev/null || - echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/; q; } - /^X\(\/\/\)[^/].*/{ s//\1/; q; } ---- patch-2.5.9.orig/configure.ac -+++ patch-2.5.9/configure.ac -@@ -64,6 +64,9 @@ - gl_PREREQ_XMALLOC - gl_QUOTE - gl_QUOTEARG -+gl_HASH -+ -+ag_CHECK_NANOSECOND_STAT - - dnl This should be in gnulib, but isn't for some reason. - AC_DEFUN([jm_PREREQ_ADDEXT], ---- patch-2.5.9.orig/pch.c -+++ patch-2.5.9/pch.c -@@ -1,6 +1,6 @@ - /* reading patches */ - --/* $Id: pch.c,v 1.44 2003/05/20 14:03:17 eggert Exp $ */ -+/* $Id: pch.c,v 1.45 2003/07/02 22:19:21 eggert Exp $ */ - - /* Copyright (C) 1986, 1987, 1988 Larry Wall - -@@ -366,10 +366,16 @@ - if (!stars_last_line && strnEQ(s, "*** ", 4)) - name[OLD] = fetchname (s+4, strippath, &p_timestamp[OLD]); - else if (strnEQ(s, "+++ ", 4)) -+ { - /* Swap with NEW below. */ - name[OLD] = fetchname (s+4, strippath, &p_timestamp[OLD]); -+ p_strip_trailing_cr = strip_trailing_cr; -+ } - else if (strnEQ(s, "Index:", 6)) -+ { - name[INDEX] = fetchname (s+6, strippath, (time_t *) 0); -+ p_strip_trailing_cr = strip_trailing_cr; -+ } - else if (strnEQ(s, "Prereq:", 7)) { - for (t = s + 7; ISSPACE ((unsigned char) *t); t++) - continue; -@@ -409,6 +415,7 @@ - p_timestamp[NEW] = timestamp; - p_rfc934_nesting = (t - s) >> 1; - } -+ p_strip_trailing_cr = strip_trailing_cr; - } - } - if ((diff_type == NO_DIFF || diff_type == ED_DIFF) && ---- patch-2.5.9.orig/util.c -+++ patch-2.5.9/util.c -@@ -45,9 +45,17 @@ - # define raise(sig) kill (getpid (), sig) - #endif - -+#if defined(HAVE_STAT_TIMEVAL) -+#include -+#endif -+ - #include -+#include - - static void makedirs (char *); -+static bool fid_search (const char *, const struct stat *, bool); -+# define fid_exists(name, pst) fid_search (name, pst, false) -+# define insert_fid(name) fid_search (name, NULL, true) - - /* Move a file FROM (where *FROM_NEEDS_REMOVAL is nonzero if FROM - needs removal when cleaning up at the end of execution) -@@ -64,7 +72,7 @@ - struct stat to_st; - int to_errno = ! backup ? -1 : stat (to, &to_st) == 0 ? 0 : errno; - -- if (backup) -+ if (backup && (to_errno || ! fid_exists (to, &to_st))) - { - int try_makedirs_errno = 0; - char *bakname; -@@ -165,6 +173,7 @@ - if (! to_dir_known_to_exist) - makedirs (to); - copy_file (from, to, 0, mode); -+ insert_fid (to); - return; - } - -@@ -173,6 +182,7 @@ - } - - rename_succeeded: -+ insert_fid (to); - /* Do not clear *FROM_NEEDS_REMOVAL if it's possible that the - rename returned zero because FROM and TO are hard links to - the same file. 
*/ -@@ -1011,3 +1021,105 @@ - if (file_seek (stream, offset, ptrname) != 0) - pfatal ("fseek"); - } -+ -+typedef struct -+{ -+ dev_t fid_dev; -+ ino_t fid_ino; -+ time_t fid_mtime; -+ unsigned long fid_mtimensec; -+} file_id; -+ -+unsigned -+file_id_hasher (file_id *entry, unsigned table_size) -+{ -+ return ((unsigned long) entry->fid_ino + -+ (unsigned long) entry->fid_dev + -+ (unsigned long) entry->fid_mtime + -+ (unsigned long) entry->fid_mtimensec) % table_size; -+} -+ -+bool -+file_id_comparator (file_id *entry1, file_id *entry2) -+{ -+ return (entry1->fid_dev == entry2->fid_dev && -+ entry1->fid_ino == entry2->fid_ino && -+ entry1->fid_mtime == entry2->fid_mtime && -+ entry1->fid_mtimensec == entry2->fid_mtimensec); -+} -+ -+void -+file_id_freer (file_id *entry) -+{ -+ free (entry); -+} -+ -+Hash_table *file_id_hash; -+ -+/* Check if the file identified by FILENAME and PST was already seen. If the -+ file was already seen, returns TRUE. If the file has not yet been seen -+ and INSERT is TRUE, it is inserted. PST or FILENAME may be NULL (but not -+ both of them). */ -+ -+static bool -+fid_search (const char *filename, const struct stat *pst, bool insert) -+{ -+ struct stat st; -+ -+ if (!file_id_hash) -+ { -+ file_id_hash = hash_initialize (0, NULL, (Hash_hasher) file_id_hasher, -+ (Hash_comparator) file_id_comparator, -+ (Hash_data_freer) file_id_freer); -+ if (!file_id_hash) -+ pfatal ("hash_initialize"); -+ } -+ -+ if (!pst) -+ { -+ if (stat (filename, &st) != 0) -+ pfatal ("%s", quotearg (filename)); -+ pst = &st; -+ } -+ -+ if (insert) -+ { -+ file_id *pfid = xmalloc (sizeof (file_id)), *old_pfid; -+ pfid->fid_dev = pst->st_dev; -+ pfid->fid_ino = pst->st_ino; -+ pfid->fid_mtime = pst->st_mtime; -+#if defined(HAVE_STAT_NSEC) -+ pfid->fid_mtimensec = pst->st_mtimensec; -+#elif defined(HAVE_STAT_TIMEVAL) -+ pfid->fid_mtimensec = pst->st_mtim.tv_nsec; -+#else -+ pfid->fid_mtimensec = 0; -+#endif -+ old_pfid = hash_insert (file_id_hash, pfid); -+ if (!old_pfid) -+ pfatal ("hash_insert"); -+ else if (old_pfid != pfid) -+ { -+ free (pfid); -+ return true; -+ } -+ else -+ return false; -+ } -+ else -+ { -+ file_id fid; -+ fid.fid_dev = pst->st_dev; -+ fid.fid_ino = pst->st_ino; -+ fid.fid_mtime = pst->st_mtime; -+#if defined(HAVE_STAT_NSEC) -+ fid.fid_mtimensec = pst->st_mtimensec; -+#elif defined(HAVE_STAT_TIMEVAL) -+ fid.fid_mtimensec = pst->st_mtim.tv_nsec; -+#else -+ fid.fid_mtimensec = 0; -+#endif -+ return hash_lookup (file_id_hash, &fid) != 0; -+ } -+} -+ ---- patch-2.5.9.orig/hash.c -+++ patch-2.5.9/hash.c -@@ -0,0 +1,1051 @@ -+/* hash - hashing table processing. -+ -+ Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003 Free Software -+ Foundation, Inc. -+ -+ Written by Jim Meyering, 1992. -+ -+ This program is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by -+ the Free Software Foundation; either version 2, or (at your option) -+ any later version. -+ -+ This program is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License -+ along with this program; if not, write to the Free Software Foundation, -+ Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ -+ -+/* A generic hash table package. 
*/ -+ -+/* Define USE_OBSTACK to 1 if you want the allocator to use obstacks instead -+ of malloc. If you change USE_OBSTACK, you have to recompile! */ -+ -+#if HAVE_CONFIG_H -+# include -+#endif -+#if HAVE_STDLIB_H -+# include -+#endif -+ -+#include -+#include -+#include -+ -+#ifndef HAVE_DECL_FREE -+"this configure-time declaration test was not run" -+#endif -+#if !HAVE_DECL_FREE -+void free (); -+#endif -+ -+#ifndef HAVE_DECL_MALLOC -+"this configure-time declaration test was not run" -+#endif -+#if !HAVE_DECL_MALLOC -+char *malloc (); -+#endif -+ -+#if USE_OBSTACK -+# include "obstack.h" -+# ifndef obstack_chunk_alloc -+# define obstack_chunk_alloc malloc -+# endif -+# ifndef obstack_chunk_free -+# define obstack_chunk_free free -+# endif -+#endif -+ -+#include "hash.h" -+ -+struct hash_table -+ { -+ /* The array of buckets starts at BUCKET and extends to BUCKET_LIMIT-1, -+ for a possibility of N_BUCKETS. Among those, N_BUCKETS_USED buckets -+ are not empty, there are N_ENTRIES active entries in the table. */ -+ struct hash_entry *bucket; -+ struct hash_entry *bucket_limit; -+ unsigned n_buckets; -+ unsigned n_buckets_used; -+ unsigned n_entries; -+ -+ /* Tuning arguments, kept in a physicaly separate structure. */ -+ const Hash_tuning *tuning; -+ -+ /* Three functions are given to `hash_initialize', see the documentation -+ block for this function. In a word, HASHER randomizes a user entry -+ into a number up from 0 up to some maximum minus 1; COMPARATOR returns -+ true if two user entries compare equally; and DATA_FREER is the cleanup -+ function for a user entry. */ -+ Hash_hasher hasher; -+ Hash_comparator comparator; -+ Hash_data_freer data_freer; -+ -+ /* A linked list of freed struct hash_entry structs. */ -+ struct hash_entry *free_entry_list; -+ -+#if USE_OBSTACK -+ /* Whenever obstacks are used, it is possible to allocate all overflowed -+ entries into a single stack, so they all can be freed in a single -+ operation. It is not clear if the speedup is worth the trouble. */ -+ struct obstack entry_stack; -+#endif -+ }; -+ -+/* A hash table contains many internal entries, each holding a pointer to -+ some user provided data (also called a user entry). An entry indistinctly -+ refers to both the internal entry and its associated user entry. A user -+ entry contents may be hashed by a randomization function (the hashing -+ function, or just `hasher' for short) into a number (or `slot') between 0 -+ and the current table size. At each slot position in the hash table, -+ starts a linked chain of entries for which the user data all hash to this -+ slot. A bucket is the collection of all entries hashing to the same slot. -+ -+ A good `hasher' function will distribute entries rather evenly in buckets. -+ In the ideal case, the length of each bucket is roughly the number of -+ entries divided by the table size. Finding the slot for a data is usually -+ done in constant time by the `hasher', and the later finding of a precise -+ entry is linear in time with the size of the bucket. Consequently, a -+ larger hash table size (that is, a larger number of buckets) is prone to -+ yielding shorter chains, *given* the `hasher' function behaves properly. -+ -+ Long buckets slow down the lookup algorithm. One might use big hash table -+ sizes in hope to reduce the average length of buckets, but this might -+ become inordinate, as unused slots in the hash table take some space. The -+ best bet is to make sure you are using a good `hasher' function (beware -+ that those are not that easy to write! 
:-), and to use a table size -+ larger than the actual number of entries. */ -+ -+/* If an insertion makes the ratio of nonempty buckets to table size larger -+ than the growth threshold (a number between 0.0 and 1.0), then increase -+ the table size by multiplying by the growth factor (a number greater than -+ 1.0). The growth threshold defaults to 0.8, and the growth factor -+ defaults to 1.414, meaning that the table will have doubled its size -+ every second time 80% of the buckets get used. */ -+#define DEFAULT_GROWTH_THRESHOLD 0.8 -+#define DEFAULT_GROWTH_FACTOR 1.414 -+ -+/* If a deletion empties a bucket and causes the ratio of used buckets to -+ table size to become smaller than the shrink threshold (a number between -+ 0.0 and 1.0), then shrink the table by multiplying by the shrink factor (a -+ number greater than the shrink threshold but smaller than 1.0). The shrink -+ threshold and factor default to 0.0 and 1.0, meaning that the table never -+ shrinks. */ -+#define DEFAULT_SHRINK_THRESHOLD 0.0 -+#define DEFAULT_SHRINK_FACTOR 1.0 -+ -+/* Use this to initialize or reset a TUNING structure to -+ some sensible values. */ -+static const Hash_tuning default_tuning = -+ { -+ DEFAULT_SHRINK_THRESHOLD, -+ DEFAULT_SHRINK_FACTOR, -+ DEFAULT_GROWTH_THRESHOLD, -+ DEFAULT_GROWTH_FACTOR, -+ false -+ }; -+ -+/* Information and lookup. */ -+ -+/* The following few functions provide information about the overall hash -+ table organization: the number of entries, number of buckets and maximum -+ length of buckets. */ -+ -+/* Return the number of buckets in the hash table. The table size, the total -+ number of buckets (used plus unused), or the maximum number of slots, are -+ the same quantity. */ -+ -+unsigned -+hash_get_n_buckets (const Hash_table *table) -+{ -+ return table->n_buckets; -+} -+ -+/* Return the number of slots in use (non-empty buckets). */ -+ -+unsigned -+hash_get_n_buckets_used (const Hash_table *table) -+{ -+ return table->n_buckets_used; -+} -+ -+/* Return the number of active entries. */ -+ -+unsigned -+hash_get_n_entries (const Hash_table *table) -+{ -+ return table->n_entries; -+} -+ -+/* Return the length of the longest chain (bucket). */ -+ -+unsigned -+hash_get_max_bucket_length (const Hash_table *table) -+{ -+ struct hash_entry *bucket; -+ unsigned max_bucket_length = 0; -+ -+ for (bucket = table->bucket; bucket < table->bucket_limit; bucket++) -+ { -+ if (bucket->data) -+ { -+ struct hash_entry *cursor = bucket; -+ unsigned bucket_length = 1; -+ -+ while (cursor = cursor->next, cursor) -+ bucket_length++; -+ -+ if (bucket_length > max_bucket_length) -+ max_bucket_length = bucket_length; -+ } -+ } -+ -+ return max_bucket_length; -+} -+ -+/* Do a mild validation of a hash table, by traversing it and checking two -+ statistics. */ -+ -+bool -+hash_table_ok (const Hash_table *table) -+{ -+ struct hash_entry *bucket; -+ unsigned n_buckets_used = 0; -+ unsigned n_entries = 0; -+ -+ for (bucket = table->bucket; bucket < table->bucket_limit; bucket++) -+ { -+ if (bucket->data) -+ { -+ struct hash_entry *cursor = bucket; -+ -+ /* Count bucket head. */ -+ n_buckets_used++; -+ n_entries++; -+ -+ /* Count bucket overflow. 
*/ -+ while (cursor = cursor->next, cursor) -+ n_entries++; -+ } -+ } -+ -+ if (n_buckets_used == table->n_buckets_used && n_entries == table->n_entries) -+ return true; -+ -+ return false; -+} -+ -+void -+hash_print_statistics (const Hash_table *table, FILE *stream) -+{ -+ unsigned n_entries = hash_get_n_entries (table); -+ unsigned n_buckets = hash_get_n_buckets (table); -+ unsigned n_buckets_used = hash_get_n_buckets_used (table); -+ unsigned max_bucket_length = hash_get_max_bucket_length (table); -+ -+ fprintf (stream, "# entries: %u\n", n_entries); -+ fprintf (stream, "# buckets: %u\n", n_buckets); -+ fprintf (stream, "# buckets used: %u (%.2f%%)\n", n_buckets_used, -+ (100.0 * n_buckets_used) / n_buckets); -+ fprintf (stream, "max bucket length: %u\n", max_bucket_length); -+} -+ -+/* If ENTRY matches an entry already in the hash table, return the -+ entry from the table. Otherwise, return NULL. */ -+ -+void * -+hash_lookup (const Hash_table *table, const void *entry) -+{ -+ struct hash_entry *bucket -+ = table->bucket + table->hasher (entry, table->n_buckets); -+ struct hash_entry *cursor; -+ -+ if (! (bucket < table->bucket_limit)) -+ abort (); -+ -+ if (bucket->data == NULL) -+ return NULL; -+ -+ for (cursor = bucket; cursor; cursor = cursor->next) -+ if (table->comparator (entry, cursor->data)) -+ return cursor->data; -+ -+ return NULL; -+} -+ -+/* Walking. */ -+ -+/* The functions in this page traverse the hash table and process the -+ contained entries. For the traversal to work properly, the hash table -+ should not be resized nor modified while any particular entry is being -+ processed. In particular, entries should not be added or removed. */ -+ -+/* Return the first data in the table, or NULL if the table is empty. */ -+ -+void * -+hash_get_first (const Hash_table *table) -+{ -+ struct hash_entry *bucket; -+ -+ if (table->n_entries == 0) -+ return NULL; -+ -+ for (bucket = table->bucket; ; bucket++) -+ if (! (bucket < table->bucket_limit)) -+ abort (); -+ else if (bucket->data) -+ return bucket->data; -+} -+ -+/* Return the user data for the entry following ENTRY, where ENTRY has been -+ returned by a previous call to either `hash_get_first' or `hash_get_next'. -+ Return NULL if there are no more entries. */ -+ -+void * -+hash_get_next (const Hash_table *table, const void *entry) -+{ -+ struct hash_entry *bucket -+ = table->bucket + table->hasher (entry, table->n_buckets); -+ struct hash_entry *cursor; -+ -+ if (! (bucket < table->bucket_limit)) -+ abort (); -+ -+ /* Find next entry in the same bucket. */ -+ for (cursor = bucket; cursor; cursor = cursor->next) -+ if (cursor->data == entry && cursor->next) -+ return cursor->next->data; -+ -+ /* Find first entry in any subsequent bucket. */ -+ while (++bucket < table->bucket_limit) -+ if (bucket->data) -+ return bucket->data; -+ -+ /* None found. */ -+ return NULL; -+} -+ -+/* Fill BUFFER with pointers to active user entries in the hash table, then -+ return the number of pointers copied. Do not copy more than BUFFER_SIZE -+ pointers. 
*/ -+ -+unsigned -+hash_get_entries (const Hash_table *table, void **buffer, -+ unsigned buffer_size) -+{ -+ unsigned counter = 0; -+ struct hash_entry *bucket; -+ struct hash_entry *cursor; -+ -+ for (bucket = table->bucket; bucket < table->bucket_limit; bucket++) -+ { -+ if (bucket->data) -+ { -+ for (cursor = bucket; cursor; cursor = cursor->next) -+ { -+ if (counter >= buffer_size) -+ return counter; -+ buffer[counter++] = cursor->data; -+ } -+ } -+ } -+ -+ return counter; -+} -+ -+/* Call a PROCESSOR function for each entry of a hash table, and return the -+ number of entries for which the processor function returned success. A -+ pointer to some PROCESSOR_DATA which will be made available to each call to -+ the processor function. The PROCESSOR accepts two arguments: the first is -+ the user entry being walked into, the second is the value of PROCESSOR_DATA -+ as received. The walking continue for as long as the PROCESSOR function -+ returns nonzero. When it returns zero, the walking is interrupted. */ -+ -+unsigned -+hash_do_for_each (const Hash_table *table, Hash_processor processor, -+ void *processor_data) -+{ -+ unsigned counter = 0; -+ struct hash_entry *bucket; -+ struct hash_entry *cursor; -+ -+ for (bucket = table->bucket; bucket < table->bucket_limit; bucket++) -+ { -+ if (bucket->data) -+ { -+ for (cursor = bucket; cursor; cursor = cursor->next) -+ { -+ if (!(*processor) (cursor->data, processor_data)) -+ return counter; -+ counter++; -+ } -+ } -+ } -+ -+ return counter; -+} -+ -+/* Allocation and clean-up. */ -+ -+/* Return a hash index for a NUL-terminated STRING between 0 and N_BUCKETS-1. -+ This is a convenience routine for constructing other hashing functions. */ -+ -+#if USE_DIFF_HASH -+ -+/* About hashings, Paul Eggert writes to me (FP), on 1994-01-01: "Please see -+ B. J. McKenzie, R. Harries & T. Bell, Selecting a hashing algorithm, -+ Software--practice & experience 20, 2 (Feb 1990), 209-224. Good hash -+ algorithms tend to be domain-specific, so what's good for [diffutils'] io.c -+ may not be good for your application." */ -+ -+unsigned -+hash_string (const char *string, unsigned n_buckets) -+{ -+# define ROTATE_LEFT(Value, Shift) \ -+ ((Value) << (Shift) | (Value) >> ((sizeof (unsigned) * CHAR_BIT) - (Shift))) -+# define HASH_ONE_CHAR(Value, Byte) \ -+ ((Byte) + ROTATE_LEFT (Value, 7)) -+ -+ unsigned value = 0; -+ -+ for (; *string; string++) -+ value = HASH_ONE_CHAR (value, *(const unsigned char *) string); -+ return value % n_buckets; -+ -+# undef ROTATE_LEFT -+# undef HASH_ONE_CHAR -+} -+ -+#else /* not USE_DIFF_HASH */ -+ -+/* This one comes from `recode', and performs a bit better than the above as -+ per a few experiments. It is inspired from a hashing routine found in the -+ very old Cyber `snoop', itself written in typical Greg Mansfield style. -+ (By the way, what happened to this excellent man? Is he still alive?) */ -+ -+unsigned -+hash_string (const char *string, unsigned n_buckets) -+{ -+ unsigned value = 0; -+ -+ while (*string) -+ value = ((value * 31 + (int) *(const unsigned char *) string++) -+ % n_buckets); -+ return value; -+} -+ -+#endif /* not USE_DIFF_HASH */ -+ -+/* Return true if CANDIDATE is a prime number. CANDIDATE should be an odd -+ number at least equal to 11. 
*/ -+ -+static bool -+is_prime (unsigned long candidate) -+{ -+ unsigned long divisor = 3; -+ unsigned long square = divisor * divisor; -+ -+ while (square < candidate && (candidate % divisor)) -+ { -+ divisor++; -+ square += 4 * divisor; -+ divisor++; -+ } -+ -+ return (candidate % divisor ? true : false); -+} -+ -+/* Round a given CANDIDATE number up to the nearest prime, and return that -+ prime. Primes lower than 10 are merely skipped. */ -+ -+static unsigned long -+next_prime (unsigned long candidate) -+{ -+ /* Skip small primes. */ -+ if (candidate < 10) -+ candidate = 10; -+ -+ /* Make it definitely odd. */ -+ candidate |= 1; -+ -+ while (!is_prime (candidate)) -+ candidate += 2; -+ -+ return candidate; -+} -+ -+void -+hash_reset_tuning (Hash_tuning *tuning) -+{ -+ *tuning = default_tuning; -+} -+ -+/* For the given hash TABLE, check the user supplied tuning structure for -+ reasonable values, and return true if there is no gross error with it. -+ Otherwise, definitively reset the TUNING field to some acceptable default -+ in the hash table (that is, the user loses the right of further modifying -+ tuning arguments), and return false. */ -+ -+static bool -+check_tuning (Hash_table *table) -+{ -+ const Hash_tuning *tuning = table->tuning; -+ -+ if (tuning->growth_threshold > 0.0 -+ && tuning->growth_threshold < 1.0 -+ && tuning->growth_factor > 1.0 -+ && tuning->shrink_threshold >= 0.0 -+ && tuning->shrink_threshold < 1.0 -+ && tuning->shrink_factor > tuning->shrink_threshold -+ && tuning->shrink_factor <= 1.0 -+ && tuning->shrink_threshold < tuning->growth_threshold) -+ return true; -+ -+ table->tuning = &default_tuning; -+ return false; -+} -+ -+/* Allocate and return a new hash table, or NULL upon failure. The initial -+ number of buckets is automatically selected so as to _guarantee_ that you -+ may insert at least CANDIDATE different user entries before any growth of -+ the hash table size occurs. So, if have a reasonably tight a-priori upper -+ bound on the number of entries you intend to insert in the hash table, you -+ may save some table memory and insertion time, by specifying it here. If -+ the IS_N_BUCKETS field of the TUNING structure is true, the CANDIDATE -+ argument has its meaning changed to the wanted number of buckets. -+ -+ TUNING points to a structure of user-supplied values, in case some fine -+ tuning is wanted over the default behavior of the hasher. If TUNING is -+ NULL, the default tuning parameters are used instead. -+ -+ The user-supplied HASHER function should be provided. It accepts two -+ arguments ENTRY and TABLE_SIZE. It computes, by hashing ENTRY contents, a -+ slot number for that entry which should be in the range 0..TABLE_SIZE-1. -+ This slot number is then returned. -+ -+ The user-supplied COMPARATOR function should be provided. It accepts two -+ arguments pointing to user data, it then returns true for a pair of entries -+ that compare equal, or false otherwise. This function is internally called -+ on entries which are already known to hash to the same bucket index. -+ -+ The user-supplied DATA_FREER function, when not NULL, may be later called -+ with the user data as an argument, just before the entry containing the -+ data gets freed. This happens from within `hash_free' or `hash_clear'. -+ You should specify this function only if you want these functions to free -+ all of your `data' data. This is typically the case when your data is -+ simply an auxiliary struct that you have malloc'd to aggregate several -+ values. 
*/ -+ -+Hash_table * -+hash_initialize (unsigned candidate, const Hash_tuning *tuning, -+ Hash_hasher hasher, Hash_comparator comparator, -+ Hash_data_freer data_freer) -+{ -+ Hash_table *table; -+ struct hash_entry *bucket; -+ -+ if (hasher == NULL || comparator == NULL) -+ return NULL; -+ -+ table = (Hash_table *) malloc (sizeof (Hash_table)); -+ if (table == NULL) -+ return NULL; -+ -+ if (!tuning) -+ tuning = &default_tuning; -+ table->tuning = tuning; -+ if (!check_tuning (table)) -+ { -+ /* Fail if the tuning options are invalid. This is the only occasion -+ when the user gets some feedback about it. Once the table is created, -+ if the user provides invalid tuning options, we silently revert to -+ using the defaults, and ignore further request to change the tuning -+ options. */ -+ free (table); -+ return NULL; -+ } -+ -+ table->n_buckets -+ = next_prime (tuning->is_n_buckets ? candidate -+ : (unsigned) (candidate / tuning->growth_threshold)); -+ -+ table->bucket = (struct hash_entry *) -+ malloc (table->n_buckets * sizeof (struct hash_entry)); -+ if (table->bucket == NULL) -+ { -+ free (table); -+ return NULL; -+ } -+ table->bucket_limit = table->bucket + table->n_buckets; -+ -+ for (bucket = table->bucket; bucket < table->bucket_limit; bucket++) -+ { -+ bucket->data = NULL; -+ bucket->next = NULL; -+ } -+ table->n_buckets_used = 0; -+ table->n_entries = 0; -+ -+ table->hasher = hasher; -+ table->comparator = comparator; -+ table->data_freer = data_freer; -+ -+ table->free_entry_list = NULL; -+#if USE_OBSTACK -+ obstack_init (&table->entry_stack); -+#endif -+ return table; -+} -+ -+/* Make all buckets empty, placing any chained entries on the free list. -+ Apply the user-specified function data_freer (if any) to the datas of any -+ affected entries. */ -+ -+void -+hash_clear (Hash_table *table) -+{ -+ struct hash_entry *bucket; -+ -+ for (bucket = table->bucket; bucket < table->bucket_limit; bucket++) -+ { -+ if (bucket->data) -+ { -+ struct hash_entry *cursor; -+ struct hash_entry *next; -+ -+ /* Free the bucket overflow. */ -+ for (cursor = bucket->next; cursor; cursor = next) -+ { -+ if (table->data_freer) -+ (*table->data_freer) (cursor->data); -+ cursor->data = NULL; -+ -+ next = cursor->next; -+ /* Relinking is done one entry at a time, as it is to be expected -+ that overflows are either rare or short. */ -+ cursor->next = table->free_entry_list; -+ table->free_entry_list = cursor; -+ } -+ -+ /* Free the bucket head. */ -+ if (table->data_freer) -+ (*table->data_freer) (bucket->data); -+ bucket->data = NULL; -+ bucket->next = NULL; -+ } -+ } -+ -+ table->n_buckets_used = 0; -+ table->n_entries = 0; -+} -+ -+/* Reclaim all storage associated with a hash table. If a data_freer -+ function has been supplied by the user when the hash table was created, -+ this function applies it to the data of each entry before freeing that -+ entry. */ -+ -+void -+hash_free (Hash_table *table) -+{ -+ struct hash_entry *bucket; -+ struct hash_entry *cursor; -+ struct hash_entry *next; -+ -+ /* Call the user data_freer function. */ -+ if (table->data_freer && table->n_entries) -+ { -+ for (bucket = table->bucket; bucket < table->bucket_limit; bucket++) -+ { -+ if (bucket->data) -+ { -+ for (cursor = bucket; cursor; cursor = cursor->next) -+ { -+ (*table->data_freer) (cursor->data); -+ } -+ } -+ } -+ } -+ -+#if USE_OBSTACK -+ -+ obstack_free (&table->entry_stack, NULL); -+ -+#else -+ -+ /* Free all bucket overflowed entries. 
*/ -+ for (bucket = table->bucket; bucket < table->bucket_limit; bucket++) -+ { -+ for (cursor = bucket->next; cursor; cursor = next) -+ { -+ next = cursor->next; -+ free (cursor); -+ } -+ } -+ -+ /* Also reclaim the internal list of previously freed entries. */ -+ for (cursor = table->free_entry_list; cursor; cursor = next) -+ { -+ next = cursor->next; -+ free (cursor); -+ } -+ -+#endif -+ -+ /* Free the remainder of the hash table structure. */ -+ free (table->bucket); -+ free (table); -+} -+ -+/* Insertion and deletion. */ -+ -+/* Get a new hash entry for a bucket overflow, possibly by reclying a -+ previously freed one. If this is not possible, allocate a new one. */ -+ -+static struct hash_entry * -+allocate_entry (Hash_table *table) -+{ -+ struct hash_entry *new; -+ -+ if (table->free_entry_list) -+ { -+ new = table->free_entry_list; -+ table->free_entry_list = new->next; -+ } -+ else -+ { -+#if USE_OBSTACK -+ new = (struct hash_entry *) -+ obstack_alloc (&table->entry_stack, sizeof (struct hash_entry)); -+#else -+ new = (struct hash_entry *) malloc (sizeof (struct hash_entry)); -+#endif -+ } -+ -+ return new; -+} -+ -+/* Free a hash entry which was part of some bucket overflow, -+ saving it for later recycling. */ -+ -+static void -+free_entry (Hash_table *table, struct hash_entry *entry) -+{ -+ entry->data = NULL; -+ entry->next = table->free_entry_list; -+ table->free_entry_list = entry; -+} -+ -+/* This private function is used to help with insertion and deletion. When -+ ENTRY matches an entry in the table, return a pointer to the corresponding -+ user data and set *BUCKET_HEAD to the head of the selected bucket. -+ Otherwise, return NULL. When DELETE is true and ENTRY matches an entry in -+ the table, unlink the matching entry. */ -+ -+static void * -+hash_find_entry (Hash_table *table, const void *entry, -+ struct hash_entry **bucket_head, bool delete) -+{ -+ struct hash_entry *bucket -+ = table->bucket + table->hasher (entry, table->n_buckets); -+ struct hash_entry *cursor; -+ -+ if (! (bucket < table->bucket_limit)) -+ abort (); -+ -+ *bucket_head = bucket; -+ -+ /* Test for empty bucket. */ -+ if (bucket->data == NULL) -+ return NULL; -+ -+ /* See if the entry is the first in the bucket. */ -+ if ((*table->comparator) (entry, bucket->data)) -+ { -+ void *data = bucket->data; -+ -+ if (delete) -+ { -+ if (bucket->next) -+ { -+ struct hash_entry *next = bucket->next; -+ -+ /* Bump the first overflow entry into the bucket head, then save -+ the previous first overflow entry for later recycling. */ -+ *bucket = *next; -+ free_entry (table, next); -+ } -+ else -+ { -+ bucket->data = NULL; -+ } -+ } -+ -+ return data; -+ } -+ -+ /* Scan the bucket overflow. */ -+ for (cursor = bucket; cursor->next; cursor = cursor->next) -+ { -+ if ((*table->comparator) (entry, cursor->next->data)) -+ { -+ void *data = cursor->next->data; -+ -+ if (delete) -+ { -+ struct hash_entry *next = cursor->next; -+ -+ /* Unlink the entry to delete, then save the freed entry for later -+ recycling. */ -+ cursor->next = next->next; -+ free_entry (table, next); -+ } -+ -+ return data; -+ } -+ } -+ -+ /* No entry found. */ -+ return NULL; -+} -+ -+/* For an already existing hash table, change the number of buckets through -+ specifying CANDIDATE. The contents of the hash table are preserved. 
The -+ new number of buckets is automatically selected so as to _guarantee_ that -+ the table may receive at least CANDIDATE different user entries, including -+ those already in the table, before any other growth of the hash table size -+ occurs. If TUNING->IS_N_BUCKETS is true, then CANDIDATE specifies the -+ exact number of buckets desired. */ -+ -+bool -+hash_rehash (Hash_table *table, unsigned candidate) -+{ -+ Hash_table *new_table; -+ struct hash_entry *bucket; -+ struct hash_entry *cursor; -+ struct hash_entry *next; -+ -+ new_table = hash_initialize (candidate, table->tuning, table->hasher, -+ table->comparator, table->data_freer); -+ if (new_table == NULL) -+ return false; -+ -+ /* Merely reuse the extra old space into the new table. */ -+#if USE_OBSTACK -+ obstack_free (&new_table->entry_stack, NULL); -+ new_table->entry_stack = table->entry_stack; -+#endif -+ new_table->free_entry_list = table->free_entry_list; -+ -+ for (bucket = table->bucket; bucket < table->bucket_limit; bucket++) -+ if (bucket->data) -+ for (cursor = bucket; cursor; cursor = next) -+ { -+ void *data = cursor->data; -+ struct hash_entry *new_bucket -+ = (new_table->bucket -+ + new_table->hasher (data, new_table->n_buckets)); -+ -+ if (! (new_bucket < new_table->bucket_limit)) -+ abort (); -+ -+ next = cursor->next; -+ -+ if (new_bucket->data) -+ { -+ if (cursor == bucket) -+ { -+ /* Allocate or recycle an entry, when moving from a bucket -+ header into a bucket overflow. */ -+ struct hash_entry *new_entry = allocate_entry (new_table); -+ -+ if (new_entry == NULL) -+ return false; -+ -+ new_entry->data = data; -+ new_entry->next = new_bucket->next; -+ new_bucket->next = new_entry; -+ } -+ else -+ { -+ /* Merely relink an existing entry, when moving from a -+ bucket overflow into a bucket overflow. */ -+ cursor->next = new_bucket->next; -+ new_bucket->next = cursor; -+ } -+ } -+ else -+ { -+ /* Free an existing entry, when moving from a bucket -+ overflow into a bucket header. Also take care of the -+ simple case of moving from a bucket header into a bucket -+ header. */ -+ new_bucket->data = data; -+ new_table->n_buckets_used++; -+ if (cursor != bucket) -+ free_entry (new_table, cursor); -+ } -+ } -+ -+ free (table->bucket); -+ table->bucket = new_table->bucket; -+ table->bucket_limit = new_table->bucket_limit; -+ table->n_buckets = new_table->n_buckets; -+ table->n_buckets_used = new_table->n_buckets_used; -+ table->free_entry_list = new_table->free_entry_list; -+ /* table->n_entries already holds its value. */ -+#if USE_OBSTACK -+ table->entry_stack = new_table->entry_stack; -+#endif -+ free (new_table); -+ -+ return true; -+} -+ -+/* If ENTRY matches an entry already in the hash table, return the pointer -+ to the entry from the table. Otherwise, insert ENTRY and return ENTRY. -+ Return NULL if the storage required for insertion cannot be allocated. */ -+ -+void * -+hash_insert (Hash_table *table, const void *entry) -+{ -+ void *data; -+ struct hash_entry *bucket; -+ -+ /* The caller cannot insert a NULL entry. */ -+ if (! entry) -+ abort (); -+ -+ /* If there's a matching entry already in the table, return that. */ -+ if ((data = hash_find_entry (table, entry, &bucket, false)) != NULL) -+ return data; -+ -+ /* ENTRY is not matched, it should be inserted. */ -+ -+ if (bucket->data) -+ { -+ struct hash_entry *new_entry = allocate_entry (table); -+ -+ if (new_entry == NULL) -+ return NULL; -+ -+ /* Add ENTRY in the overflow of the bucket. 
*/ -+ -+ new_entry->data = (void *) entry; -+ new_entry->next = bucket->next; -+ bucket->next = new_entry; -+ table->n_entries++; -+ return (void *) entry; -+ } -+ -+ /* Add ENTRY right in the bucket head. */ -+ -+ bucket->data = (void *) entry; -+ table->n_entries++; -+ table->n_buckets_used++; -+ -+ /* If the growth threshold of the buckets in use has been reached, increase -+ the table size and rehash. There's no point in checking the number of -+ entries: if the hashing function is ill-conditioned, rehashing is not -+ likely to improve it. */ -+ -+ if (table->n_buckets_used -+ > table->tuning->growth_threshold * table->n_buckets) -+ { -+ /* Check more fully, before starting real work. If tuning arguments -+ became invalid, the second check will rely on proper defaults. */ -+ check_tuning (table); -+ if (table->n_buckets_used -+ > table->tuning->growth_threshold * table->n_buckets) -+ { -+ const Hash_tuning *tuning = table->tuning; -+ unsigned candidate -+ = (unsigned) (tuning->is_n_buckets -+ ? (table->n_buckets * tuning->growth_factor) -+ : (table->n_buckets * tuning->growth_factor -+ * tuning->growth_threshold)); -+ -+ /* If the rehash fails, arrange to return NULL. */ -+ if (!hash_rehash (table, candidate)) -+ entry = NULL; -+ } -+ } -+ -+ return (void *) entry; -+} -+ -+/* If ENTRY is already in the table, remove it and return the just-deleted -+ data (the user may want to deallocate its storage). If ENTRY is not in the -+ table, don't modify the table and return NULL. */ -+ -+void * -+hash_delete (Hash_table *table, const void *entry) -+{ -+ void *data; -+ struct hash_entry *bucket; -+ -+ data = hash_find_entry (table, entry, &bucket, true); -+ if (!data) -+ return NULL; -+ -+ table->n_entries--; -+ if (!bucket->data) -+ { -+ table->n_buckets_used--; -+ -+ /* If the shrink threshold of the buckets in use has been reached, -+ rehash into a smaller table. */ -+ -+ if (table->n_buckets_used -+ < table->tuning->shrink_threshold * table->n_buckets) -+ { -+ /* Check more fully, before starting real work. If tuning arguments -+ became invalid, the second check will rely on proper defaults. */ -+ check_tuning (table); -+ if (table->n_buckets_used -+ < table->tuning->shrink_threshold * table->n_buckets) -+ { -+ const Hash_tuning *tuning = table->tuning; -+ unsigned candidate -+ = (unsigned) (tuning->is_n_buckets -+ ? table->n_buckets * tuning->shrink_factor -+ : (table->n_buckets * tuning->shrink_factor -+ * tuning->growth_threshold)); -+ -+ hash_rehash (table, candidate); -+ } -+ } -+ } -+ -+ return data; -+} -+ -+/* Testing. */ -+ -+#if TESTING -+ -+void -+hash_print (const Hash_table *table) -+{ -+ struct hash_entry *bucket; -+ -+ for (bucket = table->bucket; bucket < table->bucket_limit; bucket++) -+ { -+ struct hash_entry *cursor; -+ -+ if (bucket) -+ printf ("%d:\n", bucket - table->bucket); -+ -+ for (cursor = bucket; cursor; cursor = cursor->next) -+ { -+ char *s = (char *) cursor->data; -+ /* FIXME */ -+ if (s) -+ printf (" %s\n", s); -+ } -+ } -+} -+ -+#endif /* TESTING */ ---- patch-2.5.9.orig/hash.h -+++ patch-2.5.9/hash.h -@@ -0,0 +1,93 @@ -+/* hash - hashing table processing. -+ Copyright (C) 1998, 1999, 2001 Free Software Foundation, Inc. -+ Written by Jim Meyering , 1998. -+ -+ This program is free software; you can redistribute it and/or modify -+ it under the terms of the GNU General Public License as published by -+ the Free Software Foundation; either version 2, or (at your option) -+ any later version. 
-+ -+ This program is distributed in the hope that it will be useful, -+ but WITHOUT ANY WARRANTY; without even the implied warranty of -+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ GNU General Public License for more details. -+ -+ You should have received a copy of the GNU General Public License -+ along with this program; if not, write to the Free Software Foundation, -+ Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ -+ -+/* A generic hash table package. */ -+ -+/* Make sure USE_OBSTACK is defined to 1 if you want the allocator to use -+ obstacks instead of malloc, and recompile `hash.c' with same setting. */ -+ -+#ifndef HASH_H_ -+# define HASH_H_ -+ -+# ifndef PARAMS -+# if PROTOTYPES || __STDC__ -+# define PARAMS(Args) Args -+# else -+# define PARAMS(Args) () -+# endif -+# endif -+ -+typedef unsigned (*Hash_hasher) PARAMS ((const void *, unsigned)); -+typedef bool (*Hash_comparator) PARAMS ((const void *, const void *)); -+typedef void (*Hash_data_freer) PARAMS ((void *)); -+typedef bool (*Hash_processor) PARAMS ((void *, void *)); -+ -+struct hash_entry -+ { -+ void *data; -+ struct hash_entry *next; -+ }; -+ -+struct hash_tuning -+ { -+ /* This structure is mainly used for `hash_initialize', see the block -+ documentation of `hash_reset_tuning' for more complete comments. */ -+ -+ float shrink_threshold; /* ratio of used buckets to trigger a shrink */ -+ float shrink_factor; /* ratio of new smaller size to original size */ -+ float growth_threshold; /* ratio of used buckets to trigger a growth */ -+ float growth_factor; /* ratio of new bigger size to original size */ -+ bool is_n_buckets; /* if CANDIDATE really means table size */ -+ }; -+ -+typedef struct hash_tuning Hash_tuning; -+ -+struct hash_table; -+ -+typedef struct hash_table Hash_table; -+ -+/* Information and lookup. */ -+unsigned hash_get_n_buckets PARAMS ((const Hash_table *)); -+unsigned hash_get_n_buckets_used PARAMS ((const Hash_table *)); -+unsigned hash_get_n_entries PARAMS ((const Hash_table *)); -+unsigned hash_get_max_bucket_length PARAMS ((const Hash_table *)); -+bool hash_table_ok PARAMS ((const Hash_table *)); -+void hash_print_statistics PARAMS ((const Hash_table *, FILE *)); -+void *hash_lookup PARAMS ((const Hash_table *, const void *)); -+ -+/* Walking. */ -+void *hash_get_first PARAMS ((const Hash_table *)); -+void *hash_get_next PARAMS ((const Hash_table *, const void *)); -+unsigned hash_get_entries PARAMS ((const Hash_table *, void **, unsigned)); -+unsigned hash_do_for_each PARAMS ((const Hash_table *, Hash_processor, void *)); -+ -+/* Allocation and clean-up. */ -+unsigned hash_string PARAMS ((const char *, unsigned)); -+void hash_reset_tuning PARAMS ((Hash_tuning *)); -+Hash_table *hash_initialize PARAMS ((unsigned, const Hash_tuning *, -+ Hash_hasher, Hash_comparator, -+ Hash_data_freer)); -+void hash_clear PARAMS ((Hash_table *)); -+void hash_free PARAMS ((Hash_table *)); -+ -+/* Insertion and deletion. 
*/ -+bool hash_rehash PARAMS ((Hash_table *, unsigned)); -+void *hash_insert PARAMS ((Hash_table *, const void *)); -+void *hash_delete PARAMS ((Hash_table *, const void *)); -+ -+#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/global-reject-file.diff b/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/global-reject-file.diff deleted file mode 100644 index bb7ca7912..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/global-reject-file.diff +++ /dev/null @@ -1,203 +0,0 @@ -Upstream-Status: Inappropriate [debian patch] - -Index: patch-2.5.9/patch.man -=================================================================== ---- patch-2.5.9.orig/patch.man -+++ patch-2.5.9/patch.man -@@ -520,6 +520,15 @@ file. - \fB\*=reject\-unified\fP - Produce unified reject files. The default is to produce context type reject files. - .TP -+.BI \*=global\-reject\-file= rejectfile -+Put all rejects into -+.I rejectfile -+instead of creating separate reject files for all files that have rejects. The -+.I rejectfile -+will contain headers that identify which file each reject refers to. Note that -+the global reject file is created even if \-\-dry\-run is specified (while -+non-global reject files will only be created without \-\-dry\-run). -+.TP - \fB\-R\fP or \fB\*=reverse\fP - Assume that this patch was created with the old and new files swapped. - (Yes, I'm afraid that does happen occasionally, human nature being what it -Index: patch-2.5.9/patch.c -=================================================================== ---- patch-2.5.9.orig/patch.c -+++ patch-2.5.9/patch.c -@@ -67,6 +67,7 @@ static bool similar (char const *, size_ - static bool spew_output (struct outstate *); - static char const *make_temp (char); - static int numeric_string (char const *, bool, char const *); -+static void reject_header (const char *filename); - static void abort_hunk (void); - static void cleanup (void); - static void get_some_switches (void); -@@ -98,6 +99,7 @@ static int Argc; - static char * const *Argv; - - static FILE *rejfp; /* reject file pointer */ -+static char *global_reject; - - static char const *patchname; - static char *rejname; -@@ -172,6 +174,10 @@ main (int argc, char **argv) - /* Make sure we clean up in case of disaster. */ - set_signals (false); - -+ /* initialize global reject file */ -+ if (global_reject) -+ init_reject (); -+ - for ( - open_patch_file (patchname); - there_is_another_patch(); -@@ -208,8 +214,9 @@ main (int argc, char **argv) - init_output (TMPOUTNAME, exclusive, &outstate); - } - -- /* initialize reject file */ -- init_reject (); -+ /* initialize per-patch reject file */ -+ if (!global_reject) -+ init_reject (); - - /* find out where all the lines are */ - if (!skip_rest_of_patch) -@@ -278,6 +285,8 @@ main (int argc, char **argv) - - newwhere = pch_newfirst() + last_offset; - if (skip_rest_of_patch) { -+ if (!failed) -+ reject_header(outname); - abort_hunk(); - failed++; - if (verbosity == VERBOSE) -@@ -292,6 +301,8 @@ main (int argc, char **argv) - say ("Patch attempted to create file %s, which already exists.\n", - quotearg (inname)); - -+ if (!failed) -+ reject_header(outname); - abort_hunk(); - failed++; - if (verbosity != SILENT) -@@ -299,6 +310,8 @@ main (int argc, char **argv) - format_linenum (numbuf, newwhere)); - } - else if (! 
apply_hunk (&outstate, where)) { -+ if (!failed) -+ reject_header(outname); - abort_hunk (); - failed++; - if (verbosity != SILENT) -@@ -332,7 +345,8 @@ main (int argc, char **argv) - fclose (outstate.ofp); - outstate.ofp = 0; - } -- fclose (rejfp); -+ if (!global_reject) -+ fclose (rejfp); - continue; - } - -@@ -412,13 +426,13 @@ main (int argc, char **argv) - } - } - if (diff_type != ED_DIFF) { -- if (fclose (rejfp) != 0) -+ if (!global_reject && fclose (rejfp) != 0) - write_fatal (); - if (failed) { - somefailed = true; - say ("%d out of %d hunk%s %s", failed, hunk, "s" + (hunk == 1), - skip_rest_of_patch ? "ignored" : "FAILED"); -- if (outname) { -+ if (!global_reject && outname) { - char *rej = rejname; - if (!rejname) { - rej = xmalloc (strlen (outname) + 5); -@@ -445,6 +459,20 @@ main (int argc, char **argv) - } - set_signals (true); - } -+ if (global_reject) -+ { -+ if (fclose (rejfp) != 0) -+ write_fatal (); -+ if (somefailed) -+ { -+ say (" -- saving rejects to file %s\n", quotearg (global_reject)); -+ /*if (! dry_run) -+ {*/ -+ move_file (TMPREJNAME, &TMPREJNAME_needs_removal, -+ global_reject, 0644, false); -+ /*}*/ -+ } -+ } - if (outstate.ofp && (ferror (outstate.ofp) || fclose (outstate.ofp) != 0)) - write_fatal (); - cleanup (); -@@ -523,6 +551,7 @@ static struct option const longopts[] = - {"posix", no_argument, NULL, CHAR_MAX + 7}, - {"quoting-style", required_argument, NULL, CHAR_MAX + 8}, - {"unified-reject-files", no_argument, NULL, CHAR_MAX + 9}, -+ {"global-reject-file", required_argument, NULL, CHAR_MAX + 10}, - {NULL, no_argument, NULL, 0} - }; - -@@ -582,6 +611,7 @@ static char const *const option_help[] = - " --dry-run Do not actually change any files; just print what would happen.", - " --posix Conform to the POSIX standard.", - " --unified-reject-files Create unified reject files.", -+" --global-reject-file=file Put all rejects into one file.", - "", - " -d DIR --directory=DIR Change the working directory to DIR first.", - #if HAVE_SETMODE_DOS -@@ -784,6 +814,9 @@ get_some_switches (void) - case CHAR_MAX + 9: - unified_reject_files = true; - break; -+ case CHAR_MAX + 10: -+ global_reject = savestr (optarg); -+ break; - default: - usage (stderr, 2); - } -@@ -933,6 +966,37 @@ locate_hunk (LINENUM fuzz) - } - - static char * -+format_timestamp (char timebuf[37], bool which) -+{ -+ time_t ts = pch_timestamp(which); -+ if (ts != -1) -+ { -+ struct tm *tm = localtime(&ts); -+ strftime(timebuf, 37, "\t%Y-%m-%d %H:%M:%S.000000000 %z", tm); -+ } -+ else -+ timebuf[0] = 0; -+ return timebuf; -+} -+ -+/* Write a header in a reject file that combines multiple hunks. 
*/ -+static void -+reject_header (const char *outname) -+{ -+ char timebuf0[37], timebuf1[37]; -+ if (!global_reject) -+ return; -+ if (diff_type == UNI_DIFF) -+ fprintf(rejfp, "--- %s.orig%s\n+++ %s%s\n", -+ outname, format_timestamp(timebuf0, reverse), -+ outname, format_timestamp(timebuf1, !reverse)); -+ else -+ fprintf(rejfp, "*** %s.orig%s\n--- %s%s\n", -+ outname, format_timestamp(timebuf0, reverse), -+ outname, format_timestamp(timebuf1, !reverse)); -+} -+ -+static char * - format_linerange (char rangebuf[LINENUM_LENGTH_BOUND*2 + 2], - LINENUM first, LINENUM lines) - { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/install.patch b/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/install.patch deleted file mode 100644 index 0354ec8f0..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/install.patch +++ /dev/null @@ -1,43 +0,0 @@ -Upstream-Status: Inappropriate [embedded specific] - -Index: patch-2.5.4/Makefile.in -=================================================================== ---- patch-2.5.4.orig/Makefile.in 2005-03-09 07:23:54.779311824 -0500 -+++ patch-2.5.4/Makefile.in 2005-03-09 07:26:09.616813408 -0500 -@@ -43,10 +43,11 @@ - PACKAGE_NAME = @PACKAGE_NAME@ - PACKAGE_VERSION = @PACKAGE_VERSION@ - -+DESTDIR = - prefix = @prefix@ - exec_prefix = @exec_prefix@ - --bindir = $(exec_prefix)/bin -+bindir = @bindir@ - - # Where to put the manual pages. - mandir = @mandir@ -@@ -112,18 +113,18 @@ - $(CC) -o $@ $(CFLAGS) $(LDFLAGS) $(OBJS) $(LIBS) - - install:: all installdirs -- $(INSTALL_PROGRAM) patch$(EXEEXT) $(bindir)/$(patch_name)$(EXEEXT) -- -$(INSTALL_DATA) $(srcdir)/patch.man $(man1dir)/$(patch_name)$(man1ext) -+ $(INSTALL_PROGRAM) patch$(EXEEXT) $(DESTDIR)$(bindir)/$(patch_name)$(EXEEXT) -+ -$(INSTALL_DATA) $(srcdir)/patch.man $(DESTDIR)$(man1dir)/$(patch_name)$(man1ext) - - installdirs:: -- $(SHELL) $(srcdir)/mkinstalldirs $(bindir) $(man1dir) -+ $(SHELL) $(srcdir)/mkinstalldirs $(DESTDIR)$(bindir) $(DESTDIR)$(man1dir) - - install-strip:: - $(MAKE) INSTALL_PROGRAM='$(INSTALL_PROGRAM) -s' install - - uninstall:: -- rm -f $(bindir)/$(patch_name)$(EXEEXT) -- rm -f $(man1dir)/$(patch_name)$(man1ext) -+ rm -f $(DESTDIR)$(bindir)/$(patch_name)$(EXEEXT) -+ rm -f $(DESTDIR)$(man1dir)/$(patch_name)$(man1ext) - - Makefile: Makefile.in $(CONFIG_STATUS) - $(SHELL) $(CONFIG_STATUS) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/unified-reject-files.diff b/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/unified-reject-files.diff deleted file mode 100644 index 4b59212db..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch/unified-reject-files.diff +++ /dev/null @@ -1,307 +0,0 @@ -Upstream-Status: Inappropriate [debian patch] - -Generate unified diff style reject files. Also include the C function names -in reject files whenever possible. 
- - $ cat > f.orig - < a() { - < 2 - < 3 - < - < 5 - < 6 - < } - - $ sed -e 's/5/5a/' f.orig > f - $ diff -U2 -p f.orig f > f.diff - $ sed -e 's/5/5a/' -e 's/6/6x/' f.orig > f - $ ./patch -F0 -s --no-backup-if-mismatch f --reject-unified < f.diff - > 1 out of 1 hunk FAILED -- saving rejects to file f.rej - - $ cat f.rej - > @@ -3,5 +3,5 @@ a() { - > 3 - > - > -5 - > +5a - > 6 - > } - - $ ./patch -F0 -s --no-backup-if-mismatch f < f.diff - > 1 out of 1 hunk FAILED -- saving rejects to file f.rej - - $ cat f.rej - > *************** a() { - > *** 3,7 **** - > 3 - > - > - 5 - > 6 - > } - > --- 3,7 ---- - > 3 - > - > + 5a - > 6 - > } - - $ diff -Nu -p /dev/null f.orig > f2.diff - $ ./patch -F0 -s --no-backup-if-mismatch f --reject-unified < f2.diff - > Patch attempted to create file f, which already exists. - > 1 out of 1 hunk FAILED -- saving rejects to file f.rej - - $ cat f.rej - > @@ -0,0 +1,7 @@ - > +a() { - > +2 - > +3 - > + - > +5 - > +6 - > +} - - $ rm -f f f.orig f.rej f.diff f2.diff - -Index: patch-2.5.9/pch.c -=================================================================== ---- patch-2.5.9.orig/pch.c -+++ patch-2.5.9/pch.c -@@ -68,6 +68,7 @@ static LINENUM p_sline; /* and the lin - static LINENUM p_hunk_beg; /* line number of current hunk */ - static LINENUM p_efake = -1; /* end of faked up lines--don't free */ - static LINENUM p_bfake = -1; /* beg of faked up lines */ -+static char *p_c_function; /* the C function a hunk is in */ - - enum nametype { OLD, NEW, INDEX, NONE }; - -@@ -888,6 +889,19 @@ another_hunk (enum diff difftype, bool r - next_intuit_at(line_beginning,p_input_line); - return chars_read == (size_t) -1 ? -1 : 0; - } -+ s = buf; -+ while (*s == '*') -+ s++; -+ if (*s == ' ') -+ { -+ p_c_function = s; -+ while (*s != '\n') -+ s++; -+ *s = '\0'; -+ p_c_function = savestr (p_c_function); -+ } -+ else -+ p_c_function = NULL; - p_hunk_beg = p_input_line + 1; - while (p_end < p_max) { - chars_read = get_line (); -@@ -1277,8 +1291,18 @@ another_hunk (enum diff difftype, bool r - else - p_repl_lines = 1; - if (*s == ' ') s++; -- if (*s != '@') -+ if (*s++ != '@') - malformed (); -+ if (*s++ == '@' && *s == ' ' && *s != '\0') -+ { -+ p_c_function = s; -+ while (*s != '\n') -+ s++; -+ *s = '\0'; -+ p_c_function = savestr (p_c_function); -+ } -+ else -+ p_c_function = NULL; - if (!p_ptrn_lines) - p_first++; /* do append rather than insert */ - if (!p_repl_lines) -@@ -1884,6 +1908,12 @@ pch_hunk_beg (void) - return p_hunk_beg; - } - -+char const * -+pch_c_function (void) -+{ -+ return p_c_function; -+} -+ - /* Is the newline-terminated line a valid `ed' command for patch - input? If so, return the command character; if not, return 0. - This accepts accepts just a subset of the valid commands, but it's -Index: patch-2.5.9/pch.h -=================================================================== ---- patch-2.5.9.orig/pch.h -+++ patch-2.5.9/pch.h -@@ -25,6 +25,7 @@ - LINENUM pch_end (void); - LINENUM pch_first (void); - LINENUM pch_hunk_beg (void); -+char const *pch_c_function (void); - LINENUM pch_newfirst (void); - LINENUM pch_prefix_context (void); - LINENUM pch_ptrn_lines (void); -Index: patch-2.5.9/patch.man -=================================================================== ---- patch-2.5.9.orig/patch.man -+++ patch-2.5.9/patch.man -@@ -517,6 +517,9 @@ instead of the default - .B \&.rej - file. - .TP -+\fB\*=reject\-unified\fP -+Produce unified reject files. The default is to produce context type reject files. 
-+.TP - \fB\-R\fP or \fB\*=reverse\fP - Assume that this patch was created with the old and new files swapped. - (Yes, I'm afraid that does happen occasionally, human nature being what it -Index: patch-2.5.9/common.h -=================================================================== ---- patch-2.5.9.orig/common.h -+++ patch-2.5.9/common.h -@@ -146,6 +146,7 @@ XTERN int invc; - XTERN struct stat instat; - XTERN bool dry_run; - XTERN bool posixly_correct; -+XTERN bool unified_reject_files; - - XTERN char const *origprae; - XTERN char const *origbase; -Index: patch-2.5.9/patch.c -=================================================================== ---- patch-2.5.9.orig/patch.c -+++ patch-2.5.9/patch.c -@@ -522,6 +522,7 @@ static struct option const longopts[] = - {"no-backup-if-mismatch", no_argument, NULL, CHAR_MAX + 6}, - {"posix", no_argument, NULL, CHAR_MAX + 7}, - {"quoting-style", required_argument, NULL, CHAR_MAX + 8}, -+ {"unified-reject-files", no_argument, NULL, CHAR_MAX + 9}, - {NULL, no_argument, NULL, 0} - }; - -@@ -580,6 +581,7 @@ static char const *const option_help[] = - " --verbose Output extra information about the work being done.", - " --dry-run Do not actually change any files; just print what would happen.", - " --posix Conform to the POSIX standard.", -+" --unified-reject-files Create unified reject files.", - "", - " -d DIR --directory=DIR Change the working directory to DIR first.", - #if HAVE_SETMODE_DOS -@@ -779,6 +781,9 @@ get_some_switches (void) - (enum quoting_style) i); - } - break; -+ case CHAR_MAX + 9: -+ unified_reject_files = true; -+ break; - default: - usage (stderr, 2); - } -@@ -927,6 +932,24 @@ locate_hunk (LINENUM fuzz) - return 0; - } - -+static char * -+format_linerange (char rangebuf[LINENUM_LENGTH_BOUND*2 + 2], -+ LINENUM first, LINENUM lines) -+{ -+ if (lines == 1) -+ rangebuf = format_linenum (rangebuf, first); -+ else -+ { -+ char *rb; -+ rangebuf = format_linenum (rangebuf + LINENUM_LENGTH_BOUND + 1, lines); -+ rb = rangebuf-1; -+ rangebuf = format_linenum (rangebuf - LINENUM_LENGTH_BOUND - 1, -+ (lines > 0) ? first : 0); -+ *rb = ','; -+ } -+ return rangebuf; -+} -+ - /* We did not find the pattern, dump out the hunk so they can handle it. */ - - static void -@@ -943,8 +966,83 @@ abort_hunk (void) - (int) NEW_CONTEXT_DIFF <= (int) diff_type ? " ****" : ""; - char const *minuses = - (int) NEW_CONTEXT_DIFF <= (int) diff_type ? " ----" : " -----"; -+ char const *function = pch_c_function(); -+ if (function == NULL) -+ function = ""; -+ -+ if (unified_reject_files) -+ { -+ /* produce unified reject files */ -+ char rangebuf0[LINENUM_LENGTH_BOUND*2 + 2]; -+ char rangebuf1[LINENUM_LENGTH_BOUND*2 + 2]; -+ LINENUM j; -+ -+ /* Find the beginning of the remove and insert section. 
*/ -+ for (j = 0; j <= pat_end; j++) -+ if (pch_char (j) == '=') -+ break; -+ for (i = j+1; i <= pat_end; i++) -+ if (pch_char (i) == '^') -+ break; -+ if (pch_char (0) != '*' || j > pat_end || i > pat_end+1) -+ fatal ("internal error in abort_hunk"); -+ i = 1; j++; -+ -+ /* @@ -from,lines +to,lines @@ */ -+ fprintf (rejfp, "@@ -%s +%s @@%s\n", -+ format_linerange (rangebuf0, oldfirst, pch_ptrn_lines()), -+ format_linerange (rangebuf1, newfirst, pch_repl_lines()), -+ function); -+ -+ while ( (i <= pat_end && pch_char (i) != '=') -+ || (j <= pat_end && pch_char (j) != '^')) -+ { -+ if (i <= pat_end -+ && (pch_char (i) == '-' || pch_char (i) == '!')) -+ { -+ fputc('-', rejfp); -+ pch_write_line (i++, rejfp); -+ } -+ else if (j <= pat_end -+ && (pch_char (j) == '+' || pch_char (j) == '!')) -+ { -+ fputc('+', rejfp); -+ pch_write_line (j++, rejfp); -+ } -+ else if ((i <= pat_end -+ && (pch_char (i) == ' ' || pch_char (i) == '\n')) && -+ (j > pat_end -+ || (pch_char (j) == ' ' || pch_char (j) == '\n'))) -+ { -+ /* Unless j is already past the end, lines i and j -+ must be equal here. */ -+ -+ if (pch_char (i) == ' ') -+ fputc(' ', rejfp); -+ pch_write_line (i++, rejfp); -+ if (j <= pat_end) -+ j++; -+ } -+ else if ((j <= pat_end && -+ (pch_char (j) == ' ' || pch_char (j) == '\n')) && -+ (pch_char (i) == '=')) -+ { -+ if (pch_char (j) == ' ') -+ fputc(' ', rejfp); -+ pch_write_line (j++, rejfp); -+ } -+ else -+ fatal ("internal error in abort_hunk"); -+ } -+ -+ if (ferror (rejfp)) -+ write_fatal (); -+ return; -+ } - -- fprintf(rejfp, "***************\n"); -+ /* produce context type reject files */ -+ -+ fprintf(rejfp, "***************%s\n", function); - for (i=0; i<=pat_end; i++) { - char numbuf0[LINENUM_LENGTH_BOUND + 1]; - char numbuf1[LINENUM_LENGTH_BOUND + 1]; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch_2.5.9.bb b/import-layers/yocto-poky/meta/recipes-devtools/patch/patch_2.5.9.bb deleted file mode 100644 index c29b24075..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch_2.5.9.bb +++ /dev/null @@ -1,12 +0,0 @@ -require patch.inc -LICENSE = "GPLv2" - -SRC_URI += " file://debian.patch \ - file://install.patch \ - file://unified-reject-files.diff \ - file://global-reject-file.diff " -PR = "r3" - -LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f" -SRC_URI[md5sum] = "dacfb618082f8d3a2194601193cf8716" -SRC_URI[sha256sum] = "ecb5c6469d732bcf01d6ec1afe9e64f1668caba5bfdb103c28d7f537ba3cdb8a" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch_2.7.5.bb b/import-layers/yocto-poky/meta/recipes-devtools/patch/patch_2.7.5.bb index f3fcf5e86..151f021b2 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/patch/patch_2.7.5.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/patch/patch_2.7.5.bb @@ -10,6 +10,6 @@ LIC_FILES_CHKSUM = "file://COPYING;md5=d32239bcb673463ab874e80d47fae504" acpaths = "-I ${S}/m4 " -PACKAGECONFIG ?= "${@bb.utils.contains('DISTRO_FEATURES', 'xattr', 'xattr', '', d)}" +PACKAGECONFIG ?= "${@bb.utils.filter('DISTRO_FEATURES', 'xattr', d)}" PACKAGECONFIG[xattr] = "--enable-xattr,--disable-xattr,attr," diff --git a/import-layers/yocto-poky/meta/recipes-devtools/patchelf/patchelf/avoidholes.patch b/import-layers/yocto-poky/meta/recipes-devtools/patchelf/patchelf/avoidholes.patch new file mode 100644 index 000000000..a273688b9 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/patchelf/patchelf/avoidholes.patch @@ -0,0 +1,163 @@ +Different types of binaries create 
challenges for patchelf. In order to extend +sections they need to be moved within the binary. The current approach to +handling ET_DYN binaries is to move the INTERP section to the end of the file. +This means changing PT_PHDR to add an extra PT_LOAD section so that the new section +is mmaped into memory by the elf loader in the kernel. In order to extend PHDR, +this means moving it to the end of the file. + +Its documented in patchelf there is a kernel 'bug' which means that if you have holes +in memory between the base load address and the PT_LOAD segment that contains PHDR, +it will pass an incorrect PHDR address to ld.so and fail to load the binary, segfaulting. + +To avoid this, the code currently inserts space into the binary to ensure that when +loaded into memory there are no holes between the PT_LOAD sections. This inflates the +binaries by many MBs in some cases. Whilst we could make them sparse, there is a second +issue which is that strip can fail to process these binaries: + +$ strip fixincl +Not enough room for program headers, try linking with -N +[.note.ABI-tag]: Bad value + +This turns out to be due to libbfd not liking the relocated PHDR section either +(https://github.com/NixOS/patchelf/issues/10). + +Instead this patch implements a different approach, leaving PHDR where it is but extending +it in place to allow addition of a new PT_LOAD section. This overwrites sections in the +binary but those get moved to the end of the file in the new PT_LOAD section. + +This is based on patches linked from the above github issue, however whilst the idea +was good, the implementation wasn't correct and they've been rewritten here. + +RP +2017/3/7 + +Upstream-Status: Pending + +Index: patchelf-0.9/src/patchelf.cc +=================================================================== +--- patchelf-0.9.orig/src/patchelf.cc ++++ patchelf-0.9/src/patchelf.cc +@@ -146,6 +146,8 @@ private: + string & replaceSection(const SectionName & sectionName, + unsigned int size); + ++ bool haveReplacedSection(const SectionName & sectionName); ++ + void writeReplacedSections(Elf_Off & curOff, + Elf_Addr startAddr, Elf_Off startOffset); + +@@ -497,6 +499,16 @@ unsigned int ElfFile: + return 0; + } + ++template ++bool ElfFile::haveReplacedSection(const SectionName & sectionName) ++{ ++ ReplacedSections::iterator i = replacedSections.find(sectionName); ++ ++ if (i != replacedSections.end()) ++ return true; ++ return false; ++} ++ + + template + string & ElfFile::replaceSection(const SectionName & sectionName, +@@ -595,52 +607,52 @@ void ElfFile::rewrite + + debug("last page is 0x%llx\n", (unsigned long long) startPage); + ++ /* Because we're adding a new section header, we're necessarily increasing ++ the size of the program header table. This can cause the first section ++ to overlap the program header table in memory; we need to shift the first ++ few segments to someplace else. */ ++ /* Some sections may already be replaced so account for that */ ++ unsigned int i = 1; ++ Elf_Addr pht_size = sizeof(Elf_Ehdr) + (phdrs.size() + 1)*sizeof(Elf_Phdr); ++ while( shdrs[i].sh_addr <= pht_size && i < rdi(hdr->e_shnum) ) { ++ if (not haveReplacedSection(getSectionName(shdrs[i]))) ++ replaceSection(getSectionName(shdrs[i]), shdrs[i].sh_size); ++ i++; ++ } + +- /* Compute the total space needed for the replaced sections and +- the program headers. 
*/ +- off_t neededSpace = (phdrs.size() + 1) * sizeof(Elf_Phdr); ++ /* Compute the total space needed for the replaced sections */ ++ off_t neededSpace = 0; + for (ReplacedSections::iterator i = replacedSections.begin(); + i != replacedSections.end(); ++i) + neededSpace += roundUp(i->second.size(), sectionAlignment); + debug("needed space is %d\n", neededSpace); + +- + size_t startOffset = roundUp(fileSize, getPageSize()); + + growFile(startOffset + neededSpace); + +- + /* Even though this file is of type ET_DYN, it could actually be + an executable. For instance, Gold produces executables marked +- ET_DYN. In that case we can still hit the kernel bug that +- necessitated rewriteSectionsExecutable(). However, such +- executables also tend to start at virtual address 0, so ++ ET_DYN as does LD when linking with pie. If we move PT_PHDR, it ++ has to stay in the first PT_LOAD segment or any subsequent ones ++ if they're continuous in memory due to linux kernel constraints ++ (see BUGS). Since the end of the file would be after bss, we can't ++ move PHDR there, we therefore choose to leave PT_PHDR where it is but ++ move enough following sections such that we can add the extra PT_LOAD ++ section to it. This PT_LOAD segment ensures the sections at the end of ++ the file are mapped into memory for ld.so to process. ++ We can't use the approach in rewriteSectionsExecutable() ++ since DYN executables tend to start at virtual address 0, so + rewriteSectionsExecutable() won't work because it doesn't have +- any virtual address space to grow downwards into. As a +- workaround, make sure that the virtual address of our new +- PT_LOAD segment relative to the first PT_LOAD segment is equal +- to its offset; otherwise we hit the kernel bug. This may +- require creating a hole in the executable. The bigger the size +- of the uninitialised data segment, the bigger the hole. */ ++ any virtual address space to grow downwards into. */ + if (isExecutable) { + if (startOffset >= startPage) { + debug("shifting new PT_LOAD segment by %d bytes to work around a Linux kernel bug\n", startOffset - startPage); +- } else { +- size_t hole = startPage - startOffset; +- /* Print a warning, because the hole could be very big. */ +- fprintf(stderr, "warning: working around a Linux kernel bug by creating a hole of %zu bytes in ‘%s’\n", hole, fileName.c_str()); +- assert(hole % getPageSize() == 0); +- /* !!! We could create an actual hole in the file here, +- but it's probably not worth the effort. */ +- growFile(fileSize + hole); +- startOffset += hole; + } + startPage = startOffset; + } + +- +- /* Add a segment that maps the replaced sections and program +- headers into memory. */ ++ /* Add a segment that maps the replaced sections into memory. */ + phdrs.resize(rdi(hdr->e_phnum) + 1); + wri(hdr->e_phnum, rdi(hdr->e_phnum) + 1); + Elf_Phdr & phdr = phdrs[rdi(hdr->e_phnum) - 1]; +@@ -653,15 +665,12 @@ void ElfFile::rewrite + + + /* Write out the replaced sections. */ +- Elf_Off curOff = startOffset + phdrs.size() * sizeof(Elf_Phdr); ++ Elf_Off curOff = startOffset; + writeReplacedSections(curOff, startPage, startOffset); + assert(curOff == startOffset + neededSpace); + +- +- /* Move the program header to the start of the new area. 
*/ +- wri(hdr->e_phoff, startOffset); +- +- rewriteHeaders(startPage); ++ /* Write out the updated program and section headers */ ++ rewriteHeaders(hdr->e_phoff); + } + + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/patchelf/patchelf_0.9.bb b/import-layers/yocto-poky/meta/recipes-devtools/patchelf/patchelf_0.9.bb index 54e654bdc..01f0e6213 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/patchelf/patchelf_0.9.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/patchelf/patchelf_0.9.bb @@ -2,6 +2,7 @@ SRC_URI = "http://nixos.org/releases/${BPN}/${BPN}-${PV}/${BPN}-${PV}.tar.bz2 \ file://Skip-empty-section-fixes-66.patch \ file://handle-read-only-files.patch \ file://Increase-maxSize-to-64MB.patch \ + file://avoidholes.patch \ " LICENSE = "GPLv3" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pax-utils/pax-utils_1.1.6.bb b/import-layers/yocto-poky/meta/recipes-devtools/pax-utils/pax-utils_1.1.6.bb deleted file mode 100644 index 5cc546301..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/pax-utils/pax-utils_1.1.6.bb +++ /dev/null @@ -1,35 +0,0 @@ -SUMMARY = "Security-focused ELF files checking tool" -DESCRIPTION = "This is a small set of various PaX aware and related \ -utilities for ELF binaries. It can check ELF binary files and running \ -processes for issues that might be relevant when using ELF binaries \ -along with PaX, such as non-PIC code or executable stack and heap." -HOMEPAGE = "http://www.gentoo.org/proj/en/hardened/pax-utils.xml" -LICENSE = "GPLv2+" -LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a" - -SRC_URI = "http://gentoo.osuosl.org/distfiles/pax-utils-${PV}.tar.xz \ -" - -SRC_URI[md5sum] = "96f56a5a10ed50f2448c5ccebd27764f" -SRC_URI[sha256sum] = "f5436c517bea40f7035ec29a6f34034c739b943f2e3a080d76df5dfd7fd41b12" - -RDEPENDS_${PN} += "bash" - -do_configure_prepend() { - touch ${S}/NEWS ${S}/AUTHORS ${S}/ChangeLog ${S}/README -} - -do_install() { - oe_runmake PREFIX=${D}${prefix} DESTDIR=${D} install -} - -BBCLASSEXTEND = "native" - -inherit autotools pkgconfig - -PACKAGECONFIG ??= "${@bb.utils.contains('DISTRO_FEATURES', 'largefile', 'largefile', '', d)} \ -" -PACKAGECONFIG[libcap] = "--with-caps, --without-caps, libcap" -PACKAGECONFIG[libseccomp] = "--with-seccomp, --without-seccomp, libseccomp" -PACKAGECONFIG[largefile] = "--enable-largefile,--disable-largefile,," -PACKAGECONFIG[pyelftools] = "--with-python, --without-python,, pyelftools" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pax-utils/pax-utils_1.2.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/pax-utils/pax-utils_1.2.2.bb new file mode 100644 index 000000000..476fa6f07 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/pax-utils/pax-utils_1.2.2.bb @@ -0,0 +1,38 @@ +SUMMARY = "Security-focused ELF files checking tool" +DESCRIPTION = "This is a small set of various PaX aware and related \ +utilities for ELF binaries. It can check ELF binary files and running \ +processes for issues that might be relevant when using ELF binaries \ +along with PaX, such as non-PIC code or executable stack and heap." 
+HOMEPAGE = "http://www.gentoo.org/proj/en/hardened/pax-utils.xml" +LICENSE = "GPLv2+" +LIC_FILES_CHKSUM = "file://COPYING;md5=eb723b61539feef013de476e68b5c50a" + +SRC_URI = "http://gentoo.osuosl.org/distfiles/pax-utils-${PV}.tar.xz \ +" + +SRC_URI[md5sum] = "a580468318f0ff42edf4a8cd314cc942" +SRC_URI[sha256sum] = "7f4a7f8db6b4743adde7582fa48992ad01776796fcde030683732f56221337d9" + +RDEPENDS_${PN} += "bash" + +export GNULIB_OVERRIDES_WINT_T = "0" + +do_configure_prepend() { + touch ${S}/NEWS ${S}/AUTHORS ${S}/ChangeLog ${S}/README +} + +do_install() { + oe_runmake PREFIX=${D}${prefix} DESTDIR=${D} install +} + +BBCLASSEXTEND = "native" + +inherit autotools pkgconfig + +PACKAGECONFIG ??= "" + +PACKAGECONFIG[libcap] = "--with-caps, --without-caps, libcap" +PACKAGECONFIG[libseccomp] = "--with-seccomp, --without-seccomp, libseccomp" +PACKAGECONFIG[pyelftools] = "--with-python, --without-python,, pyelftools" + +EXTRA_OECONF += "--enable-largefile" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/liburi-perl_1.60.bb b/import-layers/yocto-poky/meta/recipes-devtools/perl/liburi-perl_1.60.bb deleted file mode 100644 index 8809a44fc..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/liburi-perl_1.60.bb +++ /dev/null @@ -1,29 +0,0 @@ -SUMMARY = "Perl module to manipulate and access URI strings" -DESCRIPTION = "This package contains the URI.pm module with friends. \ -The module implements the URI class. URI objects can be used to access \ -and manipulate the various components that make up these strings." - -SECTION = "libs" -LICENSE = "Artistic-1.0 | GPL-1.0+" - -LIC_FILES_CHKSUM = "file://README;beginline=26;endline=30;md5=6c33ae5c87fd1c4897714e122dd9c23d" - -DEPENDS += "perl" - -SRC_URI = "http://www.cpan.org/authors/id/G/GA/GAAS/URI-${PV}.tar.gz" - -SRC_URI[md5sum] = "70f739be8ce28b8baba7c5920ffee4dc" -SRC_URI[sha256sum] = "1f92d3dc64acb8845e9917c945e22b9a5275aeb9ff924eb7873c3b7a5c0d2377" - -S = "${WORKDIR}/URI-${PV}" - -EXTRA_CPANFLAGS = "EXPATLIBPATH=${STAGING_LIBDIR} EXPATINCPATH=${STAGING_INCDIR}" - -inherit cpan - -do_compile() { - export LIBC="$(find ${STAGING_DIR_TARGET}/${base_libdir}/ -name 'libc-*.so')" - cpan_do_compile -} - -BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/liburi-perl_1.71.bb b/import-layers/yocto-poky/meta/recipes-devtools/perl/liburi-perl_1.71.bb new file mode 100644 index 000000000..432803c7b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/liburi-perl_1.71.bb @@ -0,0 +1,30 @@ +SUMMARY = "Perl module to manipulate and access URI strings" +DESCRIPTION = "This package contains the URI.pm module with friends. \ +The module implements the URI class. URI objects can be used to access \ +and manipulate the various components that make up these strings." 
+ +HOMEPAGE = "http://search.cpan.org/dist/URI/" +SECTION = "libs" +LICENSE = "Artistic-1.0 | GPL-1.0+" + +LIC_FILES_CHKSUM = "file://LICENSE;md5=c453e94fae672800f83bc1bd7a38b53f" + +DEPENDS += "perl" + +SRC_URI = "https://downloads.yoctoproject.org/mirror/sources/URI-${PV}.tar.gz" + +SRC_URI[md5sum] = "247c3da29a794f72730e01aa5a715daf" +SRC_URI[sha256sum] = "9c8eca0d7f39e74bbc14706293e653b699238eeb1a7690cc9c136fb8c2644115" + +S = "${WORKDIR}/URI-${PV}" + +EXTRA_CPANFLAGS = "EXPATLIBPATH=${STAGING_LIBDIR} EXPATINCPATH=${STAGING_INCDIR}" + +inherit cpan + +do_compile() { + export LIBC="$(find ${STAGING_DIR_TARGET}/${base_libdir}/ -name 'libc-*.so')" + cpan_do_compile +} + +BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/libxml-parser-perl_2.44.bb b/import-layers/yocto-poky/meta/recipes-devtools/perl/libxml-parser-perl_2.44.bb index 3acd82686..d9bbf713a 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/libxml-parser-perl_2.44.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/libxml-parser-perl_2.44.bb @@ -20,6 +20,8 @@ inherit cpan do_configure_append() { sed 's:--sysroot=.*\(\s\|$\):--sysroot=${STAGING_DIR_TARGET} :g' -i Makefile Expat/Makefile sed 's:^FULL_AR = .*:FULL_AR = ${AR}:g' -i Expat/Makefile + # make sure these two do not build in parallel + sed 's!^$(INST_DYNAMIC):!$(INST_DYNAMIC): $(BOOTSTRAP)!' -i Expat/Makefile } do_compile() { @@ -31,4 +33,4 @@ do_compile_class-native() { cpan_do_compile } -BBCLASSEXTEND="native" +BBCLASSEXTEND="native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-native_5.22.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-native_5.22.1.bb deleted file mode 100644 index 1c21522be..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-native_5.22.1.bb +++ /dev/null @@ -1,128 +0,0 @@ -require perl.inc - -EXTRA_OEMAKE = "-e MAKEFLAGS=" - -SRC_URI += "\ - file://Configure-multilib.patch \ - file://perl-configpm-switch.patch \ - file://native-nopacklist.patch \ - file://native-perlinc.patch \ - file://MM_Unix.pm.patch \ - file://debian/errno_ver.diff \ - file://dynaloaderhack.patch \ - file://perl-PathTools-don-t-filter-out-blib-from-INC.patch \ - file://perl-remove-nm-from-libswanted.patch \ - " - -SRC_URI[md5sum] = "6671e4829cbaf9cecafa9a84f141b0a3" -SRC_URI[sha256sum] = "9e87317d693ce828095204be0d09af8d60b8785533fadea1a82b6f0e071e5c79" - -inherit native - -NATIVE_PACKAGE_PATH_SUFFIX = "/${PN}" - -export LD="${CCLD}" - -do_configure () { - ./Configure \ - -Dcc="${CC}" \ - -Dcflags="${CFLAGS}" \ - -Dldflags="${LDFLAGS}" \ - -Dcf_by="Open Embedded" \ - -Dprefix=${prefix} \ - -Dvendorprefix=${prefix} \ - -Dsiteprefix=${prefix} \ - \ - -Dbin=${STAGING_BINDIR}/${PN} \ - -Dprivlib=${STAGING_LIBDIR}/perl/${PV} \ - -Darchlib=${STAGING_LIBDIR}/perl/${PV} \ - -Dvendorlib=${STAGING_LIBDIR}/perl/vendor_perl/${PV} \ - -Dvendorarch=${STAGING_LIBDIR}/perl/vendor_perl/${PV} \ - -Dsitelib=${STAGING_LIBDIR}/perl/site_perl/${PV} \ - -Dsitearch=${STAGING_LIBDIR}/perl/site_perl/${PV} \ - \ - -Duseshrplib \ - -Dusethreads \ - -Duseithreads \ - -Duselargefiles \ - -Dnoextensions=ODBM_File \ - -Ud_dosuid \ - -Ui_db \ - -Ui_ndbm \ - -Ui_gdbm \ - -Ui_gdbm_ndbm \ - -Ui_gdbmndbm \ - -Di_shadow \ - -Di_syslog \ - -Duseperlio \ - -Dman3ext=3pm \ - -Dsed=/bin/sed \ - -Uafs \ - -Ud_csh \ - -Uusesfio \ - -Uusenm -des -} - -do_install () { - oe_runmake 'DESTDIR=${D}' install - - # We need a hostperl link for building perl - ln -sf perl${PV} 
${D}${bindir}/hostperl - - ln -sf perl ${D}${libdir}/perl5 - - install -d ${D}${libdir}/perl/${PV}/CORE \ - ${D}${datadir}/perl/${PV}/ExtUtils - - # Save native config - install config.sh ${D}${libdir}/perl - install lib/Config.pm ${D}${libdir}/perl/${PV}/ - install lib/ExtUtils/typemap ${D}${libdir}/perl/${PV}/ExtUtils/ - - # perl shared library headers - # reference perl 5.20.0-1 in debian: - # https://packages.debian.org/experimental/i386/perl/filelist - for i in av.h bitcount.h charclass_invlists.h config.h cop.h cv.h dosish.h \ - embed.h embedvar.h EXTERN.h fakesdio.h feature.h form.h git_version.h \ - gv.h handy.h hv_func.h hv.h inline.h INTERN.h intrpvar.h iperlsys.h \ - keywords.h l1_char_class_tab.h malloc_ctl.h metaconfig.h mg_data.h \ - mg.h mg_raw.h mg_vtable.h mydtrace.h nostdio.h opcode.h op.h \ - opnames.h op_reg_common.h overload.h pad.h parser.h patchlevel.h \ - perlapi.h perl.h perlio.h perliol.h perlsdio.h perlvars.h perly.h \ - pp.h pp_proto.h proto.h reentr.h regcharclass.h regcomp.h regexp.h \ - regnodes.h scope.h sv.h thread.h time64_config.h time64.h uconfig.h \ - unicode_constants.h unixish.h utf8.h utfebcdic.h util.h uudmap.h \ - vutil.h warnings.h XSUB.h - do - install $i ${D}${libdir}/perl/${PV}/CORE - done - - # Those wrappers mean that perl installed from sstate (which may change - # path location) works and that in the nativesdk case, the SDK can be - # installed to a different location from the one it was built for. - create_wrapper ${D}${bindir}/perl PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/site_perl/${PV}:${STAGING_LIBDIR}/perl/vendor_perl/${PV}:${STAGING_LIBDIR}/perl/${PV}' - create_wrapper ${D}${bindir}/perl${PV} PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/site_perl/${PV}:${STAGING_LIBDIR}/perl/vendor_perl/${PV}:${STAGING_LIBDIR}/perl/${PV}' - - # Use /usr/bin/env nativeperl for the perl script. - for f in `grep -Il '#! *${bindir}/perl' ${D}/${bindir}/*`; do - sed -i -e 's|${bindir}/perl|/usr/bin/env nativeperl|' $f - done -} - -SYSROOT_PREPROCESS_FUNCS += "perl_sysroot_create_wrapper" - -perl_sysroot_create_wrapper () { - mkdir -p ${SYSROOT_DESTDIR}${bindir} - # Create a wrapper that /usr/bin/env perl will use to get perl-native. - # This MUST live in the normal bindir. 
- cat > ${SYSROOT_DESTDIR}${bindir}/../nativeperl << EOF -#!/bin/sh -realpath=\`readlink -fn \$0\` -exec \`dirname \$realpath\`/perl-native/perl "\$@" -EOF - chmod 0755 ${SYSROOT_DESTDIR}${bindir}/../nativeperl - cat ${SYSROOT_DESTDIR}${bindir}/../nativeperl -} - -# Fix the path in sstate -SSTATE_SCAN_FILES += "*.pm *.pod *.h *.pl *.sh" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-native_5.24.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-native_5.24.1.bb new file mode 100644 index 000000000..e01d11fbe --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-native_5.24.1.bb @@ -0,0 +1,136 @@ +require perl.inc + +# We need gnugrep (for -I) +DEPENDS = "virtual/db-native grep-native" +DEPENDS += "gdbm-native zlib-native" + +EXTRA_OEMAKE = "-e MAKEFLAGS=" + +SRC_URI += "\ + file://Configure-multilib.patch \ + file://perl-configpm-switch.patch \ + file://native-nopacklist.patch \ + file://native-perlinc.patch \ + file://MM_Unix.pm.patch \ + file://debian/errno_ver.diff \ + file://dynaloaderhack.patch \ + file://perl-PathTools-don-t-filter-out-blib-from-INC.patch \ + file://0001-Configure-Remove-fstack-protector-strong-for-native-.patch \ + " + +SRC_URI[md5sum] = "af6a84c7c3e2b8b269c105a5db2f6d53" +SRC_URI[sha256sum] = "03a77bac4505c270f1890ece75afc7d4b555090b41aa41ea478747e23b2afb3f" + +inherit native + +NATIVE_PACKAGE_PATH_SUFFIX = "/${PN}" + +export LD="${CCLD}" + +do_configure () { + ./Configure \ + -Dcc="${CC}" \ + -Dcflags="${CFLAGS}" \ + -Dldflags="${LDFLAGS}" \ + -Dcf_by="Open Embedded" \ + -Dprefix=${prefix} \ + -Dvendorprefix=${prefix} \ + -Dsiteprefix=${prefix} \ + \ + -Dbin=${STAGING_BINDIR}/${PN} \ + -Dprivlib=${STAGING_LIBDIR}/perl/${PV} \ + -Darchlib=${STAGING_LIBDIR}/perl/${PV} \ + -Dvendorlib=${STAGING_LIBDIR}/perl/vendor_perl/${PV} \ + -Dvendorarch=${STAGING_LIBDIR}/perl/vendor_perl/${PV} \ + -Dsitelib=${STAGING_LIBDIR}/perl/site_perl/${PV} \ + -Dsitearch=${STAGING_LIBDIR}/perl/site_perl/${PV} \ + \ + -Duseshrplib \ + -Dusethreads \ + -Duseithreads \ + -Duselargefiles \ + -Dnoextensions=ODBM_File \ + -Ud_dosuid \ + -Ui_db \ + -Ui_ndbm \ + -Ui_gdbm \ + -Ui_gdbm_ndbm \ + -Ui_gdbmndbm \ + -Di_shadow \ + -Di_syslog \ + -Duseperlio \ + -Dman3ext=3pm \ + -Dsed=/bin/sed \ + -Uafs \ + -Ud_csh \ + -Uusesfio \ + -Uusenm -des +} + +do_install () { + oe_runmake 'DESTDIR=${D}' install + + # We need a hostperl link for building perl + ln -sf perl${PV} ${D}${bindir}/hostperl + + ln -sf perl ${D}${libdir}/perl5 + + install -d ${D}${libdir}/perl/${PV}/CORE \ + ${D}${datadir}/perl/${PV}/ExtUtils + + # Save native config + install config.sh ${D}${libdir}/perl + install lib/Config.pm ${D}${libdir}/perl/${PV}/ + install lib/ExtUtils/typemap ${D}${libdir}/perl/${PV}/ExtUtils/ + + # perl shared library headers + # reference perl 5.20.0-1 in debian: + # https://packages.debian.org/experimental/i386/perl/filelist + for i in av.h bitcount.h charclass_invlists.h config.h cop.h cv.h dosish.h \ + embed.h embedvar.h EXTERN.h fakesdio.h feature.h form.h git_version.h \ + gv.h handy.h hv_func.h hv.h inline.h INTERN.h intrpvar.h iperlsys.h \ + keywords.h l1_char_class_tab.h malloc_ctl.h metaconfig.h mg_data.h \ + mg.h mg_raw.h mg_vtable.h mydtrace.h nostdio.h opcode.h op.h \ + opnames.h op_reg_common.h overload.h pad.h parser.h patchlevel.h \ + perlapi.h perl.h perlio.h perliol.h perlsdio.h perlvars.h perly.h \ + pp.h pp_proto.h proto.h reentr.h regcharclass.h regcomp.h regexp.h \ + regnodes.h scope.h sv.h thread.h time64_config.h time64.h 
uconfig.h \ + unicode_constants.h unixish.h utf8.h utfebcdic.h util.h uudmap.h \ + vutil.h warnings.h XSUB.h + do + install $i ${D}${libdir}/perl/${PV}/CORE + done + + # Those wrappers mean that perl installed from sstate (which may change + # path location) works and that in the nativesdk case, the SDK can be + # installed to a different location from the one it was built for. + create_wrapper ${D}${bindir}/perl PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/site_perl/${PV}:${STAGING_LIBDIR}/perl/vendor_perl/${PV}:${STAGING_LIBDIR}/perl/${PV}' + create_wrapper ${D}${bindir}/perl${PV} PERL5LIB='$PERL5LIB:${STAGING_LIBDIR}/perl/site_perl/${PV}:${STAGING_LIBDIR}/perl/vendor_perl/${PV}:${STAGING_LIBDIR}/perl/${PV}' + + # Use /usr/bin/env nativeperl for the perl script. + for f in `grep -Il '#! *${bindir}/perl' ${D}/${bindir}/*`; do + sed -i -e 's|${bindir}/perl|/usr/bin/env nativeperl|' $f + done + + # The packlist is large with hardcoded paths meaning it needs relocating + # so just remove it. + rm ${D}${libdir}/perl/${PV}/.packlist +} + +SYSROOT_PREPROCESS_FUNCS += "perl_sysroot_create_wrapper" + +perl_sysroot_create_wrapper () { + mkdir -p ${SYSROOT_DESTDIR}${bindir} + # Create a wrapper that /usr/bin/env perl will use to get perl-native. + # This MUST live in the normal bindir. + cat > ${SYSROOT_DESTDIR}${bindir}/../nativeperl << EOF +#!/bin/sh +realpath=\`readlink -fn \$0\` +exec \`dirname \$realpath\`/perl-native/perl "\$@" +EOF + chmod 0755 ${SYSROOT_DESTDIR}${bindir}/../nativeperl + cat ${SYSROOT_DESTDIR}${bindir}/../nativeperl +} + +# Fix the path in sstate +SSTATE_SCAN_FILES += "*.pm *.pod *.h *.pl *.sh" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-ptest.inc b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-ptest.inc index d136c5c0e..1f549af1b 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-ptest.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-ptest.inc @@ -20,10 +20,13 @@ do_install_ptest () { -e "s,${STAGING_BINDIR_NATIVE}/perl-native/,${bindir}/,g" \ -e "s,${STAGING_BINDIR_NATIVE}/,,g" \ -e "s,${STAGING_BINDIR_TOOLCHAIN}/${TARGET_PREFIX},${bindir},g" \ - ${D}${PTEST_PATH}/lib/Config.pm + ${D}${PTEST_PATH}/lib/Config.pm \ + ${D}${PTEST_PATH}/cpan/podlators/scripts/pod2man \ + ${D}${PTEST_PATH}/cpan/podlators/scripts/pod2text ln -sf ${bindir}/perl ${D}${PTEST_PATH}/t/perl - + # Remove plan9 related stuff + rm -rf ${D}${PTEST_PATH}/plan9 ${D}${PTEST_PATH}/README.plan9 ${D}${PTEST_PATH}/pod/perlplan9.pod } python populate_packages_prepend() { @@ -36,3 +39,7 @@ python populate_packages_prepend() { } RDEPENDS_${PN}-ptest += "${PN}-modules ${PN}-doc ${PN}-misc sed libssp" + +# The perl-ptest package contains Perl internal modules and generating file +# dependencies for it causes problems. +SKIP_FILEDEPS_${PN}-ptest = '1' diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-rdepends_5.22.1.inc b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-rdepends_5.22.1.inc deleted file mode 100644 index 2c497fe6a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-rdepends_5.22.1.inc +++ /dev/null @@ -1,2563 +0,0 @@ -# To create/update the perl-rdepends_${PV}.inc use this piece of ugly script (modified for your arch/paths etc): - -#jiahongxu:5.20.0-r1$ pwd -#/home/jiahongxu/yocto/build-20140618-perl/tmp/work/i586-poky-linux/perl/5.20.0-r1 - -#1 cp -r packages-split packages-split.new && cd packages-split.new -#2 find . 
-name \*.pm | xargs sed -i '/^=head/,/^=cut/d' -#3 egrep -r "^\s*(\ */+= \"perl-module-/g;s/CPANPLUS::.*/cpanplus/g;s/CPAN::.*/cpan/g;s/::/-/g;s/ [^+\"].*//g;s/_/-/g;s/\.pl\"$/\"/;s/\"\?\$/\"/;s/(//;" | tr [:upper:] [:lower:] -#| awk '{if ($3 != "\x22"$1"\x22"){ print $0}}' -#| grep -v -e "\-vms\-" -e module-5 -e "^$" -e "\\$" -e your -e tk -e autoperl -e html -e http -e parse-cpan -e perl-ostype -e ndbm-file -e module-mac -e fcgi -e lwp -e dbd -e dbix -#| sort -u -#| sed 's/^/RDEPENDS_/;s/perl-module-/${PN}-module-/g;s/module-\(module-\)/\1/g;s/\(module-load\)-conditional/\1/g;s/encode-configlocal/&-pm/;' -#| egrep -wv '=>|module-a|module-apache.?|module-apr|module-authen-sasl|module-b-asmdata|module-convert-ebcdic|module-devel-size|module-digest-perl-md5|module-dumpvalue|module-extutils-constant-aaargh56hash|module-extutils-xssymset|module-file-bsdglob|module-for|module-it|module-io-string|module-ipc-system-simple|module-lexical|module-local-lib|metadata|module-modperl-util|module-pluggable-object|module-test-builder-io-scalar|module-text-unidecode|module-win32|objects\sload|syscall.ph|systeminfo.ph|%s' > /tmp/perl-rdepends - -RDEPENDS_perl-misc += "perl perl-modules" -RDEPENDS_${PN}-pod += "perl" - -# Some additional dependencies that the above doesn't manage to figure out -RDEPENDS_${PN}-module-file-spec += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-math-bigint += "${PN}-module-math-bigint-calc" -RDEPENDS_${PN}-module-thread-queue += "${PN}-module-attributes" -RDEPENDS_${PN}-module-overload += "${PN}-module-overloading" - - -# Depends list -# copy contents of /tmp/perl-rdepends in this file -RDEPENDS_${PN}-module-anydbm-file += "${PN}-module-strict" -RDEPENDS_${PN}-module-anydbm-file += "${PN}-module-warnings" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-carp" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-config" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-constant" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-cpan" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-cwd" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-data-dumper" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-file-find" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-file-spec-functions" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-getopt-std" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-if" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-net-ping" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-strict" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-user-pwent" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-vars" -RDEPENDS_${PN}-module-app-cpan += "${PN}-module-warnings" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-app-prove-state" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-base" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-carp" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-constant" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-getopt-long" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-strict" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-tap-harness" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-tap-harness-env" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-app-prove += "${PN}-module-warnings" -RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-app-prove-state-result" -RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-base" -RDEPENDS_${PN}-module-app-prove-state 
+= "${PN}-module-carp" -RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-constant" -RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-file-find" -RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-strict" -RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-tap-parser-yamlish-reader" -RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-tap-parser-yamlish-writer" -RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-warnings" -RDEPENDS_${PN}-module-app-prove-state-result += "${PN}-module-app-prove-state-result-test" -RDEPENDS_${PN}-module-app-prove-state-result += "${PN}-module-carp" -RDEPENDS_${PN}-module-app-prove-state-result += "${PN}-module-constant" -RDEPENDS_${PN}-module-app-prove-state-result += "${PN}-module-strict" -RDEPENDS_${PN}-module-app-prove-state-result += "${PN}-module-warnings" -RDEPENDS_${PN}-module-app-prove-state-result-test += "${PN}-module-strict" -RDEPENDS_${PN}-module-app-prove-state-result-test += "${PN}-module-warnings" -RDEPENDS_${PN}-module-archive-tar-constant += "${PN}-module-constant" -RDEPENDS_${PN}-module-archive-tar-constant += "${PN}-module-exporter" -RDEPENDS_${PN}-module-archive-tar-constant += "${PN}-module-io-compress-bzip2" -RDEPENDS_${PN}-module-archive-tar-constant += "${PN}-module-time-local" -RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-archive-tar" -RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-archive-tar-constant" -RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-carp" -RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-io-file" -RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-strict" -RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-vars" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-archive-tar-constant" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-archive-tar-file" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-carp" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-config" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-cwd" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-exporter" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-file-path" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-io-file" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-io-zlib" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-strict" -RDEPENDS_${PN}-module-archive-tar += "${PN}-module-vars" -RDEPENDS_${PN}-module-arybase += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-attribute-handlers += "${PN}-module-carp" -RDEPENDS_${PN}-module-attribute-handlers += "${PN}-module-strict" -RDEPENDS_${PN}-module-attribute-handlers += "${PN}-module-vars" -RDEPENDS_${PN}-module-attribute-handlers += "${PN}-module-warnings" -RDEPENDS_${PN}-module-attributes += "${PN}-module-carp" -RDEPENDS_${PN}-module-attributes += "${PN}-module-exporter" -RDEPENDS_${PN}-module-attributes += "${PN}-module-strict" -RDEPENDS_${PN}-module-attributes += "${PN}-module-warnings" -RDEPENDS_${PN}-module-attributes += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-carp" -RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-constant" 
-RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-fatal" -RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-overload" -RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-strict" -RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-warnings" -RDEPENDS_${PN}-module-autodie-exception-system += "${PN}-module-carp" -RDEPENDS_${PN}-module-autodie-exception-system += "${PN}-module-parent" -RDEPENDS_${PN}-module-autodie-exception-system += "${PN}-module-strict" -RDEPENDS_${PN}-module-autodie-exception-system += "${PN}-module-warnings" -RDEPENDS_${PN}-module-autodie-hints += "${PN}-module-b" -RDEPENDS_${PN}-module-autodie-hints += "${PN}-module-carp" -RDEPENDS_${PN}-module-autodie-hints += "${PN}-module-constant" -RDEPENDS_${PN}-module-autodie-hints += "${PN}-module-strict" -RDEPENDS_${PN}-module-autodie-hints += "${PN}-module-warnings" -RDEPENDS_${PN}-module-autodie += "${PN}-module-carp" -RDEPENDS_${PN}-module-autodie += "${PN}-module-constant" -RDEPENDS_${PN}-module-autodie += "${PN}-module-lib" -RDEPENDS_${PN}-module-autodie += "${PN}-module-parent" -RDEPENDS_${PN}-module-autodie += "${PN}-module-strict" -RDEPENDS_${PN}-module-autodie += "${PN}-module-warnings" -RDEPENDS_${PN}-module-autodie-scope-guard += "${PN}-module-strict" -RDEPENDS_${PN}-module-autodie-scope-guard += "${PN}-module-warnings" -RDEPENDS_${PN}-module-autodie-scope-guardstack += "${PN}-module-autodie-scope-guard" -RDEPENDS_${PN}-module-autodie-scope-guardstack += "${PN}-module-strict" -RDEPENDS_${PN}-module-autodie-scope-guardstack += "${PN}-module-warnings" -RDEPENDS_${PN}-module-autodie-scopeutil += "${PN}-module-autodie-scope-guardstack" -RDEPENDS_${PN}-module-autodie-scopeutil += "${PN}-module-exporter" -RDEPENDS_${PN}-module-autodie-scopeutil += "${PN}-module-strict" -RDEPENDS_${PN}-module-autodie-scopeutil += "${PN}-module-warnings" -RDEPENDS_${PN}-module-autodie-skip += "${PN}-module-strict" -RDEPENDS_${PN}-module-autodie-skip += "${PN}-module-warnings" -RDEPENDS_${PN}-module-autoloader += "${PN}-module-carp" -RDEPENDS_${PN}-module-autoloader += "${PN}-module-strict" -RDEPENDS_${PN}-module-autosplit += "${PN}-module-carp" -RDEPENDS_${PN}-module-autosplit += "${PN}-module-config" -RDEPENDS_${PN}-module-autosplit += "${PN}-module-exporter" -RDEPENDS_${PN}-module-autosplit += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-autosplit += "${PN}-module-file-path" -RDEPENDS_${PN}-module-autosplit += "${PN}-module-file-spec-functions" -RDEPENDS_${PN}-module-autosplit += "${PN}-module-strict" -RDEPENDS_${PN}-module-autouse += "${PN}-module-carp" -RDEPENDS_${PN}-module-base += "${PN}-module-carp" -RDEPENDS_${PN}-module-base += "${PN}-module-strict" -RDEPENDS_${PN}-module-base += "${PN}-module-vars" -RDEPENDS_${PN}-module-b-concise += "${PN}-module-b" -RDEPENDS_${PN}-module-b-concise += "${PN}-module-b-op-private" -RDEPENDS_${PN}-module-b-concise += "${PN}-module-config" -RDEPENDS_${PN}-module-b-concise += "${PN}-module-exporter" -RDEPENDS_${PN}-module-b-concise += "${PN}-module-strict" -RDEPENDS_${PN}-module-b-concise += "${PN}-module-warnings" -RDEPENDS_${PN}-module-b-debug += "${PN}-module-b" -RDEPENDS_${PN}-module-b-debug += "${PN}-module-config" -RDEPENDS_${PN}-module-b-debug += "${PN}-module-strict" -RDEPENDS_${PN}-module-b-deparse += "${PN}-module-b" -RDEPENDS_${PN}-module-b-deparse += "${PN}-module-carp" -RDEPENDS_${PN}-module-b-deparse += "${PN}-module-data-dumper" -RDEPENDS_${PN}-module-b-deparse += "${PN}-module-feature" 
-RDEPENDS_${PN}-module-b-deparse += "${PN}-module-overloading" -RDEPENDS_${PN}-module-b-deparse += "${PN}-module-re" -RDEPENDS_${PN}-module-b-deparse += "${PN}-module-strict" -RDEPENDS_${PN}-module-b-deparse += "${PN}-module-vars" -RDEPENDS_${PN}-module-b-deparse += "${PN}-module-warnings" -RDEPENDS_${PN}-module-benchmark += "${PN}-module-carp" -RDEPENDS_${PN}-module-benchmark += "${PN}-module-exporter" -RDEPENDS_${PN}-module-benchmark += "${PN}-module-strict" -RDEPENDS_${PN}-module-bigint += "${PN}-module-carp" -RDEPENDS_${PN}-module-bigint += "${PN}-module-constant" -RDEPENDS_${PN}-module-bigint += "${PN}-module-exporter" -RDEPENDS_${PN}-module-bigint += "${PN}-module-math-bigint" -RDEPENDS_${PN}-module-bigint += "${PN}-module-math-bigint-trace" -RDEPENDS_${PN}-module-bigint += "${PN}-module-overload" -RDEPENDS_${PN}-module-bigint += "${PN}-module-strict" -RDEPENDS_${PN}-module-bigint += "${PN}-module-vars" -RDEPENDS_${PN}-module-bignum += "${PN}-module-bigint" -RDEPENDS_${PN}-module-bignum += "${PN}-module-carp" -RDEPENDS_${PN}-module-bignum += "${PN}-module-exporter" -RDEPENDS_${PN}-module-bignum += "${PN}-module-math-bigfloat" -RDEPENDS_${PN}-module-bignum += "${PN}-module-math-bigfloat-trace" -RDEPENDS_${PN}-module-bignum += "${PN}-module-math-bigint" -RDEPENDS_${PN}-module-bignum += "${PN}-module-math-bigint-trace" -RDEPENDS_${PN}-module-bignum += "${PN}-module-overload" -RDEPENDS_${PN}-module-bignum += "${PN}-module-strict" -RDEPENDS_${PN}-module-bignum += "${PN}-module-vars" -RDEPENDS_${PN}-module-bigrat += "${PN}-module-bigint" -RDEPENDS_${PN}-module-bigrat += "${PN}-module-carp" -RDEPENDS_${PN}-module-bigrat += "${PN}-module-exporter" -RDEPENDS_${PN}-module-bigrat += "${PN}-module-math-bigfloat" -RDEPENDS_${PN}-module-bigrat += "${PN}-module-math-bigint" -RDEPENDS_${PN}-module-bigrat += "${PN}-module-math-bigint-trace" -RDEPENDS_${PN}-module-bigrat += "${PN}-module-math-bigrat" -RDEPENDS_${PN}-module-bigrat += "${PN}-module-overload" -RDEPENDS_${PN}-module-bigrat += "${PN}-module-strict" -RDEPENDS_${PN}-module-bigrat += "${PN}-module-vars" -RDEPENDS_${PN}-module-blib += "${PN}-module-cwd" -RDEPENDS_${PN}-module-blib += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-blib += "${PN}-module-vars" -RDEPENDS_${PN}-module-b += "${PN}-module-exporter" -RDEPENDS_${PN}-module-b += "${PN}-module-strict" -RDEPENDS_${PN}-module-b += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-b-showlex += "${PN}-module-b" -RDEPENDS_${PN}-module-b-showlex += "${PN}-module-b-concise" -RDEPENDS_${PN}-module-b-showlex += "${PN}-module-b-terse" -RDEPENDS_${PN}-module-b-showlex += "${PN}-module-strict" -RDEPENDS_${PN}-module-b-terse += "${PN}-module-b" -RDEPENDS_${PN}-module-b-terse += "${PN}-module-b-concise" -RDEPENDS_${PN}-module-b-terse += "${PN}-module-carp" -RDEPENDS_${PN}-module-b-terse += "${PN}-module-strict" -RDEPENDS_${PN}-module-b-xref += "${PN}-module-b" -RDEPENDS_${PN}-module-b-xref += "${PN}-module-config" -RDEPENDS_${PN}-module-b-xref += "${PN}-module-strict" -RDEPENDS_${PN}-module-bytes += "${PN}-module-bytes-heavy" -RDEPENDS_${PN}-module-bytes += "${PN}-module-carp" -RDEPENDS_${PN}-module-carp-heavy += "${PN}-module-carp" -RDEPENDS_${PN}-module-carp += "${PN}-module-exporter" -RDEPENDS_${PN}-module-carp += "${PN}-module-strict" -RDEPENDS_${PN}-module-carp += "${PN}-module-warnings" -RDEPENDS_${PN}-module-charnames += "${PN}-module-bytes" -RDEPENDS_${PN}-module-charnames += "${PN}-module-carp" -RDEPENDS_${PN}-module-charnames += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-charnames += 
"${PN}-module-re" -RDEPENDS_${PN}-module-charnames += "${PN}-module-strict" -RDEPENDS_${PN}-module-charnames += "${PN}-module-warnings" -RDEPENDS_${PN}-module-class-struct += "${PN}-module-carp" -RDEPENDS_${PN}-module-class-struct += "${PN}-module-exporter" -RDEPENDS_${PN}-module-class-struct += "${PN}-module-strict" -RDEPENDS_${PN}-module-class-struct += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-bytes " -RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-carp " -RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-constant" -RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-exporter" -RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-strict " -RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-warnings " -RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-bytes " -RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-carp " -RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-constant" -RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-exporter" -RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-strict " -RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-warnings " -RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-bytes " -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-carp " -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-compress-raw-zlib" -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-constant" -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-exporter" -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-io-compress-gzip" -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-io-compress-gzip-constants" -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-io-handle " -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-io-uncompress-gunzip" -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-strict " -RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-warnings " -RDEPENDS_${PN}-module-config-extensions += "${PN}-module-config" -RDEPENDS_${PN}-module-config-extensions += "${PN}-module-exporter" -RDEPENDS_${PN}-module-config-extensions += "${PN}-module-strict" -RDEPENDS_${PN}-module-config-extensions += "${PN}-module-vars" -RDEPENDS_${PN}-module-config-perl-v += "${PN}-module-config" -RDEPENDS_${PN}-module-config-perl-v += "${PN}-module-exporter" -RDEPENDS_${PN}-module-config-perl-v += "${PN}-module-strict" -RDEPENDS_${PN}-module-config-perl-v += "${PN}-module-vars" -RDEPENDS_${PN}-module-config-perl-v += "${PN}-module-warnings" -RDEPENDS_${PN}-module-constant += "${PN}-module-carp" -RDEPENDS_${PN}-module-constant += "${PN}-module-strict" -RDEPENDS_${PN}-module-constant += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-corelist += "${PN}-module-list-util" -RDEPENDS_${PN}-module-corelist += "${PN}-module-strict" -RDEPENDS_${PN}-module-corelist += "${PN}-module-vars" -RDEPENDS_${PN}-module-corelist += "${PN}-module-version" -RDEPENDS_${PN}-module-corelist += "${PN}-module-warnings" -RDEPENDS_${PN}-module-cpan += "${PN}-module-b" -RDEPENDS_${PN}-module-cpan += "${PN}-module-carp" -RDEPENDS_${PN}-module-cpan += 
"${PN}-module-config" -RDEPENDS_${PN}-module-cpan += "${PN}-module-cwd" -RDEPENDS_${PN}-module-cpan += "${PN}-module-data-dumper" -RDEPENDS_${PN}-module-cpan += "${PN}-module-dirhandle" -RDEPENDS_${PN}-module-cpan += "${PN}-module-exporter" -RDEPENDS_${PN}-module-cpan += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-cpan += "${PN}-module-extutils-manifest" -RDEPENDS_${PN}-module-cpan += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-cpan += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-cpan += "${PN}-module-file-copy" -RDEPENDS_${PN}-module-cpan += "${PN}-module-file-find" -RDEPENDS_${PN}-module-cpan += "${PN}-module-filehandle" -RDEPENDS_${PN}-module-cpan += "${PN}-module-file-path" -RDEPENDS_${PN}-module-cpan += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-cpan += "${PN}-module-lib" -RDEPENDS_${PN}-module-cpan += "${PN}-module-net-ping" -RDEPENDS_${PN}-module-cpan += "${PN}-module-overload" -RDEPENDS_${PN}-module-cpan += "${PN}-module-posix" -RDEPENDS_${PN}-module-cpan += "${PN}-module-safe" -RDEPENDS_${PN}-module-cpan += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-cpan += "${PN}-module-strict" -RDEPENDS_${PN}-module-cpan += "${PN}-module-sys-hostname" -RDEPENDS_${PN}-module-cpan += "${PN}-module-term-readline" -RDEPENDS_${PN}-module-cpan += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-cpan += "${PN}-module-text-wrap" -RDEPENDS_${PN}-module-cpan += "${PN}-module-time-local" -RDEPENDS_${PN}-module-cpan += "${PN}-module-vars" -RDEPENDS_${PN}-module-cpan += "${PN}-module-warnings" -RDEPENDS_${PN}-module-cwd += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-cwd += "${PN}-module-exporter" -RDEPENDS_${PN}-module-cwd += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-cwd += "${PN}-module-strict" -RDEPENDS_${PN}-module-cwd += "${PN}-module-vars" -RDEPENDS_${PN}-module-cwd += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-data-dumper += "${PN}-module-b-deparse" -RDEPENDS_${PN}-module-data-dumper += "${PN}-module-carp" -RDEPENDS_${PN}-module-data-dumper += "${PN}-module-config" -RDEPENDS_${PN}-module-data-dumper += "${PN}-module-constant" -RDEPENDS_${PN}-module-data-dumper += "${PN}-module-exporter" -RDEPENDS_${PN}-module-data-dumper += "${PN}-module-overload" -RDEPENDS_${PN}-module-data-dumper += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-data-dumper += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-dbm-filter-compress += "${PN}-module-carp" -RDEPENDS_${PN}-module-dbm-filter-compress += "${PN}-module-strict" -RDEPENDS_${PN}-module-dbm-filter-compress += "${PN}-module-warnings" -RDEPENDS_${PN}-module-dbm-filter-encode += "${PN}-module-carp" -RDEPENDS_${PN}-module-dbm-filter-encode += "${PN}-module-strict" -RDEPENDS_${PN}-module-dbm-filter-encode += "${PN}-module-warnings" -RDEPENDS_${PN}-module-dbm-filter-int32 += "${PN}-module-strict" -RDEPENDS_${PN}-module-dbm-filter-int32 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-dbm-filter-null += "${PN}-module-strict" -RDEPENDS_${PN}-module-dbm-filter-null += "${PN}-module-warnings" -RDEPENDS_${PN}-module-dbm-filter += "${PN}-module-carp" -RDEPENDS_${PN}-module-dbm-filter += "${PN}-module-strict" -RDEPENDS_${PN}-module-dbm-filter += "${PN}-module-warnings" -RDEPENDS_${PN}-module-dbm-filter-utf8 += "${PN}-module-carp" -RDEPENDS_${PN}-module-dbm-filter-utf8 += "${PN}-module-strict" -RDEPENDS_${PN}-module-dbm-filter-utf8 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-db += "${PN}-module-strict" -RDEPENDS_${PN}-module-deprecate += "${PN}-module-carp" -RDEPENDS_${PN}-module-deprecate += "${PN}-module-strict" 
-RDEPENDS_${PN}-module-deprecate += "${PN}-module-warnings" -RDEPENDS_${PN}-module-devel-peek += "${PN}-module-exporter" -RDEPENDS_${PN}-module-devel-peek += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-devel-ppport += "${PN}-module-file-find" -RDEPENDS_${PN}-module-devel-ppport += "${PN}-module-getopt-long" -RDEPENDS_${PN}-module-devel-ppport += "${PN}-module-strict" -RDEPENDS_${PN}-module-devel-ppport += "${PN}-module-vars" -RDEPENDS_${PN}-module-devel-selfstubber += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-devel-selfstubber += "${PN}-module-selfloader" -RDEPENDS_${PN}-module-diagnostics += "${PN}-module-carp" -RDEPENDS_${PN}-module-diagnostics += "${PN}-module-config" -RDEPENDS_${PN}-module-diagnostics += "${PN}-module-getopt-std" -RDEPENDS_${PN}-module-diagnostics += "${PN}-module-strict" -RDEPENDS_${PN}-module-diagnostics += "${PN}-module-text-tabs" -RDEPENDS_${PN}-module-digest-base += "${PN}-module-carp" -RDEPENDS_${PN}-module-digest-base += "${PN}-module-mime-base64" -RDEPENDS_${PN}-module-digest-base += "${PN}-module-strict" -RDEPENDS_${PN}-module-digest-base += "${PN}-module-vars" -RDEPENDS_${PN}-module-digest-file += "${PN}-module-carp" -RDEPENDS_${PN}-module-digest-file += "${PN}-module-digest" -RDEPENDS_${PN}-module-digest-file += "${PN}-module-exporter" -RDEPENDS_${PN}-module-digest-file += "${PN}-module-strict" -RDEPENDS_${PN}-module-digest-file += "${PN}-module-vars" -RDEPENDS_${PN}-module-digest-md5 += "${PN}-module-digest-base" -RDEPENDS_${PN}-module-digest-md5 += "${PN}-module-exporter" -RDEPENDS_${PN}-module-digest-md5 += "${PN}-module-strict" -RDEPENDS_${PN}-module-digest-md5 += "${PN}-module-vars" -RDEPENDS_${PN}-module-digest-md5 += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-digest += "${PN}-module-strict" -RDEPENDS_${PN}-module-digest += "${PN}-module-vars" -RDEPENDS_${PN}-module-digest-sha += "${PN}-module-carp" -RDEPENDS_${PN}-module-digest-sha += "${PN}-module-digest-base" -RDEPENDS_${PN}-module-digest-sha += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-digest-sha += "${PN}-module-exporter" -RDEPENDS_${PN}-module-digest-sha += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-digest-sha += "${PN}-module-integer" -RDEPENDS_${PN}-module-digest-sha += "${PN}-module-strict" -RDEPENDS_${PN}-module-digest-sha += "${PN}-module-vars" -RDEPENDS_${PN}-module-digest-sha += "${PN}-module-warnings" -RDEPENDS_${PN}-module-dirhandle += "${PN}-module-carp" -RDEPENDS_${PN}-module-dirhandle += "${PN}-module-symbol" -RDEPENDS_${PN}-module-dynaloader += "${PN}-module-carp" -RDEPENDS_${PN}-module-dynaloader += "${PN}-module-config" -RDEPENDS_${PN}-module-encode-alias += "${PN}-module-constant" -RDEPENDS_${PN}-module-encode-alias += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-alias += "${PN}-module-exporter" -RDEPENDS_${PN}-module-encode-alias += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-alias += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-byte += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-byte += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-byte += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-byte += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-encode-cjkconstants += "${PN}-module-carp" -RDEPENDS_${PN}-module-encode-cjkconstants += "${PN}-module-exporter" -RDEPENDS_${PN}-module-encode-cjkconstants += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-cjkconstants += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-parent" 
-RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-utf8" -RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-vars" -RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-cn += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-cn += "${PN}-module-encode-cn-hz" -RDEPENDS_${PN}-module-encode-cn += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-cn += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-cn += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-encode-config += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-config += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-ebcdic += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-ebcdic += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-ebcdic += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-ebcdic += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-carp" -RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-constant" -RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-exporter" -RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-overload" -RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-encoding += "${PN}-module-carp" -RDEPENDS_${PN}-module-encode-encoding += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-encoding += "${PN}-module-encode-mime-name" -RDEPENDS_${PN}-module-encode-encoding += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-encoding += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-carp" -RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-parent" -RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-utf8" -RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-vars" -RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-guess += "${PN}-module-bytes" -RDEPENDS_${PN}-module-encode-guess += "${PN}-module-carp" -RDEPENDS_${PN}-module-encode-guess += "${PN}-module-constant" -RDEPENDS_${PN}-module-encode-guess += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-guess += "${PN}-module-encode-unicode" -RDEPENDS_${PN}-module-encode-guess += "${PN}-module-parent" -RDEPENDS_${PN}-module-encode-guess += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-guess += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-jp-h2z += "${PN}-module-encode-cjkconstants" -RDEPENDS_${PN}-module-encode-jp-h2z += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-jp-h2z += "${PN}-module-vars" -RDEPENDS_${PN}-module-encode-jp-h2z += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-bytes" -RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-encode-cjkconstants" -RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-encode-jp-h2z" -RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-parent" -RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-jp += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-jp += "${PN}-module-encode-jp-jis7" -RDEPENDS_${PN}-module-encode-jp += "${PN}-module-strict" 
-RDEPENDS_${PN}-module-encode-jp += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-jp += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-encode-kr-2022-kr += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-kr-2022-kr += "${PN}-module-encode-cjkconstants" -RDEPENDS_${PN}-module-encode-kr-2022-kr += "${PN}-module-parent" -RDEPENDS_${PN}-module-encode-kr-2022-kr += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-kr-2022-kr += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-kr += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-kr += "${PN}-module-encode-kr-2022-kr" -RDEPENDS_${PN}-module-encode-kr += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-kr += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-kr += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-encode-mime-header-iso-2022-jp += "${PN}-module-constant" -RDEPENDS_${PN}-module-encode-mime-header-iso-2022-jp += "${PN}-module-encode-cjkconstants" -RDEPENDS_${PN}-module-encode-mime-header-iso-2022-jp += "${PN}-module-parent" -RDEPENDS_${PN}-module-encode-mime-header-iso-2022-jp += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-mime-header-iso-2022-jp += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-bytes" -RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-carp" -RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-constant" -RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-mime-base64" -RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-parent" -RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-utf8" -RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-mime-name += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-mime-name += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode += "${PN}-module-bytes" -RDEPENDS_${PN}-module-encode += "${PN}-module-carp" -RDEPENDS_${PN}-module-encode += "${PN}-module-constant" -RDEPENDS_${PN}-module-encode += "${PN}-module-encode-alias" -RDEPENDS_${PN}-module-encode += "${PN}-module-encode-config" -RDEPENDS_${PN}-module-encode += "${PN}-module-encode-configlocal-pm" -RDEPENDS_${PN}-module-encode += "${PN}-module-encode-encoding" -RDEPENDS_${PN}-module-encode += "${PN}-module-exporter" -RDEPENDS_${PN}-module-encode += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-encode-symbol += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-symbol += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-symbol += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-symbol += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-encode-tw += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-tw += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-tw += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-tw += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-encode-unicode += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-unicode += "${PN}-module-parent" -RDEPENDS_${PN}-module-encode-unicode += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-unicode += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encode-unicode += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-encode" -RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-mime-base64" 
-RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-parent" -RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-re" -RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-strict" -RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encoding += "${PN}-module-carp" -RDEPENDS_${PN}-module-encoding += "${PN}-module-config" -RDEPENDS_${PN}-module-encoding += "${PN}-module-constant" -RDEPENDS_${PN}-module-encoding += "${PN}-module-encode" -RDEPENDS_${PN}-module-encoding += "${PN}-module-filter-util-call" -RDEPENDS_${PN}-module-encoding += "${PN}-module-i18n-langinfo" -RDEPENDS_${PN}-module-encoding += "${PN}-module-strict" -RDEPENDS_${PN}-module-encoding += "${PN}-module-utf8" -RDEPENDS_${PN}-module-encoding += "${PN}-module-warnings" -RDEPENDS_${PN}-module-encoding-warnings += "${PN}-module-carp" -RDEPENDS_${PN}-module-encoding-warnings += "${PN}-module-strict" -RDEPENDS_${PN}-module-english += "${PN}-module-carp " -RDEPENDS_${PN}-module-english += "${PN}-module-exporter" -RDEPENDS_${PN}-module-env += "${PN}-module-config" -RDEPENDS_${PN}-module-env += "${PN}-module-tie-array" -RDEPENDS_${PN}-module-errno += "${PN}-module-carp" -RDEPENDS_${PN}-module-errno += "${PN}-module-exporter" -RDEPENDS_${PN}-module-errno += "${PN}-module-strict" -RDEPENDS_${PN}-module-experimental += "${PN}-module-carp" -RDEPENDS_${PN}-module-experimental += "${PN}-module-feature" -RDEPENDS_${PN}-module-experimental += "${PN}-module-strict" -RDEPENDS_${PN}-module-experimental += "${PN}-module-version" -RDEPENDS_${PN}-module-experimental += "${PN}-module-warnings" -RDEPENDS_${PN}-module-exporter-heavy += "${PN}-module-carp" -RDEPENDS_${PN}-module-exporter-heavy += "${PN}-module-exporter" -RDEPENDS_${PN}-module-exporter-heavy += "${PN}-module-strict" -RDEPENDS_${PN}-module-exporter += "${PN}-module-exporter-heavy" -RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-cwd" -RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-extutils-mksymlists" -RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-file-temp" -RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-ipc-cmd" -RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-extutils-cbuilder += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-cbuilder += "${PN}-module-file-path" -RDEPENDS_${PN}-module-extutils-cbuilder += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-cbuilder += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-aix += "${PN}-module-extutils-cbuilder-platform-unix" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-aix += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-aix += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-aix += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-extutils-cbuilder-platform-unix" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-file-spec" 
-RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-cygwin += "${PN}-module-extutils-cbuilder-platform-unix" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-cygwin += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-cygwin += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-cygwin += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-darwin += "${PN}-module-extutils-cbuilder-platform-unix" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-darwin += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-darwin += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-dec-osf += "${PN}-module-extutils-cbuilder-platform-unix" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-dec-osf += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-dec-osf += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-dec-osf += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-os2 += "${PN}-module-extutils-cbuilder-platform-unix" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-os2 += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-os2 += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-unix += "${PN}-module-extutils-cbuilder-base" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-unix += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-unix += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-extutils-cbuilder-base" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-file-spec-functions" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-extutils-cbuilder-base" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-io-file" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-extutils-command" -RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-extutils-install" -RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-getopt-long" -RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-test-harness" -RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-command += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-command += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-command += "${PN}-module-file-copy" -RDEPENDS_${PN}-module-extutils-command += "${PN}-module-file-find" 
-RDEPENDS_${PN}-module-extutils-command += "${PN}-module-file-path" -RDEPENDS_${PN}-module-extutils-command += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-command += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-command += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-constant" -RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-extutils-constant-utils" -RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-text-wrap" -RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-extutils-constant-proxysubs" -RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-extutils-constant-utils" -RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-extutils-constant-xs" -RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-filehandle" -RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-constant-proxysubs += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-constant-proxysubs += "${PN}-module-extutils-constant-utils" -RDEPENDS_${PN}-module-extutils-constant-proxysubs += "${PN}-module-extutils-constant-xs" -RDEPENDS_${PN}-module-extutils-constant-proxysubs += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-constant-proxysubs += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-constant-utils += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-constant-utils += "${PN}-module-constant" -RDEPENDS_${PN}-module-extutils-constant-utils += "${PN}-module-posix" -RDEPENDS_${PN}-module-extutils-constant-utils += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-constant-utils += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-data-dumper" -RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-extutils-constant" -RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-extutils-constant-base" -RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-extutils-constant-utils" -RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-extutils-liblist" -RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-getopt-std" -RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-data-dumper" -RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-extutils-packlist" -RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-file-basename" 
-RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-file-find" -RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-autosplit" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-cwd" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-extutils-packlist" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-compare" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-copy" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-find" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-path" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-install += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-cwd" -RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-liblist += "${PN}-module-extutils-liblist-kid" -RDEPENDS_${PN}-module-extutils-liblist += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-liblist += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-makemaker-config += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-makemaker-config += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-makemaker-locale += "${PN}-module-base" -RDEPENDS_${PN}-module-extutils-makemaker-locale += "${PN}-module-encode" -RDEPENDS_${PN}-module-extutils-makemaker-locale += "${PN}-module-encode-alias" -RDEPENDS_${PN}-module-extutils-makemaker-locale += "${PN}-module-i18n-langinfo" -RDEPENDS_${PN}-module-extutils-makemaker-locale += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-b" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-cwd" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-extutils-makemaker-version" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-extutils-manifest" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-extutils-mm" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-extutils-my" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-file-path" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-version" -RDEPENDS_${PN}-module-extutils-makemaker-version += "${PN}-module-extutils-makemaker-version-regex" 
-RDEPENDS_${PN}-module-extutils-makemaker-version += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-makemaker-version += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-makemaker-version-regex += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-makemaker-version-regex += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-makemaker-version-vpp += "${PN}-module-b" -RDEPENDS_${PN}-module-extutils-makemaker-version-vpp += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-makemaker-version-vpp += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-makemaker-version-vpp += "${PN}-module-constant" -RDEPENDS_${PN}-module-extutils-makemaker-version-vpp += "${PN}-module-extutils-makemaker-version-regex" -RDEPENDS_${PN}-module-extutils-makemaker-version-vpp += "${PN}-module-locale" -RDEPENDS_${PN}-module-extutils-makemaker-version-vpp += "${PN}-module-overload" -RDEPENDS_${PN}-module-extutils-makemaker-version-vpp += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-makemaker-version-vpp += "${PN}-module-universal" -RDEPENDS_${PN}-module-extutils-makemaker-version-vpp += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-file-copy" -RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-file-find" -RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-file-path" -RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-miniperl += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-miniperl += "${PN}-module-extutils-embed" -RDEPENDS_${PN}-module-extutils-miniperl += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-miniperl += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-mkbootstrap += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-mkbootstrap += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-extutils-mkbootstrap += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-mkbootstrap += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mksymlists += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-mksymlists += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-mksymlists += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-mksymlists += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-aix += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-extutils-mm-aix += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-aix += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-autosplit" -RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-cpan" -RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-data-dumper" -RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-file-find" -RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-strict" 
-RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-version" -RDEPENDS_${PN}-module-extutils-mm-beos += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-mm-beos += "${PN}-module-extutils-mm-any" -RDEPENDS_${PN}-module-extutils-mm-beos += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-beos += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-mm-beos += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-cygwin += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-mm-cygwin += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-cygwin += "${PN}-module-extutils-mm-win32" -RDEPENDS_${PN}-module-extutils-mm-cygwin += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-mm-cygwin += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-darwin += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-darwin += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-dos += "${PN}-module-extutils-mm-any" -RDEPENDS_${PN}-module-extutils-mm-dos += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-dos += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-macos += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-nw5 += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-extutils-mm-nw5 += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-mm-nw5 += "${PN}-module-extutils-mm-win32" -RDEPENDS_${PN}-module-extutils-mm-nw5 += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-mm-nw5 += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-os2 += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-extutils-mm-os2 += "${PN}-module-extutils-mm-any" -RDEPENDS_${PN}-module-extutils-mm-os2 += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-os2 += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-mm-os2 += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm += "${PN}-module-extutils-liblist" -RDEPENDS_${PN}-module-extutils-mm += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-extutils-mm += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-mm += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-qnx += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-qnx += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-cwd" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-dirhandle" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-encode" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-extutils-liblist" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-extutils-mm-any" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-file-find" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-version" -RDEPENDS_${PN}-module-extutils-mm-uwin += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-uwin += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-mm-vms += 
"${PN}-module-extutils-liblist-kid" -RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-extutils-mm-any" -RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-file-find" -RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-vos += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-vos += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-extutils-mm-any" -RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-extutils-mm-unix" -RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-mm-win95 += "${PN}-module-extutils-makemaker-config" -RDEPENDS_${PN}-module-extutils-mm-win95 += "${PN}-module-extutils-mm-win32" -RDEPENDS_${PN}-module-extutils-mm-win95 += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-my += "${PN}-module-extutils-mm" -RDEPENDS_${PN}-module-extutils-my += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-carp" -RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-cwd" -RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-vars" -RDEPENDS_${PN}-module-extutils-parsexs-constants += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-parsexs-constants += "${PN}-module-symbol" -RDEPENDS_${PN}-module-extutils-parsexs-constants += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-parsexs-countlines += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-parsexs-eval += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-parsexs-eval += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-config" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-cwd" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-extutils-parsexs-constants" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-extutils-parsexs-countlines" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-extutils-parsexs-eval" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-extutils-parsexs-utilities" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-re" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-symbol" -RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-extutils-parsexs-constants" -RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-extutils-typemaps" 
-RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-testlib += "${PN}-module-cwd" -RDEPENDS_${PN}-module-extutils-testlib += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-extutils-testlib += "${PN}-module-lib" -RDEPENDS_${PN}-module-extutils-testlib += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-testlib += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-typemaps-cmd += "${PN}-module-exporter" -RDEPENDS_${PN}-module-extutils-typemaps-cmd += "${PN}-module-extutils-typemaps" -RDEPENDS_${PN}-module-extutils-typemaps-cmd += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-typemaps-cmd += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-typemaps-inputmap += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-typemaps-inputmap += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-typemaps-outputmap += "${PN}-module-re" -RDEPENDS_${PN}-module-extutils-typemaps-outputmap += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-typemaps-outputmap += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-extutils-parsexs" -RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-extutils-parsexs-constants" -RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-extutils-typemaps-inputmap" -RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-extutils-typemaps-outputmap" -RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-extutils-typemaps-type" -RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-warnings" -RDEPENDS_${PN}-module-extutils-typemaps-type += "${PN}-module-extutils-typemaps" -RDEPENDS_${PN}-module-extutils-typemaps-type += "${PN}-module-strict" -RDEPENDS_${PN}-module-extutils-typemaps-type += "${PN}-module-warnings" -RDEPENDS_${PN}-module-fatal += "${PN}-module-autodie-exception-system" -RDEPENDS_${PN}-module-fatal += "${PN}-module-autodie-hints" -RDEPENDS_${PN}-module-fatal += "${PN}-module-autodie-scopeutil" -RDEPENDS_${PN}-module-fatal += "${PN}-module-carp" -RDEPENDS_${PN}-module-fatal += "${PN}-module-config" -RDEPENDS_${PN}-module-fatal += "${PN}-module-constant" -RDEPENDS_${PN}-module-fatal += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-fatal += "${PN}-module-posix" -RDEPENDS_${PN}-module-fatal += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-fatal += "${PN}-module-strict" -RDEPENDS_${PN}-module-fatal += "${PN}-module-tie-refhash" -RDEPENDS_${PN}-module-fatal += "${PN}-module-warnings" -RDEPENDS_${PN}-module-fcntl += "${PN}-module-exporter" -RDEPENDS_${PN}-module-fcntl += "${PN}-module-strict" -RDEPENDS_${PN}-module-fcntl += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-feature += "${PN}-module-carp" -RDEPENDS_${PN}-module-fields += "${PN}-module-base" -RDEPENDS_${PN}-module-fields += "${PN}-module-carp" -RDEPENDS_${PN}-module-fields += "${PN}-module-hash-util" -RDEPENDS_${PN}-module-fields += "${PN}-module-strict" -RDEPENDS_${PN}-module-fields += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-basename += "${PN}-module-carp" -RDEPENDS_${PN}-module-file-basename += "${PN}-module-exporter" -RDEPENDS_${PN}-module-file-basename += "${PN}-module-re" -RDEPENDS_${PN}-module-file-basename += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-basename += "${PN}-module-warnings" -RDEPENDS_${PN}-module-filecache += "${PN}-module-carp" 
-RDEPENDS_${PN}-module-filecache += "${PN}-module-parent" -RDEPENDS_${PN}-module-filecache += "${PN}-module-strict" -RDEPENDS_${PN}-module-filecache += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-compare += "${PN}-module-carp" -RDEPENDS_${PN}-module-file-compare += "${PN}-module-exporter" -RDEPENDS_${PN}-module-file-compare += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-compare += "${PN}-module-warnings" -RDEPENDS_${PN}-module-file-copy += "${PN}-module-carp" -RDEPENDS_${PN}-module-file-copy += "${PN}-module-config" -RDEPENDS_${PN}-module-file-copy += "${PN}-module-exporter" -RDEPENDS_${PN}-module-file-copy += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-file-copy += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-file-copy += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-copy += "${PN}-module-warnings" -RDEPENDS_${PN}-module-file-dosglob += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-dosglob += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-file-dosglob += "${PN}-module-warnings" -RDEPENDS_${PN}-module-file-dosglob += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-carp" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-constant" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-cwd" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-copy" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-filehandle" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-path" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-temp" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-ipc-cmd" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-locale-maketext-simple" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-load" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-params-check" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-fetch += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-find += "${PN}-module-config" -RDEPENDS_${PN}-module-file-find += "${PN}-module-cwd" -RDEPENDS_${PN}-module-file-find += "${PN}-module-exporter" -RDEPENDS_${PN}-module-file-find += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-file-find += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-file-find += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-find += "${PN}-module-warnings" -RDEPENDS_${PN}-module-file-find += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-file-globmapper += "${PN}-module-carp" -RDEPENDS_${PN}-module-file-globmapper += "${PN}-module-file-glob" -RDEPENDS_${PN}-module-file-globmapper += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-globmapper += "${PN}-module-warnings" -RDEPENDS_${PN}-module-file-glob += "${PN}-module-exporter" -RDEPENDS_${PN}-module-file-glob += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-glob += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-filehandle += "${PN}-module-exporter" -RDEPENDS_${PN}-module-filehandle += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-filehandle += "${PN}-module-io-file" -RDEPENDS_${PN}-module-filehandle += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-path += "${PN}-module-carp" -RDEPENDS_${PN}-module-file-path += "${PN}-module-cwd" -RDEPENDS_${PN}-module-file-path += "${PN}-module-exporter" -RDEPENDS_${PN}-module-file-path += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-file-path += "${PN}-module-file-spec" 
-RDEPENDS_${PN}-module-file-path += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-path += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-spec-cygwin += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-file-spec-cygwin += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-spec-cygwin += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-spec-epoc += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-file-spec-epoc += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-spec-epoc += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-spec-functions += "${PN}-module-exporter" -RDEPENDS_${PN}-module-file-spec-functions += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-file-spec-functions += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-file-spec-functions += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-spec-functions += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-spec-mac += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-file-spec-mac += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-spec-mac += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-spec-os2 += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-file-spec-os2 += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-spec-os2 += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-spec += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-spec += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-constant" -RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-cwd" -RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-file-spec-vms += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-file-spec-vms += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-file-spec-vms += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-spec-vms += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-spec-win32 += "${PN}-module-cwd " -RDEPENDS_${PN}-module-file-spec-win32 += "${PN}-module-file-spec-unix" -RDEPENDS_${PN}-module-file-spec-win32 += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-spec-win32 += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-stat += "${PN}-module-carp" -RDEPENDS_${PN}-module-file-stat += "${PN}-module-class-struct" -RDEPENDS_${PN}-module-file-stat += "${PN}-module-constant" -RDEPENDS_${PN}-module-file-stat += "${PN}-module-exporter" -RDEPENDS_${PN}-module-file-stat += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-file-stat += "${PN}-module-overload " -RDEPENDS_${PN}-module-file-stat += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-stat += "${PN}-module-symbol" -RDEPENDS_${PN}-module-file-stat += "${PN}-module-vars" -RDEPENDS_${PN}-module-file-stat += "${PN}-module-warnings" -RDEPENDS_${PN}-module-file-stat += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-carp" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-constant" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-cwd" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-errno" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-exporter" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-file-path" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-io-seekable" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-overload" -RDEPENDS_${PN}-module-file-temp += 
"${PN}-module-parent" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-posix" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-strict" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-symbol" -RDEPENDS_${PN}-module-file-temp += "${PN}-module-vars" -RDEPENDS_${PN}-module-filter-simple += "${PN}-module-carp" -RDEPENDS_${PN}-module-filter-simple += "${PN}-module-filter-util-call" -RDEPENDS_${PN}-module-filter-simple += "${PN}-module-text-balanced" -RDEPENDS_${PN}-module-filter-simple += "${PN}-module-vars" -RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-carp " -RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-exporter" -RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-strict" -RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-vars" -RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-warnings" -RDEPENDS_${PN}-module-findbin += "${PN}-module-carp" -RDEPENDS_${PN}-module-findbin += "${PN}-module-cwd" -RDEPENDS_${PN}-module-findbin += "${PN}-module-exporter" -RDEPENDS_${PN}-module-findbin += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-findbin += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-getopt-long += "${PN}-module-constant" -RDEPENDS_${PN}-module-getopt-long += "${PN}-module-exporter" -RDEPENDS_${PN}-module-getopt-long += "${PN}-module-overload" -RDEPENDS_${PN}-module-getopt-long += "${PN}-module-pod-usage" -RDEPENDS_${PN}-module-getopt-long += "${PN}-module-strict" -RDEPENDS_${PN}-module-getopt-long += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-getopt-long += "${PN}-module-vars" -RDEPENDS_${PN}-module-getopt-std += "${PN}-module-exporter" -RDEPENDS_${PN}-module-hash-util-fieldhash += "${PN}-module-exporter" -RDEPENDS_${PN}-module-hash-util-fieldhash += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-hash-util-fieldhash += "${PN}-module-strict" -RDEPENDS_${PN}-module-hash-util-fieldhash += "${PN}-module-warnings" -RDEPENDS_${PN}-module-hash-util-fieldhash += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-hash-util += "${PN}-module-carp" -RDEPENDS_${PN}-module-hash-util += "${PN}-module-exporter" -RDEPENDS_${PN}-module-hash-util += "${PN}-module-hash-util-fieldhash" -RDEPENDS_${PN}-module-hash-util += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-hash-util += "${PN}-module-strict" -RDEPENDS_${PN}-module-hash-util += "${PN}-module-warnings" -RDEPENDS_${PN}-module-hash-util += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-hash-util += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-i18n-collate += "${PN}-module-exporter" -RDEPENDS_${PN}-module-i18n-collate += "${PN}-module-overload" -RDEPENDS_${PN}-module-i18n-collate += "${PN}-module-posix" -RDEPENDS_${PN}-module-i18n-collate += "${PN}-module-strict" -RDEPENDS_${PN}-module-i18n-collate += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-i18n-langinfo += "${PN}-module-carp" -RDEPENDS_${PN}-module-i18n-langinfo += "${PN}-module-exporter" -RDEPENDS_${PN}-module-i18n-langinfo += "${PN}-module-strict" -RDEPENDS_${PN}-module-i18n-langinfo += "${PN}-module-warnings" -RDEPENDS_${PN}-module-i18n-langinfo += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-i18n-langtags-detect += "${PN}-module-i18n-langtags" -RDEPENDS_${PN}-module-i18n-langtags-detect += "${PN}-module-strict" -RDEPENDS_${PN}-module-i18n-langtags-detect += "${PN}-module-vars" -RDEPENDS_${PN}-module-i18n-langtags-list += "${PN}-module-strict" 
-RDEPENDS_${PN}-module-i18n-langtags-list += "${PN}-module-vars" -RDEPENDS_${PN}-module-i18n-langtags += "${PN}-module-exporter" -RDEPENDS_${PN}-module-i18n-langtags += "${PN}-module-strict" -RDEPENDS_${PN}-module-i18n-langtags += "${PN}-module-vars" -RDEPENDS_${PN}-module-io-compress-adapter-bzip2 += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-adapter-bzip2 += "${PN}-module-compress-raw-bzip2" -RDEPENDS_${PN}-module-io-compress-adapter-bzip2 += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-compress-adapter-bzip2 += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-compress-adapter-bzip2 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-compress-raw-zlib" -RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-adapter-identity += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-adapter-identity += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-compress-adapter-identity += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-compress-adapter-identity += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-constant" -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-encode" -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-file-globmapper" -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-utf8" -RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-io-file" -RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-io-handle " -RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-symbol" -RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-io-compress-adapter-bzip2" -RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-io-compress-base" -RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-compress-deflate += 
"${PN}-module-io-compress-adapter-deflate" -RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-io-compress-rawdeflate" -RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-io-compress-zlib-constants" -RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-gzip-constants += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-gzip-constants += "${PN}-module-constant" -RDEPENDS_${PN}-module-io-compress-gzip-constants += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-compress-gzip-constants += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-compress-gzip-constants += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-io-compress-adapter-deflate" -RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-io-compress-gzip-constants" -RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-io-compress-rawdeflate" -RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-io-compress-zlib-extra" -RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-compress-raw-zlib" -RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-io-compress-adapter-deflate" -RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-io-compress-base" -RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-zip-constants += "${PN}-module-constant" -RDEPENDS_${PN}-module-io-compress-zip-constants += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-compress-zip-constants += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-compress-zip-constants += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-compress-raw-zlib" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-config" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-adapter-deflate" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-adapter-identity" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-bzip2 " -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-rawdeflate" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-zip-constants" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-zlib-extra" -RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-strict " 
-RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-zlib-constants += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-zlib-constants += "${PN}-module-constant" -RDEPENDS_${PN}-module-io-compress-zlib-constants += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-compress-zlib-constants += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-compress-zlib-constants += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-compress-zlib-extra += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-compress-zlib-extra += "${PN}-module-io-compress-gzip-constants" -RDEPENDS_${PN}-module-io-compress-zlib-extra += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-compress-zlib-extra += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-dir += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-dir += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-dir += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-io-dir += "${PN}-module-file-stat" -RDEPENDS_${PN}-module-io-dir += "${PN}-module-io-file" -RDEPENDS_${PN}-module-io-dir += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-dir += "${PN}-module-symbol" -RDEPENDS_${PN}-module-io-dir += "${PN}-module-tie-hash" -RDEPENDS_${PN}-module-io-file += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-file += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-file += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-io-file += "${PN}-module-io-seekable" -RDEPENDS_${PN}-module-io-file += "${PN}-module-selectsaver" -RDEPENDS_${PN}-module-io-file += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-file += "${PN}-module-symbol" -RDEPENDS_${PN}-module-io-handle += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-handle += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-handle += "${PN}-module-io" -RDEPENDS_${PN}-module-io-handle += "${PN}-module-io-file" -RDEPENDS_${PN}-module-io-handle += "${PN}-module-selectsaver" -RDEPENDS_${PN}-module-io-handle += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-handle += "${PN}-module-symbol" -RDEPENDS_${PN}-module-io += "${PN}-module-carp" -RDEPENDS_${PN}-module-io += "${PN}-module-strict" -RDEPENDS_${PN}-module-io += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-io-pipe += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-pipe += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-io-pipe += "${PN}-module-io-handle" -RDEPENDS_${PN}-module-io-pipe += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-pipe += "${PN}-module-symbol" -RDEPENDS_${PN}-module-io-poll += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-poll += "${PN}-module-io-handle" -RDEPENDS_${PN}-module-io-poll += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-seekable += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-seekable += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-seekable += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-io-seekable += "${PN}-module-io-handle" -RDEPENDS_${PN}-module-io-seekable += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-select += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-select += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-select += "${PN}-module-vars" -RDEPENDS_${PN}-module-io-select += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-errno" -RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-io-socket" -RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-socket" 
-RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-base" -RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-constant" -RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-errno" -RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-posix" -RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-socket" -RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-socket += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-socket += "${PN}-module-errno" -RDEPENDS_${PN}-module-io-socket += "${PN}-module-exporter" -RDEPENDS_${PN}-module-io-socket += "${PN}-module-io-handle" -RDEPENDS_${PN}-module-io-socket += "${PN}-module-io-select" -RDEPENDS_${PN}-module-io-socket += "${PN}-module-io-socket-inet" -RDEPENDS_${PN}-module-io-socket += "${PN}-module-io-socket-unix" -RDEPENDS_${PN}-module-io-socket += "${PN}-module-socket" -RDEPENDS_${PN}-module-io-socket += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-socket-unix += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-socket-unix += "${PN}-module-io-socket" -RDEPENDS_${PN}-module-io-socket-unix += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-uncompress-adapter-bunzip2 += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-uncompress-adapter-bunzip2 += "${PN}-module-compress-raw-bzip2" -RDEPENDS_${PN}-module-io-uncompress-adapter-bunzip2 += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-adapter-bunzip2 += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-uncompress-adapter-bunzip2 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-compress-raw-zlib" -RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-io-compress-zip-constants " -RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-uncompress-adapter-inflate += "${PN}-module-compress-raw-zlib" -RDEPENDS_${PN}-module-io-uncompress-adapter-inflate += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-adapter-inflate += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-uncompress-adapter-inflate += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-adapter-inflate" -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-base" -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-gunzip" -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-inflate" -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-rawinflate" -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-unzip" -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-uncompress-anyuncompress += 
"${PN}-module-bytes" -RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-io-uncompress-base" -RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-carp " -RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-constant" -RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-io-file " -RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-list-util" -RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-symbol" -RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-io-uncompress-adapter-bunzip2" -RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-io-uncompress-base" -RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-compress-raw-zlib" -RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-io-compress-gzip-constants" -RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-io-compress-zlib-extra" -RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-io-uncompress-rawinflate" -RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-bytes" -RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-io-compress-zlib-constants" -RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-io-uncompress-rawinflate" -RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-compress-raw-zlib" -RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-io-uncompress-adapter-inflate" -RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-io-uncompress-base" -RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-compress-raw-zlib" 
-RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-constant" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-exporter " -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-compress-base-common" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-compress-zip-constants" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-compress-zlib-extra" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-file" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-uncompress-adapter-identity" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-uncompress-adapter-inflate" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-uncompress-rawinflate" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-posix" -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-strict " -RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-warnings" -RDEPENDS_${PN}-module-io-zlib += "${PN}-module-carp" -RDEPENDS_${PN}-module-io-zlib += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-io-zlib += "${PN}-module-io-handle" -RDEPENDS_${PN}-module-io-zlib += "${PN}-module-strict" -RDEPENDS_${PN}-module-io-zlib += "${PN}-module-symbol" -RDEPENDS_${PN}-module-io-zlib += "${PN}-module-tie-handle" -RDEPENDS_${PN}-module-io-zlib += "${PN}-module-vars" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-carp" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-constant" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-exporter" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-extutils-makemaker" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-filehandle" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-io-handle" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-io-select" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-ipc-open3" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-locale-maketext-simple" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-load" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-params-check" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-posix" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-socket" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-strict" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-symbol" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-time-hires" -RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-vars" -RDEPENDS_${PN}-module-ipc-msg += "${PN}-module-carp" -RDEPENDS_${PN}-module-ipc-msg += "${PN}-module-class-struct" -RDEPENDS_${PN}-module-ipc-msg += "${PN}-module-ipc-sysv" -RDEPENDS_${PN}-module-ipc-msg += "${PN}-module-strict" -RDEPENDS_${PN}-module-ipc-msg += "${PN}-module-vars" -RDEPENDS_${PN}-module-ipc-open2 += "${PN}-module-exporter" -RDEPENDS_${PN}-module-ipc-open2 += "${PN}-module-ipc-open3" -RDEPENDS_${PN}-module-ipc-open2 += "${PN}-module-strict" -RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-carp" -RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-constant" -RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-exporter" -RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-io-pipe" -RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-posix" -RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-strict" -RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-symbol" -RDEPENDS_${PN}-module-ipc-semaphore += "${PN}-module-carp" -RDEPENDS_${PN}-module-ipc-semaphore += 
"${PN}-module-class-struct" -RDEPENDS_${PN}-module-ipc-semaphore += "${PN}-module-ipc-sysv" -RDEPENDS_${PN}-module-ipc-semaphore += "${PN}-module-strict" -RDEPENDS_${PN}-module-ipc-semaphore += "${PN}-module-vars" -RDEPENDS_${PN}-module-ipc-sharedmem += "${PN}-module-carp" -RDEPENDS_${PN}-module-ipc-sharedmem += "${PN}-module-class-struct" -RDEPENDS_${PN}-module-ipc-sharedmem += "${PN}-module-ipc-sysv" -RDEPENDS_${PN}-module-ipc-sharedmem += "${PN}-module-strict" -RDEPENDS_${PN}-module-ipc-sharedmem += "${PN}-module-vars" -RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-carp" -RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-config" -RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-exporter" -RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-strict" -RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-vars" -RDEPENDS_${PN}-module-json-pp-boolean += "${PN}-module-json-pp" -RDEPENDS_${PN}-module-json-pp-boolean += "${PN}-module-strict" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-b" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-base" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-bytes" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-carp" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-constant" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-encode" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-math-bigfloat" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-math-bigint" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-overload" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-strict" -RDEPENDS_${PN}-module-json-pp += "${PN}-module-subs" -RDEPENDS_${PN}-module-less += "${PN}-module-strict" -RDEPENDS_${PN}-module-less += "${PN}-module-warnings" -RDEPENDS_${PN}-module-lib += "${PN}-module-carp" -RDEPENDS_${PN}-module-lib += "${PN}-module-config" -RDEPENDS_${PN}-module-lib += "${PN}-module-strict" -RDEPENDS_${PN}-module-list-util += "${PN}-module-exporter" -RDEPENDS_${PN}-module-list-util += "${PN}-module-strict" -RDEPENDS_${PN}-module-list-util += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-list-util-xs += "${PN}-module-list-util" -RDEPENDS_${PN}-module-list-util-xs += "${PN}-module-strict" -RDEPENDS_${PN}-module-loaded += "${PN}-module-carp" -RDEPENDS_${PN}-module-loaded += "${PN}-module-strict" -RDEPENDS_${PN}-module-loaded += "${PN}-module-vars" -RDEPENDS_${PN}-module-load += "${PN}-module-carp" -RDEPENDS_${PN}-module-load += "${PN}-module-config" -RDEPENDS_${PN}-module-load += "${PN}-module-constant" -RDEPENDS_${PN}-module-load += "${PN}-module-exporter" -RDEPENDS_${PN}-module-load += "${PN}-module-filehandle" -RDEPENDS_${PN}-module-load += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-load += "${PN}-module-locale-maketext-simple" -RDEPENDS_${PN}-module-load += "${PN}-module-corelist" -RDEPENDS_${PN}-module-load += "${PN}-module-params-check" -RDEPENDS_${PN}-module-load += "${PN}-module-strict" -RDEPENDS_${PN}-module-load += "${PN}-module-vars" -RDEPENDS_${PN}-module-load += "${PN}-module-version" -RDEPENDS_${PN}-module-load += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-constants += "${PN}-module-constant" -RDEPENDS_${PN}-module-locale-codes-constants += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-codes-constants += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-constants += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-country-codes += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-country-codes += "${PN}-module-utf8" 
-RDEPENDS_${PN}-module-locale-codes-country-codes += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-carp" -RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-locale-codes" -RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-locale-codes-constants" -RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-locale-codes-country-codes" -RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-locale-codes-country-retired" -RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-country-retired += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-country-retired += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-country-retired += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-currency-codes += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-currency-codes += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-currency-codes += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-carp" -RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-locale-codes" -RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-locale-codes-constants" -RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-locale-codes-currency-codes" -RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-locale-codes-currency-retired" -RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-currency-retired += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-currency-retired += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-currency-retired += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-langext-codes += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-langext-codes += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-langext-codes += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-carp" -RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-locale-codes" -RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-locale-codes-constants" -RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-locale-codes-langext-codes" -RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-locale-codes-langext-retired" -RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-langext-retired += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-langext-retired += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-langext-retired += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-langfam-codes += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-langfam-codes += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-langfam-codes += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-carp" -RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-locale-codes" 
-RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-locale-codes-constants" -RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-locale-codes-langfam-codes" -RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-locale-codes-langfam-retired" -RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-langfam-retired += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-langfam-retired += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-language-codes += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-language-codes += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-language-codes += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-carp" -RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-locale-codes" -RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-locale-codes-constants" -RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-locale-codes-language-codes" -RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-locale-codes-language-retired" -RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-language-retired += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-language-retired += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-language-retired += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-langvar-codes += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-langvar-codes += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-langvar-codes += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-carp" -RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-locale-codes" -RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-locale-codes-constants" -RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-locale-codes-langvar-codes" -RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-locale-codes-langvar-retired" -RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-langvar-retired += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-langvar-retired += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-langvar-retired += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes += "${PN}-module-carp" -RDEPENDS_${PN}-module-locale-codes += "${PN}-module-locale-codes-constants" -RDEPENDS_${PN}-module-locale-codes += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-script-codes += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-script-codes += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-script-codes += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-carp" -RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-locale-codes" -RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-locale-codes-constants" -RDEPENDS_${PN}-module-locale-codes-script += 
"${PN}-module-locale-codes-script-codes" -RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-locale-codes-script-retired" -RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-codes-script-retired += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-codes-script-retired += "${PN}-module-utf8" -RDEPENDS_${PN}-module-locale-codes-script-retired += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-country += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-country += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-country += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-currency += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-currency += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-currency += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-language += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-language += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-language += "${PN}-module-warnings" -RDEPENDS_${PN}-module-locale-maketext-gutsloader += "${PN}-module-locale-maketext" -RDEPENDS_${PN}-module-locale-maketext-guts += "${PN}-module-locale-maketext" -RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-carp" -RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-i18n-langtags" -RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-i18n-langtags-detect" -RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-integer" -RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-vars" -RDEPENDS_${PN}-module-locale-maketext-simple += "${PN}-module-base" -RDEPENDS_${PN}-module-locale-maketext-simple += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale += "${PN}-module-carp" -RDEPENDS_${PN}-module-locale += "${PN}-module-config" -RDEPENDS_${PN}-module-locale-script += "${PN}-module-exporter" -RDEPENDS_${PN}-module-locale-script += "${PN}-module-strict" -RDEPENDS_${PN}-module-locale-script += "${PN}-module-warnings" -RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-carp" -RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-exporter" -RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-math-bigint" -RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-overload" -RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-strict" -RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-vars" -RDEPENDS_${PN}-module-math-bigfloat-trace += "${PN}-module-exporter" -RDEPENDS_${PN}-module-math-bigfloat-trace += "${PN}-module-math-bigfloat" -RDEPENDS_${PN}-module-math-bigfloat-trace += "${PN}-module-overload" -RDEPENDS_${PN}-module-math-bigfloat-trace += "${PN}-module-strict" -RDEPENDS_${PN}-module-math-bigfloat-trace += "${PN}-module-vars" -RDEPENDS_${PN}-module-math-bigint-calcemu += "${PN}-module-strict" -RDEPENDS_${PN}-module-math-bigint-calcemu += "${PN}-module-vars" -RDEPENDS_${PN}-module-math-bigint-calc += "${PN}-module-carp" -RDEPENDS_${PN}-module-math-bigint-calc += "${PN}-module-constant" -RDEPENDS_${PN}-module-math-bigint-calc += "${PN}-module-integer" -RDEPENDS_${PN}-module-math-bigint-calc += "${PN}-module-strict" -RDEPENDS_${PN}-module-math-bigint-fastcalc += "${PN}-module-math-bigint-calc" -RDEPENDS_${PN}-module-math-bigint-fastcalc += "${PN}-module-strict" -RDEPENDS_${PN}-module-math-bigint-fastcalc += "${PN}-module-vars" -RDEPENDS_${PN}-module-math-bigint-fastcalc += "${PN}-module-warnings" -RDEPENDS_${PN}-module-math-bigint-fastcalc += 
"${PN}-module-xsloader" -RDEPENDS_${PN}-module-math-bigint += "${PN}-module-carp" -RDEPENDS_${PN}-module-math-bigint += "${PN}-module-exporter" -RDEPENDS_${PN}-module-math-bigint += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-math-bigint += "${PN}-module-math-bigfloat" -RDEPENDS_${PN}-module-math-bigint += "${PN}-module-overload" -RDEPENDS_${PN}-module-math-bigint += "${PN}-module-strict" -RDEPENDS_${PN}-module-math-bigint += "${PN}-module-vars" -RDEPENDS_${PN}-module-math-bigint-trace += "${PN}-module-exporter" -RDEPENDS_${PN}-module-math-bigint-trace += "${PN}-module-math-bigint" -RDEPENDS_${PN}-module-math-bigint-trace += "${PN}-module-overload" -RDEPENDS_${PN}-module-math-bigint-trace += "${PN}-module-strict" -RDEPENDS_${PN}-module-math-bigint-trace += "${PN}-module-vars" -RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-carp" -RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-math-bigfloat" -RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-math-bigint" -RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-overload" -RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-strict" -RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-vars" -RDEPENDS_${PN}-module-math-complex += "${PN}-module-config" -RDEPENDS_${PN}-module-math-complex += "${PN}-module-exporter" -RDEPENDS_${PN}-module-math-complex += "${PN}-module-overload" -RDEPENDS_${PN}-module-math-complex += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-math-complex += "${PN}-module-strict" -RDEPENDS_${PN}-module-math-complex += "${PN}-module-warnings" -RDEPENDS_${PN}-module-math-trig += "${PN}-module-exporter" -RDEPENDS_${PN}-module-math-trig += "${PN}-module-math-complex" -RDEPENDS_${PN}-module-math-trig += "${PN}-module-strict" -RDEPENDS_${PN}-module-memoize-anydbm-file += "${PN}-module-vars" -RDEPENDS_${PN}-module-memoize-expirefile += "${PN}-module-carp" -RDEPENDS_${PN}-module-memoize-expire += "${PN}-module-carp" -RDEPENDS_${PN}-module-memoize += "${PN}-module-carp" -RDEPENDS_${PN}-module-memoize += "${PN}-module-config" -RDEPENDS_${PN}-module-memoize += "${PN}-module-exporter" -RDEPENDS_${PN}-module-memoize += "${PN}-module-strict" -RDEPENDS_${PN}-module-memoize += "${PN}-module-vars" -RDEPENDS_${PN}-module-memoize-sdbm-file += "${PN}-module-sdbm-file" -RDEPENDS_${PN}-module-memoize-storable += "${PN}-module-carp" -RDEPENDS_${PN}-module-memoize-storable += "${PN}-module-storable" -RDEPENDS_${PN}-module-meta-notation += "${PN}-module-strict" -RDEPENDS_${PN}-module-meta-notation += "${PN}-module-warnings" -RDEPENDS_${PN}-module-mime-base64 += "${PN}-module-exporter" -RDEPENDS_${PN}-module-mime-base64 += "${PN}-module-strict" -RDEPENDS_${PN}-module-mime-base64 += "${PN}-module-vars" -RDEPENDS_${PN}-module-mime-base64 += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-mime-quotedprint += "${PN}-module-exporter" -RDEPENDS_${PN}-module-mime-quotedprint += "${PN}-module-mime-base64" -RDEPENDS_${PN}-module-mime-quotedprint += "${PN}-module-strict" -RDEPENDS_${PN}-module-mime-quotedprint += "${PN}-module-vars" -RDEPENDS_${PN}-module-mro += "${PN}-module-strict" -RDEPENDS_${PN}-module-mro += "${PN}-module-warnings" -RDEPENDS_${PN}-module-mro += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-net-cmd += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-cmd += "${PN}-module-constant" -RDEPENDS_${PN}-module-net-cmd += "${PN}-module-exporter" -RDEPENDS_${PN}-module-net-cmd += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-cmd += "${PN}-module-symbol" -RDEPENDS_${PN}-module-net-cmd += "${PN}-module-warnings" 
-RDEPENDS_${PN}-module-net-config += "${PN}-module-exporter" -RDEPENDS_${PN}-module-net-config += "${PN}-module-socket" -RDEPENDS_${PN}-module-net-config += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-config += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-domain += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-domain += "${PN}-module-exporter" -RDEPENDS_${PN}-module-net-domain += "${PN}-module-net-config" -RDEPENDS_${PN}-module-net-domain += "${PN}-module-posix" -RDEPENDS_${PN}-module-net-domain += "${PN}-module-socket" -RDEPENDS_${PN}-module-net-domain += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-domain += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-ftp-a += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-ftp-a += "${PN}-module-net-ftp-dataconn" -RDEPENDS_${PN}-module-net-ftp-a += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-ftp-a += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-ftp-dataconn += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-ftp-dataconn += "${PN}-module-errno" -RDEPENDS_${PN}-module-net-ftp-dataconn += "${PN}-module-net-cmd" -RDEPENDS_${PN}-module-net-ftp-dataconn += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-ftp-dataconn += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-ftp-e += "${PN}-module-net-ftp-i" -RDEPENDS_${PN}-module-net-ftp-e += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-ftp-e += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-ftp-i += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-ftp-i += "${PN}-module-net-ftp-dataconn" -RDEPENDS_${PN}-module-net-ftp-i += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-ftp-i += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-ftp-l += "${PN}-module-net-ftp-i" -RDEPENDS_${PN}-module-net-ftp-l += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-ftp-l += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-constant" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-io-socket" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-io-socket-ip" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-net-cmd" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-net-config" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-net-ftp-a" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-net-netrc" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-socket" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-time-local" -RDEPENDS_${PN}-module-net-ftp += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-hostent += "${PN}-module-class-struct" -RDEPENDS_${PN}-module-net-hostent += "${PN}-module-exporter" -RDEPENDS_${PN}-module-net-hostent += "${PN}-module-socket" -RDEPENDS_${PN}-module-net-hostent += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-hostent += "${PN}-module-vars" -RDEPENDS_${PN}-module-net-netent += "${PN}-module-class-struct" -RDEPENDS_${PN}-module-net-netent += "${PN}-module-exporter" -RDEPENDS_${PN}-module-net-netent += "${PN}-module-socket" -RDEPENDS_${PN}-module-net-netent += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-netent += "${PN}-module-vars" -RDEPENDS_${PN}-module-net-netrc += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-netrc += "${PN}-module-filehandle" -RDEPENDS_${PN}-module-net-netrc += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-netrc += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-nntp += "${PN}-module-carp" 
-RDEPENDS_${PN}-module-net-nntp += "${PN}-module-io-socket" -RDEPENDS_${PN}-module-net-nntp += "${PN}-module-io-socket-ip" -RDEPENDS_${PN}-module-net-nntp += "${PN}-module-net-cmd" -RDEPENDS_${PN}-module-net-nntp += "${PN}-module-net-config" -RDEPENDS_${PN}-module-net-nntp += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-nntp += "${PN}-module-time-local" -RDEPENDS_${PN}-module-net-nntp += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-ping += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-ping += "${PN}-module-constant" -RDEPENDS_${PN}-module-net-ping += "${PN}-module-exporter" -RDEPENDS_${PN}-module-net-ping += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-net-ping += "${PN}-module-filehandle" -RDEPENDS_${PN}-module-net-ping += "${PN}-module-posix" -RDEPENDS_${PN}-module-net-ping += "${PN}-module-socket" -RDEPENDS_${PN}-module-net-ping += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-ping += "${PN}-module-time-hires" -RDEPENDS_${PN}-module-net-ping += "${PN}-module-vars" -RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-io-socket" -RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-io-socket-ip" -RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-mime-base64" -RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-net-cmd" -RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-net-config" -RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-net-netrc" -RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-protoent += "${PN}-module-class-struct" -RDEPENDS_${PN}-module-net-protoent += "${PN}-module-exporter" -RDEPENDS_${PN}-module-net-protoent += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-protoent += "${PN}-module-vars" -RDEPENDS_${PN}-module-net-servent += "${PN}-module-class-struct" -RDEPENDS_${PN}-module-net-servent += "${PN}-module-exporter" -RDEPENDS_${PN}-module-net-servent += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-servent += "${PN}-module-vars" -RDEPENDS_${PN}-module-net-smtp += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-smtp += "${PN}-module-io-socket" -RDEPENDS_${PN}-module-net-smtp += "${PN}-module-io-socket-ip" -RDEPENDS_${PN}-module-net-smtp += "${PN}-module-mime-base64" -RDEPENDS_${PN}-module-net-smtp += "${PN}-module-net-cmd" -RDEPENDS_${PN}-module-net-smtp += "${PN}-module-net-config" -RDEPENDS_${PN}-module-net-smtp += "${PN}-module-socket" -RDEPENDS_${PN}-module-net-smtp += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-smtp += "${PN}-module-warnings" -RDEPENDS_${PN}-module-net-time += "${PN}-module-carp" -RDEPENDS_${PN}-module-net-time += "${PN}-module-exporter" -RDEPENDS_${PN}-module-net-time += "${PN}-module-io-select" -RDEPENDS_${PN}-module-net-time += "${PN}-module-io-socket" -RDEPENDS_${PN}-module-net-time += "${PN}-module-net-config" -RDEPENDS_${PN}-module-net-time += "${PN}-module-strict" -RDEPENDS_${PN}-module-net-time += "${PN}-module-warnings" -RDEPENDS_${PN}-module-next += "${PN}-module-carp" -RDEPENDS_${PN}-module-next += "${PN}-module-overload" -RDEPENDS_${PN}-module-next += "${PN}-module-strict" -RDEPENDS_${PN}-module-odbm-file += "${PN}-module-strict" -RDEPENDS_${PN}-module-odbm-file += "${PN}-module-tie-hash" -RDEPENDS_${PN}-module-odbm-file += "${PN}-module-warnings" -RDEPENDS_${PN}-module-odbm-file += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-ok += "${PN}-module-strict" -RDEPENDS_${PN}-module-ok += "${PN}-module-test-more" -RDEPENDS_${PN}-module-opcode += "${PN}-module-carp" -RDEPENDS_${PN}-module-opcode 
+= "${PN}-module-exporter" -RDEPENDS_${PN}-module-opcode += "${PN}-module-strict" -RDEPENDS_${PN}-module-opcode += "${PN}-module-subs" -RDEPENDS_${PN}-module-opcode += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-open += "${PN}-module-carp" -RDEPENDS_${PN}-module-open += "${PN}-module-encode" -RDEPENDS_${PN}-module-open += "${PN}-module-encoding" -RDEPENDS_${PN}-module-open += "${PN}-module-warnings" -RDEPENDS_${PN}-module-o += "${PN}-module-b" -RDEPENDS_${PN}-module-o += "${PN}-module-carp" -RDEPENDS_${PN}-module-ops += "${PN}-module-opcode" -RDEPENDS_${PN}-module-overloading += "${PN}-module-overload-numbers" -RDEPENDS_${PN}-module-overloading += "${PN}-module-warnings" -RDEPENDS_${PN}-module-overload += "${PN}-module-mro" -RDEPENDS_${PN}-module-overload += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-overload += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-params-check += "${PN}-module-carp" -RDEPENDS_${PN}-module-params-check += "${PN}-module-exporter" -RDEPENDS_${PN}-module-params-check += "${PN}-module-locale-maketext-simple" -RDEPENDS_${PN}-module-params-check += "${PN}-module-strict" -RDEPENDS_${PN}-module-params-check += "${PN}-module-vars" -RDEPENDS_${PN}-module-parent += "${PN}-module-strict" -RDEPENDS_${PN}-module-parent += "${PN}-module-vars" -RDEPENDS_${PN}-module-perlfaq += "${PN}-module-strict" -RDEPENDS_${PN}-module-perlfaq += "${PN}-module-warnings" -RDEPENDS_${PN}-module-perlio-encoding += "${PN}-module-strict" -RDEPENDS_${PN}-module-perlio-encoding += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-perlio-mmap += "${PN}-module-strict" -RDEPENDS_${PN}-module-perlio-mmap += "${PN}-module-warnings" -RDEPENDS_${PN}-module-perlio-mmap += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-perlio-scalar += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-perlio-via += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-perlio-via-quotedprint += "${PN}-module-mime-quotedprint" -RDEPENDS_${PN}-module-perlio-via-quotedprint += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-checker += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-checker += "${PN}-module-exporter" -RDEPENDS_${PN}-module-pod-checker += "${PN}-module-pod-parser" -RDEPENDS_${PN}-module-pod-checker += "${PN}-module-pod-parseutils" -RDEPENDS_${PN}-module-pod-checker += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-checker += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-escapes += "${PN}-module-exporter" -RDEPENDS_${PN}-module-pod-escapes += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-escapes += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-escapes += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-find += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-find += "${PN}-module-config" -RDEPENDS_${PN}-module-pod-find += "${PN}-module-cwd" -RDEPENDS_${PN}-module-pod-find += "${PN}-module-exporter" -RDEPENDS_${PN}-module-pod-find += "${PN}-module-file-find" -RDEPENDS_${PN}-module-pod-find += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-pod-find += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-find += "${PN}-module-symbol" -RDEPENDS_${PN}-module-pod-find += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-functions += "${PN}-module-exporter" -RDEPENDS_${PN}-module-pod-functions += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-inputobjects += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-inputobjects += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-man += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-man += "${PN}-module-encode" -RDEPENDS_${PN}-module-pod-man += "${PN}-module-file-basename" 
-RDEPENDS_${PN}-module-pod-man += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-pod-man += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-man += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-man += "${PN}-module-subs" -RDEPENDS_${PN}-module-pod-man += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-parselink += "${PN}-module-exporter" -RDEPENDS_${PN}-module-pod-parselink += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-parselink += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-parser += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-parser += "${PN}-module-exporter" -RDEPENDS_${PN}-module-pod-parser += "${PN}-module-pod-inputobjects" -RDEPENDS_${PN}-module-pod-parser += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-parser += "${PN}-module-symbol" -RDEPENDS_${PN}-module-pod-parser += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-parseutils += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-parseutils += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-parseutils += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-config" -RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-file-spec-functions" -RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-perldoc-getoptsoo += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-getoptsoo += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-config" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-encode" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-file-spec-functions" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-file-temp" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-pod-perldoc-getoptsoo" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-perldoc-toansi += "${PN}-module-parent" -RDEPENDS_${PN}-module-pod-perldoc-toansi += "${PN}-module-pod-text-color" -RDEPENDS_${PN}-module-pod-perldoc-toansi += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-toansi += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-toansi += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-perldoc-tochecker += "${PN}-module-pod-checker" -RDEPENDS_${PN}-module-pod-perldoc-tochecker += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-tochecker += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-tochecker += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-encode" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-file-spec-functions" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-io-handle" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-io-select" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-ipc-open3" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-parent" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-pod-man" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-pod-perldoc-topod" 
-RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-symbol" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-perldoc-tonroff += "${PN}-module-parent" -RDEPENDS_${PN}-module-pod-perldoc-tonroff += "${PN}-module-pod-man" -RDEPENDS_${PN}-module-pod-perldoc-tonroff += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-tonroff += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-tonroff += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-perldoc-topod += "${PN}-module-parent" -RDEPENDS_${PN}-module-pod-perldoc-topod += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-topod += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-topod += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-perldoc-tortf += "${PN}-module-parent" -RDEPENDS_${PN}-module-pod-perldoc-tortf += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-tortf += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-tortf += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-perldoc-toterm += "${PN}-module-parent" -RDEPENDS_${PN}-module-pod-perldoc-toterm += "${PN}-module-pod-text-termcap" -RDEPENDS_${PN}-module-pod-perldoc-toterm += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-toterm += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-toterm += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-perldoc-totext += "${PN}-module-parent" -RDEPENDS_${PN}-module-pod-perldoc-totext += "${PN}-module-pod-text" -RDEPENDS_${PN}-module-pod-perldoc-totext += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-totext += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-totext += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-perldoc-toxml += "${PN}-module-parent" -RDEPENDS_${PN}-module-pod-perldoc-toxml += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-perldoc-toxml += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-perldoc-toxml += "${PN}-module-warnings" -RDEPENDS_${PN}-module-pod-plaintext += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-plaintext += "${PN}-module-pod-select" -RDEPENDS_${PN}-module-pod-plaintext += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-plaintext += "${PN}-module-symbol" -RDEPENDS_${PN}-module-pod-plaintext += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-select += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-select += "${PN}-module-pod-parser" -RDEPENDS_${PN}-module-pod-select += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-select += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-integer" -RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-pod-simple-transcode" -RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-pod-simple-methody" -RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-text-wrap" -RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-debug += "${PN}-module-carp" 
-RDEPENDS_${PN}-module-pod-simple-debug += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-debug += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-debug += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-dumpastext += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-dumpastext += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-dumpastext += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-dumpasxml += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-dumpasxml += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-dumpasxml += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-dumpasxml += "${PN}-module-text-wrap" -RDEPENDS_${PN}-module-pod-simple-linksection += "${PN}-module-overload" -RDEPENDS_${PN}-module-pod-simple-linksection += "${PN}-module-pod-simple-blackbox" -RDEPENDS_${PN}-module-pod-simple-linksection += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-linksection += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-methody += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-methody += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-methody += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple += "${PN}-module-integer" -RDEPENDS_${PN}-module-pod-simple += "${PN}-module-pod-escapes" -RDEPENDS_${PN}-module-pod-simple += "${PN}-module-pod-simple-blackbox" -RDEPENDS_${PN}-module-pod-simple += "${PN}-module-pod-simple-linksection" -RDEPENDS_${PN}-module-pod-simple += "${PN}-module-pod-simple-tiedoutfh" -RDEPENDS_${PN}-module-pod-simple += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple += "${PN}-module-symbol" -RDEPENDS_${PN}-module-pod-simple += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-progress += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-pullparserendtoken += "${PN}-module-pod-simple-pullparsertoken" -RDEPENDS_${PN}-module-pod-simple-pullparserendtoken += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-pullparserendtoken += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-pod-simple-pullparserendtoken" -RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-pod-simple-pullparserstarttoken" -RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-pod-simple-pullparsertexttoken" -RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-pullparserstarttoken += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-pullparserstarttoken += "${PN}-module-pod-simple-pullparsertoken" -RDEPENDS_${PN}-module-pod-simple-pullparserstarttoken += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-pullparserstarttoken += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-pullparsertexttoken += "${PN}-module-pod-simple-pullparsertoken" -RDEPENDS_${PN}-module-pod-simple-pullparsertexttoken += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-pullparsertexttoken += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-pullparsertoken += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-rtf += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-rtf += "${PN}-module-integer" -RDEPENDS_${PN}-module-pod-simple-rtf += "${PN}-module-pod-simple-pullparser" -RDEPENDS_${PN}-module-pod-simple-rtf += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-rtf += 
"${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-config" -RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-cwd" -RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-simpletree += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-simpletree += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-simpletree += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-simpletree += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-textcontent += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-textcontent += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-textcontent += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-textcontent += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-pod-simple-methody" -RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-text-wrap" -RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-tiedoutfh += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-tiedoutfh += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-tiedoutfh += "${PN}-module-symbol" -RDEPENDS_${PN}-module-pod-simple-tiedoutfh += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-transcodedumb += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-transcodedumb += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-transcode += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-transcodesmart += "${PN}-module-encode" -RDEPENDS_${PN}-module-pod-simple-transcodesmart += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-transcodesmart += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-transcodesmart += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-simple-xmloutstream += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-simple-xmloutstream += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-simple-xmloutstream += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-simple-xmloutstream += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-text-color += "${PN}-module-pod-text" -RDEPENDS_${PN}-module-pod-text-color += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-text-color += "${PN}-module-term-ansicolor" -RDEPENDS_${PN}-module-pod-text-color += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-text-overstrike += "${PN}-module-pod-text" -RDEPENDS_${PN}-module-pod-text-overstrike += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-text-overstrike += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-text += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-text += "${PN}-module-encode" -RDEPENDS_${PN}-module-pod-text += "${PN}-module-exporter" -RDEPENDS_${PN}-module-pod-text += "${PN}-module-pod-simple" -RDEPENDS_${PN}-module-pod-text += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-text += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-pod-text" -RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-posix" -RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-strict" 
-RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-term-cap" -RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-vars" -RDEPENDS_${PN}-module-pod-usage += "${PN}-module-carp" -RDEPENDS_${PN}-module-pod-usage += "${PN}-module-config" -RDEPENDS_${PN}-module-pod-usage += "${PN}-module-exporter" -RDEPENDS_${PN}-module-pod-usage += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-pod-usage += "${PN}-module-strict" -RDEPENDS_${PN}-module-pod-usage += "${PN}-module-vars" -RDEPENDS_${PN}-module-posix += "${PN}-module-exporter" -RDEPENDS_${PN}-module-posix += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-posix += "${PN}-module-strict" -RDEPENDS_${PN}-module-posix += "${PN}-module-tie-hash" -RDEPENDS_${PN}-module-posix += "${PN}-module-warnings" -RDEPENDS_${PN}-module-posix += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-re += "${PN}-module-carp" -RDEPENDS_${PN}-module-re += "${PN}-module-exporter" -RDEPENDS_${PN}-module-re += "${PN}-module-strict" -RDEPENDS_${PN}-module-re += "${PN}-module-term-cap" -RDEPENDS_${PN}-module-re += "${PN}-module-warnings" -RDEPENDS_${PN}-module-re += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-safe += "${PN}-module-b" -RDEPENDS_${PN}-module-safe += "${PN}-module-carp" -RDEPENDS_${PN}-module-safe += "${PN}-module-carp-heavy" -RDEPENDS_${PN}-module-safe += "${PN}-module-opcode" -RDEPENDS_${PN}-module-safe += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-safe += "${PN}-module-strict" -RDEPENDS_${PN}-module-safe += "${PN}-module-utf8" -RDEPENDS_${PN}-module-scalar-util += "${PN}-module-carp" -RDEPENDS_${PN}-module-scalar-util += "${PN}-module-exporter" -RDEPENDS_${PN}-module-scalar-util += "${PN}-module-list-util" -RDEPENDS_${PN}-module-scalar-util += "${PN}-module-strict" -RDEPENDS_${PN}-module-sdbm-file += "${PN}-module-exporter" -RDEPENDS_${PN}-module-sdbm-file += "${PN}-module-strict" -RDEPENDS_${PN}-module-sdbm-file += "${PN}-module-tie-hash" -RDEPENDS_${PN}-module-sdbm-file += "${PN}-module-warnings" -RDEPENDS_${PN}-module-sdbm-file += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-search-dict += "${PN}-module-exporter" -RDEPENDS_${PN}-module-search-dict += "${PN}-module-feature" -RDEPENDS_${PN}-module-search-dict += "${PN}-module-strict" -RDEPENDS_${PN}-module-selectsaver += "${PN}-module-carp" -RDEPENDS_${PN}-module-selectsaver += "${PN}-module-symbol" -RDEPENDS_${PN}-module-selfloader += "${PN}-module-exporter" -RDEPENDS_${PN}-module-selfloader += "${PN}-module-io-handle" -RDEPENDS_${PN}-module-selfloader += "${PN}-module-strict" -RDEPENDS_${PN}-module-selfloader += "${PN}-module-vars" -RDEPENDS_${PN}-module-sigtrap += "${PN}-module-carp" -RDEPENDS_${PN}-module-sigtrap += "${PN}-module-symbol" -RDEPENDS_${PN}-module-socket += "${PN}-module-carp" -RDEPENDS_${PN}-module-socket += "${PN}-module-exporter" -RDEPENDS_${PN}-module-socket += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-socket += "${PN}-module-strict" -RDEPENDS_${PN}-module-socket += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-socket += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-sort += "${PN}-module-carp" -RDEPENDS_${PN}-module-sort += "${PN}-module-strict" -RDEPENDS_${PN}-module-storable += "${PN}-module-carp" -RDEPENDS_${PN}-module-storable += "${PN}-module-config" -RDEPENDS_${PN}-module-storable += "${PN}-module-exporter" -RDEPENDS_${PN}-module-storable += "${PN}-module-io-file" -RDEPENDS_${PN}-module-storable += "${PN}-module-vars" -RDEPENDS_${PN}-module-storable += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-sub-util += "${PN}-module-exporter" 
-RDEPENDS_${PN}-module-sub-util += "${PN}-module-list-util" -RDEPENDS_${PN}-module-sub-util += "${PN}-module-strict" -RDEPENDS_${PN}-module-sub-util += "${PN}-module-warnings" -RDEPENDS_${PN}-module-symbol += "${PN}-module-exporter" -RDEPENDS_${PN}-module-sys-hostname += "${PN}-module-carp" -RDEPENDS_${PN}-module-sys-hostname += "${PN}-module-exporter" -RDEPENDS_${PN}-module-sys-hostname += "${PN}-module-posix" -RDEPENDS_${PN}-module-sys-hostname += "${PN}-module-strict" -RDEPENDS_${PN}-module-sys-hostname += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-carp" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-exporter" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-posix" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-socket" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-strict" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-sys-hostname" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-vars" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-warnings" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-tap-base += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-base += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-base += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-base += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-formatter-base += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-formatter-base += "${PN}-module-posix" -RDEPENDS_${PN}-module-tap-formatter-base += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-formatter-base += "${PN}-module-tap-formatter-color" -RDEPENDS_${PN}-module-tap-formatter-base += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-formatter-color += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-formatter-color += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-formatter-color += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-formatter-color += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-file-path" -RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-formatter-console += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-formatter-console += "${PN}-module-posix" -RDEPENDS_${PN}-module-tap-formatter-console += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-formatter-console += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-formatter-console-session += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-formatter-console-session += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-formatter-console-session += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-formatter-file += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-formatter-file += "${PN}-module-posix" -RDEPENDS_${PN}-module-tap-formatter-file += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-formatter-file += 
"${PN}-module-tap-formatter-file-session" -RDEPENDS_${PN}-module-tap-formatter-file += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-formatter-file-session += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-formatter-file-session += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-formatter-file-session += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-formatter-session += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-formatter-session += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-formatter-session += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-harness-env += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-harness-env += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-harness-env += "${PN}-module-tap-object" -RDEPENDS_${PN}-module-tap-harness-env += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-tap-harness-env += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-harness += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-harness += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-harness += "${PN}-module-file-path" -RDEPENDS_${PN}-module-tap-harness += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-tap-harness += "${PN}-module-io-handle" -RDEPENDS_${PN}-module-tap-harness += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-harness += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-object += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-object += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-object += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-aggregator += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-aggregator += "${PN}-module-benchmark" -RDEPENDS_${PN}-module-tap-parser-aggregator += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-parser-aggregator += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-aggregator += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-tap-parser-resultfactory" -RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-tap-parser-yamlish-reader" -RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-iterator-array += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-iterator-array += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-iterator-array += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-iterator += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-iterator += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-parser-iterator += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-iterator += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-iterator-process += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-iterator-process += "${PN}-module-config" -RDEPENDS_${PN}-module-tap-parser-iterator-process += "${PN}-module-io-handle" -RDEPENDS_${PN}-module-tap-parser-iterator-process += "${PN}-module-strict" 
-RDEPENDS_${PN}-module-tap-parser-iterator-process += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-iterator-stream += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-iterator-stream += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-iterator-stream += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-multiplexer += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-multiplexer += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-parser-multiplexer += "${PN}-module-io-select" -RDEPENDS_${PN}-module-tap-parser-multiplexer += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-multiplexer += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-grammar" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-iterator" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-iteratorfactory" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-result" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-resultfactory" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-source" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-sourcehandler-executable" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-sourcehandler-file" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-sourcehandler-handle" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-sourcehandler-perl" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-sourcehandler-rawtap" -RDEPENDS_${PN}-module-tap-parser += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-result-bailout += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-result-bailout += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-result-bailout += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-result-comment += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-result-comment += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-result-comment += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-bailout" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-comment" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-plan" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-pragma" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-test" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-unknown" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-version" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-yaml" -RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-result += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-result += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-result += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-result-plan += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-result-plan += "${PN}-module-strict" 
-RDEPENDS_${PN}-module-tap-parser-result-plan += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-result-pragma += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-result-pragma += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-result-pragma += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-result-test += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-result-test += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-result-test += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-result-unknown += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-result-unknown += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-result-unknown += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-result-version += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-result-version += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-result-version += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-result-yaml += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-result-yaml += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-result-yaml += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-scheduler-job += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-parser-scheduler-job += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-scheduler-job += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-scheduler += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-parser-scheduler += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-scheduler += "${PN}-module-tap-parser-scheduler-job" -RDEPENDS_${PN}-module-tap-parser-scheduler += "${PN}-module-tap-parser-scheduler-spinner" -RDEPENDS_${PN}-module-tap-parser-scheduler += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-scheduler-spinner += "${PN}-module-carp" -RDEPENDS_${PN}-module-tap-parser-scheduler-spinner += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-scheduler-spinner += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-tap-parser-iteratorfactory" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-tap-parser-iterator-process" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-tap-parser-iteratorfactory" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-tap-parser-iterator-stream" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-tap-parser-iteratorfactory" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-tap-parser-iterator-stream" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += 
"${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-sourcehandler += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-sourcehandler += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-sourcehandler += "${PN}-module-tap-parser-iterator" -RDEPENDS_${PN}-module-tap-parser-sourcehandler += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-config" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-tap-parser-iteratorfactory" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-tap-parser-iterator-process" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-rawtap += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-rawtap += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-rawtap += "${PN}-module-tap-parser-iterator-array" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-rawtap += "${PN}-module-tap-parser-iteratorfactory" -RDEPENDS_${PN}-module-tap-parser-sourcehandler-rawtap += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-source += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-source += "${PN}-module-constant" -RDEPENDS_${PN}-module-tap-parser-source += "${PN}-module-file-basename" -RDEPENDS_${PN}-module-tap-parser-source += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-source += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-yamlish-reader += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-yamlish-reader += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-yamlish-reader += "${PN}-module-warnings" -RDEPENDS_${PN}-module-tap-parser-yamlish-writer += "${PN}-module-base" -RDEPENDS_${PN}-module-tap-parser-yamlish-writer += "${PN}-module-strict" -RDEPENDS_${PN}-module-tap-parser-yamlish-writer += "${PN}-module-warnings" -RDEPENDS_${PN}-module-term-ansicolor += "${PN}-module-carp" -RDEPENDS_${PN}-module-term-ansicolor += "${PN}-module-exporter" -RDEPENDS_${PN}-module-term-ansicolor += "${PN}-module-strict" -RDEPENDS_${PN}-module-term-ansicolor += "${PN}-module-warnings" -RDEPENDS_${PN}-module-term-cap += "${PN}-module-carp" -RDEPENDS_${PN}-module-term-cap += "${PN}-module-strict" -RDEPENDS_${PN}-module-term-cap += "${PN}-module-vars" -RDEPENDS_${PN}-module-term-complete += "${PN}-module-exporter" -RDEPENDS_${PN}-module-term-complete += "${PN}-module-strict" -RDEPENDS_${PN}-module-term-readline += "${PN}-module-strict" -RDEPENDS_${PN}-module-term-readline += "${PN}-module-term-cap" -RDEPENDS_${PN}-module-test-builder-module += "${PN}-module-exporter" -RDEPENDS_${PN}-module-test-builder-module += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-builder-module += "${PN}-module-test-builder" -RDEPENDS_${PN}-module-test-builder += "${PN}-module-config" -RDEPENDS_${PN}-module-test-builder += "${PN}-module-perlio" -RDEPENDS_${PN}-module-test-builder += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-builder += "${PN}-module-threads-shared" -RDEPENDS_${PN}-module-test-builder += "${PN}-module-warnings" -RDEPENDS_${PN}-module-test-builder-tester-color += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-builder-tester-color += 
"${PN}-module-test-builder-tester" -RDEPENDS_${PN}-module-test-builder-tester += "${PN}-module-carp" -RDEPENDS_${PN}-module-test-builder-tester += "${PN}-module-exporter" -RDEPENDS_${PN}-module-test-builder-tester += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-builder-tester += "${PN}-module-symbol" -RDEPENDS_${PN}-module-test-builder-tester += "${PN}-module-test-builder" -RDEPENDS_${PN}-module-test-harness += "${PN}-module-base" -RDEPENDS_${PN}-module-test-harness += "${PN}-module-config" -RDEPENDS_${PN}-module-test-harness += "${PN}-module-constant" -RDEPENDS_${PN}-module-test-harness += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-harness += "${PN}-module-tap-harness" -RDEPENDS_${PN}-module-test-harness += "${PN}-module-tap-parser-aggregator" -RDEPENDS_${PN}-module-test-harness += "${PN}-module-tap-parser-source" -RDEPENDS_${PN}-module-test-harness += "${PN}-module-tap-parser-sourcehandler-perl" -RDEPENDS_${PN}-module-test-harness += "${PN}-module-text-parsewords" -RDEPENDS_${PN}-module-test-harness += "${PN}-module-warnings" -RDEPENDS_${PN}-module-test-more += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-test-more += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-more += "${PN}-module-test-builder-module" -RDEPENDS_${PN}-module-test-more += "${PN}-module-warnings" -RDEPENDS_${PN}-module-test += "${PN}-module-carp" -RDEPENDS_${PN}-module-test += "${PN}-module-exporter" -RDEPENDS_${PN}-module-test += "${PN}-module-file-temp" -RDEPENDS_${PN}-module-test += "${PN}-module-strict" -RDEPENDS_${PN}-module-test += "${PN}-module-vars" -RDEPENDS_${PN}-module-test-simple += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-simple += "${PN}-module-test-builder-module" -RDEPENDS_${PN}-module-test-tester-capture += "${PN}-module-config" -RDEPENDS_${PN}-module-test-tester-capture += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-tester-capture += "${PN}-module-test-builder" -RDEPENDS_${PN}-module-test-tester-capture += "${PN}-module-threads-shared" -RDEPENDS_${PN}-module-test-tester-capture += "${PN}-module-vars" -RDEPENDS_${PN}-module-test-tester-capturerunner += "${PN}-module-exporter" -RDEPENDS_${PN}-module-test-tester-capturerunner += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-tester-capturerunner += "${PN}-module-test-tester-capture" -RDEPENDS_${PN}-module-test-tester-delegate += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-tester-delegate += "${PN}-module-vars" -RDEPENDS_${PN}-module-test-tester-delegate += "${PN}-module-warnings" -RDEPENDS_${PN}-module-test-tester += "${PN}-module-exporter" -RDEPENDS_${PN}-module-test-tester += "${PN}-module-strict" -RDEPENDS_${PN}-module-test-tester += "${PN}-module-test-builder" -RDEPENDS_${PN}-module-test-tester += "${PN}-module-test-tester-capturerunner" -RDEPENDS_${PN}-module-test-tester += "${PN}-module-test-tester-delegate" -RDEPENDS_${PN}-module-test-tester += "${PN}-module-vars" -RDEPENDS_${PN}-module-text-abbrev += "${PN}-module-exporter" -RDEPENDS_${PN}-module-text-balanced += "${PN}-module-carp" -RDEPENDS_${PN}-module-text-balanced += "${PN}-module-exporter" -RDEPENDS_${PN}-module-text-balanced += "${PN}-module-overload" -RDEPENDS_${PN}-module-text-balanced += "${PN}-module-selfloader" -RDEPENDS_${PN}-module-text-balanced += "${PN}-module-strict" -RDEPENDS_${PN}-module-text-balanced += "${PN}-module-vars" -RDEPENDS_${PN}-module-text-parsewords += "${PN}-module-carp" -RDEPENDS_${PN}-module-text-parsewords += "${PN}-module-exporter" -RDEPENDS_${PN}-module-text-parsewords += "${PN}-module-strict" 
-RDEPENDS_${PN}-module-text-tabs += "${PN}-module-exporter" -RDEPENDS_${PN}-module-text-tabs += "${PN}-module-strict" -RDEPENDS_${PN}-module-text-tabs += "${PN}-module-vars" -RDEPENDS_${PN}-module-text-wrap += "${PN}-module-exporter" -RDEPENDS_${PN}-module-text-wrap += "${PN}-module-re" -RDEPENDS_${PN}-module-text-wrap += "${PN}-module-strict" -RDEPENDS_${PN}-module-text-wrap += "${PN}-module-text-tabs" -RDEPENDS_${PN}-module-text-wrap += "${PN}-module-vars" -RDEPENDS_${PN}-module-text-wrap += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-thread += "${PN}-module-config" -RDEPENDS_${PN}-module-thread += "${PN}-module-exporter" -RDEPENDS_${PN}-module-thread += "${PN}-module-strict" -RDEPENDS_${PN}-module-thread += "${PN}-module-threads" -RDEPENDS_${PN}-module-thread += "${PN}-module-threads-shared" -RDEPENDS_${PN}-module-thread += "${PN}-module-warnings" -RDEPENDS_${PN}-module-thread-queue += "${PN}-module-carp" -RDEPENDS_${PN}-module-thread-queue += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-thread-queue += "${PN}-module-strict" -RDEPENDS_${PN}-module-thread-queue += "${PN}-module-threads-shared" -RDEPENDS_${PN}-module-thread-queue += "${PN}-module-warnings" -RDEPENDS_${PN}-module-thread-semaphore += "${PN}-module-carp" -RDEPENDS_${PN}-module-thread-semaphore += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-thread-semaphore += "${PN}-module-strict" -RDEPENDS_${PN}-module-thread-semaphore += "${PN}-module-threads-shared" -RDEPENDS_${PN}-module-thread-semaphore += "${PN}-module-warnings" -RDEPENDS_${PN}-module-threads += "${PN}-module-carp" -RDEPENDS_${PN}-module-threads += "${PN}-module-config" -RDEPENDS_${PN}-module-threads += "${PN}-module-overload" -RDEPENDS_${PN}-module-threads += "${PN}-module-strict" -RDEPENDS_${PN}-module-threads += "${PN}-module-warnings" -RDEPENDS_${PN}-module-threads += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-threads-shared += "${PN}-module-carp" -RDEPENDS_${PN}-module-threads-shared += "${PN}-module-scalar-util" -RDEPENDS_${PN}-module-threads-shared += "${PN}-module-strict" -RDEPENDS_${PN}-module-threads-shared += "${PN}-module-warnings" -RDEPENDS_${PN}-module-threads-shared += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-tie-array += "${PN}-module-carp" -RDEPENDS_${PN}-module-tie-array += "${PN}-module-strict" -RDEPENDS_${PN}-module-tie-array += "${PN}-module-vars" -RDEPENDS_${PN}-module-tie-file += "${PN}-module-carp" -RDEPENDS_${PN}-module-tie-file += "${PN}-module-fcntl" -RDEPENDS_${PN}-module-tie-file += "${PN}-module-posix" -RDEPENDS_${PN}-module-tie-file += "${PN}-module-strict" -RDEPENDS_${PN}-module-tie-file += "${PN}-module-symbol" -RDEPENDS_${PN}-module-tie-handle += "${PN}-module-carp" -RDEPENDS_${PN}-module-tie-handle += "${PN}-module-tie-stdhandle" -RDEPENDS_${PN}-module-tie-handle += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-tie-hash-namedcapture += "${PN}-module-strict" -RDEPENDS_${PN}-module-tie-hash-namedcapture += "${PN}-module-xsloader" -RDEPENDS_${PN}-module-tie-hash += "${PN}-module-carp" -RDEPENDS_${PN}-module-tie-hash += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-tie-memoize += "${PN}-module-strict" -RDEPENDS_${PN}-module-tie-memoize += "${PN}-module-tie-hash" -RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-carp" -RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-config" -RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-overload" -RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-strict" -RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-tie-hash" 
-RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-vars" -RDEPENDS_${PN}-module-tie-scalar += "${PN}-module-carp" -RDEPENDS_${PN}-module-tie-scalar += "${PN}-module-warnings-register" -RDEPENDS_${PN}-module-tie-stdhandle += "${PN}-module-strict" -RDEPENDS_${PN}-module-tie-stdhandle += "${PN}-module-tie-handle" -RDEPENDS_${PN}-module-tie-stdhandle += "${PN}-module-vars" -RDEPENDS_${PN}-module-tie-substrhash += "${PN}-module-carp" -RDEPENDS_${PN}-module-tie-substrhash += "${PN}-module-integer" -RDEPENDS_${PN}-module-time-gmtime += "${PN}-module-exporter" -RDEPENDS_${PN}-module-time-gmtime += "${PN}-module-strict" -RDEPENDS_${PN}-module-time-gmtime += "${PN}-module-time-tm" -RDEPENDS_${PN}-module-time-gmtime += "${PN}-module-vars" -RDEPENDS_${PN}-module-time-hires += "${PN}-module-carp" -RDEPENDS_${PN}-module-time-hires += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-time-hires += "${PN}-module-exporter" -RDEPENDS_${PN}-module-time-hires += "${PN}-module-strict" -RDEPENDS_${PN}-module-time-local += "${PN}-module-carp" -RDEPENDS_${PN}-module-time-local += "${PN}-module-config" -RDEPENDS_${PN}-module-time-local += "${PN}-module-constant" -RDEPENDS_${PN}-module-time-local += "${PN}-module-exporter" -RDEPENDS_${PN}-module-time-local += "${PN}-module-strict" -RDEPENDS_${PN}-module-time-local += "${PN}-module-vars" -RDEPENDS_${PN}-module-time-localtime += "${PN}-module-exporter" -RDEPENDS_${PN}-module-time-localtime += "${PN}-module-strict" -RDEPENDS_${PN}-module-time-localtime += "${PN}-module-time-tm" -RDEPENDS_${PN}-module-time-localtime += "${PN}-module-vars" -RDEPENDS_${PN}-module-time-piece += "${PN}-module-carp" -RDEPENDS_${PN}-module-time-piece += "${PN}-module-constant" -RDEPENDS_${PN}-module-time-piece += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-time-piece += "${PN}-module-exporter" -RDEPENDS_${PN}-module-time-piece += "${PN}-module-integer" -RDEPENDS_${PN}-module-time-piece += "${PN}-module-overload" -RDEPENDS_${PN}-module-time-piece += "${PN}-module-strict" -RDEPENDS_${PN}-module-time-piece += "${PN}-module-time-local" -RDEPENDS_${PN}-module-time-piece += "${PN}-module-time-seconds" -RDEPENDS_${PN}-module-time-seconds += "${PN}-module-constant" -RDEPENDS_${PN}-module-time-seconds += "${PN}-module-exporter" -RDEPENDS_${PN}-module-time-seconds += "${PN}-module-overload " -RDEPENDS_${PN}-module-time-seconds += "${PN}-module-strict" -RDEPENDS_${PN}-module-time-seconds += "${PN}-module-vars" -RDEPENDS_${PN}-module-time-tm += "${PN}-module-class-struct" -RDEPENDS_${PN}-module-time-tm += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-collate-cjk-big5 += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-collate-cjk-big5 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-unicode-collate-cjk-gb2312 += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-collate-cjk-gb2312 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-unicode-collate-cjk-jisx0208 += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-collate-cjk-jisx0208 += "${PN}-module-warnings" -RDEPENDS_${PN}-module-unicode-collate-cjk-korean += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-collate-cjk-korean += "${PN}-module-unicode-collate" -RDEPENDS_${PN}-module-unicode-collate-cjk-korean += "${PN}-module-warnings" -RDEPENDS_${PN}-module-unicode-collate-cjk-pinyin += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-collate-cjk-pinyin += "${PN}-module-warnings" -RDEPENDS_${PN}-module-unicode-collate-cjk-stroke += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-collate-cjk-stroke += 
"${PN}-module-warnings" -RDEPENDS_${PN}-module-unicode-collate-cjk-zhuyin += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-collate-cjk-zhuyin += "${PN}-module-warnings" -RDEPENDS_${PN}-module-unicode-collate-locale += "${PN}-module-base" -RDEPENDS_${PN}-module-unicode-collate-locale += "${PN}-module-carp" -RDEPENDS_${PN}-module-unicode-collate-locale += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-collate-locale += "${PN}-module-warnings" -RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-carp" -RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-constant" -RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-dynaloader" -RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-warnings" -RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-carp" -RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-constant" -RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-exporter" -RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-warnings" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-carp" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-charnames" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-exporter" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-feature" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-file-spec" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-if" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-integer" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-strict" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-unicode-normalize" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-unicore" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-utf8-heavy" -RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-warnings" -RDEPENDS_${PN}-module-unicore += "${PN}-module-integer" -RDEPENDS_${PN}-module-universal += "${PN}-module-carp" -RDEPENDS_${PN}-module-user-grent += "${PN}-module-class-struct" -RDEPENDS_${PN}-module-user-grent += "${PN}-module-exporter" -RDEPENDS_${PN}-module-user-grent += "${PN}-module-strict" -RDEPENDS_${PN}-module-user-grent += "${PN}-module-vars" -RDEPENDS_${PN}-module-user-pwent += "${PN}-module-carp" -RDEPENDS_${PN}-module-user-pwent += "${PN}-module-class-struct" -RDEPENDS_${PN}-module-user-pwent += "${PN}-module-config" -RDEPENDS_${PN}-module-user-pwent += "${PN}-module-exporter" -RDEPENDS_${PN}-module-user-pwent += "${PN}-module-strict" -RDEPENDS_${PN}-module-user-pwent += "${PN}-module-vars" -RDEPENDS_${PN}-module-user-pwent += "${PN}-module-warnings" -RDEPENDS_${PN}-module-utf8 += "${PN}-module-carp" -RDEPENDS_${PN}-module-utf8 += "${PN}-module-utf8-heavy" -RDEPENDS_${PN}-module-version += "${PN}-module-strict" -RDEPENDS_${PN}-module-version += "${PN}-module-vars" -RDEPENDS_${PN}-module-version += "${PN}-module-version-regex" -RDEPENDS_${PN}-module-version-regex += "${PN}-module-strict" -RDEPENDS_${PN}-module-version-regex += "${PN}-module-vars" -RDEPENDS_${PN}-module-version-vpp += "${PN}-module-b" -RDEPENDS_${PN}-module-version-vpp += "${PN}-module-carp" -RDEPENDS_${PN}-module-version-vpp += "${PN}-module-config" -RDEPENDS_${PN}-module-version-vpp += "${PN}-module-constant" -RDEPENDS_${PN}-module-version-vpp += "${PN}-module-if" -RDEPENDS_${PN}-module-version-vpp += "${PN}-module-overload" 
-RDEPENDS_${PN}-module-version-vpp += "${PN}-module-posix" -RDEPENDS_${PN}-module-version-vpp += "${PN}-module-strict" -RDEPENDS_${PN}-module-version-vpp += "${PN}-module-universal" -RDEPENDS_${PN}-module-version-vpp += "${PN}-module-vars" -RDEPENDS_${PN}-module-version-vpp += "${PN}-module-version-regex" -RDEPENDS_${PN}-module-xsloader += "${PN}-module-carp" -RDEPENDS_${PN}-module-xsloader += "${PN}-module-dynaloader" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-rdepends_5.24.1.inc b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-rdepends_5.24.1.inc new file mode 100644 index 000000000..10e3c0429 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl-rdepends_5.24.1.inc @@ -0,0 +1,2575 @@ +# To create/update the perl-rdepends_${PV}.inc use this piece of ugly script (modified for your arch/paths etc): + +#jiahongxu:5.20.0-r1$ pwd +#/home/jiahongxu/yocto/build-20140618-perl/tmp/work/i586-poky-linux/perl/5.20.0-r1 + +#1 cp -r packages-split packages-split.new && cd packages-split.new +#2 find . -name \*.pm | xargs sed -i '/^=head/,/^=cut/d' +#3 egrep -r "^\s*(\ */+= \"perl-module-/g;s/CPANPLUS::.*/cpanplus/g;s/CPAN::.*/cpan/g;s/::/-/g;s/ [^+\"].*//g;s/_/-/g;s/\.pl\"$/\"/;s/\"\?\$/\"/;s/(//;" | tr [:upper:] [:lower:] +#| awk '{if ($3 != "\x22"$1"\x22"){ print $0}}' +#| grep -v -e "\-vms\-" -e module-5 -e "^$" -e "\\$" -e your -e tk -e autoperl -e html -e http -e parse-cpan -e perl-ostype -e ndbm-file -e module-mac -e fcgi -e lwp -e dbd -e dbix +#| sort -u +#| sed 's/^/RDEPENDS_/;s/perl-module-/${PN}-module-/g;s/module-\(module-\)/\1/g;s/\(module-load\)-conditional/\1/g;s/encode-configlocal/&-pm/;' +#| egrep -wv '=>|module-a|module-apache.?|module-apr|module-authen-sasl|module-b-asmdata|module-convert-ebcdic|module-devel-size|module-digest-perl-md5|module-dumpvalue|module-extutils-constant-aaargh56hash|module-extutils-xssymset|module-file-bsdglob|module-for|module-it|module-io-string|module-ipc-system-simple|module-lexical|module-local-lib|metadata|module-modperl-util|module-pluggable-object|module-test-builder-io-scalar|module-text-unidecode|module-win32|objects\sload|syscall.ph|systeminfo.ph|%s' > /tmp/perl-rdepends + +RDEPENDS_perl-misc += "perl perl-modules" +RDEPENDS_${PN}-pod += "perl" + +# Some additional dependencies that the above doesn't manage to figure out +RDEPENDS_${PN}-module-file-spec += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-math-bigint += "${PN}-module-math-bigint-calc" +RDEPENDS_${PN}-module-thread-queue += "${PN}-module-attributes" +RDEPENDS_${PN}-module-overload += "${PN}-module-overloading" + +# Depends list +# copy contents of /tmp/perl-rdepends in this file +RDEPENDS_${PN}-module-anydbm-file += "${PN}-module-strict" +RDEPENDS_${PN}-module-anydbm-file += "${PN}-module-warnings" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-carp" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-config" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-constant" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-cpan" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-cwd" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-data-dumper" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-file-find" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-file-spec-functions" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-getopt-std" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-if" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-net-ping" 
+RDEPENDS_${PN}-module-app-cpan += "${PN}-module-strict" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-user-pwent" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-vars" +RDEPENDS_${PN}-module-app-cpan += "${PN}-module-warnings" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-app-prove-state" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-base" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-carp" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-constant" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-getopt-long" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-strict" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-tap-harness" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-tap-harness-env" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-app-prove += "${PN}-module-warnings" +RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-app-prove-state-result" +RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-base" +RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-carp" +RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-constant" +RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-file-find" +RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-strict" +RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-tap-parser-yamlish-reader" +RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-tap-parser-yamlish-writer" +RDEPENDS_${PN}-module-app-prove-state += "${PN}-module-warnings" +RDEPENDS_${PN}-module-app-prove-state-result += "${PN}-module-app-prove-state-result-test" +RDEPENDS_${PN}-module-app-prove-state-result += "${PN}-module-carp" +RDEPENDS_${PN}-module-app-prove-state-result += "${PN}-module-constant" +RDEPENDS_${PN}-module-app-prove-state-result += "${PN}-module-strict" +RDEPENDS_${PN}-module-app-prove-state-result += "${PN}-module-warnings" +RDEPENDS_${PN}-module-app-prove-state-result-test += "${PN}-module-strict" +RDEPENDS_${PN}-module-app-prove-state-result-test += "${PN}-module-warnings" +RDEPENDS_${PN}-module-archive-tar-constant += "${PN}-module-constant" +RDEPENDS_${PN}-module-archive-tar-constant += "${PN}-module-exporter" +RDEPENDS_${PN}-module-archive-tar-constant += "${PN}-module-io-compress-bzip2" +RDEPENDS_${PN}-module-archive-tar-constant += "${PN}-module-time-local" +RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-archive-tar" +RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-archive-tar-constant" +RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-carp" +RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-io-file" +RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-strict" +RDEPENDS_${PN}-module-archive-tar-file += "${PN}-module-vars" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-archive-tar-constant" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-archive-tar-file" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-carp" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-config" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-cwd" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-exporter" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-file-path" +RDEPENDS_${PN}-module-archive-tar += 
"${PN}-module-file-spec" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-io-file" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-io-zlib" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-strict" +RDEPENDS_${PN}-module-archive-tar += "${PN}-module-vars" +RDEPENDS_${PN}-module-arybase += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-attribute-handlers += "${PN}-module-carp" +RDEPENDS_${PN}-module-attribute-handlers += "${PN}-module-strict" +RDEPENDS_${PN}-module-attribute-handlers += "${PN}-module-vars" +RDEPENDS_${PN}-module-attribute-handlers += "${PN}-module-warnings" +RDEPENDS_${PN}-module-attributes += "${PN}-module-carp" +RDEPENDS_${PN}-module-attributes += "${PN}-module-exporter" +RDEPENDS_${PN}-module-attributes += "${PN}-module-strict" +RDEPENDS_${PN}-module-attributes += "${PN}-module-warnings" +RDEPENDS_${PN}-module-attributes += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-carp" +RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-constant" +RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-fatal" +RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-overload" +RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-strict" +RDEPENDS_${PN}-module-autodie-exception += "${PN}-module-warnings" +RDEPENDS_${PN}-module-autodie-exception-system += "${PN}-module-carp" +RDEPENDS_${PN}-module-autodie-exception-system += "${PN}-module-parent" +RDEPENDS_${PN}-module-autodie-exception-system += "${PN}-module-strict" +RDEPENDS_${PN}-module-autodie-exception-system += "${PN}-module-warnings" +RDEPENDS_${PN}-module-autodie-hints += "${PN}-module-b" +RDEPENDS_${PN}-module-autodie-hints += "${PN}-module-carp" +RDEPENDS_${PN}-module-autodie-hints += "${PN}-module-constant" +RDEPENDS_${PN}-module-autodie-hints += "${PN}-module-strict" +RDEPENDS_${PN}-module-autodie-hints += "${PN}-module-warnings" +RDEPENDS_${PN}-module-autodie += "${PN}-module-carp" +RDEPENDS_${PN}-module-autodie += "${PN}-module-constant" +RDEPENDS_${PN}-module-autodie += "${PN}-module-lib" +RDEPENDS_${PN}-module-autodie += "${PN}-module-parent" +RDEPENDS_${PN}-module-autodie += "${PN}-module-strict" +RDEPENDS_${PN}-module-autodie += "${PN}-module-warnings" +RDEPENDS_${PN}-module-autodie-scope-guard += "${PN}-module-strict" +RDEPENDS_${PN}-module-autodie-scope-guard += "${PN}-module-warnings" +RDEPENDS_${PN}-module-autodie-scope-guardstack += "${PN}-module-autodie-scope-guard" +RDEPENDS_${PN}-module-autodie-scope-guardstack += "${PN}-module-strict" +RDEPENDS_${PN}-module-autodie-scope-guardstack += "${PN}-module-warnings" +RDEPENDS_${PN}-module-autodie-skip += "${PN}-module-strict" +RDEPENDS_${PN}-module-autodie-skip += "${PN}-module-warnings" +RDEPENDS_${PN}-module-autodie-util += "${PN}-module-autodie-scope-guardstack" +RDEPENDS_${PN}-module-autodie-util += "${PN}-module-exporter" +RDEPENDS_${PN}-module-autodie-util += "${PN}-module-strict" +RDEPENDS_${PN}-module-autodie-util += "${PN}-module-warnings" +RDEPENDS_${PN}-module-autoloader += "${PN}-module-carp" +RDEPENDS_${PN}-module-autoloader += "${PN}-module-strict" +RDEPENDS_${PN}-module-autosplit += "${PN}-module-carp" +RDEPENDS_${PN}-module-autosplit += "${PN}-module-config" +RDEPENDS_${PN}-module-autosplit += "${PN}-module-exporter" +RDEPENDS_${PN}-module-autosplit += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-autosplit += "${PN}-module-file-path" 
+RDEPENDS_${PN}-module-autosplit += "${PN}-module-file-spec-functions" +RDEPENDS_${PN}-module-autosplit += "${PN}-module-strict" +RDEPENDS_${PN}-module-autouse += "${PN}-module-carp" +RDEPENDS_${PN}-module-base += "${PN}-module-carp" +RDEPENDS_${PN}-module-base += "${PN}-module-strict" +RDEPENDS_${PN}-module-base += "${PN}-module-vars" +RDEPENDS_${PN}-module-b-concise += "${PN}-module-b" +RDEPENDS_${PN}-module-b-concise += "${PN}-module-b-op-private" +RDEPENDS_${PN}-module-b-concise += "${PN}-module-config" +RDEPENDS_${PN}-module-b-concise += "${PN}-module-exporter" +RDEPENDS_${PN}-module-b-concise += "${PN}-module-strict" +RDEPENDS_${PN}-module-b-concise += "${PN}-module-warnings" +RDEPENDS_${PN}-module-b-debug += "${PN}-module-b" +RDEPENDS_${PN}-module-b-debug += "${PN}-module-config" +RDEPENDS_${PN}-module-b-debug += "${PN}-module-strict" +RDEPENDS_${PN}-module-b-deparse += "${PN}-module-b" +RDEPENDS_${PN}-module-b-deparse += "${PN}-module-carp" +RDEPENDS_${PN}-module-b-deparse += "${PN}-module-data-dumper" +RDEPENDS_${PN}-module-b-deparse += "${PN}-module-feature" +RDEPENDS_${PN}-module-b-deparse += "${PN}-module-overloading" +RDEPENDS_${PN}-module-b-deparse += "${PN}-module-re" +RDEPENDS_${PN}-module-b-deparse += "${PN}-module-strict" +RDEPENDS_${PN}-module-b-deparse += "${PN}-module-vars" +RDEPENDS_${PN}-module-b-deparse += "${PN}-module-warnings" +RDEPENDS_${PN}-module-benchmark += "${PN}-module-carp" +RDEPENDS_${PN}-module-benchmark += "${PN}-module-exporter" +RDEPENDS_${PN}-module-benchmark += "${PN}-module-strict" +RDEPENDS_${PN}-module-bigint += "${PN}-module-carp" +RDEPENDS_${PN}-module-bigint += "${PN}-module-constant" +RDEPENDS_${PN}-module-bigint += "${PN}-module-exporter" +RDEPENDS_${PN}-module-bigint += "${PN}-module-math-bigint" +RDEPENDS_${PN}-module-bigint += "${PN}-module-math-bigint-trace" +RDEPENDS_${PN}-module-bigint += "${PN}-module-overload" +RDEPENDS_${PN}-module-bigint += "${PN}-module-strict" +RDEPENDS_${PN}-module-bigint += "${PN}-module-warnings" +RDEPENDS_${PN}-module-bignum += "${PN}-module-bigint" +RDEPENDS_${PN}-module-bignum += "${PN}-module-carp" +RDEPENDS_${PN}-module-bignum += "${PN}-module-exporter" +RDEPENDS_${PN}-module-bignum += "${PN}-module-math-bigfloat" +RDEPENDS_${PN}-module-bignum += "${PN}-module-math-bigfloat-trace" +RDEPENDS_${PN}-module-bignum += "${PN}-module-math-bigint" +RDEPENDS_${PN}-module-bignum += "${PN}-module-math-bigint-trace" +RDEPENDS_${PN}-module-bignum += "${PN}-module-overload" +RDEPENDS_${PN}-module-bignum += "${PN}-module-strict" +RDEPENDS_${PN}-module-bignum += "${PN}-module-warnings" +RDEPENDS_${PN}-module-bigrat += "${PN}-module-bigint" +RDEPENDS_${PN}-module-bigrat += "${PN}-module-carp" +RDEPENDS_${PN}-module-bigrat += "${PN}-module-exporter" +RDEPENDS_${PN}-module-bigrat += "${PN}-module-math-bigfloat" +RDEPENDS_${PN}-module-bigrat += "${PN}-module-math-bigint" +RDEPENDS_${PN}-module-bigrat += "${PN}-module-math-bigint-trace" +RDEPENDS_${PN}-module-bigrat += "${PN}-module-math-bigrat" +RDEPENDS_${PN}-module-bigrat += "${PN}-module-overload" +RDEPENDS_${PN}-module-bigrat += "${PN}-module-strict" +RDEPENDS_${PN}-module-bigrat += "${PN}-module-warnings" +RDEPENDS_${PN}-module-blib += "${PN}-module-cwd" +RDEPENDS_${PN}-module-blib += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-blib += "${PN}-module-vars" +RDEPENDS_${PN}-module-b += "${PN}-module-exporter" +RDEPENDS_${PN}-module-b += "${PN}-module-strict" +RDEPENDS_${PN}-module-b += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-b-showlex += "${PN}-module-b" 
+RDEPENDS_${PN}-module-b-showlex += "${PN}-module-b-concise" +RDEPENDS_${PN}-module-b-showlex += "${PN}-module-b-terse" +RDEPENDS_${PN}-module-b-showlex += "${PN}-module-strict" +RDEPENDS_${PN}-module-b-terse += "${PN}-module-b" +RDEPENDS_${PN}-module-b-terse += "${PN}-module-b-concise" +RDEPENDS_${PN}-module-b-terse += "${PN}-module-carp" +RDEPENDS_${PN}-module-b-terse += "${PN}-module-strict" +RDEPENDS_${PN}-module-b-xref += "${PN}-module-b" +RDEPENDS_${PN}-module-b-xref += "${PN}-module-config" +RDEPENDS_${PN}-module-b-xref += "${PN}-module-strict" +RDEPENDS_${PN}-module-bytes += "${PN}-module-bytes-heavy" +RDEPENDS_${PN}-module-bytes += "${PN}-module-carp" +RDEPENDS_${PN}-module-carp-heavy += "${PN}-module-carp" +RDEPENDS_${PN}-module-carp += "${PN}-module-exporter" +RDEPENDS_${PN}-module-carp += "${PN}-module-strict" +RDEPENDS_${PN}-module-carp += "${PN}-module-warnings" +RDEPENDS_${PN}-module--charnames += "${PN}-module-bytes" +RDEPENDS_${PN}-module-charnames += "${PN}-module-bytes" +RDEPENDS_${PN}-module--charnames += "${PN}-module-carp" +RDEPENDS_${PN}-module-charnames += "${PN}-module--charnames" +RDEPENDS_${PN}-module--charnames += "${PN}-module-file-spec" +RDEPENDS_${PN}-module--charnames += "${PN}-module-re" +RDEPENDS_${PN}-module-charnames += "${PN}-module-re" +RDEPENDS_${PN}-module--charnames += "${PN}-module-strict" +RDEPENDS_${PN}-module-charnames += "${PN}-module-strict" +RDEPENDS_${PN}-module--charnames += "${PN}-module-warnings" +RDEPENDS_${PN}-module-charnames += "${PN}-module-warnings" +RDEPENDS_${PN}-module-class-struct += "${PN}-module-carp" +RDEPENDS_${PN}-module-class-struct += "${PN}-module-exporter" +RDEPENDS_${PN}-module-class-struct += "${PN}-module-strict" +RDEPENDS_${PN}-module-class-struct += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-bytes " +RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-carp " +RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-constant" +RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-exporter" +RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-strict " +RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-warnings " +RDEPENDS_${PN}-module-compress-raw-bzip2 += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-bytes " +RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-carp " +RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-constant" +RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-exporter" +RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-strict " +RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-warnings " +RDEPENDS_${PN}-module-compress-raw-zlib += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-bytes " +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-carp " +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-compress-raw-zlib" +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-constant" +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-exporter" +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-io-compress-gzip" +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-io-compress-gzip-constants" +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-io-handle " +RDEPENDS_${PN}-module-compress-zlib += 
"${PN}-module-io-uncompress-gunzip" +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-strict " +RDEPENDS_${PN}-module-compress-zlib += "${PN}-module-warnings " +RDEPENDS_${PN}-module-config-extensions += "${PN}-module-config" +RDEPENDS_${PN}-module-config-extensions += "${PN}-module-exporter" +RDEPENDS_${PN}-module-config-extensions += "${PN}-module-strict" +RDEPENDS_${PN}-module-config-extensions += "${PN}-module-vars" +RDEPENDS_${PN}-module-config-perl-v += "${PN}-module-config" +RDEPENDS_${PN}-module-config-perl-v += "${PN}-module-exporter" +RDEPENDS_${PN}-module-config-perl-v += "${PN}-module-strict" +RDEPENDS_${PN}-module-config-perl-v += "${PN}-module-vars" +RDEPENDS_${PN}-module-config-perl-v += "${PN}-module-warnings" +RDEPENDS_${PN}-module-constant += "${PN}-module-carp" +RDEPENDS_${PN}-module-constant += "${PN}-module-strict" +RDEPENDS_${PN}-module-constant += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-corelist += "${PN}-module-list-util" +RDEPENDS_${PN}-module-corelist += "${PN}-module-corelist" +RDEPENDS_${PN}-module-corelist += "${PN}-module-strict" +RDEPENDS_${PN}-module-corelist += "${PN}-module-vars" +RDEPENDS_${PN}-module-corelist += "${PN}-module-version" +RDEPENDS_${PN}-module-corelist += "${PN}-module-warnings" +RDEPENDS_${PN}-module-cpan += "${PN}-module-b" +RDEPENDS_${PN}-module-cpan += "${PN}-module-carp" +RDEPENDS_${PN}-module-cpan += "${PN}-module-config" +RDEPENDS_${PN}-module-cpan += "${PN}-module-cwd" +RDEPENDS_${PN}-module-cpan += "${PN}-module-data-dumper" +RDEPENDS_${PN}-module-cpan += "${PN}-module-dirhandle" +RDEPENDS_${PN}-module-cpan += "${PN}-module-exporter" +RDEPENDS_${PN}-module-cpan += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-cpan += "${PN}-module-extutils-manifest" +RDEPENDS_${PN}-module-cpan += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-cpan += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-cpan += "${PN}-module-file-copy" +RDEPENDS_${PN}-module-cpan += "${PN}-module-file-find" +RDEPENDS_${PN}-module-cpan += "${PN}-module-filehandle" +RDEPENDS_${PN}-module-cpan += "${PN}-module-file-path" +RDEPENDS_${PN}-module-cpan += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-cpan += "${PN}-module-lib" +RDEPENDS_${PN}-module-cpan += "${PN}-module-net-ping" +RDEPENDS_${PN}-module-cpan += "${PN}-module-overload" +RDEPENDS_${PN}-module-cpan += "${PN}-module-posix" +RDEPENDS_${PN}-module-cpan += "${PN}-module-safe" +RDEPENDS_${PN}-module-cpan += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-cpan += "${PN}-module-strict" +RDEPENDS_${PN}-module-cpan += "${PN}-module-sys-hostname" +RDEPENDS_${PN}-module-cpan += "${PN}-module-term-readline" +RDEPENDS_${PN}-module-cpan += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-cpan += "${PN}-module-text-wrap" +RDEPENDS_${PN}-module-cpan += "${PN}-module-time-local" +RDEPENDS_${PN}-module-cpan += "${PN}-module-vars" +RDEPENDS_${PN}-module-cpan += "${PN}-module-warnings" +RDEPENDS_${PN}-module-cwd += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-cwd += "${PN}-module-exporter" +RDEPENDS_${PN}-module-cwd += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-cwd += "${PN}-module-strict" +RDEPENDS_${PN}-module-cwd += "${PN}-module-vars" +RDEPENDS_${PN}-module-cwd += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-data-dumper += "${PN}-module-b-deparse" +RDEPENDS_${PN}-module-data-dumper += "${PN}-module-carp" +RDEPENDS_${PN}-module-data-dumper += "${PN}-module-config" +RDEPENDS_${PN}-module-data-dumper += 
"${PN}-module-constant" +RDEPENDS_${PN}-module-data-dumper += "${PN}-module-exporter" +RDEPENDS_${PN}-module-data-dumper += "${PN}-module-overload" +RDEPENDS_${PN}-module-data-dumper += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-data-dumper += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-dbm-filter-compress += "${PN}-module-carp" +RDEPENDS_${PN}-module-dbm-filter-compress += "${PN}-module-strict" +RDEPENDS_${PN}-module-dbm-filter-compress += "${PN}-module-warnings" +RDEPENDS_${PN}-module-dbm-filter-encode += "${PN}-module-carp" +RDEPENDS_${PN}-module-dbm-filter-encode += "${PN}-module-strict" +RDEPENDS_${PN}-module-dbm-filter-encode += "${PN}-module-warnings" +RDEPENDS_${PN}-module-dbm-filter-int32 += "${PN}-module-strict" +RDEPENDS_${PN}-module-dbm-filter-int32 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-dbm-filter-null += "${PN}-module-strict" +RDEPENDS_${PN}-module-dbm-filter-null += "${PN}-module-warnings" +RDEPENDS_${PN}-module-dbm-filter += "${PN}-module-carp" +RDEPENDS_${PN}-module-dbm-filter += "${PN}-module-strict" +RDEPENDS_${PN}-module-dbm-filter += "${PN}-module-warnings" +RDEPENDS_${PN}-module-dbm-filter-utf8 += "${PN}-module-carp" +RDEPENDS_${PN}-module-dbm-filter-utf8 += "${PN}-module-strict" +RDEPENDS_${PN}-module-dbm-filter-utf8 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-db += "${PN}-module-strict" +RDEPENDS_${PN}-module-deprecate += "${PN}-module-carp" +RDEPENDS_${PN}-module-deprecate += "${PN}-module-strict" +RDEPENDS_${PN}-module-deprecate += "${PN}-module-warnings" +RDEPENDS_${PN}-module-devel-peek += "${PN}-module-exporter" +RDEPENDS_${PN}-module-devel-peek += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-devel-ppport += "${PN}-module-file-find" +RDEPENDS_${PN}-module-devel-ppport += "${PN}-module-getopt-long" +RDEPENDS_${PN}-module-devel-ppport += "${PN}-module-strict" +RDEPENDS_${PN}-module-devel-ppport += "${PN}-module-vars" +RDEPENDS_${PN}-module-devel-selfstubber += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-devel-selfstubber += "${PN}-module-selfloader" +RDEPENDS_${PN}-module-diagnostics += "${PN}-module-carp" +RDEPENDS_${PN}-module-diagnostics += "${PN}-module-config" +RDEPENDS_${PN}-module-diagnostics += "${PN}-module-getopt-std" +RDEPENDS_${PN}-module-diagnostics += "${PN}-module-strict" +RDEPENDS_${PN}-module-diagnostics += "${PN}-module-text-tabs" +RDEPENDS_${PN}-module-digest-base += "${PN}-module-carp" +RDEPENDS_${PN}-module-digest-base += "${PN}-module-mime-base64" +RDEPENDS_${PN}-module-digest-base += "${PN}-module-strict" +RDEPENDS_${PN}-module-digest-base += "${PN}-module-vars" +RDEPENDS_${PN}-module-digest-file += "${PN}-module-carp" +RDEPENDS_${PN}-module-digest-file += "${PN}-module-digest" +RDEPENDS_${PN}-module-digest-file += "${PN}-module-exporter" +RDEPENDS_${PN}-module-digest-file += "${PN}-module-strict" +RDEPENDS_${PN}-module-digest-file += "${PN}-module-vars" +RDEPENDS_${PN}-module-digest-md5 += "${PN}-module-digest-base" +RDEPENDS_${PN}-module-digest-md5 += "${PN}-module-exporter" +RDEPENDS_${PN}-module-digest-md5 += "${PN}-module-strict" +RDEPENDS_${PN}-module-digest-md5 += "${PN}-module-vars" +RDEPENDS_${PN}-module-digest-md5 += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-digest += "${PN}-module-strict" +RDEPENDS_${PN}-module-digest += "${PN}-module-vars" +RDEPENDS_${PN}-module-digest-sha += "${PN}-module-carp" +RDEPENDS_${PN}-module-digest-sha += "${PN}-module-digest-base" +RDEPENDS_${PN}-module-digest-sha += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-digest-sha += "${PN}-module-exporter" 
+RDEPENDS_${PN}-module-digest-sha += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-digest-sha += "${PN}-module-integer" +RDEPENDS_${PN}-module-digest-sha += "${PN}-module-strict" +RDEPENDS_${PN}-module-digest-sha += "${PN}-module-vars" +RDEPENDS_${PN}-module-digest-sha += "${PN}-module-warnings" +RDEPENDS_${PN}-module-dirhandle += "${PN}-module-carp" +RDEPENDS_${PN}-module-dirhandle += "${PN}-module-symbol" +RDEPENDS_${PN}-module-dynaloader += "${PN}-module-carp" +RDEPENDS_${PN}-module-dynaloader += "${PN}-module-config" +RDEPENDS_${PN}-module-encode-alias += "${PN}-module-constant" +RDEPENDS_${PN}-module-encode-alias += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-alias += "${PN}-module-exporter" +RDEPENDS_${PN}-module-encode-alias += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-alias += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-byte += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-byte += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-byte += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-byte += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-encode-cjkconstants += "${PN}-module-carp" +RDEPENDS_${PN}-module-encode-cjkconstants += "${PN}-module-exporter" +RDEPENDS_${PN}-module-encode-cjkconstants += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-cjkconstants += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-parent" +RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-utf8" +RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-vars" +RDEPENDS_${PN}-module-encode-cn-hz += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-cn += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-cn += "${PN}-module-encode-cn-hz" +RDEPENDS_${PN}-module-encode-cn += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-cn += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-cn += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-encode-config += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-config += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-ebcdic += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-ebcdic += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-ebcdic += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-ebcdic += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-carp" +RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-constant" +RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-exporter" +RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-overload" +RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-encoder += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-encoding += "${PN}-module-carp" +RDEPENDS_${PN}-module-encode-encoding += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-encoding += "${PN}-module-encode-mime-name" +RDEPENDS_${PN}-module-encode-encoding += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-encoding += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-carp" +RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-parent" +RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-utf8" +RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-vars" 
+RDEPENDS_${PN}-module-encode-gsm0338 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-guess += "${PN}-module-bytes" +RDEPENDS_${PN}-module-encode-guess += "${PN}-module-carp" +RDEPENDS_${PN}-module-encode-guess += "${PN}-module-constant" +RDEPENDS_${PN}-module-encode-guess += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-guess += "${PN}-module-encode-unicode" +RDEPENDS_${PN}-module-encode-guess += "${PN}-module-parent" +RDEPENDS_${PN}-module-encode-guess += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-guess += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-jp-h2z += "${PN}-module-encode-cjkconstants" +RDEPENDS_${PN}-module-encode-jp-h2z += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-jp-h2z += "${PN}-module-vars" +RDEPENDS_${PN}-module-encode-jp-h2z += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-bytes" +RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-encode-cjkconstants" +RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-encode-jp-h2z" +RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-parent" +RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-jp-jis7 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-jp += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-jp += "${PN}-module-encode-jp-jis7" +RDEPENDS_${PN}-module-encode-jp += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-jp += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-jp += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-encode-kr-2022-kr += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-kr-2022-kr += "${PN}-module-encode-cjkconstants" +RDEPENDS_${PN}-module-encode-kr-2022-kr += "${PN}-module-parent" +RDEPENDS_${PN}-module-encode-kr-2022-kr += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-kr-2022-kr += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-kr += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-kr += "${PN}-module-encode-kr-2022-kr" +RDEPENDS_${PN}-module-encode-kr += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-kr += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-kr += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-encode-mime-header-iso-2022-jp += "${PN}-module-constant" +RDEPENDS_${PN}-module-encode-mime-header-iso-2022-jp += "${PN}-module-encode-cjkconstants" +RDEPENDS_${PN}-module-encode-mime-header-iso-2022-jp += "${PN}-module-parent" +RDEPENDS_${PN}-module-encode-mime-header-iso-2022-jp += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-mime-header-iso-2022-jp += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-bytes" +RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-carp" +RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-constant" +RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-mime-base64" +RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-parent" +RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-utf8" +RDEPENDS_${PN}-module-encode-mime-header += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-mime-name += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-mime-name += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode += "${PN}-module-bytes" +RDEPENDS_${PN}-module-encode += "${PN}-module-carp" +RDEPENDS_${PN}-module-encode += "${PN}-module-constant" 
+RDEPENDS_${PN}-module-encode += "${PN}-module-encode-alias" +RDEPENDS_${PN}-module-encode += "${PN}-module-encode-config" +RDEPENDS_${PN}-module-encode += "${PN}-module-encode-configlocal-pm" +RDEPENDS_${PN}-module-encode += "${PN}-module-encode-encoding" +RDEPENDS_${PN}-module-encode += "${PN}-module-exporter" +RDEPENDS_${PN}-module-encode += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-encode-symbol += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-symbol += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-symbol += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-symbol += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-encode-tw += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-tw += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-tw += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-tw += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-encode-unicode += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-unicode += "${PN}-module-parent" +RDEPENDS_${PN}-module-encode-unicode += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-unicode += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encode-unicode += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-encode" +RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-mime-base64" +RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-parent" +RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-re" +RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-strict" +RDEPENDS_${PN}-module-encode-unicode-utf7 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encoding += "${PN}-module-carp" +RDEPENDS_${PN}-module-encoding += "${PN}-module-config" +RDEPENDS_${PN}-module-encoding += "${PN}-module-constant" +RDEPENDS_${PN}-module-encoding += "${PN}-module-encode" +RDEPENDS_${PN}-module-encoding += "${PN}-module-filter-util-call" +RDEPENDS_${PN}-module-encoding += "${PN}-module-i18n-langinfo" +RDEPENDS_${PN}-module-encoding += "${PN}-module-posix" +RDEPENDS_${PN}-module-encoding += "${PN}-module-strict" +RDEPENDS_${PN}-module-encoding += "${PN}-module-utf8" +RDEPENDS_${PN}-module-encoding += "${PN}-module-warnings" +RDEPENDS_${PN}-module-encoding-warnings += "${PN}-module-carp" +RDEPENDS_${PN}-module-encoding-warnings += "${PN}-module-strict" +RDEPENDS_${PN}-module-encoding-warnings += "${PN}-module-warnings" +RDEPENDS_${PN}-module-english += "${PN}-module-carp " +RDEPENDS_${PN}-module-english += "${PN}-module-exporter" +RDEPENDS_${PN}-module-env += "${PN}-module-config" +RDEPENDS_${PN}-module-env += "${PN}-module-tie-array" +RDEPENDS_${PN}-module-errno += "${PN}-module-carp" +RDEPENDS_${PN}-module-errno += "${PN}-module-exporter" +RDEPENDS_${PN}-module-errno += "${PN}-module-strict" +RDEPENDS_${PN}-module-experimental += "${PN}-module-carp" +RDEPENDS_${PN}-module-experimental += "${PN}-module-feature" +RDEPENDS_${PN}-module-experimental += "${PN}-module-strict" +RDEPENDS_${PN}-module-experimental += "${PN}-module-version" +RDEPENDS_${PN}-module-experimental += "${PN}-module-warnings" +RDEPENDS_${PN}-module-exporter-heavy += "${PN}-module-carp" +RDEPENDS_${PN}-module-exporter-heavy += "${PN}-module-exporter" +RDEPENDS_${PN}-module-exporter-heavy += "${PN}-module-strict" +RDEPENDS_${PN}-module-exporter += "${PN}-module-exporter-heavy" +RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-cbuilder-base += 
"${PN}-module-cwd" +RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-extutils-mksymlists" +RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-file-temp" +RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-ipc-cmd" +RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-extutils-cbuilder-base += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-cbuilder += "${PN}-module-file-path" +RDEPENDS_${PN}-module-extutils-cbuilder += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-cbuilder += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-cbuilder += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-aix += "${PN}-module-extutils-cbuilder-platform-unix" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-aix += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-aix += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-aix += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-aix += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-extutils-cbuilder-platform-unix" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-android += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-cygwin += "${PN}-module-extutils-cbuilder-platform-unix" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-cygwin += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-cygwin += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-cygwin += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-cygwin += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-darwin += "${PN}-module-extutils-cbuilder-platform-unix" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-darwin += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-darwin += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-darwin += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-dec-osf += "${PN}-module-extutils-cbuilder-platform-unix" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-dec-osf += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-dec-osf += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-dec-osf += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-dec-osf += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-os2 += "${PN}-module-extutils-cbuilder-platform-unix" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-os2 += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-os2 += "${PN}-module-vars" 
+RDEPENDS_${PN}-module-extutils-cbuilder-platform-os2 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-unix += "${PN}-module-extutils-cbuilder-base" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-unix += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-unix += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-unix += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-extutils-cbuilder-base" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-file-spec-functions" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-vms += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows-bcc += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows-bcc += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows-gcc += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows-gcc += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows-msvc += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows-msvc += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-extutils-cbuilder-base" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-io-file" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-cbuilder-platform-windows += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-extutils-command" +RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-extutils-install" +RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-getopt-long" +RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-test-harness" +RDEPENDS_${PN}-module-extutils-command-mm += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-command += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-command += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-command += "${PN}-module-file-copy" +RDEPENDS_${PN}-module-extutils-command += "${PN}-module-file-find" +RDEPENDS_${PN}-module-extutils-command += "${PN}-module-file-path" +RDEPENDS_${PN}-module-extutils-command += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-command += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-command += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-constant" +RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-extutils-constant-utils" +RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-text-wrap" 
+RDEPENDS_${PN}-module-extutils-constant-base += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-extutils-constant-proxysubs" +RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-extutils-constant-utils" +RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-extutils-constant-xs" +RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-filehandle" +RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-constant += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-constant-proxysubs += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-constant-proxysubs += "${PN}-module-extutils-constant-utils" +RDEPENDS_${PN}-module-extutils-constant-proxysubs += "${PN}-module-extutils-constant-xs" +RDEPENDS_${PN}-module-extutils-constant-proxysubs += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-constant-proxysubs += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-constant-utils += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-constant-utils += "${PN}-module-constant" +RDEPENDS_${PN}-module-extutils-constant-utils += "${PN}-module-posix" +RDEPENDS_${PN}-module-extutils-constant-utils += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-constant-utils += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-data-dumper" +RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-extutils-constant" +RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-extutils-constant-base" +RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-extutils-constant-utils" +RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-constant-xs += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-extutils-liblist" +RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-getopt-std" +RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-embed += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-data-dumper" +RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-extutils-packlist" +RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-file-find" +RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-installed += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-autosplit" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-cwd" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-install += 
"${PN}-module-extutils-packlist" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-compare" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-copy" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-find" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-path" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-install += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-cwd" +RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-extutils-liblist-kid += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-liblist += "${PN}-module-extutils-liblist-kid" +RDEPENDS_${PN}-module-extutils-liblist += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-liblist += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-makemaker-config += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-makemaker-config += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-makemaker-locale += "${PN}-module-base" +RDEPENDS_${PN}-module-extutils-makemaker-locale += "${PN}-module-encode" +RDEPENDS_${PN}-module-extutils-makemaker-locale += "${PN}-module-encode-alias" +RDEPENDS_${PN}-module-extutils-makemaker-locale += "${PN}-module-i18n-langinfo" +RDEPENDS_${PN}-module-extutils-makemaker-locale += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-b" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-cwd" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-extutils-makemaker-version" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-extutils-manifest" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-extutils-mm" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-extutils-my" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-file-path" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-makemaker += "${PN}-module-version" +RDEPENDS_${PN}-module-extutils-makemaker-version += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-makemaker-version += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-file-copy" +RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-file-find" +RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-file-path" +RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-strict" 
+RDEPENDS_${PN}-module-extutils-manifest += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-miniperl += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-miniperl += "${PN}-module-extutils-embed" +RDEPENDS_${PN}-module-extutils-miniperl += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-miniperl += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-mkbootstrap += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-mkbootstrap += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-extutils-mkbootstrap += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-mkbootstrap += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mksymlists += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-mksymlists += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-mksymlists += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-mksymlists += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-aix += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-extutils-mm-aix += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-aix += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-autosplit" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-cpan" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-data-dumper" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-file-find" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-any += "${PN}-module-version" +RDEPENDS_${PN}-module-extutils-mm-beos += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-mm-beos += "${PN}-module-extutils-mm-any" +RDEPENDS_${PN}-module-extutils-mm-beos += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-beos += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-mm-beos += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-cygwin += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-mm-cygwin += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-cygwin += "${PN}-module-extutils-mm-win32" +RDEPENDS_${PN}-module-extutils-mm-cygwin += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-mm-cygwin += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-darwin += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-darwin += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-dos += "${PN}-module-extutils-mm-any" +RDEPENDS_${PN}-module-extutils-mm-dos += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-dos += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-macos += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-nw5 += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-extutils-mm-nw5 += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-mm-nw5 += "${PN}-module-extutils-mm-win32" +RDEPENDS_${PN}-module-extutils-mm-nw5 += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-mm-nw5 += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-os2 += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-extutils-mm-os2 += "${PN}-module-extutils-mm-any" 
+RDEPENDS_${PN}-module-extutils-mm-os2 += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-os2 += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-mm-os2 += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm += "${PN}-module-extutils-liblist" +RDEPENDS_${PN}-module-extutils-mm += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-extutils-mm += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-mm += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-qnx += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-qnx += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-cwd" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-dirhandle" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-encode" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-extutils-liblist" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-extutils-mm-any" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-file-find" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-mm-unix += "${PN}-module-version" +RDEPENDS_${PN}-module-extutils-mm-uwin += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-uwin += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-extutils-liblist-kid" +RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-extutils-mm-any" +RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-file-find" +RDEPENDS_${PN}-module-extutils-mm-vms += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-vos += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-vos += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-extutils-mm-any" +RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-extutils-mm-unix" +RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-mm-win32 += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-mm-win95 += "${PN}-module-extutils-makemaker-config" +RDEPENDS_${PN}-module-extutils-mm-win95 += "${PN}-module-extutils-mm-win32" +RDEPENDS_${PN}-module-extutils-mm-win95 += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-my += "${PN}-module-extutils-mm" +RDEPENDS_${PN}-module-extutils-my += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-carp" +RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-cwd" 
+RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-packlist += "${PN}-module-vars" +RDEPENDS_${PN}-module-extutils-parsexs-constants += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-parsexs-constants += "${PN}-module-symbol" +RDEPENDS_${PN}-module-extutils-parsexs-constants += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-parsexs-countlines += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-parsexs-eval += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-parsexs-eval += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-config" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-cwd" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-extutils-parsexs-constants" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-extutils-parsexs-countlines" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-extutils-parsexs-eval" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-extutils-parsexs-utilities" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-re" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-parsexs += "${PN}-module-symbol" +RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-extutils-parsexs-constants" +RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-extutils-typemaps" +RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-parsexs-utilities += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-testlib += "${PN}-module-cwd" +RDEPENDS_${PN}-module-extutils-testlib += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-extutils-testlib += "${PN}-module-lib" +RDEPENDS_${PN}-module-extutils-testlib += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-testlib += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-typemaps-cmd += "${PN}-module-exporter" +RDEPENDS_${PN}-module-extutils-typemaps-cmd += "${PN}-module-extutils-typemaps" +RDEPENDS_${PN}-module-extutils-typemaps-cmd += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-typemaps-cmd += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-typemaps-inputmap += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-typemaps-inputmap += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-typemaps-outputmap += "${PN}-module-re" +RDEPENDS_${PN}-module-extutils-typemaps-outputmap += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-typemaps-outputmap += "${PN}-module-warnings" +RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-extutils-parsexs" +RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-extutils-parsexs-constants" +RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-extutils-typemaps-inputmap" +RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-extutils-typemaps-outputmap" +RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-extutils-typemaps-type" +RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-typemaps += "${PN}-module-warnings" 
+RDEPENDS_${PN}-module-extutils-typemaps-type += "${PN}-module-extutils-typemaps" +RDEPENDS_${PN}-module-extutils-typemaps-type += "${PN}-module-strict" +RDEPENDS_${PN}-module-extutils-typemaps-type += "${PN}-module-warnings" +RDEPENDS_${PN}-module-fatal += "${PN}-module-autodie" +RDEPENDS_${PN}-module-fatal += "${PN}-module-autodie-exception-system" +RDEPENDS_${PN}-module-fatal += "${PN}-module-autodie-hints" +RDEPENDS_${PN}-module-fatal += "${PN}-module-autodie-util" +RDEPENDS_${PN}-module-fatal += "${PN}-module-carp" +RDEPENDS_${PN}-module-fatal += "${PN}-module-config" +RDEPENDS_${PN}-module-fatal += "${PN}-module-constant" +RDEPENDS_${PN}-module-fatal += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-fatal += "${PN}-module-posix" +RDEPENDS_${PN}-module-fatal += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-fatal += "${PN}-module-strict" +RDEPENDS_${PN}-module-fatal += "${PN}-module-tie-refhash" +RDEPENDS_${PN}-module-fatal += "${PN}-module-warnings" +RDEPENDS_${PN}-module-fcntl += "${PN}-module-exporter" +RDEPENDS_${PN}-module-fcntl += "${PN}-module-strict" +RDEPENDS_${PN}-module-fcntl += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-feature += "${PN}-module-carp" +RDEPENDS_${PN}-module-fields += "${PN}-module-base" +RDEPENDS_${PN}-module-fields += "${PN}-module-carp" +RDEPENDS_${PN}-module-fields += "${PN}-module-hash-util" +RDEPENDS_${PN}-module-fields += "${PN}-module-strict" +RDEPENDS_${PN}-module-fields += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-basename += "${PN}-module-carp" +RDEPENDS_${PN}-module-file-basename += "${PN}-module-exporter" +RDEPENDS_${PN}-module-file-basename += "${PN}-module-re" +RDEPENDS_${PN}-module-file-basename += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-basename += "${PN}-module-warnings" +RDEPENDS_${PN}-module-filecache += "${PN}-module-carp" +RDEPENDS_${PN}-module-filecache += "${PN}-module-parent" +RDEPENDS_${PN}-module-filecache += "${PN}-module-strict" +RDEPENDS_${PN}-module-filecache += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-compare += "${PN}-module-carp" +RDEPENDS_${PN}-module-file-compare += "${PN}-module-exporter" +RDEPENDS_${PN}-module-file-compare += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-compare += "${PN}-module-warnings" +RDEPENDS_${PN}-module-file-copy += "${PN}-module-carp" +RDEPENDS_${PN}-module-file-copy += "${PN}-module-config" +RDEPENDS_${PN}-module-file-copy += "${PN}-module-exporter" +RDEPENDS_${PN}-module-file-copy += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-file-copy += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-file-copy += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-copy += "${PN}-module-warnings" +RDEPENDS_${PN}-module-file-dosglob += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-dosglob += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-file-dosglob += "${PN}-module-warnings" +RDEPENDS_${PN}-module-file-dosglob += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-carp" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-constant" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-cwd" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-copy" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-filehandle" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-path" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-file-temp" 
+RDEPENDS_${PN}-module-file-fetch += "${PN}-module-ipc-cmd" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-locale-maketext-simple" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-load" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-params-check" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-fetch += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-find += "${PN}-module-config" +RDEPENDS_${PN}-module-file-find += "${PN}-module-cwd" +RDEPENDS_${PN}-module-file-find += "${PN}-module-exporter" +RDEPENDS_${PN}-module-file-find += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-file-find += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-file-find += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-find += "${PN}-module-warnings" +RDEPENDS_${PN}-module-file-find += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-file-globmapper += "${PN}-module-carp" +RDEPENDS_${PN}-module-file-globmapper += "${PN}-module-file-glob" +RDEPENDS_${PN}-module-file-globmapper += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-globmapper += "${PN}-module-warnings" +RDEPENDS_${PN}-module-file-glob += "${PN}-module-exporter" +RDEPENDS_${PN}-module-file-glob += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-glob += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-filehandle += "${PN}-module-exporter" +RDEPENDS_${PN}-module-filehandle += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-filehandle += "${PN}-module-io-file" +RDEPENDS_${PN}-module-filehandle += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-path += "${PN}-module-carp" +RDEPENDS_${PN}-module-file-path += "${PN}-module-cwd" +RDEPENDS_${PN}-module-file-path += "${PN}-module-exporter" +RDEPENDS_${PN}-module-file-path += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-file-path += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-file-path += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-path += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-spec-amigaos += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-file-spec-amigaos += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-spec-amigaos += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-spec-cygwin += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-file-spec-cygwin += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-spec-cygwin += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-spec-epoc += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-file-spec-epoc += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-spec-epoc += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-spec-functions += "${PN}-module-exporter" +RDEPENDS_${PN}-module-file-spec-functions += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-file-spec-functions += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-file-spec-functions += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-spec-functions += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-spec-mac += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-file-spec-mac += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-spec-mac += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-spec-os2 += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-file-spec-os2 += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-spec-os2 += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-spec += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-spec += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-constant" +RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-cwd" 
+RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-spec-unix += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-file-spec-vms += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-file-spec-vms += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-file-spec-vms += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-spec-vms += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-spec-win32 += "${PN}-module-cwd " +RDEPENDS_${PN}-module-file-spec-win32 += "${PN}-module-file-spec-unix" +RDEPENDS_${PN}-module-file-spec-win32 += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-spec-win32 += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-stat += "${PN}-module-carp" +RDEPENDS_${PN}-module-file-stat += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-file-stat += "${PN}-module-constant" +RDEPENDS_${PN}-module-file-stat += "${PN}-module-exporter" +RDEPENDS_${PN}-module-file-stat += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-file-stat += "${PN}-module-overload " +RDEPENDS_${PN}-module-file-stat += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-stat += "${PN}-module-symbol" +RDEPENDS_${PN}-module-file-stat += "${PN}-module-vars" +RDEPENDS_${PN}-module-file-stat += "${PN}-module-warnings" +RDEPENDS_${PN}-module-file-stat += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-carp" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-constant" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-cwd" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-errno" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-exporter" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-file-path" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-io-seekable" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-overload" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-parent" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-posix" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-strict" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-symbol" +RDEPENDS_${PN}-module-file-temp += "${PN}-module-vars" +RDEPENDS_${PN}-module-filter-simple += "${PN}-module-carp" +RDEPENDS_${PN}-module-filter-simple += "${PN}-module-filter-util-call" +RDEPENDS_${PN}-module-filter-simple += "${PN}-module-text-balanced" +RDEPENDS_${PN}-module-filter-simple += "${PN}-module-vars" +RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-carp " +RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-exporter" +RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-strict" +RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-vars" +RDEPENDS_${PN}-module-filter-util-call += "${PN}-module-warnings" +RDEPENDS_${PN}-module-findbin += "${PN}-module-carp" +RDEPENDS_${PN}-module-findbin += "${PN}-module-cwd" +RDEPENDS_${PN}-module-findbin += "${PN}-module-exporter" +RDEPENDS_${PN}-module-findbin += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-findbin += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-getopt-long += "${PN}-module-constant" +RDEPENDS_${PN}-module-getopt-long += "${PN}-module-exporter" +RDEPENDS_${PN}-module-getopt-long += "${PN}-module-overload" +RDEPENDS_${PN}-module-getopt-long += 
"${PN}-module-pod-usage" +RDEPENDS_${PN}-module-getopt-long += "${PN}-module-strict" +RDEPENDS_${PN}-module-getopt-long += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-getopt-long += "${PN}-module-vars" +RDEPENDS_${PN}-module-getopt-std += "${PN}-module-exporter" +RDEPENDS_${PN}-module-hash-util-fieldhash += "${PN}-module-exporter" +RDEPENDS_${PN}-module-hash-util-fieldhash += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-hash-util-fieldhash += "${PN}-module-strict" +RDEPENDS_${PN}-module-hash-util-fieldhash += "${PN}-module-warnings" +RDEPENDS_${PN}-module-hash-util-fieldhash += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-hash-util += "${PN}-module-carp" +RDEPENDS_${PN}-module-hash-util += "${PN}-module-exporter" +RDEPENDS_${PN}-module-hash-util += "${PN}-module-hash-util-fieldhash" +RDEPENDS_${PN}-module-hash-util += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-hash-util += "${PN}-module-strict" +RDEPENDS_${PN}-module-hash-util += "${PN}-module-warnings" +RDEPENDS_${PN}-module-hash-util += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-hash-util += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-i18n-collate += "${PN}-module-exporter" +RDEPENDS_${PN}-module-i18n-collate += "${PN}-module-overload" +RDEPENDS_${PN}-module-i18n-collate += "${PN}-module-posix" +RDEPENDS_${PN}-module-i18n-collate += "${PN}-module-strict" +RDEPENDS_${PN}-module-i18n-collate += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-i18n-langinfo += "${PN}-module-carp" +RDEPENDS_${PN}-module-i18n-langinfo += "${PN}-module-exporter" +RDEPENDS_${PN}-module-i18n-langinfo += "${PN}-module-strict" +RDEPENDS_${PN}-module-i18n-langinfo += "${PN}-module-warnings" +RDEPENDS_${PN}-module-i18n-langinfo += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-i18n-langtags-detect += "${PN}-module-i18n-langtags" +RDEPENDS_${PN}-module-i18n-langtags-detect += "${PN}-module-strict" +RDEPENDS_${PN}-module-i18n-langtags-detect += "${PN}-module-vars" +RDEPENDS_${PN}-module-i18n-langtags-list += "${PN}-module-strict" +RDEPENDS_${PN}-module-i18n-langtags-list += "${PN}-module-vars" +RDEPENDS_${PN}-module-i18n-langtags += "${PN}-module-exporter" +RDEPENDS_${PN}-module-i18n-langtags += "${PN}-module-strict" +RDEPENDS_${PN}-module-i18n-langtags += "${PN}-module-vars" +RDEPENDS_${PN}-module-io-compress-adapter-bzip2 += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-adapter-bzip2 += "${PN}-module-compress-raw-bzip2" +RDEPENDS_${PN}-module-io-compress-adapter-bzip2 += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-compress-adapter-bzip2 += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-compress-adapter-bzip2 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-compress-raw-zlib" +RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-compress-adapter-deflate += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-adapter-identity += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-adapter-identity += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-compress-adapter-identity += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-compress-adapter-identity += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-base-common += 
"${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-constant" +RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-encode" +RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-file-globmapper" +RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-utf8" +RDEPENDS_${PN}-module-io-compress-base-common += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-io-file" +RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-io-handle " +RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-symbol" +RDEPENDS_${PN}-module-io-compress-base += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-io-compress-adapter-bzip2" +RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-io-compress-base" +RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-bzip2 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-io-compress-adapter-deflate" +RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-io-compress-rawdeflate" +RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-io-compress-zlib-constants" +RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-deflate += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-gzip-constants += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-gzip-constants += "${PN}-module-constant" +RDEPENDS_${PN}-module-io-compress-gzip-constants += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-compress-gzip-constants += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-gzip-constants += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-io-compress-adapter-deflate" +RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-io-compress-gzip-constants" +RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-io-compress-rawdeflate" +RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-io-compress-zlib-extra" +RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-gzip += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-rawdeflate += 
"${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-compress-raw-zlib" +RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-io-compress-adapter-deflate" +RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-io-compress-base" +RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-rawdeflate += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-zip-constants += "${PN}-module-constant" +RDEPENDS_${PN}-module-io-compress-zip-constants += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-compress-zip-constants += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-zip-constants += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-compress-raw-zlib" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-config" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-adapter-deflate" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-adapter-identity" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-bzip2" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-rawdeflate" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-zip-constants" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-io-compress-zlib-extra" +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-zip += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-zlib-constants += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-zlib-constants += "${PN}-module-constant" +RDEPENDS_${PN}-module-io-compress-zlib-constants += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-compress-zlib-constants += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-zlib-constants += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-compress-zlib-extra += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-compress-zlib-extra += "${PN}-module-io-compress-gzip-constants" +RDEPENDS_${PN}-module-io-compress-zlib-extra += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-compress-zlib-extra += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-dir += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-dir += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-dir += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-io-dir += "${PN}-module-file-stat" +RDEPENDS_${PN}-module-io-dir += "${PN}-module-io-file" +RDEPENDS_${PN}-module-io-dir += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-dir += "${PN}-module-symbol" +RDEPENDS_${PN}-module-io-dir += "${PN}-module-tie-hash" +RDEPENDS_${PN}-module-io-file += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-file += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-file += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-io-file += "${PN}-module-io-seekable" +RDEPENDS_${PN}-module-io-file += "${PN}-module-selectsaver" +RDEPENDS_${PN}-module-io-file += "${PN}-module-strict" 
+RDEPENDS_${PN}-module-io-file += "${PN}-module-symbol" +RDEPENDS_${PN}-module-io-handle += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-handle += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-handle += "${PN}-module-io" +RDEPENDS_${PN}-module-io-handle += "${PN}-module-io-file" +RDEPENDS_${PN}-module-io-handle += "${PN}-module-selectsaver" +RDEPENDS_${PN}-module-io-handle += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-handle += "${PN}-module-symbol" +RDEPENDS_${PN}-module-io += "${PN}-module-carp" +RDEPENDS_${PN}-module-io += "${PN}-module-strict" +RDEPENDS_${PN}-module-io += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-io-pipe += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-pipe += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-io-pipe += "${PN}-module-io-handle" +RDEPENDS_${PN}-module-io-pipe += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-pipe += "${PN}-module-symbol" +RDEPENDS_${PN}-module-io-poll += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-poll += "${PN}-module-io-handle" +RDEPENDS_${PN}-module-io-poll += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-seekable += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-seekable += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-seekable += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-io-seekable += "${PN}-module-io-handle" +RDEPENDS_${PN}-module-io-seekable += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-select += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-select += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-select += "${PN}-module-vars" +RDEPENDS_${PN}-module-io-select += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-errno" +RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-io-socket" +RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-socket" +RDEPENDS_${PN}-module-io-socket-inet += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-base" +RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-constant" +RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-errno" +RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-posix" +RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-socket" +RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-socket-ip += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-socket += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-socket += "${PN}-module-errno" +RDEPENDS_${PN}-module-io-socket += "${PN}-module-exporter" +RDEPENDS_${PN}-module-io-socket += "${PN}-module-io-handle" +RDEPENDS_${PN}-module-io-socket += "${PN}-module-io-select" +RDEPENDS_${PN}-module-io-socket += "${PN}-module-io-socket-inet" +RDEPENDS_${PN}-module-io-socket += "${PN}-module-io-socket-unix" +RDEPENDS_${PN}-module-io-socket += "${PN}-module-socket" +RDEPENDS_${PN}-module-io-socket += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-socket-unix += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-socket-unix += "${PN}-module-io-socket" +RDEPENDS_${PN}-module-io-socket-unix += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-uncompress-adapter-bunzip2 += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-uncompress-adapter-bunzip2 += "${PN}-module-compress-raw-bzip2" +RDEPENDS_${PN}-module-io-uncompress-adapter-bunzip2 += "${PN}-module-io-compress-base-common" 
+RDEPENDS_${PN}-module-io-uncompress-adapter-bunzip2 += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-uncompress-adapter-bunzip2 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-compress-raw-zlib" +RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-io-compress-zip-constants " +RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-uncompress-adapter-identity += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-uncompress-adapter-inflate += "${PN}-module-compress-raw-zlib" +RDEPENDS_${PN}-module-io-uncompress-adapter-inflate += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-uncompress-adapter-inflate += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-uncompress-adapter-inflate += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-adapter-inflate" +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-base" +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-gunzip" +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-inflate" +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-rawinflate" +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-io-uncompress-unzip" +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-uncompress-anyinflate += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-io-uncompress-base" +RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-uncompress-anyuncompress += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-carp " +RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-constant" +RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-io-file " +RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-list-util" +RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-symbol" +RDEPENDS_${PN}-module-io-uncompress-base += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-io-uncompress-adapter-bunzip2" +RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-io-uncompress-base" +RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-strict " 
+RDEPENDS_${PN}-module-io-uncompress-bunzip2 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-compress-raw-zlib" +RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-io-compress-gzip-constants" +RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-io-compress-zlib-extra" +RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-io-uncompress-rawinflate" +RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-uncompress-gunzip += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-bytes" +RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-io-compress-zlib-constants" +RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-io-uncompress-rawinflate" +RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-uncompress-inflate += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-compress-raw-zlib" +RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-io-uncompress-adapter-inflate" +RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-io-uncompress-base" +RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-uncompress-rawinflate += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-compress-raw-zlib" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-constant" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-exporter " +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-compress-base-common" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-compress-zip-constants" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-compress-zlib-extra" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-file" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-uncompress-adapter-identity" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-uncompress-adapter-inflate" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-io-uncompress-rawinflate" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-posix" +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-strict " +RDEPENDS_${PN}-module-io-uncompress-unzip += "${PN}-module-warnings" +RDEPENDS_${PN}-module-io-zlib += "${PN}-module-carp" +RDEPENDS_${PN}-module-io-zlib += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-io-zlib += "${PN}-module-io-handle" +RDEPENDS_${PN}-module-io-zlib += "${PN}-module-strict" +RDEPENDS_${PN}-module-io-zlib += "${PN}-module-symbol" +RDEPENDS_${PN}-module-io-zlib += "${PN}-module-tie-handle" +RDEPENDS_${PN}-module-io-zlib += "${PN}-module-vars" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-carp" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-constant" +RDEPENDS_${PN}-module-ipc-cmd += 
"${PN}-module-exporter" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-extutils-makemaker" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-filehandle" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-io-handle" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-io-select" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-ipc-open3" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-locale-maketext-simple" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-load" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-params-check" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-posix" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-socket" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-strict" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-symbol" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-time-hires" +RDEPENDS_${PN}-module-ipc-cmd += "${PN}-module-vars" +RDEPENDS_${PN}-module-ipc-msg += "${PN}-module-carp" +RDEPENDS_${PN}-module-ipc-msg += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-ipc-msg += "${PN}-module-ipc-sysv" +RDEPENDS_${PN}-module-ipc-msg += "${PN}-module-strict" +RDEPENDS_${PN}-module-ipc-msg += "${PN}-module-vars" +RDEPENDS_${PN}-module-ipc-open2 += "${PN}-module-exporter" +RDEPENDS_${PN}-module-ipc-open2 += "${PN}-module-ipc-open3" +RDEPENDS_${PN}-module-ipc-open2 += "${PN}-module-strict" +RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-carp" +RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-constant" +RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-exporter" +RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-io-pipe" +RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-posix" +RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-strict" +RDEPENDS_${PN}-module-ipc-open3 += "${PN}-module-symbol" +RDEPENDS_${PN}-module-ipc-semaphore += "${PN}-module-carp" +RDEPENDS_${PN}-module-ipc-semaphore += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-ipc-semaphore += "${PN}-module-ipc-sysv" +RDEPENDS_${PN}-module-ipc-semaphore += "${PN}-module-strict" +RDEPENDS_${PN}-module-ipc-semaphore += "${PN}-module-vars" +RDEPENDS_${PN}-module-ipc-sharedmem += "${PN}-module-carp" +RDEPENDS_${PN}-module-ipc-sharedmem += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-ipc-sharedmem += "${PN}-module-ipc-sysv" +RDEPENDS_${PN}-module-ipc-sharedmem += "${PN}-module-strict" +RDEPENDS_${PN}-module-ipc-sharedmem += "${PN}-module-vars" +RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-carp" +RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-config" +RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-exporter" +RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-strict" +RDEPENDS_${PN}-module-ipc-sysv += "${PN}-module-vars" +RDEPENDS_${PN}-module-json-pp-boolean += "${PN}-module-json-pp" +RDEPENDS_${PN}-module-json-pp-boolean += "${PN}-module-strict" +RDEPENDS_${PN}-module-json-pp += "${PN}-module-b" +RDEPENDS_${PN}-module-json-pp += "${PN}-module-base" +RDEPENDS_${PN}-module-json-pp += "${PN}-module-bytes" +RDEPENDS_${PN}-module-json-pp += "${PN}-module-carp" +RDEPENDS_${PN}-module-json-pp += "${PN}-module-constant" +RDEPENDS_${PN}-module-json-pp += "${PN}-module-encode" +RDEPENDS_${PN}-module-json-pp += "${PN}-module-math-bigfloat" +RDEPENDS_${PN}-module-json-pp += "${PN}-module-math-bigint" +RDEPENDS_${PN}-module-json-pp += "${PN}-module-overload" +RDEPENDS_${PN}-module-json-pp += 
"${PN}-module-strict" +RDEPENDS_${PN}-module-json-pp += "${PN}-module-subs" +RDEPENDS_${PN}-module-less += "${PN}-module-strict" +RDEPENDS_${PN}-module-less += "${PN}-module-warnings" +RDEPENDS_${PN}-module-lib += "${PN}-module-carp" +RDEPENDS_${PN}-module-lib += "${PN}-module-config" +RDEPENDS_${PN}-module-lib += "${PN}-module-strict" +RDEPENDS_${PN}-module-list-util += "${PN}-module-exporter" +RDEPENDS_${PN}-module-list-util += "${PN}-module-strict" +RDEPENDS_${PN}-module-list-util += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-list-util-xs += "${PN}-module-list-util" +RDEPENDS_${PN}-module-list-util-xs += "${PN}-module-strict" +RDEPENDS_${PN}-module-loaded += "${PN}-module-carp" +RDEPENDS_${PN}-module-loaded += "${PN}-module-strict" +RDEPENDS_${PN}-module-loaded += "${PN}-module-vars" +RDEPENDS_${PN}-module-load += "${PN}-module-carp" +RDEPENDS_${PN}-module-load += "${PN}-module-config" +RDEPENDS_${PN}-module-load += "${PN}-module-constant" +RDEPENDS_${PN}-module-load += "${PN}-module-exporter" +RDEPENDS_${PN}-module-load += "${PN}-module-filehandle" +RDEPENDS_${PN}-module-load += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-load += "${PN}-module-locale-maketext-simple" +RDEPENDS_${PN}-module-load += "${PN}-module-corelist" +RDEPENDS_${PN}-module-load += "${PN}-module-load" +RDEPENDS_${PN}-module-load += "${PN}-module-params-check" +RDEPENDS_${PN}-module-load += "${PN}-module-strict" +RDEPENDS_${PN}-module-load += "${PN}-module-vars" +RDEPENDS_${PN}-module-load += "${PN}-module-version" +RDEPENDS_${PN}-module-load += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-constants += "${PN}-module-constant" +RDEPENDS_${PN}-module-locale-codes-constants += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-codes-constants += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-constants += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-country-codes += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-country-codes += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-country-codes += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-carp" +RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-locale-codes" +RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-locale-codes-constants" +RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-locale-codes-country-codes" +RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-locale-codes-country-retired" +RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-country += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-country-retired += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-country-retired += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-country-retired += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-currency-codes += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-currency-codes += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-currency-codes += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-carp" +RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-locale-codes" +RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-locale-codes-constants" +RDEPENDS_${PN}-module-locale-codes-currency += 
"${PN}-module-locale-codes-currency-codes" +RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-locale-codes-currency-retired" +RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-currency += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-currency-retired += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-currency-retired += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-currency-retired += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-langext-codes += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-langext-codes += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-langext-codes += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-carp" +RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-locale-codes" +RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-locale-codes-constants" +RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-locale-codes-langext-codes" +RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-locale-codes-langext-retired" +RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-langext += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-langext-retired += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-langext-retired += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-langext-retired += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-langfam-codes += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-langfam-codes += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-langfam-codes += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-carp" +RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-locale-codes" +RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-locale-codes-constants" +RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-locale-codes-langfam-codes" +RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-locale-codes-langfam-retired" +RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-langfam += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-langfam-retired += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-langfam-retired += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-language-codes += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-language-codes += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-language-codes += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-carp" +RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-locale-codes" +RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-locale-codes-constants" +RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-locale-codes-language-codes" +RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-locale-codes-language-retired" +RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-language += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-language-retired += "${PN}-module-strict" 
+RDEPENDS_${PN}-module-locale-codes-language-retired += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-language-retired += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-langvar-codes += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-langvar-codes += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-langvar-codes += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-carp" +RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-locale-codes" +RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-locale-codes-constants" +RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-locale-codes-langvar-codes" +RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-locale-codes-langvar-retired" +RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-langvar += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-langvar-retired += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-langvar-retired += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-langvar-retired += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes += "${PN}-module-carp" +RDEPENDS_${PN}-module-locale-codes += "${PN}-module-locale-codes-constants" +RDEPENDS_${PN}-module-locale-codes += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-script-codes += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-script-codes += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-script-codes += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-carp" +RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-locale-codes" +RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-locale-codes-constants" +RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-locale-codes-script-codes" +RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-locale-codes-script-retired" +RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-script += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-codes-script-retired += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-codes-script-retired += "${PN}-module-utf8" +RDEPENDS_${PN}-module-locale-codes-script-retired += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-country += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-country += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-country += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-currency += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-currency += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-currency += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-language += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-language += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-language += "${PN}-module-warnings" +RDEPENDS_${PN}-module-locale-maketext-gutsloader += "${PN}-module-locale-maketext" +RDEPENDS_${PN}-module-locale-maketext-guts += "${PN}-module-locale-maketext" +RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-carp" +RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-i18n-langtags" +RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-i18n-langtags-detect" +RDEPENDS_${PN}-module-locale-maketext += 
"${PN}-module-integer" +RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-maketext += "${PN}-module-vars" +RDEPENDS_${PN}-module-locale-maketext-simple += "${PN}-module-base" +RDEPENDS_${PN}-module-locale-maketext-simple += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale += "${PN}-module-carp" +RDEPENDS_${PN}-module-locale += "${PN}-module-config" +RDEPENDS_${PN}-module-locale-script += "${PN}-module-exporter" +RDEPENDS_${PN}-module-locale-script += "${PN}-module-strict" +RDEPENDS_${PN}-module-locale-script += "${PN}-module-warnings" +RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-carp" +RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-exporter" +RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-math-bigint" +RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-math-complex" +RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-overload" +RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-strict" +RDEPENDS_${PN}-module-math-bigfloat += "${PN}-module-warnings" +RDEPENDS_${PN}-module-math-bigfloat-trace += "${PN}-module-exporter" +RDEPENDS_${PN}-module-math-bigfloat-trace += "${PN}-module-math-bigfloat" +RDEPENDS_${PN}-module-math-bigfloat-trace += "${PN}-module-overload" +RDEPENDS_${PN}-module-math-bigfloat-trace += "${PN}-module-strict" +RDEPENDS_${PN}-module-math-bigfloat-trace += "${PN}-module-warnings" +RDEPENDS_${PN}-module-math-bigint-calcemu += "${PN}-module-strict" +RDEPENDS_${PN}-module-math-bigint-calcemu += "${PN}-module-warnings" +RDEPENDS_${PN}-module-math-bigint-calc += "${PN}-module-carp" +RDEPENDS_${PN}-module-math-bigint-calc += "${PN}-module-constant" +RDEPENDS_${PN}-module-math-bigint-calc += "${PN}-module-integer" +RDEPENDS_${PN}-module-math-bigint-calc += "${PN}-module-strict" +RDEPENDS_${PN}-module-math-bigint-calc += "${PN}-module-warnings" +RDEPENDS_${PN}-module-math-bigint-fastcalc += "${PN}-module-math-bigint-calc" +RDEPENDS_${PN}-module-math-bigint-fastcalc += "${PN}-module-strict" +RDEPENDS_${PN}-module-math-bigint-fastcalc += "${PN}-module-warnings" +RDEPENDS_${PN}-module-math-bigint-fastcalc += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-math-bigint += "${PN}-module-carp" +RDEPENDS_${PN}-module-math-bigint += "${PN}-module-exporter" +RDEPENDS_${PN}-module-math-bigint += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-math-bigint += "${PN}-module-math-bigfloat" +RDEPENDS_${PN}-module-math-bigint += "${PN}-module-math-complex" +RDEPENDS_${PN}-module-math-bigint += "${PN}-module-overload" +RDEPENDS_${PN}-module-math-bigint += "${PN}-module-strict" +RDEPENDS_${PN}-module-math-bigint += "${PN}-module-warnings" +RDEPENDS_${PN}-module-math-bigint-trace += "${PN}-module-exporter" +RDEPENDS_${PN}-module-math-bigint-trace += "${PN}-module-math-bigint" +RDEPENDS_${PN}-module-math-bigint-trace += "${PN}-module-overload" +RDEPENDS_${PN}-module-math-bigint-trace += "${PN}-module-strict" +RDEPENDS_${PN}-module-math-bigint-trace += "${PN}-module-warnings" +RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-carp" +RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-math-bigfloat" +RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-math-bigint" +RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-overload" +RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-strict" +RDEPENDS_${PN}-module-math-bigrat += "${PN}-module-warnings" +RDEPENDS_${PN}-module-math-complex += "${PN}-module-config" +RDEPENDS_${PN}-module-math-complex += "${PN}-module-exporter" +RDEPENDS_${PN}-module-math-complex += "${PN}-module-overload" 
+RDEPENDS_${PN}-module-math-complex += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-math-complex += "${PN}-module-strict" +RDEPENDS_${PN}-module-math-complex += "${PN}-module-warnings" +RDEPENDS_${PN}-module-math-trig += "${PN}-module-exporter" +RDEPENDS_${PN}-module-math-trig += "${PN}-module-math-complex" +RDEPENDS_${PN}-module-math-trig += "${PN}-module-strict" +RDEPENDS_${PN}-module-memoize-anydbm-file += "${PN}-module-vars" +RDEPENDS_${PN}-module-memoize-expirefile += "${PN}-module-carp" +RDEPENDS_${PN}-module-memoize-expire += "${PN}-module-carp" +RDEPENDS_${PN}-module-memoize += "${PN}-module-carp" +RDEPENDS_${PN}-module-memoize += "${PN}-module-config" +RDEPENDS_${PN}-module-memoize += "${PN}-module-exporter" +RDEPENDS_${PN}-module-memoize += "${PN}-module-strict" +RDEPENDS_${PN}-module-memoize += "${PN}-module-vars" +RDEPENDS_${PN}-module-memoize-sdbm-file += "${PN}-module-sdbm-file" +RDEPENDS_${PN}-module-memoize-storable += "${PN}-module-carp" +RDEPENDS_${PN}-module-memoize-storable += "${PN}-module-storable" +RDEPENDS_${PN}-module-meta-notation += "${PN}-module-strict" +RDEPENDS_${PN}-module-meta-notation += "${PN}-module-warnings" +RDEPENDS_${PN}-module-mime-base64 += "${PN}-module-exporter" +RDEPENDS_${PN}-module-mime-base64 += "${PN}-module-strict" +RDEPENDS_${PN}-module-mime-base64 += "${PN}-module-vars" +RDEPENDS_${PN}-module-mime-base64 += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-mime-quotedprint += "${PN}-module-exporter" +RDEPENDS_${PN}-module-mime-quotedprint += "${PN}-module-mime-base64" +RDEPENDS_${PN}-module-mime-quotedprint += "${PN}-module-strict" +RDEPENDS_${PN}-module-mime-quotedprint += "${PN}-module-vars" +RDEPENDS_${PN}-module-mro += "${PN}-module-strict" +RDEPENDS_${PN}-module-mro += "${PN}-module-warnings" +RDEPENDS_${PN}-module-mro += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-net-cmd += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-cmd += "${PN}-module-constant" +RDEPENDS_${PN}-module-net-cmd += "${PN}-module-errno" +RDEPENDS_${PN}-module-net-cmd += "${PN}-module-exporter" +RDEPENDS_${PN}-module-net-cmd += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-cmd += "${PN}-module-symbol" +RDEPENDS_${PN}-module-net-cmd += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-config += "${PN}-module-exporter" +RDEPENDS_${PN}-module-net-config += "${PN}-module-socket" +RDEPENDS_${PN}-module-net-config += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-config += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-domain += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-domain += "${PN}-module-exporter" +RDEPENDS_${PN}-module-net-domain += "${PN}-module-net-config" +RDEPENDS_${PN}-module-net-domain += "${PN}-module-posix" +RDEPENDS_${PN}-module-net-domain += "${PN}-module-socket" +RDEPENDS_${PN}-module-net-domain += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-domain += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-ftp-a += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-ftp-a += "${PN}-module-net-ftp-dataconn" +RDEPENDS_${PN}-module-net-ftp-a += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-ftp-a += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-ftp-dataconn += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-ftp-dataconn += "${PN}-module-errno" +RDEPENDS_${PN}-module-net-ftp-dataconn += "${PN}-module-net-cmd" +RDEPENDS_${PN}-module-net-ftp-dataconn += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-ftp-dataconn += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-ftp-e += "${PN}-module-net-ftp-i" +RDEPENDS_${PN}-module-net-ftp-e += 
"${PN}-module-strict" +RDEPENDS_${PN}-module-net-ftp-e += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-ftp-i += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-ftp-i += "${PN}-module-net-ftp-dataconn" +RDEPENDS_${PN}-module-net-ftp-i += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-ftp-i += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-ftp-l += "${PN}-module-net-ftp-i" +RDEPENDS_${PN}-module-net-ftp-l += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-ftp-l += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-constant" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-io-socket" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-io-socket-ip" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-net-cmd" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-net-config" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-net-ftp-a" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-net-netrc" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-socket" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-time-local" +RDEPENDS_${PN}-module-net-ftp += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-hostent += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-net-hostent += "${PN}-module-exporter" +RDEPENDS_${PN}-module-net-hostent += "${PN}-module-socket" +RDEPENDS_${PN}-module-net-hostent += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-hostent += "${PN}-module-vars" +RDEPENDS_${PN}-module-net-netent += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-net-netent += "${PN}-module-exporter" +RDEPENDS_${PN}-module-net-netent += "${PN}-module-socket" +RDEPENDS_${PN}-module-net-netent += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-netent += "${PN}-module-vars" +RDEPENDS_${PN}-module-net-netrc += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-netrc += "${PN}-module-filehandle" +RDEPENDS_${PN}-module-net-netrc += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-netrc += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-nntp += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-nntp += "${PN}-module-io-socket" +RDEPENDS_${PN}-module-net-nntp += "${PN}-module-io-socket-ip" +RDEPENDS_${PN}-module-net-nntp += "${PN}-module-net-cmd" +RDEPENDS_${PN}-module-net-nntp += "${PN}-module-net-config" +RDEPENDS_${PN}-module-net-nntp += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-nntp += "${PN}-module-time-local" +RDEPENDS_${PN}-module-net-nntp += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-ping += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-ping += "${PN}-module-constant" +RDEPENDS_${PN}-module-net-ping += "${PN}-module-exporter" +RDEPENDS_${PN}-module-net-ping += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-net-ping += "${PN}-module-filehandle" +RDEPENDS_${PN}-module-net-ping += "${PN}-module-posix" +RDEPENDS_${PN}-module-net-ping += "${PN}-module-socket" +RDEPENDS_${PN}-module-net-ping += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-ping += "${PN}-module-time-hires" +RDEPENDS_${PN}-module-net-ping += "${PN}-module-vars" +RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-io-socket" +RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-io-socket-ip" +RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-mime-base64" +RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-net-cmd" +RDEPENDS_${PN}-module-net-pop3 += 
"${PN}-module-net-config" +RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-net-netrc" +RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-pop3 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-protoent += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-net-protoent += "${PN}-module-exporter" +RDEPENDS_${PN}-module-net-protoent += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-protoent += "${PN}-module-vars" +RDEPENDS_${PN}-module-net-servent += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-net-servent += "${PN}-module-exporter" +RDEPENDS_${PN}-module-net-servent += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-servent += "${PN}-module-vars" +RDEPENDS_${PN}-module-net-smtp += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-smtp += "${PN}-module-io-socket" +RDEPENDS_${PN}-module-net-smtp += "${PN}-module-io-socket-ip" +RDEPENDS_${PN}-module-net-smtp += "${PN}-module-mime-base64" +RDEPENDS_${PN}-module-net-smtp += "${PN}-module-net-cmd" +RDEPENDS_${PN}-module-net-smtp += "${PN}-module-net-config" +RDEPENDS_${PN}-module-net-smtp += "${PN}-module-socket" +RDEPENDS_${PN}-module-net-smtp += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-smtp += "${PN}-module-warnings" +RDEPENDS_${PN}-module-net-time += "${PN}-module-carp" +RDEPENDS_${PN}-module-net-time += "${PN}-module-exporter" +RDEPENDS_${PN}-module-net-time += "${PN}-module-io-select" +RDEPENDS_${PN}-module-net-time += "${PN}-module-io-socket" +RDEPENDS_${PN}-module-net-time += "${PN}-module-net-config" +RDEPENDS_${PN}-module-net-time += "${PN}-module-strict" +RDEPENDS_${PN}-module-net-time += "${PN}-module-warnings" +RDEPENDS_${PN}-module-next += "${PN}-module-carp" +RDEPENDS_${PN}-module-next += "${PN}-module-overload" +RDEPENDS_${PN}-module-next += "${PN}-module-strict" +RDEPENDS_${PN}-module-odbm-file += "${PN}-module-strict" +RDEPENDS_${PN}-module-odbm-file += "${PN}-module-tie-hash" +RDEPENDS_${PN}-module-odbm-file += "${PN}-module-warnings" +RDEPENDS_${PN}-module-odbm-file += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-ok += "${PN}-module-strict" +RDEPENDS_${PN}-module-ok += "${PN}-module-test-more" +RDEPENDS_${PN}-module-opcode += "${PN}-module-carp" +RDEPENDS_${PN}-module-opcode += "${PN}-module-exporter" +RDEPENDS_${PN}-module-opcode += "${PN}-module-strict" +RDEPENDS_${PN}-module-opcode += "${PN}-module-subs" +RDEPENDS_${PN}-module-opcode += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-open += "${PN}-module-carp" +RDEPENDS_${PN}-module-open += "${PN}-module-encode" +RDEPENDS_${PN}-module-open += "${PN}-module-encoding" +RDEPENDS_${PN}-module-open += "${PN}-module-warnings" +RDEPENDS_${PN}-module-o += "${PN}-module-b" +RDEPENDS_${PN}-module-o += "${PN}-module-carp" +RDEPENDS_${PN}-module-ops += "${PN}-module-opcode" +RDEPENDS_${PN}-module-overloading += "${PN}-module-overload-numbers" +RDEPENDS_${PN}-module-overloading += "${PN}-module-warnings" +RDEPENDS_${PN}-module-overload += "${PN}-module-mro" +RDEPENDS_${PN}-module-overload += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-overload += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-params-check += "${PN}-module-carp" +RDEPENDS_${PN}-module-params-check += "${PN}-module-exporter" +RDEPENDS_${PN}-module-params-check += "${PN}-module-locale-maketext-simple" +RDEPENDS_${PN}-module-params-check += "${PN}-module-strict" +RDEPENDS_${PN}-module-params-check += "${PN}-module-vars" +RDEPENDS_${PN}-module-parent += "${PN}-module-strict" +RDEPENDS_${PN}-module-parent += "${PN}-module-vars" +RDEPENDS_${PN}-module-perlfaq += 
"${PN}-module-strict" +RDEPENDS_${PN}-module-perlfaq += "${PN}-module-warnings" +RDEPENDS_${PN}-module-perlio-encoding += "${PN}-module-strict" +RDEPENDS_${PN}-module-perlio-encoding += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-perlio-mmap += "${PN}-module-strict" +RDEPENDS_${PN}-module-perlio-mmap += "${PN}-module-warnings" +RDEPENDS_${PN}-module-perlio-mmap += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-perlio-scalar += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-perlio-via += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-perlio-via-quotedprint += "${PN}-module-mime-quotedprint" +RDEPENDS_${PN}-module-perlio-via-quotedprint += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-checker += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-checker += "${PN}-module-exporter" +RDEPENDS_${PN}-module-pod-checker += "${PN}-module-pod-parser" +RDEPENDS_${PN}-module-pod-checker += "${PN}-module-pod-parseutils" +RDEPENDS_${PN}-module-pod-checker += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-checker += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-escapes += "${PN}-module-exporter" +RDEPENDS_${PN}-module-pod-escapes += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-escapes += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-escapes += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-find += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-find += "${PN}-module-config" +RDEPENDS_${PN}-module-pod-find += "${PN}-module-cwd" +RDEPENDS_${PN}-module-pod-find += "${PN}-module-exporter" +RDEPENDS_${PN}-module-pod-find += "${PN}-module-file-find" +RDEPENDS_${PN}-module-pod-find += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-pod-find += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-find += "${PN}-module-symbol" +RDEPENDS_${PN}-module-pod-find += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-functions += "${PN}-module-exporter" +RDEPENDS_${PN}-module-pod-functions += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-inputobjects += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-inputobjects += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-man += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-man += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-pod-man += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-pod-man += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-man += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-man += "${PN}-module-subs" +RDEPENDS_${PN}-module-pod-man += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-man += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-parselink += "${PN}-module-exporter" +RDEPENDS_${PN}-module-pod-parselink += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-parselink += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-parselink += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-parser += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-parser += "${PN}-module-exporter" +RDEPENDS_${PN}-module-pod-parser += "${PN}-module-pod-inputobjects" +RDEPENDS_${PN}-module-pod-parser += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-parser += "${PN}-module-symbol" +RDEPENDS_${PN}-module-pod-parser += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-parseutils += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-parseutils += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-parseutils += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-config" +RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-file-spec-functions" 
+RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-baseto += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-perldoc-getoptsoo += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-getoptsoo += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-config" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-encode" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-file-spec-functions" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-file-temp" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-pod-perldoc-getoptsoo" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-perldoc-toansi += "${PN}-module-parent" +RDEPENDS_${PN}-module-pod-perldoc-toansi += "${PN}-module-pod-text-color" +RDEPENDS_${PN}-module-pod-perldoc-toansi += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-toansi += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-toansi += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-perldoc-tochecker += "${PN}-module-pod-checker" +RDEPENDS_${PN}-module-pod-perldoc-tochecker += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-tochecker += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-tochecker += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-encode" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-file-spec-functions" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-io-handle" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-io-select" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-ipc-open3" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-parent" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-pod-man" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-pod-perldoc-topod" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-symbol" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-toman += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-perldoc-tonroff += "${PN}-module-parent" +RDEPENDS_${PN}-module-pod-perldoc-tonroff += "${PN}-module-pod-man" +RDEPENDS_${PN}-module-pod-perldoc-tonroff += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-tonroff += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-tonroff += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-perldoc-topod += "${PN}-module-parent" +RDEPENDS_${PN}-module-pod-perldoc-topod += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-topod += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-topod += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-perldoc-tortf += "${PN}-module-parent" +RDEPENDS_${PN}-module-pod-perldoc-tortf += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-tortf += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-tortf += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-perldoc-toterm += "${PN}-module-parent" +RDEPENDS_${PN}-module-pod-perldoc-toterm += 
"${PN}-module-pod-text-termcap" +RDEPENDS_${PN}-module-pod-perldoc-toterm += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-toterm += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-toterm += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-perldoc-totext += "${PN}-module-parent" +RDEPENDS_${PN}-module-pod-perldoc-totext += "${PN}-module-pod-text" +RDEPENDS_${PN}-module-pod-perldoc-totext += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-totext += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-totext += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-perldoc-toxml += "${PN}-module-parent" +RDEPENDS_${PN}-module-pod-perldoc-toxml += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-perldoc-toxml += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-perldoc-toxml += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-plaintext += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-plaintext += "${PN}-module-pod-select" +RDEPENDS_${PN}-module-pod-plaintext += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-plaintext += "${PN}-module-symbol" +RDEPENDS_${PN}-module-pod-plaintext += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-select += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-select += "${PN}-module-pod-parser" +RDEPENDS_${PN}-module-pod-select += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-select += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-integer" +RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-pod-simple-transcode" +RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-blackbox += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-pod-simple-methody" +RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-text-wrap" +RDEPENDS_${PN}-module-pod-simple-checker += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-debug += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-debug += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-debug += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-debug += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-dumpastext += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-dumpastext += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-dumpastext += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-dumpasxml += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-dumpasxml += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-dumpasxml += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-dumpasxml += "${PN}-module-text-wrap" +RDEPENDS_${PN}-module-pod-simple-linksection += "${PN}-module-overload" +RDEPENDS_${PN}-module-pod-simple-linksection += "${PN}-module-pod-simple-blackbox" +RDEPENDS_${PN}-module-pod-simple-linksection += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-linksection += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-methody += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-methody += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-methody += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple += "${PN}-module-carp" 
+RDEPENDS_${PN}-module-pod-simple += "${PN}-module-integer" +RDEPENDS_${PN}-module-pod-simple += "${PN}-module-pod-escapes" +RDEPENDS_${PN}-module-pod-simple += "${PN}-module-pod-simple-blackbox" +RDEPENDS_${PN}-module-pod-simple += "${PN}-module-pod-simple-linksection" +RDEPENDS_${PN}-module-pod-simple += "${PN}-module-pod-simple-tiedoutfh" +RDEPENDS_${PN}-module-pod-simple += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple += "${PN}-module-symbol" +RDEPENDS_${PN}-module-pod-simple += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-progress += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-pullparserendtoken += "${PN}-module-pod-simple-pullparsertoken" +RDEPENDS_${PN}-module-pod-simple-pullparserendtoken += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-pullparserendtoken += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-pod-simple-pullparserendtoken" +RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-pod-simple-pullparserstarttoken" +RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-pod-simple-pullparsertexttoken" +RDEPENDS_${PN}-module-pod-simple-pullparser += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-pullparserstarttoken += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-pullparserstarttoken += "${PN}-module-pod-simple-pullparsertoken" +RDEPENDS_${PN}-module-pod-simple-pullparserstarttoken += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-pullparserstarttoken += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-pullparsertexttoken += "${PN}-module-pod-simple-pullparsertoken" +RDEPENDS_${PN}-module-pod-simple-pullparsertexttoken += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-pullparsertexttoken += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-pullparsertoken += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-rtf += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-rtf += "${PN}-module-integer" +RDEPENDS_${PN}-module-pod-simple-rtf += "${PN}-module-pod-simple-pullparser" +RDEPENDS_${PN}-module-pod-simple-rtf += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-rtf += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-config" +RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-cwd" +RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-search += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-simpletree += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-simpletree += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-simpletree += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-simpletree += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-textcontent += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-textcontent += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-textcontent += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-textcontent += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-text += 
"${PN}-module-pod-simple-methody" +RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-text-wrap" +RDEPENDS_${PN}-module-pod-simple-text += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-tiedoutfh += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-tiedoutfh += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-tiedoutfh += "${PN}-module-symbol" +RDEPENDS_${PN}-module-pod-simple-tiedoutfh += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-transcodedumb += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-transcodedumb += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-transcode += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-transcode += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-transcodesmart += "${PN}-module-encode" +RDEPENDS_${PN}-module-pod-simple-transcodesmart += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-transcodesmart += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-transcodesmart += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-simple-xmloutstream += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-simple-xmloutstream += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-simple-xmloutstream += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-simple-xmloutstream += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-text-color += "${PN}-module-pod-text" +RDEPENDS_${PN}-module-pod-text-color += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-text-color += "${PN}-module-term-ansicolor" +RDEPENDS_${PN}-module-pod-text-color += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-text-color += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-text-overstrike += "${PN}-module-pod-text" +RDEPENDS_${PN}-module-pod-text-overstrike += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-text-overstrike += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-text-overstrike += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-text += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-text += "${PN}-module-encode" +RDEPENDS_${PN}-module-pod-text += "${PN}-module-exporter" +RDEPENDS_${PN}-module-pod-text += "${PN}-module-pod-simple" +RDEPENDS_${PN}-module-pod-text += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-text += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-text += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-pod-text" +RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-posix" +RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-term-cap" +RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-vars" +RDEPENDS_${PN}-module-pod-text-termcap += "${PN}-module-warnings" +RDEPENDS_${PN}-module-pod-usage += "${PN}-module-carp" +RDEPENDS_${PN}-module-pod-usage += "${PN}-module-config" +RDEPENDS_${PN}-module-pod-usage += "${PN}-module-exporter" +RDEPENDS_${PN}-module-pod-usage += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-pod-usage += "${PN}-module-strict" +RDEPENDS_${PN}-module-pod-usage += "${PN}-module-vars" +RDEPENDS_${PN}-module-posix += "${PN}-module-exporter" +RDEPENDS_${PN}-module-posix += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-posix += "${PN}-module-strict" +RDEPENDS_${PN}-module-posix += "${PN}-module-tie-hash" +RDEPENDS_${PN}-module-posix += "${PN}-module-warnings" +RDEPENDS_${PN}-module-posix += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-re += "${PN}-module-carp" +RDEPENDS_${PN}-module-re += "${PN}-module-exporter" 
+RDEPENDS_${PN}-module-re += "${PN}-module-strict" +RDEPENDS_${PN}-module-re += "${PN}-module-term-cap" +RDEPENDS_${PN}-module-re += "${PN}-module-warnings" +RDEPENDS_${PN}-module-re += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-safe += "${PN}-module-b" +RDEPENDS_${PN}-module-safe += "${PN}-module-carp" +RDEPENDS_${PN}-module-safe += "${PN}-module-carp-heavy" +RDEPENDS_${PN}-module-safe += "${PN}-module-opcode" +RDEPENDS_${PN}-module-safe += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-safe += "${PN}-module-strict" +RDEPENDS_${PN}-module-safe += "${PN}-module-utf8" +RDEPENDS_${PN}-module-scalar-util += "${PN}-module-carp" +RDEPENDS_${PN}-module-scalar-util += "${PN}-module-exporter" +RDEPENDS_${PN}-module-scalar-util += "${PN}-module-list-util" +RDEPENDS_${PN}-module-scalar-util += "${PN}-module-strict" +RDEPENDS_${PN}-module-sdbm-file += "${PN}-module-exporter" +RDEPENDS_${PN}-module-sdbm-file += "${PN}-module-strict" +RDEPENDS_${PN}-module-sdbm-file += "${PN}-module-tie-hash" +RDEPENDS_${PN}-module-sdbm-file += "${PN}-module-warnings" +RDEPENDS_${PN}-module-sdbm-file += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-search-dict += "${PN}-module-exporter" +RDEPENDS_${PN}-module-search-dict += "${PN}-module-feature" +RDEPENDS_${PN}-module-search-dict += "${PN}-module-strict" +RDEPENDS_${PN}-module-selectsaver += "${PN}-module-carp" +RDEPENDS_${PN}-module-selectsaver += "${PN}-module-symbol" +RDEPENDS_${PN}-module-selfloader += "${PN}-module-exporter" +RDEPENDS_${PN}-module-selfloader += "${PN}-module-io-handle" +RDEPENDS_${PN}-module-selfloader += "${PN}-module-strict" +RDEPENDS_${PN}-module-selfloader += "${PN}-module-vars" +RDEPENDS_${PN}-module-sigtrap += "${PN}-module-carp" +RDEPENDS_${PN}-module-sigtrap += "${PN}-module-symbol" +RDEPENDS_${PN}-module-socket += "${PN}-module-carp" +RDEPENDS_${PN}-module-socket += "${PN}-module-exporter" +RDEPENDS_${PN}-module-socket += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-socket += "${PN}-module-strict" +RDEPENDS_${PN}-module-socket += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-socket += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-sort += "${PN}-module-carp" +RDEPENDS_${PN}-module-sort += "${PN}-module-strict" +RDEPENDS_${PN}-module-storable += "${PN}-module-carp" +RDEPENDS_${PN}-module-storable += "${PN}-module-config" +RDEPENDS_${PN}-module-storable += "${PN}-module-exporter" +RDEPENDS_${PN}-module-storable += "${PN}-module-io-file" +RDEPENDS_${PN}-module-storable += "${PN}-module-vars" +RDEPENDS_${PN}-module-storable += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-sub-util += "${PN}-module-exporter" +RDEPENDS_${PN}-module-sub-util += "${PN}-module-list-util" +RDEPENDS_${PN}-module-sub-util += "${PN}-module-strict" +RDEPENDS_${PN}-module-sub-util += "${PN}-module-warnings" +RDEPENDS_${PN}-module-symbol += "${PN}-module-exporter" +RDEPENDS_${PN}-module-sys-hostname += "${PN}-module-carp" +RDEPENDS_${PN}-module-sys-hostname += "${PN}-module-exporter" +RDEPENDS_${PN}-module-sys-hostname += "${PN}-module-posix" +RDEPENDS_${PN}-module-sys-hostname += "${PN}-module-strict" +RDEPENDS_${PN}-module-sys-hostname += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-carp" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-exporter" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-posix" 
+RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-socket" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-strict" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-sys-hostname" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-vars" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-warnings" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-sys-syslog += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-tap-base += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-base += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-base += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-base += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-formatter-base += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-formatter-base += "${PN}-module-posix" +RDEPENDS_${PN}-module-tap-formatter-base += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-formatter-base += "${PN}-module-tap-formatter-color" +RDEPENDS_${PN}-module-tap-formatter-base += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-formatter-color += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-formatter-color += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-formatter-color += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-formatter-color += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-file-path" +RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-formatter-console-parallelsession += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-formatter-console += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-formatter-console += "${PN}-module-posix" +RDEPENDS_${PN}-module-tap-formatter-console += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-formatter-console += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-formatter-console-session += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-formatter-console-session += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-formatter-console-session += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-formatter-file += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-formatter-file += "${PN}-module-posix" +RDEPENDS_${PN}-module-tap-formatter-file += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-formatter-file += "${PN}-module-tap-formatter-file-session" +RDEPENDS_${PN}-module-tap-formatter-file += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-formatter-file-session += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-formatter-file-session += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-formatter-file-session += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-formatter-session += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-formatter-session += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-formatter-session += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-harness-env += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-harness-env += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-harness-env += "${PN}-module-tap-object" +RDEPENDS_${PN}-module-tap-harness-env += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-tap-harness-env += "${PN}-module-warnings" 
+RDEPENDS_${PN}-module-tap-harness += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-harness += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-harness += "${PN}-module-file-path" +RDEPENDS_${PN}-module-tap-harness += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-tap-harness += "${PN}-module-io-handle" +RDEPENDS_${PN}-module-tap-harness += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-harness += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-object += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-object += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-object += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-aggregator += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-aggregator += "${PN}-module-benchmark" +RDEPENDS_${PN}-module-tap-parser-aggregator += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-parser-aggregator += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-aggregator += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-tap-parser-resultfactory" +RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-tap-parser-yamlish-reader" +RDEPENDS_${PN}-module-tap-parser-grammar += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-iterator-array += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-iterator-array += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-iterator-array += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-iteratorfactory += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-iterator += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-iterator += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-parser-iterator += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-iterator += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-iterator-process += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-iterator-process += "${PN}-module-config" +RDEPENDS_${PN}-module-tap-parser-iterator-process += "${PN}-module-io-handle" +RDEPENDS_${PN}-module-tap-parser-iterator-process += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-iterator-process += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-iterator-stream += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-iterator-stream += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-iterator-stream += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-multiplexer += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-multiplexer += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-parser-multiplexer += "${PN}-module-io-select" +RDEPENDS_${PN}-module-tap-parser-multiplexer += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-multiplexer += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-grammar" 
+RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-iterator" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-iteratorfactory" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-result" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-resultfactory" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-source" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-sourcehandler-executable" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-sourcehandler-file" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-sourcehandler-handle" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-sourcehandler-perl" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-tap-parser-sourcehandler-rawtap" +RDEPENDS_${PN}-module-tap-parser += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-result-bailout += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-result-bailout += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-result-bailout += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-result-comment += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-result-comment += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-result-comment += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-bailout" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-comment" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-plan" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-pragma" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-test" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-unknown" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-version" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-tap-parser-result-yaml" +RDEPENDS_${PN}-module-tap-parser-resultfactory += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-result += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-result += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-result += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-result-plan += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-result-plan += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-result-plan += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-result-pragma += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-result-pragma += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-result-pragma += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-result-test += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-result-test += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-result-test += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-result-unknown += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-result-unknown += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-result-unknown += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-result-version += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-result-version += "${PN}-module-strict" 
+RDEPENDS_${PN}-module-tap-parser-result-version += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-result-yaml += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-result-yaml += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-result-yaml += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-scheduler-job += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-parser-scheduler-job += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-scheduler-job += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-scheduler += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-parser-scheduler += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-scheduler += "${PN}-module-tap-parser-scheduler-job" +RDEPENDS_${PN}-module-tap-parser-scheduler += "${PN}-module-tap-parser-scheduler-spinner" +RDEPENDS_${PN}-module-tap-parser-scheduler += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-scheduler-spinner += "${PN}-module-carp" +RDEPENDS_${PN}-module-tap-parser-scheduler-spinner += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-scheduler-spinner += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-tap-parser-iteratorfactory" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-tap-parser-iterator-process" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-executable += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-tap-parser-iteratorfactory" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-tap-parser-iterator-stream" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-file += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-tap-parser-iteratorfactory" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-tap-parser-iterator-stream" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-handle += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-sourcehandler += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-sourcehandler += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-sourcehandler += "${PN}-module-tap-parser-iterator" +RDEPENDS_${PN}-module-tap-parser-sourcehandler += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-config" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-tap-parser-iteratorfactory" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-tap-parser-iterator-process" 
+RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-perl += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-rawtap += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-rawtap += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-rawtap += "${PN}-module-tap-parser-iterator-array" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-rawtap += "${PN}-module-tap-parser-iteratorfactory" +RDEPENDS_${PN}-module-tap-parser-sourcehandler-rawtap += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-source += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-source += "${PN}-module-constant" +RDEPENDS_${PN}-module-tap-parser-source += "${PN}-module-file-basename" +RDEPENDS_${PN}-module-tap-parser-source += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-source += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-yamlish-reader += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-yamlish-reader += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-yamlish-reader += "${PN}-module-warnings" +RDEPENDS_${PN}-module-tap-parser-yamlish-writer += "${PN}-module-base" +RDEPENDS_${PN}-module-tap-parser-yamlish-writer += "${PN}-module-strict" +RDEPENDS_${PN}-module-tap-parser-yamlish-writer += "${PN}-module-warnings" +RDEPENDS_${PN}-module-term-ansicolor += "${PN}-module-carp" +RDEPENDS_${PN}-module-term-ansicolor += "${PN}-module-exporter" +RDEPENDS_${PN}-module-term-ansicolor += "${PN}-module-strict" +RDEPENDS_${PN}-module-term-ansicolor += "${PN}-module-warnings" +RDEPENDS_${PN}-module-term-cap += "${PN}-module-carp" +RDEPENDS_${PN}-module-term-cap += "${PN}-module-strict" +RDEPENDS_${PN}-module-term-cap += "${PN}-module-vars" +RDEPENDS_${PN}-module-term-complete += "${PN}-module-exporter" +RDEPENDS_${PN}-module-term-complete += "${PN}-module-strict" +RDEPENDS_${PN}-module-term-readline += "${PN}-module-strict" +RDEPENDS_${PN}-module-term-readline += "${PN}-module-term-cap" +RDEPENDS_${PN}-module-test-builder-module += "${PN}-module-exporter" +RDEPENDS_${PN}-module-test-builder-module += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-builder-module += "${PN}-module-test-builder" +RDEPENDS_${PN}-module-test-builder += "${PN}-module-config" +RDEPENDS_${PN}-module-test-builder += "${PN}-module-perlio" +RDEPENDS_${PN}-module-test-builder += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-builder += "${PN}-module-threads-shared" +RDEPENDS_${PN}-module-test-builder += "${PN}-module-warnings" +RDEPENDS_${PN}-module-test-builder-tester-color += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-builder-tester-color += "${PN}-module-test-builder-tester" +RDEPENDS_${PN}-module-test-builder-tester += "${PN}-module-carp" +RDEPENDS_${PN}-module-test-builder-tester += "${PN}-module-exporter" +RDEPENDS_${PN}-module-test-builder-tester += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-builder-tester += "${PN}-module-symbol" +RDEPENDS_${PN}-module-test-builder-tester += "${PN}-module-test-builder" +RDEPENDS_${PN}-module-test-harness += "${PN}-module-base" +RDEPENDS_${PN}-module-test-harness += "${PN}-module-config" +RDEPENDS_${PN}-module-test-harness += "${PN}-module-constant" +RDEPENDS_${PN}-module-test-harness += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-harness += "${PN}-module-tap-harness" +RDEPENDS_${PN}-module-test-harness += "${PN}-module-tap-parser-aggregator" +RDEPENDS_${PN}-module-test-harness += "${PN}-module-tap-parser-source" 
+RDEPENDS_${PN}-module-test-harness += "${PN}-module-tap-parser-sourcehandler-perl" +RDEPENDS_${PN}-module-test-harness += "${PN}-module-text-parsewords" +RDEPENDS_${PN}-module-test-harness += "${PN}-module-warnings" +RDEPENDS_${PN}-module-test-more += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-test-more += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-more += "${PN}-module-test-builder-module" +RDEPENDS_${PN}-module-test-more += "${PN}-module-warnings" +RDEPENDS_${PN}-module-test += "${PN}-module-carp" +RDEPENDS_${PN}-module-test += "${PN}-module-exporter" +RDEPENDS_${PN}-module-test += "${PN}-module-file-temp" +RDEPENDS_${PN}-module-test += "${PN}-module-strict" +RDEPENDS_${PN}-module-test += "${PN}-module-vars" +RDEPENDS_${PN}-module-test-simple += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-simple += "${PN}-module-test-builder-module" +RDEPENDS_${PN}-module-test-tester-capture += "${PN}-module-config" +RDEPENDS_${PN}-module-test-tester-capture += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-tester-capture += "${PN}-module-test-builder" +RDEPENDS_${PN}-module-test-tester-capture += "${PN}-module-threads-shared" +RDEPENDS_${PN}-module-test-tester-capture += "${PN}-module-vars" +RDEPENDS_${PN}-module-test-tester-capturerunner += "${PN}-module-exporter" +RDEPENDS_${PN}-module-test-tester-capturerunner += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-tester-capturerunner += "${PN}-module-test-tester-capture" +RDEPENDS_${PN}-module-test-tester-delegate += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-tester-delegate += "${PN}-module-vars" +RDEPENDS_${PN}-module-test-tester-delegate += "${PN}-module-warnings" +RDEPENDS_${PN}-module-test-tester += "${PN}-module-exporter" +RDEPENDS_${PN}-module-test-tester += "${PN}-module-strict" +RDEPENDS_${PN}-module-test-tester += "${PN}-module-test-builder" +RDEPENDS_${PN}-module-test-tester += "${PN}-module-test-tester-capturerunner" +RDEPENDS_${PN}-module-test-tester += "${PN}-module-test-tester-delegate" +RDEPENDS_${PN}-module-test-tester += "${PN}-module-vars" +RDEPENDS_${PN}-module-text-abbrev += "${PN}-module-exporter" +RDEPENDS_${PN}-module-text-balanced += "${PN}-module-carp" +RDEPENDS_${PN}-module-text-balanced += "${PN}-module-exporter" +RDEPENDS_${PN}-module-text-balanced += "${PN}-module-overload" +RDEPENDS_${PN}-module-text-balanced += "${PN}-module-selfloader" +RDEPENDS_${PN}-module-text-balanced += "${PN}-module-strict" +RDEPENDS_${PN}-module-text-balanced += "${PN}-module-vars" +RDEPENDS_${PN}-module-text-parsewords += "${PN}-module-carp" +RDEPENDS_${PN}-module-text-parsewords += "${PN}-module-exporter" +RDEPENDS_${PN}-module-text-parsewords += "${PN}-module-strict" +RDEPENDS_${PN}-module-text-tabs += "${PN}-module-exporter" +RDEPENDS_${PN}-module-text-tabs += "${PN}-module-strict" +RDEPENDS_${PN}-module-text-tabs += "${PN}-module-vars" +RDEPENDS_${PN}-module-text-wrap += "${PN}-module-exporter" +RDEPENDS_${PN}-module-text-wrap += "${PN}-module-re" +RDEPENDS_${PN}-module-text-wrap += "${PN}-module-strict" +RDEPENDS_${PN}-module-text-wrap += "${PN}-module-text-tabs" +RDEPENDS_${PN}-module-text-wrap += "${PN}-module-vars" +RDEPENDS_${PN}-module-text-wrap += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-thread += "${PN}-module-config" +RDEPENDS_${PN}-module-thread += "${PN}-module-exporter" +RDEPENDS_${PN}-module-thread += "${PN}-module-strict" +RDEPENDS_${PN}-module-thread += "${PN}-module-threads" +RDEPENDS_${PN}-module-thread += "${PN}-module-threads-shared" +RDEPENDS_${PN}-module-thread += 
"${PN}-module-warnings" +RDEPENDS_${PN}-module-thread-queue += "${PN}-module-carp" +RDEPENDS_${PN}-module-thread-queue += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-thread-queue += "${PN}-module-strict" +RDEPENDS_${PN}-module-thread-queue += "${PN}-module-threads-shared" +RDEPENDS_${PN}-module-thread-queue += "${PN}-module-warnings" +RDEPENDS_${PN}-module-thread-semaphore += "${PN}-module-carp" +RDEPENDS_${PN}-module-thread-semaphore += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-thread-semaphore += "${PN}-module-strict" +RDEPENDS_${PN}-module-thread-semaphore += "${PN}-module-threads-shared" +RDEPENDS_${PN}-module-thread-semaphore += "${PN}-module-warnings" +RDEPENDS_${PN}-module-threads += "${PN}-module-carp" +RDEPENDS_${PN}-module-threads += "${PN}-module-config" +RDEPENDS_${PN}-module-threads += "${PN}-module-overload" +RDEPENDS_${PN}-module-threads += "${PN}-module-strict" +RDEPENDS_${PN}-module-threads += "${PN}-module-warnings" +RDEPENDS_${PN}-module-threads += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-threads-shared += "${PN}-module-carp" +RDEPENDS_${PN}-module-threads-shared += "${PN}-module-scalar-util" +RDEPENDS_${PN}-module-threads-shared += "${PN}-module-strict" +RDEPENDS_${PN}-module-threads-shared += "${PN}-module-warnings" +RDEPENDS_${PN}-module-threads-shared += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-tie-array += "${PN}-module-carp" +RDEPENDS_${PN}-module-tie-array += "${PN}-module-strict" +RDEPENDS_${PN}-module-tie-array += "${PN}-module-vars" +RDEPENDS_${PN}-module-tie-file += "${PN}-module-carp" +RDEPENDS_${PN}-module-tie-file += "${PN}-module-fcntl" +RDEPENDS_${PN}-module-tie-file += "${PN}-module-posix" +RDEPENDS_${PN}-module-tie-file += "${PN}-module-strict" +RDEPENDS_${PN}-module-tie-file += "${PN}-module-symbol" +RDEPENDS_${PN}-module-tie-handle += "${PN}-module-carp" +RDEPENDS_${PN}-module-tie-handle += "${PN}-module-tie-stdhandle" +RDEPENDS_${PN}-module-tie-handle += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-tie-hash-namedcapture += "${PN}-module-strict" +RDEPENDS_${PN}-module-tie-hash-namedcapture += "${PN}-module-xsloader" +RDEPENDS_${PN}-module-tie-hash += "${PN}-module-carp" +RDEPENDS_${PN}-module-tie-hash += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-tie-memoize += "${PN}-module-strict" +RDEPENDS_${PN}-module-tie-memoize += "${PN}-module-tie-hash" +RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-carp" +RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-config" +RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-overload" +RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-strict" +RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-tie-hash" +RDEPENDS_${PN}-module-tie-refhash += "${PN}-module-vars" +RDEPENDS_${PN}-module-tie-scalar += "${PN}-module-carp" +RDEPENDS_${PN}-module-tie-scalar += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-tie-stdhandle += "${PN}-module-strict" +RDEPENDS_${PN}-module-tie-stdhandle += "${PN}-module-tie-handle" +RDEPENDS_${PN}-module-tie-stdhandle += "${PN}-module-vars" +RDEPENDS_${PN}-module-tie-substrhash += "${PN}-module-carp" +RDEPENDS_${PN}-module-tie-substrhash += "${PN}-module-integer" +RDEPENDS_${PN}-module-time-gmtime += "${PN}-module-exporter" +RDEPENDS_${PN}-module-time-gmtime += "${PN}-module-strict" +RDEPENDS_${PN}-module-time-gmtime += "${PN}-module-time-tm" +RDEPENDS_${PN}-module-time-gmtime += "${PN}-module-vars" +RDEPENDS_${PN}-module-time-hires += "${PN}-module-carp" +RDEPENDS_${PN}-module-time-hires += "${PN}-module-dynaloader" 
+RDEPENDS_${PN}-module-time-hires += "${PN}-module-exporter" +RDEPENDS_${PN}-module-time-hires += "${PN}-module-strict" +RDEPENDS_${PN}-module-time-local += "${PN}-module-carp" +RDEPENDS_${PN}-module-time-local += "${PN}-module-config" +RDEPENDS_${PN}-module-time-local += "${PN}-module-constant" +RDEPENDS_${PN}-module-time-local += "${PN}-module-exporter" +RDEPENDS_${PN}-module-time-local += "${PN}-module-strict" +RDEPENDS_${PN}-module-time-local += "${PN}-module-vars" +RDEPENDS_${PN}-module-time-localtime += "${PN}-module-exporter" +RDEPENDS_${PN}-module-time-localtime += "${PN}-module-strict" +RDEPENDS_${PN}-module-time-localtime += "${PN}-module-time-tm" +RDEPENDS_${PN}-module-time-localtime += "${PN}-module-vars" +RDEPENDS_${PN}-module-time-piece += "${PN}-module-carp" +RDEPENDS_${PN}-module-time-piece += "${PN}-module-constant" +RDEPENDS_${PN}-module-time-piece += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-time-piece += "${PN}-module-exporter" +RDEPENDS_${PN}-module-time-piece += "${PN}-module-integer" +RDEPENDS_${PN}-module-time-piece += "${PN}-module-overload" +RDEPENDS_${PN}-module-time-piece += "${PN}-module-strict" +RDEPENDS_${PN}-module-time-piece += "${PN}-module-time-local" +RDEPENDS_${PN}-module-time-piece += "${PN}-module-time-seconds" +RDEPENDS_${PN}-module-time-seconds += "${PN}-module-constant" +RDEPENDS_${PN}-module-time-seconds += "${PN}-module-exporter" +RDEPENDS_${PN}-module-time-seconds += "${PN}-module-overload" +RDEPENDS_${PN}-module-time-seconds += "${PN}-module-strict" +RDEPENDS_${PN}-module-time-tm += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-time-tm += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-collate-cjk-big5 += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-collate-cjk-big5 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicode-collate-cjk-gb2312 += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-collate-cjk-gb2312 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicode-collate-cjk-jisx0208 += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-collate-cjk-jisx0208 += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicode-collate-cjk-korean += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-collate-cjk-korean += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicode-collate-cjk-pinyin += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-collate-cjk-pinyin += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicode-collate-cjk-stroke += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-collate-cjk-stroke += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicode-collate-cjk-zhuyin += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-collate-cjk-zhuyin += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicode-collate-locale += "${PN}-module-base" +RDEPENDS_${PN}-module-unicode-collate-locale += "${PN}-module-carp" +RDEPENDS_${PN}-module-unicode-collate-locale += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-collate-locale += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-carp" +RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-constant" +RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-dynaloader" +RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-collate += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-carp" +RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-dynaloader" 
+RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-exporter" +RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-normalize += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-carp" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-charnames" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-exporter" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-feature" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-file-spec" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-if" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-integer" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-strict" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-unicode-normalize" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-unicore" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-utf8-heavy" +RDEPENDS_${PN}-module-unicode-ucd += "${PN}-module-warnings" +RDEPENDS_${PN}-module-unicore += "${PN}-module-integer" +RDEPENDS_${PN}-module-universal += "${PN}-module-carp" +RDEPENDS_${PN}-module-user-grent += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-user-grent += "${PN}-module-exporter" +RDEPENDS_${PN}-module-user-grent += "${PN}-module-strict" +RDEPENDS_${PN}-module-user-grent += "${PN}-module-vars" +RDEPENDS_${PN}-module-user-pwent += "${PN}-module-carp" +RDEPENDS_${PN}-module-user-pwent += "${PN}-module-class-struct" +RDEPENDS_${PN}-module-user-pwent += "${PN}-module-config" +RDEPENDS_${PN}-module-user-pwent += "${PN}-module-exporter" +RDEPENDS_${PN}-module-user-pwent += "${PN}-module-strict" +RDEPENDS_${PN}-module-user-pwent += "${PN}-module-vars" +RDEPENDS_${PN}-module-user-pwent += "${PN}-module-warnings" +RDEPENDS_${PN}-module-utf8 += "${PN}-module-carp" +RDEPENDS_${PN}-module-utf8 += "${PN}-module-utf8-heavy" +RDEPENDS_${PN}-module-version += "${PN}-module-strict" +RDEPENDS_${PN}-module-version += "${PN}-module-vars" +RDEPENDS_${PN}-module-version += "${PN}-module-version-regex" +RDEPENDS_${PN}-module-version += "${PN}-module-warnings-register" +RDEPENDS_${PN}-module-version-regex += "${PN}-module-strict" +RDEPENDS_${PN}-module-version-regex += "${PN}-module-vars" +RDEPENDS_${PN}-module-xsloader += "${PN}-module-carp" +RDEPENDS_${PN}-module-xsloader += "${PN}-module-dynaloader" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/0001-Configure-Remove-fstack-protector-strong-for-native-.patch b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/0001-Configure-Remove-fstack-protector-strong-for-native-.patch new file mode 100644 index 000000000..7391ac583 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/0001-Configure-Remove-fstack-protector-strong-for-native-.patch @@ -0,0 +1,103 @@ +Some distributions (like opensuse421) supported by the project +come with older gcc releases; -fstack-protector-strong is only supported +by GCC>=4.9. + +This causes a build failure when installing perl-native from an sstate that +comes from a machine supporting -fstack-protector-strong [1]. + +So disable usage of this flag in perl-native builds; this patch can +be removed when all supported distros come with GCC>=4.9.
+ +[YOCTO #10338] + +Upstream-status: Inappropriate [configuration] + +[1] http://errors.yoctoproject.org/Errors/Details/109589/ + +Signed-off-by: Aníbal Limón +--- + Configure | 54 ------------------------------------------------------ + 1 file changed, 54 deletions(-) + +diff --git a/Configure b/Configure +index efbdcfd..d5bd98c 100755 +--- a/Configure ++++ b/Configure +@@ -5468,30 +5468,6 @@ default|recommended) + eval $checkccflag + ;; + esac +- +- # on x86_64 (at least) we require an extra library (libssp) in the +- # link command line. This library is not named, so I infer that it is +- # an implementation detail that may change. Hence the safest approach +- # is to add the flag to the flags passed to the compiler at link time, +- # as that way the compiler can do the right implementation dependant +- # thing. (NWC) +- case "$osname" in +- amigaos) ;; # -fstack-protector builds but doesn't work +- *) case "$gccversion" in +- ?*) set stack-protector-strong -fstack-protector-strong +- eval $checkccflag +- case "$dflt" in +- *-fstack-protector-strong*) ;; # It got added. +- *) # Try the plain/older -fstack-protector. +- set stack-protector -fstack-protector +- eval $checkccflag +- ;; +- esac +- ;; +- esac +- ;; +- esac +- ;; + esac + + case "$mips_type" in +@@ -5634,21 +5610,6 @@ case "$ldflags" in + ;; + *) dflt="$ldflags";; + esac +-# See note above about -fstack-protector +-case "$ccflags" in +-*-fstack-protector-strong*) +- case "$dflt" in +- *-fstack-protector-strong*) ;; # Don't add it again +- *) dflt="$dflt -fstack-protector-strong" ;; +- esac +- ;; +-*-fstack-protector*) +- case "$dflt" in +- *-fstack-protector*) ;; # Don't add it again +- *) dflt="$dflt -fstack-protector" ;; +- esac +- ;; +-esac + + : Try to guess additional flags to pick up local libraries. + for thislibdir in $libpth; do +@@ -8571,21 +8532,6 @@ EOM + ''|' ') dflt='none' ;; + esac + +- case "$ldflags" in +- *-fstack-protector-strong*) +- case "$dflt" in +- *-fstack-protector-strong*) ;; # Don't add it again +- *) dflt="$dflt -fstack-protector-strong" ;; +- esac +- ;; +- *-fstack-protector*) +- case "$dflt" in +- *-fstack-protector*) ;; # Don't add it again +- *) dflt="$dflt -fstack-protector" ;; +- esac +- ;; +- esac +- + rp="Any special flags to pass to $ld to create a dynamically loaded library?" + . 
./myread + case "$ans" in +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/Makefile.SH.patch b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/Makefile.SH.patch index edf49d60e..fd8237fe7 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/Makefile.SH.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/Makefile.SH.patch @@ -1,15 +1,22 @@ +From 4dd9f3f81d8c81c96ff1ea0890ea55685f28b7e9 Mon Sep 17 00:00:00 2001 +From: Hongxu Jia +Date: Tue, 1 Jul 2014 15:51:53 +0800 +Subject: [PATCH 28/49] perl, perl-native, perl-ptest: upgrade from 5.14.3 to + 5.20.0 + Upstream-Status:Inappropriate [embedded specific] Signed-off-by: Hongxu Jia + --- - Makefile.SH | 145 +++++++++++++++++++++++++++-------------------------------- - 1 file changed, 67 insertions(+), 78 deletions(-) + Makefile.SH | 147 ++++++++++++++++++++++++++++-------------------------------- + 1 file changed, 68 insertions(+), 79 deletions(-) -Index: perl-5.22.1/Makefile.SH -=================================================================== ---- perl-5.22.1.orig/Makefile.SH -+++ perl-5.22.1/Makefile.SH -@@ -39,10 +39,10 @@ case "$useshrplib" in +diff --git a/Makefile.SH b/Makefile.SH +index 916b332..b143d46 100755 +--- a/Makefile.SH ++++ b/Makefile.SH +@@ -48,10 +48,10 @@ case "$useshrplib" in true) # Prefix all runs of 'miniperl' and 'perl' with # $ldlibpth so that ./perl finds *this* shared libperl. @@ -24,7 +31,7 @@ Index: perl-5.22.1/Makefile.SH pldlflags="$cccdlflags" static_ldflags='' -@@ -122,10 +122,11 @@ true) +@@ -131,10 +131,11 @@ true) *) eval "ldlibpthval=\"\$$ldlibpthname\"" @@ -40,7 +47,7 @@ Index: perl-5.22.1/Makefile.SH ;; esac -@@ -141,18 +142,7 @@ true) +@@ -150,18 +151,7 @@ true) # INSTALL file, under "Building a shared perl library". # If there is no pre-existing $libperl, we don't need # to do anything further. @@ -60,17 +67,13 @@ Index: perl-5.22.1/Makefile.SH ;; os390) test -f /bin/env && ldlibpth="/bin/env $ldlibpth" ;; -@@ -566,13 +556,23 @@ splintfiles = $(c1) +@@ -596,13 +586,23 @@ splintfiles = $(c1) @echo `$(CCCMDSRC)` -S $*.c @`$(CCCMDSRC)` -S $*.c -all: $(FIRSTMAKEFILE) $(MINIPERL_EXE) $(generated_pods) $(private) $(unidatafiles) $(public) $(dynamic_ext) $(nonxs_ext) extras.make $(MANIFEST_SRT) - @echo " "; - @echo " Everything is up to date. Type '$(MAKE) test' to run test suite." -- --$(MANIFEST_SRT): MANIFEST $(PERL_EXE) -- @$(RUN_PERL) Porting/manisort -q || (echo "WARNING: re-sorting MANIFEST"; \ -- $(RUN_PERL) Porting/manisort -q -o MANIFEST; sh -c true) +#all: $(FIRSTMAKEFILE) $(MINIPERL_EXE) $(generated_pods) $(private) $(unidatafiles) $(public) $(dynamic_ext) $(nonxs_ext) extras.make $(MANIFEST_SRT) +# @echo " "; +# @echo " Everything is up to date. Type '$(MAKE) test' to run test suite." @@ -80,7 +83,10 @@ Index: perl-5.22.1/Makefile.SH +more: $(generated_pods) $(private) $(public) + +more2: $(dynamic_ext) -+ + +-$(MANIFEST_SRT): MANIFEST $(PERL_EXE) +- @$(RUN_PERL) Porting/manisort -q || (echo "WARNING: re-sorting MANIFEST"; \ +- $(RUN_PERL) Porting/manisort -q -o MANIFEST; sh -c true) +more3: $(nonxs_ext) + +more4: extras.make $(MANIFEST_SRT) @@ -91,7 +97,7 @@ Index: perl-5.22.1/Makefile.SH @touch $(MANIFEST_SRT) .PHONY: all utilities -@@ -581,7 +581,7 @@ $(MANIFEST_SRT): MANIFEST $(PERL_EXE) +@@ -611,7 +611,7 @@ $(MANIFEST_SRT): MANIFEST $(PERL_EXE) # by make_patchnum.pl. 
git_version.h: lib/Config_git.pl @@ -100,7 +106,7 @@ Index: perl-5.22.1/Makefile.SH $(MINIPERL) make_patchnum.pl # make sure that we recompile perl.c if the git version changes -@@ -594,7 +594,7 @@ perl$(OBJ_EXT): git_version.h +@@ -624,7 +624,7 @@ perl$(OBJ_EXT): git_version.h # loading, we need to build perl first. case "$usedl$static_cwd" in defineundef) @@ -109,7 +115,7 @@ Index: perl-5.22.1/Makefile.SH ;; definedefine) util_deps='$(PERL_EXE) $(CONFIGPM) FORCE' -@@ -604,7 +604,7 @@ definedefine) +@@ -634,7 +634,7 @@ definedefine) esac $spitshell >>$Makefile <>$Makefile <<'!NO!SUBS!' -perlmain.c: $(MINIPERL_EXE) ext/ExtUtils-Miniperl/pm_to_blib +perlmain.c: ext/ExtUtils-Miniperl/lib/ExtUtils/Miniperl.pm $(MINIPERL) -Ilib -MExtUtils::Miniperl -e 'writemain(\"perlmain.c", @ARGV)' DynaLoader $(static_ext) # The file ext.libs is a list of libraries that must be linked in -@@ -760,7 +760,7 @@ PERLEXPORT = perl.exp +@@ -806,7 +806,7 @@ PERLEXPORT = perl.exp ;; esac $spitshell >>$Makefile <<'!NO!SUBS!' @@ -136,7 +142,7 @@ Index: perl-5.22.1/Makefile.SH ./$(MINIPERLEXP) makedef.pl --sort-fold PLATFORM=aix CC_FLAGS="$(OPTIMIZE)" > perl.exp !NO!SUBS! -@@ -769,7 +769,7 @@ os2) +@@ -815,7 +815,7 @@ os2) $spitshell >>$Makefile <<'!NO!SUBS!' MINIPERLEXP = miniperl @@ -145,19 +151,19 @@ Index: perl-5.22.1/Makefile.SH ./$(MINIPERLEXP) makedef.pl PLATFORM=os2 -DPERL_DLL=$(PERL_DLL) CC_FLAGS="$(OPTIMIZE)" > perl5.def !NO!SUBS! -@@ -829,7 +829,7 @@ $(LIBPERL): $& $(obj) $(DYNALOADER) $(LI +@@ -878,7 +878,7 @@ $(LIBPERL): $& $(perllib_objs) $(DYNALOADER) $(LIBPERLEXPORT) true) $spitshell >>$Makefile <<'!NO!SUBS!' rm -f $@ -- $(LD) -o $@ $(SHRPLDFLAGS) $(obj) $(DYNALOADER) $(libs) -+ $(LD) -o $@ $(SHRPLDFLAGS) $(obj) $(DYNALOADER) $(libs) -Wl,-soname,libperl.so.5 +- $(LD) -o $@ $(SHRPLDFLAGS) $(perllib_objs) $(DYNALOADER) $(libs) ++ $(LD) -o $@ $(SHRPLDFLAGS) $(perllib_objs) $(DYNALOADER) $(libs) -Wl,-soname,libperl.so.5 !NO!SUBS! case "$osname" in aix) -@@ -877,7 +877,9 @@ $(MINIPERL_EXE): lib/buildcustomize.pl +@@ -936,15 +936,19 @@ lib/buildcustomize.pl: $& $(miniperl_objs) write_buildcustomize.pl $spitshell >>$Makefile <<'!NO!SUBS!' - lib/buildcustomize.pl: $& $(mini_obj) - $(CC) -o $(MINIPERL_EXE) $(CLDFLAGS) $(mini_obj) $(libs) + lib/buildcustomize.pl: $& $(miniperl_objs) + $(CC) -o $(MINIPERL_EXE) $(CLDFLAGS) $(miniperl_objs) $(libs) - $(LDLIBPTH) ./miniperl$(HOST_EXE_EXT) -w -Ilib -Idist/Exporter/lib -MExporter -e '' || sh -c 'echo >&2 Failed to build miniperl. Please run make minitest; exit 1' + mv -f miniperl miniperl-target + ln -s hostperl miniperl @@ -165,21 +171,22 @@ Index: perl-5.22.1/Makefile.SH $(MINIPERL) -f write_buildcustomize.pl !NO!SUBS! ;; -@@ -885,7 +887,9 @@ lib/buildcustomize.pl: $& $(mini_obj) + next4*) $spitshell >>$Makefile <<'!NO!SUBS!' - lib/buildcustomize.pl: $& $(mini_obj) write ldcustomize.pl - $(CC) -o $(MINIPERL_EXE) $(mini_obj libs) + lib/buildcustomize.pl: $& $(miniperl_objs) write ldcustomize.pl +- $(CC) -o $(MINIPERL_EXE) $(miniperl_objs libs) - $(LDLIBPTH) ./miniperl$(HOST _EXT) -w -Ilib -Idist/Exporter/lib -MExporter -e '' || sh -c 'echo >&2 Failed to build miniperl. Please run make minitest; exit 1' ++ $(CC) -o $(MINIPERL_EXE) $(miniperl_objs libs) + mv -f miniperl miniperl-target + ln -s hostperl miniperl + #$(LDLIBPTH) ./miniperl$(HOST _EXT) -w -Ilib -Idist/Exporter/lib -MExporter -e '' || sh -c 'echo >&2 Failed to build miniperl. Please run make minitest; exit 1' $(MINIPERL) -f write_buildcustomize.pl !NO!SUBS! 
;; -@@ -907,7 +911,9 @@ lib/buildcustomize.pl: $& $(mini_obj) wr +@@ -966,7 +970,9 @@ lib/buildcustomize.pl: $& $(miniperl_objs) write_buildcustomize.pl -@rm -f miniperl.xok $(CC) $(CLDFLAGS) $(NAMESPACEFLAGS) -o $(MINIPERL_EXE) \ - $(mini_obj) $(libs) + $(miniperl_objs) $(libs) - $(LDLIBPTH) ./miniperl$(HOST_EXE_EXT) -w -Ilib -Idist/Exporter/lib -MExporter -e '' || sh -c 'echo >&2 Failed to build miniperl. Please run make minitest; exit 1' + mv -f miniperl miniperl-target + ln -s hostperl miniperl @@ -187,7 +194,7 @@ Index: perl-5.22.1/Makefile.SH $(MINIPERL) -f write_buildcustomize.pl !NO!SUBS! ;; -@@ -918,7 +924,9 @@ lib/buildcustomize.pl: \$& \$(mini_obj) +@@ -977,7 +983,9 @@ lib/buildcustomize.pl: \$& \$(miniperl_objs) write_buildcustomize.pl -@rm -f miniperl.xok -@rm \$(MINIPERL_EXE) \$(LNS) \$(HOST_PERL) \$(MINIPERL_EXE) @@ -198,10 +205,10 @@ Index: perl-5.22.1/Makefile.SH \$(MINIPERL) -f write_buildcustomize.pl 'osname' "$osname" !GROK!THIS! else -@@ -927,7 +935,9 @@ lib/buildcustomize.pl: $& $(mini_obj) wr +@@ -986,7 +994,9 @@ lib/buildcustomize.pl: $& $(miniperl_objs) write_buildcustomize.pl -@rm -f miniperl.xok $(CC) $(CLDFLAGS) -o $(MINIPERL_EXE) \ - $(mini_obj) $(libs) + $(miniperl_objs) $(libs) - $(LDLIBPTH) ./miniperl$(HOST_EXE_EXT) -w -Ilib -Idist/Exporter/lib -MExporter -e '' || sh -c 'echo >&2 Failed to build miniperl. Please run make minitest; exit 1' + mv -f miniperl miniperl-target + ln -s hostperl miniperl @@ -209,7 +216,7 @@ Index: perl-5.22.1/Makefile.SH $(MINIPERL) -f write_buildcustomize.pl !NO!SUBS! fi -@@ -971,7 +981,7 @@ case "${osname}" in +@@ -1036,7 +1046,7 @@ case "${osname}" in catamount) $spitshell >>$Makefile <>$Makefile <<'!NO!SUBS!' .PHONY: makeppport @@ -227,7 +234,7 @@ Index: perl-5.22.1/Makefile.SH $(MINIPERL) mkppport !NO!SUBS! -@@ -991,16 +1001,16 @@ esac +@@ -1056,16 +1066,16 @@ esac $spitshell >>$Makefile <<'!NO!SUBS!' .PHONY: preplibrary @@ -247,7 +254,7 @@ Index: perl-5.22.1/Makefile.SH $(MINIPERL) lib/unicore/mktables -C lib/unicore -P pod -maketest -makelist -p # Commented out so always runs, mktables looks at far more files than we # can in this makefile to decide if needs to run or not -@@ -1009,12 +1019,12 @@ uni.data: $(MINIPERL_EXE) $(CONFIGPM) li +@@ -1074,12 +1084,12 @@ uni.data: $(MINIPERL_EXE) $(CONFIGPM) lib/unicore/mktables $(nonxs_ext) # $(PERL_EXE) and ext because pod_lib.pl needs Digest::MD5 # But also this ensures that all extensions are built before we try to scan # them, which picks up Devel::PPPort's documentation. @@ -263,16 +270,16 @@ Index: perl-5.22.1/Makefile.SH $(MINIPERL) autodoc.pl pod/perlmodlib.pod: $(MINIPERL_EXE) pod/perlmodlib.PL MANIFEST -@@ -1024,7 +1034,7 @@ pod/perl5221delta.pod: pod/perldelta.pod - $(RMS) pod/perl5221delta.pod - $(LNS) perldelta.pod pod/perl5221delta.pod +@@ -1089,7 +1099,7 @@ pod/perl5240delta.pod: pod/perldelta.pod + $(RMS) pod/perl5240delta.pod + $(LNS) perldelta.pod pod/perl5240delta.pod -extra.pods: $(MINIPERL_EXE) +extra.pods: -@test ! -f extra.pods || rm -f `cat extra.pods` -@rm -f extra.pods -@for x in `grep -l '^=[a-z]' README.* | grep -v README.vms` ; do \ -@@ -1068,8 +1078,7 @@ EOT +@@ -1133,8 +1143,7 @@ EOT $spitshell >>$Makefile <>$Makefile <<'!NO!SUBS!' install.perl: $(INSTALL_DEPENDENCE) installperl @@ -295,7 +302,7 @@ Index: perl-5.22.1/Makefile.SH # XXX Experimental. Hardwired values, but useful for testing. # Eventually Configure could ask for some of these values. 
-@@ -1101,24 +1106,8 @@ install.html: all installhtml +@@ -1166,24 +1171,8 @@ install.html: all installhtml else $spitshell >>$Makefile <<'!NO!SUBS!' install.perl: $(INSTALL_DEPENDENCE) installperl @@ -321,7 +328,7 @@ Index: perl-5.22.1/Makefile.SH !NO!SUBS! fi -@@ -1210,13 +1199,13 @@ manicheck: FORCE +@@ -1275,13 +1264,13 @@ manicheck: FORCE # # DynaLoader may be needed for extensions that use Makefile.PL. @@ -338,7 +345,7 @@ Index: perl-5.22.1/Makefile.SH $(MINIPERL) make_ext.pl $@ $(MAKE_EXT_ARGS) MAKE="$(MAKE)" LIBPERL_A=$(LIBPERL) LINKTYPE=static $(STATIC_LDFLAGS) n_dummy $(nonxs_ext): $(MINIPERL_EXE) lib/buildcustomize.pl preplibrary FORCE -@@ -1405,14 +1394,14 @@ test_prep_pre: preplibrary utilities $(n +@@ -1479,14 +1468,14 @@ test_prep_pre: preplibrary utilities $(nonxs_ext) case "$targethost" in '') $spitshell >>$Makefile <<'!NO!SUBS!' @@ -355,7 +362,7 @@ Index: perl-5.22.1/Makefile.SH \$(dynamic_ext) \$(TEST_PERL_DLL) runtests \ \$(generated_pods) $to libperl.* -@@ -1465,7 +1454,7 @@ test_prep test-prep: test_prep_pre \$(MI +@@ -1539,7 +1528,7 @@ test_prep test-prep: test_prep_pre \$(MINIPERL_EXE) \$(unidatafiles) \$(PERL_EXE esac $spitshell >>$Makefile <<'!NO!SUBS!' @@ -364,12 +371,15 @@ Index: perl-5.22.1/Makefile.SH $(MINIPERL) make_ext.pl $(dynamic_ext_re) MAKE="$(MAKE)" LIBPERL_A=$(LIBPERL) LINKTYPE=dynamic cd t && (rm -f $(PERL_EXE); $(LNS) ../$(PERL_EXE) $(PERL_EXE)) !NO!SUBS! -@@ -1526,7 +1515,7 @@ minitest_prep: +@@ -1600,7 +1589,7 @@ minitest_prep: MINITEST_TESTS = base/*.t comp/*.t cmd/*.t run/*.t io/*.t re/*.t opbasic/*.t op/*.t uni/*.t perf/*.t -minitest: $(MINIPERL_EXE) minitest_prep +minitest: minitest_prep - - cd t && $(RUN_PERL) TEST $(MINITEST_TESTS) Date: Fri, 16 Dec 2005 01:32:14 +1100 -Subject: Remove Errno version check due to upgrade problems with long-running - processes. +Subject: [PATCH 6/8] Remove Errno version check due to upgrade problems with + long-running processes. Bug-Debian: http://bugs.debian.org/343351 @@ -11,25 +11,27 @@ processes embedding perl when upgrading to a newer version, compatible, but built on a different machine. Patch-Name: debian/errno_ver.diff + --- ext/Errno/Errno_pm.PL | 5 ----- 1 file changed, 5 deletions(-) diff --git a/ext/Errno/Errno_pm.PL b/ext/Errno/Errno_pm.PL -index c6bfa06..519e5c7 100644 +index 6251a3c..eeed445 100644 --- a/ext/Errno/Errno_pm.PL +++ b/ext/Errno/Errno_pm.PL -@@ -278,13 +278,8 @@ sub write_errno_pm { - - package Errno; - require Exporter; +@@ -294,11 +294,6 @@ EDQ + # they've already declared perl doesn't need to worry about this risk. 
+ if(!$ENV{'PERL_BUILD_EXPAND_CONFIG_VARS'}) { + print <<"CONFIG_CHECK_END"; -use Config; - use strict; - -"\$Config{'archname'}-\$Config{'osvers'}" eq -"$archname-$Config{'osvers'}" or - die "Errno architecture ($archname-$Config{'osvers'}) does not match executable architecture (\$Config{'archname'}-\$Config{'osvers'})"; - - our \$VERSION = "$VERSION"; - \$VERSION = eval \$VERSION; - our \@ISA = 'Exporter'; + CONFIG_CHECK_END + } + +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/debian/prune_libs.diff b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/debian/prune_libs.diff index b4443f647..d153e0efc 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/debian/prune_libs.diff +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/debian/prune_libs.diff @@ -1,7 +1,8 @@ -From 21bc033755f0b609f9bb2aa9e98b10d58424406f Mon Sep 17 00:00:00 2001 +From c4538f966c49e356599358def7d1febf61bca21f Mon Sep 17 00:00:00 2001 From: Brendan O'Dea Date: Fri, 18 Mar 2005 22:22:25 +1100 -Subject: Prune the list of libraries wanted to what we actually need. +Subject: [PATCH 15/49] Prune the list of libraries wanted to what we actually + need. Bug-Debian: http://bugs.debian.org/128355 @@ -9,25 +10,26 @@ We want to keep the dependencies on perl-base as small as possible, and some of the original list may be present on buildds (see Bug#128355). Patch-Name: debian/prune_libs.diff + --- Configure | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/Configure b/Configure -index 0a405d3..6fcc3bc 100755 +index 5a353d6..a00df64 100755 --- a/Configure +++ b/Configure -@@ -1454,8 +1454,7 @@ libswanted_uselargefiles='' - : set usesocks on the Configure command line to enable socks. +@@ -1479,8 +1479,7 @@ archname='' + usereentrant='undef' : List of libraries we want. : If anyone needs extra -lxxx, put those in a hint file. --libswanted="cl pthread socket bind inet nsl nm ndbm gdbm dbm db malloc dl ld" +-libswanted="cl pthread socket bind inet nsl ndbm gdbm dbm db malloc dl ld" -libswanted="$libswanted sun m crypt sec util c cposix posix ucb bsd BSD" +libswanted='gdbm gdbm_compat db dl m c crypt' : We probably want to search /usr/shlib before most other libraries. : This is only used by the lib/ExtUtils/MakeMaker.pm routine extliblist. glibpth=`echo " $glibpth " | sed -e 's! /usr/shlib ! !'` -@@ -23593,7 +23592,7 @@ sunos*X4*) +@@ -24007,7 +24006,7 @@ sunos*X4*) ;; *) case "$usedl" in $define|true|[yY]*) @@ -36,3 +38,6 @@ index 0a405d3..6fcc3bc 100755 shift perllibs="$*" ;; +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/dynaloaderhack.patch b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/dynaloaderhack.patch index 3df593346..719f07c9c 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/dynaloaderhack.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/dynaloaderhack.patch @@ -1,3 +1,8 @@ +From 2e0ce5d27e70defd66ace0661af7c24daae34f8b Mon Sep 17 00:00:00 2001 +From: Richard Purdie +Date: Sat, 19 Jan 2013 23:49:24 +0000 +Subject: [PATCH 7/8] perl: Add dyanloader build hack + Hack the dynamic module loader so that we use native modules since we can't load the target ones. 
@@ -6,14 +11,18 @@ Upstream-Status: Inappropriate RP 2013/01/13 -Index: perl-5.14.2/ext/DynaLoader/DynaLoader_pm.PL -=================================================================== ---- perl-5.14.2.orig/ext/DynaLoader/DynaLoader_pm.PL 2011-09-19 13:18:22.000000000 +0000 -+++ perl-5.14.2/ext/DynaLoader/DynaLoader_pm.PL 2013-01-19 16:09:51.020584945 +0000 -@@ -310,6 +310,10 @@ +--- + ext/DynaLoader/DynaLoader_pm.PL | 4 ++++ + 1 file changed, 4 insertions(+) + +diff --git a/ext/DynaLoader/DynaLoader_pm.PL b/ext/DynaLoader/DynaLoader_pm.PL +index e828f35..4021559 100644 +--- a/ext/DynaLoader/DynaLoader_pm.PL ++++ b/ext/DynaLoader/DynaLoader_pm.PL +@@ -343,6 +343,10 @@ sub bootstrap { foreach (@INC) { <<$^O-eq-VMS>>chop($_ = VMS::Filespec::unixpath($_));<> - my $dir = "$_/auto/$modpname"; + $dir = "$_/auto/$modpname"; + + if (defined $ENV{PERL_LIB} and defined $ENV{PERLHOSTLIB}) { + $dir =~ s/$ENV{PERL_LIB}/$ENV{PERLHOSTLIB}/g; @@ -21,3 +30,6 @@ Index: perl-5.14.2/ext/DynaLoader/DynaLoader_pm.PL next unless -d $dir; # skip over uninteresting directories +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2015-8607.patch b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2015-8607.patch deleted file mode 100644 index 7b4a0015c..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2015-8607.patch +++ /dev/null @@ -1,74 +0,0 @@ -From 652c8d4852a69f1bb4d387946f9b76350a1f0d0e Mon Sep 17 00:00:00 2001 -From: Tony Cook -Date: Tue, 15 Dec 2015 10:56:54 +1100 -Subject: [PATCH] perl: fix CVE-2015-8607 - -ensure File::Spec::canonpath() preserves taint - -Previously the unix specific XS implementation of canonpath() would -return an untainted path when supplied a tainted path. - -For the empty string case, newSVpvs() already sets taint as needed on -its result. - -This issue was assigned CVE-2015-8607. [perl #126862] - -Backport patch from http://perl5.git.perl.org/perl.git/commitdiff/0b6f93036de171c12ba95d415e264d9cf7f4e1fd - -Upstream-Status: Backport -CVE: CVE-2015-8607 -Signed-off-by: Mingli Yu ---- - dist/PathTools/Cwd.xs | 1 + - dist/PathTools/t/taint.t | 19 ++++++++++++++++++- - 2 files changed, 19 insertions(+), 1 deletion(-) - -diff --git a/dist/PathTools/Cwd.xs b/dist/PathTools/Cwd.xs -index 9d4dcf0..3d018dc 100644 ---- a/dist/PathTools/Cwd.xs -+++ b/dist/PathTools/Cwd.xs -@@ -535,6 +535,7 @@ THX_unix_canonpath(pTHX_ SV *path) - *o = 0; - SvPOK_on(retval); - SvCUR_set(retval, o - SvPVX(retval)); -+ SvTAINT(retval); - return retval; - } - -diff --git a/dist/PathTools/t/taint.t b/dist/PathTools/t/taint.t -index 309b3e5..48f8c5b 100644 ---- a/dist/PathTools/t/taint.t -+++ b/dist/PathTools/t/taint.t -@@ -12,7 +12,7 @@ use Test::More; - BEGIN { - plan( - ${^TAINT} -- ? (tests => 17) -+ ? (tests => 21) - : (skip_all => "A perl without taint support") - ); - } -@@ -34,3 +34,20 @@ foreach my $func (@Functions) { - - # Previous versions of Cwd tainted $^O - is !tainted($^O), 1, "\$^O should not be tainted"; -+ -+{ -+ # [perl #126862] canonpath() loses taint -+ my $tainted = substr($ENV{PATH}, 0, 0); -+ # yes, getcwd()'s result should be tainted, and is tested above -+ # but be sure -+ ok tainted(File::Spec->canonpath($tainted . 
Cwd::getcwd)), -+ "canonpath() keeps taint on non-empty string"; -+ ok tainted(File::Spec->canonpath($tainted)), -+ "canonpath() keeps taint on empty string"; -+ -+ (Cwd::getcwd() =~ /^(.*)/); -+ my $untainted = $1; -+ ok !tainted($untainted), "make sure our untainted value is untainted"; -+ ok !tainted(File::Spec->canonpath($untainted)), -+ "canonpath() doesn't add taint to untainted string"; -+} --- -2.8.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-1238.patch b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-1238.patch deleted file mode 100644 index 730ef178a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-1238.patch +++ /dev/null @@ -1,352 +0,0 @@ -From 9987be3d24286d96d9dccec0433253ee8ad894b4 Mon Sep 17 00:00:00 2001 -From: Tony Cook -Date: Tue, 21 Jun 2016 10:02:02 +1000 -Subject: [PATCH] perl: fix CVE-2016-1238 - -(perl #127834) remove . from the end of @INC if complex modules are loaded - -While currently Encode and Storable are know to attempt to load modules -not included in the core, updates to other modules may lead to those -also attempting to load new modules, so be safe and remove . for those -as well. - -Backport patch from http://perl5.git.perl.org/perl.git/commitdiff/cee96d52c39b1e7b36e1c62d38bcd8d86e9a41ab - -Upstream-Status: Backport -CVE: CVE-2016-1238 -Signed-off-by: Mingli Yu ---- - cpan/Archive-Tar/bin/ptar | 1 + - cpan/Archive-Tar/bin/ptardiff | 1 + - cpan/Archive-Tar/bin/ptargrep | 1 + - cpan/CPAN/scripts/cpan | 1 + - cpan/Digest-SHA/shasum | 1 + - cpan/Encode/bin/enc2xs | 1 + - cpan/Encode/bin/encguess | 1 + - cpan/Encode/bin/piconv | 1 + - cpan/Encode/bin/ucmlint | 1 + - cpan/Encode/bin/unidump | 1 + - cpan/ExtUtils-MakeMaker/bin/instmodsh | 1 + - cpan/IO-Compress/bin/zipdetails | 1 + - cpan/JSON-PP/bin/json_pp | 1 + - cpan/Test-Harness/bin/prove | 1 + - dist/ExtUtils-ParseXS/lib/ExtUtils/xsubpp | 1 + - dist/Module-CoreList/corelist | 1 + - ext/Pod-Html/bin/pod2html | 1 + - utils/c2ph.PL | 1 + - utils/h2ph.PL | 2 ++ - utils/h2xs.PL | 2 ++ - utils/libnetcfg.PL | 1 + - utils/perlbug.PL | 1 + - utils/perldoc.PL | 5 ++++- - utils/perlivp.PL | 2 ++ - utils/splain.PL | 6 ++++++ - 25 files changed, 36 insertions(+), 1 deletion(-) - -diff --git a/cpan/Archive-Tar/bin/ptar b/cpan/Archive-Tar/bin/ptar -index 0eaffa7..9dc6402 100644 ---- a/cpan/Archive-Tar/bin/ptar -+++ b/cpan/Archive-Tar/bin/ptar -@@ -1,6 +1,7 @@ - #!/usr/bin/perl - use strict; - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use File::Find; - use Getopt::Std; - use Archive::Tar; -diff --git a/cpan/Archive-Tar/bin/ptardiff b/cpan/Archive-Tar/bin/ptardiff -index 66bd859..4668fa6 100644 ---- a/cpan/Archive-Tar/bin/ptardiff -+++ b/cpan/Archive-Tar/bin/ptardiff -@@ -1,5 +1,6 @@ - #!/usr/bin/perl - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - use Archive::Tar; - use Getopt::Std; -diff --git a/cpan/Archive-Tar/bin/ptargrep b/cpan/Archive-Tar/bin/ptargrep -index 1a320f1..8dc6b4f 100644 ---- a/cpan/Archive-Tar/bin/ptargrep -+++ b/cpan/Archive-Tar/bin/ptargrep -@@ -4,6 +4,7 @@ - # archive. See 'ptargrep --help' for more documentation. - # - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - use warnings; - -diff --git a/cpan/CPAN/scripts/cpan b/cpan/CPAN/scripts/cpan -index 5f4320e..ccba47e 100644 ---- a/cpan/CPAN/scripts/cpan -+++ b/cpan/CPAN/scripts/cpan -@@ -1,5 +1,6 @@ - #!/usr/local/bin/perl - -+BEGIN { pop @INC if $INC[-1] eq '.' 
} - use strict; - use vars qw($VERSION); - -diff --git a/cpan/Digest-SHA/shasum b/cpan/Digest-SHA/shasum -index 14ddd60..62a2b0e 100644 ---- a/cpan/Digest-SHA/shasum -+++ b/cpan/Digest-SHA/shasum -@@ -13,6 +13,7 @@ - ## "-0" option for reading bit strings, and - ## "-p" option for portable digests (to be deprecated). - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - use warnings; - use Fcntl; -diff --git a/cpan/Encode/bin/enc2xs b/cpan/Encode/bin/enc2xs -index 4d64e38..473a15c 100644 ---- a/cpan/Encode/bin/enc2xs -+++ b/cpan/Encode/bin/enc2xs -@@ -4,6 +4,7 @@ BEGIN { - # with $ENV{PERL_CORE} set - # In case we need it in future... - require Config; import Config; -+ pop @INC if $INC[-1] eq '.'; - } - use strict; - use warnings; -diff --git a/cpan/Encode/bin/encguess b/cpan/Encode/bin/encguess -index 5d7ac80..0be5c7c 100644 ---- a/cpan/Encode/bin/encguess -+++ b/cpan/Encode/bin/encguess -@@ -1,5 +1,6 @@ - #!./perl - use 5.008001; -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - use warnings; - use Encode; -diff --git a/cpan/Encode/bin/piconv b/cpan/Encode/bin/piconv -index c1dad9e..60b2a59 100644 ---- a/cpan/Encode/bin/piconv -+++ b/cpan/Encode/bin/piconv -@@ -1,6 +1,7 @@ - #!./perl - # $Id: piconv,v 2.7 2014/05/31 09:48:48 dankogai Exp $ - # -+BEGIN { pop @INC if $INC[-1] eq '.' } - use 5.8.0; - use strict; - use Encode ; -diff --git a/cpan/Encode/bin/ucmlint b/cpan/Encode/bin/ucmlint -index 622376d..25e0d67 100644 ---- a/cpan/Encode/bin/ucmlint -+++ b/cpan/Encode/bin/ucmlint -@@ -3,6 +3,7 @@ - # $Id: ucmlint,v 2.2 2008/03/12 09:51:11 dankogai Exp $ - # - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - our $VERSION = do { my @r = (q$Revision: 2.2 $ =~ /\d+/g); sprintf "%d."."%02d" x $#r, @r }; - -diff --git a/cpan/Encode/bin/unidump b/cpan/Encode/bin/unidump -index ae0da30..f190827 100644 ---- a/cpan/Encode/bin/unidump -+++ b/cpan/Encode/bin/unidump -@@ -1,5 +1,6 @@ - #!./perl - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - use Encode; - use Getopt::Std; -diff --git a/cpan/ExtUtils-MakeMaker/bin/instmodsh b/cpan/ExtUtils-MakeMaker/bin/instmodsh -index e551434..b3b109f 100644 ---- a/cpan/ExtUtils-MakeMaker/bin/instmodsh -+++ b/cpan/ExtUtils-MakeMaker/bin/instmodsh -@@ -1,5 +1,6 @@ - #!/usr/bin/perl -w - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - use IO::File; - use ExtUtils::Packlist; -diff --git a/cpan/IO-Compress/bin/zipdetails b/cpan/IO-Compress/bin/zipdetails -index 0249850..1b9c70a 100644 ---- a/cpan/IO-Compress/bin/zipdetails -+++ b/cpan/IO-Compress/bin/zipdetails -@@ -5,6 +5,7 @@ - # Display info on the contents of a Zip file - # - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - use warnings ; - -diff --git a/cpan/JSON-PP/bin/json_pp b/cpan/JSON-PP/bin/json_pp -index df9d243..896cd2f 100644 ---- a/cpan/JSON-PP/bin/json_pp -+++ b/cpan/JSON-PP/bin/json_pp -@@ -1,5 +1,6 @@ - #!/usr/bin/perl - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - use Getopt::Long; - -diff --git a/cpan/Test-Harness/bin/prove b/cpan/Test-Harness/bin/prove -index 6637cc4..d71b238 100644 ---- a/cpan/Test-Harness/bin/prove -+++ b/cpan/Test-Harness/bin/prove -@@ -1,5 +1,6 @@ - #!/usr/bin/perl -w - -+BEGIN { pop @INC if $INC[-1] eq '.' 
} - use strict; - use warnings; - use App::Prove; -diff --git a/dist/ExtUtils-ParseXS/lib/ExtUtils/xsubpp b/dist/ExtUtils-ParseXS/lib/ExtUtils/xsubpp -index e2ac71a..d596cdf 100644 ---- a/dist/ExtUtils-ParseXS/lib/ExtUtils/xsubpp -+++ b/dist/ExtUtils-ParseXS/lib/ExtUtils/xsubpp -@@ -1,5 +1,6 @@ - #!perl - use 5.006; -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - eval { - require ExtUtils::ParseXS; -diff --git a/dist/Module-CoreList/corelist b/dist/Module-CoreList/corelist -index aa4a945..bbe61cc 100644 ---- a/dist/Module-CoreList/corelist -+++ b/dist/Module-CoreList/corelist -@@ -130,6 +130,7 @@ requested perl versions. - - =cut - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use Module::CoreList; - use Getopt::Long qw(:config no_ignore_case); - use Pod::Usage; -diff --git a/ext/Pod-Html/bin/pod2html b/ext/Pod-Html/bin/pod2html -index b022859..7d1d232 100644 ---- a/ext/Pod-Html/bin/pod2html -+++ b/ext/Pod-Html/bin/pod2html -@@ -216,6 +216,7 @@ This program is distributed under the Artistic License. - - =cut - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use Pod::Html; - - pod2html @ARGV; -diff --git a/utils/c2ph.PL b/utils/c2ph.PL -index 13389ec..cef0b5c 100644 ---- a/utils/c2ph.PL -+++ b/utils/c2ph.PL -@@ -280,6 +280,7 @@ Anyway, here it is. Should run on perl v4 or greater. Maybe less. - - $RCSID = '$Id: c2ph,v 1.7 95/10/28 10:41:47 tchrist Exp Locker: tchrist $'; - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use File::Temp; - - ###################################################################### -diff --git a/utils/h2ph.PL b/utils/h2ph.PL -index 55c1f72..300b756 100644 ---- a/utils/h2ph.PL -+++ b/utils/h2ph.PL -@@ -36,6 +36,8 @@ $Config{startperl} - - print OUT <<'!NO!SUBS!'; - -+BEGIN { pop @INC if $INC[-1] eq '.' } -+ - use strict; - - use Config; -diff --git a/utils/h2xs.PL b/utils/h2xs.PL -index 268f680..f95ee0c 100644 ---- a/utils/h2xs.PL -+++ b/utils/h2xs.PL -@@ -35,6 +35,8 @@ $Config{startperl} - - print OUT <<'!NO!SUBS!'; - -+BEGIN { pop @INC if $INC[-1] eq '.' } -+ - use warnings; - - =head1 NAME -diff --git a/utils/libnetcfg.PL b/utils/libnetcfg.PL -index 59a2de8..26d2f99 100644 ---- a/utils/libnetcfg.PL -+++ b/utils/libnetcfg.PL -@@ -97,6 +97,7 @@ Jarkko Hietaniemi, conversion into libnetcfg for inclusion into Perl 5.8. - - # $Id: Configure,v 1.8 1997/03/04 09:22:32 gbarr Exp $ - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use strict; - use IO::File; - use Getopt::Std; -diff --git a/utils/perlbug.PL b/utils/perlbug.PL -index 885785a..ae8c343 100644 ---- a/utils/perlbug.PL -+++ b/utils/perlbug.PL -@@ -57,6 +57,7 @@ print OUT <<'!NO!SUBS!'; - my @patches = Config::local_patches(); - my $patch_tags = join "", map /(\S+)/ ? "+$1 " : (), @patches; - -+BEGIN { pop @INC if $INC[-1] eq '.' } - use warnings; - use strict; - use Config; -diff --git a/utils/perldoc.PL b/utils/perldoc.PL -index e201de9..cd60bd4 100644 ---- a/utils/perldoc.PL -+++ b/utils/perldoc.PL -@@ -44,7 +44,10 @@ $Config{startperl} - # This "$file" file was generated by "$0" - - require 5; --BEGIN { \$^W = 1 if \$ENV{'PERLDOCDEBUG'} } -+BEGIN { -+ \$^W = 1 if \$ENV{'PERLDOCDEBUG'}; -+ pop \@INC if \$INC[-1] eq '.'; -+} - use Pod::Perldoc; - exit( Pod::Perldoc->run() ); - -diff --git a/utils/perlivp.PL b/utils/perlivp.PL -index cc49f96..696a44e 100644 ---- a/utils/perlivp.PL -+++ b/utils/perlivp.PL -@@ -39,6 +39,8 @@ print OUT "\n# perlivp $^V\n"; - - print OUT <<'!NO!SUBS!'; - -+BEGIN { pop @INC if $INC[-1] eq '.' 
} -+ - sub usage { - warn "@_\n" if @_; - print << " EOUSAGE"; -diff --git a/utils/splain.PL b/utils/splain.PL -index 9c70b61..cae84a0 100644 ---- a/utils/splain.PL -+++ b/utils/splain.PL -@@ -38,6 +38,12 @@ $Config{startperl} - if \$running_under_some_shell; - !GROK!THIS! - -+print <<'!NO!SUBS!'; -+ -+BEGIN { pop @INC if $INC[-1] eq '.' } -+ -+!NO!SUBS! -+ - while () { - print OUT unless /^package diagnostics/; - } --- -2.8.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-2381.patch b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-2381.patch deleted file mode 100644 index 99fa8d9a6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-2381.patch +++ /dev/null @@ -1,114 +0,0 @@ -CVE: CVE-2016-2381 -Upstream-Status: Backport - -Backport patch to fix CVE-2016-2381 from - -http://perl5.git.perl.org/perl.git/commitdiff/ae37b791a73a9e78dedb89fb2429d2628cf58076 - -Signed-off-by: Kai Kang ---- -From: Tony Cook -Date: Wed, 27 Jan 2016 00:52:15 +0000 (+1100) -Subject: remove duplicate environment variables from environ -X-Git-Tag: v5.23.9~170 -X-Git-Url: http://perl5.git.perl.org/perl.git/commitdiff_plain/ae37b791a73a9e78dedb89fb2429d2628cf58076 - -remove duplicate environment variables from environ - -If we see duplicate environment variables while iterating over -environ[]: - -a) make sure we use the same value in %ENV that getenv() returns. - -Previously on a duplicate, %ENV would have the last entry for the name -from environ[], but a typical getenv() would return the first entry. - -Rather than assuming all getenv() implementations return the first entry -explicitly call getenv() to ensure they agree. - -b) remove duplicate entries from environ - -Previously if there was a duplicate definition for a name in environ[] -setting that name in %ENV could result in an unsafe value being passed -to a child process, so ensure environ[] has no duplicates. 
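As a quick aside to the long run of hunks quoted above, which all prepend the same one-line @INC guard to core Perl utility scripts: the following is a minimal standalone sketch of what that guard buys, written for this note rather than taken from any of the patches, and "Risky" is a made-up module name.

    #!/usr/bin/perl
    # Sketch of the @INC hardening idiom added by the hunks above.
    # "Risky" is a hypothetical module; only the @INC handling matters here.
    use strict;
    use warnings;

    # Perl builds of this era historically ended @INC with '.', so a plain
    # `require` could pick up ./Risky.pm from whatever directory the tool
    # happened to be started in.
    BEGIN { pop @INC if $INC[-1] eq '.' }

    # With the trailing '.' removed, only the configured library paths are
    # searched; a planted ./Risky.pm in the current directory is ignored.
    if (eval { require Risky; 1 }) {
        print "Risky loaded from a real library path\n";
    } else {
        print "Risky not found on the trimmed \@INC: $@";
    }

Run from a directory containing a rogue Risky.pm, the sketch still refuses to load it, which is exactly the behaviour the one-line guard is meant to enforce.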
- -CVE-2016-2381 ---- - -diff --git a/perl.c b/perl.c -index 4a324c6..5c71fd0 100644 ---- a/perl.c -+++ b/perl.c -@@ -4329,23 +4329,70 @@ S_init_postdump_symbols(pTHX_ int argc, char **argv, char **env) - } - if (env) { - char *s, *old_var; -+ STRLEN nlen; - SV *sv; -+ HV *dups = newHV(); -+ - for (; *env; env++) { - old_var = *env; - - if (!(s = strchr(old_var,'=')) || s == old_var) - continue; -+ nlen = s - old_var; - - #if defined(MSDOS) && !defined(DJGPP) - *s = '\0'; - (void)strupr(old_var); - *s = '='; - #endif -- sv = newSVpv(s+1, 0); -- (void)hv_store(hv, old_var, s - old_var, sv, 0); -+ if (hv_exists(hv, old_var, nlen)) { -+ const char *name = savepvn(old_var, nlen); -+ -+ /* make sure we use the same value as getenv(), otherwise code that -+ uses getenv() (like setlocale()) might see a different value to %ENV -+ */ -+ sv = newSVpv(PerlEnv_getenv(name), 0); -+ -+ /* keep a count of the dups of this name so we can de-dup environ later */ -+ if (hv_exists(dups, name, nlen)) -+ ++SvIVX(*hv_fetch(dups, name, nlen, 0)); -+ else -+ (void)hv_store(dups, name, nlen, newSViv(1), 0); -+ -+ Safefree(name); -+ } -+ else { -+ sv = newSVpv(s+1, 0); -+ } -+ (void)hv_store(hv, old_var, nlen, sv, 0); - if (env_is_not_environ) - mg_set(sv); - } -+ if (HvKEYS(dups)) { -+ /* environ has some duplicate definitions, remove them */ -+ HE *entry; -+ hv_iterinit(dups); -+ while ((entry = hv_iternext_flags(dups, 0))) { -+ STRLEN nlen; -+ const char *name = HePV(entry, nlen); -+ IV count = SvIV(HeVAL(entry)); -+ IV i; -+ SV **valp = hv_fetch(hv, name, nlen, 0); -+ -+ assert(valp); -+ -+ /* try to remove any duplicate names, depending on the -+ * implementation used in my_setenv() the iteration might -+ * not be necessary, but let's be safe. -+ */ -+ for (i = 0; i < count; ++i) -+ my_setenv(name, 0); -+ -+ /* and set it back to the value we set $ENV{name} to */ -+ my_setenv(name, SvPV_nolen(*valp)); -+ } -+ } -+ SvREFCNT_dec_NN(dups); - } - #endif /* USE_ENVIRON_ARRAY */ - #endif /* !PERL_MICRO */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-6185.patch b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-6185.patch deleted file mode 100644 index 2722af35b..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-fix-CVE-2016-6185.patch +++ /dev/null @@ -1,128 +0,0 @@ -From 7cedaa8bc2ca9e63369d0e2d4c4c23af9febb93a Mon Sep 17 00:00:00 2001 -From: Father Chrysostomos -Date: Sat, 2 Jul 2016 22:56:51 -0700 -Subject: [PATCH] perl: fix CVE-2016-6185 -MIME-Version: 1.0 - -Don't let XSLoader load relative paths - -[rt.cpan.org #115808] - -The logic in XSLoader for determining the library goes like this: - - my $c = () = split(/::/,$caller,-1); - $modlibname =~ s,[\\/][^\\/]+$,, while $c--; # Q&D basename - my $file = "$modlibname/auto/$modpname/$modfname.bundle"; - -(That last line varies by platform.) - -$caller is the calling package. $modlibname is the calling file. It -removes as many path segments from $modlibname as there are segments -in $caller. So if you have Foo/Bar/XS.pm calling XSLoader from the -Foo::Bar package, the $modlibname will end up containing the path in -@INC where XS.pm was found, followed by "/Foo". Usually the fallback -to Dynaloader::bootstrap_inherit, which does an @INC search, makes -things Just Work. - -But if our hypothetical Foo/Bar/XS.pm actually calls -XSLoader::load from inside a string eval, then path ends up being -"(eval 1)/auto/Foo/Bar/Bar.bundle". 
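The check this patch adds (quoted in the XSLoader_pm.PL hunk further below) boils down to: if the directory derived from the caller's file name is relative, only trust it when it is literally one of the @INC entries, otherwise fall back to DynaLoader's own search. A rough standalone Perl sketch of that decision follows; My::Mod, the sample path and the ".so" suffix are illustrative assumptions, not anything from the patch.

    #!/usr/bin/perl
    # Sketch of the relative-path check the XSLoader fix introduces.
    # My::Mod and the sample path are hypothetical; .so stands in for the
    # platform's dynamic-library extension.
    use strict;
    use warnings;

    sub shared_object_candidate {
        my ($caller, $modlibname) = @_;

        my @modparts = split /::/, $caller;
        my $modfname = $modparts[-1];
        my $modpname = join '/', @modparts;

        # The "Q&D basename" loop described above: strip one trailing path
        # segment per package component.
        my $c = () = split /::/, $caller, -1;
        $modlibname =~ s,[\\/][^\\/]+$,, while $c--;

        # The fix: a relative $modlibname is only used if it is literally an
        # @INC entry; "(eval 1)" from a string eval never is, so the caller
        # falls back to DynaLoader's @INC search instead.
        if ($modlibname !~ m{^[\\/]} && !grep { $_ eq $modlibname } @INC) {
            return undef;
        }
        return "$modlibname/auto/$modpname/$modfname.so";
    }

    my $so = shared_object_candidate('My::Mod', '(eval 1)/My/Mod.pm');
    print defined $so ? $so : "fall back to DynaLoader", "\n";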
- -So if someone creates a directory named '(eval 1)' with a naughty -binary file in it, it will be loaded if a script using Foo::Bar is run -in the parent directory. - -This commit makes XSLoader fall back to Dynaloader's @INC search if -the calling file has a relative path that is not found in @INC. - -Backport patch from http://perl5.git.perl.org/perl.git/commitdiff/08e3451d7 - -Upstream-Status: Backport -CVE: CVE-2016-6185 -Signed-off-by: Mingli Yu ---- - dist/XSLoader/XSLoader_pm.PL | 25 +++++++++++++++++++++++++ - dist/XSLoader/t/XSLoader.t | 27 ++++++++++++++++++++++++++- - 2 files changed, 51 insertions(+), 1 deletion(-) - -diff --git a/dist/XSLoader/XSLoader_pm.PL b/dist/XSLoader/XSLoader_pm.PL -index 668411d..778e46b 100644 ---- a/dist/XSLoader/XSLoader_pm.PL -+++ b/dist/XSLoader/XSLoader_pm.PL -@@ -104,6 +104,31 @@ print OUT <<'EOT'; - my $modpname = join('/',@modparts); - my $c = () = split(/::/,$caller,-1); - $modlibname =~ s,[\\/][^\\/]+$,, while $c--; # Q&D basename -+ # Does this look like a relative path? -+ if ($modlibname !~ m|^[\\/]|) { -+ # Someone may have a #line directive that changes the file name, or -+ # may be calling XSLoader::load from inside a string eval. We cer- -+ # tainly do not want to go loading some code that is not in @INC, -+ # as it could be untrusted. -+ # -+ # We could just fall back to DynaLoader here, but then the rest of -+ # this function would go untested in the perl core, since all @INC -+ # paths are relative during testing. That would be a time bomb -+ # waiting to happen, since bugs could be introduced into the code. -+ # -+ # So look through @INC to see if $modlibname is in it. A rela- -+ # tive $modlibname is not a common occurrence, so this block is -+ # not hot code. -+ FOUND: { -+ for (@INC) { -+ if ($_ eq $modlibname) { -+ last FOUND; -+ } -+ } -+ # Not found. Fall back to DynaLoader. -+ goto \&XSLoader::bootstrap_inherit; -+ } -+ } - EOT - - my $dl_dlext = quotemeta($Config::Config{'dlext'}); -diff --git a/dist/XSLoader/t/XSLoader.t b/dist/XSLoader/t/XSLoader.t -index 2ff11fe..1e86faa 100644 ---- a/dist/XSLoader/t/XSLoader.t -+++ b/dist/XSLoader/t/XSLoader.t -@@ -33,7 +33,7 @@ my %modules = ( - 'Time::HiRes'=> q| ::can_ok( 'Time::HiRes' => 'usleep' ) |, # 5.7.3 - ); - --plan tests => keys(%modules) * 3 + 9; -+plan tests => keys(%modules) * 3 + 10; - - # Try to load the module - use_ok( 'XSLoader' ); -@@ -125,3 +125,28 @@ XSLoader::load("Devel::Peek"); - EOS - or ::diag $@; - } -+ -+SKIP: { -+ skip "File::Path not available", 1 -+ unless eval { require File::Path }; -+ my $name = "phooo$$"; -+ File::Path::make_path("$name/auto/Foo/Bar"); -+ open my $fh, -+ ">$name/auto/Foo/Bar/Bar.$Config::Config{'dlext'}"; -+ close $fh; -+ my $fell_back; -+ local *XSLoader::bootstrap_inherit = sub { -+ $fell_back++; -+ # Break out of the calling subs -+ goto the_test; -+ }; -+ eval < -Date: Sun, 3 Jan 2016 08:40:33 +0100 -Subject: [PATCH] Remove nm from libswanted - -Nm stood for "New Math" library in the context of 1994. 2014 a conflicting -library libnm appeared that has a network manager context. - -Upstream-Status: Backport [commit 4732711e on branch blead, tag v5.25.0] - ---- - Configure | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/Configure b/Configure -index 0e71b4b..cfbdaa1 100755 ---- a/Configure -+++ b/Configure -@@ -1464,7 +1464,7 @@ libswanted_uselargefiles='' - : set usesocks on the Configure command line to enable socks. - : List of libraries we want. - : If anyone needs extra -lxxx, put those in a hint file. 
--libswanted="cl pthread socket bind inet nsl nm ndbm gdbm dbm db malloc dl ld" -+libswanted="cl pthread socket bind inet nsl ndbm gdbm dbm db malloc dl ld" - libswanted="$libswanted sun m crypt sec util c cposix posix ucb bsd BSD" - : We probably want to search /usr/shlib before most other libraries. - : This is only used by the lib/ExtUtils/MakeMaker.pm routine extliblist. --- -2.9.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-test-customized.patch b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-test-customized.patch index 477be29ef..90e4dcd5f 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-test-customized.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl/perl-test-customized.patch @@ -1,61 +1,86 @@ -The OE core recipies customize some ExtUtils-MakeMaker modules, -which causes their MD5 sum to mismatch the provided table and the -corresponding tests to fail. Also, we patch several test files with -a backported patch. Update list of hashes to reflect the patched files. +From 64df09205b6ccb5a434a4e53e8e0a32377ab634f Mon Sep 17 00:00:00 2001 +From: =?UTF-8?q?An=C3=ADbal=20Lim=C3=B3n?= +Date: Thu, 24 Nov 2016 10:49:55 -0600 +Subject: [PATCH] The OE core recipies customize some ExtUtils-MakeMaker + modules, which causes their MD5 sum to mismatch the provided table and the + corresponding tests to fail. Also, we patch several test files with a + backported patch. Update list of hashes to reflect the patched files. +MIME-Version: 1.0 +Content-Type: text/plain; charset=UTF-8 +Content-Transfer-Encoding: 8bit Upstream-Status: Inappropriate [embedded specific] Signed-off-by: Bill Randle +Signed-off-by: Aníbal Limón +--- + t/porting/customized.dat | 16 ++++++++-------- + 1 file changed, 8 insertions(+), 8 deletions(-) + +diff --git a/t/porting/customized.dat b/t/porting/customized.dat +index defeae1..b5d3c46 100644 +--- a/t/porting/customized.dat ++++ b/t/porting/customized.dat +@@ -18,12 +18,12 @@ Encode cpan/Encode/bin/unidump 715f47c2fcc661268f3c6cd3de0d27c72b745cd2 + Encode cpan/Encode/Encode.pm e146861ff2e6aaa62defa4887eade68dd7b17c8e + Encode cpan/Encode/encoding.pm 51c19efc9bfe8467d6ae12a4654f6e7f980715bf + ExtUtils::Constant cpan/ExtUtils-Constant/t/Constant.t a0369c919e216fb02767a637666bb4577ad79b02 +-ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/bin/instmodsh 5bc04a0173b8b787f465271b6186220326ae8eef ++ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/bin/instmodsh 2070fe968fa344d89aea1bdc6a8dbb0c467d0612 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Command.pm e3a372e07392179711ea9972087c1105a2780fad + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Command/MM.pm b72721bd6aa9bf7ec328bda99a8fdb63cac6114d + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Liblist.pm 0e1e4c25eddb999fec6c4dc66593f76db34cfd16 +-ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Liblist/Kid.pm bfd2aa00ca4ed251f342e1d1ad704abbaf5a615e +-ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MakeMaker.pm 5529ae3064365eafd99536621305d52f4ab31b45 ++ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Liblist/Kid.pm d593d8fdc5c0ebcb6d3701c70fc6640c50d93455 ++ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MakeMaker.pm bf9174c70a0e50ff2fee4552c7df89b37d292da1 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MakeMaker/Config.pm bc88b275af73b8faac6abd59a9aad3f625925810 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MakeMaker/FAQ.pod 
062e5d14a803fbbec8d61803086a3d7997e8a473 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MakeMaker/Tutorial.pod a8a9cab7d67922ed3d6883c864e1fe29aaa6ad89 +@@ -33,7 +33,7 @@ ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Mkbootstrap.pm 412e95c3 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Mksymlists.pm 8559ef191b4371d0c381472464856a8a73825b2a + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM.pm 09d579ed9daea95c3bf47de2e0b8fe3aa0ff6447 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_AIX.pm f720c13748293b792f7073aa96e7daecb590b183 +-ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Any.pm 243649a399d293ae7ad0f26b7eab2668aa864ce8 ++ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Any.pm ec39f68802a6fee8daaa914fc7131f40533cfc23 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_BeOS.pm b63c90129303b2c17d084fb828aa2c02a2ad85b8 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Cygwin.pm cabd1c97eaa427067811d92807e34c17940c7350 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Darwin.pm 6a185d897a600c34615a6073f4de0ac2f54fef3e +@@ -42,7 +42,7 @@ ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_MacOS.pm 1f5eb772eed + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_NW5.pm de777d7809c0d73e5d4622a29921731c7e5dff48 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_OS2.pm 01e8f08a82b5304009574e3ac0892b4066ff7639 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_QNX.pm 5340052b58557a6764f5ac9f8b807fefec404a06 +-ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Unix.pm 3c3b93f431b0a51b9592b3d69624dbf5409f6f74 ++ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Unix.pm 0d6ed5e4bdcdcd28e968e8629a592fdd0cc84818 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_UWIN.pm 40397f4cd2d49700b80b4ef490da98add24c5b37 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_VMS.pm 147e97fbabb74841f0733dbd5d1b9f3fa51f87c1 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_VOS.pm 3f13ed7045ff3443bcb4dd6c95c98b9bd705820f +@@ -51,7 +51,7 @@ ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Win95.pm 48e8a2fe176 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MY.pm 6fefe99045b64459905d4721f3a494d8d50f7ab9 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/testlib.pm 172778ad21c065a89cd270668eb9f99a7364b41c + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/cd.t 0a71fbd646a7be8358b07b6f64f838243cc0aef4 +-ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/echo.t 37aec8f794c52e037540757eb5b2556f79419ff7 ++ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/echo.t 1a93dd8834e4bb0e5facf08204e782807567b2eb + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/lib/MakeMaker/Test/NoXS.pm 371cdff1b2375017907cfbc9c8f4a31f5ad10582 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/prereq.t 53bda2c549fd13a6b6c13a070ca6bc79883081c0 + ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/vstrings.t 90035a2bdbf45f15b9c3196d072d7cba7e662871 +@@ -165,7 +165,7 @@ bignum cpan/bignum/lib/bigrat.pm 7fccc9df30e43dbbae6e5ea91b26c8046545c9a9 + bignum cpan/bignum/lib/Math/BigFloat/Trace.pm a6b4b995e18f4083252e6dc72e9bef69671893dd + bignum cpan/bignum/lib/Math/BigInt/Trace.pm d9596963673760cae3eeeb752c1eeeec50bb2290 + libnet cpan/libnet/lib/Net/Cmd.pm a44a10c939a4c35f923c4638054178c32f1d283a +-libnet cpan/libnet/lib/Net/Config.pm 9bd49bf4de0dc438bceee0ef4baf8ba7a6633327 ++libnet cpan/libnet/lib/Net/Config.pm 
2873da5efbffed67934dd297ef6f360b3558cb0b + libnet cpan/libnet/lib/Net/Domain.pm 1bbed50f70fd1ff3e1cdf087b19a9349cddfaced + libnet cpan/libnet/lib/Net/FTP.pm 40dba553c8d44e1530daec2d07a6e50910401f2e + libnet cpan/libnet/lib/Net/FTP/A.pm c570b10730b168990034dcf9cb00e305a100f336 +@@ -176,6 +176,6 @@ libnet cpan/libnet/lib/Net/FTP/L.pm ac1599c775faee0474710e4f75051c8949f13df2 + libnet cpan/libnet/lib/Net/Netrc.pm 009cfc08f8a5bf247257acb64a21e1b6ad8b2c9c + libnet cpan/libnet/lib/Net/NNTP.pm 6325fc05fd9ef81dc8d461a77b2a3f56ad1ae114 + libnet cpan/libnet/lib/Net/POP3.pm 2d8065646df80061dae5a9e3465a36a6557165fd +-libnet cpan/libnet/lib/Net/SMTP.pm f3ed7a177b49ee0ba65ac1c414de797cdbbe6886 ++libnet cpan/libnet/lib/Net/SMTP.pm f1beb42bfbef4333ed24ad63d5dd1aa5c67b20c7 + libnet cpan/libnet/lib/Net/Time.pm b3df8bbaa3bc253fbf77e8386c59a1b2aae13627 + version cpan/version/lib/version.pm ff75e2076be10bd4c05133cd979fda0b38ca8653 +-- +2.1.4 -Index: perl-5.22.1/t/porting/customized.dat -=================================================================== ---- perl-5.22.1.orig/t/porting/customized.dat 2015-10-31 13:36:16.000000000 +0000 -+++ perl-5.22.1/t/porting/customized.dat 2016-06-02 12:50:10.381030204 -0700 -@@ -1,8 +1,8 @@ - CPAN cpan/CPAN/lib/CPAN.pm ce62c43d72f101c011184dbbc59e21c2790826f0 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Command/MM.pm 7f4dfd0fe884bd42412bcf04ca80ef97b39c1d54 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Liblist.pm bef099988b15fb0b2a1f5ac48c01af1f7f36d329 --ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Liblist/Kid.pm 8168e18f0e3ce3ece4bb7e7c72d57ec07c67c402 --ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MakeMaker.pm 7115e97a53559cb3ec061dd6f7f344e522724c4a -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Liblist/Kid.pm 8c22e119b96d674f1f268a9c495bb4aa04e1100b -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MakeMaker.pm 3d7abd674b15ed323f743594ef0bd09db76b1aee - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MakeMaker/Config.pm f8db8d4245bf0684b8210c811f50d7cfb1a27d78 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MakeMaker/FAQ.pod 757bffb47857521311f8f3bde43ebe165f8d5191 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MakeMaker/Locale.pm 82be06851deb84c6419ad003ce9b6d1957f395f3 -@@ -14,7 +14,7 @@ - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/Mksymlists.pm ab80029ab16d38d4f2e41dc88d2ceb9f3790e477 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM.pm 453e0abbc4bb38db4c0820ad5c4846f313b66291 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_AIX.pm c1b1babda8f43ae7a2caba1cb4f70f92af5a0e34 --ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Any.pm 6f90d94ad3e7aa0045a3b1a10a1bb18391f89f57 -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Any.pm 21bde53290bf1a4da4457290b65bd1b0ca6f1d16 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_BeOS.pm cab2b3ce08b71a4ce89aa630f236eb08b852439d - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Cygwin.pm 61fced0faf518bf87c265fcb51ed330ba354623f - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Darwin.pm ae0ef51a7b6dd0b05aa61c779df7167dda5f5119 -@@ -23,7 +23,7 @@ - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_NW5.pm 433135eecb85b4b1d90d3027432f329436c78447 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_OS2.pm 1fbb5605bfb47feee4083feba8aa85e659628f70 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_QNX.pm 
5b66d1f485a6034d96fc24ba1665b1bad9e447f1 --ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Unix.pm 8cef99a9bd370ecfd07ddb0efbdcbb4101255e45 -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Unix.pm 860d520234d7c9036d91f0b143a1dddf2a5e8cb7 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_UWIN.pm 939572fde3d59ba77c2e677fe2df2bed4bed5898 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_VMS.pm 09c2049bfd25b735e2a5bcf56a6cff7b4827f9c8 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_VOS.pm d65d63f8217a55c36f62e249814988974f593c79 -@@ -31,8 +31,16 @@ - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MM_Win95.pm 12df38eacceeed73cab94c423236bfaed0fbbfec - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/MY.pm 22fe9596a0237252f45399a36abc83b7813bc328 - ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/lib/ExtUtils/testlib.pm 7fbc42ca2ebc6c677b79ae5fd5647243cf069463 --ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/basic.t 6cdc7701b50e586bc9c4cfb1616de8eb0b1baf34 --ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/pm_to_blib.t 71ebcee355691ce374fcad251b12d8b2412462b3 -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/basic.t d78fdec7a4512dc8d2a7abd62b8104530af8ecf9 -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/echo.t 9427f4adebbb13b57b4a76fef2972adf63c9bd96 -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/INSTALL_BASE.t ef356c196bb5c3c428ae309d7f989bdd6d79b86d -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/INST_PREFIX.t 3e6d4d6eb2eab42e983ac70eb5737a759af0916f -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/INST.t e553fa0d53c894c8d36aafb69edd55b38a9355f8 -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/min_perl_version.t e930ec9217de5a1785d0247c30b159e6f7f5673f -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/PL_FILES.t 7e49ab6c4d467826d22023fa03d77b85f935b58e -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/pm_to_blib.t a04c96eecfab17e4094604e0fb998dd93cf93b93 -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/recurs.t 94cce3bff875a878ed27816b3f5df2ca4225c714 -+ExtUtils::MakeMaker cpan/ExtUtils-MakeMaker/t/several_authors.t f811d993c0835c66dc501ed55083acb29bf33bf7 - Text::ParseWords cpan/Text-ParseWords/t/ParseWords.t 9bae51c9b944cd5c0bbabe9d397e573976a2be8e - Win32API::File cpan/Win32API-File/buffers.h 02d230ac9ac7091365128161a0ed671898baefae - Win32API::File cpan/Win32API-File/cFile.h fca7e383e76979c3ac3adf12d11d1bcd2618e489 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl_5.22.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl_5.22.1.bb deleted file mode 100644 index 792a65b5b..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl_5.22.1.bb +++ /dev/null @@ -1,375 +0,0 @@ -require perl.inc - -# We need gnugrep (for -I) -DEPENDS = "virtual/db grep-native" -DEPENDS += "gdbm zlib" - -# Pick up patches from debian -# http://ftp.de.debian.org/debian/pool/main/p/perl/perl_5.22.0-1.debian.tar.xz -SRC_URI += " \ - file://debian/cpan_definstalldirs.diff \ - file://debian/db_file_ver.diff \ - file://debian/doc_info.diff \ - file://debian/enc2xs_inc.diff \ - file://debian/errno_ver.diff \ - file://debian/libperl_embed_doc.diff \ - file://debian/fixes/respect_umask.diff \ - file://debian/writable_site_dirs.diff \ - file://debian/extutils_set_libperl_path.diff \ - file://debian/no_packlist_perllocal.diff \ - file://debian/prefix_changes.diff \ - file://debian/instmodsh_doc.diff \ - file://debian/ld_run_path.diff \ - file://debian/libnet_config_path.diff \ - file://debian/mod_paths.diff \ - 
file://debian/prune_libs.diff \ - file://debian/fixes/net_smtp_docs.diff \ - file://debian/perlivp.diff \ - file://debian/squelch-locale-warnings.diff \ - file://debian/skip-upstream-git-tests.diff \ - file://debian/skip-kfreebsd-crash.diff \ - file://debian/fixes/document_makemaker_ccflags.diff \ - file://debian/find_html2text.diff \ - file://debian/perl5db-x-terminal-emulator.patch \ - file://debian/cpan-missing-site-dirs.diff \ - file://debian/fixes/memoize_storable_nstore.diff \ - file://debian/regen-skip.diff \ -" - -SRC_URI += " \ - file://Makefile.patch \ - file://Makefile.SH.patch \ - file://installperl.patch \ - file://perl-dynloader.patch \ - file://perl-moreconfig.patch \ - file://letgcc-find-errno.patch \ - file://generate-sh.patch \ - file://native-perlinc.patch \ - file://perl-enable-gdbm.patch \ - file://cross-generate_uudmap.patch \ - file://fix_bad_rpath.patch \ - file://perl-archlib-exp.patch \ - file://dynaloaderhack.patch \ - file://config.sh \ - file://config.sh-32 \ - file://config.sh-32-le \ - file://config.sh-32-be \ - file://config.sh-64 \ - file://config.sh-64-le \ - file://config.sh-64-be \ - file://make_ext.pl-fix-regenerate-makefile-failed-while-cc-.patch \ - file://t-run-switches.t-perl5-perl.patch \ - file://ext-ODBM_File-hints-linux.pl-link-libgdbm_compat.patch \ - file://ext-ODBM_File-t-odbm.t-fix-the-path-of-dbmt_common.p.patch \ - file://perl-PathTools-don-t-filter-out-blib-from-INC.patch \ - file://perl-errno-generation-gcc5.patch \ - file://perl-fix-conflict-between-skip_all-and-END.patch \ - file://perl-test-customized.patch \ - file://perl-fix-CVE-2016-2381.patch \ - file://perl-fix-CVE-2016-6185.patch \ - file://perl-fix-CVE-2015-8607.patch \ - file://perl-fix-CVE-2016-1238.patch \ -" - -# Fix test case issues -SRC_URI_append_class-target = " \ - file://test/dist-threads-t-join.t-adjust-ps-option.patch \ - file://test/ext-DynaLoader-t-DynaLoader.t-fix-calling-dl_findfil.patch \ - " - -SRC_URI[md5sum] = "6671e4829cbaf9cecafa9a84f141b0a3" -SRC_URI[sha256sum] = "9e87317d693ce828095204be0d09af8d60b8785533fadea1a82b6f0e071e5c79" - -inherit perlnative siteinfo - -# Where to find the native perl -HOSTPERL = "${STAGING_BINDIR_NATIVE}/perl-native/perl${PV}" - -# Where to find .so files - use the -native versions not those from the target build -export PERLHOSTLIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${PV}/" - -# Where to find perl @INC/#include files -# - use the -native versions not those from the target build -export PERL_LIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${PV}/" -export PERL_ARCHLIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${PV}/" - -EXTRA_OEMAKE = "-e MAKEFLAGS=" - -# LDFLAGS for shared libraries -export LDDLFLAGS = "${LDFLAGS} -shared" - -LDFLAGS_append = " -fstack-protector" - -# We're almost Debian, aren't we? 
-CFLAGS += "-DDEBIAN" - -do_nolargefile() { - sed -i -e "s,\(uselargefiles=\)'define',\1'undef',g" \ - -e "s,\(d_readdir64_r=\)'define',\1'undef',g" \ - -e "s,\(readdir64_r_proto=\)'\w+',\1'0',g" \ - -e "/ccflags_uselargefiles/d" \ - -e "s/-Duselargefiles//" \ - -e "s/-D_FILE_OFFSET_BITS=64//" \ - -e "s/-D_LARGEFILE_SOURCE//" \ - ${S}/Cross/config.sh-${TARGET_ARCH}-${TARGET_OS} -} - -do_configure() { - # Make hostperl in build directory be the native perl - ln -sf ${HOSTPERL} hostperl - - if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then - if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" -a -e Makefile ]; then - ${MAKE} clean - fi - find ${S} -name *.so -delete - fi - if [ -n "${CONFIGURESTAMPFILE}" ]; then - echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE} - fi - - # Do our work in the cross subdir - cd Cross - - # Generate configuration - rm -f config.sh-${TARGET_ARCH}-${TARGET_OS} - for i in ${WORKDIR}/config.sh \ - ${WORKDIR}/config.sh-${SITEINFO_BITS} \ - ${WORKDIR}/config.sh-${SITEINFO_BITS}-${SITEINFO_ENDIANNESS}; do - cat $i >> config.sh-${TARGET_ARCH}-${TARGET_OS} - done - - # Fixups for uclibc - if [ "${TARGET_OS}" = "linux-uclibc" -o "${TARGET_OS}" = "linux-uclibceabi" ]; then - sed -i -e "s,\(d_crypt_r=\)'define',\1'undef',g" \ - -e "s,\(d_futimes=\)'define',\1'undef',g" \ - -e "s,\(d_finitel=\)'define',\1'undef',g" \ - -e "s,\(crypt_r_proto=\)'\w+',\1'0',g" \ - -e "s,\(d_getnetbyname_r=\)'define',\1'undef',g" \ - -e "s,\(getnetbyname_r_proto=\)'\w+',\1'0',g" \ - -e "s,\(d_getnetbyaddr_r=\)'define',\1'undef',g" \ - -e "s,\(getnetbyaddr_r_proto=\)'\w+',\1'0',g" \ - -e "s,\(d_getnetent_r=\)'define',\1'undef',g" \ - -e "s,\(getnetent_r_proto=\)'\w+',\1'0',g" \ - -e "s,\(d_sockatmark=\)'define',\1'undef',g" \ - -e "s,\(d_sockatmarkproto=\)'\w+',\1'0',g" \ - -e "s,\(d_eaccess=\)'define',\1'undef',g" \ - -e "s,\(d_stdio_ptr_lval=\)'define',\1'undef',g" \ - -e "s,\(d_stdio_ptr_lval_sets_cnt=\)'define',\1'undef',g" \ - -e "s,\(d_stdiobase=\)'define',\1'undef',g" \ - -e "s,\(d_stdstdio=\)'define',\1'undef',g" \ - -e "s,-fstack-protector,-fno-stack-protector,g" \ - config.sh-${TARGET_ARCH}-${TARGET_OS} - fi - # Fixups for musl - if [ "${TARGET_OS}" = "linux-musl" -o "${TARGET_OS}" = "linux-musleabi" ]; then - sed -i -e "s,\(d_libm_lib_version=\)'define',\1'undef',g" \ - -e "s,\(d_stdio_ptr_lval=\)'define',\1'undef',g" \ - -e "s,\(d_stdio_ptr_lval_sets_cnt=\)'define',\1'undef',g" \ - -e "s,\(d_stdiobase=\)'define',\1'undef',g" \ - -e "s,\(d_stdstdio=\)'define',\1'undef',g" \ - -e "s,\(d_getnetbyname_r=\)'define',\1'undef',g" \ - -e "s,\(d_finitel=\)'define',\1'undef',g" \ - -e "s,\(getprotobyname_r=\)'define',\1'undef',g" \ - -e "s,\(getpwent_r=\)'define',\1'undef',g" \ - -e "s,\(getservent_r=\)'define',\1'undef',g" \ - -e "s,\(gethostent_r=\)'define',\1'undef',g" \ - -e "s,\(getnetent_r=\)'define',\1'undef',g" \ - -e "s,\(getnetbyaddr_r=\)'define',\1'undef',g" \ - -e "s,\(getprotoent_r=\)'define',\1'undef',g" \ - -e "s,\(getprotobynumber_r=\)'define',\1'undef',g" \ - -e "s,\(getgrent_r=\)'define',\1'undef',g" \ - -e "s,\(i_fcntl=\)'undef',\1'define',g" \ - -e "s,\(h_fcntl=\)'false',\1'true',g" \ - -e "s,-fstack-protector,-fno-stack-protector,g" \ - -e "s,-lnsl,,g" \ - config.sh-${TARGET_ARCH}-${TARGET_OS} - fi - - ${@bb.utils.contains('DISTRO_FEATURES', 'largefile', '', 'do_nolargefile', d)} - - # Update some paths in the configuration - sed -i -e 's,@ARCH@-thread-multi,,g' \ - -e 's,@ARCH@,${TARGET_ARCH}-${TARGET_OS},g' \ - -e 's,@STAGINGDIR@,${STAGING_DIR_HOST},g' 
\ - -e "s,@INCLUDEDIR@,${STAGING_INCDIR},g" \ - -e "s,@LIBDIR@,${libdir},g" \ - -e "s,@BASELIBDIR@,${base_libdir},g" \ - -e "s,@EXECPREFIX@,${exec_prefix},g" \ - -e 's,@USRBIN@,${bindir},g' \ - config.sh-${TARGET_ARCH}-${TARGET_OS} - - case "${TARGET_ARCH}" in - x86_64 | powerpc | s390) - sed -i -e "s,\(need_va_copy=\)'undef',\1'define',g" \ - config.sh-${TARGET_ARCH}-${TARGET_OS} - ;; - arm) - sed -i -e "s,\(d_u32align=\)'undef',\1'define',g" \ - config.sh-${TARGET_ARCH}-${TARGET_OS} - ;; - esac - # These are strewn all over the source tree - for foo in `grep -I --exclude="*.patch" --exclude="*.diff" --exclude="*.pod" --exclude="README*" -m1 "/usr/include/.*\.h" ${S}/* -r -l` ${S}/utils/h2xs.PL ; do - echo Fixing: $foo - sed -e 's|\([ "^'\''I]\+\)/usr/include/|\1${STAGING_INCDIR}/|g' -i $foo - done - - rm -f config - echo "ARCH = ${TARGET_ARCH}" > config - echo "OS = ${TARGET_OS}" >> config -} - -do_compile() { - # Fix to avoid recursive substitution of path - sed -i -e 's|(@libpath, ".*"|(@libpath, "${STAGING_LIBDIR}"|g' cpan/ExtUtils-MakeMaker/lib/ExtUtils/Liblist/Kid.pm - - cd Cross - oe_runmake perl LD="${CCLD}" -} - -do_install() { - #export hostperl="${STAGING_BINDIR_NATIVE}/perl-native/perl${PV}" - oe_runmake install DESTDIR=${D} - # Add perl pointing at current version - ln -sf perl${PV} ${D}${bindir}/perl - - ln -sf perl ${D}/${libdir}/perl5 - - # Remove unwanted file and empty directories - rm -f ${D}/${libdir}/perl/${PV}/.packlist - rmdir ${D}/${libdir}/perl/site_perl/${PV} - rmdir ${D}/${libdir}/perl/site_perl - - # Fix up shared library - mv ${D}/${libdir}/perl/${PV}/CORE/libperl.so ${D}/${libdir}/libperl.so.${PV} - ln -sf libperl.so.${PV} ${D}/${libdir}/libperl.so.5 - ln -sf ../../../libperl.so.${PV} ${D}/${libdir}/perl/${PV}/CORE/libperl.so - - # target config, used by cpan.bbclass to extract version information - install config.sh ${D}${libdir}/perl - - ln -s Config_heavy.pl ${D}${libdir}/perl/${PV}/Config_heavy-target.pl -} - -do_install_append_class-nativesdk () { - create_wrapper ${D}${bindir}/perl \ - PERL5LIB='$PERL5LIB:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/site_perl/${PV}:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/vendor_perl/${PV}:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/${PV}' -} - -PACKAGE_PREPROCESS_FUNCS += "perl_package_preprocess" - -perl_package_preprocess () { - # Fix up installed configuration - sed -i -e "s,${D},,g" \ - -e "s,--sysroot=${STAGING_DIR_HOST},,g" \ - -e "s,-isystem${STAGING_INCDIR} ,,g" \ - -e "s,${STAGING_LIBDIR},${libdir},g" \ - -e "s,${STAGING_BINDIR},${bindir},g" \ - -e "s,${STAGING_INCDIR},${includedir},g" \ - -e "s,${STAGING_BINDIR_NATIVE}/perl-native/,${bindir}/,g" \ - -e "s,${STAGING_BINDIR_NATIVE}/,,g" \ - -e "s,${STAGING_BINDIR_TOOLCHAIN}/${TARGET_PREFIX},${bindir},g" \ - ${PKGD}${bindir}/h2xs \ - ${PKGD}${bindir}/h2ph \ - ${PKGD}${bindir}/pod2man \ - ${PKGD}${bindir}/pod2text \ - ${PKGD}${bindir}/pod2usage \ - ${PKGD}${bindir}/podchecker \ - ${PKGD}${bindir}/podselect \ - ${PKGD}${libdir}/perl/${PV}/CORE/config.h \ - ${PKGD}${libdir}/perl/${PV}/CORE/perl.h \ - ${PKGD}${libdir}/perl/${PV}/CORE/pp.h \ - ${PKGD}${libdir}/perl/${PV}/Config.pm \ - ${PKGD}${libdir}/perl/${PV}/Config.pod \ - ${PKGD}${libdir}/perl/${PV}/Config_heavy.pl \ - ${PKGD}${libdir}/perl/${PV}/ExtUtils/Liblist/Kid.pm \ - ${PKGD}${libdir}/perl/${PV}/FileCache.pm \ - ${PKGD}${libdir}/perl/${PV}/pod/*.pod \ - ${PKGD}${libdir}/perl/config.sh -} - -PACKAGES = "perl-dbg perl perl-misc perl-dev perl-pod perl-doc perl-lib \ - perl-module-cpan 
perl-module-cpanplus perl-module-unicore" -FILES_${PN} = "${bindir}/perl ${bindir}/perl${PV} \ - ${libdir}/perl/${PV}/Config.pm \ - ${libdir}/perl/${PV}/strict.pm \ - ${libdir}/perl/${PV}/warnings.pm \ - ${libdir}/perl/${PV}/warnings \ - ${libdir}/perl/${PV}/vars.pm \ - " -FILES_${PN}_append_class-nativesdk = " ${bindir}/perl.real" -RPROVIDES_${PN} += "perl-module-strict perl-module-vars perl-module-config perl-module-warnings \ - perl-module-warnings-register" -FILES_${PN}-dev = "${libdir}/perl/${PV}/CORE" -FILES_${PN}-lib = "${libdir}/libperl.so* \ - ${libdir}/perl5 \ - ${libdir}/perl/config.sh \ - ${libdir}/perl/${PV}/Config_heavy.pl \ - ${libdir}/perl/${PV}/Config_heavy-target.pl" -FILES_${PN}-pod = "${libdir}/perl/${PV}/pod \ - ${libdir}/perl/${PV}/*.pod \ - ${libdir}/perl/${PV}/*/*.pod \ - ${libdir}/perl/${PV}/*/*/*.pod " -FILES_perl-misc = "${bindir}/*" -FILES_${PN}-doc = "${libdir}/perl/${PV}/*/*.txt \ - ${libdir}/perl/${PV}/*/*/*.txt \ - ${libdir}/perl/${PV}/auto/XS/Typemap \ - ${libdir}/perl/${PV}/B/assemble \ - ${libdir}/perl/${PV}/B/cc_harness \ - ${libdir}/perl/${PV}/B/disassemble \ - ${libdir}/perl/${PV}/B/makeliblinks \ - ${libdir}/perl/${PV}/CGI/eg \ - ${libdir}/perl/${PV}/CPAN/PAUSE2003.pub \ - ${libdir}/perl/${PV}/CPAN/SIGNATURE \ - ${libdir}/perl/${PV}/CPANPLUS/Shell/Default/Plugins/HOWTO.pod \ - ${libdir}/perl/${PV}/Encode/encode.h \ - ${libdir}/perl/${PV}/ExtUtils/MANIFEST.SKIP \ - ${libdir}/perl/${PV}/ExtUtils/NOTES \ - ${libdir}/perl/${PV}/ExtUtils/PATCHING \ - ${libdir}/perl/${PV}/ExtUtils/typemap \ - ${libdir}/perl/${PV}/ExtUtils/xsubpp \ - ${libdir}/perl/${PV}/ExtUtils/Changes_EU-Install \ - ${libdir}/perl/${PV}/Net/*.eg \ - ${libdir}/perl/${PV}/unicore/mktables \ - ${libdir}/perl/${PV}/unicore/mktables.lst \ - ${libdir}/perl/${PV}/unicore/version " - -FILES_perl-module-cpan += "${libdir}/perl/${PV}/CPAN \ - ${libdir}/perl/${PV}/CPAN.pm" -FILES_perl-module-cpanplus += "${libdir}/perl/${PV}/CPANPLUS \ - ${libdir}/perl/${PV}/CPANPLUS.pm" -FILES_perl-module-unicore += "${libdir}/perl/${PV}/unicore" - -# Create a perl-modules package recommending all the other perl -# packages (actually the non modules packages and not created too) -ALLOW_EMPTY_perl-modules = "1" -PACKAGES_append = " perl-modules " - -PACKAGESPLITFUNCS_prepend = "split_perl_packages " - -python split_perl_packages () { - libdir = d.expand('${libdir}/perl/${PV}') - do_split_packages(d, libdir, 'auto/([^.]*)/[^/]*\.(so|ld|ix|al)', 'perl-module-%s', 'perl module %s', recursive=True, match_path=True, prepend=False) - do_split_packages(d, libdir, 'Module/([^\/]*)\.pm', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False) - do_split_packages(d, libdir, 'Module/([^\/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False) - do_split_packages(d, libdir, '(^(?!(CPAN\/|CPANPLUS\/|Module\/|unicore\/|auto\/)[^\/]).*)\.(pm|pl|e2x)', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False) - - # perl-modules should recommend every perl module, and only the - # modules. Don't attempt to use the result of do_split_packages() as some - # modules are manually split (eg. perl-module-unicore). 
- packages = filter(lambda p: 'perl-module-' in p, d.getVar('PACKAGES', True).split()) - d.setVar(d.expand("RRECOMMENDS_${PN}-modules"), ' '.join(packages)) -} - -PACKAGES_DYNAMIC += "^perl-module-.*" -PACKAGES_DYNAMIC_class-nativesdk += "^nativesdk-perl-module-.*" - -RPROVIDES_perl-lib = "perl-lib" - -require perl-rdepends_${PV}.inc -require perl-ptest.inc - -SSTATE_SCAN_FILES += "*.pm *.pod *.h *.pl *.sh" - -BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/perl/perl_5.24.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl_5.24.1.bb new file mode 100644 index 000000000..cf7a8e121 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/perl/perl_5.24.1.bb @@ -0,0 +1,358 @@ +require perl.inc + +# We need gnugrep (for -I) +DEPENDS = "virtual/db grep-native" +DEPENDS += "gdbm zlib" + +# Pick up patches from debian +# http://ftp.de.debian.org/debian/pool/main/p/perl/perl_5.22.0-1.debian.tar.xz +SRC_URI += " \ + file://debian/cpan_definstalldirs.diff \ + file://debian/db_file_ver.diff \ + file://debian/doc_info.diff \ + file://debian/enc2xs_inc.diff \ + file://debian/errno_ver.diff \ + file://debian/libperl_embed_doc.diff \ + file://debian/fixes/respect_umask.diff \ + file://debian/writable_site_dirs.diff \ + file://debian/extutils_set_libperl_path.diff \ + file://debian/no_packlist_perllocal.diff \ + file://debian/prefix_changes.diff \ + file://debian/instmodsh_doc.diff \ + file://debian/ld_run_path.diff \ + file://debian/libnet_config_path.diff \ + file://debian/mod_paths.diff \ + file://debian/prune_libs.diff \ + file://debian/fixes/net_smtp_docs.diff \ + file://debian/perlivp.diff \ + file://debian/squelch-locale-warnings.diff \ + file://debian/skip-upstream-git-tests.diff \ + file://debian/skip-kfreebsd-crash.diff \ + file://debian/fixes/document_makemaker_ccflags.diff \ + file://debian/find_html2text.diff \ + file://debian/perl5db-x-terminal-emulator.patch \ + file://debian/cpan-missing-site-dirs.diff \ + file://debian/fixes/memoize_storable_nstore.diff \ + file://debian/regen-skip.diff \ +" + +SRC_URI += " \ + file://Makefile.patch \ + file://Makefile.SH.patch \ + file://installperl.patch \ + file://perl-archlib-exp.patch \ + file://perl-dynloader.patch \ + file://perl-moreconfig.patch \ + file://letgcc-find-errno.patch \ + file://generate-sh.patch \ + file://native-perlinc.patch \ + file://perl-enable-gdbm.patch \ + file://cross-generate_uudmap.patch \ + file://fix_bad_rpath.patch \ + file://dynaloaderhack.patch \ + file://config.sh \ + file://config.sh-32 \ + file://config.sh-32-le \ + file://config.sh-32-be \ + file://config.sh-64 \ + file://config.sh-64-le \ + file://config.sh-64-be \ + file://make_ext.pl-fix-regenerate-makefile-failed-while-cc-.patch \ + file://t-run-switches.t-perl5-perl.patch \ + file://ext-ODBM_File-hints-linux.pl-link-libgdbm_compat.patch \ + file://ext-ODBM_File-t-odbm.t-fix-the-path-of-dbmt_common.p.patch \ + file://perl-PathTools-don-t-filter-out-blib-from-INC.patch \ + file://perl-errno-generation-gcc5.patch \ + file://perl-fix-conflict-between-skip_all-and-END.patch \ + file://perl-test-customized.patch \ +" + +# Fix test case issues +SRC_URI_append_class-target = " \ + file://test/dist-threads-t-join.t-adjust-ps-option.patch \ + file://test/ext-DynaLoader-t-DynaLoader.t-fix-calling-dl_findfil.patch \ + " + +SRC_URI[md5sum] = "af6a84c7c3e2b8b269c105a5db2f6d53" +SRC_URI[sha256sum] = "03a77bac4505c270f1890ece75afc7d4b555090b41aa41ea478747e23b2afb3f" + +inherit perlnative siteinfo + +# Where to find 
the native perl +HOSTPERL = "${STAGING_BINDIR_NATIVE}/perl-native/perl${PV}" + +# Where to find .so files - use the -native versions not those from the target build +export PERLHOSTLIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${PV}/" + +# Where to find perl @INC/#include files +# - use the -native versions not those from the target build +export PERL_LIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${PV}/" +export PERL_ARCHLIB = "${STAGING_LIBDIR_NATIVE}/perl-native/perl/${PV}/" + +EXTRA_OEMAKE = "-e MAKEFLAGS=" + +# LDFLAGS for shared libraries +export LDDLFLAGS = "${LDFLAGS} -shared" + +LDFLAGS_append = " -fstack-protector" + +# We're almost Debian, aren't we? +CFLAGS += "-DDEBIAN" + +do_configure() { + # Make hostperl in build directory be the native perl + ln -sf ${HOSTPERL} hostperl + + if [ -n "${CONFIGURESTAMPFILE}" -a -e "${CONFIGURESTAMPFILE}" ]; then + if [ "`cat ${CONFIGURESTAMPFILE}`" != "${BB_TASKHASH}" -a -e Makefile ]; then + ${MAKE} clean + fi + find ${S} -name *.so -delete + fi + if [ -n "${CONFIGURESTAMPFILE}" ]; then + echo ${BB_TASKHASH} > ${CONFIGURESTAMPFILE} + fi + + # Do our work in the cross subdir + cd Cross + + # Generate configuration + rm -f config.sh-${TARGET_ARCH}-${TARGET_OS} + for i in ${WORKDIR}/config.sh \ + ${WORKDIR}/config.sh-${SITEINFO_BITS} \ + ${WORKDIR}/config.sh-${SITEINFO_BITS}-${SITEINFO_ENDIANNESS}; do + cat $i >> config.sh-${TARGET_ARCH}-${TARGET_OS} + done + + # Fixups for uclibc + if [ "${TARGET_OS}" = "linux-uclibc" -o "${TARGET_OS}" = "linux-uclibceabi" ]; then + sed -i -e "s,\(d_crypt_r=\)'define',\1'undef',g" \ + -e "s,\(d_futimes=\)'define',\1'undef',g" \ + -e "s,\(d_finitel=\)'define',\1'undef',g" \ + -e "s,\(crypt_r_proto=\)'\w+',\1'0',g" \ + -e "s,\(d_getnetbyname_r=\)'define',\1'undef',g" \ + -e "s,\(getnetbyname_r_proto=\)'\w+',\1'0',g" \ + -e "s,\(d_getnetbyaddr_r=\)'define',\1'undef',g" \ + -e "s,\(getnetbyaddr_r_proto=\)'\w+',\1'0',g" \ + -e "s,\(d_getnetent_r=\)'define',\1'undef',g" \ + -e "s,\(getnetent_r_proto=\)'\w+',\1'0',g" \ + -e "s,\(d_sockatmark=\)'define',\1'undef',g" \ + -e "s,\(d_sockatmarkproto=\)'\w+',\1'0',g" \ + -e "s,\(d_eaccess=\)'define',\1'undef',g" \ + -e "s,\(d_stdio_ptr_lval=\)'define',\1'undef',g" \ + -e "s,\(d_stdio_ptr_lval_sets_cnt=\)'define',\1'undef',g" \ + -e "s,\(d_stdiobase=\)'define',\1'undef',g" \ + -e "s,\(d_stdstdio=\)'define',\1'undef',g" \ + -e "s,-fstack-protector,-fno-stack-protector,g" \ + config.sh-${TARGET_ARCH}-${TARGET_OS} + fi + # Fixups for musl + if [ "${TARGET_OS}" = "linux-musl" -o "${TARGET_OS}" = "linux-musleabi" ]; then + sed -i -e "s,\(d_libm_lib_version=\)'define',\1'undef',g" \ + -e "s,\(d_stdio_ptr_lval=\)'define',\1'undef',g" \ + -e "s,\(d_stdio_ptr_lval_sets_cnt=\)'define',\1'undef',g" \ + -e "s,\(d_stdiobase=\)'define',\1'undef',g" \ + -e "s,\(d_stdstdio=\)'define',\1'undef',g" \ + -e "s,\(d_getnetbyname_r=\)'define',\1'undef',g" \ + -e "s,\(d_finitel=\)'define',\1'undef',g" \ + -e "s,\(getprotobyname_r=\)'define',\1'undef',g" \ + -e "s,\(getpwent_r=\)'define',\1'undef',g" \ + -e "s,\(getservent_r=\)'define',\1'undef',g" \ + -e "s,\(gethostent_r=\)'define',\1'undef',g" \ + -e "s,\(getnetent_r=\)'define',\1'undef',g" \ + -e "s,\(getnetbyaddr_r=\)'define',\1'undef',g" \ + -e "s,\(getprotoent_r=\)'define',\1'undef',g" \ + -e "s,\(getprotobynumber_r=\)'define',\1'undef',g" \ + -e "s,\(getgrent_r=\)'define',\1'undef',g" \ + -e "s,\(i_fcntl=\)'undef',\1'define',g" \ + -e "s,\(h_fcntl=\)'false',\1'true',g" \ + -e "s,-fstack-protector,-fno-stack-protector,g" \ + -e 
"s,-lnsl,,g" \ + config.sh-${TARGET_ARCH}-${TARGET_OS} + fi + + # Update some paths in the configuration + sed -i -e 's,@ARCH@-thread-multi,,g' \ + -e 's,@ARCH@,${TARGET_ARCH}-${TARGET_OS},g' \ + -e 's,@STAGINGDIR@,${STAGING_DIR_HOST},g' \ + -e "s,@INCLUDEDIR@,${STAGING_INCDIR},g" \ + -e "s,@LIBDIR@,${libdir},g" \ + -e "s,@BASELIBDIR@,${base_libdir},g" \ + -e "s,@EXECPREFIX@,${exec_prefix},g" \ + -e 's,@USRBIN@,${bindir},g' \ + config.sh-${TARGET_ARCH}-${TARGET_OS} + + case "${TARGET_ARCH}" in + x86_64 | powerpc | s390) + sed -i -e "s,\(need_va_copy=\)'undef',\1'define',g" \ + config.sh-${TARGET_ARCH}-${TARGET_OS} + ;; + arm) + sed -i -e "s,\(d_u32align=\)'undef',\1'define',g" \ + config.sh-${TARGET_ARCH}-${TARGET_OS} + ;; + esac + # These are strewn all over the source tree + for foo in `grep -I --exclude="*.patch" --exclude="*.diff" --exclude="*.pod" --exclude="README*" -m1 "/usr/include/.*\.h" ${S}/* -r -l` ${S}/utils/h2xs.PL ; do + echo Fixing: $foo + sed -e 's|\([ "^'\''I]\+\)/usr/include/|\1${STAGING_INCDIR}/|g' -i $foo + done + + rm -f config + echo "ARCH = ${TARGET_ARCH}" > config + echo "OS = ${TARGET_OS}" >> config +} + +do_compile() { + # Fix to avoid recursive substitution of path + sed -i -e 's|(@libpath, ".*"|(@libpath, "${STAGING_LIBDIR}"|g' cpan/ExtUtils-MakeMaker/lib/ExtUtils/Liblist/Kid.pm + + cd Cross + oe_runmake perl LD="${CCLD}" +} + +do_install() { + #export hostperl="${STAGING_BINDIR_NATIVE}/perl-native/perl${PV}" + oe_runmake install DESTDIR=${D} + # Add perl pointing at current version + ln -sf perl${PV} ${D}${bindir}/perl + + ln -sf perl ${D}/${libdir}/perl5 + + # Remove unwanted file and empty directories + rm -f ${D}/${libdir}/perl/${PV}/.packlist + rmdir ${D}/${libdir}/perl/site_perl/${PV} + rmdir ${D}/${libdir}/perl/site_perl + + # Fix up shared library + mv ${D}/${libdir}/perl/${PV}/CORE/libperl.so ${D}/${libdir}/libperl.so.${PV} + ln -sf libperl.so.${PV} ${D}/${libdir}/libperl.so.5 + ln -sf ../../../libperl.so.${PV} ${D}/${libdir}/perl/${PV}/CORE/libperl.so + + # target config, used by cpan.bbclass to extract version information + install config.sh ${D}${libdir}/perl + + ln -s Config_heavy.pl ${D}${libdir}/perl/${PV}/Config_heavy-target.pl +} + +do_install_append_class-nativesdk () { + create_wrapper ${D}${bindir}/perl \ + PERL5LIB='$PERL5LIB:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/site_perl/${PV}:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/vendor_perl/${PV}:$OECORE_NATIVE_SYSROOT/${libdir_nativesdk}/perl/${PV}' +} + +PACKAGE_PREPROCESS_FUNCS += "perl_package_preprocess" + +perl_package_preprocess () { + # Fix up installed configuration + sed -i -e "s,${D},,g" \ + -e "s,--sysroot=${STAGING_DIR_HOST},,g" \ + -e "s,-isystem${STAGING_INCDIR} ,,g" \ + -e "s,${STAGING_LIBDIR},${libdir},g" \ + -e "s,${STAGING_BINDIR},${bindir},g" \ + -e "s,${STAGING_INCDIR},${includedir},g" \ + -e "s,${STAGING_BINDIR_NATIVE}/perl-native/,${bindir}/,g" \ + -e "s,${STAGING_BINDIR_NATIVE}/,,g" \ + -e "s,${STAGING_BINDIR_TOOLCHAIN}/${TARGET_PREFIX},${bindir},g" \ + ${PKGD}${bindir}/h2xs \ + ${PKGD}${bindir}/h2ph \ + ${PKGD}${bindir}/pod2man \ + ${PKGD}${bindir}/pod2text \ + ${PKGD}${bindir}/pod2usage \ + ${PKGD}${bindir}/podchecker \ + ${PKGD}${bindir}/podselect \ + ${PKGD}${libdir}/perl/${PV}/CORE/config.h \ + ${PKGD}${libdir}/perl/${PV}/CORE/perl.h \ + ${PKGD}${libdir}/perl/${PV}/CORE/pp.h \ + ${PKGD}${libdir}/perl/${PV}/Config.pm \ + ${PKGD}${libdir}/perl/${PV}/Config.pod \ + ${PKGD}${libdir}/perl/${PV}/Config_heavy.pl \ + 
${PKGD}${libdir}/perl/${PV}/ExtUtils/Liblist/Kid.pm \ + ${PKGD}${libdir}/perl/${PV}/FileCache.pm \ + ${PKGD}${libdir}/perl/${PV}/pod/*.pod \ + ${PKGD}${libdir}/perl/config.sh +} + +PACKAGES = "perl-dbg perl perl-misc perl-dev perl-pod perl-doc perl-lib \ + perl-module-cpan perl-module-cpanplus perl-module-unicore" +FILES_${PN} = "${bindir}/perl ${bindir}/perl${PV} \ + ${libdir}/perl/${PV}/Config.pm \ + ${libdir}/perl/${PV}/strict.pm \ + ${libdir}/perl/${PV}/warnings.pm \ + ${libdir}/perl/${PV}/warnings \ + ${libdir}/perl/${PV}/vars.pm \ + " +FILES_${PN}_append_class-nativesdk = " ${bindir}/perl.real" +RPROVIDES_${PN} += "perl-module-strict perl-module-vars perl-module-config perl-module-warnings \ + perl-module-warnings-register" +FILES_${PN}-dev = "${libdir}/perl/${PV}/CORE" +FILES_${PN}-lib = "${libdir}/libperl.so* \ + ${libdir}/perl5 \ + ${libdir}/perl/config.sh \ + ${libdir}/perl/${PV}/Config_heavy.pl \ + ${libdir}/perl/${PV}/Config_heavy-target.pl" +FILES_${PN}-pod = "${libdir}/perl/${PV}/pod \ + ${libdir}/perl/${PV}/*.pod \ + ${libdir}/perl/${PV}/*/*.pod \ + ${libdir}/perl/${PV}/*/*/*.pod " +FILES_perl-misc = "${bindir}/*" +FILES_${PN}-doc = "${libdir}/perl/${PV}/*/*.txt \ + ${libdir}/perl/${PV}/*/*/*.txt \ + ${libdir}/perl/${PV}/auto/XS/Typemap \ + ${libdir}/perl/${PV}/B/assemble \ + ${libdir}/perl/${PV}/B/cc_harness \ + ${libdir}/perl/${PV}/B/disassemble \ + ${libdir}/perl/${PV}/B/makeliblinks \ + ${libdir}/perl/${PV}/CGI/eg \ + ${libdir}/perl/${PV}/CPAN/PAUSE2003.pub \ + ${libdir}/perl/${PV}/CPAN/SIGNATURE \ + ${libdir}/perl/${PV}/CPANPLUS/Shell/Default/Plugins/HOWTO.pod \ + ${libdir}/perl/${PV}/Encode/encode.h \ + ${libdir}/perl/${PV}/ExtUtils/MANIFEST.SKIP \ + ${libdir}/perl/${PV}/ExtUtils/NOTES \ + ${libdir}/perl/${PV}/ExtUtils/PATCHING \ + ${libdir}/perl/${PV}/ExtUtils/typemap \ + ${libdir}/perl/${PV}/ExtUtils/xsubpp \ + ${libdir}/perl/${PV}/ExtUtils/Changes_EU-Install \ + ${libdir}/perl/${PV}/Net/*.eg \ + ${libdir}/perl/${PV}/unicore/mktables \ + ${libdir}/perl/${PV}/unicore/mktables.lst \ + ${libdir}/perl/${PV}/unicore/version " + +FILES_perl-module-cpan += "${libdir}/perl/${PV}/CPAN \ + ${libdir}/perl/${PV}/CPAN.pm" +FILES_perl-module-cpanplus += "${libdir}/perl/${PV}/CPANPLUS \ + ${libdir}/perl/${PV}/CPANPLUS.pm" +FILES_perl-module-unicore += "${libdir}/perl/${PV}/unicore" + +# Create a perl-modules package recommending all the other perl +# packages (actually the non modules packages and not created too) +ALLOW_EMPTY_perl-modules = "1" +PACKAGES_append = " perl-modules " + +PACKAGESPLITFUNCS_prepend = "split_perl_packages " + +python split_perl_packages () { + libdir = d.expand('${libdir}/perl/${PV}') + do_split_packages(d, libdir, 'auto/([^.]*)/[^/]*\.(so|ld|ix|al)', 'perl-module-%s', 'perl module %s', recursive=True, match_path=True, prepend=False) + do_split_packages(d, libdir, 'Module/([^\/]*)\.pm', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False) + do_split_packages(d, libdir, 'Module/([^\/]*)/.*', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False) + do_split_packages(d, libdir, '(^(?!(CPAN\/|CPANPLUS\/|Module\/|unicore\/|auto\/)[^\/]).*)\.(pm|pl|e2x)', 'perl-module-%s', 'perl module %s', recursive=True, allow_dirs=False, match_path=True, prepend=False) + + # perl-modules should recommend every perl module, and only the + # modules. Don't attempt to use the result of do_split_packages() as some + # modules are manually split (eg. perl-module-unicore). 
+ packages = filter(lambda p: 'perl-module-' in p, d.getVar('PACKAGES').split()) + d.setVar(d.expand("RRECOMMENDS_${PN}-modules"), ' '.join(packages)) +} + +PACKAGES_DYNAMIC += "^perl-module-.*" +PACKAGES_DYNAMIC_class-nativesdk += "^nativesdk-perl-module-.*" + +RPROVIDES_perl-lib = "perl-lib" + +require perl-rdepends_${PV}.inc +require perl-ptest.inc + +SSTATE_SCAN_FILES += "*.pm *.pod *.h *.pl *.sh" + +BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pkgconfig/pkgconfig_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/pkgconfig/pkgconfig_git.bb index ff8254cfc..dc44992c7 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/pkgconfig/pkgconfig_git.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/pkgconfig/pkgconfig_git.bb @@ -8,10 +8,6 @@ SECTION = "console/utils" LICENSE = "GPLv2+" LIC_FILES_CHKSUM = "file://COPYING;md5=b234ee4d69f5fce4486a80fdaf4a4263" -DEPENDS = "glib-2.0" -DEPENDS_class-native = "" -DEPENDS_class-nativesdk = "" - SRCREV = "87152c05be88ca8be71a3a563f275b3686d32c28" PV = "0.29.1+git${SRCPV}" @@ -26,9 +22,16 @@ S = "${WORKDIR}/git" inherit autotools -EXTRA_OECONF = "--without-internal-glib" -EXTRA_OECONF_class-native = "--with-internal-glib" -EXTRA_OECONF_class-nativesdk = "--with-internal-glib" +# Because of a faulty test, the current auto mode always evaluates to no, +# so just continue that behaviour. +# +EXTRA_OECONF += "--disable-indirect-deps" + +PACKAGECONFIG ??= "glib" +PACKAGECONFIG_class-native = "" +PACKAGECONFIG_class-nativesdk = "" + +PACKAGECONFIG[glib] = "--without-internal-glib,--with-internal-glib,glib-2.0 pkgconfig-native" acpaths = "-I ." diff --git a/import-layers/yocto-poky/meta/recipes-devtools/postinst-intercept/nativesdk-postinst-intercept_1.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/postinst-intercept/nativesdk-postinst-intercept_1.0.bb index 7dc45c68f..938ac8ad7 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/postinst-intercept/nativesdk-postinst-intercept_1.0.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/postinst-intercept/nativesdk-postinst-intercept_1.0.bb @@ -1,6 +1,5 @@ SUMMARY = "Postinstall scriptlets" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" FILES_${PN}_append = " ${datadir}/postinst-intercepts/*" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/prelink/prelink_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/prelink/prelink_git.bb index 0856b1c66..4529dbfcf 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/prelink/prelink_git.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/prelink/prelink_git.bb @@ -132,7 +132,7 @@ python do_linkerpaths () { } python () { - overrides = d.getVar("OVERRIDES", True).split(":") + overrides = d.getVar("OVERRIDES").split(":") if "class-target" in overrides: bb.build.addtask('do_linkerpaths', 'do_configure', 'do_patch', d) } diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0001-Don-t-send-SIGUSR1-to-init.patch b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0001-Don-t-send-SIGUSR1-to-init.patch deleted file mode 100644 index 6c694ceb0..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0001-Don-t-send-SIGUSR1-to-init.patch +++ /dev/null @@ -1,48 +0,0 @@ -From befc6dbd6469d428c9e0830dbe51bdf7ac39d9ae Mon Sep 17 00:00:00 2001 -From: Seebs -Date: Thu, 22 Sep 2016 14:35:04 -0500 -Subject: [PATCH] Don't send SIGUSR1 to init. 
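The pseudo patch dropped here (its description continues below) made the daemonized server skip the parent notification when the parent had already exited and the child had been reparented to PID 1. The same guard, rendered as a small self-contained Perl sketch rather than the actual pseudo_server.c change:

    #!/usr/bin/perl
    # Illustration of the "don't signal init" guard; not pseudo's C code.
    use strict;
    use warnings;

    # A daemonized child normally tells its parent "setup complete" with
    # SIGUSR1. If the parent is already gone, the child belongs to init
    # (PID 1), and signalling that can upset minimal inits in containers.
    $SIG{USR1} = sub { print "parent: got SIGUSR1 from child\n" };

    my $pid = fork();
    die "fork failed: $!" unless defined $pid;

    if ($pid == 0) {
        my $ppid = getppid();
        if ($ppid == 1) {
            warn "child: parent is init, not sending SIGUSR1\n";
        } else {
            kill 'USR1', $ppid;
        }
        exit 0;
    }

    sleep 1;            # crude stand-in for "wait for SIGUSR1 or time out"
    waitpid $pid, 0;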
- -If the parent exits due to child process being slow, getppid() will return -1, and we'll send SIGUSR1 to init, which can break things like dumbinit -which aren't adequately protected against non-root processes sending them -signals. - -Signed-off-by: Seebs - -Upstream-Status: Backport (commit befc6dbd6469d428c9e0830dbe51bdf7ac39d9ae) - -[YOCTO #10324] - -This resolves an issue where a docker container running builds would die -due to it's 'mini init' being signaled by pseudo. - -Signed-off-by: Mark Hatle ---- - pseudo_server.c | 11 ++++++++--- - -diff --git a/pseudo_server.c b/pseudo_server.c -index 65102dd..8731d20 100644 ---- a/pseudo_server.c -+++ b/pseudo_server.c -@@ -358,9 +358,14 @@ pseudo_server_start(int daemonize) { - signal(SIGTERM, quit_now); - /* tell parent process to stop waiting */ - if (daemonize) { -- pseudo_diag("Setup complete, sending SIGUSR1 to pid %d.\n", -- getppid()); -- kill(getppid(), SIGUSR1); -+ pid_t ppid = getppid(); -+ if (ppid == 1) { -+ pseudo_diag("Setup complete, but parent is init, not sending SIGUSR1.\n"); -+ } else { -+ pseudo_diag("Setup complete, sending SIGUSR1 to pid %d.\n", -+ ppid); -+ kill(ppid, SIGUSR1); -+ } - } - pseudo_server_loop(); - return 0; --- -2.5.5 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0001-Quiet-diagnostics-during-startup-for-pseudo-d.patch b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0001-Quiet-diagnostics-during-startup-for-pseudo-d.patch deleted file mode 100644 index d4b9f6a0f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0001-Quiet-diagnostics-during-startup-for-pseudo-d.patch +++ /dev/null @@ -1,54 +0,0 @@ -From eead8a505245a292c43f070c0e836cdfeb7bd7bd Mon Sep 17 00:00:00 2001 -From: Seebs -Date: Wed, 28 Sep 2016 17:05:17 -0500 -Subject: [PATCH 1/2] Quiet diagnostics during startup for pseudo -d - -When the client spawns a pseudo server, it starts out sending diagnostics -to stderr. This can be spammy in some cases with races during startup; -everything resolves, but we get scary-looking diagnostics. So shove -those into a log file. - -Signed-off-by: Seebs - -Upstream-Status: Backport -Signed-off-by: Robert Yang ---- - ChangeLog.txt | 5 +++++ - pseudo_server.c | 4 +++- - 2 files changed, 8 insertions(+), 1 deletion(-) - -diff --git a/ChangeLog.txt b/ChangeLog.txt -index d6359ca..4cc24de 100644 ---- a/ChangeLog.txt -+++ b/ChangeLog.txt -@@ -1,3 +1,8 @@ -+2016-09-28: -+ * (seebs) Send errors to log when daemonizing, but do that a lot -+ sooner to prevent startup messages which can show up spuriously -+ with multiple clients. -+ - 2016-07-28: - * (seebs) Fix performance issue on deletion with xattr changes. - -diff --git a/pseudo_server.c b/pseudo_server.c -index 8731d20..7c2db2f 100644 ---- a/pseudo_server.c -+++ b/pseudo_server.c -@@ -162,6 +162,9 @@ pseudo_server_start(int daemonize) { - * SIGUSR1, or until too much time has passed. */ - if (daemonize) { - int child; -+ -+ /* make startup messages go away when invoked-as-daemon */ -+ pseudo_debug_logfile(PSEUDO_LOGFILE, 2); - child = fork(); - if (child == -1) { - pseudo_diag("Couldn't fork child process: %s\n", -@@ -231,7 +234,6 @@ pseudo_server_start(int daemonize) { - setsid(); - fclose(stdin); - fclose(stdout); -- pseudo_debug_logfile(PSEUDO_LOGFILE, 2); - /* and then just execute the server code normally. 
*/ - /* Any logging will presumably go to logfile, but - * exit status will make it back to the parent for diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0002-Use-correct-file-descriptor.patch b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0002-Use-correct-file-descriptor.patch deleted file mode 100644 index dd6fd87ae..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0002-Use-correct-file-descriptor.patch +++ /dev/null @@ -1,53 +0,0 @@ -From 7a0632cad851826d804db0540d9a59773e6bf29c Mon Sep 17 00:00:00 2001 -From: Seebs -Date: Wed, 28 Sep 2016 22:12:29 -0500 -Subject: [PATCH 2/2] Use correct file descriptor - -So it turns out that pseudo_logfile() was returning 0 or -1, and -pseudo_debug_logfile() was expecting it to be the correct file descriptor -to use. And it's basically a mystery that any of that ever worked. - -Signed-off-by: Seebs - -Upstream-Status: Backport -Signed-off-by: Robert Yang ---- - ChangeLog.txt | 3 +++ - pseudo_util.c | 3 ++- - 2 files changed, 5 insertions(+), 1 deletion(-) - -diff --git a/ChangeLog.txt b/ChangeLog.txt -index 4cc24de..49a6c36 100644 ---- a/ChangeLog.txt -+++ b/ChangeLog.txt -@@ -2,6 +2,9 @@ - * (seebs) Send errors to log when daemonizing, but do that a lot - sooner to prevent startup messages which can show up spuriously - with multiple clients. -+ * (seebs) return file descriptor from pseudo_logfile, so we set -+ pseudo_util_debug_fd to the right value instead of to stdin. -+ Nice bug. - - 2016-07-28: - * (seebs) Fix performance issue on deletion with xattr changes. -diff --git a/pseudo_util.c b/pseudo_util.c -index 0c156cf..a60c74b 100644 ---- a/pseudo_util.c -+++ b/pseudo_util.c -@@ -1569,7 +1569,7 @@ pseudo_logfile(char *filename, char *defname, int prefer_fd) { - if (fd == -1) - return -1; - else -- return 0; -+ return fd; - } - - int -@@ -1579,6 +1579,7 @@ pseudo_debug_logfile(char *defname, int prefer_fd) { - - fd = pseudo_logfile(filename, defname, prefer_fd); - if (fd > -1) { -+ pseudo_diag("debug_logfile: fd %d\n", fd); - pseudo_util_debug_fd = fd; - return 0; - } diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0003-Fix-renameat-parallel-to-previous-fix-to-rename.patch b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0003-Fix-renameat-parallel-to-previous-fix-to-rename.patch deleted file mode 100644 index 739c03ee6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/0003-Fix-renameat-parallel-to-previous-fix-to-rename.patch +++ /dev/null @@ -1,64 +0,0 @@ -From d9ab3a0acc94151048498b1ea4d69e7707df1526 Mon Sep 17 00:00:00 2001 -From: Seebs -Date: Fri, 30 Sep 2016 10:56:35 -0500 -Subject: [PATCH 3/3] Fix renameat (parallel to previous fix to rename) - -There was a bug in rename(), which was duplicated when renameat() was -implemented, and which got fixed two years ago for rename(), but no -one ever uses renameat() so it didn't get fixed there. Thanks -to Anton Gerasimov for the bug report -and patch. - -Signed-off-by: Seebs - -Upstream-Status: Backport -Signed-off-by: Joshua Lock - ---- - ChangeLog.txt | 4 ++++ - ports/unix/guts/renameat.c | 7 ++++++- - 2 files changed, 10 insertions(+), 1 deletion(-) - -diff --git a/ChangeLog.txt b/ChangeLog.txt -index 65b9759..ca04cc0 100644 ---- a/ChangeLog.txt -+++ b/ChangeLog.txt -@@ -1,3 +1,7 @@ -+2016-09-30: -+ * (seebs) Fix rename at, matching fix from ee00f63d for rename. Bug -+ and fix provided by Anton Gerasimov . 
-+ - 2016-09-28: - * (seebs) Send errors to log when daemonizing, but do that a lot - sooner to prevent startup messages which can show up spuriously -diff --git a/ports/unix/guts/renameat.c b/ports/unix/guts/renameat.c -index ade0509..d5e36fa 100644 ---- a/ports/unix/guts/renameat.c -+++ b/ports/unix/guts/renameat.c -@@ -11,6 +11,7 @@ - int oldrc, newrc; - int save_errno; - int old_db_entry = 0; -+ int may_unlinked = 0; - - pseudo_debug(PDBGF_FILE, "renameat: %d,%s->%d,%s\n", - olddirfd, oldpath ? oldpath : "", -@@ -44,10 +45,14 @@ - /* as with unlink, we have to mark that the file may get deleted */ - msg = pseudo_client_op(OP_MAY_UNLINK, 0, -1, newdirfd, newpath, newrc ? NULL : &newbuf); - if (msg && msg->result == RESULT_SUCCEED) -+ may_unlinked = 1; -+ msg = pseudo_client_op(OP_STAT, 0, -1, olddirfd, oldpath, oldrc ? NULL : &oldbuf); -+ if (msg && msg->result == RESULT_SUCCEED) - old_db_entry = 1; -+ - rc = real_renameat(olddirfd, oldpath, newdirfd, newpath); - save_errno = errno; -- if (old_db_entry) { -+ if (may_unlinked) { - if (rc == -1) { - /* since we failed, that wasn't really unlinked -- put - * it back. --- -2.7.4 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/Fix-xattr-performance.patch b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/Fix-xattr-performance.patch deleted file mode 100644 index 4e072e6c4..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/Fix-xattr-performance.patch +++ /dev/null @@ -1,117 +0,0 @@ -From 0d9071f3090bbd7880558f3b488b236ac19b44fc Mon Sep 17 00:00:00 2001 -From: seebs -Date: Thu, 28 Jul 2016 14:02:12 -0500 -Subject: [PATCH 1/2] Fix xattr performance - -When deleting files, we *do* know the inode and attribute, most of the -time, so we pass those in whenever possible. The full purge of unmatched -xattrs should not happen when the correct dev/ino are believed to be known. - -Signed-off-by: Seebs - -[YOCTO #9929] -Upstream-Status: Backport (0d9071f3090bbd7880558f3b488b236ac19b44fc) -Signed-off-by: Joshua Lock ---- - ChangeLog.txt | 3 +++ - pseudo.c | 11 ++++++++--- - pseudo_db.c | 15 +++++++++------ - pseudo_db.h | 2 +- - 4 files changed, 21 insertions(+), 10 deletions(-) - -diff --git a/ChangeLog.txt b/ChangeLog.txt -index 131f163..d6359ca 100644 ---- a/ChangeLog.txt -+++ b/ChangeLog.txt -@@ -1,3 +1,6 @@ -+2016-07-28: -+ * (seebs) Fix performance issue on deletion with xattr changes. -+ - 2016-07-08: - * (RP) release 1.8.1 - * (joshuagl) Fix log table creation issue -diff --git a/pseudo.c b/pseudo.c -index 52f649f..db1c400 100644 ---- a/pseudo.c -+++ b/pseudo.c -@@ -600,7 +600,12 @@ pseudo_op(pseudo_msg_t *msg, const char *program, const char *tag, char **respon - if (by_path.deleting != 0) { - pseudo_debug(PDBGF_FILE, "inode mismatch for '%s' -- old one was marked for deletion, deleting.\n", - msg->path); -- pdb_did_unlink_file(msg->path, by_path.deleting); -+ /* in this case, we don't trust the -+ * existing entries, so we will do the -+ * more expensive sweep for stray -+ * xattrs. 
-+ */ -+ pdb_did_unlink_file(msg->path, NULL, by_path.deleting); - } else { - pseudo_diag("inode mismatch: '%s' ino %llu in db, %llu in request.\n", - msg->path, -@@ -698,7 +703,7 @@ pseudo_op(pseudo_msg_t *msg, const char *program, const char *tag, char **respon - if (by_ino.deleting != 0) { - pseudo_debug(PDBGF_FILE, "inode mismatch for '%s' -- old one was marked for deletion, deleting.\n", - msg->path); -- pdb_did_unlink_file(path_by_ino, by_ino.deleting); -+ pdb_did_unlink_file(path_by_ino, &by_ino, by_ino.deleting); - } else { - pseudo_diag("path mismatch [%d link%s]: ino %llu db '%s' req '%s'.\n", - msg->nlink, -@@ -930,7 +935,7 @@ pseudo_op(pseudo_msg_t *msg, const char *program, const char *tag, char **respon - } - break; - case OP_DID_UNLINK: -- pdb_did_unlink_file(msg->path, msg->client); -+ pdb_did_unlink_file(msg->path, msg, msg->client); - break; - case OP_CANCEL_UNLINK: - pdb_cancel_unlink_file(msg); -diff --git a/pseudo_db.c b/pseudo_db.c -index 289bb29..e7dd193 100644 ---- a/pseudo_db.c -+++ b/pseudo_db.c -@@ -1848,7 +1848,7 @@ pdb_did_unlink_files(int deleting) { - - /* confirm deletion of a specific file by a given client */ - int --pdb_did_unlink_file(char *path, int deleting) { -+pdb_did_unlink_file(char *path, pseudo_msg_t *msg, int deleting) { - static sqlite3_stmt *delete_exact; - int rc, exact; - char *sql_delete_exact = "DELETE FROM files WHERE path = ? AND deleting = ?;"; -@@ -1878,11 +1878,14 @@ pdb_did_unlink_file(char *path, int deleting) { - exact = sqlite3_changes(file_db); - pseudo_debug(PDBGF_DB, "(exact %d)\n", exact); - sqlite3_reset(delete_exact); -- sqlite3_clear_bindings(delete_exact); -- /* we have to clean everything because we don't know for sure the -- * device/inode... -- */ -- pdb_clear_unused_xattrs(); -+ if (msg) { -+ pdb_clear_xattrs(msg); -+ } else { -+ /* we have to clean everything because we don't know for sure the -+ * device/inode... -+ */ -+ pdb_clear_unused_xattrs(); -+ } - return rc != SQLITE_DONE; - } - -diff --git a/pseudo_db.h b/pseudo_db.h -index a54f3c1..1b2599c 100644 ---- a/pseudo_db.h -+++ b/pseudo_db.h -@@ -39,7 +39,7 @@ typedef struct { - - extern int pdb_maybe_backup(void); - extern int pdb_cancel_unlink_file(pseudo_msg_t *msg); --extern int pdb_did_unlink_file(char *path, int deleting); -+extern int pdb_did_unlink_file(char *path, pseudo_msg_t *msg, int deleting); - extern int pdb_did_unlink_files(int deleting); - extern int pdb_link_file(pseudo_msg_t *msg); - extern int pdb_may_unlink_file(pseudo_msg_t *msg, int deleting); --- -2.7.4 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/More-correctly-fix-xattrs.patch b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/More-correctly-fix-xattrs.patch deleted file mode 100644 index 3d178f9b4..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/More-correctly-fix-xattrs.patch +++ /dev/null @@ -1,37 +0,0 @@ -From 45eca34c754d416a38bee90fb2d3c110a0b6cc5f Mon Sep 17 00:00:00 2001 -From: Seebs -Date: Thu, 3 Nov 2016 11:36:12 -0500 -Subject: [PATCH] More-correctly fix xattrs - -Fix provided by Patrick Ohly . This resolves -the actual cause of the path length mismatches, and explains why -I couldn't quite explain why the previous one had only sometimes -worked, also why it showed up on directories but not plain files. 
- -Signed-off-by: Seebs - -Fixes [YOCTO #10623] - -Upstream-Status: Backport [commit 45eca34c754d416a38bee90fb2d3c110a0b6cc5f] - -Signed-off-by: Patrick Ohly ---- - pseudo_client.c | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/pseudo_client.c b/pseudo_client.c -index 6a08df3..b1a00fa 100644 ---- a/pseudo_client.c -+++ b/pseudo_client.c -@@ -1676,7 +1676,7 @@ pseudo_client_op(pseudo_op_t op, int access, int fd, int dirfd, const char *path - * empty path for that. - */ - if (path_extra_1) { -- size_t full_len = path_extra_1len + 1 + pathlen; -+ size_t full_len = path_extra_1len + 1 + pathlen - strip_slash; - size_t partial_len = pathlen - 1 - strip_slash; - if (path_extra_2) { - full_len += path_extra_2len + 1; --- -2.1.4 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/b6b68db896f9963558334aff7fca61adde4ec10f.patch b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/b6b68db896f9963558334aff7fca61adde4ec10f.patch new file mode 100644 index 000000000..3045a3b73 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/b6b68db896f9963558334aff7fca61adde4ec10f.patch @@ -0,0 +1,48 @@ +From b6b68db896f9963558334aff7fca61adde4ec10f Mon Sep 17 00:00:00 2001 +From: Seebs +Date: Thu, 13 Apr 2017 18:12:01 -0500 +Subject: Prevent bash from segfaulting when unloading pseudo + +bash's extremely fancy internal awareness of how the environment looks +means that, if you directly call the underlying libc "unsetenv" on +a variable, bash can end up trying to access a null pointer. Fixing +this generically is actually rather hard; you can't really avoid +writing to environ on fork() or popen(), even if you change all +execv*() functions to use the execv*e() variants. So for now, instead +of unsetting the variable, set it to an empty string. + +Thanks to Saur in IRC for spotting this and helping debug it. + +Signed-off-by: Seebs + +Upstream-Status: Backport + +diff --git a/ChangeLog.txt b/ChangeLog.txt +index a2d30e9..8ba1ffa 100644 +--- a/ChangeLog.txt ++++ b/ChangeLog.txt +@@ -1,3 +1,8 @@ ++2017-04-13: ++ * (seebs) don't unset LD_PRELOAD or the like, because if you ++ do that, bash can segfault because it "knows" how many ++ fields are in environ. 
++ + 2017-02-24: + * (seebs) import posix_acl_default fix from Anton Gerasimov + +diff --git a/pseudo_util.c b/pseudo_util.c +index 172990b..6a1fac2 100644 +--- a/pseudo_util.c ++++ b/pseudo_util.c +@@ -844,7 +844,7 @@ void pseudo_dropenv() { + if (ld_preload && strlen(ld_preload)) { + SETENV(PRELINK_LIBRARIES, ld_preload, 1); + } else { +- UNSETENV(PRELINK_LIBRARIES); ++ SETENV(PRELINK_LIBRARIES, "", 1); + } + } + } +-- +cgit v0.10.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/efe0be279901006f939cd357ccee47b651c786da.patch b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/efe0be279901006f939cd357ccee47b651c786da.patch new file mode 100644 index 000000000..64fc58c4f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/efe0be279901006f939cd357ccee47b651c786da.patch @@ -0,0 +1,99 @@ +From efe0be279901006f939cd357ccee47b651c786da Mon Sep 17 00:00:00 2001 +From: Seebs +Date: Fri, 24 Feb 2017 12:47:38 -0600 +Subject: Don't try to record 0-length posix_acl_default xattrs + +Based on a submission from Anton Gerasimov + +On some systems, with some kernel configs, "cp -a" apparently tries to +set an empty ACL list, with a valid header but no contents, which causes +strange and mysterious behavior later if we actually create such an entry. +So filter that out, also sanity-check a couple of other things. + +Signed-off-by: Seebs + +Upstream-Status: Backport + +diff --git a/ChangeLog.txt b/ChangeLog.txt +index ae2a6e9..a2d30e9 100644 +--- a/ChangeLog.txt ++++ b/ChangeLog.txt +@@ -1,3 +1,6 @@ ++2017-02-24: ++ * (seebs) import posix_acl_default fix from Anton Gerasimov ++ + 2017-02-01: + * (seebs) handle xattr deletion slightly more carefully. + * (seebs) tag this as 1.8.2 +diff --git a/ports/linux/xattr/pseudo_wrappers.c b/ports/linux/xattr/pseudo_wrappers.c +index 46bc053..d69d53e 100644 +--- a/ports/linux/xattr/pseudo_wrappers.c ++++ b/ports/linux/xattr/pseudo_wrappers.c +@@ -62,9 +62,9 @@ static int + posix_permissions(const acl_header *header, int entries, int *extra, int *mode) { + int acl_seen = 0; + if (le32(header->version) != 2) { +- pseudo_diag("Fatal: ACL support no available for header version %d.\n", ++ pseudo_diag("Fatal: ACL support not available for header version %d.\n", + le32(header->version)); +- return 1; ++ return -1; + } + *mode = 0; + *extra = 0; +@@ -140,12 +140,38 @@ static int shared_setxattr(const char *path, int fd, const char *name, const voi + pseudo_debug(PDBGF_XATTR, "setxattr(%s [fd %d], %s => '%.*s')\n", + path ? path : "", fd, name, (int) size, (char *) value); + ++ /* Filter out erroneous sizes for POSIX ACL ++ * see posix_acl_xattr_count in include/linux/posix_acl_xattr.h of Linux source code */ ++ /* I don't think there's any posix_acl_* values that aren't in this format */ ++ if (!strncmp(name, "system.posix_acl_", 17)) { ++ // ACL is corrupt, issue an error ++ if(size < sizeof(acl_header) || (size - sizeof(acl_header)) % sizeof(acl_entry) != 0) { ++ pseudo_debug(PDBGF_XATTR, "invalid data size for %s: %d\n", ++ name, (int) size); ++ errno = EINVAL; ++ return -1; ++ } ++ ++ // ACL is empty, do nothing ++ if((size - sizeof(acl_header)) / sizeof(acl_entry) == 0) { ++ /* on some systems, "cp -a" will attempt to clone the ++ * posix_acl_default entry for a directory (which would specify ++ * default ACLs for new files in that directory), but if the ++ * original was empty, we get a header but no entries. 
With ++ * real xattr, that ends up being silently discarded, apparently, ++ * so we discard it too. ++ */ ++ pseudo_debug(PDBGF_XATTR, "0-length ACL entry %s.\n", name); ++ return 0; ++ } ++ } + /* this may be a plain chmod */ + if (!strcmp(name, "system.posix_acl_access")) { + int extra; + int mode; + int entries = (size - sizeof(acl_header)) / sizeof(acl_entry); +- if (!posix_permissions(value, entries, &extra, &mode)) { ++ int res = posix_permissions(value, entries, &extra, &mode); ++ if (res == 0) { + pseudo_debug(PDBGF_XATTR, "posix_acl_access translated to mode %04o. Remaining attribute(s): %d.\n", + mode, extra); + buf.st_mode = mode; +@@ -164,8 +190,12 @@ static int shared_setxattr(const char *path, int fd, const char *name, const voi + if (!extra) { + return 0; + } ++ } else if (res == -1) { ++ errno = EOPNOTSUPP; ++ return -1; + } + } ++ + if (!strcmp(name, "user.pseudo_data")) { + pseudo_debug(PDBGF_XATTR | PDBGF_XATTRDB, "user.pseudo_data xattribute does not get to go in database.\n"); + return -1; +-- +cgit v0.10.2 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/fallback-passwd b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/fallback-passwd index 0889c5704..08611baaf 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/fallback-passwd +++ b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/files/fallback-passwd @@ -1,2 +1,3 @@ root::0:0:root:/home/root:/bin/sh +pseudopasswd:*:1:1:this-is-the-pseudo-passwd:/nonexistent:/bin/sh nobody:*:65534:65534:nobody:/nonexistent:/bin/sh diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.8.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.8.1.bb deleted file mode 100644 index 90b53c0c1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.8.1.bb +++ /dev/null @@ -1,17 +0,0 @@ -require pseudo.inc - -SRC_URI = "http://downloads.yoctoproject.org/releases/pseudo/${BPN}-${PV}.tar.bz2 \ - file://0001-configure-Prune-PIE-flags.patch \ - file://fallback-passwd \ - file://fallback-group \ - file://moreretries.patch \ - file://Fix-xattr-performance.patch \ - file://0001-Don-t-send-SIGUSR1-to-init.patch \ - file://0001-Quiet-diagnostics-during-startup-for-pseudo-d.patch \ - file://0002-Use-correct-file-descriptor.patch \ - file://0003-Fix-renameat-parallel-to-previous-fix-to-rename.patch \ - file://More-correctly-fix-xattrs.patch \ - " - -SRC_URI[md5sum] = "ee38e4fb62ff88ad067b1a5a3825bac7" -SRC_URI[sha256sum] = "dac4ad2d21228053151121320f629d41dd5c0c87695ac4e7aea286c414192ab5" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.8.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.8.2.bb new file mode 100644 index 000000000..b427b9ac3 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_1.8.2.bb @@ -0,0 +1,13 @@ +require pseudo.inc + +SRC_URI = "http://downloads.yoctoproject.org/releases/pseudo/${BPN}-${PV}.tar.bz2 \ + file://0001-configure-Prune-PIE-flags.patch \ + file://fallback-passwd \ + file://fallback-group \ + file://moreretries.patch \ + file://efe0be279901006f939cd357ccee47b651c786da.patch \ + file://b6b68db896f9963558334aff7fca61adde4ec10f.patch \ + " + +SRC_URI[md5sum] = "7d41e72188fbea1f696c399c1a435675" +SRC_URI[sha256sum] = "ceb456bd47770a37ca20784a91d715c5a7601e07e26ab11b0c77e9203ed3d196" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_git.bb 
b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_git.bb index ac923bbb7..42c7b2ea5 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_git.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/pseudo/pseudo_git.bb @@ -1,7 +1,7 @@ require pseudo.inc -SRCREV = "45eca34c754d416a38bee90fb2d3c110a0b6cc5f" -PV = "1.8.1+git${SRCPV}" +SRCREV = "02168305b0a19f981ffe857f36eb256ba8810b77" +PV = "1.8.2+git${SRCPV}" DEFAULT_PREFERENCE = "-1" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips/_numpyconfig.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips/_numpyconfig.h deleted file mode 100644 index 4c465c216..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips/_numpyconfig.h +++ /dev/null @@ -1,32 +0,0 @@ -#define NPY_HAVE_ENDIAN_H 1 -#define NPY_SIZEOF_SHORT SIZEOF_SHORT -#define NPY_SIZEOF_INT SIZEOF_INT -#define NPY_SIZEOF_LONG SIZEOF_LONG -#define NPY_SIZEOF_FLOAT 4 -#define NPY_SIZEOF_COMPLEX_FLOAT 8 -#define NPY_SIZEOF_DOUBLE 8 -#define NPY_SIZEOF_COMPLEX_DOUBLE 16 -#define NPY_SIZEOF_LONGDOUBLE 8 -#define NPY_SIZEOF_COMPLEX_LONGDOUBLE 16 -#define NPY_ENABLE_SEPARATE_COMPILATION 1 -#define NPY_SIZEOF_PY_INTPTR_T 4 -#define NPY_SIZEOF_PY_LONG_LONG 8 -#define NPY_SIZEOF_LONGLONG 8 -#define NPY_SIZEOF_OFF_T 8 -#define NPY_NO_SMP 0 -#define NPY_HAVE_DECL_ISNAN -#define NPY_HAVE_DECL_ISINF -#define NPY_HAVE_DECL_ISFINITE -#define NPY_HAVE_DECL_SIGNBIT -#define NPY_USE_C99_COMPLEX 1 -#define NPY_HAVE_COMPLEX_DOUBLE 1 -#define NPY_HAVE_COMPLEX_FLOAT 1 -#define NPY_HAVE_COMPLEX_LONG_DOUBLE 1 -#define NPY_USE_C99_FORMATS 1 -#define NPY_VISIBILITY_HIDDEN __attribute__((visibility("hidden"))) -#define NPY_ABI_VERSION 0x01000009 -#define NPY_API_VERSION 0x0000000A - -#ifndef __STDC_FORMAT_MACROS -#define __STDC_FORMAT_MACROS 1 -#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips/config.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips/config.h deleted file mode 100644 index 2f6135adc..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips/config.h +++ /dev/null @@ -1,139 +0,0 @@ -#define HAVE_ENDIAN_H 1 -#define SIZEOF_PY_INTPTR_T 4 -#define SIZEOF_PY_LONG_LONG 8 -#define MATHLIB m -#define HAVE_SIN 1 -#define HAVE_COS 1 -#define HAVE_TAN 1 -#define HAVE_SINH 1 -#define HAVE_COSH 1 -#define HAVE_TANH 1 -#define HAVE_FABS 1 -#define HAVE_FLOOR 1 -#define HAVE_CEIL 1 -#define HAVE_SQRT 1 -#define HAVE_LOG10 1 -#define HAVE_LOG 1 -#define HAVE_EXP 1 -#define HAVE_ASIN 1 -#define HAVE_ACOS 1 -#define HAVE_ATAN 1 -#define HAVE_FMOD 1 -#define HAVE_MODF 1 -#define HAVE_FREXP 1 -#define HAVE_LDEXP 1 -#define HAVE_RINT 1 -#define HAVE_TRUNC 1 -#define HAVE_EXP2 1 -#define HAVE_LOG2 1 -#define HAVE_ATAN2 1 -#define HAVE_POW 1 -#define HAVE_NEXTAFTER 1 -#define HAVE_SINF 1 -#define HAVE_COSF 1 -#define HAVE_TANF 1 -#define HAVE_SINHF 1 -#define HAVE_COSHF 1 -#define HAVE_TANHF 1 -#define HAVE_FABSF 1 -#define HAVE_FLOORF 1 -#define HAVE_CEILF 1 -#define HAVE_RINTF 1 -#define HAVE_TRUNCF 1 -#define HAVE_SQRTF 1 -#define HAVE_LOG10F 1 -#define HAVE_LOGF 1 -#define HAVE_LOG1PF 1 -#define HAVE_EXPF 1 -#define HAVE_EXPM1F 1 -#define HAVE_ASINF 1 -#define HAVE_ACOSF 1 -#define HAVE_ATANF 1 -#define HAVE_ASINHF 1 -#define HAVE_ACOSHF 1 -#define HAVE_ATANHF 1 -#define HAVE_HYPOTF 1 -#define HAVE_ATAN2F 1 -#define HAVE_POWF 1 -#define HAVE_FMODF 1 -#define HAVE_MODFF 1 -#define HAVE_FREXPF 1 -#define 
HAVE_LDEXPF 1 -#define HAVE_EXP2F 1 -#define HAVE_LOG2F 1 -#define HAVE_COPYSIGNF 1 -#define HAVE_NEXTAFTERF 1 -#define HAVE_SINL 1 -#define HAVE_COSL 1 -#define HAVE_TANL 1 -#define HAVE_SINHL 1 -#define HAVE_COSHL 1 -#define HAVE_TANHL 1 -#define HAVE_FABSL 1 -#define HAVE_FLOORL 1 -#define HAVE_CEILL 1 -#define HAVE_RINTL 1 -#define HAVE_TRUNCL 1 -#define HAVE_SQRTL 1 -#define HAVE_LOG10L 1 -#define HAVE_LOGL 1 -#define HAVE_LOG1PL 1 -#define HAVE_EXPL 1 -#define HAVE_EXPM1L 1 -#define HAVE_ASINL 1 -#define HAVE_ACOSL 1 -#define HAVE_ATANL 1 -#define HAVE_ASINHL 1 -#define HAVE_ACOSHL 1 -#define HAVE_ATANHL 1 -#define HAVE_HYPOTL 1 -#define HAVE_ATAN2L 1 -#define HAVE_POWL 1 -#define HAVE_FMODL 1 -#define HAVE_MODFL 1 -#define HAVE_FREXPL 1 -#define HAVE_LDEXPL 1 -#define HAVE_EXP2L 1 -#define HAVE_LOG2L 1 -#define HAVE_COPYSIGNL 1 -#define HAVE_NEXTAFTERL 1 -#define HAVE_DECL_SIGNBIT -#define HAVE_COMPLEX_H 1 -#define HAVE_CREAL 1 -#define HAVE_CIMAG 1 -#define HAVE_CABS 1 -#define HAVE_CARG 1 -#define HAVE_CEXP 1 -#define HAVE_CSQRT 1 -#define HAVE_CLOG 1 -#define HAVE_CCOS 1 -#define HAVE_CSIN 1 -#define HAVE_CPOW 1 -#define HAVE_CREALF 1 -#define HAVE_CIMAGF 1 -#define HAVE_CABSF 1 -#define HAVE_CARGF 1 -#define HAVE_CEXPF 1 -#define HAVE_CSQRTF 1 -#define HAVE_CLOGF 1 -#define HAVE_CCOSF 1 -#define HAVE_CSINF 1 -#define HAVE_CPOWF 1 -#define HAVE_CREALL 1 -#define HAVE_CIMAGL 1 -#define HAVE_CABSL 1 -#define HAVE_CARGL 1 -#define HAVE_CEXPL 1 -#define HAVE_CSQRTL 1 -#define HAVE_CLOGL 1 -#define HAVE_CCOSL 1 -#define HAVE_CSINL 1 -#define HAVE_CPOWL 1 -#define HAVE_LDOUBLE_IEEE_DOUBLE_BE 1 -#ifndef __cplusplus -/* #undef inline */ -#endif - -#ifndef _NPY_NPY_CONFIG_H_ -#error config.h should never be included directly, include npy_config.h instead -#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64/_numpyconfig.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64/_numpyconfig.h deleted file mode 100644 index debb39009..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64/_numpyconfig.h +++ /dev/null @@ -1,32 +0,0 @@ -#define NPY_HAVE_ENDIAN_H 1 -#define NPY_SIZEOF_SHORT SIZEOF_SHORT -#define NPY_SIZEOF_INT SIZEOF_INT -#define NPY_SIZEOF_LONG SIZEOF_LONG -#define NPY_SIZEOF_FLOAT 4 -#define NPY_SIZEOF_COMPLEX_FLOAT 8 -#define NPY_SIZEOF_DOUBLE 8 -#define NPY_SIZEOF_COMPLEX_DOUBLE 16 -#define NPY_SIZEOF_LONGDOUBLE 16 -#define NPY_SIZEOF_COMPLEX_LONGDOUBLE 32 -#define NPY_ENABLE_SEPARATE_COMPILATION 1 -#define NPY_SIZEOF_PY_INTPTR_T 8 -#define NPY_SIZEOF_PY_LONG_LONG 8 -#define NPY_SIZEOF_LONGLONG 8 -#define NPY_SIZEOF_OFF_T 8 -#define NPY_NO_SMP 0 -#define NPY_HAVE_DECL_ISNAN -#define NPY_HAVE_DECL_ISINF -#define NPY_HAVE_DECL_ISFINITE -#define NPY_HAVE_DECL_SIGNBIT -#define NPY_USE_C99_COMPLEX 1 -#define NPY_HAVE_COMPLEX_DOUBLE 1 -#define NPY_HAVE_COMPLEX_FLOAT 1 -#define NPY_HAVE_COMPLEX_LONG_DOUBLE 1 -#define NPY_USE_C99_FORMATS 1 -#define NPY_VISIBILITY_HIDDEN __attribute__((visibility("hidden"))) -#define NPY_ABI_VERSION 0x01000009 -#define NPY_API_VERSION 0x0000000A - -#ifndef __STDC_FORMAT_MACROS -#define __STDC_FORMAT_MACROS 1 -#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64/config.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64/config.h deleted file mode 100644 index c30b868f2..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64/config.h +++ /dev/null @@ -1,139 +0,0 
@@ -#define HAVE_ENDIAN_H 1 -#define SIZEOF_PY_INTPTR_T 8 -#define SIZEOF_PY_LONG_LONG 8 -#define MATHLIB m -#define HAVE_SIN 1 -#define HAVE_COS 1 -#define HAVE_TAN 1 -#define HAVE_SINH 1 -#define HAVE_COSH 1 -#define HAVE_TANH 1 -#define HAVE_FABS 1 -#define HAVE_FLOOR 1 -#define HAVE_CEIL 1 -#define HAVE_SQRT 1 -#define HAVE_LOG10 1 -#define HAVE_LOG 1 -#define HAVE_EXP 1 -#define HAVE_ASIN 1 -#define HAVE_ACOS 1 -#define HAVE_ATAN 1 -#define HAVE_FMOD 1 -#define HAVE_MODF 1 -#define HAVE_FREXP 1 -#define HAVE_LDEXP 1 -#define HAVE_RINT 1 -#define HAVE_TRUNC 1 -#define HAVE_EXP2 1 -#define HAVE_LOG2 1 -#define HAVE_ATAN2 1 -#define HAVE_POW 1 -#define HAVE_NEXTAFTER 1 -#define HAVE_SINF 1 -#define HAVE_COSF 1 -#define HAVE_TANF 1 -#define HAVE_SINHF 1 -#define HAVE_COSHF 1 -#define HAVE_TANHF 1 -#define HAVE_FABSF 1 -#define HAVE_FLOORF 1 -#define HAVE_CEILF 1 -#define HAVE_RINTF 1 -#define HAVE_TRUNCF 1 -#define HAVE_SQRTF 1 -#define HAVE_LOG10F 1 -#define HAVE_LOGF 1 -#define HAVE_LOG1PF 1 -#define HAVE_EXPF 1 -#define HAVE_EXPM1F 1 -#define HAVE_ASINF 1 -#define HAVE_ACOSF 1 -#define HAVE_ATANF 1 -#define HAVE_ASINHF 1 -#define HAVE_ACOSHF 1 -#define HAVE_ATANHF 1 -#define HAVE_HYPOTF 1 -#define HAVE_ATAN2F 1 -#define HAVE_POWF 1 -#define HAVE_FMODF 1 -#define HAVE_MODFF 1 -#define HAVE_FREXPF 1 -#define HAVE_LDEXPF 1 -#define HAVE_EXP2F 1 -#define HAVE_LOG2F 1 -#define HAVE_COPYSIGNF 1 -#define HAVE_NEXTAFTERF 1 -#define HAVE_SINL 1 -#define HAVE_COSL 1 -#define HAVE_TANL 1 -#define HAVE_SINHL 1 -#define HAVE_COSHL 1 -#define HAVE_TANHL 1 -#define HAVE_FABSL 1 -#define HAVE_FLOORL 1 -#define HAVE_CEILL 1 -#define HAVE_RINTL 1 -#define HAVE_TRUNCL 1 -#define HAVE_SQRTL 1 -#define HAVE_LOG10L 1 -#define HAVE_LOGL 1 -#define HAVE_LOG1PL 1 -#define HAVE_EXPL 1 -#define HAVE_EXPM1L 1 -#define HAVE_ASINL 1 -#define HAVE_ACOSL 1 -#define HAVE_ATANL 1 -#define HAVE_ASINHL 1 -#define HAVE_ACOSHL 1 -#define HAVE_ATANHL 1 -#define HAVE_HYPOTL 1 -#define HAVE_ATAN2L 1 -#define HAVE_POWL 1 -#define HAVE_FMODL 1 -#define HAVE_MODFL 1 -#define HAVE_FREXPL 1 -#define HAVE_LDEXPL 1 -#define HAVE_EXP2L 1 -#define HAVE_LOG2L 1 -#define HAVE_COPYSIGNL 1 -#define HAVE_NEXTAFTERL 1 -#define HAVE_DECL_SIGNBIT -#define HAVE_COMPLEX_H 1 -#define HAVE_CREAL 1 -#define HAVE_CIMAG 1 -#define HAVE_CABS 1 -#define HAVE_CARG 1 -#define HAVE_CEXP 1 -#define HAVE_CSQRT 1 -#define HAVE_CLOG 1 -#define HAVE_CCOS 1 -#define HAVE_CSIN 1 -#define HAVE_CPOW 1 -#define HAVE_CREALF 1 -#define HAVE_CIMAGF 1 -#define HAVE_CABSF 1 -#define HAVE_CARGF 1 -#define HAVE_CEXPF 1 -#define HAVE_CSQRTF 1 -#define HAVE_CLOGF 1 -#define HAVE_CCOSF 1 -#define HAVE_CSINF 1 -#define HAVE_CPOWF 1 -#define HAVE_CREALL 1 -#define HAVE_CIMAGL 1 -#define HAVE_CABSL 1 -#define HAVE_CARGL 1 -#define HAVE_CEXPL 1 -#define HAVE_CSQRTL 1 -#define HAVE_CLOGL 1 -#define HAVE_CCOSL 1 -#define HAVE_CSINL 1 -#define HAVE_CPOWL 1 -#define HAVE_LDOUBLE_IEEE_QUAD_LE 1 -#ifndef __cplusplus -/* #undef inline */ -#endif - -#ifndef _NPY_NPY_CONFIG_H_ -#error config.h should never be included directly, include npy_config.h instead -#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64n32/_numpyconfig.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64n32/_numpyconfig.h deleted file mode 100644 index 8e2b5d094..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64n32/_numpyconfig.h +++ /dev/null @@ -1,31 +0,0 @@ -#define NPY_HAVE_ENDIAN_H 1 -#define 
NPY_SIZEOF_SHORT SIZEOF_SHORT -#define NPY_SIZEOF_INT SIZEOF_INT -#define NPY_SIZEOF_LONG SIZEOF_LONG -#define NPY_SIZEOF_FLOAT 4 -#define NPY_SIZEOF_COMPLEX_FLOAT 8 -#define NPY_SIZEOF_DOUBLE 8 -#define NPY_SIZEOF_COMPLEX_DOUBLE 16 -#define NPY_SIZEOF_LONGDOUBLE 16 -#define NPY_SIZEOF_COMPLEX_LONGDOUBLE 32 -#define NPY_ENABLE_SEPARATE_COMPILATION 1 -#define NPY_SIZEOF_PY_INTPTR_T 8 -#define NPY_SIZEOF_PY_LONG_LONG 8 -#define NPY_SIZEOF_LONGLONG 8 -#define NPY_NO_SMP 0 -#define NPY_HAVE_DECL_ISNAN -#define NPY_HAVE_DECL_ISINF -#define NPY_HAVE_DECL_ISFINITE -#define NPY_HAVE_DECL_SIGNBIT -#define NPY_USE_C99_COMPLEX 1 -#define NPY_HAVE_COMPLEX_DOUBLE 1 -#define NPY_HAVE_COMPLEX_FLOAT 1 -#define NPY_HAVE_COMPLEX_LONG_DOUBLE 1 -#define NPY_USE_C99_FORMATS 1 -#define NPY_VISIBILITY_HIDDEN __attribute__((visibility("hidden"))) -#define NPY_ABI_VERSION 0x01000009 -#define NPY_API_VERSION 0x0000000A - -#ifndef __STDC_FORMAT_MACROS -#define __STDC_FORMAT_MACROS 1 -#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64n32/config.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64n32/config.h deleted file mode 100644 index c30b868f2..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mips64n32/config.h +++ /dev/null @@ -1,139 +0,0 @@ -#define HAVE_ENDIAN_H 1 -#define SIZEOF_PY_INTPTR_T 8 -#define SIZEOF_PY_LONG_LONG 8 -#define MATHLIB m -#define HAVE_SIN 1 -#define HAVE_COS 1 -#define HAVE_TAN 1 -#define HAVE_SINH 1 -#define HAVE_COSH 1 -#define HAVE_TANH 1 -#define HAVE_FABS 1 -#define HAVE_FLOOR 1 -#define HAVE_CEIL 1 -#define HAVE_SQRT 1 -#define HAVE_LOG10 1 -#define HAVE_LOG 1 -#define HAVE_EXP 1 -#define HAVE_ASIN 1 -#define HAVE_ACOS 1 -#define HAVE_ATAN 1 -#define HAVE_FMOD 1 -#define HAVE_MODF 1 -#define HAVE_FREXP 1 -#define HAVE_LDEXP 1 -#define HAVE_RINT 1 -#define HAVE_TRUNC 1 -#define HAVE_EXP2 1 -#define HAVE_LOG2 1 -#define HAVE_ATAN2 1 -#define HAVE_POW 1 -#define HAVE_NEXTAFTER 1 -#define HAVE_SINF 1 -#define HAVE_COSF 1 -#define HAVE_TANF 1 -#define HAVE_SINHF 1 -#define HAVE_COSHF 1 -#define HAVE_TANHF 1 -#define HAVE_FABSF 1 -#define HAVE_FLOORF 1 -#define HAVE_CEILF 1 -#define HAVE_RINTF 1 -#define HAVE_TRUNCF 1 -#define HAVE_SQRTF 1 -#define HAVE_LOG10F 1 -#define HAVE_LOGF 1 -#define HAVE_LOG1PF 1 -#define HAVE_EXPF 1 -#define HAVE_EXPM1F 1 -#define HAVE_ASINF 1 -#define HAVE_ACOSF 1 -#define HAVE_ATANF 1 -#define HAVE_ASINHF 1 -#define HAVE_ACOSHF 1 -#define HAVE_ATANHF 1 -#define HAVE_HYPOTF 1 -#define HAVE_ATAN2F 1 -#define HAVE_POWF 1 -#define HAVE_FMODF 1 -#define HAVE_MODFF 1 -#define HAVE_FREXPF 1 -#define HAVE_LDEXPF 1 -#define HAVE_EXP2F 1 -#define HAVE_LOG2F 1 -#define HAVE_COPYSIGNF 1 -#define HAVE_NEXTAFTERF 1 -#define HAVE_SINL 1 -#define HAVE_COSL 1 -#define HAVE_TANL 1 -#define HAVE_SINHL 1 -#define HAVE_COSHL 1 -#define HAVE_TANHL 1 -#define HAVE_FABSL 1 -#define HAVE_FLOORL 1 -#define HAVE_CEILL 1 -#define HAVE_RINTL 1 -#define HAVE_TRUNCL 1 -#define HAVE_SQRTL 1 -#define HAVE_LOG10L 1 -#define HAVE_LOGL 1 -#define HAVE_LOG1PL 1 -#define HAVE_EXPL 1 -#define HAVE_EXPM1L 1 -#define HAVE_ASINL 1 -#define HAVE_ACOSL 1 -#define HAVE_ATANL 1 -#define HAVE_ASINHL 1 -#define HAVE_ACOSHL 1 -#define HAVE_ATANHL 1 -#define HAVE_HYPOTL 1 -#define HAVE_ATAN2L 1 -#define HAVE_POWL 1 -#define HAVE_FMODL 1 -#define HAVE_MODFL 1 -#define HAVE_FREXPL 1 -#define HAVE_LDEXPL 1 -#define HAVE_EXP2L 1 -#define HAVE_LOG2L 1 -#define HAVE_COPYSIGNL 1 -#define HAVE_NEXTAFTERL 1 -#define 
HAVE_DECL_SIGNBIT -#define HAVE_COMPLEX_H 1 -#define HAVE_CREAL 1 -#define HAVE_CIMAG 1 -#define HAVE_CABS 1 -#define HAVE_CARG 1 -#define HAVE_CEXP 1 -#define HAVE_CSQRT 1 -#define HAVE_CLOG 1 -#define HAVE_CCOS 1 -#define HAVE_CSIN 1 -#define HAVE_CPOW 1 -#define HAVE_CREALF 1 -#define HAVE_CIMAGF 1 -#define HAVE_CABSF 1 -#define HAVE_CARGF 1 -#define HAVE_CEXPF 1 -#define HAVE_CSQRTF 1 -#define HAVE_CLOGF 1 -#define HAVE_CCOSF 1 -#define HAVE_CSINF 1 -#define HAVE_CPOWF 1 -#define HAVE_CREALL 1 -#define HAVE_CIMAGL 1 -#define HAVE_CABSL 1 -#define HAVE_CARGL 1 -#define HAVE_CEXPL 1 -#define HAVE_CSQRTL 1 -#define HAVE_CLOGL 1 -#define HAVE_CCOSL 1 -#define HAVE_CSINL 1 -#define HAVE_CPOWL 1 -#define HAVE_LDOUBLE_IEEE_QUAD_LE 1 -#ifndef __cplusplus -/* #undef inline */ -#endif - -#ifndef _NPY_NPY_CONFIG_H_ -#error config.h should never be included directly, include npy_config.h instead -#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32eb/_numpyconfig.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32eb/_numpyconfig.h new file mode 100644 index 000000000..8e2b5d094 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32eb/_numpyconfig.h @@ -0,0 +1,31 @@ +#define NPY_HAVE_ENDIAN_H 1 +#define NPY_SIZEOF_SHORT SIZEOF_SHORT +#define NPY_SIZEOF_INT SIZEOF_INT +#define NPY_SIZEOF_LONG SIZEOF_LONG +#define NPY_SIZEOF_FLOAT 4 +#define NPY_SIZEOF_COMPLEX_FLOAT 8 +#define NPY_SIZEOF_DOUBLE 8 +#define NPY_SIZEOF_COMPLEX_DOUBLE 16 +#define NPY_SIZEOF_LONGDOUBLE 16 +#define NPY_SIZEOF_COMPLEX_LONGDOUBLE 32 +#define NPY_ENABLE_SEPARATE_COMPILATION 1 +#define NPY_SIZEOF_PY_INTPTR_T 8 +#define NPY_SIZEOF_PY_LONG_LONG 8 +#define NPY_SIZEOF_LONGLONG 8 +#define NPY_NO_SMP 0 +#define NPY_HAVE_DECL_ISNAN +#define NPY_HAVE_DECL_ISINF +#define NPY_HAVE_DECL_ISFINITE +#define NPY_HAVE_DECL_SIGNBIT +#define NPY_USE_C99_COMPLEX 1 +#define NPY_HAVE_COMPLEX_DOUBLE 1 +#define NPY_HAVE_COMPLEX_FLOAT 1 +#define NPY_HAVE_COMPLEX_LONG_DOUBLE 1 +#define NPY_USE_C99_FORMATS 1 +#define NPY_VISIBILITY_HIDDEN __attribute__((visibility("hidden"))) +#define NPY_ABI_VERSION 0x01000009 +#define NPY_API_VERSION 0x0000000A + +#ifndef __STDC_FORMAT_MACROS +#define __STDC_FORMAT_MACROS 1 +#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32eb/config.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32eb/config.h new file mode 100644 index 000000000..c30b868f2 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32eb/config.h @@ -0,0 +1,139 @@ +#define HAVE_ENDIAN_H 1 +#define SIZEOF_PY_INTPTR_T 8 +#define SIZEOF_PY_LONG_LONG 8 +#define MATHLIB m +#define HAVE_SIN 1 +#define HAVE_COS 1 +#define HAVE_TAN 1 +#define HAVE_SINH 1 +#define HAVE_COSH 1 +#define HAVE_TANH 1 +#define HAVE_FABS 1 +#define HAVE_FLOOR 1 +#define HAVE_CEIL 1 +#define HAVE_SQRT 1 +#define HAVE_LOG10 1 +#define HAVE_LOG 1 +#define HAVE_EXP 1 +#define HAVE_ASIN 1 +#define HAVE_ACOS 1 +#define HAVE_ATAN 1 +#define HAVE_FMOD 1 +#define HAVE_MODF 1 +#define HAVE_FREXP 1 +#define HAVE_LDEXP 1 +#define HAVE_RINT 1 +#define HAVE_TRUNC 1 +#define HAVE_EXP2 1 +#define HAVE_LOG2 1 +#define HAVE_ATAN2 1 +#define HAVE_POW 1 +#define HAVE_NEXTAFTER 1 +#define HAVE_SINF 1 +#define HAVE_COSF 1 +#define HAVE_TANF 1 +#define HAVE_SINHF 1 +#define HAVE_COSHF 1 +#define HAVE_TANHF 1 +#define HAVE_FABSF 1 +#define HAVE_FLOORF 1 +#define HAVE_CEILF 1 
+#define HAVE_RINTF 1 +#define HAVE_TRUNCF 1 +#define HAVE_SQRTF 1 +#define HAVE_LOG10F 1 +#define HAVE_LOGF 1 +#define HAVE_LOG1PF 1 +#define HAVE_EXPF 1 +#define HAVE_EXPM1F 1 +#define HAVE_ASINF 1 +#define HAVE_ACOSF 1 +#define HAVE_ATANF 1 +#define HAVE_ASINHF 1 +#define HAVE_ACOSHF 1 +#define HAVE_ATANHF 1 +#define HAVE_HYPOTF 1 +#define HAVE_ATAN2F 1 +#define HAVE_POWF 1 +#define HAVE_FMODF 1 +#define HAVE_MODFF 1 +#define HAVE_FREXPF 1 +#define HAVE_LDEXPF 1 +#define HAVE_EXP2F 1 +#define HAVE_LOG2F 1 +#define HAVE_COPYSIGNF 1 +#define HAVE_NEXTAFTERF 1 +#define HAVE_SINL 1 +#define HAVE_COSL 1 +#define HAVE_TANL 1 +#define HAVE_SINHL 1 +#define HAVE_COSHL 1 +#define HAVE_TANHL 1 +#define HAVE_FABSL 1 +#define HAVE_FLOORL 1 +#define HAVE_CEILL 1 +#define HAVE_RINTL 1 +#define HAVE_TRUNCL 1 +#define HAVE_SQRTL 1 +#define HAVE_LOG10L 1 +#define HAVE_LOGL 1 +#define HAVE_LOG1PL 1 +#define HAVE_EXPL 1 +#define HAVE_EXPM1L 1 +#define HAVE_ASINL 1 +#define HAVE_ACOSL 1 +#define HAVE_ATANL 1 +#define HAVE_ASINHL 1 +#define HAVE_ACOSHL 1 +#define HAVE_ATANHL 1 +#define HAVE_HYPOTL 1 +#define HAVE_ATAN2L 1 +#define HAVE_POWL 1 +#define HAVE_FMODL 1 +#define HAVE_MODFL 1 +#define HAVE_FREXPL 1 +#define HAVE_LDEXPL 1 +#define HAVE_EXP2L 1 +#define HAVE_LOG2L 1 +#define HAVE_COPYSIGNL 1 +#define HAVE_NEXTAFTERL 1 +#define HAVE_DECL_SIGNBIT +#define HAVE_COMPLEX_H 1 +#define HAVE_CREAL 1 +#define HAVE_CIMAG 1 +#define HAVE_CABS 1 +#define HAVE_CARG 1 +#define HAVE_CEXP 1 +#define HAVE_CSQRT 1 +#define HAVE_CLOG 1 +#define HAVE_CCOS 1 +#define HAVE_CSIN 1 +#define HAVE_CPOW 1 +#define HAVE_CREALF 1 +#define HAVE_CIMAGF 1 +#define HAVE_CABSF 1 +#define HAVE_CARGF 1 +#define HAVE_CEXPF 1 +#define HAVE_CSQRTF 1 +#define HAVE_CLOGF 1 +#define HAVE_CCOSF 1 +#define HAVE_CSINF 1 +#define HAVE_CPOWF 1 +#define HAVE_CREALL 1 +#define HAVE_CIMAGL 1 +#define HAVE_CABSL 1 +#define HAVE_CARGL 1 +#define HAVE_CEXPL 1 +#define HAVE_CSQRTL 1 +#define HAVE_CLOGL 1 +#define HAVE_CCOSL 1 +#define HAVE_CSINL 1 +#define HAVE_CPOWL 1 +#define HAVE_LDOUBLE_IEEE_QUAD_LE 1 +#ifndef __cplusplus +/* #undef inline */ +#endif + +#ifndef _NPY_NPY_CONFIG_H_ +#error config.h should never be included directly, include npy_config.h instead +#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32el/_numpyconfig.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32el/_numpyconfig.h new file mode 100644 index 000000000..8e2b5d094 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32el/_numpyconfig.h @@ -0,0 +1,31 @@ +#define NPY_HAVE_ENDIAN_H 1 +#define NPY_SIZEOF_SHORT SIZEOF_SHORT +#define NPY_SIZEOF_INT SIZEOF_INT +#define NPY_SIZEOF_LONG SIZEOF_LONG +#define NPY_SIZEOF_FLOAT 4 +#define NPY_SIZEOF_COMPLEX_FLOAT 8 +#define NPY_SIZEOF_DOUBLE 8 +#define NPY_SIZEOF_COMPLEX_DOUBLE 16 +#define NPY_SIZEOF_LONGDOUBLE 16 +#define NPY_SIZEOF_COMPLEX_LONGDOUBLE 32 +#define NPY_ENABLE_SEPARATE_COMPILATION 1 +#define NPY_SIZEOF_PY_INTPTR_T 8 +#define NPY_SIZEOF_PY_LONG_LONG 8 +#define NPY_SIZEOF_LONGLONG 8 +#define NPY_NO_SMP 0 +#define NPY_HAVE_DECL_ISNAN +#define NPY_HAVE_DECL_ISINF +#define NPY_HAVE_DECL_ISFINITE +#define NPY_HAVE_DECL_SIGNBIT +#define NPY_USE_C99_COMPLEX 1 +#define NPY_HAVE_COMPLEX_DOUBLE 1 +#define NPY_HAVE_COMPLEX_FLOAT 1 +#define NPY_HAVE_COMPLEX_LONG_DOUBLE 1 +#define NPY_USE_C99_FORMATS 1 +#define NPY_VISIBILITY_HIDDEN __attribute__((visibility("hidden"))) +#define NPY_ABI_VERSION 0x01000009 +#define 
NPY_API_VERSION 0x0000000A + +#ifndef __STDC_FORMAT_MACROS +#define __STDC_FORMAT_MACROS 1 +#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32el/config.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32el/config.h new file mode 100644 index 000000000..48727039a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn32el/config.h @@ -0,0 +1,138 @@ +#define SIZEOF_PY_INTPTR_T 8 +#define SIZEOF_PY_LONG_LONG 8 +#define MATHLIB m +#define HAVE_SIN 1 +#define HAVE_COS 1 +#define HAVE_TAN 1 +#define HAVE_SINH 1 +#define HAVE_COSH 1 +#define HAVE_TANH 1 +#define HAVE_FABS 1 +#define HAVE_FLOOR 1 +#define HAVE_CEIL 1 +#define HAVE_SQRT 1 +#define HAVE_LOG10 1 +#define HAVE_LOG 1 +#define HAVE_EXP 1 +#define HAVE_ASIN 1 +#define HAVE_ACOS 1 +#define HAVE_ATAN 1 +#define HAVE_FMOD 1 +#define HAVE_MODF 1 +#define HAVE_FREXP 1 +#define HAVE_LDEXP 1 +#define HAVE_RINT 1 +#define HAVE_TRUNC 1 +#define HAVE_EXP2 1 +#define HAVE_LOG2 1 +#define HAVE_ATAN2 1 +#define HAVE_POW 1 +#define HAVE_NEXTAFTER 1 +#define HAVE_SINF 1 +#define HAVE_COSF 1 +#define HAVE_TANF 1 +#define HAVE_SINHF 1 +#define HAVE_COSHF 1 +#define HAVE_TANHF 1 +#define HAVE_FABSF 1 +#define HAVE_FLOORF 1 +#define HAVE_CEILF 1 +#define HAVE_RINTF 1 +#define HAVE_TRUNCF 1 +#define HAVE_SQRTF 1 +#define HAVE_LOG10F 1 +#define HAVE_LOGF 1 +#define HAVE_LOG1PF 1 +#define HAVE_EXPF 1 +#define HAVE_EXPM1F 1 +#define HAVE_ASINF 1 +#define HAVE_ACOSF 1 +#define HAVE_ATANF 1 +#define HAVE_ASINHF 1 +#define HAVE_ACOSHF 1 +#define HAVE_ATANHF 1 +#define HAVE_HYPOTF 1 +#define HAVE_ATAN2F 1 +#define HAVE_POWF 1 +#define HAVE_FMODF 1 +#define HAVE_MODFF 1 +#define HAVE_FREXPF 1 +#define HAVE_LDEXPF 1 +#define HAVE_EXP2F 1 +#define HAVE_LOG2F 1 +#define HAVE_COPYSIGNF 1 +#define HAVE_NEXTAFTERF 1 +#define HAVE_SINL 1 +#define HAVE_COSL 1 +#define HAVE_TANL 1 +#define HAVE_SINHL 1 +#define HAVE_COSHL 1 +#define HAVE_TANHL 1 +#define HAVE_FABSL 1 +#define HAVE_FLOORL 1 +#define HAVE_CEILL 1 +#define HAVE_RINTL 1 +#define HAVE_TRUNCL 1 +#define HAVE_SQRTL 1 +#define HAVE_LOG10L 1 +#define HAVE_LOGL 1 +#define HAVE_LOG1PL 1 +#define HAVE_EXPL 1 +#define HAVE_EXPM1L 1 +#define HAVE_ASINL 1 +#define HAVE_ACOSL 1 +#define HAVE_ATANL 1 +#define HAVE_ASINHL 1 +#define HAVE_ACOSHL 1 +#define HAVE_ATANHL 1 +#define HAVE_HYPOTL 1 +#define HAVE_ATAN2L 1 +#define HAVE_POWL 1 +#define HAVE_FMODL 1 +#define HAVE_MODFL 1 +#define HAVE_FREXPL 1 +#define HAVE_LDEXPL 1 +#define HAVE_EXP2L 1 +#define HAVE_LOG2L 1 +#define HAVE_COPYSIGNL 1 +#define HAVE_NEXTAFTERL 1 +#define HAVE_DECL_SIGNBIT +#define HAVE_COMPLEX_H 1 +#define HAVE_CREAL 1 +#define HAVE_CIMAG 1 +#define HAVE_CABS 1 +#define HAVE_CARG 1 +#define HAVE_CEXP 1 +#define HAVE_CSQRT 1 +#define HAVE_CLOG 1 +#define HAVE_CCOS 1 +#define HAVE_CSIN 1 +#define HAVE_CPOW 1 +#define HAVE_CREALF 1 +#define HAVE_CIMAGF 1 +#define HAVE_CABSF 1 +#define HAVE_CARGF 1 +#define HAVE_CEXPF 1 +#define HAVE_CSQRTF 1 +#define HAVE_CLOGF 1 +#define HAVE_CCOSF 1 +#define HAVE_CSINF 1 +#define HAVE_CPOWF 1 +#define HAVE_CREALL 1 +#define HAVE_CIMAGL 1 +#define HAVE_CABSL 1 +#define HAVE_CARGL 1 +#define HAVE_CEXPL 1 +#define HAVE_CSQRTL 1 +#define HAVE_CLOGL 1 +#define HAVE_CCOSL 1 +#define HAVE_CSINL 1 +#define HAVE_CPOWL 1 +#define HAVE_LDOUBLE_IEEE_QUAD_LE 1 +#ifndef __cplusplus +/* #undef inline */ +#endif + +#ifndef _NPY_NPY_CONFIG_H_ +#error config.h should never be included directly, include npy_config.h instead 
+#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64eb/_numpyconfig.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64eb/_numpyconfig.h new file mode 100644 index 000000000..debb39009 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64eb/_numpyconfig.h @@ -0,0 +1,32 @@ +#define NPY_HAVE_ENDIAN_H 1 +#define NPY_SIZEOF_SHORT SIZEOF_SHORT +#define NPY_SIZEOF_INT SIZEOF_INT +#define NPY_SIZEOF_LONG SIZEOF_LONG +#define NPY_SIZEOF_FLOAT 4 +#define NPY_SIZEOF_COMPLEX_FLOAT 8 +#define NPY_SIZEOF_DOUBLE 8 +#define NPY_SIZEOF_COMPLEX_DOUBLE 16 +#define NPY_SIZEOF_LONGDOUBLE 16 +#define NPY_SIZEOF_COMPLEX_LONGDOUBLE 32 +#define NPY_ENABLE_SEPARATE_COMPILATION 1 +#define NPY_SIZEOF_PY_INTPTR_T 8 +#define NPY_SIZEOF_PY_LONG_LONG 8 +#define NPY_SIZEOF_LONGLONG 8 +#define NPY_SIZEOF_OFF_T 8 +#define NPY_NO_SMP 0 +#define NPY_HAVE_DECL_ISNAN +#define NPY_HAVE_DECL_ISINF +#define NPY_HAVE_DECL_ISFINITE +#define NPY_HAVE_DECL_SIGNBIT +#define NPY_USE_C99_COMPLEX 1 +#define NPY_HAVE_COMPLEX_DOUBLE 1 +#define NPY_HAVE_COMPLEX_FLOAT 1 +#define NPY_HAVE_COMPLEX_LONG_DOUBLE 1 +#define NPY_USE_C99_FORMATS 1 +#define NPY_VISIBILITY_HIDDEN __attribute__((visibility("hidden"))) +#define NPY_ABI_VERSION 0x01000009 +#define NPY_API_VERSION 0x0000000A + +#ifndef __STDC_FORMAT_MACROS +#define __STDC_FORMAT_MACROS 1 +#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64eb/config.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64eb/config.h new file mode 100644 index 000000000..c30b868f2 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64eb/config.h @@ -0,0 +1,139 @@ +#define HAVE_ENDIAN_H 1 +#define SIZEOF_PY_INTPTR_T 8 +#define SIZEOF_PY_LONG_LONG 8 +#define MATHLIB m +#define HAVE_SIN 1 +#define HAVE_COS 1 +#define HAVE_TAN 1 +#define HAVE_SINH 1 +#define HAVE_COSH 1 +#define HAVE_TANH 1 +#define HAVE_FABS 1 +#define HAVE_FLOOR 1 +#define HAVE_CEIL 1 +#define HAVE_SQRT 1 +#define HAVE_LOG10 1 +#define HAVE_LOG 1 +#define HAVE_EXP 1 +#define HAVE_ASIN 1 +#define HAVE_ACOS 1 +#define HAVE_ATAN 1 +#define HAVE_FMOD 1 +#define HAVE_MODF 1 +#define HAVE_FREXP 1 +#define HAVE_LDEXP 1 +#define HAVE_RINT 1 +#define HAVE_TRUNC 1 +#define HAVE_EXP2 1 +#define HAVE_LOG2 1 +#define HAVE_ATAN2 1 +#define HAVE_POW 1 +#define HAVE_NEXTAFTER 1 +#define HAVE_SINF 1 +#define HAVE_COSF 1 +#define HAVE_TANF 1 +#define HAVE_SINHF 1 +#define HAVE_COSHF 1 +#define HAVE_TANHF 1 +#define HAVE_FABSF 1 +#define HAVE_FLOORF 1 +#define HAVE_CEILF 1 +#define HAVE_RINTF 1 +#define HAVE_TRUNCF 1 +#define HAVE_SQRTF 1 +#define HAVE_LOG10F 1 +#define HAVE_LOGF 1 +#define HAVE_LOG1PF 1 +#define HAVE_EXPF 1 +#define HAVE_EXPM1F 1 +#define HAVE_ASINF 1 +#define HAVE_ACOSF 1 +#define HAVE_ATANF 1 +#define HAVE_ASINHF 1 +#define HAVE_ACOSHF 1 +#define HAVE_ATANHF 1 +#define HAVE_HYPOTF 1 +#define HAVE_ATAN2F 1 +#define HAVE_POWF 1 +#define HAVE_FMODF 1 +#define HAVE_MODFF 1 +#define HAVE_FREXPF 1 +#define HAVE_LDEXPF 1 +#define HAVE_EXP2F 1 +#define HAVE_LOG2F 1 +#define HAVE_COPYSIGNF 1 +#define HAVE_NEXTAFTERF 1 +#define HAVE_SINL 1 +#define HAVE_COSL 1 +#define HAVE_TANL 1 +#define HAVE_SINHL 1 +#define HAVE_COSHL 1 +#define HAVE_TANHL 1 +#define HAVE_FABSL 1 +#define HAVE_FLOORL 1 +#define HAVE_CEILL 1 +#define HAVE_RINTL 1 +#define HAVE_TRUNCL 1 +#define HAVE_SQRTL 1 +#define HAVE_LOG10L 1 +#define HAVE_LOGL 1 
+#define HAVE_LOG1PL 1 +#define HAVE_EXPL 1 +#define HAVE_EXPM1L 1 +#define HAVE_ASINL 1 +#define HAVE_ACOSL 1 +#define HAVE_ATANL 1 +#define HAVE_ASINHL 1 +#define HAVE_ACOSHL 1 +#define HAVE_ATANHL 1 +#define HAVE_HYPOTL 1 +#define HAVE_ATAN2L 1 +#define HAVE_POWL 1 +#define HAVE_FMODL 1 +#define HAVE_MODFL 1 +#define HAVE_FREXPL 1 +#define HAVE_LDEXPL 1 +#define HAVE_EXP2L 1 +#define HAVE_LOG2L 1 +#define HAVE_COPYSIGNL 1 +#define HAVE_NEXTAFTERL 1 +#define HAVE_DECL_SIGNBIT +#define HAVE_COMPLEX_H 1 +#define HAVE_CREAL 1 +#define HAVE_CIMAG 1 +#define HAVE_CABS 1 +#define HAVE_CARG 1 +#define HAVE_CEXP 1 +#define HAVE_CSQRT 1 +#define HAVE_CLOG 1 +#define HAVE_CCOS 1 +#define HAVE_CSIN 1 +#define HAVE_CPOW 1 +#define HAVE_CREALF 1 +#define HAVE_CIMAGF 1 +#define HAVE_CABSF 1 +#define HAVE_CARGF 1 +#define HAVE_CEXPF 1 +#define HAVE_CSQRTF 1 +#define HAVE_CLOGF 1 +#define HAVE_CCOSF 1 +#define HAVE_CSINF 1 +#define HAVE_CPOWF 1 +#define HAVE_CREALL 1 +#define HAVE_CIMAGL 1 +#define HAVE_CABSL 1 +#define HAVE_CARGL 1 +#define HAVE_CEXPL 1 +#define HAVE_CSQRTL 1 +#define HAVE_CLOGL 1 +#define HAVE_CCOSL 1 +#define HAVE_CSINL 1 +#define HAVE_CPOWL 1 +#define HAVE_LDOUBLE_IEEE_QUAD_LE 1 +#ifndef __cplusplus +/* #undef inline */ +#endif + +#ifndef _NPY_NPY_CONFIG_H_ +#error config.h should never be included directly, include npy_config.h instead +#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64el/_numpyconfig.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64el/_numpyconfig.h new file mode 100644 index 000000000..debb39009 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64el/_numpyconfig.h @@ -0,0 +1,32 @@ +#define NPY_HAVE_ENDIAN_H 1 +#define NPY_SIZEOF_SHORT SIZEOF_SHORT +#define NPY_SIZEOF_INT SIZEOF_INT +#define NPY_SIZEOF_LONG SIZEOF_LONG +#define NPY_SIZEOF_FLOAT 4 +#define NPY_SIZEOF_COMPLEX_FLOAT 8 +#define NPY_SIZEOF_DOUBLE 8 +#define NPY_SIZEOF_COMPLEX_DOUBLE 16 +#define NPY_SIZEOF_LONGDOUBLE 16 +#define NPY_SIZEOF_COMPLEX_LONGDOUBLE 32 +#define NPY_ENABLE_SEPARATE_COMPILATION 1 +#define NPY_SIZEOF_PY_INTPTR_T 8 +#define NPY_SIZEOF_PY_LONG_LONG 8 +#define NPY_SIZEOF_LONGLONG 8 +#define NPY_SIZEOF_OFF_T 8 +#define NPY_NO_SMP 0 +#define NPY_HAVE_DECL_ISNAN +#define NPY_HAVE_DECL_ISINF +#define NPY_HAVE_DECL_ISFINITE +#define NPY_HAVE_DECL_SIGNBIT +#define NPY_USE_C99_COMPLEX 1 +#define NPY_HAVE_COMPLEX_DOUBLE 1 +#define NPY_HAVE_COMPLEX_FLOAT 1 +#define NPY_HAVE_COMPLEX_LONG_DOUBLE 1 +#define NPY_USE_C99_FORMATS 1 +#define NPY_VISIBILITY_HIDDEN __attribute__((visibility("hidden"))) +#define NPY_ABI_VERSION 0x01000009 +#define NPY_API_VERSION 0x0000000A + +#ifndef __STDC_FORMAT_MACROS +#define __STDC_FORMAT_MACROS 1 +#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64el/config.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64el/config.h new file mode 100644 index 000000000..48727039a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarchn64el/config.h @@ -0,0 +1,138 @@ +#define SIZEOF_PY_INTPTR_T 8 +#define SIZEOF_PY_LONG_LONG 8 +#define MATHLIB m +#define HAVE_SIN 1 +#define HAVE_COS 1 +#define HAVE_TAN 1 +#define HAVE_SINH 1 +#define HAVE_COSH 1 +#define HAVE_TANH 1 +#define HAVE_FABS 1 +#define HAVE_FLOOR 1 +#define HAVE_CEIL 1 +#define HAVE_SQRT 1 +#define HAVE_LOG10 1 +#define HAVE_LOG 1 +#define HAVE_EXP 1 +#define HAVE_ASIN 1 +#define 
HAVE_ACOS 1 +#define HAVE_ATAN 1 +#define HAVE_FMOD 1 +#define HAVE_MODF 1 +#define HAVE_FREXP 1 +#define HAVE_LDEXP 1 +#define HAVE_RINT 1 +#define HAVE_TRUNC 1 +#define HAVE_EXP2 1 +#define HAVE_LOG2 1 +#define HAVE_ATAN2 1 +#define HAVE_POW 1 +#define HAVE_NEXTAFTER 1 +#define HAVE_SINF 1 +#define HAVE_COSF 1 +#define HAVE_TANF 1 +#define HAVE_SINHF 1 +#define HAVE_COSHF 1 +#define HAVE_TANHF 1 +#define HAVE_FABSF 1 +#define HAVE_FLOORF 1 +#define HAVE_CEILF 1 +#define HAVE_RINTF 1 +#define HAVE_TRUNCF 1 +#define HAVE_SQRTF 1 +#define HAVE_LOG10F 1 +#define HAVE_LOGF 1 +#define HAVE_LOG1PF 1 +#define HAVE_EXPF 1 +#define HAVE_EXPM1F 1 +#define HAVE_ASINF 1 +#define HAVE_ACOSF 1 +#define HAVE_ATANF 1 +#define HAVE_ASINHF 1 +#define HAVE_ACOSHF 1 +#define HAVE_ATANHF 1 +#define HAVE_HYPOTF 1 +#define HAVE_ATAN2F 1 +#define HAVE_POWF 1 +#define HAVE_FMODF 1 +#define HAVE_MODFF 1 +#define HAVE_FREXPF 1 +#define HAVE_LDEXPF 1 +#define HAVE_EXP2F 1 +#define HAVE_LOG2F 1 +#define HAVE_COPYSIGNF 1 +#define HAVE_NEXTAFTERF 1 +#define HAVE_SINL 1 +#define HAVE_COSL 1 +#define HAVE_TANL 1 +#define HAVE_SINHL 1 +#define HAVE_COSHL 1 +#define HAVE_TANHL 1 +#define HAVE_FABSL 1 +#define HAVE_FLOORL 1 +#define HAVE_CEILL 1 +#define HAVE_RINTL 1 +#define HAVE_TRUNCL 1 +#define HAVE_SQRTL 1 +#define HAVE_LOG10L 1 +#define HAVE_LOGL 1 +#define HAVE_LOG1PL 1 +#define HAVE_EXPL 1 +#define HAVE_EXPM1L 1 +#define HAVE_ASINL 1 +#define HAVE_ACOSL 1 +#define HAVE_ATANL 1 +#define HAVE_ASINHL 1 +#define HAVE_ACOSHL 1 +#define HAVE_ATANHL 1 +#define HAVE_HYPOTL 1 +#define HAVE_ATAN2L 1 +#define HAVE_POWL 1 +#define HAVE_FMODL 1 +#define HAVE_MODFL 1 +#define HAVE_FREXPL 1 +#define HAVE_LDEXPL 1 +#define HAVE_EXP2L 1 +#define HAVE_LOG2L 1 +#define HAVE_COPYSIGNL 1 +#define HAVE_NEXTAFTERL 1 +#define HAVE_DECL_SIGNBIT +#define HAVE_COMPLEX_H 1 +#define HAVE_CREAL 1 +#define HAVE_CIMAG 1 +#define HAVE_CABS 1 +#define HAVE_CARG 1 +#define HAVE_CEXP 1 +#define HAVE_CSQRT 1 +#define HAVE_CLOG 1 +#define HAVE_CCOS 1 +#define HAVE_CSIN 1 +#define HAVE_CPOW 1 +#define HAVE_CREALF 1 +#define HAVE_CIMAGF 1 +#define HAVE_CABSF 1 +#define HAVE_CARGF 1 +#define HAVE_CEXPF 1 +#define HAVE_CSQRTF 1 +#define HAVE_CLOGF 1 +#define HAVE_CCOSF 1 +#define HAVE_CSINF 1 +#define HAVE_CPOWF 1 +#define HAVE_CREALL 1 +#define HAVE_CIMAGL 1 +#define HAVE_CABSL 1 +#define HAVE_CARGL 1 +#define HAVE_CEXPL 1 +#define HAVE_CSQRTL 1 +#define HAVE_CLOGL 1 +#define HAVE_CCOSL 1 +#define HAVE_CSINL 1 +#define HAVE_CPOWL 1 +#define HAVE_LDOUBLE_IEEE_QUAD_LE 1 +#ifndef __cplusplus +/* #undef inline */ +#endif + +#ifndef _NPY_NPY_CONFIG_H_ +#error config.h should never be included directly, include npy_config.h instead +#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32eb/_numpyconfig.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32eb/_numpyconfig.h new file mode 100644 index 000000000..4c465c216 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32eb/_numpyconfig.h @@ -0,0 +1,32 @@ +#define NPY_HAVE_ENDIAN_H 1 +#define NPY_SIZEOF_SHORT SIZEOF_SHORT +#define NPY_SIZEOF_INT SIZEOF_INT +#define NPY_SIZEOF_LONG SIZEOF_LONG +#define NPY_SIZEOF_FLOAT 4 +#define NPY_SIZEOF_COMPLEX_FLOAT 8 +#define NPY_SIZEOF_DOUBLE 8 +#define NPY_SIZEOF_COMPLEX_DOUBLE 16 +#define NPY_SIZEOF_LONGDOUBLE 8 +#define NPY_SIZEOF_COMPLEX_LONGDOUBLE 16 +#define NPY_ENABLE_SEPARATE_COMPILATION 1 +#define NPY_SIZEOF_PY_INTPTR_T 4 +#define 
NPY_SIZEOF_PY_LONG_LONG 8 +#define NPY_SIZEOF_LONGLONG 8 +#define NPY_SIZEOF_OFF_T 8 +#define NPY_NO_SMP 0 +#define NPY_HAVE_DECL_ISNAN +#define NPY_HAVE_DECL_ISINF +#define NPY_HAVE_DECL_ISFINITE +#define NPY_HAVE_DECL_SIGNBIT +#define NPY_USE_C99_COMPLEX 1 +#define NPY_HAVE_COMPLEX_DOUBLE 1 +#define NPY_HAVE_COMPLEX_FLOAT 1 +#define NPY_HAVE_COMPLEX_LONG_DOUBLE 1 +#define NPY_USE_C99_FORMATS 1 +#define NPY_VISIBILITY_HIDDEN __attribute__((visibility("hidden"))) +#define NPY_ABI_VERSION 0x01000009 +#define NPY_API_VERSION 0x0000000A + +#ifndef __STDC_FORMAT_MACROS +#define __STDC_FORMAT_MACROS 1 +#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32eb/config.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32eb/config.h new file mode 100644 index 000000000..2f6135adc --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32eb/config.h @@ -0,0 +1,139 @@ +#define HAVE_ENDIAN_H 1 +#define SIZEOF_PY_INTPTR_T 4 +#define SIZEOF_PY_LONG_LONG 8 +#define MATHLIB m +#define HAVE_SIN 1 +#define HAVE_COS 1 +#define HAVE_TAN 1 +#define HAVE_SINH 1 +#define HAVE_COSH 1 +#define HAVE_TANH 1 +#define HAVE_FABS 1 +#define HAVE_FLOOR 1 +#define HAVE_CEIL 1 +#define HAVE_SQRT 1 +#define HAVE_LOG10 1 +#define HAVE_LOG 1 +#define HAVE_EXP 1 +#define HAVE_ASIN 1 +#define HAVE_ACOS 1 +#define HAVE_ATAN 1 +#define HAVE_FMOD 1 +#define HAVE_MODF 1 +#define HAVE_FREXP 1 +#define HAVE_LDEXP 1 +#define HAVE_RINT 1 +#define HAVE_TRUNC 1 +#define HAVE_EXP2 1 +#define HAVE_LOG2 1 +#define HAVE_ATAN2 1 +#define HAVE_POW 1 +#define HAVE_NEXTAFTER 1 +#define HAVE_SINF 1 +#define HAVE_COSF 1 +#define HAVE_TANF 1 +#define HAVE_SINHF 1 +#define HAVE_COSHF 1 +#define HAVE_TANHF 1 +#define HAVE_FABSF 1 +#define HAVE_FLOORF 1 +#define HAVE_CEILF 1 +#define HAVE_RINTF 1 +#define HAVE_TRUNCF 1 +#define HAVE_SQRTF 1 +#define HAVE_LOG10F 1 +#define HAVE_LOGF 1 +#define HAVE_LOG1PF 1 +#define HAVE_EXPF 1 +#define HAVE_EXPM1F 1 +#define HAVE_ASINF 1 +#define HAVE_ACOSF 1 +#define HAVE_ATANF 1 +#define HAVE_ASINHF 1 +#define HAVE_ACOSHF 1 +#define HAVE_ATANHF 1 +#define HAVE_HYPOTF 1 +#define HAVE_ATAN2F 1 +#define HAVE_POWF 1 +#define HAVE_FMODF 1 +#define HAVE_MODFF 1 +#define HAVE_FREXPF 1 +#define HAVE_LDEXPF 1 +#define HAVE_EXP2F 1 +#define HAVE_LOG2F 1 +#define HAVE_COPYSIGNF 1 +#define HAVE_NEXTAFTERF 1 +#define HAVE_SINL 1 +#define HAVE_COSL 1 +#define HAVE_TANL 1 +#define HAVE_SINHL 1 +#define HAVE_COSHL 1 +#define HAVE_TANHL 1 +#define HAVE_FABSL 1 +#define HAVE_FLOORL 1 +#define HAVE_CEILL 1 +#define HAVE_RINTL 1 +#define HAVE_TRUNCL 1 +#define HAVE_SQRTL 1 +#define HAVE_LOG10L 1 +#define HAVE_LOGL 1 +#define HAVE_LOG1PL 1 +#define HAVE_EXPL 1 +#define HAVE_EXPM1L 1 +#define HAVE_ASINL 1 +#define HAVE_ACOSL 1 +#define HAVE_ATANL 1 +#define HAVE_ASINHL 1 +#define HAVE_ACOSHL 1 +#define HAVE_ATANHL 1 +#define HAVE_HYPOTL 1 +#define HAVE_ATAN2L 1 +#define HAVE_POWL 1 +#define HAVE_FMODL 1 +#define HAVE_MODFL 1 +#define HAVE_FREXPL 1 +#define HAVE_LDEXPL 1 +#define HAVE_EXP2L 1 +#define HAVE_LOG2L 1 +#define HAVE_COPYSIGNL 1 +#define HAVE_NEXTAFTERL 1 +#define HAVE_DECL_SIGNBIT +#define HAVE_COMPLEX_H 1 +#define HAVE_CREAL 1 +#define HAVE_CIMAG 1 +#define HAVE_CABS 1 +#define HAVE_CARG 1 +#define HAVE_CEXP 1 +#define HAVE_CSQRT 1 +#define HAVE_CLOG 1 +#define HAVE_CCOS 1 +#define HAVE_CSIN 1 +#define HAVE_CPOW 1 +#define HAVE_CREALF 1 +#define HAVE_CIMAGF 1 +#define HAVE_CABSF 1 +#define HAVE_CARGF 1 +#define 
HAVE_CEXPF 1 +#define HAVE_CSQRTF 1 +#define HAVE_CLOGF 1 +#define HAVE_CCOSF 1 +#define HAVE_CSINF 1 +#define HAVE_CPOWF 1 +#define HAVE_CREALL 1 +#define HAVE_CIMAGL 1 +#define HAVE_CABSL 1 +#define HAVE_CARGL 1 +#define HAVE_CEXPL 1 +#define HAVE_CSQRTL 1 +#define HAVE_CLOGL 1 +#define HAVE_CCOSL 1 +#define HAVE_CSINL 1 +#define HAVE_CPOWL 1 +#define HAVE_LDOUBLE_IEEE_DOUBLE_BE 1 +#ifndef __cplusplus +/* #undef inline */ +#endif + +#ifndef _NPY_NPY_CONFIG_H_ +#error config.h should never be included directly, include npy_config.h instead +#endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32el/config.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32el/config.h new file mode 100644 index 000000000..17ef186d5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32el/config.h @@ -0,0 +1,21 @@ +/* ./src.linux-i686-2.5/numpy/core/include/numpy/config.h */ +/* #define SIZEOF_SHORT 2 */ +/* #define SIZEOF_INT 4 */ +/* #define SIZEOF_LONG 4 */ +/* #define SIZEOF_FLOAT 4 */ +/* #define SIZEOF_DOUBLE 8 */ +#define SIZEOF_LONG_DOUBLE 12 +#define SIZEOF_PY_INTPTR_T 4 +/* #define SIZEOF_LONG_LONG 8 */ +#define SIZEOF_PY_LONG_LONG 8 +/* #define CHAR_BIT 8 */ +#define MATHLIB m +#define HAVE_FLOAT_FUNCS +#define HAVE_LOG1P +#define HAVE_EXPM1 +#define HAVE_INVERSE_HYPERBOLIC +#define HAVE_INVERSE_HYPERBOLIC_FLOAT +#define HAVE_ISNAN +#define HAVE_ISINF +#define HAVE_RINT + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32el/numpyconfig.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32el/numpyconfig.h new file mode 100644 index 000000000..0b7cd51af --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsarcho32el/numpyconfig.h @@ -0,0 +1,18 @@ +/* cat ./src.linux-i686-2.5/numpy/core/include/numpy/numpyconfig.h */ +/* + * * This file is generated by numpy/core/setup.pyc. 
DO NOT EDIT + * */ +#define NPY_SIZEOF_SHORT 2 +#define NPY_SIZEOF_INT 4 +#define NPY_SIZEOF_LONG 4 +#define NPY_SIZEOF_FLOAT 4 +#define NPY_SIZEOF_DOUBLE 8 +#define NPY_SIZEOF_LONGDOUBLE 12 +#define NPY_SIZEOF_PY_INTPTR_T 4 +#define NPY_NO_SMP 0 + +#define NPY_SIZEOF_LONGLONG 8 +#define NPY_SIZEOF_PY_LONG_LONG 8 +#define NPY_SIZEOF_OFF_T 8 +/* #define CHAR_BIT 8 */ + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsel/config.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsel/config.h deleted file mode 100644 index 17ef186d5..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsel/config.h +++ /dev/null @@ -1,21 +0,0 @@ -/* ./src.linux-i686-2.5/numpy/core/include/numpy/config.h */ -/* #define SIZEOF_SHORT 2 */ -/* #define SIZEOF_INT 4 */ -/* #define SIZEOF_LONG 4 */ -/* #define SIZEOF_FLOAT 4 */ -/* #define SIZEOF_DOUBLE 8 */ -#define SIZEOF_LONG_DOUBLE 12 -#define SIZEOF_PY_INTPTR_T 4 -/* #define SIZEOF_LONG_LONG 8 */ -#define SIZEOF_PY_LONG_LONG 8 -/* #define CHAR_BIT 8 */ -#define MATHLIB m -#define HAVE_FLOAT_FUNCS -#define HAVE_LOG1P -#define HAVE_EXPM1 -#define HAVE_INVERSE_HYPERBOLIC -#define HAVE_INVERSE_HYPERBOLIC_FLOAT -#define HAVE_ISNAN -#define HAVE_ISINF -#define HAVE_RINT - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsel/numpyconfig.h b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsel/numpyconfig.h deleted file mode 100644 index 0b7cd51af..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/files/mipsel/numpyconfig.h +++ /dev/null @@ -1,18 +0,0 @@ -/* cat ./src.linux-i686-2.5/numpy/core/include/numpy/numpyconfig.h */ -/* - * * This file is generated by numpy/core/setup.pyc. 
DO NOT EDIT - * */ -#define NPY_SIZEOF_SHORT 2 -#define NPY_SIZEOF_INT 4 -#define NPY_SIZEOF_LONG 4 -#define NPY_SIZEOF_FLOAT 4 -#define NPY_SIZEOF_DOUBLE 8 -#define NPY_SIZEOF_LONGDOUBLE 12 -#define NPY_SIZEOF_PY_INTPTR_T 4 -#define NPY_NO_SMP 0 - -#define NPY_SIZEOF_LONGLONG 8 -#define NPY_SIZEOF_PY_LONG_LONG 8 -#define NPY_SIZEOF_OFF_T 8 -/* #define CHAR_BIT 8 */ - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python-numpy_1.11.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python-numpy_1.11.1.bb deleted file mode 100644 index c94f5c3d1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python-numpy_1.11.1.bb +++ /dev/null @@ -1,105 +0,0 @@ -SUMMARY = "A sophisticated Numeric Processing Package for Python" -SECTION = "devel/python" -LICENSE = "PSF" -LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7e51a5677b22b865abbfb3dff6ffb2d0" - -SRCNAME = "numpy" - -SRC_URI = "https://files.pythonhosted.org/packages/source/n/${SRCNAME}/${SRCNAME}-${PV}.tar.gz \ - file://0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch \ - file://remove-build-path-in-comments.patch \ - file://fix_shebang_f2py.patch \ - file://d70d37b7c4aa2af3fe879a0d858c54f2aa32a725.patch \ - ${CONFIGFILESURI} " -UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/numpy/files/" - -CONFIGFILESURI ?= "" - -CONFIGFILESURI_aarch64 = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_arm = " \ - file://config.h \ - file://numpyconfig.h \ -" -CONFIGFILESURI_armeb = " \ - file://config.h \ - file://numpyconfig.h \ -" -CONFIGFILESURI_mipsel = " \ - file://config.h \ - file://numpyconfig.h \ -" -CONFIGFILESURI_x86 = " \ - file://config.h \ - file://numpyconfig.h \ -" -CONFIGFILESURI_x86-64 = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_mips = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_powerpc = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_powerpc64 = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_mips64 = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_mips64n32 = " \ - file://config.h \ - file://_numpyconfig.h \ -" - -S = "${WORKDIR}/numpy-${PV}" - -inherit setuptools - -# Make the build fail and replace *config.h with proper one -# This is a ugly, ugly hack - Koen -do_compile_prepend_class-target() { - ${STAGING_BINDIR_NATIVE}/python-native/python setup.py build ${DISTUTILS_BUILD_ARGS} || \ - true - cp ${WORKDIR}/*config.h ${S}/build/$(ls ${S}/build | grep src)/numpy/core/include/numpy/ -} - -FILES_${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/numpy/core/lib/*.a" - -SRC_URI[md5sum] = "2f44a895a8104ffac140c3a70edbd450" -SRC_URI[sha256sum] = "dc4082c43979cc856a2bf352a8297ea109ccb3244d783ae067eb2ee5b0d577cd" - -# install what is needed for numpy.test() -RDEPENDS_${PN} = "python-unittest \ - python-difflib \ - python-pprint \ - python-pickle \ - python-shell \ - python-nose \ - python-doctest \ - python-datetime \ - python-distutils \ - python-misc \ - python-mmap \ - python-netclient \ - python-numbers \ - python-pydoc \ - python-pkgutil \ - python-email \ - python-subprocess \ - python-compression \ - python-ctypes \ - python-threading \ -" - -RDEPENDS_${PN}_class-native = "" - -BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python-numpy_1.11.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python-numpy_1.11.2.bb new file mode 100644 index 
000000000..04c96d7b5 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python-numpy_1.11.2.bb @@ -0,0 +1,113 @@ +SUMMARY = "A sophisticated Numeric Processing Package for Python" +SECTION = "devel/python" +LICENSE = "PSF" +LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7e51a5677b22b865abbfb3dff6ffb2d0" + +SRCNAME = "numpy" + +SRC_URI = "https://files.pythonhosted.org/packages/source/n/${SRCNAME}/${SRCNAME}-${PV}.tar.gz \ + file://0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch \ + file://remove-build-path-in-comments.patch \ + file://fix_shebang_f2py.patch \ + file://d70d37b7c4aa2af3fe879a0d858c54f2aa32a725.patch \ + ${CONFIGFILESURI} " +UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/numpy/files/" + +CONFIGFILESURI ?= "" + +CONFIGFILESURI_aarch64 = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_arm = " \ + file://config.h \ + file://numpyconfig.h \ +" +CONFIGFILESURI_armeb = " \ + file://config.h \ + file://numpyconfig.h \ +" +CONFIGFILESURI_mipsarcho32el = " \ + file://config.h \ + file://numpyconfig.h \ +" +CONFIGFILESURI_x86 = " \ + file://config.h \ + file://numpyconfig.h \ +" +CONFIGFILESURI_x86-64 = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_mipsarcho32eb = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_powerpc = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_powerpc64 = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_mipsarchn64eb = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_mipsarchn64el = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_mipsarchn32eb = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_mipsarchn32el = " \ + file://config.h \ + file://_numpyconfig.h \ +" + +S = "${WORKDIR}/numpy-${PV}" + +inherit setuptools + +# Make the build fail and replace *config.h with proper one +# This is a ugly, ugly hack - Koen +do_compile_prepend_class-target() { + ${STAGING_BINDIR_NATIVE}/python-native/python setup.py build ${DISTUTILS_BUILD_ARGS} || \ + true + cp ${WORKDIR}/*config.h ${S}/build/$(ls ${S}/build | grep src)/numpy/core/include/numpy/ +} + +FILES_${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/numpy/core/lib/*.a" + +SRC_URI[md5sum] = "03bd7927c314c43780271bf1ab795ebc" +SRC_URI[sha256sum] = "04db2fbd64e2e7c68e740b14402b25af51418fc43a59d9e54172b38b906b0f69" + +# install what is needed for numpy.test() +RDEPENDS_${PN} = "python-unittest \ + python-difflib \ + python-pprint \ + python-pickle \ + python-shell \ + python-nose \ + python-doctest \ + python-datetime \ + python-distutils \ + python-misc \ + python-mmap \ + python-netclient \ + python-numbers \ + python-pydoc \ + python-pkgutil \ + python-email \ + python-subprocess \ + python-compression \ + python-ctypes \ + python-threading \ +" + +RDEPENDS_${PN}_class-native = "" + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python3-numpy_1.11.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python3-numpy_1.11.0.bb deleted file mode 100644 index 8b502febc..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python3-numpy_1.11.0.bb +++ /dev/null @@ -1,106 +0,0 @@ -SUMMARY = "A sophisticated Numeric Processing Package for Python" -SECTION = "devel/python" -LICENSE = "PSF" -LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7e51a5677b22b865abbfb3dff6ffb2d0" - -SRCNAME = "numpy" - -SRC_URI = 
"https://files.pythonhosted.org/packages/source/n/${SRCNAME}/${SRCNAME}-${PV}.tar.gz \ - file://0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch \ - file://remove-build-path-in-comments.patch \ - file://fix_shebang_f2py.patch \ - file://d70d37b7c4aa2af3fe879a0d858c54f2aa32a725.patch \ - ${CONFIGFILESURI} " -UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/numpy/files/" - -CONFIGFILESURI ?= "" - -CONFIGFILESURI_aarch64 = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_arm = " \ - file://config.h \ - file://numpyconfig.h \ -" -CONFIGFILESURI_armeb = " \ - file://config.h \ - file://numpyconfig.h \ -" -CONFIGFILESURI_mipsel = " \ - file://config.h \ - file://numpyconfig.h \ -" -CONFIGFILESURI_x86 = " \ - file://config.h \ - file://numpyconfig.h \ -" -CONFIGFILESURI_x86-64 = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_mips = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_powerpc = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_powerpc64 = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_mips64 = " \ - file://config.h \ - file://_numpyconfig.h \ -" -CONFIGFILESURI_mips64n32 = " \ - file://config.h \ - file://_numpyconfig.h \ -" - -S = "${WORKDIR}/numpy-${PV}" - -inherit setuptools3 - -# Make the build fail and replace *config.h with proper one -# This is a ugly, ugly hack - Koen -do_compile_prepend_class-target() { - ${STAGING_BINDIR_NATIVE}/python3-native/python3 setup.py build ${DISTUTILS_BUILD_ARGS} || \ - true - cp ${WORKDIR}/*config.h ${S}/build/$(ls ${S}/build | grep src)/numpy/core/include/numpy/ -} - -FILES_${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/numpy/core/lib/*.a" - -SRC_URI[md5sum] = "bc56fb9fc2895aa4961802ffbdb31d0b" -SRC_URI[sha256sum] = "a1d1268d200816bfb9727a7a27b78d8e37ecec2e4d5ebd33eb64e2789e0db43e" - -# install what is needed for numpy.test() -RDEPENDS_${PN} = "python3-unittest \ - python3-difflib \ - python3-pprint \ - python3-pickle \ - python3-shell \ - python3-nose \ - python3-doctest \ - python3-datetime \ - python3-distutils \ - python3-misc \ - python3-mmap \ - python3-netclient \ - python3-numbers \ - python3-pydoc \ - python3-pkgutil \ - python3-email \ - python3-subprocess \ - python3-compression \ - python3-ctypes \ - python3-threading \ - python3-textutils \ -" - -RDEPENDS_${PN}_class-native = "" - -BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python3-numpy_1.11.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python3-numpy_1.11.2.bb new file mode 100644 index 000000000..8f9665f92 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python-numpy/python3-numpy_1.11.2.bb @@ -0,0 +1,114 @@ +SUMMARY = "A sophisticated Numeric Processing Package for Python" +SECTION = "devel/python" +LICENSE = "PSF" +LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=7e51a5677b22b865abbfb3dff6ffb2d0" + +SRCNAME = "numpy" + +SRC_URI = "https://files.pythonhosted.org/packages/source/n/${SRCNAME}/${SRCNAME}-${PV}.tar.gz \ + file://0001-Don-t-search-usr-and-so-on-for-libraries-by-default-.patch \ + file://remove-build-path-in-comments.patch \ + file://fix_shebang_f2py.patch \ + file://d70d37b7c4aa2af3fe879a0d858c54f2aa32a725.patch \ + ${CONFIGFILESURI} " +UPSTREAM_CHECK_URI = "https://sourceforge.net/projects/numpy/files/" + +CONFIGFILESURI ?= "" + +CONFIGFILESURI_aarch64 = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_arm = " \ + file://config.h 
\ + file://numpyconfig.h \ +" +CONFIGFILESURI_armeb = " \ + file://config.h \ + file://numpyconfig.h \ +" +CONFIGFILESURI_mipsarcho32el = " \ + file://config.h \ + file://numpyconfig.h \ +" +CONFIGFILESURI_x86 = " \ + file://config.h \ + file://numpyconfig.h \ +" +CONFIGFILESURI_x86-64 = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_mipsarcho32eb = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_powerpc = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_powerpc64 = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_mipsarchn64eb = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_mipsarchn64el = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_mipsarchn32eb = " \ + file://config.h \ + file://_numpyconfig.h \ +" +CONFIGFILESURI_mipsarchn32el = " \ + file://config.h \ + file://_numpyconfig.h \ +" + +S = "${WORKDIR}/numpy-${PV}" + +inherit setuptools3 + +# Make the build fail and replace *config.h with proper one +# This is a ugly, ugly hack - Koen +do_compile_prepend_class-target() { + ${STAGING_BINDIR_NATIVE}/python3-native/python3 setup.py build ${DISTUTILS_BUILD_ARGS} || \ + true + cp ${WORKDIR}/*config.h ${S}/build/$(ls ${S}/build | grep src)/numpy/core/include/numpy/ +} + +FILES_${PN}-staticdev += "${PYTHON_SITEPACKAGES_DIR}/numpy/core/lib/*.a" + +SRC_URI[md5sum] = "03bd7927c314c43780271bf1ab795ebc" +SRC_URI[sha256sum] = "04db2fbd64e2e7c68e740b14402b25af51418fc43a59d9e54172b38b906b0f69" + +# install what is needed for numpy.test() +RDEPENDS_${PN} = "python3-unittest \ + python3-difflib \ + python3-pprint \ + python3-pickle \ + python3-shell \ + python3-nose \ + python3-doctest \ + python3-datetime \ + python3-distutils \ + python3-misc \ + python3-mmap \ + python3-netclient \ + python3-numbers \ + python3-pydoc \ + python3-pkgutil \ + python3-email \ + python3-subprocess \ + python3-compression \ + python3-ctypes \ + python3-threading \ + python3-textutils \ +" + +RDEPENDS_${PN}_class-native = "" + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-2.7-manifest.inc b/import-layers/yocto-poky/meta/recipes-devtools/python/python-2.7-manifest.inc index 621024f6a..7ed254bbd 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-2.7-manifest.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-2.7-manifest.inc @@ -1,7 +1,6 @@ # WARNING: This file is AUTO GENERATED: Manual edits will be lost next time I regenerate the file. 
# Generator: '../../../scripts/contrib/python/generate-manifest-2.7.py' Version 20110222.2 (C) 2002-2010 Michael 'Mickey' Lauer -# Visit the Python for Embedded Systems Site => http://www.Vanille.de/projects/python.spy @@ -47,7 +46,7 @@ FILES_${PN}-contextlib="${libdir}/python${PYTHON_MAJMIN}/contextlib.* " SUMMARY_${PN}-core="Python interpreter and core modules" RDEPENDS_${PN}-core="${PN}-lang ${PN}-re" -FILES_${PN}-core="${libdir}/python2.7/__future__.* ${libdir}/python2.7/_abcoll.* ${libdir}/python2.7/abc.* ${libdir}/python2.7/ast.* ${libdir}/python2.7/copy.* ${libdir}/python2.7/copy_reg.* ${libdir}/python2.7/ConfigParser.* ${libdir}/python2.7/genericpath.* ${libdir}/python2.7/getopt.* ${libdir}/python2.7/linecache.* ${libdir}/python2.7/new.* ${libdir}/python2.7/os.* ${libdir}/python2.7/posixpath.* ${libdir}/python2.7/struct.* ${libdir}/python2.7/warnings.* ${libdir}/python2.7/site.* ${libdir}/python2.7/stat.* ${libdir}/python2.7/UserDict.* ${libdir}/python2.7/UserList.* ${libdir}/python2.7/UserString.* ${libdir}/python2.7/lib-dynload/binascii.so ${libdir}/python2.7/lib-dynload/_struct.so ${libdir}/python2.7/lib-dynload/time.so ${libdir}/python2.7/lib-dynload/xreadlines.so ${libdir}/python2.7/types.* ${libdir}/python2.7/platform.* ${bindir}/python* ${libdir}/python2.7/_weakrefset.* ${libdir}/python2.7/sysconfig.* ${libdir}/python2.7/_sysconfigdata.* ${libdir}/python2.7/config/Makefile ${includedir}/python${PYTHON_MAJMIN}/pyconfig*.h ${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py " +FILES_${PN}-core="${libdir}/python2.7/__future__.* ${libdir}/python2.7/_abcoll.* ${libdir}/python2.7/abc.* ${libdir}/python2.7/ast.* ${libdir}/python2.7/copy.* ${libdir}/python2.7/copy_reg.* ${libdir}/python2.7/ConfigParser.* ${libdir}/python2.7/genericpath.* ${libdir}/python2.7/getopt.* ${libdir}/python2.7/linecache.* ${libdir}/python2.7/new.* ${libdir}/python2.7/os.* ${libdir}/python2.7/posixpath.* ${libdir}/python2.7/struct.* ${libdir}/python2.7/warnings.* ${libdir}/python2.7/site.* ${libdir}/python2.7/stat.* ${libdir}/python2.7/UserDict.* ${libdir}/python2.7/UserList.* ${libdir}/python2.7/UserString.* ${libdir}/python2.7/lib-dynload/binascii.so ${libdir}/python2.7/lib-dynload/_struct.so ${libdir}/python2.7/lib-dynload/time.so ${libdir}/python2.7/lib-dynload/xreadlines.so ${libdir}/python2.7/types.* ${libdir}/python2.7/platform.* ${bindir}/python* ${libdir}/python2.7/_weakrefset.* ${libdir}/python2.7/sysconfig.* ${libdir}/python2.7/_sysconfigdata.* ${includedir}/python${PYTHON_MAJMIN}/pyconfig*.h ${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py " SUMMARY_${PN}-crypt="Python basic cryptographic and hashing support" RDEPENDS_${PN}-crypt="${PN}-core" @@ -75,7 +74,7 @@ FILES_${PN}-debugger="${libdir}/python2.7/bdb.* ${libdir}/python2.7/pdb.* " SUMMARY_${PN}-dev="Python development package" RDEPENDS_${PN}-dev="${PN}-core" -FILES_${PN}-dev="${includedir} ${libdir}/lib*${SOLIBSDEV} ${libdir}/*.la ${libdir}/*.a ${libdir}/*.o ${libdir}/pkgconfig ${base_libdir}/*.a ${base_libdir}/*.o ${datadir}/aclocal ${datadir}/pkgconfig " +FILES_${PN}-dev="${includedir} ${libdir}/lib*${SOLIBSDEV} ${libdir}/*.la ${libdir}/*.a ${libdir}/*.o ${libdir}/pkgconfig ${base_libdir}/*.a ${base_libdir}/*.o ${datadir}/aclocal ${datadir}/pkgconfig ${libdir}/python2.7/config/Makefile " SUMMARY_${PN}-difflib="Python helpers for computing deltas between objects" RDEPENDS_${PN}-difflib="${PN}-lang ${PN}-re" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-3.5-manifest.inc 
b/import-layers/yocto-poky/meta/recipes-devtools/python/python-3.5-manifest.inc index 6c690db80..1e20f00c6 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-3.5-manifest.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-3.5-manifest.inc @@ -1,280 +1,283 @@ # WARNING: This file is AUTO GENERATED: Manual edits will be lost next time I regenerate the file. # Generator: '../../../scripts/contrib/python/generate-manifest-3.5.py' Version 20140131 (C) 2002-2010 Michael 'Mickey' Lauer -# Visit the Python for Embedded Systems Site => http://www.Vanille.de/projects/python.spy -PROVIDES+="${PN}-2to3 ${PN}-argparse ${PN}-asyncio ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils ${PN}-distutils-staticdev ${PN}-doctest ${PN}-email ${PN}-enum ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-reprlib ${PN}-resource ${PN}-selectors ${PN}-shell ${PN}-signal ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc " +PROVIDES+="${PN}-2to3 ${PN}-argparse ${PN}-asyncio ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils ${PN}-distutils-staticdev ${PN}-doctest ${PN}-email ${PN}-enum ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-reprlib ${PN}-resource ${PN}-selectors ${PN}-shell ${PN}-signal ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-typing ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc " -PACKAGES="${PN}-dbg ${PN}-2to3 ${PN}-argparse ${PN}-asyncio ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils-staticdev ${PN}-distutils ${PN}-doctest ${PN}-email ${PN}-enum ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-reprlib ${PN}-resource ${PN}-selectors ${PN}-shell ${PN}-signal ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc ${PN}-modules" +PACKAGES="${PN}-dbg ${PN}-2to3 ${PN}-argparse ${PN}-asyncio ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes 
${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-dev ${PN}-difflib ${PN}-distutils-staticdev ${PN}-distutils ${PN}-doctest ${PN}-email ${PN}-enum ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-reprlib ${PN}-resource ${PN}-selectors ${PN}-shell ${PN}-signal ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-typing ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc ${PN}-modules" SUMMARY_${PN}-2to3="Python automated Python 2 to 3 code translator" RDEPENDS_${PN}-2to3="${PN}-core" -FILES_${PN}-2to3="${libdir}/python3.5/lib2to3 " +FILES_${PN}-2to3="${libdir}/python3.5/lib2to3 ${libdir}/python3.5/lib2to3/__pycache__ " SUMMARY_${PN}-argparse="Python command line argument parser" RDEPENDS_${PN}-argparse="${PN}-core ${PN}-codecs ${PN}-textutils" -FILES_${PN}-argparse="${libdir}/python3.5/argparse.* " +FILES_${PN}-argparse="${libdir}/python3.5/argparse.* ${libdir}/python3.5/__pycache__/argparse.* " SUMMARY_${PN}-asyncio="Python Asynchronous I/O, event loop, coroutines and tasks" RDEPENDS_${PN}-asyncio="${PN}-core" -FILES_${PN}-asyncio="${libdir}/python3.5/asyncio " +FILES_${PN}-asyncio="${libdir}/python3.5/asyncio ${libdir}/python3.5/asyncio/__pycache__ " SUMMARY_${PN}-audio="Python Audio Handling" RDEPENDS_${PN}-audio="${PN}-core" -FILES_${PN}-audio="${libdir}/python3.5/wave.* ${libdir}/python3.5/chunk.* ${libdir}/python3.5/sndhdr.* ${libdir}/python3.5/lib-dynload/ossaudiodev.*.so ${libdir}/python3.5/lib-dynload/audioop.*.so ${libdir}/python3.5/audiodev.* ${libdir}/python3.5/sunaudio.* ${libdir}/python3.5/sunau.* ${libdir}/python3.5/toaiff.* " +FILES_${PN}-audio="${libdir}/python3.5/wave.* ${libdir}/python3.5/__pycache__/wave.* ${libdir}/python3.5/chunk.* ${libdir}/python3.5/__pycache__/chunk.* ${libdir}/python3.5/sndhdr.* ${libdir}/python3.5/__pycache__/sndhdr.* ${libdir}/python3.5/lib-dynload/ossaudiodev.*.so ${libdir}/python3.5/lib-dynload/__pycache__/ossaudiodev.*.so ${libdir}/python3.5/lib-dynload/audioop.*.so ${libdir}/python3.5/lib-dynload/__pycache__/audioop.*.so ${libdir}/python3.5/audiodev.* ${libdir}/python3.5/__pycache__/audiodev.* ${libdir}/python3.5/sunaudio.* ${libdir}/python3.5/__pycache__/sunaudio.* ${libdir}/python3.5/sunau.* ${libdir}/python3.5/__pycache__/sunau.* ${libdir}/python3.5/toaiff.* ${libdir}/python3.5/__pycache__/toaiff.* " SUMMARY_${PN}-codecs="Python codecs, encodings & i18n support" RDEPENDS_${PN}-codecs="${PN}-core ${PN}-lang" -FILES_${PN}-codecs="${libdir}/python3.5/codecs.* ${libdir}/python3.5/encodings ${libdir}/python3.5/gettext.* ${libdir}/python3.5/locale.* ${libdir}/python3.5/lib-dynload/_locale.*.so ${libdir}/python3.5/lib-dynload/_codecs* ${libdir}/python3.5/lib-dynload/_multibytecodec.*.so ${libdir}/python3.5/lib-dynload/unicodedata.*.so ${libdir}/python3.5/stringprep.* ${libdir}/python3.5/xdrlib.* " +FILES_${PN}-codecs="${libdir}/python3.5/codecs.* ${libdir}/python3.5/__pycache__/codecs.* ${libdir}/python3.5/encodings ${libdir}/python3.5/encodings/__pycache__ ${libdir}/python3.5/gettext.* ${libdir}/python3.5/__pycache__/gettext.* ${libdir}/python3.5/locale.* ${libdir}/python3.5/__pycache__/locale.* ${libdir}/python3.5/lib-dynload/_locale.*.so 
${libdir}/python3.5/lib-dynload/__pycache__/_locale.*.so ${libdir}/python3.5/lib-dynload/_codecs* ${libdir}/python3.5/lib-dynload/_codecs*/__pycache__ ${libdir}/python3.5/lib-dynload/_multibytecodec.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_multibytecodec.*.so ${libdir}/python3.5/lib-dynload/unicodedata.*.so ${libdir}/python3.5/lib-dynload/__pycache__/unicodedata.*.so ${libdir}/python3.5/stringprep.* ${libdir}/python3.5/__pycache__/stringprep.* ${libdir}/python3.5/xdrlib.* ${libdir}/python3.5/__pycache__/xdrlib.* " SUMMARY_${PN}-compile="Python bytecode compilation support" RDEPENDS_${PN}-compile="${PN}-core" -FILES_${PN}-compile="${libdir}/python3.5/py_compile.* ${libdir}/python3.5/compileall.* " +FILES_${PN}-compile="${libdir}/python3.5/py_compile.* ${libdir}/python3.5/__pycache__/py_compile.* ${libdir}/python3.5/compileall.* ${libdir}/python3.5/__pycache__/compileall.* " SUMMARY_${PN}-compression="Python high-level compression support" RDEPENDS_${PN}-compression="${PN}-core ${PN}-codecs ${PN}-importlib ${PN}-threading ${PN}-shell" -FILES_${PN}-compression="${libdir}/python3.5/gzip.* ${libdir}/python3.5/zipfile.* ${libdir}/python3.5/tarfile.* ${libdir}/python3.5/lib-dynload/bz2.*.so ${libdir}/python3.5/lib-dynload/zlib.*.so " +FILES_${PN}-compression="${libdir}/python3.5/gzip.* ${libdir}/python3.5/__pycache__/gzip.* ${libdir}/python3.5/zipfile.* ${libdir}/python3.5/__pycache__/zipfile.* ${libdir}/python3.5/tarfile.* ${libdir}/python3.5/__pycache__/tarfile.* ${libdir}/python3.5/lib-dynload/bz2.*.so ${libdir}/python3.5/lib-dynload/__pycache__/bz2.*.so ${libdir}/python3.5/lib-dynload/zlib.*.so ${libdir}/python3.5/lib-dynload/__pycache__/zlib.*.so " SUMMARY_${PN}-core="Python interpreter and core modules" RDEPENDS_${PN}-core="${PN}-lang ${PN}-re ${PN}-reprlib ${PN}-codecs ${PN}-io ${PN}-math" -FILES_${PN}-core="${libdir}/python3.5/__future__.* ${libdir}/python3.5/_abcoll.* ${libdir}/python3.5/abc.* ${libdir}/python3.5/ast.* ${libdir}/python3.5/copy.* ${libdir}/python3.5/copyreg.* ${libdir}/python3.5/configparser.* ${libdir}/python3.5/genericpath.* ${libdir}/python3.5/getopt.* ${libdir}/python3.5/linecache.* ${libdir}/python3.5/new.* ${libdir}/python3.5/os.* ${libdir}/python3.5/posixpath.* ${libdir}/python3.5/struct.* ${libdir}/python3.5/warnings.* ${libdir}/python3.5/site.* ${libdir}/python3.5/stat.* ${libdir}/python3.5/UserDict.* ${libdir}/python3.5/UserList.* ${libdir}/python3.5/UserString.* ${libdir}/python3.5/lib-dynload/binascii.*.so ${libdir}/python3.5/lib-dynload/_struct.*.so ${libdir}/python3.5/lib-dynload/time.*.so ${libdir}/python3.5/lib-dynload/xreadlines.*.so ${libdir}/python3.5/types.* ${libdir}/python3.5/platform.* ${bindir}/python* ${libdir}/python3.5/_weakrefset.* ${libdir}/python3.5/sysconfig.* ${libdir}/python3.5/_sysconfigdata.* ${libdir}/python3.5/config/Makefile ${includedir}/python${PYTHON_BINABI}/pyconfig*.h ${libdir}/python${PYTHON_MAJMIN}/collections ${libdir}/python${PYTHON_MAJMIN}/_collections_abc.* ${libdir}/python${PYTHON_MAJMIN}/_sitebuiltins.* ${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py " +FILES_${PN}-core="${libdir}/python3.5/__future__.* ${libdir}/python3.5/__pycache__/__future__.* ${libdir}/python3.5/_abcoll.* ${libdir}/python3.5/__pycache__/_abcoll.* ${libdir}/python3.5/abc.* ${libdir}/python3.5/__pycache__/abc.* ${libdir}/python3.5/ast.* ${libdir}/python3.5/__pycache__/ast.* ${libdir}/python3.5/copy.* ${libdir}/python3.5/__pycache__/copy.* ${libdir}/python3.5/copyreg.* ${libdir}/python3.5/__pycache__/copyreg.* 
${libdir}/python3.5/configparser.* ${libdir}/python3.5/__pycache__/configparser.* ${libdir}/python3.5/genericpath.* ${libdir}/python3.5/__pycache__/genericpath.* ${libdir}/python3.5/getopt.* ${libdir}/python3.5/__pycache__/getopt.* ${libdir}/python3.5/linecache.* ${libdir}/python3.5/__pycache__/linecache.* ${libdir}/python3.5/new.* ${libdir}/python3.5/__pycache__/new.* ${libdir}/python3.5/os.* ${libdir}/python3.5/__pycache__/os.* ${libdir}/python3.5/posixpath.* ${libdir}/python3.5/__pycache__/posixpath.* ${libdir}/python3.5/struct.* ${libdir}/python3.5/__pycache__/struct.* ${libdir}/python3.5/warnings.* ${libdir}/python3.5/__pycache__/warnings.* ${libdir}/python3.5/site.* ${libdir}/python3.5/__pycache__/site.* ${libdir}/python3.5/stat.* ${libdir}/python3.5/__pycache__/stat.* ${libdir}/python3.5/UserDict.* ${libdir}/python3.5/__pycache__/UserDict.* ${libdir}/python3.5/UserList.* ${libdir}/python3.5/__pycache__/UserList.* ${libdir}/python3.5/UserString.* ${libdir}/python3.5/__pycache__/UserString.* ${libdir}/python3.5/lib-dynload/binascii.*.so ${libdir}/python3.5/lib-dynload/__pycache__/binascii.*.so ${libdir}/python3.5/lib-dynload/_struct.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_struct.*.so ${libdir}/python3.5/lib-dynload/time.*.so ${libdir}/python3.5/lib-dynload/__pycache__/time.*.so ${libdir}/python3.5/lib-dynload/xreadlines.*.so ${libdir}/python3.5/lib-dynload/__pycache__/xreadlines.*.so ${libdir}/python3.5/types.* ${libdir}/python3.5/__pycache__/types.* ${libdir}/python3.5/platform.* ${libdir}/python3.5/__pycache__/platform.* ${bindir}/python* ${libdir}/python3.5/_weakrefset.* ${libdir}/python3.5/__pycache__/_weakrefset.* ${libdir}/python3.5/sysconfig.* ${libdir}/python3.5/__pycache__/sysconfig.* ${libdir}/python3.5/_sysconfigdata.* ${libdir}/python3.5/__pycache__/_sysconfigdata.* ${includedir}/python${PYTHON_BINABI}/pyconfig*.h ${libdir}/python${PYTHON_MAJMIN}/collections ${libdir}/python${PYTHON_MAJMIN}/_collections_abc.* ${libdir}/python${PYTHON_MAJMIN}/_sitebuiltins.* ${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py " SUMMARY_${PN}-crypt="Python basic cryptographic and hashing support" RDEPENDS_${PN}-crypt="${PN}-core" -FILES_${PN}-crypt="${libdir}/python3.5/hashlib.* ${libdir}/python3.5/md5.* ${libdir}/python3.5/sha.* ${libdir}/python3.5/lib-dynload/crypt.*.so ${libdir}/python3.5/lib-dynload/_hashlib.*.so ${libdir}/python3.5/lib-dynload/_sha256.*.so ${libdir}/python3.5/lib-dynload/_sha512.*.so " +FILES_${PN}-crypt="${libdir}/python3.5/hashlib.* ${libdir}/python3.5/__pycache__/hashlib.* ${libdir}/python3.5/md5.* ${libdir}/python3.5/__pycache__/md5.* ${libdir}/python3.5/sha.* ${libdir}/python3.5/__pycache__/sha.* ${libdir}/python3.5/lib-dynload/crypt.*.so ${libdir}/python3.5/lib-dynload/__pycache__/crypt.*.so ${libdir}/python3.5/lib-dynload/_hashlib.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_hashlib.*.so ${libdir}/python3.5/lib-dynload/_sha256.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_sha256.*.so ${libdir}/python3.5/lib-dynload/_sha512.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_sha512.*.so " SUMMARY_${PN}-ctypes="Python C types support" RDEPENDS_${PN}-ctypes="${PN}-core ${PN}-subprocess" -FILES_${PN}-ctypes="${libdir}/python3.5/ctypes ${libdir}/python3.5/lib-dynload/_ctypes.*.so ${libdir}/python3.5/lib-dynload/_ctypes_test.*.so " +FILES_${PN}-ctypes="${libdir}/python3.5/ctypes ${libdir}/python3.5/ctypes/__pycache__ ${libdir}/python3.5/lib-dynload/_ctypes.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_ctypes.*.so 
${libdir}/python3.5/lib-dynload/_ctypes_test.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_ctypes_test.*.so " SUMMARY_${PN}-curses="Python curses support" RDEPENDS_${PN}-curses="${PN}-core" -FILES_${PN}-curses="${libdir}/python3.5/curses ${libdir}/python3.5/lib-dynload/_curses.*.so ${libdir}/python3.5/lib-dynload/_curses_panel.*.so " +FILES_${PN}-curses="${libdir}/python3.5/curses ${libdir}/python3.5/curses/__pycache__ ${libdir}/python3.5/lib-dynload/_curses.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_curses.*.so ${libdir}/python3.5/lib-dynload/_curses_panel.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_curses_panel.*.so " SUMMARY_${PN}-datetime="Python calendar and time support" RDEPENDS_${PN}-datetime="${PN}-core ${PN}-codecs" -FILES_${PN}-datetime="${libdir}/python3.5/_strptime.* ${libdir}/python3.5/calendar.* ${libdir}/python3.5/datetime.* ${libdir}/python3.5/lib-dynload/_datetime.*.so " +FILES_${PN}-datetime="${libdir}/python3.5/_strptime.* ${libdir}/python3.5/__pycache__/_strptime.* ${libdir}/python3.5/calendar.* ${libdir}/python3.5/__pycache__/calendar.* ${libdir}/python3.5/datetime.* ${libdir}/python3.5/__pycache__/datetime.* ${libdir}/python3.5/lib-dynload/_datetime.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_datetime.*.so " SUMMARY_${PN}-db="Python file-based database support" RDEPENDS_${PN}-db="${PN}-core" -FILES_${PN}-db="${libdir}/python3.5/anydbm.* ${libdir}/python3.5/dumbdbm.* ${libdir}/python3.5/whichdb.* ${libdir}/python3.5/dbm ${libdir}/python3.5/lib-dynload/_dbm.*.so " +FILES_${PN}-db="${libdir}/python3.5/anydbm.* ${libdir}/python3.5/__pycache__/anydbm.* ${libdir}/python3.5/dumbdbm.* ${libdir}/python3.5/__pycache__/dumbdbm.* ${libdir}/python3.5/whichdb.* ${libdir}/python3.5/__pycache__/whichdb.* ${libdir}/python3.5/dbm ${libdir}/python3.5/dbm/__pycache__ ${libdir}/python3.5/lib-dynload/_dbm.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_dbm.*.so " SUMMARY_${PN}-debugger="Python debugger" RDEPENDS_${PN}-debugger="${PN}-core ${PN}-io ${PN}-lang ${PN}-re ${PN}-stringold ${PN}-shell ${PN}-pprint ${PN}-importlib ${PN}-pkgutil" -FILES_${PN}-debugger="${libdir}/python3.5/bdb.* ${libdir}/python3.5/pdb.* " +FILES_${PN}-debugger="${libdir}/python3.5/bdb.* ${libdir}/python3.5/__pycache__/bdb.* ${libdir}/python3.5/pdb.* ${libdir}/python3.5/__pycache__/pdb.* " SUMMARY_${PN}-dev="Python development package" RDEPENDS_${PN}-dev="${PN}-core" -FILES_${PN}-dev="${includedir} ${libdir}/lib*${SOLIBSDEV} ${libdir}/*.la ${libdir}/*.a ${libdir}/*.o ${libdir}/pkgconfig ${base_libdir}/*.a ${base_libdir}/*.o ${datadir}/aclocal ${datadir}/pkgconfig " +FILES_${PN}-dev="${includedir} ${libdir}/lib*${SOLIBSDEV} ${libdir}/*.la ${libdir}/*.a ${libdir}/*.o ${libdir}/pkgconfig ${base_libdir}/*.a ${base_libdir}/*.o ${datadir}/aclocal ${datadir}/pkgconfig ${libdir}/python3.5/config/Makefile ${libdir}/python3.5/config/Makefile/__pycache__ " SUMMARY_${PN}-difflib="Python helpers for computing deltas between objects" RDEPENDS_${PN}-difflib="${PN}-lang ${PN}-re" -FILES_${PN}-difflib="${libdir}/python3.5/difflib.* " +FILES_${PN}-difflib="${libdir}/python3.5/difflib.* ${libdir}/python3.5/__pycache__/difflib.* " SUMMARY_${PN}-distutils="Python Distribution Utilities" RDEPENDS_${PN}-distutils="${PN}-core ${PN}-email" -FILES_${PN}-distutils="${libdir}/python3.5/config ${libdir}/python3.5/distutils " +FILES_${PN}-distutils="${libdir}/python3.5/config ${libdir}/python3.5/config/__pycache__ ${libdir}/python3.5/distutils ${libdir}/python3.5/distutils/__pycache__ " 
SUMMARY_${PN}-distutils-staticdev="Python distribution utilities (static libraries)" RDEPENDS_${PN}-distutils-staticdev="${PN}-distutils" -FILES_${PN}-distutils-staticdev="${libdir}/python3.5/config/lib*.a " +FILES_${PN}-distutils-staticdev="${libdir}/python3.5/config/lib*.a ${libdir}/python3.5/config/__pycache__/lib*.a " SUMMARY_${PN}-doctest="Python framework for running examples in docstrings" RDEPENDS_${PN}-doctest="${PN}-core ${PN}-lang ${PN}-io ${PN}-re ${PN}-unittest ${PN}-debugger ${PN}-difflib" -FILES_${PN}-doctest="${libdir}/python3.5/doctest.* " +FILES_${PN}-doctest="${libdir}/python3.5/doctest.* ${libdir}/python3.5/__pycache__/doctest.* " SUMMARY_${PN}-email="Python email support" RDEPENDS_${PN}-email="${PN}-core ${PN}-io ${PN}-re ${PN}-mime ${PN}-audio ${PN}-image ${PN}-netclient" -FILES_${PN}-email="${libdir}/python3.5/imaplib.* ${libdir}/python3.5/email " +FILES_${PN}-email="${libdir}/python3.5/imaplib.* ${libdir}/python3.5/__pycache__/imaplib.* ${libdir}/python3.5/email ${libdir}/python3.5/email/__pycache__ " SUMMARY_${PN}-enum="Python support for enumerations" RDEPENDS_${PN}-enum="${PN}-core" -FILES_${PN}-enum="${libdir}/python3.5/enum.* " +FILES_${PN}-enum="${libdir}/python3.5/enum.* ${libdir}/python3.5/__pycache__/enum.* " SUMMARY_${PN}-fcntl="Python's fcntl interface" RDEPENDS_${PN}-fcntl="${PN}-core" -FILES_${PN}-fcntl="${libdir}/python3.5/lib-dynload/fcntl.*.so " +FILES_${PN}-fcntl="${libdir}/python3.5/lib-dynload/fcntl.*.so ${libdir}/python3.5/lib-dynload/__pycache__/fcntl.*.so " SUMMARY_${PN}-gdbm="Python GNU database support" RDEPENDS_${PN}-gdbm="${PN}-core" -FILES_${PN}-gdbm="${libdir}/python3.5/lib-dynload/_gdbm.*.so " +FILES_${PN}-gdbm="${libdir}/python3.5/lib-dynload/_gdbm.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_gdbm.*.so " SUMMARY_${PN}-html="Python HTML processing support" RDEPENDS_${PN}-html="${PN}-core" -FILES_${PN}-html="${libdir}/python3.5/formatter.* ${libdir}/python3.5/htmlentitydefs.* ${libdir}/python3.5/htmllib.* ${libdir}/python3.5/markupbase.* ${libdir}/python3.5/sgmllib.* ${libdir}/python3.5/HTMLParser.* " +FILES_${PN}-html="${libdir}/python3.5/formatter.* ${libdir}/python3.5/__pycache__/formatter.* ${libdir}/python3.5/htmlentitydefs.* ${libdir}/python3.5/__pycache__/htmlentitydefs.* ${libdir}/python3.5/html ${libdir}/python3.5/html/__pycache__ ${libdir}/python3.5/htmllib.* ${libdir}/python3.5/__pycache__/htmllib.* ${libdir}/python3.5/markupbase.* ${libdir}/python3.5/__pycache__/markupbase.* ${libdir}/python3.5/sgmllib.* ${libdir}/python3.5/__pycache__/sgmllib.* ${libdir}/python3.5/HTMLParser.* ${libdir}/python3.5/__pycache__/HTMLParser.* " SUMMARY_${PN}-idle="Python Integrated Development Environment" RDEPENDS_${PN}-idle="${PN}-core ${PN}-tkinter" -FILES_${PN}-idle="${bindir}/idle ${libdir}/python3.5/idlelib " +FILES_${PN}-idle="${bindir}/idle ${libdir}/python3.5/idlelib ${libdir}/python3.5/idlelib/__pycache__ " SUMMARY_${PN}-image="Python graphical image handling" RDEPENDS_${PN}-image="${PN}-core" -FILES_${PN}-image="${libdir}/python3.5/colorsys.* ${libdir}/python3.5/imghdr.* ${libdir}/python3.5/lib-dynload/imageop.*.so ${libdir}/python3.5/lib-dynload/rgbimg.*.so " +FILES_${PN}-image="${libdir}/python3.5/colorsys.* ${libdir}/python3.5/__pycache__/colorsys.* ${libdir}/python3.5/imghdr.* ${libdir}/python3.5/__pycache__/imghdr.* ${libdir}/python3.5/lib-dynload/imageop.*.so ${libdir}/python3.5/lib-dynload/__pycache__/imageop.*.so ${libdir}/python3.5/lib-dynload/rgbimg.*.so ${libdir}/python3.5/lib-dynload/__pycache__/rgbimg.*.so " 
SUMMARY_${PN}-importlib="Python import implementation library" RDEPENDS_${PN}-importlib="${PN}-core ${PN}-lang" -FILES_${PN}-importlib="${libdir}/python3.5/importlib ${libdir}/python3.5/imp.* " +FILES_${PN}-importlib="${libdir}/python3.5/importlib ${libdir}/python3.5/importlib/__pycache__ ${libdir}/python3.5/imp.* ${libdir}/python3.5/__pycache__/imp.* " SUMMARY_${PN}-io="Python low-level I/O" RDEPENDS_${PN}-io="${PN}-core ${PN}-math" -FILES_${PN}-io="${libdir}/python3.5/lib-dynload/_socket.*.so ${libdir}/python3.5/lib-dynload/_io.*.so ${libdir}/python3.5/lib-dynload/_ssl.*.so ${libdir}/python3.5/lib-dynload/select.*.so ${libdir}/python3.5/lib-dynload/termios.*.so ${libdir}/python3.5/lib-dynload/cStringIO.*.so ${libdir}/python3.5/pipes.* ${libdir}/python3.5/socket.* ${libdir}/python3.5/ssl.* ${libdir}/python3.5/tempfile.* ${libdir}/python3.5/StringIO.* ${libdir}/python3.5/io.* ${libdir}/python3.5/_pyio.* " +FILES_${PN}-io="${libdir}/python3.5/lib-dynload/_socket.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_socket.*.so ${libdir}/python3.5/lib-dynload/_io.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_io.*.so ${libdir}/python3.5/lib-dynload/_ssl.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_ssl.*.so ${libdir}/python3.5/lib-dynload/select.*.so ${libdir}/python3.5/lib-dynload/__pycache__/select.*.so ${libdir}/python3.5/lib-dynload/termios.*.so ${libdir}/python3.5/lib-dynload/__pycache__/termios.*.so ${libdir}/python3.5/lib-dynload/cStringIO.*.so ${libdir}/python3.5/lib-dynload/__pycache__/cStringIO.*.so ${libdir}/python3.5/ipaddress.* ${libdir}/python3.5/__pycache__/ipaddress.* ${libdir}/python3.5/pipes.* ${libdir}/python3.5/__pycache__/pipes.* ${libdir}/python3.5/socket.* ${libdir}/python3.5/__pycache__/socket.* ${libdir}/python3.5/ssl.* ${libdir}/python3.5/__pycache__/ssl.* ${libdir}/python3.5/tempfile.* ${libdir}/python3.5/__pycache__/tempfile.* ${libdir}/python3.5/StringIO.* ${libdir}/python3.5/__pycache__/StringIO.* ${libdir}/python3.5/io.* ${libdir}/python3.5/__pycache__/io.* ${libdir}/python3.5/_pyio.* ${libdir}/python3.5/__pycache__/_pyio.* " SUMMARY_${PN}-json="Python JSON support" RDEPENDS_${PN}-json="${PN}-core ${PN}-math ${PN}-re" -FILES_${PN}-json="${libdir}/python3.5/json ${libdir}/python3.5/lib-dynload/_json.*.so " +FILES_${PN}-json="${libdir}/python3.5/json ${libdir}/python3.5/json/__pycache__ ${libdir}/python3.5/lib-dynload/_json.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_json.*.so " SUMMARY_${PN}-lang="Python low-level language support" RDEPENDS_${PN}-lang="${PN}-core ${PN}-importlib" -FILES_${PN}-lang="${libdir}/python3.5/lib-dynload/_bisect.*.so ${libdir}/python3.5/lib-dynload/_collections.*.so ${libdir}/python3.5/lib-dynload/_heapq.*.so ${libdir}/python3.5/lib-dynload/_weakref.*.so ${libdir}/python3.5/lib-dynload/_functools.*.so ${libdir}/python3.5/lib-dynload/array.*.so ${libdir}/python3.5/lib-dynload/itertools.*.so ${libdir}/python3.5/lib-dynload/operator.*.so ${libdir}/python3.5/lib-dynload/parser.*.so ${libdir}/python3.5/atexit.* ${libdir}/python3.5/bisect.* ${libdir}/python3.5/code.* ${libdir}/python3.5/codeop.* ${libdir}/python3.5/collections.* ${libdir}/python3.5/_collections_abc.* ${libdir}/python3.5/contextlib.* ${libdir}/python3.5/dis.* ${libdir}/python3.5/functools.* ${libdir}/python3.5/heapq.* ${libdir}/python3.5/inspect.* ${libdir}/python3.5/keyword.* ${libdir}/python3.5/opcode.* ${libdir}/python3.5/operator.* ${libdir}/python3.5/symbol.* ${libdir}/python3.5/repr.* ${libdir}/python3.5/token.* ${libdir}/python3.5/tokenize.* 
${libdir}/python3.5/traceback.* ${libdir}/python3.5/weakref.* " +FILES_${PN}-lang="${libdir}/python3.5/lib-dynload/_bisect.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_bisect.*.so ${libdir}/python3.5/lib-dynload/_collections.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_collections.*.so ${libdir}/python3.5/lib-dynload/_heapq.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_heapq.*.so ${libdir}/python3.5/lib-dynload/_weakref.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_weakref.*.so ${libdir}/python3.5/lib-dynload/_functools.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_functools.*.so ${libdir}/python3.5/lib-dynload/array.*.so ${libdir}/python3.5/lib-dynload/__pycache__/array.*.so ${libdir}/python3.5/lib-dynload/itertools.*.so ${libdir}/python3.5/lib-dynload/__pycache__/itertools.*.so ${libdir}/python3.5/lib-dynload/operator.*.so ${libdir}/python3.5/lib-dynload/__pycache__/operator.*.so ${libdir}/python3.5/lib-dynload/parser.*.so ${libdir}/python3.5/lib-dynload/__pycache__/parser.*.so ${libdir}/python3.5/atexit.* ${libdir}/python3.5/__pycache__/atexit.* ${libdir}/python3.5/bisect.* ${libdir}/python3.5/__pycache__/bisect.* ${libdir}/python3.5/code.* ${libdir}/python3.5/__pycache__/code.* ${libdir}/python3.5/codeop.* ${libdir}/python3.5/__pycache__/codeop.* ${libdir}/python3.5/collections.* ${libdir}/python3.5/__pycache__/collections.* ${libdir}/python3.5/_collections_abc.* ${libdir}/python3.5/__pycache__/_collections_abc.* ${libdir}/python3.5/contextlib.* ${libdir}/python3.5/__pycache__/contextlib.* ${libdir}/python3.5/dis.* ${libdir}/python3.5/__pycache__/dis.* ${libdir}/python3.5/functools.* ${libdir}/python3.5/__pycache__/functools.* ${libdir}/python3.5/heapq.* ${libdir}/python3.5/__pycache__/heapq.* ${libdir}/python3.5/inspect.* ${libdir}/python3.5/__pycache__/inspect.* ${libdir}/python3.5/keyword.* ${libdir}/python3.5/__pycache__/keyword.* ${libdir}/python3.5/opcode.* ${libdir}/python3.5/__pycache__/opcode.* ${libdir}/python3.5/operator.* ${libdir}/python3.5/__pycache__/operator.* ${libdir}/python3.5/symbol.* ${libdir}/python3.5/__pycache__/symbol.* ${libdir}/python3.5/repr.* ${libdir}/python3.5/__pycache__/repr.* ${libdir}/python3.5/token.* ${libdir}/python3.5/__pycache__/token.* ${libdir}/python3.5/tokenize.* ${libdir}/python3.5/__pycache__/tokenize.* ${libdir}/python3.5/traceback.* ${libdir}/python3.5/__pycache__/traceback.* ${libdir}/python3.5/weakref.* ${libdir}/python3.5/__pycache__/weakref.* " SUMMARY_${PN}-logging="Python logging support" RDEPENDS_${PN}-logging="${PN}-core ${PN}-io ${PN}-lang ${PN}-pickle ${PN}-stringold" -FILES_${PN}-logging="${libdir}/python3.5/logging " +FILES_${PN}-logging="${libdir}/python3.5/logging ${libdir}/python3.5/logging/__pycache__ " SUMMARY_${PN}-mailbox="Python mailbox format support" RDEPENDS_${PN}-mailbox="${PN}-core ${PN}-mime" -FILES_${PN}-mailbox="${libdir}/python3.5/mailbox.* " +FILES_${PN}-mailbox="${libdir}/python3.5/mailbox.* ${libdir}/python3.5/__pycache__/mailbox.* " SUMMARY_${PN}-math="Python math support" RDEPENDS_${PN}-math="${PN}-core ${PN}-crypt" -FILES_${PN}-math="${libdir}/python3.5/lib-dynload/cmath.*.so ${libdir}/python3.5/lib-dynload/math.*.so ${libdir}/python3.5/lib-dynload/_random.*.so ${libdir}/python3.5/random.* ${libdir}/python3.5/sets.* " +FILES_${PN}-math="${libdir}/python3.5/lib-dynload/cmath.*.so ${libdir}/python3.5/lib-dynload/__pycache__/cmath.*.so ${libdir}/python3.5/lib-dynload/math.*.so ${libdir}/python3.5/lib-dynload/__pycache__/math.*.so ${libdir}/python3.5/lib-dynload/_random.*.so 
${libdir}/python3.5/lib-dynload/__pycache__/_random.*.so ${libdir}/python3.5/random.* ${libdir}/python3.5/__pycache__/random.* ${libdir}/python3.5/sets.* ${libdir}/python3.5/__pycache__/sets.* " SUMMARY_${PN}-mime="Python MIME handling APIs" RDEPENDS_${PN}-mime="${PN}-core ${PN}-io" -FILES_${PN}-mime="${libdir}/python3.5/mimetools.* ${libdir}/python3.5/uu.* ${libdir}/python3.5/quopri.* ${libdir}/python3.5/rfc822.* ${libdir}/python3.5/MimeWriter.* " +FILES_${PN}-mime="${libdir}/python3.5/mimetools.* ${libdir}/python3.5/__pycache__/mimetools.* ${libdir}/python3.5/uu.* ${libdir}/python3.5/__pycache__/uu.* ${libdir}/python3.5/quopri.* ${libdir}/python3.5/__pycache__/quopri.* ${libdir}/python3.5/rfc822.* ${libdir}/python3.5/__pycache__/rfc822.* ${libdir}/python3.5/MimeWriter.* ${libdir}/python3.5/__pycache__/MimeWriter.* " SUMMARY_${PN}-mmap="Python memory-mapped file support" RDEPENDS_${PN}-mmap="${PN}-core ${PN}-io" -FILES_${PN}-mmap="${libdir}/python3.5/lib-dynload/mmap.*.so " +FILES_${PN}-mmap="${libdir}/python3.5/lib-dynload/mmap.*.so ${libdir}/python3.5/lib-dynload/__pycache__/mmap.*.so " SUMMARY_${PN}-multiprocessing="Python multiprocessing support" RDEPENDS_${PN}-multiprocessing="${PN}-core ${PN}-io ${PN}-lang ${PN}-pickle ${PN}-threading ${PN}-ctypes ${PN}-mmap" -FILES_${PN}-multiprocessing="${libdir}/python3.5/lib-dynload/_multiprocessing.*.so ${libdir}/python3.5/multiprocessing " +FILES_${PN}-multiprocessing="${libdir}/python3.5/lib-dynload/_multiprocessing.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_multiprocessing.*.so ${libdir}/python3.5/multiprocessing ${libdir}/python3.5/multiprocessing/__pycache__ " SUMMARY_${PN}-netclient="Python Internet Protocol clients" -RDEPENDS_${PN}-netclient="${PN}-core ${PN}-crypt ${PN}-datetime ${PN}-io ${PN}-lang ${PN}-logging ${PN}-mime" -FILES_${PN}-netclient="${libdir}/python3.5/*Cookie*.* ${libdir}/python3.5/base64.* ${libdir}/python3.5/cookielib.* ${libdir}/python3.5/ftplib.* ${libdir}/python3.5/gopherlib.* ${libdir}/python3.5/hmac.* ${libdir}/python3.5/httplib.* ${libdir}/python3.5/mimetypes.* ${libdir}/python3.5/nntplib.* ${libdir}/python3.5/poplib.* ${libdir}/python3.5/smtplib.* ${libdir}/python3.5/telnetlib.* ${libdir}/python3.5/urllib ${libdir}/python3.5/uuid.* ${libdir}/python3.5/rfc822.* ${libdir}/python3.5/mimetools.* " +RDEPENDS_${PN}-netclient="${PN}-argparse ${PN}-core ${PN}-crypt ${PN}-datetime ${PN}-io ${PN}-lang ${PN}-logging ${PN}-mime ${PN}-html" +FILES_${PN}-netclient="${libdir}/python3.5/*Cookie*.* ${libdir}/python3.5/__pycache__/*Cookie*.* ${libdir}/python3.5/base64.* ${libdir}/python3.5/__pycache__/base64.* ${libdir}/python3.5/cookielib.* ${libdir}/python3.5/__pycache__/cookielib.* ${libdir}/python3.5/ftplib.* ${libdir}/python3.5/__pycache__/ftplib.* ${libdir}/python3.5/gopherlib.* ${libdir}/python3.5/__pycache__/gopherlib.* ${libdir}/python3.5/hmac.* ${libdir}/python3.5/__pycache__/hmac.* ${libdir}/python3.5/http* ${libdir}/python3.5/http*/__pycache__ ${libdir}/python3.5/httplib.* ${libdir}/python3.5/__pycache__/httplib.* ${libdir}/python3.5/mimetypes.* ${libdir}/python3.5/__pycache__/mimetypes.* ${libdir}/python3.5/nntplib.* ${libdir}/python3.5/__pycache__/nntplib.* ${libdir}/python3.5/poplib.* ${libdir}/python3.5/__pycache__/poplib.* ${libdir}/python3.5/smtplib.* ${libdir}/python3.5/__pycache__/smtplib.* ${libdir}/python3.5/telnetlib.* ${libdir}/python3.5/__pycache__/telnetlib.* ${libdir}/python3.5/urllib ${libdir}/python3.5/urllib/__pycache__ ${libdir}/python3.5/uuid.* ${libdir}/python3.5/__pycache__/uuid.* 
${libdir}/python3.5/rfc822.* ${libdir}/python3.5/__pycache__/rfc822.* ${libdir}/python3.5/mimetools.* ${libdir}/python3.5/__pycache__/mimetools.* " SUMMARY_${PN}-netserver="Python Internet Protocol servers" RDEPENDS_${PN}-netserver="${PN}-core ${PN}-netclient ${PN}-shell ${PN}-threading" -FILES_${PN}-netserver="${libdir}/python3.5/cgi.* ${libdir}/python3.5/*HTTPServer.* ${libdir}/python3.5/SocketServer.* " +FILES_${PN}-netserver="${libdir}/python3.5/cgi.* ${libdir}/python3.5/__pycache__/cgi.* ${libdir}/python3.5/socketserver.* ${libdir}/python3.5/__pycache__/socketserver.* ${libdir}/python3.5/*HTTPServer.* ${libdir}/python3.5/__pycache__/*HTTPServer.* ${libdir}/python3.5/SocketServer.* ${libdir}/python3.5/__pycache__/SocketServer.* " SUMMARY_${PN}-numbers="Python number APIs" RDEPENDS_${PN}-numbers="${PN}-core ${PN}-lang ${PN}-re" -FILES_${PN}-numbers="${libdir}/python3.5/decimal.* ${libdir}/python3.5/fractions.* ${libdir}/python3.5/numbers.* " +FILES_${PN}-numbers="${libdir}/python3.5/decimal.* ${libdir}/python3.5/__pycache__/decimal.* ${libdir}/python3.5/fractions.* ${libdir}/python3.5/__pycache__/fractions.* ${libdir}/python3.5/numbers.* ${libdir}/python3.5/__pycache__/numbers.* " SUMMARY_${PN}-pickle="Python serialisation/persistence support" RDEPENDS_${PN}-pickle="${PN}-core ${PN}-codecs ${PN}-io ${PN}-re" -FILES_${PN}-pickle="${libdir}/python3.5/pickle.* ${libdir}/python3.5/shelve.* ${libdir}/python3.5/lib-dynload/cPickle.*.so ${libdir}/python3.5/pickletools.* " +FILES_${PN}-pickle="${libdir}/python3.5/_compat_pickle.* ${libdir}/python3.5/__pycache__/_compat_pickle.* ${libdir}/python3.5/pickle.* ${libdir}/python3.5/__pycache__/pickle.* ${libdir}/python3.5/shelve.* ${libdir}/python3.5/__pycache__/shelve.* ${libdir}/python3.5/lib-dynload/cPickle.*.so ${libdir}/python3.5/lib-dynload/__pycache__/cPickle.*.so ${libdir}/python3.5/pickletools.* ${libdir}/python3.5/__pycache__/pickletools.* " SUMMARY_${PN}-pkgutil="Python package extension utility support" RDEPENDS_${PN}-pkgutil="${PN}-core" -FILES_${PN}-pkgutil="${libdir}/python3.5/pkgutil.* " +FILES_${PN}-pkgutil="${libdir}/python3.5/pkgutil.* ${libdir}/python3.5/__pycache__/pkgutil.* " SUMMARY_${PN}-pprint="Python pretty-print support" RDEPENDS_${PN}-pprint="${PN}-core ${PN}-io" -FILES_${PN}-pprint="${libdir}/python3.5/pprint.* " +FILES_${PN}-pprint="${libdir}/python3.5/pprint.* ${libdir}/python3.5/__pycache__/pprint.* " SUMMARY_${PN}-profile="Python basic performance profiling support" RDEPENDS_${PN}-profile="${PN}-core ${PN}-textutils" -FILES_${PN}-profile="${libdir}/python3.5/profile.* ${libdir}/python3.5/pstats.* ${libdir}/python3.5/cProfile.* ${libdir}/python3.5/lib-dynload/_lsprof.*.so " +FILES_${PN}-profile="${libdir}/python3.5/profile.* ${libdir}/python3.5/__pycache__/profile.* ${libdir}/python3.5/pstats.* ${libdir}/python3.5/__pycache__/pstats.* ${libdir}/python3.5/cProfile.* ${libdir}/python3.5/__pycache__/cProfile.* ${libdir}/python3.5/lib-dynload/_lsprof.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_lsprof.*.so " SUMMARY_${PN}-pydoc="Python interactive help support" RDEPENDS_${PN}-pydoc="${PN}-core ${PN}-lang ${PN}-stringold ${PN}-re" -FILES_${PN}-pydoc="${bindir}/pydoc ${libdir}/python3.5/pydoc.* ${libdir}/python3.5/pydoc_data " +FILES_${PN}-pydoc="${bindir}/pydoc ${libdir}/python3.5/pydoc.* ${libdir}/python3.5/__pycache__/pydoc.* ${libdir}/python3.5/pydoc_data ${libdir}/python3.5/pydoc_data/__pycache__ " SUMMARY_${PN}-re="Python Regular Expression APIs" RDEPENDS_${PN}-re="${PN}-core" 
-FILES_${PN}-re="${libdir}/python3.5/re.* ${libdir}/python3.5/sre.* ${libdir}/python3.5/sre_compile.* ${libdir}/python3.5/sre_constants* ${libdir}/python3.5/sre_parse.* " +FILES_${PN}-re="${libdir}/python3.5/re.* ${libdir}/python3.5/__pycache__/re.* ${libdir}/python3.5/sre.* ${libdir}/python3.5/__pycache__/sre.* ${libdir}/python3.5/sre_compile.* ${libdir}/python3.5/__pycache__/sre_compile.* ${libdir}/python3.5/sre_constants* ${libdir}/python3.5/sre_constants*/__pycache__ ${libdir}/python3.5/sre_parse.* ${libdir}/python3.5/__pycache__/sre_parse.* " SUMMARY_${PN}-readline="Python readline support" RDEPENDS_${PN}-readline="${PN}-core" -FILES_${PN}-readline="${libdir}/python3.5/lib-dynload/readline.*.so ${libdir}/python3.5/rlcompleter.* " +FILES_${PN}-readline="${libdir}/python3.5/lib-dynload/readline.*.so ${libdir}/python3.5/lib-dynload/__pycache__/readline.*.so ${libdir}/python3.5/rlcompleter.* ${libdir}/python3.5/__pycache__/rlcompleter.* " SUMMARY_${PN}-reprlib="Python alternate repr() implementation" RDEPENDS_${PN}-reprlib="${PN}-core" -FILES_${PN}-reprlib="${libdir}/python3.5/reprlib.py " +FILES_${PN}-reprlib="${libdir}/python3.5/reprlib.py ${libdir}/python3.5/__pycache__/reprlib.py " SUMMARY_${PN}-resource="Python resource control interface" RDEPENDS_${PN}-resource="${PN}-core" -FILES_${PN}-resource="${libdir}/python3.5/lib-dynload/resource.*.so " +FILES_${PN}-resource="${libdir}/python3.5/lib-dynload/resource.*.so ${libdir}/python3.5/lib-dynload/__pycache__/resource.*.so " SUMMARY_${PN}-selectors="Python High-level I/O multiplexing" RDEPENDS_${PN}-selectors="${PN}-core" -FILES_${PN}-selectors="${libdir}/python3.5/selectors.* " +FILES_${PN}-selectors="${libdir}/python3.5/selectors.* ${libdir}/python3.5/__pycache__/selectors.* " SUMMARY_${PN}-shell="Python shell-like functionality" RDEPENDS_${PN}-shell="${PN}-core ${PN}-re ${PN}-compression" -FILES_${PN}-shell="${libdir}/python3.5/cmd.* ${libdir}/python3.5/commands.* ${libdir}/python3.5/dircache.* ${libdir}/python3.5/fnmatch.* ${libdir}/python3.5/glob.* ${libdir}/python3.5/popen2.* ${libdir}/python3.5/shlex.* ${libdir}/python3.5/shutil.* " +FILES_${PN}-shell="${libdir}/python3.5/cmd.* ${libdir}/python3.5/__pycache__/cmd.* ${libdir}/python3.5/commands.* ${libdir}/python3.5/__pycache__/commands.* ${libdir}/python3.5/dircache.* ${libdir}/python3.5/__pycache__/dircache.* ${libdir}/python3.5/fnmatch.* ${libdir}/python3.5/__pycache__/fnmatch.* ${libdir}/python3.5/glob.* ${libdir}/python3.5/__pycache__/glob.* ${libdir}/python3.5/popen2.* ${libdir}/python3.5/__pycache__/popen2.* ${libdir}/python3.5/shlex.* ${libdir}/python3.5/__pycache__/shlex.* ${libdir}/python3.5/shutil.* ${libdir}/python3.5/__pycache__/shutil.* " SUMMARY_${PN}-signal="Python set handlers for asynchronous events support" RDEPENDS_${PN}-signal="${PN}-core ${PN}-enum" -FILES_${PN}-signal="${libdir}/python3.5/signal.* " +FILES_${PN}-signal="${libdir}/python3.5/signal.* ${libdir}/python3.5/__pycache__/signal.* " SUMMARY_${PN}-smtpd="Python Simple Mail Transport Daemon" RDEPENDS_${PN}-smtpd="${PN}-core ${PN}-netserver ${PN}-email ${PN}-mime" -FILES_${PN}-smtpd="${bindir}/smtpd.* ${libdir}/python3.5/smtpd.* " +FILES_${PN}-smtpd="${bindir}/smtpd.* ${libdir}/python3.5/smtpd.* ${libdir}/python3.5/__pycache__/smtpd.* " SUMMARY_${PN}-sqlite3="Python Sqlite3 database support" RDEPENDS_${PN}-sqlite3="${PN}-core ${PN}-datetime ${PN}-lang ${PN}-crypt ${PN}-io ${PN}-threading" -FILES_${PN}-sqlite3="${libdir}/python3.5/lib-dynload/_sqlite3.*.so ${libdir}/python3.5/sqlite3/dbapi2.* 
${libdir}/python3.5/sqlite3/__init__.* ${libdir}/python3.5/sqlite3/dump.* " +FILES_${PN}-sqlite3="${libdir}/python3.5/lib-dynload/_sqlite3.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_sqlite3.*.so ${libdir}/python3.5/sqlite3/dbapi2.* ${libdir}/python3.5/sqlite3/__pycache__/dbapi2.* ${libdir}/python3.5/sqlite3/__init__.* ${libdir}/python3.5/sqlite3/__pycache__/__init__.* ${libdir}/python3.5/sqlite3/dump.* ${libdir}/python3.5/sqlite3/__pycache__/dump.* " SUMMARY_${PN}-sqlite3-tests="Python Sqlite3 database support tests" RDEPENDS_${PN}-sqlite3-tests="${PN}-core ${PN}-sqlite3" -FILES_${PN}-sqlite3-tests="${libdir}/python3.5/sqlite3/test " +FILES_${PN}-sqlite3-tests="${libdir}/python3.5/sqlite3/test ${libdir}/python3.5/sqlite3/test/__pycache__ " SUMMARY_${PN}-stringold="Python string APIs [deprecated]" RDEPENDS_${PN}-stringold="${PN}-core ${PN}-re" -FILES_${PN}-stringold="${libdir}/python3.5/lib-dynload/strop.*.so ${libdir}/python3.5/string.* ${libdir}/python3.5/stringold.* " +FILES_${PN}-stringold="${libdir}/python3.5/lib-dynload/strop.*.so ${libdir}/python3.5/lib-dynload/__pycache__/strop.*.so ${libdir}/python3.5/string.* ${libdir}/python3.5/__pycache__/string.* ${libdir}/python3.5/stringold.* ${libdir}/python3.5/__pycache__/stringold.* " SUMMARY_${PN}-subprocess="Python subprocess support" RDEPENDS_${PN}-subprocess="${PN}-core ${PN}-io ${PN}-re ${PN}-fcntl ${PN}-pickle ${PN}-threading ${PN}-signal ${PN}-selectors" -FILES_${PN}-subprocess="${libdir}/python3.5/subprocess.* ${libdir}/python3.5/lib-dynload/_posixsubprocess.*.so " +FILES_${PN}-subprocess="${libdir}/python3.5/subprocess.* ${libdir}/python3.5/__pycache__/subprocess.* ${libdir}/python3.5/lib-dynload/_posixsubprocess.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_posixsubprocess.*.so " SUMMARY_${PN}-syslog="Python syslog interface" RDEPENDS_${PN}-syslog="${PN}-core" -FILES_${PN}-syslog="${libdir}/python3.5/lib-dynload/syslog.*.so " +FILES_${PN}-syslog="${libdir}/python3.5/lib-dynload/syslog.*.so ${libdir}/python3.5/lib-dynload/__pycache__/syslog.*.so " SUMMARY_${PN}-terminal="Python terminal controlling support" RDEPENDS_${PN}-terminal="${PN}-core ${PN}-io" -FILES_${PN}-terminal="${libdir}/python3.5/pty.* ${libdir}/python3.5/tty.* " +FILES_${PN}-terminal="${libdir}/python3.5/pty.* ${libdir}/python3.5/__pycache__/pty.* ${libdir}/python3.5/tty.* ${libdir}/python3.5/__pycache__/tty.* " SUMMARY_${PN}-tests="Python tests" RDEPENDS_${PN}-tests="${PN}-core" -FILES_${PN}-tests="${libdir}/python3.5/test " +FILES_${PN}-tests="${libdir}/python3.5/test ${libdir}/python3.5/test/__pycache__ " SUMMARY_${PN}-textutils="Python option parsing, text wrapping and CSV support" RDEPENDS_${PN}-textutils="${PN}-core ${PN}-io ${PN}-re ${PN}-stringold" -FILES_${PN}-textutils="${libdir}/python3.5/lib-dynload/_csv.*.so ${libdir}/python3.5/csv.* ${libdir}/python3.5/optparse.* ${libdir}/python3.5/textwrap.* " +FILES_${PN}-textutils="${libdir}/python3.5/lib-dynload/_csv.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_csv.*.so ${libdir}/python3.5/csv.* ${libdir}/python3.5/__pycache__/csv.* ${libdir}/python3.5/optparse.* ${libdir}/python3.5/__pycache__/optparse.* ${libdir}/python3.5/textwrap.* ${libdir}/python3.5/__pycache__/textwrap.* " SUMMARY_${PN}-threading="Python threading & synchronization support" RDEPENDS_${PN}-threading="${PN}-core ${PN}-lang" -FILES_${PN}-threading="${libdir}/python3.5/_threading_local.* ${libdir}/python3.5/dummy_thread.* ${libdir}/python3.5/dummy_threading.* ${libdir}/python3.5/mutex.* ${libdir}/python3.5/threading.* 
${libdir}/python3.5/queue.* " +FILES_${PN}-threading="${libdir}/python3.5/_threading_local.* ${libdir}/python3.5/__pycache__/_threading_local.* ${libdir}/python3.5/dummy_thread.* ${libdir}/python3.5/__pycache__/dummy_thread.* ${libdir}/python3.5/dummy_threading.* ${libdir}/python3.5/__pycache__/dummy_threading.* ${libdir}/python3.5/mutex.* ${libdir}/python3.5/__pycache__/mutex.* ${libdir}/python3.5/threading.* ${libdir}/python3.5/__pycache__/threading.* ${libdir}/python3.5/queue.* ${libdir}/python3.5/__pycache__/queue.* " SUMMARY_${PN}-tkinter="Python Tcl/Tk bindings" RDEPENDS_${PN}-tkinter="${PN}-core" -FILES_${PN}-tkinter="${libdir}/python3.5/lib-dynload/_tkinter.*.so ${libdir}/python3.5/lib-tk ${libdir}/python3.5/tkinter " +FILES_${PN}-tkinter="${libdir}/python3.5/lib-dynload/_tkinter.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_tkinter.*.so ${libdir}/python3.5/lib-tk ${libdir}/python3.5/lib-tk/__pycache__ ${libdir}/python3.5/tkinter ${libdir}/python3.5/tkinter/__pycache__ " + +SUMMARY_${PN}-typing="Python typing support" +RDEPENDS_${PN}-typing="${PN}-core" +FILES_${PN}-typing="${libdir}/python3.5/typing.* ${libdir}/python3.5/__pycache__/typing.* " SUMMARY_${PN}-unittest="Python unit testing framework" RDEPENDS_${PN}-unittest="${PN}-core ${PN}-stringold ${PN}-lang ${PN}-io ${PN}-difflib ${PN}-pprint ${PN}-shell" -FILES_${PN}-unittest="${libdir}/python3.5/unittest/ " +FILES_${PN}-unittest="${libdir}/python3.5/unittest/ ${libdir}/python3.5/unittest/__pycache__ " SUMMARY_${PN}-unixadmin="Python Unix administration support" RDEPENDS_${PN}-unixadmin="${PN}-core" -FILES_${PN}-unixadmin="${libdir}/python3.5/lib-dynload/nis.*.so ${libdir}/python3.5/lib-dynload/grp.*.so ${libdir}/python3.5/lib-dynload/pwd.*.so ${libdir}/python3.5/getpass.* " +FILES_${PN}-unixadmin="${libdir}/python3.5/lib-dynload/nis.*.so ${libdir}/python3.5/lib-dynload/__pycache__/nis.*.so ${libdir}/python3.5/lib-dynload/grp.*.so ${libdir}/python3.5/lib-dynload/__pycache__/grp.*.so ${libdir}/python3.5/lib-dynload/pwd.*.so ${libdir}/python3.5/lib-dynload/__pycache__/pwd.*.so ${libdir}/python3.5/getpass.* ${libdir}/python3.5/__pycache__/getpass.* " SUMMARY_${PN}-xml="Python basic XML support" RDEPENDS_${PN}-xml="${PN}-core ${PN}-re" -FILES_${PN}-xml="${libdir}/python3.5/lib-dynload/_elementtree.*.so ${libdir}/python3.5/lib-dynload/pyexpat.*.so ${libdir}/python3.5/xml ${libdir}/python3.5/xmllib.* " +FILES_${PN}-xml="${libdir}/python3.5/lib-dynload/_elementtree.*.so ${libdir}/python3.5/lib-dynload/__pycache__/_elementtree.*.so ${libdir}/python3.5/lib-dynload/pyexpat.*.so ${libdir}/python3.5/lib-dynload/__pycache__/pyexpat.*.so ${libdir}/python3.5/xml ${libdir}/python3.5/xml/__pycache__ ${libdir}/python3.5/xmllib.* ${libdir}/python3.5/__pycache__/xmllib.* " SUMMARY_${PN}-xmlrpc="Python XML-RPC support" -RDEPENDS_${PN}-xmlrpc="${PN}-core ${PN}-xml ${PN}-netserver ${PN}-lang" -FILES_${PN}-xmlrpc="${libdir}/python3.5/xmlrpclib.* ${libdir}/python3.5/SimpleXMLRPCServer.* ${libdir}/python3.5/DocXMLRPCServer.* ${libdir}/python3.5/xmlrpc " +RDEPENDS_${PN}-xmlrpc="${PN}-core ${PN}-xml ${PN}-netserver ${PN}-lang ${PN}-pydoc" +FILES_${PN}-xmlrpc="${libdir}/python3.5/xmlrpclib.* ${libdir}/python3.5/__pycache__/xmlrpclib.* ${libdir}/python3.5/SimpleXMLRPCServer.* ${libdir}/python3.5/__pycache__/SimpleXMLRPCServer.* ${libdir}/python3.5/DocXMLRPCServer.* ${libdir}/python3.5/__pycache__/DocXMLRPCServer.* ${libdir}/python3.5/xmlrpc ${libdir}/python3.5/xmlrpc/__pycache__ " SUMMARY_${PN}-modules="All Python modules" 
-RDEPENDS_${PN}-modules="${PN}-2to3 ${PN}-argparse ${PN}-asyncio ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-difflib ${PN}-distutils ${PN}-doctest ${PN}-email ${PN}-enum ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-reprlib ${PN}-resource ${PN}-selectors ${PN}-shell ${PN}-signal ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc " +RDEPENDS_${PN}-modules="${PN}-2to3 ${PN}-argparse ${PN}-asyncio ${PN}-audio ${PN}-codecs ${PN}-compile ${PN}-compression ${PN}-core ${PN}-crypt ${PN}-ctypes ${PN}-curses ${PN}-datetime ${PN}-db ${PN}-debugger ${PN}-difflib ${PN}-distutils ${PN}-doctest ${PN}-email ${PN}-enum ${PN}-fcntl ${PN}-gdbm ${PN}-html ${PN}-idle ${PN}-image ${PN}-importlib ${PN}-io ${PN}-json ${PN}-lang ${PN}-logging ${PN}-mailbox ${PN}-math ${PN}-mime ${PN}-mmap ${PN}-multiprocessing ${PN}-netclient ${PN}-netserver ${PN}-numbers ${PN}-pickle ${PN}-pkgutil ${PN}-pprint ${PN}-profile ${PN}-pydoc ${PN}-re ${PN}-readline ${PN}-reprlib ${PN}-resource ${PN}-selectors ${PN}-shell ${PN}-signal ${PN}-smtpd ${PN}-sqlite3 ${PN}-sqlite3-tests ${PN}-stringold ${PN}-subprocess ${PN}-syslog ${PN}-terminal ${PN}-tests ${PN}-textutils ${PN}-threading ${PN}-tkinter ${PN}-typing ${PN}-unittest ${PN}-unixadmin ${PN}-xml ${PN}-xmlrpc " ALLOW_EMPTY_${PN}-modules = "1" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-git.inc b/import-layers/yocto-poky/meta/recipes-devtools/python/python-git.inc index 13c097abf..feddf275b 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-git.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-git.inc @@ -10,8 +10,8 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=8b8d26c37c1d5a04f9b0186edbebc183" SRC_URI = "https://files.pythonhosted.org/packages/source/G/GitPython/GitPython-${PV}.tar.gz" -SRC_URI[md5sum] = "aa0ba9df0abe4c8f35dd7bb9be85d56e" -SRC_URI[sha256sum] = "d8e7adaacceedd3d043e6cd2544f57dbe00c53fc26374880b7cea67f3188aa68" +SRC_URI[md5sum] = "77f8339e68dedb6d7c4e26371a588ed9" +SRC_URI[sha256sum] = "e96f8e953cf9fee0a7599fc587667591328760b6341a0081ef311a942fc96204" UPSTREAM_CHECK_URI = "https://pypi.python.org/pypi/GitPython/" UPSTREAM_CHECK_REGEX = "/GitPython/(?P(\d+[\.\-_]*)+)" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-git_2.0.7.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-git_2.0.7.bb deleted file mode 100644 index e49dbea54..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-git_2.0.7.bb +++ /dev/null @@ -1,7 +0,0 @@ -require python-git.inc - -DEPENDS = "python-gitdb" - -inherit setuptools - -RDEPENDS_${PN} += "python-gitdb python-lang python-io python-shell python-math python-re python-subprocess python-stringold python-unixadmin" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-git_2.1.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-git_2.1.1.bb new file mode 100644 index 000000000..e49dbea54 --- /dev/null +++ 
b/import-layers/yocto-poky/meta/recipes-devtools/python/python-git_2.1.1.bb @@ -0,0 +1,7 @@ +require python-git.inc + +DEPENDS = "python-gitdb" + +inherit setuptools + +RDEPENDS_${PN} += "python-gitdb python-lang python-io python-shell python-math python-re python-subprocess python-stringold python-unixadmin" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-mako.inc b/import-layers/yocto-poky/meta/recipes-devtools/python/python-mako.inc index 85ec21768..10364db97 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-mako.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-mako.inc @@ -5,8 +5,8 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=1bb21fa2d2f7a534c884b990430a6863" SRC_URI = "https://files.pythonhosted.org/packages/source/M/Mako/Mako-${PV}.tar.gz" -SRC_URI[md5sum] = "c5fc31a323dd4990683d2f2da02d4e20" -SRC_URI[sha256sum] = "fed99dbe4d0ddb27a33ee4910d8708aca9ef1fe854e668387a9ab9a90cbf9059" +SRC_URI[md5sum] = "a28e22a339080316b2acc352b9ee631c" +SRC_URI[sha256sum] = "48559ebd872a8e77f92005884b3d88ffae552812cdf17db6768e5c3be5ebbe0d" UPSTREAM_CHECK_URI = "https://pypi.python.org/pypi/mako/" UPSTREAM_CHECK_REGEX = "/Mako/(?P(\d+[\.\-_]*)+)" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-mako_1.0.4.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-mako_1.0.4.bb deleted file mode 100644 index 230044e1a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-mako_1.0.4.bb +++ /dev/null @@ -1,17 +0,0 @@ -require python-mako.inc - -inherit setuptools - -RDEPENDS_${PN} = "python-threading \ - python-netclient \ - python-html \ -" -RDEPENDS_${PN}_class-native = "" - -BBCLASSEXTEND = "native nativesdk" - -# The same utility is packaged in python3-mako, so it would conflict -do_install_append() { - rm -f ${D}${bindir}/mako-render - rmdir ${D}${bindir} -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-mako_1.0.6.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-mako_1.0.6.bb new file mode 100644 index 000000000..230044e1a --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-mako_1.0.6.bb @@ -0,0 +1,17 @@ +require python-mako.inc + +inherit setuptools + +RDEPENDS_${PN} = "python-threading \ + python-netclient \ + python-html \ +" +RDEPENDS_${PN}_class-native = "" + +BBCLASSEXTEND = "native nativesdk" + +# The same utility is packaged in python3-mako, so it would conflict +do_install_append() { + rm -f ${D}${bindir}/mako-render + rmdir ${D}${bindir} +} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-native-2.7-manifest.inc b/import-layers/yocto-poky/meta/recipes-devtools/python/python-native-2.7-manifest.inc new file mode 100644 index 000000000..581a37ab1 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-native-2.7-manifest.inc @@ -0,0 +1,10 @@ + +# WARNING: This file is AUTO GENERATED: Manual edits will be lost next time I regenerate the file. 
+# Generator: '../../../scripts/contrib/python/generate-manifest-2.7.py --native' Version 20110222.2 (C) 2002-2010 Michael 'Mickey' Lauer + + + +RPROVIDES+="python-2to3-native python-argparse-native python-audio-native python-bsddb-native python-codecs-native python-compile-native python-compiler-native python-compression-native python-contextlib-native python-core-native python-crypt-native python-ctypes-native python-curses-native python-datetime-native python-db-native python-debugger-native python-dev-native python-difflib-native python-distutils-native python-distutils-staticdev-native python-doctest-native python-email-native python-fcntl-native python-gdbm-native python-hotshot-native python-html-native python-idle-native python-image-native python-importlib-native python-io-native python-json-native python-lang-native python-logging-native python-mailbox-native python-math-native python-mime-native python-mmap-native python-multiprocessing-native python-netclient-native python-netserver-native python-numbers-native python-pickle-native python-pkgutil-native python-plistlib-native python-pprint-native python-profile-native python-pydoc-native python-re-native python-readline-native python-resource-native python-robotparser-native python-shell-native python-smtpd-native python-sqlite3-native python-sqlite3-tests-native python-stringold-native python-subprocess-native python-syslog-native python-terminal-native python-tests-native python-textutils-native python-threading-native python-tkinter-native python-unittest-native python-unixadmin-native python-xml-native python-xmlrpc-native python-zlib-native " + + + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-native-3.5-manifest.inc b/import-layers/yocto-poky/meta/recipes-devtools/python/python-native-3.5-manifest.inc new file mode 100644 index 000000000..10be3e9bb --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-native-3.5-manifest.inc @@ -0,0 +1,10 @@ + +# WARNING: This file is AUTO GENERATED: Manual edits will be lost next time I regenerate the file. 
+# Generator: '../../../scripts/contrib/python/generate-manifest-3.5.py --native' Version 20140131 (C) 2002-2010 Michael 'Mickey' Lauer + + + +RPROVIDES+="python3-2to3-native python3-argparse-native python3-asyncio-native python3-audio-native python3-codecs-native python3-compile-native python3-compression-native python3-core-native python3-crypt-native python3-ctypes-native python3-curses-native python3-datetime-native python3-db-native python3-debugger-native python3-dev-native python3-difflib-native python3-distutils-native python3-distutils-staticdev-native python3-doctest-native python3-email-native python3-enum-native python3-fcntl-native python3-gdbm-native python3-html-native python3-idle-native python3-image-native python3-importlib-native python3-io-native python3-json-native python3-lang-native python3-logging-native python3-mailbox-native python3-math-native python3-mime-native python3-mmap-native python3-multiprocessing-native python3-netclient-native python3-netserver-native python3-numbers-native python3-pickle-native python3-pkgutil-native python3-pprint-native python3-profile-native python3-pydoc-native python3-re-native python3-readline-native python3-reprlib-native python3-resource-native python3-selectors-native python3-shell-native python3-signal-native python3-smtpd-native python3-sqlite3-native python3-sqlite3-tests-native python3-stringold-native python3-subprocess-native python3-syslog-native python3-terminal-native python3-tests-native python3-textutils-native python3-threading-native python3-tkinter-native python3-typing-native python3-unittest-native python3-unixadmin-native python3-xml-native python3-xmlrpc-native " + + + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-native/avoid_parallel_make_races_on_pgen.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-native/avoid_parallel_make_races_on_pgen.patch deleted file mode 100644 index a592ea40a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-native/avoid_parallel_make_races_on_pgen.patch +++ /dev/null @@ -1,27 +0,0 @@ -Upstream-Status: Pending - -Avoids parallel make races linking errors when making Parser/PGEN - -- Implements Richard Purdie's idea - -Signed-off-by: Richard Purdie -Signed-off-by: Alejandro Hernandez - -Index: Python-2.7.9/Makefile.pre.in -=================================================================== ---- Python-2.7.9.orig/Makefile.pre.in -+++ Python-2.7.9/Makefile.pre.in -@@ -610,12 +610,10 @@ Modules/grpmodule.o: $(srcdir)/Modules/g - - Modules/pwdmodule.o: $(srcdir)/Modules/pwdmodule.c $(srcdir)/Modules/posixmodule.h - --$(GRAMMAR_H): $(GRAMMAR_INPUT) $(PGENSRCS) -+$(GRAMMAR_H): $(GRAMMAR_INPUT) $(PGENSRCS) $(PGEN) - @$(MKDIR_P) Include -- $(MAKE) $(PGEN) - $(PGEN) $(GRAMMAR_INPUT) $(GRAMMAR_H) $(GRAMMAR_C) - $(GRAMMAR_C): $(GRAMMAR_H) $(GRAMMAR_INPUT) $(PGENSRCS) -- $(MAKE) $(GRAMMAR_H) - touch $(GRAMMAR_C) - - $(PGEN): $(PGENOBJS) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-native/multilib.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-native/multilib.patch index 916b40f4a..ad4608509 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-native/multilib.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-native/multilib.patch @@ -12,10 +12,10 @@ Upstream-Status: Inappropriate [oe-specific] Signed-off-by: Nitin A Kamble -Index: Python-2.7.12/Include/pythonrun.h +Index: Python-2.7.13/Include/pythonrun.h 
=================================================================== ---- Python-2.7.12.orig/Include/pythonrun.h -+++ Python-2.7.12/Include/pythonrun.h +--- Python-2.7.13.orig/Include/pythonrun.h ++++ Python-2.7.13/Include/pythonrun.h @@ -108,6 +108,7 @@ PyAPI_FUNC(char *) Py_GetPath(void); /* In their own files */ PyAPI_FUNC(const char *) Py_GetVersion(void); @@ -24,10 +24,10 @@ Index: Python-2.7.12/Include/pythonrun.h PyAPI_FUNC(const char *) Py_GetCopyright(void); PyAPI_FUNC(const char *) Py_GetCompiler(void); PyAPI_FUNC(const char *) Py_GetBuildInfo(void); -Index: Python-2.7.12/Lib/distutils/command/install.py +Index: Python-2.7.13/Lib/distutils/command/install.py =================================================================== ---- Python-2.7.12.orig/Lib/distutils/command/install.py -+++ Python-2.7.12/Lib/distutils/command/install.py +--- Python-2.7.13.orig/Lib/distutils/command/install.py ++++ Python-2.7.13/Lib/distutils/command/install.py @@ -22,6 +22,8 @@ from site import USER_BASE from site import USER_SITE @@ -46,23 +46,23 @@ Index: Python-2.7.12/Lib/distutils/command/install.py 'headers': '$base/include/python$py_version_short/$dist_name', 'scripts': '$base/bin', 'data' : '$base', -Index: Python-2.7.12/Lib/pydoc.py +Index: Python-2.7.13/Lib/pydoc.py =================================================================== ---- Python-2.7.12.orig/Lib/pydoc.py -+++ Python-2.7.12/Lib/pydoc.py -@@ -384,7 +384,7 @@ class Doc: - - docloc = os.environ.get("PYTHONDOCS", - "http://docs.python.org/library") -- basedir = os.path.join(sys.exec_prefix, "lib", -+ basedir = os.path.join(sys.exec_prefix, sys.lib, - "python"+sys.version[0:3]) - if (isinstance(object, type(os)) and - (object.__name__ in ('errno', 'exceptions', 'gc', 'imp', -Index: Python-2.7.12/Lib/site.py +--- Python-2.7.13.orig/Lib/pydoc.py ++++ Python-2.7.13/Lib/pydoc.py +@@ -375,7 +375,7 @@ class Doc: + docmodule = docclass = docroutine = docother = docproperty = docdata = fail + + def getdocloc(self, object, +- basedir=os.path.join(sys.exec_prefix, "lib", ++ basedir=os.path.join(sys.exec_prefix, "sys.lib", + "python"+sys.version[0:3])): + """Return the location of module docs or None""" + +Index: Python-2.7.13/Lib/site.py =================================================================== ---- Python-2.7.12.orig/Lib/site.py -+++ Python-2.7.12/Lib/site.py +--- Python-2.7.13.orig/Lib/site.py ++++ Python-2.7.13/Lib/site.py @@ -288,13 +288,19 @@ def getsitepackages(): if sys.platform in ('os2emx', 'riscos'): sitepackages.append(os.path.join(prefix, "Lib", "site-packages")) @@ -83,13 +83,13 @@ Index: Python-2.7.12/Lib/site.py sitepackages.append(prefix) - sitepackages.append(os.path.join(prefix, "lib", "site-packages")) + sitepackages.append(os.path.join(prefix, sys.lib, "site-packages")) - if sys.platform == "darwin": - # for framework builds *only* we add the standard Apple - # locations. 
-Index: Python-2.7.12/Lib/test/test_dl.py + return sitepackages + + def addsitepackages(known_paths): +Index: Python-2.7.13/Lib/test/test_dl.py =================================================================== ---- Python-2.7.12.orig/Lib/test/test_dl.py -+++ Python-2.7.12/Lib/test/test_dl.py +--- Python-2.7.13.orig/Lib/test/test_dl.py ++++ Python-2.7.13/Lib/test/test_dl.py @@ -4,10 +4,11 @@ import unittest from test.test_support import verbose, import_module @@ -104,10 +104,10 @@ Index: Python-2.7.12/Lib/test/test_dl.py ('/usr/bin/cygwin1.dll', 'getpid'), ('/usr/lib/libc.dylib', 'getpid'), ] -Index: Python-2.7.12/Lib/trace.py +Index: Python-2.7.13/Lib/trace.py =================================================================== ---- Python-2.7.12.orig/Lib/trace.py -+++ Python-2.7.12/Lib/trace.py +--- Python-2.7.13.orig/Lib/trace.py ++++ Python-2.7.13/Lib/trace.py @@ -754,10 +754,10 @@ def main(argv=None): # should I also call expanduser? (after all, could use $HOME) @@ -121,10 +121,10 @@ Index: Python-2.7.12/Lib/trace.py "python" + sys.version[:3])) s = os.path.normpath(s) ignore_dirs.append(s) -Index: Python-2.7.12/Makefile.pre.in +Index: Python-2.7.13/Makefile.pre.in =================================================================== ---- Python-2.7.12.orig/Makefile.pre.in -+++ Python-2.7.12/Makefile.pre.in +--- Python-2.7.13.orig/Makefile.pre.in ++++ Python-2.7.13/Makefile.pre.in @@ -92,6 +92,7 @@ PY_CFLAGS= $(CFLAGS) $(CPPFLAGS) $(CFLAG # Machine-dependent subdirectories @@ -142,7 +142,7 @@ Index: Python-2.7.12/Makefile.pre.in # Detailed destination directories BINLIBDEST= $(LIBDIR)/python$(VERSION) -@@ -669,6 +670,7 @@ Modules/getpath.o: $(srcdir)/Modules/get +@@ -668,6 +669,7 @@ Modules/getpath.o: $(srcdir)/Modules/get -DEXEC_PREFIX='"$(exec_prefix)"' \ -DVERSION='"$(VERSION)"' \ -DVPATH='"$(VPATH)"' \ @@ -150,7 +150,7 @@ Index: Python-2.7.12/Makefile.pre.in -o $@ $(srcdir)/Modules/getpath.c Modules/python.o: $(srcdir)/Modules/python.c -@@ -720,7 +722,7 @@ $(AST_C): $(AST_ASDL) $(ASDLGEN_FILES) +@@ -708,7 +710,7 @@ $(AST_C): $(AST_ASDL) $(ASDLGEN_FILES) Python/compile.o Python/symtable.o Python/ast.o: $(GRAMMAR_H) $(AST_H) Python/getplatform.o: $(srcdir)/Python/getplatform.c @@ -159,10 +159,10 @@ Index: Python-2.7.12/Makefile.pre.in Python/importdl.o: $(srcdir)/Python/importdl.c $(CC) -c $(PY_CFLAGS) -I$(DLINCLDIR) -o $@ $(srcdir)/Python/importdl.c -Index: Python-2.7.12/Modules/getpath.c +Index: Python-2.7.13/Modules/getpath.c =================================================================== ---- Python-2.7.12.orig/Modules/getpath.c -+++ Python-2.7.12/Modules/getpath.c +--- Python-2.7.13.orig/Modules/getpath.c ++++ Python-2.7.13/Modules/getpath.c @@ -100,6 +100,13 @@ #error "PREFIX, EXEC_PREFIX, VERSION, and VPATH must be constant defined" #endif @@ -186,10 +186,10 @@ Index: Python-2.7.12/Modules/getpath.c static void reduce(char *dir) -Index: Python-2.7.12/Python/getplatform.c +Index: Python-2.7.13/Python/getplatform.c =================================================================== ---- Python-2.7.12.orig/Python/getplatform.c -+++ Python-2.7.12/Python/getplatform.c +--- Python-2.7.13.orig/Python/getplatform.c ++++ Python-2.7.13/Python/getplatform.c @@ -10,3 +10,13 @@ Py_GetPlatform(void) { return PLATFORM; @@ -204,10 +204,10 @@ Index: Python-2.7.12/Python/getplatform.c +{ + return LIB; +} -Index: Python-2.7.12/Python/sysmodule.c +Index: Python-2.7.13/Python/sysmodule.c =================================================================== ---- 
Python-2.7.12.orig/Python/sysmodule.c -+++ Python-2.7.12/Python/sysmodule.c +--- Python-2.7.13.orig/Python/sysmodule.c ++++ Python-2.7.13/Python/sysmodule.c @@ -1437,6 +1437,8 @@ _PySys_Init(void) PyString_FromString(Py_GetCopyright())); SET_SYS_FROM_STRING("platform", @@ -217,11 +217,11 @@ Index: Python-2.7.12/Python/sysmodule.c SET_SYS_FROM_STRING("executable", PyString_FromString(Py_GetProgramFullPath())); SET_SYS_FROM_STRING("prefix", -Index: Python-2.7.12/configure.ac +Index: Python-2.7.13/configure.ac =================================================================== ---- Python-2.7.12.orig/configure.ac -+++ Python-2.7.12/configure.ac -@@ -756,6 +756,11 @@ SunOS*) +--- Python-2.7.13.orig/configure.ac ++++ Python-2.7.13/configure.ac +@@ -759,6 +759,11 @@ SunOS*) ;; esac diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-native_2.7.12.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-native_2.7.12.bb deleted file mode 100644 index de83cbd76..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-native_2.7.12.bb +++ /dev/null @@ -1,61 +0,0 @@ -require python.inc - -EXTRANATIVEPATH += "bzip2-native" -DEPENDS = "openssl-native bzip2-replacement-native zlib-native readline-native sqlite3-native expat-native" -PR = "${INC_PR}.1" - -SRC_URI += "\ - file://05-enable-ctypes-cross-build.patch \ - file://10-distutils-fix-swig-parameter.patch \ - file://11-distutils-never-modify-shebang-line.patch \ - file://0001-distutils-set-the-prefix-to-be-inside-staging-direct.patch \ - file://debug.patch \ - file://unixccompiler.patch \ - file://nohostlibs.patch \ - file://multilib.patch \ - file://add-md5module-support.patch \ - file://builddir.patch \ - file://parallel-makeinst-create-bindir.patch \ - file://revert_use_of_sysconfigdata.patch \ - " - -S = "${WORKDIR}/Python-${PV}" - -FILESEXTRAPATHS =. "${FILE_DIRNAME}/${PN}:" - -inherit native - -RPROVIDES += "python-distutils-native python-compression-native python-textutils-native python-codecs-native python-core-native python-unittest-native" - -EXTRA_OECONF_append = " --bindir=${bindir}/${PN} --with-system-expat=${STAGING_DIR_HOST}" - -EXTRA_OEMAKE = '\ - LIBC="" \ - STAGING_LIBDIR=${STAGING_LIBDIR_NATIVE} \ - STAGING_INCDIR=${STAGING_INCDIR_NATIVE} \ -' - -do_configure_append() { - autoreconf --verbose --install --force --exclude=autopoint ../Python-${PV}/Modules/_ctypes/libffi -} - -do_install() { - oe_runmake 'DESTDIR=${D}' install - install -d ${D}${bindir}/${PN} - install -m 0755 Parser/pgen ${D}${bindir}/${PN} - - # Make sure we use /usr/bin/env python - for PYTHSCRIPT in `grep -rIl ${bindir}/${PN}/python ${D}${bindir}/${PN}`; do - sed -i -e '1s|^#!.*|#!/usr/bin/env python|' $PYTHSCRIPT - done - - # Add a symlink to the native Python so that scripts can just invoke - # "nativepython" and get the right one without needing absolute paths - # (these often end up too long for the #! parser in the kernel as the - # buffer is 128 bytes long). - ln -s python-native/python ${D}${bindir}/nativepython - - # We don't want modules in ~/.local being used in preference to those - # installed in the native sysroot, so disable user site support. 
- sed -i -e 's,^\(ENABLE_USER_SITE = \).*,\1False,' ${D}${libdir}/python${PYTHON_MAJMIN}/site.py -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-native_2.7.13.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-native_2.7.13.bb new file mode 100644 index 000000000..7edf15348 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-native_2.7.13.bb @@ -0,0 +1,61 @@ +require python.inc + +EXTRANATIVEPATH += "bzip2-native" +DEPENDS = "openssl-native bzip2-replacement-native zlib-native readline-native sqlite3-native expat-native" +PR = "${INC_PR}.1" + +SRC_URI += "\ + file://05-enable-ctypes-cross-build.patch \ + file://10-distutils-fix-swig-parameter.patch \ + file://11-distutils-never-modify-shebang-line.patch \ + file://0001-distutils-set-the-prefix-to-be-inside-staging-direct.patch \ + file://debug.patch \ + file://unixccompiler.patch \ + file://nohostlibs.patch \ + file://multilib.patch \ + file://add-md5module-support.patch \ + file://builddir.patch \ + file://parallel-makeinst-create-bindir.patch \ + file://revert_use_of_sysconfigdata.patch \ + " + +S = "${WORKDIR}/Python-${PV}" + +FILESEXTRAPATHS =. "${FILE_DIRNAME}/${PN}:" + +inherit native + +require python-native-${PYTHON_MAJMIN}-manifest.inc + +EXTRA_OECONF_append = " --bindir=${bindir}/${PN} --with-system-expat=${STAGING_DIR_HOST}" + +EXTRA_OEMAKE = '\ + LIBC="" \ + STAGING_LIBDIR=${STAGING_LIBDIR_NATIVE} \ + STAGING_INCDIR=${STAGING_INCDIR_NATIVE} \ +' + +do_configure_append() { + autoreconf --verbose --install --force --exclude=autopoint ../Python-${PV}/Modules/_ctypes/libffi +} + +do_install() { + oe_runmake 'DESTDIR=${D}' install + install -d ${D}${bindir}/${PN} + install -m 0755 Parser/pgen ${D}${bindir}/${PN} + + # Make sure we use /usr/bin/env python + for PYTHSCRIPT in `grep -rIl ${bindir}/${PN}/python ${D}${bindir}/${PN}`; do + sed -i -e '1s|^#!.*|#!/usr/bin/env python|' $PYTHSCRIPT + done + + # Add a symlink to the native Python so that scripts can just invoke + # "nativepython" and get the right one without needing absolute paths + # (these often end up too long for the #! parser in the kernel as the + # buffer is 128 bytes long). + ln -s python-native/python ${D}${bindir}/nativepython + + # We don't want modules in ~/.local being used in preference to those + # installed in the native sysroot, so disable user site support. 
+ sed -i -e 's,^\(ENABLE_USER_SITE = \).*,\1False,' ${D}${libdir}/python${PYTHON_MAJMIN}/site.py +} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-pexpect_4.2.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-pexpect_4.2.0.bb deleted file mode 100644 index 82e0fa886..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-pexpect_4.2.0.bb +++ /dev/null @@ -1,28 +0,0 @@ -SUMMARY = "A Pure Python Expect like Module for Python" -HOMEPAGE = "http://pexpect.readthedocs.org/" -SECTION = "devel/python" -LICENSE = "ISC" -LIC_FILES_CHKSUM = "file://LICENSE;md5=1c7a725251880af8c6a148181665385b" - -SRCNAME = "pexpect" - -SRC_URI = "https://files.pythonhosted.org/packages/source/p/${SRCNAME}/${SRCNAME}-${PV}.tar.gz" -SRC_URI[md5sum] = "8071ec5df0f3d515daedafad672d1632" -SRC_URI[sha256sum] = "bf6816b8cc8d301a499e7adf338828b39bc7548eb64dbed4dd410ed93d95f853" - -UPSTREAM_CHECK_URI = "https://pypi.python.org/pypi/pexpect" - -S = "${WORKDIR}/pexpect-${PV}" - -inherit setuptools - -RDEPENDS_${PN} = "\ - python-core \ - python-io \ - python-terminal \ - python-resource \ - python-fcntl \ - python-ptyprocess \ -" - -BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-pexpect_4.2.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-pexpect_4.2.1.bb new file mode 100644 index 000000000..1321797ef --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-pexpect_4.2.1.bb @@ -0,0 +1,28 @@ +SUMMARY = "A Pure Python Expect like Module for Python" +HOMEPAGE = "http://pexpect.readthedocs.org/" +SECTION = "devel/python" +LICENSE = "ISC" +LIC_FILES_CHKSUM = "file://LICENSE;md5=1c7a725251880af8c6a148181665385b" + +SRCNAME = "pexpect" + +SRC_URI = "https://files.pythonhosted.org/packages/source/p/${SRCNAME}/${SRCNAME}-${PV}.tar.gz" +SRC_URI[md5sum] = "3694410001a99dff83f0b500a1ca1c95" +SRC_URI[sha256sum] = "3d132465a75b57aa818341c6521392a06cc660feb3988d7f1074f39bd23c9a92" + +UPSTREAM_CHECK_URI = "https://pypi.python.org/pypi/pexpect" + +S = "${WORKDIR}/pexpect-${PV}" + +inherit setuptools + +RDEPENDS_${PN} = "\ + python-core \ + python-io \ + python-terminal \ + python-resource \ + python-fcntl \ + python-ptyprocess \ +" + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-ptyprocess_0.5.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-ptyprocess_0.5.1.bb index 931e5a7d4..eed24ad2e 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-ptyprocess_0.5.1.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-ptyprocess_0.5.1.bb @@ -20,4 +20,4 @@ RDEPENDS_${PN} = "\ python-core \ " -BBCLASSEXTEND = "nativesdk" +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-pycurl.inc b/import-layers/yocto-poky/meta/recipes-devtools/python/python-pycurl.inc new file mode 100644 index 000000000..d26318b6d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-pycurl.inc @@ -0,0 +1,31 @@ +SUMMARY = "Python bindings for libcurl" +HOMEPAGE = "http://pycurl.sourceforge.net/" +SECTION = "devel/python" +LICENSE = "LGPLv2.1+ | MIT" +LIC_FILES_CHKSUM = "file://README.rst;beginline=166;endline=182;md5=a84a1caa65b89d4584b693d3680062fb \ + file://COPYING-LGPL;md5=3579a9fd0221d49a237aaa33492f988c \ + file://COPYING-MIT;md5=b7e434aeb228ed731c00bcf177e79b19" + +DEPENDS = "curl ${PYTHON_PN}" +RDEPENDS_${PN} = 
"${PYTHON_PN}-core curl" +SRCNAME = "pycurl" + +SRC_URI = "\ + http://${SRCNAME}.sourceforge.net/download/${SRCNAME}-${PV}.tar.gz;name=archive \ + file://no-static-link.patch \ +" + +SRC_URI[archive.md5sum] = "bca7bf47320082588db544ced2ba8717" +SRC_URI[archive.sha256sum] = "8a1e0eb55573388275a1d6c2534ca4cfca5d7fa772b99b505c08fa149b27aed0" +S = "${WORKDIR}/${SRCNAME}-${PV}" + +BBCLASSEXTEND = "native" + +# Ensure the docstrings are generated as make clean will remove them +do_compile_prepend() { + ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py docstrings +} + +do_install_append() { + rm -rf ${D}${datadir}/share +} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-pycurl_7.21.5.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-pycurl_7.21.5.bb index 7b41f6d33..eb70cea66 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-pycurl_7.21.5.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-pycurl_7.21.5.bb @@ -1,33 +1,3 @@ -SUMMARY = "Python bindings for libcurl" -HOMEPAGE = "http://pycurl.sourceforge.net/" -SECTION = "devel/python" -LICENSE = "LGPLv2.1+ | MIT" -LIC_FILES_CHKSUM = "file://README.rst;beginline=166;endline=182;md5=a84a1caa65b89d4584b693d3680062fb \ - file://COPYING-LGPL;md5=3579a9fd0221d49a237aaa33492f988c \ - file://COPYING-MIT;md5=b7e434aeb228ed731c00bcf177e79b19" - -DEPENDS = "curl python" -RDEPENDS_${PN} = "python-core curl" -SRCNAME = "pycurl" - -SRC_URI = "\ - http://${SRCNAME}.sourceforge.net/download/${SRCNAME}-${PV}.tar.gz;name=archive \ - file://no-static-link.patch \ -" - -SRC_URI[archive.md5sum] = "bca7bf47320082588db544ced2ba8717" -SRC_URI[archive.sha256sum] = "8a1e0eb55573388275a1d6c2534ca4cfca5d7fa772b99b505c08fa149b27aed0" -S = "${WORKDIR}/${SRCNAME}-${PV}" +require python-pycurl.inc inherit distutils - -BBCLASSEXTEND = "native" - -# Ensure the docstrings are generated as make clean will remove them -do_compile_prepend() { - ${STAGING_BINDIR_NATIVE}/${PYTHON_PN}-native/${PYTHON_PN} setup.py docstrings -} - -do_install_append() { - rm -rf ${D}${datadir}/share -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons-native_2.5.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons-native_2.5.0.bb deleted file mode 100644 index dae89ab5d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons-native_2.5.0.bb +++ /dev/null @@ -1,8 +0,0 @@ -require python-scons_${PV}.bb -inherit native pythonnative -DEPENDS = "python-native" -RDEPENDS_${PN} = "" - -do_install_append() { - create_wrapper ${D}${bindir}/scons SCONS_LIB_DIR='${STAGING_DIR_HOST}/${PYTHON_SITEPACKAGES_DIR}' -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons-native_2.5.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons-native_2.5.1.bb new file mode 100644 index 000000000..dae89ab5d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons-native_2.5.1.bb @@ -0,0 +1,8 @@ +require python-scons_${PV}.bb +inherit native pythonnative +DEPENDS = "python-native" +RDEPENDS_${PN} = "" + +do_install_append() { + create_wrapper ${D}${bindir}/scons SCONS_LIB_DIR='${STAGING_DIR_HOST}/${PYTHON_SITEPACKAGES_DIR}' +} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons_2.5.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons_2.5.0.bb deleted file mode 100644 index 8543c4182..000000000 --- 
a/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons_2.5.0.bb +++ /dev/null @@ -1,23 +0,0 @@ -SUMMARY = "Software Construction tool (make/autotools replacement)" -SECTION = "devel/python" -LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=3a885dff6d14e4cd876d9008a09a42de" -SRCNAME = "scons" - -SRC_URI = "https://files.pythonhosted.org/packages/source/s/${SRCNAME}/${SRCNAME}-${PV}.tar.gz" - -SRC_URI[md5sum] = "bda5530a70a41a7831d83c8b191c021e" -SRC_URI[sha256sum] = "01f1b3d6023516a8e1b5e77799e5a82a23b32953b1102d339059ffeca8600493" - -UPSTREAM_CHECK_URI = "https://pypi.python.org/pypi/SCons/" - -S = "${WORKDIR}/${SRCNAME}-${PV}" - -inherit setuptools - -RDEPENDS_${PN} = "\ - python-fcntl \ - python-io \ - python-json \ - python-subprocess \ - " diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons_2.5.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons_2.5.1.bb new file mode 100644 index 000000000..3f4385619 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-scons_2.5.1.bb @@ -0,0 +1,23 @@ +SUMMARY = "Software Construction tool (make/autotools replacement)" +SECTION = "devel/python" +LICENSE = "MIT" +LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=3a885dff6d14e4cd876d9008a09a42de" +SRCNAME = "scons" + +SRC_URI = "https://files.pythonhosted.org/packages/source/s/${SRCNAME}/${SRCNAME}-${PV}.tar.gz" + +SRC_URI[md5sum] = "3eac81e5e8206304a9b4683c57665aa4" +SRC_URI[sha256sum] = "c8de85fc02ed1a687b1f2ac791eaa0c1707b4382a204f17d782b5b111b9fdf07" + +UPSTREAM_CHECK_URI = "https://pypi.python.org/pypi/SCons/" + +S = "${WORKDIR}/${SRCNAME}-${PV}" + +inherit setuptools + +RDEPENDS_${PN} = "\ + python-fcntl \ + python-io \ + python-json \ + python-subprocess \ + " diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools.inc b/import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools.inc index 92ca9a002..40f47d4bc 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools.inc @@ -3,14 +3,14 @@ HOMEPAGE = "https://pypi.python.org/pypi/setuptools" SECTION = "devel/python" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://setup.py;beginline=134;endline=134;md5=3e8df024d6c1442d18e84acf8fbbc475" +LIC_FILES_CHKSUM = "file://setup.py;beginline=146;endline=146;md5=3e8df024d6c1442d18e84acf8fbbc475" SRCNAME = "setuptools" SRC_URI = "https://files.pythonhosted.org/packages/source/s/${SRCNAME}/${SRCNAME}-${PV}.tar.gz" -SRC_URI[md5sum] = "869f3029dcc66a64ba39875e2a2f044a" -SRC_URI[sha256sum] = "19aad19471052d5daefe96f2c1fa2e88dcdb17488bf8708d7e6356881ea833cb" +SRC_URI[md5sum] = "8b67868c3430e978833ebd0d1b766694" +SRC_URI[sha256sum] = "8303fb24306385f09bf8b0e5a385c1548e42e8efc08558d64049bc0d55ea012d" UPSTREAM_CHECK_URI = "https://pypi.python.org/pypi/setuptools" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools_22.0.5.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools_22.0.5.bb deleted file mode 100644 index 526474c7e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools_22.0.5.bb +++ /dev/null @@ -1,38 +0,0 @@ -require python-setuptools.inc - -PROVIDES = "python-distribute" - -DEPENDS += "python" -DEPENDS_class-native += "python-native" - -inherit distutils - -DISTUTILS_INSTALL_ARGS += "--install-lib=${D}${PYTHON_SITEPACKAGES_DIR}" - -RDEPENDS_${PN} = "\ - python-stringold \ - 
python-email \ - python-shell \ - python-distutils \ - python-compression \ - python-pkgutil \ - python-plistlib \ - python-numbers \ - python-html \ - python-netserver \ - python-ctypes \ - python-subprocess \ - python-unittest \ - python-compile \ -" - -RDEPENDS_${PN}_class-native = "\ - python-distutils \ - python-compression \ -" - -RREPLACES_${PN} = "python-distribute" -RPROVIDES_${PN} = "python-distribute" -RCONFLICTS_${PN} = "python-distribute" - -BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools_32.1.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools_32.1.1.bb new file mode 100644 index 000000000..526474c7e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python-setuptools_32.1.1.bb @@ -0,0 +1,38 @@ +require python-setuptools.inc + +PROVIDES = "python-distribute" + +DEPENDS += "python" +DEPENDS_class-native += "python-native" + +inherit distutils + +DISTUTILS_INSTALL_ARGS += "--install-lib=${D}${PYTHON_SITEPACKAGES_DIR}" + +RDEPENDS_${PN} = "\ + python-stringold \ + python-email \ + python-shell \ + python-distutils \ + python-compression \ + python-pkgutil \ + python-plistlib \ + python-numbers \ + python-html \ + python-netserver \ + python-ctypes \ + python-subprocess \ + python-unittest \ + python-compile \ +" + +RDEPENDS_${PN}_class-native = "\ + python-distutils \ + python-compression \ +" + +RREPLACES_${PN} = "python-distribute" +RPROVIDES_${PN} = "python-distribute" +RCONFLICTS_${PN} = "python-distribute" + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/channels-rpm_sys-use-md5sum-instead-of-mtime-as-the-.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/channels-rpm_sys-use-md5sum-instead-of-mtime-as-the-.patch deleted file mode 100644 index 2f14a124e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/channels-rpm_sys-use-md5sum-instead-of-mtime-as-the-.patch +++ /dev/null @@ -1,38 +0,0 @@ -channels/rpm_sys: use md5sum instead of mtime as the digest - -Use the internal getFileDigest() function (which defaults to md5) instead of -mtime for getting the file digest. On some systems mtime proved to be -unreliable because of delayed update. This caused smart to miss rpm db updates -and thus get its understanding of installed packages out of sync. 
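The digest check described above can be illustrated with a short standalone sketch. This is not smart's code: hashlib stands in for the internal getFileDigest() helper, and the rpm database path is only an example.

    import hashlib

    def file_digest(path, algo="md5", chunk=65536):
        """Hash the file contents; unlike mtime, this changes only when the data changes."""
        h = hashlib.new(algo)
        with open(path, "rb") as f:
            for block in iter(lambda: f.read(chunk), b""):
                h.update(block)
        return h.hexdigest()

    _last_digest = None

    def rpmdb_changed(path="/var/lib/rpm/Packages"):
        """Return True when the Packages file content differs from the last check."""
        global _last_digest
        digest = file_digest(path)
        changed = digest != _last_digest
        _last_digest = digest
        return changed

Because the comparison is on content rather than on a timestamp, a delayed or coarse-grained mtime update on the rpm database no longer hides a real change.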
- -Upstream-Status: Pending - -Signed-off-by: Markus Lehtonen ---- - smart/channels/rpm_sys.py | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) - -diff --git a/smart/channels/rpm_sys.py b/smart/channels/rpm_sys.py -index b9fda27..6f1fe94 100644 ---- a/smart/channels/rpm_sys.py -+++ b/smart/channels/rpm_sys.py -@@ -22,6 +22,7 @@ - from smart.backends.rpm.header import RPMDBLoader - from smart.backends.rpm.base import getTS, rpm_join_dbpath - from smart.channel import PackageChannel -+from smart.util.filetools import getFileDigest - from smart import * - import os - -@@ -35,7 +36,7 @@ class RPMSysChannel(PackageChannel): - dbdir = rpm_join_dbpath(sysconf.get("rpm-root", "/"), - sysconf.get("rpm-dbpath", "var/lib/rpm")) - path = os.path.join(dbdir, "Packages") -- digest = os.path.getmtime(path) -+ digest = getFileDigest(path) - if digest == self._digest: - return True - self.removeLoaders() --- -2.6.6 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-add-deugging-when-targetpath-is-empty.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-add-deugging-when-targetpath-is-empty.patch deleted file mode 100644 index 5e80804bf..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-add-deugging-when-targetpath-is-empty.patch +++ /dev/null @@ -1,47 +0,0 @@ -From 01e51afd03131947f8d74b9a23fdbc0078249499 Mon Sep 17 00:00:00 2001 -From: Mariano Lopez -Date: Wed, 3 Aug 2016 07:47:09 +0000 -Subject: [PATCH] fetcher.py: Add debugging when targetpath is empty - -There are several errors when openining files or manipulating -path strings, those errors point targetpath passed to -setSucceeded() is empty. This patch won't solve the problems, -but will add debugging to give an idea why is failing. - -Upstream-Status: Inappropriate [debugging] - -Signed-off-by: Mariano Lopez ---- - smart/fetcher.py | 16 ++++++++++++++++ - 1 file changed, 16 insertions(+) - -diff --git a/smart/fetcher.py b/smart/fetcher.py -index dd3ff6b..64aa979 100644 ---- a/smart/fetcher.py -+++ b/smart/fetcher.py -@@ -594,6 +594,22 @@ class FetchItem(object): - self._eta = None - - def setSucceeded(self, targetpath, fetchedsize=0): -+ # It seems the in some odd cases targetpath here -+ # is empty, this will lead to bugs in several places -+ if not targetpath: -+ import traceback -+ tb_str = "" -+ for threadId, stack in sys._current_frames().items(): -+ tb_str += '\nThreadID: %s' % threadId -+ for filename, lineno, name, line in traceback.extract_stack(stack): -+ tb_str += '\nFile: "%s", line %d, in %s' % (filename, lineno, name) -+ if line: -+ tb_str += "\n %s" % line.strip() -+ error_string = ["No file path specified", -+ "URL: %s" % self._url, -+ "Status: %s" % self._status, -+ "Traceback: %s" % tb_str] -+ raise Error, _("\n".join(error_string)) - if self._status is not FAILED: - self._status = SUCCEEDED - self._targetpath = targetpath --- -2.6.6 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-add-for-rpm-ignoresize-check.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-add-for-rpm-ignoresize-check.patch deleted file mode 100644 index fe98d070d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-add-for-rpm-ignoresize-check.patch +++ /dev/null @@ -1,35 +0,0 @@ -python-smartpm: Add checking for "rpm-ignoresize" option - -The do_rootfs takes a very long time when build host has mounted many NFS -devices. 
syscall lstat() was being called on every filesystem mounted on the -build host during building. -The reason for the lstat() is that rpm is verifying that enough free disk space -is available to do the install. However, since the install is into the target -rootfs it should not matter how much free space there is in the host mounts. -Add checking for "rpm-ignoresize", by it, smart can make whether RPM skip -checking for diskspace when install a rpm package. - -Upstream-Status: Pending - -Signed-off-by: wenlin.kang -Signed-off-by: Chong Lu ---- - smart/backends/rpm/pm.py | 4 ++++ - 1 file changed, 4 insertions(+) - -Index: git/smart/backends/rpm/pm.py -=================================================================== ---- git.orig/smart/backends/rpm/pm.py -+++ git/smart/backends/rpm/pm.py -@@ -233,6 +233,11 @@ class RPMPackageManager(PackageManager): - if sysconf.get("rpm-order"): - ts.order() - probfilter = rpm.RPMPROB_FILTER_OLDPACKAGE -+ -+ if sysconf.get("rpm-ignoresize", False): -+ probfilter |= rpm.RPMPROB_FILTER_DISKNODES -+ probfilter |= rpm.RPMPROB_FILTER_DISKSPACE -+ - if force or reinstall: - probfilter |= rpm.RPMPROB_FILTER_REPLACEPKG - probfilter |= rpm.RPMPROB_FILTER_REPLACEOLDFILES diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-already-installed-message.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-already-installed-message.patch deleted file mode 100644 index 9055555cd..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-already-installed-message.patch +++ /dev/null @@ -1,54 +0,0 @@ -From a74a9a9eb9d75964a0e978950e8b191d7a18d763 Mon Sep 17 00:00:00 2001 -From: Paul Eggleton -Date: Fri, 5 Jun 2015 17:07:16 +0100 -Subject: [PATCH] smart: change "is already installed" message from warning to - info - -This doesn't need to be a warning. 
- -Upstream-Status: Pending - -Signed-off-by: Paul Eggleton ---- - smart/commands/install.py | 4 ++-- - smart/interfaces/text/interactive.py | 2 +- - 2 files changed, 3 insertions(+), 3 deletions(-) - -diff --git a/smart/commands/install.py b/smart/commands/install.py -index 6ef9682..80d456b 100644 ---- a/smart/commands/install.py -+++ b/smart/commands/install.py -@@ -152,7 +152,7 @@ def main(ctrl, opts): - for obj in results: - for pkg in obj.packages: - if pkg.installed: -- iface.warning(_("%s (for %s) is already installed") -+ iface.info(_("%s (for %s) is already installed") - % (pkg, arg)) - installed = True - break -@@ -184,7 +184,7 @@ def main(ctrl, opts): - for name in names: - pkg = names[name][0] - if pkg.installed: -- iface.warning(_("%s is already installed") % pkg) -+ iface.info(_("%s is already installed") % pkg) - else: - trans.enqueue(pkg, INSTALL) - -diff --git a/smart/interfaces/text/interactive.py b/smart/interfaces/text/interactive.py -index 9865584..190867b 100644 ---- a/smart/interfaces/text/interactive.py -+++ b/smart/interfaces/text/interactive.py -@@ -278,7 +278,7 @@ class Interpreter(Cmd): - for name in names: - pkg = names[name][0] - if pkg.installed: -- iface.warning(_("%s is already installed") % pkg) -+ iface.info(_("%s is already installed") % pkg) - else: - found = True - transaction.enqueue(pkg, INSTALL) --- -2.1.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-attempt-fix.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-attempt-fix.patch deleted file mode 100644 index 6e672b332..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-attempt-fix.patch +++ /dev/null @@ -1,158 +0,0 @@ -Sadly, smart is not deterministic so the same build can go down multiple different -pathways. We'd expect to see the same warnings however depending on the pathway -taken, it may or may not warn, particularly with Recommends since they're optional. - -For example, where a Recommended package is available but has Conflicts, we'd expect -to see an warning that we couldn't install it. Some code paths silently hide this -(its a LOCKED_CONFLICT). We add printing of warnings for this case. - -Also, if there are two compatible feeds available (e.g. i586 and core2_32), this -changes the code path from direct _install() to _pending() since there are multiple -providers. This patch adds warning handling to _pending() so we don't hit hard -failures there. This is as seen with the mysterious libspeexdsp failures for x86-lsb -on the autobuilder. - -Upstream-Status: Pending -RP -2015/7/16 - -Index: git/smart/transaction.py -=================================================================== ---- git.orig/smart/transaction.py -+++ git/smart/transaction.py -@@ -651,13 +651,14 @@ class Transaction(object): - - if not prvpkgs: - # No packages provide it at all. Give up. 
-+ -+ reasons = [] -+ for prv in req.providedby: -+ for prvpkg in prv.packages: -+ lockedres = lockedpkgs.get(prvpkg, None) -+ if lockedres: -+ reasons.append(lock_reason(prvpkg, lockedres)) - if reqrequired: -- reasons = [] -- for prv in req.providedby: -- for prvpkg in prv.packages: -- lockedres = lockedpkgs.get(prvpkg, None) -- if lockedres: -- reasons.append(lock_reason(prvpkg, lockedres)) - if reasons: - raise Failed, _("Can't install %s: unable to install provider for %s:\n %s") % \ - (pkg, req, '\n '.join(reasons)) -@@ -665,7 +666,11 @@ class Transaction(object): - raise Failed, _("Can't install %s: no package provides %s") % \ - (pkg, req) - else: -+ if reasons: -+ iface.warning(_("Can't install %s: unable to install provider for %s:\n %s") % \ -+ (pkg, req, '\n '.join(reasons))) -+ - # It's only a recommend, skip - continue - - if len(prvpkgs) == 1: -@@ -846,6 +852,14 @@ class Transaction(object): - isinst = changeset.installed - getweight = self._policy.getWeight - -+ attempt = sysconf.has("attempt-install", soft=True) -+ -+ def handle_failure(msg): -+ if attempt: -+ iface.warning(msg) -+ else: -+ raise Failed, msg -+ - updown = [] - while pending: - item = pending.pop(0) -@@ -870,8 +884,9 @@ class Transaction(object): - - if not prvpkgs: - # No packages provide it at all. Give up. -- raise Failed, _("Can't install %s: no package " -- "provides %s") % (pkg, req) -+ handle_failure(_("Can't install %s: no package " -+ "provides %s") % (pkg, req)) -+ continue - - if len(prvpkgs) > 1: - # More than one package provide it. We use _pending here, -@@ -894,9 +909,10 @@ class Transaction(object): - keeporder, cs, lk)) - keeporder += 0.000001 - if not alternatives: -- raise Failed, _("Can't install %s: all packages " -+ handle_failure(_("Can't install %s: all packages " - "providing %s failed to install:\n%s")\ -- % (pkg, req, "\n".join(failures)) -+ % (pkg, req, "\n".join(failures))) -+ continue - alternatives.sort() - changeset.setState(alternatives[0][1]) - if len(alternatives) == 1: -@@ -954,18 +970,20 @@ class Transaction(object): - - for reqpkg in reqpkgs: - if reqpkg in locked and isinst(reqpkg): -- raise Failed, _("Can't remove %s: requiring " -+ handle_failure(_("Can't remove %s: requiring " - "package %s is locked") % \ -- (pkg, reqpkg) -+ (pkg, reqpkg)) -+ continue - for reqpkg in reqpkgs: - # We check again, since other actions may have - # changed their state. 
- if not isinst(reqpkg): - continue - if reqpkg in locked: -- raise Failed, _("Can't remove %s: requiring " -+ handle_failure(_("Can't remove %s: requiring " - "package %s is locked") % \ -- (pkg, reqpkg) -+ (pkg, reqpkg)) -+ continue - self._remove(reqpkg, changeset, locked, - pending, depth) - continue -@@ -978,12 +996,14 @@ class Transaction(object): - try: - for reqpkg in reqpkgs: - if reqpkg in locked and isinst(reqpkg): -- raise Failed, _("%s is locked") % reqpkg -+ handle_failure(_("%s is locked") % reqpkg) -+ continue - for reqpkg in reqpkgs: - if not cs.installed(reqpkg): - continue - if reqpkg in lk: -- raise Failed, _("%s is locked") % reqpkg -+ handle_failure(_("%s is locked") % reqpkg) -+ continue - self._remove(reqpkg, cs, lk, None, depth) - except Failed, e: - failures.append(unicode(e)) -@@ -991,9 +1011,10 @@ class Transaction(object): - alternatives.append((getweight(cs), cs, lk)) - - if not alternatives: -- raise Failed, _("Can't install %s: all packages providing " -+ handle_failure(_("Can't install %s: all packages providing " - "%s failed to install:\n%s") \ -- % (pkg, prv, "\n".join(failures)) -+ % (pkg, prv, "\n".join(failures))) -+ continue - - alternatives.sort() - changeset.setState(alternatives[0][1]) -@@ -1246,6 +1267,7 @@ class Transaction(object): - changeset.setRequested(pkg, True) - except Failed, e: - if sysconf.has("attempt-install", soft=True): -+ iface.warning(_("Can't install %s: %s") % (pkg, str(e))) - if pkg in changeset: - del changeset[pkg] - continue diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-attempt.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-attempt.patch deleted file mode 100644 index 5aedc8826..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-attempt.patch +++ /dev/null @@ -1,177 +0,0 @@ -From b105e7fe812da3ccaf7155c0fe14c8728b0d39a5 Mon Sep 17 00:00:00 2001 -From: Mark Hatle -Date: Mon, 20 Jan 2014 14:30:52 +0000 -Subject: [PATCH] Add mechanism to attempt install without failing - -In OpenEmbedded, for complementary and 'attemptonly' package processing, -we need a way to instruct smart to try to install, but ignore any -failures (usually conflicts). - -This option only works for the install operation. - -If a complementary install fails, an actual error occurred, one that -we can't ignore without losing the entire attempted transaction. Keep -this as an error so that we can catch these cases in the futre. 
- -Upstream-Status: Pending - -Signed-off-by: Mark Hatle -Signed-off-by: Paul Eggleton ---- - backends/rpm/pm.py | 35 ++++++++++++++++++++++++++++++++++- - transaction.py | 50 +++++++++++++++++++++++++++++++++++++------------- - 2 files changed, 71 insertions(+), 14 deletions(-) - -diff --git a/smart/backends/rpm/pm.py b/smart/backends/rpm/pm.py -index 9bbd952..ba6405a 100644 ---- a/smart/backends/rpm/pm.py -+++ b/smart/backends/rpm/pm.py -@@ -241,15 +241,48 @@ class RPMPackageManager(PackageManager): - cb = RPMCallback(prog, upgradednames) - cb.grabOutput(True) - probs = None -+ retry = 0 - try: - probs = ts.run(cb, None) - finally: - del getTS.ts - cb.grabOutput(False) -+ if (probs is not None) and sysconf.has("attempt-install", soft=True): -+ def remove_conflict(pkgNEVR): -+ for key in changeset.keys(): -+ if pkgNEVR == str(key): -+ del changeset[key] -+ del pkgpaths[key] -+ iface.warning("Removing %s due to file %s conflicting with %s" % (pkgNEVR, fname, altNEVR)) -+ break -+ -+ retry = 1 -+ for prob in probs: -+ if prob[1][0] == rpm.RPMPROB_NEW_FILE_CONFLICT: -+ msg = prob[0].split() -+ fname = msg[1] -+ pkgNEVR = msg[7] -+ altNEVR = msg[9] -+ pkgNEVR = pkgNEVR.rsplit('.', 1)[0] + '@' + pkgNEVR.rsplit('.', 1)[1] -+ altNEVR = altNEVR.rsplit('.', 1)[0] + '@' + altNEVR.rsplit('.', 1)[1] -+ remove_conflict(pkgNEVR) -+ elif prob[1][0] == rpm.RPMPROB_FILE_CONFLICT: -+ msg = prob[0].split() -+ fname = msg[1] -+ pkgNEVR = msg[5] -+ altNEVR = msg[11] -+ pkgNEVR = pkgNEVR.rsplit('.', 1)[0] + '@' + pkgNEVR.rsplit('.', 1)[1] -+ altNEVR = altNEVR.rsplit('.', 1)[0] + '@' + altNEVR.rsplit('.', 1)[1] -+ remove_conflict(pkgNEVR) -+ else: -+ retry = 0 -+ - prog.setDone() -- if probs is not None: -+ if (probs is not None) and (not retry): - raise Error, "\n".join([x[0] for x in probs]) - prog.stop() -+ if retry and len(changeset): -+ self.commit(changeset, pkgpaths) - - class RPMCallback: - def __init__(self, prog, upgradednames): -diff --git a/smart/transaction.py b/smart/transaction.py -index 4b90cb7..3e043e9 100644 ---- a/smart/transaction.py -+++ b/smart/transaction.py -@@ -555,6 +555,8 @@ class Transaction(object): - changeset.set(pkg, INSTALL) - isinst = changeset.installed - -+ attempt = sysconf.has("attempt-install", soft=True) -+ - # Remove packages conflicted by this one. - for cnf in pkg.conflicts: - for prv in cnf.providedby: -@@ -564,11 +566,16 @@ class Transaction(object): - if not isinst(prvpkg): - locked[prvpkg] = (LOCKED_CONFLICT_BY, pkg) - continue -- if prvpkg in locked: -- raise Failed, _("Can't install %s: conflicted package " -- "%s is locked") % (pkg, prvpkg) -- self._remove(prvpkg, changeset, locked, pending, depth) -- pending.append((PENDING_UPDOWN, prvpkg)) -+ if attempt: -+ del changeset[pkg] -+ raise Failed, _("Can't install %s: it conflicts with package " -+ "%s") % (pkg, prvpkg) -+ else: -+ if prvpkg in locked: -+ raise Failed, _("Can't install %s: conflicted package " -+ "%s is locked") % (pkg, prvpkg) -+ self._remove(prvpkg, changeset, locked, pending, depth) -+ pending.append((PENDING_UPDOWN, prvpkg)) - - # Remove packages conflicting with this one. 
- for prv in pkg.provides: -@@ -579,12 +586,18 @@ class Transaction(object): - if not isinst(cnfpkg): - locked[cnfpkg] = (LOCKED_CONFLICT, pkg) - continue -- if cnfpkg in locked: -+ if attempt: -+ del changeset[pkg] - raise Failed, _("Can't install %s: it's conflicted by " -- "the locked package %s") \ -- % (pkg, cnfpkg) -- self._remove(cnfpkg, changeset, locked, pending, depth) -- pending.append((PENDING_UPDOWN, cnfpkg)) -+ "the package %s") \ -+ % (pkg, cnfpkg) -+ else: -+ if cnfpkg in locked: -+ raise Failed, _("Can't install %s: it's conflicted by " -+ "the locked package %s") \ -+ % (pkg, cnfpkg) -+ self._remove(cnfpkg, changeset, locked, pending, depth) -+ pending.append((PENDING_UPDOWN, cnfpkg)) - - # Remove packages with the same name that can't - # coexist with this one. -@@ -594,10 +607,15 @@ class Transaction(object): - if not isinst(namepkg): - locked[namepkg] = (LOCKED_NO_COEXIST, pkg) - continue -- if namepkg in locked: -+ if attempt: -+ del changeset[pkg] - raise Failed, _("Can't install %s: it can't coexist " - "with %s") % (pkg, namepkg) -- self._remove(namepkg, changeset, locked, pending, depth) -+ else: -+ if namepkg in locked: -+ raise Failed, _("Can't install %s: it can't coexist " -+ "with %s") % (pkg, namepkg) -+ self._remove(namepkg, changeset, locked, pending, depth) - - # Install packages required by this one. - for req in pkg.requires + pkg.recommends: -@@ -1176,6 +1194,8 @@ class Transaction(object): - - self._policy.runStarting() - -+ attempt = sysconf.has("attempt-install", soft=True) -+ - try: - changeset = self._changeset.copy() - isinst = changeset.installed -@@ -1190,7 +1210,11 @@ class Transaction(object): - locked[pkg] = (LOCKED_KEEP, None) - elif op is INSTALL: - if not isinst(pkg) and pkg in locked: -- raise Failed, _("Can't install %s: it's locked") % pkg -+ if attempt: -+ iface.warning(_("Can't install %s: it's locked") % pkg) -+ del changeset[pkg] -+ else: -+ raise Failed, _("Can't install %s: it's locked") % pkg - changeset.set(pkg, INSTALL) - locked[pkg] = (LOCKED_INSTALL, None) - elif op is REMOVE: diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-cache.py-getPackages-matches-name-version.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-cache.py-getPackages-matches-name-version.patch deleted file mode 100644 index 225b02f96..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-cache.py-getPackages-matches-name-version.patch +++ /dev/null @@ -1,43 +0,0 @@ -From ee05e55e84b53f4bb0d0baba13ca47a8f84b7cb4 Mon Sep 17 00:00:00 2001 -From: Robert Yang -Date: Wed, 30 Sep 2015 01:12:52 -0700 -Subject: [PATCH] smart:cache.py: getPackages() matches name + arch - -It only matched name ony in the past, for example: -smart install busybox (matched) -but: -smart install busybox@core2_64 (didn't match) - -The installation is very slow when no match since it would seach all the -packages in the repo -This patch makes it match both. - -Upstream-Status: Pending - -Signed-off-by: Robert Yang ---- - smart/cache.py | 3 ++- - smart/ccache.c | 9 ++++++++- - 2 files changed, 10 insertions(+), 2 deletions(-) - -diff --git a/smart/control.py b/smart/control.py -index d44abe7..f23a604 100644 ---- a/smart/control.py -+++ b/smart/control.py -@@ -876,9 +876,13 @@ class Control(object): - objects = [] - - # If we find packages with exactly the given -- # name or name-version, use them. 
-- for pkg in self._cache.getPackages(s): -- if pkg.name == s or "%s-%s" % (pkg.name, pkg.version) == s: -+ # name, name-version, or name@arch, use them. -+ s_name = s -+ if "@" in s: -+ s_name = s.split("@")[0] -+ for pkg in self._cache.getPackages(s_name): -+ if pkg.name == s or "%s-%s" % (pkg.name, pkg.version) == s \ -+ or "%s@%s" % (pkg.name, pkg.version.split('@')[1]) == s: - objects.append((1.0, pkg)) - - if not objects: diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-channel-remove-all.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-channel-remove-all.patch deleted file mode 100644 index da23e7ce4..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-channel-remove-all.patch +++ /dev/null @@ -1,33 +0,0 @@ -From 6d2363a705697f615d9e5af5d6703b291e618b46 Mon Sep 17 00:00:00 2001 -From: Daniel Klauer -Date: Thu, 12 May 2016 17:55:01 +0200 -Subject: [PATCH] Fix channel command --remove-all option parsing - -Option.take_action() stores a list of options given for validation later. -It strips leading dashes and turns remaining dashes into underscores. -This list is what ensure_action() will compare its arguments against, -thus we must use underscores here. - -Upstream-Status: Pending - -Signed-off-by: Daniel Klauer ---- - smart/commands/channel.py | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/smart/commands/channel.py b/smart/commands/channel.py -index 108f3f1..6234f69 100644 ---- a/smart/commands/channel.py -+++ b/smart/commands/channel.py -@@ -164,7 +164,7 @@ def main(ctrl, opts): - opts.check_args_of_option("edit", 0) - opts.check_args_of_option("enable", -1) - opts.check_args_of_option("disable", -1) -- opts.ensure_action("channel", ["add", "set", "remove", "remove-all", -+ opts.ensure_action("channel", ["add", "set", "remove", "remove_all", - "list", "show", "yaml", "enable", "disable"]) - opts.check_remaining_args() - --- -1.9.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-channelsdir.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-channelsdir.patch deleted file mode 100644 index e621b3387..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-channelsdir.patch +++ /dev/null @@ -1,24 +0,0 @@ -Make CHANNELSDIR in smart empty, since this causes host contamination issues -on some RPM-based hosts on which smart is already installed. - -[YOCTO #3881] - -Upstream-Status: Inappropriate [embedded specific] - -diff --git a/smart/plugins/channelsync.py b/smart/plugins/channelsync.py -index 3ba95ff..646d696 100644 ---- a/smart/plugins/channelsync.py -+++ b/smart/plugins/channelsync.py -@@ -23,7 +23,11 @@ from smart.channel import * - from smart import * - import os - --CHANNELSDIR = "/etc/smart/channels/" -+# For now, we leave the definition of CHANNELSDIR empty. This prevents smart -+# from erroneously consider the build host's channels while setting up its -+# channels [YOCTO #3881]. If this feature will be used in the future, CHANNELSDIR -+# should be set to a proper value. 
-+CHANNELSDIR = "" - - def syncChannels(channelsdir, force=None): - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-locale.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-locale.patch deleted file mode 100644 index 0f1dfb91d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-locale.patch +++ /dev/null @@ -1,27 +0,0 @@ -rpm or commands run by rpm can use output which isn't strictly acsii such -as quotation characters around expression which are character 0xe2. - -Use utf-8 as an encoding rather than whatever the system suggests to -ensure smart copes with this rather than erroring with unicode errors. - -RP 2016/5/19 -Upstream-Status: Pending - - -Index: git/smart/backends/rpm/pm.py -=================================================================== ---- git.orig/smart/backends/rpm/pm.py -+++ git/smart/backends/rpm/pm.py -@@ -32,11 +32,7 @@ from smart.pm import PackageManager - from smart import sysconf, iface, Error, _ - - --try: -- ENCODING = locale.getpreferredencoding() --except locale.Error: -- ENCODING = "ascii" -- -+ENCODING = "utf-8" - - def get_public_key(header): - return header.sprintf("%|DSAHEADER?{%{DSAHEADER:pgpsig}}:" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-recommends.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-recommends.patch deleted file mode 100644 index d607fc475..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-recommends.patch +++ /dev/null @@ -1,381 +0,0 @@ -Handle recommended packages in core and rpm backends - -Identify and store recommended packages in the cache, add a query option -to read them and ignore them if they are not present when installing. - -Initial identification code from Mark Hatle . 
- -Upstream-Status: Pending - -Signed-off-by: Paul Eggleton - -diff --git a/smart/backends/rpm/base.py b/smart/backends/rpm/base.py -index 9332ea0..4fcfbee 100644 ---- a/smart/backends/rpm/base.py -+++ b/smart/backends/rpm/base.py -@@ -225,6 +225,52 @@ class RPMPackage(Package): - break - else: - return False -+ srecs = fk(self.recommends) -+ orecs = fk(other.recommends) -+ if srecs != orecs: -+ for srec in srecs: -+ if srec.name[0] == "/" or srec in orecs: -+ continue -+ for orec in orecs: -+ if (srec.name == orec.name and -+ srec.relation == orec.relation and -+ checkver(srec.version, orec.version)): -+ break -+ else: -+ return False -+ for orec in orecs: -+ if orec.name[0] == "/" or orec in srecs: -+ continue -+ for srec in srecs: -+ if (srec.name == orec.name and -+ srec.relation == orec.relation and -+ checkver(srec.version, orec.version)): -+ break -+ else: -+ return False -+ srecs = fk(self.recommends) -+ orecs = fk(other.recommends) -+ if srecs != orecs: -+ for srec in srecs: -+ if srec.name[0] == "/" or srec in orecs: -+ continue -+ for orec in orecs: -+ if (srec.name == orec.name and -+ srec.relation == orec.relation and -+ checkver(srec.version, orec.version)): -+ break -+ else: -+ return False -+ for orec in orecs: -+ if orec.name[0] == "/" or orec in srecs: -+ continue -+ for srec in srecs: -+ if (srec.name == orec.name and -+ srec.relation == orec.relation and -+ checkver(srec.version, orec.version)): -+ break -+ else: -+ return False - return True - - def coexists(self, other): -diff --git a/smart/ccache.c b/smart/ccache.c -index 7193185..8b66515 100644 ---- a/smart/ccache.c -+++ b/smart/ccache.c -@@ -500,6 +500,46 @@ Package_equals(PackageObject *self, PackageObject *other) - } - } - -+ ilen = 0; -+ jlen = 0; -+ for (i = 0; i != PyList_GET_SIZE(self->recommends); i++) { -+ PyObject *item = PyList_GET_ITEM(self->recommends, i); -+ if (!PyObject_IsInstance(item, (PyObject *)&Depends_Type)) { -+ PyErr_SetString(PyExc_TypeError, "Depends instance expected"); -+ return NULL; -+ } -+ if (STR(((DependsObject *)item)->name)[0] != '/') -+ ilen += 1; -+ } -+ for (j = 0; j != PyList_GET_SIZE(other->recommends); j++) { -+ PyObject *item = PyList_GET_ITEM(other->recommends, j); -+ if (!PyObject_IsInstance(item, (PyObject *)&Depends_Type)) { -+ PyErr_SetString(PyExc_TypeError, "Depends instance expected"); -+ return NULL; -+ } -+ if (STR(((DependsObject *)item)->name)[0] != '/') -+ jlen += 1; -+ } -+ if (ilen != jlen) { -+ ret = Py_False; -+ goto exit; -+ } -+ -+ ilen = PyList_GET_SIZE(self->recommends); -+ jlen = PyList_GET_SIZE(other->recommends); -+ for (i = 0; i != ilen; i++) { -+ PyObject *item = PyList_GET_ITEM(self->recommends, i); -+ if (STR(((DependsObject *)item)->name)[0] != '/') { -+ for (j = 0; j != jlen; j++) -+ if (item == PyList_GET_ITEM(other->recommends, j)) -+ break; -+ if (j == jlen) { -+ ret = Py_False; -+ goto exit; -+ } -+ } -+ } -+ - exit: - Py_INCREF(ret); - return ret; -@@ -1813,6 +1853,59 @@ Loader_buildPackage(LoaderObject *self, PyObject *args) - } - } - -+ /* if recargs: */ -+ if (recargs) { -+ int i = 0; -+ int len = PyList_GET_SIZE(recargs); -+ /* pkg.recommends = [] */ -+ Py_DECREF(pkgobj->recommends); -+ pkgobj->recommends = PyList_New(len); -+ /* for args in recargs: */ -+ for (; i != len; i++) { -+ PyObject *args = PyList_GET_ITEM(recargs, i); -+ DependsObject *recobj; -+ PyObject *rec; -+ -+ if (!PyTuple_Check(args)) { -+ PyErr_SetString(PyExc_TypeError, -+ "Item in recargs is not a tuple"); -+ return NULL; -+ } -+ -+ /* rec = cache._objmap.get(args) 
*/ -+ rec = PyDict_GetItem(cache->_objmap, args); -+ recobj = (DependsObject *)rec; -+ -+ /* if not rec: */ -+ if (!rec) { -+ if (!PyTuple_Check(args) || PyTuple_GET_SIZE(args) < 2) { -+ PyErr_SetString(PyExc_ValueError, "Invalid recargs tuple"); -+ return NULL; -+ } -+ /* rec = args[0](*args[1:]) */ -+ callargs = PyTuple_GetSlice(args, 1, PyTuple_GET_SIZE(args)); -+ rec = PyObject_CallObject(PyTuple_GET_ITEM(args, 0), callargs); -+ Py_DECREF(callargs); -+ if (!rec) return NULL; -+ recobj = (DependsObject *)rec; -+ -+ /* cache._objmap[args] = rec */ -+ PyDict_SetItem(cache->_objmap, args, rec); -+ Py_DECREF(rec); -+ -+ /* cache._recommends.append(rec) */ -+ PyList_Append(cache->_recommends, rec); -+ } -+ -+ /* relpkgs.append(rec.packages) */ -+ PyList_Append(relpkgs, recobj->packages); -+ -+ /* pkg.recommends.append(rec) */ -+ Py_INCREF(rec); -+ PyList_SET_ITEM(pkgobj->recommends, i, rec); -+ } -+ } -+ - /* if upgargs: */ - if (upgargs) { - int i = 0; -@@ -2592,6 +2685,16 @@ Cache_reset(CacheObject *self, PyObject *args) - if (PyList_Check(reqobj->providedby)) - LIST_CLEAR(reqobj->providedby); - } -+ len = PyList_GET_SIZE(self->_recommends); -+ for (i = 0; i != len; i++) { -+ DependsObject *reqobj; -+ PyObject *req; -+ req = PyList_GET_ITEM(self->_recommends, i); -+ reqobj = (DependsObject *)req; -+ LIST_CLEAR(reqobj->packages); -+ if (PyList_Check(reqobj->providedby)) -+ LIST_CLEAR(reqobj->providedby); -+ } - len = PyList_GET_SIZE(self->_upgrades); - for (i = 0; i != len; i++) { - DependsObject *upgobj; -@@ -2834,6 +2937,30 @@ Cache__reload(CacheObject *self, PyObject *args) - } - - /* -+ for rec in pkg.recommends: -+ rec.packages.append(pkg) -+ if rec not in recommends: -+ recommends[rec] = True -+ objmap[rec.getInitArgs()] = rec -+ */ -+ if (PyList_Check(pkg->recommends)) { -+ klen = PyList_GET_SIZE(pkg->recommends); -+ for (k = 0; k != klen; k++) { -+ PyObject *rec = PyList_GET_ITEM(pkg->recommends, k); -+ PyList_Append(((DependsObject *)rec)->packages, -+ (PyObject *)pkg); -+ if (!PyDict_GetItem(recommends, rec)) { -+ PyDict_SetItem(recommends, rec, Py_True); -+ args = PyObject_CallMethod(rec, "getInitArgs", -+ NULL); -+ if (!args) return NULL; -+ PyDict_SetItem(objmap, args, rec); -+ Py_DECREF(args); -+ } -+ } -+ } -+ -+ /* - for upg in pkg.upgrades: - upg.packages.append(pkg) - if upg not in upgrades: -@@ -3097,6 +3224,47 @@ Cache_linkDeps(CacheObject *self, PyObject *args) - Py_DECREF(seq); - } - -+ /* recnames = {} */ -+ recnames = PyDict_New(); -+ /* for rec in self._recommends: */ -+ len = PyList_GET_SIZE(self->_recommends); -+ for (i = 0; i != len; i++) { -+ PyObject *rec = PyList_GET_ITEM(self->_recommends, i); -+ -+ /* for name in rec.getMatchNames(): */ -+ PyObject *names = PyObject_CallMethod(rec, "getMatchNames", NULL); -+ PyObject *seq = PySequence_Fast(names, "getMatchNames() returned " -+ "non-sequence object"); -+ int nameslen; -+ if (!seq) return NULL; -+ nameslen = PySequence_Fast_GET_SIZE(seq); -+ for (j = 0; j != nameslen; j++) { -+ PyObject *name = PySequence_Fast_GET_ITEM(seq, j); -+ -+ /* lst = recnames.get(name) */ -+ lst = PyDict_GetItem(recnames, name); -+ -+ /* -+ if lst: -+ lst.append(rec) -+ else: -+ recnames[name] = [rec] -+ */ -+ if (lst) { -+ PyList_Append(lst, rec); -+ } else { -+ lst = PyList_New(1); -+ Py_INCREF(rec); -+ PyList_SET_ITEM(lst, 0, rec); -+ PyDict_SetItem(recnames, name, lst); -+ Py_DECREF(lst); -+ } -+ } -+ -+ Py_DECREF(names); -+ Py_DECREF(seq); -+ } -+ - /* upgnames = {} */ - upgnames = PyDict_New(); - /* for upg in self._upgrades: */ 
-@@ -3286,6 +3454,56 @@ Cache_linkDeps(CacheObject *self, PyObject *args) - } - } - -+ /* lst = recnames.get(prv.name) */ -+ lst = PyDict_GetItem(recnames, prv->name); -+ -+ /* if lst: */ -+ if (lst) { -+ /* for rec in lst: */ -+ int reclen = PyList_GET_SIZE(lst); -+ for (j = 0; j != reclen; j++) { -+ DependsObject *rec = (DependsObject *)PyList_GET_ITEM(lst, j); -+ /* if rec.matches(prv): */ -+ PyObject *ret = PyObject_CallMethod((PyObject *)rec, "matches", -+ "O", (PyObject *)prv); -+ if (!ret) return NULL; -+ if (PyObject_IsTrue(ret)) { -+ /* -+ if rec.providedby: -+ rec.providedby.append(prv) -+ else: -+ rec.providedby = [prv] -+ */ -+ if (PyList_Check(rec->providedby)) { -+ PyList_Append(rec->providedby, (PyObject *)prv); -+ } else { -+ PyObject *_lst = PyList_New(1); -+ Py_INCREF(prv); -+ PyList_SET_ITEM(_lst, 0, (PyObject *)prv); -+ Py_DECREF(rec->providedby); -+ rec->providedby = _lst; -+ } -+ -+ /* -+ if prv.recommendedby: -+ prv.recommendedby.append(prv) -+ else: -+ prv.recommendedby = [prv] -+ */ -+ if (PyList_Check(prv->recommendedby)) { -+ PyList_Append(prv->recommendedby, (PyObject *)rec); -+ } else { -+ PyObject *_lst = PyList_New(1); -+ Py_INCREF(rec); -+ PyList_SET_ITEM(_lst, 0, (PyObject *)rec); -+ Py_DECREF(prv->recommendedby); -+ prv->recommendedby = _lst; -+ } -+ } -+ Py_DECREF(ret); -+ } -+ } -+ - /* lst = upgnames.get(prv.name) */ - lst = PyDict_GetItem(upgnames, prv->name); - -@@ -3821,6 +4094,21 @@ Cache__setstate__(CacheObject *self, PyObject *state) - } - - /* -+ for rec in pkg.recommends: -+ rec.packages.append(pkg) -+ recommends[rec] = True -+ */ -+ if (PyList_Check(pkgobj->recommends)) { -+ jlen = PyList_GET_SIZE(pkgobj->recommends); -+ for (j = 0; j != jlen; j++) { -+ PyObject *rec = PyList_GET_ITEM(pkgobj->recommends, j); -+ DependsObject *recobj = (DependsObject *)rec; -+ PyList_Append(recobj->packages, pkg); -+ PyDict_SetItem(recommends, rec, Py_True); -+ } -+ } -+ -+ /* - for upg in pkg.upgrades: - upg.packages.append(pkg) - upgrades[upg] = True -diff --git a/smart/commands/query.py b/smart/commands/query.py -index 9265cd9..b6f5697 100644 ---- a/smart/commands/query.py -+++ b/smart/commands/query.py -@@ -750,6 +750,22 @@ class TextOutput(NullOutput): - name = str(prvpkg) - print " ", "%s (%s)" % (name, prv) - -+ def showRecommends(self, pkg, rec): -+ if self._firstrecommends: -+ self._firstrecommends = False -+ print " ", _("Recommends:") -+ print " ", rec -+ -+ def showRecommendsProvidedBy(self, pkg, req, prv, prvpkg): -+ if self._firstrecommendsprovidedby: -+ self._firstrecommendsprovidedby = False -+ print " ", _("Provided By:") -+ if self.opts.hide_version: -+ name = prvpkg.name -+ else: -+ name = str(prvpkg) -+ print " ", "%s (%s)" % (name, prv) -+ - def showUpgrades(self, pkg, upg): - if self._firstupgrades: - self._firstupgrades = False diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-rpm-transaction-failure-check.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-rpm-transaction-failure-check.patch deleted file mode 100644 index bb8c3afdb..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-rpm-transaction-failure-check.patch +++ /dev/null @@ -1,57 +0,0 @@ -From 0c55d7e18f40465e95e8e4bf22af01f5d4477d3c Mon Sep 17 00:00:00 2001 -From: Daniel Klauer -Date: Wed, 11 May 2016 17:22:49 +0200 -Subject: [PATCH] rpm: Don't ignore transaction error with empty problems list - -SmartPM could misinterpret RPM transaction error as success, -if ts.run() 
(RPM Python API) returns an empty problems list, -because of incorrect check for None which treated empty list -to be the same as None when it has different meaning. - -ts.run() returns: -* None in case of success -* problems list in case of error, may be empty -(look at rpmts_Run() in rpm-5.4.14/python/rpmts-py.c [1]) - -"if mylist" is not good enough to check for error here, because it will -treat an empty list as "false" because its len() == 0 [2]. - -ts.check() seems to be different (it's ok for it to return an empty list), -but for consistency it should be made clear that it can return either None, -an empty list or a non-empty list. - -[1] http://rpm5.org/cvs/fileview?f=rpm/python/rpmts-py.c&v=1.111.2.3 -[2] https://docs.python.org/2/library/stdtypes.html#truth-value-testing - -Upstream-Status: Pending - -Signed-off-by: Daniel Klauer ---- - smart/backends/rpm/pm.py | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) - -diff --git a/smart/backends/rpm/pm.py b/smart/backends/rpm/pm.py -index 9bbd952..635f726 100644 ---- a/smart/backends/rpm/pm.py -+++ b/smart/backends/rpm/pm.py -@@ -208,7 +208,7 @@ class RPMPackageManager(PackageManager): - force = sysconf.get("rpm-force", False) - if not force: - probs = ts.check() -- if probs: -+ if (probs is not None) and (len(probs) != 0): - problines = [] - for prob in probs: - name1 = "%s-%s-%s" % prob[0] -@@ -247,7 +247,7 @@ class RPMPackageManager(PackageManager): - del getTS.ts - cb.grabOutput(False) - prog.setDone() -- if probs: -+ if probs is not None: - raise Error, "\n".join([x[0] for x in probs]) - prog.stop() - --- -1.9.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-set-noprogress-for-pycurl.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-set-noprogress-for-pycurl.patch deleted file mode 100644 index 2885998ac..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smart-set-noprogress-for-pycurl.patch +++ /dev/null @@ -1,20 +0,0 @@ -Set NOPROGRESS for pycurl just as same as default operation in pycurl module itself. -If set NOPROGRESS with 0 for pycurl, it causes dead lock issue of Python GIL when -call smart library by python gui just like pygtk. - -Upstream-Status: Pending - -Signed-off-by: Kai Kang ---- -diff -u smart-1.4.1/smart.orig/fetcher.py smart-1.4.1/smart/fetcher.py ---- smart-1.4.1/smart.orig/fetcher.py 2014-07-15 16:42:19.240437080 +0800 -+++ smart-1.4.1/smart/fetcher.py 2014-07-15 17:02:37.812470289 +0800 -@@ -1720,7 +1720,7 @@ - handle.setopt(pycurl.OPT_FILETIME, 1) - handle.setopt(pycurl.LOW_SPEED_LIMIT, 1) - handle.setopt(pycurl.LOW_SPEED_TIME, SOCKETTIMEOUT) -- handle.setopt(pycurl.NOPROGRESS, 0) -+ handle.setopt(pycurl.NOPROGRESS, 1) - handle.setopt(pycurl.PROGRESSFUNCTION, progress) - handle.setopt(pycurl.WRITEDATA, local) - handle.setopt(pycurl.FOLLOWLOCATION, 1) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smartpm-rpm5-support-check-signatures.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smartpm-rpm5-support-check-signatures.patch deleted file mode 100644 index 4067a90a0..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm/smartpm-rpm5-support-check-signatures.patch +++ /dev/null @@ -1,112 +0,0 @@ -From 5b79e28bd70a0ec5b34c5ff19b66cbbdd1e48835 Mon Sep 17 00:00:00 2001 -From: Haiqing Bai -Date: Fri, 18 Mar 2016 13:34:07 +0800 -Subject: [PATCH] Make smartpm to support check signatures of rpmv5. 
- -The original support for 'rpm-check-signatures' has been -disabled for the RPMv5 does not support '_RPMVSF_NOSIGNATURES' -now. This fix replaces the '_RPMVSF_NOSIGNATURES' with -rpm VS flags set:RPMVSF_NODSAHEADER|RPMVSF_NORSAHEADER| -RPMVSF_NODSA|RPMVSF_NORSA. - -Upstream-Status: Pending -Signed-off-by: Haiqing Bai ---- - smart/backends/rpm/base.py | 43 +++++++++++++++++++++++++++++++---------- - smart/backends/rpm/pm.py | 2 +- - smart/plugins/yumchannelsync.py | 5 +++-- - 3 files changed, 37 insertions(+), 13 deletions(-) - -diff --git a/smart/backends/rpm/base.py b/smart/backends/rpm/base.py -index 85f4d49..dbd6165 100644 ---- a/smart/backends/rpm/base.py -+++ b/smart/backends/rpm/base.py -@@ -63,11 +63,23 @@ def getTS(new=False): - if sysconf.get("rpm-dbpath"): - rpm.addMacro('_dbpath', "/" + sysconf.get("rpm-dbpath")) - getTS.ts = rpm.ts(getTS.root) -- if not sysconf.get("rpm-check-signatures", False): -- if hasattr(rpm, '_RPMVSF_NOSIGNATURES'): -- getTS.ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES) -- else: -- raise Error, _("rpm requires checking signatures") -+ -+ # _RPMVSF_NOSIGNATURES is not supported in RPMv5, so here uses -+ # RPMVSF_NODSAHEADER|RPMVSF_NORSAHEADER|RPMVSF_NODSA|RPMVSF_NORSA -+ # to replace '_RPMVSF_NOSIGNATURES' to continue to support check -+ # rpm signatures -+ -+ #if not sysconf.get("rpm-check-signatures", False): -+ # if hasattr(rpm, '_RPMVSF_NOSIGNATURES'): -+ # getTS.ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES) -+ # else: -+ # raise Error, _("rpm requires checking signatures") -+ if sysconf.get("rpm-check-signatures") == False: -+ getTS.ts.setVSFlags(rpm.RPMVSF_NODSAHEADER|rpm.RPMVSF_NORSAHEADER|\ -+ rpm.RPMVSF_NODSA|rpm.RPMVSF_NORSA) -+ else: -+ getTS.ts.setVSFlags(0) -+ - rpm_dbpath = sysconf.get("rpm-dbpath", "var/lib/rpm") - dbdir = rpm_join_dbpath(getTS.root, rpm_dbpath) - if not os.path.isdir(dbdir): -@@ -89,11 +101,22 @@ def getTS(new=False): - if sysconf.get("rpm-dbpath"): - rpm.addMacro('_dbpath', "/" + sysconf.get("rpm-dbpath")) - ts = rpm.ts(getTS.root) -- if not sysconf.get("rpm-check-signatures", False): -- if hasattr(rpm, '_RPMVSF_NOSIGNATURES'): -- ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES) -- else: -- raise Error, _("rpm requires checking signatures") -+ -+ # _RPMVSF_NOSIGNATURES is not supported in RPMv5, so here uses -+ # RPMVSF_NODSAHEADER|RPMVSF_NORSAHEADER|RPMVSF_NODSA|RPMVSF_NORSA -+ # to replace '_RPMVSF_NOSIGNATURES' to continue to support check -+ # rpm signatures -+ -+ #if not sysconf.get("rpm-check-signatures", False): -+ # if hasattr(rpm, '_RPMVSF_NOSIGNATURES'): -+ # ts.setVSFlags(rpm._RPMVSF_NOSIGNATURES) -+ # else: -+ # raise Error, _("rpm requires checking signatures") -+ if sysconf.get("rpm-check-signatures") == False: -+ ts.setVSFlags(rpm.RPMVSF_NODSAHEADER|rpm.RPMVSF_NORSAHEADER|\ -+ rpm.RPMVSF_NODSA|rpm.RPMVSF_NORSA) -+ else: -+ ts.setVSFlags(0) - return ts - else: - return getTS.ts -diff --git a/smart/backends/rpm/pm.py b/smart/backends/rpm/pm.py -index b57a844..7b651b5 100644 ---- a/smart/backends/rpm/pm.py -+++ b/smart/backends/rpm/pm.py -@@ -180,7 +180,7 @@ class RPMPackageManager(PackageManager): - fd = os.open(path, os.O_RDONLY) - try: - h = ts.hdrFromFdno(fd) -- if sysconf.get("rpm-check-signatures", False): -+ if sysconf.get("rpm-check-signatures", True): - if get_public_key(h) == '(none)': - raise rpm.error('package is not signed') - except rpm.error, e: -diff --git a/smart/plugins/yumchannelsync.py b/smart/plugins/yumchannelsync.py -index f8107e6..2dc5482 100644 ---- a/smart/plugins/yumchannelsync.py -+++ 
b/smart/plugins/yumchannelsync.py -@@ -56,8 +56,9 @@ def _getreleasever(): - - rpmroot = sysconf.get("rpm-root", "/") - ts = rpmUtils.transaction.initReadOnlyTransaction(root=rpmroot) -- if hasattr(rpm, '_RPMVSF_NOSIGNATURES') and hasattr(rpm, '_RPMVSF_NODIGESTS'): -- ts.pushVSFlags(~(rpm._RPMVSF_NOSIGNATURES|rpm._RPMVSF_NODIGESTS)) -+ #_RPMVSF_NOSIGNATURES is not supported in RPMv5 -+ #if hasattr(rpm, '_RPMVSF_NOSIGNATURES') and hasattr(rpm, '_RPMVSF_NODIGESTS'): -+ # ts.pushVSFlags(~(rpm._RPMVSF_NOSIGNATURES|rpm._RPMVSF_NODIGESTS)) - releasever = None - # HACK: we're hard-coding the most used distros, will add more if needed - idx = ts.dbMatch('provides', 'fedora-release') --- -1.9.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm_git.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm_git.bb deleted file mode 100644 index 861910cc2..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python-smartpm_git.bb +++ /dev/null @@ -1,141 +0,0 @@ -SUMMARY = "The Smart Package Manager" -DESCRIPTION = "The Smart Package Manager project has the ambitious objective of creating \ -smart and portable algorithms for solving adequately the problem of managing software \ -upgrades and installation." - -HOMEPAGE = "http://labix.org/smart/" -SECTION = "devel/python" -LICENSE = "GPLv2" -LIC_FILES_CHKSUM = "file://LICENSE;md5=393a5ca445f6965873eca0259a17f833" - -DEPENDS = "python rpm gettext-native python-rpm" -SRCNAME = "smart" - -SRC_URI = "\ - git://github.com/smartpm/smart.git \ - file://smart-recommends.patch \ - file://smart-channelsdir.patch \ - file://smart-rpm-transaction-failure-check.patch \ - file://smart-attempt.patch \ - file://smart-attempt-fix.patch \ - file://smart-add-for-rpm-ignoresize-check.patch \ - file://smart-already-installed-message.patch \ - file://smart-set-noprogress-for-pycurl.patch \ - file://smart-cache.py-getPackages-matches-name-version.patch \ - file://smart-channel-remove-all.patch \ - file://smart-locale.patch \ - file://smartpm-rpm5-support-check-signatures.patch \ - file://smart-add-deugging-when-targetpath-is-empty.patch \ - file://channels-rpm_sys-use-md5sum-instead-of-mtime-as-the-.patch \ - " - -SRCREV = "407a7eca766431257dcd1da15175cc36a1bb22d0" -PV = "1.5+git${SRCPV}" - -S = "${WORKDIR}/git" - -# Options - rpm, qt4, gtk -PACKAGECONFIG ??= "rpm" - -RPM_RDEP = "${PN}-backend-rpm" -QT_RDEP = "${PN}-interface-qt4" -GTK_RDEP = "${PN}-interface-gtk" - -RPM_RDEP_class-native = "" -QT_RDEP_class-native = "" -GTK_RDEP_class-native = "" - -RPM_RDEP_class-nativesdk = "" -QT_RDEP_class-nativesdk = "" -GTK_RDEP_class-nativesdk = "" - -PACKAGECONFIG[rpm] = ",,rpm,${RPM_RDEP}" -PACKAGECONFIG[qt4] = ",,qt4-x11,${QT_RDEP}" -PACKAGECONFIG[gtk] = ",,gtk+,${GTK_RDEP}" - -inherit distutils - -do_install_append() { - # We don't support the following items - rm -rf ${D}${PYTHON_SITEPACKAGES_DIR}/smart/backends/slack - rm -rf ${D}${PYTHON_SITEPACKAGES_DIR}/smart/backends/arch - rm -rf ${D}${PYTHON_SITEPACKAGES_DIR}/smart/interfaces/qt - - # Temporary, debian support in OE is missing the python module - rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/smart/plugins/aptchannelsync.py* - rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/smart/plugins/debdir.py* - rm -rf ${D}${PYTHON_SITEPACKAGES_DIR}/smart/backends/deb - - # Disable automatic channel detection - rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/smart/plugins/detectsys.py* - - # Disable landscape support - rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/smart/plugins/landscape.py* - - # Disable 
urpmi channel support - rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/smart/plugins/urpmichannelsync.py* - - # Disable yum channel support - rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/smart/plugins/yumchannelsync.py* - - # Disable zypper channel support - rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/smart/plugins/zyppchannelsync.py* - - if [ -z "${@bb.utils.contains('PACKAGECONFIG', 'rpm', 'rpm', '', d)}" ]; then - rm -f ${D}${PYTHON_SITEPACKAGES_DIR}/smart/plugins/rpmdir.py* - rm -rf ${D}${PYTHON_SITEPACKAGES_DIR}/smart/backends/rpm - fi - - if [ -z "${@bb.utils.contains('PACKAGECONFIG', 'qt4', 'qt4', '', d)}" ]; then - rm -rf ${D}${PYTHON_SITEPACKAGES_DIR}/smart/interfaces/qt4 - fi - - if [ -z "${@bb.utils.contains('PACKAGECONFIG', 'gtk+', 'gtk', '', d)}" ]; then - rm -rf ${D}${PYTHON_SITEPACKAGES_DIR}/smart/interfaces/gtk - fi -} - -add_native_wrapper() { - create_wrapper ${D}/${bindir}/smart \ - RPM_USRLIBRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('libdir', True), d.getVar('bindir', True))}/rpm \ - RPM_ETCRPM='$'{RPM_ETCRPM-'`dirname $''realpath`'/${@os.path.relpath(d.getVar('sysconfdir', True), d.getVar('bindir', True))}/rpm} \ - RPM_LOCALEDIRRPM='`dirname $''realpath`'/${@os.path.relpath(d.getVar('datadir', True), d.getVar('bindir', True))}/locale -} - -do_install_append_class-native() { - sed -i -e 's|^#!.*/usr/bin/env python|#! /usr/bin/env nativepython|' ${D}${bindir}/smart - add_native_wrapper -} - -do_install_append_class-nativesdk() { - add_native_wrapper -} - -PACKAGES = "${PN}-dev ${PN}-dbg ${PN}-doc smartpm \ - ${@bb.utils.contains('PACKAGECONFIG', 'rpm', '${PN}-backend-rpm', '', d)} \ - ${@bb.utils.contains('PACKAGECONFIG', 'qt4', '${PN}-interface-qt4', '', d)} \ - ${@bb.utils.contains('PACKAGECONFIG', 'gtk', '${PN}-interface-gtk', '', d)} \ - ${PN}-interface-images ${PN}" - -RDEPENDS_smartpm = "${PN}" - -RDEPENDS_${PN} += "${PN}-backend-rpm python-codecs python-textutils python-xml python-fcntl \ - python-pickle python-crypt python-compression python-shell \ - python-resource python-netclient python-threading python-unixadmin python-pprint" -RDEPENDS_${PN}_class-native = "" - -RDEPENDS_${PN}-backend-rpm = "python-rpm" - -RDEPENDS_${PN}-interface-qt4 = "qt4-x11 ${PN}-interface-images" -RDEPENDS_${PN}-interface-gtk = "gtk+ ${PN}-interface-images" - -FILES_smartpm = "${bindir}/smart" - -FILES_${PN}-backend-rpm = "${PYTHON_SITEPACKAGES_DIR}/smart/backends/rpm" - -FILES_${PN}-interface-qt4 = "${PYTHON_SITEPACKAGES_DIR}/smart/interfaces/qt4" -FILES_${PN}-interface-gtk = "${PYTHON_SITEPACKAGES_DIR}/smart/interfaces/gtk" -FILES_${PN}-interface-images = "${datadir}/${baselib}/python*/site-packages/smart/interfaces/images" - -BBCLASSEXTEND = "native nativesdk" - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python.inc b/import-layers/yocto-poky/meta/recipes-devtools/python/python.inc index 79a431c7e..b40f551ab 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python.inc @@ -9,16 +9,18 @@ LIC_FILES_CHKSUM = "file://LICENSE;md5=6b60258130e4ed10d3101517eb5b9385" SRC_URI = "http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz" -SRC_URI[md5sum] = "57dffcee9cee8bb2ab5f82af1d8e9a69" -SRC_URI[sha256sum] = "d7837121dd5652a05fef807c361909d255d173280c4e1a4ded94d73d80a1f978" +SRC_URI[md5sum] = "53b43534153bb2a0363f08bae8b9d990" +SRC_URI[sha256sum] = "35d543986882f78261f97787fd3e06274bfa6df29fac9b4a94f73930ff98f731" # python recipe is actually python 2.x # also, exclude pre-releases for both 
python 2.x and 3.x UPSTREAM_CHECK_REGEX = "[Pp]ython-(?P2(\.\d+)+).tar" +CVE_PRODUCT = "python" + PYTHON_MAJMIN = "2.7" -inherit autotools +inherit autotools pkgconfig EXTRA_OECONF = "\ --with-threads \ @@ -30,3 +32,9 @@ EXTRA_OECONF = "\ ac_cv_header_bluetooth_bluetooth_h=no ac_cv_header_bluetooth_h=no \ ${PYTHONLSBOPTS} \ " + +do_install_append () { + sed -i -e 's:${HOSTTOOLS_DIR}/install:install:g' \ + -e 's:${HOSTTOOLS_DIR}/mkdir:mkdir:g' \ + ${D}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata.py +} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python/01-use-proper-tools-for-cross-build.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python/01-use-proper-tools-for-cross-build.patch index b2a8c3b5a..366ce3e40 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python/01-use-proper-tools-for-cross-build.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python/01-use-proper-tools-for-cross-build.patch @@ -9,11 +9,11 @@ Signed-off-by: Paul Eggleton Rebased for python-2.7.9 Signed-off-by: Alejandro Hernandez -Index: Python-2.7.12/Makefile.pre.in +Index: Python-2.7.13/Makefile.pre.in =================================================================== ---- Python-2.7.12.orig/Makefile.pre.in -+++ Python-2.7.12/Makefile.pre.in -@@ -246,6 +246,7 @@ LIBFFI_INCLUDEDIR= @LIBFFI_INCLUDEDIR@ +--- Python-2.7.13.orig/Makefile.pre.in ++++ Python-2.7.13/Makefile.pre.in +@@ -245,6 +245,7 @@ LIBFFI_INCLUDEDIR= @LIBFFI_INCLUDEDIR@ ########################################################################## # Parser PGEN= Parser/pgen$(EXE) @@ -21,7 +21,7 @@ Index: Python-2.7.12/Makefile.pre.in PSRCS= \ Parser/acceler.c \ -@@ -513,7 +514,7 @@ $(BUILDPYTHON): Modules/python.o $(LIBRA +@@ -512,7 +513,7 @@ $(BUILDPYTHON): Modules/python.o $(LIBRA $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) platform: $(BUILDPYTHON) pybuilddir.txt @@ -30,16 +30,16 @@ Index: Python-2.7.12/Makefile.pre.in # Create build directory and generate the sysconfig build-time data there. # pybuilddir.txt contains the name of the build dir and is used for -@@ -684,7 +685,7 @@ $(GRAMMAR_H): $(GRAMMAR_INPUT) $(PGEN) +@@ -681,7 +682,7 @@ Modules/pwdmodule.o: $(srcdir)/Modules/p + + $(GRAMMAR_H): @GENERATED_COMMENT@ $(GRAMMAR_INPUT) $(PGEN) @$(MKDIR_P) Include - # Avoid copying the file onto itself for an in-tree build - if test "$(cross_compiling)" != "yes"; then \ -- $(PGEN) $(GRAMMAR_INPUT) $(GRAMMAR_H) $(GRAMMAR_C); \ -+ $(HOSTPGEN) $(GRAMMAR_INPUT) $(GRAMMAR_H) $(GRAMMAR_C); \ - else \ - cp $(srcdir)/Include/graminit.h $(GRAMMAR_H).tmp; \ - mv $(GRAMMAR_H).tmp $(GRAMMAR_H); \ -@@ -1133,27 +1134,27 @@ libinstall: build_all $(srcdir)/Lib/$(PL +- $(PGEN) $(GRAMMAR_INPUT) $(GRAMMAR_H) $(GRAMMAR_C) ++ $(HOSTPGEN) $(GRAMMAR_INPUT) $(GRAMMAR_H) $(GRAMMAR_C) + $(GRAMMAR_C): @GENERATED_COMMENT@ $(GRAMMAR_H) + touch $(GRAMMAR_C) + +@@ -1121,27 +1122,27 @@ libinstall: build_all $(srcdir)/Lib/$(PL $(DESTDIR)$(LIBDEST)/distutils/tests ; \ fi PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ @@ -73,10 +73,10 @@ Index: Python-2.7.12/Makefile.pre.in # Create the PLATDIR source directory, if one wasn't distributed.. 
$(srcdir)/Lib/$(PLATDIR): -Index: Python-2.7.12/setup.py +Index: Python-2.7.13/setup.py =================================================================== ---- Python-2.7.12.orig/setup.py -+++ Python-2.7.12/setup.py +--- Python-2.7.13.orig/setup.py ++++ Python-2.7.13/setup.py @@ -350,6 +350,7 @@ class PyBuildExt(build_ext): self.failed.append(ext.name) self.announce('*** WARNING: renaming "%s" since importing it' diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python/CVE-2016-5636.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python/CVE-2016-5636.patch deleted file mode 100644 index 9a3747145..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python/CVE-2016-5636.patch +++ /dev/null @@ -1,44 +0,0 @@ - -# HG changeset patch -# User Benjamin Peterson -# Date 1453357424 28800 -# Node ID 985fc64c60d6adffd1138b6cc46df388ca91ca5d -# Parent 7ec954b9fc54448a35b56d271340ba109eb381b9 -prevent buffer overflow in get_data (closes #26171) - -Upstream-Status: Backport -https://hg.python.org/cpython/rev/985fc64c60d6 - -CVE: CVE-2016-5636 -Signed-off-by: Armin Kuster - -Index: Python-2.7.11/Misc/NEWS -=================================================================== ---- Python-2.7.11.orig/Misc/NEWS -+++ Python-2.7.11/Misc/NEWS -@@ -7,6 +7,9 @@ What's New in Python 2.7.11? - - *Release date: 2015-12-05* - -+- Issue #26171: Fix possible integer overflow and heap corruption in -+ zipimporter.get_data(). -+ - Library - ------- - -Index: Python-2.7.11/Modules/zipimport.c -=================================================================== ---- Python-2.7.11.orig/Modules/zipimport.c -+++ Python-2.7.11/Modules/zipimport.c -@@ -895,6 +895,11 @@ get_data(char *archive, PyObject *toc_en - PyMarshal_ReadShortFromFile(fp); /* local header size */ - file_offset += l; /* Start of file data */ - -+ if (data_size > LONG_MAX - 1) { -+ fclose(fp); -+ PyErr_NoMemory(); -+ return NULL; -+ } - raw_data = PyString_FromStringAndSize((char *)NULL, compress == 0 ? - data_size : data_size + 1); - if (raw_data == NULL) { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python/Don-t-use-getentropy-on-Linux.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python/Don-t-use-getentropy-on-Linux.patch new file mode 100644 index 000000000..38e53778d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python/Don-t-use-getentropy-on-Linux.patch @@ -0,0 +1,41 @@ +Upstream-Status: Backport + +Signed-off-by: Andreas Oberritter + +From 905d1b30ac7cb0e31c57cec0533825c8f170b942 Mon Sep 17 00:00:00 2001 +From: Victor Stinner +Date: Mon, 9 Jan 2017 11:10:41 +0100 +Subject: [PATCH] Don't use getentropy() on Linux + +Issue #29188: Support glibc 2.24 on Linux: don't use getentropy() function but +read from /dev/urandom to get random bytes, for example in os.urandom(). On +Linux, getentropy() is implemented which getrandom() is blocking mode, whereas +os.urandom() should not block. + +(cherry picked from commit 2687486756721e39164fa9f597e468c35d495227) +--- + Python/random.c | 11 +++++++++-- + 1 file changed, 9 insertions(+), 2 deletions(-) + +diff --git a/Python/random.c b/Python/random.c +index b4bc1f3..f3f5d14 100644 +--- a/Python/random.c ++++ b/Python/random.c +@@ -94,8 +94,15 @@ win32_urandom(unsigned char *buffer, Py_ssize_t size, int raise) + } + + /* Issue #25003: Don't use getentropy() on Solaris (available since +- * Solaris 11.3), it is blocking whereas os.urandom() should not block. 
*/ +-#elif defined(HAVE_GETENTROPY) && !defined(sun) ++ Solaris 11.3), it is blocking whereas os.urandom() should not block. ++ ++ Issue #29188: Don't use getentropy() on Linux since the glibc 2.24 ++ implements it with the getrandom() syscall which can fail with ENOSYS, ++ and this error is not supported in py_getentropy() and getrandom() is called ++ with flags=0 which blocks until system urandom is initialized, which is not ++ the desired behaviour to seed the Python hash secret nor for os.urandom(): ++ see the PEP 524 which was only implemented in Python 3.6. */ ++#elif defined(HAVE_GETENTROPY) && !defined(sun) && !defined(linux) + #define PY_GETENTROPY 1 + + /* Fill buffer with size pseudo-random bytes generated by getentropy(). diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python/avoid_parallel_make_races_on_pgen.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python/avoid_parallel_make_races_on_pgen.patch deleted file mode 100644 index 8012245af..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python/avoid_parallel_make_races_on_pgen.patch +++ /dev/null @@ -1,27 +0,0 @@ -Upstream-Status: Pending - -Avoids parallel make races linking errors when making Parser/PGEN - -- Implements Richard Purdie's idea - -Signed-off-by: Richard Purdie -Signed-off-by: Alejandro Hernandez - -Index: Python-2.7.9/Makefile.pre.in -=================================================================== ---- Python-2.7.9.orig/Makefile.pre.in -+++ Python-2.7.9/Makefile.pre.in -@@ -611,12 +611,10 @@ Modules/grpmodule.o: $(srcdir)/Modules/g - - Modules/pwdmodule.o: $(srcdir)/Modules/pwdmodule.c $(srcdir)/Modules/posixmodule.h - --$(GRAMMAR_H): $(GRAMMAR_INPUT) $(PGENSRCS) -+$(GRAMMAR_H): $(GRAMMAR_INPUT) $(PGENSRCS) $(PGEN) - @$(MKDIR_P) Include -- $(MAKE) $(PGEN) - $(HOSTPGEN) $(GRAMMAR_INPUT) $(GRAMMAR_H) $(GRAMMAR_C) - $(GRAMMAR_C): $(GRAMMAR_H) $(GRAMMAR_INPUT) $(PGENSRCS) -- $(MAKE) $(GRAMMAR_H) - touch $(GRAMMAR_C) - - $(PGEN): $(PGENOBJS) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python/multilib.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python/multilib.patch index b169133d7..50cc5911a 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python/multilib.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python/multilib.patch @@ -1,11 +1,11 @@ Rebased for python-2.7.9 Signed-off-by: Alejandro Hernandez -Index: Python-2.7.12/configure.ac +Index: Python-2.7.13/configure.ac =================================================================== ---- Python-2.7.12.orig/configure.ac -+++ Python-2.7.12/configure.ac -@@ -756,6 +756,10 @@ SunOS*) +--- Python-2.7.13.orig/configure.ac ++++ Python-2.7.13/configure.ac +@@ -759,6 +759,10 @@ SunOS*) ;; esac @@ -16,10 +16,10 @@ Index: Python-2.7.12/configure.ac AC_SUBST(LIBRARY) AC_MSG_CHECKING(LIBRARY) -Index: Python-2.7.12/Include/pythonrun.h +Index: Python-2.7.13/Include/pythonrun.h =================================================================== ---- Python-2.7.12.orig/Include/pythonrun.h -+++ Python-2.7.12/Include/pythonrun.h +--- Python-2.7.13.orig/Include/pythonrun.h ++++ Python-2.7.13/Include/pythonrun.h @@ -108,6 +108,7 @@ PyAPI_FUNC(char *) Py_GetPath(void); /* In their own files */ PyAPI_FUNC(const char *) Py_GetVersion(void); @@ -28,10 +28,10 @@ Index: Python-2.7.12/Include/pythonrun.h PyAPI_FUNC(const char *) Py_GetCopyright(void); PyAPI_FUNC(const char *) Py_GetCompiler(void); PyAPI_FUNC(const char *) Py_GetBuildInfo(void); -Index: 
Python-2.7.12/Lib/distutils/command/install.py +Index: Python-2.7.13/Lib/distutils/command/install.py =================================================================== ---- Python-2.7.12.orig/Lib/distutils/command/install.py -+++ Python-2.7.12/Lib/distutils/command/install.py +--- Python-2.7.13.orig/Lib/distutils/command/install.py ++++ Python-2.7.13/Lib/distutils/command/install.py @@ -22,6 +22,8 @@ from site import USER_BASE from site import USER_SITE @@ -50,10 +50,10 @@ Index: Python-2.7.12/Lib/distutils/command/install.py 'headers': '$base/include/python$py_version_short/$dist_name', 'scripts': '$base/bin', 'data' : '$base', -Index: Python-2.7.12/Lib/distutils/sysconfig.py +Index: Python-2.7.13/Lib/distutils/sysconfig.py =================================================================== ---- Python-2.7.12.orig/Lib/distutils/sysconfig.py -+++ Python-2.7.12/Lib/distutils/sysconfig.py +--- Python-2.7.13.orig/Lib/distutils/sysconfig.py ++++ Python-2.7.13/Lib/distutils/sysconfig.py @@ -119,8 +119,11 @@ def get_python_lib(plat_specific=0, stan prefix = plat_specific and EXEC_PREFIX or PREFIX @@ -68,23 +68,23 @@ Index: Python-2.7.12/Lib/distutils/sysconfig.py if standard_lib: return libpython else: -Index: Python-2.7.12/Lib/pydoc.py +Index: Python-2.7.13/Lib/pydoc.py =================================================================== ---- Python-2.7.12.orig/Lib/pydoc.py -+++ Python-2.7.12/Lib/pydoc.py -@@ -384,7 +384,7 @@ class Doc: +--- Python-2.7.13.orig/Lib/pydoc.py ++++ Python-2.7.13/Lib/pydoc.py +@@ -375,7 +375,7 @@ class Doc: + docmodule = docclass = docroutine = docother = docproperty = docdata = fail - docloc = os.environ.get("PYTHONDOCS", - "http://docs.python.org/library") -- basedir = os.path.join(sys.exec_prefix, "lib", -+ basedir = os.path.join(sys.exec_prefix, sys.lib, - "python"+sys.version[0:3]) - if (isinstance(object, type(os)) and - (object.__name__ in ('errno', 'exceptions', 'gc', 'imp', -Index: Python-2.7.12/Lib/site.py + def getdocloc(self, object, +- basedir=os.path.join(sys.exec_prefix, "lib", ++ basedir=os.path.join(sys.exec_prefix, "sys.lib", + "python"+sys.version[0:3])): + """Return the location of module docs or None""" + +Index: Python-2.7.13/Lib/site.py =================================================================== ---- Python-2.7.12.orig/Lib/site.py -+++ Python-2.7.12/Lib/site.py +--- Python-2.7.13.orig/Lib/site.py ++++ Python-2.7.13/Lib/site.py @@ -288,13 +288,18 @@ def getsitepackages(): if sys.platform in ('os2emx', 'riscos'): sitepackages.append(os.path.join(prefix, "Lib", "site-packages")) @@ -104,13 +104,13 @@ Index: Python-2.7.12/Lib/site.py sitepackages.append(prefix) - sitepackages.append(os.path.join(prefix, "lib", "site-packages")) + sitepackages.append(os.path.join(prefix, sys.lib, "site-packages")) - if sys.platform == "darwin": - # for framework builds *only* we add the standard Apple - # locations. 
-Index: Python-2.7.12/Lib/sysconfig.py + return sitepackages + + def addsitepackages(known_paths): +Index: Python-2.7.13/Lib/sysconfig.py =================================================================== ---- Python-2.7.12.orig/Lib/sysconfig.py -+++ Python-2.7.12/Lib/sysconfig.py +--- Python-2.7.13.orig/Lib/sysconfig.py ++++ Python-2.7.13/Lib/sysconfig.py @@ -7,10 +7,10 @@ from os.path import pardir, realpath _INSTALL_SCHEMES = { @@ -139,10 +139,10 @@ Index: Python-2.7.12/Lib/sysconfig.py 'include': '{userbase}/include/python{py_version_short}', 'scripts': '{userbase}/bin', 'data' : '{userbase}', -Index: Python-2.7.12/Lib/test/test_dl.py +Index: Python-2.7.13/Lib/test/test_dl.py =================================================================== ---- Python-2.7.12.orig/Lib/test/test_dl.py -+++ Python-2.7.12/Lib/test/test_dl.py +--- Python-2.7.13.orig/Lib/test/test_dl.py ++++ Python-2.7.13/Lib/test/test_dl.py @@ -4,10 +4,11 @@ import unittest from test.test_support import verbose, import_module @@ -157,14 +157,14 @@ Index: Python-2.7.12/Lib/test/test_dl.py ('/usr/bin/cygwin1.dll', 'getpid'), ('/usr/lib/libc.dylib', 'getpid'), ] -Index: Python-2.7.12/Lib/test/test_site.py +Index: Python-2.7.13/Lib/test/test_site.py =================================================================== ---- Python-2.7.12.orig/Lib/test/test_site.py -+++ Python-2.7.12/Lib/test/test_site.py -@@ -246,12 +246,16 @@ class HelperFunctionsTests(unittest.Test - self.assertEqual(dirs[2], wanted) +--- Python-2.7.13.orig/Lib/test/test_site.py ++++ Python-2.7.13/Lib/test/test_site.py +@@ -235,12 +235,16 @@ class HelperFunctionsTests(unittest.Test + self.assertEqual(dirs[0], wanted) elif os.sep == '/': - # OS X non-framwework builds, Linux, FreeBSD, etc + # OS X, Linux, FreeBSD, etc - self.assertEqual(len(dirs), 2) wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3], 'site-packages') @@ -181,10 +181,10 @@ Index: Python-2.7.12/Lib/test/test_site.py else: # other platforms self.assertEqual(len(dirs), 2) -Index: Python-2.7.12/Lib/trace.py +Index: Python-2.7.13/Lib/trace.py =================================================================== ---- Python-2.7.12.orig/Lib/trace.py -+++ Python-2.7.12/Lib/trace.py +--- Python-2.7.13.orig/Lib/trace.py ++++ Python-2.7.13/Lib/trace.py @@ -754,10 +754,10 @@ def main(argv=None): # should I also call expanduser? 
(after all, could use $HOME) @@ -198,10 +198,10 @@ Index: Python-2.7.12/Lib/trace.py "python" + sys.version[:3])) s = os.path.normpath(s) ignore_dirs.append(s) -Index: Python-2.7.12/Makefile.pre.in +Index: Python-2.7.13/Makefile.pre.in =================================================================== ---- Python-2.7.12.orig/Makefile.pre.in -+++ Python-2.7.12/Makefile.pre.in +--- Python-2.7.13.orig/Makefile.pre.in ++++ Python-2.7.13/Makefile.pre.in @@ -92,6 +92,7 @@ PY_CFLAGS= $(CFLAGS) $(CPPFLAGS) $(CFLAG # Machine-dependent subdirectories @@ -219,7 +219,7 @@ Index: Python-2.7.12/Makefile.pre.in # Detailed destination directories BINLIBDEST= $(LIBDIR)/python$(VERSION) -@@ -670,6 +671,7 @@ Modules/getpath.o: $(srcdir)/Modules/get +@@ -669,6 +670,7 @@ Modules/getpath.o: $(srcdir)/Modules/get -DEXEC_PREFIX='"$(exec_prefix)"' \ -DVERSION='"$(VERSION)"' \ -DVPATH='"$(VPATH)"' \ @@ -227,7 +227,7 @@ Index: Python-2.7.12/Makefile.pre.in -o $@ $(srcdir)/Modules/getpath.c Modules/python.o: $(srcdir)/Modules/python.c -@@ -721,7 +723,7 @@ $(AST_C): $(AST_ASDL) $(ASDLGEN_FILES) +@@ -709,7 +711,7 @@ $(AST_C): $(AST_ASDL) $(ASDLGEN_FILES) Python/compile.o Python/symtable.o Python/ast.o: $(GRAMMAR_H) $(AST_H) Python/getplatform.o: $(srcdir)/Python/getplatform.c @@ -236,10 +236,10 @@ Index: Python-2.7.12/Makefile.pre.in Python/importdl.o: $(srcdir)/Python/importdl.c $(CC) -c $(PY_CFLAGS) -I$(DLINCLDIR) -o $@ $(srcdir)/Python/importdl.c -Index: Python-2.7.12/Modules/getpath.c +Index: Python-2.7.13/Modules/getpath.c =================================================================== ---- Python-2.7.12.orig/Modules/getpath.c -+++ Python-2.7.12/Modules/getpath.c +--- Python-2.7.13.orig/Modules/getpath.c ++++ Python-2.7.13/Modules/getpath.c @@ -100,6 +100,13 @@ #error "PREFIX, EXEC_PREFIX, VERSION, and VPATH must be constant defined" #endif @@ -263,10 +263,10 @@ Index: Python-2.7.12/Modules/getpath.c static void reduce(char *dir) -Index: Python-2.7.12/Python/getplatform.c +Index: Python-2.7.13/Python/getplatform.c =================================================================== ---- Python-2.7.12.orig/Python/getplatform.c -+++ Python-2.7.12/Python/getplatform.c +--- Python-2.7.13.orig/Python/getplatform.c ++++ Python-2.7.13/Python/getplatform.c @@ -10,3 +10,13 @@ Py_GetPlatform(void) { return PLATFORM; @@ -281,10 +281,10 @@ Index: Python-2.7.12/Python/getplatform.c +{ + return LIB; +} -Index: Python-2.7.12/Python/sysmodule.c +Index: Python-2.7.13/Python/sysmodule.c =================================================================== ---- Python-2.7.12.orig/Python/sysmodule.c -+++ Python-2.7.12/Python/sysmodule.c +--- Python-2.7.13.orig/Python/sysmodule.c ++++ Python-2.7.13/Python/sysmodule.c @@ -1437,6 +1437,8 @@ _PySys_Init(void) PyString_FromString(Py_GetCopyright())); SET_SYS_FROM_STRING("platform", diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python/python-fix-CVE-2016-1000110.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python/python-fix-CVE-2016-1000110.patch deleted file mode 100644 index 97888e2b0..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python/python-fix-CVE-2016-1000110.patch +++ /dev/null @@ -1,162 +0,0 @@ -From cb25fbd5abc0f4eb07dbb8ea819e9c26bda4fc99 Mon Sep 17 00:00:00 2001 -From: Senthil Kumaran -Date: Sat, 30 Jul 2016 05:49:53 -0700 -Subject: [PATCH] python: fix CVE-2016-1000110 -MIME-Version: 1.0 -Content-Type: text/plain; charset=UTF-8 -Content-Transfer-Encoding: 8bit - -Prevent HTTPoxy attack (CVE-2016-1000110) - -Ignore 
the HTTP_PROXY variable when REQUEST_METHOD environment is set, which -indicates that the script is in CGI mode. - -Issue reported and patch contributed by Rémi Rampin. - -Backport patch from https://hg.python.org/cpython/rev/ba915d561667/ - -Upstream-Status: Backport -CVE: CVE-2016-1000110 -Signed-off-by: Mingli Yu ---- - Doc/howto/urllib2.rst | 5 +++++ - Doc/library/urllib.rst | 10 ++++++++++ - Doc/library/urllib2.rst | 5 +++++ - Lib/test/test_urllib.py | 12 ++++++++++++ - Lib/urllib.py | 9 +++++++++ - Misc/ACKS | 1 + - Misc/NEWS | 4 ++++ - 7 files changed, 46 insertions(+) - -diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst -index 6bb06d4..5cf2c0c 100644 ---- a/Doc/howto/urllib2.rst -+++ b/Doc/howto/urllib2.rst -@@ -525,6 +525,11 @@ setting up a `Basic Authentication`_ handler: :: - through a proxy. However, this can be enabled by extending urllib2 as - shown in the recipe [#]_. - -+.. note:: -+ -+ ``HTTP_PROXY`` will be ignored if a variable ``REQUEST_METHOD`` is set; see -+ the documentation on :func:`~urllib.getproxies`. -+ - - Sockets and Layers - ================== -diff --git a/Doc/library/urllib.rst b/Doc/library/urllib.rst -index 3b5dc16..bddcba9 100644 ---- a/Doc/library/urllib.rst -+++ b/Doc/library/urllib.rst -@@ -295,6 +295,16 @@ Utility functions - If both lowercase and uppercase environment variables exist (and disagree), - lowercase is preferred. - -+ .. note:: -+ -+ If the environment variable ``REQUEST_METHOD`` is set, which usually -+ indicates your script is running in a CGI environment, the environment -+ variable ``HTTP_PROXY`` (uppercase ``_PROXY``) will be ignored. This is -+ because that variable can be injected by a client using the "Proxy:" -+ HTTP header. If you need to use an HTTP proxy in a CGI environment, -+ either use ``ProxyHandler`` explicitly, or make sure the variable name -+ is in lowercase (or at least the ``_proxy`` suffix). -+ - .. note:: - urllib also exposes certain utility functions like splittype, splithost and - others parsing URL into various components. But it is recommended to use -diff --git a/Doc/library/urllib2.rst b/Doc/library/urllib2.rst -index 8a4c80e..b808b98 100644 ---- a/Doc/library/urllib2.rst -+++ b/Doc/library/urllib2.rst -@@ -229,6 +229,11 @@ The following classes are provided: - - To disable autodetected proxy pass an empty dictionary. - -+ .. note:: -+ -+ ``HTTP_PROXY`` will be ignored if a variable ``REQUEST_METHOD`` is set; -+ see the documentation on :func:`~urllib.getproxies`. -+ - - .. 
class:: HTTPPasswordMgr() - -diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py -index 434d533..27a1d38 100644 ---- a/Lib/test/test_urllib.py -+++ b/Lib/test/test_urllib.py -@@ -170,6 +170,18 @@ class ProxyTests(unittest.TestCase): - self.assertTrue(urllib.proxy_bypass_environment('anotherdomain.com:8888')) - self.assertTrue(urllib.proxy_bypass_environment('newdomain.com:1234')) - -+ def test_proxy_cgi_ignore(self): -+ try: -+ self.env.set('HTTP_PROXY', 'http://somewhere:3128') -+ proxies = urllib.getproxies_environment() -+ self.assertEqual('http://somewhere:3128', proxies['http']) -+ self.env.set('REQUEST_METHOD', 'GET') -+ proxies = urllib.getproxies_environment() -+ self.assertNotIn('http', proxies) -+ finally: -+ self.env.unset('REQUEST_METHOD') -+ self.env.unset('HTTP_PROXY') -+ - def test_proxy_bypass_environment_host_match(self): - bypass = urllib.proxy_bypass_environment - self.env.set('NO_PROXY', -diff --git a/Lib/urllib.py b/Lib/urllib.py -index 139fab9..c3ba2c9 100644 ---- a/Lib/urllib.py -+++ b/Lib/urllib.py -@@ -1380,12 +1380,21 @@ def getproxies_environment(): - If you need a different way, you can pass a proxies dictionary to the - [Fancy]URLopener constructor. - """ -+ # Get all variables - proxies = {} - for name, value in os.environ.items(): - name = name.lower() - if value and name[-6:] == '_proxy': - proxies[name[:-6]] = value - -+ # CVE-2016-1000110 - If we are running as CGI script, forget HTTP_PROXY -+ # (non-all-lowercase) as it may be set from the web server by a "Proxy:" -+ # header from the client -+ # If "proxy" is lowercase, it will still be used thanks to the next block -+ if 'REQUEST_METHOD' in os.environ: -+ proxies.pop('http', None) -+ -+ # Get lowercase variables - for name, value in os.environ.items(): - if name[-6:] == '_proxy': - name = name.lower() -diff --git a/Misc/ACKS b/Misc/ACKS -index ee3a465..9c374b7 100644 ---- a/Misc/ACKS -+++ b/Misc/ACKS -@@ -1121,6 +1121,7 @@ Burton Radons - Jeff Ramnani - Varpu Rantala - Brodie Rao -+Rémi Rampin - Senko Rasic - Antti Rasinen - Nikolaus Rath -diff --git a/Misc/NEWS b/Misc/NEWS -index 4ab3a70..cc2f65b 100644 ---- a/Misc/NEWS -+++ b/Misc/NEWS -@@ -187,6 +187,10 @@ Library - - Issue #26644: Raise ValueError rather than SystemError when a negative - length is passed to SSLSocket.recv() or read(). - -+- Issue #27568: Prevent HTTPoxy attack (CVE-2016-1000110). Ignore the -+ HTTP_PROXY variable when REQUEST_METHOD environment is set, which indicates -+ that the script is in CGI mode. -+ - - Issue #23804: Fix SSL recv(0) and read(0) methods to return zero bytes - instead of up to 1024. 
- --- -2.8.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-docutils_0.12.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-docutils_0.12.bb deleted file mode 100644 index e78fa3bbc..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-docutils_0.12.bb +++ /dev/null @@ -1,18 +0,0 @@ -SUMMARY = "Text processing system for documentation" -HOMEPAGE = "http://docutils.sourceforge.net" -SECTION = "devel/python" -LICENSE = "PSF & BSD-2-Clause & GPLv3" -LIC_FILES_CHKSUM = "file://COPYING.txt;md5=a722fbdc20347db7b69223594dd54574" - -DEPENDS = "python3" - -SRC_URI = "${SOURCEFORGE_MIRROR}/docutils/docutils-${PV}.tar.gz" -SRC_URI[md5sum] = "4622263b62c5c771c03502afa3157768" -SRC_URI[sha256sum] = "c7db717810ab6965f66c8cf0398a98c9d8df982da39b4cd7f162911eb89596fa" - -S = "${WORKDIR}/docutils-${PV}" - -inherit distutils3 - -BBCLASSEXTEND = "native" - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-docutils_0.13.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-docutils_0.13.1.bb new file mode 100644 index 000000000..e36388c6f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-docutils_0.13.1.bb @@ -0,0 +1,18 @@ +SUMMARY = "Text processing system for documentation" +HOMEPAGE = "http://docutils.sourceforge.net" +SECTION = "devel/python" +LICENSE = "PSF & BSD-2-Clause & GPLv3" +LIC_FILES_CHKSUM = "file://COPYING.txt;md5=7a4646907ab9083c826280b19e103106" + +DEPENDS = "python3" + +SRC_URI = "${SOURCEFORGE_MIRROR}/docutils/docutils-${PV}.tar.gz" +SRC_URI[md5sum] = "ea4a893c633c788be9b8078b6b305d53" +SRC_URI[sha256sum] = "718c0f5fb677be0f34b781e04241c4067cbd9327b66bdd8e763201130f5175be" + +S = "${WORKDIR}/docutils-${PV}" + +inherit distutils3 + +BBCLASSEXTEND = "native" + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-git_2.0.7.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-git_2.0.7.bb deleted file mode 100644 index c9fe9baa2..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-git_2.0.7.bb +++ /dev/null @@ -1,7 +0,0 @@ -require python-git.inc - -DEPENDS = "python3-gitdb" - -inherit setuptools3 - -RDEPENDS_${PN} += "python3-gitdb python3-lang python3-io python3-shell python3-math python3-re python3-subprocess python3-stringold python3-unixadmin python3-enum python3-logging python3-datetime python3-netclient" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-git_2.1.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-git_2.1.1.bb new file mode 100644 index 000000000..7a2d452a2 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-git_2.1.1.bb @@ -0,0 +1,7 @@ +require python-git.inc + +DEPENDS = "python3-gitdb" + +inherit setuptools3 + +RDEPENDS_${PN} += "python3-gitdb python3-lang python3-io python3-shell python3-math python3-re python3-subprocess python3-stringold python3-unixadmin python3-enum python3-logging python3-datetime python3-netclient python3-unittest python3-argparse" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-iniparse/0001-Add-python-3-compatibility.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-iniparse/0001-Add-python-3-compatibility.patch new file mode 100644 index 000000000..44090a20d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-iniparse/0001-Add-python-3-compatibility.patch @@ -0,0 +1,552 @@ +From 
8a98e4d44a5e59439a4b6bd95368cc362412c995 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 24 Mar 2017 18:06:08 +0200 +Subject: [PATCH] Add python 3 compatibility. + +Taken from +http://pkgs.fedoraproject.org/cgit/rpms/python-iniparse.git/tree/python-iniparse-python3-compat.patch + +Upstream-Status: Inappropriate [upstream is defunct] +Signed-off-by: Alexander Kanavin +--- + iniparse/__init__.py | 20 ++++++++++---------- + iniparse/compat.py | 30 ++++++++++++++++-------------- + iniparse/config.py | 16 ++++++++-------- + iniparse/configparser.py | 7 +++++++ + iniparse/ini.py | 20 ++++++++++++-------- + iniparse/utils.py | 4 ++-- + tests/__init__.py | 14 +++++++------- + tests/test_compat.py | 23 +++++++++++++++-------- + tests/test_fuzz.py | 18 +++++++++--------- + tests/test_ini.py | 8 ++++---- + tests/test_misc.py | 4 ++-- + tests/test_tidy.py | 2 +- + tests/test_unicode.py | 10 +++++----- + 13 files changed, 98 insertions(+), 78 deletions(-) + create mode 100644 iniparse/configparser.py + +diff --git a/iniparse/__init__.py b/iniparse/__init__.py +index 8de756f..7193f92 100644 +--- a/iniparse/__init__.py ++++ b/iniparse/__init__.py +@@ -3,17 +3,17 @@ + # Copyright (c) 2007 Tim Lauridsen + # All Rights Reserved. See LICENSE-PSF & LICENSE for details. + +-from ini import INIConfig, change_comment_syntax +-from config import BasicConfig, ConfigNamespace +-from compat import RawConfigParser, ConfigParser, SafeConfigParser +-from utils import tidy ++from .ini import INIConfig, change_comment_syntax ++from .config import BasicConfig, ConfigNamespace ++from .compat import RawConfigParser, ConfigParser, SafeConfigParser ++from .utils import tidy + +-from ConfigParser import DuplicateSectionError, \ +- NoSectionError, NoOptionError, \ +- InterpolationMissingOptionError, \ +- InterpolationDepthError, \ +- InterpolationSyntaxError, \ +- DEFAULTSECT, MAX_INTERPOLATION_DEPTH ++from .configparser import DuplicateSectionError, \ ++ NoSectionError, NoOptionError, \ ++ InterpolationMissingOptionError, \ ++ InterpolationDepthError, \ ++ InterpolationSyntaxError, \ ++ DEFAULTSECT, MAX_INTERPOLATION_DEPTH + + __all__ = [ + 'BasicConfig', 'ConfigNamespace', +diff --git a/iniparse/compat.py b/iniparse/compat.py +index db89ed8..f95c25c 100644 +--- a/iniparse/compat.py ++++ b/iniparse/compat.py +@@ -12,19 +12,21 @@ The underlying INIConfig object can be accessed as cfg.data + """ + + import re +-from ConfigParser import DuplicateSectionError, \ +- NoSectionError, NoOptionError, \ +- InterpolationMissingOptionError, \ +- InterpolationDepthError, \ +- InterpolationSyntaxError, \ +- DEFAULTSECT, MAX_INTERPOLATION_DEPTH ++from .configparser import DuplicateSectionError, \ ++ NoSectionError, NoOptionError, \ ++ InterpolationMissingOptionError, \ ++ InterpolationDepthError, \ ++ InterpolationSyntaxError, \ ++ DEFAULTSECT, MAX_INTERPOLATION_DEPTH + + # These are imported only for compatiability. + # The code below does not reference them directly. +-from ConfigParser import Error, InterpolationError, \ +- MissingSectionHeaderError, ParsingError ++from .configparser import Error, InterpolationError, \ ++ MissingSectionHeaderError, ParsingError + +-import ini ++import six ++ ++from . import ini + + class RawConfigParser(object): + def __init__(self, defaults=None, dict_type=dict): +@@ -56,7 +58,7 @@ class RawConfigParser(object): + # The default section is the only one that gets the case-insensitive + # treatment - so it is special-cased here. 
+ if section.lower() == "default": +- raise ValueError, 'Invalid section name: %s' % section ++ raise ValueError('Invalid section name: %s' % section) + + if self.has_section(section): + raise DuplicateSectionError(section) +@@ -88,7 +90,7 @@ class RawConfigParser(object): + filename may also be given. + """ + files_read = [] +- if isinstance(filenames, basestring): ++ if isinstance(filenames, six.string_types): + filenames = [filenames] + for filename in filenames: + try: +@@ -143,7 +145,7 @@ class RawConfigParser(object): + def getboolean(self, section, option): + v = self.get(section, option) + if v.lower() not in self._boolean_states: +- raise ValueError, 'Not a boolean: %s' % v ++ raise ValueError('Not a boolean: %s' % v) + return self._boolean_states[v.lower()] + + def has_option(self, section, option): +@@ -234,7 +236,7 @@ class ConfigParser(RawConfigParser): + if "%(" in value: + try: + value = value % vars +- except KeyError, e: ++ except KeyError as e: + raise InterpolationMissingOptionError( + option, section, rawval, e.args[0]) + else: +@@ -283,7 +285,7 @@ class SafeConfigParser(ConfigParser): + _badpercent_re = re.compile(r"%[^%]|%$") + + def set(self, section, option, value): +- if not isinstance(value, basestring): ++ if not isinstance(value, six.string_types): + raise TypeError("option values must be strings") + # check for bad percent signs: + # first, replace all "good" interpolations +diff --git a/iniparse/config.py b/iniparse/config.py +index 5cfa2ea..3b28549 100644 +--- a/iniparse/config.py ++++ b/iniparse/config.py +@@ -143,7 +143,7 @@ class BasicConfig(ConfigNamespace): + + >>> n.aaa = 42 + >>> del n.x +- >>> print n ++ >>> print(n) + aaa = 42 + name.first = paramjit + name.last = oberoi +@@ -152,7 +152,7 @@ class BasicConfig(ConfigNamespace): + + >>> isinstance(n.name, ConfigNamespace) + True +- >>> print n.name ++ >>> print(n.name) + first = paramjit + last = oberoi + >>> sorted(list(n.name)) +@@ -160,7 +160,7 @@ class BasicConfig(ConfigNamespace): + + Finally, values can be read from a file as follows: + +- >>> from StringIO import StringIO ++ >>> from six import StringIO + >>> sio = StringIO(''' + ... # comment + ... ui.height = 100 +@@ -171,7 +171,7 @@ class BasicConfig(ConfigNamespace): + ... 
''') + >>> n = BasicConfig() + >>> n._readfp(sio) +- >>> print n ++ >>> print(n) + complexity = medium + data.secret.password = goodness=gracious me + have_python +@@ -199,7 +199,7 @@ class BasicConfig(ConfigNamespace): + + def __str__(self, prefix=''): + lines = [] +- keys = self._data.keys() ++ keys = list(self._data.keys()) + keys.sort() + for name in keys: + value = self._data[name] +@@ -258,7 +258,7 @@ def update_config(target, source): + >>> n.ui.display_clock = True + >>> n.ui.display_qlength = True + >>> n.ui.width = 150 +- >>> print n ++ >>> print(n) + playlist.expand_playlist = True + ui.display_clock = True + ui.display_qlength = True +@@ -267,7 +267,7 @@ def update_config(target, source): + >>> from iniparse import ini + >>> i = ini.INIConfig() + >>> update_config(i, n) +- >>> print i ++ >>> print(i) + [playlist] + expand_playlist = True + +@@ -277,7 +277,7 @@ def update_config(target, source): + width = 150 + + """ +- for name in source: ++ for name in sorted(source): + value = source[name] + if isinstance(value, ConfigNamespace): + if name in target: +diff --git a/iniparse/configparser.py b/iniparse/configparser.py +new file mode 100644 +index 0000000..c543d50 +--- /dev/null ++++ b/iniparse/configparser.py +@@ -0,0 +1,7 @@ ++try: ++ from ConfigParser import * ++ # not all objects get imported with __all__ ++ from ConfigParser import Error, InterpolationMissingOptionError ++except ImportError: ++ from configparser import * ++ from configparser import Error, InterpolationMissingOptionError +diff --git a/iniparse/ini.py b/iniparse/ini.py +index 408354d..052d9e9 100644 +--- a/iniparse/ini.py ++++ b/iniparse/ini.py +@@ -7,7 +7,7 @@ + + Example: + +- >>> from StringIO import StringIO ++ >>> from six import StringIO + >>> sio = StringIO('''# configure foo-application + ... [foo] + ... bar1 = qualia +@@ -16,14 +16,14 @@ Example: + ... special = 1''') + + >>> cfg = INIConfig(sio) +- >>> print cfg.foo.bar1 ++ >>> print(cfg.foo.bar1) + qualia +- >>> print cfg['foo-ext'].special ++ >>> print(cfg['foo-ext'].special) + 1 + >>> cfg.foo.newopt = 'hi!' + >>> cfg.baz.enabled = 0 + +- >>> print cfg ++ >>> print(cfg) + # configure foo-application + [foo] + bar1 = qualia +@@ -42,9 +42,11 @@ Example: + # Backward-compatiable with ConfigParser + + import re +-from ConfigParser import DEFAULTSECT, ParsingError, MissingSectionHeaderError ++from .configparser import DEFAULTSECT, ParsingError, MissingSectionHeaderError + +-import config ++import six ++ ++from . 
import config + + class LineType(object): + line = None +@@ -278,6 +280,8 @@ class LineContainer(object): + value = property(get_value, set_value) + + def __str__(self): ++ for c in self.contents: ++ pass#print(c.__str__()) + s = [x.__str__() for x in self.contents] + return '\n'.join(s) + +@@ -465,7 +469,7 @@ class INIConfig(config.ConfigNamespace): + self._sections = {} + if defaults is None: defaults = {} + self._defaults = INISection(LineContainer(), optionxformsource=self) +- for name, value in defaults.iteritems(): ++ for name, value in defaults.items(): + self._defaults[name] = value + if fp is not None: + self._readfp(fp) +@@ -551,7 +555,7 @@ class INIConfig(config.ConfigNamespace): + + for line in readline_iterator(fp): + # Check for BOM on first line +- if linecount == 0 and isinstance(line, unicode): ++ if linecount == 0 and isinstance(line, six.text_type): + if line[0] == u'\ufeff': + line = line[1:] + self._bom = True +diff --git a/iniparse/utils.py b/iniparse/utils.py +index 829fc28..f8b773a 100644 +--- a/iniparse/utils.py ++++ b/iniparse/utils.py +@@ -1,5 +1,5 @@ +-import compat +-from ini import LineContainer, EmptyLine ++from . import compat ++from .ini import LineContainer, EmptyLine + + def tidy(cfg): + """Clean up blank lines. +diff --git a/tests/__init__.py b/tests/__init__.py +index f1fa321..88689fb 100644 +--- a/tests/__init__.py ++++ b/tests/__init__.py +@@ -1,12 +1,12 @@ + import unittest, doctest + +-import test_ini +-import test_misc +-import test_fuzz +-import test_compat +-import test_unicode +-import test_tidy +-import test_multiprocessing ++from . import test_ini ++from . import test_misc ++from . import test_fuzz ++from . import test_compat ++from . import test_unicode ++from . import test_tidy ++from . import test_multiprocessing + from iniparse import config + from iniparse import ini + +diff --git a/tests/test_compat.py b/tests/test_compat.py +index b8da3d5..b6dfb5c 100644 +--- a/tests/test_compat.py ++++ b/tests/test_compat.py +@@ -1,9 +1,16 @@ + from iniparse import compat as ConfigParser +-import StringIO ++from six import StringIO ++try: ++ import UserDict ++except ImportError: ++ import collections as UserDict + import unittest +-import UserDict + +-from test import test_support ++import sys ++if sys.version_info[0] < 3: ++ from test import test_support ++else: ++ from test import support as test_support + + class SortedDict(UserDict.UserDict): + def items(self): +@@ -35,7 +42,7 @@ class TestCaseBase(unittest.TestCase): + + def fromstring(self, string, defaults=None): + cf = self.newconfig(defaults) +- sio = StringIO.StringIO(string) ++ sio = StringIO(string) + cf.readfp(sio) + return cf + +@@ -161,7 +168,7 @@ class TestCaseBase(unittest.TestCase): + "No Section!\n") + + def parse_error(self, exc, src): +- sio = StringIO.StringIO(src) ++ sio = StringIO(src) + self.assertRaises(exc, self.cf.readfp, sio) + + def test_query_errors(self): +@@ -181,7 +188,7 @@ class TestCaseBase(unittest.TestCase): + def get_error(self, exc, section, option): + try: + self.cf.get(section, option) +- except exc, e: ++ except exc as e: + return e + else: + self.fail("expected exception type %s.%s" +@@ -227,7 +234,7 @@ class TestCaseBase(unittest.TestCase): + "foo: another very\n" + " long line" + ) +- output = StringIO.StringIO() ++ output = StringIO() + cf.write(output) + self.assertEqual( + output.getvalue(), +@@ -465,7 +472,7 @@ class SortedTestCase(RawConfigParserTestCase): + "o1=4\n" + "[a]\n" + "k=v\n") +- output = StringIO.StringIO() ++ output = StringIO() + 
self.cf.write(output) + self.assertEquals(output.getvalue(), + "[a]\n" +diff --git a/tests/test_fuzz.py b/tests/test_fuzz.py +index 5420dcc..b219500 100644 +--- a/tests/test_fuzz.py ++++ b/tests/test_fuzz.py +@@ -1,9 +1,10 @@ + import re + import os + import random ++import sys + import unittest +-import ConfigParser +-from StringIO import StringIO ++from six import StringIO ++from six.moves import configparser + from iniparse import compat, ini, tidy + + # TODO: +@@ -96,24 +97,25 @@ class test_fuzz(unittest.TestCase): + s = '\n'.join(good_lines) + cc = compat.RawConfigParser() + cc.readfp(StringIO(s)) +- cc_py = ConfigParser.RawConfigParser() ++ cc_py = configparser.RawConfigParser() + cc_py.readfp(StringIO(s)) + # compare the two configparsers + self.assertEqualConfig(cc_py, cc) + # check that tidy does not change semantics + tidy(cc) +- cc_tidy = ConfigParser.RawConfigParser() ++ cc_tidy = configparser.RawConfigParser() + cc_tidy.readfp(StringIO(str(cc.data))) + self.assertEqualConfig(cc_py, cc_tidy) + except AssertionError: + fname = 'fuzz-test-iter-%d.ini' % fuzz_iter +- print 'Fuzz test failed at iteration', fuzz_iter +- print 'Writing out failing INI file as', fname ++ print('Fuzz test failed at iteration', fuzz_iter) ++ print('Writing out failing INI file as', fname) + f = open(fname, 'w') + f.write(s) + f.close() + raise + ++ @unittest.skipIf(sys.version_info[0] > 2, 'http://code.google.com/p/iniparse/issues/detail?id=22#c9') + def assertEqualConfig(self, c1, c2): + self.assertEqualSorted(c1.sections(), c2.sections()) + self.assertEqualSorted(c1.defaults().items(), c2.defaults().items()) +@@ -123,9 +125,7 @@ class test_fuzz(unittest.TestCase): + self.assertEqual(c1.get(sec, opt), c2.get(sec, opt)) + + def assertEqualSorted(self, l1, l2): +- l1.sort() +- l2.sort() +- self.assertEqual(l1, l2) ++ self.assertEqual(sorted(l1), sorted(l2)) + + class suite(unittest.TestSuite): + def __init__(self): +diff --git a/tests/test_ini.py b/tests/test_ini.py +index 6a76edb..07d4f4e 100644 +--- a/tests/test_ini.py ++++ b/tests/test_ini.py +@@ -1,5 +1,5 @@ + import unittest +-from StringIO import StringIO ++from six import StringIO + + from iniparse import ini + from iniparse import compat +@@ -196,13 +196,13 @@ but = also me + self.assertEqual(p._data.find('section2').find('just').value, 'kidding') + + itr = p._data.finditer('section1') +- v = itr.next() ++ v = next(itr) + self.assertEqual(v.find('help').value, 'yourself') + self.assertEqual(v.find('but').value, 'also me') +- v = itr.next() ++ v = next(itr) + self.assertEqual(v.find('help').value, 'me') + self.assertEqual(v.find('I\'m').value, 'desperate') +- self.assertRaises(StopIteration, itr.next) ++ self.assertRaises(StopIteration, next, itr) + + self.assertRaises(KeyError, p._data.find, 'section') + self.assertRaises(KeyError, p._data.find('section2').find, 'ahem') +diff --git a/tests/test_misc.py b/tests/test_misc.py +index 31cf4da..96ef035 100644 +--- a/tests/test_misc.py ++++ b/tests/test_misc.py +@@ -1,9 +1,9 @@ + import re + import unittest + import pickle +-import ConfigParser ++from six.moves import configparser ++from six import StringIO + from textwrap import dedent +-from StringIO import StringIO + from iniparse import compat, ini + + class CaseSensitiveConfigParser(compat.ConfigParser): +diff --git a/tests/test_tidy.py b/tests/test_tidy.py +index 7304747..26b6cde 100644 +--- a/tests/test_tidy.py ++++ b/tests/test_tidy.py +@@ -1,6 +1,6 @@ + import unittest + from textwrap import dedent +-from StringIO import StringIO ++from six 
import StringIO + + from iniparse import tidy,INIConfig + from iniparse.ini import EmptyLine +diff --git a/tests/test_unicode.py b/tests/test_unicode.py +index a56fcab..14d4fbd 100644 +--- a/tests/test_unicode.py ++++ b/tests/test_unicode.py +@@ -1,5 +1,5 @@ + import unittest +-from StringIO import StringIO ++import six + from iniparse import compat, ini + + class test_unicode(unittest.TestCase): +@@ -17,14 +17,14 @@ baz = Marc-Andr\202 + """ + + def basic_tests(self, s, strable): +- f = StringIO(s) ++ f = six.StringIO(s) + i = ini.INIConfig(f) +- self.assertEqual(unicode(i), s) +- self.assertEqual(type(i.foo.bar), unicode) ++ self.assertEqual(six.text_type(i), s) ++ self.assertEqual(type(i.foo.bar), six.text_type) + if strable: + self.assertEqual(str(i), str(s)) + else: +- self.assertRaises(UnicodeEncodeError, lambda: str(i)) ++ self.assertRaises(UnicodeEncodeError, lambda: six.text_type(i).encode('ascii')) + return i + + def test_ascii(self): +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-iniparse_0.4.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-iniparse_0.4.bb new file mode 100644 index 000000000..f51ce647c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-iniparse_0.4.bb @@ -0,0 +1,20 @@ +SUMMARY = "Accessing and Modifying INI files" +HOMEPAGE = "https://pypi.python.org/pypi/iniparse/" +LICENSE = "MIT & PSF" +LIC_FILES_CHKSUM = "file://LICENSE-PSF;md5=1c78a5bb3584b353496d5f6f34edb4b2 \ + file://LICENSE;md5=52f28065af11d69382693b45b5a8eb54" + +SRC_URI = "https://files.pythonhosted.org/packages/source/i/iniparse/iniparse-${PV}.tar.gz \ + file://0001-Add-python-3-compatibility.patch " +SRC_URI[md5sum] = "5e573e9e9733d97623881ce9bbe5eca6" +SRC_URI[sha256sum] = "abc1ee12d2cfb2506109072d6c21e40b6c75a3fe90a9c924327d80bc0d99c054" +UPSTREAM_CHECK_URI = "https://pypi.python.org/pypi/iniparse/" + +inherit distutils3 + +RDEPENDS_${PN} += "python3-core python3-six" +DEPENDS += "python3-six" + +BBCLASSEXTEND = "native nativesdk" + +S = "${WORKDIR}/iniparse-${PV}" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-mako_1.0.4.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-mako_1.0.4.bb deleted file mode 100644 index 2b50ffd08..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-mako_1.0.4.bb +++ /dev/null @@ -1,11 +0,0 @@ -require python-mako.inc - -inherit setuptools3 - -RDEPENDS_${PN} = "python3-threading \ - python3-netclient \ - python3-html \ -" -RDEPENDS_${PN}_class-native = "" - -BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-mako_1.0.6.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-mako_1.0.6.bb new file mode 100644 index 000000000..2b50ffd08 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-mako_1.0.6.bb @@ -0,0 +1,11 @@ +require python-mako.inc + +inherit setuptools3 + +RDEPENDS_${PN} = "python3-threading \ + python3-netclient \ + python3-html \ +" +RDEPENDS_${PN}_class-native = "" + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-native_3.5.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-native_3.5.2.bb index f32f05cca..782e2cda9 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-native_3.5.2.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-native_3.5.2.bb @@ -8,7 +8,6 @@ SRC_URI = 
"http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz \ file://12-distutils-prefix-is-inside-staging-area.patch \ file://python-config.patch \ file://000-cross-compile.patch \ -file://020-dont-compile-python-files.patch \ file://030-fixup-include-dirs.patch \ file://070-dont-clean-ipkg-install.patch \ file://080-distutils-dont_adjust_files.patch \ @@ -42,28 +41,7 @@ DEPENDS = "openssl-native bzip2-replacement-native zlib-native readline-native s inherit native -RPROVIDES += " \ - python3-compression-native \ - python3-core-native \ - python3-distutils-native \ - python3-email-native \ - python3-importlib-native \ - python3-io-native \ - python3-json-native \ - python3-lang-native \ - python3-misc-native \ - python3-netclient-native \ - python3-netserver-native \ - python3-numbers-native \ - python3-pkgutil-native \ - python3-pprint-native \ - python3-re-native \ - python3-shell-native \ - python3-subprocess-native \ - python3-textutils-native \ - python3-threading-native \ - python3-unittest-native \ -" +require python-native-${PYTHON_MAJMIN}-manifest.inc # uninative may be used on pre glibc 2.25 systems which don't have getentropy EXTRA_OECONF_append = " --bindir=${bindir}/${PN} --without-ensurepip ac_cv_func_getentropy=no" @@ -97,4 +75,9 @@ do_install() { for PYTHSCRIPT in `grep -rIl ${bindir}/${PN}/python ${D}${bindir}/${PN}`; do sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' $PYTHSCRIPT done + + # Tests are large and we don't need them in the native sysroot + rm ${D}${libdir}/python${PYTHON_MAJMIN}/test -rf } + +RPROVIDES += "python3-misc-native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pip_8.1.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pip_8.1.2.bb deleted file mode 100644 index eefb4cb79..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pip_8.1.2.bb +++ /dev/null @@ -1,47 +0,0 @@ -SUMMARY = "The PyPA recommended tool for installing Python packages" -sHOMEPAGEsss = "https://pypi.python.org/pypi/pip" -SECTION = "devel/python" -LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=25fba45109565f87de20bae85bc39452" - -SRCNAME = "pip" -DEPENDS += "python3 python3-setuptools-native" - -SRC_URI = "https://files.pythonhosted.org/packages/source/p/${SRCNAME}/${SRCNAME}-${PV}.tar.gz" - -SRC_URI[md5sum] = "87083c0b9867963b29f7aba3613e8f4a" -SRC_URI[sha256sum] = "4d24b03ffa67638a3fa931c09fd9e0273ffa904e95ebebe7d4b1a54c93d7b732" - -UPSTREAM_CHECK_URI = "https://pypi.python.org/pypi/pip" - -S = "${WORKDIR}/${SRCNAME}-${PV}" - -inherit distutils3 - -DISTUTILS_INSTALL_ARGS += "--install-lib=${D}${PYTHON_SITEPACKAGES_DIR}" - -do_install_prepend() { - install -d ${D}${PYTHON_SITEPACKAGES_DIR} -} - -# Use setuptools site.py instead, avoid shared state issue -do_install_append() { - rm ${D}${PYTHON_SITEPACKAGES_DIR}/site.py - rm ${D}${PYTHON_SITEPACKAGES_DIR}/__pycache__/site.cpython-*.pyc - - # Install as pip3 and leave pip2 as default - rm ${D}/${bindir}/pip - - # Installed eggs need to be passed directly to the interpreter via a pth file - echo "./${SRCNAME}-${PV}-py${PYTHON_BASEVERSION}.egg" > ${D}${PYTHON_SITEPACKAGES_DIR}/${SRCNAME}-${PV}.pth -} - -RDEPENDS_${PN} = "\ - python3-compile \ - python3-io \ - python3-json \ - python3-netserver \ - python3-setuptools \ - python3-unixadmin \ - python3-xmlrpc \ -" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pip_9.0.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pip_9.0.1.bb new file mode 100644 index 
000000000..4456b9b1b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pip_9.0.1.bb @@ -0,0 +1,56 @@ +SUMMARY = "The PyPA recommended tool for installing Python packages" +sHOMEPAGEsss = "https://pypi.python.org/pypi/pip" +SECTION = "devel/python" +LICENSE = "MIT" +LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=25fba45109565f87de20bae85bc39452" + +SRCNAME = "pip" +DEPENDS += "python3 python3-setuptools-native" + +SRC_URI = "https://files.pythonhosted.org/packages/source/p/${SRCNAME}/${SRCNAME}-${PV}.tar.gz" + +SRC_URI[md5sum] = "35f01da33009719497f01a4ba69d63c9" +SRC_URI[sha256sum] = "09f243e1a7b461f654c26a725fa373211bb7ff17a9300058b205c61658ca940d" + +UPSTREAM_CHECK_URI = "https://pypi.python.org/pypi/pip" + +S = "${WORKDIR}/${SRCNAME}-${PV}" + +inherit distutils3 + +DISTUTILS_INSTALL_ARGS += "--install-lib=${D}${PYTHON_SITEPACKAGES_DIR}" + +do_install_prepend() { + install -d ${D}${PYTHON_SITEPACKAGES_DIR} +} + +# Use setuptools site.py instead, avoid shared state issue +do_install_append() { + rm ${D}${PYTHON_SITEPACKAGES_DIR}/site.py + rm ${D}${PYTHON_SITEPACKAGES_DIR}/__pycache__/site.cpython-*.pyc + + # Install as pip3 and leave pip2 as default + rm ${D}/${bindir}/pip + + # Installed eggs need to be passed directly to the interpreter via a pth file + echo "./${SRCNAME}-${PV}-py${PYTHON_BASEVERSION}.egg" > ${D}${PYTHON_SITEPACKAGES_DIR}/${SRCNAME}-${PV}.pth + + # Make sure we use /usr/bin/env python3 + for PYTHSCRIPT in `grep -rIl ${bindir} ${D}${bindir}/pip3*`; do + sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' $PYTHSCRIPT + done +} + +RDEPENDS_${PN} = "\ + python3-compile \ + python3-io \ + python3-enum \ + python3-html \ + python3-json \ + python3-netserver \ + python3-setuptools \ + python3-unixadmin \ + python3-xmlrpc \ +" + +BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pycurl_7.21.5.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pycurl_7.21.5.bb new file mode 100644 index 000000000..5d11192ab --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pycurl_7.21.5.bb @@ -0,0 +1,5 @@ +FILESEXTRAPATHS_prepend := "${THISDIR}/python-pycurl:" + +require python-pycurl.inc + +inherit distutils3 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygobject_3.20.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygobject_3.20.1.bb deleted file mode 100644 index cda16f17a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygobject_3.20.1.bb +++ /dev/null @@ -1,25 +0,0 @@ -SUMMARY = "Python GObject bindings" -SECTION = "devel/python" -LICENSE = "LGPLv2.1" -LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7" - -inherit autotools pkgconfig gnomebase distutils3-base gobject-introspection upstream-version-is-even - -DEPENDS += "python3 glib-2.0" - -SRCNAME="pygobject" -SRC_URI = " \ - http://ftp.gnome.org/pub/GNOME/sources/${SRCNAME}/${@gnome_verdir("${PV}")}/${SRCNAME}-${PV}.tar.xz \ - file://0001-configure.ac-add-sysroot-path-to-GI_DATADIR-don-t-se.patch \ -" - -SRC_URI[md5sum] = "4354c6283b135f859563b72457f6a321" -SRC_URI[sha256sum] = "3d261005d6fed6a92ac4c25f283792552f7dad865d1b7e0c03c2b84c04dbd745" - -S = "${WORKDIR}/${SRCNAME}-${PV}" - -BBCLASSEXTEND = "native" - -EXTRA_OECONF = "--disable-cairo" - -RDEPENDS_${PN} += "python3-setuptools python3-importlib" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygobject_3.22.0.bb 
b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygobject_3.22.0.bb new file mode 100644 index 000000000..143048d9e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygobject_3.22.0.bb @@ -0,0 +1,31 @@ +SUMMARY = "Python GObject bindings" +SECTION = "devel/python" +LICENSE = "LGPLv2.1" +LIC_FILES_CHKSUM = "file://COPYING;md5=a916467b91076e631dd8edb7424769c7" + +inherit autotools pkgconfig gnomebase distutils3-base gobject-introspection upstream-version-is-even + +DEPENDS += "gnome-common-native python3 glib-2.0" + +SRCNAME="pygobject" +SRC_URI = " \ + http://ftp.gnome.org/pub/GNOME/sources/${SRCNAME}/${@gnome_verdir("${PV}")}/${SRCNAME}-${PV}.tar.xz \ + file://0001-configure.ac-add-sysroot-path-to-GI_DATADIR-don-t-se.patch \ +" + +SRC_URI[md5sum] = "ed4117ed5d554d25fd7718807fbf819f" +SRC_URI[sha256sum] = "08b29cfb08efc80f7a8630a2734dec65a99c1b59f1e5771c671d2e4ed8a5cbe7" + +S = "${WORKDIR}/${SRCNAME}-${PV}" + + +PACKAGECONFIG ??= "${@bb.utils.contains_any('DISTRO_FEATURES', [ 'directfb', 'wayland', 'x11' ], 'cairo', '', d)}" + +# python3-pycairo is checked on configuration -> DEPENDS +# we don't link against python3-pycairo -> RDEPENDS +PACKAGECONFIG[cairo] = "--enable-cairo,--disable-cairo,cairo python3-pycairo, python3-pycairo" + +RDEPENDS_${PN} += "python3-setuptools python3-importlib" + +BBCLASSEXTEND = "native" +PACKAGECONFIG_class-native = "" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygpgme_0.3.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygpgme_0.3.bb new file mode 100644 index 000000000..495f677cf --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-pygpgme_0.3.bb @@ -0,0 +1,18 @@ +SUMMARY = "A Python module for working with OpenPGP messages" +HOMEPAGE = "https://launchpad.net/pygpgme" +LICENSE = "LGPLv2.1" +LIC_FILES_CHKSUM = "file://README;md5=2dc15a76acf01e126188c8de634ae4b3" + +SRC_URI = "https://launchpad.net/pygpgme/trunk/${PV}/+download/pygpgme-${PV}.tar.gz" +SRC_URI[md5sum] = "d38355af73f0352cde3d410b25f34fd0" +SRC_URI[sha256sum] = "5fd887c407015296a8fd3f4b867fe0fcca3179de97ccde90449853a3dfb802e1" + +S = "${WORKDIR}/pygpgme-${PV}" + +inherit distutils3 + +DEPENDS = "gpgme python3" + +RDEPENDS_${PN} += "python3-core" + +BBCLASSEXTEND = "native nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-setuptools_22.0.5.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-setuptools_22.0.5.bb deleted file mode 100644 index 65af6f0da..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-setuptools_22.0.5.bb +++ /dev/null @@ -1,37 +0,0 @@ -require python-setuptools.inc - -DEPENDS += "python3" -DEPENDS_class-native += "python3-native" - -inherit distutils3 - -DISTUTILS_INSTALL_ARGS += "--install-lib=${D}${PYTHON_SITEPACKAGES_DIR}" - -# The installer puts the wrong path in the setuptools.pth file. Correct it. 
-do_install_append() { - rm ${D}${PYTHON_SITEPACKAGES_DIR}/setuptools.pth - mv ${D}${bindir}/easy_install ${D}${bindir}/easy3_install - echo "./${SRCNAME}-${PV}-py${PYTHON_BASEVERSION}.egg" > ${D}${PYTHON_SITEPACKAGES_DIR}/setuptools.pth -} - -RDEPENDS_${PN} = "\ - python3-distutils \ - python3-compression \ -" -RDEPENDS_${PN}_class-target = "\ - python3-ctypes \ - python3-distutils \ - python3-email \ - python3-importlib \ - python3-numbers \ - python3-compression \ - python3-shell \ - python3-subprocess \ - python3-textutils \ - python3-pkgutil \ - python3-threading \ - python3-misc \ - python3-unittest \ - python3-xml \ -" -BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3-setuptools_32.1.1.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-setuptools_32.1.1.bb new file mode 100644 index 000000000..65af6f0da --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3-setuptools_32.1.1.bb @@ -0,0 +1,37 @@ +require python-setuptools.inc + +DEPENDS += "python3" +DEPENDS_class-native += "python3-native" + +inherit distutils3 + +DISTUTILS_INSTALL_ARGS += "--install-lib=${D}${PYTHON_SITEPACKAGES_DIR}" + +# The installer puts the wrong path in the setuptools.pth file. Correct it. +do_install_append() { + rm ${D}${PYTHON_SITEPACKAGES_DIR}/setuptools.pth + mv ${D}${bindir}/easy_install ${D}${bindir}/easy3_install + echo "./${SRCNAME}-${PV}-py${PYTHON_BASEVERSION}.egg" > ${D}${PYTHON_SITEPACKAGES_DIR}/setuptools.pth +} + +RDEPENDS_${PN} = "\ + python3-distutils \ + python3-compression \ +" +RDEPENDS_${PN}_class-target = "\ + python3-ctypes \ + python3-distutils \ + python3-email \ + python3-importlib \ + python3-numbers \ + python3-compression \ + python3-shell \ + python3-subprocess \ + python3-textutils \ + python3-pkgutil \ + python3-threading \ + python3-misc \ + python3-unittest \ + python3-xml \ +" +BBCLASSEXTEND = "native" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3/020-dont-compile-python-files.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python3/020-dont-compile-python-files.patch deleted file mode 100644 index 819ba69ed..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3/020-dont-compile-python-files.patch +++ /dev/null @@ -1,48 +0,0 @@ -Dont cross compile site packages - --Khem - -Upstream-Status: Inappropriate[Embedded-Specific] - ---- - Makefile.pre.in | 16 ---------------- - 1 file changed, 16 deletions(-) - -Index: Python-3.5.0/Makefile.pre.in -=================================================================== ---- Python-3.5.0.orig/Makefile.pre.in -+++ Python-3.5.0/Makefile.pre.in -@@ -1262,33 +1262,6 @@ libinstall: build_all $(srcdir)/Lib/$(PL - $(INSTALL_DATA) $(srcdir)/Modules/xxmodule.c \ - $(DESTDIR)$(LIBDEST)/distutils/tests ; \ - fi -- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ -- $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ -- -d $(LIBDEST) -f \ -- -x 'bad_coding|badsyntax|site-packages|lib2to3/tests/data' \ -- $(DESTDIR)$(LIBDEST) -- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ -- $(PYTHON_FOR_BUILD) -Wi -O $(DESTDIR)$(LIBDEST)/compileall.py \ -- -d $(LIBDEST) -f \ -- -x 'bad_coding|badsyntax|site-packages|lib2to3/tests/data' \ -- $(DESTDIR)$(LIBDEST) -- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ -- $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \ -- -d $(LIBDEST) -f \ -- -x 'bad_coding|badsyntax|site-packages|lib2to3/tests/data' \ -- $(DESTDIR)$(LIBDEST) -- 
-PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ -- $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ -- -d $(LIBDEST)/site-packages -f \ -- -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages -- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ -- $(PYTHON_FOR_BUILD) -Wi -O $(DESTDIR)$(LIBDEST)/compileall.py \ -- -d $(LIBDEST)/site-packages -f \ -- -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages -- -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ -- $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \ -- -d $(LIBDEST)/site-packages -f \ -- -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ - $(PYTHON_FOR_BUILD) -m lib2to3.pgen2.driver $(DESTDIR)$(LIBDEST)/lib2to3/Grammar.txt - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3/CVE-2016-5636.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python3/CVE-2016-5636.patch deleted file mode 100644 index 0d494d20f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3/CVE-2016-5636.patch +++ /dev/null @@ -1,44 +0,0 @@ - -# HG changeset patch -# User Benjamin Peterson -# Date 1453357506 28800 -# Node ID 10dad6da1b28ea4af78ad9529e469fdbf4ebbc8f -# Parent a3ac2cd93db9d5336dfd7b5b27efde2c568d8794# Parent 01ddd608b85c85952537d95a43bbabf4fb655057 -merge 3.4 (#26171) - -Upstream-Status: Backport -CVE: CVE-2016-5636 - -https://hg.python.org/cpython/raw-rev/10dad6da1b28 -Signed-off-by: Armin Kuster - -Index: Python-3.5.1/Misc/NEWS -=================================================================== ---- Python-3.5.1.orig/Misc/NEWS -+++ Python-3.5.1/Misc/NEWS -@@ -91,6 +91,9 @@ Core and Builtins - Python.h header to fix a compilation error with OpenMP. PyThreadState_GET() - becomes an alias to PyThreadState_Get() to avoid ABI incompatibilies. - -+- Issue #26171: Fix possible integer overflow and heap corruption in -+ zipimporter.get_data(). -+ - Library - ------- - -Index: Python-3.5.1/Modules/zipimport.c -=================================================================== ---- Python-3.5.1.orig/Modules/zipimport.c -+++ Python-3.5.1/Modules/zipimport.c -@@ -1112,6 +1112,11 @@ get_data(PyObject *archive, PyObject *to - } - file_offset += l; /* Start of file data */ - -+ if (data_size > LONG_MAX - 1) { -+ fclose(fp); -+ PyErr_NoMemory(); -+ return NULL; -+ } - bytes_size = compress == 0 ? 
data_size : data_size + 1; - if (bytes_size == 0) - bytes_size++; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3/python-3.3-multilib.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python3/python-3.3-multilib.patch index 056e8e763..08c4403cb 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3/python-3.3-multilib.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3/python-3.3-multilib.patch @@ -1,16 +1,34 @@ -Upstream-Status: Pending - -get the sys.lib from python itself and do not use hardcoded value of 'lib' +From 51fe6f22d0ba113674fb358bd11d75fe659bd26e Mon Sep 17 00:00:00 2001 +From: Khem Raj +Date: Tue, 14 May 2013 15:00:26 -0700 +Subject: [PATCH 01/13] get the sys.lib from python itself and do not use + hardcoded value of 'lib' 02/2015 Rebased for 3.4.2 +Upstream-Status: Pending Signed-off-by: Khem Raj Signed-off-by: Alejandro Hernandez -Index: Python-3.5.2/Include/pythonrun.h -=================================================================== ---- Python-3.5.2.orig/Include/pythonrun.h -+++ Python-3.5.2/Include/pythonrun.h +--- + Include/pythonrun.h | 3 +++ + Lib/distutils/command/install.py | 4 +++- + Lib/pydoc.py | 2 +- + Lib/site.py | 4 ++-- + Lib/sysconfig.py | 18 +++++++++--------- + Lib/trace.py | 4 ++-- + Makefile.pre.in | 7 +++++-- + Modules/getpath.c | 10 +++++++++- + Python/getplatform.c | 20 ++++++++++++++++++++ + Python/sysmodule.c | 4 ++++ + configure.ac | 35 +++++++++++++++++++++++++++++++++++ + setup.py | 9 ++++----- + 12 files changed, 97 insertions(+), 23 deletions(-) + +diff --git a/Include/pythonrun.h b/Include/pythonrun.h +index 9c2e813..2f79cb6 100644 +--- a/Include/pythonrun.h ++++ b/Include/pythonrun.h @@ -23,6 +23,9 @@ typedef struct { } PyCompilerFlags; #endif @@ -21,10 +39,10 @@ Index: Python-3.5.2/Include/pythonrun.h #ifndef Py_LIMITED_API PyAPI_FUNC(int) PyRun_SimpleStringFlags(const char *, PyCompilerFlags *); PyAPI_FUNC(int) PyRun_AnyFileFlags(FILE *, const char *, PyCompilerFlags *); -Index: Python-3.5.2/Lib/distutils/command/install.py -=================================================================== ---- Python-3.5.2.orig/Lib/distutils/command/install.py -+++ Python-3.5.2/Lib/distutils/command/install.py +diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py +index 67db007..b46b45b 100644 +--- a/Lib/distutils/command/install.py ++++ b/Lib/distutils/command/install.py @@ -19,6 +19,8 @@ from site import USER_BASE from site import USER_SITE HAS_USER_SITE = True @@ -43,10 +61,10 @@ Index: Python-3.5.2/Lib/distutils/command/install.py 'headers': '$base/include/python$py_version_short$abiflags/$dist_name', 'scripts': '$base/bin', 'data' : '$base', -Index: Python-3.5.2/Lib/pydoc.py -=================================================================== ---- Python-3.5.2.orig/Lib/pydoc.py -+++ Python-3.5.2/Lib/pydoc.py +diff --git a/Lib/pydoc.py b/Lib/pydoc.py +index 3ca08c9..6528730 100755 +--- a/Lib/pydoc.py ++++ b/Lib/pydoc.py @@ -384,7 +384,7 @@ class Doc: docmodule = docclass = docroutine = docother = docproperty = docdata = fail @@ -56,10 +74,75 @@ Index: Python-3.5.2/Lib/pydoc.py "python%d.%d" % sys.version_info[:2])): """Return the location of module docs or None""" -Index: Python-3.5.2/Lib/trace.py -=================================================================== ---- Python-3.5.2.orig/Lib/trace.py -+++ Python-3.5.2/Lib/trace.py +diff --git a/Lib/site.py b/Lib/site.py +index 3f78ef5..511931e 100644 +--- a/Lib/site.py ++++ b/Lib/site.py 
+@@ -303,12 +303,12 @@ def getsitepackages(prefixes=None): + seen.add(prefix) + + if os.sep == '/': +- sitepackages.append(os.path.join(prefix, "lib", ++ sitepackages.append(os.path.join(prefix, sys.lib, + "python" + sys.version[:3], + "site-packages")) + else: + sitepackages.append(prefix) +- sitepackages.append(os.path.join(prefix, "lib", "site-packages")) ++ sitepackages.append(os.path.join(prefix, sys.lib, "site-packages")) + if sys.platform == "darwin": + # for framework builds *only* we add the standard Apple + # locations. +diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py +index 9c34be0..3d1181a 100644 +--- a/Lib/sysconfig.py ++++ b/Lib/sysconfig.py +@@ -20,10 +20,10 @@ __all__ = [ + + _INSTALL_SCHEMES = { + 'posix_prefix': { +- 'stdlib': '{installed_base}/lib/python{py_version_short}', +- 'platstdlib': '{platbase}/lib/python{py_version_short}', ++ 'stdlib': '{installed_base}/'+sys.lib+'/python{py_version_short}', ++ 'platstdlib': '{platbase}/'+sys.lib+'/python{py_version_short}', + 'purelib': '{base}/lib/python{py_version_short}/site-packages', +- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages', ++ 'platlib': '{platbase}/'+sys.lib+'/python{py_version_short}/site-packages', + 'include': + '{installed_base}/include/python{py_version_short}{abiflags}', + 'platinclude': +@@ -32,10 +32,10 @@ _INSTALL_SCHEMES = { + 'data': '{base}', + }, + 'posix_home': { +- 'stdlib': '{installed_base}/lib/python', +- 'platstdlib': '{base}/lib/python', ++ 'stdlib': '{installed_base}/'+sys.lib+'/python', ++ 'platstdlib': '{base}/'+sys.lib+'/python', + 'purelib': '{base}/lib/python', +- 'platlib': '{base}/lib/python', ++ 'platlib': '{base}/'+sys.lib+'/python', + 'include': '{installed_base}/include/python', + 'platinclude': '{installed_base}/include/python', + 'scripts': '{base}/bin', +@@ -61,10 +61,10 @@ _INSTALL_SCHEMES = { + 'data': '{userbase}', + }, + 'posix_user': { +- 'stdlib': '{userbase}/lib/python{py_version_short}', +- 'platstdlib': '{userbase}/lib/python{py_version_short}', ++ 'stdlib': '{userbase}/'+sys.lib+'/python{py_version_short}', ++ 'platstdlib': '{userbase}/'+sys.lib+'/python{py_version_short}', + 'purelib': '{userbase}/lib/python{py_version_short}/site-packages', +- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages', ++ 'platlib': '{userbase}/'+sys.lib+'/python{py_version_short}/site-packages', + 'include': '{userbase}/include/python{py_version_short}', + 'scripts': '{userbase}/bin', + 'data': '{userbase}', +diff --git a/Lib/trace.py b/Lib/trace.py +index f108266..7fd83f2 100755 +--- a/Lib/trace.py ++++ b/Lib/trace.py @@ -749,10 +749,10 @@ def main(argv=None): # should I also call expanduser? 
(after all, could use $HOME) @@ -73,11 +156,11 @@ Index: Python-3.5.2/Lib/trace.py "python" + sys.version[:3])) s = os.path.normpath(s) ignore_dirs.append(s) -Index: Python-3.5.2/Makefile.pre.in -=================================================================== ---- Python-3.5.2.orig/Makefile.pre.in -+++ Python-3.5.2/Makefile.pre.in -@@ -106,6 +106,8 @@ PY_CORE_CFLAGS= $(PY_CFLAGS) $(PY_CFLAGS +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 109f402..61a41e2 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -106,6 +106,8 @@ PY_CORE_CFLAGS= $(PY_CFLAGS) $(PY_CFLAGS_NODIST) $(PY_CPPFLAGS) $(CFLAGSFORSHARE # Machine-dependent subdirectories MACHDEP= @MACHDEP@ @@ -95,7 +178,7 @@ Index: Python-3.5.2/Makefile.pre.in ABIFLAGS= @ABIFLAGS@ # Detailed destination directories -@@ -755,6 +757,7 @@ Modules/getpath.o: $(srcdir)/Modules/get +@@ -755,6 +757,7 @@ Modules/getpath.o: $(srcdir)/Modules/getpath.c Makefile -DEXEC_PREFIX='"$(exec_prefix)"' \ -DVERSION='"$(VERSION)"' \ -DVPATH='"$(VPATH)"' \ @@ -103,7 +186,7 @@ Index: Python-3.5.2/Makefile.pre.in -o $@ $(srcdir)/Modules/getpath.c Programs/python.o: $(srcdir)/Programs/python.c -@@ -835,7 +838,7 @@ $(OPCODE_H): $(srcdir)/Lib/opcode.py $(O +@@ -835,7 +838,7 @@ $(OPCODE_H): $(srcdir)/Lib/opcode.py $(OPCODE_H_SCRIPT) Python/compile.o Python/symtable.o Python/ast.o: $(GRAMMAR_H) $(AST_H) Python/getplatform.o: $(srcdir)/Python/getplatform.c @@ -112,10 +195,10 @@ Index: Python-3.5.2/Makefile.pre.in Python/importdl.o: $(srcdir)/Python/importdl.c $(CC) -c $(PY_CORE_CFLAGS) -I$(DLINCLDIR) -o $@ $(srcdir)/Python/importdl.c -Index: Python-3.5.2/Modules/getpath.c -=================================================================== ---- Python-3.5.2.orig/Modules/getpath.c -+++ Python-3.5.2/Modules/getpath.c +diff --git a/Modules/getpath.c b/Modules/getpath.c +index 18deb60..a01c3f8 100644 +--- a/Modules/getpath.c ++++ b/Modules/getpath.c @@ -105,6 +105,13 @@ #error "PREFIX, EXEC_PREFIX, VERSION, and VPATH must be constant defined" #endif @@ -138,10 +221,19 @@ Index: Python-3.5.2/Modules/getpath.c /* Get file status. Encode the path to the locale encoding. 
*/ -Index: Python-3.5.2/Python/getplatform.c -=================================================================== ---- Python-3.5.2.orig/Python/getplatform.c -+++ Python-3.5.2/Python/getplatform.c +@@ -494,7 +502,7 @@ calculate_path(void) + _pythonpath = Py_DecodeLocale(PYTHONPATH, NULL); + _prefix = Py_DecodeLocale(PREFIX, NULL); + _exec_prefix = Py_DecodeLocale(EXEC_PREFIX, NULL); +- lib_python = Py_DecodeLocale("lib/python" VERSION, NULL); ++ lib_python = Py_DecodeLocale(LIB_PYTHON, NULL); + + if (!_pythonpath || !_prefix || !_exec_prefix || !lib_python) { + Py_FatalError( +diff --git a/Python/getplatform.c b/Python/getplatform.c +index 6899140..66a49c6 100644 +--- a/Python/getplatform.c ++++ b/Python/getplatform.c @@ -10,3 +10,23 @@ Py_GetPlatform(void) { return PLATFORM; @@ -166,10 +258,10 @@ Index: Python-3.5.2/Python/getplatform.c +{ + return LIB; +} -Index: Python-3.5.2/Python/sysmodule.c -=================================================================== ---- Python-3.5.2.orig/Python/sysmodule.c -+++ Python-3.5.2/Python/sysmodule.c +diff --git a/Python/sysmodule.c b/Python/sysmodule.c +index 8d7e05a..d9dee0f 100644 +--- a/Python/sysmodule.c ++++ b/Python/sysmodule.c @@ -1790,6 +1790,10 @@ _PySys_Init(void) PyUnicode_FromString(Py_GetCopyright())); SET_SYS_FROM_STRING("platform", @@ -181,94 +273,11 @@ Index: Python-3.5.2/Python/sysmodule.c SET_SYS_FROM_STRING("executable", PyUnicode_FromWideChar( Py_GetProgramFullPath(), -1)); -Index: Python-3.5.2/setup.py -=================================================================== ---- Python-3.5.2.orig/setup.py -+++ Python-3.5.2/setup.py -@@ -492,7 +492,7 @@ class PyBuildExt(build_ext): - # directories (i.e. '.' and 'Include') must be first. See issue - # 10520. - if not cross_compiling: -- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') -+ add_dir_to_list(self.compiler.library_dirs, os.path.join('/usr/local', sys.lib)) - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - # only change this for cross builds for 3.3, issues on Mageia - if cross_compiling: -@@ -550,8 +550,7 @@ class PyBuildExt(build_ext): - # be assumed that no additional -I,-L directives are needed. 
- if not cross_compiling: - lib_dirs = self.compiler.library_dirs + [ -- '/lib64', '/usr/lib64', -- '/lib', '/usr/lib', -+ '/' + sys.lib, '/usr/' + sys.lib, - ] - inc_dirs = self.compiler.include_dirs + ['/usr/include'] - else: -@@ -743,11 +742,11 @@ class PyBuildExt(build_ext): - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + -- ['/usr/lib/termcap'], -+ ['/usr/'+sys.lib+'/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], -- library_dirs=['/usr/lib/termcap'], -+ library_dirs=['/usr/'+sys.lib+'/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: -Index: Python-3.5.2/Lib/sysconfig.py -=================================================================== ---- Python-3.5.2.orig/Lib/sysconfig.py -+++ Python-3.5.2/Lib/sysconfig.py -@@ -20,10 +20,10 @@ __all__ = [ - - _INSTALL_SCHEMES = { - 'posix_prefix': { -- 'stdlib': '{installed_base}/lib/python{py_version_short}', -- 'platstdlib': '{platbase}/lib/python{py_version_short}', -+ 'stdlib': '{installed_base}/'+sys.lib+'/python{py_version_short}', -+ 'platstdlib': '{platbase}/'+sys.lib+'/python{py_version_short}', - 'purelib': '{base}/lib/python{py_version_short}/site-packages', -- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages', -+ 'platlib': '{platbase}/'+sys.lib+'/python{py_version_short}/site-packages', - 'include': - '{installed_base}/include/python{py_version_short}{abiflags}', - 'platinclude': -@@ -32,10 +32,10 @@ _INSTALL_SCHEMES = { - 'data': '{base}', - }, - 'posix_home': { -- 'stdlib': '{installed_base}/lib/python', -- 'platstdlib': '{base}/lib/python', -+ 'stdlib': '{installed_base}/'+sys.lib+'/python', -+ 'platstdlib': '{base}/'+sys.lib+'/python', - 'purelib': '{base}/lib/python', -- 'platlib': '{base}/lib/python', -+ 'platlib': '{base}/'+sys.lib+'/python', - 'include': '{installed_base}/include/python', - 'platinclude': '{installed_base}/include/python', - 'scripts': '{base}/bin', -@@ -61,10 +61,10 @@ _INSTALL_SCHEMES = { - 'data': '{userbase}', - }, - 'posix_user': { -- 'stdlib': '{userbase}/lib/python{py_version_short}', -- 'platstdlib': '{userbase}/lib/python{py_version_short}', -+ 'stdlib': '{userbase}/'+sys.lib+'/python{py_version_short}', -+ 'platstdlib': '{userbase}/'+sys.lib+'/python{py_version_short}', - 'purelib': '{userbase}/lib/python{py_version_short}/site-packages', -- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages', -+ 'platlib': '{userbase}/'+sys.lib+'/python{py_version_short}/site-packages', - 'include': '{userbase}/include/python{py_version_short}', - 'scripts': '{userbase}/bin', - 'data': '{userbase}', -Index: Python-3.5.2/configure.ac -=================================================================== ---- Python-3.5.2.orig/configure.ac -+++ Python-3.5.2/configure.ac -@@ -876,6 +876,41 @@ PLATDIR=plat-$MACHDEP +diff --git a/configure.ac b/configure.ac +index 707324d..e8d59a3 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -883,6 +883,41 @@ PLATDIR=plat-$MACHDEP AC_SUBST(PLATDIR) AC_SUBST(PLATFORM_TRIPLET) @@ -310,3 +319,43 @@ Index: Python-3.5.2/configure.ac AC_MSG_CHECKING([for -Wl,--no-as-needed]) save_LDFLAGS="$LDFLAGS" +diff --git a/setup.py b/setup.py +index 6d26deb..7b14215 100644 +--- a/setup.py ++++ b/setup.py +@@ -495,7 +495,7 @@ class PyBuildExt(build_ext): + # directories (i.e. '.' and 'Include') must be first. See issue + # 10520. 
+ if not cross_compiling: +- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') ++ add_dir_to_list(self.compiler.library_dirs, os.path.join('/usr/local', sys.lib)) + add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') + # only change this for cross builds for 3.3, issues on Mageia + if cross_compiling: +@@ -553,8 +553,7 @@ class PyBuildExt(build_ext): + # be assumed that no additional -I,-L directives are needed. + if not cross_compiling: + lib_dirs = self.compiler.library_dirs + [ +- '/lib64', '/usr/lib64', +- '/lib', '/usr/lib', ++ '/' + sys.lib, '/usr/' + sys.lib, + ] + inc_dirs = self.compiler.include_dirs + ['/usr/include'] + else: +@@ -746,11 +745,11 @@ class PyBuildExt(build_ext): + elif curses_library: + readline_libs.append(curses_library) + elif self.compiler.find_library_file(lib_dirs + +- ['/usr/lib/termcap'], ++ ['/usr/'+sys.lib+'/termcap'], + 'termcap'): + readline_libs.append('termcap') + exts.append( Extension('readline', ['readline.c'], +- library_dirs=['/usr/lib/termcap'], ++ library_dirs=['/usr/'+sys.lib+'/termcap'], + extra_link_args=readline_extra_link_args, + libraries=readline_libs) ) + else: +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3/upstream-random-fixes.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python3/upstream-random-fixes.patch new file mode 100644 index 000000000..0d9152ccd --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3/upstream-random-fixes.patch @@ -0,0 +1,721 @@ +This patch updates random.c to match upstream python's code at revision +8125d9a8152b. This addresses various issues around problems with glibc 2.24 +and 2.25 such that python would fail to start with: + +[rpurdie@centos7 ~]$ /tmp/t2/sysroots/x86_64-pokysdk-linux/usr/bin/python3 +Fatal Python error: getentropy() failed +Aborted + +(taken from our buildtools-tarball also breaks eSDK) + +Upstream-Status: Backport + +# HG changeset patch +# User Victor Stinner +# Date 1483957133 -3600 +# Node ID 8125d9a8152b79e712cb09c7094b9129b9bcea86 +# Parent 337461574c90281630751b6095c4e1baf380cf7d +Issue #29157: Prefer getrandom() over getentropy() + +Copy and then adapt Python/random.c from default branch. Difference between 3.5 +and default branches: + +* Python 3.5 only uses getrandom() in non-blocking mode: flags=GRND_NONBLOCK +* If getrandom() fails with EAGAIN: py_getrandom() immediately fails and + remembers that getrandom() doesn't work. +* Python 3.5 has no _PyOS_URandomNonblock() function: _PyOS_URandom() + works in non-blocking mode on Python 3.5 + +RP 2017/1/22 + +Index: Python-3.5.2/Python/random.c +=================================================================== +--- Python-3.5.2.orig/Python/random.c ++++ Python-3.5.2/Python/random.c +@@ -1,6 +1,9 @@ + #include "Python.h" + #ifdef MS_WINDOWS + # include ++/* All sample MSDN wincrypt programs include the header below. It is at least ++ * required with Min GW. 
*/ ++# include + #else + # include + # ifdef HAVE_SYS_STAT_H +@@ -36,10 +39,9 @@ win32_urandom_init(int raise) + return 0; + + error: +- if (raise) ++ if (raise) { + PyErr_SetFromWindowsErr(0); +- else +- Py_FatalError("Failed to initialize Windows random API (CryptoGen)"); ++ } + return -1; + } + +@@ -52,8 +54,9 @@ win32_urandom(unsigned char *buffer, Py_ + + if (hCryptProv == 0) + { +- if (win32_urandom_init(raise) == -1) ++ if (win32_urandom_init(raise) == -1) { + return -1; ++ } + } + + while (size > 0) +@@ -62,11 +65,9 @@ win32_urandom(unsigned char *buffer, Py_ + if (!CryptGenRandom(hCryptProv, (DWORD)chunk, buffer)) + { + /* CryptGenRandom() failed */ +- if (raise) ++ if (raise) { + PyErr_SetFromWindowsErr(0); +- else +- Py_FatalError("Failed to initialized the randomized hash " +- "secret using CryptoGen)"); ++ } + return -1; + } + buffer += chunk; +@@ -75,55 +76,29 @@ win32_urandom(unsigned char *buffer, Py_ + return 0; + } + +-/* Issue #25003: Don't use getentropy() on Solaris (available since +- * Solaris 11.3), it is blocking whereas os.urandom() should not block. */ +-#elif defined(HAVE_GETENTROPY) && !defined(sun) +-#define PY_GETENTROPY 1 +- +-/* Fill buffer with size pseudo-random bytes generated by getentropy(). +- Return 0 on success, or raise an exception and return -1 on error. +- +- If fatal is nonzero, call Py_FatalError() instead of raising an exception +- on error. */ +-static int +-py_getentropy(unsigned char *buffer, Py_ssize_t size, int fatal) +-{ +- while (size > 0) { +- Py_ssize_t len = Py_MIN(size, 256); +- int res; +- +- if (!fatal) { +- Py_BEGIN_ALLOW_THREADS +- res = getentropy(buffer, len); +- Py_END_ALLOW_THREADS +- +- if (res < 0) { +- PyErr_SetFromErrno(PyExc_OSError); +- return -1; +- } +- } +- else { +- res = getentropy(buffer, len); +- if (res < 0) +- Py_FatalError("getentropy() failed"); +- } +- +- buffer += len; +- size -= len; +- } +- return 0; +-} +- +-#else ++#else /* !MS_WINDOWS */ + + #if defined(HAVE_GETRANDOM) || defined(HAVE_GETRANDOM_SYSCALL) + #define PY_GETRANDOM 1 + ++/* Call getrandom() to get random bytes: ++ ++ - Return 1 on success ++ - Return 0 if getrandom() is not available (failed with ENOSYS or EPERM), ++ or if getrandom(GRND_NONBLOCK) failed with EAGAIN (system urandom not ++ initialized yet). ++ - Raise an exception (if raise is non-zero) and return -1 on error: ++ if getrandom() failed with EINTR, raise is non-zero and the Python signal ++ handler raised an exception, or if getrandom() failed with a different ++ error. ++ ++ getrandom() is retried if it failed with EINTR: interrupted by a signal. */ + static int + py_getrandom(void *buffer, Py_ssize_t size, int raise) + { +- /* Is getrandom() supported by the running kernel? +- * Need Linux kernel 3.17 or newer, or Solaris 11.3 or newer */ ++ /* Is getrandom() supported by the running kernel? Set to 0 if getrandom() ++ failed with ENOSYS or EPERM. Need Linux kernel 3.17 or newer, or Solaris ++ 11.3 or newer */ + static int getrandom_works = 1; + + /* getrandom() on Linux will block if called before the kernel has +@@ -132,84 +107,165 @@ py_getrandom(void *buffer, Py_ssize_t si + * see https://bugs.python.org/issue26839. To avoid this, use the + * GRND_NONBLOCK flag. */ + const int flags = GRND_NONBLOCK; +- int n; ++ char *dest; ++ long n; + +- if (!getrandom_works) ++ if (!getrandom_works) { + return 0; ++ } + ++ dest = buffer; + while (0 < size) { + #ifdef sun + /* Issue #26735: On Solaris, getrandom() is limited to returning up +- to 1024 bytes */ ++ to 1024 bytes. 
Call it multiple times if more bytes are ++ requested. */ + n = Py_MIN(size, 1024); + #else +- n = size; ++ n = Py_MIN(size, LONG_MAX); + #endif + + errno = 0; + #ifdef HAVE_GETRANDOM + if (raise) { + Py_BEGIN_ALLOW_THREADS +- n = getrandom(buffer, n, flags); ++ n = getrandom(dest, n, flags); + Py_END_ALLOW_THREADS + } + else { +- n = getrandom(buffer, n, flags); ++ n = getrandom(dest, n, flags); + } + #else + /* On Linux, use the syscall() function because the GNU libc doesn't +- * expose the Linux getrandom() syscall yet. See: +- * https://sourceware.org/bugzilla/show_bug.cgi?id=17252 */ ++ expose the Linux getrandom() syscall yet. See: ++ https://sourceware.org/bugzilla/show_bug.cgi?id=17252 */ + if (raise) { + Py_BEGIN_ALLOW_THREADS +- n = syscall(SYS_getrandom, buffer, n, flags); ++ n = syscall(SYS_getrandom, dest, n, flags); + Py_END_ALLOW_THREADS + } + else { +- n = syscall(SYS_getrandom, buffer, n, flags); ++ n = syscall(SYS_getrandom, dest, n, flags); + } + #endif + + if (n < 0) { +- if (errno == ENOSYS) { ++ /* ENOSYS: the syscall is not supported by the kernel. ++ EPERM: the syscall is blocked by a security policy (ex: SECCOMP) ++ or something else. */ ++ if (errno == ENOSYS || errno == EPERM) { + getrandom_works = 0; + return 0; + } ++ + if (errno == EAGAIN) { +- /* If we failed with EAGAIN, the entropy pool was +- * uninitialized. In this case, we return failure to fall +- * back to reading from /dev/urandom. +- * +- * Note: In this case the data read will not be random so +- * should not be used for cryptographic purposes. Retaining +- * the existing semantics for practical purposes. */ ++ /* getrandom(GRND_NONBLOCK) fails with EAGAIN if the system ++ urandom is not initialiazed yet. In this case, fall back on ++ reading from /dev/urandom. ++ ++ Note: In this case the data read will not be random so ++ should not be used for cryptographic purposes. Retaining ++ the existing semantics for practical purposes. */ + getrandom_works = 0; + return 0; + } + + if (errno == EINTR) { +- if (PyErr_CheckSignals()) { +- if (!raise) +- Py_FatalError("getrandom() interrupted by a signal"); +- return -1; ++ if (raise) { ++ if (PyErr_CheckSignals()) { ++ return -1; ++ } + } +- /* retry getrandom() */ ++ ++ /* retry getrandom() if it was interrupted by a signal */ + continue; + } + +- if (raise) ++ if (raise) { + PyErr_SetFromErrno(PyExc_OSError); +- else +- Py_FatalError("getrandom() failed"); ++ } + return -1; + } + +- buffer += n; ++ dest += n; + size -= n; + } + return 1; + } +-#endif ++ ++#elif defined(HAVE_GETENTROPY) ++#define PY_GETENTROPY 1 ++ ++/* Fill buffer with size pseudo-random bytes generated by getentropy(): ++ ++ - Return 1 on success ++ - Return 0 if getentropy() syscall is not available (failed with ENOSYS or ++ EPERM). ++ - Raise an exception (if raise is non-zero) and return -1 on error: ++ if getentropy() failed with EINTR, raise is non-zero and the Python signal ++ handler raised an exception, or if getentropy() failed with a different ++ error. ++ ++ getentropy() is retried if it failed with EINTR: interrupted by a signal. */ ++static int ++py_getentropy(char *buffer, Py_ssize_t size, int raise) ++{ ++ /* Is getentropy() supported by the running kernel? Set to 0 if ++ getentropy() failed with ENOSYS or EPERM. */ ++ static int getentropy_works = 1; ++ ++ if (!getentropy_works) { ++ return 0; ++ } ++ ++ while (size > 0) { ++ /* getentropy() is limited to returning up to 256 bytes. Call it ++ multiple times if more bytes are requested. 
*/ ++ Py_ssize_t len = Py_MIN(size, 256); ++ int res; ++ ++ if (raise) { ++ Py_BEGIN_ALLOW_THREADS ++ res = getentropy(buffer, len); ++ Py_END_ALLOW_THREADS ++ } ++ else { ++ res = getentropy(buffer, len); ++ } ++ ++ if (res < 0) { ++ /* ENOSYS: the syscall is not supported by the running kernel. ++ EPERM: the syscall is blocked by a security policy (ex: SECCOMP) ++ or something else. */ ++ if (errno == ENOSYS || errno == EPERM) { ++ getentropy_works = 0; ++ return 0; ++ } ++ ++ if (errno == EINTR) { ++ if (raise) { ++ if (PyErr_CheckSignals()) { ++ return -1; ++ } ++ } ++ ++ /* retry getentropy() if it was interrupted by a signal */ ++ continue; ++ } ++ ++ if (raise) { ++ PyErr_SetFromErrno(PyExc_OSError); ++ } ++ return -1; ++ } ++ ++ buffer += len; ++ size -= len; ++ } ++ return 1; ++} ++#endif /* defined(HAVE_GETENTROPY) && !defined(sun) */ ++ + + static struct { + int fd; +@@ -217,127 +273,123 @@ static struct { + ino_t st_ino; + } urandom_cache = { -1 }; + ++/* Read random bytes from the /dev/urandom device: + +-/* Read size bytes from /dev/urandom into buffer. +- Call Py_FatalError() on error. */ +-static void +-dev_urandom_noraise(unsigned char *buffer, Py_ssize_t size) +-{ +- int fd; +- Py_ssize_t n; ++ - Return 0 on success ++ - Raise an exception (if raise is non-zero) and return -1 on error + +- assert (0 < size); ++ Possible causes of errors: + +-#ifdef PY_GETRANDOM +- if (py_getrandom(buffer, size, 0) == 1) +- return; +- /* getrandom() is not supported by the running kernel, fall back +- * on reading /dev/urandom */ +-#endif ++ - open() failed with ENOENT, ENXIO, ENODEV, EACCES: the /dev/urandom device ++ was not found. For example, it was removed manually or not exposed in a ++ chroot or container. ++ - open() failed with a different error ++ - fstat() failed ++ - read() failed or returned 0 + +- fd = _Py_open_noraise("/dev/urandom", O_RDONLY); +- if (fd < 0) +- Py_FatalError("Failed to open /dev/urandom"); ++ read() is retried if it failed with EINTR: interrupted by a signal. + +- while (0 < size) +- { +- do { +- n = read(fd, buffer, (size_t)size); +- } while (n < 0 && errno == EINTR); +- if (n <= 0) +- { +- /* stop on error or if read(size) returned 0 */ +- Py_FatalError("Failed to read bytes from /dev/urandom"); +- break; +- } +- buffer += n; +- size -= (Py_ssize_t)n; +- } +- close(fd); +-} ++ The file descriptor of the device is kept open between calls to avoid using ++ many file descriptors when run in parallel from multiple threads: ++ see the issue #18756. ++ ++ st_dev and st_ino fields of the file descriptor (from fstat()) are cached to ++ check if the file descriptor was replaced by a different file (which is ++ likely a bug in the application): see the issue #21207. + +-/* Read size bytes from /dev/urandom into buffer. +- Return 0 on success, raise an exception and return -1 on error. */ ++ If the file descriptor was closed or replaced, open a new file descriptor ++ but don't close the old file descriptor: it probably points to something ++ important for some third-party code. 
*/ + static int +-dev_urandom_python(char *buffer, Py_ssize_t size) ++dev_urandom(char *buffer, Py_ssize_t size, int raise) + { + int fd; + Py_ssize_t n; +- struct _Py_stat_struct st; +-#ifdef PY_GETRANDOM +- int res; +-#endif + +- if (size <= 0) +- return 0; ++ if (raise) { ++ struct _Py_stat_struct st; + +-#ifdef PY_GETRANDOM +- res = py_getrandom(buffer, size, 1); +- if (res < 0) +- return -1; +- if (res == 1) +- return 0; +- /* getrandom() is not supported by the running kernel, fall back +- * on reading /dev/urandom */ +-#endif +- +- if (urandom_cache.fd >= 0) { +- /* Does the fd point to the same thing as before? (issue #21207) */ +- if (_Py_fstat_noraise(urandom_cache.fd, &st) +- || st.st_dev != urandom_cache.st_dev +- || st.st_ino != urandom_cache.st_ino) { +- /* Something changed: forget the cached fd (but don't close it, +- since it probably points to something important for some +- third-party code). */ +- urandom_cache.fd = -1; +- } +- } +- if (urandom_cache.fd >= 0) +- fd = urandom_cache.fd; +- else { +- fd = _Py_open("/dev/urandom", O_RDONLY); +- if (fd < 0) { +- if (errno == ENOENT || errno == ENXIO || +- errno == ENODEV || errno == EACCES) +- PyErr_SetString(PyExc_NotImplementedError, +- "/dev/urandom (or equivalent) not found"); +- /* otherwise, keep the OSError exception raised by _Py_open() */ +- return -1; +- } + if (urandom_cache.fd >= 0) { +- /* urandom_fd was initialized by another thread while we were +- not holding the GIL, keep it. */ +- close(fd); +- fd = urandom_cache.fd; ++ /* Does the fd point to the same thing as before? (issue #21207) */ ++ if (_Py_fstat_noraise(urandom_cache.fd, &st) ++ || st.st_dev != urandom_cache.st_dev ++ || st.st_ino != urandom_cache.st_ino) { ++ /* Something changed: forget the cached fd (but don't close it, ++ since it probably points to something important for some ++ third-party code). */ ++ urandom_cache.fd = -1; ++ } + } ++ if (urandom_cache.fd >= 0) ++ fd = urandom_cache.fd; + else { +- if (_Py_fstat(fd, &st)) { +- close(fd); ++ fd = _Py_open("/dev/urandom", O_RDONLY); ++ if (fd < 0) { ++ if (errno == ENOENT || errno == ENXIO || ++ errno == ENODEV || errno == EACCES) { ++ PyErr_SetString(PyExc_NotImplementedError, ++ "/dev/urandom (or equivalent) not found"); ++ } ++ /* otherwise, keep the OSError exception raised by _Py_open() */ + return -1; + } ++ if (urandom_cache.fd >= 0) { ++ /* urandom_fd was initialized by another thread while we were ++ not holding the GIL, keep it. 
*/ ++ close(fd); ++ fd = urandom_cache.fd; ++ } + else { +- urandom_cache.fd = fd; +- urandom_cache.st_dev = st.st_dev; +- urandom_cache.st_ino = st.st_ino; ++ if (_Py_fstat(fd, &st)) { ++ close(fd); ++ return -1; ++ } ++ else { ++ urandom_cache.fd = fd; ++ urandom_cache.st_dev = st.st_dev; ++ urandom_cache.st_ino = st.st_ino; ++ } + } + } +- } + +- do { +- n = _Py_read(fd, buffer, (size_t)size); +- if (n == -1) +- return -1; +- if (n == 0) { +- PyErr_Format(PyExc_RuntimeError, +- "Failed to read %zi bytes from /dev/urandom", +- size); ++ do { ++ n = _Py_read(fd, buffer, (size_t)size); ++ if (n == -1) ++ return -1; ++ if (n == 0) { ++ PyErr_Format(PyExc_RuntimeError, ++ "Failed to read %zi bytes from /dev/urandom", ++ size); ++ return -1; ++ } ++ ++ buffer += n; ++ size -= n; ++ } while (0 < size); ++ } ++ else { ++ fd = _Py_open_noraise("/dev/urandom", O_RDONLY); ++ if (fd < 0) { + return -1; + } + +- buffer += n; +- size -= n; +- } while (0 < size); ++ while (0 < size) ++ { ++ do { ++ n = read(fd, buffer, (size_t)size); ++ } while (n < 0 && errno == EINTR); + ++ if (n <= 0) { ++ /* stop on error or if read(size) returned 0 */ ++ close(fd); ++ return -1; ++ } ++ ++ buffer += n; ++ size -= n; ++ } ++ close(fd); ++ } + return 0; + } + +@@ -349,8 +401,8 @@ dev_urandom_close(void) + urandom_cache.fd = -1; + } + } ++#endif /* !MS_WINDOWS */ + +-#endif + + /* Fill buffer with pseudo-random bytes generated by a linear congruent + generator (LCG): +@@ -373,29 +425,98 @@ lcg_urandom(unsigned int x0, unsigned ch + } + } + +-/* Fill buffer with size pseudo-random bytes from the operating system random +- number generator (RNG). It is suitable for most cryptographic purposes +- except long living private keys for asymmetric encryption. ++/* Read random bytes: + +- Return 0 on success, raise an exception and return -1 on error. */ +-int +-_PyOS_URandom(void *buffer, Py_ssize_t size) ++ - Return 0 on success ++ - Raise an exception (if raise is non-zero) and return -1 on error ++ ++ Used sources of entropy ordered by preference, preferred source first: ++ ++ - CryptGenRandom() on Windows ++ - getrandom() function (ex: Linux and Solaris): call py_getrandom() ++ - getentropy() function (ex: OpenBSD): call py_getentropy() ++ - /dev/urandom device ++ ++ Read from the /dev/urandom device if getrandom() or getentropy() function ++ is not available or does not work. ++ ++ Prefer getrandom() over getentropy() because getrandom() supports blocking ++ and non-blocking mode and Python requires non-blocking RNG at startup to ++ initialize its hash secret: see the PEP 524. ++ ++ Prefer getrandom() and getentropy() over reading directly /dev/urandom ++ because these functions don't need file descriptors and so avoid ENFILE or ++ EMFILE errors (too many open files): see the issue #18756. ++ ++ Only use RNG running in the kernel. They are more secure because it is ++ harder to get the internal state of a RNG running in the kernel land than a ++ RNG running in the user land. The kernel has a direct access to the hardware ++ and has access to hardware RNG, they are used as entropy sources. ++ ++ Note: the OpenSSL RAND_pseudo_bytes() function does not automatically reseed ++ its RNG on fork(), two child processes (with the same pid) generate the same ++ random numbers: see issue #18747. Kernel RNGs don't have this issue, ++ they have access to good quality entropy sources. 
++ ++ If raise is zero: ++ ++ - Don't raise an exception on error ++ - Don't call the Python signal handler (don't call PyErr_CheckSignals()) if ++ a function fails with EINTR: retry directly the interrupted function ++ - Don't release the GIL to call functions. ++*/ ++static int ++pyurandom(void *buffer, Py_ssize_t size, int raise) + { ++#if defined(PY_GETRANDOM) || defined(PY_GETENTROPY) ++ int res; ++#endif ++ + if (size < 0) { +- PyErr_Format(PyExc_ValueError, +- "negative argument not allowed"); ++ if (raise) { ++ PyErr_Format(PyExc_ValueError, ++ "negative argument not allowed"); ++ } + return -1; + } +- if (size == 0) ++ ++ if (size == 0) { + return 0; ++ } + + #ifdef MS_WINDOWS +- return win32_urandom((unsigned char *)buffer, size, 1); +-#elif defined(PY_GETENTROPY) +- return py_getentropy(buffer, size, 0); ++ return win32_urandom((unsigned char *)buffer, size, raise); ++#else ++ ++#if defined(PY_GETRANDOM) || defined(PY_GETENTROPY) ++#ifdef PY_GETRANDOM ++ res = py_getrandom(buffer, size, raise); + #else +- return dev_urandom_python((char*)buffer, size); ++ res = py_getentropy(buffer, size, raise); + #endif ++ if (res < 0) { ++ return -1; ++ } ++ if (res == 1) { ++ return 0; ++ } ++ /* getrandom() or getentropy() function is not available: failed with ++ ENOSYS, EPERM or EAGAIN. Fall back on reading from /dev/urandom. */ ++#endif ++ ++ return dev_urandom(buffer, size, raise); ++#endif ++} ++ ++/* Fill buffer with size pseudo-random bytes from the operating system random ++ number generator (RNG). It is suitable for most cryptographic purposes ++ except long living private keys for asymmetric encryption. ++ ++ Return 0 on success. Raise an exception and return -1 on error. */ ++int ++_PyOS_URandom(void *buffer, Py_ssize_t size) ++{ ++ return pyurandom(buffer, size, 1); + } + + void +@@ -436,13 +557,14 @@ _PyRandom_Init(void) + } + } + else { +-#ifdef MS_WINDOWS +- (void)win32_urandom(secret, secret_size, 0); +-#elif defined(PY_GETENTROPY) +- (void)py_getentropy(secret, secret_size, 1); +-#else +- dev_urandom_noraise(secret, secret_size); +-#endif ++ int res; ++ ++ /* _PyRandom_Init() is called very early in the Python initialization ++ and so exceptions cannot be used (use raise=0). */ ++ res = pyurandom(secret, secret_size, 0); ++ if (res < 0) { ++ Py_FatalError("failed to get random numbers to initialize Python"); ++ } + } + } + +@@ -454,8 +576,6 @@ _PyRandom_Fini(void) + CryptReleaseContext(hCryptProv, 0); + hCryptProv = 0; + } +-#elif defined(PY_GETENTROPY) +- /* nothing to clean */ + #else + dev_urandom_close(); + #endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3/use_packed_importlib.patch b/import-layers/yocto-poky/meta/recipes-devtools/python/python3/use_packed_importlib.patch deleted file mode 100644 index 70708a0ad..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3/use_packed_importlib.patch +++ /dev/null @@ -1,31 +0,0 @@ - -Upstream-Status: Inappropriate [embedded specific] - -The binary _freeze_importlib is built and also used during build time to generate -importlib*.h files, this is impossible to do on a cross-compile environment, -this avoids executing the binary and forces it to use the packed importlib*.h files. 
- - -Signed-off-by: Alejandro Hernandez - -Index: Python-3.5.0/Makefile.pre.in -=================================================================== ---- Python-3.5.0.orig/Makefile.pre.in -+++ Python-3.5.0/Makefile.pre.in -@@ -700,12 +700,12 @@ Programs/_freeze_importlib.o: Programs/_ - Programs/_freeze_importlib: Programs/_freeze_importlib.o $(LIBRARY_OBJS_OMIT_FROZEN) - $(LINKCC) $(PY_LDFLAGS) -o $@ Programs/_freeze_importlib.o $(LIBRARY_OBJS_OMIT_FROZEN) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) - --Python/importlib_external.h: $(srcdir)/Lib/importlib/_bootstrap_external.py Programs/_freeze_importlib -- ./Programs/_freeze_importlib \ -+#Python/importlib_external.h: $(srcdir)/Lib/importlib/_bootstrap_external.py Programs/_freeze_importlib -+# ./Programs/_freeze_importlib \ - $(srcdir)/Lib/importlib/_bootstrap_external.py Python/importlib_external.h - --Python/importlib.h: $(srcdir)/Lib/importlib/_bootstrap.py Programs/_freeze_importlib -- ./Programs/_freeze_importlib \ -+#Python/importlib.h: $(srcdir)/Lib/importlib/_bootstrap.py Programs/_freeze_importlib -+# ./Programs/_freeze_importlib \ - $(srcdir)/Lib/importlib/_bootstrap.py Python/importlib.h - - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python3_3.5.2.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python3_3.5.2.bb index e6cbb9c56..2ff7c9e27 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python3_3.5.2.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python3_3.5.2.bb @@ -9,7 +9,6 @@ DISTRO_SRC_URI_linuxstdbase = "" SRC_URI = "http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz \ file://python-config.patch \ file://000-cross-compile.patch \ -file://020-dont-compile-python-files.patch \ file://030-fixup-include-dirs.patch \ file://070-dont-clean-ipkg-install.patch \ file://080-distutils-dont_adjust_files.patch \ @@ -37,6 +36,7 @@ SRC_URI += "\ file://setup.py-find-libraries-in-staging-dirs.patch \ file://configure.ac-fix-LIBPL.patch \ file://python3-fix-CVE-2016-1000110.patch \ + file://upstream-random-fixes.patch \ " SRC_URI[md5sum] = "8906efbacfcdc7c3c9198aeefafd159e" SRC_URI[sha256sum] = "0010f56100b9b74259ebcd5d4b295a32324b58b517403a10d1a2aa7cb22bca40" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python_2.7.12.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python_2.7.12.bb deleted file mode 100644 index 9fe35db2d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/python/python_2.7.12.bb +++ /dev/null @@ -1,171 +0,0 @@ -require python.inc -DEPENDS = "python-native libffi bzip2 db gdbm openssl readline sqlite3 zlib" -PR = "${INC_PR}" - -DISTRO_SRC_URI ?= "file://sitecustomize.py" -DISTRO_SRC_URI_linuxstdbase = "" -SRC_URI += "\ - file://01-use-proper-tools-for-cross-build.patch \ - file://03-fix-tkinter-detection.patch \ - file://06-avoid_usr_lib_termcap_path_in_linking.patch \ - ${DISTRO_SRC_URI} \ - file://multilib.patch \ - file://cgi_py.patch \ - file://setup_py_skip_cross_import_check.patch \ - file://add-md5module-support.patch \ - file://host_include_contamination.patch \ - file://fix_for_using_different_libdir.patch \ - file://setuptweaks.patch \ - file://check-if-target-is-64b-not-host.patch \ - file://search_db_h_in_inc_dirs_and_avoid_warning.patch \ - file://avoid_warning_about_tkinter.patch \ - file://avoid_warning_for_sunos_specific_module.patch \ - file://python-2.7.3-remove-bsdb-rpath.patch \ - file://fix-makefile-for-ptest.patch \ - file://run-ptest \ - file://parallel-makeinst-create-bindir.patch \ 
- file://use_sysroot_ncurses_instead_of_host.patch \ - file://add-CROSSPYTHONPATH-for-PYTHON_FOR_BUILD.patch \ - file://python-fix-CVE-2016-1000110.patch \ -" - -S = "${WORKDIR}/Python-${PV}" - -inherit autotools multilib_header python-dir pythonnative - -CONFIGUREOPTS += " --with-system-ffi " - -EXTRA_OECONF += "ac_cv_file__dev_ptmx=yes ac_cv_file__dev_ptc=no" - -do_configure_append() { - rm -f ${S}/Makefile.orig - autoreconf -Wcross --verbose --install --force --exclude=autopoint ../Python-${PV}/Modules/_ctypes/libffi -} - -do_compile() { - # regenerate platform specific files, because they depend on system headers - cd ${S}/Lib/plat-linux2 - include=${STAGING_INCDIR} ${STAGING_BINDIR_NATIVE}/python-native/python \ - ${S}/Tools/scripts/h2py.py -i '(u_long)' \ - ${STAGING_INCDIR}/dlfcn.h \ - ${STAGING_INCDIR}/linux/cdrom.h \ - ${STAGING_INCDIR}/netinet/in.h \ - ${STAGING_INCDIR}/sys/types.h - sed -e 's,${STAGING_DIR_HOST},,g' -i *.py - cd - - - # remove any bogus LD_LIBRARY_PATH - sed -i -e s,RUNSHARED=.*,RUNSHARED=, Makefile - - if [ ! -f Makefile.orig ]; then - install -m 0644 Makefile Makefile.orig - fi - sed -i -e 's#^LDFLAGS=.*#LDFLAGS=${LDFLAGS} -L. -L${STAGING_LIBDIR}#g' \ - -e 's,libdir=${libdir},libdir=${STAGING_LIBDIR},g' \ - -e 's,libexecdir=${libexecdir},libexecdir=${STAGING_DIR_HOST}${libexecdir},g' \ - -e 's,^LIBDIR=.*,LIBDIR=${STAGING_LIBDIR},g' \ - -e 's,includedir=${includedir},includedir=${STAGING_INCDIR},g' \ - -e 's,^INCLUDEDIR=.*,INCLUDE=${STAGING_INCDIR},g' \ - -e 's,^CONFINCLUDEDIR=.*,CONFINCLUDE=${STAGING_INCDIR},g' \ - Makefile - # save copy of it now, because if we do it in do_install and - # then call do_install twice we get Makefile.orig == Makefile.sysroot - install -m 0644 Makefile Makefile.sysroot - - export CROSS_COMPILE="${TARGET_PREFIX}" - export PYTHONBUILDDIR="${B}" - - oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python-native/pgen \ - HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python-native/python \ - STAGING_LIBDIR=${STAGING_LIBDIR} \ - STAGING_INCDIR=${STAGING_INCDIR} \ - STAGING_BASELIBDIR=${STAGING_BASELIBDIR} \ - OPT="${CFLAGS}" -} - -do_install() { - # make install needs the original Makefile, or otherwise the inclues would - # go to ${D}${STAGING...}/... - install -m 0644 Makefile.orig Makefile - - export CROSS_COMPILE="${TARGET_PREFIX}" - export PYTHONBUILDDIR="${B}" - - # After swizzling the makefile, we need to run the build again. 
- # install can race with the build so we have to run this first, then install - oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python-native/pgen \ - HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python-native/python \ - CROSSPYTHONPATH=${STAGING_LIBDIR_NATIVE}/python${PYTHON_MAJMIN}/lib-dynload/ \ - STAGING_LIBDIR=${STAGING_LIBDIR} \ - STAGING_INCDIR=${STAGING_INCDIR} \ - STAGING_BASELIBDIR=${STAGING_BASELIBDIR} \ - DESTDIR=${D} LIBDIR=${libdir} - - oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python-native/pgen \ - HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python-native/python \ - CROSSPYTHONPATH=${STAGING_LIBDIR_NATIVE}/python${PYTHON_MAJMIN}/lib-dynload/ \ - STAGING_LIBDIR=${STAGING_LIBDIR} \ - STAGING_INCDIR=${STAGING_INCDIR} \ - STAGING_BASELIBDIR=${STAGING_BASELIBDIR} \ - DESTDIR=${D} LIBDIR=${libdir} install - - install -m 0644 Makefile.sysroot ${D}/${libdir}/python${PYTHON_MAJMIN}/config/Makefile - - if [ -e ${WORKDIR}/sitecustomize.py ]; then - install -m 0644 ${WORKDIR}/sitecustomize.py ${D}/${libdir}/python${PYTHON_MAJMIN} - fi - - oe_multilib_header python${PYTHON_MAJMIN}/pyconfig.h -} - -do_install_append_class-nativesdk () { - create_wrapper ${D}${bindir}/python2.7 PYTHONHOME='${prefix}' TERMINFO_DIRS='${sysconfdir}/terminfo:/etc/terminfo:/usr/share/terminfo:/usr/share/misc/terminfo:/lib/terminfo' -} - -SSTATE_SCAN_FILES += "Makefile" -PACKAGE_PREPROCESS_FUNCS += "py_package_preprocess" - -py_package_preprocess () { - # copy back the old Makefile to fix target package - install -m 0644 ${B}/Makefile.orig ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/config/Makefile - - # Remove references to buildmachine paths in target Makefile and _sysconfigdata - sed -i -e 's:--sysroot=${STAGING_DIR_TARGET}::g' -e s:'--with-libtool-sysroot=${STAGING_DIR_TARGET}'::g \ - ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/config/Makefile \ - ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata.py - python -m py_compile ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata.py -} - -require python-${PYTHON_MAJMIN}-manifest.inc - -# manual dependency additions -RPROVIDES_${PN}-core = "${PN}" -RRECOMMENDS_${PN}-core = "${PN}-readline" -RRECOMMENDS_${PN}-core_append_class-nativesdk = " nativesdk-python-modules" -RRECOMMENDS_${PN}-crypt = "openssl" - -# package libpython2 -PACKAGES =+ "lib${BPN}2" -FILES_lib${BPN}2 = "${libdir}/libpython*.so.*" - -# catch all the rest (unsorted) -PACKAGES += "${PN}-misc" -FILES_${PN}-misc = "${libdir}/python${PYTHON_MAJMIN}" -RDEPENDS_${PN}-modules += "${PN}-misc" -RDEPENDS_${PN}-ptest = "${PN}-modules" -#inherit ptest after "require python-${PYTHON_MAJMIN}-manifest.inc" so PACKAGES doesn't get overwritten -inherit ptest - -# This must come after inherit ptest for the override to take effect -do_install_ptest() { - cp ${B}/Makefile ${D}${PTEST_PATH} - sed -e s:LIBDIR/python/ptest:${PTEST_PATH}:g \ - -e s:LIBDIR:${libdir}:g \ - -i ${D}${PTEST_PATH}/run-ptest -} - -# catch manpage -PACKAGES += "${PN}-man" -FILES_${PN}-man = "${datadir}/man" - -BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/python/python_2.7.13.bb b/import-layers/yocto-poky/meta/recipes-devtools/python/python_2.7.13.bb new file mode 100644 index 000000000..4ef99523e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/python/python_2.7.13.bb @@ -0,0 +1,171 @@ +require python.inc +DEPENDS = "python-native libffi bzip2 db gdbm openssl readline sqlite3 zlib" +PR = "${INC_PR}" + +DISTRO_SRC_URI ?= "file://sitecustomize.py" +DISTRO_SRC_URI_linuxstdbase = "" +SRC_URI += "\ + 
file://01-use-proper-tools-for-cross-build.patch \ + file://03-fix-tkinter-detection.patch \ + file://06-avoid_usr_lib_termcap_path_in_linking.patch \ + ${DISTRO_SRC_URI} \ + file://multilib.patch \ + file://cgi_py.patch \ + file://setup_py_skip_cross_import_check.patch \ + file://add-md5module-support.patch \ + file://host_include_contamination.patch \ + file://fix_for_using_different_libdir.patch \ + file://setuptweaks.patch \ + file://check-if-target-is-64b-not-host.patch \ + file://search_db_h_in_inc_dirs_and_avoid_warning.patch \ + file://avoid_warning_about_tkinter.patch \ + file://avoid_warning_for_sunos_specific_module.patch \ + file://python-2.7.3-remove-bsdb-rpath.patch \ + file://fix-makefile-for-ptest.patch \ + file://run-ptest \ + file://parallel-makeinst-create-bindir.patch \ + file://use_sysroot_ncurses_instead_of_host.patch \ + file://add-CROSSPYTHONPATH-for-PYTHON_FOR_BUILD.patch \ + file://Don-t-use-getentropy-on-Linux.patch \ +" + +S = "${WORKDIR}/Python-${PV}" + +inherit autotools multilib_header python-dir pythonnative + +CONFIGUREOPTS += " --with-system-ffi " + +EXTRA_OECONF += "ac_cv_file__dev_ptmx=yes ac_cv_file__dev_ptc=no" + +do_configure_append() { + rm -f ${S}/Makefile.orig + autoreconf -Wcross --verbose --install --force --exclude=autopoint ../Python-${PV}/Modules/_ctypes/libffi +} + +do_compile() { + # regenerate platform specific files, because they depend on system headers + cd ${S}/Lib/plat-linux2 + include=${STAGING_INCDIR} ${STAGING_BINDIR_NATIVE}/python-native/python \ + ${S}/Tools/scripts/h2py.py -i '(u_long)' \ + ${STAGING_INCDIR}/dlfcn.h \ + ${STAGING_INCDIR}/linux/cdrom.h \ + ${STAGING_INCDIR}/netinet/in.h \ + ${STAGING_INCDIR}/sys/types.h + sed -e 's,${STAGING_DIR_HOST},,g' -i *.py + cd - + + # remove any bogus LD_LIBRARY_PATH + sed -i -e s,RUNSHARED=.*,RUNSHARED=, Makefile + + if [ ! -f Makefile.orig ]; then + install -m 0644 Makefile Makefile.orig + fi + sed -i -e 's#^LDFLAGS=.*#LDFLAGS=${LDFLAGS} -L. -L${STAGING_LIBDIR}#g' \ + -e 's,libdir=${libdir},libdir=${STAGING_LIBDIR},g' \ + -e 's,libexecdir=${libexecdir},libexecdir=${STAGING_DIR_HOST}${libexecdir},g' \ + -e 's,^LIBDIR=.*,LIBDIR=${STAGING_LIBDIR},g' \ + -e 's,includedir=${includedir},includedir=${STAGING_INCDIR},g' \ + -e 's,^INCLUDEDIR=.*,INCLUDE=${STAGING_INCDIR},g' \ + -e 's,^CONFINCLUDEDIR=.*,CONFINCLUDE=${STAGING_INCDIR},g' \ + Makefile + # save copy of it now, because if we do it in do_install and + # then call do_install twice we get Makefile.orig == Makefile.sysroot + install -m 0644 Makefile Makefile.sysroot + + export CROSS_COMPILE="${TARGET_PREFIX}" + export PYTHONBUILDDIR="${B}" + + oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python-native/pgen \ + HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python-native/python \ + STAGING_LIBDIR=${STAGING_LIBDIR} \ + STAGING_INCDIR=${STAGING_INCDIR} \ + STAGING_BASELIBDIR=${STAGING_BASELIBDIR} \ + OPT="${CFLAGS}" +} + +do_install() { + # make install needs the original Makefile, or otherwise the inclues would + # go to ${D}${STAGING...}/... + install -m 0644 Makefile.orig Makefile + + export CROSS_COMPILE="${TARGET_PREFIX}" + export PYTHONBUILDDIR="${B}" + + # After swizzling the makefile, we need to run the build again. 
+ # install can race with the build so we have to run this first, then install + oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python-native/pgen \ + HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python-native/python \ + CROSSPYTHONPATH=${STAGING_LIBDIR_NATIVE}/python${PYTHON_MAJMIN}/lib-dynload/ \ + STAGING_LIBDIR=${STAGING_LIBDIR} \ + STAGING_INCDIR=${STAGING_INCDIR} \ + STAGING_BASELIBDIR=${STAGING_BASELIBDIR} \ + DESTDIR=${D} LIBDIR=${libdir} + + oe_runmake HOSTPGEN=${STAGING_BINDIR_NATIVE}/python-native/pgen \ + HOSTPYTHON=${STAGING_BINDIR_NATIVE}/python-native/python \ + CROSSPYTHONPATH=${STAGING_LIBDIR_NATIVE}/python${PYTHON_MAJMIN}/lib-dynload/ \ + STAGING_LIBDIR=${STAGING_LIBDIR} \ + STAGING_INCDIR=${STAGING_INCDIR} \ + STAGING_BASELIBDIR=${STAGING_BASELIBDIR} \ + DESTDIR=${D} LIBDIR=${libdir} install + + install -m 0644 Makefile.sysroot ${D}/${libdir}/python${PYTHON_MAJMIN}/config/Makefile + + if [ -e ${WORKDIR}/sitecustomize.py ]; then + install -m 0644 ${WORKDIR}/sitecustomize.py ${D}/${libdir}/python${PYTHON_MAJMIN} + fi + + oe_multilib_header python${PYTHON_MAJMIN}/pyconfig.h +} + +do_install_append_class-nativesdk () { + create_wrapper ${D}${bindir}/python2.7 PYTHONHOME='${prefix}' TERMINFO_DIRS='${sysconfdir}/terminfo:/etc/terminfo:/usr/share/terminfo:/usr/share/misc/terminfo:/lib/terminfo' +} + +SSTATE_SCAN_FILES += "Makefile" +PACKAGE_PREPROCESS_FUNCS += "py_package_preprocess" + +py_package_preprocess () { + # copy back the old Makefile to fix target package + install -m 0644 ${B}/Makefile.orig ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/config/Makefile + + # Remove references to buildmachine paths in target Makefile and _sysconfigdata + sed -i -e 's:--sysroot=${STAGING_DIR_TARGET}::g' -e s:'--with-libtool-sysroot=${STAGING_DIR_TARGET}'::g \ + ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/config/Makefile \ + ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata.py + python -m py_compile ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata.py +} + +require python-${PYTHON_MAJMIN}-manifest.inc + +# manual dependency additions +RPROVIDES_${PN}-core = "${PN}" +RRECOMMENDS_${PN}-core = "${PN}-readline" +RRECOMMENDS_${PN}-core_append_class-nativesdk = " nativesdk-python-modules" +RRECOMMENDS_${PN}-crypt = "openssl" + +# package libpython2 +PACKAGES =+ "lib${BPN}2" +FILES_lib${BPN}2 = "${libdir}/libpython*.so.*" + +# catch all the rest (unsorted) +PACKAGES += "${PN}-misc" +FILES_${PN}-misc = "${libdir}/python${PYTHON_MAJMIN}" +RDEPENDS_${PN}-modules += "${PN}-misc" +RDEPENDS_${PN}-ptest = "${PN}-modules" +#inherit ptest after "require python-${PYTHON_MAJMIN}-manifest.inc" so PACKAGES doesn't get overwritten +inherit ptest + +# This must come after inherit ptest for the override to take effect +do_install_ptest() { + cp ${B}/Makefile ${D}${PTEST_PATH} + sed -e s:LIBDIR/python/ptest:${PTEST_PATH}:g \ + -e s:LIBDIR:${libdir}:g \ + -i ${D}${PTEST_PATH}/run-ptest +} + +# catch manpage +PACKAGES += "${PN}-man" +FILES_${PN}-man = "${datadir}/man" + +BBCLASSEXTEND = "nativesdk" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb index 8d27c4db6..27d53157d 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu-helper-native_1.0.bb @@ -19,3 +19,7 @@ do_install() { install -d ${D}${bindir} install tunctl ${D}${bindir}/ } + +RM_WORK_EXCLUDE_ITEMS += "recipe-sysroot-native" +DEPENDS += 
"qemu-native" +addtask addto_recipe_sysroot after do_populate_sysroot before do_build diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu-targets.inc b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu-targets.inc index a3e46a894..66dd675ed 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu-targets.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu-targets.inc @@ -4,8 +4,8 @@ def get_qemu_target_list(d): import bb - archs = d.getVar('QEMU_TARGETS', True).split() - tos = d.getVar('HOST_OS', True) + archs = d.getVar('QEMU_TARGETS').split() + tos = d.getVar('HOST_OS') softmmuonly = "" for arch in ['ppcemb']: if arch in archs: diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu.inc b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu.inc index b46e7f72e..0e1411af6 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu.inc @@ -3,35 +3,33 @@ HOMEPAGE = "http://qemu.org" LICENSE = "GPLv2 & LGPLv2.1" DEPENDS = "glib-2.0 zlib pixman" RDEPENDS_${PN}_class-target += "bash" -RDEPENDS_${PN}-ptest = "bash make" require qemu-targets.inc -inherit autotools ptest +inherit autotools pkgconfig bluetooth BBCLASSEXTEND = "native nativesdk" -PR = "r1" - # QEMU_TARGETS is overridable variable QEMU_TARGETS ?= "arm aarch64 i386 mips mipsel mips64 mips64el ppc sh4 x86_64" -SRC_URI = "\ - file://powerpc_rom.bin \ - file://disable-grabs.patch \ - file://exclude-some-arm-EABI-obsolete-syscalls.patch \ - file://wacom.patch \ - file://add-ptest-in-makefile.patch \ - file://run-ptest \ - file://0001-target-mips-add-24KEc-CPU-definition.patch \ - " - -SRC_URI_append_class-native = "\ - file://fix-libcap-header-issue-on-some-distro.patch \ - file://cpus.c-qemu_cpu_kick_thread_debugging.patch \ +EXTRA_OECONF = " \ + --prefix=${prefix} \ + --bindir=${bindir} \ + --includedir=${includedir} \ + --libdir=${libdir} \ + --mandir=${mandir} \ + --datadir=${datadir} \ + --docdir=${docdir}/${BPN} \ + --sysconfdir=${sysconfdir} \ + --libexecdir=${libexecdir} \ + --localstatedir=${localstatedir} \ + --with-confsuffix=/${BPN} \ + --disable-strip \ + --disable-werror \ + --target-list=${@get_qemu_target_list(d)} \ + --with-system-pixman \ + --extra-cflags='${CFLAGS}' \ " - -EXTRA_OECONF += "--target-list=${@get_qemu_target_list(d)} --disable-werror --disable-bluez --disable-libiscsi --with-system-pixman --extra-cflags='${CFLAGS}'" - -EXTRA_OECONF_class-nativesdk = "--target-list=${@get_qemu_target_list(d)} --disable-werror" +EXTRA_OECONF_append_class-native = " --python=${USRBINPATH}/python2.7" EXTRA_OEMAKE_append_class-native = " LD='${LD}' AR='${AR}' OBJCOPY='${OBJCOPY}' LDFLAGS='${LDFLAGS}'" @@ -52,38 +50,14 @@ do_configure_prepend_class-native() { "${S}"/Makefile "${S}"/Makefile.target } -KVMENABLE = "--enable-kvm" -KVMENABLE_darwin = "--disable-kvm" -KVMENABLE_mingw32 = "--disable-kvm" - do_configure() { - # Handle distros such as CentOS 5 32-bit that do not have kvm support - KVMOPTS="--disable-kvm" - if [ "${PN}" != "qemu-native" -a "${PN}" != "nativesdk-qemu" ] \ - || [ -f /usr/include/linux/kvm.h ] ; then - KVMOPTS="${KVMENABLE}" - fi - - ${S}/configure --prefix=${prefix} --sysconfdir=${sysconfdir} --libexecdir=${libexecdir} --localstatedir=${localstatedir} --disable-strip ${EXTRA_OECONF} $KVMOPTS + ${S}/configure ${EXTRA_OECONF} test ! 
-e ${S}/target-i386/beginend_funcs.sh || chmod a+x ${S}/target-i386/beginend_funcs.sh } -do_compile_ptest() { - make buildtest-TESTS -} - -do_install_ptest() { - cp -rL ${B}/tests ${D}${PTEST_PATH} - find ${D}${PTEST_PATH}/tests -type f -name "*.[Sshcod]" | xargs -i rm -rf {} - - cp ${S}/tests/Makefile.include ${D}${PTEST_PATH}/tests -} - do_install () { export STRIP="true" autotools_do_install - install -d ${D}${datadir}/qemu - install -m 0755 ${WORKDIR}/powerpc_rom.bin ${D}${datadir}/qemu } # The following fragment will create a wrapper for qemu-mips user emulation @@ -102,11 +76,18 @@ do_install_append() { PACKAGECONFIG ??= " \ fdt sdl \ - ${@bb.utils.contains('DISTRO_FEATURES', 'alsa', 'alsa', '', d)} \ - ${@bb.utils.contains('DISTRO_FEATURES', 'xen', 'xen', '', d)} \ + fdt sdl kvm \ + ${@bb.utils.filter('DISTRO_FEATURES', 'alsa xen', d)} \ " -PACKAGECONFIG_class-native ??= "fdt alsa uuid" -PACKAGECONFIG_class-nativesdk ??= "fdt sdl" +PACKAGECONFIG_class-native ??= "fdt alsa kvm" +PACKAGECONFIG_class-nativesdk ??= "fdt sdl kvm" + +# Handle distros such as CentOS 5 32-bit that do not have kvm support +PACKAGECONFIG_class-native_remove = "${@'kvm' if not os.path.exists('/usr/include/linux/kvm.h') else ''}" + +# Disable kvm on targets that do not support it +PACKAGECONFIG_remove_darwin = "kvm" +PACKAGECONFIG_remove_mingw32 = "kvm" PACKAGECONFIG[sdl] = "--enable-sdl,--disable-sdl,libsdl" PACKAGECONFIG[virtfs] = "--enable-virtfs --enable-attr,--disable-virtfs,libcap attr," @@ -118,22 +99,22 @@ PACKAGECONFIG[vnc-jpeg] = "--enable-vnc --enable-vnc-jpeg,--disable-vnc-jpeg,jpe PACKAGECONFIG[vnc-png] = "--enable-vnc --enable-vnc-png,--disable-vnc-png,libpng," PACKAGECONFIG[libcurl] = "--enable-curl,--disable-curl,libcurl," PACKAGECONFIG[nss] = "--enable-smartcard,--disable-smartcard,nss," -PACKAGECONFIG[uuid] = "--enable-uuid,--disable-uuid,util-linux," PACKAGECONFIG[curses] = "--enable-curses,--disable-curses,ncurses," PACKAGECONFIG[gtk+] = "--enable-gtk --with-gtkabi=3.0 --enable-vte,--disable-gtk --disable-vte,gtk+3 vte" PACKAGECONFIG[libcap-ng] = "--enable-cap-ng,--disable-cap-ng,libcap-ng," PACKAGECONFIG[ssh2] = "--enable-libssh2,--disable-libssh2,libssh2," -PACKAGECONFIG[gcrypt] = "--enable-gcrypt,--disable-gcrypt,gcrypt," +PACKAGECONFIG[gcrypt] = "--enable-gcrypt,--disable-gcrypt,libgcrypt," PACKAGECONFIG[nettle] = "--enable-nettle,--disable-nettle,nettle" PACKAGECONFIG[libusb] = "--enable-libusb,--disable-libusb,libusb1" PACKAGECONFIG[fdt] = "--enable-fdt,--disable-fdt,dtc" -PACKAGECONFIG[alsa] = ",,alsa-lib" +PACKAGECONFIG[alsa] = "--audio-drv-list='oss alsa',,alsa-lib" PACKAGECONFIG[glx] = "--enable-opengl,--disable-opengl,mesa" PACKAGECONFIG[lzo] = "--enable-lzo,--disable-lzo,lzo" PACKAGECONFIG[numa] = "--enable-numa,--disable-numa,numactl" PACKAGECONFIG[gnutls] = "--enable-gnutls,--disable-gnutls,gnutls" PACKAGECONFIG[bzip2] = "--enable-bzip2,--disable-bzip2,bzip2" - -EXTRA_OECONF += "${@bb.utils.contains('PACKAGECONFIG', 'alsa', '--audio-drv-list=oss,alsa', '', d)}" +PACKAGECONFIG[bluez] = "--enable-bluez,--disable-bluez,${BLUEZ}" +PACKAGECONFIG[libiscsi] = "--enable-libiscsi,--disable-libiscsi" +PACKAGECONFIG[kvm] = "--enable-kvm,--disable-kvm" INSANE_SKIP_${PN} = "arch" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-Provide-support-for-the-CUSE-TPM.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-Provide-support-for-the-CUSE-TPM.patch new file mode 100644 index 000000000..74dc6f5df --- /dev/null +++ 
b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-Provide-support-for-the-CUSE-TPM.patch @@ -0,0 +1,870 @@ +From 8737eef18f39ed087fd911d0a0886e8174d0468c Mon Sep 17 00:00:00 2001 +From: Stefan Berger +Date: Sat, 31 Dec 2016 11:23:32 -0500 +Subject: [PATCH 1/4] Provide support for the CUSE TPM + +Rather than integrating TPM functionality into QEMU directly +using the TPM emulation of libtpms, we now integrate an external +emulated TPM device. This device is expected to implement a Linux +CUSE interface (CUSE = character device in userspace). + +QEMU talks to the CUSE TPM using much functionality of the +passthrough driver. For example, the TPM commands and responses +are sent to the CUSE TPM using the read()/write() interface. +However, some out-of-band control needs to be done using the CUSE +TPM's ioctls. The CUSE TPM currently defines and implements 15 +different ioctls for controlling certain life-cycle aspects of +the emulated TPM. The ioctls can be regarded as a replacement for +direct function calls to a TPM emulator if the TPM were to be +directly integrated into QEMU. + +One of the ioctls allows to get a bitmask of supported capabilities. +Each returned bit indicates which capabilities have been implemented. +An include file defining the various ioctls is added to QEMU. + +The CUSE TPM and associated tools can be found here: + +https://github.com/stefanberger/swtpm + +(please use the latest version) + +To use the external CUSE TPM, the CUSE TPM should be started as follows: + +/usr/bin/swtpm_ioctl -s /dev/vtpm-test + +/usr/bin/swtpm_cuse -n vtpm-test + +QEMU can then be started using the following parameters: + +qemu-system-x86_64 \ + [...] \ + -tpmdev cuse-tpm,id=tpm0,cancel-path=/dev/null,path=/dev/vtpm-test \ + -device tpm-tis,id=tpm0,tpmdev=tpm0 \ + [...] + +Signed-off-by: Stefan Berger +Cc: Eric Blake + +Conflicts: + docs/qmp-commands.txt + +Patch cherry-picked from https://github.com/stefanberger/qemu-tpm, branch v2.8.0+tpm, +commit 27d6cd856d5a14061955df7a93ee490697a7a174. Applied cleanly except for +docs/qmp-commands.txt which did not exist yet in qemu 2.7. + +Upstream-Status: Pending [https://lists.nongnu.org/archive/html/qemu-devel/2016-06/msg00252.html] +Signed-off-by: Patrick Ohly +--- + hmp.c | 6 ++ + hw/tpm/tpm_int.h | 1 + + hw/tpm/tpm_ioctl.h | 215 +++++++++++++++++++++++++++++++++++++ + hw/tpm/tpm_passthrough.c | 274 +++++++++++++++++++++++++++++++++++++++++++++-- + qapi-schema.json | 18 +++- + qemu-options.hx | 21 +++- + tpm.c | 11 +- + 7 files changed, 529 insertions(+), 17 deletions(-) + create mode 100644 hw/tpm/tpm_ioctl.h + +diff --git a/hmp.c b/hmp.c +index cc2056e9e2..277b45ef5a 100644 +--- a/hmp.c ++++ b/hmp.c +@@ -883,6 +883,12 @@ void hmp_info_tpm(Monitor *mon, const QDict *qdict) + tpo->has_cancel_path ? ",cancel-path=" : "", + tpo->has_cancel_path ? tpo->cancel_path : ""); + break; ++ case TPM_TYPE_OPTIONS_KIND_CUSE_TPM: ++ tpo = ti->options->u.passthrough.data; ++ monitor_printf(mon, "%s%s", ++ tpo->has_path ? ",path=" : "", ++ tpo->has_path ? 
tpo->path : ""); ++ break; + case TPM_TYPE_OPTIONS_KIND__MAX: + break; + } +diff --git a/hw/tpm/tpm_int.h b/hw/tpm/tpm_int.h +index f2f285b3cc..6b2c9c953a 100644 +--- a/hw/tpm/tpm_int.h ++++ b/hw/tpm/tpm_int.h +@@ -61,6 +61,7 @@ struct tpm_resp_hdr { + #define TPM_TAG_RSP_AUTH1_COMMAND 0xc5 + #define TPM_TAG_RSP_AUTH2_COMMAND 0xc6 + ++#define TPM_SUCCESS 0 + #define TPM_FAIL 9 + + #define TPM_ORD_ContinueSelfTest 0x53 +diff --git a/hw/tpm/tpm_ioctl.h b/hw/tpm/tpm_ioctl.h +new file mode 100644 +index 0000000000..a341e15741 +--- /dev/null ++++ b/hw/tpm/tpm_ioctl.h +@@ -0,0 +1,215 @@ ++/* ++ * tpm_ioctl.h ++ * ++ * (c) Copyright IBM Corporation 2014, 2015. ++ * ++ * This file is licensed under the terms of the 3-clause BSD license ++ */ ++#ifndef _TPM_IOCTL_H_ ++#define _TPM_IOCTL_H_ ++ ++#include ++#include ++#include ++#include ++ ++/* ++ * Every response from a command involving a TPM command execution must hold ++ * the ptm_res as the first element. ++ * ptm_res corresponds to the error code of a command executed by the TPM. ++ */ ++ ++typedef uint32_t ptm_res; ++ ++/* PTM_GET_TPMESTABLISHED: get the establishment bit */ ++struct ptm_est { ++ union { ++ struct { ++ ptm_res tpm_result; ++ unsigned char bit; /* TPM established bit */ ++ } resp; /* response */ ++ } u; ++}; ++ ++/* PTM_RESET_TPMESTABLISHED: reset establishment bit */ ++struct ptm_reset_est { ++ union { ++ struct { ++ uint8_t loc; /* locality to use */ ++ } req; /* request */ ++ struct { ++ ptm_res tpm_result; ++ } resp; /* response */ ++ } u; ++}; ++ ++/* PTM_INIT */ ++struct ptm_init { ++ union { ++ struct { ++ uint32_t init_flags; /* see definitions below */ ++ } req; /* request */ ++ struct { ++ ptm_res tpm_result; ++ } resp; /* response */ ++ } u; ++}; ++ ++/* above init_flags */ ++#define PTM_INIT_FLAG_DELETE_VOLATILE (1 << 0) ++ /* delete volatile state file after reading it */ ++ ++/* PTM_SET_LOCALITY */ ++struct ptm_loc { ++ union { ++ struct { ++ uint8_t loc; /* locality to set */ ++ } req; /* request */ ++ struct { ++ ptm_res tpm_result; ++ } resp; /* response */ ++ } u; ++}; ++ ++/* PTM_HASH_DATA: hash given data */ ++struct ptm_hdata { ++ union { ++ struct { ++ uint32_t length; ++ uint8_t data[4096]; ++ } req; /* request */ ++ struct { ++ ptm_res tpm_result; ++ } resp; /* response */ ++ } u; ++}; ++ ++/* ++ * size of the TPM state blob to transfer; x86_64 can handle 8k, ++ * ppc64le only ~7k; keep the response below a 4k page size ++ */ ++#define PTM_STATE_BLOB_SIZE (3 * 1024) ++ ++/* ++ * The following is the data structure to get state blobs from the TPM. ++ * If the size of the state blob exceeds the PTM_STATE_BLOB_SIZE, multiple reads ++ * with this ioctl and with adjusted offset are necessary. All bytes ++ * must be transferred and the transfer is done once the last byte has been ++ * returned. ++ * It is possible to use the read() interface for reading the data; however, ++ * the first bytes of the state blob will be part of the response to the ioctl(); ++ * a subsequent read() is only necessary if the total length (totlength) exceeds ++ * the number of received bytes. seek() is not supported. 
++ */ ++struct ptm_getstate { ++ union { ++ struct { ++ uint32_t state_flags; /* may be: PTM_STATE_FLAG_DECRYPTED */ ++ uint32_t type; /* which blob to pull */ ++ uint32_t offset; /* offset from where to read */ ++ } req; /* request */ ++ struct { ++ ptm_res tpm_result; ++ uint32_t state_flags; /* may be: PTM_STATE_FLAG_ENCRYPTED */ ++ uint32_t totlength; /* total length that will be transferred */ ++ uint32_t length; /* number of bytes in following buffer */ ++ uint8_t data[PTM_STATE_BLOB_SIZE]; ++ } resp; /* response */ ++ } u; ++}; ++ ++/* TPM state blob types */ ++#define PTM_BLOB_TYPE_PERMANENT 1 ++#define PTM_BLOB_TYPE_VOLATILE 2 ++#define PTM_BLOB_TYPE_SAVESTATE 3 ++ ++/* state_flags above : */ ++#define PTM_STATE_FLAG_DECRYPTED 1 /* on input: get decrypted state */ ++#define PTM_STATE_FLAG_ENCRYPTED 2 /* on output: state is encrypted */ ++ ++/* ++ * The following is the data structure to set state blobs in the TPM. ++ * If the size of the state blob exceeds the PTM_STATE_BLOB_SIZE, multiple ++ * 'writes' using this ioctl are necessary. The last packet is indicated ++ * by the length being smaller than the PTM_STATE_BLOB_SIZE. ++ * The very first packet may have a length indicator of '0' enabling ++ * a write() with all the bytes from a buffer. If the write() interface ++ * is used, a final ioctl with a non-full buffer must be made to indicate ++ * that all data were transferred (a write with 0 bytes would not work). ++ */ ++struct ptm_setstate { ++ union { ++ struct { ++ uint32_t state_flags; /* may be PTM_STATE_FLAG_ENCRYPTED */ ++ uint32_t type; /* which blob to set */ ++ uint32_t length; /* length of the data; ++ use 0 on the first packet to ++ transfer using write() */ ++ uint8_t data[PTM_STATE_BLOB_SIZE]; ++ } req; /* request */ ++ struct { ++ ptm_res tpm_result; ++ } resp; /* response */ ++ } u; ++}; ++ ++/* ++ * PTM_GET_CONFIG: Data structure to get runtime configuration information ++ * such as which keys are applied. 
++ */ ++struct ptm_getconfig { ++ union { ++ struct { ++ ptm_res tpm_result; ++ uint32_t flags; ++ } resp; /* response */ ++ } u; ++}; ++ ++#define PTM_CONFIG_FLAG_FILE_KEY 0x1 ++#define PTM_CONFIG_FLAG_MIGRATION_KEY 0x2 ++ ++ ++typedef uint64_t ptm_cap; ++typedef struct ptm_est ptm_est; ++typedef struct ptm_reset_est ptm_reset_est; ++typedef struct ptm_loc ptm_loc; ++typedef struct ptm_hdata ptm_hdata; ++typedef struct ptm_init ptm_init; ++typedef struct ptm_getstate ptm_getstate; ++typedef struct ptm_setstate ptm_setstate; ++typedef struct ptm_getconfig ptm_getconfig; ++ ++/* capability flags returned by PTM_GET_CAPABILITY */ ++#define PTM_CAP_INIT (1) ++#define PTM_CAP_SHUTDOWN (1<<1) ++#define PTM_CAP_GET_TPMESTABLISHED (1<<2) ++#define PTM_CAP_SET_LOCALITY (1<<3) ++#define PTM_CAP_HASHING (1<<4) ++#define PTM_CAP_CANCEL_TPM_CMD (1<<5) ++#define PTM_CAP_STORE_VOLATILE (1<<6) ++#define PTM_CAP_RESET_TPMESTABLISHED (1<<7) ++#define PTM_CAP_GET_STATEBLOB (1<<8) ++#define PTM_CAP_SET_STATEBLOB (1<<9) ++#define PTM_CAP_STOP (1<<10) ++#define PTM_CAP_GET_CONFIG (1<<11) ++ ++enum { ++ PTM_GET_CAPABILITY = _IOR('P', 0, ptm_cap), ++ PTM_INIT = _IOWR('P', 1, ptm_init), ++ PTM_SHUTDOWN = _IOR('P', 2, ptm_res), ++ PTM_GET_TPMESTABLISHED = _IOR('P', 3, ptm_est), ++ PTM_SET_LOCALITY = _IOWR('P', 4, ptm_loc), ++ PTM_HASH_START = _IOR('P', 5, ptm_res), ++ PTM_HASH_DATA = _IOWR('P', 6, ptm_hdata), ++ PTM_HASH_END = _IOR('P', 7, ptm_res), ++ PTM_CANCEL_TPM_CMD = _IOR('P', 8, ptm_res), ++ PTM_STORE_VOLATILE = _IOR('P', 9, ptm_res), ++ PTM_RESET_TPMESTABLISHED = _IOWR('P', 10, ptm_reset_est), ++ PTM_GET_STATEBLOB = _IOWR('P', 11, ptm_getstate), ++ PTM_SET_STATEBLOB = _IOWR('P', 12, ptm_setstate), ++ PTM_STOP = _IOR('P', 13, ptm_res), ++ PTM_GET_CONFIG = _IOR('P', 14, ptm_getconfig), ++}; ++ ++#endif /* _TPM_IOCTL_H */ +diff --git a/hw/tpm/tpm_passthrough.c b/hw/tpm/tpm_passthrough.c +index e88c0d20bc..050f2ba850 100644 +--- a/hw/tpm/tpm_passthrough.c ++++ b/hw/tpm/tpm_passthrough.c +@@ -33,6 +33,7 @@ + #include "sysemu/tpm_backend_int.h" + #include "tpm_tis.h" + #include "tpm_util.h" ++#include "tpm_ioctl.h" + + #define DEBUG_TPM 0 + +@@ -45,6 +46,7 @@ + #define TYPE_TPM_PASSTHROUGH "tpm-passthrough" + #define TPM_PASSTHROUGH(obj) \ + OBJECT_CHECK(TPMPassthruState, (obj), TYPE_TPM_PASSTHROUGH) ++#define TYPE_TPM_CUSE "tpm-cuse" + + static const TPMDriverOps tpm_passthrough_driver; + +@@ -71,12 +73,18 @@ struct TPMPassthruState { + bool had_startup_error; + + TPMVersion tpm_version; ++ ptm_cap cuse_cap; /* capabilities of the CUSE TPM */ ++ uint8_t cur_locty_number; /* last set locality */ + }; + + typedef struct TPMPassthruState TPMPassthruState; + + #define TPM_PASSTHROUGH_DEFAULT_DEVICE "/dev/tpm0" + ++#define TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt) (tpm_pt->cuse_cap != 0) ++ ++#define TPM_CUSE_IMPLEMENTS_ALL(S, cap) (((S)->cuse_cap & (cap)) == (cap)) ++ + /* functions */ + + static void tpm_passthrough_cancel_cmd(TPMBackend *tb); +@@ -148,7 +156,28 @@ static bool tpm_passthrough_is_selftest(const uint8_t *in, uint32_t in_len) + return false; + } + ++static int tpm_passthrough_set_locality(TPMPassthruState *tpm_pt, ++ uint8_t locty_number) ++{ ++ ptm_loc loc; ++ ++ if (TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)) { ++ if (tpm_pt->cur_locty_number != locty_number) { ++ loc.u.req.loc = locty_number; ++ if (ioctl(tpm_pt->tpm_fd, PTM_SET_LOCALITY, &loc) < 0) { ++ error_report("tpm_cuse: could not set locality on " ++ "CUSE TPM: %s", ++ strerror(errno)); ++ return -1; ++ } ++ tpm_pt->cur_locty_number = locty_number; 
++ } ++ } ++ return 0; ++} ++ + static int tpm_passthrough_unix_tx_bufs(TPMPassthruState *tpm_pt, ++ uint8_t locality_number, + const uint8_t *in, uint32_t in_len, + uint8_t *out, uint32_t out_len, + bool *selftest_done) +@@ -157,6 +186,11 @@ static int tpm_passthrough_unix_tx_bufs(TPMPassthruState *tpm_pt, + bool is_selftest; + const struct tpm_resp_hdr *hdr; + ++ ret = tpm_passthrough_set_locality(tpm_pt, locality_number); ++ if (ret < 0) { ++ goto err_exit; ++ } ++ + tpm_pt->tpm_op_canceled = false; + tpm_pt->tpm_executing = true; + *selftest_done = false; +@@ -207,10 +241,12 @@ err_exit: + } + + static int tpm_passthrough_unix_transfer(TPMPassthruState *tpm_pt, ++ uint8_t locality_number, + const TPMLocality *locty_data, + bool *selftest_done) + { + return tpm_passthrough_unix_tx_bufs(tpm_pt, ++ locality_number, + locty_data->w_buffer.buffer, + locty_data->w_offset, + locty_data->r_buffer.buffer, +@@ -231,6 +267,7 @@ static void tpm_passthrough_worker_thread(gpointer data, + switch (cmd) { + case TPM_BACKEND_CMD_PROCESS_CMD: + tpm_passthrough_unix_transfer(tpm_pt, ++ thr_parms->tpm_state->locty_number, + thr_parms->tpm_state->locty_data, + &selftest_done); + +@@ -247,6 +284,93 @@ static void tpm_passthrough_worker_thread(gpointer data, + } + + /* ++ * Gracefully shut down the external CUSE TPM ++ */ ++static void tpm_passthrough_shutdown(TPMPassthruState *tpm_pt) ++{ ++ ptm_res res; ++ ++ if (TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)) { ++ if (ioctl(tpm_pt->tpm_fd, PTM_SHUTDOWN, &res) < 0) { ++ error_report("tpm_cuse: Could not cleanly shut down " ++ "the CUSE TPM: %s", ++ strerror(errno)); ++ } ++ } ++} ++ ++/* ++ * Probe for the CUSE TPM by sending an ioctl() requesting its ++ * capability flags. ++ */ ++static int tpm_passthrough_cuse_probe(TPMPassthruState *tpm_pt) ++{ ++ int rc = 0; ++ ++ if (ioctl(tpm_pt->tpm_fd, PTM_GET_CAPABILITY, &tpm_pt->cuse_cap) < 0) { ++ error_report("Error: CUSE TPM was requested, but probing failed"); ++ rc = -1; ++ } ++ ++ return rc; ++} ++ ++static int tpm_passthrough_cuse_check_caps(TPMPassthruState *tpm_pt) ++{ ++ int rc = 0; ++ ptm_cap caps = 0; ++ const char *tpm = NULL; ++ ++ /* check for min. required capabilities */ ++ switch (tpm_pt->tpm_version) { ++ case TPM_VERSION_1_2: ++ caps = PTM_CAP_INIT | PTM_CAP_SHUTDOWN | PTM_CAP_GET_TPMESTABLISHED | ++ PTM_CAP_SET_LOCALITY; ++ tpm = "1.2"; ++ break; ++ case TPM_VERSION_2_0: ++ caps = PTM_CAP_INIT | PTM_CAP_SHUTDOWN | PTM_CAP_GET_TPMESTABLISHED | ++ PTM_CAP_SET_LOCALITY | PTM_CAP_RESET_TPMESTABLISHED; ++ tpm = "2"; ++ break; ++ case TPM_VERSION_UNSPEC: ++ error_report("tpm_cuse: %s: TPM version has not been set", ++ __func__); ++ return -1; ++ } ++ ++ if (!TPM_CUSE_IMPLEMENTS_ALL(tpm_pt, caps)) { ++ error_report("tpm_cuse: TPM does not implement minimum set of required " ++ "capabilities for TPM %s (0x%x)", tpm, (int)caps); ++ rc = -1; ++ } ++ ++ return rc; ++} ++ ++/* ++ * Initialize the external CUSE TPM ++ */ ++static int tpm_passthrough_cuse_init(TPMPassthruState *tpm_pt) ++{ ++ int rc = 0; ++ ptm_init init = { ++ .u.req.init_flags = PTM_INIT_FLAG_DELETE_VOLATILE, ++ }; ++ ++ if (TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)) { ++ if (ioctl(tpm_pt->tpm_fd, PTM_INIT, &init) < 0) { ++ error_report("tpm_cuse: Detected CUSE TPM but could not " ++ "send INIT: %s", ++ strerror(errno)); ++ rc = -1; ++ } ++ } ++ ++ return rc; ++} ++ ++/* + * Start the TPM (thread). If it had been started before, then terminate + * and start it again. 
+ */ +@@ -261,6 +385,8 @@ static int tpm_passthrough_startup_tpm(TPMBackend *tb) + tpm_passthrough_worker_thread, + &tpm_pt->tpm_thread_params); + ++ tpm_passthrough_cuse_init(tpm_pt); ++ + return 0; + } + +@@ -291,14 +417,43 @@ static int tpm_passthrough_init(TPMBackend *tb, TPMState *s, + + static bool tpm_passthrough_get_tpm_established_flag(TPMBackend *tb) + { ++ TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); ++ ptm_est est; ++ ++ if (TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)) { ++ if (ioctl(tpm_pt->tpm_fd, PTM_GET_TPMESTABLISHED, &est) < 0) { ++ error_report("tpm_cuse: Could not get the TPM established " ++ "flag from the CUSE TPM: %s", ++ strerror(errno)); ++ return false; ++ } ++ return (est.u.resp.bit != 0); ++ } + return false; + } + + static int tpm_passthrough_reset_tpm_established_flag(TPMBackend *tb, + uint8_t locty) + { ++ TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); ++ int rc = 0; ++ ptm_reset_est ptmreset_est; ++ + /* only a TPM 2.0 will support this */ +- return 0; ++ if (tpm_pt->tpm_version == TPM_VERSION_2_0) { ++ if (TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)) { ++ ptmreset_est.u.req.loc = tpm_pt->cur_locty_number; ++ ++ if (ioctl(tpm_pt->tpm_fd, PTM_RESET_TPMESTABLISHED, ++ &ptmreset_est) < 0) { ++ error_report("tpm_cuse: Could not reset the establishment bit " ++ "failed: %s", ++ strerror(errno)); ++ rc = -1; ++ } ++ } ++ } ++ return rc; + } + + static bool tpm_passthrough_get_startup_error(TPMBackend *tb) +@@ -329,7 +484,8 @@ static void tpm_passthrough_deliver_request(TPMBackend *tb) + static void tpm_passthrough_cancel_cmd(TPMBackend *tb) + { + TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); +- int n; ++ ptm_res res; ++ static bool error_printed; + + /* + * As of Linux 3.7 the tpm_tis driver does not properly cancel +@@ -338,17 +494,34 @@ static void tpm_passthrough_cancel_cmd(TPMBackend *tb) + * command, e.g., a command executed on the host. 
+ */ + if (tpm_pt->tpm_executing) { +- if (tpm_pt->cancel_fd >= 0) { +- n = write(tpm_pt->cancel_fd, "-", 1); +- if (n != 1) { +- error_report("Canceling TPM command failed: %s", +- strerror(errno)); +- } else { +- tpm_pt->tpm_op_canceled = true; ++ if (TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)) { ++ if (TPM_CUSE_IMPLEMENTS_ALL(tpm_pt, PTM_CAP_CANCEL_TPM_CMD)) { ++ if (ioctl(tpm_pt->tpm_fd, PTM_CANCEL_TPM_CMD, &res) < 0) { ++ error_report("tpm_cuse: Could not cancel command on " ++ "CUSE TPM: %s", ++ strerror(errno)); ++ } else if (res != TPM_SUCCESS) { ++ if (!error_printed) { ++ error_report("TPM error code from command " ++ "cancellation of CUSE TPM: 0x%x", res); ++ error_printed = true; ++ } ++ } else { ++ tpm_pt->tpm_op_canceled = true; ++ } + } + } else { +- error_report("Cannot cancel TPM command due to missing " +- "TPM sysfs cancel entry"); ++ if (tpm_pt->cancel_fd >= 0) { ++ if (write(tpm_pt->cancel_fd, "-", 1) != 1) { ++ error_report("Canceling TPM command failed: %s", ++ strerror(errno)); ++ } else { ++ tpm_pt->tpm_op_canceled = true; ++ } ++ } else { ++ error_report("Cannot cancel TPM command due to missing " ++ "TPM sysfs cancel entry"); ++ } + } + } + } +@@ -378,6 +551,11 @@ static int tpm_passthrough_open_sysfs_cancel(TPMBackend *tb) + char *dev; + char path[PATH_MAX]; + ++ if (TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)) { ++ /* not needed, but so we have a fd */ ++ return qemu_open("/dev/null", O_WRONLY); ++ } ++ + if (tb->cancel_path) { + fd = qemu_open(tb->cancel_path, O_WRONLY); + if (fd < 0) { +@@ -412,12 +590,22 @@ static int tpm_passthrough_handle_device_opts(QemuOpts *opts, TPMBackend *tb) + { + TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); + const char *value; ++ bool have_cuse = false; ++ ++ value = qemu_opt_get(opts, "type"); ++ if (value != NULL && !strcmp("cuse-tpm", value)) { ++ have_cuse = true; ++ } + + value = qemu_opt_get(opts, "cancel-path"); + tb->cancel_path = g_strdup(value); + + value = qemu_opt_get(opts, "path"); + if (!value) { ++ if (have_cuse) { ++ error_report("Missing path to access CUSE TPM"); ++ goto err_free_parameters; ++ } + value = TPM_PASSTHROUGH_DEFAULT_DEVICE; + } + +@@ -432,15 +620,36 @@ static int tpm_passthrough_handle_device_opts(QemuOpts *opts, TPMBackend *tb) + goto err_free_parameters; + } + ++ tpm_pt->cur_locty_number = ~0; ++ ++ if (have_cuse) { ++ if (tpm_passthrough_cuse_probe(tpm_pt)) { ++ goto err_close_tpmdev; ++ } ++ /* init TPM for probing */ ++ if (tpm_passthrough_cuse_init(tpm_pt)) { ++ goto err_close_tpmdev; ++ } ++ } ++ + if (tpm_util_test_tpmdev(tpm_pt->tpm_fd, &tpm_pt->tpm_version)) { + error_report("'%s' is not a TPM device.", + tpm_pt->tpm_dev); + goto err_close_tpmdev; + } + ++ if (have_cuse) { ++ if (tpm_passthrough_cuse_check_caps(tpm_pt)) { ++ goto err_close_tpmdev; ++ } ++ } ++ ++ + return 0; + + err_close_tpmdev: ++ tpm_passthrough_shutdown(tpm_pt); ++ + qemu_close(tpm_pt->tpm_fd); + tpm_pt->tpm_fd = -1; + +@@ -491,6 +700,8 @@ static void tpm_passthrough_destroy(TPMBackend *tb) + + tpm_backend_thread_end(&tpm_pt->tbt); + ++ tpm_passthrough_shutdown(tpm_pt); ++ + qemu_close(tpm_pt->tpm_fd); + qemu_close(tpm_pt->cancel_fd); + +@@ -564,3 +775,44 @@ static void tpm_passthrough_register(void) + } + + type_init(tpm_passthrough_register) ++ ++/* CUSE TPM */ ++static const char *tpm_passthrough_cuse_create_desc(void) ++{ ++ return "CUSE TPM backend driver"; ++} ++ ++static const TPMDriverOps tpm_cuse_driver = { ++ .type = TPM_TYPE_CUSE_TPM, ++ .opts = tpm_passthrough_cmdline_opts, ++ .desc = 
tpm_passthrough_cuse_create_desc, ++ .create = tpm_passthrough_create, ++ .destroy = tpm_passthrough_destroy, ++ .init = tpm_passthrough_init, ++ .startup_tpm = tpm_passthrough_startup_tpm, ++ .realloc_buffer = tpm_passthrough_realloc_buffer, ++ .reset = tpm_passthrough_reset, ++ .had_startup_error = tpm_passthrough_get_startup_error, ++ .deliver_request = tpm_passthrough_deliver_request, ++ .cancel_cmd = tpm_passthrough_cancel_cmd, ++ .get_tpm_established_flag = tpm_passthrough_get_tpm_established_flag, ++ .reset_tpm_established_flag = tpm_passthrough_reset_tpm_established_flag, ++ .get_tpm_version = tpm_passthrough_get_tpm_version, ++}; ++ ++static const TypeInfo tpm_cuse_info = { ++ .name = TYPE_TPM_CUSE, ++ .parent = TYPE_TPM_BACKEND, ++ .instance_size = sizeof(TPMPassthruState), ++ .class_init = tpm_passthrough_class_init, ++ .instance_init = tpm_passthrough_inst_init, ++ .instance_finalize = tpm_passthrough_inst_finalize, ++}; ++ ++static void tpm_cuse_register(void) ++{ ++ type_register_static(&tpm_cuse_info); ++ tpm_register_driver(&tpm_cuse_driver); ++} ++ ++type_init(tpm_cuse_register) +diff --git a/qapi-schema.json b/qapi-schema.json +index 5658723b37..53120d0f63 100644 +--- a/qapi-schema.json ++++ b/qapi-schema.json +@@ -3522,10 +3522,12 @@ + # An enumeration of TPM types + # + # @passthrough: TPM passthrough type ++# @cuse-tpm: CUSE TPM type ++# Since: 2.6 + # + # Since: 1.5 + ## +-{ 'enum': 'TpmType', 'data': [ 'passthrough' ] } ++{ 'enum': 'TpmType', 'data': [ 'passthrough', 'cuse-tpm' ] } + + ## + # @query-tpm-types: +@@ -3554,6 +3556,17 @@ + '*cancel-path' : 'str'} } + + ## ++# @TPMCuseOptions: ++# ++# Information about the CUSE TPM type ++# ++# @path: string describing the path used for accessing the TPM device ++# ++# Since: 2.6 ++## ++{ 'struct': 'TPMCuseOptions', 'data': { 'path' : 'str'}} ++ ++## + # @TpmTypeOptions: + # + # A union referencing different TPM backend types' configuration options +@@ -3563,7 +3576,8 @@ + # Since: 1.5 + ## + { 'union': 'TpmTypeOptions', +- 'data': { 'passthrough' : 'TPMPassthroughOptions' } } ++ 'data': { 'passthrough' : 'TPMPassthroughOptions', ++ 'cuse-tpm' : 'TPMCuseOptions' } } + + ## + # @TpmInfo: +diff --git a/qemu-options.hx b/qemu-options.hx +index a71aaf8ea8..e0f1d8e676 100644 +--- a/qemu-options.hx ++++ b/qemu-options.hx +@@ -2763,7 +2763,10 @@ DEF("tpmdev", HAS_ARG, QEMU_OPTION_tpmdev, \ + "-tpmdev passthrough,id=id[,path=path][,cancel-path=path]\n" + " use path to provide path to a character device; default is /dev/tpm0\n" + " use cancel-path to provide path to TPM's cancel sysfs entry; if\n" +- " not provided it will be searched for in /sys/class/misc/tpm?/device\n", ++ " not provided it will be searched for in /sys/class/misc/tpm?/device\n" ++ "-tpmdev cuse-tpm,id=id,path=path\n" ++ " use path to provide path to a character device to talk to the\n" ++ " TPM emulator providing a CUSE interface\n", + QEMU_ARCH_ALL) + STEXI + +@@ -2772,8 +2775,8 @@ The general form of a TPM device option is: + + @item -tpmdev @var{backend} ,id=@var{id} [,@var{options}] + @findex -tpmdev +-Backend type must be: +-@option{passthrough}. ++Backend type must be either one of the following: ++@option{passthrough}, @option{cuse-tpm}. + + The specific backend type will determine the applicable options. 
+ The @code{-tpmdev} option creates the TPM backend and requires a +@@ -2823,6 +2826,18 @@ To create a passthrough TPM use the following two options: + Note that the @code{-tpmdev} id is @code{tpm0} and is referenced by + @code{tpmdev=tpm0} in the device option. + ++@item -tpmdev cuse-tpm, id=@var{id}, path=@var{path} ++ ++(Linux-host only) Enable access to a TPM emulator with a CUSE interface. ++ ++@option{path} specifies the path to the CUSE TPM character device. ++ ++To create a backend device accessing the CUSE TPM emulator using /dev/vtpm ++use the following two options: ++@example ++-tpmdev cuse-tpm,id=tpm0,path=/dev/vtpm -device tpm-tis,tpmdev=tpm0 ++@end example ++ + @end table + + ETEXI +diff --git a/tpm.c b/tpm.c +index 9a7c7114d3..5ec2373286 100644 +--- a/tpm.c ++++ b/tpm.c +@@ -25,7 +25,7 @@ static QLIST_HEAD(, TPMBackend) tpm_backends = + + + #define TPM_MAX_MODELS 1 +-#define TPM_MAX_DRIVERS 1 ++#define TPM_MAX_DRIVERS 2 + + static TPMDriverOps const *be_drivers[TPM_MAX_DRIVERS] = { + NULL, +@@ -272,6 +272,15 @@ static TPMInfo *qmp_query_tpm_inst(TPMBackend *drv) + tpo->has_cancel_path = true; + } + break; ++ case TPM_TYPE_CUSE_TPM: ++ res->options->type = TPM_TYPE_OPTIONS_KIND_CUSE_TPM; ++ tpo = g_new0(TPMPassthroughOptions, 1); ++ res->options->u.passthrough.data = tpo; ++ if (drv->path) { ++ tpo->path = g_strdup(drv->path); ++ tpo->has_path = true; ++ } ++ break; + case TPM_TYPE__MAX: + break; + } +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-target-mips-add-24KEc-CPU-definition.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-target-mips-add-24KEc-CPU-definition.patch deleted file mode 100644 index c4dbee7d7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-target-mips-add-24KEc-CPU-definition.patch +++ /dev/null @@ -1,54 +0,0 @@ -From 926bc194f918d46bd93557b15da8153b6a94a1d5 Mon Sep 17 00:00:00 2001 -From: =?UTF-8?q?Andr=C3=A9=20Draszik?= -Date: Mon, 25 Jul 2016 23:58:22 +0100 -Subject: [PATCH] target-mips: add 24KEc CPU definition -MIME-Version: 1.0 -Content-Type: text/plain; charset=UTF-8 -Content-Transfer-Encoding: 8bit - -Define a new CPU definition supporting 24KEc cores, similar to -the existing 24Kc, but with added support for DSP instructions -and MIPS16e (and without FPU). 
- -Signed-off-by: André Draszik ---- -Upstream-Status: Submitted [http://lists.nongnu.org/archive/html/qemu-devel/2016-07/msg05778.html] - target-mips/translate_init.c | 22 ++++++++++++++++++++++ - 1 file changed, 22 insertions(+) - -diff --git a/target-mips/translate_init.c b/target-mips/translate_init.c -index 39ed5c4..6ae23e4 100644 ---- a/target-mips/translate_init.c -+++ b/target-mips/translate_init.c -@@ -256,6 +256,28 @@ static const mips_def_t mips_defs[] = - .mmu_type = MMU_TYPE_R4000, - }, - { -+ .name = "24KEc", -+ .CP0_PRid = 0x00019600, -+ .CP0_Config0 = MIPS_CONFIG0 | (0x1 << CP0C0_AR) | -+ (MMU_TYPE_R4000 << CP0C0_MT), -+ .CP0_Config1 = MIPS_CONFIG1 | (15 << CP0C1_MMU) | -+ (0 << CP0C1_IS) | (3 << CP0C1_IL) | (1 << CP0C1_IA) | -+ (0 << CP0C1_DS) | (3 << CP0C1_DL) | (1 << CP0C1_DA) | -+ (1 << CP0C1_CA), -+ .CP0_Config2 = MIPS_CONFIG2, -+ .CP0_Config3 = MIPS_CONFIG3 | (1 << CP0C3_DSPP) | (0 << CP0C3_VInt), -+ .CP0_LLAddr_rw_bitmask = 0, -+ .CP0_LLAddr_shift = 4, -+ .SYNCI_Step = 32, -+ .CCRes = 2, -+ /* we have a DSP, but no FPU */ -+ .CP0_Status_rw_bitmask = 0x1378FF1F, -+ .SEGBITS = 32, -+ .PABITS = 32, -+ .insn_flags = CPU_MIPS32R2 | ASE_MIPS16 | ASE_DSP, -+ .mmu_type = MMU_TYPE_R4000, -+ }, -+ { - .name = "24Kf", - .CP0_PRid = 0x00019300, - .CP0_Config0 = MIPS_CONFIG0 | (0x1 << CP0C0_AR) | --- -2.8.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-virtio-zero-vq-inuse-in-virtio_reset.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-virtio-zero-vq-inuse-in-virtio_reset.patch deleted file mode 100644 index 86955d043..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0001-virtio-zero-vq-inuse-in-virtio_reset.patch +++ /dev/null @@ -1,57 +0,0 @@ -Upstream-Status: Backport - -Supplementary fix of CVE-2016-5403 which is backported from: - -http://git.qemu.org/?p=qemu.git;a=commit;h=4b7f91e - -Signed-off-by: Kai Kang ---- -From 4b7f91ed0270a371e1933efa21ba600b6da23ab9 Mon Sep 17 00:00:00 2001 -From: Stefan Hajnoczi -Date: Wed, 7 Sep 2016 11:51:25 -0400 -Subject: [PATCH] virtio: zero vq->inuse in virtio_reset() - -vq->inuse must be zeroed upon device reset like most other virtqueue -fields. - -In theory, virtio_reset() just needs assert(vq->inuse == 0) since -devices must clean up in-flight requests during reset (requests cannot -not be leaked!). - -In practice, it is difficult to achieve vq->inuse == 0 across reset -because balloon, blk, 9p, etc implement various different strategies for -cleaning up requests. Most devices call g_free(elem) directly without -telling virtio.c that the VirtQueueElement is cleaned up. Therefore -vq->inuse is not decremented during reset. - -This patch zeroes vq->inuse and trusts that devices are not leaking -VirtQueueElements across reset. - -I will send a follow-up series that refactors request life-cycle across -all devices and converts vq->inuse = 0 into assert(vq->inuse == 0) but -this more invasive approach is not appropriate for stable trees. - -Signed-off-by: Stefan Hajnoczi -Cc: qemu-stable -Reviewed-by: Michael S. Tsirkin -Signed-off-by: Michael S. 
Tsirkin -Reviewed-by: Ladi Prosek ---- - hw/virtio/virtio.c | 1 + - 1 file changed, 1 insertion(+) - -diff --git a/hw/virtio/virtio.c b/hw/virtio/virtio.c -index 74c085c..e8a13a5 100644 ---- a/hw/virtio/virtio.c -+++ b/hw/virtio/virtio.c -@@ -822,6 +822,7 @@ void virtio_reset(void *opaque) - vdev->vq[i].signalled_used_valid = false; - vdev->vq[i].notification = true; - vdev->vq[i].vring.num = vdev->vq[i].vring.num_default; -+ vdev->vq[i].inuse = 0; - } - } - --- -2.9.3 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0002-Introduce-condition-to-notify-waiters-of-completed-c.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0002-Introduce-condition-to-notify-waiters-of-completed-c.patch new file mode 100644 index 000000000..c88c98e56 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0002-Introduce-condition-to-notify-waiters-of-completed-c.patch @@ -0,0 +1,86 @@ +From b5ffd3aa4e9bd4edb09cc84c46f78da72697a946 Mon Sep 17 00:00:00 2001 +From: Stefan Berger +Date: Sat, 31 Dec 2016 11:23:32 -0500 +Subject: [PATCH 2/4] Introduce condition to notify waiters of completed + command + +Introduce a lock and a condition to notify anyone waiting for the completion +of the execution of a TPM command by the backend (thread). The backend +uses the condition to signal anyone waiting for command completion. +We need to place the condition in two locations: one is invoked by the +backend thread, the other by the bottom half thread. +We will use the signalling to wait for command completion before VM +suspend. + +Signed-off-by: Stefan Berger + +Upstream-Status: Pending [https://lists.nongnu.org/archive/html/qemu-devel/2016-06/msg00252.html] +Signed-off-by: Patrick Ohly +--- + hw/tpm/tpm_int.h | 3 +++ + hw/tpm/tpm_tis.c | 14 ++++++++++++++ + 2 files changed, 17 insertions(+) + +diff --git a/hw/tpm/tpm_int.h b/hw/tpm/tpm_int.h +index 6b2c9c953a..70be1ad8d9 100644 +--- a/hw/tpm/tpm_int.h ++++ b/hw/tpm/tpm_int.h +@@ -30,6 +30,9 @@ struct TPMState { + char *backend; + TPMBackend *be_driver; + TPMVersion be_tpm_version; ++ ++ QemuMutex state_lock; ++ QemuCond cmd_complete; + }; + + #define TPM(obj) OBJECT_CHECK(TPMState, (obj), TYPE_TPM_TIS) +diff --git a/hw/tpm/tpm_tis.c b/hw/tpm/tpm_tis.c +index 381e7266ea..14d9e83ea2 100644 +--- a/hw/tpm/tpm_tis.c ++++ b/hw/tpm/tpm_tis.c +@@ -368,6 +368,8 @@ static void tpm_tis_receive_bh(void *opaque) + TPMTISEmuState *tis = &s->s.tis; + uint8_t locty = s->locty_number; + ++ qemu_mutex_lock(&s->state_lock); ++ + tpm_tis_sts_set(&tis->loc[locty], + TPM_TIS_STS_VALID | TPM_TIS_STS_DATA_AVAILABLE); + tis->loc[locty].state = TPM_TIS_STATE_COMPLETION; +@@ -384,6 +386,10 @@ static void tpm_tis_receive_bh(void *opaque) + tpm_tis_raise_irq(s, locty, + TPM_TIS_INT_DATA_AVAILABLE | TPM_TIS_INT_STS_VALID); + #endif ++ ++ /* notify of completed command */ ++ qemu_cond_signal(&s->cmd_complete); ++ qemu_mutex_unlock(&s->state_lock); + } + + /* +@@ -403,6 +409,11 @@ static void tpm_tis_receive_cb(TPMState *s, uint8_t locty, + } + } + ++ qemu_mutex_lock(&s->state_lock); ++ /* notify of completed command */ ++ qemu_cond_signal(&s->cmd_complete); ++ qemu_mutex_unlock(&s->state_lock); ++ + qemu_bh_schedule(tis->bh); + } + +@@ -1072,6 +1083,9 @@ static void tpm_tis_initfn(Object *obj) + memory_region_init_io(&s->mmio, OBJECT(s), &tpm_tis_memory_ops, + s, "tpm-tis-mmio", + TPM_TIS_NUM_LOCALITIES << TPM_TIS_LOCALITY_SHIFT); ++ ++ qemu_mutex_init(&s->state_lock); ++ qemu_cond_init(&s->cmd_complete); + } + + static void 
tpm_tis_class_init(ObjectClass *klass, void *data) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0002-fix-CVE-2016-7423.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0002-fix-CVE-2016-7423.patch deleted file mode 100644 index fdf58a3d6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0002-fix-CVE-2016-7423.patch +++ /dev/null @@ -1,45 +0,0 @@ -Upstream-Status: Backport - -Backport patch to fix CVE-2016-7423 from: - -http://git.qemu.org/?p=qemu.git;a=commit;h=670e56d3ed - -CVE: CVE-2016-7423 - -Signed-off-by: Kai Kang ---- -From 670e56d3ed2918b3861d9216f2c0540d9e9ae0d5 Mon Sep 17 00:00:00 2001 -From: Li Qiang -Date: Mon, 12 Sep 2016 18:14:11 +0530 -Subject: [PATCH] scsi: mptsas: use g_new0 to allocate MPTSASRequest object - -When processing IO request in mptsas, it uses g_new to allocate -a 'req' object. If an error occurs before 'req->sreq' is -allocated, It could lead to an OOB write in mptsas_free_request -function. Use g_new0 to avoid it. - -Reported-by: Li Qiang -Signed-off-by: Prasad J Pandit -Message-Id: <1473684251-17476-1-git-send-email-ppandit@redhat.com> -Cc: qemu-stable@nongnu.org -Signed-off-by: Paolo Bonzini ---- - hw/scsi/mptsas.c | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/hw/scsi/mptsas.c b/hw/scsi/mptsas.c -index 0e0a22f..eaae1bb 100644 ---- a/hw/scsi/mptsas.c -+++ b/hw/scsi/mptsas.c -@@ -304,7 +304,7 @@ static int mptsas_process_scsi_io_request(MPTSASState *s, - goto bad; - } - -- req = g_new(MPTSASRequest, 1); -+ req = g_new0(MPTSASRequest, 1); - QTAILQ_INSERT_TAIL(&s->pending, req, next); - req->scsi_io = *scsi_io; - req->dev = s; --- -2.9.3 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0003-Introduce-condition-in-TPM-backend-for-notification.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0003-Introduce-condition-in-TPM-backend-for-notification.patch new file mode 100644 index 000000000..e58f01906 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0003-Introduce-condition-in-TPM-backend-for-notification.patch @@ -0,0 +1,79 @@ +From 732a8e046948fd62b32cd1dd76a6798eb1caf4d6 Mon Sep 17 00:00:00 2001 +From: Stefan Berger +Date: Sat, 31 Dec 2016 11:23:32 -0500 +Subject: [PATCH 3/4] Introduce condition in TPM backend for notification + +TPM backends will suspend independently of the frontends. Also +here we need to be able to wait for the TPM command to have been +completely processed. 
+ +Signed-off-by: Stefan Berger + +Upstream-Status: Pending [https://lists.nongnu.org/archive/html/qemu-devel/2016-06/msg00252.html] +Signed-off-by: Patrick Ohly +--- + hw/tpm/tpm_passthrough.c | 20 ++++++++++++++++++++ + 1 file changed, 20 insertions(+) + +diff --git a/hw/tpm/tpm_passthrough.c b/hw/tpm/tpm_passthrough.c +index 050f2ba850..44739ebad2 100644 +--- a/hw/tpm/tpm_passthrough.c ++++ b/hw/tpm/tpm_passthrough.c +@@ -75,6 +75,10 @@ struct TPMPassthruState { + TPMVersion tpm_version; + ptm_cap cuse_cap; /* capabilities of the CUSE TPM */ + uint8_t cur_locty_number; /* last set locality */ ++ ++ QemuMutex state_lock; ++ QemuCond cmd_complete; /* singnaled once tpm_busy is false */ ++ bool tpm_busy; + }; + + typedef struct TPMPassthruState TPMPassthruState; +@@ -274,6 +278,11 @@ static void tpm_passthrough_worker_thread(gpointer data, + thr_parms->recv_data_callback(thr_parms->tpm_state, + thr_parms->tpm_state->locty_number, + selftest_done); ++ /* result delivered */ ++ qemu_mutex_lock(&tpm_pt->state_lock); ++ tpm_pt->tpm_busy = false; ++ qemu_cond_signal(&tpm_pt->cmd_complete); ++ qemu_mutex_unlock(&tpm_pt->state_lock); + break; + case TPM_BACKEND_CMD_INIT: + case TPM_BACKEND_CMD_END: +@@ -401,6 +410,7 @@ static void tpm_passthrough_reset(TPMBackend *tb) + tpm_backend_thread_end(&tpm_pt->tbt); + + tpm_pt->had_startup_error = false; ++ tpm_pt->tpm_busy = false; + } + + static int tpm_passthrough_init(TPMBackend *tb, TPMState *s, +@@ -478,6 +488,11 @@ static void tpm_passthrough_deliver_request(TPMBackend *tb) + { + TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); + ++ /* TPM considered busy once TPM Request scheduled for processing */ ++ qemu_mutex_lock(&tpm_pt->state_lock); ++ tpm_pt->tpm_busy = true; ++ qemu_mutex_unlock(&tpm_pt->state_lock); ++ + tpm_backend_thread_deliver_request(&tpm_pt->tbt); + } + +@@ -746,6 +761,11 @@ static const TPMDriverOps tpm_passthrough_driver = { + + static void tpm_passthrough_inst_init(Object *obj) + { ++ TPMBackend *tb = TPM_BACKEND(obj); ++ TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); ++ ++ qemu_mutex_init(&tpm_pt->state_lock); ++ qemu_cond_init(&tpm_pt->cmd_complete); + } + + static void tpm_passthrough_inst_finalize(Object *obj) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0003-fix-CVE-2016-7908.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0003-fix-CVE-2016-7908.patch deleted file mode 100644 index 05cc3d9d1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0003-fix-CVE-2016-7908.patch +++ /dev/null @@ -1,62 +0,0 @@ -Upstream-Status: Backport - -Backport patch to fix CVE-2016-7908 from: - -http://git.qemu.org/?p=qemu.git;a=commit;h=070c4b92b8c - -CVE: CVE-2016-7908 - -Signed-off-by: Kai Kang ---- -From 070c4b92b8cd5390889716677a0b92444d6e087a Mon Sep 17 00:00:00 2001 -From: Prasad J Pandit -Date: Thu, 22 Sep 2016 16:02:37 +0530 -Subject: [PATCH] net: mcf: limit buffer descriptor count - -ColdFire Fast Ethernet Controller uses buffer descriptors to manage -data flow to/fro receive & transmit queues. While transmitting -packets, it could continue to read buffer descriptors if a buffer -descriptor has length of zero and has crafted values in bd.flags. -Set upper limit to number of buffer descriptors. 
- -Reported-by: Li Qiang -Signed-off-by: Prasad J Pandit -Reviewed-by: Paolo Bonzini -Signed-off-by: Jason Wang ---- - hw/net/mcf_fec.c | 5 +++-- - 1 file changed, 3 insertions(+), 2 deletions(-) - -diff --git a/hw/net/mcf_fec.c b/hw/net/mcf_fec.c -index 0ee8ad9..d31fea1 100644 ---- a/hw/net/mcf_fec.c -+++ b/hw/net/mcf_fec.c -@@ -23,6 +23,7 @@ do { printf("mcf_fec: " fmt , ## __VA_ARGS__); } while (0) - #define DPRINTF(fmt, ...) do {} while(0) - #endif - -+#define FEC_MAX_DESC 1024 - #define FEC_MAX_FRAME_SIZE 2032 - - typedef struct { -@@ -149,7 +150,7 @@ static void mcf_fec_do_tx(mcf_fec_state *s) - uint32_t addr; - mcf_fec_bd bd; - int frame_size; -- int len; -+ int len, descnt = 0; - uint8_t frame[FEC_MAX_FRAME_SIZE]; - uint8_t *ptr; - -@@ -157,7 +158,7 @@ static void mcf_fec_do_tx(mcf_fec_state *s) - ptr = frame; - frame_size = 0; - addr = s->tx_descriptor; -- while (1) { -+ while (descnt++ < FEC_MAX_DESC) { - mcf_fec_read_bd(&bd, addr); - DPRINTF("tx_bd %x flags %04x len %d data %08x\n", - addr, bd.flags, bd.length, bd.data); --- -2.9.3 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0004-Add-support-for-VM-suspend-resume-for-TPM-TIS.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0004-Add-support-for-VM-suspend-resume-for-TPM-TIS.patch new file mode 100644 index 000000000..b8a783d4e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0004-Add-support-for-VM-suspend-resume-for-TPM-TIS.patch @@ -0,0 +1,719 @@ +From 5e9dd9063f514447ea4f54046793f4f01c297ed4 Mon Sep 17 00:00:00 2001 +From: Stefan Berger +Date: Sat, 31 Dec 2016 11:23:32 -0500 +Subject: [PATCH 4/4] Add support for VM suspend/resume for TPM TIS + +Extend the TPM TIS code to support suspend/resume. In case a command +is being processed by the external TPM when suspending, wait for the command +to complete to catch the result. In case the bottom half did not run, +run the one function the bottom half is supposed to run. This then +makes the resume operation work. + +The passthrough backend does not support suspend/resume operation +and is therefore blocked from suspend/resume and migration. + +The CUSE TPM's supported capabilities are tested and if sufficient +capabilities are implemented, suspend/resume, snapshotting and +migration are supported by the CUSE TPM. 
+ +Signed-off-by: Stefan Berger + +Upstream-Status: Pending [https://lists.nongnu.org/archive/html/qemu-devel/2016-06/msg00252.html] +Signed-off-by: Patrick Ohly +--- + hw/tpm/tpm_passthrough.c | 130 +++++++++++++++++++++++-- + hw/tpm/tpm_tis.c | 137 +++++++++++++++++++++++++- + hw/tpm/tpm_tis.h | 2 + + hw/tpm/tpm_util.c | 223 +++++++++++++++++++++++++++++++++++++++++++ + hw/tpm/tpm_util.h | 7 ++ + include/sysemu/tpm_backend.h | 12 +++ + 6 files changed, 503 insertions(+), 8 deletions(-) + +diff --git a/hw/tpm/tpm_passthrough.c b/hw/tpm/tpm_passthrough.c +index 44739ebad2..bc8072d0bc 100644 +--- a/hw/tpm/tpm_passthrough.c ++++ b/hw/tpm/tpm_passthrough.c +@@ -34,6 +34,8 @@ + #include "tpm_tis.h" + #include "tpm_util.h" + #include "tpm_ioctl.h" ++#include "migration/migration.h" ++#include "qapi/error.h" + + #define DEBUG_TPM 0 + +@@ -49,6 +51,7 @@ + #define TYPE_TPM_CUSE "tpm-cuse" + + static const TPMDriverOps tpm_passthrough_driver; ++static const VMStateDescription vmstate_tpm_cuse; + + /* data structures */ + typedef struct TPMPassthruThreadParams { +@@ -79,6 +82,10 @@ struct TPMPassthruState { + QemuMutex state_lock; + QemuCond cmd_complete; /* singnaled once tpm_busy is false */ + bool tpm_busy; ++ ++ Error *migration_blocker; ++ ++ TPMBlobBuffers tpm_blobs; + }; + + typedef struct TPMPassthruState TPMPassthruState; +@@ -306,6 +313,10 @@ static void tpm_passthrough_shutdown(TPMPassthruState *tpm_pt) + strerror(errno)); + } + } ++ if (tpm_pt->migration_blocker) { ++ migrate_del_blocker(tpm_pt->migration_blocker); ++ error_free(tpm_pt->migration_blocker); ++ } + } + + /* +@@ -360,12 +371,14 @@ static int tpm_passthrough_cuse_check_caps(TPMPassthruState *tpm_pt) + /* + * Initialize the external CUSE TPM + */ +-static int tpm_passthrough_cuse_init(TPMPassthruState *tpm_pt) ++static int tpm_passthrough_cuse_init(TPMPassthruState *tpm_pt, ++ bool is_resume) + { + int rc = 0; +- ptm_init init = { +- .u.req.init_flags = PTM_INIT_FLAG_DELETE_VOLATILE, +- }; ++ ptm_init init; ++ if (is_resume) { ++ init.u.req.init_flags = PTM_INIT_FLAG_DELETE_VOLATILE; ++ } + + if (TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)) { + if (ioctl(tpm_pt->tpm_fd, PTM_INIT, &init) < 0) { +@@ -394,7 +407,7 @@ static int tpm_passthrough_startup_tpm(TPMBackend *tb) + tpm_passthrough_worker_thread, + &tpm_pt->tpm_thread_params); + +- tpm_passthrough_cuse_init(tpm_pt); ++ tpm_passthrough_cuse_init(tpm_pt, false); + + return 0; + } +@@ -466,6 +479,32 @@ static int tpm_passthrough_reset_tpm_established_flag(TPMBackend *tb, + return rc; + } + ++static int tpm_cuse_get_state_blobs(TPMBackend *tb, ++ bool decrypted_blobs, ++ TPMBlobBuffers *tpm_blobs) ++{ ++ TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); ++ ++ assert(TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)); ++ ++ return tpm_util_cuse_get_state_blobs(tpm_pt->tpm_fd, decrypted_blobs, ++ tpm_blobs); ++} ++ ++static int tpm_cuse_set_state_blobs(TPMBackend *tb, ++ TPMBlobBuffers *tpm_blobs) ++{ ++ TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); ++ ++ assert(TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)); ++ ++ if (tpm_util_cuse_set_state_blobs(tpm_pt->tpm_fd, tpm_blobs)) { ++ return 1; ++ } ++ ++ return tpm_passthrough_cuse_init(tpm_pt, true); ++} ++ + static bool tpm_passthrough_get_startup_error(TPMBackend *tb) + { + TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); +@@ -488,7 +527,7 @@ static void tpm_passthrough_deliver_request(TPMBackend *tb) + { + TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); + +- /* TPM considered busy once TPM Request scheduled for processing */ ++ /* TPM considered busy 
once TPM request scheduled for processing */ + qemu_mutex_lock(&tpm_pt->state_lock); + tpm_pt->tpm_busy = true; + qemu_mutex_unlock(&tpm_pt->state_lock); +@@ -601,6 +640,25 @@ static int tpm_passthrough_open_sysfs_cancel(TPMBackend *tb) + return fd; + } + ++static void tpm_passthrough_block_migration(TPMPassthruState *tpm_pt) ++{ ++ ptm_cap caps; ++ ++ if (TPM_PASSTHROUGH_USES_CUSE_TPM(tpm_pt)) { ++ caps = PTM_CAP_GET_STATEBLOB | PTM_CAP_SET_STATEBLOB | ++ PTM_CAP_STOP; ++ if (!TPM_CUSE_IMPLEMENTS_ALL(tpm_pt, caps)) { ++ error_setg(&tpm_pt->migration_blocker, ++ "Migration disabled: CUSE TPM lacks necessary capabilities"); ++ migrate_add_blocker(tpm_pt->migration_blocker); ++ } ++ } else { ++ error_setg(&tpm_pt->migration_blocker, ++ "Migration disabled: Passthrough TPM does not support migration"); ++ migrate_add_blocker(tpm_pt->migration_blocker); ++ } ++} ++ + static int tpm_passthrough_handle_device_opts(QemuOpts *opts, TPMBackend *tb) + { + TPMPassthruState *tpm_pt = TPM_PASSTHROUGH(tb); +@@ -642,7 +700,7 @@ static int tpm_passthrough_handle_device_opts(QemuOpts *opts, TPMBackend *tb) + goto err_close_tpmdev; + } + /* init TPM for probing */ +- if (tpm_passthrough_cuse_init(tpm_pt)) { ++ if (tpm_passthrough_cuse_init(tpm_pt, false)) { + goto err_close_tpmdev; + } + } +@@ -659,6 +717,7 @@ static int tpm_passthrough_handle_device_opts(QemuOpts *opts, TPMBackend *tb) + } + } + ++ tpm_passthrough_block_migration(tpm_pt); + + return 0; + +@@ -766,10 +825,13 @@ static void tpm_passthrough_inst_init(Object *obj) + + qemu_mutex_init(&tpm_pt->state_lock); + qemu_cond_init(&tpm_pt->cmd_complete); ++ ++ vmstate_register(NULL, -1, &vmstate_tpm_cuse, obj); + } + + static void tpm_passthrough_inst_finalize(Object *obj) + { ++ vmstate_unregister(NULL, &vmstate_tpm_cuse, obj); + } + + static void tpm_passthrough_class_init(ObjectClass *klass, void *data) +@@ -802,6 +864,60 @@ static const char *tpm_passthrough_cuse_create_desc(void) + return "CUSE TPM backend driver"; + } + ++static void tpm_cuse_pre_save(void *opaque) ++{ ++ TPMPassthruState *tpm_pt = opaque; ++ TPMBackend *tb = &tpm_pt->parent; ++ ++ qemu_mutex_lock(&tpm_pt->state_lock); ++ /* wait for TPM to finish processing */ ++ if (tpm_pt->tpm_busy) { ++ qemu_cond_wait(&tpm_pt->cmd_complete, &tpm_pt->state_lock); ++ } ++ qemu_mutex_unlock(&tpm_pt->state_lock); ++ ++ /* get the decrypted state blobs from the TPM */ ++ tpm_cuse_get_state_blobs(tb, TRUE, &tpm_pt->tpm_blobs); ++} ++ ++static int tpm_cuse_post_load(void *opaque, ++ int version_id __attribute__((unused))) ++{ ++ TPMPassthruState *tpm_pt = opaque; ++ TPMBackend *tb = &tpm_pt->parent; ++ ++ return tpm_cuse_set_state_blobs(tb, &tpm_pt->tpm_blobs); ++} ++ ++static const VMStateDescription vmstate_tpm_cuse = { ++ .name = "cuse-tpm", ++ .version_id = 1, ++ .minimum_version_id = 0, ++ .minimum_version_id_old = 0, ++ .pre_save = tpm_cuse_pre_save, ++ .post_load = tpm_cuse_post_load, ++ .fields = (VMStateField[]) { ++ VMSTATE_UINT32(tpm_blobs.permanent_flags, TPMPassthruState), ++ VMSTATE_UINT32(tpm_blobs.permanent.size, TPMPassthruState), ++ VMSTATE_VBUFFER_ALLOC_UINT32(tpm_blobs.permanent.buffer, ++ TPMPassthruState, 1, NULL, 0, ++ tpm_blobs.permanent.size), ++ ++ VMSTATE_UINT32(tpm_blobs.volatil_flags, TPMPassthruState), ++ VMSTATE_UINT32(tpm_blobs.volatil.size, TPMPassthruState), ++ VMSTATE_VBUFFER_ALLOC_UINT32(tpm_blobs.volatil.buffer, ++ TPMPassthruState, 1, NULL, 0, ++ tpm_blobs.volatil.size), ++ ++ VMSTATE_UINT32(tpm_blobs.savestate_flags, TPMPassthruState), ++ 
VMSTATE_UINT32(tpm_blobs.savestate.size, TPMPassthruState), ++ VMSTATE_VBUFFER_ALLOC_UINT32(tpm_blobs.savestate.buffer, ++ TPMPassthruState, 1, NULL, 0, ++ tpm_blobs.savestate.size), ++ VMSTATE_END_OF_LIST() ++ } ++}; ++ + static const TPMDriverOps tpm_cuse_driver = { + .type = TPM_TYPE_CUSE_TPM, + .opts = tpm_passthrough_cmdline_opts, +diff --git a/hw/tpm/tpm_tis.c b/hw/tpm/tpm_tis.c +index 14d9e83ea2..9b660cf737 100644 +--- a/hw/tpm/tpm_tis.c ++++ b/hw/tpm/tpm_tis.c +@@ -368,6 +368,8 @@ static void tpm_tis_receive_bh(void *opaque) + TPMTISEmuState *tis = &s->s.tis; + uint8_t locty = s->locty_number; + ++ tis->bh_scheduled = false; ++ + qemu_mutex_lock(&s->state_lock); + + tpm_tis_sts_set(&tis->loc[locty], +@@ -415,6 +417,8 @@ static void tpm_tis_receive_cb(TPMState *s, uint8_t locty, + qemu_mutex_unlock(&s->state_lock); + + qemu_bh_schedule(tis->bh); ++ ++ tis->bh_scheduled = true; + } + + /* +@@ -1030,9 +1034,140 @@ static void tpm_tis_reset(DeviceState *dev) + tpm_tis_do_startup_tpm(s); + } + ++ ++/* persistent state handling */ ++ ++static void tpm_tis_pre_save(void *opaque) ++{ ++ TPMState *s = opaque; ++ TPMTISEmuState *tis = &s->s.tis; ++ uint8_t locty = tis->active_locty; ++ ++ DPRINTF("tpm_tis: suspend: locty = %d : r_offset = %d, w_offset = %d\n", ++ locty, tis->loc[0].r_offset, tis->loc[0].w_offset); ++#ifdef DEBUG_TIS ++ tpm_tis_dump_state(opaque, 0); ++#endif ++ ++ qemu_mutex_lock(&s->state_lock); ++ ++ /* wait for outstanding request to complete */ ++ if (TPM_TIS_IS_VALID_LOCTY(locty) && ++ tis->loc[locty].state == TPM_TIS_STATE_EXECUTION) { ++ /* ++ * If we get here when the bh is scheduled but did not run, ++ * we won't get notified... ++ */ ++ if (!tis->bh_scheduled) { ++ /* backend thread to notify us */ ++ qemu_cond_wait(&s->cmd_complete, &s->state_lock); ++ } ++ if (tis->loc[locty].state == TPM_TIS_STATE_EXECUTION) { ++ /* bottom half did not run - run its function */ ++ qemu_mutex_unlock(&s->state_lock); ++ tpm_tis_receive_bh(opaque); ++ qemu_mutex_lock(&s->state_lock); ++ } ++ } ++ ++ qemu_mutex_unlock(&s->state_lock); ++ ++ /* copy current active read or write buffer into the buffer ++ written to disk */ ++ if (TPM_TIS_IS_VALID_LOCTY(locty)) { ++ switch (tis->loc[locty].state) { ++ case TPM_TIS_STATE_RECEPTION: ++ memcpy(tis->buf, ++ tis->loc[locty].w_buffer.buffer, ++ MIN(sizeof(tis->buf), ++ tis->loc[locty].w_buffer.size)); ++ tis->offset = tis->loc[locty].w_offset; ++ break; ++ case TPM_TIS_STATE_COMPLETION: ++ memcpy(tis->buf, ++ tis->loc[locty].r_buffer.buffer, ++ MIN(sizeof(tis->buf), ++ tis->loc[locty].r_buffer.size)); ++ tis->offset = tis->loc[locty].r_offset; ++ break; ++ default: ++ /* leak nothing */ ++ memset(tis->buf, 0x0, sizeof(tis->buf)); ++ break; ++ } ++ } ++} ++ ++static int tpm_tis_post_load(void *opaque, ++ int version_id __attribute__((unused))) ++{ ++ TPMState *s = opaque; ++ TPMTISEmuState *tis = &s->s.tis; ++ ++ uint8_t locty = tis->active_locty; ++ ++ if (TPM_TIS_IS_VALID_LOCTY(locty)) { ++ switch (tis->loc[locty].state) { ++ case TPM_TIS_STATE_RECEPTION: ++ memcpy(tis->loc[locty].w_buffer.buffer, ++ tis->buf, ++ MIN(sizeof(tis->buf), ++ tis->loc[locty].w_buffer.size)); ++ tis->loc[locty].w_offset = tis->offset; ++ break; ++ case TPM_TIS_STATE_COMPLETION: ++ memcpy(tis->loc[locty].r_buffer.buffer, ++ tis->buf, ++ MIN(sizeof(tis->buf), ++ tis->loc[locty].r_buffer.size)); ++ tis->loc[locty].r_offset = tis->offset; ++ break; ++ default: ++ break; ++ } ++ } ++ ++ DPRINTF("tpm_tis: resume : locty = %d : r_offset = %d, w_offset = %d\n", ++ locty, 
tis->loc[0].r_offset, tis->loc[0].w_offset); ++ ++ return 0; ++} ++ ++static const VMStateDescription vmstate_locty = { ++ .name = "loc", ++ .version_id = 1, ++ .minimum_version_id = 0, ++ .minimum_version_id_old = 0, ++ .fields = (VMStateField[]) { ++ VMSTATE_UINT32(state, TPMLocality), ++ VMSTATE_UINT32(inte, TPMLocality), ++ VMSTATE_UINT32(ints, TPMLocality), ++ VMSTATE_UINT8(access, TPMLocality), ++ VMSTATE_UINT32(sts, TPMLocality), ++ VMSTATE_UINT32(iface_id, TPMLocality), ++ VMSTATE_END_OF_LIST(), ++ } ++}; ++ + static const VMStateDescription vmstate_tpm_tis = { + .name = "tpm", +- .unmigratable = 1, ++ .version_id = 1, ++ .minimum_version_id = 0, ++ .minimum_version_id_old = 0, ++ .pre_save = tpm_tis_pre_save, ++ .post_load = tpm_tis_post_load, ++ .fields = (VMStateField[]) { ++ VMSTATE_UINT32(s.tis.offset, TPMState), ++ VMSTATE_BUFFER(s.tis.buf, TPMState), ++ VMSTATE_UINT8(s.tis.active_locty, TPMState), ++ VMSTATE_UINT8(s.tis.aborting_locty, TPMState), ++ VMSTATE_UINT8(s.tis.next_locty, TPMState), ++ ++ VMSTATE_STRUCT_ARRAY(s.tis.loc, TPMState, TPM_TIS_NUM_LOCALITIES, 1, ++ vmstate_locty, TPMLocality), ++ ++ VMSTATE_END_OF_LIST() ++ } + }; + + static Property tpm_tis_properties[] = { +diff --git a/hw/tpm/tpm_tis.h b/hw/tpm/tpm_tis.h +index a1df41fa21..b7fc0ea1a9 100644 +--- a/hw/tpm/tpm_tis.h ++++ b/hw/tpm/tpm_tis.h +@@ -54,6 +54,8 @@ typedef struct TPMLocality { + + typedef struct TPMTISEmuState { + QEMUBH *bh; ++ bool bh_scheduled; /* bh scheduled but did not run yet */ ++ + uint32_t offset; + uint8_t buf[TPM_TIS_BUFFER_MAX]; + +diff --git a/hw/tpm/tpm_util.c b/hw/tpm/tpm_util.c +index 7b35429725..b6ff74d946 100644 +--- a/hw/tpm/tpm_util.c ++++ b/hw/tpm/tpm_util.c +@@ -22,6 +22,17 @@ + #include "qemu/osdep.h" + #include "tpm_util.h" + #include "tpm_int.h" ++#include "tpm_ioctl.h" ++#include "qemu/error-report.h" ++ ++#define DEBUG_TPM 0 ++ ++#define DPRINTF(fmt, ...) do { \ ++ if (DEBUG_TPM) { \ ++ fprintf(stderr, fmt, ## __VA_ARGS__); \ ++ } \ ++} while (0) ++ + + /* + * A basic test of a TPM device. We expect a well formatted response header +@@ -125,3 +136,215 @@ int tpm_util_test_tpmdev(int tpm_fd, TPMVersion *tpm_version) + + return 1; + } ++ ++static void tpm_sized_buffer_reset(TPMSizedBuffer *tsb) ++{ ++ g_free(tsb->buffer); ++ tsb->buffer = NULL; ++ tsb->size = 0; ++} ++ ++/* ++ * Transfer a TPM state blob from the TPM into a provided buffer. ++ * ++ * @fd: file descriptor to talk to the CUSE TPM ++ * @type: the type of blob to transfer ++ * @decrypted_blob: whether we request to receive decrypted blobs ++ * @tsb: the TPMSizeBuffer to fill with the blob ++ * @flags: the flags to return to the caller ++ */ ++static int tpm_util_cuse_get_state_blob(int fd, ++ uint8_t type, ++ bool decrypted_blob, ++ TPMSizedBuffer *tsb, ++ uint32_t *flags) ++{ ++ ptm_getstate pgs; ++ uint16_t offset = 0; ++ ptm_res res; ++ ssize_t n; ++ size_t to_read; ++ ++ tpm_sized_buffer_reset(tsb); ++ ++ pgs.u.req.state_flags = (decrypted_blob) ? 
PTM_STATE_FLAG_DECRYPTED : 0; ++ pgs.u.req.type = type; ++ pgs.u.req.offset = offset; ++ ++ if (ioctl(fd, PTM_GET_STATEBLOB, &pgs) < 0) { ++ error_report("CUSE TPM PTM_GET_STATEBLOB ioctl failed: %s", ++ strerror(errno)); ++ goto err_exit; ++ } ++ res = pgs.u.resp.tpm_result; ++ if (res != 0 && (res & 0x800) == 0) { ++ error_report("Getting the stateblob (type %d) failed with a TPM " ++ "error 0x%x", type, res); ++ goto err_exit; ++ } ++ ++ *flags = pgs.u.resp.state_flags; ++ ++ tsb->buffer = g_malloc(pgs.u.resp.totlength); ++ memcpy(tsb->buffer, pgs.u.resp.data, pgs.u.resp.length); ++ tsb->size = pgs.u.resp.length; ++ ++ /* if there are bytes left to get use read() interface */ ++ while (tsb->size < pgs.u.resp.totlength) { ++ to_read = pgs.u.resp.totlength - tsb->size; ++ if (unlikely(to_read > SSIZE_MAX)) { ++ to_read = SSIZE_MAX; ++ } ++ ++ n = read(fd, &tsb->buffer[tsb->size], to_read); ++ if (n != to_read) { ++ error_report("Could not read stateblob (type %d) : %s", ++ type, strerror(errno)); ++ goto err_exit; ++ } ++ tsb->size += to_read; ++ } ++ ++ DPRINTF("tpm_util: got state blob type %d, %d bytes, flags 0x%08x, " ++ "decrypted=%d\n", type, tsb->size, *flags, decrypted_blob); ++ ++ return 0; ++ ++err_exit: ++ return 1; ++} ++ ++int tpm_util_cuse_get_state_blobs(int tpm_fd, ++ bool decrypted_blobs, ++ TPMBlobBuffers *tpm_blobs) ++{ ++ if (tpm_util_cuse_get_state_blob(tpm_fd, PTM_BLOB_TYPE_PERMANENT, ++ decrypted_blobs, ++ &tpm_blobs->permanent, ++ &tpm_blobs->permanent_flags) || ++ tpm_util_cuse_get_state_blob(tpm_fd, PTM_BLOB_TYPE_VOLATILE, ++ decrypted_blobs, ++ &tpm_blobs->volatil, ++ &tpm_blobs->volatil_flags) || ++ tpm_util_cuse_get_state_blob(tpm_fd, PTM_BLOB_TYPE_SAVESTATE, ++ decrypted_blobs, ++ &tpm_blobs->savestate, ++ &tpm_blobs->savestate_flags)) { ++ goto err_exit; ++ } ++ ++ return 0; ++ ++ err_exit: ++ tpm_sized_buffer_reset(&tpm_blobs->volatil); ++ tpm_sized_buffer_reset(&tpm_blobs->permanent); ++ tpm_sized_buffer_reset(&tpm_blobs->savestate); ++ ++ return 1; ++} ++ ++static int tpm_util_cuse_do_set_stateblob_ioctl(int fd, ++ uint32_t flags, ++ uint32_t type, ++ uint32_t length) ++{ ++ ptm_setstate pss; ++ ++ pss.u.req.state_flags = flags; ++ pss.u.req.type = type; ++ pss.u.req.length = length; ++ ++ if (ioctl(fd, PTM_SET_STATEBLOB, &pss) < 0) { ++ error_report("CUSE TPM PTM_SET_STATEBLOB ioctl failed: %s", ++ strerror(errno)); ++ return 1; ++ } ++ ++ if (pss.u.resp.tpm_result != 0) { ++ error_report("Setting the stateblob (type %d) failed with a TPM " ++ "error 0x%x", type, pss.u.resp.tpm_result); ++ return 1; ++ } ++ ++ return 0; ++} ++ ++ ++/* ++ * Transfer a TPM state blob to the CUSE TPM. 
++ * ++ * @fd: file descriptor to talk to the CUSE TPM ++ * @type: the type of TPM state blob to transfer ++ * @tsb: TPMSizeBuffer containing the TPM state blob ++ * @flags: Flags describing the (encryption) state of the TPM state blob ++ */ ++static int tpm_util_cuse_set_state_blob(int fd, ++ uint32_t type, ++ TPMSizedBuffer *tsb, ++ uint32_t flags) ++{ ++ uint32_t offset = 0; ++ ssize_t n; ++ size_t to_write; ++ ++ /* initiate the transfer to the CUSE TPM */ ++ if (tpm_util_cuse_do_set_stateblob_ioctl(fd, flags, type, 0)) { ++ return 1; ++ } ++ ++ /* use the write() interface for transferring the state blob */ ++ while (offset < tsb->size) { ++ to_write = tsb->size - offset; ++ if (unlikely(to_write > SSIZE_MAX)) { ++ to_write = SSIZE_MAX; ++ } ++ ++ n = write(fd, &tsb->buffer[offset], to_write); ++ if (n != to_write) { ++ error_report("Writing the stateblob (type %d) failed: %s", ++ type, strerror(errno)); ++ goto err_exit; ++ } ++ offset += to_write; ++ } ++ ++ /* inidicate that the transfer is finished */ ++ if (tpm_util_cuse_do_set_stateblob_ioctl(fd, flags, type, 0)) { ++ goto err_exit; ++ } ++ ++ DPRINTF("tpm_util: set the state blob type %d, %d bytes, flags 0x%08x\n", ++ type, tsb->size, flags); ++ ++ return 0; ++ ++err_exit: ++ return 1; ++} ++ ++int tpm_util_cuse_set_state_blobs(int tpm_fd, ++ TPMBlobBuffers *tpm_blobs) ++{ ++ ptm_res res; ++ ++ if (ioctl(tpm_fd, PTM_STOP, &res) < 0) { ++ error_report("tpm_passthrough: Could not stop " ++ "the CUSE TPM: %s (%i)", ++ strerror(errno), errno); ++ return 1; ++ } ++ ++ if (tpm_util_cuse_set_state_blob(tpm_fd, PTM_BLOB_TYPE_PERMANENT, ++ &tpm_blobs->permanent, ++ tpm_blobs->permanent_flags) || ++ tpm_util_cuse_set_state_blob(tpm_fd, PTM_BLOB_TYPE_VOLATILE, ++ &tpm_blobs->volatil, ++ tpm_blobs->volatil_flags) || ++ tpm_util_cuse_set_state_blob(tpm_fd, PTM_BLOB_TYPE_SAVESTATE, ++ &tpm_blobs->savestate, ++ tpm_blobs->savestate_flags)) { ++ return 1; ++ } ++ ++ return 0; ++} +diff --git a/hw/tpm/tpm_util.h b/hw/tpm/tpm_util.h +index df76245e6e..c24071d812 100644 +--- a/hw/tpm/tpm_util.h ++++ b/hw/tpm/tpm_util.h +@@ -26,4 +26,11 @@ + + int tpm_util_test_tpmdev(int tpm_fd, TPMVersion *tpm_version); + ++int tpm_util_cuse_get_state_blobs(int tpm_fd, ++ bool decrypted_blobs, ++ TPMBlobBuffers *tpm_blobs); ++ ++int tpm_util_cuse_set_state_blobs(int tpm_fd, ++ TPMBlobBuffers *tpm_blobs); ++ + #endif /* TPM_TPM_UTIL_H */ +diff --git a/include/sysemu/tpm_backend.h b/include/sysemu/tpm_backend.h +index b58f52d39f..3403821b9d 100644 +--- a/include/sysemu/tpm_backend.h ++++ b/include/sysemu/tpm_backend.h +@@ -62,6 +62,18 @@ typedef struct TPMSizedBuffer { + uint8_t *buffer; + } TPMSizedBuffer; + ++/* blobs from the TPM; part of VM state when migrating */ ++typedef struct TPMBlobBuffers { ++ uint32_t permanent_flags; ++ TPMSizedBuffer permanent; ++ ++ uint32_t volatil_flags; ++ TPMSizedBuffer volatil; ++ ++ uint32_t savestate_flags; ++ TPMSizedBuffer savestate; ++} TPMBlobBuffers; ++ + struct TPMDriverOps { + enum TpmType type; + const QemuOptDesc *opts; +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0004-fix-CVE-2016-7909.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0004-fix-CVE-2016-7909.patch deleted file mode 100644 index e71bbf620..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/0004-fix-CVE-2016-7909.patch +++ /dev/null @@ -1,42 +0,0 @@ -Upstream-Status: Backport [http://git.qemu.org/?p=qemu.git;a=commit;h=34e29ce] -CVE: CVE-2016-7909 - -Signed-off-by: Kai Kang 
---- -From 34e29ce754c02bb6b3bdd244fbb85033460feaff Mon Sep 17 00:00:00 2001 -From: Prasad J Pandit -Date: Fri, 30 Sep 2016 00:27:33 +0530 -Subject: [PATCH] net: pcnet: check rx/tx descriptor ring length - -The AMD PC-Net II emulator has set of control and status(CSR) -registers. Of these, CSR76 and CSR78 hold receive and transmit -descriptor ring length respectively. This ring length could range -from 1 to 65535. Setting ring length to zero leads to an infinite -loop in pcnet_rdra_addr() or pcnet_transmit(). Add check to avoid it. - -Reported-by: Li Qiang -Signed-off-by: Prasad J Pandit -Signed-off-by: Jason Wang ---- - hw/net/pcnet.c | 3 +++ - 1 file changed, 3 insertions(+) - -diff --git a/hw/net/pcnet.c b/hw/net/pcnet.c -index 198a01f..3078de8 100644 ---- a/hw/net/pcnet.c -+++ b/hw/net/pcnet.c -@@ -1429,8 +1429,11 @@ static void pcnet_csr_writew(PCNetState *s, uint32_t rap, uint32_t new_value) - case 47: /* POLLINT */ - case 72: - case 74: -+ break; - case 76: /* RCVRL */ - case 78: /* XMTRL */ -+ val = (val > 0) ? val : 512; -+ break; - case 112: - if (CSR_STOP(s) || CSR_SPND(s)) - break; --- -2.10.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-9908.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-9908.patch new file mode 100644 index 000000000..e0f7a1a3f --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-9908.patch @@ -0,0 +1,44 @@ +From 7139ccbc907441337b4b59cde2c5b5a54cb5b2cc Mon Sep 17 00:00:00 2001 +From: Sona Sarmadi + +virtio-gpu: fix information leak in capset get dispatch + +In virgl_cmd_get_capset function, it uses g_malloc to allocate +a response struct to the guest. As the 'resp'struct hasn't been full +initialized it will lead the 'resp->padding' field to the guest. +Use g_malloc0 to avoid this. + +Signed-off-by: Li Qiang +Reviewed-by: Marc-André Lureau +Message-id: 58188cae.4a6ec20a.3d2d1.aff2@mx.google.com + +[Sona: backported from master to v2.8.0 and resolved conflict] + +Reference to upstream patch: +http://git.qemu-project.org/?p=qemu.git;a=commit;h=85d9d044471f93c48c5c396f7e217b4ef12f69f8 + +CVE: CVE-2016-9908 +Upstream-Status: Backport + +Signed-off-by: Gerd Hoffmann +Signed-off-by: Sona Sarmadi +--- + hw/display/virtio-gpu-3d.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/hw/display/virtio-gpu-3d.c b/hw/display/virtio-gpu-3d.c +index 23f39de..d98b140 100644 +--- a/hw/display/virtio-gpu-3d.c ++++ b/hw/display/virtio-gpu-3d.c +@@ -371,7 +371,7 @@ static void virgl_cmd_get_capset(VirtIOGPU *g, + + virgl_renderer_get_cap_set(gc.capset_id, &max_ver, + &max_size); +- resp = g_malloc(sizeof(*resp) + max_size); ++ resp = g_malloc0(sizeof(*resp) + max_size); + + resp->hdr.type = VIRTIO_GPU_RESP_OK_CAPSET; + virgl_renderer_fill_caps(gc.capset_id, +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-9912.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-9912.patch new file mode 100644 index 000000000..c009ffd96 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/CVE-2016-9912.patch @@ -0,0 +1,45 @@ +From b8e23926c568f2e963af39028b71c472e3023793 Mon Sep 17 00:00:00 2001 +From: Li Qiang +Date: Mon, 28 Nov 2016 21:29:25 -0500 +Subject: [PATCH] virtio-gpu: call cleanup mapping function in resource destroy + +If the guest destroy the resource before detach banking, the 'iov' +and 'addrs' field in resource is not freed thus leading memory +leak issue. This patch avoid this. 
+ +CVE: CVE-2016-9912 +Upstream-Status: Backport + +Signed-off-by: Li Qiang +Reviewed-by: Marc-André Lureau +Message-id: 1480386565-10077-1-git-send-email-liq3ea@gmail.com +Signed-off-by: Gerd Hoffmann +Signed-off-by: Sona Sarmadi +--- + hw/display/virtio-gpu.c | 3 +++ + 1 file changed, 3 insertions(+) + +diff --git a/hw/display/virtio-gpu.c b/hw/display/virtio-gpu.c +index ed2b6d3..6a26258 100644 +--- a/hw/display/virtio-gpu.c ++++ b/hw/display/virtio-gpu.c +@@ -28,6 +28,8 @@ + static struct virtio_gpu_simple_resource* + virtio_gpu_find_resource(VirtIOGPU *g, uint32_t resource_id); + ++static void virtio_gpu_cleanup_mapping(struct virtio_gpu_simple_resource *res); ++ + #ifdef CONFIG_VIRGL + #include + #define VIRGL(_g, _virgl, _simple, ...) \ +@@ -364,6 +366,7 @@ static void virtio_gpu_resource_destroy(VirtIOGPU *g, + struct virtio_gpu_simple_resource *res) + { + pixman_image_unref(res->image); ++ virtio_gpu_cleanup_mapping(res); + QTAILQ_REMOVE(&g->reslist, res, next); + g->hostmem -= res->hostmem; + g_free(res); +-- +1.9.1 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/Qemu-Arm-versatilepb-Add-memory-size-checking.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/Qemu-Arm-versatilepb-Add-memory-size-checking.patch deleted file mode 100644 index 1a6cf5119..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/Qemu-Arm-versatilepb-Add-memory-size-checking.patch +++ /dev/null @@ -1,46 +0,0 @@ -From 896fa02c24347e6e9259812cfda187b1d6ca6199 Mon Sep 17 00:00:00 2001 -From: Jiang Lu -Date: Wed, 13 Nov 2013 10:38:08 +0800 -Subject: [PATCH] Qemu:Arm:versatilepb: Add memory size checking - -The machine can not work with memory over 256M, so add a checking -at startup. If the memory size exceed 256M, just stop emulation then -throw out warning about memory limitation. - -Upstream-Status: Pending - -Signed-off-by: Jiang Lu - -Updated it on 2014-01-15 for rebasing - -Signed-off-by: Robert Yang - -Update it when upgrade qemu to 2.2.0 - -Signed-off-by: Kai Kang -Signed-off-by: Cristian Iorga ---- - hw/arm/versatilepb.c | 7 +++++++ - 1 file changed, 7 insertions(+) - -diff --git a/hw/arm/versatilepb.c b/hw/arm/versatilepb.c -index 6c69f4e..9278d90 100644 ---- a/hw/arm/versatilepb.c -+++ b/hw/arm/versatilepb.c -@@ -204,6 +204,13 @@ static void versatile_init(MachineState *machine, int board_id) - exit(1); - } - -+ if (machine->ram_size > (256 << 20)) { -+ fprintf(stderr, -+ "qemu: Too much memory for this machine: %d MB, maximum 256 MB\n", -+ ((unsigned int)ram_size / (1 << 20))); -+ exit(1); -+ } -+ - cpuobj = object_new(object_class_get_name(cpu_oc)); - - /* By default ARM1176 CPUs have EL3 enabled. 
This board does not --- -2.1.0 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/exclude-some-arm-EABI-obsolete-syscalls.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/exclude-some-arm-EABI-obsolete-syscalls.patch index 171bda7e9..f593cf9ae 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/exclude-some-arm-EABI-obsolete-syscalls.patch +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/exclude-some-arm-EABI-obsolete-syscalls.patch @@ -27,14 +27,14 @@ or kernel header: Signed-off-by: Roy.Li --- - qemu-seccomp.c | 14 ++++++++------ - 1 file changed, 8 insertions(+), 6 deletions(-) + qemu-seccomp.c | 13 ++++++++----- + 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/qemu-seccomp.c b/qemu-seccomp.c -index caa926e..5a78502 100644 +index df75d9c..0e577f8 100644 --- a/qemu-seccomp.c +++ b/qemu-seccomp.c -@@ -25,15 +25,21 @@ static const struct QemuSeccompSyscall seccomp_whitelist[] = { +@@ -35,15 +35,21 @@ static const struct QemuSeccompSyscall seccomp_whitelist[] = { { SCMP_SYS(timer_settime), 255 }, { SCMP_SYS(timer_gettime), 254 }, { SCMP_SYS(futex), 253 }, @@ -58,7 +58,7 @@ index caa926e..5a78502 100644 { SCMP_SYS(mprotect), 246 }, { SCMP_SYS(execve), 245 }, { SCMP_SYS(open), 245 }, -@@ -48,13 +54,11 @@ static const struct QemuSeccompSyscall seccomp_whitelist[] = { +@@ -58,7 +64,6 @@ static const struct QemuSeccompSyscall seccomp_whitelist[] = { { SCMP_SYS(bind), 245 }, { SCMP_SYS(listen), 245 }, { SCMP_SYS(semget), 245 }, @@ -66,13 +66,7 @@ index caa926e..5a78502 100644 { SCMP_SYS(gettimeofday), 245 }, { SCMP_SYS(readlink), 245 }, { SCMP_SYS(access), 245 }, - { SCMP_SYS(prctl), 245 }, - { SCMP_SYS(signalfd), 245 }, -- { SCMP_SYS(getrlimit), 245 }, - { SCMP_SYS(set_tid_address), 245 }, - { SCMP_SYS(statfs), 245 }, - { SCMP_SYS(unlink), 245 }, -@@ -93,7 +97,6 @@ static const struct QemuSeccompSyscall seccomp_whitelist[] = { +@@ -104,7 +109,6 @@ static const struct QemuSeccompSyscall seccomp_whitelist[] = { { SCMP_SYS(times), 245 }, { SCMP_SYS(exit), 245 }, { SCMP_SYS(clock_gettime), 245 }, @@ -80,7 +74,7 @@ index caa926e..5a78502 100644 { SCMP_SYS(restart_syscall), 245 }, { SCMP_SYS(pwrite64), 245 }, { SCMP_SYS(nanosleep), 245 }, -@@ -182,7 +185,6 @@ static const struct QemuSeccompSyscall seccomp_whitelist[] = { +@@ -194,7 +198,6 @@ static const struct QemuSeccompSyscall seccomp_whitelist[] = { { SCMP_SYS(lstat64), 241 }, { SCMP_SYS(sendfile64), 241 }, { SCMP_SYS(ugetrlimit), 241 }, @@ -89,5 +83,5 @@ index caa926e..5a78502 100644 { SCMP_SYS(rt_sigqueueinfo), 241 }, { SCMP_SYS(rt_tgsigqueueinfo), 241 }, -- -1.9.1 +2.1.4 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/glibc-2.25.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/glibc-2.25.patch new file mode 100644 index 000000000..a6908bdbf --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/glibc-2.25.patch @@ -0,0 +1,88 @@ +From: Christopher Covington +Date: Wed, 28 Dec 2016 15:04:33 -0500 +Subject: [Qemu-devel] [PATCH v3] build: include sys/sysmacros.h for major() and minor()a + +The definition of the major() and minor() macros are moving within glibc to +. Include this header when it is available to avoid the +following sorts of build-stopping messages: + +qga/commands-posix.c: In function ‘dev_major_minor’: +qga/commands-posix.c:656:13: error: In the GNU C Library, "major" is defined + by . For historical compatibility, it is + currently defined by as well, but we plan to + remove this soon. 
To use "major", include + directly. If you did not intend to use a system-defined macro + "major", you should undefine it after including . [-Werror] + *devmajor = major(st.st_rdev); + ^~~~~~~~~~~~~~~~~~~~~~~~~~ + +qga/commands-posix.c:657:13: error: In the GNU C Library, "minor" is defined + by . For historical compatibility, it is + currently defined by as well, but we plan to + remove this soon. To use "minor", include + directly. If you did not intend to use a system-defined macro + "minor", you should undefine it after including . [-Werror] + *devminor = minor(st.st_rdev); + ^~~~~~~~~~~~~~~~~~~~~~~~~~ + +The additional include allows the build to complete on Fedora 26 (Rawhide) +with glibc version 2.24.90. + +Signed-off-by: Christopher Covington +Signed-off-by: Martin Jansa + +Upstream-Status: Submitted https://lists.gnu.org/archive/html/qemu-devel/2016-12/msg03548.html + +diff -uNr qemu-2.8.0.orig/configure qemu-2.8.0/configure +--- qemu-2.8.0.orig/configure 2017-02-21 19:05:13.180094708 +0100 ++++ qemu-2.8.0/configure 2017-02-21 19:08:53.114087084 +0100 +@@ -4727,6 +4727,20 @@ + then + fi + + ########################################## ++# check for sysmacros.h ++ ++have_sysmacros=no ++cat > $TMPC << EOF ++#include ++int main(void) { ++ return makedev(0, 0); ++} ++EOF ++if compile_prog "" "" ; then ++ have_sysmacros=yes ++fi ++ ++########################################## + # End of CC checks + # After here, no more $cc or $ld runs + +diff -uNr qemu-2.8.0.orig/configure qemu-2.8.0/configure +--- qemu-2.8.0.orig/configure 2017-02-21 19:05:13.180094708 +0100 ++++ qemu-2.8.0/configure 2017-02-21 19:08:53.114087084 +0100 +@@ -5695,6 +5709,10 @@ + echo "CONFIG_AF_VSOCK=y" >> $config_host_mak + fi + ++if test "$have_sysmacros" = "yes" ; then ++ echo "CONFIG_SYSMACROS=y" >> $config_host_mak ++fi ++ + # Hold two types of flag: + # CONFIG_THREAD_SETNAME_BYTHREAD - we've got a way of setting the name on + # a thread we have a handle to +diff -uNr qemu-2.8.0.orig/include/sysemu/os-posix.h qemu-2.8.0/include/sysemu/os-posix.h +--- qemu-2.8.0.orig/include/sysemu/os-posix.h 2016-12-20 21:16:48.000000000 +0100 ++++ qemu-2.8.0/include/sysemu/os-posix.h 2017-02-21 19:07:18.009090381 +0100 +@@ -34,6 +34,10 @@ + #include + #include + ++#ifdef CONFIG_SYSMACROS ++#include ++#endif ++ + void os_set_line_buffering(void); + void os_set_proc_name(const char *s); + void os_setup_signal_handling(void); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/target-ppc-fix-user-mode.patch b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/target-ppc-fix-user-mode.patch new file mode 100644 index 000000000..ba21e71b0 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu/target-ppc-fix-user-mode.patch @@ -0,0 +1,48 @@ +[Qemu-ppc] [PATCH 1/1] target-ppc, tcg: fix usermode segfault with pthread + +From: Sam Bobroff +Subject: [Qemu-ppc] [PATCH 1/1] target-ppc, tcg: fix usermode segfault with pthread_create() +Date: Mon, 30 Jan 2017 16:08:07 +1100 +Programs run under qemu-ppc64 on an x86_64 host currently segfault +if they use pthread_create() due to the adjustment made to the NIP in +commit bd6fefe71cec5a0c7d2be4ac96307f25db56abf9. + +This patch changes cpu_loop() to set the NIP back to the +pre-incremented value before calling do_syscall(), which causes the +correct address to be used for the new thread and corrects the fault. 
+ +Signed-off-by: Sam Bobroff + +Upstream-Status: Backport + +--- + +linux-user/main.c | 4 +++- +1 file changed, 3 insertions(+), 1 deletion(-) + +diff --git a/linux-user/main.c b/linux-user/main.c +index 30049581ef..b5dee01541 100644 +--- a/linux-user/main.c ++++ b/linux-user/main.c +@@ -1712,18 +1712,20 @@ void cpu_loop(CPUPPCState *env) + * in syscalls. + */ + env->crf[0] &= ~0x1; ++ env->nip += 4; + ret = do_syscall(env, env->gpr[0], env->gpr[3], env->gpr[4], + env->gpr[5], env->gpr[6], env->gpr[7], + env->gpr[8], 0, 0); + if (ret == -TARGET_ERESTARTSYS) { ++ env->nip -= 4; + break; + } + if (ret == (target_ulong)(-TARGET_QEMU_ESIGRETURN)) { ++ env->nip -= 4; + /* Returning from a successful sigreturn syscall. + Avoid corrupting register state. */ + break; + } +- env->nip += 4; + if (ret > (target_ulong)(-515)) { + env->crf[0] |= 0x1; + ret = -ret; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu_2.7.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu_2.7.0.bb deleted file mode 100644 index 85aadecf0..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu_2.7.0.bb +++ /dev/null @@ -1,28 +0,0 @@ -require qemu.inc - -LIC_FILES_CHKSUM = "file://COPYING;md5=441c28d2cf86e15a37fa47e15a72fbac \ - file://COPYING.LIB;endline=24;md5=c04def7ae38850e7d3ef548588159913" - -SRC_URI += "file://configure-fix-Darwin-target-detection.patch \ - file://qemu-enlarge-env-entry-size.patch \ - file://Qemu-Arm-versatilepb-Add-memory-size-checking.patch \ - file://no-valgrind.patch \ - file://pathlimit.patch \ - file://qemu-2.5.0-cflags.patch \ - file://0001-virtio-zero-vq-inuse-in-virtio_reset.patch \ - file://0002-fix-CVE-2016-7423.patch \ - file://0003-fix-CVE-2016-7908.patch \ - file://0004-fix-CVE-2016-7909.patch \ - file://04b33e21866412689f18b7ad6daf0a54d8f959a7.patch \ -" - -SRC_URI_prepend = "http://wiki.qemu-project.org/download/${BP}.tar.bz2" -SRC_URI[md5sum] = "08d4d06d1cb598efecd796137f4844ab" -SRC_URI[sha256sum] = "326e739506ba690daf69fc17bd3913a6c313d9928d743bd8eddb82f403f81e53" - -COMPATIBLE_HOST_class-target_mips64 = "null" - -do_install_append() { - # Prevent QA warnings about installed ${localstatedir}/run - if [ -d ${D}${localstatedir}/run ]; then rmdir ${D}${localstatedir}/run; fi -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu_2.8.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu_2.8.0.bb new file mode 100644 index 000000000..fa70009f7 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemu_2.8.0.bb @@ -0,0 +1,65 @@ +require qemu.inc + +inherit ptest + +RDEPENDS_${PN}-ptest = "bash make" + +LIC_FILES_CHKSUM = "file://COPYING;md5=441c28d2cf86e15a37fa47e15a72fbac \ + file://COPYING.LIB;endline=24;md5=c04def7ae38850e7d3ef548588159913" + +SRC_URI += " \ + file://powerpc_rom.bin \ + file://disable-grabs.patch \ + file://exclude-some-arm-EABI-obsolete-syscalls.patch \ + file://wacom.patch \ + file://add-ptest-in-makefile.patch \ + file://run-ptest \ + file://configure-fix-Darwin-target-detection.patch \ + file://qemu-enlarge-env-entry-size.patch \ + file://no-valgrind.patch \ + file://pathlimit.patch \ + file://qemu-2.5.0-cflags.patch \ + file://target-ppc-fix-user-mode.patch \ + file://glibc-2.25.patch \ + file://04b33e21866412689f18b7ad6daf0a54d8f959a7.patch \ +" + +SRC_URI += " \ + file://0001-Provide-support-for-the-CUSE-TPM.patch \ + file://0002-Introduce-condition-to-notify-waiters-of-completed-c.patch \ + file://0003-Introduce-condition-in-TPM-backend-for-notification.patch \ + 
file://0004-Add-support-for-VM-suspend-resume-for-TPM-TIS.patch \ + file://CVE-2016-9908.patch \ + file://CVE-2016-9912.patch \ +" + +SRC_URI_append_class-native = " \ + file://fix-libcap-header-issue-on-some-distro.patch \ + file://cpus.c-qemu_cpu_kick_thread_debugging.patch \ + " + +SRC_URI =+ "http://wiki.qemu-project.org/download/${BP}.tar.bz2" + +SRC_URI[md5sum] = "17940dce063b6ce450a12e719a6c9c43" +SRC_URI[sha256sum] = "dafd5d7f649907b6b617b822692f4c82e60cf29bc0fc58bc2036219b591e5e62" + +COMPATIBLE_HOST_mipsarchn32 = "null" +COMPATIBLE_HOST_mipsarchn64 = "null" + +do_install_append() { + # Prevent QA warnings about installed ${localstatedir}/run + if [ -d ${D}${localstatedir}/run ]; then rmdir ${D}${localstatedir}/run; fi + install -Dm 0755 ${WORKDIR}/powerpc_rom.bin ${D}${datadir}/qemu +} + +do_compile_ptest() { + make buildtest-TESTS +} + +do_install_ptest() { + cp -rL ${B}/tests ${D}${PTEST_PATH} + find ${D}${PTEST_PATH}/tests -type f -name "*.[Sshcod]" | xargs -i rm -rf {} + + cp ${S}/tests/Makefile.include ${D}${PTEST_PATH}/tests +} + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb index 53b302d65..e40cdafc9 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb +++ b/import-layers/yocto-poky/meta/recipes-devtools/qemu/qemuwrapper-cross_1.0.bb @@ -1,11 +1,14 @@ SUMMARY = "QEMU wrapper script" LICENSE = "MIT" -LIC_FILES_CHKSUM = "file://${COREBASE}/meta/COPYING.MIT;md5=3da9cfbcb788c80a0384361b4de20420" S = "${WORKDIR}" +DEPENDS += "qemu-native" + inherit qemu +do_populate_sysroot[depends] = "" + do_install () { install -d ${D}${bindir_crossscripts}/ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native.inc b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native.inc index 5c4b0a217..c7067042e 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native.inc @@ -5,7 +5,7 @@ INHIBIT_AUTOTOOLS_DEPS = "1" inherit native PATCHTOOL = "patch" -EXTRA_OECONF = "--disable-nls" +EXTRA_OECONF_append = " --disable-nls" do_configure () { oe_runconf diff --git a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native_0.64.bb b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native_0.64.bb deleted file mode 100644 index 6bc7dcdb7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native_0.64.bb +++ /dev/null @@ -1,2 +0,0 @@ -require quilt.inc -require quilt-native.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native_0.65.bb b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native_0.65.bb new file mode 100644 index 000000000..6bc7dcdb7 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt-native_0.65.bb @@ -0,0 +1,2 @@ +require quilt.inc +require quilt-native.inc diff --git a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt.inc b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt.inc index 512b798d7..c7bb741b4 100644 --- a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt.inc +++ b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt.inc @@ -5,14 +5,14 @@ LICENSE = "GPLv2" LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f" SRC_URI = "${SAVANNAH_GNU_MIRROR}/quilt/quilt-${PV}.tar.gz \ - file://install.patch \ file://run-ptest \ file://Makefile \ - 
file://test.sh \ + file://test.sh \ + file://0001-tests-Allow-different-output-from-mv.patch \ " -SRC_URI[md5sum] = "fc0310db5868a0873d602d4332a76d43" -SRC_URI[sha256sum] = "c4bfd3282214a288e8d3e921ae4d52e73e24c4fead72b5446752adee99a7affd" +SRC_URI[md5sum] = "c67ba0228f5b7b8bbe469474661f92d6" +SRC_URI[sha256sum] = "f6cbc788e5cbbb381a3c6eab5b9efce67c776a8662a7795c7432fd27aa096819" inherit autotools-brokensep ptest @@ -34,8 +34,21 @@ RDEPENDS_${PN} = "bash" EXTRA_OE_MAKE_ARGS_darwin ?= "" EXTRA_OE_MAKE_ARGS ?= "BUILD_ROOT=${D}" +EXTRA_OECONF = "--with-perl='${USRBINPATH}/env perl' --with-patch=patch" + CACHED_CONFIGUREVARS += "ac_cv_path_BASH=/bin/bash" +# Make sure we don't have "-w" in shebang lines: it breaks using +# "/usr/bin/env perl" as parser +do_configure_prepend () { + find ${S} -name "*.in" -exec sed -i -e "1s,^#\!.*@PERL@ -w$,#\! @PERL@\nuse warnings;," {} \; +} + +# Don't setup symlinks to host utilities, we don't need them +do_configure_append () { + sed -e 's,^COMPAT_SYMLINKS.*:=.*,COMPAT_SYMLINKS :=,' -i ${S}/Makefile +} + # quilt ignores DESTDIR do_install () { oe_runmake ${EXTRA_OE_MAKE_ARGS} install diff --git a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt/0001-tests-Allow-different-output-from-mv.patch b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt/0001-tests-Allow-different-output-from-mv.patch new file mode 100644 index 000000000..21219a0bb --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt/0001-tests-Allow-different-output-from-mv.patch @@ -0,0 +1,29 @@ +From 1530138960cfafbeefb95f2a760954c00b4d0ef0 Mon Sep 17 00:00:00 2001 +From: Jussi Kukkonen +Date: Wed, 29 Mar 2017 15:11:59 +0300 +Subject: [PATCH] tests: Allow different output from mv + +busybox mv has different error messages: fix the test + +Upstream-Status: Inappropriate [embedded] +Signed-off-by: Jussi Kukkonen +--- + test/failbackup.test | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/test/failbackup.test b/test/failbackup.test +index 37046f7..fce6725 100644 +--- a/test/failbackup.test ++++ b/test/failbackup.test +@@ -16,7 +16,7 @@ What happens when refresh fails because of a permission error? + $ cat > test.txt + < This is updated test.txt. + $ quilt refresh --backup +- >~ mv: cannot move [`']?%{P}test.diff'? 
to [`']?%{P}test.diff~'?: Permission denied ++ >~ mv: .*: Permission denied + $ echo %{?} + > 1 + +-- +2.1.4 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt/install.patch b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt/install.patch deleted file mode 100644 index e2a7af655..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt/install.patch +++ /dev/null @@ -1,13 +0,0 @@ -Upstream-Status: Pending - ---- quilt-0.47/Makefile.in 2008-12-31 19:09:13.000000000 +0000 -+++ quilt-0.47/Makefile.in.orig 2008-08-21 13:21:32.000000000 +0100 -@@ -13,7 +13,7 @@ - mandir := @mandir@ - localedir := $(datadir)/locale - emacsdir := $(datadir)/emacs/site-lisp --etcdir := $(subst /usr/etc,/etc,$(prefix)/etc) -+etcdir := @sysconfdir@ - - INSTALL := @INSTALL@ - POD2MAN := @POD2MAN@ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt_0.64.bb b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt_0.64.bb deleted file mode 100644 index 00f900a11..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt_0.64.bb +++ /dev/null @@ -1,16 +0,0 @@ -require quilt.inc -inherit gettext -RDEPENDS_${PN} += "patch diffstat bzip2 util-linux" -SRC_URI += "file://aclocal.patch \ - file://gnu_patch_test_fix_target.patch \ - " - -# fix build-distro specific perl path in the target perl scripts -do_install_append() { - for perlscript in ${D}${datadir}/quilt/scripts/remove-trailing-ws ${D}${datadir}/quilt/scripts/dependency-graph ${D}${datadir}/quilt/scripts/edmail ${D}${bindir}/guards - do - if [ -f $perlscript ]; then - sed -i -e '1s,#!.*perl,#! ${USRBINPATH}/env perl,' $perlscript - fi - done -} diff --git a/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt_0.65.bb b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt_0.65.bb new file mode 100644 index 000000000..12859f0bc --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/quilt/quilt_0.65.bb @@ -0,0 +1,6 @@ +require quilt.inc +inherit gettext +RDEPENDS_${PN} += "patch diffstat bzip2 util-linux" +SRC_URI += "file://aclocal.patch \ + file://gnu_patch_test_fix_target.patch \ + " diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Add-PYTHON_ABI-when-searching-for-python-libraries.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Add-PYTHON_ABI-when-searching-for-python-libraries.patch new file mode 100644 index 000000000..b809332f2 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Add-PYTHON_ABI-when-searching-for-python-libraries.patch @@ -0,0 +1,30 @@ +From 36cf0ff26ece53e529e8b4f2d2f09acd8794b055 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 24 Mar 2017 15:35:47 +0200 +Subject: [PATCH] Add PYTHON_ABI when searching for python libraries. + +It has a value of 'm' when using Python3, and so without it +configure will not find the libraries. 
+ +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + configure.ac | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/configure.ac b/configure.ac +index 9c58467c1..a506ec819 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -642,7 +642,7 @@ AS_IF([test "$enable_python" = yes],[ + ]) + CPPFLAGS="$save_CPPFLAGS" + save_LIBS="$LIBS" +- AC_SEARCH_LIBS([Py_Main],[python${PYTHON_VERSION} python],[ ++ AC_SEARCH_LIBS([Py_Main],[python${PYTHON_VERSION}${PYTHON_ABI} python],[ + WITH_PYTHON_LIB="$ac_res" + ],[AC_MSG_ERROR([missing python library]) + ]) +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Add-a-color-setting-for-mips64_n32-binaries.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Add-a-color-setting-for-mips64_n32-binaries.patch new file mode 100644 index 000000000..ac6dcaf10 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Add-a-color-setting-for-mips64_n32-binaries.patch @@ -0,0 +1,40 @@ +From e3eff024826550aec4a6a5baef7210a29faf299d Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Thu, 9 Mar 2017 18:54:02 +0200 +Subject: [PATCH] Add a color setting for mips64_n32 binaries + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + build/rpmfc.c | 1 + + rpmrc.in | 2 ++ + 2 files changed, 3 insertions(+) + +diff --git a/build/rpmfc.c b/build/rpmfc.c +index d38a10916..c8e2f876a 100644 +--- a/build/rpmfc.c ++++ b/build/rpmfc.c +@@ -622,6 +622,7 @@ exit: + static const struct rpmfcTokens_s rpmfcTokens[] = { + { "directory", RPMFC_INCLUDE }, + ++ { "N32 MIPS64", RPMFC_ELFMIPSN32|RPMFC_INCLUDE }, + { "ELF 32-bit", RPMFC_ELF32|RPMFC_INCLUDE }, + { "ELF 64-bit", RPMFC_ELF64|RPMFC_INCLUDE }, + +diff --git a/rpmrc.in b/rpmrc.in +index abc08fc31..f5bc820d8 100644 +--- a/rpmrc.in ++++ b/rpmrc.in +@@ -133,6 +133,8 @@ archcolor: mipsr6el 1 + archcolor: mips64r6 2 + archcolor: mips64r6el 2 + ++archcolor: mips64_n32 4 ++ + archcolor: m68k 1 + + archcolor: m68kmint 1 +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch new file mode 100644 index 000000000..80e2f0fad --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-add-an-unsatisfiable-dependency-when-building.patch @@ -0,0 +1,33 @@ +From 87cfc0db1ed6fe381a5ed5f0016d8c3344a31a11 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Mon, 9 Jan 2017 18:52:11 +0200 +Subject: [PATCH] Do not add an unsatisfiable dependency when building rpms in + a short-circuited way. + +Upstream permits short-circuiting only for local testing; Yocto on the other +hand produces rpms that way by design. 
+ +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + build/pack.c | 4 ---- + 1 file changed, 4 deletions(-) + +diff --git a/build/pack.c b/build/pack.c +index 1261cdbba..bb2d6f4f6 100644 +--- a/build/pack.c ++++ b/build/pack.c +@@ -595,10 +595,6 @@ rpmRC packageBinaries(rpmSpec spec, const char *cookie, int cheating) + headerPutBin(pkg->header, RPMTAG_SOURCEPKGID, spec->sourcePkgId,16); + } + +- if (cheating) { +- (void) rpmlibNeedsFeature(pkg, "ShortCircuited", "4.9.0-1"); +- } +- + { char *binFormat = rpmGetPath("%{_rpmfilename}", NULL); + char *binRpm, *binDir; + binRpm = headerFormat(pkg->header, binFormat, &errorString); +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch new file mode 100644 index 000000000..d99ddeb9e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-hardcode-lib-rpm-as-the-installation-path-for.patch @@ -0,0 +1,61 @@ +From d82691b8d58201dd03e30585daacd8ffd1556ae2 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Mon, 27 Feb 2017 09:43:30 +0200 +Subject: [PATCH] Do not hardcode "lib/rpm" as the installation path for + default configuration and macros. + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin + +--- + configure.ac | 2 +- + macros.in | 2 +- + rpm.am | 4 ++-- + 3 files changed, 4 insertions(+), 4 deletions(-) + +diff --git a/configure.ac b/configure.ac +index 4f3be8770..92ffd3d68 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -875,7 +875,7 @@ else + usrprefix=$prefix + fi + +-RPMCONFIGDIR="`echo ${usrprefix}/lib/rpm`" ++RPMCONFIGDIR="`echo ${libdir}/rpm`" + AC_SUBST(RPMCONFIGDIR) + + AC_SUBST(OBJDUMP) +diff --git a/macros.in b/macros.in +index c6d5a6b03..84ae25275 100644 +--- a/macros.in ++++ b/macros.in +@@ -877,7 +877,7 @@ package or when debugging this package.\ + %_sharedstatedir %{_prefix}/com + %_localstatedir %{_prefix}/var + %_lib lib +-%_libdir %{_exec_prefix}/%{_lib} ++%_libdir @libdir@ + %_includedir %{_prefix}/include + %_infodir %{_datadir}/info + %_mandir %{_datadir}/man +diff --git a/rpm.am b/rpm.am +index 1f43ad8a0..6854ff6ba 100644 +--- a/rpm.am ++++ b/rpm.am +@@ -1,10 +1,10 @@ + # Internal binaries + ## HACK: It probably should be $(libexecdir)/rpm or $(libdir)/rpm +-rpmlibexecdir = $(prefix)/lib/rpm ++rpmlibexecdir = $(libdir)/rpm + + # Host independent config files + ## HACK: it probably should be $(datadir)/rpm +-rpmconfigdir = $(prefix)/lib/rpm ++rpmconfigdir = $(libdir)/rpm + + # Libtool version (current-revision-age) for all our libraries + rpm_version_info = 7:0:0 +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-read-config-files-from-HOME.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-read-config-files-from-HOME.patch new file mode 100644 index 000000000..96eb41895 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-read-config-files-from-HOME.patch @@ -0,0 +1,38 @@ +From 35381b6cd6c1b571bf7e6b0640de0f54dbf94386 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Tue, 10 Jan 2017 14:11:30 +0200 +Subject: [PATCH] Do not read config files from $HOME + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + lib/rpmrc.c | 6 ++---- + 1 file 
changed, 2 insertions(+), 4 deletions(-) + +diff --git a/lib/rpmrc.c b/lib/rpmrc.c +index 4ed991321..19fe80f98 100644 +--- a/lib/rpmrc.c ++++ b/lib/rpmrc.c +@@ -458,8 +458,7 @@ static void setDefaults(void) + if (!defrcfiles) { + defrcfiles = rstrscat(NULL, confdir, "/rpmrc", ":", + confdir, "/" RPMCANONVENDOR "/rpmrc", ":", +- SYSCONFDIR "/rpmrc", ":", +- "~/.rpmrc", NULL); ++ SYSCONFDIR "/rpmrc", ":"); + } + + #ifndef MACROFILES +@@ -471,8 +470,7 @@ static void setDefaults(void) + confdir, "/" RPMCANONVENDOR "/macros", ":", + SYSCONFDIR "/rpm/macros.*", ":", + SYSCONFDIR "/rpm/macros", ":", +- SYSCONFDIR "/rpm/%{_target}/macros", ":", +- "~/.rpmmacros", NULL); ++ SYSCONFDIR "/rpm/%{_target}/macros", ":"); + } + #else + macrofiles = MACROFILES; +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch new file mode 100644 index 000000000..a38675f89 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Do-not-reset-the-PATH-environment-variable-before-ru.patch @@ -0,0 +1,30 @@ +From ffb5301a8594140ad7a58bc0f2053be8ca2b2946 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 20 Jan 2017 13:32:06 +0200 +Subject: [PATCH 1/2] Do not reset the PATH environment variable before running + scriptlets. + +We add lots of native stuff into it and scriptlets rely on that. + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + lib/rpmscript.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/lib/rpmscript.c b/lib/rpmscript.c +index 92f949fa2..7c1aa75a8 100644 +--- a/lib/rpmscript.c ++++ b/lib/rpmscript.c +@@ -201,7 +201,7 @@ static void doScriptExec(ARGV_const_t argv, ARGV_const_t prefixes, + if (ipath && ipath[5] != '%') + path = ipath; + +- xx = setenv("PATH", path, 1); ++ //xx = setenv("PATH", path, 1); + free(ipath); + } + +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Fix-build-with-musl-C-library.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Fix-build-with-musl-C-library.patch new file mode 100644 index 000000000..95c70136b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-Fix-build-with-musl-C-library.patch @@ -0,0 +1,74 @@ +From 211c2d11200e6657132c52e7ac68f8c118231262 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Mon, 27 Feb 2017 14:43:21 +0200 +Subject: [PATCH] Fix build with musl C library. + +Upstream-Status: Pending +Signed-off-by: Alexander Kanavin + + +Signed-off-by: Alexander Kanavin +--- + configure.ac | 3 ++- + misc/Makefile.am | 3 +-- + misc/rpmxprogname.c | 3 +-- + 3 files changed, 4 insertions(+), 5 deletions(-) + +diff --git a/configure.ac b/configure.ac +index 92ffd3d68..9c58467c1 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -229,6 +229,7 @@ AC_SEARCH_LIBS(dlopen, [dl]) + # Check for libelf library. Prefer external, otherwise none. 
+ WITH_LIBELF_LIB= + AC_CHECK_HEADER([libelf.h]) ++AC_CHECK_HEADERS([error.h], [WITH_ERROR_H=yes]) + AC_CHECK_HEADERS([gelf.h], [ + AC_CHECK_LIB(elf, gelf_getvernaux, [ + AC_DEFINE(HAVE_LIBELF, 1, [Define to 1 if you have the 'elf' library (-lelf).]) +@@ -237,7 +238,7 @@ AC_CHECK_HEADERS([gelf.h], [ + ]) + ]) + AC_SUBST(WITH_LIBELF_LIB) +-AM_CONDITIONAL(LIBELF,[test "$WITH_LIBELF" = yes]) ++AM_CONDITIONAL(LIBELF,[test "$WITH_LIBELF" = yes && test "$WITH_ERROR_H" = yes]) + + AC_CHECK_HEADERS([dwarf.h], [ + WITH_LIBDWARF=yes +diff --git a/misc/Makefile.am b/misc/Makefile.am +index 8bf0093d9..b9db3d31a 100644 +--- a/misc/Makefile.am ++++ b/misc/Makefile.am +@@ -5,10 +5,9 @@ AM_CPPFLAGS += -I$(top_srcdir)/misc + + EXTRA_DIST = \ + fnmatch.c fnmatch.h \ +- rpmxprogname.c rpmxprogname.h \ + stpcpy.c stpncpy.c + + noinst_LTLIBRARIES = libmisc.la + +-libmisc_la_SOURCES = fts.c fts.h ++libmisc_la_SOURCES = fts.c fts.h rpmxprogname.c rpmxprogname.h + libmisc_la_LIBADD = @LTLIBOBJS@ +diff --git a/misc/rpmxprogname.c b/misc/rpmxprogname.c +index f89600613..e94625ea8 100644 +--- a/misc/rpmxprogname.c ++++ b/misc/rpmxprogname.c +@@ -13,7 +13,7 @@ char *_rpmxgetprogname(void) + { + const char *empty = ""; + +- if (_rpmxprognam != NULL) /* never return NULL string */ ++ if (_rpmxprogname != NULL) /* never return NULL string */ + return _rpmxprogname; + else + return empty; +@@ -30,4 +30,3 @@ void _rpmxsetprogname(const char *pn) + } + } + +-#endif /* _RPMXPROGNAME_H */ +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-When-cross-installing-execute-package-scriptlets-wit.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-When-cross-installing-execute-package-scriptlets-wit.patch new file mode 100644 index 000000000..2be3cb5af --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-When-cross-installing-execute-package-scriptlets-wit.patch @@ -0,0 +1,37 @@ +From a6f269f879221f2777169c5f7291322afe6b661b Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Tue, 17 Jan 2017 14:07:17 +0200 +Subject: [PATCH] When cross-installing, execute package scriptlets without + chrooting into destination rootfs + +This is triggered only when RPM_NO_CHROOT_FOR_SCRIPTS environment variable is defined. +Otherwise they will trigger an explosion of failures, obviously. 
+ +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + lib/rpmscript.c | 8 +++++++- + 1 file changed, 7 insertions(+), 1 deletion(-) + +diff --git a/lib/rpmscript.c b/lib/rpmscript.c +index 98d3f420d..b95b5d606 100644 +--- a/lib/rpmscript.c ++++ b/lib/rpmscript.c +@@ -467,7 +467,13 @@ rpmRC rpmScriptRun(rpmScript script, int arg1, int arg2, FD_t scriptFd, + + if (rc != RPMRC_FAIL) { + if (script_type & RPMSCRIPTLET_EXEC) { +- rc = runExtScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, &script->nextFileFunc); ++ if (getenv("RPM_NO_CHROOT_FOR_SCRIPTS") != NULL) { ++ rpmChrootOut(); ++ rc = runExtScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, &script->nextFileFunc); ++ rpmChrootIn(); ++ } else { ++ rc = runExtScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, &script->nextFileFunc); ++ } + } else { + rc = runLuaScript(plugins, prefixes, script->descr, lvl, scriptFd, &args, script->body, arg1, arg2, &script->nextFileFunc); + } +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-When-nice-value-cannot-be-reset-issue-a-notice-inste.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-When-nice-value-cannot-be-reset-issue-a-notice-inste.patch new file mode 100644 index 000000000..9648cac7c --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-When-nice-value-cannot-be-reset-issue-a-notice-inste.patch @@ -0,0 +1,31 @@ +From d42ece6fa15b98d7f9221b90b85b78631df2c0a0 Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Tue, 14 Feb 2017 13:51:19 +0200 +Subject: [PATCH] When nice value cannot be reset, issue a notice instead of a + warning + +Otherwise build logs on the autobuilder get very clutter, as it +doesn't allow the nice value to be reset for some reason. + +Upstream-Status: Inappropriate [oe specific] +Signed-off-by: Alexander Kanavin +--- + lib/rpmscript.c | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/lib/rpmscript.c b/lib/rpmscript.c +index 5e1e99906..3975aead8 100644 +--- a/lib/rpmscript.c ++++ b/lib/rpmscript.c +@@ -347,7 +347,7 @@ static rpmRC runExtScript(rpmPlugins plugins, ARGV_const_t prefixes, + int ret; + ret = setpriority(PRIO_PROCESS, 0, 0); + if (ret == -1) { +- rpmlog(RPMLOG_WARNING, _("Unable to reset nice value: %s"), ++ rpmlog(RPMLOG_NOTICE, _("Unable to reset nice value: %s\n"), + strerror(errno)); + } + +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-perl-disable-auto-reqs.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-perl-disable-auto-reqs.patch new file mode 100644 index 000000000..a6c58699d --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0001-perl-disable-auto-reqs.patch @@ -0,0 +1,32 @@ +perl: disable auto requires + +When generating automatic requirements, it's possible for perl scripts to +declare 'optional' dependencies. These seem to often be incorrect and will +cause installation failures in OE. Instead of fixing the perl scripts, it +was decided it is better to simply disable the automatic dependency +generation. This matches the behavior from the previous RPM5 implementation. 
+ +Upstream-Status: Inappropriate [OE specific configuration] + +Signed-off-by: Mark Hatle + +Index: git/fileattrs/perl.attr +=================================================================== +--- git.orig/fileattrs/perl.attr ++++ git/fileattrs/perl.attr +@@ -1,3 +1,3 @@ +-%__perl_requires %{_rpmconfigdir}/perl.req ++#__perl_requires %{_rpmconfigdir}/perl.req + %__perl_magic ^.*[Pp]erl .*$ + %__perl_flags exeonly +Index: git/fileattrs/perllib.attr +=================================================================== +--- git.orig/fileattrs/perllib.attr ++++ git/fileattrs/perllib.attr +@@ -1,5 +1,5 @@ + %__perllib_provides %{_rpmconfigdir}/perl.prov +-%__perllib_requires %{_rpmconfigdir}/perl.req ++#__perllib_requires %{_rpmconfigdir}/perl.req + %__perllib_magic ^Perl[[:digit:]] module source.* + %__perllib_path \\.pm$ + %__perllib_flags magic_and_path diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch new file mode 100644 index 000000000..b3dbc319b --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0002-Add-support-for-prefixing-etc-from-RPM_ETCCONFIGDIR-.patch @@ -0,0 +1,72 @@ +From 383c0b097b7eba16801a9e3c4b8e36a4b6de74ab Mon Sep 17 00:00:00 2001 +From: Alexander Kanavin +Date: Fri, 20 Jan 2017 13:33:05 +0200 +Subject: [PATCH 2/2] Add support for prefixing /etc from RPM_ETCCONFIGDIR + environment variable + +This is needed so that rpm can pick up target-specific configuration +from target rootfs instead of its own native sysroot. + +Upstream-Status: Inappropriate [oe-core specific] +Signed-off-by: Alexander Kanavin +--- + lib/rpmrc.c | 19 ++++++++++++++----- + 1 file changed, 14 insertions(+), 5 deletions(-) + +diff --git a/lib/rpmrc.c b/lib/rpmrc.c +index 19fe80f98..6b27b3941 100644 +--- a/lib/rpmrc.c ++++ b/lib/rpmrc.c +@@ -455,10 +455,14 @@ const char * lookupInDefaultTable(const char * name, + static void setDefaults(void) + { + const char *confdir = rpmConfigDir(); ++ const char *etcconfdir = getenv("RPM_ETCCONFIGDIR"); ++ if (etcconfdir == NULL) ++ etcconfdir = ""; ++ + if (!defrcfiles) { + defrcfiles = rstrscat(NULL, confdir, "/rpmrc", ":", + confdir, "/" RPMCANONVENDOR "/rpmrc", ":", +- SYSCONFDIR "/rpmrc", ":"); ++ etcconfdir, SYSCONFDIR "/rpmrc", ":", NULL); + } + + #ifndef MACROFILES +@@ -468,9 +472,9 @@ static void setDefaults(void) + confdir, "/platform/%{_target}/macros", ":", + confdir, "/fileattrs/*.attr", ":", + confdir, "/" RPMCANONVENDOR "/macros", ":", +- SYSCONFDIR "/rpm/macros.*", ":", +- SYSCONFDIR "/rpm/macros", ":", +- SYSCONFDIR "/rpm/%{_target}/macros", ":"); ++ etcconfdir, SYSCONFDIR "/rpm/macros.*", ":", ++ etcconfdir, SYSCONFDIR "/rpm/macros", ":", ++ etcconfdir, SYSCONFDIR "/rpm/%{_target}/macros", ":", NULL); + } + #else + macrofiles = MACROFILES; +@@ -989,7 +993,11 @@ static void read_auxv(void) + */ + static void defaultMachine(rpmrcCtx ctx, const char ** arch, const char ** os) + { +- const char * const platform_path = SYSCONFDIR "/rpm/platform"; ++ const char *etcconfdir = getenv("RPM_ETCCONFIGDIR"); ++ if (etcconfdir == NULL) ++ etcconfdir = ""; ++ ++ const char * const platform_path = rstrscat(NULL, etcconfdir, SYSCONFDIR "/rpm/platform", NULL); + static struct utsname un; + char * chptr; + canonEntry canon; +@@ -1286,6 +1294,7 @@ static void defaultMachine(rpmrcCtx ctx, const char ** arch, const char ** os) + + if (arch) 
*arch = un.machine; + if (os) *os = un.sysname; ++ free(platform_path); + } + + static +-- +2.11.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0011-Do-not-require-that-ELF-binaries-are-executable-to-b.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0011-Do-not-require-that-ELF-binaries-are-executable-to-b.patch new file mode 100644 index 000000000..c910a478e --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0011-Do-not-require-that-ELF-binaries-are-executable-to-b.patch @@ -0,0 +1,32 @@ +From d65d6e8760afbd7f70b22a1f3297a037bc475fea Mon Sep 17 00:00:00 2001 +From: Peter Kjellerstedt +Date: Mon, 15 May 2017 10:21:08 +0200 +Subject: [PATCH 11/13] Do not require that ELF binaries are executable to be + identifiable + +There is nothing that requires, e.g., a DSO to be executable, but it +is still an ELF binary and should be identified as such. + +Upstream probably expects all ELF binaries to be marked as executable, +but rather than imposing such a limitation for OE, allow any file to +be identified as an ELF binary regardless of whether it is executable +or not. + +Upstream-Status: Inappropriate +Signed-off-by: Peter Kjellerstedt +--- + fileattrs/elf.attr | 1 - + 1 file changed, 1 deletion(-) + +diff --git a/fileattrs/elf.attr b/fileattrs/elf.attr +index 595b33e09..bac52649d 100644 +--- a/fileattrs/elf.attr ++++ b/fileattrs/elf.attr +@@ -1,4 +1,3 @@ + %__elf_provides %{_rpmconfigdir}/elfdeps --provides %{?__filter_GLIBC_PRIVATE:--filter-private} + %__elf_requires %{_rpmconfigdir}/elfdeps --requires %{?__filter_GLIBC_PRIVATE:--filter-private} + %__elf_magic ^(setuid )?(setgid )?(sticky )?ELF (32|64)-bit.*$ +-%__elf_flags exeonly +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0012-Use-conditional-to-access-_docdir-in-macros.in.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0012-Use-conditional-to-access-_docdir-in-macros.in.patch new file mode 100644 index 000000000..996da90d4 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0012-Use-conditional-to-access-_docdir-in-macros.in.patch @@ -0,0 +1,36 @@ +From 77808db4036dc4a012c47aca36255549ed764a6a Mon Sep 17 00:00:00 2001 +From: Peter Kjellerstedt +Date: Tue, 16 May 2017 10:58:18 +0200 +Subject: [PATCH 12/13] Use conditional to access %{_docdir} in macros.in + +This avoids the following warning: + +warning: Ignoring invalid regex %{_docdir} + +when runing `rpmdeps -R `, since %{_docdir} is only defined when +parsing a spec file (in parseSpec()). + +Upstream-Status: Accepted [https://github.com/rpm-software-management/rpm/pull/216] +Signed-off-by: Peter Kjellerstedt +--- + macros.in | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/macros.in b/macros.in +index 84ae25275..cca67a500 100644 +--- a/macros.in ++++ b/macros.in +@@ -200,8 +200,8 @@ package or when debugging this package.\ + # Their purpouse is to set up global filtering for all packages. If you need + # to set up specific filtering for your package use %__requires_exclude_from + # and %__provides_exclude_from instead. +-%__global_requires_exclude_from %{_docdir} +-%__global_provides_exclude_from %{_docdir} ++%__global_requires_exclude_from %{?_docdir:%{_docdir}} ++%__global_provides_exclude_from %{?_docdir:%{_docdir}} + + # The path to the gzip executable (legacy, use %{__gzip} instead). 
+ %_gzipbin %{__gzip} +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0013-Add-a-new-option-alldeps-to-rpmdeps.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0013-Add-a-new-option-alldeps-to-rpmdeps.patch new file mode 100644 index 000000000..faaf62960 --- /dev/null +++ b/import-layers/yocto-poky/meta/recipes-devtools/rpm/files/0013-Add-a-new-option-alldeps-to-rpmdeps.patch @@ -0,0 +1,152 @@ +From 3bf20a6116ae3e1a5a3a6907bee7e881b17efb2f Mon Sep 17 00:00:00 2001 +From: Peter Kjellerstedt +Date: Mon, 15 May 2017 11:23:26 +0200 +Subject: [PATCH 13/13] Add a new option --alldeps to rpmdeps + +This will send the output from rpmfcPrint() to stdout. This is an +alternative to using the --rpmfcdebug option, which will send the same +output to stderr. The two options have totally different use cases +though. While --alldeps is used when the output from rpmfcPrint() is +what is wanted, --rpmfcdebug can be used together with the other +output options, e.g., --requires, without affecting their output. + +Upstream-Status: Submitted [https://github.com/rpm-software-management/rpm/pull/220] +Signed-off-by: Peter Kjellerstedt +--- + build/rpmfc.c | 27 ++++++++++++++------------- + build/rpmfc.h | 1 - + tools/rpmdeps.c | 44 +++++++++++++++++++++++++------------------- + 3 files changed, 39 insertions(+), 33 deletions(-) + +diff --git a/build/rpmfc.c b/build/rpmfc.c +index c8e2f876a..44f1cdc9a 100644 +--- a/build/rpmfc.c ++++ b/build/rpmfc.c +@@ -732,7 +732,6 @@ static rpm_color_t rpmfcColor(const char * fmstr) + + void rpmfcPrint(const char * msg, rpmfc fc, FILE * fp) + { +- rpm_color_t fcolor; + int ndx; + int dx; + int fx; +@@ -744,21 +743,23 @@ void rpmfcPrint(const char * msg, rpmfc fc, FILE * fp) + + if (fc) + for (fx = 0; fx < fc->nfiles; fx++) { +- rpmsid cx = fc->fcdictx[fx] + 1; /* id's are one off */ +- fcolor = fc->fcolor[fx]; +- ARGV_t fattrs = fc->fattrs[fx]; +- + fprintf(fp, "%3d %s", fx, fc->fn[fx]); +- if (fcolor != RPMFC_BLACK) ++ if (_rpmfc_debug) { ++ rpmsid cx = fc->fcdictx[fx] + 1; /* id's are one off */ ++ rpm_color_t fcolor = fc->fcolor[fx]; ++ ARGV_t fattrs = fc->fattrs[fx]; ++ ++ if (fcolor != RPMFC_BLACK) + fprintf(fp, "\t0x%x", fc->fcolor[fx]); +- else ++ else + fprintf(fp, "\t%s", rpmstrPoolStr(fc->cdict, cx)); +- if (fattrs) { +- char *attrs = argvJoin(fattrs, ","); +- fprintf(fp, " [%s]", attrs); +- free(attrs); +- } else { +- fprintf(fp, " [none]"); ++ if (fattrs) { ++ char *attrs = argvJoin(fattrs, ","); ++ fprintf(fp, " [%s]", attrs); ++ free(attrs); ++ } else { ++ fprintf(fp, " [none]"); ++ } + } + fprintf(fp, "\n"); + +diff --git a/build/rpmfc.h b/build/rpmfc.h +index dae8ea5b1..3d87b31cf 100644 +--- a/build/rpmfc.h ++++ b/build/rpmfc.h +@@ -45,7 +45,6 @@ typedef const struct rpmfcTokens_s * rpmfcToken; + + /** \ingroup rpmfc + * Print results of file classification. +- * @todo Remove debugging routine. 
+ * @param msg message prefix (NULL for none) + * @param fc file classifier + * @param fp output file handle (NULL for stderr) +diff --git a/tools/rpmdeps.c b/tools/rpmdeps.c +index a414b6343..f260a38c4 100644 +--- a/tools/rpmdeps.c ++++ b/tools/rpmdeps.c +@@ -23,6 +23,8 @@ static int print_conflicts; + + static int print_obsoletes; + ++static int print_alldeps; ++ + static void rpmdsPrint(const char * msg, rpmds ds, FILE * fp) + { + if (fp == NULL) fp = stderr; +@@ -57,6 +59,8 @@ static struct poptOption optionsTable[] = { + NULL, NULL }, + { "obsoletes", '\0', POPT_ARG_VAL, &print_obsoletes, -1, + NULL, NULL }, ++ { "alldeps", '\0', POPT_ARG_VAL, &print_alldeps, -1, ++ NULL, NULL }, + + POPT_AUTOALIAS + POPT_AUTOHELP +@@ -100,25 +104,27 @@ main(int argc, char *argv[]) + if (rpmfcClassify(fc, av, NULL) || rpmfcApply(fc)) + goto exit; + +- if (_rpmfc_debug) +- rpmfcPrint(buf, fc, NULL); +- +- if (print_provides) +- rpmdsPrint(NULL, rpmfcProvides(fc), stdout); +- if (print_requires) +- rpmdsPrint(NULL, rpmfcRequires(fc), stdout); +- if (print_recommends) +- rpmdsPrint(NULL, rpmfcRecommends(fc), stdout); +- if (print_suggests) +- rpmdsPrint(NULL, rpmfcSuggests(fc), stdout); +- if (print_supplements) +- rpmdsPrint(NULL, rpmfcSupplements(fc), stdout); +- if (print_enhances) +- rpmdsPrint(NULL, rpmfcEnhances(fc), stdout); +- if (print_conflicts) +- rpmdsPrint(NULL, rpmfcConflicts(fc), stdout); +- if (print_obsoletes) +- rpmdsPrint(NULL, rpmfcObsoletes(fc), stdout); ++ if (print_alldeps || _rpmfc_debug) ++ rpmfcPrint(NULL, fc, print_alldeps ? stdout : NULL); ++ ++ if (!print_alldeps) { ++ if (print_provides) ++ rpmdsPrint(NULL, rpmfcProvides(fc), stdout); ++ if (print_requires) ++ rpmdsPrint(NULL, rpmfcRequires(fc), stdout); ++ if (print_recommends) ++ rpmdsPrint(NULL, rpmfcRecommends(fc), stdout); ++ if (print_suggests) ++ rpmdsPrint(NULL, rpmfcSuggests(fc), stdout); ++ if (print_supplements) ++ rpmdsPrint(NULL, rpmfcSupplements(fc), stdout); ++ if (print_enhances) ++ rpmdsPrint(NULL, rpmfcEnhances(fc), stdout); ++ if (print_conflicts) ++ rpmdsPrint(NULL, rpmfcConflicts(fc), stdout); ++ if (print_obsoletes) ++ rpmdsPrint(NULL, rpmfcObsoletes(fc), stdout); ++ } + + ec = 0; + +-- +2.12.0 + diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-Disable-__sync_add_and_fetch_8-on-nios2.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-Disable-__sync_add_and_fetch_8-on-nios2.patch deleted file mode 100644 index e9b73b8bf..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-Disable-__sync_add_and_fetch_8-on-nios2.patch +++ /dev/null @@ -1,30 +0,0 @@ -From 06967a50f20095f5ca30b8214f4c98ba0f5262bf Mon Sep 17 00:00:00 2001 -From: Marek Vasut -Date: Sun, 3 Apr 2016 06:55:25 +0200 -Subject: [PATCH] Disable __sync_add_and_fetch_8 on nios2 - -The NIOS2 softcore does not implement the __sync_add_and_fetch_8, -so disable it accordingly. 
- -Signed-off-by: Marek Vasut -Upstream-Status: Submitted ---- - rpmio/bson.h | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/rpmio/bson.h b/rpmio/bson.h -index 57023f1..60c7d02 100644 ---- a/rpmio/bson.h -+++ b/rpmio/bson.h -@@ -880,7 +880,7 @@ BSON_END_DECLS - BSON_BEGIN_DECLS - - /* Some architectures do not support __sync_add_and_fetch_8 */ --#if (__mips == 32) || (defined(__PPC__) && !defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)) -+#if (__mips == 32) || (__nios2__) || (defined(__PPC__) && !defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)) - # define __BSON_NEED_ATOMIC_64 1 - #endif - --- -2.8.0.rc3 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-define-EM_AARCH64.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-define-EM_AARCH64.patch deleted file mode 100644 index c9fb26868..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-define-EM_AARCH64.patch +++ /dev/null @@ -1,35 +0,0 @@ -[PATCH] define EM_AARCH64 - -Upstream-Status: Submitted [RPM5 maintainer] - -EM_AARCH64 maybe not be defined due to old version elf.h, and lead to -that debugedit can not work on aarch64 elf object files, since there is -no other dependence, except these two macro, define them to make -debugedit work on aarch64 elf files. - -Signed-off-by: Roy Li ---- - tools/debugedit.c | 7 +++++++ - 1 file changed, 7 insertions(+) - -diff --git a/tools/debugedit.c b/tools/debugedit.c -index de693ed..d16a641 100644 ---- a/tools/debugedit.c -+++ b/tools/debugedit.c -@@ -35,6 +35,13 @@ - - #include - -+#ifndef EM_AARCH64 -+#define EM_AARCH64 183 /* ARM AARCH64 */ -+#endif -+#ifndef R_AARCH64_ABS32 -+#define R_AARCH64_ABS32 258 -+#endif -+ - /* some defines taken from the dwarf standard */ - - #define DW_TAG_compile_unit 0x11 --- -1.9.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-macros-add-_gpg_sign_cmd_extra_args.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-macros-add-_gpg_sign_cmd_extra_args.patch deleted file mode 100644 index eb43a8734..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-macros-add-_gpg_sign_cmd_extra_args.patch +++ /dev/null @@ -1,43 +0,0 @@ -From fa9726ff69f86d6a87c4c4bd7e3d2881999a872a Mon Sep 17 00:00:00 2001 -From: Markus Lehtonen -Date: Thu, 23 Feb 2017 11:14:20 +0200 -Subject: [PATCH] macros: add %_gpg_sign_cmd_extra_args - -Similar to what rpm4 has. This macro can be used to customize the -gpg command line options when signing packages. This is needed for -gpg 2.1 which requires "--pinentry-mode loopback" to allow -non-interactive signing. 
- -Upstream-Status: Pending - -Signed-off-by: Markus Lehtonen ---- - macros/macros.in | 5 ++++- - 1 file changed, 4 insertions(+), 1 deletion(-) - -diff --git a/macros/macros.in b/macros/macros.in -index 8bc5840..fda3c66 100644 ---- a/macros/macros.in -+++ b/macros/macros.in -@@ -524,7 +524,9 @@ $_arbitrary_tags_tests Foo:Bar - %_gpg_passphrase_way %{?_gpg_passphrase:--passphrase "%{_gpg_passphrase}"}%{!?_gpg_passphrase:--passphrase-fd 3} - - %__gpg_check_password_cmd %{__gpg} \ -- gpg --batch --no-verbose %{_gpg_passphrase_way} -u "%{_gpg_name}" -so - -+ gpg --batch --no-verbose %{_gpg_passphrase_way} \ -+ %{?_gpg_sign_cmd_extra_args:%{_gpg_sign_cmd_extra_args}} \ -+ -u "%{_gpg_name}" -so - - #%__pgp_check_password_cmd %{__pgp} \ - # pgp +batchmode=on +verbose=0 "%{_pgp_name}" -sf - #%__pgp5_check_password_cmd %{__pgp} \ -@@ -532,6 +534,7 @@ $_arbitrary_tags_tests Foo:Bar - - %__gpg_sign_cmd %{__gpg} \ - gpg --batch --no-verbose --no-armor %{_gpg_passphrase_way} --no-secmem-warning \ -+ %{?_gpg_sign_cmd_extra_args:%{_gpg_sign_cmd_extra_args}} \ - -u "%{_gpg_name}" -sbo %{__signature_filename} %{__plaintext_filename} - #%__pgp_sign_cmd %{__pgp} \ - # pgp +batchmode=on +verbose=0 +armor=off \ --- -2.10.2 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-rpm-Fix-build-on-musl.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-rpm-Fix-build-on-musl.patch deleted file mode 100644 index 70dd4ff53..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-rpm-Fix-build-on-musl.patch +++ /dev/null @@ -1,294 +0,0 @@ -From 0af17c2ae86c1e8e42b96f6dface08f535bb55ad Mon Sep 17 00:00:00 2001 -From: Khem Raj -Date: Sun, 14 Feb 2016 08:33:24 +0000 -Subject: [PATCH] rpm: Fix build on musl - -Provide alternatives to assumptions about glibc -on linux - -Signed-off-by: Khem Raj - -Updated to 5.4.16 (CVS) - -The patch will likely need additional rework before it can be accepted upsteam -due to the way MUSL changes are patched in. 
- -Signed-off-by: Mark Hatle - ---- -Upstream-Status: Pending - - lib/poptALL.c | 2 ++ - rpmio/fts.c | 4 ++++ - rpmio/poptIO.c | 2 ++ - rpmqv.c | 2 ++ - system.h | 13 ++++++------- - tools/debugedit.c | 47 +++++++++++++++++++++++++++++++++++++++++++++++ - tools/rpm2cpio.c | 2 ++ - tools/rpmcache.c | 2 ++ - tools/rpmcmp.c | 2 ++ - tools/rpmdeps-oecore.c | 2 ++ - tools/rpmdeps.c | 2 ++ - tools/rpmdigest.c | 2 ++ - tools/rpmfind.c | 6 +++--- - 13 files changed, 78 insertions(+), 10 deletions(-) - -Index: rpm-5.4.15/rpmio/fts.c -=================================================================== ---- rpm-5.4.15.orig/rpmio/fts.c -+++ rpm-5.4.15/rpmio/fts.c -@@ -124,6 +124,10 @@ static char sccsid[] = "@(#)fts.c 8.6 (B - # define __fxstat64(_stat_ver, _fd, _sbp) fstat((_fd), (_sbp)) - #endif - -+#ifndef _STAT_VER -+# define _STAT_VER 0 -+#endif -+ - #if !defined(_D_EXACT_NAMLEN) - # define _D_EXACT_NAMLEN(d) (strlen((d)->d_name)) - #endif -Index: rpm-5.4.15/tools/debugedit.c -=================================================================== ---- rpm-5.4.15.orig/tools/debugedit.c -+++ rpm-5.4.15/tools/debugedit.c -@@ -22,7 +22,12 @@ - #include - #include - #include -+#ifdef __GLIBC__ - #include -+#else -+#include -+void error(int, int, const char *, ...); -+#endif - #include - #include - #include -@@ -1535,6 +1540,48 @@ handle_build_id (DSO *dso, Elf_Data *bui - puts (hex); - } - } -+#ifndef __GLIBC__ -+extern char *__progname; -+ -+void (*error_print_progname)(void) = 0; -+unsigned int error_message_count = 0; -+int error_one_per_line = 0; -+ -+static void eprint(int status, int e, const char *file, unsigned int line, const char *fmt, va_list ap) -+{ -+ if (file && error_one_per_line) { -+ static const char *oldfile; -+ static unsigned int oldline; -+ if (line == oldline && strcmp(file, oldfile) == 0) -+ return; -+ oldfile = file; -+ oldline = line; -+ } -+ if (error_print_progname) -+ error_print_progname(); -+ else -+ fprintf(stderr, "%s: ", __progname); -+ if (file) -+ fprintf(stderr, "%s:%u: ", file, line); -+ vfprintf(stderr, fmt, ap); -+ if (e) -+ fprintf(stderr, ": %s", strerror(e)); -+ putc('\n', stderr); -+ fflush(stderr); -+ error_message_count++; -+ if (status) -+ exit(status); -+} -+ -+void error(int status, int e, const char *fmt, ...) -+{ -+ va_list ap; -+ va_start(ap,fmt); -+ eprint(status, e, 0, 0, fmt, ap); -+ va_end(ap); -+} -+ -+#endif - - /* It avoided the segment fault while file's bss offset have a large number. - See https://bugzilla.redhat.com/show_bug.cgi?id=1019707 -Index: rpm-5.4.15/tools/rpmfind.c -=================================================================== ---- rpm-5.4.15.orig/tools/rpmfind.c -+++ rpm-5.4.15/tools/rpmfind.c -@@ -1175,7 +1175,7 @@ find_parsenum(PLAN *plan, const char *op - * and endchar points to the beginning of the string we know we have - * a syntax error. 
- */ --#if defined(__sun) -+#if defined(__sun) || !defined(__GLIBC_) - value = strtoll(str, &endchar, 10); - #else - value = strtoq(str, &endchar, 10); -@@ -1215,7 +1215,7 @@ find_parsetime(PLAN *plan, const char *o - break; - } - --#if defined(__sun) -+#if defined(__sun) || !defined(__GLIBC_) - value = strtoll(str, &unit, 10); - #else - value = strtoq(str, &unit, 10); -@@ -1253,7 +1253,7 @@ find_parsetime(PLAN *plan, const char *o - str = unit + 1; - if (*str == '\0') /* EOS */ - break; --#if defined(__sun) -+#if defined(__sun) || !defined(__GLIBC_) - value = strtoll(str, &unit, 10); - #else - value = strtoq(str, &unit, 10); -Index: rpm-5.4.15/system.h -=================================================================== ---- rpm-5.4.15.orig/system.h -+++ rpm-5.4.15/system.h -@@ -372,16 +372,15 @@ extern int _tolower(int) __THROW /*@*/; - #define __progname __assert_program_name - #endif - #define setprogname(pn) -+/*@unchecked@*/ -+extern const char *__progname; - #else --#define __progname program_name --#define setprogname(pn) \ -- { if ((__progname = strrchr(pn, '/')) != NULL) __progname++; \ -- else __progname = pn; \ -- } --#endif -+#define setprogname(pn) -+#define progname __progname - - /*@unchecked@*/ --extern const char *__progname; -+extern char *__progname; -+#endif - - /* -- Retrofit missing prototypes (if needed). */ - #ifdef __cplusplus -Index: rpm-5.4.15/rpmio/poptIO.c -=================================================================== ---- rpm-5.4.15.orig/rpmio/poptIO.c -+++ rpm-5.4.15/rpmio/poptIO.c -@@ -65,7 +65,9 @@ extern int _rpmsvn_debug; - GENfree(rpmioP) - #endif /* __cplusplus */ - -+#ifdef __GLIBC__ - const char *__progname; -+#endif - - #if !defined(POPT_ARGFLAG_TOGGLE) /* XXX compat with popt < 1.15 */ - #define POPT_ARGFLAG_TOGGLE 0 -Index: rpm-5.4.15/lib/poptALL.c -=================================================================== ---- rpm-5.4.15.orig/lib/poptALL.c -+++ rpm-5.4.15/lib/poptALL.c -@@ -4,7 +4,9 @@ - */ - - #include "system.h" -+#ifdef __GLIBC__ - extern const char *__progname; -+#endif - - #if defined(RPM_VENDOR_WINDRIVER) - const char *__usrlibrpm = USRLIBRPM; -Index: rpm-5.4.15/tools/rpm2cpio.c -=================================================================== ---- rpm-5.4.15.orig/tools/rpm2cpio.c -+++ rpm-5.4.15/tools/rpm2cpio.c -@@ -1,7 +1,9 @@ - /* rpmarchive: spit out the main archive portion of a package */ - - #include "system.h" -+#ifdef __GLIBC__ - const char *__progname; -+#endif - - #include - #include /* XXX fnpyKey */ -Index: rpm-5.4.15/tools/rpmcache.c -=================================================================== ---- rpm-5.4.15.orig/tools/rpmcache.c -+++ rpm-5.4.15/tools/rpmcache.c -@@ -3,7 +3,9 @@ - */ - - #include "system.h" -+#ifdef __GLIBC__ - const char *__progname; -+#endif - - #include - #include -Index: rpm-5.4.15/tools/rpmdeps-oecore.c -=================================================================== ---- rpm-5.4.15.orig/tools/rpmdeps-oecore.c -+++ rpm-5.4.15/tools/rpmdeps-oecore.c -@@ -1,5 +1,7 @@ - #include "system.h" -+#ifdef __GLIBC__ - const char *__progname; -+#endif - - #include - #include -Index: rpm-5.4.15/tools/rpmdeps.c -=================================================================== ---- rpm-5.4.15.orig/tools/rpmdeps.c -+++ rpm-5.4.15/tools/rpmdeps.c -@@ -1,5 +1,7 @@ - #include "system.h" -+#ifdef __GLIBC__ - const char *__progname; -+#endif - - #include - #include -Index: rpm-5.4.15/tools/rpmdigest.c -=================================================================== ---- 
rpm-5.4.15.orig/tools/rpmdigest.c -+++ rpm-5.4.15/tools/rpmdigest.c -@@ -1,6 +1,8 @@ - #include "system.h" -+#ifdef __GLIBC__ - /*@unchecked@*/ - extern const char * __progname; -+#endif - - #define _RPMIOB_INTERNAL - #include -Index: rpm-5.4.15/tools/rpmcmp.c -=================================================================== ---- rpm-5.4.15.orig/tools/rpmcmp.c -+++ rpm-5.4.15/tools/rpmcmp.c -@@ -13,8 +13,10 @@ - - #include "debug.h" - -+#ifdef __GLIBC__ - const char *__progname; - #define progname __progname -+#endif - - static int pointRpmEVR(ARGV_t av) - { -Index: rpm-5.4.15/rpmqv.c -=================================================================== ---- rpm-5.4.15.orig/rpmqv.c -+++ rpm-5.4.15/rpmqv.c -@@ -1,5 +1,7 @@ - #include "system.h" -+#ifdef __GLIBC__ - extern const char *__progname; -+#endif - - /* Copyright (C) 1998-2002 - Red Hat, Inc. */ - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-system.h-query.c-support-nosignature.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-system.h-query.c-support-nosignature.patch deleted file mode 100644 index 77dc5b67d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-system.h-query.c-support-nosignature.patch +++ /dev/null @@ -1,63 +0,0 @@ -From d8fcc3cbeec4defb5babde5bd09ea294dd3fd08b Mon Sep 17 00:00:00 2001 -From: Robert Yang -Date: Sun, 3 Jul 2016 19:01:59 -0700 -Subject: [PATCH] system.h/query.c: support nosignature - -* Define SUPPORT_NOSIGNATURES to 1 in system.h -* !QVA_ISSET(qva->qva_flags, SIGNATURE) -> QVA_ISSET(qva->qva_flags, SIGNATURE), - otherwise, when use --nosignature would read database and verify - signature, this is not expected. - -Upstream-Status: Submitted [Sent email to rpm-devel@rpm5.org] - -Signed-off-by: Robert Yang ---- - lib/query.c | 6 +++--- - system.h | 4 ++-- - 2 files changed, 5 insertions(+), 5 deletions(-) - -diff --git a/lib/query.c b/lib/query.c -index 50a7453..b761d76 100644 ---- a/lib/query.c -+++ b/lib/query.c -@@ -954,19 +954,19 @@ JBJDEBUG((stderr, "--> %s(%p,%p,%p)\n", __FUNCTION__, ts, qva, argv)); - vsflags = (rpmVSFlags) rpmExpandNumeric("%{?_vsflags_query}"); - vsflags = (rpmVSFlags) 0; /* XXX FIXME: ignore default disablers. */ - #if defined(SUPPORT_NOSIGNATURES) -- if (!QVA_ISSET(qva->qva_flags, DIGEST)) { -+ if (QVA_ISSET(qva->qva_flags, DIGEST)) { - VSF_SET(vsflags, NOSHA1HEADER); - VSF_SET(vsflags, NOMD5HEADER); - VSF_SET(vsflags, NOSHA1); - VSF_SET(vsflags, NOMD5); - } -- if (!QVA_ISSET(qva->qva_flags, SIGNATURE)) { -+ if (QVA_ISSET(qva->qva_flags, SIGNATURE)) { - VSF_SET(vsflags, NODSAHEADER); - VSF_SET(vsflags, NORSAHEADER); - VSF_SET(vsflags, NODSA); - VSF_SET(vsflags, NORSA); - } -- if (!QVA_ISSET(qva->qva_flags, HDRCHK)) { -+ if (QVA_ISSET(qva->qva_flags, HDRCHK)) { - VSF_SET(vsflags, NOHDRCHK); - } - VSF_CLR(vsflags, NEEDPAYLOAD); /* XXX needed? */ -diff --git a/system.h b/system.h -index 2ff8906..ad4619a 100644 ---- a/system.h -+++ b/system.h -@@ -787,9 +787,9 @@ static inline const char *rcsid(const char *p) { \ - #define SUPPORT_I18NSTRING_TYPE 1 - - /** -- * Eliminate signature/digest disablers. -+ * Signature/digest disablers. - */ --#undef SUPPORT_NOSIGNATURES -+#define SUPPORT_NOSIGNATURES 1 - - /** - * Permit ar(1) payloads. Disabled while rpmio/iosm.c is under development. 
--- -2.9.0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-using-poptParseArgvString-to-parse-the-_gpg_check_pa.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-using-poptParseArgvString-to-parse-the-_gpg_check_pa.patch deleted file mode 100644 index 712825009..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/0001-using-poptParseArgvString-to-parse-the-_gpg_check_pa.patch +++ /dev/null @@ -1,49 +0,0 @@ -From 64851c6622aff64787a9fcea26cccde183b7c743 Mon Sep 17 00:00:00 2001 -From: "Roy.Li" -Date: Tue, 11 Nov 2014 16:28:22 +0800 -Subject: [PATCH] using poptParseArgvString to parse the - _gpg_check_password_cmd - -Upstream-Status: Submitted [RPM5 maintainer] - -Both __gpg_check_password_cmd and __gpg_sign_cmd include "%{_gpg_name}", but -strace shows that gpg_name has a quote when run _gpg_check_password, -but not when run __gpg_sign_cmd; for example, if gpg_name is "tester" - - execve("/usr/bin/gpg", ["gpg", "--batch", "--no-verbose", - "--passphrase-fd", "3", "-u", "\"tester\"", "-so", "-"], [/* 20 vars */]) = 0 - - execve("/usr/bin/gpg", ["gpg", "--batch", "--no-verbose", "--no-armor", - "--passphrase-fd", "3", "--no-secmem-warning", "-u", "tester", "-sbo"..,) = 0 - -it can be fixed by removing the quote around %{gpg_name} when define -__gpg_check_password_cmd in macros/macros, like below, but if gpg_name includes -space, it will not work. - - %__gpg_check_password_cmd %{__gpg} \ - gpg --batch --no-verbose --passphrase-fd 3 -u %{_gpg_name} -so - - -The poptParseArgvString function is used to parse _gpg_sign_cmd, so using -poptParseArgvString to parse __gpg_check_password_cmd to fix this issue. - -Signed-off-by: Roy.Li ---- - rpmdb/signature.c | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/rpmdb/signature.c b/rpmdb/signature.c -index c35e0ab..016e8d1 100644 ---- a/rpmdb/signature.c -+++ b/rpmdb/signature.c -@@ -529,7 +529,7 @@ int rpmCheckPassPhrase(const char * passPhrase) - (void) setenv("GNUPGHOME", gpg_path, 1); - - cmd = rpmExpand("%{?__gpg_check_password_cmd}", NULL); -- rc = argvSplit(&av, cmd, NULL); -+ rc = poptParseArgvString(cmd, NULL, (const char ***)&av); - if (!rc) - rc = execve(av[0], (char *const *)av+1, environ); - --- -1.9.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/configure.ac-check-for-both-gpg2-and-gpg.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/configure.ac-check-for-both-gpg2-and-gpg.patch deleted file mode 100644 index 7894a4263..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/configure.ac-check-for-both-gpg2-and-gpg.patch +++ /dev/null @@ -1,29 +0,0 @@ -configure.ac: search for both gpg2 and gpg - -On some platforms the GnuPG binary is named 'gpg2' whereas others have 'gpg'. -This patch increases compatibility by searching for 'gpg' in addition to -'gpg2'. 
- -Upstream-Status: Pending - -Signed-off-by: Markus Lehtonen ---- - configure.ac | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/configure.ac b/configure.ac -index 6746b4c..f6922ae 100644 ---- a/configure.ac -+++ b/configure.ac -@@ -562,7 +562,7 @@ AC_PATH_PROG(__DIFF, diff, /bin/diff, $MYPATH) - AC_PATH_PROG(__DITTO, ditto, %{_bindir}/ditto, $MYPATH) - AC_PATH_PROG(__FILE, file, %{_bindir}/file, $MYPATH) - AC_PATH_PROG(__GIT, git, %{_bindir}/git, $MYPATH) --AC_PATH_PROG(__GPG, gpg2, %{_bindir}/gpg2, $MYPATH) -+AC_PATH_PROGS(__GPG, [gpg2 gpg], %{_bindir}/gpg2, $MYPATH) - AC_PATH_PROG(__GSR, gsr, %{_bindir}/gsr, $MYPATH) - AC_PATH_PROG(__GST_INSPECT, gst-inspect-0.10, %{_bindir}/gst-inspect-0.10, $MYPATH) - AC_PATH_PROG(__GZIP, gzip, /bin/gzip, $MYPATH) --- -2.1.4 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/dbconvert.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/dbconvert.patch deleted file mode 100644 index 1fdbf0908..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/dbconvert.patch +++ /dev/null @@ -1,27 +0,0 @@ -Upstream-Status: Inappropriate - -Hack to prevent unneeded demo app from building on older libc -where it will break the build - -Index: rpm-5.4.14/tools/Makefile.am -=================================================================== ---- rpm-5.4.14.orig/tools/Makefile.am -+++ rpm-5.4.14/tools/Makefile.am -@@ -29,7 +29,7 @@ EXTRA_PROGRAMS = nix-copy-closure nix-en - xiu-instantiate xiu-store - noinst_PROGRAMS = - --EXTRA_PROGRAMS += augtool cudftool dbconvert debugedit \ -+EXTRA_PROGRAMS += augtool cudftool debugedit \ - nix-build nix-channel nix-collect-garbage \ - nix-log2xml nix-prefetch-url nix-pull nix-push \ - xiu-echo xiu-hash \ -@@ -64,7 +64,7 @@ pkgbin_PROGRAMS = \ - rpmcache rpmdigest rpmrepo rpmspecdump \ - rpmcmp rpmdeps rpmdeps-oecore sqlite3 @WITH_KEYUTILS_RPMKEY@ @WITH_LIBELF_DEBUGEDIT@ - if WITH_DB --pkgbin_PROGRAMS += dbconvert -+pkgbin_PROGRAMS += - endif - dist_man_MANS = rpmgrep.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/debugedit-segv.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/debugedit-segv.patch deleted file mode 100644 index c83c8b5f3..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/debugedit-segv.patch +++ /dev/null @@ -1,100 +0,0 @@ -During the recalculation of the buildid, it's necessary to change the word -back to the original endian. However, if we do this in-place, we've also -affected the headers that we're also working on. The side effect of this is -we can no longer rely on 'sh_type' as it may have been changed. - -This patch ensures that any time we translate the loaded data to the machine -format, we only do it in a backup copy and never the original copy. - -Note: in all other places a backup copy was used, just not buildid processing. - -Also the process (...) function was modified to verify the data is not -NULL as well. This is an extra check and is not strictly necessary. 
- -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm/tools/debugedit.c -=================================================================== ---- rpm.orig/tools/debugedit.c -+++ rpm/tools/debugedit.c -@@ -1403,7 +1403,8 @@ static inline void process (hashFunction - const void *data, size_t size) - { - memchunk chunk = { .data = (void *) data, .size = size }; -- hashFunctionContextUpdateMC (ctx, &chunk); -+ if (data != NULL && size != 0) -+ hashFunctionContextUpdateMC (ctx, &chunk); - } - - /* Compute a fresh build ID bit-string from the editted file contents. */ -@@ -1456,14 +1457,16 @@ handle_build_id (DSO *dso, Elf_Data *bui - GElf_Ehdr ehdr; - GElf_Phdr phdr; - GElf_Shdr shdr; -- } u; -- Elf_Data x = { .d_version = EV_CURRENT, .d_buf = &u }; -- -- x.d_type = ELF_T_EHDR; -- x.d_size = sizeof u.ehdr; -- u.ehdr = dso->ehdr; -- u.ehdr.e_phoff = u.ehdr.e_shoff = 0; -- if (elf64_xlatetom (&x, &x, dso->ehdr.e_ident[EI_DATA]) == NULL) -+ } u1, u2; -+ Elf_Data src = { .d_version = EV_CURRENT, .d_buf = &u1 }; -+ Elf_Data dest = { .d_version = EV_CURRENT, .d_buf = &u2 }; -+ -+ src.d_type = ELF_T_EHDR; -+ src.d_size = sizeof u1.ehdr; -+ dest.d_size = sizeof u2.ehdr; -+ u1.ehdr = dso->ehdr; -+ u1.ehdr.e_phoff = u1.ehdr.e_shoff = 0; -+ if (elf64_xlatetom (&dest, &src, dso->ehdr.e_ident[EI_DATA]) == NULL) - { - bad: - fprintf (stderr, "Failed to compute header checksum: %s\n", -@@ -1471,29 +1474,31 @@ handle_build_id (DSO *dso, Elf_Data *bui - exit (1); - } - -- x.d_type = ELF_T_PHDR; -- x.d_size = sizeof u.phdr; -+ src.d_type = ELF_T_PHDR; -+ src.d_size = sizeof u1.phdr; -+ dest.d_size = sizeof u2.phdr; - for (i = 0; i < dso->ehdr.e_phnum; ++i) - { -- if (gelf_getphdr (dso->elf, i, &u.phdr) == NULL) -+ if (gelf_getphdr (dso->elf, i, &u1.phdr) == NULL) - goto bad; -- if (elf64_xlatetom (&x, &x, dso->ehdr.e_ident[EI_DATA]) == NULL) -+ if (elf64_xlatetom (&dest, &src, dso->ehdr.e_ident[EI_DATA]) == NULL) - goto bad; -- process (&ctx, x.d_buf, x.d_size); -+ process (&ctx, dest.d_buf, dest.d_size); - } - -- x.d_type = ELF_T_SHDR; -- x.d_size = sizeof u.shdr; -+ src.d_type = ELF_T_SHDR; -+ src.d_size = sizeof u1.shdr; -+ dest.d_size = sizeof u2.shdr; - for (i = 0; i < dso->ehdr.e_shnum; ++i) - if (dso->scn[i] != NULL) - { -- u.shdr = dso->shdr[i]; -- u.shdr.sh_offset = 0; -- if (elf64_xlatetom (&x, &x, dso->ehdr.e_ident[EI_DATA]) == NULL) -+ u1.shdr = dso->shdr[i]; -+ u1.shdr.sh_offset = 0; -+ if (elf64_xlatetom (&dest, &src, dso->ehdr.e_ident[EI_DATA]) == NULL) - goto bad; -- process (&ctx, x.d_buf, x.d_size); -+ process (&ctx, dest.d_buf, dest.d_size); - -- if (u.shdr.sh_type != SHT_NOBITS) -+ if (u1.shdr.sh_type != SHT_NOBITS) - { - Elf_Data *d = elf_rawdata (dso->scn[i], NULL); - if (d == NULL) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/debugedit-valid-file-to-fix-segment-fault.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/debugedit-valid-file-to-fix-segment-fault.patch deleted file mode 100644 index 804048253..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/debugedit-valid-file-to-fix-segment-fault.patch +++ /dev/null @@ -1,65 +0,0 @@ -debugedit: fix segment fault while file's bss offset have a large number - -While ELF_C_RDWR_MMAP was used, elf_begin invoked mmap() to map file -into memory. While the file's bss Offset has a large number, elf_update -caculated file size by __elf64_updatenull_wrlock and the size was -enlarged. 
- -In this situation, elf_update invoked ftruncate to enlarge the file, -and memory size (elf->maximum_size) also was incorrectly updated. -There was segment fault in elf_end which invoked munmap with the -length is the enlarged file size, not the mmap's length. - -Before the above operations, invoke elf_begin/elf_update/elf_end -with ELF_C_RDWR and ELF_F_LAYOUT set to enlarge the above file, it -could make sure the file is safe for the following elf operations. - -Upstream-Status: Submitted [RPM5 maintainer] -Signed-off-by: Hongxu Jia ---- - tools/debugedit.c | 25 +++++++++++++++++++++++++ - 1 file changed, 25 insertions(+) - -Index: rpm-5.4.14/tools/debugedit.c -=================================================================== ---- rpm-5.4.14.orig/tools/debugedit.c -+++ rpm-5.4.14/tools/debugedit.c -@@ -1525,6 +1525,28 @@ handle_build_id (DSO *dso, Elf_Data *bui - } - } - -+/* It avoided the segment fault while file's bss offset have a large number. -+ See https://bugzilla.redhat.com/show_bug.cgi?id=1019707 -+ https://bugzilla.redhat.com/show_bug.cgi?id=1020842 for detail. */ -+void valid_file(int fd) -+{ -+ Elf *elf = elf_begin (fd, ELF_C_RDWR, NULL); -+ if (elf == NULL) -+ { -+ error (1, 0, "elf_begin: %s", elf_errmsg (-1)); -+ return; -+ } -+ -+ elf_flagelf (elf, ELF_C_SET, ELF_F_LAYOUT); -+ -+ if (elf_update (elf, ELF_C_WRITE) < 0) -+ error (1, 0, "elf_update: %s", elf_errmsg (-1)); -+ -+ elf_end (elf); -+ -+ return; -+} -+ - int - main (int argc, char *argv[]) - { -@@ -1621,6 +1643,9 @@ main (int argc, char *argv[]) - exit (1); - } - -+ /* Make sure the file is valid. */ -+ valid_file(fd); -+ - dso = fdopen_dso (fd, file); - if (dso == NULL) - exit (1); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/gcc6-stdlib.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/gcc6-stdlib.patch deleted file mode 100644 index 0a372c6da..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/gcc6-stdlib.patch +++ /dev/null @@ -1,54 +0,0 @@ -gcc6 has fixed a long standing c++ include issue where -was different from inclusion via - -https://gcc.gnu.org/ml/libstdc++/2016-01/msg00025.html - -and its also descibed in https://gcc.gnu.org/gcc-6/porting_to.html -rpmio component uses some .cpp and .cc fies which need to use -C stdlib.h from C library and not the C++ libstdc++ header -therefore we pass _GLIBCXX_INCLUDE_NEXT_C_HEADERS so that it -keeps the old behavior - -/a/build/tmp/sysroots/raspberrypi2/usr/include/c++/6.0.1/cstdlib:143:11: error: '::getenv' has not been declared - using ::getenv; - ^~~~~~ -In file included from ../../rpm-5.4.15/system.h:201:0, - from ../../rpm-5.4.15/rpmio/rpmjs.cpp:1: -/a/build/tmp/sysroots/raspberrypi2/usr/include/c++/6.0.1/stdlib.h:62:12: error: 'std::getenv' has not been declared - using std::getenv; - -Upstream-Status: Pending -Signed-off-by: Khem Raj - -Index: rpm-5.4.15/rpmio/Makefile.am -=================================================================== ---- rpm-5.4.15.orig/rpmio/Makefile.am -+++ rpm-5.4.15/rpmio/Makefile.am -@@ -151,7 +151,7 @@ librpmio_la_SOURCES = \ - groestl.c hamsi.c jh.c keccak.c lane.c luffa.c md2.c md6.c radiogatun.c\ - salsa10.c salsa20.c shabal.c shavite3.c simd.c skein.c tib3.c tiger.c \ - rpmgit.c rpmio-stub.c \ -- rpmjs.cpp rpmjsio.c rpmkeyring.c \ -+ rpmjni.cc rpmjs.cpp rpmjsio.c rpmkeyring.c \ - rpmnix.c rpmodbc.c rpmsql.c set.c \ - ar.c \ - argv.c \ -@@ -195,7 +195,6 @@ librpmio_la_SOURCES = \ - rpmhook.c \ - rpmio.c \ - rpmiob.c \ -- rpmjni.cc \ - rpmku.c \ - rpmlog.c \ - rpmltc.c 
\ -@@ -279,7 +278,9 @@ keccak.lo: $(top_srcdir)/rpmio/keccak.c - #rpmjs.lo: $(top_srcdir)/rpmio/rpmjs.c - # @$(LTCOMPILE) -O0 -c $< - rpmjs.lo: $(top_srcdir)/rpmio/rpmjs.cpp -- @$(LTCOMPILE) -O0 -c $< -+ @$(LTCOMPILE) -O0 -c -D_GLIBCXX_INCLUDE_NEXT_C_HEADERS $< -+rpmjni.lo: $(top_srcdir)/rpmio/rpmjni.cc -+ @$(LTCOMPILE) -O0 -c -D_GLIBCXX_INCLUDE_NEXT_C_HEADERS $< - - YACC = byacc -d - getdate.c: getdate.y diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/header-include-fix.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/header-include-fix.patch deleted file mode 100644 index e72df45b5..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/header-include-fix.patch +++ /dev/null @@ -1,36 +0,0 @@ -Update two rpm headers to include other headers. - -Using rpmdb.h w/o including errno.h may result in a warning. - -Using rpmtag.h w/o also adding stdint.h will result in numerous failures -about unknown types on modern compilers. - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Qing He -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/rpmdb/rpmdb.h -=================================================================== ---- rpm-5.4.14.orig/rpmdb/rpmdb.h -+++ rpm-5.4.14/rpmdb/rpmdb.h -@@ -9,6 +9,7 @@ - - #include - #include -+#include - - #include - #include /* XXX Header typedef */ -Index: rpm-5.4.14/rpmdb/rpmtag.h -=================================================================== ---- rpm-5.4.14.orig/rpmdb/rpmtag.h -+++ rpm-5.4.14/rpmdb/rpmtag.h -@@ -7,6 +7,7 @@ - - #include - #include -+#include - - #ifdef __cplusplus - extern "C" { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/makefile-am-exec-hook.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/makefile-am-exec-hook.patch deleted file mode 100644 index 5d936db5a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/makefile-am-exec-hook.patch +++ /dev/null @@ -1,33 +0,0 @@ -rpm: Resolve parallel install issue when installing lua - -When lua is being installed on some systems that are being run with high levels -of parallelization there are cases where install-data-am and install-exec-hook -are processed at the same or a very short time apart. This causes -install-pkgbinPROGRAMS, which is a dependency of both, to be run at around the -same time This sometimes causes file contention and will sometimes be in a state -where install-exec-hook is looking for a file that is being installed or both -install-pkgbinPROGRAMS being run are installing the same file and fail because -it cannon create a new file. - -This patch allows install-exec-hook to be called by install-data-am instead of -install-exec-am. It also removed the dependency in install-data-hook on -install-pkgbinPROGRAMS. This means install-pkgbinPROGRAMS will only be run once -so there whould be any file contention. - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Morgan Little - -Index: rpm-5.4.14/lua/Makefile.am -=================================================================== ---- rpm-5.4.14.orig/lua/Makefile.am -+++ rpm-5.4.14/lua/Makefile.am -@@ -326,7 +326,7 @@ clean-local: - # XXX Build & install as rpmlua/rpmluac with hardlinks to lua/luac post install. - # XXX CVS has lua/luac sub-directories in the Attic that collide with the - # XXX lua/luac executable names when using cvs update. 
--install-exec-hook: install-pkgbinPROGRAMS -+install-data-hook: - mkdir -p $(DESTDIR)$(pkgbindir) - $(__RM) -f $(DESTDIR)$(pkgbindir)/lua - $(__LN) $(DESTDIR)$(pkgbindir)/rpmlua $(DESTDIR)$(pkgbindir)/lua diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/no-ldflags-in-pkgconfig.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/no-ldflags-in-pkgconfig.patch deleted file mode 100644 index 410623fab..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/no-ldflags-in-pkgconfig.patch +++ /dev/null @@ -1,14 +0,0 @@ -Don't put LDFLAGS into the Libs.private, all it generally contains is a linker -hash selection (harmless) and the absolute path to the libdir in the sysroot -(actively harmful). - -Upstream-Status: Pending -Signed-off-by: Ross Burton - -diff --git a/scripts/rpm.pc.in b/scripts/rpm.pc.in -index 8293471..2ea469c 100644 ---- a/scripts/rpm.pc.in -+++ b/scripts/rpm.pc.in -@@ -17 +17 @@ Libs: -L${libdir} -lrpmbuild${suffix} -lrpm${suffix} -lrpmdb${suffix} -lrpmio${s --Libs.private: @LDFLAGS@ @LIBS@ -+Libs.private: @LIBS@ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/perfile_rpmdeps.sh b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/perfile_rpmdeps.sh deleted file mode 100755 index b72c9f026..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/perfile_rpmdeps.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash - -: ${RPMDEPS:=rpmdeps} - -process() { - while read file_name ; do - printf "%s\t" ${file_name} - if [ ! -d $file_name ]; then - printf "%s " $($RPMDEPS $1 $file_name | sed -e 's,rpmlib(.*,,' -e 's,\([<>\=]\+ \+[^ ]*\),(\1),g') - fi - printf "\n" - done -} - -usage() { - echo "$0 {-P|--provides} {-R|--requires} FILE ..." -} - -while [ $# -gt 0 ]; do - case "$1" in - --rpmdeps) - RPMDEPS=$2 - shift - shift - ;; - -R|--requires) - process_type=--requires - shift - ;; - -P|--provides) - process_type=--provides - shift - ;; - *) - break; - ;; - esac -done - -if [ -z "$process_type" ]; then - usage - exit 1 -fi - -if [ $# -gt 0 ]; then - find "$@" | process $process_type - exit $? 
-fi - -process $process_type diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/popt-disable-auto-stack-protector.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/popt-disable-auto-stack-protector.patch deleted file mode 100644 index bcad8dcb7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/popt-disable-auto-stack-protector.patch +++ /dev/null @@ -1,27 +0,0 @@ -popt: Disable default stack protection on internal version of popt - -Upstream-Status: Inappropriate [configuration] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.15/popt/configure.ac -=================================================================== ---- rpm-5.4.15.orig/popt/configure.ac -+++ rpm-5.4.15/popt/configure.ac -@@ -123,7 +123,6 @@ AS_IF([test "x$popt_gcc_warnings" = xyes - popt_CFLAGS_ADD([-Wjump-misses-init],[POPT_CFLAGS]) - popt_CFLAGS_ADD([-Wno-format-nonliteral],[POPT_CFLAGS]) - popt_CFLAGS_ADD([-Wframe-larger-than=$MAX_STACK_SIZE],[POPT_CFLAGS]) -- popt_CFLAGS_ADD([-fstack-protector-all],[POPT_CFLAGS]) - popt_CFLAGS_ADD([-fasynchronous-unwind-tables],[POPT_CFLAGS]) - popt_CFLAGS_ADD([-fdiagnostics-show-option],[POPT_CFLAGS]) - popt_CFLAGS_ADD([-funit-at-a-time],[POPT_CFLAGS]) -@@ -203,7 +202,7 @@ AC_SUBST([POPT_LDFLAGS]) - # -fno-delete-null-pointer as the kernel does http://patchwork.kernel.org/patch/36060/ - # GNU GCC (usually "gcc") - AS_IF([test "x$GCC" != x], -- [ for c in -fno-delete-null-pointer-checks -Wp,-D_FORTIFY_SOURCE=2 -fstack-protector -fexceptions -+ [ for c in -fno-delete-null-pointer-checks -fexceptions - do - popt_CFLAGS_ADD([$c], [POPT_CFLAGS]) - done diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/python-rpm-rpmsense.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/python-rpm-rpmsense.patch deleted file mode 100644 index 590f58d89..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/python-rpm-rpmsense.patch +++ /dev/null @@ -1,31 +0,0 @@ -rpmmodule.c: Export a few additional RPMSENSE values - -We want to see the RPMSENSE_SCRIPT values for use with SMART. We also -want to see the MISSINGOK value so we can avoid recommended packages causing -failures. 
- -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm/python/rpmmodule.c -=================================================================== ---- rpm.orig/python/rpmmodule.c -+++ rpm/python/rpmmodule.c -@@ -525,12 +525,15 @@ static int initModule(PyObject *m) - REGISTER_ENUM(RPMSENSE_PREREQ); - REGISTER_ENUM(RPMSENSE_PRETRANS); - REGISTER_ENUM(RPMSENSE_INTERP); -+#else -+ #if defined(RPM_VENDOR_WINDRIVER) || defined(RPM_VENDOR_OE) - REGISTER_ENUM(RPMSENSE_SCRIPT_PRE); - REGISTER_ENUM(RPMSENSE_SCRIPT_POST); - REGISTER_ENUM(RPMSENSE_SCRIPT_PREUN); - REGISTER_ENUM(RPMSENSE_SCRIPT_POSTUN); - REGISTER_ENUM(RPMSENSE_SCRIPT_VERIFY); --#else -+ REGISTER_ENUM(RPMSENSE_MISSINGOK); -+ #endif - REGISTER_ENUM(RPMSENSE_NOTEQUAL); - #endif - REGISTER_ENUM(RPMSENSE_FIND_REQUIRES); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/pythondeps.sh b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/pythondeps.sh deleted file mode 100755 index 083b174f1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/pythondeps.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh - -[ $# -ge 1 ] || { - cat > /dev/null - exit 0 -} - -case $1 in --R|--requires) - shift - grep "/usr/\(lib[^/]*\|share\)/python[^/]*/" >/dev/null && echo "python" - exit 0 - ;; -esac - -exit 0 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-atomic-ops.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-atomic-ops.patch deleted file mode 100644 index cc241f4f1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-atomic-ops.patch +++ /dev/null @@ -1,73 +0,0 @@ -Some architectures do not have __sync_add_and_fetch_8 implemented. - -MIPS (32-bit) and some PPC systems do not have sync_add_and_fetch_8. - -Provide an alternative. This alternative function is based on code from: - https://github.com/mongodb/libbson/blob/master/src/bson/bson-atomic.c - -Code is under an Apache 2.0 License. 
- -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.15/rpmio/bson.h -=================================================================== ---- rpm-5.4.15.orig/rpmio/bson.h -+++ rpm-5.4.15/rpmio/bson.h -@@ -879,10 +879,18 @@ BSON_END_DECLS - - BSON_BEGIN_DECLS - -+/* Some architectures do not support __sync_add_and_fetch_8 */ -+#if (__mips == 32) || (defined(__PPC__) && !defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)) -+# define __BSON_NEED_ATOMIC_64 1 -+#endif - - #if defined(__GNUC__) - # define bson_atomic_int_add(p, v) (__sync_add_and_fetch(p, v)) --# define bson_atomic_int64_add(p, v) (__sync_add_and_fetch_8(p, v)) -+#ifndef __BSON_NEED_ATOMIC_64 -+# define bson_atomic_int64_add(p, v) (__sync_add_and_fetch_8(p, v)) -+# else -+ int64_t bson_atomic_int64_add (volatile int64_t *p, int64_t n); -+# endif - # define bson_memory_barrier __sync_synchronize - #elif defined(_MSC_VER) || defined(_WIN32) - # define bson_atomic_int_add(p, v) (InterlockedExchangeAdd((long int *)(p), v)) -Index: rpm-5.4.15/rpmio/bson.c -=================================================================== ---- rpm-5.4.15.orig/rpmio/bson.c -+++ rpm-5.4.15/rpmio/bson.c -@@ -3863,13 +3863,30 @@ _bson_context_get_oid_seq64_threadsafe ( - #elif defined BSON_OS_WIN32 - uint64_t seq = InterlockedIncrement64 ((int64_t *)&context->seq64); - #else -- uint64_t seq = __sync_fetch_and_add_8 (&context->seq64, 1); -+ uint64_t seq = bson_atomic_int64_add (&context->seq64, 1); - #endif - - seq = BSON_UINT64_TO_BE (seq); - memcpy (&oid->bytes[4], &seq, 8); - } - -+#ifdef __BSON_NEED_ATOMIC_64 -+#include -+static pthread_mutex_t gSync64 = PTHREAD_MUTEX_INITIALIZER; -+int64_t -+bson_atomic_int64_add (volatile int64_t *p, -+ int64_t n) -+{ -+ int64_t ret; -+ -+ pthread_mutex_lock (&gSync64); -+ *p += n; -+ ret = *p; -+ pthread_mutex_unlock (&gSync64); -+ -+ return ret; -+} -+#endif - - /** - * bson_context_new: diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-autogen-force.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-autogen-force.patch deleted file mode 100644 index 258a7f6e7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-autogen-force.patch +++ /dev/null @@ -1,78 +0,0 @@ -In order to enable musl or other libc support, force update the config.guess - -In order to reliably replace config.guess and config.sub, we need to remove -them prior to the call to automake. Adding the --force-missing is likely -not necessary, but matching normal OE autoreconf usage. 
- -Upstream-Status: Inappropriate [configuration] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.15/autogen.sh -=================================================================== ---- rpm-5.4.15.orig/autogen.sh -+++ rpm-5.4.15/autogen.sh -@@ -73,7 +73,8 @@ rm -f aclocal.m4 - aclocal -I m4 - autoheader -I m4 - echo "---> generate files via GNU automake (automake)" --automake -Wall -Wno-override -a -c -+rm -f config.guess config.sub -+automake -Wall -Wno-override -a -c --force-missing - echo "---> generate files via GNU autoconf (autoconf)" - autoconf -I m4 - echo "<=== rpm" -Index: rpm-5.4.15/beecrypt/autogen.sh -=================================================================== ---- rpm-5.4.15.orig/beecrypt/autogen.sh -+++ rpm-5.4.15/beecrypt/autogen.sh -@@ -25,6 +25,7 @@ libtoolize () { - - libtoolize --force --copy - aclocal --automake -a -c -+rm -f config.guess config.sub -+automake -a -c --force-missing - autoconf - autoheader -Index: rpm-5.4.15/libtpm/autogen.sh -=================================================================== ---- rpm-5.4.15.orig/libtpm/autogen.sh -+++ rpm-5.4.15/libtpm/autogen.sh -@@ -46,7 +46,8 @@ echo "---> generate files via GNU autoco - aclocal - autoheader - echo "---> generate files via GNU automake (automake)" --automake -Wall -Wno-override -a -c -+rm -f config.guess config.sub -+automake -Wall -Wno-override -a -c --force-missing - echo "---> generate files via GNU autoconf (autoconf)" - autoconf - -Index: rpm-5.4.15/neon/autogen.sh -=================================================================== ---- rpm-5.4.15.orig/neon/autogen.sh -+++ rpm-5.4.15/neon/autogen.sh -@@ -63,7 +63,8 @@ echo "---> generate files via GNU autoco - ${ACLOCAL:-aclocal} -I macros - ${AUTOHEADER:-autoheader} - echo "---> generate files via GNU automake (automake)" --${AUTOMAKE:-automake} -Wall -Wno-override -a -c -+rm -f config.guess config.sub -+${AUTOMAKE:-automake} -Wall -Wno-override -a -c --force-missing - echo "---> generate files via GNU autoconf (autoconf)" - ${AUTOCONF:-autoconf} -Wall - -Index: rpm-5.4.15/syck/autogen.sh -=================================================================== ---- rpm-5.4.15.orig/syck/autogen.sh -+++ rpm-5.4.15/syck/autogen.sh -@@ -40,6 +40,7 @@ echo "---> generate files via GNU autoco - aclocal - autoheader - echo "---> generate files via GNU automake (automake)" --automake -Wall -Wno-override -a -c -+rm -f config.guess config.sub -+automake -Wall -Wno-override -a -c --force-missing - echo "---> generate files via GNU autoconf (autoconf)" - autoconf diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-autogen.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-autogen.patch deleted file mode 100644 index 8771235d3..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-autogen.patch +++ /dev/null @@ -1,25 +0,0 @@ -Remove the sanity checking from the rpm autogen.sh. This is required because -we may have slightly different, but yet compatible versions. If we do end -up breaking things, we'll deal with it at that time. 
- -Upstream-Status: Inappropriate [configuration] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/syck/autogen.sh -=================================================================== ---- rpm-5.4.14.orig/syck/autogen.sh -+++ rpm-5.4.14/syck/autogen.sh -@@ -34,12 +34,6 @@ libtoolize () { - eval $_libtoolize $_libtoolize_args - } - --# requirements sanity check --[ "`automake --version | head -1`" != "$AMV" ] && echo "$USAGE" # && exit 1 --[ "`autoconf --version | head -1`" != "$ACV" ] && echo "$USAGE" # && exit 1 --[ "`libtoolize --version | head -1`" != "$LTV" ] && echo "$USAGE" # && exit 1 --[ "`gettextize --version | head -1 | sed -e 's;^.*/\\(gettextize\\);\\1;'`" != "$GTT" ] && echo "$USAGE" # && exit 1 -- - echo "---> generate files via GNU libtool (libtoolize)" - libtoolize --quiet --copy --force --install - echo "---> generate files via GNU autoconf (aclocal, autoheader)" diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-canonarch.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-canonarch.patch deleted file mode 100644 index 81fc84937..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-canonarch.patch +++ /dev/null @@ -1,136 +0,0 @@ -lib/rpmrc.c: Update --target processing to support full GNU canonical arch - -Prior to this patch, when using --target, RPM supported the format: - - - - --gnu - -- - ---gnu - -This patch changes the list of supported items to: - - - - --gnu - -- - --- - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/lib/rpmrc.c -=================================================================== ---- rpm-5.4.14.orig/lib/rpmrc.c -+++ rpm-5.4.14/lib/rpmrc.c -@@ -925,8 +925,8 @@ static void getMachineInfo(int type, /*@ - - static void rpmRebuildTargetVars(const char ** target, const char ** canontarget) - { -- -- char *ca = NULL, *co = NULL, *ct = NULL; -+ /* ca = arch, cv = vendor, co = os, ce = extension, ct = canon target */ -+ char *ca = NULL, *cv = NULL, *co = NULL, *ce = NULL, *ct = NULL; - int x; - - /* Rebuild the compat table to recalculate the current target arch. 
*/ -@@ -936,23 +936,60 @@ static void rpmRebuildTargetVars(const c - rpmSetTables(RPM_MACHTABLE_BUILDARCH, RPM_MACHTABLE_BUILDOS); - - if (target && *target) { -+ /* GNU canonical format is: -+ * --[-extension] -+ * -+ * We support the both the GNU canonical format -+ * as well as the traditional RPM formats: -+ * -+ * -[-gnu] -+ */ - char *c; - /* Set arch and os from specified build target */ - ca = xstrdup(*target); -- if ((c = strchr(ca, '-')) != NULL) { -+ if ((c = strchr(ca, '-')) == NULL) { -+ /* Format is */ -+ ; -+ } else { - *c++ = '\0'; -- -- if ((co = strrchr(c, '-')) == NULL) { -- co = c; -+ cv = c; -+ -+ if ((c = strchr(c, '-')) == NULL) { -+ /* Format is - */ -+ co = cv; -+ cv = NULL; - } else { -- if (!xstrcasecmp(co, "-gnu")) -- *co = '\0'; -- if ((co = strrchr(c, '-')) == NULL) -- co = c; -- else -- co++; -+ *c++ = '\0'; -+ co = c; -+ -+ if ((c = strchr(c, '-')) == NULL) { -+ /* Might be: -+ * -- -+ * --gnu -+ */ -+ if (!xstrcasecmp(co, "gnu")) { -+ /* Format was --gnu */ -+ ce = co; -+ co = cv; -+ cv = NULL; -+ } -+ } else { -+ /* Format was --- */ -+ *c++ = '\0'; -+ ce = c; -+ } - } -+ if (cv != NULL) cv = xstrdup(cv); - if (co != NULL) co = xstrdup(co); -+ if (ce != NULL) { -+ /* We need to prefix it with a "-" */ -+ char * lce = NULL; -+ -+ lce = xmalloc(strlen(ce) + sizeof("-")); -+ sprintf(lce, "-%s", ce); -+ -+ ce = lce; -+ } - } - } else { - const char *a = NULL; -@@ -995,8 +1032,16 @@ static void rpmRebuildTargetVars(const c - addMacro(NULL, "_target", NULL, ct, RMIL_RPMRC); - delMacro(NULL, "_target_cpu"); - addMacro(NULL, "_target_cpu", NULL, ca, RMIL_RPMRC); -+ if (cv) { -+ delMacro(NULL, "_target_vendor"); -+ addMacro(NULL, "_target_vendor", NULL, cv, RMIL_RPMRC); -+ } - delMacro(NULL, "_target_os"); - addMacro(NULL, "_target_os", NULL, co, RMIL_RPMRC); -+ if (ce) { -+ delMacro(NULL, "_gnu"); -+ addMacro(NULL, "_gnu", NULL, ce, RMIL_RPMRC); -+ } - - if (canontarget) - *canontarget = ct; -@@ -1004,8 +1049,12 @@ static void rpmRebuildTargetVars(const c - ct = _free(ct); - ca = _free(ca); - /*@-usereleased@*/ -+ cv = _free(cv); -+ /*@-usereleased@*/ - co = _free(co); - /*@=usereleased@*/ -+ ce = _free(ce); -+ /*@-usereleased@*/ - } - - void rpmFreeRpmrc(void) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-check-rootpath-reasonableness.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-check-rootpath-reasonableness.patch deleted file mode 100644 index 3d8d645a7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-check-rootpath-reasonableness.patch +++ /dev/null @@ -1,96 +0,0 @@ -rpm: check if the argument(rootpath) exists or be writable - -When user execute the command "rpm -qai --root=$dir",if $dir doesn't -exist or is unwritable as result of making a typo in rootpath,then -it will create dirent $dir and subdirectory. -So we should add the check function to fix it before creating relational -subdirectory,and warn the incorrect rootpath to user. It just checks the -rootpath reasonableness when the user input the argument(--root=/-r=). - -Upstream-Status: Pending - -Signed-off-by: Zhixiong Chi ---- - rpmqv.c | 45 +++++++++++++++++++++++++++++++++++++++++++++ - 1 file changed, 45 insertions(+) - -diff --git a/rpmqv.c b/rpmqv.c -index 40c42bd..88d85ab 100644 ---- a/rpmqv.c -+++ b/rpmqv.c -@@ -206,6 +206,8 @@ static struct poptOption optionsTable[] = { - POPT_TABLEEND - }; - -+static int _rpmqv_rootpath_state = 0; -+ - #ifdef __MINT__ - /* MiNT cannot dynamically increase the stack. 
*/ - long _stksize = 64 * 1024L; -@@ -427,6 +429,41 @@ static void integrity_check(const char *progname, enum modes progmode_num) - } - #endif - -+/*check if the rootdir is writable or exists */ -+int access_file(const char *rootdir) -+{ -+ int ret,rootdir_len; -+ -+ if(rootdir == NULL) { -+ return -1; -+ } -+ -+ rootdir_len = strlen(rootdir); -+ /*make sure that dirent argument trailing is "/" */ -+ if(!(rootdir_len && rootdir[rootdir_len - 1] == '/')){ -+ char *t = (char *)malloc(rootdir_len + 2); -+ *t = '\0'; -+ (void)stpcpy(stpcpy(t,rootdir),"/"); -+ ret = access(t,F_OK|W_OK); -+ free(t); -+ }else{ -+ ret = access(rootdir,F_OK|W_OK); -+ } -+ return ret; -+} -+ -+/*check if input the argument "--root/-r" */ -+void check_argument_root(int argc,char * const argv[]) -+{ -+ int i; -+ for (i = 0; i < argc; i++) { -+ if(strncmp(argv[i],"--root=",7) == 0 || strncmp(argv[i],"-r=",3) == 0) { -+ _rpmqv_rootpath_state = 1; -+ break; -+ } -+ } -+} -+ - /*@-bounds@*/ /* LCL: segfault */ - /*@-mods@*/ /* FIX: shrug */ - #if !defined(__GLIBC__) && !defined(__LCLINT__) -@@ -476,6 +513,8 @@ int main(int argc, const char ** argv) - int xx; - #endif - -+ check_argument_root(argc,(char *const *)argv); -+ - #if !defined(__GLIBC__) && !defined(__LCLINT__) - environ = envp; - #else -@@ -715,6 +754,12 @@ int main(int argc, const char ** argv) - argerror(_("arguments to --root (-r) must begin with a /")); - break; - } -+ if (_rpmqv_rootpath_state) { -+ if (access_file(rpmioRootDir)) { -+ fprintf(stderr, _("Invalid directory:%s, ensure it exists or be writable\n"),rpmioRootDir); -+ exit(EXIT_FAILURE); -+ } -+ } - } - - #if defined(RPM_VENDOR_OPENPKG) /* integrity-checking */ --- -1.9.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db-reduce.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db-reduce.patch deleted file mode 100644 index c86937653..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db-reduce.patch +++ /dev/null @@ -1,19 +0,0 @@ -Upstream-Status: Inappropriate [configuration] - -Change cache size to reduce the usage of disk space from 62MB to 26MB. - -Signed-off-by: Mei Lei - -Index: rpm-5.4.14/rpmdb/DB_CONFIG.in -=================================================================== ---- rpm-5.4.14.orig/rpmdb/DB_CONFIG.in -+++ rpm-5.4.14/rpmdb/DB_CONFIG.in -@@ -29,7 +29,7 @@ set_thread_count 64 - - # ================ Memory Pool - #XXX initializing dbenv with set_cachesize has unimplemented prerequsites --#set_cachesize 0 1048576 0 -+set_cachesize 0 1048576 0 - set_mp_mmapsize 268435456 - - # ================ Locking diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db5-or-db6.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db5-or-db6.patch deleted file mode 100644 index 5d08d279a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db5-or-db6.patch +++ /dev/null @@ -1,174 +0,0 @@ -From 7bad268de8b32281e2a12ccd88038b3ec5eb1be3 Mon Sep 17 00:00:00 2001 -From: Yuanjie Huang -Date: Tue, 15 Dec 2015 18:50:21 +0800 -Subject: [PATCH] Support both db5 and db6. 
- -Upstream-Status: Inappropriate [configuration] - -Signed-off-by: Yuanjie Huang ---- - configure.ac | 103 ++++++++++++++++++++++++++++++++++++++++++++++------------- - 1 file changed, 81 insertions(+), 22 deletions(-) - -Index: rpm/configure.ac -=================================================================== ---- rpm.orig/configure.ac -+++ rpm/configure.ac -@@ -871,8 +871,6 @@ else - MYPATH=$PATH - fi - --DBXY=db61 -- - AC_PATH_PROG(__BASH, bash, %{_bindir}/bash, $MYPATH) - AC_PATH_PROG(__BZIP2, bzip2, %{_bindir}/bzip2, $MYPATH) - AC_PATH_PROG(__CAT, cat, /bin/cat, $MYPATH) -@@ -884,22 +882,6 @@ AC_PATH_PROG(__CMAKE, cmake, %{_bindir}/ - AC_PATH_PROG(__CPIO, cpio, /bin/cpio, $MYPATH) - AC_PATH_PROG(__CURL, curl, %{_bindir}/curl, $MYPATH) - AC_PATH_PROG(__CVS, cvs, %{_bindir}/cvs, $MYPATH) --AC_PATH_PROG(__DB_ARCHIVE, ${DBXY}_archive, %{_bindir}/${DBXY}_archive, $MYPATH) --AC_PATH_PROG(__DB_CHECKPOINT, ${DBXY}_checkpoint, %{_bindir}/${DBXY}_checkpoint, $MYPATH) --AC_PATH_PROG(__DB_DEADLOCK, ${DBXY}_deadlock, %{_bindir}/${DBXY}_deadlock, $MYPATH) --AC_PATH_PROG(__DB_DUMP, ${DBXY}_dump, %{_bindir}/${DBXY}_dump, $MYPATH) --AC_PATH_PROG(__DB_HOTBACKUP, ${DBXY}_hotbackup, %{_bindir}/${DBXY}_hotbackup, $MYPATH) --AC_PATH_PROG(__DB_LOAD, ${DBXY}_load, %{_bindir}/${DBXY}_load, $MYPATH) --AC_PATH_PROG(__DB_LOG_VERIFY, ${DBXY}_log_verify, %{_bindir}/${DBXY}_log_verify, $MYPATH) --AC_PATH_PROG(__DB_PRINTLOG, ${DBXY}_printlog, %{_bindir}/${DBXY}_printlog, $MYPATH) --AC_PATH_PROG(__DB_RECOVER, ${DBXY}_recover, %{_bindir}/${DBXY}_recover, $MYPATH) --AC_PATH_PROG(__DB_REPLICATE, ${DBXY}_replicate, %{_bindir}/${DBXY}_replicate, $MYPATH) --AC_PATH_PROG(__DBSQL, ${DBXY}sql, %{_bindir}/${DBXY}sql, $MYPATH) --AC_PATH_PROG(__DB_SQL_CODEGEN, ${DBXY}_sql_codegen, %{_bindir}/${DBXY}_sql_codegen, $MYPATH) --AC_PATH_PROG(__DB_STAT, ${DBXY}_stat, %{_bindir}/${DBXY}_stat, $MYPATH) --AC_PATH_PROG(__DB_TUNER, ${DBXY}_tuner, %{_bindir}/${DBXY}_tuner, $MYPATH) --AC_PATH_PROG(__DB_UPGRADE, ${DBXY}_upgrade, %{_bindir}/${DBXY}_upgrade, $MYPATH) --AC_PATH_PROG(__DB_VERIFY, ${DBXY}_verify, %{_bindir}/${DBXY}_verify, $MYPATH) - AC_PATH_PROG(__DIFF, diff, /bin/diff, $MYPATH) - AC_PATH_PROG(__DITTO, ditto, %{_bindir}/ditto, $MYPATH) - AC_PATH_PROG(__FILE, file, %{_bindir}/file, $MYPATH) -@@ -2050,13 +2032,46 @@ RPM_CHECK_LIB( - - dnl # Berkeley-DB & SQLite - DBLIBSRCS="" -+DBXY=db -+ - # XXX won't handle --includedir override --CPPFLAGS="${CPPFLAGS} -I${prefix}/include/${DBXY}" --RPM_CHECK_LIB( -+CPPFLAGS_save="${CPPFLAGS}" -+CPPFLAGS="${CPPFLAGS_save}" -+with_db_save="${with_db}" -+ -+AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ -+#include -+]], -+[[ -+#if DB_VERSION_MAJOR < 6 -+#error DB_VERSION_MAJOR is below 6 -+#endif -+]])], -+[RPM_CHECK_LIB( - [Berkeley-DB], [db], - [db-6.1], [db_create], [db.h], -- [yes,external], [db3], -+ [yes,external], [db6], - [ DBLIBSRCS="$DBLIBSRCS db3.c" -+ DBXY=db61 -+ AM_CONDITIONAL(WITH_DB, [ true ]) -+ AM_CONDITIONAL(WITH_DB_INTERNAL, [ test ".$RPM_CHECK_LIB_LOCATION" = .internal ]) -+ if test ".$RPM_CHECK_LIB_LOCATION" = .internal; then -+ AC_DEFINE(HAVE_DB_H, 1, [Have header]) -+ else -+ WITH_DB_SUBDIR="" -+ fi -+ ], -+ [ AM_CONDITIONAL(WITH_DB, [ false ]) -+ AM_CONDITIONAL(WITH_DB_INTERNAL, [ false ]) -+ ])], -+[with_db="${with_db_save}" -+ CPPFLAGS="${CPPFLAGS_save}" -+ RPM_CHECK_LIB( -+ [Berkeley-DB], [db], -+ [db-5.3], [db_create], [db.h], -+ [yes,external], [db53], -+ [ DBLIBSRCS="$DBLIBSRCS db3.c" -+ DBXY=db53 - AM_CONDITIONAL(WITH_DB, [ true ]) - AM_CONDITIONAL(WITH_DB_INTERNAL, [ test 
".$RPM_CHECK_LIB_LOCATION" = .internal ]) - if test ".$RPM_CHECK_LIB_LOCATION" = .internal; then -@@ -2068,6 +2083,11 @@ RPM_CHECK_LIB( - [ AM_CONDITIONAL(WITH_DB, [ false ]) - AM_CONDITIONAL(WITH_DB_INTERNAL, [ false ]) - ]) -+]) -+ -+if test ".$ac_cv_lib_db_6_1_db_create" != .yes -a ".$ac_cv_lib_db_5_3_db_create" != .yes; then -+ CPPFLAGS="${CPPFLAGS_save}" -+fi - - dnl # Sqlite external - RPM_CHECK_LIB( -@@ -2078,10 +2098,11 @@ RPM_CHECK_LIB( - []) - - dnl # Sqlite 3.8.3.1 from db-6.1.19 -+if test ".$ac_cv_lib_db_6_1_db_create" = .yes; then - RPM_CHECK_LIB( - [Berkeley-DB (+SQLite3)], [dbsql], - [db_sql-6.1], [sqlite3_open], [dbsql.h], -- [yes,external], [db3/sql], -+ [yes,external], [db6/sql], - [ - AM_CONDITIONAL(WITH_DBSQL, [ true ]) - AC_DEFINE(WITH_SQLITE, 1, [Define as 1 if building with SQLite library]) -@@ -2095,12 +2116,50 @@ RPM_CHECK_LIB( - ], [ - AM_CONDITIONAL(WITH_DBSQL, [ false ]) - ]) -+elif test ".$ac_cv_lib_db_5_3_db_create" = .yes; then -+RPM_CHECK_LIB( -+ [Berkeley-DB (+SQLite3)], [dbsql], -+ [db_sql-5.3], [sqlite3_open], [dbsql.h], -+ [yes,external], [db53/sql], -+ [ -+ AM_CONDITIONAL(WITH_DBSQL, [ true ]) -+ AC_DEFINE(WITH_SQLITE, 1, [Define as 1 if building with SQLite library]) -+ if test ".$RPM_CHECK_LIB_LOCATION" = .internal; then -+ WITH_DB_CPPFLAGS="${WITH_DB_CPPFLAGS} -I\$(top_srcdir)/db/sql/generated" -+ WITH_DB_LIBS="${WITH_DBSQL_LIBS}" -+ DBLIBSRCS="$DBLIBSRCS sqlite.c" -+ else -+ WITH_DBSQL_SUBDIR="" -+ fi -+ ], [ -+ AM_CONDITIONAL(WITH_DBSQL, [ false ]) -+ ]) -+else -+AM_CONDITIONAL(WITH_DBSQL, [ false ]) -+fi - - DBLIBOBJS=`echo $DBLIBSRCS | sed -e "s/\.c/\.lo/g"` - - AC_SUBST(DBLIBSRCS) - AC_SUBST(DBLIBOBJS) - -+AC_PATH_PROG(__DB_ARCHIVE, ${DBXY}_archive, %{_bindir}/${DBXY}_archive, $MYPATH) -+AC_PATH_PROG(__DB_CHECKPOINT, ${DBXY}_checkpoint, %{_bindir}/${DBXY}_checkpoint, $MYPATH) -+AC_PATH_PROG(__DB_DEADLOCK, ${DBXY}_deadlock, %{_bindir}/${DBXY}_deadlock, $MYPATH) -+AC_PATH_PROG(__DB_DUMP, ${DBXY}_dump, %{_bindir}/${DBXY}_dump, $MYPATH) -+AC_PATH_PROG(__DB_HOTBACKUP, ${DBXY}_hotbackup, %{_bindir}/${DBXY}_hotbackup, $MYPATH) -+AC_PATH_PROG(__DB_LOAD, ${DBXY}_load, %{_bindir}/${DBXY}_load, $MYPATH) -+AC_PATH_PROG(__DB_LOG_VERIFY, ${DBXY}_log_verify, %{_bindir}/${DBXY}_log_verify, $MYPATH) -+AC_PATH_PROG(__DB_PRINTLOG, ${DBXY}_printlog, %{_bindir}/${DBXY}_printlog, $MYPATH) -+AC_PATH_PROG(__DB_RECOVER, ${DBXY}_recover, %{_bindir}/${DBXY}_recover, $MYPATH) -+AC_PATH_PROG(__DB_REPLICATE, ${DBXY}_replicate, %{_bindir}/${DBXY}_replicate, $MYPATH) -+AC_PATH_PROG(__DBSQL, ${DBXY}sql, %{_bindir}/${DBXY}sql, $MYPATH) -+AC_PATH_PROG(__DB_SQL_CODEGEN, ${DBXY}_sql_codegen, %{_bindir}/${DBXY}_sql_codegen, $MYPATH) -+AC_PATH_PROG(__DB_STAT, ${DBXY}_stat, %{_bindir}/${DBXY}_stat, $MYPATH) -+AC_PATH_PROG(__DB_TUNER, ${DBXY}_tuner, %{_bindir}/${DBXY}_tuner, $MYPATH) -+AC_PATH_PROG(__DB_UPGRADE, ${DBXY}_upgrade, %{_bindir}/${DBXY}_upgrade, $MYPATH) -+AC_PATH_PROG(__DB_VERIFY, ${DBXY}_verify, %{_bindir}/${DBXY}_verify, $MYPATH) -+ - AC_ARG_WITH(db-largefile, AS_HELP_STRING([--with-db-largefile], [build Berkeley-DB with LARGEFILE support])) - AC_ARG_WITH(db-mutex, AS_HELP_STRING([--with-db-mutex=ARG], [build Berkeley-DB with MUTEX type ARG])) - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db60.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db60.patch deleted file mode 100644 index b4df8b751..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db60.patch +++ /dev/null @@ -1,56 +0,0 @@ -Set the DB 6 
version to match oe-core db 6.0.30 - -Upstream-Status: Inappropriate [configuration] - -Signed-off-by: Mark Hatle - -Index: rpm/configure.ac -=================================================================== ---- rpm.orig/configure.ac -+++ rpm/configure.ac -@@ -2049,10 +2049,10 @@ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ - ]])], - [RPM_CHECK_LIB( - [Berkeley-DB], [db], -- [db-6.1], [db_create], [db.h], -+ [db-6.0], [db_create], [db.h], - [yes,external], [db6], - [ DBLIBSRCS="$DBLIBSRCS db3.c" -- DBXY=db61 -+ DBXY=db60 - AM_CONDITIONAL(WITH_DB, [ true ]) - AM_CONDITIONAL(WITH_DB_INTERNAL, [ test ".$RPM_CHECK_LIB_LOCATION" = .internal ]) - if test ".$RPM_CHECK_LIB_LOCATION" = .internal; then -@@ -2085,7 +2085,7 @@ AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ - ]) - ]) - --if test ".$ac_cv_lib_db_6_1_db_create" != .yes -a ".$ac_cv_lib_db_5_3_db_create" != .yes; then -+if test ".$ac_cv_lib_db_6_0_db_create" != .yes -a ".$ac_cv_lib_db_5_3_db_create" != .yes; then - CPPFLAGS="${CPPFLAGS_save}" - fi - -@@ -2097,11 +2097,11 @@ RPM_CHECK_LIB( - [ DBLIBSRCS="$DBLIBSRCS sqlite.c" ], - []) - --dnl # Sqlite 3.8.3.1 from db-6.1.19 --if test ".$ac_cv_lib_db_6_1_db_create" = .yes; then -+dnl # Sqlite 3.8.3.1 from db-6.0.30 -+if test ".$ac_cv_lib_db_6_0_db_create" = .yes; then - RPM_CHECK_LIB( - [Berkeley-DB (+SQLite3)], [dbsql], -- [db_sql-6.1], [sqlite3_open], [dbsql.h], -+ [db_sql-6.0], [sqlite3_open], [dbsql.h], - [yes,external], [db6/sql], - [ - AM_CONDITIONAL(WITH_DBSQL, [ true ]) -@@ -2253,7 +2253,7 @@ AC_SUBST(WITH_RUBY_CPPFLAGS) - AC_SUBST(WITH_RUBY_SUBDIR) - AC_SUBST(WITH_RUBY_VENDORARCHDIR) - --dnl # Java prerequisites (swiped from db-6.1.19/dist/aclocal_java et al) -+dnl # Java prerequisites (swiped from db-6.0.30/dist/aclocal_java et al) - WITH_JAVA=no - AC_ARG_WITH([java], - AS_HELP_STRING([--with-java], [build RPM with java support]), diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db_buffer_small.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db_buffer_small.patch deleted file mode 100644 index 16b8e30a1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-db_buffer_small.patch +++ /dev/null @@ -1,77 +0,0 @@ -In certain cases with BerkleyDB 5.3.x we are getting the error: - -db3.c:1443: dbcursor->pget(-30999): BDB0063 DB_BUFFER_SMALL: User memory too small for return value - -See https://bugs.launchpad.net/rpm/+bug/934420 for more information. - -It appears to be some type of a bug in the BerkleyDB 5.3.x. In an attempt -to workaround the problem, when we encounter this situation we attempt -to adjust the size of the mmap buffer until the call works, or we -end up trying 10 times. The new size is either the updated vp->size -from the failed pget call, or the previous size + 1024. - -If DBI debugging is enabled, additional diagnostics are printed, otherwise -a basic retry and success message is added to show that the failure was -resolved. 
- -Upstream-Status: Inappropriate (workaround) - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/rpmdb/rpmdb.c -=================================================================== ---- rpm-5.4.14.orig/rpmdb/rpmdb.c -+++ rpm-5.4.14/rpmdb/rpmdb.c -@@ -2212,8 +2212,12 @@ static int rpmmiGet(dbiIndex dbi, DBC * - vp->flags |= DB_DBT_USERMEM; - rc = dbiGet(dbi, dbcursor, kp, vp, flags); - if (rc == DB_BUFFER_SMALL) { -+ int retry = 0; -+ size_t origlen = vp->size; - size_t uhlen = vp->size; -- void * uh = mmap(NULL, uhlen, _prot, _flags, _fdno, _off); -+ void * uh; -+retry_get: -+ uh = mmap(NULL, uhlen, _prot, _flags, _fdno, _off); - if (uh == NULL || uh == (void *)-1) - fprintf(stderr, - "==> mmap(%p[%u], 0x%x, 0x%x, %d, 0x%x) error(%d): %s\n", -@@ -2235,6 +2239,25 @@ static int rpmmiGet(dbiIndex dbi, DBC * - if (munmap(uh, uhlen) != 0) - fprintf(stderr, "==> munmap(%p[%u]) error(%d): %s\n", - uh, (unsigned)uhlen, errno, strerror(errno)); -+ /* We want to be sure to limit the number of retry attempts to avoid a loop! */ -+ if (rc == DB_BUFFER_SMALL && retry < 10) { -+ /* If we got a largr vp-size back, use that, otherwise increment the size by 1k */ -+ uhlen = vp->size > uhlen ? vp->size : uhlen + 1024; -+ retry++; -+ if ((dbi)->dbi_debug) -+ fprintf(stderr, "==> DB_BUFFER_SMALL orig requested (%d), configured (%d), forcing larger buffer (%d), new size (%d)\n", -+ origlen, vp->ulen, uhlen, vp->size); -+ else -+ fprintf(stderr, "==> retry (%d) db3cpget (%d)\n", retry, uhlen); -+ goto retry_get; -+ } -+ } -+ if (retry) { -+ if ((dbi)->dbi_debug) -+ fprintf(stderr, "==> success orig requested (%d), configured buffer (%d), buffer (%d), size after dbiGet (%d)\n", -+ origlen, vp->ulen, uhlen, vp->size); -+ else -+ fprintf(stderr, "==> success\n"); - } - } - } else -Index: rpm-5.4.14/rpmdb/db3.c -=================================================================== ---- rpm-5.4.14.orig/rpmdb/db3.c -+++ rpm-5.4.14/rpmdb/db3.c -@@ -1509,7 +1509,7 @@ assert(db != NULL); - #endif - } - --DBIDEBUG(dbi, (stderr, "<-- %s(%p,%p,%p,%p,%p,0x%x) rc %d %s%s\n", __FUNCTION__, dbi, dbcursor, key, pkey, data, flags, rc, _DBCFLAGS(flags), _KEYDATA(key, pkey, data, NULL))); -+DBIDEBUG(dbi, (stderr, "<-- %s(%p,%p,%p,%p,%p,0x%x) rc %d %s%s\n", __FUNCTION__, dbi, dbcursor, key, pkey, data, flags, rc, _DBCFLAGS(flags), _KEYDATA(key, pkey, rc == DB_BUFFER_SMALL ? NULL : data, NULL))); - return rc; - } - /*@=mustmod@*/ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-debug-platform.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-debug-platform.patch deleted file mode 100644 index 2eb6e0f26..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-debug-platform.patch +++ /dev/null @@ -1,65 +0,0 @@ -Debug the platform score generation... 
- -Index: rpm-5.4.14/lib/rpmrc.c -=================================================================== ---- rpm-5.4.14.orig/lib/rpmrc.c -+++ rpm-5.4.14/lib/rpmrc.c -@@ -465,6 +465,8 @@ static rpmRC rpmPlatform(const char * pl - - rc = (rpmRC) rpmiobSlurp(platform, &iob); - -+ fprintf(stderr, "D: rpmPlatform file %s\n", platform); -+ - if (rc || iob == NULL) { - rc = RPMRC_FAIL; - goto exit; -@@ -486,6 +488,7 @@ static rpmRC rpmPlatform(const char * pl - while (--t > p && xisspace(*t)) - *t = '\0'; - if (t > p) { -+ fprintf(stderr, "D: rpmPlatform mireAppend REGEX %s\n", p); - xx = mireAppend(RPMMIRE_REGEX, 0, p, NULL, &mi_re, &mi_nre); - } - continue; -@@ -503,6 +506,11 @@ static rpmRC rpmPlatform(const char * pl - _gnu = rpmExpand("-", cvog->gnu, NULL); - - addMacro(NULL, "_platform_gnu", NULL, (_gnu ? _gnu : ""), -1); -+ fprintf(stderr, "D: rpmPlatform addMacro %s-%s-%s(%s)\n", -+ rpmExpand("%{_platform_cpu}", NULL), -+ rpmExpand("%{_platform_vendor}", NULL), -+ rpmExpand("%{_platform_os}", NULL), -+ rpmExpand("%{_platform_gnu}", NULL)); - #else - addMacro(NULL, "_host_cpu", NULL, cvog->cpu, -1); - addMacro(NULL, "_host_vendor", NULL, cvog->vendor, -1); -@@ -510,6 +518,7 @@ static rpmRC rpmPlatform(const char * pl - #endif - } - -+ fprintf(stderr, "D: rpmPlatform mireAppend STRCMP %s -- ", p); - #if !defined(RPM_VENDOR_OE) /* Skip the explicit-platform */ - #if defined(RPM_VENDOR_OPENPKG) /* explicit-platform */ - /* do not use vendor and GNU attribution */ -@@ -519,6 +528,7 @@ static rpmRC rpmPlatform(const char * pl - (cvog && *cvog->gnu ? "-" : NULL), - (cvog ? cvog->gnu : NULL), NULL); - #endif -+ fprintf(stderr, "%s\n", p); - xx = mireAppend(RPMMIRE_STRCMP, 0, p, NULL, &mi_re, &mi_nre); - p = _free(p); - #endif -@@ -688,9 +698,12 @@ int rpmPlatformScore(const char * platfo - - if ((mire = (miRE) mi_re) != NULL) - for (i = 0; i < mi_nre; i++) { -- if (mireRegexec(mire + i, platform, 0) >= 0) -+ if (mireRegexec(mire + i, platform, 0) >= 0) { -+ fprintf(stderr, "D: rpmPlatformScore %s (%d)\n", platform, i + 1); - return (i + 1); -+ } - } -+ fprintf(stderr, "D: rpmPlatformScore %s (%d)\n", platform, 0); - return 0; - } - /*@=onlytrans@*/ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-disable-auto-stack-protector.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-disable-auto-stack-protector.patch deleted file mode 100644 index 124606c0d..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-disable-auto-stack-protector.patch +++ /dev/null @@ -1,24 +0,0 @@ -Make security switches manual settings - -RPM checks for the availability of the stack protector switch and -transactional-memory support. If supported it unconditionally -enables the compiler options which can cause errors if the support has -not been built into the compiler. 
- -Upstream-Status: Inappropriate [configuration] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.15/configure.ac -=================================================================== ---- rpm-5.4.15.orig/configure.ac -+++ rpm-5.4.15/configure.ac -@@ -425,7 +425,7 @@ dnl # rpm_CFLAGS_ADD([-fstack-arrays],[ - dnl # build RPM instrumented for extra optimization/security (GCC only) - dnl # --- other optimizations - rpm_CFLAGS_ADD([-fexceptions], [RPM_CFLAGS]) -- rpm_CFLAGS_ADD([-D_FORTIFY_SOURCE=2 -fstack-protector], [RPM_CFLAGS]) -+dnl rpm_CFLAGS_ADD([-D_FORTIFY_SOURCE=2 -fstack-protector], [RPM_CFLAGS]) - dnl # rpm_CFLAGS_ADD([-fstack-protector-all],[RPM_CFLAGS]) - - if test \( ".`$CC --version 2>&1 | grep 'GCC'`" != . \); then diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-disable-blaketest.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-disable-blaketest.patch deleted file mode 100644 index adbef6df8..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-disable-blaketest.patch +++ /dev/null @@ -1,28 +0,0 @@ -rpmio: Disable building of the tblake2 test(s). - -There is some type of a dependency fault here that can occasionally result in: - -gcc: error: tblake2b.o: No such file or directory -or -gcc: error: tblake2bp.o: No such file or directory - -These items are simply test cases that are not packaged, so they can be -safely disabled to resolve the dependency issue. - -Upstream-Status: Inappropriate [workaround] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.15/rpmio/Makefile.am -=================================================================== ---- rpm-5.4.15.orig/rpmio/Makefile.am -+++ rpm-5.4.15/rpmio/Makefile.am -@@ -29,7 +29,7 @@ EXTRA_PROGRAMS += bsdiff bspatch pcrsed - tmire todbc toid tperl tpython tput trpmio tsexp tsvn tsw ttcl \ - dumpasn1 lookup3 trel twitter github tmicrojson duk - --noinst_PROGRAMS += b2sum tset tblake2b tblake2bp tblake2s tblake2sp tgfs -+#noinst_PROGRAMS += b2sum tset tblake2b tblake2bp tblake2s tblake2sp tgfs - if WITH_LIBGIT2 - noinst_PROGRAMS += tgit - else diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-ensure-rpm2cpio-call-rpm-relocation-code.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-ensure-rpm2cpio-call-rpm-relocation-code.patch deleted file mode 100644 index 63af10024..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-ensure-rpm2cpio-call-rpm-relocation-code.patch +++ /dev/null @@ -1,25 +0,0 @@ -We need to call rpmcliInit to ensure the rpm relocation code is called -and it correctly honours the relocation environmental variables. - -when we export the wrsdk and source the sdk, then execute rpm2cpio xxx.rpm|cpio -t. -we will get the following error : -"rpm-5.4.14/rpmdb/dbconfig.c:493: -db3New: Assertion `dbOpts != ((void *)0) && *dbOpts != '\0'' failed. 
- -Upstream-Status: Pending - -Signed-off-by: Richard Purdie -Signed-off-by: Zhixiong Chi -Index: rpm-5.4.15/tools/rpm2cpio.c -=================================================================== ---- rpm-5.4.15.orig/tools/rpm2cpio.c 2012-04-27 01:46:51.000000000 +0800 -+++ rpm-5.4.15/tools/rpm2cpio.c 2016-09-05 11:07:30.419903338 +0800 -@@ -87,6 +87,8 @@ - #endif - (void) rpmtsSetVSFlags(ts, vsflags); - -+ rpmcliInit(argc, argv, NULL); -+ - /*@-mustmod@*/ /* LCL: segfault */ - rc = rpmReadPackageFile(ts, fdi, "rpm2cpio", &h); - /*@=mustmod@*/ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fileclass.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fileclass.patch deleted file mode 100644 index b1db6fff7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fileclass.patch +++ /dev/null @@ -1,36 +0,0 @@ -rpmfc.c: Always generate per-file information - -Even when the per-file dependency generate is disabled, we want to generate -per file classification and other associated data. - -Note: this is a temporary workaround. Eventually we will want to have a way -to seed per-file dependency and other information in order to generate a -package from previously determined information. - -Upstream-Status: Pending - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/lib/rpmfc.c -=================================================================== ---- rpm-5.4.14.orig/lib/rpmfc.c -+++ rpm-5.4.14/lib/rpmfc.c -@@ -1734,7 +1734,6 @@ rpmRC rpmfcGenerateDepends(void * _spec, - /* ... then generate dependencies using %{__find_requires} et al. */ - rc = rpmfcGenerateDependsHelper(spec, pkg, fi); - printDeps(pkg->header); -- return rc; - } - - /* Generate scriptlet Dependencies. */ -@@ -1762,8 +1761,8 @@ rpmRC rpmfcGenerateDepends(void * _spec, - av[ac] = NULL; - - fc = rpmfcNew(); -- fc->skipProv = !pkg->autoProv; -- fc->skipReq = !pkg->autoReq; -+ fc->skipProv = !pkg->autoProv || !internaldeps; -+ fc->skipReq = !pkg->autoReq || !internaldeps; - fc->tracked = 0; - - { const char * buildRootURL; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-logio-cp.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-logio-cp.patch deleted file mode 100644 index 290ec1aa1..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-logio-cp.patch +++ /dev/null @@ -1,30 +0,0 @@ -Occasionally the cp -p fails with a non-zero return code. This will cause -the system abort the build. - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm/rpmdb/Makefile.am -=================================================================== ---- rpm.orig/rpmdb/Makefile.am -+++ rpm/rpmdb/Makefile.am -@@ -234,11 +234,14 @@ lcov-report: - #lcov-upload: lcov - # rsync -rvz -e ssh --delete lcov/* ??? 
- -+$(builddir)/logio_recover_template: $(srcdir)/logio_recover_template -+ @if test ".$(builddir)" != ".$(srcdir)"; then \ -+ cp -fp $(srcdir)/logio_recover_template \ -+ $(builddir)/logio_recover_template ; \ -+ fi -+ - logio_BUILT = logio_auto.c logio_autop.c logio_auto.h logio_template --$(logio_BUILT): logio.awk logio.src logio_recover_template -- @test -e $(builddir)/logio_recover_template || \ -- cp -p $(srcdir)/logio_recover_template \ -- $(builddir)/logio_recover_template -+$(logio_BUILT): logio.awk logio.src $(builddir)/logio_recover_template - @rm -f $(logio_BUILT) - @$(AWK) -f $(srcdir)/logio.awk \ - -v header_file=logio_auto.h \ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-lua-tests-compilation-failure.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-lua-tests-compilation-failure.patch deleted file mode 100644 index 1a08243ab..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-lua-tests-compilation-failure.patch +++ /dev/null @@ -1,43 +0,0 @@ -Upstream-Status: Pending - -Subject: lua: fix to build test libs correctly - -This patch fixes errors like below. - - | gcc: error: lib21.c: No such file or directory - | gcc: fatal error: no input files - - -Signed-off-by: Chen Qi ---- - lua/tests/libs/Makefile.am | 8 ++++---- - 1 file changed, 4 insertions(+), 4 deletions(-) - -diff --git a/lua/tests/libs/Makefile.am b/lua/tests/libs/Makefile.am -index 19d1a0b..1e5de72 100644 ---- a/lua/tests/libs/Makefile.am -+++ b/lua/tests/libs/Makefile.am -@@ -28,16 +28,16 @@ clean: - rm -f lib1.so lib11.so lib2.so lib21.so lib2-v2.so - - lib1.so: lib1.c -- $(CC) $(CFLAGS) -o lib1.so lib1.c -+ $(CC) $(CFLAGS) -o lib1.so $(top_srcdir)/lua/tests/libs/lib1.c - - lib11.so: lib11.c -- $(CC) $(CFLAGS) -o lib11.so lib11.c -+ $(CC) $(CFLAGS) -o lib11.so $(top_srcdir)/lua/tests/libs/lib11.c - - lib2.so: lib2.c -- $(CC) $(CFLAGS) -o lib2.so lib2.c -+ $(CC) $(CFLAGS) -o lib2.so $(top_srcdir)/lua/tests/libs/lib2.c - - lib21.so: lib21.c -- $(CC) $(CFLAGS) -o lib21.so lib21.c -+ $(CC) $(CFLAGS) -o lib21.so $(top_srcdir)/lua/tests/libs/lib21.c - - lib2-v2.so: lib2.so - mv lib2.so ./lib2-v2.so --- -1.9.1 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-parseEmbedded.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-parseEmbedded.patch deleted file mode 100644 index c57f24cd7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-fix-parseEmbedded.patch +++ /dev/null @@ -1,27 +0,0 @@ -Fix an issue where parseEmbedded is not defined, but is still used. - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm/rpmio/macro.c -=================================================================== ---- rpm.orig/rpmio/macro.c -+++ rpm/rpmio/macro.c -@@ -1616,8 +1616,6 @@ exit: - * @retval *avp invocation args - * @return script string - */ --#if defined(WITH_AUGEAS) || defined(WITH_FICL) || defined(WITH_MOZJS) || defined(WITH_JNIEMBED) || defined(WITH_PERLEMBED) || defined(WITH_PYTHONEMBED) || defined(WITH_RUBYEMBED) || defined(WITH_MRUBY_EMBED) || defined(WITH_SQLITE) || defined(WITH_SQUIRREL) || defined(WITH_TCL) -- - static char _FIXME_embedded_interpreter_eval_returned_null[] = - "FIXME: embedded interpreter eval returned null."; - -@@ -1668,7 +1666,6 @@ bingo: - script[nb] = '\0'; - return script; - } --#endif - - /** - * The main macro recursion loop. 
diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-gnu-atomic.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-gnu-atomic.patch deleted file mode 100644 index 36a418fc6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-gnu-atomic.patch +++ /dev/null @@ -1,64 +0,0 @@ -configure.ac: Check if the current compiler supports the transactions - -Some distributions appear to have compilers that are built without support -for transactions, even though they are GCC 4.7 or newer. - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.15/configure.ac -=================================================================== ---- rpm-5.4.15.orig/configure.ac -+++ rpm-5.4.15/configure.ac -@@ -425,9 +425,34 @@ dnl # --- other optimizations - rpm_CFLAGS_ADD([-D_FORTIFY_SOURCE=2 -fstack-protector], [RPM_CFLAGS]) - dnl # rpm_CFLAGS_ADD([-fstack-protector-all],[RPM_CFLAGS]) - -- if test \( ".`$CC --version 2>&1 | grep 'GCC'`" != . \); then -- rpm_CFLAGS_ADD([-fgnu-tm], [RPM_CFLAGS]) -- fi -+dnl # Check if the current gcc supports -fgnu-tm and __transaction_atomic -+AC_MSG_CHECKING([If the compiler supports __transaction_atomic]) -+save_CFLAGS="$CFLAGS" -+save_LDFLAGS="$LDFLAGS" -+CFLAGS="${CFLAGS} -fgnu-tm -litm" -+LDFLAGS="${LDFLAGS} -litm" -+AC_LINK_IFELSE([AC_LANG_SOURCE([[ -+int -+main() -+{ -+#if !__clang__ && ((__GNUC__ == 4 && __GNUC_MINOR__ >= 7) || (__GNUC__ > 4)) /* XXX gud enuf? */ -+ int i = 0; -+ __transaction_atomic { i++; } -+#else -+# error Compiler does not support __transaction_atomic -+#endif -+ return 0; -+} -+]])], [ -+ AC_DEFINE([HAVE_GNUC_TM_ATOMIC], [1], -+ [Define to 1 if the compiler supports __transaction_atomic.]) -+ AC_MSG_RESULT([yes]) -+], [ -+ CFLAGS="$save_CFLAGS" -+ LDFLAGS="$save_LDFLAGS" -+ AC_MSG_RESULT([no]) -+]) -+ - - dnl # --- options below are added to RPM_CFLAGS but _NOT_ added to CFLAGS - CPPFLAGS="$CPPFLAGS $RPM_CPPFLAGS" -Index: rpm-5.4.15/rpmio/rpmutil.h -=================================================================== ---- rpm-5.4.15.orig/rpmio/rpmutil.h -+++ rpm-5.4.15/rpmio/rpmutil.h -@@ -105,7 +105,7 @@ - # define RPM_GNUC_INTERNAL - #endif - --#if !__clang__ && __GNUC__ == 4 && __GNUC_MINOR__ >= 7 /* XXX gud enuf? */ -+#ifdef HAVE_GNUC_TM_ATOMIC - # define RPM_GNUC_TM_SAFE __attribute__((transaction_safe)) - # define RPM_GNUC_TM_PURE __attribute__((transaction_pure)) - # define RPM_GNUC_TM_CALLABLE __attribute__((transaction_callable)) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-hardlink-segfault-fix.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-hardlink-segfault-fix.patch deleted file mode 100644 index 057925fb6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-hardlink-segfault-fix.patch +++ /dev/null @@ -1,43 +0,0 @@ -We need to sanity check that the nlink size and our linksLeft counter -do match. If an rpm is badly constructed with identical inode values -for multiple hardlinked files, such an rpm will otherwise access memory -out of array bounds and cause memory corruption and crashes. - -The fix is to add in the sanity check and exit if bad circumstances -are found. We need to fix the caller to check the return code too. 
- -RP 2014/6/10 - -Upstream-Status: Submitted [RPM5 maintainer] - -Index: rpm-5.4.14/lib/fsm.c -=================================================================== ---- rpm-5.4.14.orig/lib/fsm.c -+++ rpm-5.4.14/lib/fsm.c -@@ -495,6 +495,11 @@ static int saveHardLink(/*@special@*/ /* - } - - if (fsm->goal == IOSM_PKGBUILD) --fsm->li->linksLeft; -+ if (fsm->li->linksLeft > st->st_nlink) { -+ rpmlog(RPMLOG_ERR, _("Corrupted hardlinks found (count %d does not match %d), exiting.\n"), fsm->li->linksLeft, st->st_nlink); -+ return -1; -+ } -+ - fsm->li->filex[fsm->li->linksLeft] = fsm->ix; - /*@-observertrans -dependenttrans@*/ - fsm->li->nsuffix[fsm->li->linksLeft] = fsm->nsuffix; -@@ -1878,8 +1883,13 @@ if (!(fsmGetFi(fsm)->mapflags & IOSM_PAY - fsm->postpone = iosmFileActionSkipped(fsm->action); - if (fsm->goal == IOSM_PKGINSTALL || fsm->goal == IOSM_PKGBUILD) { - /*@-evalorder@*/ /* FIX: saveHardLink can modify fsm */ -- if (S_ISREG(st->st_mode) && st->st_nlink > 1) -+ if (S_ISREG(st->st_mode) && st->st_nlink > 1) { - fsm->postpone = saveHardLink(fsm); -+ if (fsm->postpone < 0) { -+ rc = RPMRC_FAIL; -+ break; -+ } -+ } - /*@=evalorder@*/ - } - if (fsmGetFi(fsm)->mapflags & IOSM_PAYLOAD_LIST) fsm->postpone = 1; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-keccak-sse-intrin.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-keccak-sse-intrin.patch deleted file mode 100644 index 72884d4ad..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-keccak-sse-intrin.patch +++ /dev/null @@ -1,27 +0,0 @@ -rpm - rpmio/keccak.c: make SSE/MMX dependent upon gcc config - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.15/rpmio/keccak.c -=================================================================== ---- rpm-5.4.15.orig/rpmio/keccak.c -+++ rpm-5.4.15/rpmio/keccak.c -@@ -17,9 +17,13 @@ http://keccak.noekeon.org/ - #if OPTIMIZED == 64 - /* ===== "KeccakOpt64-settings.h" */ - #define Unrolling 18 --//#define UseBebigokimisa --#define UseSSE --//#define UseMMX -+#if defined(__SSE2__) -+ #define UseSSE -+#elif defined(__MMX__) -+ #define UseMMX -+#else -+ #define UseBebigokimisa -+#endif - /* ===== */ - #endif - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lib-transaction.c-fix-file-conflicts-for-mips64-N32.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lib-transaction.c-fix-file-conflicts-for-mips64-N32.patch deleted file mode 100644 index 1a48db63c..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lib-transaction.c-fix-file-conflicts-for-mips64-N32.patch +++ /dev/null @@ -1,52 +0,0 @@ -From 67ec7531e6297200eaa97ef917d49b0a75876cb4 Mon Sep 17 00:00:00 2001 -From: Robert Yang -Date: Wed, 2 Dec 2015 00:56:07 -0800 -Subject: [PATCH] lib/transaction.c: fix file conflicts for MIPS64 N32 - -The following error can occur: - smart install libc6-2.22-r0.1@lib32_mips32r2octeon3 libc6-dbg-2.22-r0.1@lib32_mips32r2octeon3 -error: file /sbin/ldconfig conflicts between attempted installs of libc6-2.22-r0.1.lib32_mips32r2octeon3 and libc6-2.22-r0.1.octeon3_n32 -error: file /sbin/.debug/ldconfig conflicts between attempted installs of libc6-dbg-2.22-r0.1.lib32_mips32r2octeon3 and libc6-dbg-2.22-r0.1.octeon3_n32 - -This was because: -transactions_color = 001 (ELF32) & 010 (ELF64) & 100 (ELF32 N32 MIPS64) -FColor = Current file color (001) & transaction_color (111) -oFcolor = Previous file color (100) & transaction_color (111) - -There are two 
places where the conflict comparisons occur. In both places -the 'else' clause was too restrictive (opposite of the 'positive' clause). -This caused the system to only permit a binary comparison - "new preferred" or -"old preferred". It did not permissing "neither preferred". By removing the -else comparison the system will now perform a 'last-in-wins' resolution when -"neither is preferred". - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Robert Yang -Signed-off-by: Mark Hatle ---- - lib/transaction.c | 3 ++- - 1 file changed, 2 insertions(+), 1 deletion(-) - -Index: rpm-5.4.14/lib/transaction.c -=================================================================== ---- rpm-5.4.14.orig/lib/transaction.c -+++ rpm-5.4.14/lib/transaction.c -@@ -154,7 +154,7 @@ static int handleInstInstalledFile(const - fi->actions[fx] = FA_SKIPCOLOR; - #endif - rConflicts = 0; -- } else if (FColor & prefcolor) { -+ } else { - #ifdef REFERENCE - rpmfsSetAction(fs, fx, FA_CREATE); - #else -@@ -420,7 +420,7 @@ assert(otherFi != NULL); - fi->actions[i] = FA_SKIPCOLOR; - rConflicts = 0; - } else -- if (FColor == 0 && oFColor == 0) { -+ { - /* ... otherwise, do both, last in wins. */ - otherFi->actions[otherFileNum] = FA_CREATE; - fi->actions[i] = FA_CREATE; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-libsql-fix.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-libsql-fix.patch deleted file mode 100644 index e87e02b69..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-libsql-fix.patch +++ /dev/null @@ -1,22 +0,0 @@ -During installation, the libtool relink attempts to link to -lrpm... -The problem is that it hasn't been installed yet! So small change causes -the libtool to instead use the build version. - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/lib/Makefile.am -=================================================================== ---- rpm-5.4.14.orig/lib/Makefile.am -+++ rpm-5.4.14/lib/Makefile.am -@@ -120,6 +120,9 @@ librpm.la: $(librpm_la_OBJECTS) $(librpm - #libsql_la_SOURCES = libsql.c - #libsql_la_LIBADD = librpm.la $(RPMDB_LDADD_COMMON) - -+# pkglib libraries needs to have usrlib libraries already installed! -+install-pkglibLTLIBRARIES: install-usrlibLTLIBRARIES -+ - install-data-hook: - if !ENABLE_BUILD_LAFILES - -for l in $(usrlib_LTLIBRARIES); do \ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-log-auto-rm.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-log-auto-rm.patch deleted file mode 100644 index 3153f7a38..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-log-auto-rm.patch +++ /dev/null @@ -1,15 +0,0 @@ - -Upstream-Status: Inappropriate [configuration] - -Index: rpm-5.4.14/rpmdb/DB_CONFIG.in -=================================================================== ---- rpm-5.4.14.orig/rpmdb/DB_CONFIG.in -+++ rpm-5.4.14/rpmdb/DB_CONFIG.in -@@ -4,6 +4,7 @@ set_data_dir . - set_create_dir . 
- set_lg_dir ./log - set_tmp_dir ./tmp -+set_flags db_log_autoremove on - - # -- thread_count must be >= 8 - set_thread_count 64 diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lsb-compatibility.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lsb-compatibility.patch deleted file mode 100644 index a87518ba3..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lsb-compatibility.patch +++ /dev/null @@ -1,24 +0,0 @@ -Enable platform tag matching workaround in OE. - -When installing some LSB packages the 'platform' field in the package -appears to be invalid. Instead of relying solely on the platform comparison -we also want to generate a perceived platform based on the valid rpm contents -of arch and os. - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/lib/depends.c -=================================================================== ---- rpm-5.4.14.orig/lib/depends.c -+++ rpm-5.4.14/lib/depends.c -@@ -595,7 +595,7 @@ int rpmtsAddInstallElement(rpmts ts, Hea - platform = rpmExpand(arch, "-unknown-", os, NULL); - - rc = rpmPlatformScore(platform, platpat, nplatpat); --#if defined(RPM_VENDOR_MANDRIVA) -+#if defined(RPM_VENDOR_MANDRIVA) || defined(RPM_VENDOR_OE) - /* - * If no match on platform tag, we'll try again with arch tag - * in case platform tag is inconsistent with it, which is the case diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lua.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lua.patch deleted file mode 100644 index a9930d670..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-lua.patch +++ /dev/null @@ -1,33 +0,0 @@ -Add support for cross compiling lua - -Upstream-Status: Pending - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/configure.ac -=================================================================== ---- rpm-5.4.14.orig/configure.ac -+++ rpm-5.4.14/configure.ac -@@ -121,6 +121,9 @@ AC_PROG_YACC - - AC_PATH_PROG(AS, as, as) - -+CC_FOR_BUILD=${CC_FOR_BUILD-\$(CC)} -+AC_SUBST(CC_FOR_BUILD) -+ - dnl # GCC specifics - AC_PROG_GCC_TRADITIONAL - AC_ARG_ENABLE(build-pic, -Index: rpm-5.4.14/lua/Makefile.am -=================================================================== ---- rpm-5.4.14.orig/lua/Makefile.am -+++ rpm-5.4.14/lua/Makefile.am -@@ -41,7 +41,7 @@ rpmluac_LDADD = liblua.la - - # --- bin2c doesn't need anything but a compiler - bin2c$(EXEEXT): bin2c.c -- $(CC) -o $@ $< -+ $(CC_FOR_BUILD) -o $@ $< - - liblua_la_SOURCES = - liblua_la_CFLAGS = @WITH_LUA_SUBDIR_DEF@ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-macros.in-disable-external-key-server.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-macros.in-disable-external-key-server.patch deleted file mode 100644 index a08412aa9..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-macros.in-disable-external-key-server.patch +++ /dev/null @@ -1,31 +0,0 @@ -disable external key server - -Upstream-Status: Pending - -When RPM experiences a signed package, with a signature that it does NOT know. -By default it will send the -fingerprint- (and only the 16 digit fingerprint) to -an external HKP server, trying to get the key down. - -This is probably not a reasonable default behavior for the system to do, instead -it should simply fail the key lookup. If someone wants to enable the HKP server -it's easy enough to do by enabling the necessary macros. 
- -Signed-off-by: yzhu1 -Signed-off-by: Mark Hatle -Index: rpm/macros/macros.in -=================================================================== ---- rpm.orig/macros/macros.in -+++ rpm/macros/macros.in -@@ -563,10 +563,10 @@ $_arbitrary_tags_tests Foo:Bar - - # Horowitz Key Protocol server configuration - # --%_hkp_keyserver hkp://keys.rpm5.org -+#%_hkp_keyserver hkp://keys.rpm5.org - #%_hkp_keyserver hkp://keys.n3npq.net - #%_hkp_keyserver hkp://pool.sks-keyservers.net --%_hkp_keyserver_query %{_hkp_keyserver}/pks/lookup?op=get&search= -+#%_hkp_keyserver_query %{_hkp_keyserver}/pks/lookup?op=get&search= - - - # NSS_InitContext() parameter configuration diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-macros.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-macros.patch deleted file mode 100644 index c7ab2d2bb..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-macros.patch +++ /dev/null @@ -1,64 +0,0 @@ -macros/macros.in: Revert settings to the same as RPM 5.4.0 - -Enable a reasonable set of rpmdeps dependency helper macros. These sets -were used by RPM 5.4.0. - -Upstream-Status: Inappropriate [configuration] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/macros/macros.in -=================================================================== ---- rpm-5.4.14.orig/macros/macros.in -+++ rpm-5.4.14/macros/macros.in -@@ -1022,7 +1022,7 @@ $_arbitrary_tags_tests Foo:Bar - - #============================================================================== - # ---- rpmbuild macros. --#%%{load:%{_usrlibrpm}/macros.rpmbuild} -+%{load:%{_usrlibrpm}/macros.rpmbuild} - - #------------------------------------------------------------------------ - # cmake(...) configuration -@@ -1038,15 +1038,15 @@ $_arbitrary_tags_tests Foo:Bar - - #------------------------------------------------------------------------ - # perl(...) configuration --#%%{load:%{_usrlibrpm}/macros.d/perl} -+%{load:%{_usrlibrpm}/macros.d/perl} - - #------------------------------------------------------------------------ - # python(...) configuration. --#%%{load:%{_usrlibrpm}/macros.d/python} -+%{load:%{_usrlibrpm}/macros.d/python} - - #------------------------------------------------------------------------ - # php(...) configuration. --#%%{load:%{_usrlibrpm}/macros.d/php} -+%{load:%{_usrlibrpm}/macros.d/php} - - #------------------------------------------------------------------------ - # java(...) configuration. -@@ -1054,11 +1054,11 @@ $_arbitrary_tags_tests Foo:Bar - - #------------------------------------------------------------------------ - # libtool(...) configuration. --#%%{load:%{_usrlibrpm}/macros.d/libtool} -+%{load:%{_usrlibrpm}/macros.d/libtool} - - #------------------------------------------------------------------------ - # pkgconfig(...) configuration. --#%%{load:%{_usrlibrpm}/macros.d/pkgconfig} -+%{load:%{_usrlibrpm}/macros.d/pkgconfig} - - #------------------------------------------------------------------------ - # mono(...) configuration. -@@ -1070,7 +1070,7 @@ $_arbitrary_tags_tests Foo:Bar - - #------------------------------------------------------------------------ - # tcl(...) configuration. --#%%{load:%{_usrlibrpm}/macros.d/tcl} -+%{load:%{_usrlibrpm}/macros.d/tcl} - - #------------------------------------------------------------------------ - # typelib(...) configuration. 
diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-mongodb-sasl.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-mongodb-sasl.patch deleted file mode 100644 index 6c3e4718b..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-mongodb-sasl.patch +++ /dev/null @@ -1,69 +0,0 @@ -Fix errors when building with sasl2 disabled - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm/rpmio/mongoc.c -=================================================================== ---- rpm.orig/rpmio/mongoc.c -+++ rpm/rpmio/mongoc.c -@@ -39,8 +39,10 @@ - # include - #endif - -+#ifdef HAVE_LIBSASL2 - #include - #include -+#endif - - #include - #include -@@ -14228,6 +14230,7 @@ mongoc_read_prefs_copy (const mongoc_rea - return ret; - } - -+#ifdef MONGOC_ENABLE_SASL - /*==============================================================*/ - /* --- mongoc-sasl.c */ - -@@ -14555,6 +14558,7 @@ _mongoc_sasl_step (mongoc_sasl_t *sasl, - - return true; - } -+#endif - - /*==============================================================*/ - /* --- mongoc-socket.c */ -Index: rpm/rpmio/mongoc.h -=================================================================== ---- rpm.orig/rpmio/mongoc.h -+++ rpm/rpmio/mongoc.h -@@ -38,8 +38,10 @@ - # include - #endif - -+#ifdef HAVE_LIBSASL2 - #include - #include -+#endif - - #include - #include -@@ -2455,6 +2457,8 @@ BSON_END_DECLS - /*==============================================================*/ - /* --- mongoc-sasl-private.h */ - -+#ifdef MONGOC_ENABLE_SASL -+ - BSON_BEGIN_DECLS - - -@@ -2498,6 +2502,7 @@ bool _mongoc_sasl_step (mong - - - BSON_END_DECLS -+#endif - - /*==============================================================*/ - /* --- mongoc-ssl-private.h */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-no-loopmsg.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-no-loopmsg.patch deleted file mode 100644 index e58cc13a6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-no-loopmsg.patch +++ /dev/null @@ -1,19 +0,0 @@ -lib/order.c: Make the dependency loop messages into debug msgs - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/lib/order.c -=================================================================== ---- rpm-5.4.14.orig/lib/order.c -+++ rpm-5.4.14/lib/order.c -@@ -2175,7 +2175,7 @@ rescan: - const char * dp; - rpmlogLvl msglvl = (anaconda || (rpmtsDFlags(ts) & RPMDEPS_FLAG_DEPLOOPS)) - ? RPMLOG_WARNING : RPMLOG_ERR; --#if defined(RPM_VENDOR_MANDRIVA) /* loop-detection-optional-loglevel */ -+#if defined(RPM_VENDOR_MANDRIVA) || defined(RPM_VENDOR_OE) /* loop-detection-optional-loglevel */ - // Report loops as debug-level message by default (7 = RPMLOG_DEBUG), overridable - msglvl = rpmExpandNumeric("%{?_loop_detection_loglevel}%{?!_loop_detection_loglevel:7}"); - #endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-no-perl-urpm.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-no-perl-urpm.patch deleted file mode 100644 index 58182296c..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-no-perl-urpm.patch +++ /dev/null @@ -1,47 +0,0 @@ -Disable perl-URPM support - -This causes a configure failure when perl-URPM is not used. 
- -| configure.ac:1159: required file `perl-URPM/Makefile.PL.in' not found -| configure.ac:1159: required file `perl-URPM/Makefile.in' not found - -Upstream-Status: Inappropriate [disable feature] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/configure.ac -=================================================================== ---- rpm-5.4.14.orig/configure.ac -+++ rpm-5.4.14/configure.ac -@@ -1186,30 +1186,10 @@ AC_SUBST(WITH_PERL_SUBDIR) - AC_SUBST(WITH_PERL_SUBPACKAGE) - AC_SUBST(WITH_PERL_LIBS) - --dnl # optional Perl-URPM language bindings -+dnl # disable Perl-URPM language bindings - WITH_PERL_URPM_SUBDIR="" - WITH_PERL_URPM_SUBPACKAGE=0 - WITH_PERL_URPM_LIBS="" --if test ".$WITH_PTHREADS" = .yes; then -- WITH_PERL_URPM_LIBS="$LIBS" --fi --AC_ARG_WITH(perl-urpm, AS_HELP_STRING([--with-perl-urpm], [build with Perl URPM language bindings]), [ -- if test ".$withval" != .no; then -- PERL_URPM_INSTALLDIRS="" -- if test "$withval" == "vendor"; then -- PERL_URPM_INSTALLDIRS="'INSTALLDIRS' => 'vendor'," -- fi -- if test "$withval" == "site"; then -- PERL_URPM_INSTALLDIRS="'INSTALLDIRS' => 'site'," -- fi -- AC_DEFINE_UNQUOTED([PERL_URPM_INSTALLDIRS], [$PERL_URPM_INSTALLDIRS], [Perl install directory (vendor/site)]) -- AC_SUBST(PERL_URPM_INSTALLDIRS) -- WITH_PERL_URPM_SUBDIR=perl-URPM -- WITH_PERL_URPM_SUBPACKAGE=1 -- AC_CONFIG_FILES([perl-URPM/Makefile.PL]) -- AC_CONFIG_FILES([perl-URPM/Makefile]) -- fi --]) - AC_SUBST(WITH_PERL_URPM_SUBDIR) - AC_SUBST(WITH_PERL_URPM_SUBPACKAGE) - AC_SUBST(WITH_PERL_URPM_LIBS) diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-opendb-before-verifyscript-to-avoid-null-point.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-opendb-before-verifyscript-to-avoid-null-point.patch deleted file mode 100644 index e2191245f..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-opendb-before-verifyscript-to-avoid-null-point.patch +++ /dev/null @@ -1,24 +0,0 @@ -rpm: opendb before rpmverifyscript to avoid null point input - -If the command is "rpm -V" and the return value of (headerIsEntry(h, RPMTAG_VERIFYSCRIPT) -|| headerIsEntry(h, RPMTAG_SANITYCHECK)) located in /lib/verify.c is true, it will call -rpmpsmStage function(rpmVerifyScript->rpmpsmScriptStage->rpmpsmStage) and occur segment -fault because of null point(rpmtsGetRdb(ts) == NULL and rpmtsGetRdb(ts)->db_txn). -So we open rpmdb to avoid bad input when find headerIsEntry true. - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Zhixiong Chi -Index: rpm-5.4.14/lib/verify.c -=================================================================== ---- rpm-5.4.14.orig/lib/verify.c 2015-07-22 22:09:59.992895355 +0800 -+++ rpm-5.4.14/lib/verify.c 2015-08-13 10:20:33.752177906 +0800 -@@ -613,6 +613,8 @@ - { - FD_t fdo = fdDup(STDOUT_FILENO); - -+ rpmtsOpenDB(ts, O_RDONLY); /*Open the DB to avoid null point input in function rpmpsmStage()*/ -+ - rc = rpmfiSetHeader(fi, h); - if ((rc = rpmVerifyScript(qva, ts, fi, fdo)) != 0) - ec += rc; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-ossp-uuid.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-ossp-uuid.patch deleted file mode 100644 index 691aba9ea..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-ossp-uuid.patch +++ /dev/null @@ -1,23 +0,0 @@ -Fix integration of ossp-uuid - -We need to avoid including the util-linux uuid library, instead -we need ossp-uuid. 
There is a related hack in do_configure to -make sure that we use the right .pc file as well. - -Upstream-Status: Inappropriate [disable feature] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/configure.ac -=================================================================== ---- rpm-5.4.14.orig/configure.ac -+++ rpm-5.4.14/configure.ac -@@ -1951,7 +1951,7 @@ grep -v 'define HAVE_UNISTD_H' confdefs. - esac - RPM_CHECK_LIB( - [OSSP uuid], [uuid], -- [ossp-uuid uuid], [uuid_import], [uuid.h], -+ [ossp-uuid], [uuid_import], [uuid.h], - [no,external:none], [], - [ dnl # enable OSSP uuid native API support for embedded Lua - if test ".$WITH_LUA" = .yes; then diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-packageorigin.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-packageorigin.patch deleted file mode 100644 index 57fc6ce88..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-packageorigin.patch +++ /dev/null @@ -1,25 +0,0 @@ -Add the ability to query the packageorigin - -Written by jbj at rpm5.org - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/rpmdb/hdrfmt.c -=================================================================== ---- rpm-5.4.14.orig/rpmdb/hdrfmt.c -+++ rpm-5.4.14/rpmdb/hdrfmt.c -@@ -2409,8 +2409,10 @@ static int pkgoriginTag(Header h, HE_t h - int rc = 1; - - he->tag = RPMTAG_PACKAGEORIGIN; -- if (!headerGet(h, he, HEADERGET_NOEXTENSION) -- && (origin = headerGetOrigin(h)) != NULL) -+ /* XXX two sources for tag data: what search precedence? */ -+ if (headerGet(h, he, HEADERGET_NOEXTENSION)) -+ rc = 0; -+ else if ((origin = headerGetOrigin(h)) != NULL) - { - he->t = RPM_STRING_TYPE; - he->p.str = xstrdup(origin); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-payload-use-hashed-inode.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-payload-use-hashed-inode.patch deleted file mode 100644 index af643b1c6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-payload-use-hashed-inode.patch +++ /dev/null @@ -1,126 +0,0 @@ -If we run builds on a filesystem with 64 bit inodes like XFS, we need to -map the inode numbers to something 32 bit since the cpio header only allows -for 32 bit inode values. If we don't do this: - -#define SET_NUM_FIELD(phys, val, space) \ - sprintf(space, "%8.8lx", (unsigned long) (val)); \ - memcpy(phys, space, 8) - -from cpio.c will print larger that 8 character values and then truncate the -LSBs. This generates cpio files where hardlinked files may have the same -inode number. The resulting rpms are then corrupted. - -There is a separate patch for the crash the identical inode numbers causes -when extracting the rpm. 
- -Patch taken from http://git.pld-linux.org/?p=packages/rpm.git;a=commitdiff;h=10526c23aac60b7b636e4c93862887dbef8e8f15 - -RP 2014/6/10 - -Upstream-Status: Submitted [RPM5 maintainer] - -Index: rpm-5.4.14/build/files.c -=================================================================== ---- rpm-5.4.14.orig/build/files.c -+++ rpm-5.4.14/build/files.c -@@ -1328,6 +1328,26 @@ static rpmuint32_t getDigestAlgo(Header - return dalgo; - } - -+static int isHardLink(FileListRec flp, FileListRec tlp) -+{ -+ return ((S_ISREG(flp->fl_mode) && S_ISREG(tlp->fl_mode)) && -+ ((flp->fl_nlink > 1) && (flp->fl_nlink == tlp->fl_nlink)) && -+ (flp->fl_ino == tlp->fl_ino) && -+ (flp->fl_dev == tlp->fl_dev)); -+} -+ -+static int seenHardLink(FileList fl, FileListRec flp, ino_t *fileid) -+{ -+ FileListRec ilp; -+ for (ilp = fl->fileList; ilp < flp; ilp++) { -+ if (isHardLink(flp, ilp)) { -+ *fileid = ilp - fl->fileList; -+ return 1; -+ } -+ } -+ return 0; -+} -+ - /** - * Add file entries to header. - * @todo Should directories have %doc/%config attributes? (#14531) -@@ -1374,6 +1394,7 @@ memset(buf, 0, sizeof(buf)); /* XXX valg - - for (i = 0, flp = fl->fileList; i < fl->fileListRecsUsed; i++, flp++) { - const char *s; -+ ino_t fileid = flp - fl->fileList; - - /* Merge duplicate entries. */ - while (i < (fl->fileListRecsUsed - 1) && -@@ -1436,6 +1457,13 @@ memset(buf, 0, sizeof(buf)); /* XXX valg - /* Leave room for both dirname and basename NUL's */ - dpathlen += (strlen(flp->diskURL) + 2); - -+ /* Excludes and dupes have been filtered out by now. */ -+ if (S_ISREG(flp->fl_mode)) { -+ if (flp->fl_nlink == 1 || !seenHardLink(fl, flp, &fileid)) { -+ fl->totalFileSize += flp->fl_size; -+ } -+ } -+ - /* - * Make the header, the OLDFILENAMES will get converted to a - * compressed file list write before we write the actual package to -@@ -1518,7 +1546,11 @@ memset(buf, 0, sizeof(buf)); /* XXX valg - - /* XXX Hash instead of 64b->32b truncate to prevent aliasing. */ - { ino_t _ino = flp->fl_ino; -+ /* don't use hash here, as hash collisions which happen on large packages -+ cause bus errors in rpmbuild - ui32 = hashFunctionString(0, &_ino, sizeof(_ino)); -+ */ -+ ui32 = fileid + 1; - } - he->tag = RPMTAG_FILEINODES; - he->t = RPM_UINT32_TYPE; -@@ -1751,39 +1783,6 @@ if (_rpmbuildFlags & 4) { - IOSM_MAP_TYPE | IOSM_MAP_MODE | IOSM_MAP_UID | IOSM_MAP_GID; - if (isSrc) - fi->fmapflags[i] |= IOSM_FOLLOW_SYMLINKS; -- -- if (S_ISREG(flp->fl_mode)) { -- int bingo = 1; -- /* Hard links need be tallied only once. */ -- if (flp->fl_nlink > 1) { -- FileListRec jlp = flp + 1; -- int j = i + 1; -- for (; (unsigned)j < fi->fc; j++, jlp++) { -- /* follow outer loop logic */ -- while (((jlp - fl->fileList) < (fl->fileListRecsUsed - 1)) && -- !strcmp(jlp->fileURL, jlp[1].fileURL)) -- jlp++; -- if (jlp->flags & RPMFILE_EXCLUDE) { -- j--; -- /*@innercontinue@*/ continue; -- } -- if (jlp->flags & RPMFILE_GHOST) -- /*@innercontinue@*/ continue; -- if (!S_ISREG(jlp->fl_mode)) -- /*@innercontinue@*/ continue; -- if (flp->fl_nlink != jlp->fl_nlink) -- /*@innercontinue@*/ continue; -- if (flp->fl_ino != jlp->fl_ino) -- /*@innercontinue@*/ continue; -- if (flp->fl_dev != jlp->fl_dev) -- /*@innercontinue@*/ continue; -- bingo = 0; /* don't tally hardlink yet. 
*/ -- /*@innerbreak@*/ break; -- } -- } -- if (bingo) -- fl->totalFileSize += flp->fl_size; -- } - } - - ui32 = fl->totalFileSize; diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-pkgconfigdeps.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-pkgconfigdeps.patch deleted file mode 100644 index 656de86d7..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-pkgconfigdeps.patch +++ /dev/null @@ -1,37 +0,0 @@ -pkgconfigdeps.sh: Change to restricting pkgconfig to the local directory - -Using PKG_CONFIG_PATH will allow pkg-config to fall back to the system paths, -we don't want this as it may lead to inaccurate results in some corner cases. - -PKG_CONFIG_LIBDIR will ensure pkg-config stays within the install directory. - -Upstream-Status: Inappropriate [configuration] - -Signed-off-by: Mark Hatle - -Index: rpm/scripts/pkgconfigdeps.sh -=================================================================== ---- rpm.orig/scripts/pkgconfigdeps.sh -+++ rpm/scripts/pkgconfigdeps.sh -@@ -18,8 +18,8 @@ case $1 in - *.pc) - # Query the dependencies of the package. - DIR=$(dirname ${filename}) -- PKG_CONFIG_PATH="$DIR:$DIR/../../share/pkgconfig" -- export PKG_CONFIG_PATH -+ PKG_CONFIG_LIBDIR="$DIR:$DIR/../../share/pkgconfig" -+ export PKG_CONFIG_LIBDIR - $pkgconfig --print-provides "$filename" 2> /dev/null | while read n r v ; do - [ -n "$n" ] || continue - # We have a dependency. Make a note that we need the pkgconfig -@@ -42,8 +42,8 @@ case $1 in - [ -n "$oneshot" ] && echo "$oneshot"; oneshot="" - # Query the dependencies of the package. - DIR=$(dirname ${filename}) -- PKG_CONFIG_PATH="$DIR:$DIR/../../share/pkgconfig" -- export PKG_CONFIG_PATH -+ PKG_CONFIG_LIBDIR="$DIR:$DIR/../../share/pkgconfig" -+ export PKG_CONFIG_LIBDIR - $pkgconfig --print-requires "$filename" 2> /dev/null | while read n r v ; do - [ -n "$n" ] || continue - if [ -n "$r" ] && [ -n "$v" ]; then diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform-file-fix.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform-file-fix.patch deleted file mode 100644 index 200964f39..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform-file-fix.patch +++ /dev/null @@ -1,28 +0,0 @@ -Don't add the first line of /etc/rpm/platform to the list of patterns -to match when computing an arch score, use it just for getting -information about the platform (cpu/vendor/os). Fixes #3864. - -Upstream-Status: Submitted [RPM5 maintainer] - -Index: rpm-5.4.14/lib/rpmrc.c -=================================================================== ---- rpm-5.4.14.orig/lib/rpmrc.c -+++ rpm-5.4.14/lib/rpmrc.c -@@ -510,6 +510,7 @@ static rpmRC rpmPlatform(const char * pl - #endif - } - -+#if !defined(RPM_VENDOR_OE) /* Skip the explicit-platform */ - #if defined(RPM_VENDOR_OPENPKG) /* explicit-platform */ - /* do not use vendor and GNU attribution */ - p = rpmExpand("%{_host_cpu}-%{_host_os}", NULL); -@@ -520,7 +521,8 @@ static rpmRC rpmPlatform(const char * pl - #endif - xx = mireAppend(RPMMIRE_STRCMP, 0, p, NULL, &mi_re, &mi_nre); - p = _free(p); -- -+#endif -+ - init_platform++; - } - rc = (init_platform ? 
RPMRC_OK : RPMRC_FAIL); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform.patch deleted file mode 100644 index 3b40fea0e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform.patch +++ /dev/null @@ -1,137 +0,0 @@ -Fix up platform and related sysinfo file loading. - -Upstream-Status: Submitted [RPM5 maintainer] - -This ensures that RPM knows the compatible set of package types at all times. - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/lib/depends.c -=================================================================== ---- rpm-5.4.14.orig/lib/depends.c -+++ rpm-5.4.14/lib/depends.c -@@ -250,7 +250,7 @@ static int rpmtsAddUpgrades(rpmts ts, rp - he->p.ptr = _free(he->p.ptr); - } - --#if defined(RPM_VENDOR_WINDRIVER) -+#if defined(RPM_VENDOR_WINDRIVER) && !defined(RPM_VENDOR_OE) - /* - * If we're capable of installing multiple colors - * but at least one of the packages are white (0), we -@@ -507,7 +507,7 @@ assert(lastx >= 0 && lastx < ts->orderCo - return 0; - } - --#if defined(RPM_VENDOR_WINDRIVER) -+#if defined(RPM_VENDOR_WINDRIVER) && !defined(RPM_VENDOR_OE) - /* Is "compat" compatible w/ arch? */ - int _isCompatibleArch(const char * arch, const char * compat) - { -@@ -663,7 +663,7 @@ assert(he->p.str != NULL); - - if (arch == NULL || (parch = rpmteA(p)) == NULL) - continue; --#if defined(RPM_VENDOR_WINDRIVER) -+#if defined(RPM_VENDOR_WINDRIVER) && !defined(RPM_VENDOR_OE) - /* XXX hackery for alias matching. */ - if (!_isCompatibleArch(arch, parch)) - continue; -@@ -829,6 +829,12 @@ int rpmtsAddEraseElement(rpmts ts, Heade - return rc; - } - -+#if defined(RPM_VENDOR_WINDRIVER) || defined(RPM_VENDOR_OE) -+#define _ETC_RPM_SYSINFO "%{_etcrpm}/sysinfo" -+#else -+#define _ETC_RPM_SYSINFO SYSCONFIGDIR "/sysinfo" -+#endif -+ - /*@only@*/ /*@null@*/ /*@unchecked@*/ - static char *sysinfo_path = NULL; - -@@ -1311,7 +1317,7 @@ retry: - sysinfo_path = rpmExpand("%{?_rpmds_sysinfo_path}", NULL); - if (!(sysinfo_path != NULL && *sysinfo_path == '/')) { - sysinfo_path = _free(sysinfo_path); -- sysinfo_path = xstrdup(SYSCONFIGDIR "/sysinfo"); -+ sysinfo_path = rpmExpand(_ETC_RPM_SYSINFO, NULL); - } - } - -Index: rpm-5.4.14/lib/rpmds.c -=================================================================== ---- rpm-5.4.14.orig/lib/rpmds.c -+++ rpm-5.4.14/lib/rpmds.c -@@ -1759,7 +1759,7 @@ int rpmdsSysinfo(rpmPRCO PRCO, const cha - /*@-observertrans @*/ - _sysinfo_path = _free(_sysinfo_path); - /*@=observertrans @*/ -- _sysinfo_path = xstrdup(_ETC_RPM_SYSINFO); -+ _sysinfo_path = rpmExpand(_ETC_RPM_SYSINFO, NULL); - } - } - /*@=modobserver@*/ -Index: rpm-5.4.14/lib/rpmrc.c -=================================================================== ---- rpm-5.4.14.orig/lib/rpmrc.c -+++ rpm-5.4.14/lib/rpmrc.c -@@ -38,7 +38,13 @@ - static const char * configTarget = NULL; - - /*@observer@*/ /*@unchecked@*/ --static const char * platform = SYSCONFIGDIR "/platform"; -+#if defined(RPM_VENDOR_WINDRIVER) -+#define _ETC_RPM_PLATFORM "%{_etcrpm}/platform" -+#else -+#define _ETC_RPM_PLATFORM SYSCONFIGDIR "/platform" -+#endif -+ -+static const char * _platform = NULL; - - /*@only@*/ /*@relnull@*/ /*@unchecked@*/ - void * platpat = NULL; -@@ -694,16 +700,17 @@ static void defaultMachine(/*@out@*/ con - int rc; - - while (!gotDefaults) { --#if defined(RPM_VENDOR_WINDRIVER) -- const char * _platform = rpmGetPath(__etcrpm, "/platform", NULL); --#else -- const char * _platform = platform; 
--#endif -+ if (_platform == NULL) { -+ _platform = rpmExpand("%{?_rpmrc_platform_path}", NULL); -+ /* XXX may need to validate path existence somewhen. */ -+ if (!(_platform != NULL && *_platform == '/')) { -+ _platform = _free(_platform); -+ _platform = rpmExpand(_ETC_RPM_PLATFORM, NULL); -+ } -+ } - CVOG_t cvog = NULL; - #if defined(RPM_VENDOR_OPENPKG) /* larger-utsname */ - const char *cp; --#endif --#if defined(RPM_VENDOR_OPENPKG) /* larger-utsname */ - /* utsname fields on some platforms (like HP-UX) are very small - (just about 8 characters). This is too small for OpenPKG, so cheat! */ - rc = uname(&un_real); -@@ -780,9 +787,7 @@ static void defaultMachine(/*@out@*/ con - if (cp != NULL && cp != _platform) - cp = _free(cp); - #endif --#if defined(RPM_VENDOR_WINDRIVER) - _platform = _free(_platform); --#endif - - if (configTarget && !parseCVOG(configTarget, &cvog) && cvog != NULL) { - gotDefaults = 1; -@@ -1101,6 +1106,8 @@ int rpmReadConfigFiles(/*@unused@*/ cons - - #ifdef PREMACROFILES - if (rpmReadRC(PREMACROFILES)) return -1; -+#else -+ if (rpmReadRC(NULL)) return -1; - #endif - - /* Reset umask to its default umask(2) value. */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform2.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform2.patch deleted file mode 100644 index b9675c71b..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-platform2.patch +++ /dev/null @@ -1,105 +0,0 @@ -Fix up platform and related sysinfo file loading (part 2). - -We need to ensure that we set the _gnu flag somehow. We do this by reading -from the platform file, and setting a new _platform_gnu and related vars. - -The default values of _host_cpu, _host_vendor and _host_os are changed to -reference either the automatically determined _target_... or _platform_... -values. The macros file uses the configure time defaults in _platform_... -versions have not been defined. This preserves existing behavior, but -ensures reasonable defaults are always available. 
- -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/lib/rpmrc.c -=================================================================== ---- rpm-5.4.14.orig/lib/rpmrc.c -+++ rpm-5.4.14/lib/rpmrc.c -@@ -328,10 +328,15 @@ static void setDefaults(void) - /*@modifies rpmGlobalMacroContext, internalState @*/ - { - --#if defined(RPM_VENDOR_WINDRIVER) -+#if defined(RPM_VENDOR_WINDRIVER) || defined(RPM_VENDOR_OE) - addMacro(NULL, "_usrlibrpm", NULL, __usrlibrpm, RMIL_DEFAULT); - addMacro(NULL, "_etcrpm", NULL, __etcrpm, RMIL_DEFAULT); - addMacro(NULL, "_vendor", NULL, "%{?_host_vendor}%{!?_host_vendor:wrs}", RMIL_DEFAULT); -+ -+ addMacro(NULL, "_host_cpu", NULL, "%{?_platform_cpu}%{!?_platform_cpu:%{?_target_cpu}}", RMIL_DEFAULT); -+ addMacro(NULL, "_host_vendor", NULL, "%{?_platform_vendor}%{!?_platform_cpu:%{?_target_vendor}}", RMIL_DEFAULT); -+ addMacro(NULL, "_host_os", NULL, "%{?_platform_os}%{!?_platform_os:%{?_target_os}}", RMIL_DEFAULT); -+ addMacro(NULL, "_host_gnu", NULL, "%{?_platform_gnu}%{!?_platform_gnu:%{?_gnu}}", RMIL_DEFAULT); - #endif - - addMacro(NULL, "_usr", NULL, USRPREFIX, RMIL_DEFAULT); -@@ -487,9 +492,22 @@ static rpmRC rpmPlatform(const char * pl - } - - if (!parseCVOG(p, &cvog) && cvog != NULL) { -+#if defined(RPM_VENDOR_OE) -+ char * _gnu = NULL; -+ -+ addMacro(NULL, "_platform_cpu", NULL, cvog->cpu, -1); -+ addMacro(NULL, "_platform_vendor", NULL, cvog->vendor, -1); -+ addMacro(NULL, "_platform_os", NULL, cvog->os, -1); -+ -+ if (cvog->gnu && cvog->gnu[0] != '\0') -+ _gnu = rpmExpand("-", cvog->gnu, NULL); -+ -+ addMacro(NULL, "_platform_gnu", NULL, (_gnu ? _gnu : ""), -1); -+#else - addMacro(NULL, "_host_cpu", NULL, cvog->cpu, -1); - addMacro(NULL, "_host_vendor", NULL, cvog->vendor, -1); - addMacro(NULL, "_host_os", NULL, cvog->os, -1); -+#endif - } - - #if defined(RPM_VENDOR_OPENPKG) /* explicit-platform */ -Index: rpm-5.4.14/macros/macros.in -=================================================================== ---- rpm-5.4.14.orig/macros/macros.in -+++ rpm-5.4.14/macros/macros.in -@@ -900,9 +900,9 @@ $_arbitrary_tags_tests Foo:Bar - %_os @RPMCANONOS@ - %_gnu @RPMCANONGNU@ - --%_host_platform %{_host_cpu}-%{_host_vendor}-%{_host_os}%{?_gnu} --%_build_platform %{_build_cpu}-%{_build_vendor}-%{_build_os}%{?_gnu} --%_target_platform %{_target_cpu}-%{_target_vendor}-%{_target_os}%{?_gnu} -+%_host_platform %{_host_cpu}-%{_host_vendor}-%{_host_os}%{?_host_gnu}%{!?_host_gnu:%{?_gnu}} -+%_build_platform %{_build_cpu}-%{_build_vendor}-%{_build_os}%{?_host_gnu}%{!?_host_gnu:%{?_gnu}} -+%_target_platform %{_target_cpu}-%{_target_vendor}-%{_target_os}%{?_host_gnu}%{!?_host_gnu:%{?_gnu}} - - #============================================================================== - # ---- configure macros. 
-@@ -945,9 +945,10 @@ $_arbitrary_tags_tests Foo:Bar - %_build_os %{_host_os} - %_host @host@ - %_host_alias @host_alias@%{nil} --%_host_cpu @host_cpu@ --%_host_vendor @host_vendor@ --%_host_os @host_os@ -+%_host_cpu %{?_platform_cpu}%{!?_platform_cpu:%{_arch}} -+%_host_vendor %{?_platform_vendor}%{!?_platform_vendor:%{_vendor}} -+%_host_os %{?_platform_os}%{!?_platform_os:%{_os}} -+%_host_gnu %{?_platform_gnu}%{!?_platform_gnu:%{_gnu}} - %_target %{_host} - %_target_alias %{_host_alias} - %_target_cpu %{_host_cpu} -Index: rpm-5.4.14/python/rpmmodule.c -=================================================================== ---- rpm-5.4.14.orig/python/rpmmodule.c -+++ rpm-5.4.14/python/rpmmodule.c -@@ -65,8 +65,8 @@ static PyObject * archScore(PyObject * s - if (!PyArg_ParseTupleAndKeywords(args, kwds, "s", kwlist, &arch)) - return NULL; - --#if defined(RPM_VENDOR_WINDRIVER) -- platform = rpmExpand(arch, "-%{_host_vendor}", "-%{_host_os}%{?_gnu}", NULL); -+#if defined(RPM_VENDOR_WINDRIVER) || defined(RPM_VENDOR_OE) -+ platform = rpmExpand(arch, "-%{_host_vendor}", "-%{_host_os}%{?_host_gnu}%{!?_host_gnu:%{?_gnu}}", NULL); - #else - platform = rpmExpand(arch, "-", "%{_vendor}", "-", "%{_os}", NULL); - #endif diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-py-init.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-py-init.patch deleted file mode 100644 index 92ef1dc50..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-py-init.patch +++ /dev/null @@ -1,29 +0,0 @@ -python/rpmmodules.c: Change the way the python module loads the RPM config - -In order to support the RPM_VENDOR_WINDRIVER enhancement of dynamic -runtime relocation paths, we need to call rpmcliInit instead of -rpmReadConfigFiles. The rpmcliInit will end up calling rpmReadConfigFiles -after the necessary relocation processing (if enabled). - -Code derived from changes suggested by Paul Eggleton. - -Upstream-Status: Pending - -Signed-off-by: Mark Hatle - -Index: rpm/python/rpmmodule.c -=================================================================== ---- rpm.orig/python/rpmmodule.c -+++ rpm/python/rpmmodule.c -@@ -382,9 +382,8 @@ static int initModule(PyObject *m) - /* XXX add --noparentdirs --nolinktos to rpmtsCheck() */ - global_depFlags = (RPMDEPS_FLAG_NOPARENTDIRS | RPMDEPS_FLAG_NOLINKTOS); - -- /* failure to initialize rpm (crypto and all) is rather fatal too... */ -- if (rpmReadConfigFiles(NULL, NULL) == -1) -- return 0; -+ const char *argv[1] = {"rpmmodule", 0}; -+ rpmcliInit(1, argv, NULL); - - d = PyModule_GetDict(m); - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-AddErase.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-AddErase.patch deleted file mode 100644 index df6f4722e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-AddErase.patch +++ /dev/null @@ -1,35 +0,0 @@ -rpm/python: The RPM5 API requires a hdrNum to be passed in - -The former behavior of passing in -1 as the hdrNum resulting in erase -operations that did not complete, but also did not error. Changing to -using the header instance resolves this problem. 
- -Upstream-Status: Pending - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.15/python/rpmts-py.c -=================================================================== ---- rpm-5.4.15.orig/python/rpmts-py.c -+++ rpm-5.4.15/python/rpmts-py.c -@@ -241,12 +241,19 @@ static PyObject * - rpmts_AddErase(rpmtsObject * s, PyObject * args) - { - Header h; -+ uint32_t hdrNum; - - if (!PyArg_ParseTuple(args, "O&:AddErase", hdrFromPyObject, &h)) - return NULL; - --SPEW((stderr, "*** %s(%p,%p) ts %p\n", __FUNCTION__, s, h, s->ts)); -+ hdrNum = headerGetInstance(h); -+ -+SPEW((stderr, "*** %s(%p,%p) ts %p hdrNum %ld\n", __FUNCTION__, s, h, s->ts, hdrNum)); -+#ifdef REFERENCE /* this doesn't work, RPM5 requires a unique hdrNum */ - return PyBool_FromLong(rpmtsAddEraseElement(s->ts, h, -1) == 0); -+#else -+ return PyBool_FromLong(rpmtsAddEraseElement(s->ts, h, hdrNum) == 0); -+#endif - } - - static int diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-restore-origin.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-restore-origin.patch deleted file mode 100644 index 7a473db1a..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-restore-origin.patch +++ /dev/null @@ -1,49 +0,0 @@ -Fix an issue where the PACKAGEORIGIN is not properly stored. - -Restore the rpmtsCallback fdSetOpen call and related code. - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm/python/rpmts-py.c -=================================================================== ---- rpm.orig/python/rpmts-py.c -+++ rpm/python/rpmts-py.c -@@ -672,6 +672,8 @@ rpmtsCallback(const void * hd, const rpm - Header h = (Header) hd; - struct rpmtsCallbackType_s * cbInfo = data; - PyObject * pkgObj = (PyObject *) pkgKey; -+ PyObject * oh = NULL; -+ const char * origin = NULL; - PyObject * args, * result; - static FD_t fd; - -@@ -693,8 +695,16 @@ rpmtsCallback(const void * hd, const rpm - pkgObj = Py_None; - Py_INCREF(pkgObj); - } -- } else -+ } else { - Py_INCREF(pkgObj); -+ /* XXX yum has (h, rpmloc) tuple as pkgKey. Extract the path. */ -+ if (!(PyTuple_Check(pkgObj) && PyArg_ParseTuple(pkgObj, "|Os", &oh, &origin))) -+ origin = NULL; -+ /* XXX clean up the path, yum paths start "//..." */ -+ if (origin && origin[0] == '/' && origin[1] == '/') -+ origin++; -+ } -+ - - PyEval_RestoreThread(cbInfo->_save); - -@@ -723,6 +733,9 @@ SPEW((stderr, "\t%p = fdDup(%d)\n", fd, - - fcntl(Fileno(fd), F_SETFD, FD_CLOEXEC); - -+ if (origin != NULL) -+ (void) fdSetOpen(fd, origin, 0, 0); -+ - return fd; - } else - if (what == RPMCALLBACK_INST_CLOSE_FILE) { diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-tagname.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-tagname.patch deleted file mode 100644 index dfb551377..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-python-tagname.patch +++ /dev/null @@ -1,24 +0,0 @@ -rpm-python-module: Change the extension tag from PyCObject to PyInt - -Use the tagValue to determine the custom PyInt value to use for the extension -tag. Without this, any custom tag extensions will be returned in a format -that the tagNumFromPyObject and related functions like hdr_subscript will -failed to process. 
Usually the failure is error: expected a string or integer - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.15/python/rpmmodule.c -=================================================================== ---- rpm-5.4.15.orig/python/rpmmodule.c -+++ rpm-5.4.15/python/rpmmodule.c -@@ -316,7 +316,7 @@ static void addRpmTags(PyObject *module) - { - if (ext->name == NULL || ext->type != HEADER_EXT_TAG) - continue; -- PyDict_SetItemString(d, (char *) ext->name, to=PyCObject_FromVoidPtr((void *)ext, NULL)); -+ PyDict_SetItemString(d, (char *) ext->name, to=PyInt_FromLong(tagValue(ext->name))); - Py_XDECREF(to); - PyDict_SetItem(dict, to, o=PyString_FromString(ext->name + 7)); - Py_XDECREF(o); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-realpath.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-realpath.patch deleted file mode 100644 index a810123ba..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-realpath.patch +++ /dev/null @@ -1,24 +0,0 @@ -chroot: realpath is required before expanding _dbpath - -_usr turned out to be a relative path to support dyanmic config, but it's -being used somewhere as a indicator to locate substrings, so we must get -the real path of it in advance. - -Upstream-Status: Inappropriate (OpenEmbedded specific) - -Signed-off-by: Ming Liu - -diff -urpN a/rpmio/rpmrpc.c b/rpmio/rpmrpc.c ---- a/rpmio/rpmrpc.c -+++ b/rpmio/rpmrpc.c -@@ -257,7 +257,9 @@ int Open(const char * path, int flags, m - /* XXX if the open(2) fails, try to strip a possible chroot(2) prefix. */ - if (fdno < 0 && errno == ENOENT) { - const char *dbpath = rpmExpand("%{?_dbpath}/", NULL); -- const char * fn = strstr(path + 1, dbpath); -+ char resolved_dbpath[PATH_MAX]; -+ realpath(dbpath, resolved_dbpath); -+ const char * fn = strstr(path + 1, resolved_dbpath); - if (fn) - fdno = open(fn, flags, mode); - dbpath = _free(dbpath); diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-reloc-macros.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-reloc-macros.patch deleted file mode 100644 index 59fe524df..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-reloc-macros.patch +++ /dev/null @@ -1,31 +0,0 @@ -macros: Use dyanmic config vs hard coded settings - -When the dynamic, runtime relocation is enables we need to make sure that the -RPM macro file does not override the dynamic settings. Fix this by forcing the -dynamic version to be used in all cases. - -Upstream-Status: Inappropriate (OpenEmbedded specific) - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/macros/macros.in -=================================================================== ---- rpm-5.4.14.orig/macros/macros.in -+++ rpm-5.4.14/macros/macros.in -@@ -27,11 +27,12 @@ - #============================================================================== - # ---- filesystem macros. - # --%_usr @usrprefix@ -+%_usr %{_usrlibrpm}/../.. 
- %_usrsrc %{_usr}/src --%_var @varprefix@ --%_usrlibrpm @USRLIBRPM@ --%_etcrpm @SYSCONFIGDIR@ -+%_var %{_usr}/../var -+# The dynamic relocation code sets the following two items -+#%_usrlibrpm @USRLIBRPM@ -+#%_etcrpm @SYSCONFIGDIR@ - - %__objext @objext@ - %__libext @libext@ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-resolvedep.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-resolvedep.patch deleted file mode 100644 index 526106d1e..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-resolvedep.patch +++ /dev/null @@ -1,40 +0,0 @@ -lib/rpmts.c: Ensure that we check both providename and filepaths - -When looking up a missing dependencies, such as /bin/sh, we need to lookup -in both the providename and filepaths DB, not just the filepaths DB. - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/lib/rpmts.c -=================================================================== ---- rpm-5.4.14.orig/lib/rpmts.c -+++ rpm-5.4.14/lib/rpmts.c -@@ -410,8 +410,8 @@ fprintf(stderr, "--> %s(%p,%p,%p)\n", __ - if (sdb == NULL) - continue; - -- /* Look for a matching Provides: in suggested universe. */ -- rpmtag = (*keyp == '/' ? RPMTAG_FILEPATHS : RPMTAG_PROVIDENAME); -+ rpmtag = RPMTAG_PROVIDENAME; -+ do { - mi = rpmmiInit(sdb, rpmtag, keyp, keylen); - while ((h = rpmmiNext(mi)) != NULL) { - size_t hnamelen; -@@ -446,6 +446,15 @@ fprintf(stderr, "--> %s(%p,%p,%p)\n", __ - bhnamelen = hnamelen; - } - mi = rpmmiFree(mi); -+ -+ if (bh == NULL && *keyp == '/' && rpmtag == RPMTAG_PROVIDENAME) { -+ rpmtag = RPMTAG_FILEPATHS; -+ continue; -+ } -+ -+ break; -+ } while (1); -+ - } - - /* Is there a suggested resolution? */ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmdb-grammar.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmdb-grammar.patch deleted file mode 100644 index 71dae4939..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmdb-grammar.patch +++ /dev/null @@ -1,124 +0,0 @@ -Disable various items that do not cross compile well. 
- -Upstream-Status: Inappropriate [Configuration] - -Signed-off-by: Mark Hatle - -Index: rpm/rpmdb/Makefile.am -=================================================================== ---- rpm.orig/rpmdb/Makefile.am -+++ rpm/rpmdb/Makefile.am -@@ -34,10 +34,10 @@ EXTRA_DIST = \ - db3.c sqlite.c db_emu.h librpmdb.vers bdb.sql libsqldb.c \ - logio.awk logio.src logio_recover_template logio_template logio.c \ - logio_rec.c logio_auto.c logio_autop.c logio_auto.h \ -- qf.l qf.y qf.inp tqf.l tqf.y tqf.inp grammar.y scanner.l json1.js -+ tqf.l tqf.y tqf.inp grammar.y scanner.l json1.js - --EXTRA_PROGRAMS = qfcalc qfgraph logio tjfn tqf # tbdb --noinst_PROGRAMS = json -+EXTRA_PROGRAMS = qfcalc qfgraph logio # tjfn tqf tbdb -+noinst_PROGRAMS = # json - - RPMMISC_LDADD_COMMON = \ - $(top_builddir)/misc/librpmmisc.la \ -@@ -321,54 +321,39 @@ BUILT_SOURCES += .syntastic_c_config - .syntastic_c_config: Makefile - @echo $(COMPILE) | tr ' ' '\n' | sed -e '1d' > $@ - --tjfn_SOURCES = tjfn.c --tjfn_LDADD = $(mylibs) -- --LEX = flex --LFLAGS= -d -T -v -8 -b --yylineno --reentrant --bison-bridge --perf-report --YACC = bison --YFLAGS= -Dapi.pure -t -d -v --report=all -- --BUILT_SOURCES += Jgrammar.c Jgrammar.h Jscanner.c --Jgrammar.c: grammar.y -- $(YACC) $(YFLAGS) -o $@ $< --Jscanner.c: scanner.l -- $(LEX) -R -o $@ $< --json_SOURCES = Jgrammar.c Jscanner.c json.c -- --testjson: json1.js json -- ./json json1.js -- --BUILT_SOURCES += Qgrammar.c Qgrammar.h Qscanner.c --Qgrammar.c: qf.y -- $(YACC) $(YFLAGS) -o $@ $< --Qscanner.c: qf.l -- $(LEX) -R -o $@ $< --qfcalc_SOURCES = Qgrammar.c Qscanner.c interpreter.c --qfgraph_SOURCES = Qgrammar.c Qscanner.c graph.c -- --testqf: qfcalc qfgraph -- ./qfcalc < qf.inp -- ./qfgraph < qf.inp -- --BUILT_SOURCES += Tgrammar.c Tgrammar.h Tscanner.c --Tgrammar.c Tgrammar.h: tqf.y -- $(YACC) $(YFLAGS) -o $@ $< --Tscanner.c Tscanner.h: tqf.l -- $(LEX) --prefix="Tyy" $(LFLAGS) -o $@ $< --tqf_SOURCES = Tgrammar.c Tscanner.c tgraph.c --tqf_CFLAGS = $(CFLAGS) -fsanitize=address # -DTSCANNER_MAIN --tqf_LDADD = ../lib/librpm.la \ -- ./librpmdb.la \ -- ../popt/libpopt.la -- --testdir = $(abs_top_builddir)/tests --foo: tqf -- -../libtool --mode=execute \ -- ./tqf \ -- --dbpath=$(testdir) \ -- -r $(testdir)/fodder/*.rpm \ -- $(testdir)/fodder/fmtmod.qf -+#tjfn_SOURCES = tjfn.c -+#tjfn_LDADD = $(mylibs) -+# -+#LFLAGS= -d -T -v -8 -b --yylineno --reentrant --bison-bridge --perf-report -+# -+#BUILT_SOURCES += Jgrammar.c Jgrammar.h Jscanner.c -+#Jgrammar.c Jgrammar.h: grammar.y -+# $(YACC) $(YFLAGS) -t -d -v -o $@ $< -+#Jscanner.c: scanner.l -+# $(LEX) -R -o $@ $< -+#json_SOURCES = Jgrammar.c Jscanner.c json.c -+# -+#testjson: json1.js json -+# ./json json1.js -+# -+#BUILT_SOURCES += Tgrammar.c Tgrammar.h Tscanner.c -+#Tgrammar.c Tgrammar.h: tqf.y -+# $(YACC) $(YFLAGS) -t -d -v -o $@ $< -+#Tscanner.c Tscanner.h: tqf.l -+# $(LEX) --prefix="Tyy" $(LFLAGS) -o $@ $< -+#tqf_SOURCES = Tgrammar.c Tscanner.c tgraph.c -+#tqf_CFLAGS = $(CFLAGS) -fsanitize=address # -DTSCANNER_MAIN -+#tqf_LDADD = ../lib/librpm.la \ -+# ./librpmdb.la \ -+# ../popt/libpopt.la -+ -+#testdir = $(abs_top_builddir)/tests -+#foo: tqf -+# -../libtool --mode=execute \ -+# ./tqf \ -+# --dbpath=$(testdir) \ -+# -r $(testdir)/fodder/*.rpm \ -+# $(testdir)/fodder/fmtmod.qf - - #tbdb_SOURCES = tbdb.c bdb.c - #tbdb_LDADD = $(mylibs) -Index: rpm/configure.ac -=================================================================== ---- rpm.orig/configure.ac -+++ rpm/configure.ac -@@ -119,6 +119,7 @@ AC_PROG_MAKE_SET - AC_PROG_LIBTOOL - 
AC_PROG_RANLIB - AC_PROG_YACC -+AM_PROG_LEX - - AC_PATH_PROG(AS, as, as) - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmfc.c-fix-for-N32-MIPS64.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmfc.c-fix-for-N32-MIPS64.patch deleted file mode 100644 index b9a2cbce2..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmfc.c-fix-for-N32-MIPS64.patch +++ /dev/null @@ -1,34 +0,0 @@ -From 16dc683aa50be9789d1674734b06a8a955ff22ad Mon Sep 17 00:00:00 2001 -From: Robert Yang -Date: Thu, 26 Nov 2015 01:36:40 -0800 -Subject: [PATCH] lib/rpmfc.c: fix for N32 MIPS64 - -It is 'N32 MIPS64', not 'N32 MIPS32' as command file shows: -$ file image/usr/bin/getent -getent: ELF 32-bit MSB executable, MIPS, N32 MIPS64 [snip] - -And "rpm -qp --filecolor" was wrong (it was 1, but should be 4). - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Robert Yang ---- - lib/rpmfc.c | 2 +- - 1 file changed, 1 insertion(+), 1 deletion(-) - -diff --git a/lib/rpmfc.c b/lib/rpmfc.c -index fde00c7..49779f8 100644 ---- a/lib/rpmfc.c -+++ b/lib/rpmfc.c -@@ -575,7 +575,7 @@ static struct rpmfcTokens_s rpmfcTokens[] = { - { " not stripped", RPMFC_NOTSTRIPPED }, - { " archive", RPMFC_ARCHIVE }, - -- { "MIPS, N32 MIPS32", RPMFC_ELFMIPSN32|RPMFC_INCLUDE }, -+ { "MIPS, N32 MIPS64", RPMFC_ELFMIPSN32|RPMFC_INCLUDE }, - { "ELF 32-bit", RPMFC_ELF32|RPMFC_INCLUDE }, - { "ELF 64-bit", RPMFC_ELF64|RPMFC_INCLUDE }, - --- -1.7.9.5 - diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmio-headers.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmio-headers.patch deleted file mode 100644 index 93645766c..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmio-headers.patch +++ /dev/null @@ -1,19 +0,0 @@ -Fix a typo in the rpmio Makefile.am - -Upstream-Status: Submitted [RPM5 maintainer] - -Signed-off-by: Mark Hatle - -Index: rpm/rpmio/Makefile.am -=================================================================== ---- rpm.orig/rpmio/Makefile.am -+++ rpm/rpmio/Makefile.am -@@ -121,7 +121,7 @@ luaLPATHdir = ${pkgsharedir)/lua - - pkgincdir = $(pkgincludedir)$(WITH_PATH_VERSIONED_SUFFIX) - pkginc_HEADERS = argv.h mire.h rpmzlog.h yarn.h \ -- rpmbf.h rpmcb.h rpmio.h rpmlog.h rpmiotypes.h rpmmacro.h -+ rpmbf.h rpmcb.h rpmio.h rpmlog.h rpmiotypes.h rpmmacro.h \ - rpmpgp.h rpmsw.h rpmutil.h - noinst_HEADERS = \ - ar.h bcon.h bson.h cpio.h crc.h envvar.h fnmatch.h fts.h glob.h iosm.h \ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmpgp-popt.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmpgp-popt.patch deleted file mode 100644 index 915d7efe6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-rpmpgp-popt.patch +++ /dev/null @@ -1,26 +0,0 @@ -rpmpgp.h: We do not require the popt header in order to use rpmpgp functions - -This can cause failures if the internal libpopt is used, as it's header is -not exported. 
- -Upstream-Status: Pending - -Signed-off-by: Mark Hatle - -Index: rpm-5.4.14/rpmio/rpmpgp.h -=================================================================== ---- rpm-5.4.14.orig/rpmio/rpmpgp.h -+++ rpm-5.4.14/rpmio/rpmpgp.h -@@ -11,11 +11,11 @@ - */ - - #include --#include - #include - #include - - #if defined(_RPMPGP_INTERNAL) -+#include - #include - - /*@unchecked@*/ diff --git a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-scriptletexechelper.patch b/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-scriptletexechelper.patch deleted file mode 100644 index b55fe22c6..000000000 --- a/import-layers/yocto-poky/meta/recipes-devtools/rpm/rpm/rpm-scriptletexechelper.patch +++ /dev/null @@ -1,159 +0,0 @@ -Enable a cross-install scriptlet helper. - -The helper is called from outside of the chroot with the arguments: - - + diff --git a/import-layers/yocto-poky/scripts/lib/build_perf/html/report.html b/import-layers/yocto-poky/scripts/lib/build_perf/html/report.html new file mode 100644 index 000000000..165cbb811 --- /dev/null +++ b/import-layers/yocto-poky/scripts/lib/build_perf/html/report.html @@ -0,0 +1,206 @@ + + + +{# Scripts, for visualization#} + + + + +{# Render measurement result charts #} +{% for test in test_data %} + {% if test.status == 'SUCCESS' %} + {% for measurement in test.measurements %} + {% set chart_elem_id = test.name + '_' + measurement.name + '_chart' %} + {% include 'measurement_chart.html' %} + {% endfor %} + {% endif %} +{% endfor %} + + + +{# Styles #} + + +{{ title }} + + +{% macro poky_link(commit) -%} + {{ commit[0:11] }} +{%- endmacro %} + +
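The Jinja placeholders above ({{ title }}, metadata.items(), test_data) suggest the context the build-perf report generator feeds into this template. Purely as a hedged sketch, assuming jinja2 is available and that the context carries `title`, `metadata` and `test_data` in the shapes the placeholders imply (the commit values below are made up):

```python
# Hypothetical rendering of the report template above with jinja2.
# Context keys mirror the placeholders in the template; values are invented
# and only show the expected shapes, not real build-perf output.
from jinja2 import Environment, FileSystemLoader

env = Environment(loader=FileSystemLoader('scripts/lib/build_perf/html'))
template = env.get_template('report.html')

context = {
    'title': 'Build performance test report',
    'metadata': {'commit': {'title': 'Commit',
                            'value': 'abcdef1234567',
                            'value_old': '1234567abcdef'}},
    'test_data': [],   # list of per-test dicts as produced by report.py
}
print(template.render(**context))
```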
+ {# Test metadata #} +

General

+
+ + + + + + + {% for key, item in metadata.items() %} + + + {%if key == 'commit' %} + + + {% else %} + + + {% endif %} + + {% endfor %} +
Current commitComparing with
{{ item.title }}{{ poky_link(item.value) }}{{ poky_link(item.value_old) }}{{ item.value }}{{ item.value_old }}
+ + {# Test result summary #} +

Test result summary

+
+ + {% for test in test_data %} + {% if loop.index is even %} + {% set row_style = 'style="background-color: #f2f2f2"' %} + {% else %} + {% set row_style = 'style="background-color: #ffffff"' %} + {% endif %} + + {% if test.status == 'SUCCESS' %} + {% for measurement in test.measurements %} + {# add empty cell in place of the test name#} + {% if loop.index > 1 %}{% endif %} + {% if measurement.absdiff > 0 %} + {% set result_style = "color: red" %} + {% elif measurement.absdiff == measurement.absdiff %} + {% set result_style = "color: green" %} + {% else %} + {% set result_style = "color: orange" %} + {%endif %} + + + + + + {% endfor %} + {% else %} + + + {% endif %} + + {% endfor %} +
{{ test.name }}: {{ test.description }}{{ measurement.description }}{{ measurement.value.mean }}{{ measurement.absdiff_str }}{{ measurement.reldiff }}
{{test.status }}
+ + {# Detailed test results #} + {% for test in test_data %} +

{{ test.name }}: {{ test.description }}

+
+ {% if test.status == 'SUCCESS' %} + {% for measurement in test.measurements %} +
+

{{ measurement.description }}

+
+ {{ measurement.value.mean }} + + {% if measurement.absdiff > 0 %} + + {% elif measurement.absdiff == measurement.absdiff %} + + {% else %} + + {% endif %} + {{ measurement.absdiff_str }} ({{measurement.reldiff}}) + +
+ + + + + +
+ {# Linechart #} +
+
+ {# Measurement statistics #} + + + + + + + + + + + + + + +
Test runs{{ measurement.value.sample_cnt }}
-/+-{{ measurement.value.minus }} / +{{ measurement.value.plus }}
Min{{ measurement.value.min }}
Max{{ measurement.value.max }}
Stdev{{ measurement.value.stdev }}
+
+
+ {% endfor %} + {# Unsuccessful test #} + {% else %} + {{ test.status }} + {% if test.err_type %}({{ test.err_type }}){% endif %} + +
{{ test.message }}
+ {% endif %} + {% endfor %} +
+ + diff --git a/import-layers/yocto-poky/scripts/lib/build_perf/report.py b/import-layers/yocto-poky/scripts/lib/build_perf/report.py new file mode 100644 index 000000000..eb00ccca2 --- /dev/null +++ b/import-layers/yocto-poky/scripts/lib/build_perf/report.py @@ -0,0 +1,342 @@ +# +# Copyright (c) 2017, Intel Corporation. +# +# This program is free software; you can redistribute it and/or modify it +# under the terms and conditions of the GNU General Public License, +# version 2, as published by the Free Software Foundation. +# +# This program is distributed in the hope it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for +# more details. +# +"""Handling of build perf test reports""" +from collections import OrderedDict, Mapping +from datetime import datetime, timezone +from numbers import Number +from statistics import mean, stdev, variance + + +def isofmt_to_timestamp(string): + """Convert timestamp string in ISO 8601 format into unix timestamp""" + if '.' in string: + dt = datetime.strptime(string, '%Y-%m-%dT%H:%M:%S.%f') + else: + dt = datetime.strptime(string, '%Y-%m-%dT%H:%M:%S') + return dt.replace(tzinfo=timezone.utc).timestamp() + + +def metadata_xml_to_json(elem): + """Convert metadata xml into JSON format""" + assert elem.tag == 'metadata', "Invalid metadata file format" + + def _xml_to_json(elem): + """Convert xml element to JSON object""" + out = OrderedDict() + for child in elem.getchildren(): + key = child.attrib.get('name', child.tag) + if len(child): + out[key] = _xml_to_json(child) + else: + out[key] = child.text + return out + return _xml_to_json(elem) + + +def results_xml_to_json(elem): + """Convert results xml into JSON format""" + rusage_fields = ('ru_utime', 'ru_stime', 'ru_maxrss', 'ru_minflt', + 'ru_majflt', 'ru_inblock', 'ru_oublock', 'ru_nvcsw', + 'ru_nivcsw') + iostat_fields = ('rchar', 'wchar', 'syscr', 'syscw', 'read_bytes', + 'write_bytes', 'cancelled_write_bytes') + + def _read_measurement(elem): + """Convert measurement to JSON""" + data = OrderedDict() + data['type'] = elem.tag + data['name'] = elem.attrib['name'] + data['legend'] = elem.attrib['legend'] + values = OrderedDict() + + # SYSRES measurement + if elem.tag == 'sysres': + for subel in elem: + if subel.tag == 'time': + values['start_time'] = isofmt_to_timestamp(subel.attrib['timestamp']) + values['elapsed_time'] = float(subel.text) + elif subel.tag == 'rusage': + rusage = OrderedDict() + for field in rusage_fields: + if 'time' in field: + rusage[field] = float(subel.attrib[field]) + else: + rusage[field] = int(subel.attrib[field]) + values['rusage'] = rusage + elif subel.tag == 'iostat': + values['iostat'] = OrderedDict([(f, int(subel.attrib[f])) + for f in iostat_fields]) + elif subel.tag == 'buildstats_file': + values['buildstats_file'] = subel.text + else: + raise TypeError("Unknown sysres value element '{}'".format(subel.tag)) + # DISKUSAGE measurement + elif elem.tag == 'diskusage': + values['size'] = int(elem.find('size').text) + else: + raise Exception("Unknown measurement tag '{}'".format(elem.tag)) + data['values'] = values + return data + + def _read_testcase(elem): + """Convert testcase into JSON""" + assert elem.tag == 'testcase', "Expecting 'testcase' element instead of {}".format(elem.tag) + + data = OrderedDict() + data['name'] = elem.attrib['name'] + data['description'] = elem.attrib['description'] + data['status'] = 'SUCCESS' + data['start_time'] = 
isofmt_to_timestamp(elem.attrib['timestamp']) + data['elapsed_time'] = float(elem.attrib['time']) + measurements = OrderedDict() + + for subel in elem.getchildren(): + if subel.tag == 'error' or subel.tag == 'failure': + data['status'] = subel.tag.upper() + data['message'] = subel.attrib['message'] + data['err_type'] = subel.attrib['type'] + data['err_output'] = subel.text + elif subel.tag == 'skipped': + data['status'] = 'SKIPPED' + data['message'] = subel.text + else: + measurements[subel.attrib['name']] = _read_measurement(subel) + data['measurements'] = measurements + return data + + def _read_testsuite(elem): + """Convert suite to JSON""" + assert elem.tag == 'testsuite', \ + "Expecting 'testsuite' element instead of {}".format(elem.tag) + + data = OrderedDict() + if 'hostname' in elem.attrib: + data['tester_host'] = elem.attrib['hostname'] + data['start_time'] = isofmt_to_timestamp(elem.attrib['timestamp']) + data['elapsed_time'] = float(elem.attrib['time']) + tests = OrderedDict() + + for case in elem.getchildren(): + tests[case.attrib['name']] = _read_testcase(case) + data['tests'] = tests + return data + + # Main function + assert elem.tag == 'testsuites', "Invalid test report format" + assert len(elem) == 1, "Too many testsuites" + + return _read_testsuite(elem.getchildren()[0]) + + +def aggregate_metadata(metadata): + """Aggregate metadata into one, basically a sanity check""" + mutable_keys = ('pretty_name', 'version_id') + + def aggregate_obj(aggregate, obj, assert_str=True): + """Aggregate objects together""" + assert type(aggregate) is type(obj), \ + "Type mismatch: {} != {}".format(type(aggregate), type(obj)) + if isinstance(obj, Mapping): + assert set(aggregate.keys()) == set(obj.keys()) + for key, val in obj.items(): + aggregate_obj(aggregate[key], val, key not in mutable_keys) + elif isinstance(obj, list): + assert len(aggregate) == len(obj) + for i, val in enumerate(obj): + aggregate_obj(aggregate[i], val) + elif not isinstance(obj, str) or (isinstance(obj, str) and assert_str): + assert aggregate == obj, "Data mismatch {} != {}".format(aggregate, obj) + + if not metadata: + return {} + + # Do the aggregation + aggregate = metadata[0].copy() + for testrun in metadata[1:]: + aggregate_obj(aggregate, testrun) + aggregate['testrun_count'] = len(metadata) + return aggregate + + +def aggregate_data(data): + """Aggregate multiple test results JSON structures into one""" + + mutable_keys = ('status', 'message', 'err_type', 'err_output') + + class SampleList(list): + """Container for numerical samples""" + pass + + def new_aggregate_obj(obj): + """Create new object for aggregate""" + if isinstance(obj, Number): + new_obj = SampleList() + new_obj.append(obj) + elif isinstance(obj, str): + new_obj = obj + else: + # Lists and and dicts are kept as is + new_obj = obj.__class__() + aggregate_obj(new_obj, obj) + return new_obj + + def aggregate_obj(aggregate, obj, assert_str=True): + """Recursive "aggregation" of JSON objects""" + if isinstance(obj, Number): + assert isinstance(aggregate, SampleList) + aggregate.append(obj) + return + + assert type(aggregate) == type(obj), \ + "Type mismatch: {} != {}".format(type(aggregate), type(obj)) + if isinstance(obj, Mapping): + for key, val in obj.items(): + if not key in aggregate: + aggregate[key] = new_aggregate_obj(val) + else: + aggregate_obj(aggregate[key], val, key not in mutable_keys) + elif isinstance(obj, list): + for i, val in enumerate(obj): + if i >= len(aggregate): + aggregate[key] = new_aggregate_obj(val) + else: + 
aggregate_obj(aggregate[i], val) + elif isinstance(obj, str): + # Sanity check for data + if assert_str: + assert aggregate == obj, "Data mismatch {} != {}".format(aggregate, obj) + else: + raise Exception("BUG: unable to aggregate '{}' ({})".format(type(obj), str(obj))) + + if not data: + return {} + + # Do the aggregation + aggregate = data[0].__class__() + for testrun in data: + aggregate_obj(aggregate, testrun) + return aggregate + + +class MeasurementVal(float): + """Base class representing measurement values""" + gv_data_type = 'number' + + def gv_value(self): + """Value formatting for visualization""" + if self != self: + return "null" + else: + return self + + +class TimeVal(MeasurementVal): + """Class representing time values""" + quantity = 'time' + gv_title = 'elapsed time' + gv_data_type = 'timeofday' + + def hms(self): + """Split time into hours, minutes and seconeds""" + hhh = int(abs(self) / 3600) + mmm = int((abs(self) % 3600) / 60) + sss = abs(self) % 60 + return hhh, mmm, sss + + def __str__(self): + if self != self: + return "nan" + hh, mm, ss = self.hms() + sign = '-' if self < 0 else '' + if hh > 0: + return '{}{:d}:{:02d}:{:02.0f}'.format(sign, hh, mm, ss) + elif mm > 0: + return '{}{:d}:{:04.1f}'.format(sign, mm, ss) + elif ss > 1: + return '{}{:.1f} s'.format(sign, ss) + else: + return '{}{:.2f} s'.format(sign, ss) + + def gv_value(self): + """Value formatting for visualization""" + if self != self: + return "null" + hh, mm, ss = self.hms() + return [hh, mm, int(ss), int(ss*1000) % 1000] + + +class SizeVal(MeasurementVal): + """Class representing time values""" + quantity = 'size' + gv_title = 'size in MiB' + gv_data_type = 'number' + + def __str__(self): + if self != self: + return "nan" + if abs(self) < 1024: + return '{:.1f} kiB'.format(self) + elif abs(self) < 1048576: + return '{:.2f} MiB'.format(self / 1024) + else: + return '{:.2f} GiB'.format(self / 1048576) + + def gv_value(self): + """Value formatting for visualization""" + if self != self: + return "null" + return self / 1024 + +def measurement_stats(meas, prefix=''): + """Get statistics of a measurement""" + if not meas: + return {prefix + 'sample_cnt': 0, + prefix + 'mean': MeasurementVal('nan'), + prefix + 'stdev': MeasurementVal('nan'), + prefix + 'variance': MeasurementVal('nan'), + prefix + 'min': MeasurementVal('nan'), + prefix + 'max': MeasurementVal('nan'), + prefix + 'minus': MeasurementVal('nan'), + prefix + 'plus': MeasurementVal('nan')} + + stats = {'name': meas['name']} + if meas['type'] == 'sysres': + val_cls = TimeVal + values = meas['values']['elapsed_time'] + elif meas['type'] == 'diskusage': + val_cls = SizeVal + values = meas['values']['size'] + else: + raise Exception("Unknown measurement type '{}'".format(meas['type'])) + stats['val_cls'] = val_cls + stats['quantity'] = val_cls.quantity + stats[prefix + 'sample_cnt'] = len(values) + + mean_val = val_cls(mean(values)) + min_val = val_cls(min(values)) + max_val = val_cls(max(values)) + + stats[prefix + 'mean'] = mean_val + if len(values) > 1: + stats[prefix + 'stdev'] = val_cls(stdev(values)) + stats[prefix + 'variance'] = val_cls(variance(values)) + else: + stats[prefix + 'stdev'] = float('nan') + stats[prefix + 'variance'] = float('nan') + stats[prefix + 'min'] = min_val + stats[prefix + 'max'] = max_val + stats[prefix + 'minus'] = val_cls(mean_val - min_val) + stats[prefix + 'plus'] = val_cls(max_val - mean_val) + + return stats + diff --git a/import-layers/yocto-poky/scripts/lib/build_perf/scrape-html-report.js 
b/import-layers/yocto-poky/scripts/lib/build_perf/scrape-html-report.js new file mode 100644 index 000000000..05a1f5700 --- /dev/null +++ b/import-layers/yocto-poky/scripts/lib/build_perf/scrape-html-report.js @@ -0,0 +1,56 @@ +var fs = require('fs'); +var system = require('system'); +var page = require('webpage').create(); + +// Examine console log for message from chart drawing +page.onConsoleMessage = function(msg) { + console.log(msg); + if (msg === "ALL CHARTS READY") { + window.charts_ready = true; + } + else if (msg.slice(0, 11) === "CHART READY") { + var chart_id = msg.split(" ")[2]; + console.log('grabbing ' + chart_id); + var png_data = page.evaluate(function (chart_id) { + var chart_div = document.getElementById(chart_id + '_png'); + return chart_div.outerHTML; + }, chart_id); + fs.write(args[2] + '/' + chart_id + '.png', png_data, 'w'); + } +}; + +// Check command line arguments +var args = system.args; +if (args.length != 3) { + console.log("USAGE: " + args[0] + " REPORT_HTML OUT_DIR\n"); + phantom.exit(1); +} + +// Open the web page +page.open(args[1], function(status) { + if (status == 'fail') { + console.log("Failed to open file '" + args[1] + "'"); + phantom.exit(1); + } +}); + +// Check status every 100 ms +interval = window.setInterval(function () { + //console.log('waiting'); + if (window.charts_ready) { + clearTimeout(timer); + clearInterval(interval); + + var fname = args[1].replace(/\/+$/, "").split("/").pop() + console.log("saving " + fname); + fs.write(args[2] + '/' + fname, page.content, 'w'); + phantom.exit(0); + } +}, 100); + +// Time-out after 10 seconds +timer = window.setTimeout(function () { + clearInterval(interval); + console.log("ERROR: timeout"); + phantom.exit(1); +}, 10000); diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/__init__.py b/import-layers/yocto-poky/scripts/lib/compatlayer/__init__.py new file mode 100644 index 000000000..7197e850e --- /dev/null +++ b/import-layers/yocto-poky/scripts/lib/compatlayer/__init__.py @@ -0,0 +1,392 @@ +# Yocto Project compatibility layer tool +# +# Copyright (C) 2017 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import re +import subprocess +from enum import Enum + +import bb.tinfoil + +class LayerType(Enum): + BSP = 0 + DISTRO = 1 + SOFTWARE = 2 + ERROR_NO_LAYER_CONF = 98 + ERROR_BSP_DISTRO = 99 + +def _get_configurations(path): + configs = [] + + for f in os.listdir(path): + file_path = os.path.join(path, f) + if os.path.isfile(file_path) and f.endswith('.conf'): + configs.append(f[:-5]) # strip .conf + return configs + +def _get_layer_collections(layer_path, lconf=None, data=None): + import bb.parse + import bb.data + + if lconf is None: + lconf = os.path.join(layer_path, 'conf', 'layer.conf') + + if data is None: + ldata = bb.data.init() + bb.parse.init_parser(ldata) + else: + ldata = data.createCopy() + + ldata.setVar('LAYERDIR', layer_path) + try: + ldata = bb.parse.handle(lconf, ldata, include=True) + except BaseException as exc: + raise LayerError(exc) + ldata.expandVarref('LAYERDIR') + + collections = (ldata.getVar('BBFILE_COLLECTIONS', True) or '').split() + if not collections: + name = os.path.basename(layer_path) + collections = [name] + + collections = {c: {} for c in collections} + for name in collections: + priority = ldata.getVar('BBFILE_PRIORITY_%s' % name, True) + pattern = ldata.getVar('BBFILE_PATTERN_%s' % name, True) + depends = ldata.getVar('LAYERDEPENDS_%s' % name, True) + collections[name]['priority'] = priority + 
collections[name]['pattern'] = pattern + collections[name]['depends'] = depends + + return collections + +def _detect_layer(layer_path): + """ + Scans layer directory to detect what type of layer + is BSP, Distro or Software. + + Returns a dictionary with layer name, type and path. + """ + + layer = {} + layer_name = os.path.basename(layer_path) + + layer['name'] = layer_name + layer['path'] = layer_path + layer['conf'] = {} + + if not os.path.isfile(os.path.join(layer_path, 'conf', 'layer.conf')): + layer['type'] = LayerType.ERROR_NO_LAYER_CONF + return layer + + machine_conf = os.path.join(layer_path, 'conf', 'machine') + distro_conf = os.path.join(layer_path, 'conf', 'distro') + + is_bsp = False + is_distro = False + + if os.path.isdir(machine_conf): + machines = _get_configurations(machine_conf) + if machines: + is_bsp = True + + if os.path.isdir(distro_conf): + distros = _get_configurations(distro_conf) + if distros: + is_distro = True + + if is_bsp and is_distro: + layer['type'] = LayerType.ERROR_BSP_DISTRO + elif is_bsp: + layer['type'] = LayerType.BSP + layer['conf']['machines'] = machines + elif is_distro: + layer['type'] = LayerType.DISTRO + layer['conf']['distros'] = distros + else: + layer['type'] = LayerType.SOFTWARE + + layer['collections'] = _get_layer_collections(layer['path']) + + return layer + +def detect_layers(layer_directories, no_auto): + layers = [] + + for directory in layer_directories: + directory = os.path.realpath(directory) + if directory[-1] == '/': + directory = directory[0:-1] + + if no_auto: + conf_dir = os.path.join(directory, 'conf') + if os.path.isdir(conf_dir): + layer = _detect_layer(directory) + if layer: + layers.append(layer) + else: + for root, dirs, files in os.walk(directory): + dir_name = os.path.basename(root) + conf_dir = os.path.join(root, 'conf') + if os.path.isdir(conf_dir): + layer = _detect_layer(root) + if layer: + layers.append(layer) + + return layers + +def _find_layer_depends(depend, layers): + for layer in layers: + for collection in layer['collections']: + if depend == collection: + return layer + return None + +def add_layer_dependencies(bblayersconf, layer, layers, logger): + def recurse_dependencies(depends, layer, layers, logger, ret = []): + logger.debug('Processing dependencies %s for layer %s.' % \ + (depends, layer['name'])) + + for depend in depends.split(): + # core (oe-core) is suppose to be provided + if depend == 'core': + continue + + layer_depend = _find_layer_depends(depend, layers) + if not layer_depend: + logger.error('Layer %s depends on %s and isn\'t found.' % \ + (layer['name'], depend)) + ret = None + continue + + # We keep processing, even if ret is None, this allows us to report + # multiple errors at once + if ret is not None and layer_depend not in ret: + ret.append(layer_depend) + + # Recursively process... + if 'collections' not in layer_depend: + continue + + for collection in layer_depend['collections']: + collect_deps = layer_depend['collections'][collection]['depends'] + if not collect_deps: + continue + ret = recurse_dependencies(collect_deps, layer_depend, layers, logger, ret) + + return ret + + layer_depends = [] + for collection in layer['collections']: + depends = layer['collections'][collection]['depends'] + if not depends: + continue + + layer_depends = recurse_dependencies(depends, layer, layers, logger, layer_depends) + + # Note: [] (empty) is allowed, None is not! + if layer_depends is None: + return False + else: + # Don't add a layer that is already present. 
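The recursive LAYERDEPENDS walk above can be pictured on a toy layer set. This is only an illustrative, self-contained sketch with made-up layer dicts standing in for detect_layers() output; it is not the code path itself:

```python
# Illustrative only: toy layer dicts standing in for detect_layers() output.
layers = [
    {'name': 'meta-a',    'collections': {'a':    {'depends': 'core b'}}},
    {'name': 'meta-b',    'collections': {'b':    {'depends': 'core'}}},
    {'name': 'meta-core', 'collections': {'core': {'depends': ''}}},
]

def find_collection(name):
    """Return the layer providing collection 'name' (same idea as _find_layer_depends)."""
    for layer in layers:
        if name in layer['collections']:
            return layer
    return None

def resolve(layer, found=None):
    """Collect the layers 'layer' transitively depends on; 'core' is assumed present."""
    found = [] if found is None else found
    for info in layer['collections'].values():
        for dep in (info['depends'] or '').split():
            if dep == 'core':
                continue
            dep_layer = find_collection(dep)
            if dep_layer and dep_layer not in found:
                found.append(dep_layer)
                resolve(dep_layer, found)
    return found

print([l['name'] for l in resolve(layers[0])])   # -> ['meta-b'] with this toy data
```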
+ added = set() + output = check_command('Getting existing layers failed.', 'bitbake-layers show-layers').decode('utf-8') + for layer, path, pri in re.findall(r'^(\S+) +([^\n]*?) +(\d+)$', output, re.MULTILINE): + added.add(path) + + for layer_depend in layer_depends: + name = layer_depend['name'] + path = layer_depend['path'] + if path in added: + continue + else: + added.add(path) + logger.info('Adding layer dependency %s' % name) + with open(bblayersconf, 'a+') as f: + f.write("\nBBLAYERS += \"%s\"\n" % path) + return True + +def add_layer(bblayersconf, layer, layers, logger): + logger.info('Adding layer %s' % layer['name']) + with open(bblayersconf, 'a+') as f: + f.write("\nBBLAYERS += \"%s\"\n" % layer['path']) + + return True + +def check_command(error_msg, cmd): + ''' + Run a command under a shell, capture stdout and stderr in a single stream, + throw an error when command returns non-zero exit code. Returns the output. + ''' + + p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + output, _ = p.communicate() + if p.returncode: + msg = "%s\nCommand: %s\nOutput:\n%s" % (error_msg, cmd, output.decode('utf-8')) + raise RuntimeError(msg) + return output + +def get_signatures(builddir, failsafe=False, machine=None): + import re + + # some recipes needs to be excluded like meta-world-pkgdata + # because a layer can add recipes to a world build so signature + # will be change + exclude_recipes = ('meta-world-pkgdata',) + + sigs = {} + tune2tasks = {} + + cmd = '' + if machine: + cmd += 'MACHINE=%s ' % machine + cmd += 'bitbake ' + if failsafe: + cmd += '-k ' + cmd += '-S none world' + sigs_file = os.path.join(builddir, 'locked-sigs.inc') + if os.path.exists(sigs_file): + os.unlink(sigs_file) + try: + check_command('Generating signatures failed. This might be due to some parse error and/or general layer incompatibilities.', + cmd) + except RuntimeError as ex: + if failsafe and os.path.exists(sigs_file): + # Ignore the error here. Most likely some recipes active + # in a world build lack some dependencies. There is a + # separate test_machine_world_build which exposes the + # failure. + pass + else: + raise + + sig_regex = re.compile("^(?P.*:.*):(?P.*) .$") + tune_regex = re.compile("(^|\s)SIGGEN_LOCKEDSIGS_t-(?P\S*)\s*=\s*") + current_tune = None + with open(sigs_file, 'r') as f: + for line in f.readlines(): + line = line.strip() + t = tune_regex.search(line) + if t: + current_tune = t.group('tune') + s = sig_regex.match(line) + if s: + exclude = False + for er in exclude_recipes: + (recipe, task) = s.group('task').split(':') + if er == recipe: + exclude = True + break + if exclude: + continue + + sigs[s.group('task')] = s.group('hash') + tune2tasks.setdefault(current_tune, []).append(s.group('task')) + + if not sigs: + raise RuntimeError('Can\'t load signatures from %s' % sigs_file) + + return (sigs, tune2tasks) + +def get_depgraph(targets=['world'], failsafe=False): + ''' + Returns the dependency graph for the given target(s). + The dependency graph is taken directly from DepTreeEvent. 
+ ''' + depgraph = None + with bb.tinfoil.Tinfoil() as tinfoil: + tinfoil.prepare(config_only=False) + tinfoil.set_event_mask(['bb.event.NoProvider', 'bb.event.DepTreeGenerated', 'bb.command.CommandCompleted']) + if not tinfoil.run_command('generateDepTreeEvent', targets, 'do_build'): + raise RuntimeError('starting generateDepTreeEvent failed') + while True: + event = tinfoil.wait_event(timeout=1000) + if event: + if isinstance(event, bb.command.CommandFailed): + raise RuntimeError('Generating dependency information failed: %s' % event.error) + elif isinstance(event, bb.command.CommandCompleted): + break + elif isinstance(event, bb.event.NoProvider): + if failsafe: + # The event is informational, we will get information about the + # remaining dependencies eventually and thus can ignore this + # here like we do in get_signatures(), if desired. + continue + if event._reasons: + raise RuntimeError('Nothing provides %s: %s' % (event._item, event._reasons)) + else: + raise RuntimeError('Nothing provides %s.' % (event._item)) + elif isinstance(event, bb.event.DepTreeGenerated): + depgraph = event._depgraph + + if depgraph is None: + raise RuntimeError('Could not retrieve the depgraph.') + return depgraph + +def compare_signatures(old_sigs, curr_sigs): + ''' + Compares the result of two get_signatures() calls. Returns None if no + problems found, otherwise a string that can be used as additional + explanation in self.fail(). + ''' + # task -> (old signature, new signature) + sig_diff = {} + for task in old_sigs: + if task in curr_sigs and \ + old_sigs[task] != curr_sigs[task]: + sig_diff[task] = (old_sigs[task], curr_sigs[task]) + + if not sig_diff: + return None + + # Beware, depgraph uses task=. whereas get_signatures() + # uses :. Need to convert sometimes. The output follows + # the convention from get_signatures() because that seems closer to + # normal bitbake output. + def sig2graph(task): + pn, taskname = task.rsplit(':', 1) + return pn + '.' + taskname + def graph2sig(task): + pn, taskname = task.rsplit('.', 1) + return pn + ':' + taskname + depgraph = get_depgraph(failsafe=True) + depends = depgraph['tdepends'] + + # If a task A has a changed signature, but none of its + # dependencies, then we need to report it because it is + # the one which introduces a change. Any task depending on + # A (directly or indirectly) will also have a changed + # signature, but we don't need to report it. It might have + # its own changes, which will become apparent once the + # issues that we do report are fixed and the test gets run + # again. 
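That filtering rule is easiest to see on a tiny worked example. The task names, signatures and dependency edges below are hypothetical and only illustrate why a task is reported when none of its own dependencies changed:

```python
# Made-up example data: two tasks changed, but do_compile only changed because
# its dependency do_configure did, so only do_configure is worth reporting.
sig_diff = {                                   # task -> (old signature, new signature)
    'zlib:do_configure': ('aaa111', 'bbb222'),
    'zlib:do_compile':   ('ccc333', 'ddd444'),
}
depends = {                                    # depgraph-style "pn.task" dependency edges
    'zlib.do_compile':   ['zlib.do_configure'],
    'zlib.do_configure': [],
}

def sig2graph(task):                           # get_signatures() naming -> depgraph naming
    pn, taskname = task.rsplit(':', 1)
    return pn + '.' + taskname

def graph2sig(task):                           # depgraph naming -> get_signatures() naming
    pn, taskname = task.rsplit('.', 1)
    return pn + ':' + taskname

root_causes = [task for task in sig_diff
               if not any(graph2sig(dep) in sig_diff
                          for dep in depends.get(sig2graph(task), ()))]
print(sorted(root_causes))                     # -> ['zlib:do_configure']
```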
+ sig_diff_filtered = [] + for task, (old_sig, new_sig) in sig_diff.items(): + deps_tainted = False + for dep in depends.get(sig2graph(task), ()): + if graph2sig(dep) in sig_diff: + deps_tainted = True + break + if not deps_tainted: + sig_diff_filtered.append((task, old_sig, new_sig)) + + msg = [] + msg.append('%d signatures changed, initial differences (first hash before, second after):' % + len(sig_diff)) + for diff in sorted(sig_diff_filtered): + recipe, taskname = diff[0].rsplit(':', 1) + cmd = 'bitbake-diffsigs --task %s %s --signature %s %s' % \ + (recipe, taskname, diff[1], diff[2]) + msg.append(' %s: %s -> %s' % diff) + msg.append(' %s' % cmd) + try: + output = check_command('Determining signature difference failed.', + cmd).decode('utf-8') + except RuntimeError as error: + output = str(error) + if output: + msg.extend([' ' + line for line in output.splitlines()]) + msg.append('') + return '\n'.join(msg) diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/case.py b/import-layers/yocto-poky/scripts/lib/compatlayer/case.py new file mode 100644 index 000000000..54ce78aa6 --- /dev/null +++ b/import-layers/yocto-poky/scripts/lib/compatlayer/case.py @@ -0,0 +1,7 @@ +# Copyright (C) 2017 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +from oeqa.core.case import OETestCase + +class OECompatLayerTestCase(OETestCase): + pass diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/__init__.py b/import-layers/yocto-poky/scripts/lib/compatlayer/cases/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/bsp.py b/import-layers/yocto-poky/scripts/lib/compatlayer/cases/bsp.py new file mode 100644 index 000000000..43efae406 --- /dev/null +++ b/import-layers/yocto-poky/scripts/lib/compatlayer/cases/bsp.py @@ -0,0 +1,204 @@ +# Copyright (C) 2017 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import unittest + +from compatlayer import LayerType, get_signatures, check_command, get_depgraph +from compatlayer.case import OECompatLayerTestCase + +class BSPCompatLayer(OECompatLayerTestCase): + @classmethod + def setUpClass(self): + if self.tc.layer['type'] != LayerType.BSP: + raise unittest.SkipTest("BSPCompatLayer: Layer %s isn't BSP one." %\ + self.tc.layer['name']) + + def test_bsp_defines_machines(self): + self.assertTrue(self.tc.layer['conf']['machines'], + "Layer is BSP but doesn't defines machines.") + + def test_bsp_no_set_machine(self): + from oeqa.utils.commands import get_bb_var + + machine = get_bb_var('MACHINE') + self.assertEqual(self.td['bbvars']['MACHINE'], machine, + msg="Layer %s modified machine %s -> %s" % \ + (self.tc.layer['name'], self.td['bbvars']['MACHINE'], machine)) + + + def test_machine_world(self): + ''' + "bitbake world" is expected to work regardless which machine is selected. + BSP layers sometimes break that by enabling a recipe for a certain machine + without checking whether that recipe actually can be built in the current + distro configuration (for example, OpenGL might not enabled). + + This test iterates over all machines. It would be nicer to instantiate + it once per machine. It merely checks for errors during parse + time. It does not actually attempt to build anything. + ''' + + if not self.td['machines']: + self.skipTest('No machines set with --machines.') + msg = [] + for machine in self.td['machines']: + # In contrast to test_machine_signatures() below, errors are fatal here. 
+ try: + get_signatures(self.td['builddir'], failsafe=False, machine=machine) + except RuntimeError as ex: + msg.append(str(ex)) + if msg: + msg.insert(0, 'The following machines broke a world build:') + self.fail('\n'.join(msg)) + + def test_machine_signatures(self): + ''' + Selecting a machine may only affect the signature of tasks that are specific + to that machine. In other words, when MACHINE=A and MACHINE=B share a recipe + foo and the output of foo, then both machine configurations must build foo + in exactly the same way. Otherwise it is not possible to use both machines + in the same distribution. + + This criteria can only be tested by testing different machines in combination, + i.e. one main layer, potentially several additional BSP layers and an explicit + choice of machines: + yocto-compat-layer --additional-layers .../meta-intel --machines intel-corei7-64 imx6slevk -- .../meta-freescale + ''' + + if not self.td['machines']: + self.skipTest('No machines set with --machines.') + + # Collect signatures for all machines that we are testing + # and merge that into a hash: + # tune -> task -> signature -> list of machines with that combination + # + # It is an error if any tune/task pair has more than one signature, + # because that implies that the machines that caused those different + # signatures do not agree on how to execute the task. + tunes = {} + # Preserve ordering of machines as chosen by the user. + for machine in self.td['machines']: + curr_sigs, tune2tasks = get_signatures(self.td['builddir'], failsafe=True, machine=machine) + # Invert the tune -> [tasks] mapping. + tasks2tune = {} + for tune, tasks in tune2tasks.items(): + for task in tasks: + tasks2tune[task] = tune + for task, sighash in curr_sigs.items(): + tunes.setdefault(tasks2tune[task], {}).setdefault(task, {}).setdefault(sighash, []).append(machine) + + msg = [] + pruned = 0 + last_line_key = None + # do_fetch, do_unpack, ..., do_build + taskname_list = [] + if tunes: + # The output below is most useful when we start with tasks that are at + # the bottom of the dependency chain, i.e. those that run first. If + # those tasks differ, the rest also does. + # + # To get an ordering of tasks, we do a topological sort of the entire + # depgraph for the base configuration, then on-the-fly flatten that list by stripping + # out the recipe names and removing duplicates. The base configuration + # is not necessarily representative, but should be close enough. Tasks + # that were not encountered get a default priority. + depgraph = get_depgraph() + depends = depgraph['tdepends'] + WHITE = 1 + GRAY = 2 + BLACK = 3 + color = {} + found = set() + def visit(task): + color[task] = GRAY + for dep in depends.get(task, ()): + if color.setdefault(dep, WHITE) == WHITE: + visit(dep) + color[task] = BLACK + pn, taskname = task.rsplit('.', 1) + if taskname not in found: + taskname_list.append(taskname) + found.add(taskname) + for task in depends.keys(): + if color.setdefault(task, WHITE) == WHITE: + visit(task) + + taskname_order = dict([(task, index) for index, task in enumerate(taskname_list) ]) + def task_key(task): + pn, taskname = task.rsplit(':', 1) + return (pn, taskname_order.get(taskname, len(taskname_list)), taskname) + + for tune in sorted(tunes.keys()): + tasks = tunes[tune] + # As for test_signatures it would be nicer to sort tasks + # by dependencies here, but that is harder because we have + # to report on tasks from different machines, which might + # have different dependencies. 
We resort to pruning the + # output by reporting only one task per recipe if the set + # of machines matches. + # + # "bitbake-diffsigs -t -s" is intelligent enough to print + # diffs recursively, so often it does not matter that much + # if we don't pick the underlying difference + # here. However, sometimes recursion fails + # (https://bugzilla.yoctoproject.org/show_bug.cgi?id=6428). + # + # To mitigate that a bit, we use a hard-coded ordering of + # tasks that represents how they normally run and prefer + # to print the ones that run first. + for task in sorted(tasks.keys(), key=task_key): + signatures = tasks[task] + # do_build can be ignored: it is know to have + # different signatures in some cases, for example in + # the allarch ca-certificates due to RDEPENDS=openssl. + # That particular dependency is whitelisted via + # SIGGEN_EXCLUDE_SAFE_RECIPE_DEPS, but still shows up + # in the sstate signature hash because filtering it + # out would be hard and running do_build multiple + # times doesn't really matter. + if len(signatures.keys()) > 1 and \ + not task.endswith(':do_build'): + # Error! + # + # Sort signatures by machines, because the hex values don't mean anything. + # => all-arch adwaita-icon-theme:do_build: 1234... (beaglebone, qemux86) != abcdf... (qemux86-64) + # + # Skip the line if it is covered already by the predecessor (same pn, same sets of machines). + pn, taskname = task.rsplit(':', 1) + next_line_key = (pn, sorted(signatures.values())) + if next_line_key != last_line_key: + line = ' %s %s: ' % (tune, task) + line += ' != '.join(['%s (%s)' % (signature, ', '.join([m for m in signatures[signature]])) for + signature in sorted(signatures.keys(), key=lambda s: signatures[s])]) + last_line_key = next_line_key + msg.append(line) + # Randomly pick two mismatched signatures and remember how to invoke + # bitbake-diffsigs for them. + iterator = iter(signatures.items()) + a = next(iterator) + b = next(iterator) + diffsig_machines = '(%s) != (%s)' % (', '.join(a[1]), ', '.join(b[1])) + diffsig_params = '-t %s %s -s %s %s' % (pn, taskname, a[0], b[0]) + else: + pruned += 1 + + if msg: + msg.insert(0, 'The machines have conflicting signatures for some shared tasks:') + if pruned > 0: + msg.append('') + msg.append('%d tasks where not listed because some other task of the recipe already differed.' % pruned) + msg.append('It is likely that differences from different recipes also have the same root cause.') + msg.append('') + # Explain how to investigate... + msg.append('To investigate, run bitbake-diffsigs -t recipename taskname -s fromsig tosig.') + cmd = 'bitbake-diffsigs %s' % diffsig_params + msg.append('Example: %s in the last line' % diffsig_machines) + msg.append('Command: %s' % cmd) + # ... and actually do it automatically for that example, but without aborting + # when that fails. 
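The per-recipe pruning described in the comments above comes down to comparing a (recipe, machine-grouping) key between consecutive rows and counting the suppressed ones. A standalone sketch with invented tasks, signatures and machines:

```python
# Invented rows: (task, {signature: [machines]}). Consecutive tasks of the same
# recipe with the same machine grouping are pruned, as the comments above describe.
rows = [
    ('zlib:do_configure', {'aaa': ['qemux86'], 'bbb': ['qemux86-64']}),
    ('zlib:do_compile',   {'ccc': ['qemux86'], 'ddd': ['qemux86-64']}),
    ('bash:do_compile',   {'eee': ['qemux86'], 'fff': ['qemux86-64']}),
]

reported, pruned, last_key = [], 0, None
for task, signatures in rows:
    pn = task.rsplit(':', 1)[0]
    key = (pn, sorted(signatures.values()))    # same idea as next_line_key above
    if key != last_key:
        reported.append(task)
        last_key = key
    else:
        pruned += 1

print(reported, pruned)   # -> ['zlib:do_configure', 'bash:do_compile'] 1
```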
+ try: + output = check_command('Comparing signatures failed.', cmd).decode('utf-8') + except RuntimeError as ex: + output = str(ex) + msg.extend([' ' + line for line in output.splitlines()]) + self.fail('\n'.join(msg)) diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/common.py b/import-layers/yocto-poky/scripts/lib/compatlayer/cases/common.py new file mode 100644 index 000000000..55e8ba4c5 --- /dev/null +++ b/import-layers/yocto-poky/scripts/lib/compatlayer/cases/common.py @@ -0,0 +1,53 @@ +# Copyright (C) 2017 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import glob +import os +import unittest +from compatlayer import get_signatures, LayerType, check_command, get_depgraph, compare_signatures +from compatlayer.case import OECompatLayerTestCase + +class CommonCompatLayer(OECompatLayerTestCase): + def test_readme(self): + # The top-level README file may have a suffix (like README.rst or README.txt). + readme_files = glob.glob(os.path.join(self.tc.layer['path'], 'README*')) + self.assertTrue(len(readme_files) > 0, + msg="Layer doesn't contains README file.") + + # There might be more than one file matching the file pattern above + # (for example, README.rst and README-COPYING.rst). The one with the shortest + # name is considered the "main" one. + readme_file = sorted(readme_files)[0] + data = '' + with open(readme_file, 'r') as f: + data = f.read() + self.assertTrue(data, + msg="Layer contains a README file but it is empty.") + + def test_parse(self): + check_command('Layer %s failed to parse.' % self.tc.layer['name'], + 'bitbake -p') + + def test_show_environment(self): + check_command('Layer %s failed to show environment.' % self.tc.layer['name'], + 'bitbake -e') + + def test_world(self): + ''' + "bitbake world" is expected to work. test_signatures does not cover that + because it is more lenient and ignores recipes in a world build that + are not actually buildable, so here we fail when "bitbake -S none world" + fails. + ''' + get_signatures(self.td['builddir'], failsafe=False) + + def test_signatures(self): + if self.tc.layer['type'] == LayerType.SOFTWARE and \ + not self.tc.test_software_layer_signatures: + raise unittest.SkipTest("Not testing for signature changes in a software layer %s." \ + % self.tc.layer['name']) + + curr_sigs, _ = get_signatures(self.td['builddir'], failsafe=True) + msg = compare_signatures(self.td['sigs'], curr_sigs) + if msg is not None: + self.fail('Adding layer %s changed signatures.\n%s' % (self.tc.layer['name'], msg)) diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/cases/distro.py b/import-layers/yocto-poky/scripts/lib/compatlayer/cases/distro.py new file mode 100644 index 000000000..523acc1e7 --- /dev/null +++ b/import-layers/yocto-poky/scripts/lib/compatlayer/cases/distro.py @@ -0,0 +1,26 @@ +# Copyright (C) 2017 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import unittest + +from compatlayer import LayerType +from compatlayer.case import OECompatLayerTestCase + +class DistroCompatLayer(OECompatLayerTestCase): + @classmethod + def setUpClass(self): + if self.tc.layer['type'] != LayerType.DISTRO: + raise unittest.SkipTest("DistroCompatLayer: Layer %s isn't Distro one." 
%\ + self.tc.layer['name']) + + def test_distro_defines_distros(self): + self.assertTrue(self.tc.layer['conf']['distros'], + "Layer is BSP but doesn't defines machines.") + + def test_distro_no_set_distros(self): + from oeqa.utils.commands import get_bb_var + + distro = get_bb_var('DISTRO') + self.assertEqual(self.td['bbvars']['DISTRO'], distro, + msg="Layer %s modified distro %s -> %s" % \ + (self.tc.layer['name'], self.td['bbvars']['DISTRO'], distro)) diff --git a/import-layers/yocto-poky/scripts/lib/compatlayer/context.py b/import-layers/yocto-poky/scripts/lib/compatlayer/context.py new file mode 100644 index 000000000..7811d4ac2 --- /dev/null +++ b/import-layers/yocto-poky/scripts/lib/compatlayer/context.py @@ -0,0 +1,15 @@ +# Copyright (C) 2017 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import sys +import glob +import re + +from oeqa.core.context import OETestContext + +class CompatLayerTestContext(OETestContext): + def __init__(self, td=None, logger=None, layer=None, test_software_layer_signatures=True): + super(CompatLayerTestContext, self).__init__(td, logger) + self.layer = layer + self.test_software_layer_signatures = test_software_layer_signatures diff --git a/import-layers/yocto-poky/scripts/lib/devtool/__init__.py b/import-layers/yocto-poky/scripts/lib/devtool/__init__.py index e675133f6..d646b0cf6 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/__init__.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/__init__.py @@ -23,6 +23,7 @@ import sys import subprocess import logging import re +import codecs logger = logging.getLogger('devtool') @@ -67,10 +68,10 @@ def exec_watch(cmd, **options): cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options ) + reader = codecs.getreader('utf-8')(process.stdout) buf = '' while True: - out = process.stdout.read(1) - out = out.decode('utf-8') + out = reader.read(1, 1) if out: sys.stdout.write(out) sys.stdout.flush() @@ -86,13 +87,13 @@ def exec_watch(cmd, **options): def exec_fakeroot(d, cmd, **kwargs): """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions""" # Grab the command and check it actually exists - fakerootcmd = d.getVar('FAKEROOTCMD', True) + fakerootcmd = d.getVar('FAKEROOTCMD') if not os.path.exists(fakerootcmd): logger.error('pseudo executable %s could not be found - have you run a build yet? 
pseudo-native should install this and if you have run any build then that should have been built') return 2 # Set up the appropriate environment newenv = dict(os.environ) - fakerootenv = d.getVar('FAKEROOTENV', True) + fakerootenv = d.getVar('FAKEROOTENV') for varvalue in fakerootenv.split(): if '=' in varvalue: splitval = varvalue.split('=', 1) @@ -113,40 +114,40 @@ def setup_tinfoil(config_only=False, basepath=None, tracking=False): import bb.tinfoil tinfoil = bb.tinfoil.Tinfoil(tracking=tracking) - tinfoil.prepare(config_only) - tinfoil.logger.setLevel(logger.getEffectiveLevel()) + try: + tinfoil.prepare(config_only) + tinfoil.logger.setLevel(logger.getEffectiveLevel()) + except bb.tinfoil.TinfoilUIException: + tinfoil.shutdown() + raise DevtoolError('Failed to start bitbake environment') + except: + tinfoil.shutdown() + raise finally: os.chdir(orig_cwd) return tinfoil -def get_recipe_file(cooker, pn): - """Find recipe file corresponding a package name""" - import oe.recipeutils - recipefile = oe.recipeutils.pn_to_recipe(cooker, pn) - if not recipefile: - skipreasons = oe.recipeutils.get_unavailable_reasons(cooker, pn) - if skipreasons: - logger.error('\n'.join(skipreasons)) - else: - logger.error("Unable to find any recipe file matching %s" % pn) - return recipefile - def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True): - """Parse recipe of a package""" - import oe.recipeutils - recipefile = get_recipe_file(tinfoil.cooker, pn) - if not recipefile: - # Error already logged + """Parse the specified recipe""" + try: + recipefile = tinfoil.get_recipe_file(pn) + except bb.providers.NoProvider as e: + logger.error(str(e)) return None if appends: - append_files = tinfoil.cooker.collection.get_file_appends(recipefile) + append_files = tinfoil.get_file_appends(recipefile) if filter_workspace: # Filter out appends from the workspace append_files = [path for path in append_files if not path.startswith(config.workspace_path)] else: append_files = None - return oe.recipeutils.parse_recipe(tinfoil.cooker, recipefile, append_files) + try: + rd = tinfoil.parse_recipe_file(recipefile, appends, append_files) + except Exception as e: + logger.error(str(e)) + return None + return rd def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False): """ @@ -190,7 +191,7 @@ def use_external_build(same_dir, no_same_dir, d): logger.info('Using source tree as build directory since --same-dir specified') elif bb.data.inherits_class('autotools-brokensep', d): logger.info('Using source tree as build directory since recipe inherits autotools-brokensep') - elif d.getVar('B', True) == os.path.abspath(d.getVar('S', True)): + elif d.getVar('B') == os.path.abspath(d.getVar('S')): logger.info('Using source tree as build directory since that would be the default for this recipe') else: b_is_s = False @@ -260,23 +261,28 @@ def get_bbclassextend_targets(recipefile, pn): targets.append('%s-%s' % (pn, variant)) return targets -def ensure_npm(config, basepath, fixed_setup=False): +def ensure_npm(config, basepath, fixed_setup=False, check_exists=True): """ Ensure that npm is available and either build it or show a reasonable error message """ - tinfoil = setup_tinfoil(config_only=True, basepath=basepath) - try: - nativepath = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE', True) - finally: - tinfoil.shutdown() + if check_exists: + tinfoil = setup_tinfoil(config_only=False, basepath=basepath) + try: + rd = tinfoil.parse_recipe('nodejs-native') + nativepath = rd.getVar('STAGING_BINDIR_NATIVE') + 
finally: + tinfoil.shutdown() + npmpath = os.path.join(nativepath, 'npm') + build_npm = not os.path.exists(npmpath) + else: + build_npm = True - npmpath = os.path.join(nativepath, 'npm') - if not os.path.exists(npmpath): + if build_npm: logger.info('Building nodejs-native') try: exec_build_env_command(config.init_path, basepath, - 'bitbake -q nodejs-native', watch=True) + 'bitbake -q nodejs-native -c addto_recipe_sysroot', watch=True) except bb.process.ExecutionError as e: if "Nothing PROVIDES 'nodejs-native'" in e.stdout: if fixed_setup: @@ -286,5 +292,3 @@ def ensure_npm(config, basepath, fixed_setup=False): raise DevtoolError(msg) else: raise - if not os.path.exists(npmpath): - raise DevtoolError('Built nodejs-native but npm binary still could not be found at %s' % npmpath) diff --git a/import-layers/yocto-poky/scripts/lib/devtool/build.py b/import-layers/yocto-poky/scripts/lib/devtool/build.py index 6be549dd5..252379e9b 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/build.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/build.py @@ -80,7 +80,7 @@ def register_commands(subparsers, context): """Register devtool subcommands from this plugin""" parser_build = subparsers.add_parser('build', help='Build a recipe', description='Builds the specified recipe using bitbake (up to and including %s)' % ', '.join(_get_build_tasks(context.config)), - group='working') + group='working', order=50) parser_build.add_argument('recipename', help='Recipe to build') parser_build.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism') parser_build.set_defaults(func=build) diff --git a/import-layers/yocto-poky/scripts/lib/devtool/build_image.py b/import-layers/yocto-poky/scripts/lib/devtool/build_image.py index ae75511dc..e5810389b 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/build_image.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/build_image.py @@ -34,8 +34,8 @@ def _get_packages(tinfoil, workspace, config): result = [] for recipe in workspace: data = parse_recipe(config, tinfoil, recipe, True) - if 'class-target' in data.getVar('OVERRIDES', True).split(':'): - if recipe in data.getVar('PACKAGES', True).split(): + if 'class-target' in data.getVar('OVERRIDES').split(':'): + if recipe in data.getVar('PACKAGES').split(): result.append(recipe) else: logger.warning("Skipping recipe %s as it doesn't produce a " @@ -95,7 +95,7 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task raise TargetNotImageError() # Get the actual filename used and strip the .bb and full path - target_basename = rd.getVar('FILE', True) + target_basename = rd.getVar('FILE') target_basename = os.path.splitext(os.path.basename(target_basename))[0] config.set('SDK', 'target_basename', target_basename) config.write() @@ -132,9 +132,9 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task afile.write('%s\n' % line) if task in ['populate_sdk', 'populate_sdk_ext']: - outputdir = rd.getVar('SDK_DEPLOY', True) + outputdir = rd.getVar('SDK_DEPLOY') else: - outputdir = rd.getVar('DEPLOY_DIR_IMAGE', True) + outputdir = rd.getVar('DEPLOY_DIR_IMAGE') tmp_tinfoil = tinfoil tinfoil = None diff --git a/import-layers/yocto-poky/scripts/lib/devtool/deploy.py b/import-layers/yocto-poky/scripts/lib/devtool/deploy.py index c4c7bf6c7..b3730ae83 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/deploy.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/deploy.py @@ -156,11 +156,11 @@ def deploy(args, config, 
basepath, workspace): tinfoil = setup_tinfoil(basepath=basepath) try: try: - rd = oe.recipeutils.parse_recipe_simple(tinfoil.cooker, args.recipename, tinfoil.config_data) + rd = tinfoil.parse_recipe(args.recipename) except Exception as e: raise DevtoolError('Exception parsing recipe %s: %s' % (args.recipename, e)) - recipe_outdir = rd.getVar('D', True) + recipe_outdir = rd.getVar('D') if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir): raise DevtoolError('No files to deploy - have you built the %s ' 'recipe? If so, the install step has not installed ' @@ -192,6 +192,14 @@ def deploy(args, config, basepath, workspace): if not args.show_status: extraoptions += ' -q' + scp_port = '' + ssh_port = '' + if not args.port: + raise DevtoolError("If you specify -P/--port then you must provide the port to be used to connect to the target") + else: + scp_port = "-P %s" % args.port + ssh_port = "-p %s" % args.port + # In order to delete previously deployed files and have the manifest file on # the target, we write out a shell script and then copy it to the target # so we can then run it (piping tar output to it). @@ -213,7 +221,7 @@ def deploy(args, config, basepath, workspace): for fpath, fsize in filelist: f.write('%s %d\n' % (fpath, fsize)) # Copy them to the target - ret = subprocess.call("scp %s %s/* %s:%s" % (extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True) + ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True) if ret != 0: raise DevtoolError('Failed to copy script to %s - rerun with -s to ' 'get a complete error message' % args.target) @@ -221,7 +229,7 @@ def deploy(args, config, basepath, workspace): shutil.rmtree(tmpdir) # Now run the script - ret = exec_fakeroot(rd, 'tar cf - . | ssh %s %s \'sh %s %s %s %s\'' % (extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True) + ret = exec_fakeroot(rd, 'tar cf - . 
| ssh %s %s %s \'sh %s %s %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True) if ret != 0: raise DevtoolError('Deploy failed - rerun with -s to get a complete ' 'error message') @@ -251,6 +259,14 @@ def undeploy(args, config, basepath, workspace): if not args.show_status: extraoptions += ' -q' + scp_port = '' + ssh_port = '' + if not args.port: + raise DevtoolError("If you specify -P/--port then you must provide the port to be used to connect to the target") + else: + scp_port = "-P %s" % args.port + ssh_port = "-p %s" % args.port + args.target = args.target.split(':')[0] tmpdir = tempfile.mkdtemp(prefix='devtool') @@ -261,7 +277,7 @@ def undeploy(args, config, basepath, workspace): with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f: f.write(shellscript) # Copy it to the target - ret = subprocess.call("scp %s %s/* %s:%s" % (extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True) + ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True) if ret != 0: raise DevtoolError('Failed to copy script to %s - rerun with -s to ' 'get a complete error message' % args.target) @@ -269,7 +285,7 @@ def undeploy(args, config, basepath, workspace): shutil.rmtree(tmpdir) # Now run the script - ret = subprocess.call('ssh %s %s \'sh %s %s\'' % (extraoptions, args.target, tmpscript, args.recipename), shell=True) + ret = subprocess.call('ssh %s %s %s \'sh %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True) if ret != 0: raise DevtoolError('Undeploy failed - rerun with -s to get a complete ' 'error message') @@ -292,6 +308,7 @@ def register_commands(subparsers, context): parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true') parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true') parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true') + parser_deploy.add_argument('-P', '--port', default='22', help='Port to use for connection to the target') parser_deploy.set_defaults(func=deploy) parser_undeploy = subparsers.add_parser('undeploy-target', @@ -304,4 +321,5 @@ def register_commands(subparsers, context): parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true') parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true') parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true') + parser_undeploy.add_argument('-P', '--port', default='22', help='Port to use for connection to the target') parser_undeploy.set_defaults(func=undeploy) diff --git a/import-layers/yocto-poky/scripts/lib/devtool/package.py b/import-layers/yocto-poky/scripts/lib/devtool/package.py index afb5809a3..af9e8f15f 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/package.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/package.py @@ -28,15 +28,13 @@ def package(args, config, basepath, workspace): """Entry point for the devtool 'package' subcommand""" check_workspace_recipe(workspace, args.recipename) - tinfoil = setup_tinfoil(basepath=basepath) + tinfoil = setup_tinfoil(basepath=basepath, config_only=True) try: - tinfoil.prepare(config_only=True) - image_pkgtype = 
config.get('Package', 'image_pkgtype', '') if not image_pkgtype: - image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE', True) + image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE') - deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper(), True) + deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper()) finally: tinfoil.shutdown() diff --git a/import-layers/yocto-poky/scripts/lib/devtool/runqemu.py b/import-layers/yocto-poky/scripts/lib/devtool/runqemu.py index ae25cee08..e26cf28c2 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/runqemu.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/runqemu.py @@ -31,8 +31,10 @@ def runqemu(args, config, basepath, workspace): tinfoil = setup_tinfoil(config_only=True, basepath=basepath) try: - machine = tinfoil.config_data.getVar('MACHINE', True) - bindir_native = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE', True) + machine = tinfoil.config_data.getVar('MACHINE') + bindir_native = os.path.join(tinfoil.config_data.getVar('STAGING_DIR'), + tinfoil.config_data.getVar('BUILD_ARCH'), + tinfoil.config_data.getVar('bindir_native').lstrip(os.path.sep)) finally: tinfoil.shutdown() diff --git a/import-layers/yocto-poky/scripts/lib/devtool/sdk.py b/import-layers/yocto-poky/scripts/lib/devtool/sdk.py index 922277b79..e8bf0ad98 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/sdk.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/sdk.py @@ -132,9 +132,9 @@ def sdk_update(args, config, basepath, workspace): # Grab variable values tinfoil = setup_tinfoil(config_only=True, basepath=basepath) try: - stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR', True) - sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS', True) - site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION', True) + stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR') + sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS') + site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION') finally: tinfoil.shutdown() @@ -273,7 +273,7 @@ def sdk_install(args, config, basepath, workspace): rd = parse_recipe(config, tinfoil, recipe, True) if not rd: return 1 - stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP', True), tasks[0]) + stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP'), tasks[0]) if checkstamp(recipe): logger.info('%s is already installed' % recipe) else: @@ -306,6 +306,12 @@ def sdk_install(args, config, basepath, workspace): if failed: return 2 + try: + exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True) + except bb.process.ExecutionError as e: + raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e))) + + def register_commands(subparsers, context): """Register devtool subcommands from the sdk plugin""" if context.fixed_setup: diff --git a/import-layers/yocto-poky/scripts/lib/devtool/search.py b/import-layers/yocto-poky/scripts/lib/devtool/search.py index b44bed7f6..054985b85 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/search.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/search.py @@ -31,7 +31,7 @@ def search(args, config, basepath, workspace): tinfoil = setup_tinfoil(config_only=False, basepath=basepath) try: - pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True) + pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR') defsummary = tinfoil.config_data.getVar('SUMMARY', False) or '' keyword_rc = re.compile(args.keyword) @@ -70,7 +70,7 @@ def search(args, config, basepath, 
workspace): if match: rd = parse_recipe(config, tinfoil, fn, True) - summary = rd.getVar('SUMMARY', True) + summary = rd.getVar('SUMMARY') if summary == rd.expand(defsummary): summary = '' print("%s %s" % (fn.ljust(20), summary)) diff --git a/import-layers/yocto-poky/scripts/lib/devtool/standard.py b/import-layers/yocto-poky/scripts/lib/devtool/standard.py index 4eff6f878..5ff1e230f 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/standard.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/standard.py @@ -150,25 +150,32 @@ def add(args, config, basepath, workspace): extracmdopts += ' --src-subdir "%s"' % args.src_subdir if args.autorev: extracmdopts += ' -a' + if args.fetch_dev: + extracmdopts += ' --fetch-dev' tempdir = tempfile.mkdtemp(prefix='devtool') try: + builtnpm = False while True: try: - stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create -o %s "%s" %s' % (color, tempdir, source, extracmdopts)) + stdout, _ = exec_build_env_command(config.init_path, basepath, 'recipetool --color=%s create --devtool -o %s \'%s\' %s' % (color, tempdir, source, extracmdopts), watch=True) except bb.process.ExecutionError as e: if e.exitcode == 14: + if builtnpm: + raise DevtoolError('Re-running recipetool still failed to find npm') # FIXME this is a horrible hack that is unfortunately # necessary due to the fact that we can't run bitbake from # inside recipetool since recipetool keeps tinfoil active # with references to it throughout the code, so we have # to exit out and come back here to do it. - ensure_npm(config, basepath, args.fixed_setup) + ensure_npm(config, basepath, args.fixed_setup, check_exists=False) + logger.info('Re-running recipe creation process after building nodejs') + builtnpm = True continue elif e.exitcode == 15: raise DevtoolError('Could not auto-determine recipe name, please specify it on the command line') else: - raise DevtoolError('Command \'%s\' failed:\n%s' % (e.command, e.stdout)) + raise DevtoolError('Command \'%s\' failed' % e.command) break recipes = glob.glob(os.path.join(tempdir, '*.bb')) @@ -223,8 +230,17 @@ def add(args, config, basepath, workspace): tinfoil = setup_tinfoil(config_only=True, basepath=basepath) try: - rd = oe.recipeutils.parse_recipe(tinfoil.cooker, recipefile, None) + try: + rd = tinfoil.parse_recipe_file(recipefile, False) + except Exception as e: + logger.error(str(e)) + rd = None if not rd: + # Parsing failed. We just created this recipe and we shouldn't + # leave it in the workdir or it'll prevent bitbake from starting + movefn = '%s.parsefailed' % recipefile + logger.error('Parsing newly created recipe failed, moving recipe to %s for reference. If this looks to be caused by the recipe itself, please report this error.' % movefn) + shutil.move(recipefile, movefn) return 1 if args.fetchuri and not args.no_git: @@ -302,7 +318,7 @@ def _check_compatible_recipe(pn, d): raise DevtoolError("The %s recipe is a meta-recipe, and therefore is " "not supported by this tool" % pn, 4) - if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC', True): + if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'): # Not an incompatibility error per se, so we don't pass the error code raise DevtoolError("externalsrc is currently enabled for the %s " "recipe. 
This prevents the normal do_patch task " @@ -331,10 +347,11 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False): cmd.append('-r') out, _ = bb.process.run(cmd, cwd=repodir) ret = {} - for line in out.split('\0'): - if line: - split = line.split(None, 4) - ret[split[3]] = split[0:3] + if out: + for line in out.split('\0'): + if line: + split = line.split(None, 4) + ret[split[3]] = split[0:3] return ret def _git_exclude_path(srctree, path): @@ -376,7 +393,7 @@ def extract(args, config, basepath, workspace): return 1 srctree = os.path.abspath(args.srctree) - initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd) + initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd, tinfoil) logger.info('Source tree extracted to %s' % srctree) if initial_rev: @@ -400,7 +417,7 @@ def sync(args, config, basepath, workspace): return 1 srctree = os.path.abspath(args.srctree) - initial_rev = _extract_source(srctree, args.keep_temp, args.branch, True, rd) + initial_rev = _extract_source(srctree, args.keep_temp, args.branch, True, rd, tinfoil) logger.info('Source tree %s synchronized' % srctree) if initial_rev: @@ -410,70 +427,6 @@ def sync(args, config, basepath, workspace): finally: tinfoil.shutdown() -class BbTaskExecutor(object): - """Class for executing bitbake tasks for a recipe - - FIXME: This is very awkward. Unfortunately it's not currently easy to - properly execute tasks outside of bitbake itself, until then this has to - suffice if we are to handle e.g. linux-yocto's extra tasks - """ - - def __init__(self, rdata): - self.rdata = rdata - self.executed = [] - - def exec_func(self, func, report): - """Run bitbake task function""" - if not func in self.executed: - deps = self.rdata.getVarFlag(func, 'deps', False) - if deps: - for taskdepfunc in deps: - self.exec_func(taskdepfunc, True) - if report: - logger.info('Executing %s...' 
% func) - fn = self.rdata.getVar('FILE', True) - localdata = bb.build._task_data(fn, func, self.rdata) - try: - bb.build.exec_func(func, localdata) - except bb.build.FuncFailed as e: - raise DevtoolError(str(e)) - self.executed.append(func) - - -class PatchTaskExecutor(BbTaskExecutor): - def __init__(self, rdata): - import oe.patch - self.check_git = False - self.useroptions = [] - oe.patch.GitApplyTree.gitCommandUserOptions(self.useroptions, d=rdata) - super(PatchTaskExecutor, self).__init__(rdata) - - def exec_func(self, func, report): - from oe.patch import GitApplyTree - srcsubdir = self.rdata.getVar('S', True) - haspatches = False - if func == 'do_patch': - patchdir = os.path.join(srcsubdir, 'patches') - if os.path.exists(patchdir): - if os.listdir(patchdir): - haspatches = True - else: - os.rmdir(patchdir) - - super(PatchTaskExecutor, self).exec_func(func, report) - if self.check_git and os.path.exists(srcsubdir): - if func == 'do_patch': - if os.path.exists(patchdir): - shutil.rmtree(patchdir) - if haspatches: - stdout, _ = bb.process.run('git status --porcelain patches', cwd=srcsubdir) - if stdout: - bb.process.run('git checkout patches', cwd=srcsubdir) - - stdout, _ = bb.process.run('git status --porcelain', cwd=srcsubdir) - if stdout: - bb.process.run('git add .; git %s commit -a -m "Committing changes from %s\n\n%s"' % (' '.join(self.useroptions), func, GitApplyTree.ignore_commit_prefix + ' - from %s' % func), cwd=srcsubdir) - def _prep_extract_operation(config, basepath, recipename, tinfoil=None): """HACK: Ugly workaround for making sure that requirements are met when @@ -497,22 +450,11 @@ def _prep_extract_operation(config, basepath, recipename, tinfoil=None): return tinfoil -def _extract_source(srctree, keep_temp, devbranch, sync, d): +def _extract_source(srctree, keep_temp, devbranch, sync, d, tinfoil): """Extract sources of a recipe""" - import bb.event import oe.recipeutils - def eventfilter(name, handler, event, d): - """Bitbake event filter for devtool extract operation""" - if name == 'base_eventhandler': - return True - else: - return False - - if hasattr(bb.event, 'set_eventfilter'): - bb.event.set_eventfilter(eventfilter) - - pn = d.getVar('PN', True) + pn = d.getVar('PN') _check_compatible_recipe(pn, d) @@ -537,45 +479,92 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d): bb.utils.mkdirhier(srctree) os.rmdir(srctree) - # We don't want notes to be printed, they are too verbose - origlevel = bb.logger.getEffectiveLevel() - if logger.getEffectiveLevel() > logging.DEBUG: - bb.logger.setLevel(logging.WARNING) - initial_rev = None - tempdir = tempfile.mkdtemp(prefix='devtool') + # We need to redirect WORKDIR, STAMPS_DIR etc. under a temporary + # directory so that: + # (a) we pick up all files that get unpacked to the WORKDIR, and + # (b) we don't disturb the existing build + # However, with recipe-specific sysroots the sysroots for the recipe + # will be prepared under WORKDIR, and if we used the system temporary + # directory (i.e. usually /tmp) as used by mkdtemp by default, then + # our attempts to hardlink files into the recipe-specific sysroots + # will fail on systems where /tmp is a different filesystem, and it + # would have to fall back to copying the files which is a waste of + # time. Put the temp directory under the WORKDIR to prevent that from + # being a problem. 
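
A brief aside on the comment block above: the point is that the temporary directory must live on the same filesystem as WORKDIR so that hard links into the recipe-specific sysroot succeed rather than degrading into copies. A minimal sketch of that idea, using only the Python standard library and hypothetical helper names rather than the patch's own code:

    import os
    import shutil
    import tempfile

    def make_tempdir(workdir):
        # Create the temp dir under WORKDIR instead of the system /tmp so it
        # sits on the same filesystem as the files we want to hard-link.
        os.makedirs(workdir, exist_ok=True)
        return tempfile.mkdtemp(prefix='devtooltmp-', dir=workdir)

    def link_or_copy(src, dst):
        # Prefer a hard link; fall back to copying if the link fails,
        # e.g. EXDEV when src and dst are on different filesystems.
        try:
            os.link(src, dst)
        except OSError:
            shutil.copy2(src, dst)
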
+ tempbasedir = d.getVar('WORKDIR') + bb.utils.mkdirhier(tempbasedir) + tempdir = tempfile.mkdtemp(prefix='devtooltmp-', dir=tempbasedir) try: + tinfoil.logger.setLevel(logging.WARNING) + crd = d.createCopy() # Make a subdir so we guard against WORKDIR==S workdir = os.path.join(tempdir, 'workdir') crd.setVar('WORKDIR', workdir) - crd.setVar('T', os.path.join(tempdir, 'temp')) - if not crd.getVar('S', True).startswith(workdir): + if not crd.getVar('S').startswith(workdir): # Usually a shared workdir recipe (kernel, gcc) # Try to set a reasonable default if bb.data.inherits_class('kernel', d): crd.setVar('S', '${WORKDIR}/source') else: - crd.setVar('S', '${WORKDIR}/%s' % os.path.basename(d.getVar('S', True))) + crd.setVar('S', '${WORKDIR}/%s' % os.path.basename(d.getVar('S'))) if bb.data.inherits_class('kernel', d): # We don't want to move the source to STAGING_KERNEL_DIR here crd.setVar('STAGING_KERNEL_DIR', '${S}') - task_executor = PatchTaskExecutor(crd) + is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d) + if not is_kernel_yocto: + crd.setVar('PATCHTOOL', 'git') + crd.setVar('PATCH_COMMIT_FUNCTIONS', '1') + + # Apply our changes to the datastore to the server's datastore + for key in crd.localkeys(): + tinfoil.config_data.setVar('%s_pn-%s' % (key, pn), crd.getVar(key, False)) + + tinfoil.config_data.setVar('STAMPS_DIR', os.path.join(tempdir, 'stamps')) + tinfoil.config_data.setVar('T', os.path.join(tempdir, 'temp')) + tinfoil.config_data.setVar('BUILDCFG_FUNCS', '') + tinfoil.config_data.setVar('BUILDCFG_HEADER', '') + tinfoil.config_data.setVar('BB_HASH_IGNORE_MISMATCH', '1') + + tinfoil.set_event_mask(['bb.event.BuildStarted', + 'bb.event.BuildCompleted', + 'logging.LogRecord', + 'bb.command.CommandCompleted', + 'bb.command.CommandFailed', + 'bb.build.TaskStarted', + 'bb.build.TaskSucceeded', + 'bb.build.TaskFailed', + 'bb.build.TaskFailedSilent']) + + def runtask(target, task): + if tinfoil.build_file(target, task): + while True: + event = tinfoil.wait_event(0.25) + if event: + if isinstance(event, bb.command.CommandCompleted): + break + elif isinstance(event, bb.command.CommandFailed): + raise DevtoolError('Task do_%s failed: %s' % (task, event.error)) + elif isinstance(event, bb.build.TaskFailed): + raise DevtoolError('Task do_%s failed' % task) + elif isinstance(event, bb.build.TaskStarted): + logger.info('Executing %s...' % event._task) + elif isinstance(event, logging.LogRecord): + if event.levelno <= logging.INFO: + continue + logger.handle(event) - crd.setVar('EXTERNALSRC_forcevariable', '') + # we need virtual:native:/path/to/recipe if it's a BBCLASSEXTEND + fn = tinfoil.get_recipe_file(pn) + runtask(fn, 'unpack') - logger.info('Fetching %s...' 
% pn) - task_executor.exec_func('do_fetch', False) - logger.info('Unpacking...') - task_executor.exec_func('do_unpack', False) if bb.data.inherits_class('kernel-yocto', d): # Extra step for kernel to populate the source directory - logger.info('Doing kernel checkout...') - task_executor.exec_func('do_kernel_checkout', False) - srcsubdir = crd.getVar('S', True) + runtask(fn, 'kernel_checkout') - task_executor.check_git = True + srcsubdir = crd.getVar('S') # Move local source files into separate subdir recipe_patches = [os.path.basename(patch) for patch in @@ -605,7 +594,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d): os.path.basename(fname) not in recipe_patches] # Force separate S so that patch files can be left out from srctree srcsubdir = tempfile.mkdtemp(dir=workdir) - crd.setVar('S', srcsubdir) + tinfoil.config_data.setVar('S_task-patch', srcsubdir) # Move source files to S for path in src_files: _move_file(os.path.join(workdir, path), @@ -623,15 +612,13 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d): "doesn't use any source or the correct source " "directory could not be determined" % pn) - setup_git_repo(srcsubdir, crd.getVar('PV', True), devbranch, d=d) + setup_git_repo(srcsubdir, crd.getVar('PV'), devbranch, d=d) (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srcsubdir) initial_rev = stdout.rstrip() - crd.setVar('PATCHTOOL', 'git') - logger.info('Patching...') - task_executor.exec_func('do_patch', False) + runtask(fn, 'patch') bb.process.run('git tag -f devtool-patched', cwd=srcsubdir) @@ -639,8 +626,8 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d): if bb.data.inherits_class('kernel-yocto', d): # Store generate and store kernel config logger.info('Generating kernel config') - task_executor.exec_func('do_configure', False) - kconfig = os.path.join(crd.getVar('B', True), '.config') + runtask(fn, 'configure') + kconfig = os.path.join(crd.getVar('B'), '.config') tempdir_localdir = os.path.join(tempdir, 'oe-local-files') @@ -672,13 +659,34 @@ def _extract_source(srctree, keep_temp, devbranch, sync, d): shutil.move(srcsubdir, srctree) + if os.path.abspath(d.getVar('S')) == os.path.abspath(d.getVar('WORKDIR')): + # If recipe extracts to ${WORKDIR}, symlink the files into the srctree + # (otherwise the recipe won't build as expected) + local_files_dir = os.path.join(srctree, 'oe-local-files') + addfiles = [] + for root, _, files in os.walk(local_files_dir): + relpth = os.path.relpath(root, local_files_dir) + if relpth != '.': + bb.utils.mkdirhier(os.path.join(srctree, relpth)) + for fn in files: + if fn == '.gitignore': + continue + destpth = os.path.join(srctree, relpth, fn) + if os.path.exists(destpth): + os.unlink(destpth) + os.symlink('oe-local-files/%s' % fn, destpth) + addfiles.append(os.path.join(relpth, fn)) + if addfiles: + bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree) + useroptions = [] + oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=d) + bb.process.run('git %s commit -a -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree) + if kconfig: logger.info('Copying kernel config to srctree') shutil.copy2(kconfig, srctree) finally: - bb.logger.setLevel(origlevel) - if keep_temp: logger.info('Preserving temporary directory %s' % tempdir) else: @@ -748,7 +756,7 @@ def modify(args, config, basepath, workspace): if not rd: return 1 - pn = rd.getVar('PN', True) + pn = rd.getVar('PN') if pn != args.recipename: logger.info('Mapping %s 
to %s' % (args.recipename, pn)) if pn in workspace: @@ -769,8 +777,10 @@ def modify(args, config, basepath, workspace): if not tinfoil: # Error already shown return 1 + # We need to re-parse because tinfoil may have been re-initialised + rd = parse_recipe(config, tinfoil, args.recipename, True) - recipefile = rd.getVar('FILE', True) + recipefile = rd.getVar('FILE') appendfile = recipe_to_append(recipefile, config, args.wildcard) if os.path.exists(appendfile): raise DevtoolError("Another variant of recipe %s is already in your " @@ -783,7 +793,7 @@ def modify(args, config, basepath, workspace): initial_rev = None commits = [] if not args.no_extract: - initial_rev = _extract_source(srctree, False, args.branch, False, rd) + initial_rev = _extract_source(srctree, args.keep_temp, args.branch, False, rd, tinfoil) if not initial_rev: return 1 logger.info('Source tree extracted to %s' % srctree) @@ -807,8 +817,8 @@ def modify(args, config, basepath, workspace): initial_rev = stdout.rstrip() # Check that recipe isn't using a shared workdir - s = os.path.abspath(rd.getVar('S', True)) - workdir = os.path.abspath(rd.getVar('WORKDIR', True)) + s = os.path.abspath(rd.getVar('S')) + workdir = os.path.abspath(rd.getVar('WORKDIR')) if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: # Handle if S is set to a subdirectory of the source srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] @@ -851,6 +861,199 @@ def modify(args, config, basepath, workspace): return 0 + +def rename(args, config, basepath, workspace): + """Entry point for the devtool 'rename' subcommand""" + import bb + import oe.recipeutils + + check_workspace_recipe(workspace, args.recipename) + + if not (args.newname or args.version): + raise DevtoolError('You must specify a new name, a version with -V/--version, or both') + + recipefile = workspace[args.recipename]['recipefile'] + if not recipefile: + raise DevtoolError('devtool rename can only be used where the recipe file itself is in the workspace (e.g. 
after devtool add)') + + if args.newname and args.newname != args.recipename: + reason = oe.recipeutils.validate_pn(args.newname) + if reason: + raise DevtoolError(reason) + newname = args.newname + else: + newname = args.recipename + + append = workspace[args.recipename]['bbappend'] + appendfn = os.path.splitext(os.path.basename(append))[0] + splitfn = appendfn.split('_') + if len(splitfn) > 1: + origfnver = appendfn.split('_')[1] + else: + origfnver = '' + + recipefilemd5 = None + tinfoil = setup_tinfoil(basepath=basepath, tracking=True) + try: + rd = parse_recipe(config, tinfoil, args.recipename, True) + if not rd: + return 1 + + bp = rd.getVar('BP') + bpn = rd.getVar('BPN') + if newname != args.recipename: + localdata = rd.createCopy() + localdata.setVar('PN', newname) + newbpn = localdata.getVar('BPN') + else: + newbpn = bpn + s = rd.getVar('S', False) + src_uri = rd.getVar('SRC_URI', False) + pv = rd.getVar('PV') + + # Correct variable values that refer to the upstream source - these + # values must stay the same, so if the name/version are changing then + # we need to fix them up + new_s = s + new_src_uri = src_uri + if newbpn != bpn: + # ${PN} here is technically almost always incorrect, but people do use it + new_s = new_s.replace('${BPN}', bpn) + new_s = new_s.replace('${PN}', bpn) + new_s = new_s.replace('${BP}', '%s-${PV}' % bpn) + new_src_uri = new_src_uri.replace('${BPN}', bpn) + new_src_uri = new_src_uri.replace('${PN}', bpn) + new_src_uri = new_src_uri.replace('${BP}', '%s-${PV}' % bpn) + if args.version and origfnver == pv: + new_s = new_s.replace('${PV}', pv) + new_s = new_s.replace('${BP}', '${BPN}-%s' % pv) + new_src_uri = new_src_uri.replace('${PV}', pv) + new_src_uri = new_src_uri.replace('${BP}', '${BPN}-%s' % pv) + patchfields = {} + if new_s != s: + patchfields['S'] = new_s + if new_src_uri != src_uri: + patchfields['SRC_URI'] = new_src_uri + if patchfields: + recipefilemd5 = bb.utils.md5_file(recipefile) + oe.recipeutils.patch_recipe(rd, recipefile, patchfields) + newrecipefilemd5 = bb.utils.md5_file(recipefile) + finally: + tinfoil.shutdown() + + if args.version: + newver = args.version + else: + newver = origfnver + + if newver: + newappend = '%s_%s.bbappend' % (newname, newver) + newfile = '%s_%s.bb' % (newname, newver) + else: + newappend = '%s.bbappend' % newname + newfile = '%s.bb' % newname + + oldrecipedir = os.path.dirname(recipefile) + newrecipedir = os.path.join(config.workspace_path, 'recipes', newname) + if oldrecipedir != newrecipedir: + bb.utils.mkdirhier(newrecipedir) + + newappend = os.path.join(os.path.dirname(append), newappend) + newfile = os.path.join(newrecipedir, newfile) + + # Rename bbappend + logger.info('Renaming %s to %s' % (append, newappend)) + os.rename(append, newappend) + # Rename recipe file + logger.info('Renaming %s to %s' % (recipefile, newfile)) + os.rename(recipefile, newfile) + + # Rename source tree if it's the default path + appendmd5 = None + if not args.no_srctree: + srctree = workspace[args.recipename]['srctree'] + if os.path.abspath(srctree) == os.path.join(config.workspace_path, 'sources', args.recipename): + newsrctree = os.path.join(config.workspace_path, 'sources', newname) + logger.info('Renaming %s to %s' % (srctree, newsrctree)) + shutil.move(srctree, newsrctree) + # Correct any references (basically EXTERNALSRC*) in the .bbappend + appendmd5 = bb.utils.md5_file(newappend) + appendlines = [] + with open(newappend, 'r') as f: + for line in f: + appendlines.append(line) + with open(newappend, 'w') as f: + for line 
in appendlines: + if srctree in line: + line = line.replace(srctree, newsrctree) + f.write(line) + newappendmd5 = bb.utils.md5_file(newappend) + + bpndir = None + newbpndir = None + if newbpn != bpn: + bpndir = os.path.join(oldrecipedir, bpn) + if os.path.exists(bpndir): + newbpndir = os.path.join(newrecipedir, newbpn) + logger.info('Renaming %s to %s' % (bpndir, newbpndir)) + shutil.move(bpndir, newbpndir) + + bpdir = None + newbpdir = None + if newver != origfnver or newbpn != bpn: + bpdir = os.path.join(oldrecipedir, bp) + if os.path.exists(bpdir): + newbpdir = os.path.join(newrecipedir, '%s-%s' % (newbpn, newver)) + logger.info('Renaming %s to %s' % (bpdir, newbpdir)) + shutil.move(bpdir, newbpdir) + + if oldrecipedir != newrecipedir: + # Move any stray files and delete the old recipe directory + for entry in os.listdir(oldrecipedir): + oldpath = os.path.join(oldrecipedir, entry) + newpath = os.path.join(newrecipedir, entry) + logger.info('Renaming %s to %s' % (oldpath, newpath)) + shutil.move(oldpath, newpath) + os.rmdir(oldrecipedir) + + # Now take care of entries in .devtool_md5 + md5entries = [] + with open(os.path.join(config.workspace_path, '.devtool_md5'), 'r') as f: + for line in f: + md5entries.append(line) + + if bpndir and newbpndir: + relbpndir = os.path.relpath(bpndir, config.workspace_path) + '/' + else: + relbpndir = None + if bpdir and newbpdir: + relbpdir = os.path.relpath(bpdir, config.workspace_path) + '/' + else: + relbpdir = None + + with open(os.path.join(config.workspace_path, '.devtool_md5'), 'w') as f: + for entry in md5entries: + splitentry = entry.rstrip().split('|') + if len(splitentry) > 2: + if splitentry[0] == args.recipename: + splitentry[0] = newname + if splitentry[1] == os.path.relpath(append, config.workspace_path): + splitentry[1] = os.path.relpath(newappend, config.workspace_path) + if appendmd5 and splitentry[2] == appendmd5: + splitentry[2] = newappendmd5 + elif splitentry[1] == os.path.relpath(recipefile, config.workspace_path): + splitentry[1] = os.path.relpath(newfile, config.workspace_path) + if recipefilemd5 and splitentry[2] == recipefilemd5: + splitentry[2] = newrecipefilemd5 + elif relbpndir and splitentry[1].startswith(relbpndir): + splitentry[1] = os.path.relpath(os.path.join(newbpndir, splitentry[1][len(relbpndir):]), config.workspace_path) + elif relbpdir and splitentry[1].startswith(relbpdir): + splitentry[1] = os.path.relpath(os.path.join(newbpdir, splitentry[1][len(relbpdir):]), config.workspace_path) + entry = '|'.join(splitentry) + '\n' + f.write(entry) + return 0 + + def _get_patchset_revs(srctree, recipe_path, initial_rev=None): """Get initial and update rev of a recipe. These are the start point of the whole patchset and start point for the patches to be re-generated/updated. 
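
For orientation while reading the rename hunk above: the new 'devtool rename' command also rewrites the workspace's .devtool_md5 index, whose entries are pipe-delimited (recipename|relative-path|md5sum). A simplified sketch of that bookkeeping, with hypothetical names and not the patch's exact code:

    def rewrite_md5_entry(entry, name_map, path_map):
        # entry is one line of .devtool_md5: "recipename|relative-path|md5sum"
        fields = entry.rstrip('\n').split('|')
        if len(fields) > 2:
            fields[0] = name_map.get(fields[0], fields[0])
            fields[1] = path_map.get(fields[1], fields[1])
        return '|'.join(fields) + '\n'

    # e.g. rewrite_md5_entry('foo|appends/foo_1.0.bbappend|<md5>\n',
    #                        {'foo': 'bar'},
    #                        {'appends/foo_1.0.bbappend': 'appends/bar_1.0.bbappend'})
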
@@ -909,6 +1112,15 @@ def _remove_file_entries(srcuri, filelist): break return entries, remaining +def _replace_srcuri_entry(srcuri, filename, newentry): + """Replace entry corresponding to specified file with a new entry""" + basename = os.path.basename(filename) + for i in range(len(srcuri)): + if os.path.basename(srcuri[i].split(';')[0]) == basename: + srcuri.pop(i) + srcuri.insert(i, newentry) + break + def _remove_source_files(append, files, destpath): """Unlink existing patch files""" for path in files: @@ -933,7 +1145,7 @@ def _remove_source_files(append, files, destpath): raise -def _export_patches(srctree, rd, start_rev, destdir): +def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None): """Export patches from srctree to given location. Returns three-tuple of dicts: 1. updated - patches that already exist in SRCURI @@ -962,18 +1174,44 @@ def _export_patches(srctree, rd, start_rev, destdir): # revision This does assume that people are using unique shortlog # values, but they ought to be anyway... new_basename = seqpatch_re.match(new_patch).group(2) - found = False + match_name = None for old_patch in existing_patches: old_basename = seqpatch_re.match(old_patch).group(2) - if new_basename == old_basename: - updated[new_patch] = existing_patches.pop(old_patch) - found = True - # Rename patch files - if new_patch != old_patch: - os.rename(os.path.join(destdir, new_patch), - os.path.join(destdir, old_patch)) + old_basename_splitext = os.path.splitext(old_basename) + if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: + old_patch_noext = os.path.splitext(old_patch)[0] + match_name = old_patch_noext + break + elif new_basename == old_basename: + match_name = old_patch break - if not found: + if match_name: + # Rename patch files + if new_patch != match_name: + os.rename(os.path.join(destdir, new_patch), + os.path.join(destdir, match_name)) + # Need to pop it off the list now before checking changed_revs + oldpath = existing_patches.pop(old_patch) + if changed_revs is not None: + # Avoid updating patches that have not actually changed + with open(os.path.join(destdir, match_name), 'r') as f: + firstlineitems = f.readline().split() + # Looking for "From " line + if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: + if not firstlineitems[1] in changed_revs: + continue + # Recompress if necessary + if oldpath.endswith(('.gz', '.Z')): + bb.process.run(['gzip', match_name], cwd=destdir) + if oldpath.endswith('.gz'): + match_name += '.gz' + else: + match_name += '.Z' + elif oldpath.endswith('.bz2'): + bb.process.run(['bzip2', match_name], cwd=destdir) + match_name += '.bz2' + updated[match_name] = oldpath + else: added[new_patch] = None return (updated, added, existing_patches) @@ -991,7 +1229,7 @@ def _create_kconfig_diff(srctree, rd, outfile): stdout, stderr = pipe.communicate() if pipe.returncode == 1: logger.info("Updating config fragment %s" % outfile) - with open(outfile, 'w') as fobj: + with open(outfile, 'wb') as fobj: fobj.write(stdout) elif pipe.returncode == 0: logger.info("Would remove config fragment %s" % outfile) @@ -1072,8 +1310,8 @@ def _export_local_files(srctree, rd, destdir): elif fname != '.gitignore': added[fname] = None - workdir = rd.getVar('WORKDIR', True) - s = rd.getVar('S', True) + workdir = rd.getVar('WORKDIR') + s = rd.getVar('S') if not s.endswith(os.sep): s += os.sep @@ -1095,14 +1333,14 @@ def _export_local_files(srctree, rd, destdir): def _determine_files_dir(rd): """Determine the appropriate 
files directory for a recipe""" - recipedir = rd.getVar('FILE_DIRNAME', True) - for entry in rd.getVar('FILESPATH', True).split(':'): + recipedir = rd.getVar('FILE_DIRNAME') + for entry in rd.getVar('FILESPATH').split(':'): relpth = os.path.relpath(entry, recipedir) if not os.sep in relpth: # One (or zero) levels below only, so we don't put anything in machine-specific directories if os.path.isdir(entry): return entry - return os.path.join(recipedir, rd.getVar('BPN', True)) + return os.path.join(recipedir, rd.getVar('BPN')) def _update_recipe_srcrev(srctree, rd, appendlayerdir, wildcard_version, no_remove): @@ -1110,7 +1348,7 @@ def _update_recipe_srcrev(srctree, rd, appendlayerdir, wildcard_version, no_remo import bb import oe.recipeutils - recipefile = rd.getVar('FILE', True) + recipefile = rd.getVar('FILE') logger.info('Updating SRCREV in recipe %s' % os.path.basename(recipefile)) # Get HEAD revision @@ -1192,7 +1430,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil import bb import oe.recipeutils - recipefile = rd.getVar('FILE', True) + recipefile = rd.getVar('FILE') append = workspace[recipename]['bbappend'] if not os.path.exists(append): raise DevtoolError('unable to find workspace bbappend for recipe %s' % @@ -1203,6 +1441,10 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil raise DevtoolError('Unable to find initial revision - please specify ' 'it with --initial-rev') + dl_dir = rd.getVar('DL_DIR') + if not dl_dir.endswith('/'): + dl_dir += '/' + tempdir = tempfile.mkdtemp(prefix='devtool') try: local_files_dir = tempfile.mkdtemp(dir=tempdir) @@ -1220,7 +1462,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil # Get updated patches from source tree patches_dir = tempfile.mkdtemp(dir=tempdir) upd_p, new_p, del_p = _export_patches(srctree, rd, update_rev, - patches_dir) + patches_dir, changed_revs) updatefiles = False updaterecipe = False destpath = None @@ -1247,6 +1489,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil logger.info('No patches or local source files needed updating') else: # Update existing files + files_dir = _determine_files_dir(rd) for basepath, path in upd_f.items(): logger.info('Updating file %s' % basepath) if os.path.isabs(basepath): @@ -1258,18 +1501,19 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil updatefiles = True for basepath, path in upd_p.items(): patchfn = os.path.join(patches_dir, basepath) - if changed_revs is not None: - # Avoid updating patches that have not actually changed - with open(patchfn, 'r') as f: - firstlineitems = f.readline().split() - if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: - if not firstlineitems[1] in changed_revs: - continue - logger.info('Updating patch %s' % basepath) + if os.path.dirname(path) + '/' == dl_dir: + # This is a a downloaded patch file - we now need to + # replace the entry in SRC_URI with our local version + logger.info('Replacing remote patch %s with updated local version' % basepath) + path = os.path.join(files_dir, basepath) + _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath) + updaterecipe = True + else: + logger.info('Updating patch %s' % basepath) + logger.debug('Moving new patch %s to %s' % (patchfn, path)) _move_file(patchfn, path) updatefiles = True # Add any new files - files_dir = _determine_files_dir(rd) for basepath, path in new_f.items(): logger.info('Adding new file %s' % basepath) 
_move_file(os.path.join(local_files_dir, basepath), @@ -1356,7 +1600,7 @@ def update_recipe(args, config, basepath, workspace): updated = _update_recipe(args.recipename, workspace, rd, args.mode, args.append, args.wildcard_version, args.no_remove, args.initial_rev) if updated: - rf = rd.getVar('FILE', True) + rf = rd.getVar('FILE') if rf.startswith(config.workspace_path): logger.warn('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf) finally: @@ -1460,7 +1704,7 @@ def reset(args, config, basepath, workspace): def _get_layer(layername, d): """Determine the base layer path for the specified layer name/path""" - layerdirs = d.getVar('BBLAYERS', True).split() + layerdirs = d.getVar('BBLAYERS').split() layers = {os.path.basename(p): p for p in layerdirs} # Provide some shortcuts if layername.lower() in ['oe-core', 'openembedded-core']: @@ -1478,6 +1722,7 @@ def finish(args, config, basepath, workspace): check_workspace_recipe(workspace, args.recipename) + no_clean = False tinfoil = setup_tinfoil(basepath=basepath, tracking=True) try: rd = parse_recipe(config, tinfoil, args.recipename, True) @@ -1485,7 +1730,7 @@ def finish(args, config, basepath, workspace): return 1 destlayerdir = _get_layer(args.destination, tinfoil.config_data) - origlayerdir = oe.recipeutils.find_layerdir(rd.getVar('FILE', True)) + origlayerdir = oe.recipeutils.find_layerdir(rd.getVar('FILE')) if not os.path.isdir(destlayerdir): raise DevtoolError('Unable to find layer or directory matching "%s"' % args.destination) @@ -1515,6 +1760,11 @@ def finish(args, config, basepath, workspace): destpath = oe.recipeutils.get_bbfile_path(rd, destlayerdir, origrelpath) if not destpath: raise DevtoolError("Unable to determine destination layer path - check that %s specifies an actual layer and %s/conf/layer.conf specifies BBFILES. You may also need to specify a more complete path." 
% (args.destination, destlayerdir)) + # Warn if the layer isn't in bblayers.conf (the code to create a bbappend will do this in other cases) + layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()] + if not os.path.abspath(destlayerdir) in layerdirs: + bb.warn('Specified destination layer is not currently enabled in bblayers.conf, so the %s recipe will now be unavailable in your current configuration until you add the layer there' % args.recipename) + elif destlayerdir == origlayerdir: # Same layer, update the original recipe appendlayerdir = None @@ -1539,8 +1789,9 @@ def finish(args, config, basepath, workspace): if origlayerdir == config.workspace_path and destpath: # Recipe file itself is in the workspace - need to move it and any # associated files to the specified layer + no_clean = True logger.info('Moving recipe file to %s' % destpath) - recipedir = os.path.dirname(rd.getVar('FILE', True)) + recipedir = os.path.dirname(rd.getVar('FILE')) for root, _, files in os.walk(recipedir): for fn in files: srcpath = os.path.join(root, fn) @@ -1553,7 +1804,7 @@ def finish(args, config, basepath, workspace): tinfoil.shutdown() # Everything else has succeeded, we can now reset - _reset([args.recipename], no_clean=False, config=config, basepath=basepath, workspace=workspace) + _reset([args.recipename], no_clean=no_clean, config=config, basepath=basepath, workspace=workspace) return 0 @@ -1580,6 +1831,7 @@ def register_commands(subparsers, context): group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true") group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI') + parser_add.add_argument('--fetch-dev', help='For npm, also fetch devDependencies', action="store_true") parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)') parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true") parser_add.add_argument('--autorev', '-a', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true") @@ -1601,6 +1853,7 @@ def register_commands(subparsers, context): group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true") group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true") parser_modify.add_argument('--branch', '-b', default="devtool", help='Name for development branch to checkout (when not using -n/--no-extract) (default "%(default)s")') + parser_modify.add_argument('--keep-temp', help='Keep temporary directory (for debugging)', action="store_true") parser_modify.set_defaults(func=modify) parser_extract = subparsers.add_parser('extract', help='Extract the source for an existing recipe', @@ -1622,6 +1875,15 @@ def register_commands(subparsers, context): parser_sync.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)') parser_sync.set_defaults(func=sync) + parser_rename = subparsers.add_parser('rename', help='Rename a recipe file in the workspace', + description='Renames the recipe file for a recipe in the workspace, changing the name or version part or both, ensuring that all references within the 
workspace are updated at the same time. Only works when the recipe file itself is in the workspace, e.g. after devtool add. Particularly useful when devtool add did not automatically determine the correct name.', + group='working', order=10) + parser_rename.add_argument('recipename', help='Current name of recipe to rename') + parser_rename.add_argument('newname', nargs='?', help='New name for recipe (optional, not needed if you only want to change the version)') + parser_rename.add_argument('--version', '-V', help='Change the version (NOTE: this does not change the version fetched by the recipe, just the version in the recipe file name)') + parser_rename.add_argument('--no-srctree', '-s', action='store_true', help='Do not rename the source tree directory (if the default source tree path has been used) - keeping the old name may be desirable if there are internal/other external references to this path') + parser_rename.set_defaults(func=rename) + parser_update_recipe = subparsers.add_parser('update-recipe', help='Apply changes from external source tree to recipe', description='Applies changes from external source tree to a recipe (updating/adding/removing patches as necessary, or by updating SRCREV). Note that these changes need to have been committed to the git repository in order to be recognised.', group='working', order=-90) diff --git a/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py b/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py index a4239f1cd..05fb9e5ed 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/upgrade.py @@ -27,6 +27,10 @@ import argparse import scriptutils import errno import bb + +devtool_path = os.path.dirname(os.path.realpath(__file__)) + '/../../../meta/lib' +sys.path = sys.path + [devtool_path] + import oe.recipeutils from devtool import standard from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build @@ -68,7 +72,7 @@ def _remove_patch_dirs(recipefolder): shutil.rmtree(os.path.join(root,d)) def _recipe_contains(rd, var): - rf = rd.getVar('FILE', True) + rf = rd.getVar('FILE') varfiles = oe.recipeutils.get_var_files(rf, [var], rd) for var, fn in varfiles.items(): if fn and fn.startswith(os.path.dirname(rf) + os.sep): @@ -117,7 +121,7 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d) brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename srctree = os.path.abspath(srctree) - pn = d.getVar('PN',True) + pn = d.getVar('PN') af = os.path.join(appendpath, '%s.bbappend' % brf) with open(af, 'w') as f: f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n') @@ -132,7 +136,7 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d) if rev: f.write('# initial_rev: %s\n' % rev) if copied: - f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE', True))) + f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE'))) f.write('# original_files: %s\n' % ' '.join(copied)) return af @@ -154,7 +158,7 @@ def _upgrade_error(e, rf, srctree): raise DevtoolError(e) def _get_uri(rd): - srcuris = rd.getVar('SRC_URI', True).split() + srcuris = rd.getVar('SRC_URI').split() if not len(srcuris): raise DevtoolError('SRC_URI not found on recipe') # Get first non-local entry in SRC_URI - usually by convention it's @@ -185,7 +189,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin crd = rd.createCopy() - pv = crd.getVar('PV', True) + pv = 
crd.getVar('PV') crd.setVar('PV', newpv) tmpsrctree = None @@ -270,15 +274,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, branch, keep_temp, tin def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil, rd): """Creates the new recipe under workspace""" - bpn = rd.getVar('BPN', True) + bpn = rd.getVar('BPN') path = os.path.join(workspace, 'recipes', bpn) bb.utils.mkdirhier(path) copied, _ = oe.recipeutils.copy_recipe_files(rd, path) - oldpv = rd.getVar('PV', True) + oldpv = rd.getVar('PV') if not newpv: newpv = oldpv - origpath = rd.getVar('FILE', True) + origpath = rd.getVar('FILE') fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path) logger.debug('Upgraded %s => %s' % (origpath, fullpath)) @@ -320,7 +324,7 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, workspace, tinfoil newvalues['SRC_URI[md5sum]'] = md5 newvalues['SRC_URI[sha256sum]'] = sha256 - rd = oe.recipeutils.parse_recipe(tinfoil.cooker, fullpath, None) + rd = tinfoil.parse_recipe_file(fullpath, False) oe.recipeutils.patch_recipe(rd, fullpath, newvalues) return fullpath, copied @@ -341,7 +345,7 @@ def upgrade(args, config, basepath, workspace): if not rd: return 1 - pn = rd.getVar('PN', True) + pn = rd.getVar('PN') if pn != args.recipename: logger.info('Mapping %s to %s' % (args.recipename, pn)) if pn in workspace: @@ -353,17 +357,17 @@ def upgrade(args, config, basepath, workspace): srctree = standard.get_default_srctree(config, pn) standard._check_compatible_recipe(pn, rd) - old_srcrev = rd.getVar('SRCREV', True) + old_srcrev = rd.getVar('SRCREV') if old_srcrev == 'INVALID': old_srcrev = None if old_srcrev and not args.srcrev: raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading") - if rd.getVar('PV', True) == args.version and old_srcrev == args.srcrev: + if rd.getVar('PV') == args.version and old_srcrev == args.srcrev: raise DevtoolError("Current and upgrade versions are the same version") rf = None try: - rev1 = standard._extract_source(srctree, False, 'devtool-orig', False, rd) + rev1 = standard._extract_source(srctree, False, 'devtool-orig', False, rd, tinfoil) rev2, md5, sha256 = _extract_new_source(args.version, srctree, args.no_patch, args.srcrev, args.branch, args.keep_temp, tinfoil, rd) diff --git a/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py b/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py index b761a80f8..0437e6417 100644 --- a/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py +++ b/import-layers/yocto-poky/scripts/lib/devtool/utilcmds.py @@ -39,7 +39,7 @@ def edit_recipe(args, config, basepath, workspace): rd = parse_recipe(config, tinfoil, args.recipename, True) if not rd: return 1 - recipefile = rd.getVar('FILE', True) + recipefile = rd.getVar('FILE') finally: tinfoil.shutdown() else: @@ -62,20 +62,20 @@ def configure_help(args, config, basepath, workspace): rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False) if not rd: return 1 - b = rd.getVar('B', True) - s = rd.getVar('S', True) + b = rd.getVar('B') + s = rd.getVar('S') configurescript = os.path.join(s, 'configure') confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or []) - configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS', True) or '') - extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF', True) or '') - extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE', True) or '') - do_configure 
= rd.getVar('do_configure', True) or '' + configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '') + extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '') + extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '') + do_configure = rd.getVar('do_configure') or '' do_configure_noexpand = rd.getVar('do_configure', False) or '' packageconfig = rd.getVarFlags('PACKAGECONFIG') or [] autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure) cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure) - cmake_do_configure = rd.getVar('cmake_do_configure', True) - pn = rd.getVar('PN', True) + cmake_do_configure = rd.getVar('cmake_do_configure') + pn = rd.getVar('PN') finally: tinfoil.shutdown() diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/append.py b/import-layers/yocto-poky/scripts/lib/recipetool/append.py index 1e0fc1ee8..69c8bb77a 100644 --- a/import-layers/yocto-poky/scripts/lib/recipetool/append.py +++ b/import-layers/yocto-poky/scripts/lib/recipetool/append.py @@ -48,7 +48,7 @@ def find_target_file(targetpath, d, pkglist=None): """Find the recipe installing the specified target path, optionally limited to a select list of packages""" import json - pkgdata_dir = d.getVar('PKGDATA_DIR', True) + pkgdata_dir = d.getVar('PKGDATA_DIR') # The mix between /etc and ${sysconfdir} here may look odd, but it is just # being consistent with usage elsewhere @@ -97,25 +97,12 @@ def find_target_file(targetpath, d, pkglist=None): recipes[targetpath].append('!%s' % pn) return recipes -def _get_recipe_file(cooker, pn): - import oe.recipeutils - recipefile = oe.recipeutils.pn_to_recipe(cooker, pn) - if not recipefile: - skipreasons = oe.recipeutils.get_unavailable_reasons(cooker, pn) - if skipreasons: - logger.error('\n'.join(skipreasons)) - else: - logger.error("Unable to find any recipe file matching %s" % pn) - return recipefile - def _parse_recipe(pn, tinfoil): - import oe.recipeutils - recipefile = _get_recipe_file(tinfoil.cooker, pn) - if not recipefile: - # Error already logged + try: + rd = tinfoil.parse_recipe(pn) + except bb.providers.NoProvider as e: + logger.error(str(e)) return None - append_files = tinfoil.cooker.collection.get_file_appends(recipefile) - rd = oe.recipeutils.parse_recipe(tinfoil.cooker, recipefile, append_files) return rd def determine_file_source(targetpath, rd): @@ -123,8 +110,8 @@ def determine_file_source(targetpath, rd): import oe.recipeutils # See if it's in do_install for the recipe - workdir = rd.getVar('WORKDIR', True) - src_uri = rd.getVar('SRC_URI', True) + workdir = rd.getVar('WORKDIR') + src_uri = rd.getVar('SRC_URI') srcfile = '' modpatches = [] elements = check_do_install(rd, targetpath) @@ -134,7 +121,7 @@ def determine_file_source(targetpath, rd): logger.debug('source path: %s' % srcpath) if not srcpath.startswith('/'): # Handle non-absolute path - srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs', True).split()[-1], srcpath)) + srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath)) if srcpath.startswith(workdir): # OK, now we have the source file name, look for it in SRC_URI workdirfile = os.path.relpath(srcpath, workdir) @@ -203,22 +190,22 @@ def get_source_path(cmdelements): def get_func_deps(func, d): """Find the function dependencies of a shell function""" - deps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func, True)) 
- deps |= set((d.getVarFlag(func, "vardeps", True) or "").split()) + deps = bb.codeparser.ShellParser(func, logger).parse_shell(d.getVar(func)) + deps |= set((d.getVarFlag(func, "vardeps") or "").split()) funcdeps = [] for dep in deps: - if d.getVarFlag(dep, 'func', True): + if d.getVarFlag(dep, 'func'): funcdeps.append(dep) return funcdeps def check_do_install(rd, targetpath): """Look at do_install for a command that installs/copies the specified target path""" - instpath = os.path.abspath(os.path.join(rd.getVar('D', True), targetpath.lstrip('/'))) - do_install = rd.getVar('do_install', True) + instpath = os.path.abspath(os.path.join(rd.getVar('D'), targetpath.lstrip('/'))) + do_install = rd.getVar('do_install') # Handle where do_install calls other functions (somewhat crudely, but good enough for this purpose) deps = get_func_deps('do_install', rd) for dep in deps: - do_install = do_install.replace(dep, rd.getVar(dep, True)) + do_install = do_install.replace(dep, rd.getVar(dep)) # Look backwards through do_install as we want to catch where a later line (perhaps # from a bbappend) is writing over the top @@ -335,12 +322,12 @@ def appendfile(args): def appendsrc(args, files, rd, extralines=None): import oe.recipeutils - srcdir = rd.getVar('S', True) - workdir = rd.getVar('WORKDIR', True) + srcdir = rd.getVar('S') + workdir = rd.getVar('WORKDIR') import bb.fetch simplified = {} - src_uri = rd.getVar('SRC_URI', True).split() + src_uri = rd.getVar('SRC_URI').split() for uri in src_uri: if uri.endswith(';'): uri = uri[:-1] @@ -353,7 +340,7 @@ def appendsrc(args, files, rd, extralines=None): for newfile, srcfile in files.items(): src_destdir = os.path.dirname(srcfile) if not args.use_workdir: - if rd.getVar('S', True) == rd.getVar('STAGING_KERNEL_DIR', True): + if rd.getVar('S') == rd.getVar('STAGING_KERNEL_DIR'): srcdir = os.path.join(workdir, 'git') if not bb.data.inherits_class('kernel-yocto', rd): logger.warn('S == STAGING_KERNEL_DIR and non-kernel-yocto, unable to determine path to srcdir, defaulting to ${WORKDIR}/git') diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/create.py b/import-layers/yocto-poky/scripts/lib/recipetool/create.py index d427d3206..4de52fc30 100644 --- a/import-layers/yocto-poky/scripts/lib/recipetool/create.py +++ b/import-layers/yocto-poky/scripts/lib/recipetool/create.py @@ -26,12 +26,24 @@ import logging import scriptutils from urllib.parse import urlparse, urldefrag, urlsplit import hashlib - +import bb.fetch2 logger = logging.getLogger('recipetool') tinfoil = None plugins = None +def log_error_cond(message, debugonly): + if debugonly: + logger.debug(message) + else: + logger.error(message) + +def log_info_cond(message, debugonly): + if debugonly: + logger.debug(message) + else: + logger.info(message) + def plugin_init(pluginlist): # Take a reference to the list so we can use it later global plugins @@ -47,6 +59,9 @@ class RecipeHandler(object): recipecmakefilemap = {} recipebinmap = {} + def __init__(self): + self._devtool = False + @staticmethod def load_libmap(d): '''Load library->recipe mapping''' @@ -56,8 +71,8 @@ class RecipeHandler(object): return # First build up library->package mapping shlib_providers = oe.package.read_shlib_providers(d) - libdir = d.getVar('libdir', True) - base_libdir = d.getVar('base_libdir', True) + libdir = d.getVar('libdir') + base_libdir = d.getVar('base_libdir') libpaths = list(set([base_libdir, libdir])) libname_re = re.compile('^lib(.+)\.so.*$') pkglibmap = {} @@ -73,7 +88,7 @@ class RecipeHandler(object): 
logger.debug('unable to extract library name from %s' % lib) # Now turn it into a library->recipe mapping - pkgdata_dir = d.getVar('PKGDATA_DIR', True) + pkgdata_dir = d.getVar('PKGDATA_DIR') for libname, pkg in pkglibmap.items(): try: with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f: @@ -97,9 +112,9 @@ class RecipeHandler(object): '''Build up development file->recipe mapping''' if RecipeHandler.recipeheadermap: return - pkgdata_dir = d.getVar('PKGDATA_DIR', True) - includedir = d.getVar('includedir', True) - cmakedir = os.path.join(d.getVar('libdir', True), 'cmake') + pkgdata_dir = d.getVar('PKGDATA_DIR') + includedir = d.getVar('includedir') + cmakedir = os.path.join(d.getVar('libdir'), 'cmake') for pkg in glob.glob(os.path.join(pkgdata_dir, 'runtime', '*-dev')): with open(os.path.join(pkgdata_dir, 'runtime', pkg)) as f: pn = None @@ -128,9 +143,9 @@ class RecipeHandler(object): '''Build up native binary->recipe mapping''' if RecipeHandler.recipebinmap: return - sstate_manifests = d.getVar('SSTATE_MANIFESTS', True) - staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE', True) - build_arch = d.getVar('BUILD_ARCH', True) + sstate_manifests = d.getVar('SSTATE_MANIFESTS') + staging_bindir_native = d.getVar('STAGING_BINDIR_NATIVE') + build_arch = d.getVar('BUILD_ARCH') fileprefix = 'manifest-%s-' % build_arch for fn in glob.glob(os.path.join(sstate_manifests, '%s*-native.populate_sysroot' % fileprefix)): with open(fn, 'r') as f: @@ -222,7 +237,8 @@ class RecipeHandler(object): if deps: values['DEPENDS'] = ' '.join(deps) - def genfunction(self, outlines, funcname, content, python=False, forcespace=False): + @staticmethod + def genfunction(outlines, funcname, content, python=False, forcespace=False): if python: prefix = 'python ' else: @@ -323,7 +339,7 @@ def determine_from_url(srcuri): pn = res.group(1).strip().replace('_', '-') pv = res.group(2).strip().replace('_', '.') - if not pn and not pv: + if not pn and not pv and parseres.scheme not in ['git', 'gitsm', 'svn', 'hg']: srcfile = os.path.basename(parseres.path.rstrip('/')) pn, pv = determine_from_filename(srcfile) @@ -335,7 +351,6 @@ def supports_srcrev(uri): # This is a bit sad, but if you don't have this set there can be some # odd interactions with the urldata cache which lead to errors localdata.setVar('SRCREV', '${AUTOREV}') - bb.data.update_data(localdata) try: fetcher = bb.fetch2.Fetch([uri], localdata) urldata = fetcher.ud @@ -353,14 +368,31 @@ def reformat_git_uri(uri): '''Convert any http[s]://....git URI into git://...;protocol=http[s]''' checkuri = uri.split(';', 1)[0] if checkuri.endswith('.git') or '/git/' in checkuri or re.match('https?://github.com/[^/]+/[^/]+/?$', checkuri): - res = re.match('(http|https|ssh)://([^;]+(\.git)?)(;.*)?$', uri) - if res: - # Need to switch the URI around so that the git fetcher is used - return 'git://%s;protocol=%s%s' % (res.group(2), res.group(1), res.group(4) or '') - elif '@' in checkuri: - # Catch e.g. 
git@git.example.com:repo.git - return 'git://%s;protocol=ssh' % checkuri.replace(':', '/', 1) - return uri + # Appends scheme if the scheme is missing + if not '://' in uri: + uri = 'git://' + uri + scheme, host, path, user, pswd, parms = bb.fetch2.decodeurl(uri) + # Detection mechanism, this is required due to certain URL are formatter with ":" rather than "/" + # which causes decodeurl to fail getting the right host and path + if len(host.split(':')) > 1: + splitslash = host.split(':') + host = splitslash[0] + path = '/' + splitslash[1] + path + #Algorithm: + # if user is defined, append protocol=ssh or if a protocol is defined, then honor the user-defined protocol + # if no user & password is defined, check for scheme type and append the protocol with the scheme type + # finally if protocols or if the url is well-formed, do nothing and rejoin everything back to normal + # Need to repackage the arguments for encodeurl, the format is: (scheme, host, path, user, password, OrderedDict([('key', 'value')])) + if user: + if not 'protocol' in parms: + parms.update({('protocol', 'ssh')}) + elif (scheme == "http" or scheme == 'https' or scheme == 'ssh') and not ('protocol' in parms): + parms.update({('protocol', scheme)}) + # Always append 'git://' + fUrl = bb.fetch2.encodeurl(('git', host, path, user, pswd, parms)) + return fUrl + else: + return uri def is_package(url): '''Check if a URL points to a package''' @@ -404,12 +436,14 @@ def create_recipe(args): srcuri = rev_re.sub('', srcuri) tempsrc = tempfile.mkdtemp(prefix='recipetool-') srctree = tempsrc + d = bb.data.createCopy(tinfoil.config_data) if fetchuri.startswith('npm://'): # Check if npm is available - check_npm(tinfoil.config_data) + npm_bindir = check_npm(tinfoil, args.devtool) + d.prependVar('PATH', '%s:' % npm_bindir) logger.info('Fetching %s...' 
% srcuri) try: - checksums = scriptutils.fetch_uri(tinfoil.config_data, fetchuri, srctree, srcrev) + checksums = scriptutils.fetch_uri(d, fetchuri, srctree, srcrev) except bb.fetch2.BBFetchException as e: logger.error(str(e).rstrip()) sys.exit(1) @@ -448,8 +482,8 @@ def create_recipe(args): if pkgfile: if pkgfile.endswith(('.deb', '.ipk')): - stdout, _ = bb.process.run('ar x %s control.tar.gz' % pkgfile, cwd=tmpfdir) - stdout, _ = bb.process.run('tar xf control.tar.gz ./control', cwd=tmpfdir) + stdout, _ = bb.process.run('ar x %s' % pkgfile, cwd=tmpfdir) + stdout, _ = bb.process.run('tar xf control.tar.gz', cwd=tmpfdir) values = convert_debian(tmpfdir) extravalues.update(values) elif pkgfile.endswith(('.rpm', '.srpm')): @@ -554,7 +588,6 @@ def create_recipe(args): if name_pv and not realpv: realpv = name_pv - if not srcuri: lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)') lines_before.append('SRC_URI = "%s"' % srcuri) @@ -588,6 +621,11 @@ def create_recipe(args): lines_after.append('INSANE_SKIP_${PN} += "already-stripped"') lines_after.append('') + if args.fetch_dev: + extravalues['fetchdev'] = True + else: + extravalues['fetchdev'] = None + # Find all plugins that want to register handlers logger.debug('Loading recipe handlers') raw_handlers = [] @@ -604,6 +642,7 @@ def create_recipe(args): handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True) for handler, priority, _ in handlers: logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority)) + setattr(handler, '_devtool', args.devtool) handlers = [item[0] for item in handlers] # Apply the handlers @@ -640,7 +679,7 @@ def create_recipe(args): if not outfile: if not pn: - logger.error('Unable to determine short program name from source tree - please specify name with -N/--name or output file name with -o/--outfile') + log_error_cond('Unable to determine short program name from source tree - please specify name with -N/--name or output file name with -o/--outfile', args.devtool) # devtool looks for this specific exit code, so don't change it sys.exit(15) else: @@ -710,6 +749,15 @@ def create_recipe(args): if not bbclassextend: lines_after.append('BBCLASSEXTEND = "native"') + postinst = ("postinst", extravalues.pop('postinst', None)) + postrm = ("postrm", extravalues.pop('postrm', None)) + preinst = ("preinst", extravalues.pop('preinst', None)) + prerm = ("prerm", extravalues.pop('prerm', None)) + funcs = [postinst, postrm, preinst, prerm] + for func in funcs: + if func[1]: + RecipeHandler.genfunction(lines_after, 'pkg_%s_${PN}' % func[0], func[1]) + outlines = [] outlines.extend(lines_before) if classes: @@ -736,7 +784,7 @@ def create_recipe(args): shutil.move(srctree, args.extract_to) if tempsrc == srctree: tempsrc = None - logger.info('Source extracted to %s' % args.extract_to) + log_info_cond('Source extracted to %s' % args.extract_to, args.devtool) if outfile == '-': sys.stdout.write('\n'.join(outlines) + '\n') @@ -749,7 +797,7 @@ def create_recipe(args): continue f.write('%s\n' % line) lastline = line - logger.info('Recipe %s has been created; further editing may be required to make it fully functional' % outfile) + log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool) if tempsrc: if args.keep_temp: @@ -775,10 +823,12 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d): lines_before.append('# your responsibility to verify that the values are complete and 
correct.') if len(licvalues) > 1: lines_before.append('#') - lines_before.append('# NOTE: multiple licenses have been detected; if that is correct you should separate') - lines_before.append('# these in the LICENSE value using & if the multiple licenses all apply, or | if there') - lines_before.append('# is a choice between the multiple licenses. If in doubt, check the accompanying') - lines_before.append('# documentation to determine which situation is applicable.') + lines_before.append('# NOTE: multiple licenses have been detected; they have been separated with &') + lines_before.append('# in the LICENSE value for now since it is a reasonable assumption that all') + lines_before.append('# of the licenses apply. If instead there is a choice between the multiple') + lines_before.append('# licenses then you should change the value to separate the licenses with |') + lines_before.append('# instead of &. If there is any doubt, check the accompanying documentation') + lines_before.append('# to determine which situation is applicable.') if lic_unknown: lines_before.append('#') lines_before.append('# The following license files were not able to be identified and are') @@ -802,7 +852,7 @@ def handle_license_vars(srctree, lines_before, handled, extravalues, d): licenses = [pkg_license] else: lines_before.append('# NOTE: Original package metadata indicates license is: %s' % pkg_license) - lines_before.append('LICENSE = "%s"' % ' '.join(licenses)) + lines_before.append('LICENSE = "%s"' % ' & '.join(licenses)) lines_before.append('LIC_FILES_CHKSUM = "%s"' % ' \\\n '.join(lic_files_chksum)) lines_before.append('') handled.append(('license', licvalues)) @@ -813,7 +863,7 @@ def get_license_md5sums(d, static_only=False): md5sums = {} if not static_only: # Gather md5sums of license files in common license dir - commonlicdir = d.getVar('COMMON_LICENSE_DIR', True) + commonlicdir = d.getVar('COMMON_LICENSE_DIR') for fn in os.listdir(commonlicdir): md5value = bb.utils.md5_file(os.path.join(commonlicdir, fn)) md5sums[md5value] = fn @@ -983,7 +1033,7 @@ def split_pkg_licenses(licvalues, packages, outlines, fallback_licenses=None, pn return outlicenses def read_pkgconfig_provides(d): - pkgdatadir = d.getVar('PKGDATA_DIR', True) + pkgdatadir = d.getVar('PKGDATA_DIR') pkgmap = {} for fn in glob.glob(os.path.join(pkgdatadir, 'shlibs2', '*.pclist')): with open(fn, 'r') as f: @@ -1044,6 +1094,25 @@ def convert_debian(debpath): varname = value_map.get(key, None) if varname: values[varname] = value + postinst = os.path.join(debpath, 'postinst') + postrm = os.path.join(debpath, 'postrm') + preinst = os.path.join(debpath, 'preinst') + prerm = os.path.join(debpath, 'prerm') + sfiles = [postinst, postrm, preinst, prerm] + for sfile in sfiles: + if os.path.isfile(sfile): + logger.info("Converting %s file to recipe function..." 
% + os.path.basename(sfile).upper()) + content = [] + with open(sfile) as f: + for line in f: + if "#!/" in line: + continue + line = line.rstrip("\n") + if line.strip(): + content.append(line) + if content: + values[os.path.basename(f.name)] = content #if depends: # values['DEPENDS'] = ' '.join(depends) @@ -1073,10 +1142,21 @@ def convert_rpm_xml(xmlfile): return values -def check_npm(d): - if not os.path.exists(os.path.join(d.getVar('STAGING_BINDIR_NATIVE', True), 'npm')): - logger.error('npm required to process specified source, but npm is not available - you need to build nodejs-native first') +def check_npm(tinfoil, debugonly=False): + try: + rd = tinfoil.parse_recipe('nodejs-native') + except bb.providers.NoProvider: + # We still conditionally show the message and exit with the special + # return code, otherwise we can't show the proper message for eSDK + # users + log_error_cond('nodejs-native is required for npm but is not available - you will likely need to add a layer that provides nodejs', debugonly) + sys.exit(14) + bindir = rd.getVar('STAGING_BINDIR_NATIVE') + npmpath = os.path.join(bindir, 'npm') + if not os.path.exists(npmpath): + log_error_cond('npm required to process specified source, but npm is not available - you need to run bitbake -c addto_recipe_sysroot nodejs-native first', debugonly) sys.exit(14) + return bindir def register_commands(subparsers): parser_create = subparsers.add_parser('create', @@ -1093,5 +1173,10 @@ def register_commands(subparsers): parser_create.add_argument('--src-subdir', help='Specify subdirectory within source tree to use', metavar='SUBDIR') parser_create.add_argument('-a', '--autorev', help='When fetching from a git repository, set SRCREV in the recipe to a floating revision instead of fixed', action="store_true") parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)') - parser_create.set_defaults(func=create_recipe) + parser_create.add_argument('--fetch-dev', action="store_true", help='For npm, also fetch devDependencies') + parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS) + # FIXME I really hate having to set parserecipes for this, but given we may need + # to call into npm (and we don't know in advance if we will or not) and in order + # to do so we need to know npm's recipe sysroot path, there's not much alternative + parser_create.set_defaults(func=create_recipe, parserecipes=True) diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py b/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py index 82a2be122..ec5449bee 100644 --- a/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py +++ b/import-layers/yocto-poky/scripts/lib/recipetool/create_buildsys_python.py @@ -532,11 +532,11 @@ class PythonRecipeHandler(RecipeHandler): def parse_pkgdata_for_python_packages(self): suffixes = [t[0] for t in imp.get_suffixes()] - pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True) + pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR') ldata = tinfoil.config_data.createCopy() bb.parse.handle('classes/python-dir.bbclass', ldata, True) - python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR', True) + python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR') dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload') python_dirs = [python_sitedir + os.sep, diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/create_kernel.py 
b/import-layers/yocto-poky/scripts/lib/recipetool/create_kernel.py index 7dac59fd0..ca4996c7a 100644 --- a/import-layers/yocto-poky/scripts/lib/recipetool/create_kernel.py +++ b/import-layers/yocto-poky/scripts/lib/recipetool/create_kernel.py @@ -41,7 +41,7 @@ class KernelRecipeHandler(RecipeHandler): handled.append('buildsystem') del lines_after[:] del classes[:] - template = os.path.join(tinfoil.config_data.getVar('COREBASE', True), 'meta-skeleton', 'recipes-kernel', 'linux', 'linux-yocto-custom.bb') + template = os.path.join(tinfoil.config_data.getVar('COREBASE'), 'meta-skeleton', 'recipes-kernel', 'linux', 'linux-yocto-custom.bb') def handle_var(varname, origvalue, op, newlines): if varname in ['SRCREV', 'SRCREV_machine']: while newlines[-1].startswith('#'): @@ -85,7 +85,7 @@ class KernelRecipeHandler(RecipeHandler): elif varname == 'COMPATIBLE_MACHINE': while newlines[-1].startswith('#'): del newlines[-1] - machine = tinfoil.config_data.getVar('MACHINE', True) + machine = tinfoil.config_data.getVar('MACHINE') return machine, op, 0, True return origvalue, op, 0, True with open(template, 'r') as f: diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/create_npm.py b/import-layers/yocto-poky/scripts/lib/recipetool/create_npm.py index 7bb844cb0..cb8f338b8 100644 --- a/import-layers/yocto-poky/scripts/lib/recipetool/create_npm.py +++ b/import-layers/yocto-poky/scripts/lib/recipetool/create_npm.py @@ -45,11 +45,29 @@ class NpmRecipeHandler(RecipeHandler): license = data['license'] if isinstance(license, dict): license = license.get('type', None) + if license: + if 'OR' in license: + license = license.replace('OR', '|') + license = license.replace('AND', '&') + license = license.replace(' ', '_') + if not license[0] == '(': + license = '(' + license + ')' + print('LICENSE: {}'.format(license)) + else: + license = license.replace('AND', '&') + if license[0] == '(': + license = license[1:] + if license[-1] == ')': + license = license[:-1] + license = license.replace('MIT/X11', 'MIT') + license = license.replace('Public Domain', 'PD') + license = license.replace('SEE LICENSE IN EULA', + 'SEE-LICENSE-IN-EULA') return license - def _shrinkwrap(self, srctree, localfilesdir, extravalues, lines_before): + def _shrinkwrap(self, srctree, localfilesdir, extravalues, lines_before, d): try: - runenv = dict(os.environ, PATH=tinfoil.config_data.getVar('PATH', True)) + runenv = dict(os.environ, PATH=d.getVar('PATH')) bb.process.run('npm shrinkwrap', cwd=srctree, stderr=subprocess.STDOUT, env=runenv, shell=True) except bb.process.ExecutionError as e: logger.warn('npm shrinkwrap failed:\n%s' % e.stdout) @@ -61,8 +79,8 @@ class NpmRecipeHandler(RecipeHandler): extravalues['extrafiles']['npm-shrinkwrap.json'] = tmpfile lines_before.append('NPM_SHRINKWRAP := "${THISDIR}/${PN}/npm-shrinkwrap.json"') - def _lockdown(self, srctree, localfilesdir, extravalues, lines_before): - runenv = dict(os.environ, PATH=tinfoil.config_data.getVar('PATH', True)) + def _lockdown(self, srctree, localfilesdir, extravalues, lines_before, d): + runenv = dict(os.environ, PATH=d.getVar('PATH')) if not NpmRecipeHandler.lockdownpath: NpmRecipeHandler.lockdownpath = tempfile.mkdtemp('recipetool-npm-lockdown') bb.process.run('npm install lockdown --prefix %s' % NpmRecipeHandler.lockdownpath, @@ -83,7 +101,7 @@ class NpmRecipeHandler(RecipeHandler): extravalues['extrafiles']['lockdown.json'] = tmpfile lines_before.append('NPM_LOCKDOWN := "${THISDIR}/${PN}/lockdown.json"') - def _handle_dependencies(self, d, deps, lines_before, 
srctree): + def _handle_dependencies(self, d, deps, optdeps, devdeps, lines_before, srctree): import scriptutils # If this isn't a single module we need to get the dependencies # and add them to SRC_URI @@ -92,8 +110,21 @@ class NpmRecipeHandler(RecipeHandler): if not origvalue.startswith('npm://'): src_uri = origvalue.split() changed = False - for dep, depdata in deps.items(): - version = self.get_node_version(dep, depdata, d) + deplist = {} + for dep, depver in optdeps.items(): + depdata = self.get_npm_data(dep, depver, d) + if self.check_npm_optional_dependency(depdata): + deplist[dep] = depdata + for dep, depver in devdeps.items(): + depdata = self.get_npm_data(dep, depver, d) + if self.check_npm_optional_dependency(depdata): + deplist[dep] = depdata + for dep, depver in deps.items(): + depdata = self.get_npm_data(dep, depver, d) + deplist[dep] = depdata + + for dep, depdata in deplist.items(): + version = depdata.get('version', None) if version: url = 'npm://registry.npmjs.org;name=%s;version=%s;subdir=node_modules/%s' % (dep, version, dep) scriptutils.fetch_uri(d, url, srctree) @@ -157,7 +188,9 @@ class NpmRecipeHandler(RecipeHandler): files = RecipeHandler.checkfiles(srctree, ['package.json']) if files: - check_npm(tinfoil.config_data) + d = bb.data.createCopy(tinfoil.config_data) + npm_bindir = check_npm(tinfoil, self._devtool) + d.prependVar('PATH', '%s:' % npm_bindir) data = read_package_json(files[0]) if 'name' in data and 'version' in data: @@ -170,18 +203,19 @@ class NpmRecipeHandler(RecipeHandler): if 'homepage' in data: extravalues['HOMEPAGE'] = data['homepage'] - deps = data.get('dependencies', {}) - updated = self._handle_dependencies(tinfoil.config_data, deps, lines_before, srctree) + fetchdev = extravalues['fetchdev'] or None + deps, optdeps, devdeps = self.get_npm_package_dependencies(data, fetchdev) + updated = self._handle_dependencies(d, deps, optdeps, devdeps, lines_before, srctree) if updated: # We need to redo the license stuff - self._replace_license_vars(srctree, lines_before, handled, extravalues, tinfoil.config_data) + self._replace_license_vars(srctree, lines_before, handled, extravalues, d) # Shrinkwrap localfilesdir = tempfile.mkdtemp(prefix='recipetool-npm') - self._shrinkwrap(srctree, localfilesdir, extravalues, lines_before) + self._shrinkwrap(srctree, localfilesdir, extravalues, lines_before, d) # Lockdown - self._lockdown(srctree, localfilesdir, extravalues, lines_before) + self._lockdown(srctree, localfilesdir, extravalues, lines_before, d) # Split each npm module out to is own package npmpackages = oe.package.npm_split_package_dirs(srctree) @@ -207,7 +241,9 @@ class NpmRecipeHandler(RecipeHandler): packages = OrderedDict((x,y[0]) for x,y in npmpackages.items()) packages['${PN}'] = '' pkglicenses = split_pkg_licenses(licvalues, packages, lines_after, licenses) - all_licenses = list(set([item for pkglicense in pkglicenses.values() for item in pkglicense])) + all_licenses = list(set([item.replace('_', ' ') for pkglicense in pkglicenses.values() for item in pkglicense])) + if '&' in all_licenses: + all_licenses.remove('&') # Go back and update the LICENSE value since we have a bit more # information than when that was written out (and we know all apply # vs. 
there being a choice, so we can join them with &) @@ -251,17 +287,58 @@ class NpmRecipeHandler(RecipeHandler): # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py # (split out from _getdependencies()) - def get_node_version(self, pkg, version, d): + def get_npm_data(self, pkg, version, d): import bb.fetch2 pkgfullname = pkg if version != '*' and not '/' in version: pkgfullname += "@'%s'" % version logger.debug(2, "Calling getdeps on %s" % pkg) - runenv = dict(os.environ, PATH=d.getVar('PATH', True)) + runenv = dict(os.environ, PATH=d.getVar('PATH')) fetchcmd = "npm view %s --json" % pkgfullname output, _ = bb.process.run(fetchcmd, stderr=subprocess.STDOUT, env=runenv, shell=True) data = self._parse_view(output) - return data.get('version', None) + return data + + # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py + # (split out from _getdependencies()) + def get_npm_package_dependencies(self, pdata, fetchdev): + dependencies = pdata.get('dependencies', {}) + optionalDependencies = pdata.get('optionalDependencies', {}) + dependencies.update(optionalDependencies) + if fetchdev: + devDependencies = pdata.get('devDependencies', {}) + dependencies.update(devDependencies) + else: + devDependencies = {} + depsfound = {} + optdepsfound = {} + devdepsfound = {} + for dep in dependencies: + if dep in optionalDependencies: + optdepsfound[dep] = dependencies[dep] + elif dep in devDependencies: + devdepsfound[dep] = dependencies[dep] + else: + depsfound[dep] = dependencies[dep] + return depsfound, optdepsfound, devdepsfound + + # FIXME this is effectively duplicated from lib/bb/fetch2/npm.py + # (split out from _getdependencies()) + def check_npm_optional_dependency(self, pdata): + pkg_os = pdata.get('os', None) + if pkg_os: + if not isinstance(pkg_os, list): + pkg_os = [pkg_os] + blacklist = False + for item in pkg_os: + if item.startswith('!'): + blacklist = True + break + if (not blacklist and 'linux' not in pkg_os) or '!linux' in pkg_os: + logger.debug(2, "Skipping %s since it's incompatible with Linux" % pkg) + return False + return True + def register_recipe_handlers(handlers): handlers.append((NpmRecipeHandler(), 60)) diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/newappend.py b/import-layers/yocto-poky/scripts/lib/recipetool/newappend.py index fbdd7bcef..0b63759d8 100644 --- a/import-layers/yocto-poky/scripts/lib/recipetool/newappend.py +++ b/import-layers/yocto-poky/scripts/lib/recipetool/newappend.py @@ -39,18 +39,6 @@ def tinfoil_init(instance): tinfoil = instance -def _get_recipe_file(cooker, pn): - import oe.recipeutils - recipefile = oe.recipeutils.pn_to_recipe(cooker, pn) - if not recipefile: - skipreasons = oe.recipeutils.get_unavailable_reasons(cooker, pn) - if skipreasons: - logger.error('\n'.join(skipreasons)) - else: - logger.error("Unable to find any recipe file matching %s" % pn) - return recipefile - - def layer(layerpath): if not os.path.exists(os.path.join(layerpath, 'conf', 'layer.conf')): raise argparse.ArgumentTypeError('{0!r} must be a path to a valid layer'.format(layerpath)) @@ -60,7 +48,7 @@ def layer(layerpath): def newappend(args): import oe.recipeutils - recipe_path = _get_recipe_file(tinfoil.cooker, args.target) + recipe_path = tinfoil.get_recipe_file(args.target) rd = tinfoil.config_data.createCopy() rd.setVar('FILE', recipe_path) @@ -72,7 +60,7 @@ def newappend(args): if not path_ok: logger.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. 
Using %s for now, but until you fix this the bbappend will not be applied.', os.path.join(args.destlayer, 'conf', 'layer.conf'), os.path.dirname(append_path)) - layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS', True).split()] + layerdirs = [os.path.abspath(layerdir) for layerdir in rd.getVar('BBLAYERS').split()] if not os.path.abspath(args.destlayer) in layerdirs: logger.warn('Specified layer is not currently enabled in bblayers.conf, you will need to add it before this bbappend will be active') diff --git a/import-layers/yocto-poky/scripts/lib/recipetool/setvar.py b/import-layers/yocto-poky/scripts/lib/recipetool/setvar.py index 85701c06a..9de315a0e 100644 --- a/import-layers/yocto-poky/scripts/lib/recipetool/setvar.py +++ b/import-layers/yocto-poky/scripts/lib/recipetool/setvar.py @@ -51,7 +51,7 @@ def setvar(args): if args.recipe_only: patches = [oe.recipeutils.patch_recipe_file(args.recipefile, varvalues, patch=args.patch)] else: - rd = oe.recipeutils.parse_recipe(tinfoil.cooker, args.recipefile, None) + rd = tinfoil.parse_recipe_file(args.recipefile, False) if not rd: return 1 patches = oe.recipeutils.patch_recipe(rd, args.recipefile, varvalues, patch=args.patch) diff --git a/import-layers/yocto-poky/scripts/lib/scriptutils.py b/import-layers/yocto-poky/scripts/lib/scriptutils.py index 5ccc02796..92b601c7e 100644 --- a/import-layers/yocto-poky/scripts/lib/scriptutils.py +++ b/import-layers/yocto-poky/scripts/lib/scriptutils.py @@ -21,10 +21,12 @@ import logging import glob import argparse import subprocess +import tempfile +import shutil -def logger_create(name): +def logger_create(name, stream=None): logger = logging.getLogger(name) - loggerhandler = logging.StreamHandler() + loggerhandler = logging.StreamHandler(stream=stream) loggerhandler.setFormatter(logging.Formatter("%(levelname)s: %(message)s")) logger.addHandler(loggerhandler) logger.setLevel(logging.INFO) @@ -52,10 +54,14 @@ def load_plugins(logger, plugins, pluginpath): if fp: fp.close() + def plugin_name(filename): + return os.path.splitext(os.path.basename(filename))[0] + + known_plugins = [plugin_name(p.__name__) for p in plugins] logger.debug('Loading plugins from %s...' 
% pluginpath) for fn in glob.glob(os.path.join(pluginpath, '*.py')): - name = os.path.splitext(os.path.basename(fn))[0] - if name != '__init__': + name = plugin_name(fn) + if name != '__init__' and name not in known_plugins: plugin = load_plugin(name) if hasattr(plugin, 'plugin_init'): plugin.plugin_init(plugins) @@ -74,32 +80,47 @@ def git_convert_standalone_clone(repodir): def fetch_uri(d, uri, destdir, srcrev=None): """Fetch a URI to a local directory""" - import bb.data - bb.utils.mkdirhier(destdir) - localdata = bb.data.createCopy(d) - localdata.setVar('BB_STRICT_CHECKSUM', '') - localdata.setVar('SRCREV', srcrev) - ret = (None, None) - olddir = os.getcwd() + import bb + tmpparent = d.getVar('BASE_WORKDIR') + bb.utils.mkdirhier(tmpparent) + tmpworkdir = tempfile.mkdtemp(dir=tmpparent) try: - fetcher = bb.fetch2.Fetch([uri], localdata) - for u in fetcher.ud: - ud = fetcher.ud[u] - ud.ignore_checksums = True - fetcher.download() - for u in fetcher.ud: - ud = fetcher.ud[u] - if ud.localpath.rstrip(os.sep) == localdata.getVar('DL_DIR', True).rstrip(os.sep): - raise Exception('Local path is download directory - please check that the URI "%s" is correct' % uri) - fetcher.unpack(destdir) - for u in fetcher.ud: - ud = fetcher.ud[u] - if ud.method.recommends_checksum(ud): - md5value = bb.utils.md5_file(ud.localpath) - sha256value = bb.utils.sha256_file(ud.localpath) - ret = (md5value, sha256value) + bb.utils.mkdirhier(destdir) + localdata = bb.data.createCopy(d) + + # Set some values to allow extend_recipe_sysroot to work here we're we are not running from a task + localdata.setVar('WORKDIR', tmpworkdir) + localdata.setVar('BB_RUNTASK', 'do_fetch') + localdata.setVar('PN', 'dummy') + localdata.setVar('BB_LIMITEDDEPS', '1') + bb.build.exec_func("extend_recipe_sysroot", localdata) + + # Set some values for the benefit of the fetcher code + localdata.setVar('BB_STRICT_CHECKSUM', '') + localdata.setVar('SRCREV', srcrev) + ret = (None, None) + olddir = os.getcwd() + try: + fetcher = bb.fetch2.Fetch([uri], localdata) + for u in fetcher.ud: + ud = fetcher.ud[u] + ud.ignore_checksums = True + fetcher.download() + for u in fetcher.ud: + ud = fetcher.ud[u] + if ud.localpath.rstrip(os.sep) == localdata.getVar('DL_DIR').rstrip(os.sep): + raise Exception('Local path is download directory - please check that the URI "%s" is correct' % uri) + fetcher.unpack(destdir) + for u in fetcher.ud: + ud = fetcher.ud[u] + if ud.method.recommends_checksum(ud): + md5value = bb.utils.md5_file(ud.localpath) + sha256value = bb.utils.sha256_file(ud.localpath) + ret = (md5value, sha256value) + finally: + os.chdir(olddir) finally: - os.chdir(olddir) + shutil.rmtree(tmpworkdir) return ret def run_editor(fn): diff --git a/import-layers/yocto-poky/scripts/lib/wic/__init__.py b/import-layers/yocto-poky/scripts/lib/wic/__init__.py index 63c1d9c84..85876b138 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/__init__.py +++ b/import-layers/yocto-poky/scripts/lib/wic/__init__.py @@ -1,4 +1,20 @@ -import os, sys +#!/usr/bin/env python -tt +# +# Copyright (c) 2007 Red Hat, Inc. +# Copyright (c) 2011 Intel, Inc. +# +# This program is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the Free +# Software Foundation; version 2 of the License +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY +# or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License +# for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., 59 +# Temple Place - Suite 330, Boston, MA 02111-1307, USA. -cur_path = os.path.dirname(__file__) or '.' -sys.path.insert(0, cur_path + '/3rdparty') +class WicError(Exception): + pass diff --git a/import-layers/yocto-poky/scripts/lib/wic/__version__.py b/import-layers/yocto-poky/scripts/lib/wic/__version__.py deleted file mode 100644 index 5452a4671..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/__version__.py +++ /dev/null @@ -1 +0,0 @@ -VERSION = "2.00" diff --git a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg index a16bd6ac6..d5a07d204 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg +++ b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/directdisk-bootloader-config.cfg @@ -1,11 +1,27 @@ # This is an example configuration file for syslinux. -PROMPT 0 -TIMEOUT 10 - +TIMEOUT 50 ALLOWOPTIONS 1 SERIAL 0 115200 +PROMPT 0 + +UI vesamenu.c32 +menu title Select boot options +menu tabmsg Press [Tab] to edit, [Return] to select + +DEFAULT Graphics console boot + +LABEL Graphics console boot +KERNEL /vmlinuz +APPEND label=boot root=/dev/sda2 rootwait + +LABEL Serial console boot +KERNEL /vmlinuz +APPEND label=boot root=/dev/sda2 rootwait console=ttyS0,115200 + +LABEL Graphics console install +KERNEL /vmlinuz +APPEND label=install root=/dev/sda2 rootwait -DEFAULT boot -LABEL boot +LABEL Serial console install KERNEL /vmlinuz -APPEND label=boot root=/dev/sda2 rootwait rootfstype=ext4 video=vesafb vga=0x318 console=tty0 +APPEND label=install root=/dev/sda2 rootwait console=ttyS0,115200 diff --git a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/mkgummidisk.wks b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/mkgummidisk.wks deleted file mode 100644 index f3ae09099..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/mkgummidisk.wks +++ /dev/null @@ -1,11 +0,0 @@ -# short-description: Create an EFI disk image -# long-description: Creates a partitioned EFI disk image that the user -# can directly dd to boot media. 
- -part /boot --source bootimg-efi --sourceparams="loader=gummiboot" --ondisk sda --label msdos --active --align 1024 - -part / --source rootfs --ondisk sda --fstype=ext4 --label platform --align 1024 - -part swap --ondisk sda --size 44 --label swap1 --fstype=swap - -bootloader --ptable gpt --timeout=5 --append="rootwait rootfstype=ext4 console=ttyS0,115200 console=tty0" diff --git a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks index a6518a0f4..db30bbced 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks +++ b/import-layers/yocto-poky/scripts/lib/wic/canned-wks/qemux86-directdisk.wks @@ -4,5 +4,5 @@ include common.wks.inc -bootloader --timeout=0 --append="vga=0 uvesafb.mode_option=640x480-32 root=/dev/vda2 rw mem=256M ip=192.168.7.2::192.168.7.1:255.255.255.0 oprofile.timer=1 rootfstype=ext4 " +bootloader --timeout=0 --append="vga=0 uvesafb.mode_option=640x480-32 root=/dev/sda2 rw mem=256M ip=192.168.7.2::192.168.7.1:255.255.255.0 oprofile.timer=1 rootfstype=ext4 " diff --git a/import-layers/yocto-poky/scripts/lib/wic/conf.py b/import-layers/yocto-poky/scripts/lib/wic/conf.py deleted file mode 100644 index 070ec3096..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/conf.py +++ /dev/null @@ -1,103 +0,0 @@ -#!/usr/bin/env python -tt -# -# Copyright (c) 2011 Intel, Inc. -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the Free -# Software Foundation; version 2 of the License -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -# for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., 59 -# Temple Place - Suite 330, Boston, MA 02111-1307, USA. - -import os - -from wic.ksparser import KickStart, KickStartError -from wic import msger -from wic.utils import misc - - -def get_siteconf(): - wic_path = os.path.dirname(__file__) - eos = wic_path.find('scripts') + len('scripts') - scripts_path = wic_path[:eos] - - return scripts_path + "/lib/image/config/wic.conf" - -class ConfigMgr(object): - DEFAULTS = { - 'common': { - "distro_name": "Default Distribution", - "plugin_dir": "/usr/lib/wic/plugins"}, # TODO use prefix also? 
- 'create': { - "tmpdir": '/var/tmp/wic', - "outdir": './wic-output', - "release": None, - "logfile": None, - "name_prefix": None, - "name_suffix": None} - } - - # make the manager class as singleton - _instance = None - def __new__(cls, *args, **kwargs): - if not cls._instance: - cls._instance = super(ConfigMgr, cls).__new__(cls, *args, **kwargs) - - return cls._instance - - def __init__(self, ksconf=None, siteconf=None): - # reset config options - self.reset() - - if not siteconf: - siteconf = get_siteconf() - - # initial options from siteconf - self._siteconf = siteconf - - if ksconf: - self._ksconf = ksconf - - def reset(self): - self.__ksconf = None - self.__siteconf = None - self.create = {} - - # initialize the values with defaults - for sec, vals in self.DEFAULTS.items(): - setattr(self, sec, vals) - - def __set_ksconf(self, ksconf): - if not os.path.isfile(ksconf): - msger.error('Cannot find ks file: %s' % ksconf) - - self.__ksconf = ksconf - self._parse_kickstart(ksconf) - def __get_ksconf(self): - return self.__ksconf - _ksconf = property(__get_ksconf, __set_ksconf) - - def _parse_kickstart(self, ksconf=None): - if not ksconf: - return - - try: - ksobj = KickStart(ksconf) - except KickStartError as err: - msger.error(str(err)) - - self.create['ks'] = ksobj - self.create['name'] = os.path.splitext(os.path.basename(ksconf))[0] - - self.create['name'] = misc.build_name(ksconf, - self.create['release'], - self.create['name_prefix'], - self.create['name_suffix']) - -configmgr = ConfigMgr() diff --git a/import-layers/yocto-poky/scripts/lib/wic/config/wic.conf b/import-layers/yocto-poky/scripts/lib/wic/config/wic.conf deleted file mode 100644 index a51bcb55e..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/config/wic.conf +++ /dev/null @@ -1,6 +0,0 @@ -[common] -; general settings -distro_name = OpenEmbedded - -[create] -; settings for create subcommand diff --git a/import-layers/yocto-poky/scripts/lib/wic/creator.py b/import-layers/yocto-poky/scripts/lib/wic/creator.py deleted file mode 100644 index 8f7d1503f..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/creator.py +++ /dev/null @@ -1,125 +0,0 @@ -#!/usr/bin/env python -tt -# -# Copyright (c) 2011 Intel, Inc. -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the Free -# Software Foundation; version 2 of the License -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -# for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., 59 -# Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
- -import os, sys -from optparse import OptionParser, SUPPRESS_HELP - -from wic import msger -from wic.utils import errors -from wic.conf import configmgr -from wic.plugin import pluginmgr - - -class Creator(): - """${name}: create an image - - Usage: - ${name} SUBCOMMAND [OPTS] - - ${command_list} - ${option_list} - """ - - name = 'wic create(cr)' - - def __init__(self, *args, **kwargs): - self._subcmds = {} - - # get cmds from pluginmgr - # mix-in do_subcmd interface - for subcmd, klass in pluginmgr.get_plugins('imager').items(): - if not hasattr(klass, 'do_create'): - msger.warning("Unsupported subcmd: %s" % subcmd) - continue - - func = getattr(klass, 'do_create') - self._subcmds[subcmd] = func - - def get_optparser(self): - optparser = OptionParser() - optparser.add_option('-d', '--debug', action='store_true', - dest='debug', - help=SUPPRESS_HELP) - optparser.add_option('-v', '--verbose', action='store_true', - dest='verbose', - help=SUPPRESS_HELP) - optparser.add_option('', '--logfile', type='string', dest='logfile', - default=None, - help='Path of logfile') - optparser.add_option('-c', '--config', type='string', dest='config', - default=None, - help='Specify config file for wic') - optparser.add_option('-o', '--outdir', type='string', action='store', - dest='outdir', default=None, - help='Output directory') - optparser.add_option('', '--tmpfs', action='store_true', dest='enabletmpfs', - help='Setup tmpdir as tmpfs to accelerate, experimental' - ' feature, use it if you have more than 4G memory') - optparser.add_option('', '--bmap', action='store_true', help='generate .bmap') - return optparser - - def postoptparse(self, options): - abspath = lambda pth: os.path.abspath(os.path.expanduser(pth)) - - if options.verbose: - msger.set_loglevel('verbose') - if options.debug: - msger.set_loglevel('debug') - - if options.logfile: - logfile_abs_path = abspath(options.logfile) - if os.path.isdir(logfile_abs_path): - raise errors.Usage("logfile's path %s should be file" - % options.logfile) - if not os.path.exists(os.path.dirname(logfile_abs_path)): - os.makedirs(os.path.dirname(logfile_abs_path)) - msger.set_interactive(False) - msger.set_logfile(logfile_abs_path) - configmgr.create['logfile'] = options.logfile - - if options.config: - configmgr.reset() - configmgr._siteconf = options.config - - if options.outdir is not None: - configmgr.create['outdir'] = abspath(options.outdir) - - cdir = 'outdir' - if os.path.exists(configmgr.create[cdir]) \ - and not os.path.isdir(configmgr.create[cdir]): - msger.error('Invalid directory specified: %s' \ - % configmgr.create[cdir]) - - if options.enabletmpfs: - configmgr.create['enabletmpfs'] = options.enabletmpfs - - def main(self, argv=None): - if argv is None: - argv = sys.argv - else: - argv = argv[:] # don't modify caller's list - - pname = argv[0] - if pname not in self._subcmds: - msger.error('Unknown plugin: %s' % pname) - - optparser = self.get_optparser() - options, args = optparser.parse_args(argv) - - self.postoptparse(options) - - return self._subcmds[pname](options, *args[1:]) diff --git a/import-layers/yocto-poky/scripts/lib/wic/engine.py b/import-layers/yocto-poky/scripts/lib/wic/engine.py index 5b104631c..f59821fea 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/engine.py +++ b/import-layers/yocto-poky/scripts/lib/wic/engine.py @@ -28,14 +28,14 @@ # Tom Zanussi # +import logging import os -import sys -from wic import msger, creator -from wic.utils import misc -from wic.plugin import pluginmgr -from wic.utils.oe import misc +from wic 
import WicError +from wic.pluginbase import PluginMgr +from wic.utils.misc import get_bitbake_var +logger = logging.getLogger('wic') def verify_build_env(): """ @@ -44,23 +44,25 @@ def verify_build_env(): Returns True if it is, false otherwise """ if not os.environ.get("BUILDDIR"): - print("BUILDDIR not found, exiting. (Did you forget to source oe-init-build-env?)") - sys.exit(1) + raise WicError("BUILDDIR not found, exiting. (Did you forget to source oe-init-build-env?)") return True CANNED_IMAGE_DIR = "lib/wic/canned-wks" # relative to scripts SCRIPTS_CANNED_IMAGE_DIR = "scripts/" + CANNED_IMAGE_DIR +WIC_DIR = "wic" def build_canned_image_list(path): - layers_path = misc.get_bitbake_var("BBLAYERS") + layers_path = get_bitbake_var("BBLAYERS") canned_wks_layer_dirs = [] if layers_path is not None: for layer_path in layers_path.split(): - cpath = os.path.join(layer_path, SCRIPTS_CANNED_IMAGE_DIR) - canned_wks_layer_dirs.append(cpath) + for wks_path in (WIC_DIR, SCRIPTS_CANNED_IMAGE_DIR): + cpath = os.path.join(layer_path, wks_path) + if os.path.isdir(cpath): + canned_wks_layer_dirs.append(cpath) cpath = os.path.join(path, CANNED_IMAGE_DIR) canned_wks_layer_dirs.append(cpath) @@ -137,26 +139,24 @@ def list_source_plugins(): """ List the available source plugins i.e. plugins available for --source. """ - plugins = pluginmgr.get_source_plugins() + plugins = PluginMgr.get_plugins('source') for plugin in plugins: print(" %s" % plugin) def wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir, - native_sysroot, scripts_path, image_output_dir, - compressor, bmap, debug): - """Create image + native_sysroot, options): + """ + Create image wks_file - user-defined OE kickstart file rootfs_dir - absolute path to the build's /rootfs dir bootimg_dir - absolute path to the build's boot artifacts directory kernel_dir - absolute path to the build's kernel directory native_sysroot - absolute path to the build's native sysroots dir - scripts_path - absolute path to /scripts dir image_output_dir - dirname to create for image - compressor - compressor utility to compress the image - bmap - enable generation of .bmap + options - wic command line options (debug, bmap, etc) Normally, the values for the build artifacts values are determined by 'wic -e' from the output of the 'bitbake -e' command given an @@ -179,22 +179,22 @@ def wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir, try: oe_builddir = os.environ["BUILDDIR"] except KeyError: - print("BUILDDIR not found, exiting. (Did you forget to source oe-init-build-env?)") - sys.exit(1) + raise WicError("BUILDDIR not found, exiting. 
(Did you forget to source oe-init-build-env?)") - if debug: - msger.set_loglevel('debug') + if not os.path.exists(options.outdir): + os.makedirs(options.outdir) - crobj = creator.Creator() + pname = 'direct' + plugin_class = PluginMgr.get_plugins('imager').get(pname) + if not plugin_class: + raise WicError('Unknown plugin: %s' % pname) - cmdline = ["direct", native_sysroot, kernel_dir, bootimg_dir, rootfs_dir, - wks_file, image_output_dir, oe_builddir, compressor or ""] - if bmap: - cmdline.append('--bmap') + plugin = plugin_class(wks_file, rootfs_dir, bootimg_dir, kernel_dir, + native_sysroot, oe_builddir, options) - crobj.main(cmdline) + plugin.do_create() - print("\nThe image(s) were created using OE kickstart file:\n %s" % wks_file) + logger.info("The image(s) were created using OE kickstart file:\n %s", wks_file) def wic_list(args, scripts_path): @@ -214,12 +214,44 @@ def wic_list(args, scripts_path): wks_file = args[0] fullpath = find_canned_image(scripts_path, wks_file) if not fullpath: - print("No image named %s found, exiting. "\ - "(Use 'wic list images' to list available images, or "\ - "specify a fully-qualified OE kickstart (.wks) "\ - "filename)\n" % wks_file) - sys.exit(1) + raise WicError("No image named %s found, exiting. " + "(Use 'wic list images' to list available images, " + "or specify a fully-qualified OE kickstart (.wks) " + "filename)" % wks_file) + list_canned_image_help(scripts_path, fullpath) return True return False + +def find_canned(scripts_path, file_name): + """ + Find a file either by its path or by name in the canned files dir. + + Return None if not found + """ + if os.path.exists(file_name): + return file_name + + layers_canned_wks_dir = build_canned_image_list(scripts_path) + for canned_wks_dir in layers_canned_wks_dir: + for root, dirs, files in os.walk(canned_wks_dir): + for fname in files: + if fname == file_name: + fullpath = os.path.join(canned_wks_dir, fname) + return fullpath + +def get_custom_config(boot_file): + """ + Get the custom configuration to be used for the bootloader. + + Return None if the file can't be found. + """ + # Get the scripts path of poky + scripts_path = os.path.abspath("%s/../.." 
% os.path.dirname(__file__)) + + cfg_file = find_canned(scripts_path, boot_file) + if cfg_file: + with open(cfg_file, "r") as f: + config = f.read() + return config diff --git a/import-layers/yocto-poky/scripts/lib/wic/filemap.py b/import-layers/yocto-poky/scripts/lib/wic/filemap.py index 162603ed0..1f1aacc52 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/filemap.py +++ b/import-layers/yocto-poky/scripts/lib/wic/filemap.py @@ -530,13 +530,16 @@ def filemap(image, log=None): except ErrorNotSupp: return FilemapSeek(image, log) -def sparse_copy(src_fname, dst_fname, offset=0, skip=0): +def sparse_copy(src_fname, dst_fname, offset=0, skip=0, api=None): """Efficiently copy sparse file to or into another file.""" - fmap = filemap(src_fname) + if not api: + api = filemap + fmap = api(src_fname) try: dst_file = open(dst_fname, 'r+b') except IOError: dst_file = open(dst_fname, 'wb') + dst_file.truncate(os.path.getsize(src_fname)) for first, last in fmap.get_mapped_ranges(0, fmap.blocks_cnt): start = first * fmap.block_size diff --git a/import-layers/yocto-poky/scripts/lib/wic/help.py b/import-layers/yocto-poky/scripts/lib/wic/help.py index e5347ec4b..d6e027d25 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/help.py +++ b/import-layers/yocto-poky/scripts/lib/wic/help.py @@ -28,10 +28,12 @@ import subprocess import logging -from wic.plugin import pluginmgr, PLUGIN_TYPES +from wic.pluginbase import PluginMgr, PLUGIN_TYPES + +logger = logging.getLogger('wic') def subcommand_error(args): - logging.info("invalid subcommand %s" % args[0]) + logger.info("invalid subcommand %s", args[0]) def display_help(subcommand, subcommands): @@ -66,7 +68,7 @@ def get_wic_plugins_help(): result = wic_plugins_help for plugin_type in PLUGIN_TYPES: result += '\n\n%s PLUGINS\n\n' % plugin_type.upper() - for name, plugin in pluginmgr.get_plugins(plugin_type).items(): + for name, plugin in PluginMgr.get_plugins(plugin_type).items(): result += "\n %s plugin:\n" % name if plugin.__doc__: result += plugin.__doc__ @@ -81,13 +83,13 @@ def invoke_subcommand(args, parser, main_command_usage, subcommands): Should use argparse, but has to work in 2.6. """ if not args: - logging.error("No subcommand specified, exiting") + logger.error("No subcommand specified, exiting") parser.print_help() return 1 elif args[0] == "help": wic_help(args, main_command_usage, subcommands) elif args[0] not in subcommands: - logging.error("Unsupported subcommand %s, exiting\n" % (args[0])) + logger.error("Unsupported subcommand %s, exiting\n", args[0]) parser.print_help() return 1 else: @@ -371,12 +373,7 @@ DESCRIPTION This scheme is extensible - adding more hooks is a simple matter of adding more plugin methods to SourcePlugin and derived classes. - The code that then needs to call the plugin methods uses - plugin.get_source_plugin_methods() to find the method(s) needed by - the call; this is done by filling up a dict with keys containing - the method names of interest - on success, these will be filled in - with the actual methods. Please see the implementation for - examples and details. + Please see the implementation for details. """ wic_overview_help = """ @@ -646,6 +643,12 @@ DESCRIPTION not specified, the size is in MB. You do not need this option if you use --source. + --fixed-size: Exact partition size. Value format is the same + as for --size option. This option cannot be + specified along with --size. If partition data + is larger than --fixed-size and error will be + raised when assembling disk image. 
+ --source: This option is a wic-specific option that names the source of the data that will populate the partition. The most common value for this option @@ -684,6 +687,8 @@ DESCRIPTION apply to partitions created using '--source rootfs' (see --source above). Valid values are: + vfat + msdos ext2 ext3 ext4 @@ -715,17 +720,25 @@ DESCRIPTION partition table. It may be useful for bootloaders. + --exclude-path: This option is specific to wic. It excludes the given + relative path from the resulting image. If the path + ends with a slash, only the content of the directory + is omitted, not the directory itself. This option only + has an effect with the rootfs source plugin. + --extra-space: This option is specific to wic. It adds extra space after the space filled by the content of the partition. The final size can go beyond the size specified by --size. - By default, 10MB. + By default, 10MB. This option cannot be used + with --fixed-size option. --overhead-factor: This option is specific to wic. The size of the partition is multiplied by this factor. It has to be greater than or - equal to 1. - The default value is 1.3. + equal to 1. The default value is 1.3. + This option cannot be used with --fixed-size + option. --part-type: This option is specific to wic. It specifies partition type GUID for GPT partitions. diff --git a/import-layers/yocto-poky/scripts/lib/wic/imager/__init__.py b/import-layers/yocto-poky/scripts/lib/wic/imager/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/import-layers/yocto-poky/scripts/lib/wic/imager/baseimager.py b/import-layers/yocto-poky/scripts/lib/wic/imager/baseimager.py deleted file mode 100644 index 1a52dd8b4..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/imager/baseimager.py +++ /dev/null @@ -1,191 +0,0 @@ -#!/usr/bin/env python -tt -# -# Copyright (c) 2007 Red Hat Inc. -# Copyright (c) 2009, 2010, 2011 Intel, Inc. -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the Free -# Software Foundation; version 2 of the License -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -# for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., 59 -# Temple Place - Suite 330, Boston, MA 02111-1307, USA. - -import os -import tempfile -import shutil - -from wic import msger -from wic.utils.errors import CreatorError -from wic.utils import runner - -class BaseImageCreator(): - """Base class for image creation. - - BaseImageCreator is the simplest creator class available; it will - create a system image according to the supplied kickstart file. - - e.g. - - import wic.imgcreate as imgcreate - ks = imgcreate.read_kickstart("foo.ks") - imgcreate.ImageCreator(ks, "foo").create() - """ - - def __del__(self): - self.cleanup() - - def __init__(self, createopts=None): - """Initialize an ImageCreator instance. - - ks -- a pykickstart.KickstartParser instance; this instance will be - used to drive the install by e.g. providing the list of packages - to be installed, the system configuration and %post scripts - - name -- a name for the image; used for e.g. 
image filenames or - filesystem labels - """ - - self.__builddir = None - - self.ks = None - self.name = "target" - self.tmpdir = "/var/tmp/wic" - self.workdir = "/var/tmp/wic/build" - - # setup tmpfs tmpdir when enabletmpfs is True - self.enabletmpfs = False - - if createopts: - # Mapping table for variables that have different names. - optmap = {"outdir" : "destdir", - } - - # update setting from createopts - for key in createopts: - if key in optmap: - option = optmap[key] - else: - option = key - setattr(self, option, createopts[key]) - - self.destdir = os.path.abspath(os.path.expanduser(self.destdir)) - - self._dep_checks = ["ls", "bash", "cp", "echo"] - - # Output image file names - self.outimage = [] - - # No ks provided when called by convertor, so skip the dependency check - if self.ks: - # If we have btrfs partition we need to check necessary tools - for part in self.ks.partitions: - if part.fstype and part.fstype == "btrfs": - self._dep_checks.append("mkfs.btrfs") - break - - # make sure the specified tmpdir and cachedir exist - if not os.path.exists(self.tmpdir): - os.makedirs(self.tmpdir) - - - # - # Hooks for subclasses - # - def _create(self): - """Create partitions for the disk image(s) - - This is the hook where subclasses may create the partitions - that will be assembled into disk image(s). - - There is no default implementation. - """ - pass - - def _cleanup(self): - """Undo anything performed in _create(). - - This is the hook where subclasses must undo anything which was - done in _create(). - - There is no default implementation. - - """ - pass - - # - # Actual implementation - # - def __ensure_builddir(self): - if not self.__builddir is None: - return - - try: - self.workdir = os.path.join(self.tmpdir, "build") - if not os.path.exists(self.workdir): - os.makedirs(self.workdir) - self.__builddir = tempfile.mkdtemp(dir=self.workdir, - prefix="imgcreate-") - except OSError as err: - raise CreatorError("Failed create build directory in %s: %s" % - (self.tmpdir, err)) - - def __setup_tmpdir(self): - if not self.enabletmpfs: - return - - runner.show('mount -t tmpfs -o size=4G tmpfs %s' % self.workdir) - - def __clean_tmpdir(self): - if not self.enabletmpfs: - return - - runner.show('umount -l %s' % self.workdir) - - def create(self): - """Create partitions for the disk image(s) - - Create the partitions that will be assembled into disk - image(s). - """ - self.__setup_tmpdir() - self.__ensure_builddir() - - self._create() - - def cleanup(self): - """Undo anything performed in create(). 
- - Note, make sure to call this method once finished with the creator - instance in order to ensure no stale files are left on the host e.g.: - - creator = ImageCreator(ks, name) - try: - creator.create() - finally: - creator.cleanup() - - """ - if not self.__builddir: - return - - self._cleanup() - - shutil.rmtree(self.__builddir, ignore_errors=True) - self.__builddir = None - - self.__clean_tmpdir() - - - def print_outimage_info(self): - msg = "The new image can be found here:\n" - self.outimage.sort() - for path in self.outimage: - msg += ' %s\n' % os.path.abspath(path) - - msger.info(msg) diff --git a/import-layers/yocto-poky/scripts/lib/wic/imager/direct.py b/import-layers/yocto-poky/scripts/lib/wic/imager/direct.py deleted file mode 100644 index 4c547e04a..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/imager/direct.py +++ /dev/null @@ -1,419 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# Copyright (c) 2013, Intel Corporation. -# All rights reserved. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# DESCRIPTION -# This implements the 'direct' image creator class for 'wic' -# -# AUTHORS -# Tom Zanussi -# - -import os -import shutil -import uuid - -from wic import msger -from wic.utils.oe.misc import get_bitbake_var -from wic.utils.partitionedfs import Image -from wic.utils.errors import CreatorError, ImageError -from wic.imager.baseimager import BaseImageCreator -from wic.plugin import pluginmgr -from wic.utils.oe.misc import exec_cmd, exec_native_cmd - -disk_methods = { - "do_install_disk":None, -} - -class DiskImage(): - """ - A Disk backed by a file. - """ - def __init__(self, device, size): - self.size = size - self.device = device - self.created = False - - def exists(self): - return os.path.exists(self.device) - - def create(self): - if self.created: - return - # create sparse disk image - cmd = "truncate %s -s %s" % (self.device, self.size) - exec_cmd(cmd) - self.created = True - -class DirectImageCreator(BaseImageCreator): - """ - Installs a system into a file containing a partitioned disk image. - - DirectImageCreator is an advanced ImageCreator subclass; an image - file is formatted with a partition table, each partition created - from a rootfs or other OpenEmbedded build artifact and dd'ed into - the virtual disk. The disk image can subsequently be dd'ed onto - media and used on actual hardware. - """ - - def __init__(self, oe_builddir, image_output_dir, rootfs_dir, bootimg_dir, - kernel_dir, native_sysroot, compressor, creatoropts=None, - bmap=False): - """ - Initialize a DirectImageCreator instance. 
- - This method takes the same arguments as ImageCreator.__init__() - """ - BaseImageCreator.__init__(self, creatoropts) - - self.__image = None - self.__disks = {} - self.__disk_format = "direct" - self._disk_names = [] - self.ptable_format = self.ks.bootloader.ptable - - self.oe_builddir = oe_builddir - if image_output_dir: - self.tmpdir = image_output_dir - self.rootfs_dir = rootfs_dir - self.bootimg_dir = bootimg_dir - self.kernel_dir = kernel_dir - self.native_sysroot = native_sysroot - self.compressor = compressor - self.bmap = bmap - - def __get_part_num(self, num, parts): - """calculate the real partition number, accounting for partitions not - in the partition table and logical partitions - """ - realnum = 0 - for pnum, part in enumerate(parts, 1): - if not part.no_table: - realnum += 1 - if pnum == num: - if part.no_table: - return 0 - if self.ptable_format == 'msdos' and realnum > 3 and len(parts) > 4: - # account for logical partition numbering, ex. sda5.. - return realnum + 1 - return realnum - - def _write_fstab(self, image_rootfs): - """overriden to generate fstab (temporarily) in rootfs. This is called - from _create, make sure it doesn't get called from - BaseImage.create() - """ - if not image_rootfs: - return - - fstab_path = image_rootfs + "/etc/fstab" - if not os.path.isfile(fstab_path): - return - - with open(fstab_path) as fstab: - fstab_lines = fstab.readlines() - - if self._update_fstab(fstab_lines, self._get_parts()): - shutil.copyfile(fstab_path, fstab_path + ".orig") - - with open(fstab_path, "w") as fstab: - fstab.writelines(fstab_lines) - - return fstab_path - - def _update_fstab(self, fstab_lines, parts): - """Assume partition order same as in wks""" - updated = False - for num, part in enumerate(parts, 1): - pnum = self.__get_part_num(num, parts) - if not pnum or not part.mountpoint \ - or part.mountpoint in ("/", "/boot"): - continue - - # mmc device partitions are named mmcblk0p1, mmcblk0p2.. - prefix = 'p' if part.disk.startswith('mmcblk') else '' - device_name = "/dev/%s%s%d" % (part.disk, prefix, pnum) - - opts = part.fsopts if part.fsopts else "defaults" - line = "\t".join([device_name, part.mountpoint, part.fstype, - opts, "0", "0"]) + "\n" - - fstab_lines.append(line) - updated = True - - return updated - - def set_bootimg_dir(self, bootimg_dir): - """ - Accessor for bootimg_dir, the actual location used for the source - of the bootimg. Should be set by source plugins (only if they - change the default bootimg source) so the correct info gets - displayed for print_outimage_info(). - """ - self.bootimg_dir = bootimg_dir - - def _get_parts(self): - if not self.ks: - raise CreatorError("Failed to get partition info, " - "please check your kickstart setting.") - - # Set a default partition if no partition is given out - if not self.ks.partitions: - partstr = "part / --size 1900 --ondisk sda --fstype=ext3" - args = partstr.split() - part = self.ks.parse(args[1:]) - if part not in self.ks.partitions: - self.ks.partitions.append(part) - - # partitions list from kickstart file - return self.ks.partitions - - def get_disk_names(self): - """ Returns a list of physical target disk names (e.g., 'sdb') which - will be created. 
""" - - if self._disk_names: - return self._disk_names - - #get partition info from ks handler - parts = self._get_parts() - - for i in range(len(parts)): - if parts[i].disk: - disk_name = parts[i].disk - else: - raise CreatorError("Failed to create disks, no --ondisk " - "specified in partition line of ks file") - - if parts[i].mountpoint and not parts[i].fstype: - raise CreatorError("Failed to create disks, no --fstype " - "specified for partition with mountpoint " - "'%s' in the ks file") - - self._disk_names.append(disk_name) - - return self._disk_names - - def _full_name(self, name, extention): - """ Construct full file name for a file we generate. """ - return "%s-%s.%s" % (self.name, name, extention) - - def _full_path(self, path, name, extention): - """ Construct full file path to a file we generate. """ - return os.path.join(path, self._full_name(name, extention)) - - def get_default_source_plugin(self): - """ - The default source plugin i.e. the plugin that's consulted for - overall image generation tasks outside of any particular - partition. For convenience, we just hang it off the - bootloader handler since it's the one non-partition object in - any setup. By default the default plugin is set to the same - plugin as the /boot partition; since we hang it off the - bootloader object, the default can be explicitly set using the - --source bootloader param. - """ - return self.ks.bootloader.source - - # - # Actual implemention - # - def _create(self): - """ - For 'wic', we already have our build artifacts - we just create - filesystems from the artifacts directly and combine them into - a partitioned image. - """ - parts = self._get_parts() - - self.__image = Image(self.native_sysroot) - - disk_ids = {} - for num, part in enumerate(parts, 1): - # as a convenience, set source to the boot partition source - # instead of forcing it to be set via bootloader --source - if not self.ks.bootloader.source and part.mountpoint == "/boot": - self.ks.bootloader.source = part.source - - # generate parition UUIDs - if not part.uuid and part.use_uuid: - if self.ptable_format == 'gpt': - part.uuid = str(uuid.uuid4()) - else: # msdos partition table - if part.disk not in disk_ids: - disk_ids[part.disk] = int.from_bytes(os.urandom(4), 'little') - disk_id = disk_ids[part.disk] - part.uuid = '%0x-%02d' % (disk_id, self.__get_part_num(num, parts)) - - fstab_path = self._write_fstab(self.rootfs_dir.get("ROOTFS_DIR")) - - shutil.rmtree(self.workdir) - os.mkdir(self.workdir) - - for part in parts: - # get rootfs size from bitbake variable if it's not set in .ks file - if not part.size: - # and if rootfs name is specified for the partition - image_name = part.rootfs_dir - if image_name: - # Bitbake variable ROOTFS_SIZE is calculated in - # Image._get_rootfs_size method from meta/lib/oe/image.py - # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, - # IMAGE_OVERHEAD_FACTOR and IMAGE_ROOTFS_EXTRA_SPACE - rsize_bb = get_bitbake_var('ROOTFS_SIZE', image_name) - if rsize_bb: - part.size = int(round(float(rsize_bb))) - # need to create the filesystems in order to get their - # sizes before we can add them and do the layout. - # Image.create() actually calls __format_disks() to create - # the disk images and carve out the partitions, then - # self.assemble() calls Image.assemble() which calls - # __write_partitition() for each partition to dd the fs - # into the partitions. 
- part.prepare(self, self.workdir, self.oe_builddir, self.rootfs_dir, - self.bootimg_dir, self.kernel_dir, self.native_sysroot) - - - self.__image.add_partition(int(part.size), - part.disk, - part.mountpoint, - part.source_file, - part.fstype, - part.label, - fsopts=part.fsopts, - boot=part.active, - align=part.align, - no_table=part.no_table, - part_type=part.part_type, - uuid=part.uuid, - system_id=part.system_id) - - if fstab_path: - shutil.move(fstab_path + ".orig", fstab_path) - - self.__image.layout_partitions(self.ptable_format) - - self.__imgdir = self.workdir - for disk_name, disk in self.__image.disks.items(): - full_path = self._full_path(self.__imgdir, disk_name, "direct") - msger.debug("Adding disk %s as %s with size %s bytes" \ - % (disk_name, full_path, disk['min_size'])) - disk_obj = DiskImage(full_path, disk['min_size']) - self.__disks[disk_name] = disk_obj - self.__image.add_disk(disk_name, disk_obj, disk_ids.get(disk_name)) - - self.__image.create() - - def assemble(self): - """ - Assemble partitions into disk image(s) - """ - for disk_name, disk in self.__image.disks.items(): - full_path = self._full_path(self.__imgdir, disk_name, "direct") - msger.debug("Assembling disk %s as %s with size %s bytes" \ - % (disk_name, full_path, disk['min_size'])) - self.__image.assemble(full_path) - - def finalize(self): - """ - Finalize the disk image. - - For example, prepare the image to be bootable by e.g. - creating and installing a bootloader configuration. - - """ - source_plugin = self.get_default_source_plugin() - if source_plugin: - self._source_methods = pluginmgr.get_source_plugin_methods(source_plugin, disk_methods) - for disk_name, disk in self.__image.disks.items(): - self._source_methods["do_install_disk"](disk, disk_name, self, - self.workdir, - self.oe_builddir, - self.bootimg_dir, - self.kernel_dir, - self.native_sysroot) - - for disk_name, disk in self.__image.disks.items(): - full_path = self._full_path(self.__imgdir, disk_name, "direct") - # Generate .bmap - if self.bmap: - msger.debug("Generating bmap file for %s" % disk_name) - exec_native_cmd("bmaptool create %s -o %s.bmap" % (full_path, full_path), - self.native_sysroot) - # Compress the image - if self.compressor: - msger.debug("Compressing disk %s with %s" % (disk_name, self.compressor)) - exec_cmd("%s %s" % (self.compressor, full_path)) - - def print_outimage_info(self): - """ - Print the image(s) and artifacts used, for the user. - """ - msg = "The new image(s) can be found here:\n" - - parts = self._get_parts() - - for disk_name in self.__image.disks: - extension = "direct" + {"gzip": ".gz", - "bzip2": ".bz2", - "xz": ".xz", - "": ""}.get(self.compressor) - full_path = self._full_path(self.__imgdir, disk_name, extension) - msg += ' %s\n\n' % full_path - - msg += 'The following build artifacts were used to create the image(s):\n' - for part in parts: - if part.rootfs_dir is None: - continue - if part.mountpoint == '/': - suffix = ':' - else: - suffix = '["%s"]:' % (part.mountpoint or part.label) - msg += ' ROOTFS_DIR%s%s\n' % (suffix.ljust(20), part.rootfs_dir) - - msg += ' BOOTIMG_DIR: %s\n' % self.bootimg_dir - msg += ' KERNEL_DIR: %s\n' % self.kernel_dir - msg += ' NATIVE_SYSROOT: %s\n' % self.native_sysroot - - msger.info(msg) - - @property - def rootdev(self): - """ - Get root device name to use as a 'root' parameter - in kernel command line. 
- - Assume partition order same as in wks - """ - parts = self._get_parts() - for num, part in enumerate(parts, 1): - if part.mountpoint == "/": - if part.uuid: - return "PARTUUID=%s" % part.uuid - else: - suffix = 'p' if part.disk.startswith('mmcblk') else '' - pnum = self.__get_part_num(num, parts) - return "/dev/%s%s%-d" % (part.disk, suffix, pnum) - - def _cleanup(self): - if not self.__image is None: - try: - self.__image.cleanup() - except ImageError as err: - msger.warning("%s" % err) - diff --git a/import-layers/yocto-poky/scripts/lib/wic/ksparser.py b/import-layers/yocto-poky/scripts/lib/wic/ksparser.py index 0894e2b19..d026caad0 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/ksparser.py +++ b/import-layers/yocto-poky/scripts/lib/wic/ksparser.py @@ -27,11 +27,14 @@ import os import shlex +import logging + from argparse import ArgumentParser, ArgumentError, ArgumentTypeError -from wic import msger +from wic.engine import find_canned from wic.partition import Partition -from wic.utils.misc import find_canned + +logger = logging.getLogger('wic') class KickStartError(Exception): """Custom exception.""" @@ -113,6 +116,9 @@ def systemidtype(arg): class KickStart(): """"Kickstart parser implementation.""" + DEFAULT_EXTRA_SPACE = 10*1024 + DEFAULT_OVERHEAD_FACTOR = 1.3 + def __init__(self, confpath): self.partitions = [] @@ -127,16 +133,27 @@ class KickStart(): part.add_argument('mountpoint', nargs='?') part.add_argument('--active', action='store_true') part.add_argument('--align', type=int) - part.add_argument("--extra-space", type=sizetype, default=10*1024) + part.add_argument('--exclude-path', nargs='+') + part.add_argument("--extra-space", type=sizetype) part.add_argument('--fsoptions', dest='fsopts') - part.add_argument('--fstype') + part.add_argument('--fstype', default='vfat', + choices=('ext2', 'ext3', 'ext4', 'btrfs', + 'squashfs', 'vfat', 'msdos', 'swap')) part.add_argument('--label') part.add_argument('--no-table', action='store_true') - part.add_argument('--ondisk', '--ondrive', dest='disk') - part.add_argument("--overhead-factor", type=overheadtype, default=1.3) + part.add_argument('--ondisk', '--ondrive', dest='disk', default='sda') + part.add_argument("--overhead-factor", type=overheadtype) part.add_argument('--part-type') part.add_argument('--rootfs-dir') - part.add_argument('--size', type=sizetype, default=0) + + # --size and --fixed-size cannot be specified together; options + # ----extra-space and --overhead-factor should also raise a parser + # --error, but since nesting mutually exclusive groups does not work, + # ----extra-space/--overhead-factor are handled later + sizeexcl = part.add_mutually_exclusive_group() + sizeexcl.add_argument('--size', type=sizetype, default=0) + sizeexcl.add_argument('--fixed-size', type=sizetype, default=0) + part.add_argument('--source') part.add_argument('--sourceparams') part.add_argument('--system-id', type=systemidtype) @@ -156,7 +173,7 @@ class KickStart(): self._parse(parser, confpath) if not self.bootloader: - msger.warning('bootloader config not specified, using defaults') + logger.warning('bootloader config not specified, using defaults\n') self.bootloader = bootloader.parse_args([]) def _parse(self, parser, confpath): @@ -170,11 +187,33 @@ class KickStart(): lineno += 1 if line and line[0] != '#': try: - parsed = parser.parse_args(shlex.split(line)) + line_args = shlex.split(line) + parsed = parser.parse_args(line_args) except ArgumentError as err: raise KickStartError('%s:%d: %s' % \ (confpath, lineno, err)) if 
line.startswith('part'): + # using ArgumentParser one cannot easily tell if option + # was passed as argument, if said option has a default + # value; --overhead-factor/--extra-space cannot be used + # with --fixed-size, so at least detect when these were + # passed with non-0 values ... + if parsed.fixed_size: + if parsed.overhead_factor or parsed.extra_space: + err = "%s:%d: arguments --overhead-factor and --extra-space not "\ + "allowed with argument --fixed-size" \ + % (confpath, lineno) + raise KickStartError(err) + else: + # ... and provide defaults if not using + # --fixed-size iff given option was not used + # (again, one cannot tell if option was passed but + # with value equal to 0) + if '--overhead-factor' not in line_args: + parsed.overhead_factor = self.DEFAULT_OVERHEAD_FACTOR + if '--extra-space' not in line_args: + parsed.extra_space = self.DEFAULT_EXTRA_SPACE + self.partnum += 1 self.partitions.append(Partition(parsed, self.partnum)) elif line.startswith('include'): diff --git a/import-layers/yocto-poky/scripts/lib/wic/msger.py b/import-layers/yocto-poky/scripts/lib/wic/msger.py deleted file mode 100644 index fb8336d94..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/msger.py +++ /dev/null @@ -1,235 +0,0 @@ -#!/usr/bin/env python -tt -# vim: ai ts=4 sts=4 et sw=4 -# -# Copyright (c) 2009, 2010, 2011 Intel, Inc. -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the Free -# Software Foundation; version 2 of the License -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -# for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., 59 -# Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
- -import os -import sys -import re -import time - -__ALL__ = ['get_loglevel', - 'set_loglevel', - 'set_logfile', - 'debug', - 'verbose', - 'info', - 'warning', - 'error', - ] - -# COLORs in ANSI -INFO_COLOR = 32 # green -WARN_COLOR = 33 # yellow -ERR_COLOR = 31 # red -ASK_COLOR = 34 # blue -NO_COLOR = 0 - -PREFIX_RE = re.compile('^<(.*?)>\s*(.*)', re.S) - -INTERACTIVE = True - -LOG_LEVEL = 1 -LOG_LEVELS = { - 'quiet': 0, - 'normal': 1, - 'verbose': 2, - 'debug': 3, - 'never': 4, -} - -LOG_FILE_FP = None -LOG_CONTENT = '' -CATCHERR_BUFFILE_FD = -1 -CATCHERR_BUFFILE_PATH = None -CATCHERR_SAVED_2 = -1 - -def _general_print(head, color, msg=None, stream=None, level='normal'): - global LOG_CONTENT - if not stream: - stream = sys.stdout - - if LOG_LEVELS[level] > LOG_LEVEL: - # skip - return - - errormsg = '' - if CATCHERR_BUFFILE_FD > 0: - size = os.lseek(CATCHERR_BUFFILE_FD, 0, os.SEEK_END) - os.lseek(CATCHERR_BUFFILE_FD, 0, os.SEEK_SET) - errormsg = os.read(CATCHERR_BUFFILE_FD, size) - os.ftruncate(CATCHERR_BUFFILE_FD, 0) - - # append error msg to LOG - if errormsg: - LOG_CONTENT += errormsg - - # append normal msg to LOG - save_msg = msg.strip() if msg else None - if save_msg: - timestr = time.strftime("[%m/%d %H:%M:%S %Z] ", time.localtime()) - LOG_CONTENT += timestr + save_msg + '\n' - - if errormsg: - _color_print('', NO_COLOR, errormsg, stream, level) - - _color_print(head, color, msg, stream, level) - -def _color_print(head, color, msg, stream, level): - colored = True - if color == NO_COLOR or \ - not stream.isatty() or \ - os.getenv('ANSI_COLORS_DISABLED') is not None: - colored = False - - if head.startswith('\r'): - # need not \n at last - newline = False - else: - newline = True - - if colored: - head = '\033[%dm%s:\033[0m ' %(color, head) - if not newline: - # ESC cmd to clear line - head = '\033[2K' + head - else: - if head: - head += ': ' - if head.startswith('\r'): - head = head.lstrip() - newline = True - - if msg is not None: - stream.write('%s%s' % (head, msg)) - if newline: - stream.write('\n') - - stream.flush() - -def _color_perror(head, color, msg, level='normal'): - if CATCHERR_BUFFILE_FD > 0: - _general_print(head, color, msg, sys.stdout, level) - else: - _general_print(head, color, msg, sys.stderr, level) - -def _split_msg(head, msg): - if isinstance(msg, list): - msg = '\n'.join(map(str, msg)) - - if msg.startswith('\n'): - # means print \n at first - msg = msg.lstrip() - head = '\n' + head - - elif msg.startswith('\r'): - # means print \r at first - msg = msg.lstrip() - head = '\r' + head - - match = PREFIX_RE.match(msg) - if match: - head += ' <%s>' % match.group(1) - msg = match.group(2) - - return head, msg - -def get_loglevel(): - return next((k for k, v in LOG_LEVELS.items() if v == LOG_LEVEL)) - -def set_loglevel(level): - global LOG_LEVEL - if level not in LOG_LEVELS: - # no effect - return - - LOG_LEVEL = LOG_LEVELS[level] - -def set_interactive(mode=True): - global INTERACTIVE - if mode: - INTERACTIVE = True - else: - INTERACTIVE = False - -def log(msg=''): - # log msg to LOG_CONTENT then save to logfile - global LOG_CONTENT - if msg: - LOG_CONTENT += msg - -def info(msg): - head, msg = _split_msg('Info', msg) - _general_print(head, INFO_COLOR, msg) - -def verbose(msg): - head, msg = _split_msg('Verbose', msg) - _general_print(head, INFO_COLOR, msg, level='verbose') - -def warning(msg): - head, msg = _split_msg('Warning', msg) - _color_perror(head, WARN_COLOR, msg) - -def debug(msg): - head, msg = _split_msg('Debug', msg) - _color_perror(head, ERR_COLOR, 
msg, level='debug') - -def error(msg): - head, msg = _split_msg('Error', msg) - _color_perror(head, ERR_COLOR, msg) - sys.exit(1) - -def set_logfile(fpath): - global LOG_FILE_FP - - def _savelogf(): - if LOG_FILE_FP: - with open(LOG_FILE_FP, 'w') as log: - log.write(LOG_CONTENT) - - if LOG_FILE_FP is not None: - warning('duplicate log file configuration') - - LOG_FILE_FP = fpath - - import atexit - atexit.register(_savelogf) - -def enable_logstderr(fpath): - global CATCHERR_BUFFILE_FD - global CATCHERR_BUFFILE_PATH - global CATCHERR_SAVED_2 - - if os.path.exists(fpath): - os.remove(fpath) - CATCHERR_BUFFILE_PATH = fpath - CATCHERR_BUFFILE_FD = os.open(CATCHERR_BUFFILE_PATH, os.O_RDWR|os.O_CREAT) - CATCHERR_SAVED_2 = os.dup(2) - os.dup2(CATCHERR_BUFFILE_FD, 2) - -def disable_logstderr(): - global CATCHERR_BUFFILE_FD - global CATCHERR_BUFFILE_PATH - global CATCHERR_SAVED_2 - - raw(msg=None) # flush message buffer and print it. - os.dup2(CATCHERR_SAVED_2, 2) - os.close(CATCHERR_SAVED_2) - os.close(CATCHERR_BUFFILE_FD) - os.unlink(CATCHERR_BUFFILE_PATH) - CATCHERR_BUFFILE_FD = -1 - CATCHERR_BUFFILE_PATH = None - CATCHERR_SAVED_2 = -1 diff --git a/import-layers/yocto-poky/scripts/lib/wic/partition.py b/import-layers/yocto-poky/scripts/lib/wic/partition.py index ec3aa6622..939e66731 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/partition.py +++ b/import-layers/yocto-poky/scripts/lib/wic/partition.py @@ -24,18 +24,15 @@ # Tom Zanussi # Ed Bartosh (at] linux.intel.com> +import logging import os import tempfile -from wic.utils.oe.misc import msger, parse_sourceparams -from wic.utils.oe.misc import exec_cmd, exec_native_cmd -from wic.plugin import pluginmgr +from wic import WicError +from wic.utils.misc import exec_cmd, exec_native_cmd, get_bitbake_var +from wic.pluginbase import PluginMgr -partition_methods = { - "do_stage_partition":None, - "do_prepare_partition":None, - "do_configure_partition":None, -} +logger = logging.getLogger('wic') class Partition(): @@ -44,16 +41,20 @@ class Partition(): self.active = args.active self.align = args.align self.disk = args.disk + self.device = None self.extra_space = args.extra_space + self.exclude_path = args.exclude_path self.fsopts = args.fsopts self.fstype = args.fstype self.label = args.label self.mountpoint = args.mountpoint self.no_table = args.no_table + self.num = None self.overhead_factor = args.overhead_factor self.part_type = args.part_type self.rootfs_dir = args.rootfs_dir self.size = args.size + self.fixed_size = args.fixed_size self.source = args.source self.sourceparams = args.sourceparams self.system_id = args.system_id @@ -71,100 +72,128 @@ class Partition(): number of (1k) blocks we need to add to get to --size, 0 if we're already there or beyond. """ - msger.debug("Requested partition size for %s: %d" % \ - (self.mountpoint, self.size)) + logger.debug("Requested partition size for %s: %d", + self.mountpoint, self.size) if not self.size: return 0 requested_blocks = self.size - msger.debug("Requested blocks %d, current_blocks %d" % \ - (requested_blocks, current_blocks)) + logger.debug("Requested blocks %d, current_blocks %d", + requested_blocks, current_blocks) if requested_blocks > current_blocks: return requested_blocks - current_blocks else: return 0 + def get_rootfs_size(self, actual_rootfs_size=0): + """ + Calculate the required size of rootfs taking into consideration + --size/--fixed-size flags as well as overhead and extra space, as + specified in kickstart file. 
Raises an error if the + `actual_rootfs_size` is larger than fixed-size rootfs. + + """ + if self.fixed_size: + rootfs_size = self.fixed_size + if actual_rootfs_size > rootfs_size: + raise WicError("Actual rootfs size (%d kB) is larger than " + "allowed size %d kB" % + (actual_rootfs_size, rootfs_size)) + else: + extra_blocks = self.get_extra_block_count(actual_rootfs_size) + if extra_blocks < self.extra_space: + extra_blocks = self.extra_space + + rootfs_size = actual_rootfs_size + extra_blocks + rootfs_size *= self.overhead_factor + + logger.debug("Added %d extra blocks to %s to get to %d total blocks", + extra_blocks, self.mountpoint, rootfs_size) + + return rootfs_size + + @property + def disk_size(self): + """ + Obtain on-disk size of partition taking into consideration + --size/--fixed-size options. + + """ + return self.fixed_size if self.fixed_size else self.size + def prepare(self, creator, cr_workdir, oe_builddir, rootfs_dir, bootimg_dir, kernel_dir, native_sysroot): """ Prepare content for individual partitions, depending on partition command parameters. """ - if self.sourceparams: - self.sourceparams_dict = parse_sourceparams(self.sourceparams) - if not self.source: - if not self.size: - msger.error("The %s partition has a size of zero. Please " - "specify a non-zero --size for that partition." % \ - self.mountpoint) - if self.fstype and self.fstype == "swap": + if not self.size and not self.fixed_size: + raise WicError("The %s partition has a size of zero. Please " + "specify a non-zero --size/--fixed-size for that " + "partition." % self.mountpoint) + + if self.fstype == "swap": self.prepare_swap_partition(cr_workdir, oe_builddir, native_sysroot) self.source_file = "%s/fs.%s" % (cr_workdir, self.fstype) - elif self.fstype: + else: + if self.fstype == 'squashfs': + raise WicError("It's not possible to create empty squashfs " + "partition '%s'" % (self.mountpoint)) + rootfs = "%s/fs_%s.%s.%s" % (cr_workdir, self.label, self.lineno, self.fstype) if os.path.isfile(rootfs): os.remove(rootfs) - for prefix in ("ext", "btrfs", "vfat", "squashfs"): - if self.fstype.startswith(prefix): - method = getattr(self, - "prepare_empty_partition_" + prefix) - method(rootfs, oe_builddir, native_sysroot) - self.source_file = rootfs - break + + prefix = "ext" if self.fstype.startswith("ext") else self.fstype + method = getattr(self, "prepare_empty_partition_" + prefix) + method(rootfs, oe_builddir, native_sysroot) + self.source_file = rootfs return - plugins = pluginmgr.get_source_plugins() + plugins = PluginMgr.get_plugins('source') if self.source not in plugins: - msger.error("The '%s' --source specified for %s doesn't exist.\n\t" - "See 'wic list source-plugins' for a list of available" - " --sources.\n\tSee 'wic help source-plugins' for " - "details on adding a new source plugin." 
% \ - (self.source, self.mountpoint)) - - self._source_methods = pluginmgr.get_source_plugin_methods(\ - self.source, partition_methods) - self._source_methods["do_configure_partition"](self, self.sourceparams_dict, - creator, cr_workdir, - oe_builddir, - bootimg_dir, - kernel_dir, - native_sysroot) - self._source_methods["do_stage_partition"](self, self.sourceparams_dict, - creator, cr_workdir, - oe_builddir, - bootimg_dir, kernel_dir, - native_sysroot) - self._source_methods["do_prepare_partition"](self, self.sourceparams_dict, - creator, cr_workdir, - oe_builddir, - bootimg_dir, kernel_dir, rootfs_dir, - native_sysroot) + raise WicError("The '%s' --source specified for %s doesn't exist.\n\t" + "See 'wic list source-plugins' for a list of available" + " --sources.\n\tSee 'wic help source-plugins' for " + "details on adding a new source plugin." % + (self.source, self.mountpoint)) + + srcparams_dict = {} + if self.sourceparams: + # Split sourceparams string of the form key1=val1[,key2=val2,...] + # into a dict. Also accepts valueless keys i.e. without = + splitted = self.sourceparams.split(',') + srcparams_dict = dict(par.split('=') for par in splitted if par) + + plugin = PluginMgr.get_plugins('source')[self.source] + plugin.do_configure_partition(self, srcparams_dict, creator, + cr_workdir, oe_builddir, bootimg_dir, + kernel_dir, native_sysroot) + plugin.do_stage_partition(self, srcparams_dict, creator, + cr_workdir, oe_builddir, bootimg_dir, + kernel_dir, native_sysroot) + plugin.do_prepare_partition(self, srcparams_dict, creator, + cr_workdir, oe_builddir, bootimg_dir, + kernel_dir, rootfs_dir, native_sysroot) + # further processing required Partition.size to be an integer, make # sure that it is one - if type(self.size) is not int: - msger.error("Partition %s internal size is not an integer. " \ - "This a bug in source plugin %s and needs to be fixed." \ - % (self.mountpoint, self.source)) + if not isinstance(self.size, int): + raise WicError("Partition %s internal size is not an integer. " + "This a bug in source plugin %s and needs to be fixed." % + (self.mountpoint, self.source)) - def prepare_rootfs_from_fs_image(self, cr_workdir, oe_builddir, - rootfs_dir): - """ - Handle an already-created partition e.g. 
xxx.ext3 - """ - rootfs = oe_builddir - du_cmd = "du -Lbks %s" % rootfs - out = exec_cmd(du_cmd) - rootfs_size = out.split()[0] - - self.size = int(rootfs_size) - self.source_file = rootfs + if self.fixed_size and self.size > self.fixed_size: + raise WicError("File system image of partition %s is " + "larger (%d kB) than its allowed size %d kB" % + (self.mountpoint, self.size, self.fixed_size)) def prepare_rootfs(self, cr_workdir, oe_builddir, rootfs_dir, native_sysroot): @@ -183,30 +212,36 @@ class Partition(): pseudo += "export PSEUDO_LOCALSTATEDIR=%s;" % p_localstatedir pseudo += "export PSEUDO_PASSWD=%s;" % p_passwd pseudo += "export PSEUDO_NOSYMLINKEXP=%s;" % p_nosymlinkexp - pseudo += "%s/usr/bin/pseudo " % native_sysroot + pseudo += "%s " % get_bitbake_var("FAKEROOTCMD") rootfs = "%s/rootfs_%s.%s.%s" % (cr_workdir, self.label, self.lineno, self.fstype) if os.path.isfile(rootfs): os.remove(rootfs) - if not self.fstype: - msger.error("File system for partition %s not specified in kickstart, " \ - "use --fstype option" % (self.mountpoint)) - - for prefix in ("ext", "btrfs", "vfat", "squashfs"): - if self.fstype.startswith(prefix): - method = getattr(self, "prepare_rootfs_" + prefix) - method(rootfs, oe_builddir, rootfs_dir, native_sysroot, pseudo) - - self.source_file = rootfs - - # get the rootfs size in the right units for kickstart (kB) - du_cmd = "du -Lbks %s" % rootfs - out = exec_cmd(du_cmd) - self.size = int(out.split()[0]) + # Get rootfs size from bitbake variable if it's not set in .ks file + if not self.size: + # Bitbake variable ROOTFS_SIZE is calculated in + # Image._get_rootfs_size method from meta/lib/oe/image.py + # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, + # IMAGE_OVERHEAD_FACTOR and IMAGE_ROOTFS_EXTRA_SPACE + rsize_bb = get_bitbake_var('ROOTFS_SIZE') + if rsize_bb: + logger.warning('overhead-factor was specified, but size was not,' + ' so bitbake variables will be used for the size.' + ' In this case both IMAGE_OVERHEAD_FACTOR and ' + '--overhead-factor will be applied') + self.size = int(round(float(rsize_bb))) + + prefix = "ext" if self.fstype.startswith("ext") else self.fstype + method = getattr(self, "prepare_rootfs_" + prefix) + method(rootfs, oe_builddir, rootfs_dir, native_sysroot, pseudo) + self.source_file = rootfs - break + # get the rootfs size in the right units for kickstart (kB) + du_cmd = "du -Lbks %s" % rootfs + out = exec_cmd(du_cmd) + self.size = int(out.split()[0]) def prepare_rootfs_ext(self, rootfs, oe_builddir, rootfs_dir, native_sysroot, pseudo): @@ -217,17 +252,10 @@ class Partition(): out = exec_cmd(du_cmd) actual_rootfs_size = int(out.split()[0]) - extra_blocks = self.get_extra_block_count(actual_rootfs_size) - if extra_blocks < self.extra_space: - extra_blocks = self.extra_space - - rootfs_size = actual_rootfs_size + extra_blocks - rootfs_size *= self.overhead_factor - - msger.debug("Added %d extra blocks to %s to get to %d total blocks" % \ - (extra_blocks, self.mountpoint, rootfs_size)) + rootfs_size = self.get_rootfs_size(actual_rootfs_size) - exec_cmd("truncate %s -s %d" % (rootfs, rootfs_size * 1024)) + with open(rootfs, 'w') as sparse: + os.ftruncate(sparse.fileno(), rootfs_size * 1024) extra_imagecmd = "-i 8192" @@ -239,7 +267,7 @@ class Partition(): (self.fstype, extra_imagecmd, rootfs, label_str, rootfs_dir) exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) - mkfs_cmd = "fsck.%s -pvfD %s || [ $? 
-le 3 ]" % (self.fstype, rootfs) + mkfs_cmd = "fsck.%s -pvfD %s" % (self.fstype, rootfs) exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) def prepare_rootfs_btrfs(self, rootfs, oe_builddir, rootfs_dir, @@ -253,17 +281,10 @@ class Partition(): out = exec_cmd(du_cmd) actual_rootfs_size = int(out.split()[0]) - extra_blocks = self.get_extra_block_count(actual_rootfs_size) - if extra_blocks < self.extra_space: - extra_blocks = self.extra_space + rootfs_size = self.get_rootfs_size(actual_rootfs_size) - rootfs_size = actual_rootfs_size + extra_blocks - rootfs_size *= self.overhead_factor - - msger.debug("Added %d extra blocks to %s to get to %d total blocks" % \ - (extra_blocks, self.mountpoint, rootfs_size)) - - exec_cmd("truncate %s -s %d" % (rootfs, rootfs_size * 1024)) + with open(rootfs, 'w') as sparse: + os.ftruncate(sparse.fileno(), rootfs_size * 1024) label_str = "" if self.label: @@ -273,29 +294,27 @@ class Partition(): (self.fstype, rootfs_size * 1024, rootfs_dir, label_str, rootfs) exec_native_cmd(mkfs_cmd, native_sysroot, pseudo=pseudo) - def prepare_rootfs_vfat(self, rootfs, oe_builddir, rootfs_dir, - native_sysroot, pseudo): + def prepare_rootfs_msdos(self, rootfs, oe_builddir, rootfs_dir, + native_sysroot, pseudo): """ - Prepare content for a vfat rootfs partition. + Prepare content for a msdos/vfat rootfs partition. """ du_cmd = "du -bks %s" % rootfs_dir out = exec_cmd(du_cmd) blocks = int(out.split()[0]) - extra_blocks = self.get_extra_block_count(blocks) - if extra_blocks < self.extra_space: - extra_blocks = self.extra_space - - blocks += extra_blocks - - msger.debug("Added %d extra blocks to %s to get to %d total blocks" % \ - (extra_blocks, self.mountpoint, blocks)) + rootfs_size = self.get_rootfs_size(blocks) label_str = "-n boot" if self.label: label_str = "-n %s" % self.label - dosfs_cmd = "mkdosfs %s -S 512 -C %s %d" % (label_str, rootfs, blocks) + size_str = "" + if self.fstype == 'msdos': + size_str = "-F 16" # FAT 16 + + dosfs_cmd = "mkdosfs %s -S 512 %s -C %s %d" % (label_str, size_str, + rootfs, rootfs_size) exec_native_cmd(dosfs_cmd, native_sysroot) mcopy_cmd = "mcopy -i %s -s %s/* ::/" % (rootfs, rootfs_dir) @@ -304,6 +323,8 @@ class Partition(): chmod_cmd = "chmod 644 %s" % rootfs exec_cmd(chmod_cmd) + prepare_rootfs_vfat = prepare_rootfs_msdos + def prepare_rootfs_squashfs(self, rootfs, oe_builddir, rootfs_dir, native_sysroot, pseudo): """ @@ -318,7 +339,9 @@ class Partition(): """ Prepare an empty ext2/3/4 partition. """ - exec_cmd("truncate %s -s %d" % (rootfs, self.size * 1024)) + size = self.disk_size + with open(rootfs, 'w') as sparse: + os.ftruncate(sparse.fileno(), size * 1024) extra_imagecmd = "-i 8192" @@ -335,7 +358,9 @@ class Partition(): """ Prepare an empty btrfs partition. """ - exec_cmd("truncate %s -s %d" % (rootfs, self.size * 1024)) + size = self.disk_size + with open(rootfs, 'w') as sparse: + os.ftruncate(sparse.fileno(), size * 1024) label_str = "" if self.label: @@ -345,50 +370,29 @@ class Partition(): (self.fstype, self.size * 1024, label_str, rootfs) exec_native_cmd(mkfs_cmd, native_sysroot) - def prepare_empty_partition_vfat(self, rootfs, oe_builddir, - native_sysroot): + def prepare_empty_partition_msdos(self, rootfs, oe_builddir, + native_sysroot): """ Prepare an empty vfat partition. 
""" - blocks = self.size + blocks = self.disk_size label_str = "-n boot" if self.label: label_str = "-n %s" % self.label - dosfs_cmd = "mkdosfs %s -S 512 -C %s %d" % (label_str, rootfs, blocks) + size_str = "" + if self.fstype == 'msdos': + size_str = "-F 16" # FAT 16 + + dosfs_cmd = "mkdosfs %s -S 512 %s -C %s %d" % (label_str, size_str, + rootfs, blocks) exec_native_cmd(dosfs_cmd, native_sysroot) chmod_cmd = "chmod 644 %s" % rootfs exec_cmd(chmod_cmd) - def prepare_empty_partition_squashfs(self, cr_workdir, oe_builddir, - native_sysroot): - """ - Prepare an empty squashfs partition. - """ - msger.warning("Creating of an empty squashfs %s partition was attempted. " \ - "Proceeding as requested." % self.mountpoint) - - path = "%s/fs_%s.%s" % (cr_workdir, self.label, self.fstype) - os.path.isfile(path) and os.remove(path) - - # it is not possible to create a squashfs without source data, - # thus prepare an empty temp dir that is used as source - tmpdir = tempfile.mkdtemp() - - squashfs_cmd = "mksquashfs %s %s -noappend" % \ - (tmpdir, path) - exec_native_cmd(squashfs_cmd, native_sysroot) - - os.rmdir(tmpdir) - - # get the rootfs size in the right units for kickstart (kB) - du_cmd = "du -Lbks %s" % path - out = exec_cmd(du_cmd) - fs_size = out.split()[0] - - self.size = int(fs_size) + prepare_empty_partition_vfat = prepare_empty_partition_msdos def prepare_swap_partition(self, cr_workdir, oe_builddir, native_sysroot): """ @@ -396,7 +400,8 @@ class Partition(): """ path = "%s/fs.%s" % (cr_workdir, self.fstype) - exec_cmd("truncate %s -s %d" % (path, self.size * 1024)) + with open(path, 'w') as sparse: + os.ftruncate(sparse.fileno(), self.size * 1024) import uuid label_str = "" @@ -404,4 +409,3 @@ class Partition(): label_str = "-L %s" % self.label mkswap_cmd = "mkswap %s -U %s %s" % (label_str, str(uuid.uuid1()), path) exec_native_cmd(mkswap_cmd, native_sysroot) - diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugin.py b/import-layers/yocto-poky/scripts/lib/wic/plugin.py deleted file mode 100644 index 306b32437..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/plugin.py +++ /dev/null @@ -1,150 +0,0 @@ -#!/usr/bin/env python -tt -# -# Copyright (c) 2011 Intel, Inc. -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the Free -# Software Foundation; version 2 of the License -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -# for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., 59 -# Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
- -import os, sys - -from wic import msger -from wic import pluginbase -from wic.utils import errors -from wic.utils.oe.misc import get_bitbake_var - -__ALL__ = ['PluginMgr', 'pluginmgr'] - -PLUGIN_TYPES = ["imager", "source"] - -PLUGIN_DIR = "/lib/wic/plugins" # relative to scripts -SCRIPTS_PLUGIN_DIR = "scripts" + PLUGIN_DIR - -class PluginMgr(): - plugin_dirs = {} - - # make the manager class as singleton - _instance = None - def __new__(cls, *args, **kwargs): - if not cls._instance: - cls._instance = super(PluginMgr, cls).__new__(cls, *args, **kwargs) - - return cls._instance - - def __init__(self): - wic_path = os.path.dirname(__file__) - eos = wic_path.rfind('scripts') + len('scripts') - scripts_path = wic_path[:eos] - self.scripts_path = scripts_path - self.plugin_dir = scripts_path + PLUGIN_DIR - self.layers_path = None - - def _build_plugin_dir_list(self, plugin_dir, ptype): - if self.layers_path is None: - self.layers_path = get_bitbake_var("BBLAYERS") - layer_dirs = [] - - if self.layers_path is not None: - for layer_path in self.layers_path.split(): - path = os.path.join(layer_path, SCRIPTS_PLUGIN_DIR, ptype) - layer_dirs.append(path) - - path = os.path.join(plugin_dir, ptype) - layer_dirs.append(path) - - return layer_dirs - - def append_dirs(self, dirs): - for path in dirs: - self._add_plugindir(path) - - # load all the plugins AGAIN - self._load_all() - - def _add_plugindir(self, path): - path = os.path.abspath(os.path.expanduser(path)) - - if not os.path.isdir(path): - return - - if path not in self.plugin_dirs: - self.plugin_dirs[path] = False - # the value True/False means "loaded" - - def _load_all(self): - for (pdir, loaded) in self.plugin_dirs.items(): - if loaded: - continue - - sys.path.insert(0, pdir) - for mod in [x[:-3] for x in os.listdir(pdir) if x.endswith(".py")]: - if mod and mod != '__init__': - if mod in sys.modules: - #self.plugin_dirs[pdir] = True - msger.warning("Module %s already exists, skip" % mod) - else: - try: - pymod = __import__(mod) - self.plugin_dirs[pdir] = True - msger.debug("Plugin module %s:%s imported"\ - % (mod, pymod.__file__)) - except ImportError as err: - msg = 'Failed to load plugin %s/%s: %s' \ - % (os.path.basename(pdir), mod, err) - msger.warning(msg) - - del sys.path[0] - - def get_plugins(self, ptype): - """ the return value is dict of name:class pairs """ - - if ptype not in PLUGIN_TYPES: - raise errors.CreatorError('%s is not valid plugin type' % ptype) - - plugins_dir = self._build_plugin_dir_list(self.plugin_dir, ptype) - - self.append_dirs(plugins_dir) - - return pluginbase.get_plugins(ptype) - - def get_source_plugins(self): - """ - Return list of available source plugins. - """ - plugins_dir = self._build_plugin_dir_list(self.plugin_dir, 'source') - - self.append_dirs(plugins_dir) - - return self.get_plugins('source') - - - def get_source_plugin_methods(self, source_name, methods): - """ - The methods param is a dict with the method names to find. On - return, the dict values will be filled in with pointers to the - corresponding methods. If one or more methods are not found, - None is returned. 
- """ - return_methods = None - for _source_name, klass in self.get_plugins('source').items(): - if _source_name == source_name: - for _method_name in methods: - if not hasattr(klass, _method_name): - msger.warning("Unimplemented %s source interface for: %s"\ - % (_method_name, _source_name)) - return None - func = getattr(klass, _method_name) - methods[_method_name] = func - return_methods = methods - return return_methods - -pluginmgr = PluginMgr() diff --git a/import-layers/yocto-poky/scripts/lib/wic/pluginbase.py b/import-layers/yocto-poky/scripts/lib/wic/pluginbase.py index e737dee7b..fb3d179c2 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/pluginbase.py +++ b/import-layers/yocto-poky/scripts/lib/wic/pluginbase.py @@ -15,26 +15,74 @@ # with this program; if not, write to the Free Software Foundation, Inc., 59 # Temple Place - Suite 330, Boston, MA 02111-1307, USA. -__all__ = ['ImagerPlugin', 'SourcePlugin', 'get_plugins'] +__all__ = ['ImagerPlugin', 'SourcePlugin'] + +import os +import logging -import sys from collections import defaultdict +from importlib.machinery import SourceFileLoader + +from wic import WicError +from wic.utils.misc import get_bitbake_var + +PLUGIN_TYPES = ["imager", "source"] + +SCRIPTS_PLUGIN_DIR = "scripts/lib/wic/plugins" + +logger = logging.getLogger('wic') + +PLUGINS = defaultdict(dict) -from wic import msger +class PluginMgr: + _plugin_dirs = [] + + @classmethod + def get_plugins(cls, ptype): + """Get dictionary of : pairs.""" + if ptype not in PLUGIN_TYPES: + raise WicError('%s is not valid plugin type' % ptype) + + # collect plugin directories + if not cls._plugin_dirs: + cls._plugin_dirs = [os.path.join(os.path.dirname(__file__), 'plugins')] + layers = get_bitbake_var("BBLAYERS") or '' + for layer_path in layers.split(): + path = os.path.join(layer_path, SCRIPTS_PLUGIN_DIR) + path = os.path.abspath(os.path.expanduser(path)) + if path not in cls._plugin_dirs and os.path.isdir(path): + cls._plugin_dirs.insert(0, path) + + if ptype not in PLUGINS: + # load all ptype plugins + for pdir in cls._plugin_dirs: + ppath = os.path.join(pdir, ptype) + if os.path.isdir(ppath): + for fname in os.listdir(ppath): + if fname.endswith('.py'): + mname = fname[:-3] + mpath = os.path.join(ppath, fname) + logger.debug("loading plugin module %s", mpath) + SourceFileLoader(mname, mpath).load_module() + + return PLUGINS.get(ptype) class PluginMeta(type): - plugins = defaultdict(dict) def __new__(cls, name, bases, attrs): class_type = type.__new__(cls, name, bases, attrs) if 'name' in attrs: - cls.plugins[class_type.wic_plugin_type][attrs['name']] = class_type + PLUGINS[class_type.wic_plugin_type][attrs['name']] = class_type return class_type -class ImagerPlugin(PluginMeta("Plugin", (), {})): +class ImagerPlugin(metaclass=PluginMeta): wic_plugin_type = "imager" -class SourcePlugin(PluginMeta("Plugin", (), {})): + def do_create(self): + raise WicError("Method %s.do_create is not implemented" % + self.__class__.__name__) + +class SourcePlugin(metaclass=PluginMeta): wic_plugin_type = "source" """ The methods that can be implemented by --source plugins. @@ -50,7 +98,7 @@ class SourcePlugin(PluginMeta("Plugin", (), {})): disk image. This provides a hook to allow finalization of a disk image e.g. to write an MBR to it. 
""" - msger.debug("SourcePlugin: do_install_disk: disk: %s" % disk_name) + logger.debug("SourcePlugin: do_install_disk: disk: %s", disk_name) @classmethod def do_stage_partition(cls, part, source_params, creator, cr_workdir, @@ -67,7 +115,7 @@ class SourcePlugin(PluginMeta("Plugin", (), {})): Not that get_bitbake_var() allows you to acces non-standard variables that you might want to use for this. """ - msger.debug("SourcePlugin: do_stage_partition: part: %s" % part) + logger.debug("SourcePlugin: do_stage_partition: part: %s", part) @classmethod def do_configure_partition(cls, part, source_params, creator, cr_workdir, @@ -78,7 +126,7 @@ class SourcePlugin(PluginMeta("Plugin", (), {})): custom configuration files for a partition, for example syslinux or grub config files. """ - msger.debug("SourcePlugin: do_configure_partition: part: %s" % part) + logger.debug("SourcePlugin: do_configure_partition: part: %s", part) @classmethod def do_prepare_partition(cls, part, source_params, creator, cr_workdir, @@ -88,7 +136,5 @@ class SourcePlugin(PluginMeta("Plugin", (), {})): Called to do the actual content population for a partition i.e. it 'prepares' the partition to be incorporated into the image. """ - msger.debug("SourcePlugin: do_prepare_partition: part: %s" % part) + logger.debug("SourcePlugin: do_prepare_partition: part: %s", part) -def get_plugins(typen): - return PluginMeta.plugins.get(typen) diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct.py new file mode 100644 index 000000000..f2e612733 --- /dev/null +++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct.py @@ -0,0 +1,561 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# Copyright (c) 2013, Intel Corporation. +# All rights reserved. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# DESCRIPTION +# This implements the 'direct' imager plugin class for 'wic' +# +# AUTHORS +# Tom Zanussi +# + +import logging +import os +import shutil +import tempfile +import uuid + +from time import strftime + +from wic import WicError +from wic.filemap import sparse_copy +from wic.ksparser import KickStart, KickStartError +from wic.pluginbase import PluginMgr, ImagerPlugin +from wic.utils.misc import get_bitbake_var, exec_cmd, exec_native_cmd + +logger = logging.getLogger('wic') + +class DirectPlugin(ImagerPlugin): + """ + Install a system into a file containing a partitioned disk image. + + An image file is formatted with a partition table, each partition + created from a rootfs or other OpenEmbedded build artifact and dd'ed + into the virtual disk. The disk image can subsequently be dd'ed onto + media and used on actual hardware. 
+ """ + name = 'direct' + + def __init__(self, wks_file, rootfs_dir, bootimg_dir, kernel_dir, + native_sysroot, oe_builddir, options): + try: + self.ks = KickStart(wks_file) + except KickStartError as err: + raise WicError(str(err)) + + # parse possible 'rootfs=name' items + self.rootfs_dir = dict(rdir.split('=') for rdir in rootfs_dir.split(' ')) + self.bootimg_dir = bootimg_dir + self.kernel_dir = kernel_dir + self.native_sysroot = native_sysroot + self.oe_builddir = oe_builddir + + self.outdir = options.outdir + self.compressor = options.compressor + self.bmap = options.bmap + + self.name = "%s-%s" % (os.path.splitext(os.path.basename(wks_file))[0], + strftime("%Y%m%d%H%M")) + self.workdir = tempfile.mkdtemp(dir=self.outdir, prefix='tmp.wic.') + self._image = None + self.ptable_format = self.ks.bootloader.ptable + self.parts = self.ks.partitions + + # as a convenience, set source to the boot partition source + # instead of forcing it to be set via bootloader --source + for part in self.parts: + if not self.ks.bootloader.source and part.mountpoint == "/boot": + self.ks.bootloader.source = part.source + break + + image_path = self._full_path(self.workdir, self.parts[0].disk, "direct") + self._image = PartitionedImage(image_path, self.ptable_format, + self.parts, self.native_sysroot) + + def do_create(self): + """ + Plugin entry point. + """ + try: + self.create() + self.assemble() + self.finalize() + self.print_info() + finally: + self.cleanup() + + def _write_fstab(self, image_rootfs): + """overriden to generate fstab (temporarily) in rootfs. This is called + from _create, make sure it doesn't get called from + BaseImage.create() + """ + if not image_rootfs: + return + + fstab_path = image_rootfs + "/etc/fstab" + if not os.path.isfile(fstab_path): + return + + with open(fstab_path) as fstab: + fstab_lines = fstab.readlines() + + if self._update_fstab(fstab_lines, self.parts): + shutil.copyfile(fstab_path, fstab_path + ".orig") + + with open(fstab_path, "w") as fstab: + fstab.writelines(fstab_lines) + + return fstab_path + + def _update_fstab(self, fstab_lines, parts): + """Assume partition order same as in wks""" + updated = False + for part in parts: + if not part.realnum or not part.mountpoint \ + or part.mountpoint in ("/", "/boot"): + continue + + # mmc device partitions are named mmcblk0p1, mmcblk0p2.. + prefix = 'p' if part.disk.startswith('mmcblk') else '' + device_name = "/dev/%s%s%d" % (part.disk, prefix, part.realnum) + + opts = part.fsopts if part.fsopts else "defaults" + line = "\t".join([device_name, part.mountpoint, part.fstype, + opts, "0", "0"]) + "\n" + + fstab_lines.append(line) + updated = True + + return updated + + def _full_path(self, path, name, extention): + """ Construct full file path to a file we generate. """ + return os.path.join(path, "%s-%s.%s" % (self.name, name, extention)) + + # + # Actual implemention + # + def create(self): + """ + For 'wic', we already have our build artifacts - we just create + filesystems from the artifacts directly and combine them into + a partitioned image. 
+ """ + fstab_path = self._write_fstab(self.rootfs_dir.get("ROOTFS_DIR")) + + for part in self.parts: + # get rootfs size from bitbake variable if it's not set in .ks file + if not part.size: + # and if rootfs name is specified for the partition + image_name = self.rootfs_dir.get(part.rootfs_dir) + if image_name and os.path.sep not in image_name: + # Bitbake variable ROOTFS_SIZE is calculated in + # Image._get_rootfs_size method from meta/lib/oe/image.py + # using IMAGE_ROOTFS_SIZE, IMAGE_ROOTFS_ALIGNMENT, + # IMAGE_OVERHEAD_FACTOR and IMAGE_ROOTFS_EXTRA_SPACE + rsize_bb = get_bitbake_var('ROOTFS_SIZE', image_name) + if rsize_bb: + part.size = int(round(float(rsize_bb))) + + self._image.prepare(self) + + if fstab_path: + shutil.move(fstab_path + ".orig", fstab_path) + + self._image.layout_partitions() + self._image.create() + + def assemble(self): + """ + Assemble partitions into disk image + """ + self._image.assemble() + + def finalize(self): + """ + Finalize the disk image. + + For example, prepare the image to be bootable by e.g. + creating and installing a bootloader configuration. + """ + source_plugin = self.ks.bootloader.source + disk_name = self.parts[0].disk + if source_plugin: + plugin = PluginMgr.get_plugins('source')[source_plugin] + plugin.do_install_disk(self._image, disk_name, self, self.workdir, + self.oe_builddir, self.bootimg_dir, + self.kernel_dir, self.native_sysroot) + + full_path = self._image.path + # Generate .bmap + if self.bmap: + logger.debug("Generating bmap file for %s", disk_name) + exec_native_cmd("bmaptool create %s -o %s.bmap" % (full_path, full_path), + self.native_sysroot) + # Compress the image + if self.compressor: + logger.debug("Compressing disk %s with %s", disk_name, self.compressor) + exec_cmd("%s %s" % (self.compressor, full_path)) + + def print_info(self): + """ + Print the image(s) and artifacts used, for the user. + """ + msg = "The new image(s) can be found here:\n" + + extension = "direct" + {"gzip": ".gz", + "bzip2": ".bz2", + "xz": ".xz", + None: ""}.get(self.compressor) + full_path = self._full_path(self.outdir, self.parts[0].disk, extension) + msg += ' %s\n\n' % full_path + + msg += 'The following build artifacts were used to create the image(s):\n' + for part in self.parts: + if part.rootfs_dir is None: + continue + if part.mountpoint == '/': + suffix = ':' + else: + suffix = '["%s"]:' % (part.mountpoint or part.label) + msg += ' ROOTFS_DIR%s%s\n' % (suffix.ljust(20), part.rootfs_dir) + + msg += ' BOOTIMG_DIR: %s\n' % self.bootimg_dir + msg += ' KERNEL_DIR: %s\n' % self.kernel_dir + msg += ' NATIVE_SYSROOT: %s\n' % self.native_sysroot + + logger.info(msg) + + @property + def rootdev(self): + """ + Get root device name to use as a 'root' parameter + in kernel command line. 
+ + Assume partition order same as in wks + """ + for part in self.parts: + if part.mountpoint == "/": + if part.uuid: + return "PARTUUID=%s" % part.uuid + else: + suffix = 'p' if part.disk.startswith('mmcblk') else '' + return "/dev/%s%s%-d" % (part.disk, suffix, part.realnum) + + def cleanup(self): + if self._image: + self._image.cleanup() + + # Move results to the output dir + if not os.path.exists(self.outdir): + os.makedirs(self.outdir) + + for fname in os.listdir(self.workdir): + path = os.path.join(self.workdir, fname) + if os.path.isfile(path): + shutil.move(path, os.path.join(self.outdir, fname)) + + # remove work directory + shutil.rmtree(self.workdir, ignore_errors=True) + +# Overhead of the MBR partitioning scheme (just one sector) +MBR_OVERHEAD = 1 + +# Overhead of the GPT partitioning scheme +GPT_OVERHEAD = 34 + +# Size of a sector in bytes +SECTOR_SIZE = 512 + +class PartitionedImage(): + """ + Partitioned image in a file. + """ + + def __init__(self, path, ptable_format, partitions, native_sysroot=None): + self.path = path # Path to the image file + self.numpart = 0 # Number of allocated partitions + self.realpart = 0 # Number of partitions in the partition table + self.offset = 0 # Offset of next partition (in sectors) + self.min_size = 0 # Minimum required disk size to fit + # all partitions (in bytes) + self.ptable_format = ptable_format # Partition table format + # Disk system identifier + self.identifier = int.from_bytes(os.urandom(4), 'little') + + self.partitions = partitions + self.partimages = [] + # Size of a sector used in calculations + self.sector_size = SECTOR_SIZE + self.native_sysroot = native_sysroot + + # calculate the real partition number, accounting for partitions not + # in the partition table and logical partitions + realnum = 0 + for part in self.partitions: + if part.no_table: + part.realnum = 0 + else: + realnum += 1 + if self.ptable_format == 'msdos' and realnum > 3: + part.realnum = realnum + 1 + continue + part.realnum = realnum + + # generate parition UUIDs + for part in self.partitions: + if not part.uuid and part.use_uuid: + if self.ptable_format == 'gpt': + part.uuid = str(uuid.uuid4()) + else: # msdos partition table + part.uuid = '%08x-%02d' % (self.identifier, part.realnum) + + def prepare(self, imager): + """Prepare an image. Call prepare method of all image partitions.""" + for part in self.partitions: + # need to create the filesystems in order to get their + # sizes before we can add them and do the layout. + part.prepare(imager, imager.workdir, imager.oe_builddir, + imager.rootfs_dir, imager.bootimg_dir, + imager.kernel_dir, imager.native_sysroot) + + # Converting kB to sectors for parted + part.size_sec = part.disk_size * 1024 // self.sector_size + + def layout_partitions(self): + """ Layout the partitions, meaning calculate the position of every + partition on the disk. The 'ptable_format' parameter defines the + partition table format and may be "msdos". """ + + logger.debug("Assigning %s partitions to disks", self.ptable_format) + + # The number of primary and logical partitions. Extended partition and + # partitions not listed in the table are not included. 
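+        # Illustrative example (not part of the original patch): on an msdos
+        # disk with five partitions listed in the table, the first three are
+        # laid out as primaries (sda1..sda3), sda4 is reserved for the extended
+        # partition, and the remaining entries become logical partitions
+        # numbered from sda5 onwards, matching the realnum bump applied above.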
+ num_real_partitions = len([p for p in self.partitions if not p.no_table]) + + # Go through partitions in the order they are added in .ks file + for num in range(len(self.partitions)): + part = self.partitions[num] + + if self.ptable_format == 'msdos' and part.part_type: + # The --part-type can also be implemented for MBR partitions, + # in which case it would map to the 1-byte "partition type" + # filed at offset 3 of the partition entry. + raise WicError("setting custom partition type is not " \ + "implemented for msdos partitions") + + # Get the disk where the partition is located + self.numpart += 1 + if not part.no_table: + self.realpart += 1 + + if self.numpart == 1: + if self.ptable_format == "msdos": + overhead = MBR_OVERHEAD + elif self.ptable_format == "gpt": + overhead = GPT_OVERHEAD + + # Skip one sector required for the partitioning scheme overhead + self.offset += overhead + + if self.realpart > 3 and num_real_partitions > 4: + # Reserve a sector for EBR for every logical partition + # before alignment is performed. + if self.ptable_format == "msdos": + self.offset += 1 + + if part.align: + # If not first partition and we do have alignment set we need + # to align the partition. + # FIXME: This leaves a empty spaces to the disk. To fill the + # gaps we could enlargea the previous partition? + + # Calc how much the alignment is off. + align_sectors = self.offset % (part.align * 1024 // self.sector_size) + + if align_sectors: + # If partition is not aligned as required, we need + # to move forward to the next alignment point + align_sectors = (part.align * 1024 // self.sector_size) - align_sectors + + logger.debug("Realignment for %s%s with %s sectors, original" + " offset %s, target alignment is %sK.", + part.disk, self.numpart, align_sectors, + self.offset, part.align) + + # increase the offset so we actually start the partition on right alignment + self.offset += align_sectors + + part.start = self.offset + self.offset += part.size_sec + + part.type = 'primary' + if not part.no_table: + part.num = self.realpart + else: + part.num = 0 + + if self.ptable_format == "msdos": + # only count the partitions that are in partition table + if num_real_partitions > 4: + if self.realpart > 3: + part.type = 'logical' + part.num = self.realpart + 1 + + logger.debug("Assigned %s to %s%d, sectors range %d-%d size %d " + "sectors (%d bytes).", part.mountpoint, part.disk, + part.num, part.start, self.offset - 1, part.size_sec, + part.size_sec * self.sector_size) + + # Once all the partitions have been layed out, we can calculate the + # minumim disk size + self.min_size = self.offset + if self.ptable_format == "gpt": + self.min_size += GPT_OVERHEAD + + self.min_size *= self.sector_size + + def _create_partition(self, device, parttype, fstype, start, size): + """ Create a partition on an image described by the 'device' object. """ + + # Start is included to the size so we need to substract one from the end. 
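+        # Illustrative example (assumed values, not from the original patch):
+        # start=2048 and size=1024 sectors yields a partition spanning
+        # sectors 2048..3071 inclusive.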
+ end = start + size - 1 + logger.debug("Added '%s' partition, sectors %d-%d, size %d sectors", + parttype, start, end, size) + + cmd = "parted -s %s unit s mkpart %s" % (device, parttype) + if fstype: + cmd += " %s" % fstype + cmd += " %d %d" % (start, end) + + return exec_native_cmd(cmd, self.native_sysroot) + + def create(self): + logger.debug("Creating sparse file %s", self.path) + with open(self.path, 'w') as sparse: + os.ftruncate(sparse.fileno(), self.min_size) + + logger.debug("Initializing partition table for %s", self.path) + exec_native_cmd("parted -s %s mklabel %s" % + (self.path, self.ptable_format), self.native_sysroot) + + logger.debug("Set disk identifier %x", self.identifier) + with open(self.path, 'r+b') as img: + img.seek(0x1B8) + img.write(self.identifier.to_bytes(4, 'little')) + + logger.debug("Creating partitions") + + for part in self.partitions: + if part.num == 0: + continue + + if self.ptable_format == "msdos" and part.num == 5: + # Create an extended partition (note: extended + # partition is described in MBR and contains all + # logical partitions). The logical partitions save a + # sector for an EBR just before the start of a + # partition. The extended partition must start one + # sector before the start of the first logical + # partition. This way the first EBR is inside of the + # extended partition. Since the extended partitions + # starts a sector before the first logical partition, + # add a sector at the back, so that there is enough + # room for all logical partitions. + self._create_partition(self.path, "extended", + None, part.start - 1, + self.offset - part.start + 1) + + if part.fstype == "swap": + parted_fs_type = "linux-swap" + elif part.fstype == "vfat": + parted_fs_type = "fat32" + elif part.fstype == "msdos": + parted_fs_type = "fat16" + if not part.system_id: + part.system_id = '0x6' # FAT16 + else: + # Type for ext2/ext3/ext4/btrfs + parted_fs_type = "ext2" + + # Boot ROM of OMAP boards require vfat boot partition to have an + # even number of sectors. 
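+        # Illustrative example (assumed size, not from the original patch): a
+        # vfat /boot of 4097 sectors would be trimmed to 4096 sectors by the
+        # check below so the partition has an even sector count.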
+ if part.mountpoint == "/boot" and part.fstype in ["vfat", "msdos"] \ + and part.size_sec % 2: + logger.debug("Subtracting one sector from '%s' partition to " + "get even number of sectors for the partition", + part.mountpoint) + part.size_sec -= 1 + + self._create_partition(self.path, part.type, + parted_fs_type, part.start, part.size_sec) + + if part.part_type: + logger.debug("partition %d: set type UID to %s", + part.num, part.part_type) + exec_native_cmd("sgdisk --typecode=%d:%s %s" % \ + (part.num, part.part_type, + self.path), self.native_sysroot) + + if part.uuid and self.ptable_format == "gpt": + logger.debug("partition %d: set UUID to %s", + part.num, part.uuid) + exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \ + (part.num, part.uuid, self.path), + self.native_sysroot) + + if part.label and self.ptable_format == "gpt": + logger.debug("partition %d: set name to %s", + part.num, part.label) + exec_native_cmd("parted -s %s name %d %s" % \ + (self.path, part.num, part.label), + self.native_sysroot) + + if part.active: + flag_name = "legacy_boot" if self.ptable_format == 'gpt' else "boot" + logger.debug("Set '%s' flag for partition '%s' on disk '%s'", + flag_name, part.num, self.path) + exec_native_cmd("parted -s %s set %d %s on" % \ + (self.path, part.num, flag_name), + self.native_sysroot) + if part.system_id: + exec_native_cmd("sfdisk --part-type %s %s %s" % \ + (self.path, part.num, part.system_id), + self.native_sysroot) + + def cleanup(self): + # remove partition images + for image in set(self.partimages): + os.remove(image) + + def assemble(self): + logger.debug("Installing partitions") + + for part in self.partitions: + source = part.source_file + if source: + # install source_file contents into a partition + sparse_copy(source, self.path, part.start * self.sector_size) + + logger.debug("Installed %s in partition %d, sectors %d-%d, " + "size %d sectors", source, part.num, part.start, + part.start + part.size_sec - 1, part.size_sec) + + partimage = self.path + '.p%d' % part.num + os.rename(source, partimage) + self.partimages.append(partimage) diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct_plugin.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct_plugin.py deleted file mode 100644 index 8fe393080..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/plugins/imager/direct_plugin.py +++ /dev/null @@ -1,103 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# Copyright (c) 2013, Intel Corporation. -# All rights reserved. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
-# -# DESCRIPTION -# This implements the 'direct' imager plugin class for 'wic' -# -# AUTHORS -# Tom Zanussi -# - -from wic.utils import errors -from wic.conf import configmgr - -import wic.imager.direct as direct -from wic.pluginbase import ImagerPlugin - -class DirectPlugin(ImagerPlugin): - """ - Install a system into a file containing a partitioned disk image. - - An image file is formatted with a partition table, each partition - created from a rootfs or other OpenEmbedded build artifact and dd'ed - into the virtual disk. The disk image can subsequently be dd'ed onto - media and used on actual hardware. - """ - - name = 'direct' - - @classmethod - def __rootfs_dir_to_dict(cls, rootfs_dirs): - """ - Gets a string that contain 'connection=dir' splitted by - space and return a dict - """ - krootfs_dir = {} - for rootfs_dir in rootfs_dirs.split(' '): - key, val = rootfs_dir.split('=') - krootfs_dir[key] = val - - return krootfs_dir - - @classmethod - def do_create(cls, opts, *args): - """ - Create direct image, called from creator as 'direct' cmd - """ - if len(args) != 8: - raise errors.Usage("Extra arguments given") - - native_sysroot = args[0] - kernel_dir = args[1] - bootimg_dir = args[2] - rootfs_dir = args[3] - - creatoropts = configmgr.create - ksconf = args[4] - - image_output_dir = args[5] - oe_builddir = args[6] - compressor = args[7] - - krootfs_dir = cls.__rootfs_dir_to_dict(rootfs_dir) - - configmgr._ksconf = ksconf - - creator = direct.DirectImageCreator(oe_builddir, - image_output_dir, - krootfs_dir, - bootimg_dir, - kernel_dir, - native_sysroot, - compressor, - creatoropts, - opts.bmap) - - try: - creator.create() - creator.assemble() - creator.finalize() - creator.print_outimage_info() - - except errors.CreatorError: - raise - finally: - creator.cleanup() - - return 0 diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py index 4adb80bec..9879cb9fc 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py +++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-efi.py @@ -24,25 +24,28 @@ # Tom Zanussi # +import logging import os import shutil -from wic import msger +from wic import WicError +from wic.engine import get_custom_config from wic.pluginbase import SourcePlugin -from wic.utils.misc import get_custom_config -from wic.utils.oe.misc import exec_cmd, exec_native_cmd, get_bitbake_var, \ - BOOTDD_EXTRA_SPACE +from wic.utils.misc import (exec_cmd, exec_native_cmd, get_bitbake_var, + BOOTDD_EXTRA_SPACE) + +logger = logging.getLogger('wic') class BootimgEFIPlugin(SourcePlugin): """ Create EFI boot partition. - This plugin supports GRUB 2 and gummiboot bootloaders. + This plugin supports GRUB 2 and systemd-boot bootloaders. """ name = 'bootimg-efi' @classmethod - def do_configure_grubefi(cls, hdddir, creator, cr_workdir): + def do_configure_grubefi(cls, creator, cr_workdir): """ Create loader-specific (grub-efi) config """ @@ -53,11 +56,11 @@ class BootimgEFIPlugin(SourcePlugin): if custom_cfg: # Use a custom configuration for grub grubefi_conf = custom_cfg - msger.debug("Using custom configuration file " - "%s for grub.cfg" % configfile) + logger.debug("Using custom configuration file " + "%s for grub.cfg", configfile) else: - msger.error("configfile is specified but failed to " - "get it from %s." % configfile) + raise WicError("configfile is specified but failed to " + "get it from %s." 
% configfile) if not custom_cfg: # Create grub configuration using parameters from wks file @@ -75,14 +78,14 @@ class BootimgEFIPlugin(SourcePlugin): % (kernel, creator.rootdev, bootloader.append) grubefi_conf += "}\n" - msger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg" \ - % cr_workdir) + logger.debug("Writing grubefi config %s/hdd/boot/EFI/BOOT/grub.cfg", + cr_workdir) cfg = open("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, "w") cfg.write(grubefi_conf) cfg.close() @classmethod - def do_configure_gummiboot(cls, hdddir, creator, cr_workdir): + def do_configure_systemdboot(cls, hdddir, creator, cr_workdir, source_params): """ Create loader-specific systemd-boot/gummiboot config """ @@ -98,8 +101,21 @@ class BootimgEFIPlugin(SourcePlugin): loader_conf += "default boot\n" loader_conf += "timeout %d\n" % bootloader.timeout - msger.debug("Writing gummiboot config %s/hdd/boot/loader/loader.conf" \ - % cr_workdir) + initrd = source_params.get('initrd') + + if initrd: + # obviously we need to have a common common deploy var + bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") + if not bootimg_dir: + raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting") + + cp_cmd = "cp %s/%s %s" % (bootimg_dir, initrd, hdddir) + exec_cmd(cp_cmd, True) + else: + logger.debug("Ignoring missing initrd") + + logger.debug("Writing systemd-boot config " + "%s/hdd/boot/loader/loader.conf", cr_workdir) cfg = open("%s/hdd/boot/loader/loader.conf" % cr_workdir, "w") cfg.write(loader_conf) cfg.close() @@ -109,16 +125,16 @@ class BootimgEFIPlugin(SourcePlugin): if configfile: custom_cfg = get_custom_config(configfile) if custom_cfg: - # Use a custom configuration for gummiboot + # Use a custom configuration for systemd-boot boot_conf = custom_cfg - msger.debug("Using custom configuration file " - "%s for gummiboots's boot.conf" % configfile) + logger.debug("Using custom configuration file " + "%s for systemd-boots's boot.conf", configfile) else: - msger.error("configfile is specified but failed to " - "get it from %s." 
% configfile) + raise WicError("configfile is specified but failed to " + "get it from %s.", configfile) if not custom_cfg: - # Create gummiboot configuration using parameters from wks file + # Create systemd-boot configuration using parameters from wks file kernel = "/bzImage" boot_conf = "" @@ -127,8 +143,11 @@ class BootimgEFIPlugin(SourcePlugin): boot_conf += "options LABEL=Boot root=%s %s\n" % \ (creator.rootdev, bootloader.append) - msger.debug("Writing gummiboot config %s/hdd/boot/loader/entries/boot.conf" \ - % cr_workdir) + if initrd: + boot_conf += "initrd /%s\n" % initrd + + logger.debug("Writing systemd-boot config " + "%s/hdd/boot/loader/entries/boot.conf", cr_workdir) cfg = open("%s/hdd/boot/loader/entries/boot.conf" % cr_workdir, "w") cfg.write(boot_conf) cfg.close() @@ -148,14 +167,13 @@ class BootimgEFIPlugin(SourcePlugin): try: if source_params['loader'] == 'grub-efi': - cls.do_configure_grubefi(hdddir, creator, cr_workdir) - elif source_params['loader'] == 'gummiboot' \ - or source_params['loader'] == 'systemd-boot': - cls.do_configure_gummiboot(hdddir, creator, cr_workdir) + cls.do_configure_grubefi(creator, cr_workdir) + elif source_params['loader'] == 'systemd-boot': + cls.do_configure_systemdboot(hdddir, creator, cr_workdir, source_params) else: - msger.error("unrecognized bootimg-efi loader: %s" % source_params['loader']) + raise WicError("unrecognized bootimg-efi loader: %s" % source_params['loader']) except KeyError: - msger.error("bootimg-efi requires a loader, none specified") + raise WicError("bootimg-efi requires a loader, none specified") @classmethod @@ -167,12 +185,10 @@ class BootimgEFIPlugin(SourcePlugin): 'prepares' the partition to be incorporated into the image. In this case, prepare content for an EFI (grub) boot partition. 
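
For reference, the systemd-boot configuration assembled in the hunks above amounts to two small text files: loader/loader.conf and loader/entries/boot.conf. A hedged sketch of that string building; the timeout, options and optional initrd lines mirror the patch, while the title and linux lines are assumptions filled in for completeness:

    def make_loader_conf(timeout):
        conf = "default boot\n"
        conf += "timeout %d\n" % timeout
        return conf

    def make_boot_entry(rootdev, append, initrd=None, kernel="/bzImage"):
        entry = "title boot\n"            # assumed line
        entry += "linux %s\n" % kernel    # assumed line
        entry += "options LABEL=Boot root=%s %s\n" % (rootdev, append)
        if initrd:
            entry += "initrd /%s\n" % initrd
        return entry

    if __name__ == "__main__":
        print(make_loader_conf(5))
        print(make_boot_entry("/dev/sda2", "rootwait console=ttyS0", initrd="microcode.cpio"))
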
""" - if not bootimg_dir: - bootimg_dir = get_bitbake_var("HDDDIR") - if not bootimg_dir: - msger.error("Couldn't find HDDDIR, exiting\n") - # just so the result notes display it - creator.set_bootimg_dir(bootimg_dir) + if not kernel_dir: + kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") + if not kernel_dir: + raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting") staging_kernel_dir = kernel_dir @@ -182,24 +198,27 @@ class BootimgEFIPlugin(SourcePlugin): (staging_kernel_dir, hdddir) exec_cmd(install_cmd) + try: if source_params['loader'] == 'grub-efi': shutil.copyfile("%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir, "%s/grub.cfg" % cr_workdir) - cp_cmd = "cp %s/EFI/BOOT/* %s/EFI/BOOT" % (bootimg_dir, hdddir) - exec_cmd(cp_cmd, True) + for mod in [x for x in os.listdir(kernel_dir) if x.startswith("grub-efi-")]: + cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[9:]) + exec_cmd(cp_cmd, True) shutil.move("%s/grub.cfg" % cr_workdir, "%s/hdd/boot/EFI/BOOT/grub.cfg" % cr_workdir) - elif source_params['loader'] == 'gummiboot' \ - or source_params['loader'] == 'systemd-boot': - cp_cmd = "cp %s/EFI/BOOT/* %s/EFI/BOOT" % (bootimg_dir, hdddir) - exec_cmd(cp_cmd, True) + elif source_params['loader'] == 'systemd-boot': + for mod in [x for x in os.listdir(kernel_dir) if x.startswith("systemd-")]: + cp_cmd = "cp %s/%s %s/EFI/BOOT/%s" % (kernel_dir, mod, hdddir, mod[8:]) + exec_cmd(cp_cmd, True) else: - msger.error("unrecognized bootimg-efi loader: %s" % source_params['loader']) + raise WicError("unrecognized bootimg-efi loader: %s" % + source_params['loader']) except KeyError: - msger.error("bootimg-efi requires a loader, none specified") + raise WicError("bootimg-efi requires a loader, none specified") - startup = os.path.join(bootimg_dir, "startup.nsh") + startup = os.path.join(kernel_dir, "startup.nsh") if os.path.exists(startup): cp_cmd = "cp %s %s/" % (startup, hdddir) exec_cmd(cp_cmd, True) @@ -215,8 +234,8 @@ class BootimgEFIPlugin(SourcePlugin): blocks += extra_blocks - msger.debug("Added %d extra blocks to %s to get to %d total blocks" % \ - (extra_blocks, part.mountpoint, blocks)) + logger.debug("Added %d extra blocks to %s to get to %d total blocks", + extra_blocks, part.mountpoint, blocks) # dosfs image, created by mkdosfs bootimg = "%s/boot.img" % cr_workdir diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py index b76c1211a..13fddbd47 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py +++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-partition.py @@ -23,14 +23,18 @@ # Maciej Borzecki # +import logging import os import re -from wic import msger -from wic.pluginbase import SourcePlugin -from wic.utils.oe.misc import exec_cmd, get_bitbake_var from glob import glob +from wic import WicError +from wic.pluginbase import SourcePlugin +from wic.utils.misc import exec_cmd, get_bitbake_var + +logger = logging.getLogger('wic') + class BootimgPartitionPlugin(SourcePlugin): """ Create an image of boot partition, copying over files @@ -39,26 +43,6 @@ class BootimgPartitionPlugin(SourcePlugin): name = 'bootimg-partition' - @classmethod - def do_install_disk(cls, disk, disk_name, cr, workdir, oe_builddir, - bootimg_dir, kernel_dir, native_sysroot): - """ - Called after all partitions have been prepared and assembled into a - disk image. Do nothing. 
- """ - pass - - @classmethod - def do_configure_partition(cls, part, source_params, cr, cr_workdir, - oe_builddir, bootimg_dir, kernel_dir, - native_sysroot): - """ - Called before do_prepare_partition(). Possibly prepare - configuration files of some sort. - - """ - pass - @classmethod def do_prepare_partition(cls, part, source_params, cr, cr_workdir, oe_builddir, bootimg_dir, kernel_dir, @@ -74,19 +58,19 @@ class BootimgPartitionPlugin(SourcePlugin): install_cmd = "install -d %s" % hdddir exec_cmd(install_cmd) - if not bootimg_dir: - bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") - if not bootimg_dir: - msger.error("Couldn't find DEPLOY_DIR_IMAGE, exiting\n") + if not kernel_dir: + kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") + if not kernel_dir: + raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting") - msger.debug('Bootimg dir: %s' % bootimg_dir) + logger.debug('Kernel dir: %s', bootimg_dir) boot_files = get_bitbake_var("IMAGE_BOOT_FILES") if not boot_files: - msger.error('No boot files defined, IMAGE_BOOT_FILES unset') + raise WicError('No boot files defined, IMAGE_BOOT_FILES unset') - msger.debug('Boot files: %s' % boot_files) + logger.debug('Boot files: %s', boot_files) # list of tuples (src_name, dst_name) deploy_files = [] @@ -94,11 +78,11 @@ class BootimgPartitionPlugin(SourcePlugin): if ';' in src_entry: dst_entry = tuple(src_entry.split(';')) if not dst_entry[0] or not dst_entry[1]: - msger.error('Malformed boot file entry: %s' % (src_entry)) + raise WicError('Malformed boot file entry: %s' % src_entry) else: dst_entry = (src_entry, src_entry) - msger.debug('Destination entry: %r' % (dst_entry,)) + logger.debug('Destination entry: %r', dst_entry) deploy_files.append(dst_entry) for deploy_entry in deploy_files: @@ -114,27 +98,26 @@ class BootimgPartitionPlugin(SourcePlugin): os.path.join(dst, os.path.basename(name)) - srcs = glob(os.path.join(bootimg_dir, src)) + srcs = glob(os.path.join(kernel_dir, src)) - msger.debug('Globbed sources: %s' % (', '.join(srcs))) + logger.debug('Globbed sources: %s', ', '.join(srcs)) for entry in srcs: entry_dst_name = entry_name_fn(entry) install_task.append((entry, os.path.join(hdddir, entry_dst_name))) else: - install_task = [(os.path.join(bootimg_dir, src), + install_task = [(os.path.join(kernel_dir, src), os.path.join(hdddir, dst))] for task in install_task: src_path, dst_path = task - msger.debug('Install %s as %s' % (os.path.basename(src_path), - dst_path)) + logger.debug('Install %s as %s', + os.path.basename(src_path), dst_path) install_cmd = "install -m 0644 -D %s %s" \ % (src_path, dst_path) exec_cmd(install_cmd) - msger.debug('Prepare boot partition using rootfs in %s' % (hdddir)) + logger.debug('Prepare boot partition using rootfs in %s', hdddir) part.prepare_rootfs(cr_workdir, oe_builddir, hdddir, native_sysroot) - diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py index f204daa32..5890c1267 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py +++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/bootimg-pcbios.py @@ -24,15 +24,17 @@ # Tom Zanussi # +import logging import os -from wic.utils.errors import ImageError -from wic import msger +from wic import WicError +from wic.engine import get_custom_config from wic.utils import runner -from wic.utils.misc import get_custom_config from wic.pluginbase import SourcePlugin -from wic.utils.oe.misc import exec_cmd, 
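
The bootimg-partition hunk above turns IMAGE_BOOT_FILES into (source, destination) install pairs: entries containing ';' split into explicit src;dst pairs, bare entries map onto themselves, and wildcard sources are expanded with glob. A hedged re-creation of that parsing; the whitespace split and the wildcard destination rule are assumptions, the ';' handling mirrors the patch:

    import os
    from glob import glob

    def parse_boot_files(boot_files):
        """Turn 'u-boot.img boot.scr;boot/boot.scr' style entries into (src, dst) pairs."""
        deploy_files = []
        for src_entry in boot_files.split():      # assumed: entries are space separated
            if ';' in src_entry:
                dst_entry = tuple(src_entry.split(';'))
                if not dst_entry[0] or not dst_entry[1]:
                    raise ValueError('Malformed boot file entry: %s' % src_entry)
            else:
                dst_entry = (src_entry, src_entry)
            deploy_files.append(dst_entry)
        return deploy_files

    def expand_wildcards(kernel_dir, hdddir, src, dst):
        """Expand a wildcard source; each match keeps its basename under dst (assumed rule)."""
        if '*' in src or '?' in src:
            return [(path, os.path.join(hdddir, dst, os.path.basename(path)))
                    for path in glob(os.path.join(kernel_dir, src))]
        return [(os.path.join(kernel_dir, src), os.path.join(hdddir, dst))]
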
exec_native_cmd, \ - get_bitbake_var, BOOTDD_EXTRA_SPACE +from wic.utils.misc import (exec_cmd, exec_native_cmd, + get_bitbake_var, BOOTDD_EXTRA_SPACE) + +logger = logging.getLogger('wic') class BootimgPcbiosPlugin(SourcePlugin): """ @@ -41,6 +43,18 @@ class BootimgPcbiosPlugin(SourcePlugin): name = 'bootimg-pcbios' + @classmethod + def _get_bootimg_dir(cls, bootimg_dir, dirname): + """ + Check if dirname exists in default bootimg_dir or + in wic-tools STAGING_DIR. + """ + for result in (bootimg_dir, get_bitbake_var("STAGING_DATADIR", "wic-tools")): + if os.path.exists("%s/%s" % (result, dirname)): + return result + + raise WicError("Couldn't find correct bootimg_dir, exiting") + @classmethod def do_install_disk(cls, disk, disk_name, creator, workdir, oe_builddir, bootimg_dir, kernel_dir, native_sysroot): @@ -48,27 +62,27 @@ class BootimgPcbiosPlugin(SourcePlugin): Called after all partitions have been prepared and assembled into a disk image. In this case, we install the MBR. """ + bootimg_dir = cls._get_bootimg_dir(bootimg_dir, 'syslinux') mbrfile = "%s/syslinux/" % bootimg_dir if creator.ptable_format == 'msdos': mbrfile += "mbr.bin" elif creator.ptable_format == 'gpt': mbrfile += "gptmbr.bin" else: - msger.error("Unsupported partition table: %s" % creator.ptable_format) + raise WicError("Unsupported partition table: %s" % + creator.ptable_format) if not os.path.exists(mbrfile): - msger.error("Couldn't find %s. If using the -e option, do you " - "have the right MACHINE set in local.conf? If not, " - "is the bootimg_dir path correct?" % mbrfile) + raise WicError("Couldn't find %s. If using the -e option, do you " + "have the right MACHINE set in local.conf? If not, " + "is the bootimg_dir path correct?" % mbrfile) full_path = creator._full_path(workdir, disk_name, "direct") - msger.debug("Installing MBR on disk %s as %s with size %s bytes" \ - % (disk_name, full_path, disk['min_size'])) + logger.debug("Installing MBR on disk %s as %s with size %s bytes", + disk_name, full_path, disk.min_size) - rcode = runner.show(['dd', 'if=%s' % mbrfile, - 'of=%s' % full_path, 'conv=notrunc']) - if rcode != 0: - raise ImageError("Unable to set MBR to %s" % full_path) + dd_cmd = "dd if=%s of=%s conv=notrunc" % (mbrfile, full_path) + exec_cmd(dd_cmd, native_sysroot) @classmethod def do_configure_partition(cls, part, source_params, creator, cr_workdir, @@ -90,11 +104,11 @@ class BootimgPcbiosPlugin(SourcePlugin): if custom_cfg: # Use a custom configuration for grub syslinux_conf = custom_cfg - msger.debug("Using custom configuration file " - "%s for syslinux.cfg" % bootloader.configfile) + logger.debug("Using custom configuration file %s " + "for syslinux.cfg", bootloader.configfile) else: - msger.error("configfile is specified but failed to " - "get it from %s." % bootloader.configfile) + raise WicError("configfile is specified but failed to " + "get it from %s." % bootloader.configfile) if not custom_cfg: # Create syslinux configuration using parameters from wks file @@ -122,8 +136,8 @@ class BootimgPcbiosPlugin(SourcePlugin): syslinux_conf += "APPEND label=boot root=%s %s\n" % \ (creator.rootdev, bootloader.append) - msger.debug("Writing syslinux config %s/hdd/boot/syslinux.cfg" \ - % cr_workdir) + logger.debug("Writing syslinux config %s/hdd/boot/syslinux.cfg", + cr_workdir) cfg = open("%s/hdd/boot/syslinux.cfg" % cr_workdir, "w") cfg.write(syslinux_conf) cfg.close() @@ -137,33 +151,25 @@ class BootimgPcbiosPlugin(SourcePlugin): 'prepares' the partition to be incorporated into the image. 
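
The MBR installation in the bootimg-pcbios hunk above now shells out to "dd if=mbrfile of=image conv=notrunc" through exec_cmd(). Expressed in plain Python, the effect is simply an in-place overwrite of the first bytes of the disk image without truncating it (syslinux's mbr.bin is typically 440 bytes, so the partition table and disk signature written earlier are left alone). Illustrative sketch only; the plugin itself keeps using dd from the native sysroot:

    def install_mbr(mbrfile, disk_image):
        """Overwrite the first len(mbr) bytes of disk_image without truncating the file."""
        with open(mbrfile, 'rb') as src:
            mbr = src.read()
        with open(disk_image, 'r+b') as dst:   # 'r+b' keeps the rest of the image intact
            dst.seek(0)
            dst.write(mbr)
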
In this case, prepare content for legacy bios boot partition. """ - def _has_syslinux(dirname): - if dirname: - syslinux = "%s/syslinux" % dirname - if os.path.exists(syslinux): - return True - return False - - if not _has_syslinux(bootimg_dir): - bootimg_dir = get_bitbake_var("STAGING_DATADIR") - if not bootimg_dir: - msger.error("Couldn't find STAGING_DATADIR, exiting\n") - if not _has_syslinux(bootimg_dir): - msger.error("Please build syslinux first\n") - # just so the result notes display it - creator.set_bootimg_dir(bootimg_dir) + bootimg_dir = cls._get_bootimg_dir(bootimg_dir, 'syslinux') staging_kernel_dir = kernel_dir hdddir = "%s/hdd/boot" % cr_workdir - install_cmd = "install -m 0644 %s/bzImage %s/vmlinuz" \ - % (staging_kernel_dir, hdddir) - exec_cmd(install_cmd) + cmds = ("install -m 0644 %s/bzImage %s/vmlinuz" % + (staging_kernel_dir, hdddir), + "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" % + (bootimg_dir, hdddir), + "install -m 0644 %s/syslinux/vesamenu.c32 %s/vesamenu.c32" % + (bootimg_dir, hdddir), + "install -m 444 %s/syslinux/libcom32.c32 %s/libcom32.c32" % + (bootimg_dir, hdddir), + "install -m 444 %s/syslinux/libutil.c32 %s/libutil.c32" % + (bootimg_dir, hdddir)) - install_cmd = "install -m 444 %s/syslinux/ldlinux.sys %s/ldlinux.sys" \ - % (bootimg_dir, hdddir) - exec_cmd(install_cmd) + for install_cmd in cmds: + exec_cmd(install_cmd) du_cmd = "du -bks %s" % hdddir out = exec_cmd(du_cmd) @@ -176,8 +182,8 @@ class BootimgPcbiosPlugin(SourcePlugin): blocks += extra_blocks - msger.debug("Added %d extra blocks to %s to get to %d total blocks" % \ - (extra_blocks, part.mountpoint, blocks)) + logger.debug("Added %d extra blocks to %s to get to %d total blocks", + extra_blocks, part.mountpoint, blocks) # dosfs image, created by mkdosfs bootimg = "%s/boot.img" % cr_workdir @@ -198,7 +204,5 @@ class BootimgPcbiosPlugin(SourcePlugin): out = exec_cmd(du_cmd) bootimg_size = out.split()[0] - part.size = int(out.split()[0]) + part.size = int(bootimg_size) part.source_file = bootimg - - diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/fsimage.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/fsimage.py deleted file mode 100644 index f894e8936..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/fsimage.py +++ /dev/null @@ -1,73 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# - -import os - -from wic import msger -from wic.pluginbase import SourcePlugin -from wic.utils.oe.misc import get_bitbake_var - -class FSImagePlugin(SourcePlugin): - """ - Add an already existing filesystem image to the partition layout. 
- """ - - name = 'fsimage' - - @classmethod - def do_install_disk(cls, disk, disk_name, cr, workdir, oe_builddir, - bootimg_dir, kernel_dir, native_sysroot): - """ - Called after all partitions have been prepared and assembled into a - disk image. Do nothing. - """ - pass - - @classmethod - def do_configure_partition(cls, part, source_params, cr, cr_workdir, - oe_builddir, bootimg_dir, kernel_dir, - native_sysroot): - """ - Called before do_prepare_partition(). Possibly prepare - configuration files of some sort. - """ - pass - - @classmethod - def do_prepare_partition(cls, part, source_params, cr, cr_workdir, - oe_builddir, bootimg_dir, kernel_dir, - rootfs_dir, native_sysroot): - """ - Called to do the actual content population for a partition i.e. it - 'prepares' the partition to be incorporated into the image. - """ - if not bootimg_dir: - bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") - if not bootimg_dir: - msger.error("Couldn't find DEPLOY_DIR_IMAGE, exiting\n") - - msger.debug('Bootimg dir: %s' % bootimg_dir) - - if 'file' not in source_params: - msger.error("No file specified\n") - return - - src = os.path.join(bootimg_dir, source_params['file']) - - - msger.debug('Preparing partition using image %s' % (src)) - part.prepare_rootfs_from_fs_image(cr_workdir, src, "") diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py index 3858fd439..1ceba62be 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py +++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/isoimage-isohybrid.py @@ -20,15 +20,18 @@ # AUTHORS # Mihaly Varga +import glob +import logging import os import re import shutil -import glob -from wic import msger +from wic import WicError +from wic.engine import get_custom_config from wic.pluginbase import SourcePlugin -from wic.utils.misc import get_custom_config -from wic.utils.oe.misc import exec_cmd, exec_native_cmd, get_bitbake_var +from wic.utils.misc import exec_cmd, exec_native_cmd, get_bitbake_var + +logger = logging.getLogger('wic') class IsoImagePlugin(SourcePlugin): """ @@ -85,8 +88,9 @@ class IsoImagePlugin(SourcePlugin): syslinux_conf += "APPEND initrd=/initrd LABEL=boot %s\n" \ % bootloader.append - msger.debug("Writing syslinux config %s/ISO/isolinux/isolinux.cfg" \ - % cr_workdir) + logger.debug("Writing syslinux config %s/ISO/isolinux/isolinux.cfg", + cr_workdir) + with open("%s/ISO/isolinux/isolinux.cfg" % cr_workdir, "w") as cfg: cfg.write(syslinux_conf) @@ -99,11 +103,11 @@ class IsoImagePlugin(SourcePlugin): if configfile: grubefi_conf = get_custom_config(configfile) if grubefi_conf: - msger.debug("Using custom configuration file " - "%s for grub.cfg" % configfile) + logger.debug("Using custom configuration file %s for grub.cfg", + configfile) else: - msger.error("configfile is specified but failed to " - "get it from %s." 
% configfile) + raise WicError("configfile is specified " + "but failed to get it from %s", configfile) else: splash = os.path.join(cr_workdir, "EFI/boot/splash.jpg") if os.path.exists(splash): @@ -133,8 +137,8 @@ class IsoImagePlugin(SourcePlugin): if splashline: grubefi_conf += "%s\n" % splashline - msger.debug("Writing grubefi config %s/EFI/BOOT/grub.cfg" \ - % cr_workdir) + logger.debug("Writing grubefi config %s/EFI/BOOT/grub.cfg", cr_workdir) + with open("%s/EFI/BOOT/grub.cfg" % cr_workdir, "w") as cfg: cfg.write(grubefi_conf) @@ -144,25 +148,25 @@ class IsoImagePlugin(SourcePlugin): Create path for initramfs image """ - initrd = get_bitbake_var("INITRD") + initrd = get_bitbake_var("INITRD_LIVE") or get_bitbake_var("INITRD") if not initrd: initrd_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") if not initrd_dir: - msger.error("Couldn't find DEPLOY_DIR_IMAGE, exiting.\n") + raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting.") image_name = get_bitbake_var("IMAGE_BASENAME") if not image_name: - msger.error("Couldn't find IMAGE_BASENAME, exiting.\n") + raise WicError("Couldn't find IMAGE_BASENAME, exiting.") image_type = get_bitbake_var("INITRAMFS_FSTYPES") if not image_type: - msger.error("Couldn't find INITRAMFS_FSTYPES, exiting.\n") + raise WicError("Couldn't find INITRAMFS_FSTYPES, exiting.") - machine_arch = get_bitbake_var("MACHINE_ARCH") - if not machine_arch: - msger.error("Couldn't find MACHINE_ARCH, exiting.\n") + target_arch = get_bitbake_var("TRANSLATED_TARGET_ARCH") + if not target_arch: + raise WicError("Couldn't find TRANSLATED_TARGET_ARCH, exiting.") - initrd = glob.glob('%s/%s*%s.%s' % (initrd_dir, image_name, machine_arch, image_type))[0] + initrd = glob.glob('%s/%s*%s.%s' % (initrd_dir, image_name, target_arch, image_type))[0] if not os.path.exists(initrd): # Create initrd from rootfs directory @@ -183,7 +187,7 @@ class IsoImagePlugin(SourcePlugin): os.symlink(os.readlink("%s/sbin/init" % rootfs_dir), \ "%s/init" % initrd_dir) else: - msger.error("Couldn't find or build initrd, exiting.\n") + raise WicError("Couldn't find or build initrd, exiting.") exec_cmd("cd %s && find . | cpio -o -H newc -R +0:+0 >./initrd.cpio " \ % initrd_dir, as_shell=True) @@ -193,55 +197,6 @@ class IsoImagePlugin(SourcePlugin): return initrd - @classmethod - def do_stage_partition(cls, part, source_params, creator, cr_workdir, - oe_builddir, bootimg_dir, kernel_dir, - native_sysroot): - """ - Special content staging called before do_prepare_partition(). - It cheks if all necessary tools are available, if not - tries to instal them. 
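
The _build_initramfs_path() change above now locates the initramfs artifact with a single glob over DEPLOY_DIR_IMAGE using IMAGE_BASENAME, TRANSLATED_TARGET_ARCH and INITRAMFS_FSTYPES. A small sketch of that lookup (values are illustrative; in the plugin they come from get_bitbake_var), with an explicit error where the patched code indexes [0] directly and would raise IndexError if nothing matches:

    import glob

    def find_initrd(deploy_dir, image_name, target_arch, image_type):
        """Return the first artifact matching <image_name>*<target_arch>.<image_type>."""
        pattern = '%s/%s*%s.%s' % (deploy_dir, image_name, target_arch, image_type)
        matches = glob.glob(pattern)
        if not matches:
            raise FileNotFoundError("no initrd matching %s" % pattern)
        return matches[0]

    # e.g. find_initrd("tmp/deploy/images/qemux86", "core-image-minimal", "i586", "cpio.gz")
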
- """ - # Make sure parted is available in native sysroot - if not os.path.isfile("%s/usr/sbin/parted" % native_sysroot): - msger.info("Building parted-native...\n") - exec_cmd("bitbake parted-native") - - # Make sure mkfs.ext2/3/4 is available in native sysroot - if not os.path.isfile("%s/sbin/mkfs.ext2" % native_sysroot): - msger.info("Building e2fsprogs-native...\n") - exec_cmd("bitbake e2fsprogs-native") - - # Make sure syslinux is available in sysroot and in native sysroot - syslinux_dir = get_bitbake_var("STAGING_DATADIR") - if not syslinux_dir: - msger.error("Couldn't find STAGING_DATADIR, exiting.\n") - if not os.path.exists("%s/syslinux" % syslinux_dir): - msger.info("Building syslinux...\n") - exec_cmd("bitbake syslinux") - if not os.path.exists("%s/syslinux" % syslinux_dir): - msger.error("Please build syslinux first\n") - - # Make sure syslinux is available in native sysroot - if not os.path.exists("%s/usr/bin/syslinux" % native_sysroot): - msger.info("Building syslinux-native...\n") - exec_cmd("bitbake syslinux-native") - - #Make sure mkisofs is available in native sysroot - if not os.path.isfile("%s/usr/bin/mkisofs" % native_sysroot): - msger.info("Building cdrtools-native...\n") - exec_cmd("bitbake cdrtools-native") - - # Make sure mkfs.vfat is available in native sysroot - if not os.path.isfile("%s/sbin/mkfs.vfat" % native_sysroot): - msger.info("Building dosfstools-native...\n") - exec_cmd("bitbake dosfstools-native") - - # Make sure mtools is available in native sysroot - if not os.path.isfile("%s/usr/bin/mcopy" % native_sysroot): - msger.info("Building mtools-native...\n") - exec_cmd("bitbake mtools-native") - @classmethod def do_configure_partition(cls, part, source_params, creator, cr_workdir, oe_builddir, bootimg_dir, kernel_dir, @@ -258,11 +213,11 @@ class IsoImagePlugin(SourcePlugin): exec_cmd(install_cmd) # Overwrite the name of the created image - msger.debug("%s" % source_params) + logger.debug(source_params) if 'image_name' in source_params and \ source_params['image_name'].strip(): creator.name = source_params['image_name'].strip() - msger.debug("The name of the image is: %s" % creator.name) + logger.debug("The name of the image is: %s", creator.name) @classmethod def do_prepare_partition(cls, part, source_params, creator, cr_workdir, @@ -278,7 +233,7 @@ class IsoImagePlugin(SourcePlugin): if part.rootfs_dir is None: if not 'ROOTFS_DIR' in rootfs_dir: - msger.error("Couldn't find --rootfs-dir, exiting.\n") + raise WicError("Couldn't find --rootfs-dir, exiting.") rootfs_dir = rootfs_dir['ROOTFS_DIR'] else: if part.rootfs_dir in rootfs_dir: @@ -286,24 +241,21 @@ class IsoImagePlugin(SourcePlugin): elif part.rootfs_dir: rootfs_dir = part.rootfs_dir else: - msg = "Couldn't find --rootfs-dir=%s connection " - msg += "or it is not a valid path, exiting.\n" - msger.error(msg % part.rootfs_dir) + raise WicError("Couldn't find --rootfs-dir=%s connection " + "or it is not a valid path, exiting." 
% + part.rootfs_dir) if not os.path.isdir(rootfs_dir): rootfs_dir = get_bitbake_var("IMAGE_ROOTFS") if not os.path.isdir(rootfs_dir): - msger.error("Couldn't find IMAGE_ROOTFS, exiting.\n") + raise WicError("Couldn't find IMAGE_ROOTFS, exiting.") part.rootfs_dir = rootfs_dir # Prepare rootfs.img - hdd_dir = get_bitbake_var("HDDDIR") + deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") img_iso_dir = get_bitbake_var("ISODIR") - - rootfs_img = "%s/rootfs.img" % hdd_dir - if not os.path.isfile(rootfs_img): - rootfs_img = "%s/rootfs.img" % img_iso_dir + rootfs_img = "%s/rootfs.img" % img_iso_dir if not os.path.isfile(rootfs_img): # check if rootfs.img is in deploydir deploy_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") @@ -331,15 +283,22 @@ class IsoImagePlugin(SourcePlugin): if os.path.isfile(part.source_file): os.remove(part.source_file) - # Prepare initial ramdisk - initrd = "%s/initrd" % hdd_dir - if not os.path.isfile(initrd): - initrd = "%s/initrd" % img_iso_dir - if not os.path.isfile(initrd): - initrd = cls._build_initramfs_path(rootfs_dir, cr_workdir) - - install_cmd = "install -m 0644 %s %s/initrd" \ - % (initrd, isodir) + # Support using a different initrd other than default + if source_params.get('initrd'): + initrd = source_params['initrd'] + if not deploy_dir: + raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting") + cp_cmd = "cp %s/%s %s" % (deploy_dir, initrd, cr_workdir) + exec_cmd(cp_cmd) + else: + # Prepare initial ramdisk + initrd = "%s/initrd" % deploy_dir + if not os.path.isfile(initrd): + initrd = "%s/initrd" % img_iso_dir + if not os.path.isfile(initrd): + initrd = cls._build_initramfs_path(rootfs_dir, cr_workdir) + + install_cmd = "install -m 0644 %s %s/initrd" % (initrd, isodir) exec_cmd(install_cmd) # Remove the temporary file created by _build_initramfs_path function @@ -348,7 +307,7 @@ class IsoImagePlugin(SourcePlugin): # Install bzImage install_cmd = "install -m 0644 %s/bzImage %s/bzImage" % \ - (kernel_dir, isodir) + (kernel_dir, isodir) exec_cmd(install_cmd) #Create bootloader for efi boot @@ -371,7 +330,7 @@ class IsoImagePlugin(SourcePlugin): # didn't contains it target_arch = get_bitbake_var("TARGET_SYS") if not target_arch: - msger.error("Coludn't find target architecture\n") + raise WicError("Coludn't find target architecture") if re.match("x86_64", target_arch): grub_target = 'x86_64-efi' @@ -380,21 +339,18 @@ class IsoImagePlugin(SourcePlugin): grub_target = 'i386-efi' grub_image = "bootia32.efi" else: - msger.error("grub-efi is incompatible with target %s\n" \ - % target_arch) + raise WicError("grub-efi is incompatible with target %s" % + target_arch) if not os.path.isfile("%s/EFI/BOOT/%s" \ % (bootimg_dir, grub_image)): - grub_path = get_bitbake_var("STAGING_LIBDIR") + grub_path = get_bitbake_var("STAGING_LIBDIR", "wic-tools") if not grub_path: - msger.error("Couldn't find STAGING_LIBDIR, exiting.\n") + raise WicError("Couldn't find STAGING_LIBDIR, exiting.") grub_core = "%s/grub/%s" % (grub_path, grub_target) if not os.path.exists(grub_core): - msger.info("Building grub-efi...\n") - exec_cmd("bitbake grub-efi") - if not os.path.exists(grub_core): - msger.error("Please build grub-efi first\n") + raise WicError("Please build grub-efi first") grub_cmd = "grub-mkimage -p '/EFI/BOOT' " grub_cmd += "-d %s " % grub_core @@ -410,11 +366,10 @@ class IsoImagePlugin(SourcePlugin): exec_native_cmd(grub_cmd, native_sysroot) else: - # TODO: insert gummiboot stuff - msger.error("unrecognized bootimg-efi loader: %s" \ - % source_params['loader']) + raise 
WicError("unrecognized bootimg-efi loader: %s" % + source_params['loader']) except KeyError: - msger.error("bootimg-efi requires a loader, none specified") + raise WicError("bootimg-efi requires a loader, none specified") if os.path.exists("%s/EFI/BOOT" % isodir): shutil.rmtree("%s/EFI/BOOT" % isodir) @@ -437,9 +392,8 @@ class IsoImagePlugin(SourcePlugin): blocks = int(out.split()[0]) # Add some extra space for file system overhead blocks += 100 - msg = "Added 100 extra blocks to %s to get to %d total blocks" \ - % (part.mountpoint, blocks) - msger.debug(msg) + logger.debug("Added 100 extra blocks to %s to get to %d " + "total blocks", part.mountpoint, blocks) # dosfs image for EFI boot bootimg = "%s/efi.img" % isodir @@ -459,9 +413,9 @@ class IsoImagePlugin(SourcePlugin): exec_cmd(chmod_cmd) # Prepare files for legacy boot - syslinux_dir = get_bitbake_var("STAGING_DATADIR") + syslinux_dir = get_bitbake_var("STAGING_DATADIR", "wic-tools") if not syslinux_dir: - msger.error("Couldn't find STAGING_DATADIR, exiting.\n") + raise WicError("Couldn't find STAGING_DATADIR, exiting.") if os.path.exists("%s/isolinux" % isodir): shutil.rmtree("%s/isolinux" % isodir) @@ -501,7 +455,7 @@ class IsoImagePlugin(SourcePlugin): mkisofs_cmd += "-eltorito-platform 0xEF -eltorito-boot %s " % efi_img mkisofs_cmd += "-no-emul-boot %s " % isodir - msger.debug("running command: %s" % mkisofs_cmd) + logger.debug("running command: %s", mkisofs_cmd) exec_native_cmd(mkisofs_cmd, native_sysroot) shutil.rmtree(isodir) @@ -522,23 +476,19 @@ class IsoImagePlugin(SourcePlugin): utility for booting via BIOS from disk storage devices. """ + iso_img = "%s.p1" % disk.path full_path = creator._full_path(workdir, disk_name, "direct") - iso_img = "%s.p1" % full_path full_path_iso = creator._full_path(workdir, disk_name, "iso") isohybrid_cmd = "isohybrid -u %s" % iso_img - msger.debug("running command: %s" % \ - isohybrid_cmd) + logger.debug("running command: %s", isohybrid_cmd) exec_native_cmd(isohybrid_cmd, native_sysroot) # Replace the image created by direct plugin with the one created by # mkisofs command. This is necessary because the iso image created by # mkisofs has a very specific MBR is system area of the ISO image, and # direct plugin adds and configures an another MBR. - msger.debug("Replaceing the image created by direct plugin\n") - os.remove(full_path) + logger.debug("Replaceing the image created by direct plugin\n") + os.remove(disk.path) shutil.copy2(iso_img, full_path_iso) shutil.copy2(full_path_iso, full_path) - - # Remove temporary ISO file - os.remove(iso_img) diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py index 618dd4475..e1c4f5e7d 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py +++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rawcopy.py @@ -15,13 +15,16 @@ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # +import logging import os -from wic import msger +from wic import WicError from wic.pluginbase import SourcePlugin -from wic.utils.oe.misc import exec_cmd, get_bitbake_var +from wic.utils.misc import exec_cmd, get_bitbake_var from wic.filemap import sparse_copy +logger = logging.getLogger('wic') + class RawCopyPlugin(SourcePlugin): """ Populate partition content from raw image file. 
@@ -29,25 +32,6 @@ class RawCopyPlugin(SourcePlugin): name = 'rawcopy' - @classmethod - def do_install_disk(cls, disk, disk_name, cr, workdir, oe_builddir, - bootimg_dir, kernel_dir, native_sysroot): - """ - Called after all partitions have been prepared and assembled into a - disk image. Do nothing. - """ - pass - - @classmethod - def do_configure_partition(cls, part, source_params, cr, cr_workdir, - oe_builddir, bootimg_dir, kernel_dir, - native_sysroot): - """ - Called before do_prepare_partition(). Possibly prepare - configuration files of some sort. - """ - pass - @classmethod def do_prepare_partition(cls, part, source_params, cr, cr_workdir, oe_builddir, bootimg_dir, kernel_dir, @@ -56,18 +40,17 @@ class RawCopyPlugin(SourcePlugin): Called to do the actual content population for a partition i.e. it 'prepares' the partition to be incorporated into the image. """ - if not bootimg_dir: - bootimg_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") - if not bootimg_dir: - msger.error("Couldn't find DEPLOY_DIR_IMAGE, exiting\n") + if not kernel_dir: + kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE") + if not kernel_dir: + raise WicError("Couldn't find DEPLOY_DIR_IMAGE, exiting") - msger.debug('Bootimg dir: %s' % bootimg_dir) + logger.debug('Kernel dir: %s', kernel_dir) if 'file' not in source_params: - msger.error("No file specified\n") - return + raise WicError("No file specified") - src = os.path.join(bootimg_dir, source_params['file']) + src = os.path.join(kernel_dir, source_params['file']) dst = os.path.join(cr_workdir, "%s.%s" % (source_params['file'], part.lineno)) if 'skip' in source_params: @@ -84,4 +67,3 @@ class RawCopyPlugin(SourcePlugin): part.size = filesize part.source_file = dst - diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py index 425da8b22..f2e2ca8a2 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py +++ b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs.py @@ -25,11 +25,17 @@ # Joao Henrique Ferreira de Freitas # +import logging import os +import shutil -from wic import msger +from oe.path import copyhardlinktree + +from wic import WicError from wic.pluginbase import SourcePlugin -from wic.utils.oe.misc import get_bitbake_var +from wic.utils.misc import get_bitbake_var, exec_cmd + +logger = logging.getLogger('wic') class RootfsPlugin(SourcePlugin): """ @@ -45,10 +51,9 @@ class RootfsPlugin(SourcePlugin): image_rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", rootfs_dir) if not os.path.isdir(image_rootfs_dir): - msg = "No valid artifact IMAGE_ROOTFS from image named" - msg += " %s has been found at %s, exiting.\n" % \ - (rootfs_dir, image_rootfs_dir) - msger.error(msg) + raise WicError("No valid artifact IMAGE_ROOTFS from image " + "named %s has been found at %s, exiting." 
% + (rootfs_dir, image_rootfs_dir)) return image_rootfs_dir @@ -63,8 +68,8 @@ class RootfsPlugin(SourcePlugin): """ if part.rootfs_dir is None: if not 'ROOTFS_DIR' in krootfs_dir: - msg = "Couldn't find --rootfs-dir, exiting" - msger.error(msg) + raise WicError("Couldn't find --rootfs-dir, exiting") + rootfs_dir = krootfs_dir['ROOTFS_DIR'] else: if part.rootfs_dir in krootfs_dir: @@ -72,12 +77,49 @@ class RootfsPlugin(SourcePlugin): elif part.rootfs_dir: rootfs_dir = part.rootfs_dir else: - msg = "Couldn't find --rootfs-dir=%s connection" - msg += " or it is not a valid path, exiting" - msger.error(msg % part.rootfs_dir) + raise WicError("Couldn't find --rootfs-dir=%s connection or " + "it is not a valid path, exiting" % part.rootfs_dir) real_rootfs_dir = cls.__get_rootfs_dir(rootfs_dir) - part.rootfs_dir = real_rootfs_dir - part.prepare_rootfs(cr_workdir, oe_builddir, real_rootfs_dir, native_sysroot) + # Handle excluded paths. + if part.exclude_path is not None: + # We need a new rootfs directory we can delete files from. Copy to + # workdir. + new_rootfs = os.path.realpath(os.path.join(cr_workdir, "rootfs")) + + if os.path.lexists(new_rootfs): + shutil.rmtree(os.path.join(new_rootfs)) + + copyhardlinktree(real_rootfs_dir, new_rootfs) + real_rootfs_dir = new_rootfs + + for orig_path in part.exclude_path: + path = orig_path + if os.path.isabs(path): + msger.error("Must be relative: --exclude-path=%s" % orig_path) + + full_path = os.path.realpath(os.path.join(new_rootfs, path)) + + # Disallow climbing outside of parent directory using '..', + # because doing so could be quite disastrous (we will delete the + # directory). + if not full_path.startswith(new_rootfs): + msger.error("'%s' points to a path outside the rootfs" % orig_path) + + if path.endswith(os.sep): + # Delete content only. + for entry in os.listdir(full_path): + full_entry = os.path.join(full_path, entry) + if os.path.isdir(full_entry) and not os.path.islink(full_entry): + shutil.rmtree(full_entry) + else: + os.remove(full_entry) + else: + # Delete whole directory. + shutil.rmtree(full_path) + + part.rootfs_dir = real_rootfs_dir + part.prepare_rootfs(cr_workdir, oe_builddir, + real_rootfs_dir, native_sysroot) diff --git a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs_pcbios_ext.py b/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs_pcbios_ext.py deleted file mode 100644 index 3d60e6f0f..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/plugins/source/rootfs_pcbios_ext.py +++ /dev/null @@ -1,177 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# This program is free software; you can distribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for mo details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
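
The new --exclude-path handling in the rootfs plugin above copies the rootfs into the workdir with copyhardlinktree and then deletes the excluded paths, with two guards: the path must be relative, and its realpath must stay inside the copied rootfs; a trailing separator means "empty the directory" rather than "remove it". Note that the hunk still calls msger.error() even though msger is no longer imported there, so the sketch below raises exceptions instead; otherwise it restates the same logic:

    import os
    import shutil

    def apply_exclude_paths(new_rootfs, exclude_paths):
        for orig_path in exclude_paths:
            if os.path.isabs(orig_path):
                raise ValueError("Must be relative: --exclude-path=%s" % orig_path)

            full_path = os.path.realpath(os.path.join(new_rootfs, orig_path))
            # Refuse anything that escapes the rootfs copy via '..' or symlinks,
            # since the next step deletes it.
            if not full_path.startswith(new_rootfs):
                raise ValueError("'%s' points to a path outside the rootfs" % orig_path)

            if orig_path.endswith(os.sep):
                # Delete content only, keep the directory itself.
                for entry in os.listdir(full_path):
                    full_entry = os.path.join(full_path, entry)
                    if os.path.isdir(full_entry) and not os.path.islink(full_entry):
                        shutil.rmtree(full_entry)
                    else:
                        os.remove(full_entry)
            else:
                # Delete the whole directory.
                shutil.rmtree(full_path)
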
-# -# AUTHOR -# Adrian Freihofer -# - -import os -from wic import msger -from wic.utils import syslinux -from wic.utils import runner -from wic.utils.oe import misc -from wic.utils.errors import ImageError -from wic.pluginbase import SourcePlugin - - -# pylint: disable=no-init -class RootfsPlugin(SourcePlugin): - """ - Create root partition and install syslinux bootloader - - This plugin creates a disk image containing a bootable root partition with - syslinux installed. The filesystem is ext2/3/4, no extra boot partition is - required. - - Example kickstart file: - part / --source rootfs-pcbios-ext --ondisk sda --fstype=ext4 --label rootfs --align 1024 - bootloader --source rootfs-pcbios-ext --timeout=0 --append="rootwait rootfstype=ext4" - - The first line generates a root file system including a syslinux.cfg file - The "--source rootfs-pcbios-ext" in the second line triggers the installation - of ldlinux.sys into the image. - """ - - name = 'rootfs-pcbios-ext' - - @staticmethod - def _get_rootfs_dir(rootfs_dir): - """ - Find rootfs pseudo dir - - If rootfs_dir is a directory consider it as rootfs directory. - Otherwise ask bitbake about the IMAGE_ROOTFS directory. - """ - if os.path.isdir(rootfs_dir): - return rootfs_dir - - image_rootfs_dir = misc.get_bitbake_var("IMAGE_ROOTFS", rootfs_dir) - if not os.path.isdir(image_rootfs_dir): - msg = "No valid artifact IMAGE_ROOTFS from image named" - msg += " %s has been found at %s, exiting.\n" % \ - (rootfs_dir, image_rootfs_dir) - msger.error(msg) - - return image_rootfs_dir - - # pylint: disable=unused-argument - @classmethod - def do_configure_partition(cls, part, source_params, image_creator, - image_creator_workdir, oe_builddir, bootimg_dir, - kernel_dir, native_sysroot): - """ - Creates syslinux config in rootfs directory - - Called before do_prepare_partition() - """ - bootloader = image_creator.ks.bootloader - - syslinux_conf = "" - syslinux_conf += "PROMPT 0\n" - - syslinux_conf += "TIMEOUT " + str(bootloader.timeout) + "\n" - syslinux_conf += "ALLOWOPTIONS 1\n" - - # Derive SERIAL... line from from kernel boot parameters - syslinux_conf += syslinux.serial_console_form_kargs(options) + "\n" - - syslinux_conf += "DEFAULT linux\n" - syslinux_conf += "LABEL linux\n" - syslinux_conf += " KERNEL /boot/bzImage\n" - - syslinux_conf += " APPEND label=boot root=%s %s\n" % \ - (image_creator.rootdev, bootloader.append) - - syslinux_cfg = os.path.join(image_creator.rootfs_dir['ROOTFS_DIR'], "boot", "syslinux.cfg") - msger.debug("Writing syslinux config %s" % syslinux_cfg) - with open(syslinux_cfg, "w") as cfg: - cfg.write(syslinux_conf) - - @classmethod - def do_prepare_partition(cls, part, source_params, image_creator, - image_creator_workdir, oe_builddir, bootimg_dir, - kernel_dir, krootfs_dir, native_sysroot): - """ - Creates partition out of rootfs directory - - Prepare content for a rootfs partition i.e. create a partition - and fill it from a /rootfs dir. 
- Install syslinux bootloader into root partition image file - """ - def is_exe(exepath): - """Verify exepath is an executable file""" - return os.path.isfile(exepath) and os.access(exepath, os.X_OK) - - # Make sure syslinux-nomtools is available in native sysroot or fail - native_syslinux_nomtools = os.path.join(native_sysroot, "usr/bin/syslinux-nomtools") - if not is_exe(native_syslinux_nomtools): - msger.info("building syslinux-native...") - misc.exec_cmd("bitbake syslinux-native") - if not is_exe(native_syslinux_nomtools): - msger.error("Couldn't find syslinux-nomtools (%s), exiting\n" % - native_syslinux_nomtools) - - if part.rootfs is None: - if 'ROOTFS_DIR' not in krootfs_dir: - msger.error("Couldn't find --rootfs-dir, exiting") - rootfs_dir = krootfs_dir['ROOTFS_DIR'] - else: - if part.rootfs in krootfs_dir: - rootfs_dir = krootfs_dir[part.rootfs] - elif part.rootfs: - rootfs_dir = part.rootfs - else: - msg = "Couldn't find --rootfs-dir=%s connection" - msg += " or it is not a valid path, exiting" - msger.error(msg % part.rootfs) - - real_rootfs_dir = cls._get_rootfs_dir(rootfs_dir) - - part.rootfs_dir = real_rootfs_dir - part.prepare_rootfs(image_creator_workdir, oe_builddir, real_rootfs_dir, native_sysroot) - - # install syslinux into rootfs partition - syslinux_cmd = "syslinux-nomtools -d /boot -i %s" % part.source_file - misc.exec_native_cmd(syslinux_cmd, native_sysroot) - - @classmethod - def do_install_disk(cls, disk, disk_name, image_creator, workdir, oe_builddir, - bootimg_dir, kernel_dir, native_sysroot): - """ - Assemble partitions to disk image - - Called after all partitions have been prepared and assembled into a - disk image. In this case, we install the MBR. - """ - mbrfile = os.path.join(native_sysroot, "usr/share/syslinux/") - if image_creator.ptable_format == 'msdos': - mbrfile += "mbr.bin" - elif image_creator.ptable_format == 'gpt': - mbrfile += "gptmbr.bin" - else: - msger.error("Unsupported partition table: %s" % \ - image_creator.ptable_format) - - if not os.path.exists(mbrfile): - msger.error("Couldn't find %s. Has syslinux-native been baked?" % mbrfile) - - full_path = disk['disk'].device - msger.debug("Installing MBR on disk %s as %s with size %s bytes" \ - % (disk_name, full_path, disk['min_size'])) - - ret_code = runner.show(['dd', 'if=%s' % mbrfile, 'of=%s' % full_path, 'conv=notrunc']) - if ret_code != 0: - raise ImageError("Unable to set MBR to %s" % full_path) diff --git a/import-layers/yocto-poky/scripts/lib/wic/test b/import-layers/yocto-poky/scripts/lib/wic/test deleted file mode 100644 index 9daeafb98..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/test +++ /dev/null @@ -1 +0,0 @@ -test diff --git a/import-layers/yocto-poky/scripts/lib/wic/utils/errors.py b/import-layers/yocto-poky/scripts/lib/wic/utils/errors.py deleted file mode 100644 index d1b514dd9..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/utils/errors.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python -tt -# -# Copyright (c) 2007 Red Hat, Inc. -# Copyright (c) 2011 Intel, Inc. -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the Free -# Software Foundation; version 2 of the License -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -# for more details. 
-# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., 59 -# Temple Place - Suite 330, Boston, MA 02111-1307, USA. - -class WicError(Exception): - pass - -class CreatorError(WicError): - pass - -class Usage(WicError): - pass - -class ImageError(WicError): - pass diff --git a/import-layers/yocto-poky/scripts/lib/wic/utils/misc.py b/import-layers/yocto-poky/scripts/lib/wic/utils/misc.py index 1415ae906..37e0ad6a3 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/utils/misc.py +++ b/import-layers/yocto-poky/scripts/lib/wic/utils/misc.py @@ -1,95 +1,230 @@ -#!/usr/bin/env python -tt +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- # -# Copyright (c) 2010, 2011 Intel Inc. +# Copyright (c) 2013, Intel Corporation. +# All rights reserved. # -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the Free -# Software Foundation; version 2 of the License +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. # -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -# for more details. +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., 59 -# Temple Place - Suite 330, Boston, MA 02111-1307, USA. +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# DESCRIPTION +# This module provides a place to collect various wic-related utils +# for the OpenEmbedded Image Tools. +# +# AUTHORS +# Tom Zanussi +# +"""Miscellaneous functions.""" +import logging import os -import time -import wic.engine - -def build_name(kscfg, release=None, prefix=None, suffix=None): - """Construct and return an image name string. +import re + +from collections import defaultdict +from distutils import spawn + +from wic import WicError +from wic.utils import runner + +logger = logging.getLogger('wic') + +# executable -> recipe pairs for exec_native_cmd +NATIVE_RECIPES = {"bmaptool": "bmap-tools", + "grub-mkimage": "grub-efi", + "isohybrid": "syslinux", + "mcopy": "mtools", + "mkdosfs": "dosfstools", + "mkisofs": "cdrtools", + "mkfs.btrfs": "btrfs-tools", + "mkfs.ext2": "e2fsprogs", + "mkfs.ext3": "e2fsprogs", + "mkfs.ext4": "e2fsprogs", + "mkfs.vfat": "dosfstools", + "mksquashfs": "squashfs-tools", + "mkswap": "util-linux", + "mmd": "syslinux", + "parted": "parted", + "sfdisk": "util-linux", + "sgdisk": "gptfdisk", + "syslinux": "syslinux" + } + +def _exec_cmd(cmd_and_args, as_shell=False): + """ + Execute command, catching stderr, stdout - This is a utility function to help create sensible name and fslabel - strings. The name is constructed using the sans-prefix-and-extension - kickstart filename and the supplied prefix and suffix. 
+ Need to execute as_shell if the command uses wildcards + """ + logger.debug("_exec_cmd: %s", cmd_and_args) + args = cmd_and_args.split() + logger.debug(args) - kscfg -- a path to a kickstart file - release -- a replacement to suffix for image release - prefix -- a prefix to prepend to the name; defaults to None, which causes - no prefix to be used - suffix -- a suffix to append to the name; defaults to None, which causes - a YYYYMMDDHHMM suffix to be used + if as_shell: + ret, out = runner.runtool(cmd_and_args) + else: + ret, out = runner.runtool(args) + out = out.strip() + if ret != 0: + raise WicError("_exec_cmd: %s returned '%s' instead of 0\noutput: %s" % \ + (cmd_and_args, ret, out)) - Note, if maxlen is less then the len(suffix), you get to keep both pieces. + logger.debug("_exec_cmd: output for %s (rc = %d): %s", + cmd_and_args, ret, out) - """ - name = os.path.basename(kscfg) - idx = name.rfind('.') - if idx >= 0: - name = name[:idx] + return ret, out - if release is not None: - suffix = "" - if prefix is None: - prefix = "" - if suffix is None: - suffix = time.strftime("%Y%m%d%H%M") - if name.startswith(prefix): - name = name[len(prefix):] +def exec_cmd(cmd_and_args, as_shell=False): + """ + Execute command, return output + """ + return _exec_cmd(cmd_and_args, as_shell)[1] - prefix = "%s-" % prefix if prefix else "" - suffix = "-%s" % suffix if suffix else "" - ret = prefix + name + suffix +def exec_native_cmd(cmd_and_args, native_sysroot, pseudo=""): + """ + Execute native command, catching stderr, stdout - return ret + Need to execute as_shell if the command uses wildcards -def find_canned(scripts_path, file_name): + Always need to execute native commands as_shell """ - Find a file either by its path or by name in the canned files dir. - - Return None if not found + # The reason -1 is used is because there may be "export" commands. + args = cmd_and_args.split(';')[-1].split() + logger.debug(args) + + if pseudo: + cmd_and_args = pseudo + cmd_and_args + + wtools_sysroot = get_bitbake_var("RECIPE_SYSROOT_NATIVE", "wic-tools") + + native_paths = \ + "%s/sbin:%s/usr/sbin:%s/usr/bin:%s/sbin:%s/usr/sbin:%s/usr/bin" % \ + (wtools_sysroot, wtools_sysroot, wtools_sysroot, + native_sysroot, native_sysroot, native_sysroot) + native_cmd_and_args = "export PATH=%s:$PATH;%s" % \ + (native_paths, cmd_and_args) + logger.debug("exec_native_cmd: %s", native_cmd_and_args) + + # If the command isn't in the native sysroot say we failed. + if spawn.find_executable(args[0], native_paths): + ret, out = _exec_cmd(native_cmd_and_args, True) + else: + ret = 127 + out = "can't find native executable %s in %s" % (args[0], native_paths) + + prog = args[0] + # shell command-not-found + if ret == 127 \ + or (pseudo and ret == 1 and out == "Can't find '%s' in $PATH." % prog): + msg = "A native program %s required to build the image "\ + "was not found (see details above).\n\n" % prog + recipe = NATIVE_RECIPES.get(prog) + if recipe: + msg += "Please make sure wic-tools have %s-native in its DEPENDS, bake it with 'bitbake wic-tools' "\ + "and try again.\n" % recipe + else: + msg += "Wic failed to find a recipe to build native %s. 
Please "\ + "file a bug against wic.\n" % prog + raise WicError(msg) + + return ret, out + +BOOTDD_EXTRA_SPACE = 16384 + +class BitbakeVars(defaultdict): """ - if os.path.exists(file_name): - return file_name - - layers_canned_wks_dir = wic.engine.build_canned_image_list(scripts_path) - for canned_wks_dir in layers_canned_wks_dir: - for root, dirs, files in os.walk(canned_wks_dir): - for fname in files: - if fname == file_name: - fullpath = os.path.join(canned_wks_dir, fname) - return fullpath - -def get_custom_config(boot_file): + Container for Bitbake variables. """ - Get the custom configuration to be used for the bootloader. - - Return None if the file can't be found. + def __init__(self): + defaultdict.__init__(self, dict) + + # default_image and vars_dir attributes should be set from outside + self.default_image = None + self.vars_dir = None + + def _parse_line(self, line, image, matcher=re.compile(r"^([a-zA-Z0-9\-_+./~]+)=(.*)")): + """ + Parse one line from bitbake -e output or from .env file. + Put result key-value pair into the storage. + """ + if "=" not in line: + return + match = matcher.match(line) + if not match: + return + key, val = match.groups() + self[image][key] = val.strip('"') + + def get_var(self, var, image=None, cache=True): + """ + Get bitbake variable from 'bitbake -e' output or from .env file. + This is a lazy method, i.e. it runs bitbake or parses file only when + only when variable is requested. It also caches results. + """ + if not image: + image = self.default_image + + if image not in self: + if image and self.vars_dir: + fname = os.path.join(self.vars_dir, image + '.env') + if os.path.isfile(fname): + # parse .env file + with open(fname) as varsfile: + for line in varsfile: + self._parse_line(line, image) + else: + print("Couldn't get bitbake variable from %s." % fname) + print("File %s doesn't exist." % fname) + return + else: + # Get bitbake -e output + cmd = "bitbake -e" + if image: + cmd += " %s" % image + + log_level = logger.getEffectiveLevel() + logger.setLevel(logging.INFO) + ret, lines = _exec_cmd(cmd) + logger.setLevel(log_level) + + if ret: + logger.error("Couldn't get '%s' output.", cmd) + logger.error("Bitbake failed with error:\n%s\n", lines) + return + + # Parse bitbake -e output + for line in lines.split('\n'): + self._parse_line(line, image) + + # Make first image a default set of variables + if cache: + images = [key for key in self if key] + if len(images) == 1: + self[None] = self[image] + + result = self[image].get(var) + if not cache: + self.pop(image, None) + + return result + +# Create BB_VARS singleton +BB_VARS = BitbakeVars() + +def get_bitbake_var(var, image=None, cache=True): + """ + Provide old get_bitbake_var API by wrapping + get_var method of BB_VARS singleton. """ - scripts_path = os.path.abspath(os.path.dirname(__file__)) - # Get the scripts path of poky - for x in range(0, 3): - scripts_path = os.path.dirname(scripts_path) - - cfg_file = find_canned(scripts_path, boot_file) - if cfg_file: - with open(cfg_file, "r") as f: - config = f.read() - return config - - return None + return BB_VARS.get_var(var, image, cache) diff --git a/import-layers/yocto-poky/scripts/lib/wic/utils/oe/__init__.py b/import-layers/yocto-poky/scripts/lib/wic/utils/oe/__init__.py deleted file mode 100644 index 0a81575a7..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/utils/oe/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -# OpenEmbedded wic utils library -# -# Copyright (c) 2013, Intel Corporation. -# All rights reserved. 
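
The rewritten exec_native_cmd() above resolves tools from two native sysroots: the wic-tools RECIPE_SYSROOT_NATIVE and the image's own native sysroot, by prefixing the command with an exported PATH covering sbin, usr/sbin and usr/bin of each. A hedged sketch of that PATH construction; the wic-tools sysroot lookup is stubbed out here, whereas the real code obtains it via get_bitbake_var("RECIPE_SYSROOT_NATIVE", "wic-tools"):

    def native_search_path(*sysroots):
        """Build the sbin:/usr/sbin:/usr/bin search path for each native sysroot."""
        dirs = []
        for root in sysroots:
            dirs += ["%s/sbin" % root, "%s/usr/sbin" % root, "%s/usr/bin" % root]
        return ":".join(dirs)

    def wrap_native(cmd, *sysroots):
        """Prefix a command so it resolves tools from the native sysroots first."""
        return "export PATH=%s:$PATH;%s" % (native_search_path(*sysroots), cmd)

    # e.g. wrap_native("parted -s disk.img print",
    #                  "/build/tmp/work/x86_64-linux/wic-tools/1.0-r0/recipe-sysroot-native",
    #                  "/build/tmp/sysroots-native")   # hypothetical paths
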
-# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# AUTHORS -# Tom Zanussi -# diff --git a/import-layers/yocto-poky/scripts/lib/wic/utils/oe/misc.py b/import-layers/yocto-poky/scripts/lib/wic/utils/oe/misc.py deleted file mode 100644 index fe188c9d2..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/utils/oe/misc.py +++ /dev/null @@ -1,246 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# Copyright (c) 2013, Intel Corporation. -# All rights reserved. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. -# -# DESCRIPTION -# This module provides a place to collect various wic-related utils -# for the OpenEmbedded Image Tools. 
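# ---------------------------------------------------------------------------
# Editor's note (not part of the patch): an illustrative sketch of how the new
# exec_native_cmd() above assembles the search PATH from the wic-tools sysroot
# and the image's native sysroot before handing the command to the shell.  The
# sysroot paths used here are invented for the example.
# ---------------------------------------------------------------------------
def build_native_paths(wtools_sysroot, native_sysroot):
    dirs = []
    for root in (wtools_sysroot, native_sysroot):
        dirs += [root + "/sbin", root + "/usr/sbin", root + "/usr/bin"]
    return ":".join(dirs)

paths = build_native_paths("/work/wic-tools-sysroot", "/work/image-sysroot")
print("export PATH=%s:$PATH; parted --version" % paths)
# If the executable cannot be found on that PATH, exec_native_cmd() reports
# return code 127 and points the user at the recipe listed in NATIVE_RECIPES.
# ---------------------------------------------------------------------------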
-# -# AUTHORS -# Tom Zanussi -# -"""Miscellaneous functions.""" - -import os -from collections import defaultdict -from distutils import spawn - -from wic import msger -from wic.utils import runner - -# executable -> recipe pairs for exec_native_cmd -NATIVE_RECIPES = {"bmaptool": "bmap-tools", - "mcopy": "mtools", - "mkdosfs": "dosfstools", - "mkfs.btrfs": "btrfs-tools", - "mkfs.ext2": "e2fsprogs", - "mkfs.ext3": "e2fsprogs", - "mkfs.ext4": "e2fsprogs", - "mkfs.vfat": "dosfstools", - "mksquashfs": "squashfs-tools", - "mkswap": "util-linux", - "parted": "parted", - "sfdisk": "util-linux", - "sgdisk": "gptfdisk", - "syslinux": "syslinux" - } - -def _exec_cmd(cmd_and_args, as_shell=False, catch=3): - """ - Execute command, catching stderr, stdout - - Need to execute as_shell if the command uses wildcards - """ - msger.debug("_exec_cmd: %s" % cmd_and_args) - args = cmd_and_args.split() - msger.debug(args) - - if as_shell: - ret, out = runner.runtool(cmd_and_args, catch) - else: - ret, out = runner.runtool(args, catch) - out = out.strip() - msger.debug("_exec_cmd: output for %s (rc = %d): %s" % \ - (cmd_and_args, ret, out)) - - return (ret, out) - - -def exec_cmd(cmd_and_args, as_shell=False, catch=3): - """ - Execute command, catching stderr, stdout - - Exits if rc non-zero - """ - ret, out = _exec_cmd(cmd_and_args, as_shell, catch) - - if ret != 0: - msger.error("exec_cmd: %s returned '%s' instead of 0" % \ - (cmd_and_args, ret)) - - return out - -def exec_native_cmd(cmd_and_args, native_sysroot, catch=3, pseudo=""): - """ - Execute native command, catching stderr, stdout - - Need to execute as_shell if the command uses wildcards - - Always need to execute native commands as_shell - """ - # The reason -1 is used is because there may be "export" commands. - args = cmd_and_args.split(';')[-1].split() - msger.debug(args) - - if pseudo: - cmd_and_args = pseudo + cmd_and_args - native_paths = \ - "%s/sbin:%s/usr/sbin:%s/usr/bin" % \ - (native_sysroot, native_sysroot, native_sysroot) - native_cmd_and_args = "export PATH=%s:$PATH;%s" % \ - (native_paths, cmd_and_args) - msger.debug("exec_native_cmd: %s" % cmd_and_args) - - # If the command isn't in the native sysroot say we failed. - if spawn.find_executable(args[0], native_paths): - ret, out = _exec_cmd(native_cmd_and_args, True, catch) - else: - ret = 127 - - prog = args[0] - # shell command-not-found - if ret == 127 \ - or (pseudo and ret == 1 and out == "Can't find '%s' in $PATH." % prog): - msg = "A native program %s required to build the image "\ - "was not found (see details above).\n\n" % prog - recipe = NATIVE_RECIPES.get(prog) - if recipe: - msg += "Please bake it with 'bitbake %s-native' "\ - "and try again.\n" % recipe - else: - msg += "Wic failed to find a recipe to build native %s. Please "\ - "file a bug against wic.\n" % prog - msger.error(msg) - if out: - msger.debug('"%s" output: %s' % (args[0], out)) - - if ret != 0: - msger.error("exec_cmd: '%s' returned '%s' instead of 0" % \ - (cmd_and_args, ret)) - - return ret, out - -BOOTDD_EXTRA_SPACE = 16384 - -class BitbakeVars(defaultdict): - """ - Container for Bitbake variables. - """ - def __init__(self): - defaultdict.__init__(self, dict) - - # default_image and vars_dir attributes should be set from outside - self.default_image = None - self.vars_dir = None - - def _parse_line(self, line, image): - """ - Parse one line from bitbake -e output or from .env file. - Put result key-value pair into the storage. 
- """ - if "=" not in line: - return - try: - key, val = line.split("=") - except ValueError: - return - key = key.strip() - val = val.strip() - if key.replace('_', '').isalnum(): - self[image][key] = val.strip('"') - - def get_var(self, var, image=None): - """ - Get bitbake variable from 'bitbake -e' output or from .env file. - This is a lazy method, i.e. it runs bitbake or parses file only when - only when variable is requested. It also caches results. - """ - if not image: - image = self.default_image - - if image not in self: - if image and self.vars_dir: - fname = os.path.join(self.vars_dir, image + '.env') - if os.path.isfile(fname): - # parse .env file - with open(fname) as varsfile: - for line in varsfile: - self._parse_line(line, image) - else: - print("Couldn't get bitbake variable from %s." % fname) - print("File %s doesn't exist." % fname) - return - else: - # Get bitbake -e output - cmd = "bitbake -e" - if image: - cmd += " %s" % image - - log_level = msger.get_loglevel() - msger.set_loglevel('normal') - ret, lines = _exec_cmd(cmd) - msger.set_loglevel(log_level) - - if ret: - print("Couldn't get '%s' output." % cmd) - print("Bitbake failed with error:\n%s\n" % lines) - return - - # Parse bitbake -e output - for line in lines.split('\n'): - self._parse_line(line, image) - - # Make first image a default set of variables - images = [key for key in self if key] - if len(images) == 1: - self[None] = self[image] - - return self[image].get(var) - -# Create BB_VARS singleton -BB_VARS = BitbakeVars() - -def get_bitbake_var(var, image=None): - """ - Provide old get_bitbake_var API by wrapping - get_var method of BB_VARS singleton. - """ - return BB_VARS.get_var(var, image) - -def parse_sourceparams(sourceparams): - """ - Split sourceparams string of the form key1=val1[,key2=val2,...] - into a dict. Also accepts valueless keys i.e. without =. - - Returns dict of param key/val pairs (note that val may be None). - """ - params_dict = {} - - params = sourceparams.split(',') - if params: - for par in params: - if not par: - continue - if not '=' in par: - key = par - val = None - else: - key, val = par.split('=') - params_dict[key] = val - - return params_dict diff --git a/import-layers/yocto-poky/scripts/lib/wic/utils/partitionedfs.py b/import-layers/yocto-poky/scripts/lib/wic/utils/partitionedfs.py deleted file mode 100644 index 9ea4a30cb..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/utils/partitionedfs.py +++ /dev/null @@ -1,379 +0,0 @@ -#!/usr/bin/env python -tt -# -# Copyright (c) 2009, 2010, 2011 Intel, Inc. -# Copyright (c) 2007, 2008 Red Hat, Inc. -# Copyright (c) 2008 Daniel P. Berrange -# Copyright (c) 2008 David P. Huff -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the Free -# Software Foundation; version 2 of the License -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -# for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., 59 -# Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
- -import os -from wic import msger -from wic.utils.errors import ImageError -from wic.utils.oe.misc import exec_cmd, exec_native_cmd -from wic.filemap import sparse_copy - -# Overhead of the MBR partitioning scheme (just one sector) -MBR_OVERHEAD = 1 - -# Overhead of the GPT partitioning scheme -GPT_OVERHEAD = 34 - -# Size of a sector in bytes -SECTOR_SIZE = 512 - -class Image(): - """ - Generic base object for an image. - - An Image is a container for a set of DiskImages and associated - partitions. - """ - def __init__(self, native_sysroot=None): - self.disks = {} - self.partitions = [] - self.partimages = [] - # Size of a sector used in calculations - self.sector_size = SECTOR_SIZE - self._partitions_layed_out = False - self.native_sysroot = native_sysroot - - def __add_disk(self, disk_name): - """ Add a disk 'disk_name' to the internal list of disks. Note, - 'disk_name' is the name of the disk in the target system - (e.g., sdb). """ - - if disk_name in self.disks: - # We already have this disk - return - - assert not self._partitions_layed_out - - self.disks[disk_name] = \ - {'disk': None, # Disk object - 'numpart': 0, # Number of allocate partitions - 'realpart': 0, # Number of partitions in the partition table - 'partitions': [], # Indexes to self.partitions - 'offset': 0, # Offset of next partition (in sectors) - # Minimum required disk size to fit all partitions (in bytes) - 'min_size': 0, - 'ptable_format': "msdos", # Partition table format - 'identifier': None} # Disk system identifier - - def add_disk(self, disk_name, disk_obj, identifier): - """ Add a disk object which have to be partitioned. More than one disk - can be added. In case of multiple disks, disk partitions have to be - added for each disk separately with 'add_partition()". """ - - self.__add_disk(disk_name) - self.disks[disk_name]['disk'] = disk_obj - self.disks[disk_name]['identifier'] = identifier - - def __add_partition(self, part): - """ This is a helper function for 'add_partition()' which adds a - partition to the internal list of partitions. """ - - assert not self._partitions_layed_out - - self.partitions.append(part) - self.__add_disk(part['disk_name']) - - def add_partition(self, size, disk_name, mountpoint, source_file=None, fstype=None, - label=None, fsopts=None, boot=False, align=None, no_table=False, - part_type=None, uuid=None, system_id=None): - """ Add the next partition. Partitions have to be added in the - first-to-last order. """ - - ks_pnum = len(self.partitions) - - # Converting kB to sectors for parted - size = size * 1024 // self.sector_size - - part = {'ks_pnum': ks_pnum, # Partition number in the KS file - 'size': size, # In sectors - 'mountpoint': mountpoint, # Mount relative to chroot - 'source_file': source_file, # partition contents - 'fstype': fstype, # Filesystem type - 'fsopts': fsopts, # Filesystem mount options - 'label': label, # Partition label - 'disk_name': disk_name, # physical disk name holding partition - 'device': None, # kpartx device node for partition - 'num': None, # Partition number - 'boot': boot, # Bootable flag - 'align': align, # Partition alignment - 'no_table' : no_table, # Partition does not appear in partition table - 'part_type' : part_type, # Partition type - 'uuid': uuid, # Partition UUID - 'system_id': system_id} # Partition system id - - self.__add_partition(part) - - def layout_partitions(self, ptable_format="msdos"): - """ Layout the partitions, meaning calculate the position of every - partition on the disk. 
The 'ptable_format' parameter defines the - partition table format and may be "msdos". """ - - msger.debug("Assigning %s partitions to disks" % ptable_format) - - if self._partitions_layed_out: - return - - self._partitions_layed_out = True - - # Go through partitions in the order they are added in .ks file - for num in range(len(self.partitions)): - part = self.partitions[num] - - if part['disk_name'] not in self.disks: - raise ImageError("No disk %s for partition %s" \ - % (part['disk_name'], part['mountpoint'])) - - if ptable_format == 'msdos' and part['part_type']: - # The --part-type can also be implemented for MBR partitions, - # in which case it would map to the 1-byte "partition type" - # filed at offset 3 of the partition entry. - raise ImageError("setting custom partition type is not " \ - "implemented for msdos partitions") - - # Get the disk where the partition is located - disk = self.disks[part['disk_name']] - disk['numpart'] += 1 - if not part['no_table']: - disk['realpart'] += 1 - disk['ptable_format'] = ptable_format - - if disk['numpart'] == 1: - if ptable_format == "msdos": - overhead = MBR_OVERHEAD - elif ptable_format == "gpt": - overhead = GPT_OVERHEAD - - # Skip one sector required for the partitioning scheme overhead - disk['offset'] += overhead - - if disk['realpart'] > 3: - # Reserve a sector for EBR for every logical partition - # before alignment is performed. - if ptable_format == "msdos": - disk['offset'] += 1 - - - if part['align']: - # If not first partition and we do have alignment set we need - # to align the partition. - # FIXME: This leaves a empty spaces to the disk. To fill the - # gaps we could enlargea the previous partition? - - # Calc how much the alignment is off. - align_sectors = disk['offset'] % (part['align'] * 1024 // self.sector_size) - - if align_sectors: - # If partition is not aligned as required, we need - # to move forward to the next alignment point - align_sectors = (part['align'] * 1024 // self.sector_size) - align_sectors - - msger.debug("Realignment for %s%s with %s sectors, original" - " offset %s, target alignment is %sK." % - (part['disk_name'], disk['numpart'], align_sectors, - disk['offset'], part['align'])) - - # increase the offset so we actually start the partition on right alignment - disk['offset'] += align_sectors - - part['start'] = disk['offset'] - disk['offset'] += part['size'] - - part['type'] = 'primary' - if not part['no_table']: - part['num'] = disk['realpart'] - else: - part['num'] = 0 - - if disk['ptable_format'] == "msdos": - if len(self.partitions) > 4: - if disk['realpart'] > 3: - part['type'] = 'logical' - part['num'] = disk['realpart'] + 1 - - disk['partitions'].append(num) - msger.debug("Assigned %s to %s%d, sectors range %d-%d size %d " - "sectors (%d bytes)." \ - % (part['mountpoint'], part['disk_name'], part['num'], - part['start'], part['start'] + part['size'] - 1, - part['size'], part['size'] * self.sector_size)) - - # Once all the partitions have been layed out, we can calculate the - # minumim disk sizes. - for disk in self.disks.values(): - disk['min_size'] = disk['offset'] - if disk['ptable_format'] == "gpt": - disk['min_size'] += GPT_OVERHEAD - - disk['min_size'] *= self.sector_size - - def __create_partition(self, device, parttype, fstype, start, size): - """ Create a partition on an image described by the 'device' object. """ - - # Start is included to the size so we need to substract one from the end. 
- end = start + size - 1 - msger.debug("Added '%s' partition, sectors %d-%d, size %d sectors" % - (parttype, start, end, size)) - - cmd = "parted -s %s unit s mkpart %s" % (device, parttype) - if fstype: - cmd += " %s" % fstype - cmd += " %d %d" % (start, end) - - return exec_native_cmd(cmd, self.native_sysroot) - - def __format_disks(self): - self.layout_partitions() - - for dev in self.disks: - disk = self.disks[dev] - msger.debug("Initializing partition table for %s" % \ - (disk['disk'].device)) - exec_native_cmd("parted -s %s mklabel %s" % \ - (disk['disk'].device, disk['ptable_format']), - self.native_sysroot) - - if disk['identifier']: - msger.debug("Set disk identifier %x" % disk['identifier']) - with open(disk['disk'].device, 'r+b') as img: - img.seek(0x1B8) - img.write(disk['identifier'].to_bytes(4, 'little')) - - msger.debug("Creating partitions") - - for part in self.partitions: - if part['num'] == 0: - continue - - disk = self.disks[part['disk_name']] - if disk['ptable_format'] == "msdos" and part['num'] == 5: - # Create an extended partition (note: extended - # partition is described in MBR and contains all - # logical partitions). The logical partitions save a - # sector for an EBR just before the start of a - # partition. The extended partition must start one - # sector before the start of the first logical - # partition. This way the first EBR is inside of the - # extended partition. Since the extended partitions - # starts a sector before the first logical partition, - # add a sector at the back, so that there is enough - # room for all logical partitions. - self.__create_partition(disk['disk'].device, "extended", - None, part['start'] - 1, - disk['offset'] - part['start'] + 1) - - if part['fstype'] == "swap": - parted_fs_type = "linux-swap" - elif part['fstype'] == "vfat": - parted_fs_type = "fat32" - elif part['fstype'] == "msdos": - parted_fs_type = "fat16" - elif part['fstype'] == "ontrackdm6aux3": - parted_fs_type = "ontrackdm6aux3" - else: - # Type for ext2/ext3/ext4/btrfs - parted_fs_type = "ext2" - - # Boot ROM of OMAP boards require vfat boot partition to have an - # even number of sectors. 
- if part['mountpoint'] == "/boot" and part['fstype'] in ["vfat", "msdos"] \ - and part['size'] % 2: - msger.debug("Subtracting one sector from '%s' partition to " \ - "get even number of sectors for the partition" % \ - part['mountpoint']) - part['size'] -= 1 - - self.__create_partition(disk['disk'].device, part['type'], - parted_fs_type, part['start'], part['size']) - - if part['part_type']: - msger.debug("partition %d: set type UID to %s" % \ - (part['num'], part['part_type'])) - exec_native_cmd("sgdisk --typecode=%d:%s %s" % \ - (part['num'], part['part_type'], - disk['disk'].device), self.native_sysroot) - - if part['uuid'] and disk['ptable_format'] == "gpt": - msger.debug("partition %d: set UUID to %s" % \ - (part['num'], part['uuid'])) - exec_native_cmd("sgdisk --partition-guid=%d:%s %s" % \ - (part['num'], part['uuid'], disk['disk'].device), - self.native_sysroot) - - if part['boot']: - flag_name = "legacy_boot" if disk['ptable_format'] == 'gpt' else "boot" - msger.debug("Set '%s' flag for partition '%s' on disk '%s'" % \ - (flag_name, part['num'], disk['disk'].device)) - exec_native_cmd("parted -s %s set %d %s on" % \ - (disk['disk'].device, part['num'], flag_name), - self.native_sysroot) - if part['system_id']: - exec_native_cmd("sfdisk --part-type %s %s %s" % \ - (disk['disk'].device, part['num'], part['system_id']), - self.native_sysroot) - - # Parted defaults to enabling the lba flag for fat16 partitions, - # which causes compatibility issues with some firmware (and really - # isn't necessary). - if parted_fs_type == "fat16": - if disk['ptable_format'] == 'msdos': - msger.debug("Disable 'lba' flag for partition '%s' on disk '%s'" % \ - (part['num'], disk['disk'].device)) - exec_native_cmd("parted -s %s set %d lba off" % \ - (disk['disk'].device, part['num']), - self.native_sysroot) - - def cleanup(self): - if self.disks: - for dev in self.disks: - disk = self.disks[dev] - try: - disk['disk'].cleanup() - except: - pass - # remove partition images - for image in self.partimages: - if os.path.isfile(image): - os.remove(image) - - def assemble(self, image_file): - msger.debug("Installing partitions") - - for part in self.partitions: - source = part['source_file'] - if source: - # install source_file contents into a partition - sparse_copy(source, image_file, part['start'] * self.sector_size) - - msger.debug("Installed %s in partition %d, sectors %d-%d, " - "size %d sectors" % \ - (source, part['num'], part['start'], - part['start'] + part['size'] - 1, part['size'])) - - partimage = image_file + '.p%d' % part['num'] - os.rename(source, partimage) - self.partimages.append(partimage) - - def create(self): - for dev in self.disks: - disk = self.disks[dev] - disk['disk'].create() - - self.__format_disks() - - return diff --git a/import-layers/yocto-poky/scripts/lib/wic/utils/runner.py b/import-layers/yocto-poky/scripts/lib/wic/utils/runner.py index db536ba58..4aa00fbe2 100644 --- a/import-layers/yocto-poky/scripts/lib/wic/utils/runner.py +++ b/import-layers/yocto-poky/scripts/lib/wic/utils/runner.py @@ -14,29 +14,17 @@ # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., 59 # Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
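# ---------------------------------------------------------------------------
# Editor's note (not part of the patch): the alignment arithmetic used by the
# layout_partitions() method of the partitionedfs.py module deleted above -
# advance the next partition start to the next multiple of the requested
# alignment.  The numbers in the example are illustrative.
# ---------------------------------------------------------------------------
SECTOR_SIZE = 512      # bytes per sector, as in the deleted module
GPT_OVERHEAD = 34      # sectors reserved for the GPT partitioning scheme

def align_offset(offset, align_kb):
    """Return offset (in sectors) moved forward to the next align_kb boundary."""
    align_sectors = align_kb * 1024 // SECTOR_SIZE
    misalignment = offset % align_sectors
    if misalignment:
        offset += align_sectors - misalignment
    return offset

# With 4 KiB alignment (8 sectors), an offset of 34 (right after the GPT
# overhead) gets pushed forward to sector 40.
print(align_offset(GPT_OVERHEAD, 4))   # -> 40
# ---------------------------------------------------------------------------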
- -import os import subprocess -from wic import msger +from wic import WicError -def runtool(cmdln_or_args, catch=1): +def runtool(cmdln_or_args): """ wrapper for most of the subprocess calls input: cmdln_or_args: can be both args and cmdln str (shell=True) - catch: 0, quitely run - 1, only STDOUT - 2, only STDERR - 3, both STDOUT and STDERR return: - (rc, output) - if catch==0: the output will always None + rc, output """ - - if catch not in (0, 1, 2, 3): - # invalid catch selection, will cause exception, that's good - return None - if isinstance(cmdln_or_args, list): cmd = cmdln_or_args[0] shell = False @@ -45,66 +33,20 @@ def runtool(cmdln_or_args, catch=1): cmd = shlex.split(cmdln_or_args)[0] shell = True - if catch != 3: - dev_null = os.open("/dev/null", os.O_WRONLY) - - if catch == 0: - sout = dev_null - serr = dev_null - elif catch == 1: - sout = subprocess.PIPE - serr = dev_null - elif catch == 2: - sout = dev_null - serr = subprocess.PIPE - elif catch == 3: - sout = subprocess.PIPE - serr = subprocess.STDOUT + sout = subprocess.PIPE + serr = subprocess.STDOUT try: process = subprocess.Popen(cmdln_or_args, stdout=sout, stderr=serr, shell=shell) - (sout, serr) = process.communicate() + sout, serr = process.communicate() # combine stdout and stderr, filter None out and decode out = ''.join([out.decode('utf-8') for out in [sout, serr] if out]) except OSError as err: if err.errno == 2: # [Errno 2] No such file or directory - msger.error('Cannot run command: %s, lost dependency?' % cmd) + raise WicError('Cannot run command: %s, lost dependency?' % cmd) else: raise # relay - finally: - if catch != 3: - os.close(dev_null) - - return (process.returncode, out) - -def show(cmdln_or_args): - # show all the message using msger.verbose - - rcode, out = runtool(cmdln_or_args, catch=3) - - if isinstance(cmdln_or_args, list): - cmd = ' '.join(cmdln_or_args) - else: - cmd = cmdln_or_args - - msg = 'running command: "%s"' % cmd - if out: - out = out.strip() - if out: - msg += ', with output::' - msg += '\n +----------------' - for line in out.splitlines(): - msg += '\n | %s' % line - msg += '\n +----------------' - - msger.verbose(msg) - return rcode - -def outs(cmdln_or_args, catch=1): - # get the outputs of tools - return runtool(cmdln_or_args, catch)[1].strip() -def quiet(cmdln_or_args): - return runtool(cmdln_or_args, catch=0)[0] + return process.returncode, out diff --git a/import-layers/yocto-poky/scripts/lib/wic/utils/syslinux.py b/import-layers/yocto-poky/scripts/lib/wic/utils/syslinux.py deleted file mode 100644 index aace2863c..000000000 --- a/import-layers/yocto-poky/scripts/lib/wic/utils/syslinux.py +++ /dev/null @@ -1,58 +0,0 @@ -# ex:ts=4:sw=4:sts=4:et -# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the Free -# Software Foundation; version 2 of the License -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY -# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -# for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program; if not, write to the Free Software Foundation, Inc., 59 -# Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
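# ---------------------------------------------------------------------------
# Editor's note (not part of the patch): a self-contained sketch of the
# simplified runtool() pattern introduced in runner.py above - stdout and
# stderr are merged and returned together with the exit code, replacing the
# old "catch" selector.  The echo command is only an example.
# ---------------------------------------------------------------------------
import subprocess

def run(cmdln_or_args, shell=False):
    proc = subprocess.Popen(cmdln_or_args, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, shell=shell)
    out, _ = proc.communicate()           # stderr is folded into stdout
    return proc.returncode, out.decode('utf-8') if out else ''

rc, out = run(["echo", "hello"])
print(rc, out.strip())                    # -> 0 hello
# ---------------------------------------------------------------------------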
-# -# AUTHOR -# Adrian Freihofer - - -import re -from wic import msger - - -def serial_console_form_kargs(kernel_args): - """ - Create SERIAL... line from kernel parameters - - syslinux needs a line SERIAL port [baudrate [flowcontrol]] - in the syslinux.cfg file. The config line is generated based - on kernel boot parameters. The the parameters of the first - ttyS console are considered for syslinux config. - @param kernel_args kernel command line - @return line for syslinux config file e.g. "SERIAL 0 115200" - """ - syslinux_conf = "" - for param in kernel_args.split(): - param_match = re.match("console=ttyS([0-9]+),?([0-9]*)([noe]?)([0-9]?)(r?)", param) - if param_match: - syslinux_conf += "SERIAL " + param_match.group(1) - # baudrate - if param_match.group(2): - syslinux_conf += " " + param_match.group(2) - # parity - if param_match.group(3) and param_match.group(3) != 'n': - msger.warning("syslinux does not support parity for console. {} is ignored." - .format(param_match.group(3))) - # number of bits - if param_match.group(4) and param_match.group(4) != '8': - msger.warning("syslinux supports 8 bit console configuration only. {} is ignored." - .format(param_match.group(4))) - # flow control - if param_match.group(5) and param_match.group(5) != '': - msger.warning("syslinux console flowcontrol configuration. {} is ignored." - .format(param_match.group(5))) - break - - return syslinux_conf diff --git a/import-layers/yocto-poky/scripts/multilib_header_wrapper.h b/import-layers/yocto-poky/scripts/multilib_header_wrapper.h index 5a8754088..f516673b6 100644 --- a/import-layers/yocto-poky/scripts/multilib_header_wrapper.h +++ b/import-layers/yocto-poky/scripts/multilib_header_wrapper.h @@ -21,11 +21,23 @@ * */ -#include -#ifdef __WORDSIZE +#if defined (__arm__) +#define __MHWORDSIZE 32 +#elif defined (__aarch64__) && defined ( __LP64__) +#define __MHWORDSIZE 64 +#elif defined (__aarch64__) +#define __MHWORDSIZE 32 +#else +#include +#if defined (__WORDSIZE) +#define __MHWORDSIZE __WORDSIZE +#else +#error "__WORDSIZE is not defined" +#endif +#endif -#if __WORDSIZE == 32 +#if __MHWORDSIZE == 32 #ifdef _MIPS_SIM @@ -41,15 +53,9 @@ #include #endif -#elif __WORDSIZE == 64 +#elif __MHWORDSIZE == 64 #include #else #error "Unknown __WORDSIZE detected" #endif /* matches #if __WORDSIZE == 32 */ - -#else /* __WORDSIZE is not defined */ - -#error "__WORDSIZE is not defined" - -#endif diff --git a/import-layers/yocto-poky/scripts/oe-build-perf-report b/import-layers/yocto-poky/scripts/oe-build-perf-report new file mode 100755 index 000000000..6f0b84f9e --- /dev/null +++ b/import-layers/yocto-poky/scripts/oe-build-perf-report @@ -0,0 +1,534 @@ +#!/usr/bin/python3 +# +# Examine build performance test results +# +# Copyright (c) 2017, Intel Corporation. +# +# This program is free software; you can redistribute it and/or modify it +# under the terms and conditions of the GNU General Public License, +# version 2, as published by the Free Software Foundation. +# +# This program is distributed in the hope it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for +# more details. 
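# ---------------------------------------------------------------------------
# Editor's note (not part of the patch): the core idea of the deleted
# syslinux.py helper above - derive a syslinux "SERIAL port [baudrate]" line
# from a kernel console= argument.  The kernel command line below is made up.
# ---------------------------------------------------------------------------
import re

def serial_line(kernel_args):
    for param in kernel_args.split():
        match = re.match(r"console=ttyS([0-9]+),?([0-9]*)", param)
        if match:
            line = "SERIAL " + match.group(1)
            if match.group(2):
                line += " " + match.group(2)   # optional baud rate
            return line
    return ""

print(serial_line("root=/dev/sda2 console=ttyS0,115200"))   # -> SERIAL 0 115200
# ---------------------------------------------------------------------------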
+# +import argparse +import json +import logging +import os +import re +import sys +from collections import namedtuple, OrderedDict +from operator import attrgetter +from xml.etree import ElementTree as ET + +# Import oe libs +scripts_path = os.path.dirname(os.path.realpath(__file__)) +sys.path.append(os.path.join(scripts_path, 'lib')) +import scriptpath +from build_perf import print_table +from build_perf.report import (metadata_xml_to_json, results_xml_to_json, + aggregate_data, aggregate_metadata, measurement_stats) +from build_perf import html + +scriptpath.add_oe_lib_path() + +from oeqa.utils.git import GitRepo + + +# Setup logging +logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") +log = logging.getLogger('oe-build-perf-report') + + +# Container class for tester revisions +TestedRev = namedtuple('TestedRev', 'commit commit_number tags') + + +def get_test_runs(repo, tag_name, **kwargs): + """Get a sorted list of test runs, matching given pattern""" + # First, get field names from the tag name pattern + field_names = [m.group(1) for m in re.finditer(r'{(\w+)}', tag_name)] + undef_fields = [f for f in field_names if f not in kwargs.keys()] + + # Fields for formatting tag name pattern + str_fields = dict([(f, '*') for f in field_names]) + str_fields.update(kwargs) + + # Get a list of all matching tags + tag_pattern = tag_name.format(**str_fields) + tags = repo.run_cmd(['tag', '-l', tag_pattern]).splitlines() + log.debug("Found %d tags matching pattern '%s'", len(tags), tag_pattern) + + # Parse undefined fields from tag names + str_fields = dict([(f, r'(?P<{}>[\w\-.()]+)'.format(f)) for f in field_names]) + str_fields['branch'] = r'(?P[\w\-.()/]+)' + str_fields['commit'] = '(?P[0-9a-f]{7,40})' + str_fields['commit_number'] = '(?P[0-9]{1,7})' + str_fields['tag_number'] = '(?P[0-9]{1,5})' + # escape parenthesis in fields in order to not messa up the regexp + fixed_fields = dict([(k, v.replace('(', r'\(').replace(')', r'\)')) for k, v in kwargs.items()]) + str_fields.update(fixed_fields) + tag_re = re.compile(tag_name.format(**str_fields)) + + # Parse fields from tags + revs = [] + for tag in tags: + m = tag_re.match(tag) + groups = m.groupdict() + revs.append([groups[f] for f in undef_fields] + [tag]) + + # Return field names and a sorted list of revs + return undef_fields, sorted(revs) + +def list_test_revs(repo, tag_name, **kwargs): + """Get list of all tested revisions""" + fields, revs = get_test_runs(repo, tag_name, **kwargs) + ignore_fields = ['tag_number'] + print_fields = [i for i, f in enumerate(fields) if f not in ignore_fields] + + # Sort revs + rows = [[fields[i].upper() for i in print_fields] + ['TEST RUNS']] + prev = [''] * len(revs) + for rev in revs: + # Only use fields that we want to print + rev = [rev[i] for i in print_fields] + + if rev != prev: + new_row = [''] * len(print_fields) + [1] + for i in print_fields: + if rev[i] != prev[i]: + break + new_row[i:-1] = rev[i:] + rows.append(new_row) + else: + rows[-1][-1] += 1 + prev = rev + + print_table(rows) + +def get_test_revs(repo, tag_name, **kwargs): + """Get list of all tested revisions""" + fields, runs = get_test_runs(repo, tag_name, **kwargs) + + revs = {} + commit_i = fields.index('commit') + commit_num_i = fields.index('commit_number') + for run in runs: + commit = run[commit_i] + commit_num = run[commit_num_i] + tag = run[-1] + if not commit in revs: + revs[commit] = TestedRev(commit, commit_num, [tag]) + else: + assert commit_num == revs[commit].commit_number, "Commit numbers do not 
match" + revs[commit].tags.append(tag) + + # Return in sorted table + revs = sorted(revs.values(), key=attrgetter('commit_number')) + log.debug("Found %d tested revisions:\n %s", len(revs), + "\n ".join(['{} ({})'.format(rev.commit_number, rev.commit) for rev in revs])) + return revs + +def rev_find(revs, attr, val): + """Search from a list of TestedRev""" + for i, rev in enumerate(revs): + if getattr(rev, attr) == val: + return i + raise ValueError("Unable to find '{}' value '{}'".format(attr, val)) + +def is_xml_format(repo, commit): + """Check if the commit contains xml (or json) data""" + if repo.rev_parse(commit + ':results.xml'): + log.debug("Detected report in xml format in %s", commit) + return True + else: + log.debug("No xml report in %s, assuming json formatted results", commit) + return False + +def read_results(repo, tags, xml=True): + """Read result files from repo""" + + def parse_xml_stream(data): + """Parse multiple concatenated XML objects""" + objs = [] + xml_d = "" + for line in data.splitlines(): + if xml_d and line.startswith('{wid}}', ' {:{wid}} ', '{:{wid}}', + ' {:>{wid}}', ' {:>{wid}}'] + num_cols = len(row_fmt) + for test in tests: + test_l = data_l['tests'][test] if test in data_l['tests'] else None + test_r = data_r['tests'][test] if test in data_r['tests'] else None + pref = ' ' + if test_l is None: + pref = '+' + elif test_r is None: + pref = '-' + descr = test_l['description'] if test_l else test_r['description'] + heading = "{} {}: {}".format(pref, test, descr) + + rows.append([heading]) + + # Generate the list of measurements + meas_l = test_l['measurements'] if test_l else {} + meas_r = test_r['measurements'] if test_r else {} + measurements = list(meas_l.keys()) + measurements += [m for m in list(meas_r.keys()) if m not in measurements] + + for meas in measurements: + m_pref = ' ' + if meas in meas_l: + stats_l = measurement_stats(meas_l[meas], 'l.') + else: + stats_l = measurement_stats(None, 'l.') + m_pref = '+' + if meas in meas_r: + stats_r = measurement_stats(meas_r[meas], 'r.') + else: + stats_r = measurement_stats(None, 'r.') + m_pref = '-' + stats = stats_l.copy() + stats.update(stats_r) + + absdiff = stats['val_cls'](stats['r.mean'] - stats['l.mean']) + reldiff = "{:+.1f} %".format(absdiff * 100 / stats['l.mean']) + if stats['r.mean'] > stats['l.mean']: + absdiff = '+' + str(absdiff) + else: + absdiff = str(absdiff) + rows.append(['', m_pref, stats['name'] + ' ' + stats['quantity'], + str(stats['l.mean']), '->', str(stats['r.mean']), + absdiff, reldiff]) + rows.append([''] * num_cols) + + print_table(rows, row_fmt) + + print() + + +def print_html_report(data, id_comp): + """Print report in html format""" + # Handle metadata + metadata = {'branch': {'title': 'Branch', 'value': 'master'}, + 'hostname': {'title': 'Hostname', 'value': 'foobar'}, + 'commit': {'title': 'Commit', 'value': '1234'} + } + metadata = metadata_diff(data[id_comp][0], data[-1][0]) + + + # Generate list of tests + tests = [] + for test in data[-1][1]['tests'].keys(): + test_r = data[-1][1]['tests'][test] + new_test = {'name': test_r['name'], + 'description': test_r['description'], + 'status': test_r['status'], + 'measurements': [], + 'err_type': test_r.get('err_type'), + } + # Limit length of err output shown + if 'message' in test_r: + lines = test_r['message'].splitlines() + if len(lines) > 20: + new_test['message'] = '...\n' + '\n'.join(lines[-20:]) + else: + new_test['message'] = test_r['message'] + + + # Generate the list of measurements + for meas in 
test_r['measurements'].keys(): + meas_r = test_r['measurements'][meas] + meas_type = 'time' if meas_r['type'] == 'sysres' else 'size' + new_meas = {'name': meas_r['name'], + 'legend': meas_r['legend'], + 'description': meas_r['name'] + ' ' + meas_type, + } + samples = [] + + # Run through all revisions in our data + for meta, test_data in data: + if (not test in test_data['tests'] or + not meas in test_data['tests'][test]['measurements']): + samples.append(measurement_stats(None)) + continue + test_i = test_data['tests'][test] + meas_i = test_i['measurements'][meas] + commit_num = get_data_item(meta, 'layers.meta.commit_count') + samples.append(measurement_stats(meas_i)) + samples[-1]['commit_num'] = commit_num + + absdiff = samples[-1]['val_cls'](samples[-1]['mean'] - samples[id_comp]['mean']) + new_meas['absdiff'] = absdiff + new_meas['absdiff_str'] = str(absdiff) if absdiff < 0 else '+' + str(absdiff) + new_meas['reldiff'] = "{:+.1f} %".format(absdiff * 100 / samples[id_comp]['mean']) + new_meas['samples'] = samples + new_meas['value'] = samples[-1] + new_meas['value_type'] = samples[-1]['val_cls'] + + new_test['measurements'].append(new_meas) + tests.append(new_test) + + # Chart options + chart_opts = {'haxis': {'min': get_data_item(data[0][0], 'layers.meta.commit_count'), + 'max': get_data_item(data[-1][0], 'layers.meta.commit_count')} + } + + print(html.template.render(metadata=metadata, test_data=tests, chart_opts=chart_opts)) + + +def auto_args(repo, args): + """Guess arguments, if not defined by the user""" + # Get the latest commit in the repo + log.debug("Guessing arguments from the latest commit") + msg = repo.run_cmd(['log', '-1', '--branches', '--remotes', '--format=%b']) + for line in msg.splitlines(): + split = line.split(':', 1) + if len(split) != 2: + continue + + key = split[0] + val = split[1].strip() + if key == 'hostname': + log.debug("Using hostname %s", val) + args.hostname = val + elif key == 'branch': + log.debug("Using branch %s", val) + args.branch = val + + +def parse_args(argv): + """Parse command line arguments""" + description = """ +Examine build performance test results from a Git repository""" + parser = argparse.ArgumentParser( + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + description=description) + + parser.add_argument('--debug', '-d', action='store_true', + help="Verbose logging") + parser.add_argument('--repo', '-r', required=True, + help="Results repository (local git clone)") + parser.add_argument('--list', '-l', action='store_true', + help="List available test runs") + parser.add_argument('--html', action='store_true', + help="Generate report in html format") + group = parser.add_argument_group('Tag and revision') + group.add_argument('--tag-name', '-t', + default='{hostname}/{branch}/{machine}/{commit_number}-g{commit}/{tag_number}', + help="Tag name (pattern) for finding results") + group.add_argument('--hostname', '-H') + group.add_argument('--branch', '-B', default='master') + group.add_argument('--machine', default='qemux86') + group.add_argument('--history-length', default=25, type=int, + help="Number of tested revisions to plot in html report") + group.add_argument('--commit', + help="Revision to search for") + group.add_argument('--commit-number', + help="Revision number to search for, redundant if " + "--commit is specified") + group.add_argument('--commit2', + help="Revision to compare with") + group.add_argument('--commit-number2', + help="Revision number to compare with, redundant if " + "--commit2 is specified") + + 
return parser.parse_args(argv) + + +def main(argv=None): + """Script entry point""" + args = parse_args(argv) + if args.debug: + log.setLevel(logging.DEBUG) + + repo = GitRepo(args.repo) + + if args.list: + list_test_revs(repo, args.tag_name) + return 0 + + # Determine hostname which to use + if not args.hostname: + auto_args(repo, args) + + revs = get_test_revs(repo, args.tag_name, hostname=args.hostname, + branch=args.branch, machine=args.machine) + if len(revs) < 2: + log.error("%d tester revisions found, unable to generate report", + len(revs)) + return 1 + + # Pick revisions + if args.commit: + if args.commit_number: + log.warning("Ignoring --commit-number as --commit was specified") + index1 = rev_find(revs, 'commit', args.commit) + elif args.commit_number: + index1 = rev_find(revs, 'commit_number', args.commit_number) + else: + index1 = len(revs) - 1 + + if args.commit2: + if args.commit_number2: + log.warning("Ignoring --commit-number2 as --commit2 was specified") + index2 = rev_find(revs, 'commit', args.commit2) + elif args.commit_number2: + index2 = rev_find(revs, 'commit_number', args.commit_number2) + else: + if index1 > 0: + index2 = index1 - 1 + else: + log.error("Unable to determine the other commit, use " + "--commit2 or --commit-number2 to specify it") + return 1 + + index_l = min(index1, index2) + index_r = max(index1, index2) + + rev_l = revs[index_l] + rev_r = revs[index_r] + log.debug("Using 'left' revision %s (%s), %s test runs:\n %s", + rev_l.commit_number, rev_l.commit, len(rev_l.tags), + '\n '.join(rev_l.tags)) + log.debug("Using 'right' revision %s (%s), %s test runs:\n %s", + rev_r.commit_number, rev_r.commit, len(rev_r.tags), + '\n '.join(rev_r.tags)) + + # Check report format used in the repo (assume all reports in the same fmt) + xml = is_xml_format(repo, revs[index_r].tags[-1]) + + if args.html: + index_0 = max(0, index_r - args.history_length) + rev_range = range(index_0, index_r + 1) + else: + # We do not need range of commits for text report (no graphs) + index_0 = index_l + rev_range = (index_l, index_r) + + # Read raw data + log.debug("Reading %d revisions, starting from %s (%s)", + len(rev_range), revs[index_0].commit_number, revs[index_0].commit) + raw_data = [read_results(repo, revs[i].tags, xml) for i in rev_range] + + data = [] + for raw_m, raw_d in raw_data: + data.append((aggregate_metadata(raw_m), aggregate_data(raw_d))) + + # Re-map list indexes to the new table starting from index 0 + index_r = index_r - index_0 + index_l = index_l - index_0 + + # Print report + if not args.html: + print_diff_report(data[index_l][0], data[index_l][1], + data[index_r][0], data[index_r][1]) + else: + print_html_report(data, index_l) + + return 0 + +if __name__ == "__main__": + sys.exit(main()) diff --git a/import-layers/yocto-poky/scripts/oe-build-perf-test b/import-layers/yocto-poky/scripts/oe-build-perf-test index 638e195ef..669470fa9 100755 --- a/import-layers/yocto-poky/scripts/oe-build-perf-test +++ b/import-layers/yocto-poky/scripts/oe-build-perf-test @@ -17,21 +17,23 @@ import argparse import errno import fcntl +import json import logging import os +import re import shutil import sys -import unittest from datetime import datetime sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)) + '/lib') import scriptpath scriptpath.add_oe_lib_path() +scriptpath.add_bitbake_lib_path() import oeqa.buildperf from oeqa.buildperf import (BuildPerfTestLoader, BuildPerfTestResult, BuildPerfTestRunner, KernelDropCaches) from oeqa.utils.commands import runCmd -from 
oeqa.utils.git import GitRepo, GitError +from oeqa.utils.metadata import metadata_from_bb, write_metadata_file # Set-up logging @@ -71,31 +73,6 @@ def pre_run_sanity_check(): return False return True -def init_git_repo(path): - """Check/create Git repository where to store results""" - path = os.path.abspath(path) - if os.path.isfile(path): - log.error("Invalid Git repo %s: path exists but is not a directory", path) - return False - if not os.path.isdir(path): - try: - os.mkdir(path) - except (FileNotFoundError, PermissionError) as err: - log.error("Failed to mkdir %s: %s", path, err) - return False - if not os.listdir(path): - log.info("Initializing a new Git repo at %s", path) - GitRepo.init(path) - try: - GitRepo(path, is_topdir=True) - except GitError: - log.error("No Git repository but a non-empty directory found at %s.\n" - "Please specify a Git repository, an empty directory or " - "a non-existing directory", path) - return False - return True - - def setup_file_logging(log_file): """Setup loggin to file""" log_dir = os.path.dirname(log_file) @@ -115,6 +92,38 @@ def archive_build_conf(out_dir): shutil.copytree(src_dir, tgt_dir) +def update_globalres_file(result_obj, filename, metadata): + """Write results to globalres csv file""" + # Map test names to time and size columns in globalres + # The tuples represent index and length of times and sizes + # respectively + gr_map = {'test1': ((0, 1), (8, 1)), + 'test12': ((1, 1), (None, None)), + 'test13': ((2, 1), (9, 1)), + 'test2': ((3, 1), (None, None)), + 'test3': ((4, 3), (None, None)), + 'test4': ((7, 1), (10, 2))} + + values = ['0'] * 12 + for status, test, _ in result_obj.all_results(): + if status in ['ERROR', 'SKIPPED']: + continue + (t_ind, t_len), (s_ind, s_len) = gr_map[test.name] + if t_ind is not None: + values[t_ind:t_ind + t_len] = test.times + if s_ind is not None: + values[s_ind:s_ind + s_len] = test.sizes + + log.debug("Writing globalres log to %s", filename) + rev_info = metadata['layers']['meta'] + with open(filename, 'a') as fobj: + fobj.write('{},{}:{},{},'.format(metadata['hostname'], + rev_info['branch'], + rev_info['commit'], + rev_info['commit'])) + fobj.write(','.join(values) + '\n') + + def parse_args(argv): """Parse command line arguments""" parser = argparse.ArgumentParser( @@ -131,20 +140,13 @@ def parse_args(argv): parser.add_argument('-o', '--out-dir', default='results-{date}', type=os.path.abspath, help="Output directory for test results") + parser.add_argument('-x', '--xml', action='store_true', + help='Enable JUnit xml output') parser.add_argument('--log-file', default='{out_dir}/oe-build-perf-test.log', help="Log file of this script") parser.add_argument('--run-tests', nargs='+', metavar='TEST', help="List of tests to run") - parser.add_argument('--commit-results', metavar='GIT_DIR', - type=os.path.abspath, - help="Commit result data to a (local) git repository") - parser.add_argument('--commit-results-branch', metavar='BRANCH', - default="{git_branch}", - help="Commit results to branch BRANCH.") - parser.add_argument('--commit-results-tag', metavar='TAG', - default="{git_branch}/{git_commit_count}-g{git_commit}/{tag_num}", - help="Tag results commit with TAG.") return parser.parse_args(argv) @@ -167,9 +169,6 @@ def main(argv=None): if not pre_run_sanity_check(): return 1 - if args.commit_results: - if not init_git_repo(args.commit_results): - return 1 # Check our capability to drop caches and ask pass if needed KernelDropCaches.check() @@ -181,7 +180,19 @@ def main(argv=None): else: suite = 
loader.loadTestsFromModule(oeqa.buildperf) + # Save test metadata + metadata = metadata_from_bb() + log.info("Testing Git revision branch:commit %s:%s (%s)", + metadata['layers']['meta']['branch'], + metadata['layers']['meta']['commit'], + metadata['layers']['meta']['commit_count']) + if args.xml: + write_metadata_file(os.path.join(out_dir, 'metadata.xml'), metadata) + else: + with open(os.path.join(out_dir, 'metadata.json'), 'w') as fobj: + json.dump(metadata, fobj, indent=2) archive_build_conf(out_dir) + runner = BuildPerfTestRunner(out_dir, verbosity=2) # Suppress logger output to stderr so that the output from unittest @@ -194,12 +205,13 @@ def main(argv=None): # Restore logger output to stderr log.handlers[0].setLevel(log.level) + if args.xml: + result.write_results_xml() + else: + result.write_results_json() + result.write_buildstats_json() if args.globalres_file: - result.update_globalres_file(args.globalres_file) - if args.commit_results: - result.git_commit_results(args.commit_results, - args.commit_results_branch, - args.commit_results_tag) + update_globalres_file(result, args.globalres_file, metadata) if result.wasSuccessful(): return 0 diff --git a/import-layers/yocto-poky/scripts/oe-buildenv-internal b/import-layers/yocto-poky/scripts/oe-buildenv-internal index 9fae3b4ec..c8905524f 100755 --- a/import-layers/yocto-poky/scripts/oe-buildenv-internal +++ b/import-layers/yocto-poky/scripts/oe-buildenv-internal @@ -18,6 +18,18 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +if ! $(return >/dev/null 2>&1) ; then + echo 'oe-buildenv-internal: error: this script must be sourced' + echo '' + echo 'Usage: . $OEROOT/scripts/oe-buildenv-internal &&' + echo '' + echo 'OpenEmbedded oe-buildenv-internal - an internal script that is' + echo 'used in oe-init-build-env and oe-init-build-env-memres to' + echo 'initialize oe build environment' + echo '' + exit 2 +fi + # It is assumed OEROOT is already defined when this is called if [ -z "$OEROOT" ]; then echo >&2 "Error: OEROOT is not defined!" diff --git a/import-layers/yocto-poky/scripts/oe-find-native-sysroot b/import-layers/yocto-poky/scripts/oe-find-native-sysroot index 81d62b888..235a67c95 100755 --- a/import-layers/yocto-poky/scripts/oe-find-native-sysroot +++ b/import-layers/yocto-poky/scripts/oe-find-native-sysroot @@ -2,14 +2,14 @@ # # Find a native sysroot to use - either from an in-tree OE build or # from a toolchain installation. It then ensures the variable -# $OECORE_NATIVE_SYSROOT is set to the sysroot's base directory, and sets +# $OECORE_NATIVE_SYSROOT is set to the sysroot's base directory, and sets # $PSEUDO to the path of the pseudo binary. # # This script is intended to be run within other scripts by source'ing # it, e.g: # # SYSROOT_SETUP_SCRIPT=`which oe-find-native-sysroot` -# . $SYSROOT_SETUP_SCRIPT +# . $SYSROOT_SETUP_SCRIPT # # This script will terminate execution of your calling program unless # you set a variable $SKIP_STRICT_SYSROOT_CHECK to a non-empty string @@ -30,6 +30,38 @@ # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+if [ "$1" = '--help' -o "$1" = '-h' -o $# -ne 1 ] ; then + echo 'Usage: oe-find-native-sysroot [-h|--help]' + echo '' + echo 'OpenEmbedded find-native-sysroot - helper script to set' + echo 'environment variables OECORE_NATIVE_SYSROOT and PSEUDO' + echo 'to the path of the native sysroot directory and pseudo' + echo 'executable binary' + echo '' + echo 'options:' + echo ' recipe its STAGING_DIR_NATIVE is used as native sysroot' + echo ' -h, --help show this help message and exit' + echo '' + exit 2 +fi + +# Global vars +BITBAKE_E="" +set_oe_native_sysroot(){ + echo "Running bitbake -e $1" + BITBAKE_E="`bitbake -e $1`" + OECORE_NATIVE_SYSROOT=`echo "$BITBAKE_E" | grep ^STAGING_DIR_NATIVE | cut -d '"' -f2` + + if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then + # This indicates that there was an error running bitbake -e that + # the user needs to be informed of + echo "There was an error running bitbake to determine STAGING_DIR_NATIVE" + echo "Here is the output from bitbake -e $1" + echo $BITBAKE_E + exit 1 + fi +} + if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then BITBAKE=`which bitbake 2> /dev/null` if [ "x$BITBAKE" != "x" ]; then @@ -40,10 +72,10 @@ if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then exit 1 fi touch conf/sanity.conf - OECORE_NATIVE_SYSROOT=`bitbake -e | grep ^STAGING_DIR_NATIVE | cut -d '"' -f2` + set_oe_native_sysroot $1 rm -f conf/sanity.conf else - OECORE_NATIVE_SYSROOT=`bitbake -e | grep ^STAGING_DIR_NATIVE | cut -d '"' -f2` + set_oe_native_sysroot $1 fi else echo "Error: Unable to locate bitbake command." @@ -55,21 +87,15 @@ if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then fi fi -if [ "x$OECORE_NATIVE_SYSROOT" = "x" ]; then - # This indicates that there was an error running bitbake -e that - # the user needs to be informed of - echo "There was an error running bitbake to determine STAGING_DIR_NATIVE" - echo "Here is the output from bitbake -e" - bitbake -e - exit 1 -fi - -# Set up pseudo command -if [ ! -e "$OECORE_NATIVE_SYSROOT/usr/bin/pseudo" ]; then - echo "Error: Unable to find pseudo binary in $OECORE_NATIVE_SYSROOT/usr/bin/" +if [ ! -e "$OECORE_NATIVE_SYSROOT/" ]; then + echo "Error: $OECORE_NATIVE_SYSROOT doesn't exist." if [ "x$OECORE_DISTRO_VERSION" = "x" ]; then - echo "Have you run 'bitbake meta-ide-support'?" + if [[ $1 =~ .*native.* ]]; then + echo "Have you run 'bitbake $1 -caddto_recipe_sysroot'?" + else + echo "Have you run 'bitbake $1 '?" + fi else echo "This shouldn't happen - something is wrong with your toolchain installation" fi @@ -78,4 +104,12 @@ if [ ! -e "$OECORE_NATIVE_SYSROOT/usr/bin/pseudo" ]; then exit 1 fi fi -PSEUDO="$OECORE_NATIVE_SYSROOT/usr/bin/pseudo" + +# Set up pseudo command +pseudo="$OECORE_NATIVE_SYSROOT/usr/bin/pseudo" +if [ -e "$pseudo" ]; then + echo "PSEUDO=$pseudo" + PSEUDO="$pseudo" +else + echo "PSEUDO $pseudo is not found." +fi diff --git a/import-layers/yocto-poky/scripts/oe-git-archive b/import-layers/yocto-poky/scripts/oe-git-archive new file mode 100755 index 000000000..ab19cb9aa --- /dev/null +++ b/import-layers/yocto-poky/scripts/oe-git-archive @@ -0,0 +1,271 @@ +#!/usr/bin/python3 +# +# Helper script for committing data to git and pushing upstream +# +# Copyright (c) 2017, Intel Corporation. +# +# This program is free software; you can redistribute it and/or modify it +# under the terms and conditions of the GNU General Public License, +# version 2, as published by the Free Software Foundation. 
+# +# This program is distributed in the hope it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or +# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for +# more details. +# +import argparse +import glob +import json +import logging +import math +import os +import re +import sys +from collections import namedtuple, OrderedDict +from datetime import datetime, timedelta, tzinfo +from operator import attrgetter + +# Import oe and bitbake libs +scripts_path = os.path.dirname(os.path.realpath(__file__)) +sys.path.append(os.path.join(scripts_path, 'lib')) +import scriptpath +scriptpath.add_bitbake_lib_path() +scriptpath.add_oe_lib_path() + +from oeqa.utils.git import GitRepo, GitError +from oeqa.utils.metadata import metadata_from_bb + + +# Setup logging +logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") +log = logging.getLogger() + + +class ArchiveError(Exception): + """Internal error handling of this script""" + + +def format_str(string, fields): + """Format string using the given fields (dict)""" + try: + return string.format(**fields) + except KeyError as err: + raise ArchiveError("Unable to expand string '{}': unknown field {} " + "(valid fields are: {})".format( + string, err, ', '.join(sorted(fields.keys())))) + + +def init_git_repo(path, no_create, bare): + """Initialize local Git repository""" + path = os.path.abspath(path) + if os.path.isfile(path): + raise ArchiveError("Invalid Git repo at {}: path exists but is not a " + "directory".format(path)) + if not os.path.isdir(path) or not os.listdir(path): + if no_create: + raise ArchiveError("No git repo at {}, refusing to create " + "one".format(path)) + if not os.path.isdir(path): + try: + os.mkdir(path) + except (FileNotFoundError, PermissionError) as err: + raise ArchiveError("Failed to mkdir {}: {}".format(path, err)) + if not os.listdir(path): + log.info("Initializing a new Git repo at %s", path) + repo = GitRepo.init(path, bare) + try: + repo = GitRepo(path, is_topdir=True) + except GitError: + raise ArchiveError("Non-empty directory that is not a Git repository " + "at {}\nPlease specify an existing Git repository, " + "an empty directory or a non-existing directory " + "path.".format(path)) + return repo + + +def git_commit_data(repo, data_dir, branch, message, exclude, notes): + """Commit data into a Git repository""" + log.info("Committing data into to branch %s", branch) + tmp_index = os.path.join(repo.git_dir, 'index.oe-git-archive') + try: + # Create new tree object from the data + env_update = {'GIT_INDEX_FILE': tmp_index, + 'GIT_WORK_TREE': os.path.abspath(data_dir)} + repo.run_cmd('add .', env_update) + + # Remove files that are excluded + if exclude: + repo.run_cmd(['rm', '--cached'] + [f for f in exclude], env_update) + + tree = repo.run_cmd('write-tree', env_update) + + # Create new commit object from the tree + parent = repo.rev_parse(branch) + git_cmd = ['commit-tree', tree, '-m', message] + if parent: + git_cmd += ['-p', parent] + commit = repo.run_cmd(git_cmd, env_update) + + # Create git notes + for ref, filename in notes: + ref = ref.format(branch_name=branch) + repo.run_cmd(['notes', '--ref', ref, 'add', + '-F', os.path.abspath(filename), commit]) + + # Update branch head + git_cmd = ['update-ref', 'refs/heads/' + branch, commit] + if parent: + git_cmd.append(parent) + repo.run_cmd(git_cmd) + + # Update current HEAD, if we're on branch 'branch' + if not repo.bare and repo.get_current_branch() == branch: + 
log.info("Updating %s HEAD to latest commit", repo.top_dir) + repo.run_cmd('reset --hard') + + return commit + finally: + if os.path.exists(tmp_index): + os.unlink(tmp_index) + + +def expand_tag_strings(repo, name_pattern, msg_subj_pattern, msg_body_pattern, + keywords): + """Generate tag name and message, with support for running id number""" + keyws = keywords.copy() + # Tag number is handled specially: if not defined, we autoincrement it + if 'tag_number' not in keyws: + # Fill in all other fields than 'tag_number' + keyws['tag_number'] = '{tag_number}' + tag_re = format_str(name_pattern, keyws) + # Replace parentheses for proper regex matching + tag_re = tag_re.replace('(', '\(').replace(')', '\)') + '$' + # Inject regex group pattern for 'tag_number' + tag_re = tag_re.format(tag_number='(?P[0-9]{1,5})') + + keyws['tag_number'] = 0 + for existing_tag in repo.run_cmd('tag').splitlines(): + match = re.match(tag_re, existing_tag) + + if match and int(match.group('tag_number')) >= keyws['tag_number']: + keyws['tag_number'] = int(match.group('tag_number')) + 1 + + tag_name = format_str(name_pattern, keyws) + msg_subj= format_str(msg_subj_pattern.strip(), keyws) + msg_body = format_str(msg_body_pattern, keyws) + return tag_name, msg_subj + '\n\n' + msg_body + + +def parse_args(argv): + """Parse command line arguments""" + parser = argparse.ArgumentParser( + description="Commit data to git and push upstream", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + + parser.add_argument('--debug', '-D', action='store_true', + help="Verbose logging") + parser.add_argument('--git-dir', '-g', required=True, + help="Local git directory to use") + parser.add_argument('--no-create', action='store_true', + help="If GIT_DIR is not a valid Git repository, do not " + "try to create one") + parser.add_argument('--bare', action='store_true', + help="Initialize a bare repository when creating a " + "new one") + parser.add_argument('--push', '-p', nargs='?', default=False, const=True, + help="Push to remote") + parser.add_argument('--branch-name', '-b', + default='{hostname}/{branch}/{machine}', + help="Git branch name (pattern) to use") + parser.add_argument('--no-tag', action='store_true', + help="Do not create Git tag") + parser.add_argument('--tag-name', '-t', + default='{hostname}/{branch}/{machine}/{commit_count}-g{commit}/{tag_number}', + help="Tag name (pattern) to use") + parser.add_argument('--commit-msg-subject', + default='Results of {branch}:{commit} on {hostname}', + help="Subject line (pattern) to use in the commit message") + parser.add_argument('--commit-msg-body', + default='branch: {branch}\ncommit: {commit}\nhostname: {hostname}', + help="Commit message body (pattern)") + parser.add_argument('--tag-msg-subject', + default='Test run #{tag_number} of {branch}:{commit} on {hostname}', + help="Subject line (pattern) of the tag message") + parser.add_argument('--tag-msg-body', + default='', + help="Tag message body (pattern)") + parser.add_argument('--exclude', action='append', default=[], + help="Glob to exclude files from the commit. Relative " + "to DATA_DIR. May be specified multiple times") + parser.add_argument('--notes', nargs=2, action='append', default=[], + metavar=('GIT_REF', 'FILE'), + help="Add a file as a note under refs/notes/GIT_REF. " + "{branch_name} in GIT_REF will be expanded to the " + "actual target branch name (specified by " + "--branch-name). 
This option may be specified " + "multiple times.") + parser.add_argument('data_dir', metavar='DATA_DIR', + help="Data to commit") + return parser.parse_args(argv) + + +def main(argv=None): + """Script entry point""" + args = parse_args(argv) + if args.debug: + log.setLevel(logging.DEBUG) + + try: + if not os.path.isdir(args.data_dir): + raise ArchiveError("Not a directory: {}".format(args.data_dir)) + + data_repo = init_git_repo(args.git_dir, args.no_create, args.bare) + + # Get keywords to be used in tag and branch names and messages + metadata = metadata_from_bb() + keywords = {'hostname': metadata['hostname'], + 'branch': metadata['layers']['meta']['branch'], + 'commit': metadata['layers']['meta']['commit'], + 'commit_count': metadata['layers']['meta']['commit_count'], + 'machine': metadata['config']['MACHINE']} + + # Expand strings early in order to avoid getting into inconsistent + # state (e.g. no tag even if data was committed) + commit_msg = format_str(args.commit_msg_subject.strip(), keywords) + commit_msg += '\n\n' + format_str(args.commit_msg_body, keywords) + branch_name = format_str(args.branch_name, keywords) + tag_name = None + if not args.no_tag and args.tag_name: + tag_name, tag_msg = expand_tag_strings(data_repo, args.tag_name, + args.tag_msg_subject, + args.tag_msg_body, keywords) + + # Commit data + commit = git_commit_data(data_repo, args.data_dir, branch_name, + commit_msg, args.exclude, args.notes) + + # Create tag + if tag_name: + log.info("Creating tag %s", tag_name) + data_repo.run_cmd(['tag', '-a', '-m', tag_msg, tag_name, commit]) + + # Push data to remote + if args.push: + cmd = ['push', '--tags'] + # If no remote is given we push with the default settings from + # gitconfig + if args.push is not True: + notes_refs = ['refs/notes/' + ref.format(branch_name=branch_name) + for ref, _ in args.notes] + cmd.extend([args.push, branch_name] + notes_refs) + log.info("Pushing data to remote") + data_repo.run_cmd(cmd) + + except ArchiveError as err: + log.error(str(err)) + return 1 + + return 0 + +if __name__ == "__main__": + sys.exit(main()) diff --git a/import-layers/yocto-poky/scripts/oe-git-proxy b/import-layers/yocto-poky/scripts/oe-git-proxy index 0078e9545..7a43fe6a6 100755 --- a/import-layers/yocto-poky/scripts/oe-git-proxy +++ b/import-layers/yocto-poky/scripts/oe-git-proxy @@ -18,6 +18,27 @@ # AUTHORS # Darren Hart +if [ $# -lt 2 -o "$1" = '--help' -o "$1" = '-h' ] ; then + echo 'oe-git-proxy: error: the following arguments are required: host port' + echo 'Usage: oe-git-proxy host port' + echo '' + echo 'OpenEmbedded git-proxy - a simple tool to be used via GIT_PROXY_COMMAND.' + echo 'It uses socat to make SOCKS or HTTPS proxy connections.' + echo 'It uses ALL_PROXY to determine the proxy server, protocol, and port.' + echo 'It uses NO_PROXY to skip using the proxy for a comma delimited list' + echo 'of hosts, host globs (*.example.com), IPs, or CIDR masks (192.168.1.0/24).' + echo 'It is known to work with both bash and dash shells.runs native tools' + echo '' + echo 'arguments:' + echo ' host proxy host to use' + echo ' port proxy port to use' + echo '' + echo 'options:' + echo ' -h, --help show this help message and exit' + echo '' + exit 2 +fi + # Locate the netcat binary SOCAT=$(which socat 2>/dev/null) if [ $? 
-ne 0 ]; then diff --git a/import-layers/yocto-poky/scripts/oe-pkgdata-util b/import-layers/yocto-poky/scripts/oe-pkgdata-util index dbffd6a36..677effeef 100755 --- a/import-layers/yocto-poky/scripts/oe-pkgdata-util +++ b/import-layers/yocto-poky/scripts/oe-pkgdata-util @@ -174,15 +174,16 @@ def read_value(args): logger.error("No packages specified") sys.exit(1) - def readvar(pkgdata_file, valuename): + def readvar(pkgdata_file, valuename, mappedpkg): val = "" with open(pkgdata_file, 'r') as f: for line in f: - if line.startswith(valuename + ":"): + if (line.startswith(valuename + ":") or + line.startswith(valuename + "_" + mappedpkg + ":")): val = line.split(': ', 1)[1].rstrip() return val - logger.debug("read-value('%s', '%s' '%s'" % (args.pkgdata_dir, args.valuename, packages)) + logger.debug("read-value('%s', '%s' '%s')" % (args.pkgdata_dir, args.valuename, packages)) for package in packages: pkg_split = package.split('_') pkg_name = pkg_split[0] @@ -192,18 +193,17 @@ def read_value(args): if os.path.exists(revlink): mappedpkg = os.path.basename(os.readlink(revlink)) qvar = args.valuename + value = readvar(revlink, qvar, mappedpkg) if qvar == "PKGSIZE": - # append packagename - qvar = "%s_%s" % (args.valuename, mappedpkg) # PKGSIZE is now in bytes, but we we want it in KB - pkgsize = (int(readvar(revlink, qvar)) + 1024 // 2) // 1024 + pkgsize = (int(value) + 1024 // 2) // 1024 value = "%d" % pkgsize - else: - value = readvar(revlink, qvar) if args.prefix_name: print('%s %s' % (pkg_name, value)) else: print(value) + else: + logger.debug("revlink %s does not exist", revlink) def lookup_pkglist(pkgs, pkgdata_dir, reverse): if reverse: @@ -325,8 +325,15 @@ def package_info(args): recipe_version = recipe_version + "-" + mappings[pkg]['PR'] pkg_size = mappings[pkg]['PKGSIZE'] - items.append("%s %s %s %s %s" % - (pkg, pkg_version, recipe, recipe_version, pkg_size)) + line = "%s %s %s %s %s" % (pkg, pkg_version, recipe, recipe_version, pkg_size) + + if args.extra: + for var in args.extra: + val = mappings[pkg][var].strip() + val = re.sub(r'\s+', ' ', val) + line += ' "%s"' % val + + items.append(line) print('\n'.join(items)) def get_recipe_pkgs(pkgdata_dir, recipe, unpackaged): @@ -530,6 +537,7 @@ def main(): description='Looks up the specified runtime package(s) and display information') parser_package_info.add_argument('pkg', nargs='*', help='Runtime package name to look up') parser_package_info.add_argument('-f', '--file', help='Read package names from the specified file (one per line, first field only)') + parser_package_info.add_argument('-e', '--extra', help='Extra variables to display, e.g., LICENSE (can be specified multiple times)', action='append') parser_package_info.set_defaults(func=package_info) parser_find_path = subparsers.add_parser('find-path', @@ -570,7 +578,7 @@ def main(): logger.debug('Found bitbake path: %s' % bitbakepath) tinfoil = tinfoil_init() try: - args.pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR', True) + args.pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR') finally: tinfoil.shutdown() logger.debug('Value of PKGDATA_DIR is "%s"' % args.pkgdata_dir) diff --git a/import-layers/yocto-poky/scripts/oe-publish-sdk b/import-layers/yocto-poky/scripts/oe-publish-sdk index 4fe8974de..9f7963c24 100755 --- a/import-layers/yocto-poky/scripts/oe-publish-sdk +++ b/import-layers/yocto-poky/scripts/oe-publish-sdk @@ -114,9 +114,9 @@ def publish(args): # Setting up the git repo if not is_remote: - cmd = 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! 
-e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; echo "*.pyc\n*.pyo" > .gitignore; fi; git add -A .; git config user.email "oe@oe.oe" && git config user.name "OE" && git commit -q -m "init repo" || true; git update-server-info' % (destination, destination) + cmd = 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; echo "*.pyc\n*.pyo\npyshtables.py" > .gitignore; fi; git add -A .; git config user.email "oe@oe.oe" && git config user.name "OE" && git commit -q -m "init repo" || true; git update-server-info' % (destination, destination) else: - cmd = "ssh %s 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; echo '*.pyc\n*.pyo' > .gitignore; fi; git add -A .; git config user.email 'oe@oe.oe' && git config user.name 'OE' && git commit -q -m \"init repo\" || true; git update-server-info'" % (host, destdir, destdir) + cmd = "ssh %s 'set -e; mkdir -p %s/layers; cd %s/layers; if [ ! -e .git ]; then git init .; mv .git/hooks/post-update.sample .git/hooks/post-update; echo '*.pyc\n*.pyo\npyshtables.py' > .gitignore; fi; git add -A .; git config user.email 'oe@oe.oe' && git config user.name 'OE' && git commit -q -m \"init repo\" || true; git update-server-info'" % (host, destdir, destdir) ret = subprocess.call(cmd, shell=True) if ret == 0: logger.info('SDK published successfully') diff --git a/import-layers/yocto-poky/scripts/oe-run-native b/import-layers/yocto-poky/scripts/oe-run-native index 496e34f70..1131122e6 100755 --- a/import-layers/yocto-poky/scripts/oe-run-native +++ b/import-layers/yocto-poky/scripts/oe-run-native @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash # # Copyright (c) 2016, Intel Corporation. # All Rights Reserved @@ -22,27 +22,47 @@ # if [ $# -lt 1 -o "$1" = '--help' -o "$1" = '-h' ] ; then - echo "Usage: $0 [parameters]" + echo 'oe-run-native: the following arguments are required: ' + echo 'Usage: oe-run-native native-recipe tool [parameters]' + echo '' + echo 'OpenEmbedded run-native - runs native tools' + echo '' + echo 'arguments:' + echo ' native-recipe The recipe which provoides tool' + echo ' tool Native tool to run' + echo '' + exit 2 +fi + +native_recipe="$1" +tool="$2" + +if [ "${native_recipe%-native}" = "$native_recipe" ]; then + echo Error: $native_recipe is not a native recipe + echo Error: Use \"oe-run-native -h\" for help exit 1 fi +shift + SYSROOT_SETUP_SCRIPT=`which oe-find-native-sysroot 2> /dev/null` if [ -z "$SYSROOT_SETUP_SCRIPT" ]; then echo "Error: Unable to find oe-find-native-sysroot script" exit 1 fi -. $SYSROOT_SETUP_SCRIPT +. $SYSROOT_SETUP_SCRIPT $native_recipe -OLDPATH=$PATH +OLD_PATH=$PATH # look for a tool only in native sysroot PATH=$OECORE_NATIVE_SYSROOT/usr/bin:$OECORE_NATIVE_SYSROOT/bin:$OECORE_NATIVE_SYSROOT/usr/sbin:$OECORE_NATIVE_SYSROOT/sbin -tool=`/usr/bin/which $1 2>/dev/null` +tool_find=`/usr/bin/which $tool 2>/dev/null` -if [ -n "$tool" ] ; then +if [ -n "$tool_find" ] ; then # add old path to allow usage of host tools PATH=$PATH:$OLD_PATH $@ else - echo "Error: Unable to find '$1' in native sysroot" + echo "Error: Unable to find '$tool' in $PATH" + echo "Error: Have you run 'bitbake $native_recipe -caddto_recipe_sysroot'?" 
exit 1 fi diff --git a/import-layers/yocto-poky/scripts/oe-selftest b/import-layers/yocto-poky/scripts/oe-selftest index d9ffd40e8..52366b1c8 100755 --- a/import-layers/yocto-poky/scripts/oe-selftest +++ b/import-layers/yocto-poky/scripts/oe-selftest @@ -46,6 +46,7 @@ import argparse_oe import oeqa.selftest import oeqa.utils.ftools as ftools from oeqa.utils.commands import runCmd, get_bb_var, get_test_layer +from oeqa.utils.metadata import metadata_from_bb, write_metadata_file from oeqa.selftest.base import oeSelfTest, get_available_machines try: @@ -61,7 +62,8 @@ log_prefix = "oe-selftest-" + t.strftime("%Y%m%d-%H%M%S") def logger_create(): log_file = log_prefix + ".log" - if os.path.exists("oe-selftest.log"): os.remove("oe-selftest.log") + if os.path.lexists("oe-selftest.log"): + os.remove("oe-selftest.log") os.symlink(log_file, "oe-selftest.log") log = logging.getLogger("selftest") @@ -85,7 +87,7 @@ def logger_create(): log = logger_create() def get_args_parser(): - description = "Script that runs unit tests agains bitbake and other Yocto related tools. The goal is to validate tools functionality and metadata integrity. Refer to https://wiki.yoctoproject.org/wiki/Oe-selftest for more information." + description = "Script that runs unit tests against bitbake and other Yocto related tools. The goal is to validate tools functionality and metadata integrity. Refer to https://wiki.yoctoproject.org/wiki/Oe-selftest for more information." parser = argparse_oe.ArgumentParser(description=description) group = parser.add_mutually_exclusive_group(required=True) group.add_argument('-r', '--run-tests', required=False, action='store', nargs='*', dest="run_tests", default=None, help='Select what tests to run (modules, classes or test methods). Format should be: ..') @@ -106,11 +108,17 @@ def get_args_parser(): help='List all tags that have been set to test cases.') parser.add_argument('--machine', required=False, dest='machine', choices=['random', 'all'], default=None, help='Run tests on different machines (random/all).') + parser.add_argument('--repository', required=False, dest='repository', default='', action='store', + help='Submit test results to a repository') return parser +builddir = None + def preflight_check(): + global builddir + log.info("Checking that everything is in order before running the tests") if not os.environ.get("BUILDDIR"): @@ -123,7 +131,27 @@ def preflight_check(): os.chdir(builddir) if not "meta-selftest" in get_bb_var("BBLAYERS"): - log.error("You don't seem to have the meta-selftest layer in BBLAYERS") + log.warn("meta-selftest layer not found in BBLAYERS, adding it") + meta_selftestdir = os.path.join( + get_bb_var("BBLAYERS_FETCH_DIR"), + 'meta-selftest') + if os.path.isdir(meta_selftestdir): + runCmd("bitbake-layers add-layer %s" %meta_selftestdir) + else: + log.error("could not locate meta-selftest in:\n%s" + %meta_selftestdir) + return False + + if "buildhistory.bbclass" in get_bb_var("BBINCLUDED"): + log.error("You have buildhistory enabled already and this isn't recommended for selftest, please disable it first.") + return False + + if get_bb_var("PRSERV_HOST"): + log.error("Please unset PRSERV_HOST in order to run oe-selftest") + return False + + if get_bb_var("SANITY_TESTED_DISTROS"): + log.error("Please unset SANITY_TESTED_DISTROS in order to run oe-selftest") return False log.info("Running bitbake -p") @@ -132,7 +160,7 @@ def preflight_check(): return True def add_include(): - builddir = os.environ.get("BUILDDIR") + global builddir if "#include added by 
oe-selftest.py" \ not in ftools.read_file(os.path.join(builddir, "conf/local.conf")): log.info("Adding: \"include selftest.inc\" in local.conf") @@ -146,7 +174,7 @@ def add_include(): "\n#include added by oe-selftest.py\ninclude bblayers.inc") def remove_include(): - builddir = os.environ.get("BUILDDIR") + global builddir if builddir is None: return if "#include added by oe-selftest.py" \ @@ -162,18 +190,21 @@ def remove_include(): "\n#include added by oe-selftest.py\ninclude bblayers.inc") def remove_inc_files(): + global builddir + if builddir is None: + return try: - os.remove(os.path.join(os.environ.get("BUILDDIR"), "conf/selftest.inc")) + os.remove(os.path.join(builddir, "conf/selftest.inc")) for root, _, files in os.walk(get_test_layer()): for f in files: if f == 'test_recipe.inc': os.remove(os.path.join(root, f)) - except (AttributeError, OSError,) as e: # AttributeError may happen if BUILDDIR is not set + except OSError as e: pass for incl_file in ['conf/bblayers.inc', 'conf/machine.inc']: try: - os.remove(os.path.join(os.environ.get("BUILDDIR"), incl_file)) + os.remove(os.path.join(builddir, incl_file)) except: pass @@ -336,10 +367,15 @@ def list_testsuite_by(criteria, keyword): # Get a testsuite based on 'keyword' # criteria: name, class, module, id, tag # keyword: a list of tests, classes, modules, ids, tags - - ts = sorted([ (tc.tcid, tc.tctag, tc.tcname, tc.tcclass, tc.tcmodule) for tc in get_testsuite_by(criteria, keyword) ]) - - print('%-4s\t%-20s\t%-60s\t%-25s\t%-20s' % ('id', 'tag', 'name', 'class', 'module')) + def tc_key(t): + if t[0] is None: + return (0,) + t[1:] + return t + # tcid may be None if no ID was assigned, in which case sorted() will throw + # a TypeError as Python 3 does not allow comparison (<,<=,>=,>) of + # heterogeneous types, handle this by using a custom key generator + ts = sorted([ (tc.tcid, tc.tctag, tc.tcname, tc.tcclass, tc.tcmodule) \ + for tc in get_testsuite_by(criteria, keyword) ], key=tc_key) print('_' * 150) for t in ts: if isinstance(t[1], (tuple, list)): @@ -386,7 +422,7 @@ def coverage_setup(coverage_source, coverage_include, coverage_omit): """ Set up the coverage measurement for the testcases to be run """ import datetime import subprocess - builddir = os.environ.get("BUILDDIR") + global builddir pokydir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) curcommit= subprocess.check_output(["git", "--git-dir", os.path.join(pokydir, ".git"), "rev-parse", "HEAD"]).decode('utf-8') coveragerc = "%s/.coveragerc" % builddir @@ -463,6 +499,9 @@ def main(): sys.path.extend(layer_libdirs) imp.reload(oeqa.selftest) + # act like bitbake and enforce en_US.UTF-8 locale + os.environ["LC_ALL"] = "en_US.UTF-8" + if args.run_tests_by and len(args.run_tests_by) >= 2: valid_options = ['name', 'class', 'module', 'id', 'tag'] if args.run_tests_by[0] not in valid_options: @@ -564,6 +603,76 @@ def main(): log.info("Finished") + if args.repository: + import git + # Commit tests results to repository + metadata = metadata_from_bb() + git_dir = os.path.join(os.getcwd(), 'selftest') + if not os.path.isdir(git_dir): + os.mkdir(git_dir) + + log.debug('Checking for git repository in %s' % git_dir) + try: + repo = git.Repo(git_dir) + except git.exc.InvalidGitRepositoryError: + log.debug("Couldn't find git repository %s; " + "cloning from %s" % (git_dir, args.repository)) + repo = git.Repo.clone_from(args.repository, git_dir) + + r_branches = repo.git.branch(r=True) + r_branches = set(r_branches.replace('origin/', '').split()) + l_branches = {str(branch) 
for branch in repo.branches} + branch = '%s/%s/%s' % (metadata['hostname'], + metadata['layers']['meta'].get('branch', '(nogit)'), + metadata['config']['MACHINE']) + + if branch in l_branches: + log.debug('Found branch in local repository, checking out') + repo.git.checkout(branch) + elif branch in r_branches: + log.debug('Found branch in remote repository, checking' + ' out and pulling') + repo.git.checkout(branch) + repo.git.pull() + else: + log.debug('New branch %s' % branch) + repo.git.checkout('master') + repo.git.checkout(b=branch) + + cleanResultsDir(repo) + xml_dir = os.path.join(os.getcwd(), log_prefix) + copyResultFiles(xml_dir, git_dir, repo) + metadata_file = os.path.join(git_dir, 'metadata.xml') + write_metadata_file(metadata_file, metadata) + repo.index.add([metadata_file]) + repo.index.write() + + # Get information for commit message + layer_info = '' + for layer, values in metadata['layers'].items(): + layer_info = '%s%-17s = %s:%s\n' % (layer_info, layer, + values.get('branch', '(nogit)'), values.get('commit', '0'*40)) + msg = 'Selftest for build %s of %s for machine %s on %s\n\n%s' % ( + log_prefix[12:], metadata['distro']['pretty_name'], + metadata['config']['MACHINE'], metadata['hostname'], layer_info) + + log.debug('Commiting results to local repository') + repo.index.commit(msg) + if not repo.is_dirty(): + try: + if branch in r_branches: + log.debug('Pushing changes to remote repository') + repo.git.push() + else: + log.debug('Pushing changes to remote repository ' + 'creating new branch') + repo.git.push('-u', 'origin', branch) + except GitCommandError: + log.error('Falied to push to remote repository') + return 1 + else: + log.error('Local repository is dirty, not pushing commits') + if result.wasSuccessful(): return 0 else: @@ -647,6 +756,35 @@ def buildResultClass(args): return StampedResult +def cleanResultsDir(repo): + """ Remove result files from directory """ + + xml_files = [] + directory = repo.working_tree_dir + for f in os.listdir(directory): + path = os.path.join(directory, f) + if os.path.isfile(path) and path.endswith('.xml'): + xml_files.append(f) + repo.index.remove(xml_files, working_tree=True) + +def copyResultFiles(src, dst, repo): + """ Copy result files from src to dst removing the time stamp. """ + + import shutil + + re_time = re.compile("-[0-9]+") + file_list = [] + + for root, subdirs, files in os.walk(src): + tmp_dir = root.replace(src, '').lstrip('/') + for s in subdirs: + os.mkdir(os.path.join(dst, tmp_dir, s)) + for f in files: + file_name = os.path.join(dst, tmp_dir, re_time.sub("", f)) + shutil.copy2(os.path.join(root, f), file_name) + file_list.append(file_name) + repo.index.add(file_list) + class TestRunner(_TestRunner): """Test runner class aware of exporting tests.""" def __init__(self, *args, **kwargs): diff --git a/import-layers/yocto-poky/scripts/oe-setup-builddir b/import-layers/yocto-poky/scripts/oe-setup-builddir index e53f73c9c..ef495517a 100755 --- a/import-layers/yocto-poky/scripts/oe-setup-builddir +++ b/import-layers/yocto-poky/scripts/oe-setup-builddir @@ -23,6 +23,14 @@ if [ -z "$BUILDDIR" ]; then exit 1 fi +if [ "$1" = '--help' -o "$1" = '-h' ]; then + echo 'Usage: oe-setup-builddir' + echo '' + echo "OpenEmbedded setup-builddir - setup build directory $BUILDDIR" + echo '' + exit 2 +fi + mkdir -p "$BUILDDIR/conf" if [ ! 
-d "$BUILDDIR" ]; then diff --git a/import-layers/yocto-poky/scripts/oe-setup-rpmrepo b/import-layers/yocto-poky/scripts/oe-setup-rpmrepo index 917b98b98..df1c61435 100755 --- a/import-layers/yocto-poky/scripts/oe-setup-rpmrepo +++ b/import-layers/yocto-poky/scripts/oe-setup-rpmrepo @@ -23,16 +23,6 @@ # Instead, use OE_TMPDIR for passing this in externally. TMPDIR="$OE_TMPDIR" -function usage() { - echo "Usage: $0 " - echo " : default is $TMPDIR/deploy/rpm" -} - -if [ $# -gt 1 ]; then - usage - exit 1 -fi - setup_tmpdir() { if [ -z "$TMPDIR" ]; then # Try to get TMPDIR from bitbake @@ -53,6 +43,23 @@ setup_tmpdir() { fi } +setup_tmpdir + +function usage() { + echo 'Usage: oe-setup-rpmrepo rpm-dir' + echo '' + echo 'OpenEmbedded setup-rpmrepo - setup rpm repository' + echo '' + echo 'arguments:' + echo " rpm-dir rpm repo directory, default is $TMPDIR/deploy/rpm" + echo '' +} + +if [ $# -gt 1 -o "$1" = '--help' -o "$1" = '-h' ]; then + usage + exit 2 +fi + setup_sysroot() { # Toolchain installs set up $OECORE_NATIVE_SYSROOT in their # environment script. If that variable isn't set, we're @@ -68,7 +75,6 @@ setup_sysroot() { fi } -setup_tmpdir setup_sysroot @@ -83,7 +89,7 @@ if [ ! -d "$RPM_DIR" ]; then exit 1 fi -CREATEREPO=$OECORE_NATIVE_SYSROOT/usr/bin/createrepo +CREATEREPO=$OECORE_NATIVE_SYSROOT/usr/bin/createrepo_c if [ ! -e "$CREATEREPO" ]; then echo "Error: can't find createrepo binary" echo "please run bitbake createrepo-native first" diff --git a/import-layers/yocto-poky/scripts/oe-test b/import-layers/yocto-poky/scripts/oe-test new file mode 100755 index 000000000..a1d282db3 --- /dev/null +++ b/import-layers/yocto-poky/scripts/oe-test @@ -0,0 +1,105 @@ +#!/usr/bin/env python3 + +# OpenEmbedded test tool +# +# Copyright (C) 2016 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import sys +import argparse +import importlib +import logging + +scripts_path = os.path.dirname(os.path.realpath(__file__)) +lib_path = scripts_path + '/lib' +sys.path = sys.path + [lib_path] +import argparse_oe +import scriptutils + +# oe-test is used for testexport and it doesn't have oe lib +# so we just skip adding these libraries (not used in testexport) +try: + import scriptpath + scriptpath.add_oe_lib_path() +except ImportError: + pass + +from oeqa.core.context import OETestContextExecutor + +logger = scriptutils.logger_create('oe-test') + +def _load_test_components(logger): + components = {} + + for path in sys.path: + base_dir = os.path.join(path, 'oeqa') + if os.path.exists(base_dir) and os.path.isdir(base_dir): + for file in os.listdir(base_dir): + comp_name = file + comp_context = os.path.join(base_dir, file, 'context.py') + if os.path.exists(comp_context): + comp_plugin = importlib.import_module('oeqa.%s.%s' % \ + (comp_name, 'context')) + try: + if not issubclass(comp_plugin._executor_class, + OETestContextExecutor): + raise TypeError("Component %s in %s, _executor_class "\ + "isn't derived from OETestContextExecutor."\ + % (comp_name, comp_context)) + + components[comp_name] = comp_plugin._executor_class() + except AttributeError: + raise AttributeError("Component %s in %s don't have "\ + "_executor_class defined." 
% (comp_name, comp_context)) + + return components + +def main(): + parser = argparse_oe.ArgumentParser(description="OpenEmbedded test tool", + add_help=False, + epilog="Use %(prog)s --help to get help on a specific command") + parser.add_argument('-d', '--debug', help='Enable debug output', action='store_true') + parser.add_argument('-q', '--quiet', help='Print only errors', action='store_true') + global_args, unparsed_args = parser.parse_known_args() + + # Help is added here rather than via add_help=True, as we don't want it to + # be handled by parse_known_args() + parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS, + help='show this help message and exit') + + if global_args.debug: + logger.setLevel(logging.DEBUG) + elif global_args.quiet: + logger.setLevel(logging.ERROR) + + components = _load_test_components(logger) + + subparsers = parser.add_subparsers(dest="subparser_name", title='subcommands', metavar='') + subparsers.add_subparser_group('components', 'Test components') + subparsers.required = True + for comp_name in sorted(components.keys()): + comp = components[comp_name] + comp.register_commands(logger, subparsers) + + try: + args = parser.parse_args(unparsed_args, namespace=global_args) + results = args.func(logger, args) + ret = 0 if results.wasSuccessful() else 1 + except SystemExit as err: + if err.code != 0: + raise err + ret = err.code + except argparse_oe.ArgumentUsageError as ae: + parser.error_subcommand(ae.message, ae.subcommand) + + return ret + +if __name__ == '__main__': + try: + ret = main() + except Exception: + ret = 1 + import traceback + traceback.print_exc() + sys.exit(ret) diff --git a/import-layers/yocto-poky/scripts/oe-trim-schemas b/import-layers/yocto-poky/scripts/oe-trim-schemas index 66a1b8d81..7c199ef1d 100755 --- a/import-layers/yocto-poky/scripts/oe-trim-schemas +++ b/import-layers/yocto-poky/scripts/oe-trim-schemas @@ -18,6 +18,15 @@ def children (elem, name=None): l = [e for e in l if e.tag == name] return l +if len(sys.argv) < 2 or sys.argv[1] in ('-h', '--help'): + print('oe-trim-schemas: error: the following arguments are required: schema\n' + 'Usage: oe-trim-schemas schema\n\n' + 'OpenEmbedded trim schemas - remove unneeded schema locale translations\n' + ' from gconf schema files\n\n' + 'arguments:\n' + ' schema gconf schema file to trim\n') + sys.exit(2) + xml = etree.parse(sys.argv[1]) for schema in child(xml.getroot(), "schemalist").getchildren(): diff --git a/import-layers/yocto-poky/scripts/oepydevshell-internal.py b/import-layers/yocto-poky/scripts/oepydevshell-internal.py index a22bec336..04621ae8a 100755 --- a/import-layers/yocto-poky/scripts/oepydevshell-internal.py +++ b/import-layers/yocto-poky/scripts/oepydevshell-internal.py @@ -22,9 +22,16 @@ def cbreaknoecho(fd): old[3] = old[3] &~ termios.ECHO &~ termios.ICANON termios.tcsetattr(fd, termios.TCSADRAIN, old) -if len(sys.argv) != 3: - print("Incorrect parameters") - sys.exit(1) +if len(sys.argv) != 3 or sys.argv[1] in ('-h', '--help'): + print('oepydevshell-internal.py: error: the following arguments are required: pty, pid\n' + 'Usage: oepydevshell-internal.py pty pid\n\n' + 'OpenEmbedded oepydevshell-internal.py - internal script called from meta/classes/devshell.bbclass\n\n' + 'arguments:\n' + ' pty pty device name\n' + ' pid parent process id\n\n' + 'options:\n' + ' -h, --help show this help message and exit\n') + sys.exit(2) pty = open(sys.argv[1], "w+b", 0) parent = int(sys.argv[2]) @@ -38,7 +45,7 @@ readline.parse_and_bind("tab: complete") try: 
readline.read_history_file(histfile) except IOError: - pass + pass try: diff --git a/import-layers/yocto-poky/scripts/postinst-intercepts/update_gio_module_cache b/import-layers/yocto-poky/scripts/postinst-intercepts/update_gio_module_cache index fe468092c..fc3f9d0d6 100644 --- a/import-layers/yocto-poky/scripts/postinst-intercepts/update_gio_module_cache +++ b/import-layers/yocto-poky/scripts/postinst-intercepts/update_gio_module_cache @@ -3,5 +3,7 @@ set -e PSEUDO_UNLOAD=1 qemuwrapper -L $D -E LD_LIBRARY_PATH=$D${libdir}:$D${base_libdir} \ - $D${libexecdir}/${binprefix}gio-querymodules $D${libdir}/gio/modules/ + $D${libexecdir}/${binprefix}gio-querymodules $D${libdir}/gio/modules/ +[ ! -e $D${libdir}/gio/modules/giomodule.cache ] || + chown root:root $D${libdir}/gio/modules/giomodule.cache diff --git a/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/draw.py b/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/draw.py index 8c574be50..201ce4577 100644 --- a/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/draw.py +++ b/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/draw.py @@ -133,6 +133,16 @@ TASK_COLOR_PACKAGE = (0.0, 1.00, 1.00, 1.0) # Package Write RPM/DEB/IPK task color TASK_COLOR_PACKAGE_WRITE = (0.0, 0.50, 0.50, 1.0) +# Distinct colors used for different disk volumnes. +# If we have more volumns, colors get re-used. +VOLUME_COLORS = [ + (1.0, 1.0, 0.00, 1.0), + (0.0, 1.00, 0.00, 1.0), + (1.0, 0.00, 1.00, 1.0), + (0.0, 0.00, 1.00, 1.0), + (0.0, 1.00, 1.00, 1.0), +] + # Process states STATE_UNDEFINED = 0 STATE_RUNNING = 1 @@ -256,7 +266,7 @@ def draw_chart(ctx, color, fill, chart_bounds, data, proc_tree, data_range): # avoid divide by zero if max_y == 0: max_y = 1.0 - xscale = float (chart_bounds[2]) / max_x + xscale = float (chart_bounds[2]) / (max_x - x_shift) # If data_range is given, scale the chart so that the value range in # data_range matches the chart bounds exactly. # Otherwise, scale so that the actual data matches the chart bounds. 
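
The render_charts() hunks that follow add a stacked per-volume disk usage chart: each volume is plotted relative to its own minimum recorded usage, the usage of volumes[0:i] is summed so the bands stack on top of one another, and legend colors are cycled through VOLUME_COLORS with a modulo index. A minimal standalone sketch of that accumulation (illustrative names, assuming samples are (time, {volume: used_bytes}) pairs; not part of the patch itself):

    # Illustrative sketch of the stacked accumulation used by the new
    # monitor_disk rendering: build one cumulative series per stack depth,
    # widest stack first, each value relative to the volume's own minimum.
    def stacked_disk_series(samples):
        volumes = sorted({v for _, rec in samples for v in rec})
        min_used = {v: min(rec[v] for _, rec in samples if v in rec)
                    for v in volumes}
        series = []
        for i in range(len(volumes), 0, -1):
            series.append([(t, sum(rec[v] - min_used[v]
                                   for v in volumes[:i] if v in rec))
                           for t, rec in samples])
        return series

Drawing the widest sum first and the single-volume series last is what makes the bands appear stacked rather than overlapping.
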
@@ -321,6 +331,16 @@ def extents(options, xscale, trace): w = int ((end - start) * sec_w_base * xscale) + 2 * off_x h = proc_h * processes + header_h + 2 * off_y + if options.charts: + if trace.cpu_stats: + h += 30 + bar_h + if trace.disk_stats: + h += 30 + bar_h + if trace.monitor_disk: + h += 30 + bar_h + if trace.mem_stats: + h += meminfo_bar_h + return (w, h) def clip_visible(clip, rect): @@ -334,80 +354,134 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w): proc_tree = options.proc_tree(trace) # render bar legend - ctx.set_font_size(LEGEND_FONT_SIZE) - - draw_legend_box(ctx, "CPU (user+sys)", CPU_COLOR, off_x, curr_y+20, leg_s) - draw_legend_box(ctx, "I/O (wait)", IO_COLOR, off_x + 120, curr_y+20, leg_s) - - # render I/O wait - chart_rect = (off_x, curr_y+30, w, bar_h) - if clip_visible (clip, chart_rect): - draw_box_ticks (ctx, chart_rect, sec_w) - draw_annotations (ctx, proc_tree, trace.times, chart_rect) - draw_chart (ctx, IO_COLOR, True, chart_rect, \ - [(sample.time, sample.user + sample.sys + sample.io) for sample in trace.cpu_stats], \ - proc_tree, None) - # render CPU load - draw_chart (ctx, CPU_COLOR, True, chart_rect, \ - [(sample.time, sample.user + sample.sys) for sample in trace.cpu_stats], \ - proc_tree, None) - - curr_y = curr_y + 30 + bar_h + if trace.cpu_stats: + ctx.set_font_size(LEGEND_FONT_SIZE) + + draw_legend_box(ctx, "CPU (user+sys)", CPU_COLOR, off_x, curr_y+20, leg_s) + draw_legend_box(ctx, "I/O (wait)", IO_COLOR, off_x + 120, curr_y+20, leg_s) + + # render I/O wait + chart_rect = (off_x, curr_y+30, w, bar_h) + if clip_visible (clip, chart_rect): + draw_box_ticks (ctx, chart_rect, sec_w) + draw_annotations (ctx, proc_tree, trace.times, chart_rect) + draw_chart (ctx, IO_COLOR, True, chart_rect, \ + [(sample.time, sample.user + sample.sys + sample.io) for sample in trace.cpu_stats], \ + proc_tree, None) + # render CPU load + draw_chart (ctx, CPU_COLOR, True, chart_rect, \ + [(sample.time, sample.user + sample.sys) for sample in trace.cpu_stats], \ + proc_tree, None) + + curr_y = curr_y + 30 + bar_h # render second chart - draw_legend_line(ctx, "Disk throughput", DISK_TPUT_COLOR, off_x, curr_y+20, leg_s) - draw_legend_box(ctx, "Disk utilization", IO_COLOR, off_x + 120, curr_y+20, leg_s) - - # render I/O utilization - chart_rect = (off_x, curr_y+30, w, bar_h) - if clip_visible (clip, chart_rect): - draw_box_ticks (ctx, chart_rect, sec_w) - draw_annotations (ctx, proc_tree, trace.times, chart_rect) - draw_chart (ctx, IO_COLOR, True, chart_rect, \ - [(sample.time, sample.util) for sample in trace.disk_stats], \ - proc_tree, None) - - # render disk throughput - max_sample = max (trace.disk_stats, key = lambda s: s.tput) - if clip_visible (clip, chart_rect): - draw_chart (ctx, DISK_TPUT_COLOR, False, chart_rect, \ - [(sample.time, sample.tput) for sample in trace.disk_stats], \ - proc_tree, None) - - pos_x = off_x + ((max_sample.time - proc_tree.start_time) * w / proc_tree.duration) - - shift_x, shift_y = -20, 20 - if (pos_x < off_x + 245): - shift_x, shift_y = 5, 40 - - label = "%dMB/s" % round ((max_sample.tput) / 1024.0) - draw_text (ctx, label, DISK_TPUT_COLOR, pos_x + shift_x, curr_y + shift_y) - - curr_y = curr_y + 30 + bar_h + if trace.disk_stats: + draw_legend_line(ctx, "Disk throughput", DISK_TPUT_COLOR, off_x, curr_y+20, leg_s) + draw_legend_box(ctx, "Disk utilization", IO_COLOR, off_x + 120, curr_y+20, leg_s) + + # render I/O utilization + chart_rect = (off_x, curr_y+30, w, bar_h) + if clip_visible (clip, chart_rect): + draw_box_ticks 
(ctx, chart_rect, sec_w) + draw_annotations (ctx, proc_tree, trace.times, chart_rect) + draw_chart (ctx, IO_COLOR, True, chart_rect, \ + [(sample.time, sample.util) for sample in trace.disk_stats], \ + proc_tree, None) + + # render disk throughput + max_sample = max (trace.disk_stats, key = lambda s: s.tput) + if clip_visible (clip, chart_rect): + draw_chart (ctx, DISK_TPUT_COLOR, False, chart_rect, \ + [(sample.time, sample.tput) for sample in trace.disk_stats], \ + proc_tree, None) + + pos_x = off_x + ((max_sample.time - proc_tree.start_time) * w / proc_tree.duration) + + shift_x, shift_y = -20, 20 + if (pos_x < off_x + 245): + shift_x, shift_y = 5, 40 + + label = "%dMB/s" % round ((max_sample.tput) / 1024.0) + draw_text (ctx, label, DISK_TPUT_COLOR, pos_x + shift_x, curr_y + shift_y) + + curr_y = curr_y + 30 + bar_h + + # render disk space usage + # + # Draws the amount of disk space used on each volume relative to the + # lowest recorded amount. The graphs for each volume are stacked above + # each other so that total disk usage is visible. + if trace.monitor_disk: + ctx.set_font_size(LEGEND_FONT_SIZE) + # Determine set of volumes for which we have + # information and the minimal amount of used disk + # space for each. Currently samples are allowed to + # not have a values for all volumes; drawing could be + # made more efficient if that wasn't the case. + volumes = set() + min_used = {} + for sample in trace.monitor_disk: + for volume, used in sample.records.items(): + volumes.add(volume) + if volume not in min_used or min_used[volume] > used: + min_used[volume] = used + volumes = sorted(list(volumes)) + disk_scale = 0 + for i, volume in enumerate(volumes): + volume_scale = max([sample.records[volume] - min_used[volume] + for sample in trace.monitor_disk + if volume in sample.records]) + # Does not take length of volume name into account, but fixed offset + # works okay in practice. + draw_legend_box(ctx, '%s (max: %u MiB)' % (volume, volume_scale / 1024 / 1024), + VOLUME_COLORS[i % len(VOLUME_COLORS)], + off_x + i * 250, curr_y+20, leg_s) + disk_scale += volume_scale + + # render used amount of disk space + chart_rect = (off_x, curr_y+30, w, bar_h) + if clip_visible (clip, chart_rect): + draw_box_ticks (ctx, chart_rect, sec_w) + draw_annotations (ctx, proc_tree, trace.times, chart_rect) + for i in range(len(volumes), 0, -1): + draw_chart (ctx, VOLUME_COLORS[(i - 1) % len(VOLUME_COLORS)], True, chart_rect, \ + [(sample.time, + # Sum up used space of all volumes including the current one + # so that the graphs appear as stacked on top of each other. 
+ reduce(lambda x,y: x+y, + [sample.records[volume] - min_used[volume] + for volume in volumes[0:i] + if volume in sample.records], + 0)) + for sample in trace.monitor_disk], \ + proc_tree, [0, disk_scale]) + + curr_y = curr_y + 30 + bar_h # render mem usage chart_rect = (off_x, curr_y+30, w, meminfo_bar_h) mem_stats = trace.mem_stats if mem_stats and clip_visible (clip, chart_rect): - mem_scale = max(sample.records['MemTotal'] - sample.records['MemFree'] for sample in mem_stats) + mem_scale = max(sample.buffers for sample in mem_stats) draw_legend_box(ctx, "Mem cached (scale: %u MiB)" % (float(mem_scale) / 1024), MEM_CACHED_COLOR, off_x, curr_y+20, leg_s) draw_legend_box(ctx, "Used", MEM_USED_COLOR, off_x + 240, curr_y+20, leg_s) draw_legend_box(ctx, "Buffers", MEM_BUFFERS_COLOR, off_x + 360, curr_y+20, leg_s) - draw_legend_line(ctx, "Swap (scale: %u MiB)" % max([(sample.records['SwapTotal'] - sample.records['SwapFree'])/1024 for sample in mem_stats]), \ + draw_legend_line(ctx, "Swap (scale: %u MiB)" % max([(sample.swap)/1024 for sample in mem_stats]), \ MEM_SWAP_COLOR, off_x + 480, curr_y+20, leg_s) draw_box_ticks(ctx, chart_rect, sec_w) draw_annotations(ctx, proc_tree, trace.times, chart_rect) draw_chart(ctx, MEM_BUFFERS_COLOR, True, chart_rect, \ - [(sample.time, sample.records['MemTotal'] - sample.records['MemFree']) for sample in trace.mem_stats], \ + [(sample.time, sample.buffers) for sample in trace.mem_stats], \ proc_tree, [0, mem_scale]) draw_chart(ctx, MEM_USED_COLOR, True, chart_rect, \ - [(sample.time, sample.records['MemTotal'] - sample.records['MemFree'] - sample.records['Buffers']) for sample in mem_stats], \ + [(sample.time, sample.used) for sample in mem_stats], \ proc_tree, [0, mem_scale]) draw_chart(ctx, MEM_CACHED_COLOR, True, chart_rect, \ - [(sample.time, sample.records['Cached']) for sample in mem_stats], \ + [(sample.time, sample.cached) for sample in mem_stats], \ proc_tree, [0, mem_scale]) draw_chart(ctx, MEM_SWAP_COLOR, False, chart_rect, \ - [(sample.time, float(sample.records['SwapTotal'] - sample.records['SwapFree'])) for sample in mem_stats], \ + [(sample.time, float(sample.swap)) for sample in mem_stats], \ proc_tree, None) curr_y = curr_y + meminfo_bar_h @@ -415,7 +489,7 @@ def render_charts(ctx, options, clip, trace, curr_y, w, h, sec_w): return curr_y def render_processes_chart(ctx, options, trace, curr_y, w, h, sec_w): - chart_rect = [off_x, curr_y+header_h, w, h - 2 * off_y - (curr_y+header_h) + proc_h] + chart_rect = [off_x, curr_y+header_h, w, h - 2 * off_y - header_h - leg_s + proc_h] draw_legend_box (ctx, "Configure", \ TASK_COLOR_CONFIGURE, off_x , curr_y + 45, leg_s) @@ -496,6 +570,9 @@ def render(ctx, options, xscale, trace): w -= 2*off_x curr_y = off_y; + if options.charts: + curr_y = render_charts (ctx, options, clip, trace, curr_y, w, h, sec_w) + curr_y = render_processes_chart (ctx, options, trace, curr_y, w, h, sec_w) return @@ -513,9 +590,6 @@ def render(ctx, options, xscale, trace): else: curr_y = off_y; - if options.charts: - curr_y = render_charts (ctx, options, clip, trace, curr_y, w, h, sec_w) - # draw process boxes proc_height = h if proc_tree.taskstats and options.cumulative: diff --git a/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/parsing.py b/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/parsing.py index a3a0b0b33..bcfb2da56 100644 --- a/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/parsing.py +++ b/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/parsing.py @@ 
-38,16 +38,18 @@ class Trace: self.min = None self.max = None self.headers = None - self.disk_stats = None + self.disk_stats = [] self.ps_stats = None self.taskstats = None - self.cpu_stats = None + self.cpu_stats = [] self.cmdline = None self.kernel = None self.kernel_tree = None self.filename = None self.parent_map = None - self.mem_stats = None + self.mem_stats = [] + self.monitor_disk = None + self.times = [] # Always empty, but expected by draw.py when drawing system charts. if len(paths): parse_paths (writer, self, paths) @@ -58,6 +60,19 @@ class Trace: self.min = min(self.start.keys()) self.max = max(self.end.keys()) + + # Rendering system charts depends on start and end + # time. Provide them where the original drawing code expects + # them, i.e. in proc_tree. + class BitbakeProcessTree: + def __init__(self, start_time, end_time): + self.start_time = start_time + self.end_time = end_time + self.duration = self.end_time - self.start_time + self.proc_tree = BitbakeProcessTree(min(self.start.keys()), + max(self.end.keys())) + + return # Turn that parsed information into something more useful @@ -427,7 +442,13 @@ def _parse_proc_stat_log(file): # skip the rest of statistics lines return samples -def _parse_proc_disk_stat_log(file, numCpu): +def _parse_reduced_log(file, sample_class): + samples = [] + for time, lines in _parse_timed_blocks(file): + samples.append(sample_class(time, *[float(x) for x in lines[0].split()])) + return samples + +def _parse_proc_disk_stat_log(file): """ Parse file for disk stats, but only look at the whole device, eg. sda, not sda1, sda2 etc. The format of relevant lines should be: @@ -462,12 +483,31 @@ def _parse_proc_disk_stat_log(file, numCpu): sums = [ a - b for a, b in zip(sample1.diskdata, sample2.diskdata) ] readTput = sums[0] / 2.0 * 100.0 / interval writeTput = sums[1] / 2.0 * 100.0 / interval - util = float( sums[2] ) / 10 / interval / numCpu + util = float( sums[2] ) / 10 / interval util = max(0.0, min(1.0, util)) disk_stats.append(DiskSample(sample2.time, readTput, writeTput, util)) return disk_stats +def _parse_reduced_proc_meminfo_log(file): + """ + Parse file for global memory statistics with + 'MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree' values + (in that order) directly stored on one line. + """ + used_values = ('MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapTotal', 'SwapFree',) + + mem_stats = [] + for time, lines in _parse_timed_blocks(file): + sample = MemSample(time) + for name, value in zip(used_values, lines[0].split()): + sample.add_value(name, int(value)) + + if sample.valid(): + mem_stats.append(DrawMemSample(sample)) + + return mem_stats + def _parse_proc_meminfo_log(file): """ Parse file for global memory statistics. @@ -484,14 +524,37 @@ def _parse_proc_meminfo_log(file): for line in lines: match = meminfo_re.match(line) if not match: - raise ParseError("Invalid meminfo line \"%s\"" % match.groups(0)) + raise ParseError("Invalid meminfo line \"%s\"" % line) sample.add_value(match.group(1), int(match.group(2))) if sample.valid(): - mem_stats.append(sample) + mem_stats.append(DrawMemSample(sample)) return mem_stats +def _parse_monitor_disk_log(file): + """ + Parse file with information about amount of diskspace used. + The format of relevant lines should be: ^volume path: number-of-bytes? 
+ """ + disk_stats = [] + diskinfo_re = re.compile(r'^(.+):\s*(\d+)$') + + for time, lines in _parse_timed_blocks(file): + sample = DiskSpaceSample(time) + + for line in lines: + match = diskinfo_re.match(line) + if not match: + raise ParseError("Invalid monitor_disk line \"%s\"" % line) + sample.add_value(match.group(1), int(match.group(2))) + + if sample.valid(): + disk_stats.append(sample) + + return disk_stats + + # if we boot the kernel with: initcall_debug printk.time=1 we can # get all manner of interesting data from the dmesg output # We turn this into a pseudo-process tree: each event is @@ -628,6 +691,20 @@ def _parse_cmdline_log(writer, file): cmdLines[pid] = values return cmdLines +def _parse_bitbake_buildstats(writer, state, filename, file): + paths = filename.split("/") + task = paths[-1] + pn = paths[-2] + start = None + end = None + for line in file: + if line.startswith("Started:"): + start = int(float(line.split()[-1])) + elif line.startswith("Ended:"): + end = int(float(line.split()[-1])) + if start and end: + state.add_process(pn + ":" + task, start, end) + def get_num_cpus(headers): """Get the number of CPUs from the system.cpu header property. As the CPU utilization graphs are relative, the number of CPUs currently makes @@ -647,18 +724,25 @@ def get_num_cpus(headers): def _do_parse(writer, state, filename, file): writer.info("parsing '%s'" % filename) t1 = clock() - paths = filename.split("/") - task = paths[-1] - pn = paths[-2] - start = None - end = None - for line in file: - if line.startswith("Started:"): - start = int(float(line.split()[-1])) - elif line.startswith("Ended:"): - end = int(float(line.split()[-1])) - if start and end: - state.add_process(pn + ":" + task, start, end) + name = os.path.basename(filename) + if name == "proc_diskstats.log": + state.disk_stats = _parse_proc_disk_stat_log(file) + elif name == "reduced_proc_diskstats.log": + state.disk_stats = _parse_reduced_log(file, DiskSample) + elif name == "proc_stat.log": + state.cpu_stats = _parse_proc_stat_log(file) + elif name == "reduced_proc_stat.log": + state.cpu_stats = _parse_reduced_log(file, CPUSample) + elif name == "proc_meminfo.log": + state.mem_stats = _parse_proc_meminfo_log(file) + elif name == "reduced_proc_meminfo.log": + state.mem_stats = _parse_reduced_proc_meminfo_log(file) + elif name == "cmdline2.log": + state.cmdline = _parse_cmdline_log(writer, file) + elif name == "monitor_disk.log": + state.monitor_disk = _parse_monitor_disk_log(file) + elif not filename.endswith('.log'): + _parse_bitbake_buildstats(writer, state, filename, file) t2 = clock() writer.info(" %s seconds" % str(t2-t1)) return state diff --git a/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/samples.py b/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/samples.py index 015d743aa..9fc309b3a 100644 --- a/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/samples.py +++ b/import-layers/yocto-poky/scripts/pybootchartgui/pybootchartgui/samples.py @@ -53,6 +53,33 @@ class MemSample: # discard incomplete samples return [v for v in MemSample.used_values if v not in keys] == [] +class DrawMemSample: + """ + Condensed version of a MemSample with exactly the values used by the drawing code. + Initialized either from a valid MemSample or + a tuple/list of buffer/used/cached/swap values. 
+ """ + def __init__(self, mem_sample): + self.time = mem_sample.time + if isinstance(mem_sample, MemSample): + self.buffers = mem_sample.records['MemTotal'] - mem_sample.records['MemFree'] + self.used = mem_sample.records['MemTotal'] - mem_sample.records['MemFree'] - mem_sample.records['Buffers'] + self.cached = mem_sample.records['Cached'] + self.swap = mem_sample.records['SwapTotal'] - mem_sample.records['SwapFree'] + else: + self.buffers, self.used, self.cached, self.swap = mem_sample + +class DiskSpaceSample: + def __init__(self, time): + self.time = time + self.records = {} + + def add_value(self, name, value): + self.records[name] = value + + def valid(self): + return bool(self.records) + class ProcessSample: def __init__(self, time, state, cpu_sample): self.time = time diff --git a/import-layers/yocto-poky/scripts/recipetool b/import-layers/yocto-poky/scripts/recipetool index 1052cd2b2..3765ec7cf 100755 --- a/import-layers/yocto-poky/scripts/recipetool +++ b/import-layers/yocto-poky/scripts/recipetool @@ -73,13 +73,14 @@ def main(): logger.error("Unable to find bitbake by searching parent directory of this script or PATH") sys.exit(1) logger.debug('Found bitbake path: %s' % bitbakepath) + scriptpath.add_oe_lib_path() scriptutils.logger_setup_color(logger, global_args.color) tinfoil = tinfoil_init(False) try: - for path in ([scripts_path] + - tinfoil.config_data.getVar('BBPATH', True).split(':')): + for path in (tinfoil.config_data.getVar('BBPATH').split(':') + + [scripts_path]): pluginpath = os.path.join(path, 'lib', 'recipetool') scriptutils.load_plugins(logger, plugins, pluginpath) diff --git a/import-layers/yocto-poky/scripts/relocate_sdk.py b/import-layers/yocto-poky/scripts/relocate_sdk.py index e47b4d916..c752fa2c6 100755 --- a/import-layers/yocto-poky/scripts/relocate_sdk.py +++ b/import-layers/yocto-poky/scripts/relocate_sdk.py @@ -103,6 +103,8 @@ def change_interpreter(elf_file_name): fname.startswith(b("/lib32/")) or fname.startswith(b("/usr/lib32/")) or \ fname.startswith(b("/usr/lib32/")) or fname.startswith(b("/usr/lib64/")): break + if p_filesz == 0: + break if (len(new_dl_path) >= p_filesz): print("ERROR: could not relocate %s, interp size = %i and %i is needed." \ % (elf_file_name, p_memsz, len(new_dl_path) + 1)) diff --git a/import-layers/yocto-poky/scripts/rpm2cpio.sh b/import-layers/yocto-poky/scripts/rpm2cpio.sh index 5df8c0f70..cf23472ba 100755 --- a/import-layers/yocto-poky/scripts/rpm2cpio.sh +++ b/import-layers/yocto-poky/scripts/rpm2cpio.sh @@ -1,53 +1,55 @@ -#!/bin/sh - -# This comes from the RPM5 5.4.0 distribution. - -pkg=$1 -if [ "$pkg" = "" -o ! 
-e "$pkg" ]; then - echo "no package supplied" 1>&2 - exit 1 -fi - -leadsize=96 -o=`expr $leadsize + 8` -set `od -j $o -N 8 -t u1 $pkg` -il=`expr 256 \* \( 256 \* \( 256 \* $2 + $3 \) + $4 \) + $5` -dl=`expr 256 \* \( 256 \* \( 256 \* $6 + $7 \) + $8 \) + $9` -# echo "sig il: $il dl: $dl" - -sigsize=`expr 8 + 16 \* $il + $dl` -o=`expr $o + $sigsize + \( 8 - \( $sigsize \% 8 \) \) \% 8 + 8` -set `od -j $o -N 8 -t u1 $pkg` -il=`expr 256 \* \( 256 \* \( 256 \* $2 + $3 \) + $4 \) + $5` -dl=`expr 256 \* \( 256 \* \( 256 \* $6 + $7 \) + $8 \) + $9` -# echo "hdr il: $il dl: $dl" - -hdrsize=`expr 8 + 16 \* $il + $dl` -o=`expr $o + $hdrsize` -EXTRACTOR="dd if=$pkg ibs=$o skip=1" - -COMPRESSION=`($EXTRACTOR |file -) 2>/dev/null` -if echo $COMPRESSION |grep -iq gzip; then - DECOMPRESSOR=gunzip -elif echo $COMPRESSION |grep -iq bzip2; then - DECOMPRESSOR=bunzip2 -elif echo $COMPRESSION |grep -iq xz; then - DECOMPRESSOR=unxz -elif echo $COMPRESSION |grep -iq cpio; then - DECOMPRESSOR=cat -else - # Most versions of file don't support LZMA, therefore we assume - # anything not detected is LZMA - DECOMPRESSOR=`which unlzma 2>/dev/null` - case "$DECOMPRESSOR" in - /* ) ;; - * ) DECOMPRESSOR=`which lzmash 2>/dev/null` - case "$DECOMPRESSOR" in - /* ) DECOMPRESSOR="lzmash -d -c" ;; - * ) DECOMPRESSOR=cat ;; - esac - ;; - esac -fi - -$EXTRACTOR 2>/dev/null | $DECOMPRESSOR +#!/bin/sh -efu + +# This file comes from rpm 4.x distribution + +fatal() { + echo "$*" >&2 + exit 1 +} + +pkg="$1" +[ -n "$pkg" -a -e "$pkg" ] || + fatal "No package supplied" + +_dd() { + local o="$1"; shift + dd if="$pkg" skip="$o" iflag=skip_bytes status=none $* +} + +calcsize() { + offset=$(($1 + 8)) + + local i b b0 b1 b2 b3 b4 b5 b6 b7 + + i=0 + while [ $i -lt 8 ]; do + b="$(_dd $(($offset + $i)) bs=1 count=1)" + [ -z "$b" ] && + b="0" || + b="$(exec printf '%u\n' "'$b")" + eval "b$i=\$b" + i=$(($i + 1)) + done + + rsize=$((8 + ((($b0 << 24) + ($b1 << 16) + ($b2 << 8) + $b3) << 4) + ($b4 << 24) + ($b5 << 16) + ($b6 << 8) + $b7)) + offset=$(($offset + $rsize)) +} + +case "$(_dd 0 bs=8 count=1)" in + "$(printf '\355\253\356\333')"*) ;; # '\xed\xab\xee\xdb' + *) fatal "File doesn't look like rpm: $pkg" ;; +esac + +calcsize 96 +sigsize=$rsize + +calcsize $(($offset + (8 - ($sigsize % 8)) % 8)) +hdrsize=$rsize + +case "$(_dd $offset bs=3 count=1)" in + "$(printf '\102\132')"*) _dd $offset | bunzip2 ;; # '\x42\x5a' + "$(printf '\037\213')"*) _dd $offset | gunzip ;; # '\x1f\x8b' + "$(printf '\375\067')"*) _dd $offset | xzcat ;; # '\xfd\x37' + "$(printf '\135\000')"*) _dd $offset | unlzma ;; # '\x5d\x00' + *) fatal "Unrecognized rpm file: $pkg" ;; +esac diff --git a/import-layers/yocto-poky/scripts/runqemu b/import-layers/yocto-poky/scripts/runqemu index 6748cb258..9b6d330f1 100755 --- a/import-layers/yocto-poky/scripts/runqemu +++ b/import-layers/yocto-poky/scripts/runqemu @@ -74,17 +74,19 @@ of the following environment variables (in any order): kvm-vhost - enable KVM with vhost when running x86/x86_64 (VT-capable CPU required) publicvnc - enable a VNC server open to all hosts audio - enable audio + [*/]ovmf* - OVMF firmware file or base name for booting with UEFI tcpserial= - specify tcp serial port number biosdir= - specify custom bios dir biosfilename= - specify bios filename qemuparams= - specify custom parameters to QEMU bootparams= - specify custom kernel parameters during boot - help: print this text + help, -h, --help: print this text Examples: + runqemu runqemu qemuarm runqemu tmp/deploy/images/qemuarm - runqemu 
tmp/deploy/images/qemux86/.qemuboot.conf + runqemu tmp/deploy/images/qemux86/ runqemu qemux86-64 core-image-sato ext4 runqemu qemux86-64 wic-image-minimal wic runqemu path/to/bzImage-qemux86.bin path/to/nfsrootdir/ serial @@ -96,7 +98,7 @@ Examples: """) def check_tun(): - """Check /dev/net/run""" + """Check /dev/net/tun""" dev_tun = '/dev/net/tun' if not os.path.exists(dev_tun): raise Exception("TUN control device %s is unavailable; you may need to enable TUN (e.g. sudo modprobe tun)" % dev_tun) @@ -147,21 +149,46 @@ def get_first_file(cmds): return f return '' +def check_free_port(host, port): + """ Check whether the port is free or not """ + import socket + from contextlib import closing + + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: + if sock.connect_ex((host, port)) == 0: + # Port is open, so not free + return False + else: + # Port is not open, so free + return True + class BaseConfig(object): def __init__(self): - # Vars can be merged with .qemuboot.conf, use a dict to manage them. - self.d = { - 'MACHINE': '', - 'DEPLOY_DIR_IMAGE': '', - 'QB_KERNEL_ROOT': '/dev/vda', - } + # The self.d saved vars from self.set(), part of them are from qemuboot.conf + self.d = {'QB_KERNEL_ROOT': '/dev/vda'} + + # Supported env vars, add it here if a var can be got from env, + # and don't use os.getenv in the code. + self.env_vars = ('MACHINE', + 'ROOTFS', + 'KERNEL', + 'DEPLOY_DIR_IMAGE', + 'OE_TMPDIR', + 'OECORE_NATIVE_SYSROOT', + ) self.qemu_opt = '' self.qemu_opt_script = '' - self.nfs_dir = '' self.clean_nfs_dir = False self.nfs_server = '' self.rootfs = '' + # File name(s) of a OVMF firmware file or variable store, + # to be added with -drive if=pflash. + # Found in the same places as the rootfs, with or without one of + # these suffices: qcow2, bin. + # Setting one also adds "-vga std" because that is all that + # OVMF supports. + self.ovmf_bios = [] self.qemuboot = '' self.qbconfload = False self.kernel = '' @@ -187,6 +214,15 @@ class BaseConfig(object): self.snapshot = False self.fstypes = ('ext2', 'ext3', 'ext4', 'jffs2', 'nfs', 'btrfs', 'cpio.gz', 'cpio', 'ramfs') self.vmtypes = ('hddimg', 'hdddirect', 'wic', 'vmdk', 'qcow2', 'vdi', 'iso') + self.network_device = "-device e1000,netdev=net0,mac=@MAC@" + # Use different mac section for tap and slirp to avoid + # conflicts, e.g., when one is running with tap, the other is + # running with slirp. + # The last section is dynamic, which is for avoiding conflicts, + # when multiple qemus are running, e.g., when multiple tap or + # slirp qemus are running. + self.mac_tap = "52:54:00:12:34:" + self.mac_slirp = "52:54:00:12:35:" def acquire_lock(self): logger.info("Acquiring lockfile %s..." 
% self.lock) @@ -208,6 +244,8 @@ class BaseConfig(object): def get(self, key): if key in self.d: return self.d.get(key) + elif os.getenv(key): + return os.getenv(key) else: return '' @@ -219,7 +257,7 @@ class BaseConfig(object): if not re.search('.qemuboot.conf$', '\n'.join(os.listdir(p)), re.M): logger.info("Can't find required *.qemuboot.conf in %s" % p) return False - if not re.search('-image-', '\n'.join(os.listdir(p))): + if not any(map(lambda name: '-image-' in name, os.listdir(p))): logger.info("Can't find *-image-* in %s" % p) return False return True @@ -246,12 +284,11 @@ class BaseConfig(object): def check_arg_nfs(self, p): if os.path.isdir(p): - self.nfs_dir = p + self.rootfs = p else: m = re.match('(.*):(.*)', p) self.nfs_server = m.group(1) - self.nfs_dir = m.group(2) - self.rootfs = "" + self.rootfs = m.group(2) self.check_arg_fstype('nfs') def check_arg_path(self, p): @@ -260,6 +297,7 @@ class BaseConfig(object): - Check whether is a kernel file - Check whether is a image file - Check whether it is a nfs dir + - Check whether it is a OVMF flash file """ if p.endswith('.qemuboot.conf'): self.qemuboot = p @@ -268,37 +306,52 @@ class BaseConfig(object): re.search('zImage', p) or re.search('vmlinux', p) or \ re.search('fitImage', p) or re.search('uImage', p): self.kernel = p - elif os.path.exists(p) and (not os.path.isdir(p)) and re.search('-image-', os.path.basename(p)): + elif os.path.exists(p) and (not os.path.isdir(p)) and '-image-' in os.path.basename(p): self.rootfs = p - dirpath = os.path.dirname(p) - m = re.search('(.*)\.(.*)$', p) - if m: - qb = '%s%s' % (re.sub('\.rootfs$', '', m.group(1)), '.qemuboot.conf') + # Check filename against self.fstypes can hanlde .cpio.gz, + # otherwise, its type would be "gz", which is incorrect. + fst = "" + for t in self.fstypes: + if p.endswith(t): + fst = t + break + if not fst: + m = re.search('.*\.(.*)$', self.rootfs) + if m: + fst = m.group(1) + if fst: + self.check_arg_fstype(fst) + qb = re.sub('\.' + fst + "$", '', self.rootfs) + qb = '%s%s' % (re.sub('\.rootfs$', '', qb), '.qemuboot.conf') if os.path.exists(qb): self.qemuboot = qb self.qbconfload = True else: logger.warn("%s doesn't exist" % qb) - fst = m.group(2) - self.check_arg_fstype(fst) else: raise Exception("Can't find FSTYPE from: %s" % p) - elif os.path.isdir(p) or re.search(':', arg) and re.search('/', arg): + + elif os.path.isdir(p) or re.search(':', p) and re.search('/', p): if self.is_deploy_dir_image(p): logger.info('DEPLOY_DIR_IMAGE: %s' % p) self.set("DEPLOY_DIR_IMAGE", p) else: logger.info("Assuming %s is an nfs rootfs" % p) self.check_arg_nfs(p) + elif os.path.basename(p).startswith('ovmf'): + self.ovmf_bios.append(p) else: raise Exception("Unknown path arg %s" % p) def check_arg_machine(self, arg): """Check whether it is a machine""" - if self.get('MACHINE') and self.get('MACHINE') != arg or re.search('/', arg): - raise Exception("Unknown arg: %s" % arg) - elif self.get('MACHINE') == arg: + if self.get('MACHINE') == arg: return + elif self.get('MACHINE') and self.get('MACHINE') != arg: + raise Exception("Maybe conflicted MACHINE: %s vs %s" % (self.get('MACHINE'), arg)) + elif re.search('/', arg): + raise Exception("Unknown arg: %s" % arg) + logger.info('Assuming MACHINE = %s' % arg) # if we're running under testimage, or similarly as a child @@ -307,14 +360,14 @@ class BaseConfig(object): # FIXME: testimage.bbclass exports these two variables into env, # are there other scenarios in which we need to support being # invoked by bitbake? 
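
The check_arg_path() change above derives the image fstype by matching the rootfs filename against the known (possibly compound) suffixes before falling back to the last dot-extension, which is what lets a .cpio.gz image be classified as cpio.gz rather than gz. A minimal sketch of that rule (illustrative names only; the real method also resolves the matching .qemuboot.conf):

    # Illustrative: check known compound suffixes first, then fall back to
    # the last extension if nothing matches.
    FSTYPES = ('ext2', 'ext3', 'ext4', 'jffs2', 'nfs', 'btrfs',
               'cpio.gz', 'cpio', 'ramfs')

    def detect_fstype(rootfs):
        for t in FSTYPES:
            if rootfs.endswith(t):
                return t
        return rootfs.rsplit('.', 1)[-1] if '.' in rootfs else ''
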
- deploy = os.environ.get('DEPLOY_DIR_IMAGE') - bbchild = deploy and os.environ.get('OE_TMPDIR') + deploy = self.get('DEPLOY_DIR_IMAGE') + bbchild = deploy and self.get('OE_TMPDIR') if bbchild: self.set_machine_deploy_dir(arg, deploy) return # also check whether we're running under a sourced toolchain # environment file - if os.environ.get('OECORE_NATIVE_SYSROOT'): + if self.get('OECORE_NATIVE_SYSROOT'): self.set("MACHINE", arg) return @@ -372,11 +425,13 @@ class BaseConfig(object): self.bootparams = arg[len('bootparams='):] elif os.path.exists(arg) or (re.search(':', arg) and re.search('/', arg)): self.check_arg_path(os.path.abspath(arg)) - elif re.search('-image-', arg): + elif re.search(r'-image-|-image$', arg): # Lazy rootfs self.rootfs = arg + elif arg.startswith('ovmf'): + self.ovmf_bios.append(arg) else: - # At last, assume is it the MACHINE + # At last, assume it is the MACHINE if (not unknown_arg) or unknown_arg == arg: unknown_arg = arg else: @@ -385,19 +440,20 @@ class BaseConfig(object): if unknown_arg: if self.get('MACHINE') == unknown_arg: return - if not self.get('DEPLOY_DIR_IMAGE'): - # Trying to get DEPLOY_DIR_IMAGE from env. - p = os.getenv('DEPLOY_DIR_IMAGE') - if p and self.is_deploy_dir_image(p): - machine = os.path.basename(p) - if unknown_arg == machine: - self.set_machine_deploy_dir(machine, p) - return - else: - logger.info('DEPLOY_DIR_IMAGE: %s' % p) - self.set("DEPLOY_DIR_IMAGE", p) + if self.get('DEPLOY_DIR_IMAGE'): + machine = os.path.basename(self.get('DEPLOY_DIR_IMAGE')) + if unknown_arg == machine: + self.set("MACHINE", machine) + return + self.check_arg_machine(unknown_arg) + if not self.get('DEPLOY_DIR_IMAGE'): + self.load_bitbake_env() + s = re.search('^DEPLOY_DIR_IMAGE="(.*)"', self.bitbake_e, re.M) + if s: + self.set("DEPLOY_DIR_IMAGE", s.group(1)) + def check_kvm(self): """Check kvm and kvm-host""" if not (self.kvm_enabled or self.vhost_enabled): @@ -426,6 +482,11 @@ class BaseConfig(object): if os.access(dev_kvm, os.W_OK|os.R_OK): self.qemu_opt_script += ' -enable-kvm' + if self.get('MACHINE') == "qemux86": + # Workaround for broken APIC window on pre 4.15 host kernels which causes boot hangs + # See YOCTO #12301 + # On 64 bit we use x2apic + self.kernel_cmdline_script += " clocksource=kvm-clock hpet=disable noapic nolapic" else: logger.error("You have no read or write permission on /dev/kvm.") logger.error("Please change the ownership of this file as described at:") @@ -454,6 +515,15 @@ class BaseConfig(object): def check_rootfs(self): """Check and set rootfs""" + if self.fstype == "none": + return + + if self.get('ROOTFS'): + if not self.rootfs: + self.rootfs = self.get('ROOTFS') + elif self.get('ROOTFS') != self.rootfs: + raise Exception("Maybe conflicted ROOTFS: %s vs %s" % (self.get('ROOTFS'), self.rootfs)) + if self.fstype == 'nfs': return @@ -473,15 +543,36 @@ class BaseConfig(object): if not os.path.exists(self.rootfs): raise Exception("Can't find rootfs: %s" % self.rootfs) + def check_ovmf(self): + """Check and set full path for OVMF firmware and variable file(s).""" + + for index, ovmf in enumerate(self.ovmf_bios): + if os.path.exists(ovmf): + continue + for suffix in ('qcow2', 'bin'): + path = '%s/%s.%s' % (self.get('DEPLOY_DIR_IMAGE'), ovmf, suffix) + if os.path.exists(path): + self.ovmf_bios[index] = path + break + else: + raise Exception("Can't find OVMF firmware: %s" % ovmf) + def check_kernel(self): """Check and set kernel, dtb""" # The vm image doesn't need a kernel if self.fstype in self.vmtypes: return + # QB_DEFAULT_KERNEL is 
always a full file path + kernel_name = os.path.basename(self.get('QB_DEFAULT_KERNEL')) + + # The user didn't want a kernel to be loaded + if kernel_name == "none": + return + deploy_dir_image = self.get('DEPLOY_DIR_IMAGE') if not self.kernel: - kernel_match_name = "%s/%s" % (deploy_dir_image, self.get('QB_DEFAULT_KERNEL')) + kernel_match_name = "%s/%s" % (deploy_dir_image, kernel_name) kernel_match_link = "%s/%s" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE')) kernel_startswith = "%s/%s*" % (deploy_dir_image, self.get('KERNEL_IMAGETYPE')) cmds = (kernel_match_name, kernel_match_link, kernel_startswith) @@ -543,7 +634,8 @@ class BaseConfig(object): def check_and_set(self): """Check configs sanity and set when needed""" self.validate_paths() - check_tun() + if not self.slirp_enabled: + check_tun() # Check audio if self.audio_enabled: if not self.get('QB_AUDIO_DRV'): @@ -559,6 +651,7 @@ class BaseConfig(object): self.check_kvm() self.check_fstype() self.check_rootfs() + self.check_ovmf() self.check_kernel() self.check_biosdir() self.check_mem() @@ -568,8 +661,6 @@ class BaseConfig(object): if not self.qemuboot: if self.get('DEPLOY_DIR_IMAGE'): deploy_dir_image = self.get('DEPLOY_DIR_IMAGE') - elif os.getenv('DEPLOY_DIR_IMAGE'): - deploy_dir_image = os.getenv('DEPLOY_DIR_IMAGE') else: logger.info("Can't find qemuboot conf file, DEPLOY_DIR_IMAGE is NULL!") return @@ -586,7 +677,15 @@ class BaseConfig(object): logger.info('Running %s...' % cmd) qbs = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8') if qbs: - self.qemuboot = qbs.split()[0] + for qb in qbs.split(): + # Don't use initramfs when other choices unless fstype is ramfs + if '-initramfs-' in os.path.basename(qb) and self.fstype != 'cpio.gz': + continue + self.qemuboot = qb + break + if not self.qemuboot: + # Use the first one when no choice + self.qemuboot = qbs.split()[0] self.qbconfload = True if not self.qemuboot: @@ -595,7 +694,7 @@ class BaseConfig(object): return if not os.path.exists(self.qemuboot): - raise Exception("Failed to find .qemuboot.conf!") + raise Exception("Failed to find %s (wrong image name or BSP does not support running under qemu?)." % self.qemuboot) logger.info('CONFFILE: %s' % self.qemuboot) @@ -611,8 +710,8 @@ class BaseConfig(object): # artefacts are relative to that file, rather than in whatever # directory DEPLOY_DIR_IMAGE in the conf file points to. 
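Two behaviours from the hunks above are worth restating in isolation: the *.qemuboot.conf discovery now skips initramfs images unless the requested fstype is cpio.gz, and DEPLOY_DIR_IMAGE is then normalised (via realpath, in the code that follows) to the directory actually containing the chosen conf file. A compact sketch of the selection step, assuming a plain list of candidate file names (the names are invented for illustration):

    import os

    def pick_qemuboot_conf(candidates, fstype):
        """Prefer a non-initramfs conf unless an initramfs rootfs was requested."""
        for qb in candidates:
            if '-initramfs-' in os.path.basename(qb) and fstype != 'cpio.gz':
                continue
            return qb
        # No better choice: fall back to the first candidate, if any.
        return candidates[0] if candidates else None

    confs = ['core-image-minimal-initramfs-qemux86.qemuboot.conf',
             'core-image-sato-qemux86.qemuboot.conf']
    print(pick_qemuboot_conf(confs, 'ext4'))      # the sato conf
    print(pick_qemuboot_conf(confs, 'cpio.gz'))   # the initramfs conf is acceptable here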
if self.qbconfload: - imgdir = os.path.dirname(self.qemuboot) - if imgdir != self.get('DEPLOY_DIR_IMAGE'): + imgdir = os.path.realpath(os.path.dirname(self.qemuboot)) + if imgdir != os.path.realpath(self.get('DEPLOY_DIR_IMAGE')): logger.info('Setting DEPLOY_DIR_IMAGE to folder containing %s (%s)' % (self.qemuboot, imgdir)) self.set('DEPLOY_DIR_IMAGE', imgdir) @@ -627,7 +726,7 @@ class BaseConfig(object): self.load_bitbake_env() if self.bitbake_e: - native_vars = ['STAGING_DIR_NATIVE', 'STAGING_BINDIR_NATIVE'] + native_vars = ['STAGING_DIR_NATIVE'] for nv in native_vars: s = re.search('^%s="(.*)"' % nv, self.bitbake_e, re.M) if s and s.group(1) != self.get(nv): @@ -638,8 +737,8 @@ class BaseConfig(object): # be able to call `bitbake -e`, then try: # - get OE_TMPDIR from environment and guess paths based on it # - get OECORE_NATIVE_SYSROOT from environment (for sdk) - tmpdir = os.environ.get('OE_TMPDIR', None) - oecore_native_sysroot = os.environ.get('OECORE_NATIVE_SYSROOT', None) + tmpdir = self.get('OE_TMPDIR') + oecore_native_sysroot = self.get('OECORE_NATIVE_SYSROOT') if tmpdir: logger.info('Setting STAGING_DIR_NATIVE and STAGING_BINDIR_NATIVE relative to OE_TMPDIR (%s)' % tmpdir) hostos, _, _, _, machine = os.uname() @@ -664,9 +763,11 @@ class BaseConfig(object): print('MACHINE: [%s]' % self.get('MACHINE')) print('FSTYPE: [%s]' % self.fstype) if self.fstype == 'nfs': - print('NFS_DIR: [%s]' % self.nfs_dir) + print('NFS_DIR: [%s]' % self.rootfs) else: print('ROOTFS: [%s]' % self.rootfs) + if self.ovmf_bios: + print('OVMF: %s' % self.ovmf_bios) print('CONFFILE: [%s]' % self.qemuboot) print('') @@ -707,13 +808,13 @@ class BaseConfig(object): self.unfs_opts="nfsvers=3,port=%s,mountprog=%s,nfsprog=%s,udp,mountport=%s" % (nfsd_port, mountd_rpcport, nfsd_rpcport, mountd_port) - # Extract .tar.bz2 or .tar.bz if no self.nfs_dir - if not self.nfs_dir: + # Extract .tar.bz2 or .tar.bz if no nfs dir + if not (self.rootfs and os.path.isdir(self.rootfs)): src_prefix = '%s/%s' % (self.get('DEPLOY_DIR_IMAGE'), self.get('IMAGE_LINK_NAME')) dest = "%s-nfsroot" % src_prefix if os.path.exists('%s.pseudo_state' % dest): logger.info('Use %s as NFS_DIR' % dest) - self.nfs_dir = dest + self.rootfs = dest else: src = "" src1 = '%s.tar.bz2' % src_prefix @@ -730,24 +831,49 @@ class BaseConfig(object): if subprocess.call(cmd, shell=True) != 0: raise Exception('Failed to run %s' % cmd) self.clean_nfs_dir = True - self.nfs_dir = dest + self.rootfs = dest # Start the userspace NFS server - cmd = 'runqemu-export-rootfs start %s' % self.nfs_dir + cmd = 'runqemu-export-rootfs start %s' % self.rootfs logger.info('Running %s...' 
% cmd) if subprocess.call(cmd, shell=True) != 0: raise Exception('Failed to run %s' % cmd) self.nfs_running = True - def setup_slirp(self): """Setup user networking""" if self.fstype == 'nfs': self.setup_nfs() self.kernel_cmdline_script += ' ip=dhcp' - self.set('NETWORK_CMD', self.get('QB_SLIRP_OPT')) + # Port mapping + hostfwd = ",hostfwd=tcp::2222-:22,hostfwd=tcp::2323-:23" + qb_slirp_opt_default = "-netdev user,id=net0%s" % hostfwd + qb_slirp_opt = self.get('QB_SLIRP_OPT') or qb_slirp_opt_default + # Figure out the port + ports = re.findall('hostfwd=[^-]*:([0-9]+)-[^,-]*', qb_slirp_opt) + ports = [int(i) for i in ports] + mac = 2 + # Find a free port to avoid conflicts + for p in ports[:]: + p_new = p + while not check_free_port('localhost', p_new): + p_new += 1 + mac += 1 + while p_new in ports: + p_new += 1 + mac += 1 + if p != p_new: + ports.append(p_new) + qb_slirp_opt = re.sub(':%s-' % p, ':%s-' % p_new, qb_slirp_opt) + logger.info("Port forward changed: %s -> %s" % (p, p_new)) + mac = "%s%02x" % (self.mac_slirp, mac) + self.set('NETWORK_CMD', '%s %s' % (self.network_device.replace('@MAC@', mac), qb_slirp_opt)) + # Print out port foward + hostfwd = re.findall('(hostfwd=[^,]*)', qb_slirp_opt) + if hostfwd: + logger.info('Port forward: %s' % ' '.join(hostfwd)) def setup_tap(self): """Setup tap""" @@ -799,7 +925,7 @@ class BaseConfig(object): gid = os.getgid() uid = os.getuid() logger.info("Setting up tap interface under sudo") - cmd = 'sudo %s %s %s %s' % (self.qemuifup, uid, gid, self.get('STAGING_DIR_NATIVE')) + cmd = 'sudo %s %s %s %s' % (self.qemuifup, uid, gid, self.bindir_native) tap = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8').rstrip('\n') lockfile = os.path.join(lockdir, tap) self.lock = lockfile + '.lock' @@ -816,27 +942,35 @@ class BaseConfig(object): client = gateway + 1 if self.fstype == 'nfs': self.setup_nfs() - self.kernel_cmdline_script += " ip=192.168.7.%s::192.168.7.%s:255.255.255.0" % (client, gateway) - mac = "52:54:00:12:34:%02x" % client + netconf = "192.168.7.%s::192.168.7.%s:255.255.255.0" % (client, gateway) + logger.info("Network configuration: %s", netconf) + self.kernel_cmdline_script += " ip=%s" % netconf + mac = "%s%02x" % (self.mac_tap, client) qb_tap_opt = self.get('QB_TAP_OPT') if qb_tap_opt: - qemu_tap_opt = qb_tap_opt.replace('@TAP@', tap).replace('@MAC@', mac) + qemu_tap_opt = qb_tap_opt.replace('@TAP@', tap) else: - qemu_tap_opt = "-device virtio-net-pci,netdev=net0,mac=%s -netdev tap,id=net0,ifname=%s,script=no,downscript=no" % (mac, self.tap) + qemu_tap_opt = "-netdev tap,id=net0,ifname=%s,script=no,downscript=no" % (self.tap) if self.vhost_enabled: qemu_tap_opt += ',vhost=on' - self.set('NETWORK_CMD', qemu_tap_opt) + self.set('NETWORK_CMD', '%s %s' % (self.network_device.replace('@MAC@', mac), qemu_tap_opt)) def setup_network(self): + if self.get('QB_NET') == 'none': + return cmd = "stty -g" self.saved_stty = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8') + self.network_device = self.get('QB_NETWORK_DEVICE') or self.network_device if self.slirp_enabled: self.setup_slirp() else: self.setup_tap() + def setup_rootfs(self): + if self.get('QB_ROOTFS') == 'none': + return rootfs_format = self.fstype if self.fstype in ('vmdk', 'qcow2', 'vdi') else 'raw' qb_rootfs_opt = self.get('QB_ROOTFS_OPT') @@ -849,31 +983,40 @@ class BaseConfig(object): self.kernel_cmdline = 'root=/dev/ram0 rw debugshell' self.rootfs_options = '-initrd %s' % self.rootfs else: + vm_drive = '' if 
self.fstype in self.vmtypes: if self.fstype == 'iso': vm_drive = '-cdrom %s' % self.rootfs - else: - cmd1 = "grep -q 'root=/dev/sd' %s" % self.rootfs - cmd2 = "grep -q 'root=/dev/hd' %s" % self.rootfs - if subprocess.call(cmd1, shell=True) == 0: + elif self.get('QB_DRIVE_TYPE'): + drive_type = self.get('QB_DRIVE_TYPE') + if drive_type.startswith("/dev/sd"): logger.info('Using scsi drive') vm_drive = '-drive if=none,id=hd,file=%s,format=%s -device virtio-scsi-pci,id=scsi -device scsi-hd,drive=hd' \ % (self.rootfs, rootfs_format) - elif subprocess.call(cmd2, shell=True) == 0: + elif drive_type.startswith("/dev/hd"): logger.info('Using ide drive') vm_drive = "%s,format=%s" % (self.rootfs, rootfs_format) else: - logger.warn("Can't detect drive type %s" % self.rootfs) - logger.warn('Trying to use virtio block drive') + # virtio might have been selected explicitly (just use it), or + # is used as fallback (then warn about that). + if not drive_type.startswith("/dev/vd"): + logger.warn("Unknown QB_DRIVE_TYPE: %s" % drive_type) + logger.warn("Failed to figure out drive type, consider define or fix QB_DRIVE_TYPE") + logger.warn('Trying to use virtio block drive') vm_drive = '-drive if=virtio,file=%s,format=%s' % (self.rootfs, rootfs_format) + + # All branches above set vm_drive. self.rootfs_options = '%s -no-reboot' % vm_drive self.kernel_cmdline = 'root=%s rw highres=off' % (self.get('QB_KERNEL_ROOT')) if self.fstype == 'nfs': self.rootfs_options = '' - k_root = '/dev/nfs nfsroot=%s:%s,%s' % (self.nfs_server, self.nfs_dir, self.unfs_opts) + k_root = '/dev/nfs nfsroot=%s:%s,%s' % (self.nfs_server, self.rootfs, self.unfs_opts) self.kernel_cmdline = 'root=%s rw highres=off' % k_root + if self.fstype == 'none': + self.rootfs_options = '' + self.set('ROOTFS_OPTIONS', self.rootfs_options) def guess_qb_system(self): @@ -921,13 +1064,38 @@ class BaseConfig(object): if not qemu_system: raise Exception("Failed to boot, QB_SYSTEM_NAME is NULL!") - qemu_bin = '%s/%s' % (self.get('STAGING_BINDIR_NATIVE'), qemu_system) + qemu_bin = '%s/%s' % (self.bindir_native, qemu_system) + + # It is possible to have qemu-native in ASSUME_PROVIDED, and it won't + # find QEMU in sysroot, it needs to use host's qemu. + if not os.path.exists(qemu_bin): + logger.info("QEMU binary not found in %s, trying host's QEMU" % qemu_bin) + for path in (os.environ['PATH'] or '').split(':'): + qemu_bin_tmp = os.path.join(path, qemu_system) + logger.info("Trying: %s" % qemu_bin_tmp) + if os.path.exists(qemu_bin_tmp): + qemu_bin = qemu_bin_tmp + if not os.path.isabs(qemu_bin): + qemu_bin = os.path.abspath(qemu_bin) + logger.info("Using host's QEMU: %s" % qemu_bin) + break + if not os.access(qemu_bin, os.X_OK): raise OEPathError("No QEMU binary '%s' could be found" % qemu_bin) check_libgl(qemu_bin) - self.qemu_opt = "%s %s %s %s %s" % (qemu_bin, self.get('NETWORK_CMD'), self.qemu_opt_script, self.get('ROOTFS_OPTIONS'), self.get('QB_OPT_APPEND')) + self.qemu_opt = "%s %s %s %s" % (qemu_bin, self.get('NETWORK_CMD'), self.get('ROOTFS_OPTIONS'), self.get('QB_OPT_APPEND')) + + for ovmf in self.ovmf_bios: + format = ovmf.rsplit('.', 1)[-1] + self.qemu_opt += ' -drive if=pflash,format=%s,file=%s' % (format, ovmf) + if self.ovmf_bios: + # OVMF only supports normal VGA, i.e. we need to override a -vga vmware + # that gets added for example for normal qemux86. 
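A rough sketch of how the OVMF handling in this hunk turns a list of firmware/varstore paths into QEMU arguments; the helper name is invented, but the option strings mirror the patch (drive format taken from the file suffix, plus the forced -vga std):

    def ovmf_qemu_opts(ovmf_bios):
        """Build the extra QEMU arguments for OVMF firmware and varstore files."""
        opts = ''
        for ovmf in ovmf_bios:
            fmt = ovmf.rsplit('.', 1)[-1]          # 'qcow2' or 'bin'
            opts += ' -drive if=pflash,format=%s,file=%s' % (fmt, ovmf)
        if ovmf_bios:
            # OVMF only supports standard VGA, so override e.g. '-vga vmware'.
            opts += ' -vga std'
        return opts

    print(ovmf_qemu_opts(['ovmf.qcow2']))
    # ' -drive if=pflash,format=qcow2,file=ovmf.qcow2 -vga std'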
+ self.qemu_opt += ' -vga std' + + self.qemu_opt += ' ' + self.qemu_opt_script if self.snapshot: self.qemu_opt += " -snapshot" @@ -953,6 +1121,17 @@ class BaseConfig(object): elif serial_num == 1: self.qemu_opt += " %s" % self.get("QB_SERIAL_OPT") + # We always wants ttyS0 and ttyS1 in qemu machines (see SERIAL_CONSOLES), + # if not serial or serialtcp options was specified only ttyS0 is created + # and sysvinit shows an error trying to enable ttyS1: + # INIT: Id "S1" respawning too fast: disabled for 5 minutes + serial_num = len(re.findall("-serial", self.qemu_opt)) + if serial_num == 0: + if re.search("-nographic", self.qemu_opt): + self.qemu_opt += " -serial mon:stdio -serial null" + else: + self.qemu_opt += " -serial mon:vc -serial null" + def start_qemu(self): if self.kernel: kernel_opts = "-kernel %s -append '%s %s %s %s'" % (self.kernel, self.kernel_cmdline, @@ -969,7 +1148,7 @@ class BaseConfig(object): def cleanup(self): if self.cleantap: - cmd = 'sudo %s %s %s' % (self.qemuifdown, self.tap, self.get('STAGING_DIR_NATIVE')) + cmd = 'sudo %s %s %s' % (self.qemuifdown, self.tap, self.bindir_native) logger.info('Running %s' % cmd) subprocess.call(cmd, shell=True) if self.lock_descriptor: @@ -978,7 +1157,7 @@ class BaseConfig(object): if self.nfs_running: logger.info("Shutting down the userspace NFS server...") - cmd = "runqemu-export-rootfs stop %s" % self.nfs_dir + cmd = "runqemu-export-rootfs stop %s" % self.rootfs logger.info('Running %s' % cmd) subprocess.call(cmd, shell=True) @@ -987,9 +1166,9 @@ class BaseConfig(object): subprocess.call(cmd, shell=True) if self.clean_nfs_dir: - logger.info('Removing %s' % self.nfs_dir) - shutil.rmtree(self.nfs_dir) - shutil.rmtree('%s.pseudo_state' % self.nfs_dir) + logger.info('Removing %s' % self.rootfs) + shutil.rmtree(self.rootfs) + shutil.rmtree('%s.pseudo_state' % self.rootfs) def load_bitbake_env(self, mach=None): if self.bitbake_e: @@ -1014,8 +1193,30 @@ class BaseConfig(object): self.bitbake_e = '' logger.warn("Couldn't run 'bitbake -e' to gather environment information:\n%s" % err.output.decode('utf-8')) + @property + def bindir_native(self): + result = self.get('STAGING_BINDIR_NATIVE') + if result and os.path.exists(result): + return result + + cmd = 'bitbake qemu-helper-native -e' + logger.info('Running %s...' % cmd) + out = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE) + out = out.stdout.read().decode('utf-8') + + match = re.search('^STAGING_BINDIR_NATIVE="(.*)"', out, re.M) + if match: + result = match.group(1) + if os.path.exists(result): + self.set('STAGING_BINDIR_NATIVE', result) + return result + raise Exception("Native sysroot directory %s doesn't exist" % result) + else: + raise Exception("Can't find STAGING_BINDIR_NATIVE in '%s' output" % cmd) + + def main(): - if len(sys.argv) == 1 or "help" in sys.argv: + if "help" in sys.argv or '-h' in sys.argv or '--help' in sys.argv: print_usage() return 0 config = BaseConfig() @@ -1030,6 +1231,7 @@ def main(): config.print_config() try: config.setup_network() + config.setup_rootfs() config.setup_final() config.start_qemu() finally: diff --git a/import-layers/yocto-poky/scripts/runqemu-export-rootfs b/import-layers/yocto-poky/scripts/runqemu-export-rootfs index 7ebc07194..c7992d822 100755 --- a/import-layers/yocto-poky/scripts/runqemu-export-rootfs +++ b/import-layers/yocto-poky/scripts/runqemu-export-rootfs @@ -44,7 +44,7 @@ if [ -z "$SYSROOT_SETUP_SCRIPT" ]; then echo "Did you forget to source your build environment setup script?" exit 1 fi -. $SYSROOT_SETUP_SCRIPT +. 
$SYSROOT_SETUP_SCRIPT meta-ide-support if [ ! -e "$OECORE_NATIVE_SYSROOT/usr/bin/unfsd" ]; then echo "Error: Unable to find unfsd binary in $OECORE_NATIVE_SYSROOT/usr/bin/" diff --git a/import-layers/yocto-poky/scripts/runqemu-extract-sdk b/import-layers/yocto-poky/scripts/runqemu-extract-sdk index 32ddd485b..2a0dd50e0 100755 --- a/import-layers/yocto-poky/scripts/runqemu-extract-sdk +++ b/import-layers/yocto-poky/scripts/runqemu-extract-sdk @@ -35,7 +35,7 @@ if [ -z "$SYSROOT_SETUP_SCRIPT" ]; then echo "Did you forget to source your build system environment setup script?" exit 1 fi -. $SYSROOT_SETUP_SCRIPT +. $SYSROOT_SETUP_SCRIPT meta-ide-support PSEUDO_OPTS="-P $OECORE_NATIVE_SYSROOT/usr" ROOTFS_TARBALL=$1 diff --git a/import-layers/yocto-poky/scripts/runqemu-gen-tapdevs b/import-layers/yocto-poky/scripts/runqemu-gen-tapdevs index bfb60f44a..11de318c1 100755 --- a/import-layers/yocto-poky/scripts/runqemu-gen-tapdevs +++ b/import-layers/yocto-poky/scripts/runqemu-gen-tapdevs @@ -23,11 +23,13 @@ # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. usage() { - echo "Usage: sudo $0 " + echo "Usage: sudo $0 " echo "Where is the numeric user id the tap devices will be owned by" echo "Where is the numeric group id the tap devices will be owned by" echo " is the number of tap devices to create (0 to remove all)" echo " is the path to the build system's native sysroot" + echo "e.g. $ bitbake qemu-helper-native" + echo "$ sudo $0 1000 1000 4 tmp/sysroots-components/x86_64/qemu-helper-native/usr/bin" exit 1 } @@ -44,9 +46,9 @@ fi TUID=$1 GID=$2 COUNT=$3 -SYSROOT=$4 +STAGING_BINDIR_NATIVE=$4 -TUNCTL=$SYSROOT/usr/bin/tunctl +TUNCTL=$STAGING_BINDIR_NATIVE/tunctl if [[ ! -x "$TUNCTL" || -d "$TUNCTL" ]]; then echo "Error: $TUNCTL is not an executable" usage @@ -85,7 +87,7 @@ if [ $COUNT -gt 0 ]; then echo "Creating $COUNT tap devices for UID: $TUID GID: $GID..." for ((index=0; index < $COUNT; index++)); do echo "Creating tap$index" - ifup=`$RUNQEMU_IFUP $TUID $GID $SYSROOT 2>&1` + ifup=`$RUNQEMU_IFUP $TUID $GID $STAGING_BINDIR_NATIVE 2>&1` if [ $? -ne 0 ]; then echo "Error running tunctl: $ifup" exit 1 diff --git a/import-layers/yocto-poky/scripts/runqemu-ifdown b/import-layers/yocto-poky/scripts/runqemu-ifdown index 8f66cfa2a..ffbc9de44 100755 --- a/import-layers/yocto-poky/scripts/runqemu-ifdown +++ b/import-layers/yocto-poky/scripts/runqemu-ifdown @@ -41,11 +41,11 @@ if [ $# -ne 2 ]; then fi TAP=$1 -NATIVE_SYSROOT_DIR=$2 +STAGING_BINDIR_NATIVE=$2 -TUNCTL=$NATIVE_SYSROOT_DIR/usr/bin/tunctl +TUNCTL=$STAGING_BINDIR_NATIVE/tunctl if [ ! -e "$TUNCTL" ]; then - echo "Error: Unable to find tunctl binary in '$NATIVE_SYSROOT_DIR/usr/bin', please bitbake qemu-helper-native" + echo "Error: Unable to find tunctl binary in '$STAGING_BINDIR_NATIVE', please bitbake qemu-helper-native" exit 1 fi diff --git a/import-layers/yocto-poky/scripts/runqemu-ifup b/import-layers/yocto-poky/scripts/runqemu-ifup index d9bd89412..59a15eaa2 100755 --- a/import-layers/yocto-poky/scripts/runqemu-ifup +++ b/import-layers/yocto-poky/scripts/runqemu-ifup @@ -49,11 +49,11 @@ fi USERID="-u $1" GROUP="-g $2" -NATIVE_SYSROOT_DIR=$3 +STAGING_BINDIR_NATIVE=$3 -TUNCTL=$NATIVE_SYSROOT_DIR/usr/bin/tunctl +TUNCTL=$STAGING_BINDIR_NATIVE/tunctl if [ ! 
-x "$TUNCTL" ]; then - echo "Error: Unable to find tunctl binary in '$NATIVE_SYSROOT_DIR/usr/bin', please bitbake qemu-helper-native" + echo "Error: Unable to find tunctl binary in '$STAGING_BINDIR_NATIVE', please bitbake qemu-helper-native" exit 1 fi diff --git a/import-layers/yocto-poky/scripts/sysroot-relativelinks.py b/import-layers/yocto-poky/scripts/sysroot-relativelinks.py index e44eba2b1..ffe254728 100755 --- a/import-layers/yocto-poky/scripts/sysroot-relativelinks.py +++ b/import-layers/yocto-poky/scripts/sysroot-relativelinks.py @@ -24,7 +24,7 @@ def handlelink(filep, subdir): os.symlink(os.path.relpath(topdir+link, subdir), filep) for subdir, dirs, files in os.walk(topdir): - for f in files: + for f in dirs + files: filep = os.path.join(subdir, f) if os.path.islink(filep): #print("Considering %s" % filep) diff --git a/import-layers/yocto-poky/scripts/task-time b/import-layers/yocto-poky/scripts/task-time new file mode 100755 index 000000000..e58040a9b --- /dev/null +++ b/import-layers/yocto-poky/scripts/task-time @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 + +import argparse +import os +import re +import sys + +arg_parser = argparse.ArgumentParser( + description=""" +Reports time consumed for one or more task in a format similar to the standard +Bash 'time' builtin. Optionally sorts tasks by real (wall-clock), user (user +space CPU), or sys (kernel CPU) time. +""") + +arg_parser.add_argument( + "paths", + metavar="path", + nargs="+", + help=""" +A path containing task buildstats. If the path is a directory, e.g. +build/tmp/buildstats, then all task found (recursively) in it will be +processed. If the path is a single task buildstat, e.g. +build/tmp/buildstats/20161018083535/foo-1.0-r0/do_compile, then just that +buildstat will be processed. Multiple paths can be specified to process all of +them. Files whose names do not start with "do_" are ignored. +""") + +arg_parser.add_argument( + "--sort", + choices=("none", "real", "user", "sys"), + default="none", + help=""" +The measurement to sort the output by. Defaults to 'none', which means to sort +by the order paths were given on the command line. For other options, tasks are +sorted in descending order from the highest value. 
+""") + +args = arg_parser.parse_args() + +# Field names and regexes for parsing out their values from buildstat files +field_regexes = (("elapsed", ".*Elapsed time: ([0-9.]+)"), + ("user", "rusage ru_utime: ([0-9.]+)"), + ("sys", "rusage ru_stime: ([0-9.]+)"), + ("child user", "Child rusage ru_utime: ([0-9.]+)"), + ("child sys", "Child rusage ru_stime: ([0-9.]+)")) + +# A list of (, ) tuples, where is the path of a do_* task +# buildstat file and maps fields from the file to their values +task_infos = [] + +def save_times_for_task(path): + """Saves information for the buildstat file 'path' in 'task_infos'.""" + + if not os.path.basename(path).startswith("do_"): + return + + with open(path) as f: + fields = {} + + for line in f: + for name, regex in field_regexes: + match = re.match(regex, line) + if match: + fields[name] = float(match.group(1)) + break + + # Check that all expected fields were present + for name, regex in field_regexes: + if name not in fields: + print("Warning: Skipping '{}' because no field matching '{}' could be found" + .format(path, regex), + file=sys.stderr) + return + + task_infos.append((path, fields)) + +def save_times_for_dir(path): + """Runs save_times_for_task() for each file in path and its subdirs, recursively.""" + + # Raise an exception for os.walk() errors instead of ignoring them + def walk_onerror(e): + raise e + + for root, _, files in os.walk(path, onerror=walk_onerror): + for fname in files: + save_times_for_task(os.path.join(root, fname)) + +for path in args.paths: + if os.path.isfile(path): + save_times_for_task(path) + else: + save_times_for_dir(path) + +def elapsed_time(task_info): + return task_info[1]["elapsed"] + +def tot_user_time(task_info): + return task_info[1]["user"] + task_info[1]["child user"] + +def tot_sys_time(task_info): + return task_info[1]["sys"] + task_info[1]["child sys"] + +if args.sort != "none": + sort_fn = {"real": elapsed_time, "user": tot_user_time, "sys": tot_sys_time} + task_infos.sort(key=sort_fn[args.sort], reverse=True) + +first_entry = True + +# Catching BrokenPipeError avoids annoying errors when the output is piped into +# e.g. 
'less' or 'head' and not completely read +try: + for task_info in task_infos: + real = elapsed_time(task_info) + user = tot_user_time(task_info) + sys = tot_sys_time(task_info) + + if not first_entry: + print() + first_entry = False + + # Mimic Bash's 'time' builtin + print("{}:\n" + "real\t{}m{:.3f}s\n" + "user\t{}m{:.3f}s\n" + "sys\t{}m{:.3f}s" + .format(task_info[0], + int(real//60), real%60, + int(user//60), user%60, + int(sys//60), sys%60)) + +except BrokenPipeError: + pass diff --git a/import-layers/yocto-poky/scripts/tiny/ksize.py b/import-layers/yocto-poky/scripts/tiny/ksize.py index b9d2b192c..ea1ca7ff2 100755 --- a/import-layers/yocto-poky/scripts/tiny/ksize.py +++ b/import-layers/yocto-poky/scripts/tiny/ksize.py @@ -41,7 +41,7 @@ def usage(): class Sizes: def __init__(self, glob): self.title = glob - p = Popen("size -t " + glob, shell=True, stdout=PIPE, stderr=PIPE) + p = Popen("size -t " + str(glob), shell=True, stdout=PIPE, stderr=PIPE) output = p.communicate()[0].splitlines() if len(output) > 2: sizes = output[-1].split()[0:4] @@ -62,18 +62,18 @@ class Report: r = Report(filename, title) path = os.path.dirname(filename) - p = Popen("ls " + path + "/*.o | grep -v built-in.o", + p = Popen("ls " + str(path) + "/*.o | grep -v built-in.o", shell=True, stdout=PIPE, stderr=PIPE) glob = ' '.join(p.communicate()[0].splitlines()) - oreport = Report(glob, path + "/*.o") - oreport.sizes.title = path + "/*.o" + oreport = Report(glob, str(path) + "/*.o") + oreport.sizes.title = str(path) + "/*.o" r.parts.append(oreport) if subglob: p = Popen("ls " + subglob, shell=True, stdout=PIPE, stderr=PIPE) for f in p.communicate()[0].splitlines(): path = os.path.dirname(f) - r.parts.append(Report.create(f, path, path + "/*/built-in.o")) + r.parts.append(Report.create(f, path, str(path) + "/*/built-in.o")) r.parts.sort(reverse=True) for b in r.parts: @@ -116,6 +116,13 @@ class Report: self.deltas["data"], self.deltas["bss"])) print("\n") + def __lt__(this, that): + if that is None: + return 1 + if not isinstance(that, Report): + raise TypeError + return this.sizes.total < that.sizes.total + def __cmp__(this, that): if that is None: return 1 diff --git a/import-layers/yocto-poky/scripts/tiny/ksum.py b/import-layers/yocto-poky/scripts/tiny/ksum.py new file mode 100755 index 000000000..d4f389215 --- /dev/null +++ b/import-layers/yocto-poky/scripts/tiny/ksum.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# Copyright (c) 2016, Intel Corporation. +# All rights reserved. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License version 2 as +# published by the Free Software Foundation. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +# +# DESCRIPTION 'ksum.py' generates a combined summary of vmlinux and +# module sizes for a built kernel, as a quick tool for comparing the +# overall effects of systemic tinification changes. Execute from the +# base directory of the kernel build you want to summarize. 
Setting +# the 'verbose' flag will display the sizes for each file included in +# the summary. +# +# AUTHORS +# Tom Zanussi +# + +__version__ = "0.1.0" + +# Python Standard Library modules +import os +import sys +import getopt +from subprocess import * + +def usage(): + prog = os.path.basename(sys.argv[0]) + print('Usage: %s [OPTION]...' % prog) + print(' -v, display sizes for each file') + print(' -h, --help display this help and exit') + print('') + print('Run %s from the top-level Linux kernel build directory.' % prog) + +verbose = False + +n_ko_files = 0 +ko_file_list = [] + +ko_text = 0 +ko_data = 0 +ko_bss = 0 +ko_total = 0 + +vmlinux_file = "" +vmlinux_level = 0 + +vmlinux_text = 0 +vmlinux_data = 0 +vmlinux_bss = 0 +vmlinux_total = 0 + +def is_vmlinux_file(filename): + global vmlinux_level + if filename == ("vmlinux") and vmlinux_level == 0: + vmlinux_level += 1 + return True + return False + +def is_ko_file(filename): + if filename.endswith(".ko"): + return True + return False + +def collect_object_files(): + print "Collecting object files recursively from %s..." % os.getcwd() + for dirpath, dirs, files in os.walk(os.getcwd()): + for filename in files: + if is_ko_file(filename): + ko_file_list.append(os.path.join(dirpath, filename)) + elif is_vmlinux_file(filename): + global vmlinux_file + vmlinux_file = os.path.join(dirpath, filename) + print "Collecting object files [DONE]" + +def add_ko_file(filename): + p = Popen("size -t " + filename, shell=True, stdout=PIPE, stderr=PIPE) + output = p.communicate()[0].splitlines() + if len(output) > 2: + sizes = output[-1].split()[0:4] + if verbose: + print " %10d %10d %10d %10d\t" % \ + (int(sizes[0]), int(sizes[1]), int(sizes[2]), int(sizes[3])), + print "%s" % filename[len(os.getcwd()) + 1:] + global n_ko_files, ko_text, ko_data, ko_bss, ko_total + ko_text += int(sizes[0]) + ko_data += int(sizes[1]) + ko_bss += int(sizes[2]) + ko_total += int(sizes[3]) + n_ko_files += 1 + +def get_vmlinux_totals(): + p = Popen("size -t " + vmlinux_file, shell=True, stdout=PIPE, stderr=PIPE) + output = p.communicate()[0].splitlines() + if len(output) > 2: + sizes = output[-1].split()[0:4] + if verbose: + print " %10d %10d %10d %10d\t" % \ + (int(sizes[0]), int(sizes[1]), int(sizes[2]), int(sizes[3])), + print "%s" % vmlinux_file[len(os.getcwd()) + 1:] + global vmlinux_text, vmlinux_data, vmlinux_bss, vmlinux_total + vmlinux_text += int(sizes[0]) + vmlinux_data += int(sizes[1]) + vmlinux_bss += int(sizes[2]) + vmlinux_total += int(sizes[3]) + +def sum_ko_files(): + for ko_file in ko_file_list: + add_ko_file(ko_file) + +def main(): + try: + opts, args = getopt.getopt(sys.argv[1:], "vh", ["help"]) + except getopt.GetoptError as err: + print('%s' % str(err)) + usage() + sys.exit(2) + + for o, a in opts: + if o == '-v': + global verbose + verbose = True + elif o in ('-h', '--help'): + usage() + sys.exit(0) + else: + assert False, "unhandled option" + + collect_object_files() + sum_ko_files() + get_vmlinux_totals() + + print "\nTotals:" + print "\nvmlinux:" + print " text\tdata\t\tbss\t\ttotal" + print " %-10d\t%-10d\t%-10d\t%-10d" % \ + (vmlinux_text, vmlinux_data, vmlinux_bss, vmlinux_total) + print "\nmodules (%d):" % n_ko_files + print " text\tdata\t\tbss\t\ttotal" + print " %-10d\t%-10d\t%-10d\t%-10d" % \ + (ko_text, ko_data, ko_bss, ko_total) + print "\nvmlinux + modules:" + print " text\tdata\t\tbss\t\ttotal" + print " %-10d\t%-10d\t%-10d\t%-10d" % \ + (vmlinux_text + ko_text, vmlinux_data + ko_data, \ + vmlinux_bss + ko_bss, vmlinux_total + ko_total) + 
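ksum.py derives all of its numbers from the summary row printed by `size -t`; add_ko_file() and get_vmlinux_totals() both take the first four columns (text, data, bss, total) of that last line. The script itself is written against Python 2's print statement; purely for reference, a Python 3 flavoured sketch of the same parsing step (function name and example path are illustrative):

    import subprocess

    def size_totals(path):
        """Return (text, data, bss, total) from the summary row of `size -t`."""
        out = subprocess.run(['size', '-t', path],
                             capture_output=True, text=True).stdout
        lines = out.splitlines()
        if len(lines) <= 2:
            return None                     # no totals row, e.g. size failed
        text, data, bss, total = (int(v) for v in lines[-1].split()[0:4])
        return text, data, bss, total

    # e.g. size_totals('drivers/usb/core/usbcore.ko')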
+if __name__ == "__main__": + try: + ret = main() + except Exception: + ret = 1 + import traceback + traceback.print_exc(5) + sys.exit(ret) diff --git a/import-layers/yocto-poky/scripts/verify-bashisms b/import-layers/yocto-poky/scripts/verify-bashisms index 0741e1844..dab64ef50 100755 --- a/import-layers/yocto-poky/scripts/verify-bashisms +++ b/import-layers/yocto-poky/scripts/verify-bashisms @@ -6,7 +6,7 @@ whitelist = ( # type is supported by dash 'if type systemctl >/dev/null 2>/dev/null; then', 'if type systemd-tmpfiles >/dev/null 2>/dev/null; then', - 'if type update-rc.d >/dev/null 2>/dev/null; then', + 'type update-rc.d >/dev/null 2>/dev/null; then', 'command -v', # HOSTNAME is set locally 'buildhistory_single_commit "$CMDLINE" "$HOSTNAME"', @@ -22,7 +22,10 @@ def is_whitelisted(s): return True return False -def process(recipe, function, script): +SCRIPT_LINENO_RE = re.compile(r' line (\d+) ') +BASHISM_WARNING = re.compile(r'^(possible bashism in.*)$', re.MULTILINE) + +def process(filename, function, lineno, script): import tempfile if not script.startswith("#!"): @@ -40,18 +43,38 @@ def process(recipe, function, script): # TODO check exit code is 1 # Replace the temporary filename with the function and split it - output = e.output.replace(fn.name, function).splitlines() - if len(results) % 2 != 0: - print("Unexpected output from checkbashism: %s" % str(output)) - return - - # Turn the output into a list of (message, source) values + output = e.output.replace(fn.name, function) + if not output or not output.startswith('possible bashism'): + # Probably starts with or contains only warnings. Dump verbatim + # with one space indention. Can't do the splitting and whitelist + # checking below. + return '\n'.join([filename, + ' Unexpected output from checkbashisms.pl'] + + [' ' + x for x in output.splitlines()]) + + # We know that the first line matches and that therefore the first + # list entry will be empty - skip it. + output = BASHISM_WARNING.split(output)[1:] + # Turn the output into a single string like this: + # /.../foobar.bb + # possible bashism in updatercd_postrm line 2 (type): + # if ${@use_updatercd(d)} && type update-rc.d >/dev/null 2>/dev/null; then + # ... + # ... result = [] # Check the results against the whitelist for message, source in zip(output[0::2], output[1::2]): if not is_whitelisted(source): - result.append((message, source)) - return result + if lineno is not None: + message = SCRIPT_LINENO_RE.sub(lambda m: ' line %d ' % (int(m.group(1)) + int(lineno) - 1), + message) + result.append(' ' + message.strip()) + result.extend([' %s' % x for x in source.splitlines()]) + if result: + result.insert(0, filename) + return '\n'.join(result) + else: + return None def get_tinfoil(): scripts_path = os.path.dirname(os.path.realpath(__file__)) @@ -68,9 +91,19 @@ def get_tinfoil(): if __name__=='__main__': import shutil if shutil.which("checkbashisms.pl") is None: - print("Cannot find checkbashisms.pl on $PATH") + print("Cannot find checkbashisms.pl on $PATH, get it from https://anonscm.debian.org/cgit/collab-maint/devscripts.git/plain/scripts/checkbashisms.pl") sys.exit(1) + # The order of defining the worker function, + # initializing the pool and connecting to the + # bitbake server is crucial, don't change it. 
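The comment above pins down the ordering that the multiprocessing rework of verify-bashisms depends on: define the worker, create the Pool, and only then connect to the bitbake server and build the {(filename, function, lineno): script} dictionary that pool.imap() consumes. A stripped-down sketch of that shape; the worker body here is only a stand-in for the real call out to checkbashisms.pl:

    import multiprocessing

    def func(item):
        """Worker: ((filename, funcname, lineno), script) -> report string or None."""
        (filename, funcname, lineno), script = item
        # Stand-in check; the real script runs checkbashisms.pl on the snippet.
        if 'function ' in script:
            return '%s\n  possible bashism in %s line %s' % (filename, funcname, lineno)
        return None

    if __name__ == '__main__':
        scripts = {('recipe.bb', 'do_install', '10'): 'function foo { :; }',
                   ('recipe.bb', 'do_configure', '20'): 'true'}
        with multiprocessing.Pool() as pool:
            for result in pool.imap(func, scripts.items()):
                if result:
                    print(result)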
+ def func(item): + (filename, key, lineno), script = item + return process(filename, key, lineno, script) + + import multiprocessing + pool = multiprocessing.Pool() + tinfoil = get_tinfoil() # This is only the default configuration and should iterate over @@ -83,34 +116,33 @@ if __name__=='__main__': else: initial_pns = sorted(pkg_pn) - pns = [] - print("Generating file list...") + pns = set() + scripts = {} + print("Generating scripts...") for pn in initial_pns: for fn in pkg_pn[pn]: # There's no point checking multiple BBCLASSEXTENDed variants of the same recipe + # (at least in general - there is some risk that the variants contain different scripts) realfn, _, _ = bb.cache.virtualfn2realfn(fn) if realfn not in pns: - pns.append(realfn) - + pns.add(realfn) + data = tinfoil.parse_recipe_file(realfn) + for key in data.keys(): + if data.getVarFlag(key, "func") and not data.getVarFlag(key, "python"): + script = data.getVar(key, False) + if script: + filename = data.getVarFlag(key, "filename") + lineno = data.getVarFlag(key, "lineno") + # There's no point in checking a function multiple + # times just because different recipes include it. + # We identify unique scripts by file, name, and (just in case) + # line number. + attributes = (filename or realfn, key, lineno) + scripts.setdefault(attributes, script) - def func(fn): - result = [] - data = tinfoil.parse_recipe_file(fn) - for key in data.keys(): - if data.getVarFlag(key, "func", True) and not data.getVarFlag(key, "python", True): - script = data.getVar(key, False) - if not script: continue - #print ("%s:%s" % (fn, key)) - r = process(fn, key, script) - if r: result.extend(r) - return fn, result print("Scanning scripts...\n") - import multiprocessing - pool = multiprocessing.Pool() - for pn,results in pool.imap(func, pns): - if results: - print(pn) - for message,source in results: - print(" %s\n %s" % (message, source)) - print() + for result in pool.imap(func, scripts.items()): + if result: + print(result) + tinfoil.shutdown() diff --git a/import-layers/yocto-poky/scripts/wic b/import-layers/yocto-poky/scripts/wic index fe2c33f0e..a5f2dbfc6 100755 --- a/import-layers/yocto-poky/scripts/wic +++ b/import-layers/yocto-poky/scripts/wic @@ -41,6 +41,8 @@ from distutils import spawn scripts_path = os.path.abspath(os.path.dirname(__file__)) lib_path = scripts_path + '/lib' sys.path.insert(0, lib_path) +oe_lib_path = os.path.join(os.path.dirname(scripts_path), 'meta', 'lib') +sys.path.insert(0, oe_lib_path) bitbake_exe = spawn.find_executable('bitbake') if bitbake_exe: @@ -51,11 +53,28 @@ if bitbake_exe: else: bitbake_main = None -from wic.utils.oe.misc import get_bitbake_var, BB_VARS -from wic.utils.errors import WicError +from wic import WicError +from wic.utils.misc import get_bitbake_var, BB_VARS from wic import engine from wic import help as hlp + +def wic_logger(): + """Create and convfigure wic logger.""" + logger = logging.getLogger('wic') + logger.setLevel(logging.INFO) + + handler = logging.StreamHandler() + + formatter = logging.Formatter('%(levelname)s: %(message)s') + handler.setFormatter(formatter) + + logger.addHandler(handler) + + return logger + +logger = wic_logger() + def rootfs_dir_to_args(krootfs_dir): """ Get a rootfs_dir dict and serialize to string @@ -88,7 +107,7 @@ def wic_create_subcommand(args, usage_str): """ parser = optparse.OptionParser(usage=usage_str) - parser.add_option("-o", "--outdir", dest="outdir", + parser.add_option("-o", "--outdir", dest="outdir", default='.', help="name of directory to create image 
in") parser.add_option("-e", "--image-name", dest="image_name", help="name of the image to use the artifacts from " @@ -107,7 +126,7 @@ def wic_create_subcommand(args, usage_str): parser.add_option("-n", "--native-sysroot", dest="native_sysroot", help="path to the native sysroot containing the tools " "to use to build the image") - parser.add_option("-p", "--skip-build-check", dest="build_check", + parser.add_option("-s", "--skip-build-check", dest="build_check", action="store_false", default=True, help="skip the build check") parser.add_option("-f", "--build-rootfs", action="store_true", help="build rootfs") parser.add_option("-c", "--compress-with", choices=("gzip", "bzip2", "xz"), @@ -123,13 +142,11 @@ def wic_create_subcommand(args, usage_str): (options, args) = parser.parse_args(args) if len(args) != 1: - logging.error("Wrong number of arguments, exiting\n") parser.print_help() - sys.exit(1) + raise WicError("Wrong number of arguments, exiting") if options.build_rootfs and not bitbake_main: - logging.error("Can't build roofs as bitbake is not in the $PATH") - sys.exit(1) + raise WicError("Can't build rootfs as bitbake is not in the $PATH") if not options.image_name: missed = [] @@ -140,9 +157,8 @@ def wic_create_subcommand(args, usage_str): if not val: missed.append(opt) if missed: - print("The following build artifacts are not specified:") - print(" " + ", ".join(missed)) - sys.exit(1) + raise WicError("The following build artifacts are not specified: %s" % + ", ".join(missed)) if options.image_name: BB_VARS.default_image = options.image_name @@ -152,15 +168,11 @@ def wic_create_subcommand(args, usage_str): if options.vars_dir: BB_VARS.vars_dir = options.vars_dir - if options.build_check: - print("Checking basic build environment...") - if not engine.verify_build_env(): - print("Couldn't verify build environment, exiting\n") - sys.exit(1) - else: - print("Done.\n") + if options.build_check and not engine.verify_build_env(): + raise WicError("Couldn't verify build environment, exiting") - bootimg_dir = "" + if options.debug: + logger.setLevel(logging.DEBUG) if options.image_name: if options.build_rootfs: @@ -168,33 +180,40 @@ def wic_create_subcommand(args, usage_str): if options.debug: argv.append("--debug") - print("Building rootfs...\n") + logger.info("Building rootfs...\n") if bitbake_main(BitBakeConfigParameters(argv), cookerdata.CookerConfiguration()): - sys.exit(1) + raise WicError("bitbake exited with error") rootfs_dir = get_bitbake_var("IMAGE_ROOTFS", options.image_name) kernel_dir = get_bitbake_var("DEPLOY_DIR_IMAGE", options.image_name) - native_sysroot = get_bitbake_var("STAGING_DIR_NATIVE", - options.image_name) + bootimg_dir = get_bitbake_var("STAGING_DATADIR", options.image_name) + native_sysroot = get_bitbake_var("RECIPE_SYSROOT_NATIVE", + options.image_name) #, cache=False) else: if options.build_rootfs: - print("Image name is not specified, exiting. (Use -e/--image-name to specify it)\n") - sys.exit(1) + raise WicError("Image name is not specified, exiting. 
" + "(Use -e/--image-name to specify it)") + native_sysroot = options.native_sysroot + + if not native_sysroot or not os.path.isdir(native_sysroot): + logger.info("Building wic-tools...\n") + if bitbake_main(BitBakeConfigParameters("bitbake wic-tools".split()), + cookerdata.CookerConfiguration()): + raise WicError("bitbake wic-tools failed") + native_sysroot = get_bitbake_var("RECIPE_SYSROOT_NATIVE", "wic-tools") + if not native_sysroot: + raise WicError("Unable to find the location of the native " + "tools sysroot to use") wks_file = args[0] if not wks_file.endswith(".wks"): wks_file = engine.find_canned_image(scripts_path, wks_file) if not wks_file: - print("No image named %s found, exiting. (Use 'wic list images' "\ - "to list available images, or specify a fully-qualified OE "\ - "kickstart (.wks) filename)\n" % args[0]) - sys.exit(1) - - image_output_dir = "" - if options.outdir: - image_output_dir = options.outdir + raise WicError("No image named %s found, exiting. (Use 'wic list images' " + "to list available images, or specify a fully-qualified OE " + "kickstart (.wks) filename)" % args[0]) if not options.image_name: rootfs_dir = '' @@ -204,17 +223,13 @@ def wic_create_subcommand(args, usage_str): kernel_dir = options.kernel_dir native_sysroot = options.native_sysroot if rootfs_dir and not os.path.isdir(rootfs_dir): - print("--roofs-dir (-r) not found, exiting\n") - sys.exit(1) + raise WicError("--rootfs-dir (-r) not found, exiting") if not os.path.isdir(bootimg_dir): - print("--bootimg-dir (-b) not found, exiting\n") - sys.exit(1) + raise WicError("--bootimg-dir (-b) not found, exiting") if not os.path.isdir(kernel_dir): - print("--kernel-dir (-k) not found, exiting\n") - sys.exit(1) + raise WicError("--kernel-dir (-k) not found, exiting") if not os.path.isdir(native_sysroot): - print("--native-sysroot (-n) not found, exiting\n") - sys.exit(1) + raise WicError("--native-sysroot (-n) not found, exiting") else: not_found = not_found_dir = "" if not os.path.isdir(rootfs_dir): @@ -226,13 +241,11 @@ def wic_create_subcommand(args, usage_str): if not_found: if not not_found_dir: not_found_dir = "Completely missing artifact - wrong image (.wks) used?" 
- print("Build artifacts not found, exiting.") - print(" (Please check that the build artifacts for the machine") - print(" selected in local.conf actually exist and that they") - print(" are the correct artifacts for the image (.wks file)).\n") - print("The artifact that couldn't be found was %s:\n %s" % \ - (not_found, not_found_dir)) - sys.exit(1) + logger.info("Build artifacts not found, exiting.") + logger.info(" (Please check that the build artifacts for the machine") + logger.info(" selected in local.conf actually exist and that they") + logger.info(" are the correct artifacts for the image (.wks file)).\n") + raise WicError("The artifact that couldn't be found was %s:\n %s", not_found, not_found_dir) krootfs_dir = options.rootfs_dir if krootfs_dir is None: @@ -241,10 +254,9 @@ def wic_create_subcommand(args, usage_str): rootfs_dir = rootfs_dir_to_args(krootfs_dir) - print("Creating image(s)...\n") + logger.info("Creating image(s)...\n") engine.wic_create(wks_file, rootfs_dir, bootimg_dir, kernel_dir, - native_sysroot, scripts_path, image_output_dir, - options.compressor, options.bmap, options.debug) + native_sysroot, options) def wic_list_subcommand(args, usage_str): @@ -256,9 +268,8 @@ def wic_list_subcommand(args, usage_str): args = parser.parse_args(args)[1] if not engine.wic_list(args, scripts_path): - logging.error("Bad list arguments, exiting\n") parser.print_help() - sys.exit(1) + raise WicError("Bad list arguments, exiting") def wic_help_topic_subcommand(args, usage_str): @@ -293,10 +304,6 @@ subcommands = { } -def start_logging(loglevel): - logging.basicConfig(filename='wic.log', filemode='w', level=loglevel) - - def main(argv): parser = optparse.OptionParser(version="wic version %s" % __version__, usage=hlp.wic_usage) @@ -309,7 +316,7 @@ def main(argv): if args[0] == "help": if len(args) == 1: parser.print_help() - sys.exit(1) + raise WicError("help command requires parameter") return hlp.invoke_subcommand(args, parser, hlp.wic_help_usage, subcommands) @@ -318,6 +325,6 @@ if __name__ == "__main__": try: sys.exit(main(sys.argv[1:])) except WicError as err: - print("ERROR:", err, file=sys.stderr) + print() + logger.error(err) sys.exit(1) - diff --git a/import-layers/yocto-poky/scripts/wipe-sysroot b/import-layers/yocto-poky/scripts/wipe-sysroot deleted file mode 100755 index 5e6b1a4e2..000000000 --- a/import-layers/yocto-poky/scripts/wipe-sysroot +++ /dev/null @@ -1,54 +0,0 @@ -#! /bin/sh - -# Wipe out all of the sysroots and all of the stamps that populated it. -# Author: Ross Burton -# -# Copyright (c) 2012 Intel Corporation -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License version 2 as -# published by the Free Software Foundation. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -# See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -set -e - -if [ $# -gt 0 ]; then - echo "Wipe all sysroots and sysroot-related stamps for the current build directory." 
>&2 - echo "Usage: $0" >&2 - exit 1 -fi - -ENVS=`mktemp --suffix -wipe-sysroot-envs` -bitbake -p -e > $ENVS - -eval `grep -F SSTATE_MANIFESTS= $ENVS` -eval `grep -F STAGING_DIR= $ENVS` -eval `grep -F STAMPS_DIR= $ENVS` -rm -f $ENVS - -if [ -z "$SSTATE_MANIFESTS" -o -z "$STAGING_DIR" -o -z "$STAMPS_DIR" ]; then - echo "Could not determine SSTATE_MANIFESTS/STAGING_DIR/STAMPS_DIR from bitbake, check above for errors" - exit 1 -fi - -echo "Deleting the sysroots in $STAGING_DIR, and selected stamps in $SSTATE_MANIFESTS and $STAMPS_DIR." - -# The sysroots themselves -rm -rf $STAGING_DIR ${STAGING_DIR}-uninative - -# The stamps that said the sysroot was populated -rm -rf $STAMPS_DIR/*/*/*.do_populate_sysroot.* -rm -rf $STAMPS_DIR/*/*/*.do_populate_sysroot_setscene.* -rm -rf $STAMPS_DIR/*/*/*.do_packagedata.* -rm -rf $STAMPS_DIR/*/*/*.do_packagedata_setscene.* - -# The sstate manifests -rm -rf $SSTATE_MANIFESTS/manifest-*.populate_sysroot diff --git a/import-layers/yocto-poky/scripts/yocto-compat-layer-wrapper b/import-layers/yocto-poky/scripts/yocto-compat-layer-wrapper new file mode 100755 index 000000000..db4b6871b --- /dev/null +++ b/import-layers/yocto-poky/scripts/yocto-compat-layer-wrapper @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +# Yocto Project compatibility layer tool wrapper +# +# Creates a temprary build directory to run Yocto Project Compatible +# script to avoid a contaminated environment. +# +# Copyright (C) 2017 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +if [ -z "$BUILDDIR" ]; then + echo "Please source oe-init-build-env before run this script." + exit 2 +fi + +base_dir=$(realpath $BUILDDIR/../) +cd $base_dir + +build_dir=$(mktemp -p $base_dir -d -t build-XXXX) + +source oe-init-build-env $build_dir +yocto-compat-layer.py "$@" +retcode=$? 
+ +rm -rf $build_dir + +exit $retcode diff --git a/import-layers/yocto-poky/scripts/yocto-compat-layer.py b/import-layers/yocto-poky/scripts/yocto-compat-layer.py new file mode 100755 index 000000000..ba64b4d6e --- /dev/null +++ b/import-layers/yocto-poky/scripts/yocto-compat-layer.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python3 + +# Yocto Project compatibility layer tool +# +# Copyright (C) 2017 Intel Corporation +# Released under the MIT license (see COPYING.MIT) + +import os +import sys +import argparse +import logging +import time +import signal +import shutil +import collections + +scripts_path = os.path.dirname(os.path.realpath(__file__)) +lib_path = scripts_path + '/lib' +sys.path = sys.path + [lib_path] +import scriptutils +import scriptpath +scriptpath.add_oe_lib_path() +scriptpath.add_bitbake_lib_path() + +from compatlayer import LayerType, detect_layers, add_layer, add_layer_dependencies, get_signatures +from oeqa.utils.commands import get_bb_vars + +PROGNAME = 'yocto-compat-layer' +CASES_PATHS = [os.path.join(os.path.abspath(os.path.dirname(__file__)), + 'lib', 'compatlayer', 'cases')] +logger = scriptutils.logger_create(PROGNAME, stream=sys.stdout) + +def test_layer_compatibility(td, layer, test_software_layer_signatures): + from compatlayer.context import CompatLayerTestContext + logger.info("Starting to analyze: %s" % layer['name']) + logger.info("----------------------------------------------------------------------") + + tc = CompatLayerTestContext(td=td, logger=logger, layer=layer, test_software_layer_signatures=test_software_layer_signatures) + tc.loadTests(CASES_PATHS) + return tc.runTests() + +def main(): + parser = argparse.ArgumentParser( + description="Yocto Project compatibility layer tool", + add_help=False) + parser.add_argument('layers', metavar='LAYER_DIR', nargs='+', + help='Layer to test compatibility with Yocto Project') + parser.add_argument('-o', '--output-log', + help='File to output log (optional)', action='store') + parser.add_argument('--dependency', nargs="+", + help='Layers to process for dependencies', action='store') + parser.add_argument('--machines', nargs="+", + help='List of MACHINEs to be used during testing', action='store') + parser.add_argument('--additional-layers', nargs="+", + help='List of additional layers to add during testing', action='store') + group = parser.add_mutually_exclusive_group() + group.add_argument('--with-software-layer-signature-check', action='store_true', dest='test_software_layer_signatures', + default=True, + help='check that software layers do not change signatures (on by default)') + group.add_argument('--without-software-layer-signature-check', action='store_false', dest='test_software_layer_signatures', + help='disable signature checking for software layers') + parser.add_argument('-n', '--no-auto', help='Disable auto layer discovery', + action='store_true') + parser.add_argument('-d', '--debug', help='Enable debug output', + action='store_true') + parser.add_argument('-q', '--quiet', help='Print only errors', + action='store_true') + + parser.add_argument('-h', '--help', action='help', + default=argparse.SUPPRESS, + help='show this help message and exit') + + args = parser.parse_args() + + if args.output_log: + fh = logging.FileHandler(args.output_log) + fh.setFormatter(logging.Formatter("%(levelname)s: %(message)s")) + logger.addHandler(fh) + if args.debug: + logger.setLevel(logging.DEBUG) + elif args.quiet: + logger.setLevel(logging.ERROR) + + if not 'BUILDDIR' in os.environ: + logger.error("You must source the 
environment before run this script.") + logger.error("$ source oe-init-build-env") + return 1 + builddir = os.environ['BUILDDIR'] + bblayersconf = os.path.join(builddir, 'conf', 'bblayers.conf') + + layers = detect_layers(args.layers, args.no_auto) + if not layers: + logger.error("Fail to detect layers") + return 1 + if args.additional_layers: + additional_layers = detect_layers(args.additional_layers, args.no_auto) + else: + additional_layers = [] + if args.dependency: + dep_layers = detect_layers(args.dependency, args.no_auto) + dep_layers = dep_layers + layers + else: + dep_layers = layers + + logger.info("Detected layers:") + for layer in layers: + if layer['type'] == LayerType.ERROR_BSP_DISTRO: + logger.error("%s: Can't be DISTRO and BSP type at the same time."\ + " The conf/distro and conf/machine folders was found."\ + % layer['name']) + layers.remove(layer) + elif layer['type'] == LayerType.ERROR_NO_LAYER_CONF: + logger.error("%s: Don't have conf/layer.conf file."\ + % layer['name']) + layers.remove(layer) + else: + logger.info("%s: %s, %s" % (layer['name'], layer['type'], + layer['path'])) + if not layers: + return 1 + + shutil.copyfile(bblayersconf, bblayersconf + '.backup') + def cleanup_bblayers(signum, frame): + shutil.copyfile(bblayersconf + '.backup', bblayersconf) + os.unlink(bblayersconf + '.backup') + signal.signal(signal.SIGTERM, cleanup_bblayers) + signal.signal(signal.SIGINT, cleanup_bblayers) + + td = {} + results = collections.OrderedDict() + results_status = collections.OrderedDict() + + layers_tested = 0 + for layer in layers: + if layer['type'] == LayerType.ERROR_NO_LAYER_CONF or \ + layer['type'] == LayerType.ERROR_BSP_DISTRO: + continue + + logger.info('') + logger.info("Setting up for %s(%s), %s" % (layer['name'], layer['type'], + layer['path'])) + + shutil.copyfile(bblayersconf + '.backup', bblayersconf) + + missing_dependencies = not add_layer_dependencies(bblayersconf, layer, dep_layers, logger) + if not missing_dependencies: + for additional_layer in additional_layers: + if not add_layer_dependencies(bblayersconf, additional_layer, dep_layers, logger): + missing_dependencies = True + break + if not add_layer_dependencies(bblayersconf, layer, dep_layers, logger) or \ + any(map(lambda additional_layer: not add_layer_dependencies(bblayersconf, additional_layer, dep_layers, logger), + additional_layers)): + logger.info('Skipping %s due to missing dependencies.' % layer['name']) + results[layer['name']] = None + results_status[layer['name']] = 'SKIPPED (Missing dependencies)' + layers_tested = layers_tested + 1 + continue + + if any(map(lambda additional_layer: not add_layer(bblayersconf, additional_layer, dep_layers, logger), + additional_layers)): + logger.info('Skipping %s due to missing additional layers.' % layer['name']) + results[layer['name']] = None + results_status[layer['name']] = 'SKIPPED (Missing additional layers)' + layers_tested = layers_tested + 1 + continue + + logger.info('Getting initial bitbake variables ...') + td['bbvars'] = get_bb_vars() + logger.info('Getting initial signatures ...') + td['builddir'] = builddir + td['sigs'], td['tunetasks'] = get_signatures(td['builddir']) + td['machines'] = args.machines + + if not add_layer(bblayersconf, layer, dep_layers, logger): + logger.info('Skipping %s ???.' 
% layer['name']) + results[layer['name']] = None + results_status[layer['name']] = 'SKIPPED (Unknown)' + layers_tested = layers_tested + 1 + continue + + result = test_layer_compatibility(td, layer, args.test_software_layer_signatures) + results[layer['name']] = result + results_status[layer['name']] = 'PASS' if results[layer['name']].wasSuccessful() else 'FAIL' + layers_tested = layers_tested + 1 + + if layers_tested: + logger.info('') + logger.info('Summary of results:') + logger.info('') + for layer_name in results_status: + logger.info('%s ... %s' % (layer_name, results_status[layer_name])) + + cleanup_bblayers(None, None) + + return 0 + +if __name__ == '__main__': + try: + ret = main() + except Exception: + ret = 1 + import traceback + traceback.print_exc() + sys.exit(ret) -- cgit v1.2.3
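For reference, the bblayers.conf handling that yocto-compat-layer.py introduces reduces to a small, reusable pattern: back the file up once, restore it from the backup before each layer is tested, and register the restore as a SIGINT/SIGTERM handler so an interrupted run does not leave the build configuration modified. A standalone sketch under those assumptions (the helper name is invented):

    import os
    import shutil
    import signal

    def protect_bblayers(bblayersconf):
        """Back up bblayers.conf and restore it on SIGINT/SIGTERM or explicit cleanup."""
        backup = bblayersconf + '.backup'
        shutil.copyfile(bblayersconf, backup)

        def cleanup(signum, frame):
            shutil.copyfile(backup, bblayersconf)
            os.unlink(backup)

        signal.signal(signal.SIGTERM, cleanup)
        signal.signal(signal.SIGINT, cleanup)
        return cleanup   # call cleanup(None, None) after a normal run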